Compare commits

...

646 Commits

Author SHA1 Message Date
Sofia Rodrigues
b2791f1564 fix: body refactor 2026-03-20 23:51:33 -03:00
Sofia Rodrigues
c69f5d63dc Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-20 22:44:58 -03:00
Sofia Rodrigues
41470c1c0a Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-03-20 22:44:51 -03:00
Sofia Rodrigues
a5551e3291 refactor: to stream again 2026-03-20 18:27:50 -03:00
Sofia Rodrigues
96253d357f Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-19 16:28:53 -03:00
Sofia Rodrigues
db1d553245 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-03-19 16:28:35 -03:00
Sofia Rodrigues
286182df24 feat: getKnownSize and setKnownSize 2026-03-19 16:28:28 -03:00
Sofia Rodrigues
3eee136224 fix: server 2026-03-19 16:23:24 -03:00
Sofia Rodrigues
38f189dab2 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-19 15:56:00 -03:00
Sofia Rodrigues
55ce4dc2b0 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-03-19 15:52:50 -03:00
Sofia Rodrigues
bb90f72a40 fix: remove useless comments 2026-03-19 15:52:43 -03:00
Sofia Rodrigues
c485824d11 fix: tests 2026-03-19 15:50:08 -03:00
Sofia Rodrigues
afe1676e4a Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-03-19 15:48:10 -03:00
Sofia Rodrigues
64889857b2 Merge branch 'master' of https://github.com/leanprover/lean4 into sofia/async-http-uri 2026-03-19 15:42:35 -03:00
Sofia Rodrigues
0ac5d75bac fix: body 2026-03-17 19:36:36 -03:00
Sofia Rodrigues
e4f2f5717c refactor: architecture 2026-03-17 16:42:14 -03:00
Sofia Rodrigues
abbe36c0d2 refactor: architecture 2026-03-17 16:42:10 -03:00
Sofia Rodrigues
7ef652911e revert: uri 2026-03-17 12:18:20 -03:00
Sofia Rodrigues
9ef386d7c3 revert: uri changes 2026-03-17 12:15:43 -03:00
Sofia Rodrigues
b9b2e08181 Merge branch 'sofia/async-http-uri' of https://github.com/leanprover/lean4 into sofia/async-http-body 2026-03-17 12:13:57 -03:00
Sofia Rodrigues
33caa4e82f fix: test 2026-03-17 12:03:35 -03:00
Sofia Rodrigues
8c292c70ee Merge branch 'master' of https://github.com/leanprover/lean4 into sofia/async-http-uri 2026-03-17 10:42:30 -03:00
Sofia Rodrigues
4f4ee7c789 Merge branch 'master' of https://github.com/leanprover/lean4 into sofia/async-http-uri 2026-03-17 10:42:19 -03:00
Sofia Rodrigues
d7ea3a5984 fix: redundant end and namespace 2026-03-17 10:23:17 -03:00
Sofia Rodrigues
33c36c7466 fix: absolute-form parse and add helper functions 2026-03-17 10:17:00 -03:00
Sofia Rodrigues
7fbecca6f0 fix: test 2026-03-13 23:54:45 -03:00
Sofia Rodrigues
ae5a3d2c8b Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-13 23:51:15 -03:00
Sofia Rodrigues
1a270555ae fix: uri 2026-03-13 23:51:00 -03:00
Sofia Rodrigues
72702c3538 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-03-13 23:47:38 -03:00
Sofia Rodrigues
e86dbf3992 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-03-13 23:46:40 -03:00
Sofia Rodrigues
d71f0bdae7 fix: uri test 2026-03-13 23:46:35 -03:00
Sofia Rodrigues
6ae49d7639 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-03-13 23:13:32 -03:00
Sofia Rodrigues
232d173af3 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-03-13 23:13:19 -03:00
Sofia Rodrigues
3a4a309aed feat: split uri types 2026-03-13 23:13:05 -03:00
Sofia Rodrigues
9c87a9f044 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-12 15:52:36 -03:00
Sofia Rodrigues
34c9cafc12 fix: type 2026-03-12 15:52:29 -03:00
Sofia Rodrigues
014dd1d263 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-12 15:43:28 -03:00
Sofia Rodrigues
2a7a407875 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-03-12 15:43:06 -03:00
Sofia Rodrigues
e359001026 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-03-12 15:39:08 -03:00
Sofia Rodrigues
72244398dc fix: test 2026-03-12 15:38:59 -03:00
Sofia Rodrigues
c0e60b797c Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-03-12 15:33:48 -03:00
Sofia Rodrigues
400908a2f4 fix: port and fragment 2026-03-12 15:09:49 -03:00
Sofia Rodrigues
394c999c2a fix: uri 2026-03-12 15:03:49 -03:00
Sofia Rodrigues
b7e88dadeb Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-03-12 15:01:03 -03:00
Sofia Rodrigues
a39a0575a0 Merge branch 'master' into sofia/async-http-headers 2026-03-12 14:35:25 -03:00
Sofia Rodrigues
5815f33342 Merge branch 'sofia/fix-native-decide' into sofia/async-http-headers 2026-03-12 14:17:13 -03:00
Sofia Rodrigues
4fdf94ed3d refactor: simplify error 2026-03-10 15:58:40 -03:00
Sofia Rodrigues
66743e80a6 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-10 15:58:20 -03:00
Sofia Rodrigues
2d0d63f5d3 refactor: move logic 2026-03-10 15:58:07 -03:00
Sofia Rodrigues
10951fdb57 refactor: use closewitherror 2026-03-10 15:37:06 -03:00
Sofia Rodrigues
71d3967338 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-10 15:36:54 -03:00
Sofia Rodrigues
34dbcb2ca5 refactor: add close with error 2026-03-10 15:35:34 -03:00
Sofia Rodrigues
abb60e47c8 refactor: make smaller 2026-03-10 15:19:27 -03:00
Sofia Rodrigues
7a852aedb6 fix: squeeze simp and paren 2026-03-10 10:08:22 -03:00
Sofia Rodrigues
1554f57525 fix: import 2026-03-09 21:18:57 -03:00
Sofia Rodrigues
1fa01cdadb style: just removed variable 2026-03-09 20:35:53 -03:00
Sofia Rodrigues
758e5afb07 refactor: simplify 2026-03-09 20:35:53 -03:00
Sofia Rodrigues
11516bbf09 fix: import 2026-03-09 20:35:53 -03:00
Sofia Rodrigues
f76dca5bba fix: proof 2026-03-09 20:35:53 -03:00
Sofia Rodrigues
fe6ac812af fix: panic 2026-03-09 20:35:53 -03:00
Sofia Rodrigues
51a00843ea fix: remove usage of 2026-03-09 20:35:53 -03:00
Sofia Rodrigues
c8c702af8d Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-04 16:40:39 -03:00
Sofia Rodrigues
5b5b0fad70 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-03-04 16:40:33 -03:00
Sofia Rodrigues
eab144bbb2 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-03-04 16:40:27 -03:00
Sofia Rodrigues
cfe282f024 fix: port parse 2026-03-04 16:40:03 -03:00
Sofia Rodrigues
e7f06c8fa2 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-04 16:34:41 -03:00
Sofia Rodrigues
beb85dd6b0 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-03-04 16:34:27 -03:00
Sofia Rodrigues
debafcf0ef Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-03-04 16:29:43 -03:00
Sofia Rodrigues
2668f07808 fix: alpha and isdigit 2026-03-04 16:29:31 -03:00
Sofia Rodrigues
e3928b7b1a Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-04 16:26:56 -03:00
Sofia Rodrigues
2f3a97ed8a Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-03-04 16:26:48 -03:00
Sofia Rodrigues
0315d56389 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-03-04 16:26:38 -03:00
Sofia Rodrigues
b9e489cc8f Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-03-04 16:22:58 -03:00
Sofia Rodrigues
135b049080 Merge branch 'master' into sofia/async-http-headers 2026-03-04 13:22:45 -03:00
Sofia Rodrigues
4005bd027b fix: size 2026-03-04 12:04:53 -03:00
Sofia Rodrigues
fbf03e31f9 Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-03-04 10:21:46 -03:00
Sofia Rodrigues
39ab2b289c test: move test from encode to the uri tests 2026-03-04 10:20:25 -03:00
Sofia Rodrigues
6c6f9a5d83 refactor: change Char.isDigit and Char.isAlpha 2026-03-04 10:00:41 -03:00
Sofia Rodrigues
a7aea9a12d style: format 2026-03-04 09:50:30 -03:00
Sofia Rodrigues
9517b5bc2d fix: h1 informational 2026-03-04 09:27:56 -03:00
Sofia Rodrigues
71debba5a2 refactor: change agentName field 2026-03-03 14:24:46 -03:00
Sofia Rodrigues
a2c5f3c79e Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-03 14:22:47 -03:00
Sofia Rodrigues
fd9117fc12 fix: server name 2026-03-03 14:22:39 -03:00
Sofia Rodrigues
1b6357dc03 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-03 13:59:18 -03:00
Sofia Rodrigues
38cb50d629 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-03-03 13:59:13 -03:00
Sofia Rodrigues
74af777707 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-03-03 13:59:07 -03:00
Sofia Rodrigues
3dfb5e002a Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-03-03 13:59:01 -03:00
Sofia Rodrigues
3075e5091b Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-03-03 13:58:55 -03:00
Sofia Rodrigues
af12f7e9be revert: wrong comment 2026-03-03 13:58:08 -03:00
Sofia Rodrigues
a2f9f74740 refactor: remove h1.0 tests 2026-03-03 13:57:14 -03:00
Sofia Rodrigues
13fb8a5980 fix: h1 discovers the port to host match 2026-03-03 13:56:41 -03:00
Sofia Rodrigues
41d2984f25 refactor: head -> line 2026-03-03 13:56:09 -03:00
Sofia Rodrigues
f63639d42b Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-03 13:42:13 -03:00
Sofia Rodrigues
6df74943e0 fix: http and https host port match 2026-03-03 13:41:30 -03:00
Sofia Rodrigues
865b147a91 fix: http1.0 behavior 2026-03-03 13:30:28 -03:00
Sofia Rodrigues
c2f2b3cf32 fix: refactor changes 2026-03-03 12:50:31 -03:00
Sofia Rodrigues
4173713f94 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-03-03 12:45:10 -03:00
Sofia Rodrigues
53c9277209 test: rename head to line 2026-03-03 12:43:44 -03:00
Sofia Rodrigues
f14977f495 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-03-03 12:42:31 -03:00
Sofia Rodrigues
cfa5cf76fc Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-03-03 12:42:23 -03:00
Sofia Rodrigues
238925a681 style: change field names 2026-03-03 12:38:52 -03:00
Sofia Rodrigues
8cb236e9eb style: remove parenthesis 2026-03-03 12:35:29 -03:00
Sofia Rodrigues
3d039f8dba fix: bugs and code style 2026-03-03 12:34:12 -03:00
Sofia Rodrigues
203d5362d4 Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-03-03 12:12:38 -03:00
Sofia Rodrigues
6189d4c130 Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-03-03 12:10:31 -03:00
Sofia Rodrigues
58f14d34d7 chore: improve comments 2026-03-03 12:10:22 -03:00
Sofia Rodrigues
710eee2b49 refactor: head to line 2026-03-03 12:08:26 -03:00
Sofia Rodrigues
bd4af50d04 Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-03-03 12:03:55 -03:00
Sofia Rodrigues
8cb30347b6 fix: rename head to line 2026-03-03 12:03:47 -03:00
Sofia Rodrigues
d8e6b09b90 Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-03-03 11:58:30 -03:00
Sofia Rodrigues
df8abc2b3f fix: remove token let tchar 2026-03-03 11:58:21 -03:00
Sofia Rodrigues
5a852bdffd Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-03-03 11:57:32 -03:00
Sofia Rodrigues
11d3860c69 fix: remove char testBit 2026-03-03 11:57:20 -03:00
Sofia Rodrigues
5a253001b3 Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-03-03 11:39:12 -03:00
Sofia Rodrigues
083fec29c8 test: fix 2026-03-03 11:38:54 -03:00
Sofia Rodrigues
d41753a5f9 fix: suggestions 2026-03-03 10:11:25 -03:00
Sofia Rodrigues
a086a817e0 feat: v10 2026-03-03 09:26:45 -03:00
Sofia Rodrigues
e434a4d44b feat: v10 2026-03-03 09:26:27 -03:00
Sofia Rodrigues
7295389284 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-03 08:34:31 -03:00
Sofia Rodrigues
f8e1bc685a Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-03-03 08:34:23 -03:00
Sofia Rodrigues
5e1204e70d fix: test 2026-03-03 08:34:18 -03:00
Sofia Rodrigues
a00ec10261 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-03 08:31:51 -03:00
Sofia Rodrigues
cb9b182824 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-03-03 08:31:44 -03:00
Sofia Rodrigues
61d7c151da fix: close status for empty 2026-03-03 08:31:33 -03:00
Sofia Rodrigues
f9f1bdc77b chore: comments 2026-03-03 08:29:55 -03:00
Sofia Rodrigues
f3452c09a9 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-03 08:26:37 -03:00
Sofia Rodrigues
2bed27681a chore: comments 2026-03-03 01:06:23 -03:00
Sofia Rodrigues
5bb3b08698 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-03 00:55:17 -03:00
Sofia Rodrigues
82645d0953 docs: improve comments on h1 machine and errors 2026-03-03 00:55:12 -03:00
Sofia Rodrigues
2ab52fb864 fix: test 2026-03-03 00:46:06 -03:00
Sofia Rodrigues
1bba3082f0 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-03 00:37:22 -03:00
Sofia Rodrigues
7ed7a1b69d fix: rfc 2026-03-03 00:37:13 -03:00
Sofia Rodrigues
bd10d0193e Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-03-03 00:36:46 -03:00
Sofia Rodrigues
67822f4c42 refactor: remove bv 2026-03-03 00:36:38 -03:00
Sofia Rodrigues
e7f6fbb473 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-03 00:35:13 -03:00
Sofia Rodrigues
1cb3d56618 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-03-03 00:34:37 -03:00
Sofia Rodrigues
d99485dd79 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-03-03 00:34:30 -03:00
Sofia Rodrigues
f85b9b8d09 Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-03-03 00:34:21 -03:00
Sofia Rodrigues
5fb254b7ef Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-03-03 00:24:40 -03:00
Sofia Rodrigues
6e202e34a4 feat: all char predicates 2026-03-03 00:24:16 -03:00
Sofia Rodrigues
843c814778 Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-03-03 00:18:49 -03:00
Sofia Rodrigues
c7d4d8d799 Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-03-03 00:17:31 -03:00
Sofia Rodrigues
91c60f801c fix: restore treeMap tests from master 2026-03-03 00:16:26 -03:00
Sofia Rodrigues
ae30f55728 Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-03-02 20:52:24 -03:00
Sofia Rodrigues
63b0cc17c4 fix: char predicates 2026-03-02 20:51:55 -03:00
Sofia Rodrigues
c9a5111dcc feat: add client states 2026-03-02 20:02:00 -03:00
Sofia Rodrigues
8e12a4181c Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-03-02 19:48:01 -03:00
Sofia Rodrigues
33393a7c00 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-03-02 19:47:55 -03:00
Sofia Rodrigues
7434b97511 Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-03-02 19:47:48 -03:00
Sofia Rodrigues
29c8f8cfa1 Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-03-02 19:44:03 -03:00
Sofia Rodrigues
36b2d99e3d fix: encode 2026-03-02 19:43:57 -03:00
Sofia Rodrigues
4b8a48c817 fix: method parsing 2026-03-02 19:36:08 -03:00
Sofia Rodrigues
e0862a0220 fix: tests 2026-03-02 19:35:36 -03:00
Sofia Rodrigues
10fc7da3fa Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-02 12:26:17 -03:00
Sofia Rodrigues
a1f535d9d8 fix: remove treemap 2026-03-02 12:26:13 -03:00
Sofia Rodrigues
993c87dd80 fix: methods 2026-03-02 12:26:00 -03:00
Sofia Rodrigues
742e3080c9 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-03-02 12:02:25 -03:00
Sofia Rodrigues
3de1d21c86 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-03-02 12:01:25 -03:00
Sofia Rodrigues
83a0756b05 fix: remove treemap 2026-03-02 12:01:18 -03:00
Sofia Rodrigues
b8f2cd94aa Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-03-02 12:00:58 -03:00
Sofia Rodrigues
64ff045559 fix: remove treemap 2026-03-02 12:00:55 -03:00
Sofia Rodrigues
109ab8eb68 Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-03-02 11:59:21 -03:00
Sofia Rodrigues
bf09ea8ff5 feat: remove tests temporarily 2026-03-02 11:56:45 -03:00
Sofia Rodrigues
7ce9fe9f97 feat: remove tests temporarily 2026-03-02 11:54:16 -03:00
Sofia Rodrigues
aff9e0c459 refactor: rust-types-rs like method enum with IANA specification 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
a74df33feb fix: method 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
dd63b614eb fix: comments 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
515e6e20c0 fix: test 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
cc45fc9cc2 fix: dots 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
bc9c18f0b0 fix: small changes 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
8ee21a7176 fix: comment 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
92aa9f2b8a fix: RFC checks and small improvements 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
c2243a0ea5 fix: tests 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
efbd23a6d9 fix: format 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
26440fcf6a fix: extension values 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
ac4c5451e4 fix: data char 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
c94c5cb7e4 fix: comments 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
78ca6edc99 feat: specialize quote 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
d92dc22df3 fix: test 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
48ab74f044 fix: status code 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
da68a63902 feat: reason phrase in custom status code 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
db99fd2d7d feat: ignore reasonphrase 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
a61712c962 feat: validation in reasonPhrase 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
ea36555588 fix: reasonPhrase 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
b02bc4d6d2 feat: reason phrase 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
c836fe8723 fix: typos and compareName 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
8068ed317c fix: typos 2026-03-02 11:53:55 -03:00
Sofia Rodrigues
0bd44ab745 fix: comment 2026-03-02 11:53:55 -03:00
Sofia Rodrigues
172d12c75c refactor: move trailers 2026-03-02 11:53:55 -03:00
Sofia Rodrigues
6b6b9fffff feat: add extension handling of quotes and ExtensionName 2026-03-02 11:53:55 -03:00
Sofia Rodrigues
f3fa5c8242 fix: chunked 2026-03-02 11:53:55 -03:00
Sofia Rodrigues
b0c5667f06 fix: import 2026-03-02 11:53:55 -03:00
Sofia Rodrigues
2d262c9755 fix: interpolation 2026-03-02 11:53:55 -03:00
Sofia Rodrigues
571898bf63 fix: extensions 2026-03-02 11:53:55 -03:00
Sofia Rodrigues
0570277a2e feat: add extensions 2026-03-02 11:53:55 -03:00
Sofia Rodrigues
557709d9bb fix: apply suggestions 2026-03-02 11:53:55 -03:00
Sofia Rodrigues
0229508ca7 refactor: remove headers 2026-03-02 11:53:55 -03:00
Sofia Rodrigues
ace10ee42b fix: default size 2026-03-02 11:53:55 -03:00
Sofia Rodrigues
4e36dcc98f fix: apply suggestions
Co-authored-by: Rob23oba <152706811+Rob23oba@users.noreply.github.com>
2026-03-02 11:53:55 -03:00
Sofia Rodrigues
a93ea184fe fix: status and chunkedbuffer 2026-03-02 11:53:55 -03:00
Sofia Rodrigues
c309a3c07e feat: basic headers structure to more structured approach 2026-03-02 11:53:55 -03:00
Sofia Rodrigues
30641c617f feat: data components 2026-03-02 11:53:55 -03:00
Sofia Rodrigues
37fcb2ce55 refactor: comments 2026-03-02 09:22:43 -03:00
Sofia Rodrigues
97cd66afde fix: comments 2026-03-01 16:45:51 -03:00
Sofia Rodrigues
6dbb6b8d0e Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-28 15:41:39 -03:00
Sofia Rodrigues
4306782b93 Merge branch 'sofia/async-http-uri' into sofia/async-http-server 2026-02-28 15:41:35 -03:00
Sofia Rodrigues
6935306439 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-28 15:41:25 -03:00
Sofia Rodrigues
1aa23cd92b Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-28 15:41:18 -03:00
Sofia Rodrigues
0bb4ba72d4 Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-02-28 15:41:11 -03:00
Sofia Rodrigues
57a4d9ad4b Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-02-28 15:41:04 -03:00
Sofia Rodrigues
bfc6617c12 fix: method 2026-02-28 15:40:55 -03:00
Sofia Rodrigues
c1b5b64797 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-28 12:41:44 -03:00
Sofia Rodrigues
9b563220b2 fix: strict method 2026-02-28 12:41:39 -03:00
Sofia Rodrigues
0eb4a6e8c6 fix: timeout and config 2026-02-28 12:40:46 -03:00
Sofia Rodrigues
4614def4cd Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-27 21:14:21 -03:00
Sofia Rodrigues
c97dfe585a feat: headers rfc refs 2026-02-27 21:14:09 -03:00
Sofia Rodrigues
74ecbca430 feat: tests 2026-02-27 21:12:33 -03:00
Sofia Rodrigues
6fa6d2e3f7 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-27 14:58:31 -03:00
Sofia Rodrigues
05c4d9202a fix: comments 2026-02-27 14:58:24 -03:00
Sofia Rodrigues
3a4e9f6eca Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-27 14:52:23 -03:00
Sofia Rodrigues
aa09ab0cd9 fix: function name 2026-02-27 14:52:16 -03:00
Sofia Rodrigues
8affe05767 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-27 14:31:17 -03:00
Sofia Rodrigues
3aa02eede3 fix: comments 2026-02-27 14:31:09 -03:00
Sofia Rodrigues
c86f926d1b Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-02-27 14:22:38 -03:00
Sofia Rodrigues
ff4419357c Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-02-27 14:20:02 -03:00
Sofia Rodrigues
3c131da050 fix: comments 2026-02-27 14:19:32 -03:00
Sofia Rodrigues
5fd94a1e1d Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-27 13:24:18 -03:00
Sofia Rodrigues
fcc4185bb2 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-27 13:24:08 -03:00
Sofia Rodrigues
bae251d15a Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-27 13:23:58 -03:00
Sofia Rodrigues
6edc0c7427 Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-02-27 13:22:51 -03:00
Sofia Rodrigues
563189fec9 Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-02-27 13:04:51 -03:00
Sofia Rodrigues
25d7db2e62 Merge branch 'master' of https://github.com/leanprover/lean4 into sofia/async-http-data 2026-02-27 10:08:06 -03:00
Sofia Rodrigues
e569c9ef64 feat: remove inaccurate test 2026-02-27 09:04:24 -03:00
Sofia Rodrigues
c467175336 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-26 18:57:21 -03:00
Sofia Rodrigues
7562c103dd Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-26 18:57:17 -03:00
Sofia Rodrigues
1be8c11cee fix: empty chunk 2026-02-26 18:57:05 -03:00
Sofia Rodrigues
ea6c1e65f6 fix: small changes 2026-02-26 18:56:48 -03:00
Sofia Rodrigues
67300c640c fix: tests 2026-02-26 18:56:16 -03:00
Sofia Rodrigues
625e1c9a32 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-26 17:33:41 -03:00
Sofia Rodrigues
b09946684b Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-26 17:09:33 -03:00
Sofia Rodrigues
beedfa1e4e fix: small comments fix and parameters 2026-02-26 16:49:15 -03:00
Sofia Rodrigues
f68c2420e7 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-26 16:17:48 -03:00
Sofia Rodrigues
cdfd24171a fix: test 2026-02-26 16:15:03 -03:00
Sofia Rodrigues
718e549de3 Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-02-26 16:10:56 -03:00
Sofia Rodrigues
81f76a24d8 fix: lower case scheme 2026-02-26 16:10:46 -03:00
Sofia Rodrigues
292f297006 feat: small comments 2026-02-26 15:41:20 -03:00
Sofia Rodrigues
b7be57272a feat: forIn 2026-02-26 15:24:17 -03:00
Sofia Rodrigues
a0dc1dbbc0 fix: test 2026-02-26 15:18:32 -03:00
Sofia Rodrigues
2e604884dd Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-02-26 15:16:35 -03:00
Sofia Rodrigues
2049542833 feat: order 2026-02-26 15:16:23 -03:00
Sofia Rodrigues
caf19b8458 feat: order 2026-02-26 15:13:56 -03:00
Sofia Rodrigues
c5180b2dfc Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-02-26 01:23:46 -03:00
Sofia Rodrigues
91c5b717f0 Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-02-26 01:23:24 -03:00
Sofia Rodrigues
cb6f540efb fix: test 2026-02-26 01:22:46 -03:00
Sofia Rodrigues
ec833b52ee Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-02-26 01:01:46 -03:00
Sofia Rodrigues
ba36c1dee2 fix: comments 2026-02-26 01:01:41 -03:00
Sofia Rodrigues
5cb510cdf7 fix: precedence 2026-02-26 01:01:24 -03:00
Sofia Rodrigues
a72de461cd Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-02-25 22:34:45 -03:00
Sofia Rodrigues
228f0d24a7 fix: remove unused headers 2026-02-25 22:34:41 -03:00
Sofia Rodrigues
73cf41d7e5 fix: comments 2026-02-25 22:30:43 -03:00
Sofia Rodrigues
819d4c6c1f fix: uri comments 2026-02-25 22:13:19 -03:00
Sofia Rodrigues
4de3e40349 fix: builder 2026-02-25 22:00:11 -03:00
Sofia Rodrigues
03f1d47462 Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-02-25 21:59:08 -03:00
Sofia Rodrigues
a88908572c Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-02-25 21:54:21 -03:00
Sofia Rodrigues
55d357dbb4 fix: dots 2026-02-25 21:54:18 -03:00
Sofia Rodrigues
49d00ae056 fix: comments and small formatting errors 2026-02-25 21:52:12 -03:00
Sofia Rodrigues
e9eed5cbe4 Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-02-25 20:57:02 -03:00
Sofia Rodrigues
2652ae0fb8 fix: small changes 2026-02-25 20:56:19 -03:00
Sofia Rodrigues
3f48ef4af9 fix: comment 2026-02-25 20:45:57 -03:00
Sofia Rodrigues
a9de308aea fix: RFC checks and small improvements 2026-02-25 20:45:22 -03:00
Sofia Rodrigues
405d03aac9 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-25 17:53:29 -03:00
Sofia Rodrigues
d5a819f30f fix: config names 2026-02-25 17:53:25 -03:00
Sofia Rodrigues
81c3e5034a fix: pull 2026-02-25 17:52:59 -03:00
Sofia Rodrigues
c971d3f490 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-25 15:06:39 -03:00
Sofia Rodrigues
26bcd2d065 feat: avoid forbidden trailer headers 2026-02-25 15:06:32 -03:00
Sofia Rodrigues
9c1054adca fix: slow attack 2026-02-25 15:06:19 -03:00
Sofia Rodrigues
cba7bfbbe7 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-25 14:15:51 -03:00
Sofia Rodrigues
2990b41d44 feat: improve incrementality 2026-02-25 13:43:47 -03:00
Sofia Rodrigues
f543206d4a fix: test 2026-02-25 08:53:15 -03:00
Sofia Rodrigues
1cd2cba130 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-25 00:53:38 -03:00
Sofia Rodrigues
a009ad2a68 fix: transfer 2026-02-25 00:53:04 -03:00
Sofia Rodrigues
6a19fc5a21 fix: host 2026-02-25 00:51:41 -03:00
Sofia Rodrigues
91275b3747 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-24 23:24:28 -03:00
Sofia Rodrigues
df80ac720a fix: semaphore 2026-02-24 23:24:16 -03:00
Sofia Rodrigues
6797ca9345 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-24 20:16:10 -03:00
Sofia Rodrigues
c266649454 fix: sleep 2026-02-24 20:16:05 -03:00
Sofia Rodrigues
7160b92bfb fix: semaphore 2026-02-24 20:15:56 -03:00
Sofia Rodrigues
6d1a0ecc8a fix: semaphore 2026-02-24 20:15:22 -03:00
Sofia Rodrigues
fd96be3870 feat: rfc compliance with some features 2026-02-24 19:09:33 -03:00
Sofia Rodrigues
3a3620e8aa fix: tests 2026-02-24 14:48:08 -03:00
Sofia Rodrigues
11fd4c8244 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-24 14:43:49 -03:00
Sofia Rodrigues
2731e1d942 fix: headers 2026-02-24 14:43:44 -03:00
Sofia Rodrigues
0ef3c83ed8 feat: ignore prior crlf 2026-02-24 14:39:02 -03:00
Sofia Rodrigues
edad8a090b Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-24 14:31:54 -03:00
Sofia Rodrigues
74dc55152f fix: test 2026-02-24 14:31:29 -03:00
Sofia Rodrigues
bf2471b8f1 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-24 14:28:32 -03:00
Sofia Rodrigues
21821ef062 fix: encoding duplication 2026-02-24 14:28:14 -03:00
Sofia Rodrigues
5ba3a6d4fc Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-02-24 10:47:43 -03:00
Sofia Rodrigues
8492e58a82 fix: comments 2026-02-24 10:38:11 -03:00
Sofia Rodrigues
e65e20e1cb feat: field content 2026-02-24 09:57:22 -03:00
Sofia Rodrigues
de7c029c9f feat: field content 2026-02-24 09:56:52 -03:00
Sofia Rodrigues
89c992a3c9 Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-02-24 09:26:59 -03:00
Sofia Rodrigues
0b76c3de69 fix: tests 2026-02-24 09:20:52 -03:00
Sofia Rodrigues
ff99979855 Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-02-24 09:09:39 -03:00
Sofia Rodrigues
9ddbb59fe1 fix: format 2026-02-24 09:09:35 -03:00
Sofia Rodrigues
36f87f98f8 fix: char 2026-02-24 09:09:14 -03:00
Sofia Rodrigues
5914fe3a4a Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-02-24 08:54:43 -03:00
Sofia Rodrigues
29f651a89c fix: extension values 2026-02-24 08:53:16 -03:00
Sofia Rodrigues
2e1bdd922e 2026-02-24 08:35:36 -03:00
Sofia Rodrigues
ab5d50cbc3 fix: data char 2026-02-23 22:18:15 -03:00
Sofia Rodrigues
7902db17c2 fix: comments 2026-02-21 01:25:05 -03:00
Sofia Rodrigues
5626ee369c feat: specialize quote 2026-02-21 00:53:15 -03:00
Sofia Rodrigues
682e2b99f3 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-20 22:30:31 -03:00
Sofia Rodrigues
6ed32edec0 feat: reorganize fix EOF 2026-02-20 22:30:24 -03:00
Sofia Rodrigues
662bed5a28 tests: add 2026-02-20 22:28:45 -03:00
Sofia Rodrigues
d0e884dc54 fix: config 2026-02-20 18:28:42 -03:00
Sofia Rodrigues
abf3305397 fix: move test from 200 to 400 2026-02-20 18:27:51 -03:00
Sofia Rodrigues
a6f42abe62 feat: remove lenience 2026-02-20 18:26:57 -03:00
Sofia Rodrigues
7a50344af4 feat: add header max config 2026-02-20 18:24:17 -03:00
Sofia Rodrigues
c7bcd4fbed Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-20 18:22:43 -03:00
Sofia Rodrigues
d367a9fe80 fix: enforce crlf and header bytes 2026-02-20 18:22:34 -03:00
Sofia Rodrigues
0e0578eacb Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-20 18:03:08 -03:00
Sofia Rodrigues
663eec9dc3 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-20 18:03:03 -03:00
Sofia Rodrigues
e62f8d608d Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-20 18:02:56 -03:00
Sofia Rodrigues
0fb57a405f fix: wrong comment 2026-02-20 18:02:46 -03:00
Sofia Rodrigues
ce009e2dca fix: remove double crlf 2026-02-20 18:02:28 -03:00
Sofia Rodrigues
c9cf60f173 fix: timeout on slow connections 2026-02-20 18:02:14 -03:00
Sofia Rodrigues
5263c32ea4 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-20 17:17:50 -03:00
Sofia Rodrigues
89191367b7 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-20 17:11:33 -03:00
Sofia Rodrigues
999ce40ca6 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-20 17:11:27 -03:00
Sofia Rodrigues
bfa18ef30c fix: uri builder 2026-02-20 17:11:05 -03:00
Sofia Rodrigues
a850879adf fix: reason phrase 2026-02-20 17:06:06 -03:00
Sofia Rodrigues
34c5c70ec6 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-20 17:00:21 -03:00
Sofia Rodrigues
81492aa5b2 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-20 17:00:13 -03:00
Sofia Rodrigues
e0efb8aec9 fix: scheme parser 2026-02-20 16:59:24 -03:00
Sofia Rodrigues
530f6865f9 Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-02-20 16:40:06 -03:00
Sofia Rodrigues
f97d86cf4b feat: trim headers 2026-02-20 16:40:00 -03:00
Sofia Rodrigues
781b9f561e Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-02-20 16:30:28 -03:00
Sofia Rodrigues
a9ac33d994 Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-02-20 16:24:06 -03:00
Sofia Rodrigues
c457a98d6a fix: test 2026-02-20 16:23:24 -03:00
Sofia Rodrigues
8d8439bf0b fix: status code 2026-02-20 16:19:55 -03:00
Sofia Rodrigues
7cf419491a Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-20 16:11:33 -03:00
Sofia Rodrigues
4cbdb39211 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-20 16:11:27 -03:00
Sofia Rodrigues
54ac93fb32 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-20 16:11:09 -03:00
Sofia Rodrigues
eddb5e139d Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-02-20 16:10:13 -03:00
Sofia Rodrigues
5a53207723 feat: remove direct access 2026-02-20 16:10:06 -03:00
Sofia Rodrigues
0d3f6e5481 Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-02-20 16:09:16 -03:00
Sofia Rodrigues
96a017262c feat: reason phrase in custom status code 2026-02-20 16:08:26 -03:00
Sofia Rodrigues
04c73b64a5 test: fuzz 2026-02-20 16:05:09 -03:00
Sofia Rodrigues
02adf1fae0 fix: dedup 2026-02-20 16:04:56 -03:00
Sofia Rodrigues
9291e925ff fix: commment 2026-02-20 15:33:14 -03:00
Sofia Rodrigues
1d0e26e494 tests: dedu 2026-02-20 15:33:07 -03:00
Sofia Rodrigues
5528f97c8f fix: dedup 2026-02-20 14:12:00 -03:00
Sofia Rodrigues
32d42b52e9 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-20 14:07:10 -03:00
Sofia Rodrigues
f1ed971f26 fix: omit body 2026-02-20 14:06:15 -03:00
Sofia Rodrigues
b5610a43db feat: test 2026-02-20 14:04:39 -03:00
Sofia Rodrigues
a182a6652e feat: omit body 2026-02-20 14:04:22 -03:00
Sofia Rodrigues
cf51a32ffb fix: space sequence 2026-02-20 14:04:03 -03:00
Sofia Rodrigues
11cc11bc2f fix: test 2026-02-20 13:42:54 -03:00
Sofia Rodrigues
8cef903224 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-20 13:40:46 -03:00
Sofia Rodrigues
f5492db7fa fix: host validation and rreasonphrase 2026-02-20 13:40:39 -03:00
Sofia Rodrigues
cf603cdc7c Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-20 13:05:01 -03:00
Sofia Rodrigues
d07e1a6341 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-20 13:04:56 -03:00
Sofia Rodrigues
549e16f069 feat: add reader 2026-02-20 13:04:51 -03:00
Sofia Rodrigues
2e1406b683 fix: connection handler 2026-02-20 13:04:41 -03:00
Sofia Rodrigues
bfdfabd4a5 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-20 12:33:37 -03:00
Sofia Rodrigues
004c076236 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-20 12:33:30 -03:00
Sofia Rodrigues
93a6ecbbbc feat: reader features to write only things 2026-02-20 12:33:23 -03:00
Sofia Rodrigues
3c877f9604 feat: body 2026-02-20 12:32:51 -03:00
Sofia Rodrigues
d317c0208b feat: body 2026-02-20 12:32:40 -03:00
Sofia Rodrigues
4716725e81 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-20 12:01:57 -03:00
Sofia Rodrigues
4f15fe36e0 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-20 12:01:51 -03:00
Sofia Rodrigues
8bcc838f47 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-20 12:01:46 -03:00
Sofia Rodrigues
462e3d02dd fix: import coe 2026-02-20 12:01:21 -03:00
Sofia Rodrigues
541f9b2dc9 fix: rendezvouz stream 2026-02-20 12:01:02 -03:00
Sofia Rodrigues
86107e2b5a feat: discard reason-phrase 2026-02-20 11:49:30 -03:00
Sofia Rodrigues
5cc0026f3d Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-20 11:46:15 -03:00
Sofia Rodrigues
c5db47444e fix: bodyt ests 2026-02-20 11:46:08 -03:00
Sofia Rodrigues
fffc2b5633 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-20 11:40:58 -03:00
Sofia Rodrigues
637f260529 feat: discard reason-phrase 2026-02-20 11:40:52 -03:00
Sofia Rodrigues
469f466832 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-20 11:39:17 -03:00
Sofia Rodrigues
ecb7480b37 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-20 11:37:42 -03:00
Sofia Rodrigues
42800e4037 feat: body type class 2026-02-20 11:37:35 -03:00
Sofia Rodrigues
b52bbc9ae4 fix: rfc expected 2026-02-19 17:14:53 -03:00
Sofia Rodrigues
eaa1390a36 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-19 14:21:41 -03:00
Sofia Rodrigues
b38f01ef51 feat: ignore reasonphrase 2026-02-19 14:01:21 -03:00
Sofia Rodrigues
73bf2b5e04 Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-02-19 13:36:29 -03:00
Sofia Rodrigues
c8c92fcf92 Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-02-19 13:13:28 -03:00
Sofia Rodrigues
cf6b159da5 feat: validation in reasonPhrase 2026-02-19 13:13:22 -03:00
Sofia Rodrigues
330e1c5340 fix: config 2026-02-19 11:45:49 -03:00
Sofia Rodrigues
b40bc2e89c Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-19 11:39:36 -03:00
Sofia Rodrigues
e8347e9e9b fix: comments 2026-02-19 11:25:44 -03:00
Sofia Rodrigues
d051b967ed fix: method limit and comments 2026-02-19 11:18:18 -03:00
Sofia Rodrigues
cf4776ef92 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-19 10:51:52 -03:00
Sofia Rodrigues
b1ff312ef5 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-19 10:51:39 -03:00
Sofia Rodrigues
319214cfb3 feat: config URI 2026-02-19 10:51:32 -03:00
Sofia Rodrigues
e75049b604 feat: remove identity 2026-02-19 10:19:06 -03:00
Sofia Rodrigues
836cdf47a5 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-18 21:19:51 -03:00
Sofia Rodrigues
01f9c257e8 fix: comments 2026-02-18 21:19:29 -03:00
Sofia Rodrigues
3d07f4fd56 fix: comments 2026-02-18 21:19:08 -03:00
Sofia Rodrigues
7dc97a02fd fix: comments 2026-02-18 21:18:51 -03:00
Sofia Rodrigues
afd2f12242 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-18 20:56:42 -03:00
Sofia Rodrigues
5faf0572f6 feat: improve manyItems 2026-02-18 20:56:35 -03:00
Sofia Rodrigues
8d349ccbaa Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-18 19:46:46 -03:00
Sofia Rodrigues
9c35a91e0f feat: better validation in parser 2026-02-18 19:43:57 -03:00
Sofia Rodrigues
2da4e1b572 feat: connection lmit 2026-02-18 19:25:07 -03:00
Sofia Rodrigues
5368b134bb fix: test 2026-02-18 14:14:58 -03:00
Sofia Rodrigues
d1f090ee98 fix: test 2026-02-18 13:48:45 -03:00
Sofia Rodrigues
f311c9594f feat: unsuppoted method 2026-02-18 12:22:50 -03:00
Sofia Rodrigues
c6a3ab0a77 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-18 12:22:26 -03:00
Sofia Rodrigues
ba25ab3490 fix: method validation 2026-02-18 12:22:12 -03:00
Sofia Rodrigues
1095ebbeed fix: config 2026-02-18 11:49:45 -03:00
Sofia Rodrigues
299b15c8e9 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-18 11:47:39 -03:00
Sofia Rodrigues
091cb00ab9 feat: add max bytes 2026-02-18 11:47:33 -03:00
Sofia Rodrigues
2b408d2699 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-18 11:18:34 -03:00
Sofia Rodrigues
702efcacca refactor: remove duplication 2026-02-18 11:18:25 -03:00
Sofia Rodrigues
98ba01dc49 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-18 11:04:55 -03:00
Sofia Rodrigues
e1225efa03 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-18 11:03:40 -03:00
Sofia Rodrigues
37c7b1e22c feat: reuse char 2026-02-18 11:03:35 -03:00
Sofia Rodrigues
eea8e06d6b Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-18 10:57:12 -03:00
Sofia Rodrigues
c4234961bc Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-18 10:56:29 -03:00
Sofia Rodrigues
42cfda23f3 Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-02-18 10:56:23 -03:00
Sofia Rodrigues
78316b9ade fix: isToken 2026-02-18 10:55:55 -03:00
Sofia Rodrigues
dd09289d2b fix: duplication 2026-02-18 10:36:17 -03:00
Sofia Rodrigues
10a66e9f9a fix: uri duplication checkers 2026-02-18 10:35:04 -03:00
Sofia Rodrigues
ad4719399d fix: trailing peridos 2026-02-18 10:11:47 -03:00
Sofia Rodrigues
892ab921b7 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-18 09:46:00 -03:00
Sofia Rodrigues
6551c32f6b Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-18 09:45:50 -03:00
Sofia Rodrigues
b8eac648ab Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-18 09:45:13 -03:00
Sofia Rodrigues
53fb1a25b3 Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-02-18 09:45:02 -03:00
Sofia Rodrigues
3fdaf2df0c Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-02-18 09:44:01 -03:00
Sofia Rodrigues
4ba722f51c fix: reasonPhrase 2026-02-18 09:40:25 -03:00
Sofia Rodrigues
42b726c376 fix: misleading comment 2026-02-18 09:26:59 -03:00
Sofia Rodrigues
8bec5f4b98 fix: comments 2026-02-18 09:25:49 -03:00
Sofia Rodrigues
9a8bc523c5 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-18 08:53:23 -03:00
Sofia Rodrigues
59253973ce Merge branch 'sofia/async-http-body' into sofia/async-http-server 2026-02-18 08:53:19 -03:00
Sofia Rodrigues
205149a884 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-18 08:53:10 -03:00
Sofia Rodrigues
a89a69e7da fix: queue test 2026-02-18 08:52:59 -03:00
Sofia Rodrigues
9bb429d4e7 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-18 08:43:58 -03:00
Sofia Rodrigues
542a3a4e71 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-18 08:43:49 -03:00
Sofia Rodrigues
3646590506 feat: queue 2026-02-18 08:43:33 -03:00
Sofia Rodrigues
cf87c9594c feat: failure gate 2026-02-18 08:37:59 -03:00
Sofia Rodrigues
71420f6c81 feat: tests 2026-02-18 08:37:46 -03:00
Sofia Rodrigues
b6fdd8adc3 feat: failure gate 2026-02-18 08:37:13 -03:00
Sofia Rodrigues
45747bd2ef Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-17 23:46:33 -03:00
Sofia Rodrigues
69c75c1b56 fix: incomplete fixed chung 2026-02-17 23:46:28 -03:00
Sofia Rodrigues
bed5d8567c feat: incomplete chunks 2026-02-17 23:42:53 -03:00
Sofia Rodrigues
0c5d25a763 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-17 23:37:36 -03:00
Sofia Rodrigues
c324ee8347 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-17 23:37:29 -03:00
Sofia Rodrigues
193bbddb4e feat: incomlpete chunks 2026-02-17 23:37:16 -03:00
Sofia Rodrigues
6821bb82db feat: close after generate 2026-02-17 21:25:06 -03:00
Sofia Rodrigues
1cbd0569eb Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-17 21:09:13 -03:00
Sofia Rodrigues
14dbb661f8 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-17 21:08:50 -03:00
Sofia Rodrigues
ea5a986693 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-17 21:08:38 -03:00
Sofia Rodrigues
37ec94e2f0 Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-02-17 21:08:30 -03:00
Sofia Rodrigues
157e3b032d Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-02-17 21:08:21 -03:00
Sofia Rodrigues
910c71954e feat: reason phrase 2026-02-17 21:07:48 -03:00
Sofia Rodrigues
27107066e3 fix: small issues with framing 2026-02-17 21:03:00 -03:00
Sofia Rodrigues
fd1843e120 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-17 12:12:11 -03:00
Sofia Rodrigues
dd2ab67d2b Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-17 11:51:41 -03:00
Sofia Rodrigues
9dd5634759 Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-02-17 11:47:57 -03:00
Sofia Rodrigues
a521ba3abd fix: typos and connection token 2026-02-17 11:47:46 -03:00
Sofia Rodrigues
6b0f05d075 Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-02-17 11:12:22 -03:00
Sofia Rodrigues
61d6c02ecd fix: typos and compareName 2026-02-17 11:12:13 -03:00
Sofia Rodrigues
b7d4e12fbf Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-17 03:02:56 -03:00
Sofia Rodrigues
dc6d015870 fix: get-size 2026-02-17 03:02:53 -03:00
Sofia Rodrigues
07a05a3995 fix: 100-expect 2026-02-17 03:02:44 -03:00
Sofia Rodrigues
182625774d Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-17 03:00:43 -03:00
Sofia Rodrigues
b4684a2406 fix: 100-expect case, quoted parser and te 2026-02-17 03:00:14 -03:00
Sofia Rodrigues
ecc0ec05bd fix: typos 2026-02-17 02:55:21 -03:00
Sofia Rodrigues
5193b739ca fix: typos 2026-02-17 02:54:19 -03:00
Sofia Rodrigues
70c0a902f4 fix: api 2026-02-17 02:05:42 -03:00
Sofia Rodrigues
7f29fd0fcd Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-17 02:01:27 -03:00
Sofia Rodrigues
239536f1d8 fix: transfer-encoding gzip 2026-02-17 02:00:11 -03:00
Sofia Rodrigues
71be391dd3 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-17 01:55:33 -03:00
Sofia Rodrigues
df738acaa4 fix: direction 2026-02-17 01:55:26 -03:00
Sofia Rodrigues
8ed56677e5 feat: stale 2026-02-17 01:26:21 -03:00
Sofia Rodrigues
60d0b7c97a feat: server 2026-02-17 01:25:53 -03:00
Sofia Rodrigues
17a2c9e0c2 feat: server 2026-02-17 01:25:42 -03:00
Sofia Rodrigues
7ee37564d3 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-16 23:56:03 -03:00
Sofia Rodrigues
2ee7513f80 feat: pull-based body 2026-02-16 23:55:58 -03:00
Sofia Rodrigues
7d6505d296 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-16 22:33:45 -03:00
Sofia Rodrigues
8722e50897 feat: pull-based body 2026-02-16 22:33:35 -03:00
Sofia Rodrigues
fa8d76fa37 fix: frameCancellation with error 2026-02-16 01:22:00 -03:00
Sofia Rodrigues
c50fca363a fix: comments 2026-02-16 01:12:49 -03:00
Sofia Rodrigues
e8ff308154 fix: handler 2026-02-16 01:10:19 -03:00
Sofia Rodrigues
cdcb9db4ba Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-14 06:14:38 -03:00
Sofia Rodrigues
a8e405ac5d fix: knownsize 2026-02-14 06:13:39 -03:00
Sofia Rodrigues
b6705cceb2 fix: server 2026-02-14 05:59:54 -03:00
Sofia Rodrigues
af58b4f286 Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-14 05:51:28 -03:00
Sofia Rodrigues
02dc048ad2 feat: improve h1 2026-02-14 05:51:03 -03:00
Sofia Rodrigues
a981d91552 Merge branch 'sofia/async-http-body' of https://github.com/leanprover/lean4 into sofia/async-http-h1 2026-02-13 11:58:23 -03:00
Sofia Rodrigues
96ffa3e354 Merge branch 'sofia/async-http-uri' of https://github.com/leanprover/lean4 into sofia/async-http-body 2026-02-13 11:56:52 -03:00
Sofia Rodrigues
1c564ed5f7 fix: comment 2026-02-13 11:56:44 -03:00
Sofia Rodrigues
9dd5f62e0e Merge branch 'sofia/async-http-uri' of https://github.com/leanprover/lean4 into sofia/async-http-body 2026-02-13 11:53:16 -03:00
Sofia Rodrigues
c4737fb66a fix: parse ab-empty 2026-02-13 11:52:53 -03:00
Sofia Rodrigues
43d3b2df91 merge: 'sofia/async-http-headers' 2026-02-13 11:43:55 -03:00
Sofia Rodrigues
87c5488c20 merge: 'sofia/async-http-headers' 2026-02-13 11:39:09 -03:00
Sofia Rodrigues
e0d5596e63 fix: typo 2026-02-13 11:37:08 -03:00
Sofia Rodrigues
1f2671db3d merge: branch 'sofia/async-http-data' 2026-02-13 11:35:33 -03:00
Sofia Rodrigues
940ab9bdb5 fix: typos 2026-02-13 11:33:18 -03:00
Sofia Rodrigues
8017d39c4e Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-13 10:29:22 -03:00
Sofia Rodrigues
25bb4ee812 feat: protocol 2026-02-13 10:28:48 -03:00
Sofia Rodrigues
7c1aff34e2 Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-h1 2026-02-13 10:28:13 -03:00
Sofia Rodrigues
28670d4420 Merge branch 'sofia/async-http-body' of https://github.com/leanprover/lean4 into sofia/async-http-h1 2026-02-13 10:26:13 -03:00
Sofia Rodrigues
30f3a3520e Merge branch 'sofia/async-http-body' of https://github.com/leanprover/lean4 into sofia/async-http-h1 2026-02-13 10:22:47 -03:00
Sofia Rodrigues
9acca40aaf revert: h1 2026-02-13 10:21:57 -03:00
Sofia Rodrigues
bf2ed2c87a revert: h1 2026-02-13 10:20:35 -03:00
Sofia Rodrigues
3561d58203 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-13 10:14:03 -03:00
Sofia Rodrigues
1d80616068 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-13 10:13:55 -03:00
Sofia Rodrigues
61c93a7f57 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-13 10:13:40 -03:00
Sofia Rodrigues
b042b8efbd fix: parser path 2026-02-13 10:13:00 -03:00
Sofia Rodrigues
8c00ba48ae fix: parser 2026-02-13 10:12:22 -03:00
Sofia Rodrigues
991a27b7f2 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-13 10:11:30 -03:00
Sofia Rodrigues
69e38e9495 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-13 10:11:23 -03:00
Sofia Rodrigues
16d0162ef0 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-13 10:11:10 -03:00
Sofia Rodrigues
d07f5c502f feat: specialize encodedstrings 2026-02-13 10:10:34 -03:00
Sofia Rodrigues
5b1493507d feat: body channel should close on completion 2026-02-13 02:53:16 -03:00
Sofia Rodrigues
1180572926 fix: test 2026-02-13 02:29:55 -03:00
Sofia Rodrigues
6dc19ef871 Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-13 02:23:20 -03:00
Sofia Rodrigues
4a641fc498 revert: bytearray parser 2026-02-13 02:22:43 -03:00
Sofia Rodrigues
2a04014fa7 Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-13 02:18:18 -03:00
Sofia Rodrigues
4f20a815ec fix: extension name 2026-02-13 02:18:09 -03:00
Sofia Rodrigues
4906e14e51 Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-13 02:13:50 -03:00
Sofia Rodrigues
c9296c7371 Merge branch 'sofia/async-http-body' of https://github.com/leanprover/lean4 into sofia/async-http-h1 2026-02-13 02:12:07 -03:00
Sofia Rodrigues
4db36b214b feat: improve parser 2026-02-13 02:11:38 -03:00
Sofia Rodrigues
a6d94c7504 Merge branch 'sofia/async-http-body' of https://github.com/leanprover/lean4 into sofia/async-http-h1 2026-02-13 01:58:16 -03:00
Sofia Rodrigues
045abb48bb Merge branch 'sofia/async-http-uri' of https://github.com/leanprover/lean4 into sofia/async-http-body 2026-02-13 01:57:40 -03:00
Sofia Rodrigues
10337c620b fix: test 2026-02-13 01:57:23 -03:00
Sofia Rodrigues
698f557aa3 Merge branch 'sofia/async-http-uri' of https://github.com/leanprover/lean4 into sofia/async-http-body 2026-02-13 01:56:37 -03:00
Sofia Rodrigues
692c7c1a09 fix: test 2026-02-13 01:56:29 -03:00
Sofia Rodrigues
1bdfdcdb38 Merge branch 'sofia/async-http-uri' of https://github.com/leanprover/lean4 into sofia/async-http-body 2026-02-13 01:55:05 -03:00
Sofia Rodrigues
cacfe00c1d fix: test 2026-02-13 01:54:52 -03:00
Sofia Rodrigues
0fd0fa9c73 fix: test 2026-02-13 01:54:26 -03:00
Sofia Rodrigues
52fdc0f734 Merge branch 'sofia/async-http-uri' of https://github.com/leanprover/lean4 into sofia/async-http-body 2026-02-13 01:52:49 -03:00
Sofia Rodrigues
451c11d5a1 fix: make strict 2026-02-13 01:52:04 -03:00
Sofia Rodrigues
e92fcf6d46 Merge branch 'sofia/async-http-headers' of https://github.com/leanprover/lean4 into sofia/async-http-uri 2026-02-13 01:41:20 -03:00
Sofia Rodrigues
07140aceb8 Merge branch 'sofia/async-http-uri' of https://github.com/leanprover/lean4 into sofia/async-http-body 2026-02-13 01:39:32 -03:00
Sofia Rodrigues
2cc32928a4 feat: add parser features to path 2026-02-13 01:39:12 -03:00
Sofia Rodrigues
153513d5e2 fix: typos 2026-02-13 01:29:12 -03:00
Sofia Rodrigues
94308408a9 Merge branch 'sofia/async-http-data' of https://github.com/leanprover/lean4 into sofia/async-http-headers 2026-02-13 01:20:31 -03:00
Sofia Rodrigues
1ae6970b77 fix: comment 2026-02-13 01:19:51 -03:00
Sofia Rodrigues
0704f877f5 fix: tests 2026-02-13 01:07:32 -03:00
Sofia Rodrigues
7ff0e6f9c0 feat: 100-continue 2026-02-13 00:56:08 -03:00
Sofia Rodrigues
5b4498ac9d Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-13 00:47:12 -03:00
Sofia Rodrigues
976cc79b0c feat: 100-continue 2026-02-13 00:45:38 -03:00
Sofia Rodrigues
8d6ff0d727 feat: handler 2026-02-13 00:19:36 -03:00
Sofia Rodrigues
26c0e4dac4 feat: date header 2026-02-13 00:06:41 -03:00
Sofia Rodrigues
9ce1821be0 feat: add trailers some type of headers 2026-02-12 12:46:15 -03:00
Sofia Rodrigues
eeff4847fe Merge branch 'sofia/async-http-data' of https://github.com/leanprover/lean4 into sofia/async-http-headers 2026-02-12 12:31:30 -03:00
Sofia Rodrigues
2956f88050 refactor: move trailers 2026-02-12 12:30:59 -03:00
Sofia Rodrigues
26d9c1c07b feat: add extension handling of quotes and ExtensionName 2026-02-12 12:21:47 -03:00
Sofia Rodrigues
73af014cbd fix: documentation 2026-02-12 11:55:15 -03:00
Sofia Rodrigues
d206f437ef Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-12 10:47:19 -03:00
Sofia Rodrigues
d099586632 Merge branch 'sofia/async-http-body' of https://github.com/leanprover/lean4 into sofia/async-http-h1 2026-02-12 10:46:56 -03:00
Sofia Rodrigues
058d95e441 feat: maximum size in readAll 2026-02-12 10:46:43 -03:00
Sofia Rodrigues
b40ac55755 Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-11 19:42:01 -03:00
Sofia Rodrigues
43aa88e5a6 Merge branch 'sofia/async-http-body' of https://github.com/leanprover/lean4 into sofia/async-http-h1 2026-02-11 19:40:58 -03:00
Sofia Rodrigues
8fe2d519d2 revert: chunk changes 2026-02-11 19:40:34 -03:00
Sofia Rodrigues
07ed645f45 Merge branch 'sofia/async-http-body' of https://github.com/leanprover/lean4 into sofia/async-http-h1 2026-02-11 19:35:43 -03:00
Sofia Rodrigues
9485e8f5eb revert: add toString head 2026-02-11 19:35:31 -03:00
Sofia Rodrigues
dc96616781 Merge branch 'sofia/async-http-uri' of https://github.com/leanprover/lean4 into sofia/async-http-body 2026-02-11 19:30:41 -03:00
Sofia Rodrigues
0c44b4ae05 Merge branch 'sofia/async-http-headers' of https://github.com/leanprover/lean4 into sofia/async-http-uri 2026-02-11 19:29:54 -03:00
Sofia Rodrigues
3568464ca7 Merge branch 'sofia/async-http-data' of https://github.com/leanprover/lean4 into sofia/async-http-headers 2026-02-11 19:23:59 -03:00
Sofia Rodrigues
8e5296c71a fix: chunked 2026-02-11 19:22:30 -03:00
Sofia Rodrigues
eee971e3ef Merge branch 'sofia/async-http-body' of https://github.com/leanprover/lean4 into sofia/async-http-h1 2026-02-11 19:10:02 -03:00
Sofia Rodrigues
7a1f8b2d30 fix: readAll 2026-02-11 19:09:45 -03:00
Sofia Rodrigues
157e122891 Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-11 18:51:21 -03:00
Sofia Rodrigues
b12ab7eae4 Merge branch 'sofia/async-http-body' of https://github.com/leanprover/lean4 into sofia/async-http-h1 2026-02-11 18:49:17 -03:00
Sofia Rodrigues
10c8a923e6 feat: readAll functions 2026-02-11 18:48:10 -03:00
Sofia Rodrigues
2b91589750 Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-11 18:08:30 -03:00
Sofia Rodrigues
3e9674eaa9 feat: avoid more than one host 2026-02-11 18:08:16 -03:00
Sofia Rodrigues
d902c6a9f4 fix: mock double close 2026-02-11 18:07:58 -03:00
Sofia Rodrigues
04a17e8c55 fix: fail event should end everything 2026-02-11 18:06:16 -03:00
Sofia Rodrigues
1b6cd457d3 Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-11 17:40:54 -03:00
Sofia Rodrigues
2bc2080fbe fix: bad request behavior 2026-02-11 17:40:19 -03:00
Sofia Rodrigues
6b6425e8d7 fix: close mock bidirectionaly and fix test 2026-02-11 17:39:48 -03:00
Sofia Rodrigues
fb0e95d8ce fix: avoid gate errors 2026-02-11 17:25:26 -03:00
Sofia Rodrigues
4e4702a31f Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-11 17:22:53 -03:00
Sofia Rodrigues
5a2ad22f97 fix: selectable.one can used to change register/unregister order causing double register 2026-02-11 17:22:37 -03:00
Sofia Rodrigues
f02139f7ce fix: skipBytes 2026-02-11 17:14:52 -03:00
Sofia Rodrigues
d004e175e2 fix: error message 2026-02-11 17:03:27 -03:00
Sofia Rodrigues
7928a95c34 tests: add more tests 2026-02-11 16:53:11 -03:00
Sofia Rodrigues
202e6c5228 fix: transport, add explicit close that is no-op for tp 2026-02-11 14:54:08 -03:00
Sofia Rodrigues
0aeaa5e71d Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-10 17:30:08 -03:00
Sofia Rodrigues
9ad4ee304b fix: imports 2026-02-10 17:29:04 -03:00
Sofia Rodrigues
5bd280553d Merge branch 'sofia/async-http-body' of https://github.com/leanprover/lean4 into sofia/async-http-h1 2026-02-10 17:25:08 -03:00
Sofia Rodrigues
7e215c8220 Merge branch 'sofia/async-http-uri' of https://github.com/leanprover/lean4 into sofia/async-http-body 2026-02-10 17:24:00 -03:00
Sofia Rodrigues
2c23680163 fix: imports 2026-02-10 17:23:14 -03:00
Sofia Rodrigues
c4f179daa0 Merge branch 'sofia/async-http-headers' of https://github.com/leanprover/lean4 into sofia/async-http-uri 2026-02-10 17:15:31 -03:00
Sofia Rodrigues
c2f657a15a Merge branch 'sofia/async-http-data' of https://github.com/leanprover/lean4 into sofia/async-http-headers 2026-02-10 17:12:45 -03:00
Sofia Rodrigues
9332081875 fix: import 2026-02-10 17:12:20 -03:00
Sofia Rodrigues
1cec97568b fix: imports 2026-02-10 17:11:11 -03:00
Sofia Rodrigues
b567713641 Merge branch 'sofia/async-http-data' of https://github.com/leanprover/lean4 into sofia/async-http-headers 2026-02-10 17:03:44 -03:00
Sofia Rodrigues
de776c1f32 fix: interpolation 2026-02-10 17:03:02 -03:00
Sofia Rodrigues
c498ea74ec Merge branch 'sofia/async-http-headers' of https://github.com/leanprover/lean4 into sofia/async-http-uri 2026-02-10 16:47:08 -03:00
Sofia Rodrigues
f4aad3a494 Merge branch 'sofia/async-http-data' of https://github.com/leanprover/lean4 into sofia/async-http-headers 2026-02-10 16:46:48 -03:00
Sofia Rodrigues
1cebf576c3 Merge branch 'master' of https://github.com/leanprover/lean4 into sofia/async-http-data 2026-02-10 16:44:20 -03:00
Sofia Rodrigues
25dac2e239 fix: test 2026-02-09 22:25:14 -03:00
Sofia Rodrigues
4a9de7094c feat: new body 2026-02-09 22:20:05 -03:00
Sofia Rodrigues
c4eab3b677 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-09 21:58:26 -03:00
Sofia Rodrigues
dd125c7999 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-09 21:57:42 -03:00
Sofia Rodrigues
5e3dce8088 fix: chunk stream will only deal with content-size of the chunks not with the wireFormatSize 2026-02-09 21:57:26 -03:00
Sofia Rodrigues
4c64f2c2e8 fix: suggestions 2026-02-09 21:55:38 -03:00
Sofia Rodrigues
aa6e11dfc0 Merge branch 'sofia/async-http-body' of https://github.com/leanprover/lean4 into sofia/async-http-h1 2026-02-09 21:25:02 -03:00
Sofia Rodrigues
e7d1e7dd54 Merge branch 'sofia/async-http-uri' of https://github.com/leanprover/lean4 into sofia/async-http-body 2026-02-09 21:09:55 -03:00
Sofia Rodrigues
03843fd3f0 fix: suggestions 2026-02-09 21:09:38 -03:00
Sofia Rodrigues
294e9900ea feat: unify all in stream 2026-02-09 20:29:18 -03:00
Sofia Rodrigues
f13651979e fix: wireFormatSize 2026-02-09 19:31:41 -03:00
Sofia Rodrigues
3d8ba4d09b Merge branch 'sofia/async-http-headers' of https://github.com/leanprover/lean4 into sofia/async-http-body 2026-02-09 12:30:01 -03:00
Sofia Rodrigues
63984c8dda fix: header value 2026-02-09 12:01:25 -03:00
Sofia Rodrigues
e2fd8a5835 Merge branch 'sofia/async-http-data' of https://github.com/leanprover/lean4 into sofia/async-http-headers 2026-02-09 11:28:52 -03:00
Sofia Rodrigues
a0263870b9 fix: extensions 2026-02-09 11:24:48 -03:00
Sofia Rodrigues
3c4ae58aff feat: add extensions 2026-02-09 11:18:24 -03:00
Sofia Rodrigues
5965707575 fix: apply suggestions 2026-02-09 11:17:03 -03:00
Sofia Rodrigues
dbe0140578 fix: enforce validations 2026-02-09 10:28:43 -03:00
Sofia Rodrigues
bc21289793 feat: http docs 2026-01-25 12:48:06 -03:00
Sofia Rodrigues
f11bd0928d feat: server basics 2026-01-25 12:48:06 -03:00
Sofia Rodrigues
6ffd5ad2a4 fix: incremental parsing 2026-01-25 12:42:29 -03:00
Sofia Rodrigues
7ce8cbc01c feat: remove toString instances 2026-01-25 12:42:29 -03:00
Sofia Rodrigues
12a7603c77 fix: orphan module 2026-01-25 12:42:29 -03:00
Sofia Rodrigues
53a6355074 feat: H1 protocol 2026-01-25 12:42:29 -03:00
Sofia Rodrigues
f8ad249e42 test: wrong test 2026-01-25 12:40:41 -03:00
Sofia Rodrigues
3c41d3961e feat: empty body and constructors 2026-01-25 12:39:43 -03:00
Sofia Rodrigues
18bc715bad feat: remove useless functions 2026-01-25 12:39:43 -03:00
Sofia Rodrigues
3349d20663 feat: body 2026-01-25 12:39:41 -03:00
Sofia Rodrigues
bad70e3eab feat: request type has request target 2026-01-25 12:36:36 -03:00
Sofia Rodrigues
21286eb163 fix: domain name comment 2026-01-25 12:36:36 -03:00
Sofia Rodrigues
0e5f07558c feat: introduce data type for HTTP 2026-01-25 12:36:36 -03:00
Sofia Rodrigues
6e26b901e4 fix: encoding 2026-01-25 12:33:07 -03:00
Sofia Rodrigues
81c67c8f12 revert: levenshtein test 2026-01-25 12:29:37 -03:00
Sofia Rodrigues
990e21eefc fix: namespace 2026-01-25 12:27:16 -03:00
Sofia Rodrigues
7141144a2f fix: remove native_decide 2026-01-25 12:27:16 -03:00
Sofia Rodrigues
8c343501c1 fix: apply suggestions
Co-authored-by: Rob23oba <152706811+Rob23oba@users.noreply.github.com>
2026-01-25 12:27:16 -03:00
Sofia Rodrigues
44f08686cd feat: connection values 2026-01-25 12:27:16 -03:00
Sofia Rodrigues
65883f8c2a fix: levenshtein test is using the new Decidable instance 2026-01-25 12:27:16 -03:00
Sofia Rodrigues
bd28a8fad5 fix: tests and type class 2026-01-25 12:27:16 -03:00
Sofia Rodrigues
8ba86c2c67 fix: case and usage of native_decide 2026-01-25 12:24:52 -03:00
Sofia Rodrigues
d3cddf9e44 fix: Headers.Basic comment 2026-01-25 12:24:52 -03:00
Sofia Rodrigues
5f3babee5c feat: headers data structure 2026-01-25 12:24:51 -03:00
Sofia Rodrigues
26dfc9a872 refactor: remove headers 2026-01-25 12:16:13 -03:00
Sofia Rodrigues
e47439e8be fix: default size 2026-01-25 11:26:03 -03:00
Sofia Rodrigues
1ef53758be fix: apply suggestions
Co-authored-by: Rob23oba <152706811+Rob23oba@users.noreply.github.com>
2026-01-25 11:21:06 -03:00
Sofia Rodrigues
8544042789 fix: status and chunkedbuffer 2026-01-25 11:19:34 -03:00
Sofia Rodrigues
f564d43d98 feat: basic headers structure to more structured approach 2026-01-23 17:58:44 -03:00
Sofia Rodrigues
32fa0666c9 feat: data components 2026-01-23 17:14:53 -03:00
53 changed files with 15152 additions and 118 deletions

View File

@@ -10,6 +10,8 @@ public import Init.Data.Random
public import Std.Internal.Async.Basic
import Init.Data.ByteArray.Extra
import Init.Data.Array.Lemmas
public import Std.Sync.Mutex
public import Std.Sync.Barrier
import Init.Omega
public section
@@ -132,6 +134,8 @@ partial def Selectable.one (selectables : Array (Selectable α)) : Async α := d
let gen := mkStdGen seed
let selectables := shuffleIt selectables gen
let gate IO.Promise.new
for selectable in selectables do
if let some val selectable.selector.tryFn then
let result selectable.cont val
@@ -141,6 +145,9 @@ partial def Selectable.one (selectables : Array (Selectable α)) : Async α := d
let promise IO.Promise.new
for selectable in selectables do
if finished.get then
break
let waiterPromise IO.Promise.new
let waiter := Waiter.mk finished waiterPromise
selectable.selector.registerFn waiter
@@ -157,18 +164,20 @@ partial def Selectable.one (selectables : Array (Selectable α)) : Async α := d
let async : Async _ :=
try
let res IO.ofExcept res
discard <| await gate.result?
for selectable in selectables do
selectable.selector.unregisterFn
let contRes selectable.cont res
promise.resolve (.ok contRes)
promise.resolve (.ok ( selectable.cont res))
catch e =>
promise.resolve (.error e)
async.toBaseIO
Async.ofPromise (pure promise)
gate.resolve ()
let result Async.ofPromise (pure promise)
return result
/--
Performs fair and data-loss free non-blocking multiplexing on the `Selectable`s in `selectables`.
@@ -224,6 +233,8 @@ def Selectable.combine (selectables : Array (Selectable α)) : IO (Selector α)
let derivedWaiter := Waiter.mk waiter.finished waiterPromise
selectable.selector.registerFn derivedWaiter
let barrier IO.Promise.new
discard <| IO.bindTask (t := waiterPromise.result?) fun res? => do
match res? with
| none => return (Task.pure (.ok ()))
@@ -231,6 +242,7 @@ def Selectable.combine (selectables : Array (Selectable α)) : IO (Selector α)
let async : Async _ := do
let mainPromise := waiter.promise
await barrier
for selectable in selectables do
selectable.selector.unregisterFn

View File

@@ -6,4 +6,188 @@ Authors: Sofia Rodrigues
module
prelude
public import Std.Internal.Http.Data
public import Std.Internal.Http.Server
public section
/-!
# HTTP Library
A low-level HTTP/1.1 server implementation for Lean. This library provides a pure,
sans-I/O protocol implementation that can be used with the `Async` library or with
custom connection handlers.
## Overview
This module provides a complete HTTP/1.1 server implementation with support for:
- Request/response handling with directional streaming bodies
- Keep-alive connections
- Chunked transfer encoding
- Header validation and management
- Configurable timeouts and limits
**Sans I/O Architecture**: The core protocol logic doesn't perform any actual I/O itself -
it just defines how data should be processed. This separation allows the protocol implementation
to remain pure and testable, while different transports (TCP sockets, mock clients) handle
the actual reading and writing of bytes.
## Quick Start
The main entry point is `Server.serve`, which starts an HTTP/1.1 server. Implement the
`Server.Handler` type class to define how the server handles requests, errors, and
`Expect: 100-continue` headers:
```lean
import Std.Internal.Http
open Std Internal IO Async
open Std Http Server
structure MyHandler
instance : Handler MyHandler where
onRequest _ req := do
Response.ok |>.text "Hello, World!"
def main : IO Unit := Async.block do
let addr : Net.SocketAddress := .v4 ⟨.ofParts 127 0 0 1, 8080⟩
let server ← Server.serve addr MyHandler.mk
server.waitShutdown
```
## Working with Requests
Incoming requests are represented by `Request Body.Stream`, which bundles the request
line, parsed headers, and a lazily-consumed body. Headers are available immediately,
while the body can be streamed or collected on demand, allowing handlers to process both
small and large payloads efficiently.
### Reading Headers
```lean
def handler (req : Request Body.Stream) : ContextAsync (Response Body.Stream) := do
-- Access request method and URI
let method := req.head.method -- Method.get, Method.post, etc.
let uri := req.head.uri -- RequestTarget
-- Read a specific header
if let some contentType := req.head.headers.get? (.mk "content-type") then
IO.println s!"Content-Type: {contentType}"
Response.ok |>.text "OK"
```
### URI Query Semantics
`RequestTarget.query` is parsed using form-style key/value conventions (`k=v&...`), and `+` is decoded as a
space in query components. If you need RFC 3986 opaque query handling, use the raw request target string
(`toString req.head.uri`) and parse it with custom logic.
### Reading the Request Body
The request body is exposed as `Body.Stream`, which can be consumed incrementally or
collected into memory. The `readAll` method reads the entire body, with an optional size
limit to protect against unbounded payloads.
```lean
def handler (req : Request Body.Stream) : ContextAsync (Response Body.Stream) := do
-- Collect entire body as a String
let bodyStr : String ← req.body.readAll
-- Or with a maximum size limit
let bodyStr : String ← req.body.readAll (maximumSize := some 1024)
Response.ok |>.text s!"Received: {bodyStr}"
```
## Building Responses
Responses are constructed using a builder API that starts from a status code and adds
headers and a body. Common helpers exist for text, HTML, JSON, and binary responses, while
still allowing full control over status codes and header values.
Response builders produce `Async (Response Body.Stream)`.
```lean
-- Text response
Response.ok |>.text "Hello!"
-- HTML response
Response.ok |>.html "<h1>Hello!</h1>"
-- JSON response
Response.ok |>.json "{\"key\": \"value\"}"
-- Binary response
Response.ok |>.bytes someByteArray
-- Custom status
Response.new |>.status .created |>.text "Resource created"
-- With custom headers
Response.ok
|>.header! "X-Custom-Header" "value"
|>.header! "Cache-Control" "no-cache"
|>.text "Response with headers"
```
### Streaming Responses
For large responses or server-sent events, use streaming:
```lean
def handler (req : Request Body.Stream) : ContextAsync (Response Body.Stream) := do
Response.ok
|>.header! "Content-Type" "text/plain"
|>.stream fun stream => do
for i in [0:10] do
stream.send { data := s!"chunk {i}\n".toUTF8 }
Async.sleep 1000
stream.close
```
## Server Configuration
Configure server behavior with `Config`:
```lean
def config : Config := {
maxRequests := 10000000,
lingeringTimeout := 5000,
}
let server ← Server.serve addr MyHandler.mk config
```
## Handler Type Class
Implement `Server.Handler` to define how the server processes events. The class has three
methods, all with default implementations:
- `onRequest` — called for each incoming request; returns a response inside `ContextAsync`
- `onFailure` — called when an error occurs while processing a request
- `onContinue` — called when a request includes an `Expect: 100-continue` header; return
`true` to accept the body or `false` to reject it
```lean
structure MyHandler where
greeting : String
instance : Handler MyHandler where
onRequest self req := do
Response.ok |>.text self.greeting
onFailure self err := do
IO.eprintln s!"Error: {err}"
```
The handler methods operate in the following monads:
- `onRequest` uses `ContextAsync` — an asynchronous monad (`ReaderT CancellationContext Async`) that provides:
- Full access to `Async` operations (spawning tasks, sleeping, concurrent I/O)
- A `CancellationContext` tied to the client connection — when the client disconnects, the
context is cancelled, allowing your handler to detect this and stop work early
- `onFailure` uses `Async`
- `onContinue` uses `Async`
-/

View File

@@ -13,6 +13,8 @@ public import Std.Internal.Http.Data.Response
public import Std.Internal.Http.Data.Status
public import Std.Internal.Http.Data.Chunk
public import Std.Internal.Http.Data.Headers
public import Std.Internal.Http.Data.URI
public import Std.Internal.Http.Data.Body
/-!
# HTTP Data Types

View File

@@ -0,0 +1,24 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Data.Body.Basic
public import Std.Internal.Http.Data.Body.Length
public import Std.Internal.Http.Data.Body.Any
public import Std.Internal.Http.Data.Body.Stream
public import Std.Internal.Http.Data.Body.Empty
public import Std.Internal.Http.Data.Body.Full
public section
/-!
# Body
This module re-exports all HTTP body types: `Body.Empty`, `Body.Full`, `Body.Stream`,
`Body.Any`, and `Body.Length`, along with the `Http.Body` typeclass and conversion
utilities (`ToByteArray`, `FromByteArray`).
-/

View File

@@ -0,0 +1,83 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Data.Body.Basic
public section
/-!
# Body.Any
A type-erased body backed by closures. Implements `Http.Body` and can be constructed from any
type that also implements `Http.Body`. Used as the default handler response body type.
-/
namespace Std.Http.Body
open Std Internal IO Async
set_option linter.all true
/--
A type-erased body handle. Operations are stored as closures, making it open to any body type
that implements `Http.Body`.
-/
structure Any where
  /--
  Receives the next body chunk. Returns `none` at end-of-stream.
  -/
  recv : Async (Option Chunk)
  /--
  Closes the body stream.
  -/
  close : Async Unit
  /--
  Returns `true` when the body stream is closed.
  -/
  isClosed : Async Bool
  /--
  Selector that resolves when a chunk is available or EOF is reached.
  -/
  recvSelector : Selector (Option Chunk)
  /--
  Returns the declared size.
  -/
  getKnownSize : Async (Option Body.Length)
  /--
  Sets the size of the body.
  -/
  setKnownSize : Option Body.Length → Async Unit

namespace Any

/--
Erases a body of any `Http.Body` instance into a `Body.Any`.
-/
def ofBody [Http.Body α] (body : α) : Any where
  recv := Http.Body.recv body
  close := Http.Body.close body
  isClosed := Http.Body.isClosed body
  recvSelector := Http.Body.recvSelector body
  getKnownSize := Http.Body.getKnownSize body
  setKnownSize := Http.Body.setKnownSize body

end Any

-- `Any` implements `Http.Body` by forwarding to its stored closures.
instance : Http.Body Any where
  recv := Any.recv
  close := Any.close
  isClosed := Any.isClosed
  recvSelector := Any.recvSelector
  getKnownSize := Any.getKnownSize
  setKnownSize := Any.setKnownSize
end Std.Http.Body

View File

@@ -0,0 +1,102 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Async
public import Std.Internal.Async.ContextAsync
public import Std.Internal.Http.Data.Chunk
public import Std.Internal.Http.Data.Headers
public import Std.Internal.Http.Data.Body.Length
public section
/-!
# Body.Basic
This module defines the `Body` typeclass for HTTP body streams, and shared conversion types
`ToByteArray` and `FromByteArray` used for encoding and decoding body content.
-/
namespace Std.Http
open Std Internal IO Async
set_option linter.all true
/--
Typeclass for values that can be read as HTTP body streams.
-/
class Body (α : Type) where
  /--
  Receives the next body chunk. Returns `none` at end-of-stream.
  -/
  recv : α → Async (Option Chunk)
  /--
  Closes the body stream.
  -/
  close : α → Async Unit
  /--
  Returns `true` when the body stream is closed.
  -/
  isClosed : α → Async Bool
  /--
  Selector that resolves when a chunk is available or EOF is reached.
  -/
  recvSelector : α → Selector (Option Chunk)
  /--
  Gets the declared size of the body.
  -/
  getKnownSize : α → Async (Option Body.Length)
  /--
  Sets the declared size of a body.
  -/
  setKnownSize : α → Option Body.Length → Async Unit
end Std.Http
namespace Std.Http.Body
/--
Typeclass for types that can be converted to a `ByteArray`.
-/
class ToByteArray (α : Type) where
  /--
  Transforms into a `ByteArray`.
  -/
  toByteArray : α → ByteArray

instance : ToByteArray ByteArray where
  toByteArray := id

instance : ToByteArray String where
  toByteArray := String.toUTF8

/--
Typeclass for types that can be decoded from a `ByteArray`. The conversion may fail with an error
message if the bytes are not valid for the target type.
-/
class FromByteArray (α : Type) where
  /--
  Attempts to decode a `ByteArray` into the target type, returning an error message on failure.
  -/
  fromByteArray : ByteArray → Except String α

instance : FromByteArray ByteArray where
  fromByteArray := .ok

instance : FromByteArray String where
  fromByteArray bs :=
    match String.fromUTF8? bs with
    | some s => .ok s
    | none => .error "invalid UTF-8 encoding"
end Std.Http.Body

View File

@@ -0,0 +1,116 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Data.Request
public import Std.Internal.Http.Data.Response
public import Std.Internal.Http.Data.Body.Any
public section
/-!
# Body.Empty
Represents an always-empty, already-closed body handle.
-/
namespace Std.Http.Body
open Std Internal IO Async
set_option linter.all true
/--
An empty body handle.
-/
structure Empty where
  deriving Inhabited, BEq

namespace Empty

/--
Receives from an empty body, always returning end-of-stream.
-/
@[inline]
def recv (_ : Empty) : Async (Option Chunk) :=
  pure none

/--
Closes an empty body (no-op).
-/
@[inline]
def close (_ : Empty) : Async Unit :=
  pure ()

/--
Empty bodies are always closed for reading.
-/
@[inline]
def isClosed (_ : Empty) : Async Bool :=
  pure true

/--
Selector that immediately resolves with end-of-stream for an empty body.
-/
@[inline]
def recvSelector (_ : Empty) : Selector (Option Chunk) where
  -- `some none` = "ready, and the value is EOF".
  tryFn := pure (some none)
  registerFn waiter := do
    let lose := pure ()
    let win promise := do
      promise.resolve (.ok none)
    waiter.race lose win
  unregisterFn := pure ()

end Empty

instance : Http.Body Empty where
  recv := Empty.recv
  close := Empty.close
  isClosed := Empty.isClosed
  recvSelector := Empty.recvSelector
  getKnownSize _ := pure (some <| .fixed 0)
  setKnownSize _ _ := pure ()
-- NOTE(review): the anonymous-constructor brackets were lost in extraction; restored here.
instance : Coe Empty Any := ⟨Any.ofBody⟩

instance : Coe (Response Empty) (Response Any) where
  coe f := { f with }

instance : Coe (ContextAsync (Response Empty)) (ContextAsync (Response Any)) where
  coe action := do
    let response ← action
    pure (response : Response Any)

instance : Coe (Async (Response Empty)) (ContextAsync (Response Any)) where
  coe action := do
    let response ← action
    pure (response : Response Any)
end Body
namespace Request.Builder

open Internal.IO.Async

/--
Builds a request with no body.
-/
def empty (builder : Builder) : Async (Request Body.Empty) :=
  pure <| builder.body {}

end Request.Builder

namespace Response.Builder

open Internal.IO.Async

/--
Builds a response with no body.
-/
def empty (builder : Builder) : Async (Response Body.Empty) :=
  pure <| builder.body {}

end Response.Builder

View File

@@ -0,0 +1,232 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Sync
public import Std.Internal.Http.Data.Request
public import Std.Internal.Http.Data.Response
public import Std.Internal.Http.Data.Body.Any
public import Init.Data.ByteArray
public section
/-!
# Body.Full
A body backed by a fixed `ByteArray` held in a `Mutex`.
The byte array is consumed at most once: the first call to `recv` atomically takes the data
and returns it as a single chunk; subsequent calls return `none` (end-of-stream).
Closing the body discards any unconsumed data.
-/
namespace Std.Http.Body
open Std Internal IO Async
set_option linter.all true
/--
A body backed by a fixed, mutex-protected `ByteArray`.

The data is consumed on the first read. Once consumed (or explicitly closed), the body
behaves as a closed, empty channel.
-/
structure Full where
  private mk ::
  private state : Mutex (Option ByteArray)
  deriving Nonempty

namespace Full

-- Atomically takes the stored bytes, returning them as a single chunk.
-- An empty array is treated the same as already-consumed: EOF.
private def takeChunk : AtomicT (Option ByteArray) Async (Option Chunk) := do
  match ← get with
  | none =>
    pure none
  | some data =>
    set (none : Option ByteArray)
    if data.isEmpty then
      pure none
    else
      pure (some (Chunk.ofByteArray data))

/--
Creates a `Full` body from a `ByteArray`.
-/
def ofByteArray (data : ByteArray) : Async Full := do
  let state ← Mutex.new (some data)
  return { state }

/--
Creates a `Full` body from a `String`.
-/
def ofString (data : String) : Async Full := do
  let state ← Mutex.new (some data.toUTF8)
  return { state }

/--
Receives the body data. Returns the full byte array on the first call as a single chunk,
then `none` on all subsequent calls.
-/
def recv (full : Full) : Async (Option Chunk) :=
  full.state.atomically do
    takeChunk

/--
Closes the body, discarding any unconsumed data.
-/
def close (full : Full) : Async Unit :=
  full.state.atomically do
    set (none : Option ByteArray)

/--
Returns `true` when the data has been consumed or the body has been closed.
-/
def isClosed (full : Full) : Async Bool :=
  full.state.atomically do
    return (← get).isNone

/--
Returns the known size of the remaining data.

Returns `some (.fixed n)` with the current byte count, or `some (.fixed 0)` if the body has
already been consumed or closed.
-/
def getKnownSize (full : Full) : Async (Option Body.Length) :=
  full.state.atomically do
    match ← get with
    | none => pure (some (.fixed 0))
    | some data => pure (some (.fixed data.size))

/--
Selector that immediately resolves to the remaining chunk (or EOF).
-/
def recvSelector (full : Full) : Selector (Option Chunk) where
  tryFn := do
    let chunk ← full.state.atomically do
      takeChunk
    pure (some chunk)
  registerFn waiter := do
    full.state.atomically do
      let lose := pure ()
      let win promise := do
        let chunk ← takeChunk
        promise.resolve (.ok chunk)
      waiter.race lose win
  unregisterFn := pure ()

end Full

instance : Http.Body Full where
  recv := Full.recv
  close := Full.close
  isClosed := Full.isClosed
  recvSelector := Full.recvSelector
  getKnownSize := Full.getKnownSize
  setKnownSize _ _ := pure ()
-- NOTE(review): the anonymous-constructor brackets were lost in extraction; restored here.
instance : Coe Full Any := ⟨Any.ofBody⟩

instance : Coe (Response Full) (Response Any) where
  coe f := { f with }

instance : Coe (ContextAsync (Response Full)) (ContextAsync (Response Any)) where
  coe action := do
    let response ← action
    pure (response : Response Any)

instance : Coe (Async (Response Full)) (ContextAsync (Response Any)) where
  coe action := do
    let response ← action
    pure (response : Response Any)
end Body
namespace Request.Builder

open Internal.IO.Async

/--
Builds a request body from raw bytes without setting any headers.

Use `bytes` instead if you want `Content-Type: application/octet-stream` set automatically.
-/
def fromBytes (builder : Builder) (content : ByteArray) : Async (Request Body.Full) := do
  return builder.body (← Body.Full.ofByteArray content)

/--
Builds a request with a binary body.

Sets `Content-Type: application/octet-stream`.

Use `fromBytes` instead if you need to set a different `Content-Type` or none at all.
-/
def bytes (builder : Builder) (content : ByteArray) : Async (Request Body.Full) :=
  fromBytes (builder.header Header.Name.contentType (Header.Value.ofString! "application/octet-stream")) content

/--
Builds a request with a text body.

Sets `Content-Type: text/plain; charset=utf-8`.
-/
def text (builder : Builder) (content : String) : Async (Request Body.Full) :=
  fromBytes (builder.header Header.Name.contentType (Header.Value.ofString! "text/plain; charset=utf-8")) content.toUTF8

/--
Builds a request with a JSON body.

Sets `Content-Type: application/json`.
-/
def json (builder : Builder) (content : String) : Async (Request Body.Full) :=
  fromBytes (builder.header Header.Name.contentType (Header.Value.ofString! "application/json")) content.toUTF8

/--
Builds a request with an HTML body.

Sets `Content-Type: text/html; charset=utf-8`.
-/
def html (builder : Builder) (content : String) : Async (Request Body.Full) :=
  fromBytes (builder.header Header.Name.contentType (Header.Value.ofString! "text/html; charset=utf-8")) content.toUTF8

end Request.Builder
namespace Response.Builder

open Internal.IO.Async

/--
Builds a response body from raw bytes without setting any headers.

Use `bytes` instead if you want `Content-Type: application/octet-stream` set automatically.
-/
def fromBytes (builder : Builder) (content : ByteArray) : Async (Response Body.Full) := do
  return builder.body (← Body.Full.ofByteArray content)

/--
Builds a response with a binary body.

Sets `Content-Type: application/octet-stream`.

Use `fromBytes` instead if you need to set a different `Content-Type` or none at all.
-/
def bytes (builder : Builder) (content : ByteArray) : Async (Response Body.Full) :=
  fromBytes (builder.header Header.Name.contentType (Header.Value.ofString! "application/octet-stream")) content

/--
Builds a response with a text body.

Sets `Content-Type: text/plain; charset=utf-8`.
-/
def text (builder : Builder) (content : String) : Async (Response Body.Full) :=
  fromBytes (builder.header Header.Name.contentType (Header.Value.ofString! "text/plain; charset=utf-8")) content.toUTF8

/--
Builds a response with a JSON body.

Sets `Content-Type: application/json`.
-/
def json (builder : Builder) (content : String) : Async (Response Body.Full) :=
  fromBytes (builder.header Header.Name.contentType (Header.Value.ofString! "application/json")) content.toUTF8

/--
Builds a response with an HTML body.

Sets `Content-Type: text/html; charset=utf-8`.
-/
def html (builder : Builder) (content : String) : Async (Response Body.Full) :=
  fromBytes (builder.header Header.Name.contentType (Header.Value.ofString! "text/html; charset=utf-8")) content.toUTF8

end Response.Builder

View File

@@ -0,0 +1,60 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Init.Data.Repr
public section
/-!
# Body.Length
This module defines the `Length` type, that represents the Content-Length or Transfer-Encoding
of an HTTP request or response.
-/
namespace Std.Http.Body
set_option linter.all true
/--
Size of the body of a response or request.
-/
inductive Length
  /--
  Indicates that the HTTP message body uses **chunked transfer encoding**.
  -/
  | chunked
  /--
  Indicates that the HTTP message body has a **fixed, known length**, as specified by the
  `Content-Length` header.
  -/
  | fixed (n : Nat)
  deriving Repr, BEq

namespace Length

/--
Checks if the `Length` is chunked.
-/
@[inline]
def isChunked : Length → Bool
  | .chunked => true
  | _ => false

/--
Checks if the `Length` is a fixed size.
-/
@[inline]
def isFixed : Length → Bool
  | .fixed _ => true
  | _ => false

end Length
end Std.Http.Body

View File

@@ -0,0 +1,650 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Sync
public import Std.Internal.Async
public import Std.Internal.Http.Data.Request
public import Std.Internal.Http.Data.Response
public import Std.Internal.Http.Data.Chunk
public import Std.Internal.Http.Data.Body.Basic
public import Std.Internal.Http.Data.Body.Any
public import Init.Data.ByteArray
public section
/-!
# Body.Stream
This module defines a zero-buffer rendezvous body channel (`Body.Stream`) that supports
both sending and receiving chunks.
There is no queue and no capacity. A send waits for a receiver and a receive waits for a sender.
At most one blocked producer and one blocked consumer are supported.
-/
namespace Std.Http
namespace Body
open Std Internal IO Async
set_option linter.all true
namespace Channel
open Internal.IO.Async in
/-- A parked receiver: either a plain promise or a select-mode waiter. -/
private inductive Consumer where
  | normal (promise : IO.Promise (Option Chunk))
  | select (finished : Waiter (Option Chunk))

/--
Delivers `x` to the consumer. Returns `false` when a select-mode waiter had already
finished elsewhere (lost the race), so the value was not consumed.
-/
private def Consumer.resolve (c : Consumer) (x : Option Chunk) : BaseIO Bool := do
  match c with
  | .normal promise =>
    promise.resolve x
    return true
  | .select waiter =>
    let lose := return false
    let win promise := do
      promise.resolve (.ok x)
      return true
    waiter.race lose win

/-- A parked sender holding the chunk it wants to deliver. -/
private structure Producer where
  chunk : Chunk
  /--
  Resolved with `true` when consumed by a receiver, `false` when the channel closes.
  -/
  done : IO.Promise Bool

open Internal.IO.Async in
/-- Resolves an interest waiter with `x`; returns `false` if the waiter already finished. -/
private def resolveInterestWaiter (waiter : Waiter Bool) (x : Bool) : BaseIO Bool := do
  let lose := return false
  let win promise := do
    promise.resolve (.ok x)
    return true
  waiter.race lose win

private structure State where
  /--
  A single blocked producer waiting for a receiver.
  -/
  pendingProducer : Option Producer
  /--
  A single blocked consumer waiting for a producer.
  -/
  pendingConsumer : Option Consumer
  /--
  A waiter for `Stream.interestSelector`.
  -/
  interestWaiter : Option (Internal.IO.Async.Waiter Bool)
  /--
  Whether the channel is closed.
  -/
  closed : Bool
  /--
  Known size of the stream if available.
  -/
  knownSize : Option Body.Length
  /--
  Buffered partial chunk data accumulated from `Stream.send ... (incomplete := true)`.
  These partial pieces are collapsed and emitted as a single chunk on the next complete send.
  -/
  pendingIncompleteChunk : Option Chunk := none
  deriving Nonempty
end Channel
/--
A zero-buffer rendezvous body channel that supports both sending and receiving chunks.
-/
structure Stream where
  private mk ::
  private state : Mutex Channel.State
  deriving Nonempty, TypeName

/--
Creates a rendezvous body stream.
-/
def mkStream : Async Stream := do
  let state ← Mutex.new {
    pendingProducer := none
    pendingConsumer := none
    interestWaiter := none
    closed := false
    knownSize := none
  }
  return { state }
namespace Channel
/-- Subtracts a delivered chunk's size from a `.fixed` known size; leaves other sizes alone. -/
private def decreaseKnownSize (knownSize : Option Body.Length) (chunk : Chunk) : Option Body.Length :=
  match knownSize with
  | some (.fixed res) => some (Body.Length.fixed (res - chunk.data.size))
  | _ => knownSize

/-- Drops registered select-mode waiters that have already finished elsewhere. -/
private def pruneFinishedWaiters [Monad m] [MonadLiftT (ST IO.RealWorld) m] :
    AtomicT State m Unit := do
  let st ← get
  let pendingConsumer ←
    match st.pendingConsumer with
    | some (.select waiter) =>
      if ← waiter.checkFinished then
        pure none
      else
        pure st.pendingConsumer
    | _ =>
      pure st.pendingConsumer
  let interestWaiter ←
    match st.interestWaiter with
    | some waiter =>
      if ← waiter.checkFinished then
        pure none
      else
        pure st.interestWaiter
    | none =>
      pure none
  set { st with pendingConsumer, interestWaiter }

/-- Wakes the interest waiter (if any) with `true`, signalling a consumer is now waiting. -/
private def signalInterest [Monad m] [MonadLiftT (ST IO.RealWorld) m] [MonadLiftT BaseIO m] :
    AtomicT State m Unit := do
  let st ← get
  if let some waiter := st.interestWaiter then
    discard <| resolveInterestWaiter waiter true
    set { st with interestWaiter := none }

/-- `true` when a receive would not block: a producer is parked or the channel is closed. -/
private def recvReady' [Monad m] [MonadLiftT (ST IO.RealWorld) m] :
    AtomicT State m Bool := do
  let st ← get
  return st.pendingProducer.isSome || st.closed

/-- `true` when a consumer is currently blocked waiting for data. -/
private def hasInterest' [Monad m] [MonadLiftT (ST IO.RealWorld) m] :
    AtomicT State m Bool := do
  let st ← get
  return st.pendingConsumer.isSome

/-- Takes the chunk from a parked producer, if any, acknowledging it with `true`. -/
private def tryRecv' [Monad m] [MonadLiftT (ST IO.RealWorld) m] [MonadLiftT BaseIO m] :
    AtomicT State m (Option Chunk) := do
  let st ← get
  if let some producer := st.pendingProducer then
    set {
      st with
      pendingProducer := none
      knownSize := decreaseKnownSize st.knownSize producer.chunk
    }
    discard <| producer.done.resolve true
    return some producer.chunk
  else
    return none

/-- Marks the channel closed, waking any blocked consumer, producer, and interest waiter. -/
private def close' [Monad m] [MonadLiftT (ST IO.RealWorld) m] [MonadLiftT BaseIO m] :
    AtomicT State m Unit := do
  let st ← get
  if st.closed then
    return ()
  if let some consumer := st.pendingConsumer then
    discard <| consumer.resolve none
  if let some waiter := st.interestWaiter then
    discard <| resolveInterestWaiter waiter false
  if let some producer := st.pendingProducer then
    discard <| producer.done.resolve false
  set {
    st with
    pendingProducer := none
    pendingConsumer := none
    interestWaiter := none
    pendingIncompleteChunk := none
    closed := true
  }
end Channel
namespace Stream
/--
Attempts to receive a chunk from the channel without blocking.

Returns `some chunk` only when a producer is already waiting.
-/
def tryRecv (stream : Stream) : Async (Option Chunk) :=
  stream.state.atomically do
    Channel.pruneFinishedWaiters
    Channel.tryRecv'

-- Core receive: fast-path an already-available chunk, otherwise park a consumer promise.
private def recv' (stream : Stream) : BaseIO (AsyncTask (Option Chunk)) := do
  stream.state.atomically do
    Channel.pruneFinishedWaiters
    if let some chunk ← Channel.tryRecv' then
      return AsyncTask.pure (some chunk)
    let st ← get
    if st.closed then
      return AsyncTask.pure none
    if st.pendingConsumer.isSome then
      return Task.pure (.error (IO.Error.userError "only one blocked consumer is allowed"))
    let promise ← IO.Promise.new
    set { st with pendingConsumer := some (.normal promise) }
    Channel.signalInterest
    return promise.result?.map (sync := true) fun
      | none => .error (IO.Error.userError "the promise linked to the consumer was dropped")
      | some res => .ok res

/--
Receives a chunk from the channel. Blocks until a producer sends one.

Returns `none` if the channel is closed and no producer is waiting.
-/
def recv (stream : Stream) : Async (Option Chunk) := do
  Async.ofAsyncTask (← recv' stream)

/--
Closes the channel.
-/
def close (stream : Stream) : Async Unit :=
  stream.state.atomically do
    Channel.close'

/--
Checks whether the channel is closed.
-/
@[always_inline, inline]
def isClosed (stream : Stream) : Async Bool :=
  stream.state.atomically do
    return (← get).closed

/--
Gets the known size if available.
-/
@[always_inline, inline]
def getKnownSize (stream : Stream) : Async (Option Body.Length) :=
  stream.state.atomically do
    return (← get).knownSize

/--
Sets known size metadata.
-/
@[always_inline, inline]
def setKnownSize (stream : Stream) (size : Option Body.Length) : Async Unit :=
  stream.state.atomically do
    modify fun st => { st with knownSize := size }
open Internal.IO.Async in
/--
Creates a selector that resolves when a producer is waiting (or the channel closes).
-/
def recvSelector (stream : Stream) : Selector (Option Chunk) where
  tryFn := do
    stream.state.atomically do
      Channel.pruneFinishedWaiters
      if ← Channel.recvReady' then
        return some (← Channel.tryRecv')
      else
        return none
  registerFn waiter := do
    stream.state.atomically do
      Channel.pruneFinishedWaiters
      if ← Channel.recvReady' then
        let lose := return ()
        let win promise := do
          promise.resolve (.ok (← Channel.tryRecv'))
        waiter.race lose win
      else
        let st ← get
        if st.pendingConsumer.isSome then
          throw (.userError "only one blocked consumer is allowed")
        set { st with pendingConsumer := some (.select waiter) }
        Channel.signalInterest
  unregisterFn := do
    stream.state.atomically do
      Channel.pruneFinishedWaiters
/--
Iterates over chunks until the channel closes.
-/
@[inline]
protected partial def forIn
    {β : Type} (stream : Stream) (acc : β)
    (step : Chunk → β → Async (ForInStep β)) : Async β := do
  let rec @[specialize] loop (stream : Stream) (acc : β) : Async β := do
    if let some chunk ← stream.recv then
      match ← step chunk acc with
      | .done res => return res
      | .yield res => loop stream res
    else
      return acc
  loop stream acc

/--
Context-aware iteration over chunks until the channel closes.
-/
@[inline]
protected partial def forIn'
    {β : Type} (stream : Stream) (acc : β)
    (step : Chunk → β → ContextAsync (ForInStep β)) : ContextAsync β := do
  let rec @[specialize] loop (stream : Stream) (acc : β) : ContextAsync β := do
    -- Race the receive against cancellation of the surrounding context.
    let data ← Selectable.one #[
      .case stream.recvSelector pure,
      .case (← ContextAsync.doneSelector) (fun _ => pure none),
    ]
    if let some chunk := data then
      match ← step chunk acc with
      | .done res => return res
      | .yield res => loop stream res
    else
      return acc
  loop stream acc
/--
Abstracts over how the next chunk is received, allowing `readAll` to work in both `Async`
(no cancellation) and `ContextAsync` (races with cancellation via `doneSelector`).
-/
class NextChunk (m : Type → Type) where
  /--
  Receives the next chunk, stopping at EOF or (in `ContextAsync`) when the context is cancelled.
  -/
  nextChunk : Stream → m (Option Chunk)

instance : NextChunk Async where
  nextChunk := Stream.recv

instance : NextChunk ContextAsync where
  nextChunk stream := do
    Selectable.one #[
      .case stream.recvSelector pure,
      .case (← ContextAsync.doneSelector) (fun _ => pure none),
    ]
/--
Reads all remaining chunks and decodes them into `α`.

Works in both `Async` (reads until EOF, no cancellation) and `ContextAsync` (also stops if the
context is cancelled).

Throws `IO.Error.userError` when the accumulated body exceeds `maximumSize` bytes, or when
decoding via `FromByteArray` fails.
-/
partial def readAll
    [FromByteArray α]
    [Monad m] [MonadExceptOf IO.Error m] [NextChunk m]
    (stream : Stream)
    (maximumSize : Option UInt64 := none) :
    m α := do
  let rec loop (result : ByteArray) : m ByteArray := do
    match ← NextChunk.nextChunk stream with
    | none => return result
    | some chunk =>
      let result := result ++ chunk.data
      -- Check the limit after each append so oversized bodies fail promptly.
      if let some max := maximumSize then
        if result.size.toUInt64 > max then
          throw (.userError s!"body exceeded maximum size of {max} bytes")
      loop result
  let result ← loop ByteArray.empty
  match FromByteArray.fromByteArray result with
  | .ok a => return a
  | .error msg => throw (.userError msg)
-- Merges any buffered incomplete pieces with `chunk`. When `incomplete` is set, the merged
-- chunk is buffered instead of being returned for delivery.
private def collapseForSend
    (stream : Stream)
    (chunk : Chunk)
    (incomplete : Bool) : BaseIO (Except IO.Error (Option Chunk)) := do
  stream.state.atomically do
    Channel.pruneFinishedWaiters
    let st ← get
    if st.closed then
      return .error (.userError "channel closed")
    let merged := match st.pendingIncompleteChunk with
      | some pending =>
        {
          data := pending.data ++ chunk.data
          -- Earlier extensions win; later pieces only contribute extensions if none buffered.
          extensions := if pending.extensions.isEmpty then chunk.extensions else pending.extensions
        }
      | none => chunk
    if incomplete then
      set { st with pendingIncompleteChunk := some merged }
      return .ok none
    else
      set { st with pendingIncompleteChunk := none }
      return .ok (some merged)
/--
Sends a chunk, retrying if a select-mode consumer races and loses. If no consumer is ready,
installs the chunk as a pending producer and awaits acknowledgement from the receiver.
-/
private partial def send' (stream : Stream) (chunk : Chunk) : Async Unit := do
  let done ← IO.Promise.new
  let result : Except IO.Error (Option Bool) ← stream.state.atomically do
    Channel.pruneFinishedWaiters
    let st ← get
    if st.closed then
      return .error (IO.Error.userError "channel closed")
    if let some consumer := st.pendingConsumer then
      let success ← consumer.resolve (some chunk)
      if success then
        set {
          st with
          pendingConsumer := none
          knownSize := Channel.decreaseKnownSize st.knownSize chunk
        }
        return .ok (some true)
      else
        set { st with pendingConsumer := none }
        return .ok (some false)
    else if st.pendingProducer.isSome then
      return .error (IO.Error.userError "only one blocked producer is allowed")
    else
      set { st with pendingProducer := some { chunk, done } }
      return .ok none
  match result with
  | .error err =>
    throw err
  | .ok (some true) =>
    return ()
  | .ok (some false) =>
    -- The select-mode consumer raced and lost; recurse to allocate a fresh `done` promise.
    send' stream chunk
  | .ok none =>
    match ← await done.result? with
    | some true => return ()
    | _ => throw (IO.Error.userError "channel closed")
/--
Sends a chunk.
If `incomplete := true`, the chunk is buffered and collapsed with subsequent chunks, and is not
delivered to the receiver yet.
If `incomplete := false`, any buffered incomplete pieces are collapsed with this chunk and the
single merged chunk is sent.
-/
def send (stream : Stream) (chunk : Chunk) (incomplete : Bool := false) : Async Unit := do
  match (← collapseForSend stream chunk incomplete) with
  | .error err => throw err
  | .ok none => pure ()
  | .ok (some toSend) =>
    -- A fully empty chunk carries no data and no extensions; do not wake a consumer for it.
    if toSend.data.isEmpty && toSend.extensions.isEmpty then
      return ()
    send' stream toSend
/--
Returns `true` when a consumer is currently blocked waiting for data.
-/
def hasInterest (stream : Stream) : Async Bool :=
stream.state.atomically do
-- Drop waiters whose tasks already completed before inspecting interest.
Channel.pruneFinishedWaiters
Channel.hasInterest'
open Internal.IO.Async in
/--
Creates a selector that resolves when consumer interest is present.
Returns `true` when a consumer is waiting, `false` when the channel closes first.
-/
def interestSelector (stream : Stream) : Selector Bool where
  tryFn := do
    stream.state.atomically do
      Channel.pruneFinishedWaiters
      let st ← get
      if st.pendingConsumer.isSome then
        return some true
      else if st.closed then
        return some false
      else
        return none
  registerFn waiter := do
    stream.state.atomically do
      Channel.pruneFinishedWaiters
      let st ← get
      if st.pendingConsumer.isSome then
        -- Interest already present: try to win the waiter race and resolve immediately.
        let lose := return ()
        let win promise := do
          promise.resolve (.ok true)
        waiter.race lose win
      else if st.closed then
        let lose := return ()
        let win promise := do
          promise.resolve (.ok false)
        waiter.race lose win
      else if st.interestWaiter.isSome then
        throw (.userError "only one blocked interest selector is allowed")
      else
        set { st with interestWaiter := some waiter }
  unregisterFn := do
    stream.state.atomically do
      -- Pruning removes this selector's waiter if its task has already finished.
      Channel.pruneFinishedWaiters
end Stream
/--
Creates a body from a producer function.
Returns the stream immediately and runs `gen` in a detached task.
The channel is always closed when `gen` returns or throws.
Errors from `gen` are not rethrown here; consumers observe end-of-stream via `recv = none`.
-/
def stream (gen : Stream → Async Unit) : Async Stream := do
  let s ← mkStream
  background <| do
    try
      gen s
    finally
      -- Guarantee consumers observe end-of-stream even when `gen` throws.
      s.close
  return s
/--
Creates a body from a fixed byte array.
-/
def fromBytes (content : ByteArray) : Async Stream := do
stream fun s => do
-- Advertise the exact length up front so consumers can emit Content-Length.
s.setKnownSize (some (.fixed content.size))
-- Skip the send entirely for empty content: an empty body produces no chunks.
if content.size > 0 then
s.send (Chunk.ofByteArray content)
/--
Creates an empty `Stream` body channel (already closed, no data).
Prefer `Body.Empty` when you need a concrete zero-cost type. Use this when the calling
context requires a `Stream` specifically.
-/
def empty : Async Stream := do
  let s ← mkStream
  -- A closed channel with a fixed known size of zero models the empty body.
  s.setKnownSize (some (.fixed 0))
  s.close
  return s
instance : ForIn Async Stream Chunk where
  forIn := Stream.forIn

instance : ForIn ContextAsync Stream Chunk where
  forIn := Stream.forIn'

-- `Stream` satisfies the generic `Http.Body` interface by delegating to its own operations.
instance : Http.Body Stream where
  recv := Stream.recv
  close := Stream.close
  isClosed := Stream.isClosed
  recvSelector := Stream.recvSelector
  getKnownSize := Stream.getKnownSize
  setKnownSize := Stream.setKnownSize

instance : Coe Stream Any := ⟨Any.ofBody⟩

instance : Coe (Response Stream) (Response Any) where
  coe f := { f with }

instance : Coe (ContextAsync (Response Stream)) (ContextAsync (Response Any)) where
  coe action := do
    let response ← action
    pure (response : Response Any)

instance : Coe (Async (Response Stream)) (ContextAsync (Response Any)) where
  coe action := do
    let response ← action
    pure (response : Response Any)
end Body
namespace Request.Builder
open Internal.IO.Async
/--
Builds a request with a streaming body generator.
-/
def stream
    (builder : Builder)
    (gen : Body.Stream → Async Unit) :
    Async (Request Body.Stream) := do
  let s ← Body.stream gen
  return Request.Builder.body builder s
end Request.Builder
namespace Response.Builder
open Internal.IO.Async
/--
Builds a response with a streaming body generator.
-/
def stream
    (builder : Builder)
    (gen : Body.Stream → Async Unit) :
    Async (Response Body.Stream) := do
  let s ← Body.stream gen
  return Response.Builder.body builder s
end Response.Builder

View File

@@ -6,6 +6,7 @@ Authors: Sofia Rodrigues
module
prelude
public import Std.Internal.Http.Data.URI
public import Std.Internal.Http.Data.Headers.Name
public import Std.Internal.Http.Data.Headers.Value
public import Std.Internal.Parsec.Basic
@@ -214,4 +215,97 @@ def serialize (connection : Connection) : Header.Name × Header.Value :=
instance : Header Connection := parse, serialize
end Std.Http.Header.Connection
end Connection
/--
The `Host` header.
Represents the authority component of a URI:
host [ ":" port ]
Reference: https://www.rfc-editor.org/rfc/rfc9110.html#name-host-and-authority
-/
structure Host where
/--
Host name (reg-name, IPv4, or IPv6 literal).
-/
host : URI.Host
/--
Optional port.
-/
-- `URI.Port` distinguishes an omitted port, an empty port (`host:`), and a numeric port.
port : URI.Port
deriving Repr, BEq
namespace Host

/--
Parses a `Host` header value.
Returns `none` when the value is not a valid `host [":" port]` authority.
-/
def parse (v : Value) : Option Host :=
  -- `eof` ensures the whole header value is consumed; trailing garbage is rejected.
  let parsed := (Std.Http.URI.Parser.parseHostHeader <* Std.Internal.Parsec.eof).run v.value.toUTF8
  match parsed with
  | .ok ⟨host, port⟩ => some ⟨host, port⟩
  | .error _ => none

/--
Serializes a `Host` header back to a name and a value.
-/
def serialize (host : Host) : Header.Name × Header.Value :=
  -- Preserve the omitted/empty/numeric port distinction on the wire.
  let value := match host.port with
    | .value port => Header.Value.ofString! s!"{host.host}:{port}"
    | .empty => Header.Value.ofString! s!"{host.host}:"
    | .omitted => Header.Value.ofString! <| toString host.host
  (.mk "host", value)

instance : Header Host := ⟨parse, serialize⟩

end Host
/--
The `Expect` header.
Represents an expectation token.
The only standardized expectation is `100-continue`.
Reference: https://www.rfc-editor.org/rfc/rfc9110.html#name-expect
-/
structure Expect where
/--
True if the client expects `100-continue`.
-/
-- Other expectation tokens are not representable; parsing them fails (see `Expect.parse`).
expect : Bool
deriving Repr, BEq
namespace Expect

/--
Parses an `Expect` header.
Succeeds only if the value is exactly `100-continue`
(case-insensitive, trimmed).
-/
def parse (v : Value) : Option Expect :=
  let normalized := v.value.trimAscii.toString.toLower
  if normalized == "100-continue" then
    some ⟨true⟩
  else
    none

/--
Serializes an `Expect` header.
-/
def serialize (e : Expect) : Header.Name × Header.Value :=
  if e.expect then
    (Header.Name.expect, Value.ofString! "100-continue")
  else
    -- `expect := false` serializes to an empty value; callers should omit the header instead.
    (Header.Name.expect, Value.ofString! "")

instance : Header Expect := ⟨parse, serialize⟩

end Expect
end Std.Http.Header

View File

@@ -10,6 +10,7 @@ public import Std.Internal.Http.Data.Extensions
public import Std.Internal.Http.Data.Method
public import Std.Internal.Http.Data.Version
public import Std.Internal.Http.Data.Headers
public import Std.Internal.Http.Data.URI
public section
@@ -50,7 +51,7 @@ structure Request.Head where
/--
The raw request-target string (commonly origin-form path/query, `"*"`, or authority-form).
-/
uri : String
uri : RequestTarget := .asteriskForm
/--
Collection of HTTP headers for the request (Content-Type, Authorization, etc.).
@@ -85,7 +86,7 @@ structure Request.Builder where
/--
The request-line of an HTTP request.
-/
line : Head := { method := .get, version := .v11, uri := "*" }
line : Head := { method := .get, version := .v11, uri := .asteriskForm }
/--
Optional dynamic metadata attached to the request.
@@ -108,7 +109,7 @@ instance : Encode .v11 Head where
encode buffer req :=
let buffer := Encode.encode (v := .v11) buffer req.method
let buffer := buffer.writeChar ' '
let buffer := buffer.writeString req.uri
let buffer := Encode.encode (v := .v11) buffer req.uri
let buffer := buffer.writeChar ' '
let buffer := Encode.encode (v := .v11) buffer req.version
let buffer := buffer.writeString "\r\n"
@@ -123,12 +124,6 @@ def new : Builder := { }
namespace Builder
/--
Creates a new HTTP request builder with the default head
(method: GET, version: HTTP/1.1, target: `*`).
-/
def empty : Builder := { }
/--
Sets the HTTP method for the request being built.
-/
@@ -144,11 +139,18 @@ def version (builder : Builder) (version : Version) : Builder :=
/--
Sets the request target/URI for the request being built.
-/
def uri (builder : Builder) (uri : String) : Builder :=
def uri (builder : Builder) (uri : RequestTarget) : Builder :=
{ builder with line := { builder.line with uri := uri } }
/--
Sets the headers for the request being built.
Sets the request target/URI for the request being built
-/
def uri! (builder : Builder) (uri : String) : Builder :=
let uri := RequestTarget.parse! uri
{ builder with line := { builder.line with uri } }
/--
Sets the headers for the request being built
-/
def headers (builder : Builder) (headers : Headers) : Builder :=
{ builder with line := { builder.line with headers } }
@@ -201,7 +203,7 @@ end Builder
/--
Creates a new HTTP GET Request with the specified URI.
-/
def get (uri : String) : Builder :=
def get (uri : RequestTarget) : Builder :=
new
|>.method .get
|>.uri uri
@@ -209,7 +211,7 @@ def get (uri : String) : Builder :=
/--
Creates a new HTTP POST Request builder with the specified URI.
-/
def post (uri : String) : Builder :=
def post (uri : RequestTarget) : Builder :=
new
|>.method .post
|>.uri uri
@@ -217,7 +219,7 @@ def post (uri : String) : Builder :=
/--
Creates a new HTTP PUT Request builder with the specified URI.
-/
def put (uri : String) : Builder :=
def put (uri : RequestTarget) : Builder :=
new
|>.method .put
|>.uri uri
@@ -225,7 +227,7 @@ def put (uri : String) : Builder :=
/--
Creates a new HTTP DELETE Request builder with the specified URI.
-/
def delete (uri : String) : Builder :=
def delete (uri : RequestTarget) : Builder :=
new
|>.method .delete
|>.uri uri
@@ -233,7 +235,7 @@ def delete (uri : String) : Builder :=
/--
Creates a new HTTP PATCH Request builder with the specified URI.
-/
def patch (uri : String) : Builder :=
def patch (uri : RequestTarget) : Builder :=
new
|>.method .patch
|>.uri uri
@@ -241,25 +243,25 @@ def patch (uri : String) : Builder :=
/--
Creates a new HTTP HEAD Request builder with the specified URI.
-/
def head (uri : String) : Builder :=
def head (uri : RequestTarget) : Builder :=
new
|>.method .head
|>.uri uri
/--
Creates a new HTTP OPTIONS Request builder with the specified URI.
Use `Request.options "*"` for server-wide OPTIONS.
Use `Request.options (RequestTarget.asteriskForm)` for server-wide OPTIONS.
-/
def options (uri : String) : Builder :=
def options (uri : RequestTarget) : Builder :=
new
|>.method .options
|>.uri uri
/--
Creates a new HTTP CONNECT Request builder with the specified URI.
Typically used with authority-form URIs such as `"example.com:443"` for tunneling.
Typically used with `RequestTarget.authorityForm` for tunneling.
-/
def connect (uri : String) : Builder :=
def connect (uri : RequestTarget) : Builder :=
new
|>.method .connect
|>.uri uri
@@ -267,7 +269,7 @@ def connect (uri : String) : Builder :=
/--
Creates a new HTTP TRACE Request builder with the specified URI.
-/
def trace (uri : String) : Builder :=
def trace (uri : RequestTarget) : Builder :=
new
|>.method .trace
|>.uri uri

View File

@@ -111,7 +111,7 @@ namespace Builder
/--
Creates a new HTTP Response builder with default head (status: 200 OK, version: HTTP/1.1).
-/
def empty : Builder := { }
def new : Builder := { }
/--
Sets the HTTP status code for the response being built.
@@ -173,66 +173,66 @@ end Builder
Creates a new HTTP Response builder with the 200 status code.
-/
def ok : Builder :=
.empty |>.status .ok
.new |>.status .ok
/--
Creates a new HTTP Response builder with the provided status.
-/
def withStatus (status : Status) : Builder :=
.empty |>.status status
.new |>.status status
/--
Creates a new HTTP Response builder with the 404 status code.
-/
def notFound : Builder :=
.empty |>.status .notFound
.new |>.status .notFound
/--
Creates a new HTTP Response builder with the 500 status code.
-/
def internalServerError : Builder :=
.empty |>.status .internalServerError
.new |>.status .internalServerError
/--
Creates a new HTTP Response builder with the 400 status code.
-/
def badRequest : Builder :=
.empty |>.status .badRequest
.new |>.status .badRequest
/--
Creates a new HTTP Response builder with the 201 status code.
-/
def created : Builder :=
.empty |>.status .created
.new |>.status .created
/--
Creates a new HTTP Response builder with the 202 status code.
-/
def accepted : Builder :=
.empty |>.status .accepted
.new |>.status .accepted
/--
Creates a new HTTP Response builder with the 401 status code.
-/
def unauthorized : Builder :=
.empty |>.status .unauthorized
.new |>.status .unauthorized
/--
Creates a new HTTP Response builder with the 403 status code.
-/
def forbidden : Builder :=
.empty |>.status .forbidden
.new |>.status .forbidden
/--
Creates a new HTTP Response builder with the 409 status code.
-/
def conflict : Builder :=
.empty |>.status .conflict
.new |>.status .conflict
/--
Creates a new HTTP Response builder with the 503 status code.
-/
def serviceUnavailable : Builder :=
.empty |>.status .serviceUnavailable
.new |>.status .serviceUnavailable
end Response

View File

@@ -0,0 +1,96 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Data.URI.Basic
public import Std.Internal.Http.Data.URI.Parser
public section
/-!
# URI
This module defines the `URI` and `RequestTarget` types that represent and manipulate components of
URIs as defined by RFC 3986. It provides parsing, rendering, and normalization utilities for working
with URIs and request targets in HTTP messages.
References:
* https://www.rfc-editor.org/rfc/rfc3986.html
* https://www.rfc-editor.org/rfc/rfc9112.html#section-3.3
-/
namespace Std.Http.RequestTarget
set_option linter.all true
/--
Attempts to parse a `RequestTarget` from the given string.
-/
@[inline]
def parse? (string : String) : Option RequestTarget :=
-- `eof` forces the parser to consume the entire input, rejecting trailing garbage.
(URI.Parser.parseRequestTarget <* Std.Internal.Parsec.eof).run string.toUTF8 |>.toOption
/--
Parses a `RequestTarget` from the given string. Panics if parsing fails. Use `parse?`
if you need a safe option-returning version.
-/
@[inline]
def parse! (string : String) : RequestTarget :=
match parse? string with
| some res => res
| none => panic! "invalid request target"
/--
Creates an origin-form request target from a path string.
The path should start with '/' (e.g., "/api/users" or "/search?q=test").
Panics if the string is not a valid origin-form request target.
-/
@[inline]
def originForm! (path : String) : RequestTarget :=
-- Only the origin-form constructor is accepted; any other parse result panics.
match parse? path with
| some (.originForm p q) => .originForm p q
| _ => panic! s!"invalid origin-form request target: {path}"
end RequestTarget
namespace URI
/--
Attempts to parse a `URI` from the given string.
-/
@[inline]
def parse? (string : String) : Option URI :=
-- `eof` forces the parser to consume the entire input, rejecting trailing garbage.
(Parser.parseURI <* Std.Internal.Parsec.eof).run string.toUTF8 |>.toOption
/--
Parses a `URI` from the given string. Panics if parsing fails. Use `parse?` if you need a safe
option-returning version.
-/
@[inline]
def parse! (string : String) : URI :=
match parse? string with
| some res => res
| none => panic! "invalid URI"
namespace Path
/--
Attempts to parse a URI path from the given string.
Returns `none` if the string is not a valid path.
-/
@[inline]
def parse? (s : String) : Option Std.Http.URI.Path :=
-- NOTE(review): the two Bool flags to `parsePath` presumably control absolute/empty-segment
-- handling — confirm against `URI.Parser.parsePath`'s signature.
(Std.Http.URI.Parser.parsePath {} true true <* Std.Internal.Parsec.eof).run s.toUTF8 |>.toOption
/--
Parses a URI path from the given string. Returns the root path `"/"` if parsing fails.
-/
@[inline]
def parseOrRoot (s : String) : Std.Http.URI.Path :=
parse? s |>.getD { segments := #[], absolute := true }
end Std.Http.URI.Path

View File

@@ -0,0 +1,916 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
import Init.Data.ToString
public import Std.Net
public import Std.Internal.Http.Internal
public import Std.Internal.Http.Data.URI.Encoding
public section
/-!
# URI Structure
This module defines an HTTP-oriented URI structure aligned with RFC 3986 and RFC 9110, including
schemes, authorities, paths, queries, fragments, and request targets.
Host handling is intentionally strict: this module accepts IPv4, bracketed IPv6, and DNS-style
domain names (LDH labels). RFC 3986 `reg-name` forms that are not DNS-compatible are rejected.
All text components use the encoding types from `Std.Http.URI.Encoding` to ensure proper
percent-encoding is maintained throughout.
References:
* https://www.rfc-editor.org/rfc/rfc3986.html
* https://www.rfc-editor.org/rfc/rfc9110.html#name-uri-references
-/
namespace Std.Http
set_option linter.all true
open Internal Char
namespace URI
/--
Proposition that `s` is a valid URI scheme per RFC 3986:
`scheme = ALPHA *( ALPHA / DIGIT / "+" / "-" / "." )`.
The scheme value stored in this module is normalized to lowercase.
Reference: https://www.rfc-editor.org/rfc/rfc3986.html#section-3.1
-/
abbrev IsValidScheme (s : String) : Prop :=
  IsLowerCase s ∧
  s.toList.all isValidSchemeChar ∧
  (s.toList.head?.map isAlpha |>.getD false) -- first character must be ALPHA
/--
URI scheme identifier (e.g., "http", "https", "ftp").
-/
abbrev Scheme := { s : String // IsValidScheme s }

instance : Inhabited Scheme where
  -- The subtype proof is the three-way conjunction of `IsValidScheme`, discharged by `decide`.
  default := ⟨"http", by decide, by decide, by decide⟩
namespace Scheme

/--
Attempts to create a `Scheme` from a string, normalizing to lowercase.
Returns `none` if the scheme is invalid per RFC 3986 Section 3.1.
-/
def ofString? (s : String) : Option Scheme :=
  let lower := s.toLower
  if h : IsValidScheme lower then
    some ⟨lower, h⟩
  else
    none
/--
Creates a `Scheme` from a string, normalizing to lowercase. Panics if invalid.
-/
def ofString! (s : String) : Scheme :=
match ofString? s with
| some scheme => scheme
| none => panic! s!"invalid URI scheme: {s.quote}"
/--
Returns the default port number for this URI scheme: 443 for `https`, 80 for everything else.
-/
-- Note: any non-https scheme (including ftp, ws, …) maps to 80 here by design.
def defaultPort (scheme : URI.Scheme) : UInt16 :=
if scheme.val == "https" then 443 else 80
/--
Returns the URI scheme for a given port: `"https"` for 443, `"http"` otherwise.
-/
def ofPort (port : UInt16) : URI.Scheme :=
  -- Both literals are proven valid schemes by `decide`.
  if port == 443 then ⟨"https", by decide⟩ else ⟨"http", by decide⟩

end Scheme
/--
User information component containing an encoded username and optional encoded password.
The stored strings use URI userinfo percent-encoding so parsed URIs can be rendered without
losing percent-encoding choices (for example, `%3A` versus `:`).
Note: embedding passwords in URIs is deprecated per RFC 9110 Section 4.2.4. Avoid using the
password field in new code, and never include it in logs or error messages.
Reference: https://www.rfc-editor.org/rfc/rfc3986.html#section-3.2.1
-/
structure UserInfo where
/--
The encoded username.
-/
username : EncodedUserInfo
/--
The optional encoded password.
-/
password : Option EncodedUserInfo
deriving Inhabited, Repr, BEq
namespace UserInfo
/--
Builds a `UserInfo` value from raw strings by applying userinfo percent-encoding.
-/
@[inline]
def ofStrings (username : String) (password : Option String := none) : UserInfo where
username := EncodedUserInfo.encode username
password := EncodedUserInfo.encode <$> password
/--
Returns the decoded username, or `none` if decoding fails UTF-8 validation.
-/
@[inline]
def username? (ui : UserInfo) : Option String :=
ui.username.decode
/--
Returns the decoded password when present, or `none` if absent or decoding fails UTF-8 validation.
-/
-- `bind` collapses "no password" and "undecodable password" into a single `none`.
@[inline]
def password? (ui : UserInfo) : Option String :=
ui.password.bind EncodedUserInfo.decode
end UserInfo
/--
Checks whether a single domain label is valid. A label must be non-empty, contain only ASCII
alphanumeric characters and `-`, cannot start or end with `-`, and must be at most 63 characters.
References:
* https://www.rfc-editor.org/rfc/rfc1034.html#section-3.5
* https://www.rfc-editor.org/rfc/rfc1123.html#section-2.1
-/
def isValidDomainLabel (s : String) : Bool :=
  let chars := s.toList
  decide (chars.length ≤ 63) &&
  chars.all (fun c => isAsciiAlphaNumChar c ∨ c = '-') &&
  -- `head?`/`getLast?` return `none` for the empty string, so emptiness also fails here.
  (chars.head?.map isAsciiAlphaNumChar |>.getD false) &&
  (chars.getLast?.map isAsciiAlphaNumChar |>.getD false)
/--
Proposition that asserts `s` is a valid dot-separated domain name.
Each label must satisfy `IsValidDomainLabel`, and the full name must be at most 255 characters.
-/
abbrev IsValidDomainName (s : String) : Prop :=
  let labels := s.splitOn "."
  ¬labels.isEmpty ∧ labels.all isValidDomainLabel ∧ s.length ≤ 255
/--
A domain name represented as a validated, lowercase-normalized string.
Only ASCII alphanumeric characters, hyphens, and dots are allowed.
Each label cannot start or end with `-` and is limited to 63 characters.
Internationalized domain names must be converted to punycode before use.
Reference: https://www.rfc-editor.org/rfc/rfc3986.html#section-3.2.2
-/
abbrev DomainName := { s : String // IsLowerCase s ∧ IsValidDomainName s ∧ ¬s.isEmpty }
namespace DomainName

/--
Attempts to create a normalized domain name from a string.
Returns `none` if the name is empty, longer than 255 characters, or any label violates DNS label
constraints.
-/
def ofString? (s : String) : Option DomainName :=
  let lower := s.toLower
  if h₁ : lower.isEmpty then
    none
  else if h₃ : IsValidDomainName lower then
    -- Proof obligations: lowercase (by construction), validity (h₃), non-emptiness (h₁).
    some ⟨lower, IsLowerCase.isLowerCase_toLower, h₃, h₁⟩
  else
    none

end DomainName
/--
Host component of a URI, supporting domain names and IP addresses.
Reference: https://www.rfc-editor.org/rfc/rfc3986.html#section-3.2.2
-/
-- RFC 3986 `reg-name` forms that are not DNS-compatible are intentionally not representable.
inductive Host
/--
A domain name (lowercase-normalized).
-/
| name (name : DomainName)
/--
An IPv4 address.
-/
| ipv4 (ipv4 : Net.IPv4Addr)
/--
An IPv6 address.
-/
| ipv6 (ipv6 : Net.IPv6Addr)
deriving Inhabited, BEq
-- Manual `Repr` because the IP address types render via `ToString` rather than `Repr`.
instance : Repr Host where
  reprPrec x prec :=
    -- NOTE(review): comparison restored as `≥ 1024` (max precedence) — confirm against original.
    let nestPrec := (if prec ≥ 1024 then 1 else 2)
    let name := "Std.Http.URI.Host"
    let repr (ctr : String) a :=
      Repr.addAppParen (Format.nest nestPrec (.text s!"{name}.{ctr}" ++ .line ++ a)).group prec
    match x with
    | Host.name a => repr "name" (reprArg a)
    | Host.ipv4 a => repr "ipv4" (toString a)
    | Host.ipv6 a => repr "ipv6" (toString a)
instance : ToString Host where
toString
| .name n => n
| .ipv4 addr => toString addr
-- IPv6 literals are bracketed, matching the URI authority syntax.
| .ipv6 addr => s!"[{toString addr}]"
/--
Authority port representation, preserving the distinction between:
* no port separator (`example.com`)
* empty port (`example.com:`)
* numeric port (`example.com:443`)
-/
inductive Port where
/--
No `:` port separator is present (for example, `example.com`).
-/
| omitted
/--
A `:` port separator is present with no digits after it (for example, `example.com:`).
-/
| empty
/--
A numeric port value is present (for example, `example.com:443`).
-/
| value (port : UInt16)
deriving Inhabited, Repr, DecidableEq
/--
The authority component of a URI, identifying the network location of the resource.
Reference: https://www.rfc-editor.org/rfc/rfc3986.html#section-3.2
-/
structure Authority where
/--
Optional user information (username and password).
-/
userInfo : Option UserInfo := none
/--
The host identifying the network location.
-/
host : Host
/--
Port component, preserving whether it is omitted (`example.com`),
explicitly empty (`example.com:`), or numeric (`example.com:443`).
-/
port : Port := .omitted
deriving Inhabited, Repr, BEq
instance : ToString Authority where
  toString auth :=
    -- Renders the (encoded) userinfo verbatim, including a deprecated password when present.
    let userPart := match auth.userInfo with
      | none => ""
      | some ⟨name, some pass⟩ => s!"{name}:{pass}@"
      | some ⟨name, none⟩ => s!"{name}@"
    let hostPart := toString auth.host
    -- Preserve the omitted/empty/numeric port distinction.
    let portPart := match auth.port with
      | .omitted => ""
      | .empty => ":"
      | .value p => s!":{p}"
    s!"{userPart}{hostPart}{portPart}"
/--
Hierarchical path component of a URI. Each segment is stored as an `EncodedSegment` to maintain
proper percent-encoding.
Reference: https://www.rfc-editor.org/rfc/rfc3986.html#section-3.3
-/
structure Path where
/--
The path segments making up the hierarchical structure (each segment is percent-encoded).
-/
segments : Array EncodedSegment
/--
Whether the path is absolute (begins with '/') or relative.
-/
absolute : Bool
deriving Inhabited, Repr, BEq
instance : ToString Path where
toString path :=
let result := String.intercalate "/" (path.segments.map toString).toList
if path.absolute then "/" ++ result else result
namespace Path
/--
Returns true if the path has no segments.
-/
def isEmpty (p : Path) : Bool := p.segments.isEmpty
/--
Returns the parent path by removing the last segment. If the path is empty, returns the path unchanged.
-/
def parent (p : Path) : Path :=
if p.segments.isEmpty then p
else { p with segments := p.segments.pop }
/--
Joins two paths. If the second path is absolute, it is returned as-is. Otherwise, the second path's
segments are appended to the first path.
-/
def join (p1 : Path) (p2 : Path) : Path :=
if p2.absolute then p2
else { p1 with segments := p1.segments ++ p2.segments }
/--
Appends a single segment to the path. The segment will be percent-encoded.
-/
def append (p : Path) (segment : String) : Path :=
{ p with segments := p.segments.push (EncodedSegment.encode segment) }
/--
Appends an already-encoded segment to the path.
-/
def appendEncoded (p : Path) (segment : EncodedSegment) : Path :=
{ p with segments := p.segments.push segment }
/--
Removes dot segments from the path according to RFC 3986 Section 5.2.4. This handles "."
(current directory) and ".." (parent directory) segments.
-/
def normalize (p : Path) : Path :=
-- Builds the result in reverse: "." is dropped, ".." pops the most recent kept segment
-- (and is silently discarded at the root, matching RFC 3986 merge semantics).
let rec loop (input : List (EncodedSegment)) (output : List (EncodedSegment)) : List (EncodedSegment) :=
match input with
| [] =>
output.reverse
| segStr :: rest =>
if toString segStr == "." then
loop rest output
else if toString segStr == ".." then
match output with
| [] => loop rest []
| _ :: tail => loop rest tail
else
loop rest (segStr :: output)
{ p with segments := (loop p.segments.toList []).toArray }
/--
Returns the path segments as decoded strings.
Segments that cannot be decoded as UTF-8 are returned as their raw encoded form.
-/
def toDecodedSegments (p : Path) : Array String :=
p.segments.map fun seg =>
seg.decode.getD (toString seg)
end Path
/--
Query string represented as an array of key-value pairs. Both keys and values are stored as
`EncodedQueryParam` for proper application/x-www-form-urlencoded encoding. Values are optional to
support parameters without values (e.g., "?flag"). Order is preserved based on insertion order.
Reference: https://www.rfc-editor.org/rfc/rfc3986.html#section-3.4
-/
@[expose]
def Query := Array (EncodedQueryParam × Option EncodedQueryParam)
deriving Repr, Inhabited, BEq
namespace Query
/--
Extracts all unique query parameter names.
-/
@[expose]
def names (query : Query) : Array EncodedQueryParam :=
query.map (fun p => p.fst)
|> Array.toList
|> List.eraseDups
|> List.toArray
/--
Extracts all query parameter values.
-/
@[expose]
def values (query : Query) : Array (Option EncodedQueryParam) :=
query.map (fun p => p.snd)
/--
Returns the query as an array of (key, value) pairs. This is an identity function since Query is
already an array of pairs.
-/
@[expose]
def toArray (query : Query) : Array (EncodedQueryParam × Option EncodedQueryParam) :=
query
/--
Formats a query parameter as a string in the format "key" or "key=value". The key and value are
already percent-encoded as `EncodedQueryParam`.
-/
def formatQueryParam (key : EncodedQueryParam) (value : Option EncodedQueryParam) : String :=
match value with
| none => toString key
| some v => s!"{toString key}={toString v}"
/--
Finds the first value of a query parameter by key name. Returns `none` if the key is not found.
The value remains encoded as `EncodedQueryParam`.
-/
-- Keys are compared by their raw encoded bytes, so `%41` and `A` are distinct keys here.
def findEncoded? (query : Query) (key : EncodedQueryParam) : Option (Option EncodedQueryParam) :=
let matchingKey := Array.find? (fun x => x.fst.toByteArray = key.toByteArray) query
matchingKey.map (fun x => x.snd)
/--
Finds the first value of a query parameter by raw key string. The key is percent-encoded before
matching. This avoids aliasing between raw and pre-encoded spellings.
-/
def find? (query : Query) (key : String) : Option (Option EncodedQueryParam) :=
query.findEncoded? (EncodedQueryParam.encode key)
/--
Finds all values of a query parameter by key name. Returns an empty array if the key is not found.
The values remain encoded as `EncodedQueryParam`.
-/
def findAllEncoded (query : Query) (key : EncodedQueryParam) : Array (Option EncodedQueryParam) :=
query.filterMap (fun x =>
if x.fst.toByteArray = key.toByteArray then
some x.snd
else
none)
/--
Finds all values of a query parameter by raw key string. The key is percent-encoded before matching.
-/
def findAll (query : Query) (key : String) : Array (Option EncodedQueryParam) :=
query.findAllEncoded (EncodedQueryParam.encode key)
/--
Adds a query parameter to the query string.
-/
-- Does not replace existing bindings; use `set` for replace semantics.
def insert (query : Query) (key : String) (value : String) : Query :=
let encodedKey : EncodedQueryParam := EncodedQueryParam.encode key
let encodedValue : EncodedQueryParam := EncodedQueryParam.encode value
query.push (encodedKey, some encodedValue)
/--
Adds an already-encoded key-value pair to the query string.
-/
def insertEncoded (query : Query) (key : EncodedQueryParam) (value : Option EncodedQueryParam) : Query :=
query.push (key, value)
/--
Creates an empty query string.
-/
def empty : Query := #[]
/--
Creates a query string from a list of key-value pairs.
-/
def ofList (pairs : List (EncodedQueryParam × Option EncodedQueryParam)) : Query :=
pairs.toArray
/--
Checks if a query parameter exists.
-/
def containsEncoded (query : Query) (key : EncodedQueryParam) : Bool :=
query.any (fun x => x.fst.toByteArray = key.toByteArray)
/--
Checks if a query parameter exists by raw key string. The key is percent-encoded before matching.
-/
def contains (query : Query) (key : String) : Bool :=
query.containsEncoded (EncodedQueryParam.encode key)
/--
Removes all occurrences of a query parameter by key name.
-/
def eraseEncoded (query : Query) (key : EncodedQueryParam) : Query :=
  -- Keep only the pairs whose encoded key bytes differ from `key`.
  query.filter (fun x =>
    x.fst.toByteArray ≠ key.toByteArray
  )

/--
Removes all occurrences of a query parameter by raw key string. The key is percent-encoded before matching.
-/
def erase (query : Query) (key : String) : Query :=
  query.eraseEncoded (EncodedQueryParam.encode key)
/--
Gets the first value of a query parameter by key name, decoded as a string.
Returns `none` if the key is not found or if the value cannot be decoded as UTF-8.
-/
def get (query : Query) (key : String) : Option String :=
match query.find? key with
| none => none
| some none => some "" -- Key exists but has no value
| some (some encoded) => encoded.decode
/--
Gets the first value of a query parameter by key name, decoded as a string.
Returns the default value if the key is not found or if the value cannot be decoded.
-/
def getD (query : Query) (key : String) (default : String) : String :=
query.get key |>.getD default
/--
Sets a query parameter, replacing all existing values for that key.
Both key and value will be automatically percent-encoded.
-/
-- Implemented as erase-then-insert, so the new binding moves to the end of the array.
def set (query : Query) (key : String) (value : String) : Query :=
query.erase key |>.insert key value
/--
Converts the query to a properly encoded query string format.
Example: "key1=value1&key2=value2&flag"
-/
-- Unlike the `ToString` instance, this does not prepend '?'.
def toRawString (query : Query) : String :=
let params := query.map (fun (k, v) => formatQueryParam k v)
String.intercalate "&" params.toList
instance : EmptyCollection Query :=
  ⟨Query.empty⟩
instance : Singleton (String × String) Query :=
  ⟨fun ⟨k, v⟩ => Query.empty.insert k v⟩
instance : Insert (String × String) Query :=
  ⟨fun ⟨k, v⟩ q => q.insert k v⟩
instance : ToString Query where
  toString q :=
    -- Empty queries render as the empty string (no leading '?').
    if q.isEmpty then
      ""
    else
      let rendered := q.toList.map fun (k, v) => Query.formatQueryParam k v
      "?" ++ String.intercalate "&" rendered
end Query
end URI
/--
Complete URI structure following RFC 3986. All text components use encoded string types to ensure
proper percent-encoding.
Reference: https://www.rfc-editor.org/rfc/rfc3986.html
-/
structure URI where
  /--
  The URI scheme (e.g., "http", "https", "ftp").
  -/
  scheme : URI.Scheme
  /--
  Optional authority component (user info, host, and port).
  -/
  authority : Option URI.Authority
  /--
  The hierarchical path component.
  -/
  path : URI.Path
  /--
  Query string as key-value pairs. An absent query is represented by an empty collection,
  not an `Option`.
  -/
  query : URI.Query
  /--
  Optional fragment identifier (the part after '#'), percent-encoded.
  -/
  fragment : Option String
  deriving Repr, Inhabited, BEq
instance : ToString URI where
  toString uri :=
    -- Authority is prefixed with "//" when present (RFC 3986 §3.2).
    let authority :=
      match uri.authority with
      | some auth => s!"//{toString auth}"
      | none => ""
    -- The fragment is re-encoded before rendering and prefixed with '#'.
    let fragment :=
      match uri.fragment with
      | some f => "#" ++ toString (URI.EncodedFragment.encode f)
      | none => ""
    s!"{uri.scheme}:{authority}{toString uri.path}{toString uri.query}{fragment}"
namespace URI
/--
Fluent builder for constructing URIs. Takes raw (unencoded) strings and handles encoding
automatically when building the final URI.
-/
structure Builder where
/--
The URI scheme (e.g., "http", "https").
-/
scheme : Option URI.Scheme := none
/--
User information (username and optional password).
-/
userInfo : Option UserInfo := none
/--
The host component.
-/
host : Option Host := none
/--
The port number.
-/
port : URI.Port := .omitted
/--
Path segments (will be encoded when building).
-/
pathSegments : Array String := #[]
/--
Query parameters as (key, optional value) pairs (will be encoded when building).
-/
query : Array (String × Option String) := #[]
/--
Fragment identifier (will be encoded when building).
-/
fragment : Option String := none
deriving Inhabited
namespace Builder
/--
Creates an empty URI builder.
-/
def empty : Builder := {}
/--
Attempts to set the URI scheme (e.g., "http", "https").
Returns `none` if the scheme is not a valid RFC 3986 scheme.
The stored scheme is normalized to lowercase.
-/
def setScheme? (b : Builder) (scheme : String) : Option Builder :=
  match URI.Scheme.ofString? scheme with
  | some s => some { b with scheme := some s }
  | none => none
/--
Sets the URI scheme (e.g., "http", "https"). Panics if the scheme is invalid.
Use `setScheme?` if you need a safe option-returning version.
-/
def setScheme! (b : Builder) (scheme : String) : Builder :=
  if let some updated := b.setScheme? scheme then
    updated
  else
    panic! s!"invalid URI scheme: {scheme.quote}"
/--
Sets the user information with username and optional password.
The strings will be automatically percent-encoded.
-/
def setUserInfo (b : Builder) (username : String) (password : Option String := none) : Builder :=
{ b with userInfo := some (UserInfo.ofStrings username password) }
/--
Sets the host as a domain name, returning `none` if the name contains invalid characters.
The domain name will be automatically lowercased.
Only ASCII alphanumeric characters, hyphens, and dots are allowed.
Each label cannot start or end with `-` and is limited to 63 characters.
Internationalized domain names must be converted to punycode before use.
-/
def setHost? (b : Builder) (name : String) : Option Builder :=
URI.DomainName.ofString? name |>.map (fun name => { b with host := some (Host.name name) })
/--
Sets the host as a domain name, panicking if the name contains invalid characters.
The domain name will be automatically lowercased.
Only ASCII alphanumeric characters, hyphens, and dots are allowed.
Each label cannot start or end with `-` and is limited to 63 characters.
Internationalized domain names must be converted to punycode before use.
-/
def setHost! (b : Builder) (name : String) : Builder :=
match b.setHost? name with
| some b => b
| none => panic! s!"invalid domain name: {name.quote}"
/--
Sets the host as an IPv4 address.
-/
def setHostIPv4 (b : Builder) (addr : Net.IPv4Addr) : Builder :=
{ b with host := some (Host.ipv4 addr) }
/--
Sets the host as an IPv6 address.
-/
def setHostIPv6 (b : Builder) (addr : Net.IPv6Addr) : Builder :=
{ b with host := some (Host.ipv6 addr) }
/--
Sets the port number.
-/
def setPort (b : Builder) (port : UInt16) : Builder :=
{ b with port := .value port }
/--
Replaces all path segments. Segments will be automatically percent-encoded when building.
-/
def setPath (b : Builder) (segments : Array String) : Builder :=
{ b with pathSegments := segments }
/--
Appends a single segment to the path. The segment will be automatically percent-encoded when building.
-/
def appendPathSegment (b : Builder) (segment : String) : Builder :=
{ b with pathSegments := b.pathSegments.push segment }
/--
Adds a query parameter with a value. Both key and value will be automatically percent-encoded when
building.
-/
def addQueryParam (b : Builder) (key : String) (value : String) : Builder :=
{ b with query := b.query.push (key, some value) }
/--
Adds a query parameter without a value (flag parameter). The key will be automatically
percent-encoded when building.
-/
def addQueryFlag (b : Builder) (key : String) : Builder :=
{ b with query := b.query.push (key, none) }
/--
Replaces all query parameters. Keys and values will be automatically percent-encoded when building.
-/
def setQuery (b : Builder) (query : Array (String × Option String)) : Builder :=
{ b with query := query }
/--
Sets the fragment identifier. The fragment will be automatically percent-encoded when building.
-/
def setFragment (b : Builder) (fragment : String) : Builder :=
{ b with fragment := some fragment }
/--
Builds a complete URI from the builder state, encoding all components. Defaults to "https" scheme if
none is specified.
-/
def build (b : Builder) : URI :=
  -- "https" satisfies the scheme grammar, so the validity proof is discharged by `decide`.
  let scheme := b.scheme.getD ⟨"https", by decide⟩
  -- An authority is emitted only when a host was provided.
  let authority :=
    if b.host.isSome then
      some {
        userInfo := b.userInfo
        host := b.host.getD default
        port := b.port
      }
    else none
  let path : Path := {
    segments := b.pathSegments.map EncodedSegment.encode
    absolute := true
  }
  let query :=
    b.query.map fun (k, v) =>
      (EncodedQueryParam.encode k, v.map EncodedQueryParam.encode)
  let query := URI.Query.ofList query.toList
  {
    scheme
    authority := authority
    path
    query := query
    fragment := b.fragment
  }
end Builder
/--
Returns a new URI with the scheme replaced.
-/
def withScheme! (uri : URI) (scheme : String) : URI :=
{ uri with scheme := URI.Scheme.ofString! scheme }
/--
Returns a new URI with the authority replaced.
-/
def withAuthority (uri : URI) (authority : Option URI.Authority) : URI :=
{ uri with authority }
/--
Returns a new URI with the path replaced.
-/
def withPath (uri : URI) (path : URI.Path) : URI :=
{ uri with path }
/--
Returns a new URI with the query replaced.
-/
def withQuery (uri : URI) (query : URI.Query) : URI :=
{ uri with query }
/--
Returns a new URI with the fragment replaced.
-/
def withFragment (uri : URI) (fragment : Option String) : URI :=
{ uri with fragment }
/--
Partially normalizes a URI by removing dot-segments from the path (`.` and `..`)
according to RFC 3986 Section 5.2.4.
This does not apply the full normalization set from RFC 3986 Section 6 — for example,
case normalization, percent-encoding normalization, and default-port normalization are
not performed.
-/
def normalize (uri : URI) : URI :=
{ uri with path := uri.path.normalize }
end URI
/--
HTTP request target forms as defined in RFC 9112 Section 3.3.
Reference: https://www.rfc-editor.org/rfc/rfc9112.html#section-3.3
-/
inductive RequestTarget where
/--
Origin-form request target (most common for HTTP requests). Consists of a path and an optional query string.
Example: `/path/to/resource?key=value`
-/
| originForm (path : URI.Path) (query : Option URI.Query)
/--
Absolute-form request target containing a complete URI. Used when making requests through a proxy.
Example: `http://example.com:8080/path?key=value`
-/
| absoluteForm (uri : URI)
/--
Authority-form request target (used for CONNECT requests).
Example: `example.com:443`
-/
| authorityForm (authority : URI.Authority)
/--
Asterisk-form request target (used with OPTIONS requests).
Example: `*`
-/
| asteriskForm
deriving Inhabited, Repr
namespace RequestTarget
/--
Extracts the path component from a request target, if available.
Returns an empty relative path for targets without a path.
-/
def path : RequestTarget → URI.Path
  | .originForm p _ => p
  | .absoluteForm uri => uri.path
  | _ => { segments := #[], absolute := false }
/--
Extracts the query component from a request target, if available.
Returns an empty array for targets without a query.
-/
def query : RequestTarget → URI.Query
  | .originForm _ q => q.getD URI.Query.empty
  | .absoluteForm uri => uri.query
  | _ => URI.Query.empty
/--
Extracts the authority component from a request target, if available.
-/
def authority? : RequestTarget → Option URI.Authority
  | .authorityForm a => some a
  | .absoluteForm uri => uri.authority
  | _ => none
instance : ToString RequestTarget where
  toString
    | .asteriskForm => "*"
    | .authorityForm auth => toString auth
    | .absoluteForm uri => toString uri
    | .originForm path query =>
      -- An absent query renders as the empty string.
      toString path ++ (query.map toString |>.getD "")
instance : Encode .v11 RequestTarget where
encode buffer target := buffer.writeString (toString target)
end Std.Http.RequestTarget

View File

@@ -0,0 +1,80 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Init.Data.Nat
public section
/-!
# URI Parser Configuration
This module defines `URI.Config`, which controls per-component size limits used during URI
parsing. All limits default to the values previously hardcoded in the parser, except
`maxQueryLength`, which is raised from 1024 to 8192 to match `H1.Config.maxUriLength` and
accommodate real-world query strings.
-/
namespace Std.Http.URI
set_option linter.all true
/--
Per-component size limits for the URI parser.
-/
structure Config where
/--
Maximum length of the URI scheme component (e.g. `https`), in bytes.
The scheme grammar requires at least one leading ALPHA; the remaining budget is
`max(0, maxSchemeLength - 1)` additional characters.
-/
maxSchemeLength : Nat := 13
/--
Maximum length of the host `reg-name` component, in bytes.
-/
maxHostLength : Nat := 253
/--
Maximum length of the userinfo component (username and password each), in bytes.
-/
maxUserInfoLength : Nat := 1024
/--
Maximum length of a single path segment, in bytes.
-/
maxSegmentLength : Nat := 256
/--
Maximum length of the query string, in bytes.
Raised from the previously hardcoded 1024 to 8192 to match `H1.Config.maxUriLength`
and allow real-world query strings.
-/
maxQueryLength : Nat := 8192
/--
Maximum length of the fragment component, in bytes.
-/
maxFragmentLength : Nat := 1024
/--
Maximum number of path segments.
Prevents excessive segment counts that could arise from paths like `/a/b/c/…` repeated many times.
-/
maxPathSegments : Nat := 128
/--
Maximum total byte length of the path (all segments combined, including separating slashes).
-/
maxTotalPathLength : Nat := 8192
/--
Maximum number of query parameters (key-value pairs separated by `&`).
-/
maxQueryParams : Nat := 100
end Std.Http.URI

View File

@@ -0,0 +1,688 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
import Init.Grind
import Init.While
import Init.Data.SInt.Lemmas
import Init.Data.UInt.Lemmas
import Init.Data.UInt.Bitwise
import Init.Data.Array.Lemmas
public import Init.Data.String
public import Std.Internal.Http.Internal.Char
public section
/-!
# URI Encoding
This module provides utilities for percent-encoding URI components according to RFC 3986. It includes
character validation, encoding/decoding functions, and types that maintain encoding invariants through
Lean's dependent type system.
Reference: https://www.rfc-editor.org/rfc/rfc3986.html#section-2.1
-/
namespace Std.Http.URI
set_option linter.all true
open Internal Char
/--
Checks if a byte is a valid character in a percent-encoded URI component. Valid characters are
unreserved characters or the percent sign (for escape sequences).
-/
def isEncodedChar (rule : UInt8 → Bool) (c : UInt8) : Bool :=
  -- ASCII, and either allowed by the component rule, a hex digit, or '%' itself.
  isAsciiByte c ∧ (rule c ∨ isHexDigitByte c ∨ c = '%'.toUInt8)
/--
Checks if a byte is valid in a percent-encoded query string component. Extends `isEncodedChar` to also
allow '+' which represents space in application/x-www-form-urlencoded format.
-/
def isEncodedQueryChar (rule : UInt8 → Bool) (c : UInt8) : Bool :=
  isEncodedChar rule c ∨ c = '+'.toUInt8
/--
Checks if all characters in a `ByteArray` are allowed in an encoded URI component. This is a fast check
that only verifies the character set, not full encoding validity.
-/
@[inline]
abbrev IsAllowedEncodedChars (rule : UInt8 → Bool) (s : ByteArray) : Prop :=
  s.data.all (isEncodedChar rule)

instance : Decidable (IsAllowedEncodedChars r s) :=
  inferInstanceAs (Decidable (s.data.all (isEncodedChar r) = true))
/--
Checks if all characters in a `ByteArray` are allowed in an encoded query parameter. Allows '+' as an
alternative encoding for space (application/x-www-form-urlencoded).
-/
@[inline]
abbrev IsAllowedEncodedQueryChars (rule : UInt8 → Bool) (s : ByteArray) : Prop :=
  s.data.all (isEncodedQueryChar rule)

instance : Decidable (IsAllowedEncodedQueryChars r s) :=
  inferInstanceAs (Decidable (s.data.all (isEncodedQueryChar r) = true))
/--
Validates that all percent signs in a byte array are followed by exactly two hexadecimal digits.
This ensures proper percent-encoding according to RFC 3986.
For example:
- `%20` is valid (percent followed by two hex digits)
- `%` is invalid (percent with no following digits)
- `%2` is invalid (percent followed by only one digit)
- `%GG` is invalid (percent followed by non-hex characters)
-/
def isValidPercentEncoding (ba : ByteArray) : Bool :=
  let rec loop (i : Nat) : Bool :=
    if h : i < ba.size then
      let c := ba[i]'h
      if c = '%'.toUInt8 then
        -- A '%' needs two following bytes; `i + 2 < ba.size` bounds both indices.
        if h₂ : i + 2 < ba.size then
          let d1 := ba[i + 1]'(by omega)
          let d2 := ba[i + 2]'h₂
          if isHexDigitByte d1 && isHexDigitByte d2 then
            loop (i + 3)
          else false
        else false
      else loop (i + 1)
    else true
  termination_by ba.size - i
  loop 0
/--
Converts a nibble (4-bit value, 0-15) to its hexadecimal digit representation. Returns '0'-'9' for
values 0-9, and 'A'-'F' for values 10-15.
-/
def hexDigit (n : UInt8) : UInt8 :=
  if n < 10 then (n + '0'.toUInt8)
  else (n - 10 + 'A'.toUInt8)
/--
Converts a hexadecimal digit character to its numeric value (0-15).
Returns `none` if the character is not a valid hex digit.
-/
def hexDigitToUInt8? (c : UInt8) : Option UInt8 :=
  if c ≥ '0'.toUInt8 && c ≤ '9'.toUInt8 then
    some (c - '0'.toUInt8)
  else if c ≥ 'a'.toUInt8 && c ≤ 'f'.toUInt8 then
    some (c - 'a'.toUInt8 + 10)
  else if c ≥ 'A'.toUInt8 && c ≤ 'F'.toUInt8 then
    some (c - 'A'.toUInt8 + 10)
  else
    none
private theorem IsAllowedEncodedChars.push {bs : ByteArray} (h : IsAllowedEncodedChars r bs) (h₁ : isEncodedChar r c) :
IsAllowedEncodedChars r (bs.push c) := by
simpa [IsAllowedEncodedChars, ByteArray.push, Array.all_push, And.intro h h₁]
private theorem IsAllowedEncodedQueryChars.push {bs : ByteArray} (h : IsAllowedEncodedQueryChars r bs) (h₁ : isEncodedQueryChar r c) :
IsAllowedEncodedQueryChars r (bs.push c) := by
simpa [IsAllowedEncodedQueryChars, ByteArray.push, Array.all_push, And.intro h h₁]
private theorem isEncodedChar_isAscii (c : UInt8) (h : isEncodedChar r c) : isAsciiByte c := by
simp [isEncodedChar, isAsciiByte] at *
exact h.left
private theorem isEncodedQueryChar_isAscii (c : UInt8) (h : isEncodedQueryChar r c) : isAsciiByte c := by
unfold isEncodedQueryChar isAsciiByte at *
simp at h
rcases h
next h => exact isEncodedChar_isAscii c h
next h => subst_vars; decide
-- NOTE(review): non-ASCII tokens (≤, ⟨⟩) were stripped from the source dump;
-- this restores them. Verify against the original branch before merging.
private theorem hexDigit_isHexDigit (h₀ : x < 16) : isHexDigitByte (hexDigit x) := by
  unfold hexDigit isHexDigitByte
  have h₁ : x.toNat < 16 := h₀
  split <;> simp
  next p =>
    -- Digit case: '0' + x lies in the range 48..57.
    have h₂ : x.toNat < 10 := p
    have h₂ : 48 ≤ x.toNat + 48 := by omega
    have h₃ : x.toNat + 48 ≤ 57 := by omega
    have h₄ : x.toNat + 48 < 256 := by omega
    refine Or.inl (Or.inl ⟨?_, ?_⟩)
    · exact (UInt8.ofNat_le_iff_le (by decide) h₄ |>.mpr h₂)
    · exact (UInt8.ofNat_le_iff_le h₄ (by decide) |>.mpr h₃)
  next p =>
    -- Letter case: 'A' + (x - 10) lies in the range 65..70.
    have h₂ : ¬(x.toNat < 10) := p
    have h₃ : 65 ≤ x.toNat - 10 + 65 := by omega
    have h₅ : x.toNat - 10 + 65 ≤ 70 := by omega
    have h₄ : x.toNat - 10 + 65 < 256 := by omega
    refine Or.inr ⟨?_, ?_⟩
    · simpa [UInt8.ofNat_sub (by omega : 10 ≤ x.toNat)] using
        UInt8.ofNat_le_iff_le (by decide : 65 < 256) h₄ |>.mpr h₃
    · simpa [UInt8.ofNat_add, UInt8.ofNat_sub (by omega : 10 ≤ x.toNat)] using
        UInt8.ofNat_le_iff_le h₄ (by decide : 70 < 256) |>.mpr h₅
-- NOTE(review): the original `rcases` pattern was garbled by symbol stripping in
-- the source dump; this is a best-effort reconstruction covering the three hex
-- ranges ('0'-'9', 'a'-'f', 'A'-'F'). Confirm against the original branch.
private theorem isHexDigit_isAscii {c : UInt8} (h : isHexDigitByte c) : isAsciiByte c := by
  simp [isHexDigitByte, isAsciiByte] at *
  rcases h with ⟨h1, h2⟩ | ⟨h1, h2⟩ | ⟨h1, h2⟩ <;>
    exact UInt8.lt_of_le_of_lt h2 (by decide)
private theorem isHexDigit_isEncodedChar {c : UInt8} (h : isHexDigitByte c) : isEncodedChar r c := by
unfold isEncodedChar
simp at *
exact And.intro (isHexDigit_isAscii h) (Or.inr (Or.inl h))
private theorem isHexDigit_isEncodedQueryChar {c : UInt8} (h : isHexDigitByte c) : isEncodedQueryChar r c := by
unfold isEncodedQueryChar isEncodedChar
simp at *
exact Or.inl (And.intro (isHexDigit_isAscii h) (Or.inr (Or.inl h)))
/-- If every byte of `b` satisfies `p`, and `p` implies `q`, then every byte satisfies `q`. -/
theorem all_of_all_of_imp {b : ByteArray} (h : b.data.all p) (imp : ∀ c, p c → q c) : b.data.all q := by
  rw [Array.all_eq] at *
  simp at *
  intro i x
  exact (imp b.data[i]) (h i x)
-- UTF-8 encoding an all-ASCII byte list character-by-character is the identity.
private theorem autf8EncodeChar_flatMap_ascii {a : List UInt8}
    (is_ascii_list : ∀ (x : UInt8), x ∈ a → x < 128) :
    List.flatMap (fun a => String.utf8EncodeChar (Char.ofUInt8 a)) a = a := by
  -- An ASCII byte encodes to exactly itself as a singleton list.
  have h_encode {i : UInt8} (h : i < 128) : String.utf8EncodeChar (Char.ofUInt8 i) = [i] := by
    simp [Char.ofUInt8, String.utf8EncodeChar, show ¬127 < i.toNat from Nat.not_lt_of_le (Nat.le_pred_of_lt h)]
  induction a with
  | nil => simp
  | cons head tail ih =>
    simp [List.flatMap_cons]
    rw [h_encode]
    · simp
      rw [ih]
      intro x hx
      exact is_ascii_list x (by simp [hx])
    · exact is_ascii_list head (by simp)
private theorem List.toByteArray_loop_eq (xs : List UInt8) (acc : ByteArray) :
(List.toByteArray.loop xs acc).data = acc.data ++ xs.toArray := by
induction xs generalizing acc with
| nil => simp [List.toByteArray.loop]
| cons x xs ih => simp [List.toByteArray.loop, ih, Array.push]
private theorem ByteArray.toList_toByteArray (ba : ByteArray) :
ba.data.toList.toByteArray = ba := by
cases ba with
| mk data =>
simp [List.toByteArray]
apply ByteArray.ext
simp [List.toByteArray_loop_eq, ByteArray.empty]
decide
/-- A byte array containing only ASCII bytes is valid UTF-8. -/
theorem isValidUTF8_of_isAsciiByte (ba : ByteArray) (s : ba.data.all isAsciiByte) : ByteArray.IsValidUTF8 ba := by
  -- Witness: the character list obtained by reading each byte as one ASCII char.
  refine ⟨ba.data.toList.map Char.ofUInt8, ?_⟩
  rw [List.utf8Encode]
  simp only [List.flatMap_map]
  have is_ascii : ∀ (x : UInt8), x ∈ ba.data.toList → x < 128 := by
    let is_ascii := Array.all_eq_true_iff_forall_mem.mp s
    simp [isAsciiByte] at is_ascii
    intro x hx
    exact is_ascii x (by simp_all)
  rw [autf8EncodeChar_flatMap_ascii is_ascii]
  exact ByteArray.toList_toByteArray ba |>.symm
/--
A percent-encoded URI component with a compile-time proof that it contains only valid encoded characters.
This provides type-safe URI encoding without runtime validation.
The invariant guarantees that the string contains only unreserved characters (alphanumeric, hyphen, period,
underscore, tilde) and percent signs (for escape sequences).
-/
structure EncodedString (r : UInt8 → Bool) where
  private mk ::
  /--
  The underlying byte array containing the percent-encoded data.
  -/
  toByteArray : ByteArray
  /--
  Proof that all characters in the byte array are valid encoded characters.
  -/
  valid : IsAllowedEncodedChars r toByteArray
namespace EncodedString
/--
Creates an empty encoded string.
-/
def empty : EncodedString r :=
  -- NOTE(review): the simp lemma list was lost in the source dump; the empty
  -- byte array vacuously satisfies the character-set invariant.
  ⟨.empty, by simp []; exact fun i h => by contradiction⟩
instance : Inhabited (EncodedString r) where
default := EncodedString.empty
/--
Appends a single encoded character to an encoded string.
Requires a proof that the byte is a valid encoded character for rule `r`.
-/
private def push (s : EncodedString r) (c : UInt8) (h : isEncodedChar r c) : EncodedString r :=
  ⟨s.toByteArray.push c, IsAllowedEncodedChars.push s.valid h⟩
/--
Converts a byte to its percent-encoded hexadecimal representation (%XX). For example, a space
character (0x20) becomes "%20".
-/
private def byteToHex (b : UInt8) (s : EncodedString r) : EncodedString r :=
  -- Emit '%', then the high nibble, then the low nibble.
  let ba := s.toByteArray.push '%'.toUInt8
    |>.push (hexDigit (b >>> 4))
    |>.push (hexDigit (b &&& 0xF))
  let valid := by
    have h1 : isEncodedChar r '%'.toUInt8 :=
      by simp [isEncodedChar]; decide
    have h2 : isEncodedChar r (hexDigit (b >>> 4)) :=
      let h₀ := hexDigit_isHexDigit (BitVec.toNat_ushiftRight_lt b.toBitVec 4 (by decide))
      isHexDigit_isEncodedChar h₀
    have h3 : isEncodedChar r (hexDigit (b &&& 0xF)) :=
      let h₀ := hexDigit_isHexDigit (@UInt8.and_lt_add_one b 0xF (by decide))
      isHexDigit_isEncodedChar h₀
    exact IsAllowedEncodedChars.push (IsAllowedEncodedChars.push (IsAllowedEncodedChars.push s.valid h1) h2) h3
  ⟨ba, valid⟩
/--
Encodes a raw string into an `EncodedString` with automatic proof construction. Unreserved characters
(alphanumeric, hyphen, period, underscore, tilde) are kept as-is, while all other characters are percent-encoded.
-/
def encode (s : String) : EncodedString r :=
  s.toUTF8.foldl (init := EncodedString.empty) fun acc c =>
    -- Bytes accepted by the rule pass through; everything else is %XX-escaped.
    if h : isAsciiByte c ∧ r c then
      acc.push c (by simp [isEncodedChar]; exact And.intro h.left (Or.inl h.right))
    else
      byteToHex c acc
/--
Attempts to create an `EncodedString` from a `ByteArray`. Returns `some` if the byte array contains only
valid encoded characters and all percent signs are followed by exactly two hex digits, `none` otherwise.
-/
def ofByteArray? (ba : ByteArray) : Option (EncodedString r) :=
  if h : IsAllowedEncodedChars r ba then
    if isValidPercentEncoding ba then some ⟨ba, h⟩ else none
  else none
/--
Creates an `EncodedString` from a `ByteArray`, panicking if the byte array is invalid.
-/
def ofByteArray! (ba : ByteArray) : EncodedString r :=
match ofByteArray? ba with
| some es => es
| none => panic! "invalid encoded string"
/--
Creates an `EncodedString` from a `String` by checking if it's already a valid percent-encoded string.
Returns `some` if valid, `none` otherwise.
-/
def ofString? (s : String) : Option (EncodedString r) :=
ofByteArray? s.toUTF8
/--
Creates an `EncodedString` from a `String`, panicking if the string is not a valid percent-encoded string.
-/
def ofString! (s : String) : EncodedString r :=
ofByteArray! s.toUTF8
/--
Creates an `EncodedString` from a `ByteArray` with compile-time proofs.
Use this when you have proofs that the byte array is valid.
-/
def new (ba : ByteArray) (valid : IsAllowedEncodedChars r ba) (_validEncoding : isValidPercentEncoding ba) : EncodedString r :=
  ⟨ba, valid⟩
instance : ToString (EncodedString r) where
  -- Every encoded char is ASCII, so the bytes are valid UTF-8 and form a String directly.
  toString es := ⟨es.toByteArray, isValidUTF8_of_isAsciiByte es.toByteArray (all_of_all_of_imp es.valid (fun c h => by simp [isEncodedChar] at h; exact h.left))⟩
/--
Decodes an `EncodedString` back to a regular `String`. Converts percent-encoded sequences (e.g., "%20")
back to their original characters. Returns `none` if the decoded bytes are not valid UTF-8.
-/
def decode (es : EncodedString r) : Option String := Id.run do
  let mut decoded : ByteArray := ByteArray.empty
  let rawBytes := es.toByteArray
  let len := rawBytes.size
  let mut i := 0
  let percent := '%'.toNat.toUInt8
  while h : i < len do
    let c := rawBytes[i]
    (decoded, i) := if h₁ : c == percent ∧ i + 1 < len then
      let h1 := rawBytes[i + 1]
      if let some hd1 := hexDigitToUInt8? h1 then
        if h₂ : i + 2 < len then
          let h2 := rawBytes[i + 2]
          if let some hd2 := hexDigitToUInt8? h2 then
            -- Valid %XY escape: emit the decoded byte.
            (decoded.push (hd1 * 16 + hd2), i + 3)
          else
            -- Second digit invalid: keep the bytes literally.
            (((decoded.push c).push h1).push h2, i + 3)
        else
          ((decoded.push c).push h1, i + 2)
      else
        ((decoded.push c).push h1, i + 2)
    else
      (decoded.push c, i + 1)
  return String.fromUTF8? decoded
instance : Repr (EncodedString r) where
reprPrec es n := reprPrec (toString es) n
instance : BEq (EncodedString r) where
beq x y := x.toByteArray = y.toByteArray
instance : Hashable (EncodedString r) where
hash x := Hashable.hash x.toByteArray
end EncodedString
/--
A percent-encoded query string component with a compile-time proof that it contains only valid encoded
query characters. Extends `EncodedString` to support the '+' character for spaces, following the
application/x-www-form-urlencoded format.
This type is specifically designed for encoding query parameters where spaces can be represented as '+'
instead of "%20".
-/
structure EncodedQueryString (r : UInt8 → Bool) where
  private mk ::
  /--
  The underlying byte array containing the percent-encoded query data.
  -/
  toByteArray : ByteArray
  /--
  Proof that all characters in the byte array are valid encoded query characters.
  -/
  valid : IsAllowedEncodedQueryChars r toByteArray
namespace EncodedQueryString
/--
Creates an empty encoded query string.
-/
def empty : EncodedQueryString r :=
  ⟨.empty, by simp; intro a h; contradiction⟩
instance : Inhabited (EncodedQueryString r) where
default := EncodedQueryString.empty
/--
Appends a single encoded query character to an encoded query string.
-/
private def push (s : EncodedQueryString r) (c : UInt8) (h : isEncodedQueryChar r c) : EncodedQueryString r :=
  ⟨s.toByteArray.push c, IsAllowedEncodedQueryChars.push s.valid h⟩
/--
Attempts to create an `EncodedQueryString` from a `ByteArray`. Returns `some` if the byte array contains
only valid encoded query characters and all percent signs are followed by exactly two hex digits, `none` otherwise.
-/
def ofByteArray? (ba : ByteArray) (r : UInt8 → Bool := isQueryChar) : Option (EncodedQueryString r) :=
  if h : IsAllowedEncodedQueryChars r ba then
    if isValidPercentEncoding ba then some ⟨ba, h⟩ else none
  else none
/--
Creates an `EncodedQueryString` from a `ByteArray`, panicking if the byte array is invalid.
-/
def ofByteArray! (ba : ByteArray) (r : UInt8 → Bool := isQueryChar) : EncodedQueryString r :=
  match ofByteArray? ba r with
  | some es => es
  | none => panic! "invalid encoded query string"
/--
Creates an `EncodedQueryString` from a `String` by checking if it's already a valid percent-encoded string.
Returns `some` if valid, `none` otherwise.
-/
def ofString? (s : String) (r : UInt8 → Bool := isQueryChar) : Option (EncodedQueryString r) :=
  ofByteArray? s.toUTF8 r
/--
Creates an `EncodedQueryString` from a `String`, panicking if the string is not a valid percent-encoded string.
-/
def ofString! (s : String) (r : UInt8 → Bool := isQueryChar) : EncodedQueryString r :=
  ofByteArray! s.toUTF8 r
/--
Creates an `EncodedQueryString` from a `ByteArray` with compile-time proofs.
Use this when you have proofs that the byte array is valid.
-/
def new (ba : ByteArray) (valid : IsAllowedEncodedQueryChars r ba) (_validEncoding : isValidPercentEncoding ba) : EncodedQueryString r :=
  ⟨ba, valid⟩
/--
Converts a byte to its percent-encoded hexadecimal representation (%XX). For example, a space character
(0x20) becomes "%20".
-/
private def byteToHex (b : UInt8) (s : EncodedQueryString r) : EncodedQueryString r :=
  -- Emit '%', then the high nibble, then the low nibble.
  let ba := s.toByteArray.push '%'.toUInt8
    |>.push (hexDigit (b >>> 4))
    |>.push (hexDigit (b &&& 0xF))
  let valid := by
    have h1 : isEncodedQueryChar r '%'.toUInt8 := by
      simp [isEncodedQueryChar, isEncodedChar]; decide
    have h2 : isEncodedQueryChar r (hexDigit (b >>> 4)) :=
      isHexDigit_isEncodedQueryChar (hexDigit_isHexDigit (BitVec.toNat_ushiftRight_lt b.toBitVec 4 (by decide)))
    have h3 : isEncodedQueryChar r (hexDigit (b &&& 0xF)) :=
      isHexDigit_isEncodedQueryChar (hexDigit_isHexDigit (@UInt8.and_lt_add_one b 0xF (by decide)))
    exact IsAllowedEncodedQueryChars.push (IsAllowedEncodedQueryChars.push (IsAllowedEncodedQueryChars.push s.valid h1) h2) h3
  ⟨ba, valid⟩
/--
Encodes a raw string into an `EncodedQueryString` with automatic proof construction. Unreserved characters
are kept as-is, spaces are encoded as '+', and all other characters are percent-encoded.
-/
def encode (s : String) (r : UInt8 → Bool := isQueryChar) : EncodedQueryString r :=
  s.toUTF8.foldl (init := EncodedQueryString.empty) fun acc c =>
    if h : isAsciiByte c ∧ r c then
      acc.push c (by simp [isEncodedQueryChar, isEncodedChar]; exact Or.inl (And.intro h.left (Or.inl h.right)))
    else if _ : c = ' '.toUInt8 then
      -- Space gets the form-urlencoded '+' representation.
      acc.push '+'.toUInt8 (by simp [isEncodedQueryChar])
    else
      byteToHex c acc
/--
Converts an `EncodedQueryString` to a `String`. All encoded query characters are ASCII,
so the underlying bytes are valid UTF-8 and can be reinterpreted directly.
-/
def toString (es : EncodedQueryString r) : String :=
  ⟨es.toByteArray, isValidUTF8_of_isAsciiByte es.toByteArray (all_of_all_of_imp es.valid (fun c h => isEncodedQueryChar_isAscii c h))⟩
/--
Decodes an `EncodedQueryString` back to a regular `String`. Converts percent-encoded sequences and '+'
signs back to their original characters. Returns `none` if the decoded bytes are not valid UTF-8.
This is almost the same code from `System.Uri.UriEscape.decodeUri`, but with `Option` instead.
-/
def decode (es : EncodedQueryString r) : Option String := Id.run do
  let mut decoded : ByteArray := ByteArray.empty
  let rawBytes := es.toByteArray
  let len := rawBytes.size
  let mut i := 0
  let percent := '%'.toNat.toUInt8
  let plus := '+'.toNat.toUInt8
  while h : i < len do
    let c := rawBytes[i]
    (decoded, i) := if c == plus then
      -- '+' decodes to a space (application/x-www-form-urlencoded).
      (decoded.push ' '.toNat.toUInt8, i + 1)
    else if h₁ : c == percent ∧ i + 1 < len then
      let h1 := rawBytes[i + 1]
      if let some hd1 := hexDigitToUInt8? h1 then
        if h₂ : i + 2 < len then
          let h2 := rawBytes[i + 2]
          if let some hd2 := hexDigitToUInt8? h2 then
            -- Valid %XY escape: emit the decoded byte.
            (decoded.push (hd1 * 16 + hd2), i + 3)
          else
            -- Second digit invalid: keep the bytes literally.
            (((decoded.push c).push h1).push h2, i + 3)
        else
          ((decoded.push c).push h1, i + 2)
      else
        ((decoded.push c).push h1, i + 2)
    else
      (decoded.push c, i + 1)
  return String.fromUTF8? decoded
end EncodedQueryString
instance : ToString (EncodedQueryString r) where
toString := EncodedQueryString.toString
instance : Repr (EncodedQueryString r) where
reprPrec es n := reprPrec (toString es) n
instance : BEq (EncodedQueryString r) where
beq x y := x.toByteArray = y.toByteArray
instance : Hashable (EncodedQueryString r) where
hash x := Hashable.hash x.toByteArray
instance : Hashable (Option (EncodedQueryString r)) where
hash
| some x => Hashable.hash ((ByteArray.mk #[1] ++ x.toByteArray))
| none => Hashable.hash (ByteArray.mk #[0])
/--
A percent-encoded URI path segment. Valid characters are `pchar` (unreserved, sub-delims, ':', '@').
-/
abbrev EncodedSegment := EncodedString isPChar
namespace EncodedSegment
/--
Encodes a raw string into an encoded path segment.
-/
def encode (s : String) : EncodedSegment :=
EncodedString.encode (r := isPChar) s
/--
Attempts to create an encoded path segment from raw bytes.
-/
def ofByteArray? (ba : ByteArray) : Option EncodedSegment :=
EncodedString.ofByteArray? (r := isPChar) ba
/--
Creates an encoded path segment from raw bytes, panicking on invalid encoding.
-/
def ofByteArray! (ba : ByteArray) : EncodedSegment :=
EncodedString.ofByteArray! (r := isPChar) ba
/--
Decodes an encoded path segment back to a UTF-8 string.
-/
def decode (segment : EncodedSegment) : Option String :=
EncodedString.decode segment
end EncodedSegment
/--
A percent-encoded URI fragment component. Valid characters are `pchar / "/" / "?"`.
-/
abbrev EncodedFragment := EncodedString isFragmentChar

namespace EncodedFragment

/--
Encodes a raw string into an encoded fragment component, percent-escaping any
byte that is not a valid fragment character.
-/
def encode (s : String) : EncodedFragment :=
  EncodedString.encode (r := isFragmentChar) s

/--
Attempts to create an encoded fragment component from raw bytes.
Returns `none` when the bytes are not a valid encoding.
-/
def ofByteArray? (ba : ByteArray) : Option EncodedFragment :=
  EncodedString.ofByteArray? (r := isFragmentChar) ba

/--
Creates an encoded fragment component from raw bytes, panicking on invalid encoding.
-/
def ofByteArray! (ba : ByteArray) : EncodedFragment :=
  EncodedString.ofByteArray! (r := isFragmentChar) ba

/--
Decodes an encoded fragment component back to a UTF-8 string.
Returns `none` when decoding fails.
-/
def decode (fragment : EncodedFragment) : Option String :=
  EncodedString.decode fragment

end EncodedFragment
/--
A percent-encoded URI userinfo component. Valid characters are `unreserved / sub-delims / ":"`.
-/
abbrev EncodedUserInfo := EncodedString isUserInfoChar

namespace EncodedUserInfo

/--
Encodes a raw string into an encoded userinfo component, percent-escaping any
byte that is not a valid userinfo character.
-/
def encode (s : String) : EncodedUserInfo :=
  EncodedString.encode (r := isUserInfoChar) s

/--
Attempts to create an encoded userinfo component from raw bytes.
Returns `none` when the bytes are not a valid encoding.
-/
def ofByteArray? (ba : ByteArray) : Option EncodedUserInfo :=
  EncodedString.ofByteArray? (r := isUserInfoChar) ba

/--
Creates an encoded userinfo component from raw bytes, panicking on invalid encoding.
-/
def ofByteArray! (ba : ByteArray) : EncodedUserInfo :=
  EncodedString.ofByteArray! (r := isUserInfoChar) ba

/--
Decodes an encoded userinfo component back to a UTF-8 string.
Returns `none` when decoding fails.
-/
def decode (userInfo : EncodedUserInfo) : Option String :=
  EncodedString.decode userInfo

end EncodedUserInfo
/--
A percent-encoded URI query parameter. Valid characters are `pchar / "/" / "?"` with '+' for spaces.
-/
abbrev EncodedQueryParam := EncodedQueryString isQueryDataChar

namespace EncodedQueryParam

/--
Encodes a raw string into an encoded query parameter, percent-escaping any byte
that is not a valid query-data character.
-/
def encode (s : String) : EncodedQueryParam :=
  EncodedQueryString.encode (r := isQueryDataChar) s

/--
Attempts to create an encoded query parameter from raw bytes.
Returns `none` when the bytes are not a valid encoding.
-/
def ofByteArray? (ba : ByteArray) : Option EncodedQueryParam :=
  EncodedQueryString.ofByteArray? (r := isQueryDataChar) ba

/--
Creates an encoded query parameter from raw bytes, panicking on invalid encoding.
-/
def ofByteArray! (ba : ByteArray) : EncodedQueryParam :=
  EncodedQueryString.ofByteArray! (r := isQueryDataChar) ba

/--
Attempts to create an encoded query parameter from an encoded string.
Returns `none` when the string is not a valid encoding.
-/
def fromString? (s : String) : Option EncodedQueryParam :=
  EncodedQueryString.ofString? (r := isQueryDataChar) s

/--
Decodes an encoded query parameter back to a UTF-8 string.
Returns `none` when decoding fails.
-/
def decode (param : EncodedQueryParam) : Option String :=
  EncodedQueryString.decode param

end EncodedQueryParam
end Std.Http.URI

View File

@@ -0,0 +1,430 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
import Init.While
public import Init.Data.String
public import Std.Internal.Parsec
public import Std.Internal.Parsec.ByteArray
public import Std.Internal.Http.Data.URI.Basic
public import Std.Internal.Http.Data.URI.Config
public section
/-!
# URI Parser
This module provides parsers for HTTP request targets and HTTP-oriented URIs aligned with RFC 3986.
It handles parsing of schemes, authorities, paths, queries, and fragments.
Notable intentional constraints:
* hosts are limited to IPv4, bracketed IPv6, and DNS-style domain names
* IPvFuture (`v...`) inside `IP-literal` is currently rejected
References:
* https://www.rfc-editor.org/rfc/rfc3986.html
* https://www.rfc-editor.org/rfc/rfc9110.html#name-uri-references
* https://www.rfc-editor.org/rfc/rfc9112.html#section-3.3
-/
namespace Std.Http.URI.Parser
set_option linter.all true
open Internal Char
open Std Internal Parsec ByteArray
/--
Runs `p` with backtracking: on failure no input is consumed and `none` is
returned; on success the parsed value is wrapped in `some`.
-/
@[inline]
private def tryOpt (p : Parser α) : Parser (Option α) :=
  optional (attempt p)
/--
Checks, without consuming input, whether the next byte satisfies `p`.
Returns `false` at end of input.

Fix: restores the `→` in the predicate type and the monadic bind `←` that were
lost in extraction.
-/
@[inline]
private def peekIs (p : UInt8 → Bool) : Parser Bool := do
  return (← peekWhen? p).isSome
-- scheme = ALPHA *( ALPHA / DIGIT / "+" / "-" / "." )
/--
Parses a URI scheme per RFC 3986 §3.1, normalizing it to lowercase and
validating it with `URI.IsValidScheme`. Bounded by `config.maxSchemeLength`.
Fix: restores the monadic binds (`←`) and disjunctions (`∨`) lost in extraction.
-/
private def parseScheme (config : URI.Config) : Parser URI.Scheme := do
  if config.maxSchemeLength = 0 then
    fail "scheme length limit is 0 (no scheme allowed)"
  -- First character must be ALPHA; the rest may include digits and "+-.".
  let first : UInt8 ← satisfy (fun b : UInt8 => Internal.Char.isAlphaByte b)
  let rest ← takeWhileAtMost
    (fun c =>
      isAlphaNum c ∨
        c = '+'.toUInt8 ∨ c = '-'.toUInt8 ∨ c = '.'.toUInt8)
    (config.maxSchemeLength - 1)
  let schemeBytes := ByteArray.empty.push first ++ rest.toByteArray
  -- Schemes are case-insensitive; normalize to lowercase before validation.
  let str := String.fromUTF8! schemeBytes |>.toLower
  if h : URI.IsValidScheme str then
    return ⟨str, h⟩
  else
    fail "invalid scheme"
-- port = 1*DIGIT
/--
Parses a decimal port number (at most 5 digits, since 65535 is the maximum
valid port) and returns it as a `UInt16`.
Fix: restores the monadic bind (`←`) lost in extraction.
-/
private def parsePortNumber : Parser UInt16 := do
  let portBytes ← takeWhileAtMost isDigitByte 5
  let portStr := String.fromUTF8! portBytes.toByteArray
  let some portNum := String.toNat? portStr
    | fail s!"invalid port number: {portStr}"
  if portNum > 65535 then
    fail s!"port number too large: {portNum}"
  return portNum.toUInt16
-- userinfo = *( unreserved / pct-encoded / sub-delims / ":" )
/--
Parses the userinfo component: a user name, optionally followed by `:` and a
password-like field. Both halves are bounded by `config.maxUserInfoLength` and
validated for correct percent encoding.
Fix: restores `←`, `≠`, `∧`, `∨`, and the anonymous-constructor brackets lost
in extraction.
-/
private def parseUserInfo (config : URI.Config) : Parser URI.UserInfo := do
  -- The name part stops at the first ':' (separator before the password part).
  let userBytesName ← takeWhileAtMost
    (fun x =>
      x ≠ ':'.toUInt8 ∧
        (isUserInfoChar x ∨ x = '%'.toUInt8))
    config.maxUserInfoLength
  let some userNameEncoded := URI.EncodedUserInfo.ofByteArray? userBytesName.toByteArray
    | fail "invalid percent encoding in user info"
  let userPassEncoded ← if ← peekIs (· == ':'.toUInt8) then
    skip
    let userBytesPass ← takeWhileAtMost
      (fun x => isUserInfoChar x ∨ x = '%'.toUInt8)
      config.maxUserInfoLength
    let some userPassEncoded := URI.EncodedUserInfo.ofByteArray? userBytesPass.toByteArray
      | fail "invalid percent encoding in user info"
    pure <| some userPassEncoded
  else
    pure none
  return ⟨userNameEncoded, userPassEncoded⟩
-- Parses bracketed IPv6 literals.
-- Note: RFC 3986 also allows IPvFuture inside `IP-literal`; this parser
-- currently rejects IPvFuture.
/--
Parses a bracketed IPv6 literal (`"[" ... "]"`), delegating validation to
`Std.Net.IPv6Addr.ofString`.
Fix: restores `←` and `∨` lost in extraction.
-/
private def parseIPv6 : Parser Net.IPv6Addr := do
  skipByte '['.toUInt8
  -- Accept hex digits, ':' and '.' (the latter for IPv4-mapped forms); the
  -- actual structure is checked by `IPv6Addr.ofString`.
  let result ← takeWhile1AtMost
    (fun x => x = ':'.toUInt8 ∨ x = '.'.toUInt8 ∨ isHexDigitByte x)
    256
  skipByte ']'.toUInt8
  let ipv6Str := String.fromUTF8! result.toByteArray
  let some ipv6Addr := Std.Net.IPv6Addr.ofString ipv6Str
    | fail s!"invalid IPv6 address: {ipv6Str}"
  return ipv6Addr
-- IPv4address = dec-octet "." dec-octet "." dec-octet "." dec-octet
/--
Parses a dotted-decimal IPv4 address, delegating validation to
`Std.Net.IPv4Addr.ofString`.
Fix: restores `←` and `∨` lost in extraction.
-/
private def parseIPv4 : Parser Net.IPv4Addr := do
  let result ← takeWhile1AtMost
    (fun x => x = '.'.toUInt8 ∨ isDigitByte x)
    256
  let ipv4Str := String.fromUTF8! result.toByteArray
  let some ipv4Addr := Std.Net.IPv4Addr.ofString ipv4Str
    | fail s!"invalid IPv4 address: {ipv4Str}"
  return ipv4Addr
-- host = IP-literal / IPv4address / reg-name
-- Note: RFC 1123 allows domain labels to start with digits, so we must try IPv4
-- first and fall back to reg-name parsing if it fails.
/--
Parses a host: a bracketed IPv6 literal, an IPv4 address, or a DNS-style
domain name (normalized to lowercase and validated).
Fix: restores `←`, `∨`, `∧`, and the anonymous-constructor brackets lost in
extraction.
-/
private def parseHost (config : URI.Config) : Parser URI.Host := do
  if (← peekWhen? (· == '['.toUInt8)).isSome then
    return .ipv6 (← parseIPv6)
  else
    if (← peekWhen? isDigitByte).isSome then
      if let some ipv4 ← tryOpt parseIPv4 then
        return .ipv4 ipv4
    -- It needs to be a legal DNS label, so it differs from reg-name.
    let some str := String.fromUTF8? (← takeWhile1AtMost
      (fun x => isAlphaNum x ∨ x = '-'.toUInt8 ∨ x = '.'.toUInt8)
      config.maxHostLength).toByteArray
      | fail s!"invalid host"
    let lower := str.toLower
    if h : URI.IsValidDomainName lower ∧ ¬lower.isEmpty then
      return .name ⟨lower, .isLowerCase_toLower, h⟩
    else
      fail s!"invalid domain name: {str}"
-- authority = [ userinfo "@" ] host [ ":" port ]
/--
Parses the authority component: optional userinfo terminated by `@`, a host,
and an optional port. A lone trailing `:` followed by a path/query/fragment
delimiter (or end of input) yields `Port.empty`; a missing `:` yields
`Port.omitted`.
Fix: restores `←` and `∨` lost in extraction.
-/
private def parseAuthority (config : URI.Config) : Parser URI.Authority := do
  -- Userinfo is only committed if it is followed by '@'; otherwise backtrack.
  let userInfo ← tryOpt do
    let ui ← parseUserInfo config
    skipByte '@'.toUInt8
    return ui
  let host ← parseHost config
  let port : URI.Port ←
    if ← peekIs (· == ':'.toUInt8) then
      skipByte ':'.toUInt8
      if (← peekWhen? isDigitByte).isSome then
        pure (.value (← parsePortNumber))
      else
        let next ← peek?
        if next.isNone || next.any (fun c => c = '/'.toUInt8 ∨ c = '?'.toUInt8 ∨ c = '#'.toUInt8) then
          pure .empty
        else
          fail "invalid port number"
    else
      pure .omitted
  return { userInfo, host, port }
-- segment = *pchar
/--
Parses a single (possibly empty) path segment: `pchar` or percent-encoded
bytes, bounded by `config.maxSegmentLength`.
Fix: restores the `∨` lost in extraction.
-/
private def parseSegment (config : URI.Config) : Parser ByteSlice := do
  takeWhileAtMost (fun c => isPChar c ∨ c = '%'.toUInt8) config.maxSegmentLength
/-
path = path-abempty ; begins with "/" or is empty
/ path-absolute ; begins with "/" but not "//"
/ path-noscheme ; begins with a non-colon segment
/ path-rootless ; begins with a segment
/ path-empty ; zero characters
path-abempty = *( "/" segment )
path-absolute = "/" [ segment-nz *( "/" segment ) ]
path-noscheme = segment-nz-nc *( "/" segment )
path-rootless = segment-nz *( "/" segment )
path-empty = 0<pchar>
-/
/--
Parses a URI path with combined parsing and validation.

* `forceAbsolute` requires the path to start with `/` (or be empty when
  `allowEmpty` is set).
* `allowEmpty` permits a completely empty path.

Enforces `config.maxPathSegments` and `config.maxTotalPathLength` (slashes
count toward the total length).

Fix: restores `←`, `∨`, `∧`, and the `→` in the delimiter predicate type lost
in extraction. NOTE(review): the nesting of the trailing-slash/empty-segment
branch is reconstructed from the flattened dump — confirm against the original.
-/
def parsePath (config : URI.Config) (forceAbsolute : Bool) (allowEmpty : Bool) : Parser URI.Path := do
  let isPathDelimiter : UInt8 → Bool := fun c => c = '?'.toUInt8 ∨ c = '#'.toUInt8
  let mut isAbsolute := false
  let mut segments : Array _ := #[]
  let mut totalLength := 0
  let isSegmentOrSlash ←
    peekIs (fun c => isPChar c ∨ c = '%'.toUInt8 ∨ c = '/'.toUInt8)
  if ¬allowEmpty ∧ ((← isEof) ∨ ¬isSegmentOrSlash) then
    fail "need a path"
  -- Check if path is absolute
  if ← peekIs (· == '/'.toUInt8) then
    isAbsolute := true
    totalLength := totalLength + 1
    skip
  else if forceAbsolute then
    if allowEmpty ∧ ((← isEof) ∨ ¬isSegmentOrSlash) then
      return { segments := segments, absolute := isAbsolute }
    else
      fail "require '/' in path"
  else
    pure ()
  -- Parse segments
  while (← peek?).isSome do
    let some next := (← peek?) | break
    if isPathDelimiter next then
      break
    if ¬(next = '/'.toUInt8 ∨ isPChar next ∨ next = '%'.toUInt8) then
      break
    if segments.size >= config.maxPathSegments then
      fail s!"too many path segments (limit: {config.maxPathSegments})"
    let segmentBytes ← parseSegment config
    let some segmentStr := URI.EncodedSegment.ofByteArray? segmentBytes.toByteArray
      | fail "invalid percent encoding in path segment"
    totalLength := totalLength + segmentBytes.size
    if totalLength > config.maxTotalPathLength then
      fail s!"path too long (limit: {config.maxTotalPathLength} bytes)"
    segments := segments.push segmentStr
    if (← peek?).any (· == '/'.toUInt8) then
      totalLength := totalLength + 1
      if totalLength > config.maxTotalPathLength then
        fail s!"path too long (limit: {config.maxTotalPathLength} bytes)"
      skip
      -- If path ends with '/', add empty segment
      let next ← peek?
      if next.isNone || next.any isPathDelimiter then
        if segments.size >= config.maxPathSegments then
          fail s!"too many path segments (limit: {config.maxPathSegments})"
        segments := segments.push (URI.EncodedString.empty)
    else
      break
  return { segments := segments, absolute := isAbsolute }
-- query = *( pchar / "/" / "?" )
/--
Parses a query string, splitting it into `&`-separated key/value pairs on `=`.
A pair without `=` is stored with a `none` value; extra `=` characters are kept
as part of the value. Bounded by `config.maxQueryLength` and
`config.maxQueryParams`.
Fix: restores the monadic binds (`←`) and `∨` lost in extraction.
-/
private def parseQuery (config : URI.Config) : Parser URI.Query := do
  let queryBytes ←
    takeWhileAtMost (fun c => isQueryChar c ∨ c = '%'.toUInt8) config.maxQueryLength
  let some queryStr := String.fromUTF8? queryBytes.toByteArray
    | fail "invalid query string"
  if queryStr.isEmpty then
    return URI.Query.empty
  let rawPairs := queryStr.splitOn "&"
  if rawPairs.length > config.maxQueryParams then
    fail s!"too many query parameters (limit: {config.maxQueryParams})"
  let pairs : Option URI.Query := rawPairs.foldlM (init := URI.Query.empty) fun acc pair => do
    match pair.splitOn "=" with
    | [key] =>
      let key ← URI.EncodedQueryParam.fromString? key
      pure (acc.insertEncoded key none)
    | key :: value =>
      let key ← URI.EncodedQueryParam.fromString? key
      -- Re-join with '=' so values containing '=' survive round-tripping.
      let value ← URI.EncodedQueryParam.fromString? (String.intercalate "=" value)
      pure (acc.insertEncoded key (some value))
    | [] => pure acc -- unreachable: splitOn always returns at least one element
  if let some pairs := pairs then
    return pairs
  else
    fail "invalid query string"
-- fragment = *( pchar / "/" / "?" )
/--
Parses a fragment component, validating its percent encoding. Bounded by
`config.maxFragmentLength`.
Fix: restores `←` and `∨` lost in extraction.
-/
private def parseFragment (config : URI.Config) : Parser URI.EncodedFragment := do
  let fragmentBytes ←
    takeWhileAtMost (fun c => isFragmentChar c ∨ c = '%'.toUInt8) config.maxFragmentLength
  let some fragmentStr := URI.EncodedFragment.ofByteArray? fragmentBytes.toByteArray
    | fail "invalid percent encoding in fragment"
  return fragmentStr
/--
Parses the hier-part of a URI: `"//" authority path-abempty`, or a bare path
(`path-absolute / path-rootless / path-empty`) with no authority.
Fix: restores the monadic binds (`←`) lost in extraction.
-/
private def parseHierPart (config : URI.Config) : Parser (Option URI.Authority × URI.Path) := do
  -- Check for "//" authority path-abempty
  if (← tryOpt (skipString "//")).isSome then
    let authority ← parseAuthority config
    let path ← parsePath config true true -- path-abempty (must start with "/" or be empty)
    return (some authority, path)
  else
    -- path-absolute / path-rootless / path-empty
    let path ← parsePath config false true
    return (none, path)
/--
Parses a URI (Uniform Resource Identifier).
URI = scheme ":" hier-part [ "?" query ] [ "#" fragment ]
hier-part = "//" authority path-abempty / path-absolute / path-rootless / path-empty

The fragment, when present, must decode successfully or parsing fails.
Fix: restores the monadic binds (`←`) lost in extraction.
-/
public def parseURI (config : URI.Config := {}) : Parser URI := do
  let scheme ← parseScheme config
  skipByte ':'.toUInt8
  let (authority, path) ← parseHierPart config
  let query ← optional (skipByteChar '?' *> parseQuery config)
  let query := query.getD .empty
  let fragment ← optional do
    let some result := (← (skipByteChar '#' *> parseFragment config)) |>.decode
      | fail "invalid fragment parse encoding"
    return result
  return { scheme, authority, path, query, fragment }
/--
Parses a request target with combined parsing and validation.

Tries, in order: asterisk-form (`*`), origin-form (`/path[?query]`),
absolute-form for explicit `http`/`https` targets, authority-form
(`host:port`), and finally generic absolute-form.
Fix: restores the monadic binds (`←`) lost in extraction.
-/
public def parseRequestTarget (config : URI.Config := {}) : Parser RequestTarget :=
  asterisk <|> origin <|> absoluteHttp <|> authority <|> absolute
where
  -- The asterisk form
  asterisk : Parser RequestTarget := do
    skipByte '*'.toUInt8
    return .asteriskForm
  -- origin-form = absolute-path [ "?" query ]
  -- absolute-path = 1*( "/" segment )
  origin : Parser RequestTarget := attempt do
    if ← peekIs (· == '/'.toUInt8) then
      let path ← parsePath config true true
      let query ← optional (skipByte '?'.toUInt8 *> parseQuery config)
      return .originForm path query
    else
      fail "not origin"
  -- Shared tail for absolute-form once the scheme has been consumed.
  absoluteFromScheme (scheme : URI.Scheme) : Parser RequestTarget := do
    skipByte ':'.toUInt8
    let (auth, path) ← parseHierPart config
    let query ← optional (skipByteChar '?' *> parseQuery config)
    let query := query.getD URI.Query.empty
    return .absoluteForm { path, scheme, authority := auth, query, fragment := none }
  -- Prefer absolute-form for explicit HTTP(S) scheme targets with a path or authority.
  -- This avoids misclassifying `http://host/path` as authority-form while still
  -- letting `http:80` fall through to authority-form parsing.
  absoluteHttp : Parser RequestTarget := attempt do
    let scheme ← parseScheme config
    if scheme.val = "http" || scheme.val = "https" then
      skipByte ':'.toUInt8
      if ← peekIs (· == '/'.toUInt8) then
        let (authority, path) ← parseHierPart config
        let query ← optional (skipByteChar '?' *> parseQuery config)
        let query := query.getD .empty
        return .absoluteForm { scheme, path, authority, query, fragment := none }
      else
        fail "not http absolute uri with path"
    else
      fail "not http absolute uri"
  -- absolute-URI = scheme ":" hier-part [ "?" query ]
  absolute : Parser RequestTarget := attempt do
    let scheme ← parseScheme config
    absoluteFromScheme scheme
  -- authority-form = host ":" port
  authority : Parser RequestTarget := attempt do
    let host ← parseHost config
    skipByteChar ':'
    let port ← parsePortNumber
    return .authorityForm { host, port := .value port }
/--
Parses an HTTP `Host` header value: a host optionally followed by `:` and a
port. The entire input must be consumed (trailing bytes fail the parse); a
trailing bare `:` yields `Port.empty`.
Fix: restores the monadic binds (`←`) lost in extraction.
-/
public def parseHostHeader (config : URI.Config := {}) : Parser (URI.Host × URI.Port) := do
  let host ← parseHost config
  let port : URI.Port ←
    if ← peekIs (· == ':'.toUInt8) then
      skipByte ':'.toUInt8
      if (← peekWhen? isDigitByte).isSome then
        pure (.value (← parsePortNumber))
      else
        let next ← peek?
        if next.isNone then
          pure .empty
        else
          fail "invalid host header port"
    else
      pure .omitted
  if ¬(← isEof) then
    fail "invalid host header"
  return (host, port)
end Std.Http.URI.Parser

View File

@@ -299,4 +299,15 @@ that provides it.
def isUserInfoChar (c : UInt8) : Bool :=
isUnreserved c || isSubDelims c || c = ':'.toUInt8
/--
Checks if a byte is a valid character in a URI query component,
excluding the typical key/value separators `&` and `=`.
Inspired by `query = *( pchar / "/" / "?" )` from RFC 3986,
but disallows `&` and `=` so they can be treated as structural separators.
Used for individual query keys/values (see `EncodedQueryParam`).
-/
@[inline, expose]
def isQueryDataChar (c : UInt8) : Bool :=
  isQueryChar c && c ≠ '&'.toUInt8 && c ≠ '='.toUInt8
end Std.Http.Internal.Char

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,134 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Data
public import Std.Internal.Http.Internal
public section
/-!
# HTTP/1.1 Configuration
This module defines the configuration options for HTTP/1.1 protocol processing,
including connection limits, header constraints, and various size limits.
-/
namespace Std.Http.Protocol.H1
set_option linter.all true
open Std Internal Parsec ByteArray
open Internal
/--
Connection limits and parser bounds configuration.
All limits are expressed in bytes unless stated otherwise.
-/
structure Config where
  /--
  Maximum number of requests (server) or responses (client) per connection.
  -/
  maxMessages : Nat := 100
  /--
  Maximum number of headers allowed per message.
  -/
  maxHeaders : Nat := 100
  /--
  Maximum aggregate byte size of all header field lines in a single message
  (name + value bytes plus 4 bytes per line for `: ` and `\r\n`). Default: 64 KiB.
  -/
  maxHeaderBytes : Nat := 65536
  /--
  Whether to enable keep-alive connections by default.
  -/
  enableKeepAlive : Bool := true
  /--
  The `Server` header value injected into outgoing responses (receiving mode) or the
  `User-Agent` header value injected into outgoing requests (sending mode).
  `none` suppresses the header entirely.
  -/
  agentName : Option Header.Value := none
  /--
  Maximum length of request URI (default: 8192 bytes).
  -/
  maxUriLength : Nat := 8192
  /--
  Maximum number of bytes consumed while parsing request/status start-lines (default: 8192 bytes).
  -/
  maxStartLineLength : Nat := 8192
  /--
  Maximum length of header field name (default: 256 bytes).
  -/
  maxHeaderNameLength : Nat := 256
  /--
  Maximum length of header field value (default: 8192 bytes).
  -/
  maxHeaderValueLength : Nat := 8192
  /--
  Maximum number of spaces in delimiter sequences (default: 16).
  -/
  maxSpaceSequence : Nat := 16
  /--
  Maximum number of leading empty lines (bare CRLF) to skip before a request-line
  (RFC 9112 §2.2 robustness). Default: 8.
  -/
  maxLeadingEmptyLines : Nat := 8
  /--
  Maximum number of extensions on a single chunk-size line (default: 16).
  -/
  maxChunkExtensions : Nat := 16
  /--
  Maximum length of chunk extension name (default: 256 bytes).
  -/
  maxChunkExtNameLength : Nat := 256
  /--
  Maximum length of chunk extension value (default: 256 bytes).
  -/
  maxChunkExtValueLength : Nat := 256
  /--
  Maximum number of bytes consumed while parsing one chunk-size line with extensions (default: 8192 bytes).
  -/
  maxChunkLineLength : Nat := 8192
  /--
  Maximum allowed chunk payload size in bytes (default: 8 MiB).
  -/
  maxChunkSize : Nat := 8 * 1024 * 1024
  /--
  Maximum allowed total body size per message in bytes (default: 64 MiB).
  This limit applies across all body framing modes. For chunked transfer encoding,
  chunk-size lines (including extensions) and the trailer section also count toward
  this limit, so the total wire bytes consumed by the body cannot exceed this value.
  -/
  maxBodySize : Nat := 64 * 1024 * 1024
  /--
  Maximum length of reason phrase (default: 512 bytes).
  -/
  maxReasonPhraseLength : Nat := 512
  /--
  Maximum number of trailer headers (default: 20).
  -/
  maxTrailerHeaders : Nat := 20
end Std.Http.Protocol.H1

View File

@@ -0,0 +1,110 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Time
public import Std.Internal.Http.Data
public import Std.Internal.Http.Internal
public import Std.Internal.Http.Protocol.H1.Parser
public import Std.Internal.Http.Protocol.H1.Config
public import Std.Internal.Http.Protocol.H1.Message
public section
/-!
# HTTP/1.1 Errors
This module defines the error types for HTTP/1.1 protocol processing,
including parsing errors, timeout errors, and connection errors.
-/
namespace Std.Http.Protocol.H1
set_option linter.all true
/--
Specific HTTP processing errors with detailed information.
See the `ToString` instance for the user-facing rendering of each case.
-/
inductive Error
  /--
  Malformed start line (request-line or status-line).
  -/
  | invalidStatusLine
  /--
  Invalid or malformed header.
  -/
  | invalidHeader
  /--
  Request timeout occurred.
  -/
  | timeout
  /--
  Request entity too large.
  -/
  | entityTooLarge
  /--
  Request URI is too long.
  -/
  | uriTooLong
  /--
  Unsupported HTTP version.
  -/
  | unsupportedVersion
  /--
  Invalid chunk encoding.
  -/
  | invalidChunk
  /--
  Connection closed.
  -/
  | connectionClosed
  /--
  Bad request or response message.
  -/
  | badMessage
  /--
  The number of header fields in the message exceeds the configured limit.
  Maps to HTTP 431 Request Header Fields Too Large.
  -/
  | tooManyHeaders
  /--
  The aggregate byte size of all header fields exceeds the configured limit.
  Maps to HTTP 431 Request Header Fields Too Large.
  -/
  | headersTooLarge
  /--
  Generic error with message.
  -/
  | other (message : String)
  deriving Repr, BEq
/-- Human-readable description for each protocol `Error` case. -/
instance : ToString Error where
  toString e :=
    match e with
    | .invalidStatusLine => "Invalid status line"
    | .invalidHeader => "Invalid header"
    | .timeout => "Timeout"
    | .entityTooLarge => "Entity too large"
    | .uriTooLong => "URI too long"
    | .unsupportedVersion => "Unsupported version"
    | .invalidChunk => "Invalid chunk"
    | .connectionClosed => "Connection closed"
    | .badMessage => "Bad message"
    | .tooManyHeaders => "Too many headers"
    | .headersTooLarge => "Headers too large"
    | .other msg => s!"Other error: {msg}"

View File

@@ -0,0 +1,73 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Time
public import Std.Internal.Http.Data
public import Std.Internal.Http.Internal
public import Std.Internal.Http.Protocol.H1.Parser
public import Std.Internal.Http.Protocol.H1.Config
public import Std.Internal.Http.Protocol.H1.Message
public import Std.Internal.Http.Protocol.H1.Error
public section
/-!
# HTTP/1.1 Events
This module defines the events that can occur during HTTP/1.1 message processing,
including header completion and control/error signals.
-/
namespace Std.Http.Protocol.H1
set_option linter.all true
/--
Events emitted during HTTP message processing.
Parameterized by `Direction`, so `endHeaders` carries a `Request.Head` when
receiving and a `Response.Head` when sending.
-/
inductive Event (dir : Direction)
  /--
  Indicates that all headers have been successfully parsed.
  -/
  | endHeaders (head : Message.Head dir)
  /--
  Signals that additional input data is required to continue processing.
  `size`, when present, hints how many bytes are needed.
  -/
  | needMoreData (size : Option Nat)
  /--
  Indicates a failure during parsing or processing.
  -/
  | failed (err : Error)
  /--
  Requests that the connection be closed.
  -/
  | close
  /--
  The body should be closed.
  -/
  | closeBody
  /--
  Indicates that a response is required.
  -/
  | needAnswer
  /--
  Indicates readiness to process the next message.
  -/
  | next
  /--
  Signals that an `Expect: 100-continue` decision is pending.
  -/
  | «continue»
  deriving Inhabited, Repr

View File

@@ -0,0 +1,139 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
import Init.Data.Array
public import Std.Internal.Http.Data
public section
/-!
# Message
This module provides types and operations for HTTP/1.1 messages, centered around the `Direction`
type which models the server's role in message exchange: `Direction.receiving` for parsing incoming
requests from clients, and `Direction.sending` for generating outgoing responses to clients.
The `Message.Head` type is parameterized by `Direction` and resolves to `Request.Head` or
`Response.Head` accordingly, enabling generic code that works uniformly across both phases
while exposing common operations such as headers, version, and `shouldKeepAlive`
-/
namespace Std.Http.Protocol.H1
set_option linter.all true
/--
Direction of message flow from the server's perspective.
-/
inductive Direction
  /--
  Receiving and parsing incoming requests from clients.
  -/
  | receiving
  /--
  Client perspective: writing outgoing requests and reading incoming responses.
  -/
  -- NOTE(review): this docstring frames `sending` from the client side, while
  -- the module doc frames it as "generating outgoing responses to clients" —
  -- confirm which wording is intended.
  | sending
  deriving BEq
/--
Inverts the message direction (`receiving` ↔ `sending`).
Fix: restores the `→` in the type that was lost in extraction.
-/
@[expose]
abbrev Direction.swap : Direction → Direction
  | .receiving => .sending
  | .sending => .receiving
/--
Gets the message head type based on direction: `Request.Head` when receiving,
`Response.Head` when sending.
Fix: restores the `→` in the type that was lost in extraction.
-/
@[expose]
def Message.Head : Direction → Type
  | .receiving => Request.Head
  | .sending => Response.Head
/--
Gets the headers of a `Message`, dispatching on the direction to the
appropriate concrete head type.
-/
def Message.Head.headers (m : Message.Head dir) : Headers :=
  match dir with
  | .receiving => Request.Head.headers m
  | .sending => Response.Head.headers m

/--
Gets the version of a `Message`, dispatching on the direction to the
appropriate concrete head type.
-/
def Message.Head.version (m : Message.Head dir) : Version :=
  match dir with
  | .receiving => Request.Head.version m
  | .sending => Response.Head.version m

/--
Determines the message body size based on the `Content-Length` header and the `Transfer-Encoding` (chunked) flag.

Returns `none` when the framing is ambiguous or malformed (multiple
`Content-Length` values, multiple `Transfer-Encoding` headers, or mixed
TE + CL). When neither header is present, returns `some (.fixed 0)` only if
`allowEOFBody` is set.
-/
def Message.Head.getSize (message : Message.Head dir) (allowEOFBody : Bool) : Option Body.Length :=
  let contentLength := message.headers.getAll? .contentLength
  match message.headers.getAll? .transferEncoding with
  | none =>
    match contentLength with
    | some #[cl] => .fixed <$> cl.value.toNat?
    | some _ => none -- To avoid request smuggling with malformed/multiple content-length headers.
    | none => if allowEOFBody then some (.fixed 0) else none
  -- Single transfer-encoding header.
  | some #[header] =>
    let te := Header.TransferEncoding.parse header
    match Header.TransferEncoding.isChunked <$> te, contentLength with
    | some true, none =>
      -- HTTP/1.0 does not define chunked transfer encoding (RFC 2068 §19.4.6).
      -- A server MUST NOT use chunked with an HTTP/1.0 peer; likewise, an
      -- HTTP/1.0 request carrying Transfer-Encoding: chunked is malformed.
      if message.version == .v10 then none else some .chunked
    | _, _ => none -- To avoid request smuggling when TE and CL are mixed.
  -- We disallow multiple transfer-encoding headers.
  | some _ => none
/--
Checks whether the message indicates that the connection should be kept alive.

All `Connection` header values are parsed and their tokens concatenated; if any
value fails to parse the result is `false`. For HTTP/1.1 keep-alive is the
default unless a `close` token is present; for older versions an explicit
`keep-alive` token is required.
Fix: restores the monadic binds (`←`) lost in extraction and the missing space
in `=> false`.
-/
def Message.Head.shouldKeepAlive (message : Message.Head dir) : Bool :=
  let tokens? : Option (Array String) :=
    match message.headers.getAll? .connection with
    | none => some #[]
    | some values =>
      values.foldl (fun acc raw => do
        let acc ← acc
        let parsed ← Header.Connection.parse raw
        pure (acc ++ parsed.tokens)
      ) (some #[])
  match tokens? with
  | none => false
  | some tokens =>
    if message.version == .v11 then
      !tokens.any (· == "close")
    else
      tokens.any (· == "keep-alive")
/-- `Repr` dispatches to the concrete head type's instance. -/
instance : Repr (Message.Head dir) :=
  match dir with
  | .receiving => inferInstanceAs (Repr Request.Head)
  | .sending => inferInstanceAs (Repr Response.Head)

/-- HTTP/1.1 wire encoding dispatches to the concrete head type's instance. -/
instance : Internal.Encode .v11 (Message.Head dir) :=
  match dir with
  | .receiving => inferInstanceAs (Internal.Encode .v11 Request.Head)
  | .sending => inferInstanceAs (Internal.Encode .v11 Response.Head)

/-- Default head: a `GET` HTTP/1.1 request when receiving, default response when sending. -/
instance : EmptyCollection (Message.Head dir) where
  emptyCollection :=
    match dir with
    | .receiving => { method := .get, version := .v11 }
    | .sending => {}

View File

@@ -0,0 +1,548 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Parsec
public import Std.Internal.Http.Data
public import Std.Internal.Parsec.ByteArray
public import Std.Internal.Http.Protocol.H1.Config
/-!
This module defines parsers for HTTP/1.1 request and response lines, headers, and body framing. The
reference used is https://httpwg.org/specs/rfc9112.html.
-/
namespace Std.Http.Protocol.H1
open Std Internal Parsec ByteArray Internal Internal.Char
set_option linter.all true
/--
Checks if a byte may appear inside a field value.
This parser enforces strict ASCII-only field values and allows only `field-content`
(`HTAB / SP / VCHAR`).
-/
@[inline]
def isFieldVChar (c : UInt8) : Bool :=
  fieldContent (Char.ofUInt8 c)

/--
Checks if a byte may appear unescaped inside a quoted-string value.
Allows `HTAB / SP / %x21 / %x23-5B / %x5D-7E` (strict ASCII-only; no obs-text).
-/
@[inline]
def isQdText (c : UInt8) : Bool :=
  qdtext (Char.ofUInt8 c)

/--
Checks if a byte is optional whitespace (`OWS = SP / HTAB`, RFC 9110 §5.6.3).
-/
@[inline]
def isOwsByte (c : UInt8) : Bool :=
  ows (Char.ofUInt8 c)
-- Parser blocks
/--
Repeatedly applies `parser` until it returns `none` or the `maxCount` limit is
exceeded. Returns the collected results as an array. Each step is wrapped in
`attempt`, so a failing step backtracks and terminates the loop cleanly.
Fix: restores the monadic binds (`←`) lost in extraction.
-/
partial def manyItems {α : Type} (parser : Parser (Option α)) (maxCount : Nat) : Parser (Array α) := do
  let rec go (acc : Array α) : Parser (Array α) := do
    let step ← optional <| attempt do
      match ← parser with
      | none => fail "end of items"
      | some x => return x
    match step with
    | none =>
      return acc
    | some x =>
      let acc := acc.push x
      if acc.size > maxCount then
        fail s!"too many items: {acc.size} > {maxCount}"
      go acc
  go #[]
/--
Lifts an `Option` into the parser monad, failing with a generic message if the value is `none`.
-/
def liftOption (x : Option α) : Parser α :=
  if let some res := x then
    return res
  else
    fail "expected value but got none"

/--
Parses an HTTP token (RFC 9110 §5.6.2): one or more token characters, up to `limit` bytes.
Fails if the input starts with a non-token character or is empty.
-/
@[inline]
def parseToken (limit : Nat) : Parser ByteSlice :=
  takeWhileUpTo1 (fun c => tchar (Char.ofUInt8 c)) limit

/--
Parses a line terminator (the two-byte sequence `\r\n`).
-/
@[inline]
def crlf : Parser Unit := do
  skipBytes "\r\n".toUTF8
/--
Consumes and ignores empty lines (`CRLF`) that appear before a request-line,
bounded by `limits.maxLeadingEmptyLines`.
https://httpwg.org/specs/rfc9112.html#rfc.section.2.2:
"In the interest of robustness, a server that is expecting to receive and parse a request-line SHOULD
ignore at least one empty line (CRLF) received prior to the request-line."
Fix: restores the monadic bind (`←`) lost in extraction.
-/
def skipLeadingRequestEmptyLines (limits : H1.Config) : Parser Unit := do
  let mut count := 0
  while (← peekWhen? (· == '\r'.toUInt8)).isSome do
    if count >= limits.maxLeadingEmptyLines then
      fail "too many leading empty lines"
    crlf
    count := count + 1
/--
Parses a single space (SP, 0x20), failing on any other byte.
-/
@[inline]
def sp : Parser Unit :=
  skipByte ' '.toUInt8
/--
Parses optional whitespace (OWS = *(SP / HTAB), RFC 9110 §5.6.3), bounded by
`limits.maxSpaceSequence`. Fails if more whitespace follows the limit, so oversized
padding is rejected rather than silently truncated.
Fix: restores the monadic bind (`←`) lost in extraction.
-/
@[inline]
def ows (limits : H1.Config) : Parser Unit := do
  discard <| takeWhileUpTo isOwsByte limits.maxSpaceSequence
  if (← peekWhen? isOwsByte) |>.isSome then
    fail "invalid space sequence"
  else
    pure ()
/--
Parses a single ASCII hex digit and returns its numeric value (`0`-`15`).
Accepts `0-9`, `A-F`, and `a-f`.
Fix: restores the monadic bind (`←`) and the `≥`/`≤` comparisons lost in
extraction.
-/
def hexDigit : Parser UInt8 := do
  let b ← any
  if isHexDigitByte b then
    if b ≥ '0'.toUInt8 && b ≤ '9'.toUInt8 then return b - '0'.toUInt8
    else if b ≥ 'A'.toUInt8 && b ≤ 'F'.toUInt8 then return b - 'A'.toUInt8 + 10
    else return b - 'a'.toUInt8 + 10
  else fail s!"invalid hex digit {Char.ofUInt8 b |>.quote}"
/--
Parses a hexadecimal integer (one or more hex digits, up to 16 digits).
Used for chunk-size lines in chunked transfer encoding.
Fix: restores the monadic binds (`←`) lost in extraction.
-/
partial def hex : Parser Nat := do
  let rec go (acc : Nat) (count : Nat) : Parser Nat := do
    match ← optional (attempt hexDigit) with
    | some d =>
      if count + 1 > 16 then
        fail "chunk size too large"
      else
        go (acc * 16 + d.toNat) (count + 1)
    | none =>
      if count = 0 then
        -- Preserve EOF as incremental chunk-size parsing can request more data.
        -- For non-EOF invalid bytes, keep the specific parse failure.
        let _ ← peek!
        fail "expected hex digit"
      else
        return acc
  go 0 0
-- Actual parsers
/--
Parses `HTTP-version = HTTP-name "/" DIGIT "." DIGIT` and returns the major and
minor version numbers as a pair.
Fix: restores the monadic binds (`←`) lost in extraction.
-/
def parseHttpVersionNumber : Parser (Nat × Nat) := do
  skipBytes "HTTP/".toUTF8
  let major ← digit
  skipByte '.'.toUInt8
  let minor ← digit
  -- `digit` yields the raw ASCII byte; subtract '0' (48) for the numeric value.
  pure ((major.toNat - 48), (minor.toNat - 48))
/--
Parses an HTTP version string and returns the corresponding `Version` value.
Fails if the version is not recognized by `Version.ofNumber?`.
Fix: restores the monadic bind (`←`) lost in extraction.
-/
def parseHttpVersion : Parser Version := do
  let (major, minor) ← parseHttpVersionNumber
  liftOption <| Version.ofNumber? major minor
/-
method = token
Every branch is wrapped in `attempt` so that `<|>` always backtracks on
failure, even after consuming bytes. This is strictly necessary only for the
P-group (POST / PUT / PATCH) which share a common first byte, but wrapping
all alternatives keeps the parser defensively correct if new methods are
added in the future.
-/
-- NOTE: alternative order matters for prefix-overlapping tokens — a longer
-- token must come before any token that is its prefix (e.g. UPDATEREDIRECTREF
-- before UPDATE below), otherwise the shorter one would match first.
def parseMethod : Parser Method :=
  (attempt <| skipBytes "GET".toUTF8 <&> fun _ => Method.get)
  <|> (attempt <| skipBytes "HEAD".toUTF8 <&> fun _ => Method.head)
  <|> (attempt <| skipBytes "DELETE".toUTF8 <&> fun _ => Method.delete)
  <|> (attempt <| skipBytes "TRACE".toUTF8 <&> fun _ => Method.trace)
  <|> (attempt <| skipBytes "ACL".toUTF8 <&> fun _ => Method.acl)
  <|> (attempt <| skipBytes "QUERY".toUTF8 <&> fun _ => Method.query)
  <|> (attempt <| skipBytes "SEARCH".toUTF8 <&> fun _ => Method.search)
  <|> (attempt <| skipBytes "BASELINE-CONTROL".toUTF8 <&> fun _ => Method.baselineControl)
  <|> (attempt <| skipBytes "BIND".toUTF8 <&> fun _ => Method.bind)
  <|> (attempt <| skipBytes "CONNECT".toUTF8 <&> fun _ => Method.connect)
  <|> (attempt <| skipBytes "CHECKIN".toUTF8 <&> fun _ => Method.checkin)
  <|> (attempt <| skipBytes "CHECKOUT".toUTF8 <&> fun _ => Method.checkout)
  <|> (attempt <| skipBytes "COPY".toUTF8 <&> fun _ => Method.copy)
  <|> (attempt <| skipBytes "LABEL".toUTF8 <&> fun _ => Method.label)
  <|> (attempt <| skipBytes "LINK".toUTF8 <&> fun _ => Method.link)
  <|> (attempt <| skipBytes "LOCK".toUTF8 <&> fun _ => Method.lock)
  <|> (attempt <| skipBytes "MERGE".toUTF8 <&> fun _ => Method.merge)
  <|> (attempt <| skipBytes "MKACTIVITY".toUTF8 <&> fun _ => Method.mkactivity)
  <|> (attempt <| skipBytes "MKCALENDAR".toUTF8 <&> fun _ => Method.mkcalendar)
  <|> (attempt <| skipBytes "MKCOL".toUTF8 <&> fun _ => Method.mkcol)
  <|> (attempt <| skipBytes "MKREDIRECTREF".toUTF8 <&> fun _ => Method.mkredirectref)
  <|> (attempt <| skipBytes "MKWORKSPACE".toUTF8 <&> fun _ => Method.mkworkspace)
  <|> (attempt <| skipBytes "MOVE".toUTF8 <&> fun _ => Method.move)
  <|> (attempt <| skipBytes "OPTIONS".toUTF8 <&> fun _ => Method.options)
  <|> (attempt <| skipBytes "ORDERPATCH".toUTF8 <&> fun _ => Method.orderpatch)
  -- P-group: POST / PUT / PATCH / PRI / PROP* share the first byte.
  <|> (attempt <| skipBytes "POST".toUTF8 <&> fun _ => Method.post)
  <|> (attempt <| skipBytes "PUT".toUTF8 <&> fun _ => Method.put)
  <|> (attempt <| skipBytes "PATCH".toUTF8 <&> fun _ => Method.patch)
  <|> (attempt <| skipBytes "PRI".toUTF8 <&> fun _ => Method.pri)
  <|> (attempt <| skipBytes "PROPFIND".toUTF8 <&> fun _ => Method.propfind)
  <|> (attempt <| skipBytes "PROPPATCH".toUTF8 <&> fun _ => Method.proppatch)
  <|> (attempt <| skipBytes "REBIND".toUTF8 <&> fun _ => Method.rebind)
  <|> (attempt <| skipBytes "REPORT".toUTF8 <&> fun _ => Method.report)
  <|> (attempt <| skipBytes "UNBIND".toUTF8 <&> fun _ => Method.unbind)
  <|> (attempt <| skipBytes "UNCHECKOUT".toUTF8 <&> fun _ => Method.uncheckout)
  <|> (attempt <| skipBytes "UNLINK".toUTF8 <&> fun _ => Method.unlink)
  <|> (attempt <| skipBytes "UNLOCK".toUTF8 <&> fun _ => Method.unlock)
  -- UPDATEREDIRECTREF must precede UPDATE (UPDATE is a strict prefix of it).
  <|> (attempt <| skipBytes "UPDATEREDIRECTREF".toUTF8 <&> fun _ => Method.updateredirectref)
  <|> (attempt <| skipBytes "UPDATE".toUTF8 <&> fun _ => Method.update)
  <|> (attempt <| skipBytes "VERSION-CONTROL".toUTF8 <&> fun _ => Method.versionControl)
  -- Fallback: consume a bounded token so the error points past the bad method.
  <|> (parseToken 64 *> fail "unrecognized method")
/--
Parses a request-target URI, up to `limits.maxUriLength` bytes.
Fails with `"uri too long"` if the target exceeds the configured limit.
-/
def parseURI (limits : H1.Config) : Parser ByteArray := do
  let uri ← takeUntilUpTo (· == ' '.toUInt8) limits.maxUriLength
  -- Hitting the limit exactly is only an error when a non-space byte follows,
  -- i.e. the target actually continues past the cap.
  if uri.size == limits.maxUriLength then
    if (← peekWhen? (· != ' '.toUInt8)) |>.isSome then
      fail "uri too long"
  return uri.toByteArray
/--
Shared core for request-line parsing: parses `request-target SP HTTP-version CRLF`
and returns the `RequestTarget` together with the raw major/minor version numbers.
Both `parseRequestLine` and `parseRequestLineRawVersion` call this after consuming
the method token, keeping URI validation and version parsing in one place.
-/
private def parseRequestLineBody (limits : H1.Config) : Parser (RequestTarget × Nat × Nat) := do
  let rawUri ← parseURI limits <* sp
  -- Re-run the dedicated request-target grammar over the raw bytes; `<* eof`
  -- ensures the whole target was consumed.
  let uri ← match (Std.Http.URI.Parser.parseRequestTarget <* eof).run rawUri with
    | .ok res => pure res
    | .error res => fail res
  let versionPair ← parseHttpVersionNumber <* crlf
  return (uri, versionPair)
/--
Parses a request line and returns a fully-typed `Request.Head`.
`request-line = method SP request-target SP HTTP-version`
Only HTTP/1.0 and HTTP/1.1 are accepted; other versions fail.
-/
public def parseRequestLine (limits : H1.Config) : Parser Request.Head := do
  skipLeadingRequestEmptyLines limits
  let method ← parseMethod <* sp
  let (uri, (major, minor)) ← parseRequestLineBody limits
  if major == 1 && minor == 1 then
    return ⟨method, .v11, uri, .empty⟩
  else if major == 1 && minor == 0 then
    return ⟨method, .v10, uri, .empty⟩
  else
    fail "unsupported HTTP version"
/--
Parses a request line and returns the recognized HTTP method and version when available.
`request-line = method SP request-target SP HTTP-version`
Unlike `parseRequestLine`, an unrecognized version is returned as `none`
instead of failing, so callers can map it to a protocol-level error event.
-/
public def parseRequestLineRawVersion (limits : H1.Config) : Parser (Method × RequestTarget × Option Version) := do
  skipLeadingRequestEmptyLines limits
  let method ← parseMethod <* sp
  let (uri, (major, minor)) ← parseRequestLineBody limits
  return (method, uri, Version.ofNumber? major minor)
/--
Parses a single header field line.
`field-line = field-name ":" OWS field-value OWS`
The value is optional (an empty field value is legal); trailing ASCII
whitespace is trimmed from the decoded value.
-/
def parseFieldLine (limits : H1.Config) : Parser (String × String) := do
  let name ← parseToken limits.maxHeaderNameLength
  let value ← skipByte ':'.toUInt8 *> ows limits *> optional (takeWhileUpTo isFieldVChar limits.maxHeaderValueLength) <* ows limits
  let name ← liftOption <| String.fromUTF8? name.toByteArray
  -- A missing value decodes as the empty string.
  let value ← liftOption <| String.fromUTF8? <| value.map (·.toByteArray) |>.getD .empty
  let value := value.trimAsciiEnd.toString
  return (name, value)
/--
Parses a single header field line, or returns `none` when it sees the blank line that
terminates the header section.
```
field-line = field-name ":" OWS field-value OWS CRLF
```
-/
public def parseSingleHeader (limits : H1.Config) : Parser (Option (String × String)) := do
  let next ← peek?
  -- A CR (or bare LF) here can only be the blank line ending the header block.
  if next == some '\r'.toUInt8 || next == some '\n'.toUInt8 then
    crlf
    pure none
  else
    some <$> (parseFieldLine limits <* crlf)
/--
Parses a backslash-escaped character inside a quoted-string.
`quoted-pair = "\" ( HTAB / SP / VCHAR )` — strict ASCII-only (no obs-text).
-/
def parseQuotedPair : Parser UInt8 := do
  skipByte '\\'.toUInt8
  let b ← any
  if quotedPairChar (Char.ofUInt8 b) then
    return b
  else
    fail s!"invalid quoted-pair byte: {Char.ofUInt8 b |>.quote}"
/--
Parses a quoted-string value, unescaping quoted-pairs.
`quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE`
The unescaped length is bounded by `maxLength`; longer strings fail.
-/
partial def parseQuotedString (maxLength : Nat) : Parser String := do
  skipByte '"'.toUInt8
  let rec loop (buf : ByteArray) (length : Nat) : Parser ByteArray := do
    let b ← any
    if b == '"'.toUInt8 then
      -- Closing quote terminates the string.
      return buf
    else if b == '\\'.toUInt8 then
      -- quoted-pair: the next byte is taken literally if it is a legal escapee.
      let next ← any
      if quotedPairChar (Char.ofUInt8 next)
      then
        let length := length + 1
        if length > maxLength then
          fail "quoted-string too long"
        else
          loop (buf.push next) length
      else fail s!"invalid quoted-pair byte: {Char.ofUInt8 next |>.quote}"
    else if isQdText b then
      let length := length + 1
      if length > maxLength then
        fail "quoted-string too long"
      else
        loop (buf.push b) length
    else
      fail s!"invalid qdtext byte: {Char.ofUInt8 b |>.quote}"
  liftOption <| String.fromUTF8? (← loop .empty 0)
-- chunk-ext = *( BWS ";" BWS chunk-ext-name [ BWS "=" BWS chunk-ext-val] )
/--
Parses a single chunk extension (`; name` or `; name=value`); the value may be
a token or a quoted-string. Returns the name with an optional value.
-/
def parseChunkExt (limits : H1.Config) : Parser (Chunk.ExtensionName × Option Chunk.ExtensionValue) := do
  ows limits *> skipByte ';'.toUInt8 *> ows limits
  let name ← (liftOption =<< String.fromUTF8? <$> ByteSlice.toByteArray <$> parseToken limits.maxChunkExtNameLength) <* ows limits
  let some name := Chunk.ExtensionName.ofString? name
    | fail "invalid extension name"
  if (← peekWhen? (· == '='.toUInt8)) |>.isSome then
    -- RFC 9112 §7.1.1: BWS is allowed around "=".
    -- The `<* ows limits` after the name already consumed any trailing whitespace,
    -- so these ows calls are no-ops in practice, but kept for explicit grammar correspondence.
    ows limits *> skipByte '='.toUInt8 *> ows limits
    let value ← ows limits *> (parseQuotedString limits.maxChunkExtValueLength <|> liftOption =<< (String.fromUTF8? <$> ByteSlice.toByteArray <$> parseToken limits.maxChunkExtValueLength))
    let some value := Chunk.ExtensionValue.ofString? value
      | fail "invalid extension value"
    return (name, some value)
  return (name, none)
/--
Parses the size and extensions of a chunk, up to and including the terminating CRLF.
The number of extensions is bounded by `limits.maxChunkExtensions`.
-/
public def parseChunkSize (limits : H1.Config) : Parser (Nat × Array (Chunk.ExtensionName × Option Chunk.ExtensionValue)) := do
  let size ← hex
  let ext ← manyItems (optional (attempt (parseChunkExt limits))) limits.maxChunkExtensions
  crlf
  return (size, ext)
/--
Result of parsing partial or complete information.
-/
public inductive TakeResult
  /-- All requested bytes were available; `data` is the complete slice. -/
  | complete (data : ByteSlice)
  /-- Input ran out early; `data` holds what was read and `remaining` counts the bytes still owed. -/
  | incomplete (data : ByteSlice) (remaining : Nat)
/--
Parses a single chunk in chunked transfer encoding.
Returns `none` for the last chunk (size 0), otherwise the size, extensions,
and the chunk's data slice.
-/
public def parseChunkPartial (limits : H1.Config) : Parser (Option (Nat × Array (Chunk.ExtensionName × Option Chunk.ExtensionValue) × ByteSlice)) := do
  let (size, ext) ← parseChunkSize limits
  if size == 0 then
    -- `0\r\n` marks the last chunk; the trailer section is parsed separately.
    return none
  else
    let data ← take size
    return some ⟨size, ext, data⟩
/--
Parses fixed-size data that can be incomplete.
Operates directly on the iterator: never fails on a short buffer, instead
returning `.incomplete` with whatever was available.
-/
public def parseFixedSizeData (size : Nat) : Parser TakeResult := fun it =>
  -- Empty buffer: report EOF so incremental callers can request more input.
  if it.remainingBytes = 0 then
    .error it .eof
  -- Fewer than `size` bytes buffered: consume them all and report the deficit.
  else if it.remainingBytes < size then
    .success (it.forward it.remainingBytes) (.incomplete it.array[it.idx...(it.idx+it.remainingBytes)] (size - it.remainingBytes))
  else
    -- Full amount available: consume exactly `size` bytes.
    .success (it.forward size) (.complete (it.array[it.idx...(it.idx+size)]))
/--
Parses fixed-size chunk data that can be incomplete.
When the chunk data is complete, also consumes the CRLF that terminates the chunk;
when incomplete, the CRLF is left for a later call once the rest arrives.
-/
public def parseChunkSizedData (size : Nat) : Parser TakeResult := do
  match ← parseFixedSizeData size with
  | .complete data => crlf *> return .complete data
  | .incomplete data res => return .incomplete data res
/--
Returns `true` if `name` (compared case-insensitively) is a field that MUST NOT appear in HTTP/1.1
trailer sections per RFC 9112 §6.5. Forbidden fields are those required for message framing
(`content-length`, `transfer-encoding`), routing (`host`), or connection management (`connection`).
-/
private def isForbiddenTrailerField (name : String) : Bool :=
  -- Lower-cased field names that may never be smuggled into a trailer section.
  let forbidden := [
    "content-length", "transfer-encoding", "host",
    "connection", "expect", "te",
    "authorization", "max-forwards", "cache-control",
    "content-encoding", "upgrade", "trailer"
  ]
  forbidden.contains name.toLower
/--
Parses a trailer header (used after a chunked body), rejecting forbidden field names per RFC 9112
§6.5. Fields used for message framing (`content-length`, `transfer-encoding`), routing (`host`),
or connection management (`connection`, `te`, `upgrade`) are rejected to prevent trailer injection
attacks where a downstream proxy might re-interpret them.
-/
def parseTrailerHeader (limits : H1.Config) : Parser (Option (String × String)) := do
  let result ← parseSingleHeader limits
  if let some (name, _) := result then
    if isForbiddenTrailerField name then
      fail s!"forbidden trailer field: {name}"
  return result
/--
Parses trailer headers after a chunked body and returns them as an array of name-value pairs.
This is exposed for callers that need the trailer values directly (e.g. clients). The
internal protocol machine uses `parseLastChunkBody` instead, which discards trailer values.
-/
public def parseTrailers (limits : H1.Config) : Parser (Array (String × String)) := do
  let trailers ← manyItems (parseTrailerHeader limits) limits.maxTrailerHeaders
  crlf
  return trailers
/--
Returns `true` if `c` is a valid reason-phrase byte (`HTAB / SP / VCHAR`, strict ASCII-only).
-/
@[inline]
def isReasonPhraseByte (c : UInt8) : Bool :=
  Char.ofUInt8 c |> fieldContent
/--
Parses a reason phrase (text after status code).
Allows only `HTAB / SP / VCHAR` bytes (strict ASCII-only), bounded by
`limits.maxReasonPhraseLength`.
-/
def parseReasonPhrase (limits : H1.Config) : Parser String := do
  let bytes ← takeWhileUpTo isReasonPhraseByte limits.maxReasonPhraseLength
  liftOption <| String.fromUTF8? bytes.toByteArray
/--
Parses a status-code (3 decimal digits), the following reason phrase, and the
terminating CRLF; returns a typed `Status`.
Fails when the three digits do not form a status recognized by `Status.ofCode`.
-/
def parseStatusCode (limits : H1.Config) : Parser Status := do
  let d1 ← digit
  let d2 ← digit
  let d3 ← digit
  -- `digit` yields raw ASCII bytes; subtract '0' (48) per position.
  let code := (d1.toNat - 48) * 100 + (d2.toNat - 48) * 10 + (d3.toNat - 48)
  sp
  let phrase ← parseReasonPhrase limits <* crlf
  if h : IsValidReasonPhrase phrase then
    if let some status := Status.ofCode (some ⟨phrase, h⟩) code.toUInt16 then
      return status
  fail "invalid status code"
/--
Parses a status line and returns a fully-typed `Response.Head`.
`status-line = HTTP-version SP status-code SP [ reason-phrase ]`
Accepts only HTTP/1.0 and HTTP/1.1. For parsing where the version may be unrecognized
and must be mapped to an error event, use `parseStatusLineRawVersion`.
-/
public def parseStatusLine (limits : H1.Config) : Parser Response.Head := do
  let (major, minor) ← parseHttpVersionNumber <* sp
  let status ← parseStatusCode limits
  if major == 1 && minor == 1 then
    return { status, version := .v11, headers := .empty }
  else if major == 1 && minor == 0 then
    return { status, version := .v10, headers := .empty }
  else
    fail "unsupported HTTP version"
/--
Parses a status line and returns the status code plus recognized HTTP version when available.
Consumes and discards the reason phrase.
`status-line = HTTP-version SP status-code SP [ reason-phrase ] CRLF`
-/
public def parseStatusLineRawVersion (limits : H1.Config) : Parser (Status × Option Version) := do
  let (major, minor) ← parseHttpVersionNumber <* sp
  let status ← parseStatusCode limits
  return (status, Version.ofNumber? major minor)
/--
Parses the trailer section that follows the last chunk size line (`0\r\n`),
discarding any trailer values.
-/
public def parseLastChunkBody (limits : H1.Config) : Parser Unit := do
  let _ ← manyItems (parseTrailerHeader limits) limits.maxTrailerHeaders
  crlf
end Std.Http.Protocol.H1

View File

@@ -0,0 +1,319 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Time
public import Std.Internal.Http.Data
public import Std.Internal.Http.Internal
public import Std.Internal.Http.Protocol.H1.Parser
public import Std.Internal.Http.Protocol.H1.Config
public import Std.Internal.Http.Protocol.H1.Message
public import Std.Internal.Http.Protocol.H1.Error
public section
/-!
# HTTP/1.1 Reader
This module defines the reader state machine for parsing incoming HTTP/1.1 messages.
It tracks the parsing state including start line, headers, and body handling for
both fixed-length and chunked transfer encodings.
-/
namespace Std.Http.Protocol.H1
set_option linter.all true
/--
The body-framing sub-state of the `Reader` state machine.
-/
inductive Reader.BodyState where
/--
Parse fixed-length body bytes, tracking the number of bytes remaining.
-/
| fixed (remaining : Nat)
/--
Parse the next chunk-size line in chunked transfer encoding.
-/
| chunkedSize
/--
Parse chunk data for the current chunk.
-/
| chunkedBody (ext : Array (Chunk.ExtensionName × Option Chunk.ExtensionValue)) (remaining : Nat)
/--
Parse body bytes until EOF (connection close).
-/
| closeDelimited
deriving Inhabited, Repr, BEq
/--
The state of the `Reader` state machine.
-/
inductive Reader.State (dir : Direction) : Type
  /--
  Initial state waiting for HTTP start line.
  -/
  | needStartLine : State dir
  /--
  State waiting for HTTP headers, tracking number of headers parsed.
  -/
  | needHeader : Nat → State dir
  /--
  Unified body-reading state.
  -/
  | readBody : Reader.BodyState → State dir
  /--
  Paused waiting for a `canContinue` decision, carrying the next state.
  -/
  | «continue» : State dir → State dir
  /--
  State waiting to be able to read new data.
  -/
  | pending : State dir
  /--
  Completed a single request or response and can advance to the next one.
  -/
  | complete
  /--
  Completed and cannot process more data.
  -/
  | closed
  /--
  The input is malformed.
  -/
  | failed (error : Error) : State dir
  deriving Inhabited, Repr, BEq
/--
Manages the reading state of the HTTP parsing and processing machine.
-/
structure Reader (dir : Direction) where
/--
The current state of the machine.
-/
state : Reader.State dir := match dir with | .receiving => .needStartLine | .sending => .pending
/--
The input byte array.
-/
input : ByteArray.Iterator := ByteArray.emptyWithCapacity 4096 |>.iter
/--
The incoming message head.
-/
messageHead : Message.Head dir := {}
/--
Count of messages that this connection has already parsed.
-/
messageCount : Nat := 0
/--
Number of body bytes read for the current message.
-/
bodyBytesRead : Nat := 0
/--
Number of header bytes accumulated for the current message.
Counts name + value bytes plus 4 bytes per line for `: ` and `\r\n`.
-/
headerBytesRead : Nat := 0
/--
Set when no further input bytes will arrive (the remote end has closed the connection).
-/
noMoreInput : Bool := false
namespace Reader
/--
Checks if the reader is in a closed state and cannot process more messages.
-/
@[inline]
def isClosed (reader : Reader dir) : Bool :=
match reader.state with
| .closed => true
| _ => false
/--
Checks if the reader has completed parsing the current message.
-/
@[inline]
def isComplete (reader : Reader dir) : Bool :=
match reader.state with
| .complete => true
| _ => false
/--
Checks if the reader has encountered an error.
-/
@[inline]
def hasFailed (reader : Reader dir) : Bool :=
match reader.state with
| .failed _ => true
| _ => false
/--
Feeds new data into the reader's input buffer.
If the current input is exhausted, replaces it; otherwise compacts the buffer
by discarding already-parsed bytes before appending.
-/
@[inline]
def feed (data : ByteArray) (reader : Reader dir) : Reader dir :=
  { reader with input :=
      if reader.input.atEnd
      -- Fast path: nothing unparsed remains, so the new data becomes the buffer.
      then data.iter
      -- Slow path: copy the unparsed tail and append; O(tail + data) per feed.
      else (reader.input.array.extract reader.input.pos reader.input.array.size ++ data).iter }
/--
Replaces the reader's input iterator with a new one.
-/
@[inline]
def setInput (input : ByteArray.Iterator) (reader : Reader dir) : Reader dir :=
{ reader with input }
/--
Updates the message head being constructed.
-/
@[inline]
def setMessageHead (messageHead : Message.Head dir) (reader : Reader dir) : Reader dir :=
{ reader with messageHead }
/--
Adds a header to the current message head.
-/
@[inline]
def addHeader (name : Header.Name) (value : Header.Value) (reader : Reader dir) : Reader dir :=
match dir with
| .sending | .receiving => { reader with messageHead := { reader.messageHead with headers := reader.messageHead.headers.insert name value } }
/--
Closes the reader, transitioning to the closed state.
-/
@[inline]
def close (reader : Reader dir) : Reader dir :=
{ reader with state := .closed, noMoreInput := true }
/--
Marks the current message as complete and prepares for the next message.
-/
@[inline]
def markComplete (reader : Reader dir) : Reader dir :=
{ reader with
state := .complete
messageCount := reader.messageCount + 1 }
/--
Transitions the reader to a failed state with the given error.
-/
@[inline]
def fail (error : Error) (reader : Reader dir) : Reader dir :=
{ reader with state := .failed error }
/--
Resets the reader to parse a new message on the same connection.
-/
@[inline]
def reset (reader : Reader dir) : Reader dir :=
{ reader with
state := .needStartLine
bodyBytesRead := 0
headerBytesRead := 0
messageHead := {} }
/--
Checks if more input is needed to continue parsing.
-/
@[inline]
def needsMoreInput (reader : Reader dir) : Bool :=
reader.input.atEnd && !reader.noMoreInput &&
match reader.state with
| .complete | .closed | .failed _ | .«continue» _ => false
| _ => true
/--
Returns the current parse error if the reader has failed.
-/
@[inline]
def getError (reader : Reader dir) : Option Error :=
match reader.state with
| .failed err => some err
| _ => none
/--
Gets the number of bytes remaining in the input buffer.
-/
@[inline]
def remainingBytes (reader : Reader dir) : Nat :=
reader.input.array.size - reader.input.pos
/--
Advances the input iterator by n bytes.
-/
@[inline]
def advance (n : Nat) (reader : Reader dir) : Reader dir :=
{ reader with input := reader.input.forward n }
/--
Transitions to the state for reading headers.
-/
@[inline]
def startHeaders (reader : Reader dir) : Reader dir :=
{ reader with state := .needHeader 0, bodyBytesRead := 0, headerBytesRead := 0 }
/--
Adds body bytes parsed for the current message.
-/
@[inline]
def addBodyBytes (n : Nat) (reader : Reader dir) : Reader dir :=
{ reader with bodyBytesRead := reader.bodyBytesRead + n }
/--
Adds header bytes accumulated for the current message.
-/
@[inline]
def addHeaderBytes (n : Nat) (reader : Reader dir) : Reader dir :=
{ reader with headerBytesRead := reader.headerBytesRead + n }
/--
Transitions to the state for reading a fixed-length body.
-/
@[inline]
def startFixedBody (size : Nat) (reader : Reader dir) : Reader dir :=
{ reader with state := .readBody (.fixed size) }
/--
Transitions to the state for reading chunked transfer encoding.
-/
@[inline]
def startChunkedBody (reader : Reader dir) : Reader dir :=
{ reader with state := .readBody .chunkedSize }
/--
Marks that no more input will be provided (connection closed).
-/
@[inline]
def markNoMoreInput (reader : Reader dir) : Reader dir :=
{ reader with noMoreInput := true }
/--
Checks if the connection should be kept alive for the next message.
-/
def shouldKeepAlive (reader : Reader dir) : Bool :=
reader.messageHead.shouldKeepAlive
end Reader

View File

@@ -0,0 +1,284 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Time
public import Std.Internal.Http.Data
public import Std.Internal.Http.Internal
public import Std.Internal.Http.Protocol.H1.Parser
public import Std.Internal.Http.Protocol.H1.Config
public import Std.Internal.Http.Protocol.H1.Message
public import Std.Internal.Http.Protocol.H1.Error
public section
/-!
# HTTP/1.1 Writer
This module defines the writer state machine for generating outgoing HTTP/1.1 messages.
It handles encoding headers and body content for both fixed-length and chunked
transfer encodings.
-/
namespace Std.Http.Protocol.H1
set_option linter.all true
open Internal
/--
The state of the `Writer` state machine.
-/
inductive Writer.State
/--
Initial state before any outgoing message has been prepared.
-/
| pending
/--
Waiting for the application to provide the outgoing message head via `send`.
-/
| waitingHeaders
/--
The message head has been provided; waiting for `shouldFlush` to become true before
serializing headers to output.
-/
| waitingForFlush
/--
Reserved; not currently entered by the state machine.
-/
| writingHeaders
/--
Writing the body output (either fixed-length or chunked).
-/
| writingBody (mode : Body.Length)
/--
Waiting for all buffered output to drain before transitioning to `complete`.
-/
| shuttingDown
/--
Completed writing a single message and ready to begin the next one.
-/
| complete
/--
Closed; no further data can be written.
-/
| closed
deriving Inhabited, Repr, BEq
/--
Manages the writing state of the HTTP generating and writing machine.
-/
structure Writer (dir : Direction) where
/--
Body chunks supplied by the user, accumulated before being flushed to output.
-/
userData : Array Chunk := .empty
/--
All the data produced by the writer, ready to be sent to the socket.
-/
outputData : ChunkedBuffer := .empty
/--
The state of the writer machine.
-/
state : Writer.State := match dir with | .receiving => .pending | .sending => .waitingHeaders
/--
When the user specifies the exact body size upfront, `Content-Length` framing is
used instead of chunked transfer encoding.
-/
knownSize : Option Body.Length := none
/--
The outgoing message that will be written to the output.
-/
messageHead : Message.Head dir.swap := {}
/--
Whether the user has called `send` to provide the outgoing message head.
-/
sentMessage : Bool := false
/--
Set when the user has finished sending body data, allowing fixed-size framing
to be determined upfront.
-/
userClosedBody : Bool := false
/--
When `true`, body bytes are intentionally omitted from the wire for this message
(e.g. HEAD responses), while headers/framing metadata may still describe the
hypothetical representation.
-/
omitBody : Bool := false
namespace Writer
/--
Returns `true` when no more user body data will arrive: either the user called
`closeBody`, or the writer has already transitioned to `complete` or `closed`.
Note: this does **not** mean the wire is ready to accept new bytes — a `closed`
writer cannot send anything. Use this to decide whether to flush pending body
data rather than to check writability.
-/
@[inline]
def noMoreUserData {dir} (writer : Writer dir) : Bool :=
match writer.state with
| .closed | .complete => true
| _ => writer.userClosedBody
/--
Checks if the writer is closed (cannot process more data).
-/
@[inline]
def isClosed (writer : Writer dir) : Bool :=
match writer.state with
| .closed => true
| _ => false
/--
Checks if the writer has completed processing a request.
-/
@[inline]
def isComplete (writer : Writer dir) : Bool :=
match writer.state with
| .complete => true
| _ => false
/--
Checks if the writer can accept more data from the user.
-/
@[inline]
def canAcceptData (writer : Writer dir) : Bool :=
match writer.state with
| .waitingHeaders => true
| .waitingForFlush => true
| .writingBody _ => !writer.userClosedBody
| _ => false
/--
Marks the body as closed, indicating no more user data will be added.
-/
@[inline]
def closeBody (writer : Writer dir) : Writer dir :=
{ writer with userClosedBody := true }
/--
Determines the transfer encoding mode based on explicit setting, body closure state, or defaults to chunked.
-/
def determineTransferMode (writer : Writer dir) : Body.Length :=
  match writer.knownSize with
  | some mode => mode
  | none =>
    if writer.userClosedBody then
      -- Body fully buffered: sum the chunk sizes and use Content-Length framing.
      .fixed (writer.userData.foldl (fun acc chunk => acc + chunk.data.size) 0)
    else
      .chunked
/--
Adds user data chunks to the writer's buffer if the writer can accept data.
-/
@[inline]
def addUserData (data : Array Chunk) (writer : Writer dir) : Writer dir :=
if writer.canAcceptData then
{ writer with userData := writer.userData ++ data }
else
writer
/--
Writes accumulated user data to output using fixed-size encoding.
Emits at most `limitSize` bytes to the output buffer, keeping any excess in
`userData`; returns the updated writer and the remaining byte budget.
-/
def writeFixedBody (writer : Writer dir) (limitSize : Nat) : Writer dir × Nat :=
  if writer.userData.size = 0 then
    (writer, limitSize)
  else
    -- Fold state: (byte arrays ready to emit, chunks deferred past the limit, bytes taken so far).
    let (chunks, pending, totalSize) := writer.userData.foldl (fun (state : Array ByteArray × Array Chunk × Nat) chunk =>
      let (acc, pending, size) := state
      -- Budget already exhausted: defer the whole chunk untouched.
      if size >= limitSize then
        (acc, pending.push chunk, size)
      else
        let remaining := limitSize - size
        let takeSize := min chunk.data.size remaining
        let dataPart := chunk.data.extract 0 takeSize
        let acc := if takeSize = 0 then acc else acc.push dataPart
        let size := size + takeSize
        -- Chunk straddles the budget: emit the prefix, keep the rest pending.
        if takeSize < chunk.data.size then
          let pendingChunk : Chunk := { chunk with data := chunk.data.extract takeSize chunk.data.size }
          (acc, pending.push pendingChunk, size)
        else
          (acc, pending, size)
      ) (#[], #[], 0)
    let outputData := writer.outputData.append (ChunkedBuffer.ofArray chunks)
    let remaining := limitSize - totalSize
    ({ writer with userData := pending, outputData }, remaining)
/--
Writes accumulated user data to output using chunked transfer encoding.
-/
def writeChunkedBody (writer : Writer dir) : Writer dir :=
if writer.userData.size = 0 then
writer
else
let data := writer.userData
{ writer with userData := #[], outputData := data.foldl (Encode.encode .v11) writer.outputData }
/--
Writes the final chunk terminator (0\r\n\r\n) and transitions to complete state.
-/
def writeFinalChunk (writer : Writer dir) : Writer dir :=
let writer := writer.writeChunkedBody
{ writer with
outputData := writer.outputData.write "0\r\n\r\n".toUTF8
state := .complete
}
/--
Extracts all accumulated output data and returns it with a cleared output buffer.
-/
@[inline]
def takeOutput (writer : Writer dir) : Option (Writer dir × ByteArray) :=
let output := writer.outputData.toByteArray
some ({ writer with outputData := ChunkedBuffer.empty }, output)
/--
Updates the writer's state machine to a new state.
-/
@[inline]
def setState (state : Writer.State) (writer : Writer dir) : Writer dir :=
{ writer with state }
/--
Writes the message headers to the output buffer.
-/
private def writeHeaders (messageHead : Message.Head dir.swap) (writer : Writer dir) : Writer dir :=
{ writer with outputData := Internal.Encode.encode (v := .v11) writer.outputData messageHead }
/--
Checks if the connection should be kept alive based on the Connection header.
A missing Connection header defaults to keep-alive.
-/
def shouldKeepAlive (writer : Writer dir) : Bool :=
  match writer.messageHead.headers.get? .connection with
  | some v => v.value.toLower != "close"
  | none => true
/--
Closes the writer, transitioning to the closed state.
-/
@[inline]
def close (writer : Writer dir) : Writer dir :=
{ writer with state := .closed }
end Writer

View File

@@ -0,0 +1,188 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Async
public import Std.Internal.Async.TCP
public import Std.Sync.CancellationToken
public import Std.Sync.Semaphore
public import Std.Internal.Http.Server.Config
public import Std.Internal.Http.Server.Handler
public import Std.Internal.Http.Server.Connection
public section
/-!
# HTTP Server
This module defines a simple, asynchronous HTTP/1.1 server implementation.
It provides the `Std.Http.Server` structure, which encapsulates all server state, and functions for
starting, managing, and gracefully shutting down the server.
The server runs entirely using `Async` and uses a shared `CancellationContext` to signal shutdowns.
Each active client connection is tracked, and the server automatically resolves its shutdown
promise once all connections have closed.
-/
namespace Std.Http
open Std.Internal.IO.Async TCP
set_option linter.all true
/--
The `Server` structure holds all state required to manage the lifecycle of an HTTP server, including
connection tracking and shutdown coordination.
-/
structure Server where
/--
The context used for shutting down all connections and the server.
-/
context : Std.CancellationContext
/--
Active HTTP connections
-/
activeConnections : Std.Mutex UInt64
/--
Semaphore used to enforce the maximum number of simultaneous active connections.
`none` means no connection limit.
-/
connectionLimit : Option Std.Semaphore
/--
Indicates when the server has successfully shut down.
-/
shutdownPromise : Std.Channel Unit
/--
Configuration of the server
-/
config : Std.Http.Config
namespace Server
/--
Create a new `Server` structure with an optional configuration.
A `maxConnections` of `0` means no connection limit (no semaphore is created).
-/
def new (config : Std.Http.Config := {}) : IO Server := do
  let context ← Std.CancellationContext.new
  let activeConnections ← Std.Mutex.new 0
  let connectionLimit ←
    if config.maxConnections = 0 then
      pure none
    else
      some <$> Std.Semaphore.new config.maxConnections
  let shutdownPromise ← Std.Channel.new
  return { context, activeConnections, connectionLimit, shutdownPromise, config }
/--
Triggers cancellation of all requests and the accept loop in the server. This function should be used
in conjunction with `waitShutdown` to properly coordinate the shutdown sequence.
-/
@[inline]
def shutdown (s : Server) : Async Unit :=
s.context.cancel .shutdown
/--
Waits for the server to shut down. Blocks until another task or async operation calls the `shutdown` function.
-/
@[inline]
def waitShutdown (s : Server) : Async Unit := do
  Async.ofAsyncTask ((← s.shutdownPromise.recv).map Except.ok)
/--
Returns a `Selector` that waits for the server to shut down.
Useful for racing shutdown against other events with `Selectable.one`.
-/
@[inline]
def waitShutdownSelector (s : Server) : Selector Unit :=
  s.shutdownPromise.recvSelector
/--
Triggers cancellation of all requests and the accept loop, then waits for the server to fully shut down.
This is a convenience function combining `shutdown` and then `waitShutdown`.
-/
@[inline]
def shutdownAndWait (s : Server) : Async Unit := do
  -- Request cancellation first, then block until the last connection drains.
  s.shutdown
  s.waitShutdown
/--
Wraps `action` with connection accounting: increments the active-connection count on entry and
decrements it on exit (even on failure). When the count reaches zero while the server context is
cancelled, the shutdown promise is fulfilled. When `releaseConnectionPermit` is set, the semaphore
permit acquired for this connection is also released.
-/
@[inline]
private def frameCancellation (s : Server) (releaseConnectionPermit : Bool := false)
    (action : ContextAsync α) : ContextAsync α := do
  s.activeConnections.atomically (modify (· + 1))
  try
    action
  finally
    if releaseConnectionPermit then
      if let some limit := s.connectionLimit then
        limit.release
    s.activeConnections.atomically do
      modify (· - 1)
      -- Last connection gone *and* shutdown requested: resolve the shutdown promise.
      if (← get) = 0 ∧ (← s.context.isCancelled) then
        discard <| s.shutdownPromise.send ()
/--
Start a new HTTP/1.1 server on the given socket address. This function uses `Async` to handle tasks
and TCP connections, and returns a `Server` structure that can be used to cancel the server.
-/
def serve {σ : Type} [Handler σ]
    (addr : Net.SocketAddress)
    (handler : σ)
    (config : Config := {}) (backlog : UInt32 := 1024) : Async Server := do
  let httpServer ← Server.new config
  let server ← Socket.Server.mk
  server.bind addr
  server.listen backlog
  server.noDelay
  -- The accept loop itself is framed so the shutdown promise also waits for it.
  let runServer := do
    frameCancellation httpServer (action := do
      while true do
        -- Respect the connection limit before accepting: acquire a permit first.
        let permitAcquired ←
          if let some limit := httpServer.connectionLimit then
            let permit ← limit.acquire
            await permit
            pure true
          else
            pure false
        -- Race an incoming connection against server cancellation.
        let result ← Selectable.one #[
          .case (server.acceptSelector) (fun x => pure <| some x),
          .case (← ContextAsync.doneSelector) (fun _ => pure none)
        ]
        match result with
        | some client =>
          -- Attach the peer address as a request extension when available.
          let extensions ← do
            match (← EIO.toBaseIO client.getPeerName) with
            | .ok addr => pure <| Extensions.empty.insert (Server.RemoteAddr.mk addr)
            | .error _ => pure Extensions.empty
          ContextAsync.background
            (frameCancellation httpServer (releaseConnectionPermit := permitAcquired)
              (action := do
                serveConnection client handler config extensions))
        | none =>
          -- Cancelled: return the unused permit before leaving the accept loop.
          if permitAcquired then
            if let some limit := httpServer.connectionLimit then
              limit.release
          break
      )
  background (runServer httpServer.context)
  return httpServer
end Std.Http.Server

View File

@@ -0,0 +1,196 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Time
public import Std.Internal.Http.Protocol.H1
public section
/-!
# Config
This module exposes the `Config`, a structure that describes timeout, request and headers
configuration of an HTTP Server.
-/
namespace Std.Http
set_option linter.all true
/--
Server configuration: connection limits, timeouts, and parser limits, with validated defaults.
-/
structure Config where
  /--
  Maximum number of simultaneous active connections (default: 1024).
  Setting this to `0` disables the limit entirely: the server will accept any number of
  concurrent connections and no semaphore-based cap is enforced. Use with care — an
  unconstrained server may exhaust file descriptors or memory under adversarial load.
  -/
  maxConnections : Nat := 1024
  /--
  Maximum number of requests per connection.
  -/
  maxRequests : Nat := 100
  /--
  Maximum number of headers allowed per request.
  -/
  maxHeaders : Nat := 50
  /--
  Maximum aggregate byte size of all header field lines in a single message
  (name + value bytes plus 4 bytes per line for `: ` and `\r\n`). Default: 64 KiB.
  -/
  maxHeaderBytes : Nat := 65536
  /--
  Timeout (in milliseconds) for receiving additional data while a request is actively being
  processed (e.g. reading the request body). Applies after the request headers have been parsed
  and replaces the keep-alive timeout for the duration of the request.
  -/
  lingeringTimeout : Time.Millisecond.Offset := 10000
  /--
  Timeout for keep-alive connections. Must be positive, which the subtype enforces at compile time.
  -/
  keepAliveTimeout : { x : Time.Millisecond.Offset // 0 < x } := ⟨12000, by decide⟩
  /--
  Maximum time (in milliseconds) allowed to receive the complete request headers after the first
  byte of a new request arrives. This prevents Slowloris-style attacks where a client sends bytes
  at a slow rate to hold a connection slot open without completing a request. Once a request starts,
  each individual read must complete within this window. Default: 5 seconds.
  -/
  headerTimeout : Time.Millisecond.Offset := 5000
  /--
  Whether to enable keep-alive connections by default.
  -/
  enableKeepAlive : Bool := true
  /--
  The maximum size that the connection can receive in a single recv call.
  -/
  maximumRecvSize : Nat := 8192
  /--
  Default buffer size for the connection
  -/
  defaultPayloadBytes : Nat := 8192
  /--
  Whether to automatically generate the `Date` header in responses.
  -/
  generateDate : Bool := true
  /--
  The `Server` header value injected into outgoing responses.
  `none` suppresses the header entirely.
  -/
  serverName : Option Header.Value := some (.mk "LeanHTTP/1.1")
  /--
  Maximum length of request URI (default: 8192 bytes)
  -/
  maxUriLength : Nat := 8192
  /--
  Maximum number of bytes consumed while parsing request start-lines (default: 8192 bytes).
  -/
  maxStartLineLength : Nat := 8192
  /--
  Maximum length of header field name (default: 256 bytes)
  -/
  maxHeaderNameLength : Nat := 256
  /--
  Maximum length of header field value (default: 8192 bytes)
  -/
  maxHeaderValueLength : Nat := 8192
  /--
  Maximum number of spaces in delimiter sequences (default: 16)
  -/
  maxSpaceSequence : Nat := 16
  /--
  Maximum number of leading empty lines (bare CRLF) to skip before a request-line
  (RFC 9112 §2.2 robustness). Default: 8.
  -/
  maxLeadingEmptyLines : Nat := 8
  /--
  Maximum length of chunk extension name (default: 256 bytes)
  -/
  maxChunkExtNameLength : Nat := 256
  /--
  Maximum length of chunk extension value (default: 256 bytes)
  -/
  maxChunkExtValueLength : Nat := 256
  /--
  Maximum number of bytes consumed while parsing one chunk-size line with extensions (default: 8192 bytes).
  -/
  maxChunkLineLength : Nat := 8192
  /--
  Maximum allowed chunk payload size in bytes (default: 8 MiB).
  -/
  maxChunkSize : Nat := 8 * 1024 * 1024
  /--
  Maximum allowed total body size per request in bytes (default: 64 MiB).
  -/
  maxBodySize : Nat := 64 * 1024 * 1024
  /--
  Maximum length of reason phrase (default: 512 bytes)
  -/
  maxReasonPhraseLength : Nat := 512
  /--
  Maximum number of trailer headers (default: 20)
  -/
  maxTrailerHeaders : Nat := 20
  /--
  Maximum number of extensions on a single chunk-size line (default: 16).
  -/
  maxChunkExtensions : Nat := 16
namespace Config
/--
Converts to HTTP/1.1 config.
Server-level settings (connection limits, timeouts, `generateDate`) are not part of the
protocol-layer config and are intentionally not forwarded here.
-/
def toH1Config (config : Config) : Protocol.H1.Config where
  -- H1 counts "messages" per connection; at the server level these are requests.
  maxMessages := config.maxRequests
  maxHeaders := config.maxHeaders
  maxHeaderBytes := config.maxHeaderBytes
  enableKeepAlive := config.enableKeepAlive
  -- The server name becomes the generic agent name at the protocol layer.
  agentName := config.serverName
  maxUriLength := config.maxUriLength
  maxStartLineLength := config.maxStartLineLength
  maxHeaderNameLength := config.maxHeaderNameLength
  maxHeaderValueLength := config.maxHeaderValueLength
  maxSpaceSequence := config.maxSpaceSequence
  maxLeadingEmptyLines := config.maxLeadingEmptyLines
  maxChunkExtensions := config.maxChunkExtensions
  maxChunkExtNameLength := config.maxChunkExtNameLength
  maxChunkExtValueLength := config.maxChunkExtValueLength
  maxChunkLineLength := config.maxChunkLineLength
  maxChunkSize := config.maxChunkSize
  maxBodySize := config.maxBodySize
  maxReasonPhraseLength := config.maxReasonPhraseLength
  maxTrailerHeaders := config.maxTrailerHeaders
end Std.Http.Config

View File

@@ -0,0 +1,526 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Async.TCP
public import Std.Internal.Async.ContextAsync
public import Std.Internal.Http.Transport
public import Std.Internal.Http.Protocol.H1
public import Std.Internal.Http.Server.Config
public import Std.Internal.Http.Server.Handler
public section
namespace Std
namespace Http
namespace Server
open Std Internal IO Async TCP Protocol
open Time
/-!
# Connection
This module defines `Server.Connection`, a structure used to handle a single HTTP connection with
possibly multiple requests.
-/
set_option linter.all true
/--
Represents the remote address of a client connection.
Attached to requests as an extension so handlers can identify the peer.
-/
public structure RemoteAddr where
  /--
  The socket address of the remote client.
  -/
  addr : Net.SocketAddress
  deriving TypeName
instance : ToString RemoteAddr where
  -- Render as `ip:port`, e.g. `127.0.0.1:8080`.
  toString remote := s!"{remote.addr.ipAddr}:{remote.addr.port}"
/--
A single HTTP connection.
Bundles the transport endpoint with the protocol state machine driving it.
-/
public structure Connection (α : Type) where
  /--
  The client connection.
  -/
  socket : α
  /--
  The processing machine for HTTP/1.1.
  -/
  machine : H1.Machine .receiving
  /--
  Extensions to attach to each request (e.g., remote address).
  -/
  extensions : Extensions := .empty
namespace Connection
/--
Events produced by the async select loop in `receiveWithTimeout`.
Each variant corresponds to one possible outcome of waiting for I/O.
-/
private inductive Recv (β : Type)
  /-- Bytes arrived from the socket; `none` means the peer closed the input. -/
  | bytes (x : Option ByteArray)
  /-- A chunk of the handler's response body is ready; `none` means the body ended. -/
  | responseBody (x : Option Chunk)
  /-- The request-body consumer signalled whether it wants more data. -/
  | bodyInterest (x : Bool)
  /-- The handler completed with a response, or failed with an error. -/
  | response (x : (Except Error (Response β)))
  /-- A keep-alive, header, or lingering timeout fired. -/
  | timeout
  /-- The connection context was cancelled (server shutdown). -/
  | shutdown
  /-- A transport error occurred; the connection should close immediately. -/
  | close
/--
The set of I/O sources to wait on during a single poll iteration.
Each `Option` field is `none` when that source is not currently active.
-/
private structure PollSources (α β : Type) where
  /-- Socket to read from; `none` when socket input is not needed this iteration. -/
  socket : Option α
  /-- Hint for how many bytes the parser expects next, if known. -/
  expect : Option Nat
  /-- Channel carrying the handler's result, when a handler is in flight. -/
  response : Option (Std.Channel (Except Error (Response β)))
  /-- Response body currently being drained, if any. -/
  responseBody : Option β
  /-- Request body stream to watch for consumer interest, if pullable. -/
  requestBody : Option Body.Stream
  /-- Fallback timeout used when neither keep-alive nor header timeout applies. -/
  timeout : Millisecond.Offset
  /-- Idle keep-alive timeout, active between requests. -/
  keepAliveTimeout : Option Millisecond.Offset
  /-- Absolute deadline for receiving complete request headers. -/
  headerTimeout : Option Timestamp
  /-- Context whose cancellation aborts the wait. -/
  connectionContext : CancellationContext
/--
Waits for the next I/O event across all active sources described by `sources`.
Computes the socket recv size from `config`, then races all active selectables.
Calls `Handler.onFailure` and returns `.close` on transport errors.
-/
private def pollNextEvent
    {σ β : Type} [Transport α] [Handler σ] [Body β]
    (config : Config) (handler : σ) (sources : PollSources α β)
    : Async (Recv β) := do
  -- Clamp the parser's expectation to the configured per-recv maximum.
  let expectedBytes := sources.expect
    |>.getD config.defaultPayloadBytes
    |>.min config.maximumRecvSize
    |>.toUInt64
  let mut selectables : Array (Selectable (Recv β)) := #[
    .case sources.connectionContext.doneSelector (fun _ => do
      -- Distinguish deadline-driven cancellation from an explicit shutdown.
      let reason ← sources.connectionContext.getCancellationReason
      match reason with
      | some .deadline => pure .timeout
      | _ => pure .shutdown)
  ]
  if let some socket := sources.socket then
    selectables := selectables.push (.case (Transport.recvSelector socket expectedBytes) (Recv.bytes · |> pure))
  -- Exactly one timeout source is armed: keep-alive (idle), header deadline, or the fallback.
  if let some keepAliveTimeout := sources.keepAliveTimeout then
    selectables := selectables.push (.case (← Selector.sleep keepAliveTimeout) (fun _ => pure .timeout))
  else if let some timeout := sources.headerTimeout then
    selectables := selectables.push (.case (← Selector.sleep (timeout - (← Timestamp.now)).toMilliseconds) (fun _ => pure .timeout))
  else
    selectables := selectables.push (.case (← Selector.sleep sources.timeout) (fun _ => pure .timeout))
  if let some responseBody := sources.responseBody then
    selectables := selectables.push (.case (Body.recvSelector responseBody) (Recv.responseBody · |> pure))
  if let some requestBody := sources.requestBody then
    selectables := selectables.push (.case (requestBody.interestSelector) (Recv.bodyInterest · |> pure))
  if let some response := sources.response then
    selectables := selectables.push (.case response.recvSelector (Recv.response · |> pure))
  try Selectable.one selectables
  catch e =>
    Handler.onFailure handler e
    pure .close
/--
Handles the `Expect: 100-continue` protocol for a pending request head.
Races between the handler's decision (`Handler.onContinue`), the connection being
cancelled, and a lingering timeout. Returns the updated machine and whether
`pendingHead` should be cleared (i.e. when the request is rejected).
-/
private def handleContinueEvent
    {σ : Type} [Handler σ]
    (handler : σ) (machine : H1.Machine .receiving) (head : Request.Head)
    (config : Config) (connectionContext : CancellationContext)
    : Async (H1.Machine .receiving × Bool) := do
  let continueChannel : Std.Channel Bool ← Std.Channel.new
  -- Run the handler's decision in its own task so it can be raced below.
  let continueTask ← Handler.onContinue handler head |>.asTask
  BaseIO.chainTask continueTask fun
    | .ok v => discard <| continueChannel.send v
    | .error _ => discard <| continueChannel.send false
  -- Cancellation or the lingering timeout both count as rejection.
  let canContinue ← Selectable.one #[
    .case continueChannel.recvSelector pure,
    .case connectionContext.doneSelector (fun _ => pure false),
    .case (← Selector.sleep config.lingeringTimeout) (fun _ => pure false)
  ]
  let status := if canContinue then Status.«continue» else Status.expectationFailed
  return (machine.canContinue status, !canContinue)
/--
Injects a `Date` header into a response head if `Config.generateDate` is set
and the response does not already include one. A handler-supplied `Date` is never overridden.
-/
private def prepareResponseHead (config : Config) (head : Response.Head) : Async Response.Head := do
  if config.generateDate ∧ ¬head.headers.contains Header.Name.date then
    let now ← Std.Time.DateTime.now (tz := .UTC)
    return { head with headers := head.headers.insert Header.Name.date (Header.Value.ofString! now.toRFC822String) }
  else
    return head
/--
Applies a successful handler response to the machine.
Optionally injects a `Date` header, records the known body size, and sends the
response head. Returns the updated machine and the body stream to drain, or `none`
when the body should be omitted (e.g., for HEAD requests).
-/
private def applyResponse
    {β : Type} [Body β]
    (config : Config) (machine : H1.Machine .receiving) (res : Response β)
    : Async (H1.Machine .receiving × Option β) := do
  -- A known body size lets the writer emit `Content-Length` instead of chunking.
  let size ← Body.getKnownSize res.body
  let machineSized :=
    if let some knownSize := size then machine.setKnownSize knownSize
    else machine
  let responseHead ← prepareResponseHead config res.line
  let machineWithHead := machineSized.send responseHead
  if machineWithHead.writer.omitBody then
    -- Body is suppressed (e.g. HEAD): close it so producers are not left hanging.
    if ¬(← Body.isClosed res.body) then
      Body.close res.body
    return (machineWithHead, none)
  else
    return (machineWithHead, some res.body)
/--
All mutable state carried through the connection processing loop.
Bundled into a struct so it can be passed to and returned from helper functions.
-/
private structure ConnectionState (β : Type) where
  /-- The HTTP/1.1 protocol state machine. -/
  machine : H1.Machine .receiving
  /-- Stream the current request body is written into for the handler to read. -/
  requestStream : Body.Stream
  /-- Idle keep-alive timeout; `none` while a request is in flight. -/
  keepAliveTimeout : Option Millisecond.Offset
  /-- Timeout used for the current phase (keep-alive or lingering). -/
  currentTimeout : Millisecond.Offset
  /-- Absolute deadline for finishing header reception, once the first byte arrived. -/
  headerTimeout : Option Timestamp
  /-- Channel through which the dispatched handler delivers its result. -/
  response : Std.Channel (Except Error (Response β))
  /-- Response body currently being drained, if any. -/
  respStream : Option β
  /-- Whether the parser asked for more input this iteration. -/
  requiresData : Bool
  /-- Byte-count hint from the parser for the next read, if known. -/
  expectData : Option Nat
  /-- Whether a handler task is currently in flight for this connection. -/
  handlerDispatched : Bool
  /-- Parsed request head awaiting dispatch to the handler. -/
  pendingHead : Option Request.Head
/--
Processes all H1 events from a single machine step, updating the connection state.
Handles keep-alive resets, body-size tracking, `Expect: 100-continue`, and parse errors.
Returns the updated state; stops early on `.failed`.
-/
private def processH1Events
    {σ β : Type} [Handler σ] [Body β]
    (handler : σ) (config : Config) (connectionContext : CancellationContext)
    (events : Array (H1.Event .receiving))
    (state : ConnectionState β)
    : Async (ConnectionState β) := do
  let mut st := state
  for event in events do
    match event with
    | .needMoreData expect =>
      st := { st with requiresData := true, expectData := expect }
    | .needAnswer => pure ()
    | .endHeaders head =>
      -- Sets the pending head and removes the KeepAlive or Header timeout.
      st := { st with
        currentTimeout := config.lingeringTimeout
        keepAliveTimeout := none
        headerTimeout := none
        pendingHead := some head
      }
      if let some length := head.getSize true then
        -- Sets the size of the body that is going out of the connection.
        Body.setKnownSize st.requestStream (some length)
    | .«continue» =>
      if let some head := st.pendingHead then
        let (newMachine, clearPending) ← handleContinueEvent handler st.machine head config connectionContext
        st := { st with machine := newMachine }
        if clearPending then
          st := { st with pendingHead := none }
    | .next =>
      -- Reset all per-request state for the next pipelined request.
      if ¬(← Body.isClosed st.requestStream) then
        Body.close st.requestStream
      if let some res := st.respStream then
        if ¬(← Body.isClosed res) then
          Body.close res
      let newStream ← Body.mkStream
      st := { st with
        requestStream := newStream
        response := ← Std.Channel.new
        respStream := none
        keepAliveTimeout := some config.keepAliveTimeout.val
        currentTimeout := config.keepAliveTimeout.val
        headerTimeout := none
        handlerDispatched := false
      }
    | .failed err =>
      -- Parse failure: notify, release the request stream, and stop processing events.
      Handler.onFailure handler (toString err)
      if ¬(← Body.isClosed st.requestStream) then
        Body.close st.requestStream
      st := { st with requiresData := false, pendingHead := none }
      break
    | .closeBody =>
      if ¬(← Body.isClosed st.requestStream) then
        Body.close st.requestStream
    | .close => pure ()
  return st
/--
Dispatches a pending request head to the handler if one is waiting.
Spawns the handler as an async task and routes its result back through `state.response`.
Returns the updated state with `pendingHead` cleared and `handlerDispatched` set.
-/
private def dispatchPendingRequest
    {σ : Type} [Handler σ]
    (handler : σ) (extensions : Extensions) (connectionContext : CancellationContext)
    (state : ConnectionState (Handler.ResponseBody σ))
    : Async (ConnectionState (Handler.ResponseBody σ)) := do
  if let some line := state.pendingHead then
    -- Run the handler concurrently; its Except result is forwarded verbatim to the channel.
    let task ← Handler.onRequest handler { line, body := state.requestStream, extensions } connectionContext
      |>.asTask
    BaseIO.chainTask task (discard ∘ state.response.send)
    return { state with pendingHead := none, handlerDispatched := true }
  else
    return state
/--
Processes a single async I/O event and updates the connection state.
Returns the updated state and `true` if the connection should be closed immediately.
-/
private def handleRecvEvent
    {σ β : Type} [Handler σ] [Body β]
    (handler : σ) (config : Config)
    (event : Recv β) (state : ConnectionState β)
    : Async (ConnectionState β × Bool) := do
  match event with
  | .bytes (some bs) =>
    let mut st := state
    -- After the first byte after idle we switch from keep-alive timeout to per-request header timeout.
    if st.keepAliveTimeout.isSome then
      st := { st with
        keepAliveTimeout := none
        headerTimeout := some <| (← Timestamp.now) + config.headerTimeout
      }
    return ({ st with machine := st.machine.feed bs }, false)
  | .bytes none =>
    -- Peer closed its write side: let the machine finish whatever is buffered.
    return ({ state with machine := state.machine.noMoreInput }, false)
  | .responseBody (some chunk) =>
    return ({ state with machine := state.machine.sendData #[chunk] }, false)
  | .responseBody none =>
    -- Handler finished producing the body; close it and tell the machine.
    if let some res := state.respStream then
      if ¬(← Body.isClosed res) then Body.close res
    return ({ state with machine := state.machine.userClosedBody, respStream := none }, false)
  | .bodyInterest interested =>
    if interested then
      let (newMachine, pulledChunk) := state.machine.pullBody
      let mut st := { state with machine := newMachine }
      if let some pulled := pulledChunk then
        try st.requestStream.send pulled.chunk pulled.incomplete
        catch e => Handler.onFailure handler e
        if pulled.final then
          if ¬(← Body.isClosed st.requestStream) then
            Body.close st.requestStream
      return (st, false)
    else
      return (state, false)
  | .close => return (state, true)
  | .timeout =>
    Handler.onFailure handler "request header timeout"
    return ({ state with machine := state.machine.closeWithError .requestTimeout, handlerDispatched := false }, false)
  | .shutdown =>
    return ({ state with machine := state.machine.closeWithError .serviceUnavailable, handlerDispatched := false }, false)
  | .response (.error err) =>
    Handler.onFailure handler err
    return ({ state with machine := state.machine.closeWithError .internalServerError, handlerDispatched := false }, false)
  | .response (.ok res) =>
    if state.machine.failed then
      -- Too late to send anything; just release the handler's body.
      if ¬(← Body.isClosed res.body) then Body.close res.body
      return ({ state with handlerDispatched := false }, false)
    else
      let (newMachine, newRespStream) ← applyResponse config state.machine res
      return ({ state with machine := newMachine, handlerDispatched := false, respStream := newRespStream }, false)
/--
Computes the active `PollSources` for the current connection state.
Determines which IO sources need attention and whether to include the socket.
-/
private def buildPollSources
    {α β : Type} [Transport α]
    (socket : α) (connectionContext : CancellationContext) (state : ConnectionState β)
    : Async (PollSources α β) := do
  let requestBodyOpen ←
    if state.machine.canPullBody then pure !(← Body.isClosed state.requestStream)
    else pure false
  let requestBodyInterested ←
    if state.machine.canPullBody ∧ requestBodyOpen then state.requestStream.hasInterest
    else pure false
  let requestBody ←
    if state.machine.canPullBodyNow ∧ requestBodyOpen then pure (some state.requestStream)
    else pure none
  -- Include the socket only when there is more to do than waiting for the handler alone.
  let pollSocket :=
    state.requiresData ∨ !state.handlerDispatched ∨ state.respStream.isSome ∨
      state.machine.writer.sentMessage ∨ (state.machine.canPullBody ∧ requestBodyInterested)
  return {
    socket := if pollSocket then some socket else none
    expect := state.expectData
    response := if state.handlerDispatched then some state.response else none
    responseBody := state.respStream
    requestBody := requestBody
    timeout := state.currentTimeout
    keepAliveTimeout := state.keepAliveTimeout
    headerTimeout := state.headerTimeout
    connectionContext := connectionContext
  }
/--
Runs the main request/response processing loop for a single connection.
Drives the HTTP/1.1 state machine through four phases each iteration:
send buffered output, process H1 events, dispatch pending requests, poll for I/O.
-/
private def handle
    {σ : Type} [Transport α] [h : Handler σ]
    (connection : Connection α)
    (config : Config)
    (connectionContext : CancellationContext)
    (handler : σ) : Async Unit := do
  let _ : Body (Handler.ResponseBody σ) := Handler.responseBodyInstance
  let socket := connection.socket
  let initStream ← Body.mkStream
  let mut state : ConnectionState (Handler.ResponseBody σ) := {
    machine := connection.machine
    requestStream := initStream
    keepAliveTimeout := some config.keepAliveTimeout.val
    currentTimeout := config.keepAliveTimeout.val
    headerTimeout := none
    response := ← Std.Channel.new
    respStream := none
    requiresData := false
    expectData := none
    handlerDispatched := false
    pendingHead := none
  }
  while ¬state.machine.halted do
    -- Phase 1: advance the state machine and flush any output.
    let (newMachine, step) := state.machine.step
    state := { state with machine := newMachine }
    if step.output.size > 0 then
      try Transport.sendAll socket step.output.data
      catch e =>
        Handler.onFailure handler e
        break
    -- Phase 2: process all events emitted by this step.
    state ← processH1Events handler config connectionContext step.events state
    -- Phase 3: dispatch any newly parsed request to the handler.
    state ← dispatchPendingRequest handler connection.extensions connectionContext state
    -- Phase 4: wait for the next IO event when any source needs attention.
    if state.requiresData ∨ state.handlerDispatched ∨ state.respStream.isSome ∨ state.machine.canPullBody then
      state := { state with requiresData := false }
      let sources ← buildPollSources socket connectionContext state
      let event ← pollNextEvent config handler sources
      let (newState, shouldClose) ← handleRecvEvent handler config event state
      state := newState
      if shouldClose then break
  -- Clean up: close all open streams and the socket.
  if ¬(← Body.isClosed state.requestStream) then
    Body.close state.requestStream
  if let some res := state.respStream then
    if ¬(← Body.isClosed res) then Body.close res
  Transport.close socket
end Connection
/--
Handles request/response processing for a single connection using an `Async` handler.
The library-level entry point for running a server is `Server.serve`.
This function can be used with a `TCP.Socket` or any other type that implements
`Transport` to build custom server loops.

# Example
```lean
-- Create a TCP socket server instance
let server ← Socket.Server.mk
server.bind addr
server.listen backlog

-- Enter an infinite loop to handle incoming client connections
while true do
  let client ← server.accept
  background (serveConnection client handler config)
```
-/
def serveConnection
    {σ : Type} [Transport t] [Handler σ]
    (client : t) (handler : σ)
    (config : Config) (extensions : Extensions := .empty) : ContextAsync Unit := do
  -- Build a fresh H1 machine for this connection and run the processing loop
  -- under the caller's cancellation context.
  (Connection.mk client { config := config.toH1Config } extensions)
    |>.handle config (← ContextAsync.getContext) handler
end Std.Http.Server

View File

@@ -0,0 +1,60 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Async
public import Std.Internal.Http.Data
public import Std.Internal.Async.ContextAsync
public section
namespace Std.Http.Server
open Std.Internal.IO.Async
set_option linter.all true
/--
A type class for handling HTTP server requests. Implement this class to define how the server
responds to incoming requests, failures, and `Expect: 100-continue` headers.
-/
class Handler (σ : Type) where
  /--
  Concrete body type produced by `onRequest`.
  Defaults to `Body.Any`, but handlers may override it with any reader/writer-compatible body.
  -/
  ResponseBody : Type := Body.Any
  /--
  Body instance required by the connection loop for receiving response chunks.
  -/
  [responseBodyInstance : Body ResponseBody]
  /--
  Called for each incoming HTTP request.
  -/
  onRequest (self : σ) (request : Request Body.Stream) : ContextAsync (Response ResponseBody)
  /--
  Called when an I/O or transport error occurs while processing a request (e.g. broken socket,
  handler exception). This is a **notification only**: the connection will close regardless of
  the handler's response. Use this for logging and metrics. The default implementation does nothing.
  -/
  onFailure (self : σ) (error : IO.Error) : Async Unit :=
    pure ()
  /--
  Called when a request includes an `Expect: 100-continue` header. Return `true` to send a
  `100 Continue` response and accept the body. If `false` is returned the server sends
  `417 Expectation Failed`, disables keep-alive, and closes the request body reader.
  This function is guarded by `Config.lingeringTimeout` and may be cancelled on server shutdown.
  The default implementation always returns `true`.
  -/
  onContinue (self : σ) (request : Request.Head) : Async Bool :=
    pure true
end Std.Http.Server

View File

@@ -0,0 +1,249 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Protocol.H1
public section
/-!
# Transport
This module exposes a `Transport` type class that is used to represent different transport mechanisms
that can be used with an HTTP connection.
-/
namespace Std.Http
open Std Internal IO Async TCP
set_option linter.all true
/--
Generic HTTP interface that abstracts over different transport mechanisms.
-/
class Transport (α : Type) where
  /--
  Receive data from the client connection, up to the expected size.
  Returns `none` if the connection is closed or no data is available.
  -/
  recv : α → UInt64 → Async (Option ByteArray)
  /--
  Send all data through the client connection.
  -/
  sendAll : α → Array ByteArray → Async Unit
  /--
  Get a selector for receiving data asynchronously.
  -/
  recvSelector : α → UInt64 → Selector (Option ByteArray)
  /--
  Close the transport connection.
  The default implementation is a no-op; override this for transports that require explicit teardown.
  For `Socket.Client`, the runtime closes the file descriptor when the object is finalized.
  -/
  close : α → IO Unit := fun _ => pure ()
-- Real TCP transport. `close` is intentionally left at the class default (no-op):
-- the runtime closes the file descriptor when the client object is finalized.
instance : Transport Socket.Client where
  recv client expect := client.recv? expect
  sendAll client data := client.sendAll data
  recvSelector client expect := client.recvSelector expect
open Internal.IO.Async in
/--
Shared state for a bidirectional mock connection.
Both mock endpoints hold the same pair of channels, with the roles of
sender and receiver swapped.
-/
private structure MockLink.SharedState where
  /--
  Client to server direction.
  -/
  clientToServer : Std.CloseableChannel ByteArray
  /--
  Server to client direction.
  -/
  serverToClient : Std.CloseableChannel ByteArray
/--
Mock client endpoint for testing.
-/
structure Mock.Client where
  -- Shared with the paired `Mock.Server`; see `Mock.new`.
  private shared : MockLink.SharedState

/--
Mock server endpoint for testing.
-/
structure Mock.Server where
  -- Shared with the paired `Mock.Client`; see `Mock.new`.
  private shared : MockLink.SharedState
namespace Mock
/--
Creates a mock server and client that are connected to each other and share the
same underlying state, enabling bidirectional communication.
-/
def new : BaseIO (Mock.Client × Mock.Server) := do
  let clientToServer ← Std.CloseableChannel.new
  let serverToClient ← Std.CloseableChannel.new
  -- Both endpoints wrap the same channel pair; direction is fixed by the accessors.
  return (⟨⟨clientToServer, serverToClient⟩⟩, ⟨⟨clientToServer, serverToClient⟩⟩)
/--
Receives data from a channel, joining all available data up to the expected size. First does a
blocking recv, then greedily consumes available data with tryRecv until `expect` bytes are reached.
Returns `none` when the channel is closed and drained.
-/
def recvJoined (recvChan : Std.CloseableChannel ByteArray) (expect : Option UInt64) : Async (Option ByteArray) := do
  match ← await (← recvChan.recv) with
  | none => return none
  | some first =>
    let mut result := first
    repeat
      -- Stop early once we have gathered at least `expect` bytes.
      if let some expect := expect then
        if result.size.toUInt64 ≥ expect then break
      match ← recvChan.tryRecv with
      | none => break
      | some chunk => result := result ++ chunk
    return some result
/--
Sends a single ByteArray through a channel.
Channel errors (e.g. sending on a closed channel) surface as `IO.userError`.
-/
def send (sendChan : Std.CloseableChannel ByteArray) (data : ByteArray) : Async Unit := do
  Async.ofAsyncTask ((← sendChan.send data) |>.map (Except.mapError (IO.userError ∘ toString)))
/--
Sends ByteArrays through a channel, one chunk at a time, in order.
-/
def sendAll (sendChan : Std.CloseableChannel ByteArray) (data : Array ByteArray) : Async Unit := do
  data.forM (send sendChan)
/--
Creates a selector for receiving from a channel.
Yields `none` when the channel has been closed.
-/
def recvSelector (recvChan : Std.CloseableChannel ByteArray) : Selector (Option ByteArray) :=
  recvChan.recvSelector
end Mock
namespace Mock.Client
/--
Gets the receive channel for a client (server to client direction).
-/
def getRecvChan (client : Mock.Client) : Std.CloseableChannel ByteArray :=
  client.shared.serverToClient

/--
Gets the send channel for a client (client to server direction).
-/
def getSendChan (client : Mock.Client) : Std.CloseableChannel ByteArray :=
  client.shared.clientToServer
/--
Sends a single ByteArray.
-/
def send (client : Mock.Client) (data : ByteArray) : Async Unit :=
  Mock.send (getSendChan client) data

/--
Receives data, joining all available chunks.
Blocks until at least one chunk arrives; `none` means the channel was closed.
-/
def recv? (client : Mock.Client) (expect : Option UInt64 := none) : Async (Option ByteArray) :=
  Mock.recvJoined (getRecvChan client) expect
/--
Tries to receive data without blocking, joining all immediately available chunks.
Returns `none` if no data is available. The `_expect` parameter is currently unused.
-/
def tryRecv? (client : Mock.Client) (_expect : UInt64 := 0) : BaseIO (Option ByteArray) := do
  match ← (getRecvChan client).tryRecv with
  | none => return none
  | some first =>
    let mut result := first
    repeat
      match ← (getRecvChan client).tryRecv with
      | none => break
      | some chunk => result := result ++ chunk
    return some result
/--
Closes the mock server and client.
Closes both directions of the link; already-closed channels are left untouched.
-/
def close (client : Mock.Client) : IO Unit := do
  if !(← client.shared.clientToServer.isClosed) then client.shared.clientToServer.close
  if !(← client.shared.serverToClient.isClosed) then client.shared.serverToClient.close
end Mock.Client
namespace Mock.Server
/--
Gets the receive channel for a server (client to server direction).
-/
def getRecvChan (server : Mock.Server) : Std.CloseableChannel ByteArray :=
  server.shared.clientToServer

/--
Gets the send channel for a server (server to client direction).
-/
def getSendChan (server : Mock.Server) : Std.CloseableChannel ByteArray :=
  server.shared.serverToClient

/--
Sends a single ByteArray.
-/
def send (server : Mock.Server) (data : ByteArray) : Async Unit :=
  Mock.send (getSendChan server) data

/--
Receives data, joining all available chunks.
Blocks until at least one chunk arrives; `none` means the channel was closed.
-/
def recv? (server : Mock.Server) (expect : Option UInt64 := none) : Async (Option ByteArray) :=
  Mock.recvJoined (getRecvChan server) expect
/--
Tries to receive data without blocking, joining all immediately available chunks. Returns `none` if no
data is available. The `_expect` parameter is currently unused.
-/
def tryRecv? (server : Mock.Server) (_expect : UInt64 := 0) : BaseIO (Option ByteArray) := do
  match ← (getRecvChan server).tryRecv with
  | none => return none
  | some first =>
    let mut result := first
    repeat
      match ← (getRecvChan server).tryRecv with
      | none => break
      | some chunk => result := result ++ chunk
    return some result
/--
Closes the mock server and client.
Closes both directions of the link; already-closed channels are left untouched.
-/
def close (server : Mock.Server) : IO Unit := do
  if !(← server.shared.clientToServer.isClosed) then server.shared.clientToServer.close
  if !(← server.shared.serverToClient.isClosed) then server.shared.serverToClient.close
end Mock.Server
/--
`Transport` instance for the mock client, backed by the in-memory channels.
-/
instance : Transport Mock.Client where
recv client expect := Mock.recvJoined (Mock.Client.getRecvChan client) (some expect)
sendAll client data := Mock.sendAll (Mock.Client.getSendChan client) data
recvSelector client _ := Mock.recvSelector (Mock.Client.getRecvChan client)
close client := client.close
/--
`Transport` instance for the mock server, backed by the in-memory channels.
-/
instance : Transport Mock.Server where
recv server expect := Mock.recvJoined (Mock.Server.getRecvChan server) (some expect)
sendAll server data := Mock.sendAll (Mock.Server.getSendChan server) data
recvSelector server _ := Mock.recvSelector (Mock.Server.getRecvChan server)
close server := server.close
end Std.Http

View File

@@ -44,8 +44,15 @@ protected def Parser.run (p : Parser α) (arr : ByteArray) : Except String α :=
Parse a single byte equal to `b`, fails if different.
-/
@[inline]
def pbyte (b : UInt8) : Parser UInt8 := fun it =>
  -- Direct iterator implementation: inspect the current byte and advance only on a match.
  if h : it.hasNext then
    let got := it.curr' h
    if got = b then
      .success (it.next' h) got
    else
      -- Leave the iterator positioned at the mismatching byte.
      .error it (.other s!"expected: '{b}'")
  else
    .error it .eof
/--
Skip a single byte equal to `b`, fails if different.
@@ -57,16 +64,29 @@ def skipByte (b : UInt8) : Parser Unit :=
/--
Skip a sequence of bytes equal to the given `ByteArray`.
-/
def skipBytes (arr : ByteArray) : Parser Unit := fun it =>
  -- Compare `arr` against the input byte-by-byte without allocating.
  let rec go (idx : Nat) (it : ByteArray.Iterator) : ParseResult Unit ByteArray.Iterator :=
    if h : idx < arr.size then
      if hnext : it.hasNext then
        let got := it.curr' hnext
        let want := arr[idx]
        if got = want then
          go (idx + 1) (it.next' hnext)
        else
          .error it (.other s!"expected byte {want}, got {got}")
      else
        .error it .eof
    else
      -- Every byte of `arr` matched.
      .success it ()
  go 0 it
/--
Parse a string by matching its UTF-8 bytes, returns the string on success.
-/
@[inline]
def pstring (s : String) : Parser String := do
  -- The diff view had concatenated the old and new bodies, which would have
  -- skipped the UTF-8 bytes twice; this is the added version only.
  let utf8 := s.toUTF8
  skipBytes utf8
  return s
/--
@@ -193,19 +213,47 @@ def take (n : Nat) : Parser ByteSlice := fun it =>
else
.success (it.forward n) (it.array[it.idx...(it.idx+n)])
/--
Scans while `pred` is satisfied. Returns `(count, iterator, hitEof)`.
-/
private partial def scanWhile (pred : UInt8 → Bool) (count : Nat) (iter : ByteArray.Iterator) :
    Nat × ByteArray.Iterator × Bool :=
  if h : iter.hasNext then
    if pred (iter.curr' h) then
      scanWhile pred (count + 1) (iter.next' h)
    else
      (count, iter, false)
  else
    -- Ran out of input while the predicate still held.
    (count, iter, true)
/--
Scans while `pred` is satisfied, bounded by `limit`.
Returns `(count, iterator, hitEof)`.
-/
private partial def scanWhileUpTo (pred : UInt8 → Bool) (limit : Nat) (count : Nat)
    (iter : ByteArray.Iterator) : Nat × ByteArray.Iterator × Bool :=
  -- Hitting the limit is a normal stop, not EOF.
  if count ≥ limit then
    (count, iter, false)
  else if h : iter.hasNext then
    if pred (iter.curr' h) then
      scanWhileUpTo pred limit (count + 1) (iter.next' h)
    else
      (count, iter, false)
  else
    (count, iter, true)
/--
Parses while a predicate is satisfied.
Fails with `.eof` if input ends while the predicate still holds.
-/
@[inline]
partial def takeWhile (pred : UInt8 → Bool) : Parser ByteSlice :=
  fun it =>
    let (length, newIt, hitEof) := scanWhile pred 0 it
    if hitEof then
      .error newIt .eof
    else
      -- Slice the consumed region out of the underlying array.
      .success newIt (it.array[it.idx...(it.idx + length)])
/--
Parses until a predicate is satisfied (exclusive).
@@ -216,16 +264,16 @@ def takeUntil (pred : UInt8 → Bool) : Parser ByteSlice :=
/--
Skips while a predicate is satisfied.
Fails with `.eof` if input ends while the predicate still holds.
-/
@[inline]
partial def skipWhile (pred : UInt8 → Bool) : Parser Unit :=
  fun it =>
    let (_, newIt, hitEof) := scanWhile pred 0 it
    if hitEof then
      .error newIt .eof
    else
      .success newIt ()
/--
Skips until a predicate is satisfied.
@@ -236,34 +284,31 @@ def skipUntil (pred : UInt8 → Bool) : Parser Unit :=
/--
Parses while a predicate is satisfied, up to a given limit.
Fails with `.eof` if input ends before stopping or reaching the limit.
-/
@[inline]
partial def takeWhileUpTo (pred : UInt8 → Bool) (limit : Nat) : Parser ByteSlice :=
  fun it =>
    let (length, newIt, hitEof) := scanWhileUpTo pred limit 0 it
    if hitEof then
      .error newIt .eof
    else
      .success newIt (it.array[it.idx...(it.idx + length)])
/--
Parses while a predicate is satisfied, up to a given limit, requiring at least one byte.
Fails with `.eof` if input ends before stopping or reaching the limit.
-/
@[inline]
def takeWhileUpTo1 (pred : UInt8 → Bool) (limit : Nat) : Parser ByteSlice :=
  fun it =>
    let (length, newIt, hitEof) := scanWhileUpTo pred limit 0 it
    if hitEof then
      .error newIt .eof
    else if length = 0 then
      -- The predicate failed on the very first byte.
      .error it (.other "expected at least one char")
    else
      .success newIt (it.array[it.idx...(it.idx + length)])
@@ -274,19 +319,42 @@ Parses until a predicate is satisfied (exclusive), up to a given limit.
def takeUntilUpTo (pred : UInt8 → Bool) (limit : Nat) : Parser ByteSlice :=
  takeWhileUpTo (fun b => ¬pred b) limit
/--
Parses while a predicate is satisfied, consuming at most `limit` bytes.
Unlike `takeWhileUpTo`, succeeds even if input ends before the predicate stops holding.
-/
@[inline]
def takeWhileAtMost (pred : UInt8 → Bool) (limit : Nat) : Parser ByteSlice :=
  fun it =>
    -- The EOF flag is deliberately ignored: running out of input is success here.
    let (length, newIt, _) := scanWhileUpTo pred limit 0 it
    .success newIt (it.array[it.idx...(it.idx + length)])
/--
Parses while a predicate is satisfied, consuming at most `limit` bytes, requiring at least one.
Unlike `takeWhileUpTo1`, succeeds even if input ends before the predicate stops holding.
-/
@[inline]
def takeWhile1AtMost (pred : UInt8 → Bool) (limit : Nat) : Parser ByteSlice :=
  fun it =>
    let (length, newIt, _) := scanWhileUpTo pred limit 0 it
    if length = 0 then
      .error it (.other "expected at least one char")
    else
      .success newIt (it.array[it.idx...(it.idx + length)])
/--
Skips while a predicate is satisfied, up to a given limit.
Fails with `.eof` if input ends before stopping or reaching the limit.
-/
@[inline]
partial def skipWhileUpTo (pred : UInt8 → Bool) (limit : Nat) : Parser Unit :=
  fun it =>
    let (_, newIt, hitEof) := scanWhileUpTo pred limit 0 it
    if hitEof then
      .error newIt .eof
    else
      .success newIt ()
/--
Skips until a predicate is satisfied, up to a given limit.

View File

@@ -11,6 +11,7 @@ public import Std.Sync.Channel
public import Std.Sync.Mutex
public import Std.Sync.RecursiveMutex
public import Std.Sync.Barrier
public import Std.Sync.Semaphore
public import Std.Sync.SharedMutex
public import Std.Sync.Notify
public import Std.Sync.Broadcast

View File

@@ -0,0 +1,96 @@
/-
Copyright (c) 2026 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Lean FRO Contributors
-/
module
prelude
public import Init.Data.Queue
public import Init.System.Promise
public import Std.Sync.Mutex
public section
namespace Std
-- Internal state: the free-permit count plus the FIFO of blocked acquirers.
private structure SemaphoreState where
  permits : Nat
  -- NOTE(review): the scraped source lost a non-ASCII default here; `∅`
  -- (the empty queue) is the natural value — confirm against upstream.
  waiters : Std.Queue (IO.Promise Unit) := ∅
deriving Nonempty
/--
Counting semaphore.
`Semaphore.acquire` returns a promise that is resolved once a permit is available.
If a permit is currently available, the returned promise is already resolved.
`Semaphore.release` either resolves one waiting promise or increments the available permits.
-/
structure Semaphore where private mk ::
private lock : Mutex SemaphoreState
/--
Creates a promise that is already resolved with `a`.
-/
private def mkResolvedPromise [Nonempty α] (a : α) : BaseIO (IO.Promise α) := do
  let promise ← IO.Promise.new
  promise.resolve a
  return promise
/--
Creates a new semaphore with `permits` initially available permits.
-/
def Semaphore.new (permits : Nat) : BaseIO Semaphore := do
  return { lock := ← Mutex.new { permits } }
/--
Requests one permit.
Returns a promise that resolves once the permit is acquired.
-/
def Semaphore.acquire (sem : Semaphore) : BaseIO (IO.Promise Unit) := do
  sem.lock.atomically do
    let st ← get
    if st.permits > 0 then
      -- A permit is free: take it and hand back an already-resolved promise.
      set { st with permits := st.permits - 1 }
      mkResolvedPromise ()
    else
      -- No permit available: enqueue a fresh promise for `release` to resolve.
      let promise ← IO.Promise.new
      set { st with waiters := st.waiters.enqueue promise }
      return promise
/--
Tries to acquire a permit without blocking. Returns `true` on success.
-/
def Semaphore.tryAcquire (sem : Semaphore) : BaseIO Bool := do
  sem.lock.atomically do
    let st ← get
    if st.permits > 0 then
      set { st with permits := st.permits - 1 }
      return true
    else
      return false
/--
Releases one permit and resolves one waiting acquirer, if any.
-/
def Semaphore.release (sem : Semaphore) : BaseIO Unit := do
  -- Dequeue under the lock, but resolve the waiter outside the critical
  -- section to keep the lock hold time short.
  let waiter? ← sem.lock.atomically do
    let st ← get
    match st.waiters.dequeue? with
    | some (waiter, waiters) =>
      set { st with waiters }
      return some waiter
    | none =>
      set { st with permits := st.permits + 1 }
      return none
  if let some waiter := waiter? then
    waiter.resolve ()
/--
Returns the number of currently available permits.
-/
def Semaphore.availablePermits (sem : Semaphore) : BaseIO Nat :=
  sem.lock.atomically do
    return (← get).permits
end Std

View File

@@ -0,0 +1,807 @@
import Std.Internal.Http.Data.Body
open Std.Internal.IO Async
open Std.Http
open Std.Http.Body
/-! ## Stream tests -/
-- Test send and recv on stream
def channelSendRecv : Async Unit := do
  let stream ← Body.mkStream
  let chunk := Chunk.ofByteArray "hello".toUTF8
  -- `send` rendezvouses with a consumer, so run it as a background task.
  let sendTask ← async (t := AsyncTask) <| stream.send chunk
  let result ← stream.recv
  assert! result.isSome
  assert! result.get!.data == "hello".toUTF8
  await sendTask
#eval channelSendRecv.block
-- Test tryRecv on empty stream returns none
def channelTryRecvEmpty : Async Unit := do
  let stream ← Body.mkStream
  let result ← stream.tryRecv
  assert! result.isNone
#eval channelTryRecvEmpty.block
-- Test tryRecv consumes a waiting producer
def channelTryRecvWithPendingSend : Async Unit := do
let stream Body.mkStream
let sendTask async (t := AsyncTask) <| stream.send (Chunk.ofByteArray "data".toUTF8)
let mut result := none
let mut fuel := 100
while result.isNone && fuel > 0 do
result stream.tryRecv
if result.isNone then
let _ Selectable.one #[
.case ( Selector.sleep 1) pure
]
fuel := fuel - 1
assert! result.isSome
assert! result.get!.data == "data".toUTF8
await sendTask
#eval channelTryRecvWithPendingSend.block
-- Test close sets closed flag
def channelClose : Async Unit := do
  let stream ← Body.mkStream
  assert! !(← stream.isClosed)
  stream.close
  assert! (← stream.isClosed)
#eval channelClose.block
-- Test recv on closed stream returns none
def channelRecvAfterClose : Async Unit := do
  let stream ← Body.mkStream
  stream.close
  let result ← stream.recv
  assert! result.isNone
#eval channelRecvAfterClose.block
-- Test Body.stream runs producer concurrently and transfers chunks
def bodyStreamSends : Async Unit := do
let incoming Body.stream fun outgoing => do
outgoing.send (Chunk.ofByteArray "x".toUTF8)
let first incoming.recv
assert! first.isSome
assert! first.get!.data == "x".toUTF8
let done incoming.recv
assert! done.isNone
#eval bodyStreamSends.block
-- Test Body.stream closes channel when generator throws
def bodyStreamThrowCloses : Async Unit := do
let incoming Body.stream fun _ => do
throw (.userError "boom")
let result incoming.recv
assert! result.isNone
#eval bodyStreamThrowCloses.block
-- Test for-in iteration collects chunks until close
def channelForIn : Async Unit := do
let stream Body.mkStream
let producer async (t := AsyncTask) <| do
stream.send (Chunk.ofByteArray "a".toUTF8)
stream.send (Chunk.ofByteArray "b".toUTF8)
stream.close
let mut acc : ByteArray := .empty
for chunk in stream do
acc := acc ++ chunk.data
assert! acc == "ab".toUTF8
await producer
#eval channelForIn.block
-- Test chunk extensions are preserved
def channelExtensions : Async Unit := do
let stream Body.mkStream
let chunk := { data := "hello".toUTF8, extensions := #[(.mk "key", some (Chunk.ExtensionValue.ofString! "value"))] : Chunk }
let sendTask async (t := AsyncTask) <| stream.send chunk
let result stream.recv
assert! result.isSome
assert! result.get!.extensions.size == 1
assert! result.get!.extensions[0]! == (Chunk.ExtensionName.mk "key", some <| .ofString! "value")
await sendTask
#eval channelExtensions.block
-- Test incomplete sends are collapsed before delivery
def channelCollapseIncompleteChunks : Async Unit := do
let stream Body.mkStream
let first : Chunk := { data := "aaaaaaaaaa".toUTF8, extensions := #[(.mk "part", some <| .ofString! "first")] }
let second : Chunk := { data := "bbbbbbbbbb".toUTF8, extensions := #[(.mk "part", some <| .ofString! "second")] }
let last : Chunk := { data := "cccccccccccccccccccc".toUTF8, extensions := #[(.mk "part", some <| .ofString! "last")] }
stream.send first (incomplete := true)
stream.send second (incomplete := true)
let noChunkYet stream.tryRecv
assert! noChunkYet.isNone
let sendFinal async (t := AsyncTask) <| stream.send last
let result stream.recv
assert! result.isSome
let merged := result.get!
assert! merged.data == "aaaaaaaaaabbbbbbbbbbcccccccccccccccccccc".toUTF8
assert! merged.data.size == 40
assert! merged.extensions == #[(.mk "part", some <| .ofString! "first")]
await sendFinal
#eval channelCollapseIncompleteChunks.block
-- Test known size metadata
def channelKnownSize : Async Unit := do
let stream Body.mkStream
stream.setKnownSize (some (.fixed 100))
let size stream.getKnownSize
assert! size == some (.fixed 100)
#eval channelKnownSize.block
-- Test known size decreases when a chunk is consumed
def channelKnownSizeDecreases : Async Unit := do
let stream Body.mkStream
stream.setKnownSize (some (.fixed 5))
let sendTask async (t := AsyncTask) <| stream.send (Chunk.ofByteArray "hello".toUTF8)
let _ stream.recv
await sendTask
let size stream.getKnownSize
assert! size == some (.fixed 0)
#eval channelKnownSizeDecreases.block
-- Test only one blocked producer is allowed
def channelSingleProducerRule : Async Unit := do
let stream Body.mkStream
let send1 async (t := AsyncTask) <| stream.send (Chunk.ofByteArray "one".toUTF8)
-- Yield so `send1` can occupy the single pending-producer slot.
let _ Selectable.one #[
.case ( Selector.sleep 5) pure
]
let send2Failed
try
stream.send (Chunk.ofByteArray "two".toUTF8)
pure false
catch _ =>
pure true
assert! send2Failed
let first stream.recv
assert! first.isSome
assert! first.get!.data == "one".toUTF8
await send1
#eval channelSingleProducerRule.block
-- Test only one blocked consumer is allowed
def channelSingleConsumerRule : Async Unit := do
let stream Body.mkStream
let recv1 async (t := AsyncTask) <| stream.recv
let hasInterest Selectable.one #[
.case stream.interestSelector pure
]
assert! hasInterest
let recv2Failed
try
let _ stream.recv
pure false
catch _ =>
pure true
assert! recv2Failed
let sendTask async (t := AsyncTask) <| stream.send (Chunk.ofByteArray "ok".toUTF8)
let r1 await recv1
assert! r1.isSome
assert! r1.get!.data == "ok".toUTF8
await sendTask
#eval channelSingleConsumerRule.block
-- Test hasInterest reflects blocked receiver state
def channelHasInterest : Async Unit := do
let stream Body.mkStream
assert! !( stream.hasInterest)
let recvTask async (t := AsyncTask) <| stream.recv
let hasInterest Selectable.one #[
.case stream.interestSelector pure
]
assert! hasInterest
assert! ( stream.hasInterest)
let sendTask async (t := AsyncTask) <| stream.send (Chunk.ofByteArray "x".toUTF8)
let _ await recvTask
await sendTask
assert! !( stream.hasInterest)
#eval channelHasInterest.block
-- Test interestSelector resolves false when stream closes first
def channelInterestSelectorClose : Async Unit := do
let stream Body.mkStream
let waitInterest async (t := AsyncTask) <|
Selectable.one #[
.case stream.interestSelector pure
]
stream.close
let interested await waitInterest
assert! interested == false
#eval channelInterestSelectorClose.block
-- Test incomplete sends are buffered and merged into one chunk on the final send
def channelIncompleteChunks : Async Unit := do
let stream Body.mkStream
let sendTask async (t := AsyncTask) <| do
stream.send (Chunk.ofByteArray "hel".toUTF8) (incomplete := true)
stream.send (Chunk.ofByteArray "lo".toUTF8)
let result stream.recv
assert! result.isSome
assert! result.get!.data == "hello".toUTF8
await sendTask
#eval channelIncompleteChunks.block
-- Test sending to a closed stream raises an error
def channelSendAfterClose : Async Unit := do
let stream Body.mkStream
stream.close
let failed
try
stream.send (Chunk.ofByteArray "test".toUTF8)
pure false
catch _ =>
pure true
assert! failed
#eval channelSendAfterClose.block
-- Test Body.stream runs producer and returns the stream handle
def channelStreamHelper : Async Unit := do
let stream Body.stream fun s => do
s.send (Chunk.ofByteArray "hello".toUTF8)
let result stream.recv
assert! result.isSome
assert! result.get!.data == "hello".toUTF8
let eof stream.recv
assert! eof.isNone
#eval channelStreamHelper.block
-- Test Body.fromBytes creates a Stream with correct known-size metadata
def channelFromBytesHelper : Async Unit := do
let stream Body.fromBytes "world".toUTF8
let size stream.getKnownSize
assert! size == some (.fixed 5)
let result stream.recv
assert! result.isSome
assert! result.get!.data == "world".toUTF8
#eval channelFromBytesHelper.block
-- Test Body.empty creates an already-closed Stream
def channelEmptyHelper : Async Unit := do
let stream Body.empty
assert! ( stream.isClosed)
let result stream.recv
assert! result.isNone
#eval channelEmptyHelper.block
-- Test Stream.readAll concatenates all chunks
def channelReadAll : Async Unit := do
let stream Body.mkStream
let sendTask async (t := AsyncTask) <| do
stream.send (Chunk.ofByteArray "foo".toUTF8)
stream.send (Chunk.ofByteArray "bar".toUTF8)
stream.close
let result : ByteArray stream.readAll
assert! result == "foobar".toUTF8
await sendTask
#eval channelReadAll.block
-- Test Stream.readAll enforces a maximum size limit
def channelReadAllMaxSize : Async Unit := do
let stream Body.mkStream
let sendTask async (t := AsyncTask) <| do
stream.send (Chunk.ofByteArray "abcdefgh".toUTF8)
stream.close
let failed
try
let _ : ByteArray stream.readAll (maximumSize := some 4)
pure false
catch _ =>
pure true
assert! failed
await sendTask
#eval channelReadAllMaxSize.block
-- Test Stream.getKnownSize reflects the value set via setKnownSize
def channelKnownSizeRoundtrip : Async Unit := do
let stream Body.mkStream
stream.setKnownSize (some (.fixed 42))
let size stream.getKnownSize
assert! size == some (.fixed 42)
#eval channelKnownSizeRoundtrip.block
/-! ## Full tests -/
-- Test Full.recv returns content once then EOF
def fullRecvConsumesOnce : Async Unit := do
let full Body.Full.ofString "hello"
let first full.recv
let second full.recv
assert! first.isSome
assert! first.get!.data == "hello".toUTF8
assert! second.isNone
#eval fullRecvConsumesOnce.block
-- Test Full known-size metadata tracks consumption
def fullKnownSizeLifecycle : Async Unit := do
let data := ByteArray.mk #[0x01, 0x02, 0x03, 0x04]
let full Body.Full.ofByteArray data
assert! ( full.getKnownSize) == some (.fixed 4)
let chunk full.recv
assert! chunk.isSome
assert! chunk.get!.data == data
assert! ( full.getKnownSize) == some (.fixed 0)
#eval fullKnownSizeLifecycle.block
-- Test Full.close discards remaining content
def fullClose : Async Unit := do
let full Body.Full.ofString "bye"
assert! !( full.isClosed)
full.close
assert! ( full.isClosed)
assert! ( full.recv).isNone
#eval fullClose.block
-- Test Full from an empty ByteArray returns none on the first recv
def fullEmptyBytes : Async Unit := do
let full Body.Full.ofByteArray ByteArray.empty
let result full.recv
assert! result.isNone
#eval fullEmptyBytes.block
-- Test Full.recvSelector resolves immediately with the stored chunk
def fullRecvSelectorResolves : Async Unit := do
let full Body.Full.ofString "world"
let result Selectable.one #[
.case full.recvSelector pure
]
assert! result.isSome
assert! result.get!.data == "world".toUTF8
#eval fullRecvSelectorResolves.block
-- Test Full.getKnownSize returns 0 after close
def fullKnownSizeAfterClose : Async Unit := do
let full Body.Full.ofString "data"
assert! ( full.getKnownSize) == some (.fixed 4)
full.close
assert! ( full.getKnownSize) == some (.fixed 0)
#eval fullKnownSizeAfterClose.block
-- Test Full.tryRecv succeeds once and returns none thereafter
def fullTryRecvIdempotent : Async Unit := do
let full Body.Full.ofString "once"
let first full.recv
let second full.recv
assert! first.isSome
assert! first.get!.data == "once".toUTF8
assert! second.isNone
#eval fullTryRecvIdempotent.block
/-! ## Empty tests -/
-- Test Empty.recv always returns none
def emptyBodyRecv : Async Unit := do
let body : Body.Empty := {}
let result body.recv
assert! result.isNone
#eval emptyBodyRecv.block
-- Test Empty.isClosed is always true
def emptyBodyIsClosed : Async Unit := do
let body : Body.Empty := {}
assert! ( body.isClosed)
#eval emptyBodyIsClosed.block
-- Test Empty.close is a no-op: still closed and recv still returns none
def emptyBodyClose : Async Unit := do
let body : Body.Empty := {}
body.close
assert! ( body.isClosed)
let result body.recv
assert! result.isNone
#eval emptyBodyClose.block
-- Test Empty.recvSelector resolves immediately with none
def emptyBodyRecvSelector : Async Unit := do
let body : Body.Empty := {}
let result Selectable.one #[
.case body.recvSelector pure
]
assert! result.isNone
#eval emptyBodyRecvSelector.block
/-! ## Any tests -/
-- Test Any wrapping a Full body forwards recv correctly
def anyFromFull : Async Unit := do
let full Body.Full.ofString "hello"
let any : Body.Any := full
let result any.recv
assert! result.isSome
assert! result.get!.data == "hello".toUTF8
#eval anyFromFull.block
-- Test Any wrapping an Empty body returns none and reports closed
def anyFromEmpty : Async Unit := do
let empty : Body.Empty := {}
let any : Body.Any := empty
let result any.recv
assert! result.isNone
assert! ( any.isClosed)
#eval anyFromEmpty.block
-- Test Any wrapping an Incoming channel receives chunks
def anyFromChannel : Async Unit := do
let outgoing Body.mkStream
let any := Body.Any.ofBody outgoing
let sendTask async (t := AsyncTask) <| outgoing.send (Chunk.ofByteArray "data".toUTF8)
let result any.recv
assert! result.isSome
assert! result.get!.data == "data".toUTF8
await sendTask
#eval anyFromChannel.block
-- Test Any.close closes the underlying body
def anyCloseForwards : Async Unit := do
let full Body.Full.ofString "test"
let any : Body.Any := full
any.close
assert! ( any.isClosed)
let result any.recv
assert! result.isNone
#eval anyCloseForwards.block
-- Test Any.recvSelector resolves immediately for a Full body
def anyRecvSelectorFromFull : Async Unit := do
let full Body.Full.ofString "sel"
let any : Body.Any := full
let result Selectable.one #[
.case any.recvSelector pure
]
assert! result.isSome
assert! result.get!.data == "sel".toUTF8
#eval anyRecvSelectorFromFull.block
/-! ## Request.Builder body tests -/
private def recvBuiltBody (body : Body.Full) : Async (Option Chunk) :=
body.recv
-- Test Request.Builder.text sets correct headers
def requestBuilderText : Async Unit := do
let req Request.post (.originForm! "/api")
|>.text "Hello, World!"
assert! req.line.headers.get? Header.Name.contentType == some (Header.Value.ofString! "text/plain; charset=utf-8")
assert! req.line.headers.get? Header.Name.contentLength == none
let body recvBuiltBody req.body
assert! body.isSome
assert! body.get!.data == "Hello, World!".toUTF8
#eval requestBuilderText.block
-- Test Request.Builder.json sets correct headers
def requestBuilderJson : Async Unit := do
let req Request.post (.originForm! "/api")
|>.json "{\"key\": \"value\"}"
assert! req.line.headers.get? Header.Name.contentType == some (Header.Value.ofString! "application/json")
assert! req.line.headers.get? Header.Name.contentLength == none
let body recvBuiltBody req.body
assert! body.isSome
assert! body.get!.data == "{\"key\": \"value\"}".toUTF8
#eval requestBuilderJson.block
-- Test Request.Builder.fromBytes sets body
def requestBuilderFromBytes : Async Unit := do
let data := ByteArray.mk #[0x01, 0x02, 0x03]
let req Request.post (.originForm! "/api")
|>.fromBytes data
assert! req.line.headers.get? Header.Name.contentLength == none
let body recvBuiltBody req.body
assert! body.isSome
assert! body.get!.data == data
#eval requestBuilderFromBytes.block
-- Test Request.Builder.noBody creates empty body
def requestBuilderNoBody : Async Unit := do
let req Request.get (.originForm! "/api")
|>.empty
assert! req.body == {}
#eval requestBuilderNoBody.block
-- Test Request.Builder.bytes sets application/octet-stream content type
def requestBuilderBytes : Async Unit := do
let data := ByteArray.mk #[0xde, 0xad, 0xbe, 0xef]
let req Request.post (.originForm! "/api")
|>.bytes data
assert! req.line.headers.get? Header.Name.contentType == some (Header.Value.ofString! "application/octet-stream")
let body recvBuiltBody req.body
assert! body.isSome
assert! body.get!.data == data
#eval requestBuilderBytes.block
-- Test Request.Builder.html sets text/html content type
def requestBuilderHtml : Async Unit := do
let req Request.post (.originForm! "/api")
|>.html "<h1>Hello</h1>"
assert! req.line.headers.get? Header.Name.contentType == some (Header.Value.ofString! "text/html; charset=utf-8")
let body recvBuiltBody req.body
assert! body.isSome
assert! body.get!.data == "<h1>Hello</h1>".toUTF8
#eval requestBuilderHtml.block
-- Test Request.Builder.stream creates a streaming body
def requestBuilderStream : Async Unit := do
let req Request.post (.originForm! "/api")
|>.stream fun s => do
s.send (Chunk.ofByteArray "streamed".toUTF8)
let result req.body.recv
assert! result.isSome
assert! result.get!.data == "streamed".toUTF8
#eval requestBuilderStream.block
-- Test Request.Builder.noBody body is always closed and returns none
def requestBuilderNoBodyAlwaysClosed : Async Unit := do
let req Request.get (.originForm! "/api")
|>.empty
assert! ( req.body.isClosed)
let result req.body.recv
assert! result.isNone
#eval requestBuilderNoBodyAlwaysClosed.block
/-! ## Response.Builder body tests -/
-- Test Response.Builder.text sets correct headers
def responseBuilderText : Async Unit := do
let res Response.ok
|>.text "Hello, World!"
assert! res.line.headers.get? Header.Name.contentType == some (Header.Value.ofString! "text/plain; charset=utf-8")
assert! res.line.headers.get? Header.Name.contentLength == none
let body recvBuiltBody res.body
assert! body.isSome
assert! body.get!.data == "Hello, World!".toUTF8
#eval responseBuilderText.block
-- Test Response.Builder.json sets correct headers
def responseBuilderJson : Async Unit := do
let res Response.ok
|>.json "{\"status\": \"ok\"}"
assert! res.line.headers.get? Header.Name.contentType == some (Header.Value.ofString! "application/json")
assert! res.line.headers.get? Header.Name.contentLength == none
let body recvBuiltBody res.body
assert! body.isSome
assert! body.get!.data == "{\"status\": \"ok\"}".toUTF8
#eval responseBuilderJson.block
-- Test Response.Builder.fromBytes sets body
def responseBuilderFromBytes : Async Unit := do
let data := ByteArray.mk #[0xaa, 0xbb]
let res Response.ok
|>.fromBytes data
assert! res.line.headers.get? Header.Name.contentLength == none
let body recvBuiltBody res.body
assert! body.isSome
assert! body.get!.data == data
#eval responseBuilderFromBytes.block
-- Test Response.Builder.noBody creates empty body
def responseBuilderNoBody : Async Unit := do
let res Response.ok
|>.empty
assert! res.body == {}
#eval responseBuilderNoBody.block
-- Test Response.Builder.bytes sets application/octet-stream content type
def responseBuilderBytes : Async Unit := do
let data := ByteArray.mk #[0xca, 0xfe]
let res Response.ok
|>.bytes data
assert! res.line.headers.get? Header.Name.contentType == some (Header.Value.ofString! "application/octet-stream")
let body recvBuiltBody res.body
assert! body.isSome
assert! body.get!.data == data
#eval responseBuilderBytes.block
-- Test Response.Builder.html sets text/html content type
def responseBuilderHtml : Async Unit := do
let res Response.ok
|>.html "<p>OK</p>"
assert! res.line.headers.get? Header.Name.contentType == some (Header.Value.ofString! "text/html; charset=utf-8")
let body recvBuiltBody res.body
assert! body.isSome
assert! body.get!.data == "<p>OK</p>".toUTF8
#eval responseBuilderHtml.block
-- Test Response.Builder.stream creates a streaming body
def responseBuilderStream : Async Unit := do
let res Response.ok
|>.stream fun s => do
s.send (Chunk.ofByteArray "streamed".toUTF8)
let result res.body.recv
assert! result.isSome
assert! result.get!.data == "streamed".toUTF8
#eval responseBuilderStream.block
-- Test Response.Builder.noBody body is always closed and returns none
def responseBuilderNoBodyAlwaysClosed : Async Unit := do
let res Response.ok
|>.empty
assert! ( res.body.isClosed)
let result res.body.recv
assert! result.isNone
#eval responseBuilderNoBodyAlwaysClosed.block

View File

@@ -0,0 +1,325 @@
import Std.Internal.Http
import Std.Internal.Async
open Std.Internal.IO Async
open Std Http
abbrev TestHandler := Request Body.Stream ContextAsync (Response Body.Any)
instance : Std.Http.Server.Handler TestHandler where
onRequest handler request := handler request
def sendRaw
(client : Mock.Client)
(server : Mock.Server)
(raw : ByteArray)
(handler : TestHandler)
(config : Config := { lingeringTimeout := 1000, generateDate := false }) : IO ByteArray := Async.block do
client.send raw
Std.Http.Server.serveConnection server handler config
|>.run
let res client.recv?
pure (res.getD .empty)
def sendRawAndClose
(client : Mock.Client)
(server : Mock.Server)
(raw : ByteArray)
(handler : TestHandler)
(config : Config := { lingeringTimeout := 1000, generateDate := false }) : IO ByteArray := Async.block do
client.send raw
client.getSendChan.close
Std.Http.Server.serveConnection server handler config
|>.run
let res client.recv?
pure (res.getD .empty)
def responseText (response : ByteArray) : String :=
String.fromUTF8! response
def assertStatusPrefix (name : String) (response : ByteArray) (prefix_ : String) : IO Unit := do
let text := responseText response
unless text.startsWith prefix_ do
throw <| IO.userError s!"Test '{name}' failed:\nExpected status prefix {prefix_.quote}\nGot:\n{text.quote}"
def assertContains (name : String) (response : ByteArray) (needle : String) : IO Unit := do
let text := responseText response
unless text.contains needle do
throw <| IO.userError s!"Test '{name}' failed:\nMissing {needle.quote}\nGot:\n{text.quote}"
def assertNotContains (name : String) (response : ByteArray) (needle : String) : IO Unit := do
let text := responseText response
if text.contains needle then
throw <| IO.userError s!"Test '{name}' failed:\nUnexpected {needle.quote}\nGot:\n{text.quote}"
/-- Fail unless the response text equals `expected` byte-for-byte. -/
def assertExact (name : String) (response : ByteArray) (expected : String) : IO Unit := do
  let text := responseText response
  unless text == expected do
    throw <| IO.userError s!"Test '{name}' failed:\nExpected:\n{expected.quote}\nGot:\n{text.quote}"
/-- Number of non-overlapping occurrences of `needle` in `s`; `0` for an empty needle. -/
def countOccurrences (s : String) (needle : String) : Nat :=
  if needle.isEmpty then 0 else (s.splitOn needle).length - 1
/-- Fail unless the response contains exactly `expected` status lines ("HTTP/1.1 "). -/
def assertStatusCount (name : String) (response : ByteArray) (expected : Nat) : IO Unit := do
  let text := responseText response
  let got := countOccurrences text "HTTP/1.1 "
  unless got == expected do
    throw <| IO.userError s!"Test '{name}' failed:\nExpected {expected} responses but saw {got}\n{text.quote}"
/-- The canonical 400 response the server emits for malformed requests. -/
def bad400 : String :=
  "HTTP/1.1 400 Bad Request\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 0\x0d\n\x0d\n"
/-- The canonical 501 response for unsupported methods. -/
def notImplemented : String :=
  "HTTP/1.1 501 Not Implemented\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 0\x0d\n\x0d\n"
/-- Reads the entire request body and echoes it as a 200 text response.
(Restored: the `←` bind arrow was stripped by extraction.) -/
def echoBodyHandler : TestHandler := fun req => do
  let body : String ← req.body.readAll
  Response.ok |>.text body
-- Content-Length body is read exactly.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST /echo HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 5\x0d\nConnection: close\x0d\n\x0d\nhello".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertStatusPrefix "CL body accepted" response "HTTP/1.1 200"
  assertContains "CL body echoed" response "hello"

-- Chunked body baseline.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST /chunked HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertStatusPrefix "Chunked baseline" response "HTTP/1.1 200"
  assertContains "Chunked baseline body" response "hello"

-- Uppercase and leading-zero chunk-size are accepted.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST /chunked HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n000A\x0d\n0123456789\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertStatusPrefix "Chunk-size uppercase+leading-zero" response "HTTP/1.1 200"
  assertContains "Chunk-size uppercase+leading-zero body" response "0123456789"

-- Chunk extensions with token and quoted value are accepted.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST /chunked HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;ext=value;quoted=\"ok\"\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertStatusPrefix "Chunk extensions accepted" response "HTTP/1.1 200"
  assertContains "Chunk extensions body" response "hello"

-- h11-inspired: invalid chunk-size token is rejected.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST /bad HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\nG\x0d\nabc\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertExact "Invalid chunk-size token" response bad400

-- h11-inspired: reject bad bytes where chunk terminator must be CRLF.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST /smuggle HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nxxx__1a\x0d\n".toUTF8
  let response ← sendRawAndClose client server raw echoBodyHandler
  assertExact "Chunk terminator bytes validated" response bad400

-- Missing terminal 0-chunk is rejected once EOF arrives.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST /incomplete HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n".toUTF8
  let response ← sendRawAndClose client server raw echoBodyHandler
  assertExact "Missing terminal zero chunk" response bad400
-- TE+CL mixed framing is rejected.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST /mix HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nContent-Length: 5\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertExact "TE+CL rejected" response bad400

-- Duplicate chunked coding is rejected.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST /dup HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked, chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertExact "Duplicate chunked coding" response bad400

-- Duplicate Transfer-Encoding lines with unsupported coding are rejected.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST /dup-lines HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nTransfer-Encoding: gzip\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertExact "Duplicate TE headers with gzip" response bad400

-- Transfer-coding chains that end in chunked are accepted as chunked framing.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST /gzip HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: gzip, chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertStatusPrefix "gzip, chunked accepted as chunked framing" response "HTTP/1.1 200"
  assertContains "gzip, chunked body delivered" response "hello"

-- Unsupported transfer codings without chunked framing are rejected.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST /identity HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: identity\x0d\nConnection: close\x0d\n\x0d\nhello".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertExact "identity transfer-coding rejected with 400" response bad400

-- Malformed Transfer-Encoding token list is rejected.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST /bad-te-list HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: ,chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertExact "Malformed Transfer-Encoding token list" response bad400
-- Strict chunk-extension name/value limits.
#eval show IO _ from do
  let config : Config := {
    lingeringTimeout := 1000
    generateDate := false
    maxChunkExtNameLength := 4
    maxChunkExtValueLength := 4
  }
  let (clientA, serverA) ← Mock.new
  let okName := "POST /ok-name HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;name=1\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseA ← sendRaw clientA serverA okName echoBodyHandler (config := config)
  assertStatusPrefix "Chunk ext name at limit" responseA "HTTP/1.1 200"
  let (clientB, serverB) ← Mock.new
  let badName := "POST /bad-name HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;toolong=1\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB badName echoBodyHandler (config := config)
  assertExact "Chunk ext name too long" responseB bad400
  let (clientC, serverC) ← Mock.new
  let okValue := "POST /ok-value HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;name=abcd\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseC ← sendRaw clientC serverC okValue echoBodyHandler (config := config)
  assertStatusPrefix "Chunk ext value at limit" responseC "HTTP/1.1 200"
  let (clientD, serverD) ← Mock.new
  let badQuoted := "POST /bad-value HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;name=\"abcde\"\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseD ← sendRaw clientD serverD badQuoted echoBodyHandler (config := config)
  assertExact "Quoted chunk ext value too long" responseD bad400
  let (clientE, serverE) ← Mock.new
  let badToken := "POST /bad-token HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;na@e=1\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseE ← sendRaw clientE serverE badToken echoBodyHandler (config := config)
  assertExact "Invalid chunk ext token char" responseE bad400
  let (clientF, serverF) ← Mock.new
  let mixed := "POST /mixed-ext HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;a=1;b=2;toolong=3\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseF ← sendRaw clientF serverF mixed echoBodyHandler (config := config)
  assertExact "Mixed valid/invalid chunk extensions" responseF bad400

-- maxChunkExtensions boundary is enforced.
#eval show IO _ from do
  let config : Config := {
    lingeringTimeout := 1000
    generateDate := false
    maxChunkExtensions := 2
  }
  let (clientA, serverA) ← Mock.new
  let okRaw := "POST /ext-count HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;a=1;b=2\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let okResponse ← sendRaw clientA serverA okRaw echoBodyHandler (config := config)
  assertStatusPrefix "maxChunkExtensions at limit" okResponse "HTTP/1.1 200"
  let (clientB, serverB) ← Mock.new
  let badRaw := "POST /ext-count-overflow HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;a=1;b=2;c=3\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let badResponse ← sendRaw clientB serverB badRaw echoBodyHandler (config := config)
  assertExact "maxChunkExtensions overflow" badResponse bad400
-- Content-Length with leading zeros is accepted.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST /leading-zeros HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 005\x0d\nConnection: close\x0d\n\x0d\nhello".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertStatusPrefix "Content-Length leading zeros" response "HTTP/1.1 200"
  assertContains "Content-Length leading zeros body" response "hello"

-- Very large Content-Length is rejected with 413.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST /too-large HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 99999999999999999999\x0d\nConnection: close\x0d\n\x0d\nhello".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertStatusPrefix "Huge Content-Length" response "HTTP/1.1 413"

-- Duplicate Content-Length (same and different values) are rejected.
#eval show IO _ from do
  let (clientA, serverA) ← Mock.new
  let same := "POST /dup-cl-same HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 5\x0d\nContent-Length: 5\x0d\nConnection: close\x0d\n\x0d\nhello".toUTF8
  let responseA ← sendRaw clientA serverA same echoBodyHandler
  assertExact "Duplicate Content-Length same" responseA bad400
  let (clientB, serverB) ← Mock.new
  let diff := "POST /dup-cl-diff HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 3\x0d\nContent-Length: 5\x0d\nConnection: close\x0d\n\x0d\nabc".toUTF8
  let responseB ← sendRaw clientB serverB diff echoBodyHandler
  assertExact "Duplicate Content-Length different" responseB bad400

-- Chunk-size line trailing whitespace is rejected.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST /bad-space HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5 \x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertExact "Chunk-size trailing space" response bad400

-- Transfer-Encoding trailing OWS is currently accepted.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST /te-ows HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked \x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertStatusPrefix "Transfer-Encoding trailing OWS" response "HTTP/1.1 200"
  assertContains "Transfer-Encoding trailing OWS body" response "hello"

-- h11-inspired: early invalid-byte detection before CRLF.
#eval show IO _ from do
  let (clientA, serverA) ← Mock.new
  let responseA ← sendRawAndClose clientA serverA (ByteArray.mk #[0x00]) echoBodyHandler
  assertExact "Early invalid NUL" responseA bad400
  let (clientB, serverB) ← Mock.new
  let responseB ← sendRawAndClose clientB serverB (ByteArray.mk #[0x20]) echoBodyHandler
  assertExact "Early invalid SP" responseB bad400
  let (clientC, serverC) ← Mock.new
  let responseC ← sendRawAndClose clientC serverC (ByteArray.mk #[0x16, 0x03, 0x01, 0x00, 0xa5]) echoBodyHandler
  assertExact "Early invalid TLS prefix" responseC bad400

-- h11-inspired: reject garbage after request-line version token.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "GET / HTTP/1.1 xxxxxx\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertExact "Garbage after request-line" response bad400

-- Extra bytes beyond Content-Length become the next pipelined request.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw :=
    ("POST / HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 5\x0d\n\x0d\nhello" ++
     "GET /second HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n").toUTF8
  let response ← sendRaw client server raw (fun req => do
    let mut body := ByteArray.empty
    for chunk in req.body do
      body := body ++ chunk.data
    Response.ok |>.text s!"{toString req.line.uri}:{String.fromUTF8! body}")
  assertStatusCount "Pipelined parse after exact CL" response 2
  assertContains "Pipelined first response" response "/:hello"
  assertContains "Pipelined second response" response "/second:"
  assertNotContains "No parse confusion" response "/second:hello"

View File

@@ -0,0 +1,189 @@
import Std.Internal.Http
import Std.Internal.Async
open Std.Internal.IO Async
open Std Http
abbrev TestHandler := Request Body.Stream ContextAsync (Response Body.Any)
/-- A bare `TestHandler` function is itself a server handler. -/
instance : Std.Http.Server.Handler TestHandler where
  onRequest handler request := handler request
/-- Send `raw` over a mock connection, serve it with `handler`, and return the
bytes the client received (empty if none). -/
def sendRaw
    (client : Mock.Client)
    (server : Mock.Server)
    (raw : ByteArray)
    (handler : TestHandler)
    (config : Config := { lingeringTimeout := 1000, generateDate := false }) : IO ByteArray := Async.block do
  client.send raw
  Std.Http.Server.serveConnection server handler config
    |>.run
  let res ← client.recv?
  pure <| res.getD .empty
/-- Fail unless the response text starts with the given status line prefix. -/
def assertStatus (name : String) (response : ByteArray) (status : String) : IO Unit := do
  let text := String.fromUTF8! response
  if text.startsWith status then
    pure ()
  else
    throw <| IO.userError s!"Test '{name}' failed:\nExpected {status}\nGot:\n{text.quote}"
/-- Fail unless the response text equals `expected` exactly. -/
def assertExact (name : String) (response : ByteArray) (expected : String) : IO Unit := do
  let text := String.fromUTF8! response
  unless text == expected do
    throw <| IO.userError s!"Test '{name}' failed:\nExpected:\n{expected.quote}\nGot:\n{text.quote}"
/-- Echoes the fully-read request body as a 200 text response.
(Restored: the `←` bind arrow was stripped by extraction.) -/
def echoHandler : TestHandler :=
  fun req => do
    let body : String ← req.body.readAll
    Response.ok |>.text body
/-- The canonical 400 response the server emits for malformed requests. -/
def bad400 : String :=
  "HTTP/1.1 400 Bad Request\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 0\x0d\n\x0d\n"
-- Chunk extension name length limits.
-- Default maxChunkExtNameLength = 256.
#eval show IO _ from do
  let name256 := String.ofList (List.replicate 256 'a')
  let name257 := String.ofList (List.replicate 257 'a')
  -- Name at exactly the limit → accepted.
  let (clientA, serverA) ← Mock.new
  let rawA := s!"POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;{name256}\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseA ← sendRaw clientA serverA rawA echoHandler
  assertStatus "Ext name at 256 → accepted" responseA "HTTP/1.1 200"
  -- Name one byte over the limit → rejected.
  let (clientB, serverB) ← Mock.new
  let rawB := s!"POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;{name257}\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB rawB echoHandler
  assertExact "Ext name at 257 → rejected" responseB bad400
-- Chunk extension value length limits (unquoted token value).
-- Default maxChunkExtValueLength = 256.
#eval show IO _ from do
  let val256 := String.ofList (List.replicate 256 'v')
  let val257 := String.ofList (List.replicate 257 'v')
  -- Token value at exactly the limit → accepted.
  let (clientA, serverA) ← Mock.new
  let rawA := s!"POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;ext={val256}\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseA ← sendRaw clientA serverA rawA echoHandler
  assertStatus "Ext token value at 256 → accepted" responseA "HTTP/1.1 200"
  -- Token value one byte over the limit → rejected.
  let (clientB, serverB) ← Mock.new
  let rawB := s!"POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;ext={val257}\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB rawB echoHandler
  assertExact "Ext token value at 257 → rejected" responseB bad400

-- Chunk extension value length limits (quoted string value).
-- The limit applies to the unquoted content length.
#eval show IO _ from do
  let val256 := String.ofList (List.replicate 256 'v')
  let val257 := String.ofList (List.replicate 257 'v')
  -- Quoted value at exactly the limit → accepted.
  let (clientA, serverA) ← Mock.new
  let rawA := s!"POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;ext=\"{val256}\"\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseA ← sendRaw clientA serverA rawA echoHandler
  assertStatus "Ext quoted value at 256 → accepted" responseA "HTTP/1.1 200"
  -- Quoted value one byte over the limit → rejected.
  let (clientB, serverB) ← Mock.new
  let rawB := s!"POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;ext=\"{val257}\"\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB rawB echoHandler
  assertExact "Ext quoted value at 257 → rejected" responseB bad400
-- Chunk extension count limits.
-- Default maxChunkExtensions = 16.
#eval show IO _ from do
  -- Build extension lists: ";ext0;ext1;...;extN"
  let exts16 := (List.range 16).foldl (fun acc i => acc ++ s!";e{i}") ""
  let exts17 := (List.range 17).foldl (fun acc i => acc ++ s!";e{i}") ""
  -- Exactly 16 extensions → accepted.
  let (clientA, serverA) ← Mock.new
  let rawA := s!"POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5{exts16}\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseA ← sendRaw clientA serverA rawA echoHandler
  assertStatus "16 extensions → accepted" responseA "HTTP/1.1 200"
  -- 17 extensions → rejected.
  let (clientB, serverB) ← Mock.new
  let rawB := s!"POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5{exts17}\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB rawB echoHandler
  assertExact "17 extensions → rejected" responseB bad400

-- maxChunkExtensions config override.
#eval show IO _ from do
  let cfg1 : Config := { lingeringTimeout := 1000, maxChunkExtensions := 1, generateDate := false }
  -- 1 extension with limit=1 → accepted.
  let (clientA, serverA) ← Mock.new
  let rawA := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;ext1\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseA ← sendRaw clientA serverA rawA echoHandler (config := cfg1)
  assertStatus "1 ext with limit=1 → accepted" responseA "HTTP/1.1 200"
  -- 2 extensions with limit=1 → rejected.
  let (clientB, serverB) ← Mock.new
  let rawB := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;ext1;ext2\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB rawB echoHandler (config := cfg1)
  assertExact "2 exts with limit=1 → rejected" responseB bad400
  -- 0 extensions with limit=1 → accepted (no extensions is always fine).
  let (clientC, serverC) ← Mock.new
  let rawC := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseC ← sendRaw clientC serverC rawC echoHandler (config := cfg1)
  assertStatus "0 exts with limit=1 → accepted" responseC "HTTP/1.1 200"
-- Extension with no value (name-only extension token).
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;novalue\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoHandler
  assertStatus "Ext with no value → accepted" response "HTTP/1.1 200"
  -- Body is still correctly delivered.
  let text := String.fromUTF8! response
  unless text.contains "hello" do
    throw <| IO.userError "Body not delivered with name-only extension"

-- Extensions on the terminal (last) chunk are accepted.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0;final-ext=done\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoHandler
  assertStatus "Extension on last chunk → accepted" response "HTTP/1.1 200"

-- Extension with name and quoted-string value.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;ext=\"hello world\"\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoHandler
  assertStatus "Ext with quoted-string value → accepted" response "HTTP/1.1 200"

-- Extension with non-token character in name → rejected.
#eval show IO _ from do
  let (client, server) ← Mock.new
  -- '@' is not a valid token character
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;bad@name\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoHandler
  assertExact "Ext with non-token name char → rejected" response bad400

-- Multiple extensions with name=value pairs.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;a=1;b=2;c=3\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoHandler
  assertStatus "Multiple name=value extensions → accepted" response "HTTP/1.1 200"

View File

@@ -0,0 +1,620 @@
import Std.Internal.Http
import Std.Internal.Async
import Std.Internal.Async.Timer
open Std.Internal.IO Async
open Std Http
abbrev TestHandler := Request Body.Stream ContextAsync (Response Body.Any)
/-- A bare `TestHandler` function is itself a server handler. -/
instance : Std.Http.Server.Handler TestHandler where
  onRequest handler request := handler request
/-- One declarative HTTP round-trip test: a request, a handler, and the exact
expected response bytes. (Restored: `→` in the `handler` field type.) -/
structure TestCase where
  /-- Descriptive name for the test -/
  name : String
  /-- The HTTP request to send -/
  request : Request (Array Chunk)
  /-- Handler function to process the request -/
  handler : Request Body.Stream → ContextAsync (Response Body.Any)
  /-- Expected response string -/
  expected : String
  /-- Whether to use chunked encoding -/
  chunked : Bool := false
deriving Inhabited
/-- Serialize a request (request line followed by its body chunks) to raw
HTTP/1.1 bytes. When `chunked` is set, a terminal empty chunk is appended. -/
def toByteArray (req : Request (Array Chunk)) (chunked := false) : IO ByteArray := Async.block do
  -- Renamed the local encoder so it no longer shadows this definition's name.
  let encodeChunk (buf : Internal.ChunkedBuffer) (part : Chunk) := Internal.Encode.encode .v11 buf part
  let mut data := Internal.Encode.encode (v := .v11) .empty req.line
  for part in req.body do
    data := encodeChunk data part
  if chunked then
    data := encodeChunk data (Chunk.mk .empty .empty)
  return data.toByteArray
/-- Send multiple requests through a mock connection and return the response data.
(Restored: stripped `←` binds and the `→` in `onRequest`'s type.) -/
def sendRequests (client : Mock.Client) (server : Mock.Server) (reqs : Array (Request (Array Chunk)))
    (onRequest : Request Body.Stream → ContextAsync (Response Body.Any))
    (chunked : Bool := false) : IO ByteArray := Async.block do
  let mut data := .empty
  for req in reqs do
    data := data ++ (← toByteArray req chunked)
  client.send data
  Std.Http.Server.serveConnection server onRequest { lingeringTimeout := 3000, generateDate := false }
    |>.run
  let res ← client.recv?
  pure <| res.getD .empty
/-- Run a single test case, comparing actual response against expected response.
(Restored: stripped `←`, the handler arrow, and the `s!` interpolation attached
to its string literal.) -/
def runTest (name : String) (client : Mock.Client) (server : Mock.Server) (req : Request (Array Chunk))
    (handler : Request Body.Stream → ContextAsync (Response Body.Any)) (expected : String) (chunked : Bool := false) :
    IO Unit := do
  let response ← sendRequests client server #[req] handler chunked
  let responseData := String.fromUTF8! response
  if responseData != expected then
    throw <| IO.userError s!"Test '{name}' failed:\n\
      Expected:\n{expected.quote}\n\
      Got:\n{responseData.quote}"
/-- Run one declarative `TestCase` over a fresh mock connection. -/
def runTestCase (tc : TestCase) : IO Unit := do
  let (client, server) ← Mock.new
  Async.block <| runTest tc.name client server tc.request tc.handler tc.expected tc.chunked
-- Request Predicates
/-- Check if request is a basic GET request to the specified URI and host.
(Restored: the conjunctions between the four conditions were stripped.) -/
def isBasicGetRequest (req : Request Body.Stream) (uri : String) (host : String) : Bool :=
  req.line.method == .get &&
  req.line.version == .v11 &&
  toString req.line.uri == uri &&
  req.line.headers.hasEntry (.mk "host") (.ofString! host)
/-- Check whether the request carries the exact `Content-Length: length` header. -/
def hasContentLength (req : Request Body.Stream) (length : String) : Bool :=
  req.line.headers.hasEntry (.mk "content-length") (.ofString! length)
/-- Check whether the request's `Transfer-Encoding` header parses and includes `chunked`. -/
def isChunkedRequest (req : Request Body.Stream) : Bool :=
  match req.line.headers.get? (.mk "transfer-encoding") with
  | some te =>
    match Header.TransferEncoding.parse te with
    | some te => te.isChunked
    | none => false
  | none => false
/-- Check whether `name` parses as a header name and the request carries it with `value`. -/
def hasHeader (req : Request Body.Stream) (name : String) (value : String) : Bool :=
  match Header.Name.ofString? name with
  | some headerName => req.line.headers.hasEntry headerName (.ofString! value)
  | none => false
/-- Check whether the request method equals `method`. -/
def hasMethod (req : Request Body.Stream) (method : Method) : Bool :=
  req.line.method == method
/-- Check whether the request URI renders to exactly `uri`. -/
def hasUri (req : Request Body.Stream) (uri : String) : Bool :=
  toString req.line.uri = uri
-- Tests

#eval runTestCase {
  name := "GET with Content-Length"
  request := Request.new
    |>.method .get
    |>.uri! "/"
    |>.header! "Host" "example.com"
    |>.header! "Connection" "close"
    |>.header! "Content-Length" "7"
    |>.body #[.mk "survive".toUTF8 #[]]
  handler := fun req => do
    if isBasicGetRequest req "/" "example.com" && hasContentLength req "7"
    then Response.ok |>.text "ok"
    else Response.badRequest |>.text "closed"
  expected := "HTTP/1.1 200 OK\x0d\nContent-Type: text/plain; charset=utf-8\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 2\x0d\n\x0d\nok"
}

#eval runTestCase {
  name := "Simple GET request"
  request := Request.new
    |>.method .get
    |>.uri! "/api/users"
    |>.header! "Host" "api.example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun req => do
    if hasMethod req .get && hasUri req "/api/users"
    then Response.ok |>.text "users list"
    else Response.notFound |>.text ""
  expected := "HTTP/1.1 200 OK\x0d\nContent-Type: text/plain; charset=utf-8\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 10\x0d\n\x0d\nusers list"
}

#eval runTestCase {
  name := "POST with body"
  request := Request.new
    |>.method .post
    |>.uri! "/api/users"
    |>.header! "Host" "api.example.com"
    |>.header! "Content-Type" "application/json"
    |>.header! "Content-Length" "16"
    |>.header! "Connection" "close"
    |>.body #[.mk "{\"name\":\"Alice\"}".toUTF8 #[]]
  handler := fun req => do
    if hasMethod req .post && hasHeader req "Content-Type" "application/json"
    then Response.new |>.status .created |>.text "Created"
    else Response.badRequest |>.text ""
  expected := "HTTP/1.1 201 Created\x0d\nContent-Type: text/plain; charset=utf-8\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 7\x0d\n\x0d\nCreated"
}
#eval runTestCase {
  name := "DELETE request"
  request := Request.new
    |>.method .delete
    |>.uri! "/api/users/123"
    |>.header! "Host" "api.example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun req => do
    if hasMethod req .delete && hasUri req "/api/users/123"
    then Response.new |>.status .noContent |>.text ""
    else Response.notFound |>.text ""
  expected := "HTTP/1.1 204 No Content\x0d\nContent-Type: text/plain; charset=utf-8\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\n\x0d\n"
}

#eval runTestCase {
  name := "HEAD request"
  request := Request.new
    |>.method .head
    |>.uri! "/api/users"
    |>.header! "Host" "api.example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun req => do
    if hasMethod req .head
    then Response.ok |>.text ""
    else Response.notFound |>.text ""
  expected := "HTTP/1.1 200 OK\x0d\nContent-Type: text/plain; charset=utf-8\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 0\x0d\n\x0d\n"
}

#eval runTestCase {
  name := "OPTIONS request"
  request := Request.new
    |>.method .options
    |>.uri! "*"
    |>.header! "Host" "api.example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun req => do
    if hasMethod req .options
    then Response.new
      |>.status .ok
      |>.header! "Allow" "GET, POST, PUT, DELETE, OPTIONS"
      |>.text ""
    else Response.badRequest |>.text ""
  expected := "HTTP/1.1 200 OK\x0d\nAllow: GET, POST, PUT, DELETE, OPTIONS\x0d\nContent-Type: text/plain; charset=utf-8\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 0\x0d\n\x0d\n"
}
#eval runTestCase {
  name := "Request with multiple headers"
  request := Request.new
    |>.method .get
    |>.uri! "/api/data"
    |>.header! "Host" "api.example.com"
    |>.header! "Accept" "application/json"
    |>.header! "User-Agent" "TestClient/1.0"
    |>.header! "Authorization" "Bearer token123"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun req => do
    if hasHeader req "Authorization" "Bearer token123" && hasHeader req "Accept" "application/json"
    then Response.ok |>.text "authenticated"
    else Response.new |>.status .unauthorized |>.text "unauthorized"
  expected := "HTTP/1.1 200 OK\x0d\nContent-Type: text/plain; charset=utf-8\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 13\x0d\n\x0d\nauthenticated"
}

#eval runTestCase {
  name := "Request with query parameters"
  request := Request.new
    |>.method .get
    |>.uri! "/api/search?q=test&limit=10"
    |>.header! "Host" "api.example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun req => do
    if hasUri req "/api/search?q=test&limit=10"
    then Response.ok |>.text "search results"
    else Response.notFound |>.text ""
  expected := "HTTP/1.1 200 OK\x0d\nContent-Type: text/plain; charset=utf-8\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 14\x0d\n\x0d\nsearch results"
}

#eval runTestCase {
  name := "POST with empty body"
  request := Request.new
    |>.method .post
    |>.uri! "/api/trigger"
    |>.header! "Host" "api.example.com"
    |>.header! "Content-Length" "0"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun req => do
    if hasMethod req .post && hasContentLength req "0"
    then Response.new |>.status .accepted |>.text "triggered"
    else Response.badRequest |>.text ""
  expected := "HTTP/1.1 202 Accepted\x0d\nContent-Type: text/plain; charset=utf-8\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 9\x0d\n\x0d\ntriggered"
}
#eval runTestCase {
  name := "Large response body"
  request := Request.new
    |>.method .get
    |>.uri! "/api/large"
    |>.header! "Host" "api.example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun _ => do
    let largeBody := String.ofList (List.replicate 1000 'X')
    Response.ok |>.text largeBody
  expected := s!"HTTP/1.1 200 OK\x0d\nContent-Type: text/plain; charset=utf-8\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 1000\x0d\n\x0d\n{String.ofList (List.replicate 1000 'X')}"
}

#eval runTestCase {
  name := "Custom status code"
  request := Request.new
    |>.method .get
    |>.uri! "/api/teapot"
    |>.header! "Host" "api.example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun _ => do
    Response.new
      |>.status .imATeapot
      |>.text "I'm a teapot"
  expected := "HTTP/1.1 418 I'm a teapot\x0d\nContent-Type: text/plain; charset=utf-8\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 12\x0d\n\x0d\nI'm a teapot"
}

#eval runTestCase {
  name := "Request with special characters in URI"
  request := Request.new
    |>.method .get
    |>.uri! "/api/users/%C3%A9"
    |>.header! "Host" "api.example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun req => do
    if hasUri req "/api/users/%C3%A9"
    then Response.ok |>.text "found"
    else Response.notFound |>.text ""
  expected := "HTTP/1.1 200 OK\x0d\nContent-Type: text/plain; charset=utf-8\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 5\x0d\n\x0d\nfound"
}

#eval runTestCase {
  name := "Response with custom headers"
  request := Request.new
    |>.method .get
    |>.uri! "/api/data"
    |>.header! "Host" "api.example.com"
    |>.header! "Connection" "close"
    |>.header! "Cache-Control" "no-cache"
    |>.body #[]
  handler := fun _ => do
    Response.new
      |>.status .ok
      |>.header! "Cache-Control" "no-cache"
      |>.header! "X-Custom-Header" "custom-value"
      |>.text "data"
  expected := "HTTP/1.1 200 OK\x0d\nCache-Control: no-cache\x0d\nX-Custom-Header: custom-value\x0d\nContent-Type: text/plain; charset=utf-8\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 4\x0d\n\x0d\ndata"
}
#eval runTestCase {
name := "Request with Content-Type and body"
request := Request.new
|>.method .post
|>.uri! "/api/xml"
|>.header! "Host" "api.example.com"
|>.header! "Content-Type" "application/xml"
|>.header! "Content-Length" "17"
|>.header! "Connection" "close"
|>.body #[.mk "<data>test</data>".toUTF8 #[]]
handler := fun req => do
if hasHeader req "Content-Type" "application/xml"
then Response.ok |>.text "processed xml"
else Response.new |>.status .unsupportedMediaType |>.text "unsupported"
expected := "HTTP/1.1 200 OK\x0d\nContent-Type: text/plain; charset=utf-8\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 13\x0d\n\x0d\nprocessed xml"
}
-- Limits
-- These cases exceed the server's parsing limits (header name/value length,
-- request-line length, header count, total header size); the expected output
-- is always a 400 or 431 with an empty body, regardless of the handler.
#eval
let bigString := String.fromUTF8! (ByteArray.mk (Array.ofFn (n := 257) (fun _ => 65)))
runTestCase {
name := "Huge String request"
request := Request.new
|>.method .head
|>.uri! "/api/users"
|>.header! "Host" "api.example.com"
|>.header! bigString "a"
|>.header! "Connection" "close"
|>.body #[]
handler := fun req => do
if hasMethod req .head
then Response.ok
|>.header (.ofString! bigString) (.ofString! "ata")
|>.text ""
else Response.notFound |>.text ""
expected := "HTTP/1.1 400 Bad Request\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 0\x0d\n\x0d\n"
}
#eval runTestCase {
name := "Request line too long"
request :=
Request.new
|>.method .get
|>.uri (.originForm (.mk #[URI.EncodedString.encode <| String.ofList (List.replicate 2000 'a')] true) none)
|>.header! "Host" "api.example.com"
|>.header! "Connection" "close"
|>.body #[]
handler := fun req => do
Response.ok |>.text (toString (toString req.line.uri).length)
expected := "HTTP/1.1 400 Bad Request\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 0\x0d\n\x0d\n"
}
#eval runTestCase {
name := "Header long"
request :=
Request.new
|>.method .get
|>.uri (.originForm (.mk #[URI.EncodedString.encode <| String.ofList (List.replicate 200 'a')] true) none)
|>.header! "Host" (String.ofList (List.replicate 8230 'a'))
|>.header! "Connection" "close"
|>.body #[]
handler := fun req => do
Response.ok |>.text (toString (toString req.line.uri).length)
expected := "HTTP/1.1 400 Bad Request\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 0\x0d\n\x0d\n"
}
#eval runTestCase {
name := "Too many headers"
request := Id.run do
let mut req := Request.new
|>.method .get
|>.uri! "/api/data"
|>.header! "Host" "api.example.com"
|>.header! "Connection" "close"
for i in [0:101] do
req := req |>.header! s!"X-Header-{i}" s!"value{i}"
return req |>.body #[]
handler := fun _ => do
Response.ok |>.text "success"
expected := "HTTP/1.1 431 Request Header Fields Too Large\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 0\x0d\n\x0d\n"
}
#eval runTestCase {
name := "Header value too long"
request := Request.new
|>.method .get
|>.uri! "/api/test"
|>.header! "Host" "api.example.com"
|>.header! "X-Long-Value" (String.ofList (List.replicate 9000 'x'))
|>.header! "Connection" "close"
|>.body #[]
handler := fun _ => do
Response.ok |>.text "ok"
expected := "HTTP/1.1 400 Bad Request\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 0\x0d\n\x0d\n"
}
#eval runTestCase {
name := "Total headers size too large"
request := Id.run do
let mut req := Request.new
|>.method .get
|>.uri! "/api/data"
|>.header! "Host" "api.example.com"
|>.header! "Connection" "close"
for i in [0:200] do
req := req |>.header! s!"X-Header-{i}" (String.ofList (List.replicate 200 'a'))
return req |>.body #[]
handler := fun _ => do
Response.ok |>.text "success"
expected := "HTTP/1.1 431 Request Header Fields Too Large\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 0\x0d\n\x0d\n"
}
-- Tests
-- Streaming body paired with an explicit Content-Length header: the server
-- must emit the declared length rather than switching to chunked encoding.
-- NOTE(review): the page extraction stripped the `←` bind arrows in the
-- handler's `do` block; restored below.
#eval runTestCase {
  name := "Streaming response with fixed Content-Length"
  request := Request.new
    |>.method .get
    |>.uri! "/stream"
    |>.header! "Host" "example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun _ => do
    let stream ← Body.mkStream
    background do
      for i in [0:3] do
        -- Small delay between chunks so the server really streams.
        let sleep ← Sleep.mk 5
        sleep.wait
        stream.send <| Chunk.ofByteArray s!"chunk{i}\n".toUTF8
      stream.close
    return Response.ok
      |>.header (.mk "content-length") (.mk "21")
      |>.body stream
  expected := "HTTP/1.1 200 OK\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 21\x0d\n\x0d\nchunk0\nchunk1\nchunk2\n"
}
-- `setKnownSize (.fixed n)` on the stream must surface as a Content-Length
-- header without the handler setting one explicitly.
-- NOTE(review): `←` bind arrow restored (stripped by page extraction).
#eval runTestCase {
  name := "Streaming response with setKnownSize fixed"
  request := Request.new
    |>.method .get
    |>.uri! "/stream-sized"
    |>.header! "Host" "example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun _ => do
    let stream ← Body.mkStream
    stream.setKnownSize (some (.fixed 15))
    background do
      for i in [0:3] do
        stream.send <| Chunk.ofByteArray s!"data{i}".toUTF8
      stream.close
    return Response.ok
      |>.body stream
  expected := "HTTP/1.1 200 OK\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 15\x0d\n\x0d\ndata0data1data2"
}
-- A stream with no known size and no Content-Length must be sent with
-- `Transfer-Encoding: chunked`.
-- NOTE(review): `←` bind arrow restored (stripped by page extraction).
#eval runTestCase {
  name := "Streaming response with chunked encoding"
  request := Request.new
    |>.method .get
    |>.uri! "/stream-chunked"
    |>.header! "Host" "example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun _ => do
    let stream ← Body.mkStream
    background do
      stream.send <| Chunk.ofByteArray "hello".toUTF8
      stream.send <| Chunk.ofByteArray "world".toUTF8
      stream.close
    return Response.ok
      |>.body stream
  expected := "HTTP/1.1 200 OK\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nTransfer-Encoding: chunked\x0d\n\x0d\n5\x0d\nhello\x0d\n5\x0d\nworld\x0d\n0\x0d\n\x0d\n"
}
-- Chunked request body (two pre-split parts) answered with a streaming
-- response pinned to a fixed Content-Length.
-- NOTE(review): `←` bind arrow restored (stripped by page extraction).
#eval runTestCase {
  name := "Chunked request with streaming response"
  request := Request.new
    |>.method .post
    |>.uri! "/"
    |>.header! "Host" "example.com"
    |>.header! "Transfer-Encoding" "chunked"
    |>.header! "Connection" "close"
    |>.body #[
      .mk "data1".toUTF8 #[],
      .mk "data2".toUTF8 #[]
    ]
  handler := fun req => do
    let stream ← Body.mkStream
    if isChunkedRequest req
    then
      background do
        for i in [0:2] do
          stream.send <| Chunk.ofByteArray s!"response{i}".toUTF8
        stream.close
      return Response.ok
        |>.header (.mk "content-length") (.mk "18")
        |>.body stream
    else
      stream.send <| Chunk.ofByteArray "not chunked".toUTF8
      stream.close
      return Response.badRequest
        |>.body stream
  expected := "HTTP/1.1 200 OK\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 18\x0d\n\x0d\nresponse0response1"
  chunked := true
}
-- Writing more bytes than the declared Content-Length: the expected output
-- shows the head only, i.e. the server truncates/closes rather than overflow.
-- NOTE(review): `←` bind arrow restored (stripped by page extraction).
#eval runTestCase {
  name := "Fixed-length response overflow closes connection"
  request := Request.new
    |>.method .get
    |>.uri! "/overflow"
    |>.header! "Host" "example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun _ => do
    let stream ← Body.mkStream
    background do
      stream.send <| Chunk.ofByteArray "abcdef".toUTF8
      stream.close
    return Response.ok
      |>.header (.mk "content-length") (.mk "3")
      |>.body stream
  expected := "HTTP/1.1 200 OK\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 3\x0d\n\x0d\n"
}

View File

@@ -141,13 +141,13 @@ info: "X-Custom-Header: value\x0d\n"
info: "GET /path HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr ({ method := .get, version := .v11, uri := "/path" } : Request.Head)
#eval encodeStr ({ method := .get, version := .v11, uri := .parse! "/path" } : Request.Head)
/--
info: "POST /submit HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr ({ method := .post, version := .v11, uri := "/submit" } : Request.Head)
#eval encodeStr ({ method := .post, version := .v11, uri := .parse! "/submit" } : Request.Head)
/--
info: "PUT /resource HTTP/2.0\x0d\nContent-Type: application/json\x0d\n\x0d\n"
@@ -156,7 +156,7 @@ info: "PUT /resource HTTP/2.0\x0d\nContent-Type: application/json\x0d\n\x0d\n"
#eval encodeStr ({
method := .put
version := .v20
uri := "/resource"
uri := .parse! "/resource"
headers := Headers.empty.insert! "content-type" "application/json"
} : Request.Head)
@@ -222,61 +222,61 @@ info: "a\x0d\n0123456789\x0d\n"
info: "GET /index.html HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Request.get "/index.html" |>.body ()).line
#eval encodeStr (Request.get (.parse! "/index.html") |>.body ()).line
/--
info: "POST /api/data HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Request.post "/api/data" |>.body ()).line
#eval encodeStr (Request.post (.parse! "/api/data") |>.body ()).line
/--
info: "PUT /resource HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Request.put "/resource" |>.body ()).line
#eval encodeStr (Request.put (.parse! "/resource") |>.body ()).line
/--
info: "DELETE /item HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Request.delete "/item" |>.body ()).line
#eval encodeStr (Request.delete (.parse! "/item") |>.body ()).line
/--
info: "PATCH /update HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Request.patch "/update" |>.body ()).line
#eval encodeStr (Request.patch (.parse! "/update") |>.body ()).line
/--
info: "HEAD /check HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Request.head "/check" |>.body ()).line
#eval encodeStr (Request.head (.parse! "/check") |>.body ()).line
/--
info: "OPTIONS * HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Request.options "*" |>.body ()).line
#eval encodeStr (Request.options (.parse! "*") |>.body ()).line
/--
info: "CONNECT proxy:8080 HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Request.connect "proxy:8080" |>.body ()).line
#eval encodeStr (Request.connect (.parse! "proxy:8080") |>.body ()).line
/--
info: "TRACE /debug HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Request.trace "/debug" |>.body ()).line
#eval encodeStr (Request.trace (.parse! "/debug") |>.body ()).line
/--
info: "POST /v2 HTTP/2.0\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Request.new |>.method .post |>.uri "/v2" |>.version .v20 |>.body ()).line
#eval encodeStr (Request.new |>.method .post |>.uri (.parse! "/v2") |>.version .v20 |>.body ()).line
/-! ## Response builder -/
@@ -348,21 +348,21 @@ info: "HTTP/1.1 418 I'm a teapot\x0d\n\x0d\n"
/-! ## Edge cases: Status encoding -/
-- Status.other 0: minimum possible value
-- Status.other 104: minimum valid non-known code (100–103 are all named)
/--
info: "999 Unknown"
-/
#guard_msgs in
#eval encodeStr (Status.other 999, "Unknown", by decide, by decide, by decide)
-- Status.other that overlaps with a named status (100 = Continue)
-- Status.other 209: non-named code between two known blocks
/--
info: "888 Unknown"
-/
#guard_msgs in
#eval encodeStr (Status.other 888, "Unknown", by decide, by decide, by decide)
-- Status.other max UInt16
-- Status.other 999: maximum valid code
/--
info: "999 Unknown"
-/
@@ -578,35 +578,14 @@ info: true
info: "GET /search?q=hello&lang=en HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr ({ method := .get, version := .v11, uri := "/search?q=hello&lang=en" } : Request.Head)
-- URI with fragment
/--
info: "GET /page#section HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr ({ method := .get, version := .v11, uri := "/page#section" } : Request.Head)
#eval encodeStr ({ method := .get, version := .v11, uri := .parse! "/search?q=hello&lang=en" } : Request.Head)
-- URI with percent-encoded characters
/--
info: "GET /path%20with%20spaces HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr ({ method := .get, version := .v11, uri := "/path%20with%20spaces" } : Request.Head)
-- URI with special characters (brackets, colons)
/--
info: "GET /api/v1/users/[id]:action HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr ({ method := .get, version := .v11, uri := "/api/v1/users/[id]:action" } : Request.Head)
-- Empty URI
/--
info: "GET HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr ({ method := .get, version := .v11, uri := "" } : Request.Head)
#eval encodeStr ({ method := .get, version := .v11, uri :=.parse! "/path%20with%20spaces" } : Request.Head)
/-! ## Edge cases: Response with unusual statuses -/

View File

@@ -0,0 +1,203 @@
import Std.Internal.Http
import Std.Internal.Async
open Std.Internal.IO Async
open Std Http
-- Plain function handler used by most tests: the request is passed straight
-- through to the wrapped function.
abbrev TestHandler := Request Body.Stream ContextAsync (Response Body.Any)
instance : Std.Http.Server.Handler TestHandler where
onRequest handler request := handler request
-- Handler that counts `onRequest` invocations and always rejects
-- `Expect: 100-continue` (via `onContinue := pure false`).
structure RejectContinueHandler where
onRequestCalls : IO.Ref Nat
instance : Std.Http.Server.Handler RejectContinueHandler where
onRequest self _request := do
self.onRequestCalls.modify (· + 1)
Response.ok |>.text "request-ran"
onContinue _self _head :=
pure false
-- Handler that counts `onRequest` invocations, accepts `Expect: 100-continue`,
-- and echoes the fully-read request body as "accepted:<body>".
-- NOTE(review): `←` bind arrow on the `readAll` line restored (stripped by
-- page extraction).
structure AcceptContinueHandler where
  onRequestCalls : IO.Ref Nat

instance : Std.Http.Server.Handler AcceptContinueHandler where
  onRequest self request := do
    self.onRequestCalls.modify (· + 1)
    let body : String ← request.body.readAll
    Response.ok |>.text s!"accepted:{body}"
  onContinue _self _head :=
    pure true
/--
Sends `raw` to the mock server, runs the connection to completion with
`handler`, and returns whatever bytes the server wrote back (empty if none).
NOTE(review): `←` bind arrow on the `recv?` line restored (stripped by page
extraction).
-/
def sendRaw {σ : Type} [Std.Http.Server.Handler σ]
    (client : Mock.Client)
    (server : Mock.Server)
    (raw : ByteArray)
    (handler : σ)
    (config : Config := { lingeringTimeout := 3000, generateDate := false }) : IO ByteArray := Async.block do
  client.send raw
  Std.Http.Server.serveConnection server handler config
    |>.run
  let res ← client.recv?
  pure (res.getD .empty)
-- Fails with a descriptive error unless the UTF-8 decoded `response` contains `needle`.
def assertContains (name : String) (response : ByteArray) (needle : String) : IO Unit := do
let text := String.fromUTF8! response
unless text.contains needle do
throw <| IO.userError s!"Test '{name}' failed:\nExpected to contain: {needle.quote}\nGot:\n{text.quote}"
-- Fails with a descriptive error if the UTF-8 decoded `response` contains `needle`.
def assertNotContains (name : String) (response : ByteArray) (needle : String) : IO Unit := do
let text := String.fromUTF8! response
if text.contains needle then
throw <| IO.userError s!"Test '{name}' failed:\nDid not expect: {needle.quote}\nGot:\n{text.quote}"
/--
Asserts the handler's invocation counter equals `expected`.
NOTE(review): `←` bind arrow on the `calls.get` line restored (stripped by
page extraction).
-/
def assertCallCount (name : String) (calls : IO.Ref Nat) (expected : Nat) : IO Unit := do
  let got ← calls.get
  if got != expected then
    throw <| IO.userError s!"Test '{name}' failed:\nExpected onRequest calls {expected}, got {got}"
/-- Number of non-overlapping occurrences of `needle` in `s`; `0` for an empty needle. -/
def countOccurrences (s : String) (needle : String) : Nat :=
  match needle.isEmpty with
  | true => 0
  | false => (s.splitOn needle).length - 1
-- Fails unless `needle` occurs exactly `expected` times in the decoded `response`.
def assertOccurrenceCount (name : String) (response : ByteArray) (needle : String) (expected : Nat) : IO Unit := do
let text := String.fromUTF8! response
let got := countOccurrences text needle
if got != expected then
throw <| IO.userError s!"Test '{name}' failed:\nExpected {expected} occurrences of {needle.quote}, got {got}\n{text.quote}"
-- Rejecting Expect returns 417 and does not execute user handler.
-- NOTE(review): `←` bind arrows restored throughout (stripped by page extraction).
#eval show IO _ from do
  let (client, server) ← Mock.new
  let calls ← IO.mkRef 0
  let handler : RejectContinueHandler := { onRequestCalls := calls }
  let raw := "POST /upload HTTP/1.1\x0d\nHost: example.com\x0d\nExpect: 100-continue\x0d\nContent-Length: 5\x0d\nConnection: close\x0d\n\x0d\nhello".toUTF8
  let response ← sendRaw client server raw handler
  assertContains "Expect rejected status" response "HTTP/1.1 417 Expectation Failed"
  assertNotContains "Expect rejected no 100 Continue" response "100 Continue"
  assertNotContains "Expect rejected no handler body" response "request-ran"
  assertOccurrenceCount "Expect rejected single response" response "HTTP/1.1 " 1
  assertCallCount "Expect rejected call count" calls 0

-- Rejected Expect closes the exchange and blocks pipelined follow-up requests.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let calls ← IO.mkRef 0
  let handler : RejectContinueHandler := { onRequestCalls := calls }
  let req1 := "POST /first HTTP/1.1\x0d\nHost: example.com\x0d\nExpect: 100-continue\x0d\nContent-Length: 5\x0d\n\x0d\nhello"
  let req2 := "GET /second HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
  let response ← sendRaw client server (req1 ++ req2).toUTF8 handler
  assertContains "Expect rejected still 417" response "HTTP/1.1 417 Expectation Failed"
  assertNotContains "Expect rejected no second request" response "/second"
  assertCallCount "Expect rejected pipelined call count" calls 0
-- Accepted Expect emits 100 Continue followed by final 200.
-- NOTE(review): `←` bind arrows restored throughout (stripped by page extraction).
#eval show IO _ from do
  let (client, server) ← Mock.new
  let calls ← IO.mkRef 0
  let handler : AcceptContinueHandler := { onRequestCalls := calls }
  let raw := "POST /ok HTTP/1.1\x0d\nHost: example.com\x0d\nExpect: 100-continue\x0d\nContent-Length: 5\x0d\nConnection: close\x0d\n\x0d\nhello".toUTF8
  let response ← sendRaw client server raw handler
  assertContains "Expect accepted has 100" response "HTTP/1.1 100 Continue"
  assertContains "Expect accepted has final 200" response "HTTP/1.1 200 OK"
  assertContains "Expect accepted body" response "accepted:hello"
  assertOccurrenceCount "Expect accepted exactly one 100" response "HTTP/1.1 100 Continue" 1
  assertOccurrenceCount "Expect accepted exactly one 200" response "HTTP/1.1 200 OK" 1
  assertCallCount "Expect accepted call count" calls 1

-- Non-100 Expect token proceeds as a normal request.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let calls ← IO.mkRef 0
  let handler : RejectContinueHandler := { onRequestCalls := calls }
  let raw := "POST /odd HTTP/1.1\x0d\nHost: example.com\x0d\nExpect: something-else\x0d\nContent-Length: 5\x0d\nConnection: close\x0d\n\x0d\nhello".toUTF8
  let response ← sendRaw client server raw handler
  assertContains "Non-100 Expect status" response "HTTP/1.1 200 OK"
  assertContains "Non-100 Expect handler ran" response "request-ran"
  assertNotContains "Non-100 Expect no 100 Continue" response "100 Continue"
  assertCallCount "Non-100 Expect call count" calls 1
-- h11-inspired: Expect token matching is case-insensitive.
-- NOTE(review): `←` bind arrows restored throughout (stripped by page extraction).
#eval show IO _ from do
  let (client, server) ← Mock.new
  let calls ← IO.mkRef 0
  let handler : AcceptContinueHandler := { onRequestCalls := calls }
  let raw := "POST /case HTTP/1.1\x0d\nHost: example.com\x0d\nExpect: 100-CONTINUE\x0d\nContent-Length: 5\x0d\nConnection: close\x0d\n\x0d\nhello".toUTF8
  let response ← sendRaw client server raw handler
  assertContains "Case-insensitive Expect has 100" response "HTTP/1.1 100 Continue"
  assertContains "Case-insensitive Expect final 200" response "HTTP/1.1 200 OK"
  assertCallCount "Case-insensitive Expect call count" calls 1

-- Normal requests without Expect do not emit 100 Continue.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let calls ← IO.mkRef 0
  let handler : AcceptContinueHandler := { onRequestCalls := calls }
  let raw := "POST /no-expect HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 5\x0d\nConnection: close\x0d\n\x0d\nhello".toUTF8
  let response ← sendRaw client server raw handler
  assertContains "No Expect status" response "HTTP/1.1 200 OK"
  assertContains "No Expect body" response "accepted:hello"
  assertNotContains "No Expect no interim 100" response "100 Continue"
  assertOccurrenceCount "No Expect exactly one 200" response "HTTP/1.1 200 OK" 1
  assertCallCount "No Expect call count" calls 1
-- Date header is generated when enabled.
-- NOTE(review): `←` bind arrows restored throughout (stripped by page extraction).
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "GET /date HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let handler : TestHandler := fun _ => Response.ok |>.text "hello"
  let response ← sendRaw client server raw handler (config := { lingeringTimeout := 3000, generateDate := true })
  assertContains "Date generated status" response "HTTP/1.1 200 OK"
  assertContains "Date generated header" response "Date: "

-- Date header is absent when disabled.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "GET /no-date HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let handler : TestHandler := fun _ => Response.ok |>.text "hello"
  let response ← sendRaw client server raw handler (config := { lingeringTimeout := 3000, generateDate := false })
  assertContains "Date disabled status" response "HTTP/1.1 200 OK"
  assertNotContains "Date disabled header" response "Date: "

-- User-specified Date header is preserved and not duplicated.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "GET /custom-date HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let handler : TestHandler := fun _ =>
    Response.ok
      |>.header! "Date" "Mon, 01 Jan 2024 00:00:00 GMT"
      |>.text "hello"
  let response ← sendRaw client server raw handler (config := { lingeringTimeout := 3000, generateDate := true })
  assertContains "User Date preserved" response "Date: Mon, 01 Jan 2024 00:00:00 GMT"
  let text := String.fromUTF8! response
  let count := countOccurrences text "Date: "
  if count != 1 then
    throw <| IO.userError s!"Test 'User Date not duplicated' failed:\nExpected one Date header, got {count}\n{text.quote}"

View File

@@ -0,0 +1,642 @@
import Std.Internal.Http
import Std.Internal.Async
import Std.Internal.Async.Timer
open Std.Internal.IO Async
open Std Http
-- Plain function handler: request is passed straight to the wrapped function.
abbrev TestHandler := Request Body.Stream ContextAsync (Response Body.Any)
instance : Std.Http.Server.Handler TestHandler where
onRequest handler request := handler request
-- Short lingering timeout and no generated Date header, so expected response
-- bytes are deterministic.
def defaultConfig : Config :=
{ lingeringTimeout := 1000, generateDate := false }
-- Canonical 400 response the server emits for malformed requests.
def bad400 : String :=
"HTTP/1.1 400 Bad Request\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 0\x0d\n\x0d\n"
/--
Runs `action` on a task, polling its state every 10ms; cancels the task and
throws if it has not finished within `timeoutMs`. Guards the suite against
server hangs. NOTE(review): `←` bind arrows restored (stripped by page
extraction).
-/
def runWithTimeout {α : Type} (name : String) (timeoutMs : Nat := 15000) (action : IO α) : IO α := do
  let task ← IO.asTask action
  -- Round the timeout up to a whole number of 10ms polling ticks.
  let ticks := (timeoutMs + 9) / 10
  let rec loop (remaining : Nat) : IO α := do
    if (← IO.getTaskState task) == .finished then
      match (← IO.wait task) with
      | .ok x => pure x
      | .error err => throw err
    else
      match remaining with
      | 0 =>
        IO.cancel task
        throw <| IO.userError s!"Test '{name}' timed out after {timeoutMs}ms (possible hang/regression)"
      | n + 1 =>
        IO.sleep 10
        loop n
  loop ticks
/--
Closes `ch`, treating a double-close (`alreadyClosed`) as success; any other
error is rethrown as an `IO.userError`.
NOTE(review): `←` restored before `EIO.toBaseIO` — the `.ok`/`.error` patterns
match the `Except` the action yields, not the action itself.
-/
def closeChannelIdempotent {α : Type} (ch : Std.CloseableChannel α) : IO Unit := do
  match ← EIO.toBaseIO ch.close with
  | .ok _ => pure ()
  | .error .alreadyClosed => pure ()
  | .error err => throw <| IO.userError (toString err)
/--
Sends `raw`, serves the connection to completion, and returns the server's
reply bytes (empty if the server wrote nothing).
NOTE(review): `←` bind arrow on the `recv?` line restored (stripped by page
extraction).
-/
def sendRaw
    (client : Mock.Client)
    (server : Mock.Server)
    (raw : ByteArray)
    (handler : TestHandler)
    (config : Config := defaultConfig) : IO ByteArray := Async.block do
  client.send raw
  Std.Http.Server.serveConnection server handler config
    |>.run
  let res ← client.recv?
  pure (res.getD .empty)

/-- Like `sendRaw`, but also closes the client's send channel to signal EOF. -/
def sendRawAndClose
    (client : Mock.Client)
    (server : Mock.Server)
    (raw : ByteArray)
    (handler : TestHandler)
    (config : Config := defaultConfig) : IO ByteArray := Async.block do
  client.send raw
  closeChannelIdempotent client.getSendChan
  Std.Http.Server.serveConnection server handler config
    |>.run
  let res ← client.recv?
  pure (res.getD .empty)
/--
Starts the server concurrently, feeds `parts` one by one over the mock
connection, closes the send channel, waits for the server, and returns the
reply bytes. NOTE(review): `←` bind arrows restored (stripped by page
extraction).
-/
def sendFragmentedAndClose
    (client : Mock.Client)
    (server : Mock.Server)
    (parts : Array ByteArray)
    (handler : TestHandler)
    (config : Config := defaultConfig) : IO ByteArray := Async.block do
  let serverTask ← async (t := AsyncTask) do
    Std.Http.Server.serveConnection server handler config
      |>.run
  for part in parts do
    client.send part
  closeChannelIdempotent client.getSendChan
  await serverTask
  let res ← client.recv?
  pure (res.getD .empty)
-- Decodes raw response bytes as UTF-8.
def responseText (response : ByteArray) : String :=
String.fromUTF8! response
-- Everything after the first blank line (CRLFCRLF); "" when there is no body.
def responseBody (response : ByteArray) : String :=
let parts := (responseText response).splitOn "\x0d\n\x0d\n"
match parts.drop 1 with
| [] => ""
| body :: _ => body
-- Fails unless the decoded response starts with `prefix_` (the status line).
def assertStatusPrefix (name : String) (response : ByteArray) (prefix_ : String) : IO Unit := do
let text := responseText response
unless text.startsWith prefix_ do
throw <| IO.userError s!"Test '{name}' failed:\nExpected status prefix {prefix_.quote}\nGot:\n{text.quote}"
-- Fails unless the decoded response equals `expected` byte-for-byte.
def assertExact (name : String) (response : ByteArray) (expected : String) : IO Unit := do
let text := responseText response
if text != expected then
throw <| IO.userError s!"Test '{name}' failed:\nExpected:\n{expected.quote}\nGot:\n{text.quote}"
-- Number of non-overlapping occurrences of `needle` in `s`; 0 for an empty needle.
def countOccurrences (s : String) (needle : String) : Nat :=
if needle.isEmpty then
0
else
(s.splitOn needle).length - 1
-- Fails unless exactly `expected` status lines ("HTTP/1.1 ") appear in the response.
def assertStatusCount (name : String) (response : ByteArray) (expected : Nat) : IO Unit := do
let text := responseText response
let got := countOccurrences text "HTTP/1.1 "
if got != expected then
throw <| IO.userError s!"Test '{name}' failed:\nExpected {expected} responses but saw {got}\n{text.quote}"
/-- One step of a 32-bit linear congruential generator (Numerical Recipes constants). -/
def nextSeed (seed : Nat) : Nat :=
  (seed * 1664525 + 1013904223) % (2 ^ 32)

/-- Advances the seed and draws a value in `[0, maxExclusive)`; `0` when the range is empty. -/
def randBelow (seed : Nat) (maxExclusive : Nat) : Nat × Nat :=
  let next := nextSeed seed
  match maxExclusive with
  | 0 => (0, next)
  | m => (next % m, next)

/-- Advances the seed and draws a value in `[low, high]`; a degenerate range returns `low` with the seed untouched. -/
def randIn (seed : Nat) (low : Nat) (high : Nat) : Nat × Nat :=
  if low <= high then
    let (offset, next) := randBelow seed (high - low + 1)
    (low + offset, next)
  else
    (low, seed)
-- Deterministic pseudo-random byte string of length `len`: 38 symbols mapped to
-- 'a'-'z' (97+r), '0'-'9' (48+...), '-' (45) and '_' (95). Returns the bytes
-- plus the advanced seed.
def randomAsciiBytes (seed : Nat) (len : Nat) : ByteArray × Nat := Id.run do
let mut s := seed
let mut out := ByteArray.empty
for _ in [0:len] do
let (r, s') := randBelow s 38
s := s'
let code :=
if r < 26 then
97 + r
else if r < 36 then
48 + (r - 26)
else if r == 36 then
45
else
95
out := out.push (UInt8.ofNat code)
(out, s)
-- Like `randomAsciiBytes` but restricted to 36 token characters:
-- 'a'-'z' and '0'-'9' only (safe for HTTP tokens).
def randomTokenBytes (seed : Nat) (len : Nat) : ByteArray × Nat := Id.run do
let mut s := seed
let mut out := ByteArray.empty
for _ in [0:len] do
let (r, s') := randBelow s 36
s := s'
let code :=
if r < 26 then
97 + r
else
48 + (r - 26)
out := out.push (UInt8.ofNat code)
(out, s)
-- Splits `data` into consecutive fragments of pseudo-random length in
-- [1, maxPart]; concatenating the result reproduces `data`.
def randomSplit (seed : Nat) (data : ByteArray) (maxPart : Nat := 17) : Array ByteArray × Nat := Id.run do
let mut s := seed
let mut out : Array ByteArray := #[]
let mut i := 0
while i < data.size do
let remaining := data.size - i
let upper := Nat.min maxPart remaining
let (partLen, s') := randIn s 1 upper
s := s'
out := out.push (data.extract i (i + partLen))
i := i + partLen
(out, s)
-- Encodes `body` as an HTTP/1.1 chunked payload using pseudo-random chunk
-- sizes in [1, 9], terminated by the "0" chunk and final CRLF.
def randomChunkedPayload (seed : Nat) (body : ByteArray) : ByteArray × Nat := Id.run do
let mut s := seed
let mut out := ByteArray.empty
let mut i := 0
while i < body.size do
let remaining := body.size - i
let maxChunk := Nat.min 9 remaining
let (chunkLen, s') := randIn s 1 maxChunk
s := s'
out := out ++ s!"{chunkLen}\x0d\n".toUTF8
out := out ++ body.extract i (i + chunkLen)
out := out ++ "\x0d\n".toUTF8
i := i + chunkLen
out := out ++ "0\x0d\n\x0d\n".toUTF8
(out, s)
-- POST request head declaring `Content-Length: bodySize` (body appended by caller).
def mkContentLengthHead (path : String) (bodySize : Nat) : ByteArray :=
s!"POST {path} HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: {bodySize}\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
-- POST request head declaring `Transfer-Encoding: chunked`.
def mkChunkedHead (path : String) : ByteArray :=
s!"POST {path} HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
-- `count` pseudo-random chunk extensions, each rendered as ";name=value" with
-- short token-character names/values.
def randomChunkExtensionList (seed : Nat) (count : Nat) : String × Nat := Id.run do
let mut s := seed
let mut ext := ""
for _ in [0:count] do
let (nameLen, s1) := randIn s 1 3
s := s1
let (valueLen, s2) := randIn s 1 3
s := s2
let (nameBytes, s3) := randomTokenBytes s nameLen
s := s3
let (valueBytes, s4) := randomTokenBytes s valueLen
s := s4
let name := String.fromUTF8! nameBytes
let value := String.fromUTF8! valueBytes
ext := ext ++ s!";{name}={value}"
(ext, s)
-- `count` pseudo-random trailer header lines ("X{i}-name: value\r\n" each).
def randomTrailerLines (seed : Nat) (count : Nat) : String × Nat := Id.run do
let mut s := seed
let mut lines := ""
for i in [0:count] do
let (nameLen, s1) := randIn s 1 4
s := s1
let (valueLen, s2) := randIn s 1 6
s := s2
let (nameBytes, s3) := randomTokenBytes s nameLen
s := s3
let (valueBytes, s4) := randomTokenBytes s valueLen
s := s4
let name := String.fromUTF8! nameBytes
let value := String.fromUTF8! valueBytes
lines := lines ++ s!"X{i}-{name}: {value}\x0d\n"
(lines, s)
/--
Echoes the fully-read request body back as a plain-text 200 response.
NOTE(review): `←` bind arrow on the `readAll` line restored (stripped by page
extraction).
-/
def echoBodyHandler : TestHandler := fun req => do
  let body : String ← req.body.readAll
  Response.ok |>.text body
/--
Feeds `raw` (possibly several pipelined requests) to a fresh mock connection,
records every request target the handler observes, and returns the reply bytes
together with the observed targets in order.
NOTE(review): `←` bind arrows restored (stripped by page extraction).
-/
def runPipelinedReadAll
    (raw : ByteArray)
    (config : Config := defaultConfig) : IO (ByteArray × Array String) := Async.block do
  let (client, server) ← Mock.new
  let seenRef ← IO.mkRef (#[] : Array String)
  let handler : TestHandler := fun req => do
    let uri := toString req.line.uri
    seenRef.modify (·.push uri)
    -- Drain the body so the connection can advance to the next request.
    let _body : String ← req.body.readAll
    Response.ok |>.text uri
  client.send raw
  closeChannelIdempotent client.getSendChan
  Std.Http.Server.serveConnection server handler config
    |>.run
  let response ← client.recv?
  let seen ← seenRef.get
  pure (response.getD .empty, seen)
/--
Fuzz: random bodies with a correct Content-Length, delivered in random
fragments, must be echoed back intact with a 200.
NOTE(review): `←` bind arrows restored (stripped by page extraction).
-/
def fuzzContentLengthEcho (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (len, seed1) := randIn seed 0 128
    seed := seed1
    let (body, seed2) := randomAsciiBytes seed len
    seed := seed2
    let head := mkContentLengthHead s!"/fuzz-cl-{i}" body.size
    let (bodyParts, seed3) := randomSplit seed body
    seed := seed3
    let parts := #[head] ++ bodyParts
    let (client, server) ← Mock.new
    let response ← sendFragmentedAndClose client server parts echoBodyHandler
    let expectedBody := String.fromUTF8! body
    assertStatusPrefix s!"fuzzContentLengthEcho case={i} seed={caseSeed}" response "HTTP/1.1 200"
    let gotBody := responseBody response
    if gotBody != expectedBody then
      throw <| IO.userError s!"fuzzContentLengthEcho case={i} seed={caseSeed} failed:\nExpected body {expectedBody.quote}\nGot body {gotBody.quote}\nFull response:\n{(responseText response).quote}"
/--
Fuzz: Content-Length values with leading zeros must still be parsed and the
body echoed back with a 200.
NOTE(review): `←` bind arrows restored (stripped by page extraction).
-/
def fuzzContentLengthLeadingZerosAccepted (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (len, seed1) := randIn seed 1 96
    seed := seed1
    let (leadingZeros, seed2) := randIn seed 1 5
    seed := seed2
    let (body, seed3) := randomAsciiBytes seed len
    seed := seed3
    let clToken := String.ofList (List.replicate leadingZeros '0') ++ toString len
    let raw :=
      s!"POST /cl-leading-zeros-{i} HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: {clToken}\x0d\nConnection: close\x0d\n\x0d\n".toUTF8 ++ body
    let (client, server) ← Mock.new
    let response ← sendRaw client server raw echoBodyHandler
    let expectedBody := String.fromUTF8! body
    assertStatusPrefix s!"fuzzContentLengthLeadingZerosAccepted case={i} seed={caseSeed} len={len} zeros={leadingZeros}" response "HTTP/1.1 200"
    let gotBody := responseBody response
    if gotBody != expectedBody then
      throw <| IO.userError s!"fuzzContentLengthLeadingZerosAccepted case={i} seed={caseSeed} failed:\nExpected body {expectedBody.quote}\nGot body {gotBody.quote}\nFull response:\n{(responseText response).quote}"
/--
Fuzz: random bodies encoded with random chunk sizes must decode to the
original body and be echoed back with a 200.
NOTE(review): `←` bind arrows restored (stripped by page extraction).
-/
def fuzzChunkedEcho (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (len, seed1) := randIn seed 0 128
    seed := seed1
    let (body, seed2) := randomAsciiBytes seed len
    seed := seed2
    let (chunkedBody, seed3) := randomChunkedPayload seed body
    seed := seed3
    let head := mkChunkedHead s!"/fuzz-chunked-{i}"
    let raw := head ++ chunkedBody
    let (client, server) ← Mock.new
    let response ← sendRaw client server raw echoBodyHandler
    let expectedBody := String.fromUTF8! body
    assertStatusPrefix s!"fuzzChunkedEcho case={i} seed={caseSeed}" response "HTTP/1.1 200"
    let gotBody := responseBody response
    if gotBody != expectedBody then
      throw <| IO.userError s!"fuzzChunkedEcho case={i} seed={caseSeed} failed:\nExpected body {expectedBody.quote}\nGot body {gotBody.quote}\nFull response:\n{(responseText response).quote}"
/-- Property: a request carrying both `Transfer-Encoding: chunked` and a
`Content-Length` header is always rejected with the canonical 400 response
(request-smuggling defense, RFC 9112 §6.3). -/
def fuzzMixedTransferEncodingAndContentLengthRejected (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (len, seed1) := randIn seed 0 96
    seed := seed1
    let (body, seed2) := randomAsciiBytes seed len
    seed := seed2
    let (chunkedBody, seed3) := randomChunkedPayload seed body
    seed := seed3
    let (declaredCl, seed4) := randIn seed 0 128
    seed := seed4
    let raw :=
      s!"POST /te-cl-{i} HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nContent-Length: {declaredCl}\x0d\nConnection: close\x0d\n\x0d\n".toUTF8 ++ chunkedBody
    let (client, server) ← Mock.new
    let response ← sendRaw client server raw echoBodyHandler
    assertExact s!"fuzzMixedTransferEncodingAndContentLengthRejected case={i} seed={caseSeed} declaredCl={declaredCl}" response bad400
/-- Property: non-hex chunk-size tokens are rejected deterministically with 400. -/
def fuzzInvalidChunkSizeRejected (iterations : Nat) (seed0 : Nat) : IO Unit := do
  -- "x" is not a hex digit in chunk-size position; neither are the rest.
  let badTokens : Array String := #["g", "G", "z", "Z", "@", "!", "x"]
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (idx, seed1) := randBelow seed badTokens.size
    seed := seed1
    let token := badTokens[idx]!
    let raw :=
      s!"POST /bad-size-{i} HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n{token}\x0d\nabc\x0d\n0\x0d\n\x0d\n".toUTF8
    let (client, server) ← Mock.new
    let response ← sendRaw client server raw echoBodyHandler
    assertExact s!"fuzzInvalidChunkSizeRejected case={i} seed={caseSeed} token={token}" response bad400
/-- Property: duplicate `Content-Length` headers are always rejected with 400,
whether the two values agree (`same == 0`) or differ by a random delta. -/
def fuzzDuplicateContentLengthRejected (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (cl1, seed1) := randIn seed 0 64
    seed := seed1
    let (same, seed2) := randBelow seed 2
    seed := seed2
    let (delta, seed3) := randIn seed 1 10
    seed := seed3
    let cl2 := if same == 0 then cl1 else cl1 + delta
    let (body, seed4) := randomAsciiBytes seed cl1
    seed := seed4
    let raw :=
      s!"POST /dup-cl-{i} HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: {cl1}\x0d\nContent-Length: {cl2}\x0d\nConnection: close\x0d\n\x0d\n".toUTF8 ++ body
    let (client, server) ← Mock.new
    let response ← sendRaw client server raw echoBodyHandler
    assertExact s!"fuzzDuplicateContentLengthRejected case={i} seed={caseSeed} cl1={cl1} cl2={cl2}" response bad400
/-- Property: chunk-extension name/value length limits (both set to 4 here) are
enforced — within limits → 200, over either limit → 400.
(Restores the stripped `≤`/`∧` in the `expectOk` condition and the `←` binds.) -/
def fuzzChunkExtensionLimits (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let config : Config := {
    lingeringTimeout := 1000
    generateDate := false
    maxChunkExtNameLength := 4
    maxChunkExtValueLength := 4
  }
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (nameLen, seed1) := randIn seed 1 8
    seed := seed1
    let (valueLen, seed2) := randIn seed 1 8
    seed := seed2
    let (nameBytes, seed3) := randomTokenBytes seed nameLen
    seed := seed3
    let (valueBytes, seed4) := randomTokenBytes seed valueLen
    seed := seed4
    let name := String.fromUTF8! nameBytes
    let value := String.fromUTF8! valueBytes
    let raw :=
      s!"POST /ext-limit-{i} HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n1;{name}={value}\x0d\nx\x0d\n0\x0d\n\x0d\n".toUTF8
    -- Accepted only when both the name and the value fit under the limit.
    let expectOk := nameLen ≤ 4 && valueLen ≤ 4
    let (client, server) ← Mock.new
    let response ← sendRaw client server raw echoBodyHandler (config := config)
    if expectOk then
      assertStatusPrefix s!"fuzzChunkExtensionLimits case={i} seed={caseSeed} nameLen={nameLen} valueLen={valueLen}" response "HTTP/1.1 200"
    else
      assertExact s!"fuzzChunkExtensionLimits case={i} seed={caseSeed} nameLen={nameLen} valueLen={valueLen}" response bad400
/-- Property: at most `maxChunkExtensions` (2 here) extensions per chunk are
accepted; more are rejected with 400. -/
def fuzzChunkExtensionCountLimit (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let config : Config := {
    lingeringTimeout := 1000
    generateDate := false
    maxChunkExtensions := 2
  }
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (extCount, seed1) := randIn seed 0 5
    seed := seed1
    let (extList, seed2) := randomChunkExtensionList seed extCount
    seed := seed2
    let raw :=
      s!"POST /ext-count-{i} HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n1{extList}\x0d\nx\x0d\n0\x0d\n\x0d\n".toUTF8
    let (client, server) ← Mock.new
    let response ← sendRaw client server raw echoBodyHandler (config := config)
    if extCount ≤ 2 then
      assertStatusPrefix s!"fuzzChunkExtensionCountLimit case={i} seed={caseSeed} extCount={extCount}" response "HTTP/1.1 200"
    else
      assertExact s!"fuzzChunkExtensionCountLimit case={i} seed={caseSeed} extCount={extCount}" response bad400
/-- Property: at most `maxTrailerHeaders` (2 here) trailer fields after the
terminating chunk are accepted; more are rejected with 400. -/
def fuzzTrailerHeaderCountLimit (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let config : Config := {
    lingeringTimeout := 1000
    generateDate := false
    maxTrailerHeaders := 2
  }
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (trailerCount, seed1) := randIn seed 0 5
    seed := seed1
    let (trailers, seed2) := randomTrailerLines seed trailerCount
    seed := seed2
    let raw :=
      s!"POST /trailers-{i} HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n1\x0d\na\x0d\n0\x0d\n{trailers}\x0d\n".toUTF8
    let (client, server) ← Mock.new
    let response ← sendRaw client server raw echoBodyHandler (config := config)
    if trailerCount ≤ 2 then
      assertStatusPrefix s!"fuzzTrailerHeaderCountLimit case={i} seed={caseSeed} trailerCount={trailerCount}" response "HTTP/1.1 200"
    else
      assertExact s!"fuzzTrailerHeaderCountLimit case={i} seed={caseSeed} trailerCount={trailerCount}" response bad400
/-- Property: once the first request's body is fully consumed, a pipelined
second request on the same connection is parsed and answered; both URIs must
appear, in order, in the handler-observed sequence `seen`. -/
def fuzzCompleteFirstBodyAllowsPipeline (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (len, seed1) := randIn seed 0 32
    seed := seed1
    let (body, seed2) := randomAsciiBytes seed len
    seed := seed2
    let uri1 := s!"/first-complete-{i}"
    let req1 :=
      s!"POST {uri1} HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: {len}\x0d\n\x0d\n".toUTF8 ++ body
    let req2 := "GET /second HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
    let (response, seen) ← runPipelinedReadAll (req1 ++ req2)
    let text := responseText response
    assertStatusCount s!"fuzzCompleteFirstBodyAllowsPipeline case={i} seed={caseSeed}" response 2
    unless text.contains uri1 do
      throw <| IO.userError s!"fuzzCompleteFirstBodyAllowsPipeline case={i} seed={caseSeed} failed:\nMissing first URI {uri1.quote}\n{text.quote}"
    unless text.contains "/second" do
      throw <| IO.userError s!"fuzzCompleteFirstBodyAllowsPipeline case={i} seed={caseSeed} failed:\nMissing second response\n{text.quote}"
    -- Any mismatch in count or order of observed URIs fails the property.
    if seen.size != 2 || seen[0]! != uri1 || seen[1]! != "/second" then
      throw <| IO.userError s!"fuzzCompleteFirstBodyAllowsPipeline case={i} seed={caseSeed} failed:\nExpected seen=[{uri1.quote}, \"/second\"] got {seen}"
-- ============================================================================
-- Run all properties. Seeds are arbitrary fixed hex constants so every run is
-- reproducible; iteration counts are sized to stay within the 20s timeout.
-- ============================================================================
-- Property: Content-Length framing is stable across random payloads and random transport splits.
#eval runWithTimeout "fuzz_content_length_echo" 20000 do
  fuzzContentLengthEcho 40 0x00C0FFEE
-- Property: Content-Length with randomized leading zeros is accepted and decoded to exact body length.
#eval runWithTimeout "fuzz_content_length_leading_zeros_accepted" 20000 do
  fuzzContentLengthLeadingZerosAccepted 30 0x00CAB005
-- Property: Chunked framing reconstructs exact bodies under random chunking and transport splits.
#eval runWithTimeout "fuzz_chunked_echo" 20000 do
  fuzzChunkedEcho 40 0x00123456
-- Property: Mixing Transfer-Encoding with Content-Length is always rejected.
#eval runWithTimeout "fuzz_te_cl_mixed_rejected" 20000 do
  fuzzMixedTransferEncodingAndContentLengthRejected 30 0x0010CE11
-- Property: Invalid chunk-size tokens are rejected deterministically with 400.
#eval runWithTimeout "fuzz_invalid_chunk_size_rejected" 20000 do
  fuzzInvalidChunkSizeRejected 30 0x00BAD001
-- Property: Duplicate Content-Length headers are always rejected (same or different values).
#eval runWithTimeout "fuzz_duplicate_content_length_rejected" 20000 do
  fuzzDuplicateContentLengthRejected 30 0x00D0C1A7
-- Property: Chunk extension name/value limits are enforced under randomized lengths.
#eval runWithTimeout "fuzz_chunk_extension_limits" 20000 do
  fuzzChunkExtensionLimits 40 0x00A11CE5
-- Property: Chunk extension count limit is enforced under randomized extension lists.
#eval runWithTimeout "fuzz_chunk_extension_count_limit" 20000 do
  fuzzChunkExtensionCountLimit 35 0x00E77E11
-- Property: Trailer header count limit is enforced under randomized trailer sections.
#eval runWithTimeout "fuzz_trailer_header_count_limit" 20000 do
  fuzzTrailerHeaderCountLimit 35 0x00A71A12
-- Property: Complete first request body allows pipelined follow-up parsing.
#eval runWithTimeout "fuzz_complete_first_body_allows_pipeline" 20000 do
  fuzzCompleteFirstBodyAllowsPipeline 30 0x00777777

View File

@@ -0,0 +1,462 @@
import Std.Internal.Http
import Std.Internal.Async
open Std.Internal.IO Async
open Std Http
/-!
# Limit-enforcement fuzzing for the HTTP/1.1 server.
Tests that every configurable limit in `H1.Config` and `Server.Config` is
correctly enforced under randomized inputs. Inspired by hyper's fuzzing of
size and count limits.
-/
/-- Handler shape under test: a streaming-body request mapped, in the async
handler context, to a response with an arbitrary body.
(Restores the `→` stripped from the scraped source.) -/
abbrev TestHandler := Request Body.Stream → ContextAsync (Response Body.Any)

instance : Std.Http.Server.Handler TestHandler where
  onRequest handler request := handler request
/-- Close `ch`, treating a second close (`.alreadyClosed`) as success so tests
can close unconditionally. Any other error is re-raised as an `IO.userError`. -/
def closeChannelIdempotent {α : Type} (ch : Std.CloseableChannel α) : IO Unit := do
  match ← EIO.toBaseIO ch.close with
  | .ok _ => pure ()
  | .error .alreadyClosed => pure ()
  | .error err => throw <| IO.userError (toString err)
/-- Feed `raw` to the mock connection, run one server loop over it with
`handler`/`config`, and return everything the server wrote (empty if nothing). -/
def sendRaw
    (client : Mock.Client) (server : Mock.Server) (raw : ByteArray)
    (handler : TestHandler) (config : Config) : IO ByteArray := Async.block do
  client.send raw
  Std.Http.Server.serveConnection server handler config |>.run
  let res ← client.recv?
  pure (res.getD .empty)
/-- Like `sendRaw`, but half-closes the client's send side first so the server
sees EOF after `raw` — needed for requests that rely on connection close. -/
def sendRawAndClose
    (client : Mock.Client) (server : Mock.Server) (raw : ByteArray)
    (handler : TestHandler) (config : Config) : IO ByteArray := Async.block do
  client.send raw
  closeChannelIdempotent client.getSendChan
  Std.Http.Server.serveConnection server handler config |>.run
  let res ← client.recv?
  pure (res.getD .empty)
/-- Run `action` as a task and poll it every 10ms; cancel and fail with a
named timeout error once `timeoutMs` has elapsed without completion. -/
def runWithTimeout {α : Type} (name : String) (timeoutMs : Nat := 20000) (action : IO α) : IO α := do
  let task ← IO.asTask action
  -- Round up so a sub-10ms timeout still gets at least one poll.
  let ticks := (timeoutMs + 9) / 10
  let rec loop (remaining : Nat) : IO α := do
    if (← IO.getTaskState task) == .finished then
      match (← IO.wait task) with
      | .ok x => pure x
      | .error err => throw err
    else
      match remaining with
      | 0 => IO.cancel task; throw <| IO.userError s!"Test '{name}' timed out"
      | n + 1 => IO.sleep 10; loop n
  loop ticks
-- Deterministic PRNG: the classic 32-bit linear congruential generator
-- (Numerical Recipes constants), threaded explicitly through the tests.
def nextSeed (seed : Nat) : Nat := (1664525 * seed + 1013904223) % 4294967296

/-- Draw a value in `[0, n)` (or `0` when `n = 0`), returning it with the
advanced seed. -/
def randBelow (seed : Nat) (n : Nat) : Nat × Nat :=
  let s := nextSeed seed
  if n == 0 then (0, s) else (s % n, s)

/-- Draw a value in the inclusive range `[lo, hi]`; an empty range yields `lo`
with the seed unchanged. -/
def randIn (seed : Nat) (lo hi : Nat) : Nat × Nat :=
  if lo ≤ hi then
    let (offset, s) := randBelow seed (hi - lo + 1)
    (lo + offset, s)
  else
    (lo, seed)
/-- Produce `len` random bytes from the token alphabet `[a-z0-9]`, together
with the advanced seed. -/
def randomTokenBytes (seed : Nat) (len : Nat) : ByteArray × Nat := Id.run do
  let mut state := seed
  let mut acc := ByteArray.empty
  for _ in [0:len] do
    let (draw, state') := randBelow state 36
    state := state'
    -- 0..25 map to 'a'..'z'; 26..35 map to '0'..'9'.
    let code := if draw < 26 then 97 + draw else 48 + (draw - 26)
    acc := acc.push (UInt8.ofNat code)
  (acc, state)
/-- Produce `len` random lowercase ASCII letters (`'a'..'z'`) as bytes,
together with the advanced seed. -/
def randomAsciiBytes (seed : Nat) (len : Nat) : ByteArray × Nat := Id.run do
  let mut state := seed
  let mut acc := ByteArray.empty
  for _ in [0:len] do
    let (draw, state') := randBelow state 26
    state := state'
    acc := acc.push (UInt8.ofNat (97 + draw))
  (acc, state)
/-- Accumulate the big-endian hex digits of `n`; `fuel` bounds the recursion
(16 digits covers any 64-bit value).
(Restores the `→` arrows stripped from the type signature.) -/
private def toHexAux : Nat → Nat → String → String
  | 0, _, acc => acc
  | fuel + 1, n, acc =>
    if n == 0 then acc
    else
      let d := n % 16
      -- '0'..'9' are codepoints 48..57; 'a'..'f' are 97..102 (87 + d).
      let c : Char := if d < 10 then Char.ofNat (48 + d) else Char.ofNat (87 + d)
      toHexAux fuel (n / 16) (String.ofList [c] ++ acc)

/-- Lower-case hexadecimal rendering of `n`, without a prefix (`0` → `"0"`). -/
def natToHex (n : Nat) : String :=
  if n == 0 then "0" else toHexAux 16 n ""
/-- Fail with a descriptive error unless the UTF-8-decoded response starts
with `pfx` (used to check the status line). -/
def assertStatusPrefix (name : String) (response : ByteArray) (pfx : String) : IO Unit := do
  let text := String.fromUTF8! response
  if text.startsWith pfx then
    pure ()
  else
    throw <| IO.userError s!"Test '{name}' failed:\nExpected {pfx.quote}\nGot:\n{text.quote}"

/-- Fail with a descriptive error unless the response decodes to exactly
`expected`. -/
def assertExact (name : String) (response : ByteArray) (expected : String) : IO Unit := do
  let text := String.fromUTF8! response
  unless text == expected do
    throw <| IO.userError s!"Test '{name}' failed:\nExpected:\n{expected.quote}\nGot:\n{text.quote}"
/-- Count non-overlapping occurrences of `needle` in `s`; an empty needle
counts as zero. -/
def countOccurrences (s needle : String) : Nat :=
  if needle.isEmpty then 0
  else (s.splitOn needle).length - 1

/-- The server's canonical 400 response, byte-for-byte, for exact matching. -/
def bad400 : String :=
  "HTTP/1.1 400 Bad Request\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 0\x0d\n\x0d\n"
/-- Handler that reads the entire request body and echoes it back as text. -/
def echoBodyHandler : TestHandler := fun req => do
  let body : String ← req.body.readAll
  Response.ok |>.text body

/-- Handler that ignores the request and answers 200 with body "ok". -/
def okHandler : TestHandler := fun _ => Response.ok |>.text "ok"
-- ============================================================================
-- maxBodySize — Content-Length framing
-- ============================================================================

/-- Property: a Content-Length body at or below `maxBodySize` → 200;
above it → 413 (the handler never needs the excess bytes). -/
def fuzzBodySizeLimitContentLength (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let limit : Nat := 64
  let config : Config := { lingeringTimeout := 1000, generateDate := false, maxBodySize := limit }
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (bodySize, s1) := randIn seed 0 (limit + 20); seed := s1
    let (bodyBytes, s2) := randomAsciiBytes seed bodySize; seed := s2
    let raw :=
      s!"POST /bl-cl-{i} HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: {bodySize}\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
        ++ bodyBytes
    let (client, server) ← Mock.new
    let response ← sendRaw client server raw (fun req => do
      let _body : String ← req.body.readAll; Response.ok |>.text "ok") config
    if bodySize ≤ limit then
      assertStatusPrefix s!"fuzzBodySizeLimitCL iter={i} seed={caseSeed} size={bodySize}" response "HTTP/1.1 200"
    else
      assertStatusPrefix s!"fuzzBodySizeLimitCL iter={i} seed={caseSeed} size={bodySize}" response "HTTP/1.1 413"
-- ============================================================================
-- maxBodySize — chunked framing
-- ============================================================================

/-- Property: chunked bodies whose total size is at or below `maxBodySize`
→ 200; larger totals → 413, regardless of how the bytes split into chunks. -/
def fuzzBodySizeLimitChunked (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let limit : Nat := 64
  let config : Config := { lingeringTimeout := 1000, generateDate := false, maxBodySize := limit }
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    -- Total body size spread across 1-4 chunks.
    let (totalSize, s1) := randIn seed 0 (limit + 16); seed := s1
    let (numChunks, s2) := randIn seed 1 4; seed := s2
    -- Ceiling-divide so the chunk sizes sum to totalSize.
    let chunkSize := (totalSize + numChunks - 1) / numChunks
    let mut chunkedBody := ByteArray.empty
    let mut remaining := totalSize
    for _ in [0:numChunks] do
      if remaining == 0 then break
      let thisChunk := Nat.min chunkSize remaining
      let (chunkBytes, s3) := randomAsciiBytes seed thisChunk; seed := s3
      chunkedBody := chunkedBody ++ s!"{natToHex thisChunk}\x0d\n".toUTF8 ++ chunkBytes ++ "\x0d\n".toUTF8
      remaining := remaining - thisChunk
    chunkedBody := chunkedBody ++ "0\x0d\n\x0d\n".toUTF8
    let raw :=
      s!"POST /bl-ch-{i} HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
        ++ chunkedBody
    let (client, server) ← Mock.new
    let response ← sendRaw client server raw (fun req => do
      let _body : String ← req.body.readAll; Response.ok |>.text "ok") config
    if totalSize ≤ limit then
      assertStatusPrefix s!"fuzzBodySizeLimitChunked iter={i} seed={caseSeed} total={totalSize}" response "HTTP/1.1 200"
    else
      assertStatusPrefix s!"fuzzBodySizeLimitChunked iter={i} seed={caseSeed} total={totalSize}" response "HTTP/1.1 413"
-- ============================================================================
-- maxHeaders — header count limit
-- ============================================================================

/-- Property: header count at or below `maxHeaders` → 200; above it → 431.
(The original comment said 400, but the assertion — and RFC 6585 — use 431
Request Header Fields Too Large.) -/
def fuzzHeaderCountLimit (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let limit : Nat := 5
  let config : Config := { lingeringTimeout := 1000, generateDate := false, maxHeaders := limit }
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    -- Host counts as 1 header, Connection as 1, so extra headers = headerCount - 2.
    let (headerCount, s1) := randIn seed 2 (limit + 4); seed := s1
    let extraCount := headerCount - 2 -- we always add Host + Connection
    let mut extraHeaders := ""
    let mut s := s1
    for j in [0:extraCount] do
      let (nameLen, s2) := randIn s 1 8; s := s2
      let (nameBytes, s3) := randomTokenBytes s nameLen; s := s3
      let name := String.fromUTF8! nameBytes
      extraHeaders := extraHeaders ++ s!"X-Extra-{j}-{name}: value\x0d\n"
    seed := s
    let raw :=
      s!"GET /hc-{i} HTTP/1.1\x0d\nHost: example.com\x0d\n{extraHeaders}Connection: close\x0d\n\x0d\n".toUTF8
    let (client, server) ← Mock.new
    let response ← sendRawAndClose client server raw okHandler config
    -- headerCount includes Host and Connection (always present).
    if headerCount ≤ limit then
      assertStatusPrefix s!"fuzzHeaderCount iter={i} seed={caseSeed} count={headerCount}" response "HTTP/1.1 200"
    else
      assertStatusPrefix s!"fuzzHeaderCount iter={i} seed={caseSeed} count={headerCount}" response "HTTP/1.1 431"
-- ============================================================================
-- maxHeaderBytes — total header bytes limit
-- ============================================================================

/-- Property: headers whose aggregate bytes (name + ": " + value + "\r\n") are
at or below `maxHeaderBytes` are accepted; above it they are rejected with 431.
(The original comment said 400; the assertion uses 431.) -/
def fuzzHeaderTotalBytesLimit (iterations : Nat) (seed0 : Nat) : IO Unit := do
  -- Fixed baseline: "Host: example.com\r\n" + "Connection: close\r\n".
  -- A single large X-Payload header is sized to land on either side of the limit.
  let limit : Nat := 200
  let config : Config := {
    lingeringTimeout := 1000
    generateDate := false
    maxHeaderBytes := limit
    maxHeaderValueLength := limit + 100 -- allow value longer than total limit for testing
  }
  let baseline := ("Host: example.com\x0d\nConnection: close\x0d\n".toUTF8).size
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    -- Each "X-Payload: " header = name(9) + ": "(2) + value + "\r\n"(2) = value + 13.
    let overhead := baseline + 13
    -- Value sizes straddle the boundary (limit - overhead).
    let boundary := if limit > overhead then limit - overhead else 0
    let (valueSize, s1) := randIn seed 0 (boundary + 20); seed := s1
    let (valueBytes, s2) := randomAsciiBytes seed valueSize; seed := s2
    let value := String.fromUTF8! valueBytes
    let raw :=
      s!"GET /hb-{i} HTTP/1.1\x0d\nHost: example.com\x0d\nX-Payload: {value}\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
    let (client, server) ← Mock.new
    let response ← sendRawAndClose client server raw okHandler config
    -- Total bytes = baseline + "X-Payload: " + value + "\r\n".
    let totalBytes := baseline + 9 + 2 + valueSize + 2
    if totalBytes ≤ limit then
      assertStatusPrefix s!"fuzzHeaderBytes iter={i} seed={caseSeed} total={totalBytes}" response "HTTP/1.1 200"
    else
      assertStatusPrefix s!"fuzzHeaderBytes iter={i} seed={caseSeed} total={totalBytes}" response "HTTP/1.1 431"
-- ============================================================================
-- maxRequests — requests per connection
-- ============================================================================

/-- Property: after `maxRequests` requests on one connection the server stops
serving (keep-alive disabled); exactly `min reqCount limit` responses arrive. -/
def fuzzMaxMessagesPerConnection (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let limit : Nat := 3
  let config : Config := {
    lingeringTimeout := 1000
    generateDate := false
    maxRequests := limit
  }
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (reqCount, s1) := randIn seed 1 (limit + 3); seed := s1
    -- Build reqCount keep-alive requests; the final one requests close.
    let mut raw := ByteArray.empty
    for j in [0:reqCount] do
      let connHeader :=
        if j + 1 == reqCount then "Connection: close\x0d\n" else "Connection: keep-alive\x0d\n"
      raw := raw ++ s!"GET /msg-{i}-{j} HTTP/1.1\x0d\nHost: example.com\x0d\n{connHeader}\x0d\n".toUTF8
    let (client, server) ← Mock.new
    let response ← sendRawAndClose client server raw okHandler config
    let text := String.fromUTF8! response
    let seen := countOccurrences text "HTTP/1.1 200"
    let expected := Nat.min reqCount limit
    if seen != expected then
      throw <| IO.userError
        s!"fuzzMaxMessages iter={i} seed={caseSeed} reqCount={reqCount}: expected {expected} responses, got {seen}\n{text.quote}"
-- ============================================================================
-- maxLeadingEmptyLines — leading CRLF before request-line
-- ============================================================================

/-- Property: at most `maxLeadingEmptyLines` bare CRLFs before the
request-line are tolerated (→ 200); more → 400. -/
def fuzzLeadingEmptyLinesLimit (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let limit : Nat := 4
  let config : Config := { lingeringTimeout := 1000, generateDate := false, maxLeadingEmptyLines := limit }
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (lineCount, s1) := randIn seed 0 (limit + 4); seed := s1
    let leadingCRLF := (List.replicate lineCount "\x0d\n").foldl (· ++ ·) ""
    let raw :=
      (leadingCRLF ++ s!"GET /le-{i} HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n").toUTF8
    let (client, server) ← Mock.new
    let response ← sendRawAndClose client server raw okHandler config
    if lineCount ≤ limit then
      assertStatusPrefix s!"fuzzLeadingEmptyLines iter={i} seed={caseSeed} count={lineCount}" response "HTTP/1.1 200"
    else
      assertStatusPrefix s!"fuzzLeadingEmptyLines iter={i} seed={caseSeed} count={lineCount}" response "HTTP/1.1 400"
-- ============================================================================
-- maxSpaceSequence — OWS (optional whitespace) limit
-- ============================================================================

/-- Property: OWS runs (between ':' and a header value) at or below
`maxSpaceSequence` are accepted; longer runs → 400. -/
def fuzzOWSSequenceLimit (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let limit : Nat := 4
  let config : Config := { lingeringTimeout := 1000, generateDate := false, maxSpaceSequence := limit }
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (spaceCount, s1) := randIn seed 0 (limit + 4); seed := s1
    let spaces := String.ofList (List.replicate spaceCount ' ')
    let raw :=
      s!"GET /ows-{i} HTTP/1.1\x0d\nHost: example.com\x0d\nX-OWS:{spaces}value\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
    let (client, server) ← Mock.new
    let response ← sendRawAndClose client server raw okHandler config
    if spaceCount ≤ limit then
      assertStatusPrefix s!"fuzzOWSLimit iter={i} seed={caseSeed} spaces={spaceCount}" response "HTTP/1.1 200"
    else
      assertStatusPrefix s!"fuzzOWSLimit iter={i} seed={caseSeed} spaces={spaceCount}" response "HTTP/1.1 400"
-- ============================================================================
-- maxStartLineLength — request-line length limit
-- ============================================================================

/-- Property: request lines at or below `maxStartLineLength` → 200; longer
lines → 414 (URI too long) or 400, depending on which limit trips first. -/
def fuzzStartLineLengthLimit (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let limit : Nat := 64
  let config : Config := {
    lingeringTimeout := 1000
    generateDate := false
    maxStartLineLength := limit
    maxUriLength := limit
  }
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    -- "GET " (4) + path + " HTTP/1.1\r\n" → path budget is roughly limit - 16.
    let pathBudget := if limit > 16 then limit - 16 else 1
    let (pathLen, s1) := randIn seed 1 (pathBudget + 10); seed := s1
    let path := "/" ++ String.ofList (List.replicate (pathLen - 1) 'a')
    let raw :=
      s!"GET {path} HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
    let (client, server) ← Mock.new
    let response ← sendRawAndClose client server raw okHandler config
    -- start-line = "GET " + path + " HTTP/1.1" (the CRLF is not counted here).
    let lineLen := 4 + pathLen + 11
    if lineLen ≤ limit then
      assertStatusPrefix s!"fuzzStartLineLen iter={i} seed={caseSeed} len={lineLen}" response "HTTP/1.1 200"
    else
      -- Either 414 (URI too long) or 400 is acceptable.
      let text := String.fromUTF8! response
      unless text.startsWith "HTTP/1.1 414" || text.startsWith "HTTP/1.1 400" do
        throw <| IO.userError
          s!"fuzzStartLineLen iter={i} seed={caseSeed} len={lineLen}: expected 414 or 400, got {text.quote}"
-- ============================================================================
-- maxHeaderNameLength and maxHeaderValueLength
-- ============================================================================

/-- Property: header names at or below `maxHeaderNameLength` → 200; longer
names → 400. -/
def fuzzHeaderNameLengthLimit (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let limit : Nat := 16
  let config : Config := { lingeringTimeout := 1000, generateDate := false, maxHeaderNameLength := limit }
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (nameLen, s1) := randIn seed 1 (limit + 8); seed := s1
    let (nameBytes, s2) := randomTokenBytes seed nameLen; seed := s2
    let name := String.fromUTF8! nameBytes
    let raw :=
      s!"GET /hnl-{i} HTTP/1.1\x0d\nHost: example.com\x0d\n{name}: value\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
    let (client, server) ← Mock.new
    let response ← sendRawAndClose client server raw okHandler config
    if nameLen ≤ limit then
      assertStatusPrefix s!"fuzzHeaderNameLen iter={i} seed={caseSeed} len={nameLen}" response "HTTP/1.1 200"
    else
      assertStatusPrefix s!"fuzzHeaderNameLen iter={i} seed={caseSeed} len={nameLen}" response "HTTP/1.1 400"
/-- Property: header values at or below `maxHeaderValueLength` → 200; longer
values → 400. `maxHeaderBytes` is raised so only the value limit can trip. -/
def fuzzHeaderValueLengthLimit (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let limit : Nat := 32
  let config : Config := {
    lingeringTimeout := 1000
    generateDate := false
    maxHeaderValueLength := limit
    maxHeaderBytes := 65536 -- don't let total bytes limit interfere
  }
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (valueLen, s1) := randIn seed 0 (limit + 8); seed := s1
    let (valueBytes, s2) := randomAsciiBytes seed valueLen; seed := s2
    let value := String.fromUTF8! valueBytes
    let raw :=
      s!"GET /hvl-{i} HTTP/1.1\x0d\nHost: example.com\x0d\nX-Long: {value}\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
    let (client, server) ← Mock.new
    let response ← sendRawAndClose client server raw okHandler config
    if valueLen ≤ limit then
      assertStatusPrefix s!"fuzzHeaderValueLen iter={i} seed={caseSeed} len={valueLen}" response "HTTP/1.1 200"
    else
      assertStatusPrefix s!"fuzzHeaderValueLen iter={i} seed={caseSeed} len={valueLen}" response "HTTP/1.1 400"
-- ============================================================================
-- Run all properties. Seeds are arbitrary fixed hex constants so failures are
-- reproducible; each driver gets a 30s watchdog via runWithTimeout.
-- ============================================================================
-- Property: maxBodySize enforced for Content-Length framing.
#eval runWithTimeout "fuzz_body_limit_content_length" 30000 do
  fuzzBodySizeLimitContentLength 50 0x00B0DEC0
-- Property: maxBodySize enforced for chunked framing.
#eval runWithTimeout "fuzz_body_limit_chunked" 30000 do
  fuzzBodySizeLimitChunked 40 0x00C8BE11
-- Property: maxHeaders count limit is enforced.
#eval runWithTimeout "fuzz_header_count_limit" 30000 do
  fuzzHeaderCountLimit 50 0x00AA55AA
-- Property: maxHeaderBytes aggregate limit is enforced.
#eval runWithTimeout "fuzz_header_bytes_limit" 30000 do
  fuzzHeaderTotalBytesLimit 40 0x00FACE77
-- Property: maxMessages per connection closes keep-alive after the configured count.
#eval runWithTimeout "fuzz_max_messages_per_connection" 30000 do
  fuzzMaxMessagesPerConnection 30 0x00123ABC
-- Property: maxLeadingEmptyLines limit is enforced.
#eval runWithTimeout "fuzz_leading_empty_lines_limit" 30000 do
  fuzzLeadingEmptyLinesLimit 50 0x00EEF00D
-- Property: maxSpaceSequence (OWS) limit is enforced.
#eval runWithTimeout "fuzz_ows_sequence_limit" 30000 do
  fuzzOWSSequenceLimit 50 0x00ABE5AB
-- Property: maxStartLineLength / maxUriLength is enforced, returning 414 or 400.
#eval runWithTimeout "fuzz_start_line_length_limit" 30000 do
  fuzzStartLineLengthLimit 50 0x00C0FFEE
-- Property: maxHeaderNameLength is enforced.
#eval runWithTimeout "fuzz_header_name_length_limit" 30000 do
  fuzzHeaderNameLengthLimit 50 0x00DEAD01
-- Property: maxHeaderValueLength is enforced.
#eval runWithTimeout "fuzz_header_value_length_limit" 30000 do
  fuzzHeaderValueLengthLimit 50 0x00BEEF02

View File

@@ -0,0 +1,294 @@
import Std.Internal.Http
import Std.Internal.Async
open Std.Internal.IO Async
open Std Http
/-!
# Random-byte fuzzing for the HTTP/1.1 parser.
Inspired by hyper's `fuzz_h1_req` libFuzzer target. The core property: any byte
sequence fed to the server must be handled without panicking, hanging, or
producing a malformed response. The server must either:
- Send no bytes (connection closed before a complete request arrives), or
- Send a response that starts with "HTTP/1.1 ".
-/
/-- Handler shape under test: a streaming-body request mapped, in the async
handler context, to a response with an arbitrary body.
(Restores the `→` stripped from the scraped source.) -/
abbrev TestHandler := Request Body.Stream → ContextAsync (Response Body.Any)

instance : Std.Http.Server.Handler TestHandler where
  onRequest handler request := handler request

/-- Shared server config: short lingering timeout, no generated Date header. -/
def defaultConfig : Config :=
  { lingeringTimeout := 1000, generateDate := false }
/-- Close `ch`, treating a second close (`.alreadyClosed`) as success so tests
can close unconditionally. Any other error is re-raised as an `IO.userError`. -/
def closeChannelIdempotent {α : Type} (ch : Std.CloseableChannel α) : IO Unit := do
  match ← EIO.toBaseIO ch.close with
  | .ok _ => pure ()
  | .error .alreadyClosed => pure ()
  | .error err => throw <| IO.userError (toString err)
/-- Feed `raw` to the mock connection, half-close the client's send side so the
server sees EOF, run one server loop, and return everything the server wrote. -/
def sendRawAndClose
    (client : Mock.Client) (server : Mock.Server) (raw : ByteArray)
    (handler : TestHandler) (config : Config := defaultConfig) : IO ByteArray := Async.block do
  client.send raw
  closeChannelIdempotent client.getSendChan
  Std.Http.Server.serveConnection server handler config |>.run
  let res ← client.recv?
  pure (res.getD .empty)
/-- Run `action` as a task and poll it every 10ms; cancel and fail with a
named timeout error once `timeoutMs` has elapsed without completion. -/
def runWithTimeout {α : Type} (name : String) (timeoutMs : Nat := 20000) (action : IO α) : IO α := do
  let task ← IO.asTask action
  -- Round up so a sub-10ms timeout still gets at least one poll.
  let ticks := (timeoutMs + 9) / 10
  let rec loop (remaining : Nat) : IO α := do
    if (← IO.getTaskState task) == .finished then
      match (← IO.wait task) with
      | .ok x => pure x
      | .error err => throw err
    else
      match remaining with
      | 0 => IO.cancel task; throw <| IO.userError s!"Test '{name}' timed out"
      | n + 1 => IO.sleep 10; loop n
  loop ticks
-- PRNG: 32-bit linear congruential generator (Numerical Recipes constants);
-- the seed is threaded explicitly so every fuzz case is reproducible.
def nextSeed (seed : Nat) : Nat := (1664525 * seed + 1013904223) % 4294967296
-- Draw in [0, n) (0 when n == 0), returning the value with the advanced seed.
def randBelow (seed : Nat) (n : Nat) : Nat × Nat :=
  let s := nextSeed seed
  (if n == 0 then 0 else s % n, s)
-- Draw in the inclusive range [lo, hi]; an empty range returns lo unchanged.
def randIn (seed : Nat) (lo hi : Nat) : Nat × Nat :=
  if hi < lo then (lo, seed) else
  let (r, s) := randBelow seed (hi - lo + 1)
  (lo + r, s)
/-- Produce `len` uniformly random bytes spanning the full 0-255 range,
together with the advanced seed. -/
def randomFullBytes (seed : Nat) (len : Nat) : ByteArray × Nat := Id.run do
  let mut state := seed
  let mut acc := ByteArray.empty
  for _ in [0:len] do
    let (draw, state') := randBelow state 256
    state := state'
    acc := acc.push (UInt8.ofNat draw)
  (acc, state)
def okHandler : TestHandler := fun _ => Response.ok |>.text "ok"
/-- Core fuzz invariant: the server either sent nothing at all, or its output
begins with the "HTTP/1.1 " status-line prefix. Anything else is a failure;
non-UTF-8 output is reported without decoding. -/
def assertValidHttpOrEmpty (name : String) (response : ByteArray) : IO Unit := do
  if response.size == 0 then
    pure ()
  else
    let pfx := "HTTP/1.1 ".toUTF8
    if response.size >= pfx.size && response.extract 0 pfx.size == pfx then
      pure ()
    else
      -- Show at most 200 bytes; fall back to a placeholder for binary junk.
      let head := response.extract 0 (Nat.min 200 response.size)
      let display :=
        match String.fromUTF8? head with
        | some s => s.quote
        | none => "(non-UTF-8 bytes)"
      throw <| IO.userError
        s!"Test '{name}' failed:\nResponse is neither empty nor valid HTTP/1.1:\n{display}"
/-- Property: any fully-random byte sequence never causes a panic, hang, or
malformed response — the direct analogue of hyper's `fuzz_h1_req` target. -/
def fuzzRandomBytesNoPanic (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (len, s1) := randIn seed 0 96; seed := s1
    let (bytes, s2) := randomFullBytes seed len; seed := s2
    let (client, server) ← Mock.new
    let response ← sendRawAndClose client server bytes okHandler
    assertValidHttpOrEmpty s!"fuzzRandomBytesNoPanic iter={i} seed={caseSeed} len={len}" response
-- Property: flipping a single bit in any valid request must not cause a panic.
def fuzzBitFlipOnValidRequests (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let corpus : Array ByteArray := #[
    "GET / HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8,
    "POST /submit HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 5\x0d\nConnection: close\x0d\n\x0d\nhello".toUTF8,
    "POST /chunked HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8,
    "OPTIONS * HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8,
    "CONNECT example.com:443 HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8,
    "DELETE /resource HTTP/1.1\x0d\nHost: api.example.com\x0d\nAuthorization: Bearer token123\x0d\nConnection: close\x0d\n\x0d\n".toUTF8,
  ]
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (idx, s1) := randBelow seed corpus.size; seed := s1
    let req := corpus[idx]!
    let (pos, s2) := randBelow seed req.size; seed := s2
    let (bit, s3) := randBelow seed 8; seed := s3
    let orig := req[pos]!
    -- Flip exactly one bit at a random byte position.
    let mask : UInt8 := (1 : UInt8) <<< bit.toUInt8
    let flipped := orig ^^^ mask
    let mutated := (req.extract 0 pos).push flipped ++ req.extract (pos + 1) req.size
    let (client, server) ← Mock.new
    let response ← sendRawAndClose client server mutated okHandler
    assertValidHttpOrEmpty
      s!"fuzzBitFlip iter={i} seed={caseSeed} reqIdx={idx} pos={pos} bit={bit}" response
-- Property: truncating a valid request at any byte boundary must not cause a panic.
def fuzzTruncatedRequests (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let corpus : Array ByteArray := #[
    "GET / HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8,
    "POST /data HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 4\x0d\nConnection: close\x0d\n\x0d\ndata".toUTF8,
    "POST /chunked HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n4\x0d\ndata\x0d\n0\x0d\n\x0d\n".toUTF8,
    "HEAD /resource HTTP/1.1\x0d\nHost: example.com\x0d\nAccept: application/json\x0d\nConnection: close\x0d\n\x0d\n".toUTF8,
  ]
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (idx, s1) := randBelow seed corpus.size; seed := s1
    let req := corpus[idx]!
    let (truncLen, s2) := randBelow seed req.size; seed := s2
    let truncated := req.extract 0 truncLen
    let (client, server) ← Mock.new
    let response ← sendRawAndClose client server truncated okHandler
    assertValidHttpOrEmpty
      s!"fuzzTruncated iter={i} seed={caseSeed} reqIdx={idx} truncLen={truncLen}" response
-- Property: a valid HTTP method prefix followed by garbage must not cause a panic.
def fuzzMethodPrefixWithGarbage (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let methods : Array ByteArray := #[
    "GET ".toUTF8, "POST ".toUTF8, "PUT ".toUTF8, "DELETE ".toUTF8,
    "HEAD ".toUTF8, "OPTIONS ".toUTF8, "PATCH ".toUTF8, "CONNECT ".toUTF8,
    "HTTP/1.1 ".toUTF8,
    ByteArray.empty,
  ]
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (mIdx, s1) := randBelow seed methods.size; seed := s1
    let pfx := methods[mIdx]!
    let (gLen, s2) := randIn seed 0 64; seed := s2
    let (garbage, s3) := randomFullBytes seed gLen; seed := s3
    let request := pfx ++ garbage
    let (client, server) ← Mock.new
    let response ← sendRawAndClose client server request okHandler
    assertValidHttpOrEmpty
      s!"fuzzMethodPrefix iter={i} seed={caseSeed} mIdx={mIdx} gLen={gLen}" response
-- Property: high-byte (0x80-0xFF, non-ASCII) sequences must not cause a panic.
def fuzzHighByteValues (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (len, s1) := randIn seed 0 48; seed := s1
    let mut out := ByteArray.empty
    let mut s := s1
    for _ in [0:len] do
      let (r, s') := randBelow s 128; s := s'
      -- Shift the draw into the 0x80..0xFF range.
      out := out.push (UInt8.ofNat (r + 128))
    seed := s
    let (client, server) ← Mock.new
    let response ← sendRawAndClose client server out okHandler
    assertValidHttpOrEmpty s!"fuzzHighBytes iter={i} seed={caseSeed} len={len}" response
-- Property: garbage appended after a complete request must not cause a panic.
def fuzzGarbageAfterCompleteRequest (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let validReq :=
    "GET /check HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (gLen, s1) := randIn seed 0 32; seed := s1
    let (garbage, s2) := randomFullBytes seed gLen; seed := s2
    let request := validReq ++ garbage
    let (client, server) ← Mock.new
    let response ← sendRawAndClose client server request okHandler
    assertValidHttpOrEmpty
      s!"fuzzGarbageAfter iter={i} seed={caseSeed} gLen={gLen}" response
-- Property: every single-byte input is handled safely (all 256 values).
def fuzzSingleByteInputs : IO Unit := do
  for b in List.range 256 do
    let bytes := ByteArray.mk #[b.toUInt8]
    let (client, server) ← Mock.new
    let response ← sendRawAndClose client server bytes okHandler
    assertValidHttpOrEmpty s!"fuzzSingleByte byte={b}" response
-- Property: known attack patterns and real-world malformed inputs are handled safely.
-- This is the Lean analogue of hyper's denial-of-service and smuggling corpus.
def fuzzKnownMaliciousPatterns : IO Unit := do
  let patterns : Array ByteArray := #[
    -- TLS 1.2 client hello prefix
    ByteArray.mk #[0x16, 0x03, 0x01, 0x00, 0xa5, 0x01, 0x00, 0x00],
    -- TLS 1.3 client hello prefix
    ByteArray.mk #[0x16, 0x03, 0x03, 0x00, 0x7c, 0x01, 0x00, 0x00],
    -- HTTP/2 connection preface
    "PRI * HTTP/2.0\x0d\n\x0d\nSM\x0d\n\x0d\n".toUTF8,
    -- Bare LF line endings
    "GET / HTTP/1.1\nHost: example.com\n\n".toUTF8,
    -- CR-only line endings
    "GET / HTTP/1.1\x0dHost: example.com\x0d\x0d".toUTF8,
    -- Null bytes everywhere
    ByteArray.mk #[0x00, 0x00, 0x00, 0x00],
    -- CRLF injection attempt in request-line
    "GET /path%0d%0aInjected: header HTTP/1.1\x0d\nHost: example.com\x0d\n\x0d\n".toUTF8,
    -- Unicode in path (raw multibyte UTF-8)
    "GET /caf\xc3\xa9 HTTP/1.1\x0d\nHost: example.com\x0d\n\x0d\n".toUTF8,
    -- HTTP response sent as a request (should fail, not panic)
    "HTTP/1.1 200 OK\x0d\nContent-Length: 2\x0d\n\x0d\nok".toUTF8,
    -- Request smuggling: CL.TE
    "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 6\x0d\nTransfer-Encoding: chunked\x0d\n\x0d\n0\x0d\n\x0d\nX".toUTF8,
    -- Request smuggling: TE.CL
    "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nContent-Length: 3\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8,
    -- Duplicate chunked coding
    "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked, chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8,
    -- TE with tab (whitespace obfuscation)
    "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding:\x09chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8,
    -- TE with null byte injection
    "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunk\x00ed\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8,
    -- Extremely long method token
    (String.ofList (List.replicate 8192 'A') ++ " / HTTP/1.1\x0d\nHost: h\x0d\n\x0d\n").toUTF8,
    -- SSRF-like absolute-form URI targeting internal host
    "GET http://169.254.169.254/latest/meta-data/ HTTP/1.1\x0d\nHost: example.com\x0d\n\x0d\n".toUTF8,
    -- Chunk size with non-hex chars
    "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\nGG\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8,
    -- Chunk size overflow attempt (16+ hex digits)
    "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\nffffffffffffffff1\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8,
    -- Header with embedded CRLF in value
    "GET / HTTP/1.1\x0d\nHost: example.com\x0d\nX-Inject: foo\x0d\nEvil: injected\x0d\nConnection: close\x0d\n\x0d\n".toUTF8,
    -- Multiple Host headers (smuggling attempt)
    "GET / HTTP/1.1\x0d\nHost: example.com\x0d\nHost: evil.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8,
    -- Absolute-form URI with bad host overrides Host header
    "GET http://evil.internal/steal HTTP/1.1\x0d\nHost: public.example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8,
    -- Folded header (obs-fold, rejected per RFC 9112)
    "GET / HTTP/1.1\x0d\nHost: example.com\x0d\nX-Folded: value\x0d\n continuation\x0d\nConnection: close\x0d\n\x0d\n".toUTF8,
  ]
  for i in [:patterns.size] do
    let pattern := patterns[i]!
    let (client, server) ← Mock.new
    let response ← sendRawAndClose client server pattern okHandler
    assertValidHttpOrEmpty s!"fuzzKnownMalicious pattern={i}" response
-- ============================================================================
-- Run all properties
-- ============================================================================
-- Each driver runs under a 30 s watchdog so a hang fails instead of blocking CI.
-- Property: any random byte sequence is handled safely (core libFuzzer equivalent).
#eval runWithTimeout "fuzz_random_bytes_no_panic" 30000 do
  fuzzRandomBytesNoPanic 200 0x00FACADE
-- Property: single bit mutations on valid requests are handled safely.
#eval runWithTimeout "fuzz_bit_flip_valid_requests" 30000 do
  fuzzBitFlipOnValidRequests 150 0x00B1FF10
-- Property: truncation at any byte boundary is handled safely.
#eval runWithTimeout "fuzz_truncated_requests" 30000 do
  fuzzTruncatedRequests 150 0x00DEAD42
-- Property: HTTP method prefix followed by random garbage is handled safely.
#eval runWithTimeout "fuzz_method_prefix_with_garbage" 30000 do
  fuzzMethodPrefixWithGarbage 100 0x00CA7500
-- Property: high-byte (non-ASCII) sequences are handled safely.
#eval runWithTimeout "fuzz_high_byte_values" 30000 do
  fuzzHighByteValues 120 0x00FF8000
-- Property: garbage appended after a complete request is handled safely.
#eval runWithTimeout "fuzz_garbage_after_complete_request" 30000 do
  fuzzGarbageAfterCompleteRequest 100 0x00A1B2C3
-- Property: every single-byte input is handled safely (all 256 cases).
#eval runWithTimeout "fuzz_single_byte_inputs" 30000 do
  fuzzSingleByteInputs
-- Property: known attack patterns and malformed inputs are handled safely.
#eval runWithTimeout "fuzz_known_malicious_patterns" 30000 do
  fuzzKnownMaliciousPatterns

View File

@@ -0,0 +1,419 @@
import Std.Internal.Http
open Std Http
open Std.Http.Protocol.H1
/-- Fails with a descriptive `IO.userError` when `cond` is false; no-op otherwise. -/
private def ensure (name : String) (cond : Bool) (msg : String) : IO Unit := do
  if cond then
    pure ()
  else
    throw <| IO.userError s!"Test '{name}' failed:\n{msg}"
/-- True when the trace contains a `failed` event. -/
private def hasFailedEvent (events : Array (Event .receiving)) : Bool :=
  events.any fun
    | .failed _ => true
    | _ => false
/-- True when the trace contains a `needMoreData` event. -/
private def hasNeedMoreDataEvent (events : Array (Event .receiving)) : Bool :=
  events.any fun
    | .needMoreData _ => true
    | _ => false
/-- True when the trace contains an `endHeaders` event. -/
private def hasEndHeadersEvent (events : Array (Event .receiving)) : Bool :=
  events.any fun
    | .endHeaders _ => true
    | _ => false
/-- True when the trace contains a `closeBody` event. -/
private def hasCloseBodyEvent (events : Array (Event .receiving)) : Bool :=
  events.any fun
    | .closeBody => true
    | _ => false
/-- True when the trace contains a `continue` event. -/
private def hasContinueEvent (events : Array (Event .receiving)) : Bool :=
  events.any fun
    | .«continue» => true
    | _ => false
/-- Number of `needAnswer` events in the trace. -/
private def countNeedAnswerEvents (events : Array (Event .receiving)) : Nat :=
  events.foldl (init := 0) fun n e =>
    match e with
    | .needAnswer => n + 1
    | _ => n
/-- Number of `failed` events in the trace. -/
private def countFailedEvents (events : Array (Event .receiving)) : Nat :=
  events.foldl (init := 0) fun n e =>
    match e with
    | .failed _ => n + 1
    | _ => n
/-- Concatenates the data of all pulled body chunks in arrival order. -/
private def pulledBodyBytes (chunks : Array PulledChunk) : ByteArray :=
  chunks.foldl (fun acc c => acc ++ c.chunk.data) .empty
/-- Splits `data` into consecutive one-byte `ByteArray` fragments. -/
private def splitEveryByte (data : ByteArray) : Array ByteArray := Id.run do
  let mut pieces : Array ByteArray := #[]
  for idx in [0:data.size] do
    pieces := pieces.push (data.extract idx (idx + 1))
  pieces
/-- Linear congruential PRNG step with 32-bit state. -/
private def nextSeed (seed : Nat) : Nat :=
  (1664525 * seed + 1013904223) % 4294967296
/-- Uniform draw in `[0, maxExclusive)` plus advanced seed; 0 when the range is empty. -/
private def randBelow (seed : Nat) (maxExclusive : Nat) : Nat × Nat :=
  let seed' := nextSeed seed
  if maxExclusive = 0 then
    (0, seed')
  else
    (seed' % maxExclusive, seed')
/-- Uniform draw in the inclusive range `[low, high]`; returns `low` when empty. -/
private def randIn (seed : Nat) (low : Nat) (high : Nat) : Nat × Nat :=
  if high < low then
    (low, seed)
  else
    let (n, seed') := randBelow seed (high - low + 1)
    (low + n, seed')
/-- `len` random bytes drawn from lowercase letters, digits, '-' and '_'. -/
private def randomAsciiBytes (seed : Nat) (len : Nat) : ByteArray × Nat := Id.run do
  let mut s := seed
  let mut out := ByteArray.empty
  for _ in [0:len] do
    let (r, s') := randBelow s 38
    s := s'
    -- 0..25 -> 'a'..'z', 26..35 -> '0'..'9', 36 -> '-', 37 -> '_'
    let code :=
      if r < 26 then 97 + r
      else if r < 36 then 48 + (r - 26)
      else if r = 36 then 45
      else 95
    out := out.push (UInt8.ofNat code)
  (out, s)
/-- Splits `data` into consecutive fragments of random length `1..maxPart`. -/
private def randomSplit (seed : Nat) (data : ByteArray) (maxPart : Nat := 13) : Array ByteArray × Nat := Id.run do
  let mut s := seed
  let mut out : Array ByteArray := #[]
  let mut i := 0
  while i < data.size do
    let remaining := data.size - i
    -- Never cut past the end of the data.
    let upper := Nat.min maxPart remaining
    let (partLen, s') := randIn s 1 upper
    s := s'
    out := out.push (data.extract i (i + partLen))
    i := i + partLen
  (out, s)
/-- Encodes `body` as chunked transfer coding using random chunk sizes of 1..9 bytes. -/
private def randomChunkedPayload (seed : Nat) (body : ByteArray) : ByteArray × Nat := Id.run do
  let mut s := seed
  let mut out := ByteArray.empty
  let mut i := 0
  while i < body.size do
    let remaining := body.size - i
    let upper := Nat.min 9 remaining
    let (chunkLen, s') := randIn s 1 upper
    s := s'
    out := out ++ s!"{chunkLen}\r\n".toUTF8
    out := out ++ body.extract i (i + chunkLen)
    out := out ++ "\r\n".toUTF8
    i := i + chunkLen
  -- Terminating last-chunk plus empty trailer section.
  out := out ++ "0\r\n\r\n".toUTF8
  (out, s)
/-- POST request framed by `Content-Length`, with `body` appended verbatim. -/
private def mkContentLengthRequest (path : String) (body : ByteArray) : ByteArray :=
  s!"POST {path} HTTP/1.1\r\nHost: example.com\r\nContent-Length: {body.size}\r\nConnection: keep-alive\r\n\r\n".toUTF8 ++ body
/-- POST request with a pre-encoded chunked payload appended. -/
private def mkChunkedRequest (path : String) (chunkedPayload : ByteArray) : ByteArray :=
  s!"POST {path} HTTP/1.1\r\nHost: example.com\r\nTransfer-Encoding: chunked\r\nConnection: keep-alive\r\n\r\n".toUTF8 ++ chunkedPayload
/-- Chunked POST request head only, with no body bytes yet. -/
private def mkChunkedHead (path : String) : ByteArray :=
  s!"POST {path} HTTP/1.1\r\nHost: example.com\r\nTransfer-Encoding: chunked\r\nConnection: keep-alive\r\n\r\n".toUTF8
/-- Accumulated observations from incrementally driving a receiving machine. -/
private structure IncrementalTrace where
  -- Final machine state after all parts were fed and drained.
  machine : Machine .receiving
  -- All events emitted across feeds, steps, and body pulls.
  events : Array (Event .receiving) := #[]
  -- Concatenated wire output produced by the machine.
  output : ByteArray := .empty
  -- Body chunks pulled, in arrival order.
  pulled : Array PulledChunk := #[]
/--
Feeds `parts` into a fresh receiving machine one fragment at a time, stepping
after each feed and pulling at most one body chunk per fragment, then performs
a final step and drains the remaining ready body chunks. All events, wire
output, and pulled chunks are collected into an `IncrementalTrace`.
-/
private def runIncrementalReceiving
    (parts : Array ByteArray)
    (config : Protocol.H1.Config := {}) : IncrementalTrace := Id.run do
  let mut machine : Machine .receiving := { config := config }
  let mut events : Array (Event .receiving) := #[]
  let mut output := ByteArray.empty
  let mut pulled : Array PulledChunk := #[]
  for part in parts do
    machine := machine.feed part
    let (machine', step) := machine.step
    machine := machine'
    events := events ++ step.events
    output := output ++ step.output.toByteArray
    -- Pull at most one body chunk per input part to simulate streaming callers.
    -- Guard on buffered bytes to avoid calling into body parsing on an empty buffer.
    if machine.canPullBodyNow && machine.reader.input.remainingBytes > 0 then
      let (machine', pulledNow?) := machine.pullBody
      let (machine', pullEvents) := machine'.takeEvents
      machine := machine'
      if let some pulledNow := pulledNow? then
        pulled := pulled.push pulledNow
      events := events ++ pullEvents
    else
      pure ()
  let (machine', finalStep) := machine.step
  machine := machine'
  events := events ++ finalStep.events
  output := output ++ finalStep.output.toByteArray
  -- After all input has arrived, drain the remaining ready body chunks.
  -- The fuel bound keeps a misbehaving machine from looping forever.
  let mut fuel := 4096
  while fuel > 0 && machine.canPullBodyNow && machine.reader.input.remainingBytes > 0 do
    fuel := fuel - 1
    let (machine', pulledNow?) := machine.pullBody
    let (machine', pullEvents) := machine'.takeEvents
    machine := machine'
    events := events ++ pullEvents
    match pulledNow? with
    | some pulledNow =>
      pulled := pulled.push pulledNow
    | none =>
      break
  return { machine, events, output, pulled }
/--
Asserts that `trace` represents a successful incremental parse: no failure,
headers were seen, the pulled body equals `expectedBody`, and — whenever any
body activity occurred — a `closeBody` event plus a final last chunk.
`expectNeedMoreData` is relaxed for single-part inputs that never stall.
-/
private def assertIncrementalSuccess
    (name : String)
    (trace : IncrementalTrace)
    (expectedBody : ByteArray)
    (expectNeedMoreData : Bool := true) : IO Unit := do
  ensure name (!trace.machine.failed) s!"machine ended failed with events:\n{repr trace.events}"
  ensure name (!hasFailedEvent trace.events) s!"unexpected failure events:\n{repr trace.events}"
  ensure name (hasEndHeadersEvent trace.events) s!"missing endHeaders event:\n{repr trace.events}"
  if expectNeedMoreData then
    ensure name (hasNeedMoreDataEvent trace.events) s!"expected at least one needMoreData event:\n{repr trace.events}"
  else
    pure ()
  let got := pulledBodyBytes trace.pulled
  ensure name (got == expectedBody)
    s!"body mismatch:\nexpected={String.fromUTF8! expectedBody |>.quote}\nactual={String.fromUTF8! got |>.quote}"
  -- An empty expected body can still produce pull signals if empty chunks arrived.
  let expectsPullSignals := expectedBody.size > 0 || trace.pulled.size > 0
  if expectsPullSignals then
    ensure name (hasCloseBodyEvent trace.events) s!"missing closeBody event:\n{repr trace.events}"
    ensure name (trace.pulled.any (·.final)) "expected at least one final pulled chunk"
    ensure name ((trace.pulled.back?.map (·.final)).getD false) "expected last pulled chunk to be final"
  else
    pure ()
/-- Feeds the whole `payload` into a fresh machine and performs a single step. -/
private def runOneStepReceiving
    (payload : ByteArray)
    (config : Protocol.H1.Config := {}) :
    Machine .receiving × StepResult .receiving :=
  let machine0 : Machine .receiving := { config := config }
  (machine0.feed payload).step
/-- Asserts that parsing `payload` fails with exactly the `expected` error. -/
private def assertFailedWith
    (name : String)
    (payload : ByteArray)
    (expected : Error)
    (config : Protocol.H1.Config := {}) : IO Unit := do
  let (machine, step) := runOneStepReceiving payload config
  ensure name (hasFailedEvent step.events) s!"expected failure event, events:\n{repr step.events}"
  ensure name (machine.error == some expected)
    s!"expected error {repr expected}, got {repr machine.error}"
-- Deterministic: one-byte incremental content-length request.
#eval show IO Unit from do
  let body := "hello".toUTF8
  let request := mkContentLengthRequest "/inc-every-byte" body
  let trace := runIncrementalReceiving (splitEveryByte request)
  assertIncrementalSuccess "CL one-byte incremental parse" trace body
-- Deterministic: fragmented chunked request with boundaries through chunk metadata and payload.
#eval show IO Unit from do
  let body := "abcXYZ".toUTF8
  let payload := "3\r\nabc\r\n3\r\nXYZ\r\n0\r\n\r\n".toUTF8
  let request := mkChunkedRequest "/inc-chunked" payload
  -- Hand-picked split points inside the request-line, headers, and chunk framing.
  let parts : Array ByteArray := #[
    request.extract 0 17,
    request.extract 17 39,
    request.extract 39 58,
    request.extract 58 (request.size - 4),
    request.extract (request.size - 4) request.size
  ]
  let trace := runIncrementalReceiving parts
  assertIncrementalSuccess "Chunked fragmented parse" trace body
-- Regression: calling `pullBody` in chunked mode before any chunk-size byte arrives
-- must request more data rather than failing the machine.
#eval show IO Unit from do
  let headOnly := mkChunkedHead "/wait-for-chunk-size"
  let machine0 : Machine .receiving := { config := {} }
  let (machine1, step1) := (machine0.feed headOnly).step
  ensure "Chunked pull on empty input (setup)" (!machine1.failed) s!"unexpected setup failure events:\n{repr step1.events}"
  ensure "Chunked pull on empty input (setup)" (hasEndHeadersEvent step1.events) "expected endHeaders in setup"
  ensure "Chunked pull on empty input (setup)" machine1.canPullBodyNow "expected body state to be pullable"
  -- Pull with zero body bytes buffered: must yield no chunk and no failure.
  let (machine2, pulled?) := machine1.pullBody
  let (machine2, pullEvents) := machine2.takeEvents
  ensure "Chunked pull on empty input" pulled?.isNone "expected no pulled body chunk"
  ensure "Chunked pull on empty input" (!machine2.failed) s!"unexpected machine failure:\n{repr pullEvents}"
  ensure "Chunked pull on empty input" (hasNeedMoreDataEvent pullEvents)
    s!"expected needMoreData after empty pull:\n{repr pullEvents}"
-- Regression: unread buffered input must stay bounded to avoid memory blowups
-- when upper layers stall request-body consumption.
#eval show IO Unit from do
  let cfg : Protocol.H1.Config := {
    maxBodySize := 32
    maxHeaderBytes := 16
    maxStartLineLength := 16
    maxChunkLineLength := 16
  }
  -- The machine may buffer at most the sum of all configured limits;
  -- one byte more must trip the failure path.
  let cap := cfg.maxBodySize + cfg.maxHeaderBytes + cfg.maxStartLineLength + cfg.maxChunkLineLength
  let payload := ByteArray.mk (Array.replicate (cap + 1) (UInt8.ofNat 97))
  let machine0 : Machine .receiving := { config := cfg }
  let machine1 := machine0.feed payload
  ensure "Buffered input cap triggers failure" machine1.failed "expected machine to fail on oversized buffered input"
  ensure "Buffered input cap triggers entityTooLarge" (machine1.error == some .entityTooLarge)
    s!"expected entityTooLarge failure, got {repr machine1.error}"
-- Property-style: randomized content-length body and randomized split boundaries.
#eval show IO Unit from do
  let mut seed : Nat := 0x21436587
  for i in [0:120] do
    let (bodyLen, s1) := randIn seed 0 128
    seed := s1
    let (body, s2) := randomAsciiBytes seed bodyLen
    seed := s2
    let request := mkContentLengthRequest s!"/prop-cl-{i}" body
    let (parts, s3) := randomSplit seed request 11
    seed := s3
    let trace := runIncrementalReceiving parts
    -- A single-part split may never emit needMoreData, so only require it for real splits.
    assertIncrementalSuccess s!"Property CL #{i}" trace body (expectNeedMoreData := parts.size > 1)
-- Property-style: randomized chunked payload and randomized split boundaries.
#eval show IO Unit from do
  let mut seed : Nat := 0x89abcdef
  for i in [0:120] do
    let (bodyLen, s1) := randIn seed 0 128
    seed := s1
    let (body, s2) := randomAsciiBytes seed bodyLen
    seed := s2
    let (payload, s3) := randomChunkedPayload seed body
    seed := s3
    let request := mkChunkedRequest s!"/prop-chunked-{i}" payload
    let (parts, s4) := randomSplit seed request 9
    seed := s4
    let trace := runIncrementalReceiving parts
    assertIncrementalSuccess s!"Property chunked #{i}" trace body (expectNeedMoreData := parts.size > 1)
-- Edge case: unsupported HTTP version in request-line.
#eval show IO Unit from do
  assertFailedWith
    "Unsupported HTTP version"
    "GET / HTTP/2.0\r\nHost: example.com\r\n\r\n".toUTF8
    .unsupportedVersion
-- Edge case: URI length limit must map to uriTooLong.
#eval show IO Unit from do
  let cfg : Protocol.H1.Config := { maxUriLength := 8, maxStartLineLength := 256 }
  assertFailedWith
    "URI too long"
    "GET /this/path/is/way/too/long HTTP/1.1\r\nHost: example.com\r\n\r\n".toUTF8
    .uriTooLong
    cfg
-- Edge case: header count limit exceeded.
#eval show IO Unit from do
  let cfg : Protocol.H1.Config := { maxHeaders := 1 }
  assertFailedWith
    "Too many headers"
    "GET / HTTP/1.1\r\nHost: example.com\r\nX-Test: 1\r\n\r\n".toUTF8
    .tooManyHeaders
    cfg
-- Edge case: aggregate header bytes limit exceeded.
#eval show IO Unit from do
  let cfg : Protocol.H1.Config := { maxHeaderBytes := 8 }
  assertFailedWith
    "Headers too large"
    "GET / HTTP/1.1\r\nHost: example.com\r\n\r\n".toUTF8
    .headersTooLarge
    cfg
-- Edge case: duplicate Host headers rejected.
#eval show IO Unit from do
  assertFailedWith
    "Duplicate Host rejected"
    "GET / HTTP/1.1\r\nHost: a.example\r\nHost: b.example\r\n\r\n".toUTF8
    .badMessage
-- Edge case: mixed Content-Length and Transfer-Encoding rejected.
#eval show IO Unit from do
  assertFailedWith
    "TE and CL mixed"
    "POST / HTTP/1.1\r\nHost: example.com\r\nContent-Length: 4\r\nTransfer-Encoding: chunked\r\n\r\n4\r\nWiki\r\n0\r\n\r\n".toUTF8
    .badMessage
-- Edge case: CONNECT must use authority-form target.
#eval show IO Unit from do
  assertFailedWith
    "CONNECT non-authority target rejected"
    "CONNECT / HTTP/1.1\r\nHost: example.com\r\n\r\n".toUTF8
    .badMessage
-- Edge case: asterisk-form reserved for OPTIONS.
#eval show IO Unit from do
  assertFailedWith
    "GET * rejected"
    "GET * HTTP/1.1\r\nHost: example.com\r\n\r\n".toUTF8
    .badMessage
-- Edge case: Expect: 100-continue pauses reader and can be accepted/rejected.
#eval show IO Unit from do
  let request :=
    "POST /expect HTTP/1.1\r\nHost: example.com\r\nExpect: 100-continue\r\nContent-Length: 4\r\n\r\n".toUTF8
  let machine0 : Machine .receiving := { config := {} }
  let (machine1, step1) := (machine0.feed request).step
  ensure "Expect setup not failed" (!machine1.failed) s!"unexpected failure:\n{repr step1.events}"
  ensure "Expect emitted continue event" (hasContinueEvent step1.events) s!"missing continue event:\n{repr step1.events}"
  ensure "Expect reader paused"
    (match machine1.reader.state with
      | .«continue» _ => true
      | _ => false)
    s!"expected continue state, got {repr machine1.reader.state}"
  -- Accepting resumes body reading with the declared Content-Length.
  let machine2 := machine1.canContinue .«continue»
  ensure "Expect accepted"
    (match machine2.reader.state with
      | .readBody (.fixed 4) => true
      | _ => false)
    s!"expected fixed(4) body state, got {repr machine2.reader.state}"
  -- Rejecting (answering with a final status) closes the reader and keep-alive.
  let machine3 := machine1.canContinue .ok
  ensure "Expect rejected closes reader"
    (match machine3.reader.state with
      | .closed => true
      | _ => false)
    s!"expected closed reader, got {repr machine3.reader.state}"
  ensure "Expect rejected disables keep-alive" (!machine3.keepAlive) "keepAlive should be disabled after reject"
-- Regression: receiving path should emit needAnswer only once per message.
#eval show IO Unit from do
  let payload := "GET / HTTP/1.1\r\nHost: example.com\r\n\r\n".toUTF8
  let (_machine, step) := runOneStepReceiving payload
  ensure "Single needAnswer event" (countNeedAnswerEvents step.events == 1)
    s!"expected exactly one needAnswer event, got events:\n{repr step.events}"
-- Regression: malformed request should emit failed only once.
#eval show IO Unit from do
  let payload := "GET / HTTP/2.0\r\nHost: example.com\r\n\r\n".toUTF8
  let (_machine, step) := runOneStepReceiving payload
  ensure "Single failed event" (countFailedEvents step.events == 1)
    s!"expected exactly one failed event, got events:\n{repr step.events}"

View File

@@ -0,0 +1,202 @@
import Std.Internal.Http
import Std.Internal.Http.Protocol.H1.Parser
open Std Internal Parsec ByteArray
open Std.Http.Protocol.H1
/-- Fails with `IO.userError` when `cond` is false; no-op otherwise. -/
private def ensure (name : String) (cond : Bool) (msg : String) : IO Unit := do
  if cond then
    pure ()
  else
    throw <| IO.userError s!"{name}: {msg}"
/-- Runs parser `p` on the full UTF-8 encoding of `s`, requiring end-of-input. -/
private def runParser (p : Parser α) (s : String) : Except String α :=
  match (p <* eof).run s.toUTF8 with
  | .ok x => .ok x
  | .error e => .error e
/-- Uniform draw in `[0, maxExclusive)`; returns 0 when the range is empty. -/
private def randBelow (gen : StdGen) (maxExclusive : Nat) : Nat × StdGen :=
  if maxExclusive = 0 then
    (0, gen)
  else
    randNat gen 0 (maxExclusive - 1)
/-- Picks a uniformly random element of `xs` (panics on an empty array). -/
private def pick! [Inhabited α] (gen : StdGen) (xs : Array α) : α × StdGen :=
  let (i, gen') := randBelow gen xs.size
  (xs[i]!, gen')
/-- Random token of length `len` over lowercase letters, digits, '-' and '_'. -/
private def randomToken (gen : StdGen) (len : Nat) : String × StdGen := Id.run do
  let mut g := gen
  let mut out := ""
  for _ in [0:len] do
    let (r, g') := randBelow g 38
    g := g'
    -- 0..25 -> 'a'..'z', 26..35 -> '0'..'9', 36 -> '-', 37 -> '_'
    let c :=
      if r < 26 then Char.ofNat (97 + r)
      else if r < 36 then Char.ofNat (48 + (r - 26))
      else if r = 36 then '-'
      else '_'
    out := out.push c
  (out, g)
/-- Random reason-phrase of uppercase letters and spaces, trimmed at the edges. -/
private def randomReason (gen : StdGen) (len : Nat) : String × StdGen := Id.run do
  let mut g := gen
  let mut out := ""
  for _ in [0:len] do
    let (r, g') := randBelow g 30
    g := g'
    let c := if r < 26 then Char.ofNat (65 + r) else ' '
    out := out.push c
  (out.trimAscii.toString, g)
/-- Left-pads `n` with zeros to exactly three digits (for `n < 1000`). -/
private def pad3 (n : Nat) : String :=
  if n >= 100 then s!"{n}"
  else if n >= 10 then s!"0{n}"
  else s!"00{n}"
/-- Expects `parseRequestLine` to accept `s` in full. -/
private def expectRequestOk (s : String) : IO Unit := do
  match runParser (parseRequestLine {}) s with
  | .ok _ => pure ()
  | .error e => throw <| IO.userError s!"expected request-line success for {s.quote}, got: {e}"
/-- Expects `parseRequestLine` to reject `s`. -/
private def expectRequestFail (s : String) : IO Unit := do
  match runParser (parseRequestLine {}) s with
  | .ok _ => throw <| IO.userError s!"expected request-line failure for {s.quote}"
  | .error _ => pure ()
/-- Expects `parseStatusLine` to accept `s` in full. -/
private def expectStatusOk (s : String) : IO Unit := do
  match runParser (parseStatusLine {}) s with
  | .ok _ => pure ()
  | .error e => throw <| IO.userError s!"expected status-line success for {s.quote}, got: {e}"
/-- Expects `parseStatusLine` to reject `s`. -/
private def expectStatusFail (s : String) : IO Unit := do
  match runParser (parseStatusLine {}) s with
  | .ok _ => throw <| IO.userError s!"expected status-line failure for {s.quote}"
  | .error _ => pure ()
/-- Runs `p` on `s`, returning the parse result or failing the test. -/
private def expectOk {α} (name : String) (p : Parser α) (s : String) : IO α := do
  match runParser p s with
  | .ok x => pure x
  | .error e => throw <| IO.userError s!"{name}: expected success for {s.quote}, got {e}"
/-- Runs `p` on `s`, failing the test if it unexpectedly succeeds. -/
private def expectFail {α} (name : String) (p : Parser α) (s : String) : IO Unit := do
  match runParser p s with
  | .ok _ => throw <| IO.userError s!"{name}: expected failure for {s.quote}"
  | .error _ => pure ()
-- Fuzz: every valid method/target/version combination parses; three targeted
-- mutations of each line must all be rejected.
#eval show IO Unit from do
  let methods : Array String := #["GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS", "HEAD", "CONNECT"]
  let targets : Array String := #["/", "/a", "/a/b", "/a/b?q=1", "*", "http://example.com", "example.com:443"]
  let versions : Array String := #["HTTP/1.1", "HTTP/1.0"]
  let mut gen : StdGen := StdGen.mk 0x5eed1234 0x12345
  for i in [0:400] do
    let (m, g1) := pick! gen methods
    let (t, g2) := pick! g1 targets
    let (v, g3) := pick! g2 versions
    gen := g3
    let line := s!"{m} {t} {v}\r\n"
    expectRequestOk line
    -- Mutation 1: drop the first space
    expectRequestFail s!"{m}{t} {v}\r\n"
    -- Mutation 2: invalid version token
    expectRequestFail s!"{m} {t} HTTP/2.0\r\n"
    -- Mutation 3: bad method character
    expectRequestFail s!"{m}! {t} {v}\r\n"
    ensure "request fuzz progress" (i < 100000) "unreachable safety check"
-- Fuzz: valid status-lines parse; version/code/reason mutations must each fail.
#eval show IO Unit from do
  let knownCodes : Array Nat := #[200, 201, 204, 301, 400, 404, 500, 503]
  let mut gen : StdGen := StdGen.mk 0xabcde123 0x777
  for _ in [0:400] do
    let (code, g1) := pick! gen knownCodes
    let (len, g2) := randBelow g1 20
    let (reasonRaw, g3) := randomReason g2 (len + 1)
    gen := g3
    -- Trimming may leave an empty reason; substitute a fixed non-empty one.
    let reason := if reasonRaw.isEmpty then "OK" else reasonRaw
    let line := s!"HTTP/1.1 {pad3 code} {reason}\r\n"
    expectStatusOk line
    -- Mutation 1: unsupported version
    expectStatusFail s!"HTTP/2.0 {pad3 code} {reason}\r\n"
    -- Mutation 2: non-digit in status code
    expectStatusFail s!"HTTP/1.1 A{(pad3 code).drop 1} {reason}\r\n"
    -- Mutation 3: illegal reason byte (DEL)
    expectStatusFail s!"HTTP/1.1 {pad3 code} bad{Char.ofNat 127}\r\n"
#eval show IO Unit from do
  -- Randomized malformed gibberish smoke: parser must simply return error or success,
  -- but never crash/panic.
  let mut gen : StdGen := StdGen.mk 0x31415926 0x27182818
  for _ in [0:300] do
    let (len, g1) := randBelow gen 80
    let (tok, g2) := randomToken g1 (len + 1)
    gen := g2
    -- Results are intentionally discarded; only the absence of panics matters.
    let _ := runParser (parseRequestLine {}) (tok ++ "\r\n")
    let _ := runParser (parseStatusLine {}) (tok ++ "\r\n")
    pure ()
-- Component tests for individual parser parts.
-- (Restores the `←` binds on every `expectOk` result that were lost in transit.)
#eval show IO Unit from do
  -- parseSingleHeader
  let sh1 ← expectOk "parseSingleHeader some" (parseSingleHeader {} <* eof) "Host: x\r\n"
  ensure "parseSingleHeader some present" sh1.isSome "expected some header"
  let sh2 ← expectOk "parseSingleHeader none" (parseSingleHeader {} <* eof) "\r\n"
  ensure "parseSingleHeader none present" sh2.isNone "expected header terminator"
  -- parseChunkSize / parseChunkPartial
  let (n1, ext1) ← expectOk "parseChunkSize bare" (parseChunkSize {} <* eof) "A\r\n"
  ensure "parseChunkSize value" (n1 == 10) "chunk-size mismatch"
  ensure "parseChunkSize ext empty" (ext1.isEmpty) "expected no extensions"
  let (n2, ext2) ← expectOk "parseChunkSize ext" (parseChunkSize {} <* eof) "4;foo=bar;baz=\"qux\"\r\n"
  ensure "parseChunkSize ext value" (n2 == 4) "chunk-size mismatch with ext"
  ensure "parseChunkSize ext count" (ext2.size == 2) "expected 2 extensions"
  let cp1 ← expectOk "parseChunkPartial some" (parseChunkPartial {} <* eof) "4\r\nWiki"
  ensure "parseChunkPartial some isSome" cp1.isSome "expected chunk data"
  ensure "parseChunkPartial some size" ((cp1.map (fun (n, _, _) => n)).getD 0 == 4) "size mismatch"
  let cp0 ← expectOk "parseChunkPartial none" (parseChunkPartial {} <* eof) "0\r\n"
  ensure "parseChunkPartial none isNone" cp0.isNone "expected last-chunk marker"
  -- parseFixedSizeData / parseChunkSizedData
  let fs1 ← expectOk "parseFixedSizeData complete" (parseFixedSizeData 4 <* eof) "Wiki"
  ensure "parseFixedSizeData complete shape"
    (match fs1 with | .complete _ => true | _ => false)
    "expected complete result"
  let fs2 ← expectOk "parseFixedSizeData incomplete" (parseFixedSizeData 4 <* eof) "Wi"
  ensure "parseFixedSizeData incomplete shape"
    (match fs2 with | .incomplete _ 2 => true | _ => false)
    "expected incomplete result with remaining=2"
  let cs1 ← expectOk "parseChunkSizedData complete" (parseChunkSizedData 4 <* eof) "Wiki\r\n"
  ensure "parseChunkSizedData complete shape"
    (match cs1 with | .complete _ => true | _ => false)
    "expected complete chunk-sized result"
  let cs2 ← expectOk "parseChunkSizedData incomplete" (parseChunkSizedData 4 <* eof) "Wi"
  ensure "parseChunkSizedData incomplete shape"
    (match cs2 with | .incomplete _ 2 => true | _ => false)
    "expected incomplete chunk-sized result with remaining=2"
  -- parseTrailers
  let trailers ← expectOk "parseTrailers ok" (parseTrailers {} <* eof) "X-Test: a\r\nY-Test: b\r\n\r\n"
  ensure "parseTrailers count" (trailers.size == 2) "expected 2 trailers"
  expectFail "parseTrailers forbidden" (parseTrailers {} <* eof) "Content-Length: 1\r\n\r\n"
  -- parseRequestLineRawVersion / parseStatusLineRawVersion
  let (m1, _, v1) ← expectOk "parseRequestLineRawVersion" (parseRequestLineRawVersion {} <* eof) "GET / HTTP/1.1\r\n"
  ensure "parseRequestLineRawVersion method" (m1 == Std.Http.Method.get) "method mismatch"
  ensure "parseRequestLineRawVersion version" (v1 == some Std.Http.Version.v11) "expected recognized v11"
  let (_, rv) ← expectOk "parseStatusLineRawVersion" (parseStatusLineRawVersion {} <* eof) "HTTP/1.1 204 No Content\r\n"
  ensure "parseStatusLineRawVersion recognized" (rv == some Std.Http.Version.v11) "expected v11"
  -- parseRequestLine / parseStatusLine failures
  expectFail "parseRequestLine invalid version" (parseRequestLine {} <* eof) "GET / HTTP/2.0\r\n"
  expectFail "parseStatusLine invalid version" (parseStatusLine {} <* eof) "HTTP/2.0 200 OK\r\n"

View File

@@ -0,0 +1,366 @@
import Std.Internal.Http
import Std.Internal.Async
import Std.Internal.Async.Timer
open Std.Internal.IO Async
open Std Http
abbrev TestHandler := Request Body.Stream ContextAsync (Response Body.Any)
/-- Lift a plain `TestHandler` function into the server's `Handler` class. -/
instance : Std.Http.Server.Handler TestHandler where
  onRequest handler request := handler request
/-- Base server config for these tests: short lingering timeout and no `Date`
header, so response bytes are deterministic. -/
def defaultConfig : Config :=
  { lingeringTimeout := 500, generateDate := false }
/-- Run `action` as a background task, polling its state every 10 ms.
If it does not finish within `timeoutMs`, cancel it and throw a descriptive
error naming the test (guards against hangs/infinite loops in server tests). -/
def runWithTimeout {α : Type} (name : String) (timeoutMs : Nat := 2000) (action : IO α) : IO α := do
  let task ← IO.asTask action
  -- Number of 10 ms polling ticks, rounded up.
  let ticks := (timeoutMs + 9) / 10
  let rec loop (remaining : Nat) : IO α := do
    if (← IO.getTaskState task) == .finished then
      match (← IO.wait task) with
      | .ok x => pure x
      | .error err => throw err
    else
      match remaining with
      | 0 =>
        IO.cancel task
        throw <| IO.userError s!"Test '{name}' timed out after {timeoutMs}ms (possible hang/loop)"
      | n + 1 =>
        IO.sleep 10
        loop n
  loop ticks
/-- Send `raw` bytes from the mock client, serve the connection to completion,
then return whatever bytes the client received (`.empty` when nothing). -/
def sendRaw (client : Mock.Client) (server : Mock.Server) (raw : ByteArray)
    (handler : TestHandler) (config : Config := defaultConfig) : IO ByteArray := Async.block do
  client.send raw
  Std.Http.Server.serveConnection server handler config |>.run
  let res ← client.recv?
  pure <| res.getD .empty
/-- `sendRaw` against a fresh mock connection pair, guarded by a 2 s timeout. -/
def sendRawTimed (name : String) (raw : ByteArray)
    (handler : TestHandler) (config : Config := defaultConfig) : IO ByteArray :=
  runWithTimeout name 2000 do
    let (client, server) ← Mock.new
    sendRaw client server raw handler config
/-- Send `raw`, immediately close the client side, then serve the connection.
Exercises server behavior against an already-closed peer; guarded by a 2 s
timeout so a hung server fails the test instead of blocking the suite. -/
def runClosedClientTimed (name : String) (raw : ByteArray)
    (handler : TestHandler) (config : Config := defaultConfig) : IO Unit :=
  runWithTimeout name 2000 do
    Async.block do
      let (client, server) ← Mock.new
      client.send raw
      client.close
      Std.Http.Server.serveConnection server handler config |>.run
/-- Count non-overlapping occurrences of `needle` in `s`; an empty needle
counts as zero occurrences. -/
def countOccurrences (s : String) (needle : String) : Nat :=
  if needle.isEmpty then 0 else (s.splitOn needle).length - 1
/-- Fail with a descriptive error unless the UTF-8-decoded response starts
with `prefix_` (typically an HTTP status line prefix). -/
def assertStatusPrefix (name : String) (response : ByteArray) (prefix_ : String) : IO Unit := do
  let text := String.fromUTF8! response
  unless text.startsWith prefix_ do
    throw <| IO.userError s!"Test '{name}' failed:\nExpected prefix: {prefix_.quote}\nGot:\n{text.quote}"
/-- Fail unless the decoded response contains `needle`.
NOTE(review): core Lean's `String.contains` takes a `Char`; this call passes a
`String`, so it relies on a substring-matching `contains` being in scope from
the test prelude — confirm against the imported modules. -/
def assertContains (name : String) (response : ByteArray) (needle : String) : IO Unit := do
  let text := String.fromUTF8! response
  unless text.contains needle do
    throw <| IO.userError s!"Test '{name}' failed:\nMissing: {needle.quote}\nGot:\n{text.quote}"
/-- Fail if the decoded response contains `needle` (negative assertion). -/
def assertNotContains (name : String) (response : ByteArray) (needle : String) : IO Unit := do
  let text := String.fromUTF8! response
  if text.contains needle then
    throw <| IO.userError s!"Test '{name}' failed:\nDid not expect {needle.quote}\nGot:\n{text.quote}"
/-- Fail unless the decoded response ends with `suffix_`. -/
def assertEndsWith (name : String) (response : ByteArray) (suffix_ : String) : IO Unit := do
  let text := String.fromUTF8! response
  unless text.endsWith suffix_ do
    throw <| IO.userError s!"Test '{name}' failed:\nExpected suffix: {suffix_.quote}\nGot:\n{text.quote}"
/-- Fail unless the response contains exactly `expected` "HTTP/1.1 " status
lines — used to count pipelined responses in a single byte stream. -/
def assertStatusCount (name : String) (response : ByteArray) (expected : Nat) : IO Unit := do
  let text := String.fromUTF8! response
  let got := countOccurrences text "HTTP/1.1 "
  unless got == expected do
    throw <| IO.userError s!"Test '{name}' failed:\nExpected {expected} HTTP responses, got {got}\n{text.quote}"
/-- Build a chunked-encoded body of `n` tiny chunks ("0", "1", …) terminated
by the last-chunk marker. Each chunk-size field is the decimal digit count of
`toString i`, which coincides with hex for the 1–2 digit lengths produced. -/
def onesChunked (n : Nat) : String := Id.run do
  let mut body := ""
  for i in [0:n] do
    body := body ++ s!"{toString i |>.length}\x0d\n{toString i}\x0d\n"
  body ++ "0\x0d\n\x0d\n"
def ignoreHandler : TestHandler := fun _ => Response.ok |>.text "ok"
def uriHandler : TestHandler := fun req => Response.ok |>.text (toString req.line.uri)
/-- Handler that drains the whole request body and echoes it back as text. -/
def echoBodyHandler : TestHandler := fun req => do
  let mut body := ByteArray.empty
  for chunk in req.body do
    body := body ++ chunk.data
  Response.ok |>.text (String.fromUTF8! body)
/-- Handler that reads only the first body chunk and replies with its text,
or "none" when the body is already exhausted. Used to check that leaving the
rest of the body unread does not deadlock the connection. -/
def firstChunkHandler : TestHandler := fun req => do
  let first ← req.body.recv
  let text :=
    match first with
    | some chunk => String.fromUTF8! chunk.data
    | none => "none"
  Response.ok |>.text text
/-- Handler that streams `n` labelled pieces ("piece-0;", "piece-1;", …) from
a background task, closing the outgoing stream when done. -/
def streamPiecesHandler (n : Nat) : TestHandler := fun _ => do
  let outgoing ← Body.mkStream
  background do
    for i in [0:n] do
      outgoing.send <| Chunk.ofByteArray s!"piece-{i};".toUTF8
    outgoing.close
  return Response.ok |>.body outgoing
/-- Handler that streams `n` short chunks ("x0,", "x1,", …) from a background
task; used for stress/backpressure scenarios. -/
def stressResponseHandler (n : Nat) : TestHandler := fun _ => do
  let outgoing ← Body.mkStream
  background do
    for i in [0:n] do
      outgoing.send <| Chunk.ofByteArray s!"x{i},".toUTF8
    outgoing.close
  return Response.ok |>.body outgoing
-- 01: Ignore fixed-size request body and respond immediately.
#eval runWithTimeout "01_ignore_fixed_length_body" 2000 do
  let raw := "POST /fixed HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 10\x0d\nConnection: close\x0d\n\x0d\n0123456789".toUTF8
  let response ← sendRawTimed "01_ignore_fixed_length_body/send" raw ignoreHandler
  assertStatusPrefix "01_ignore_fixed_length_body" response "HTTP/1.1 200"
-- 02: Ignore chunked request body and respond immediately.
#eval runWithTimeout "02_ignore_chunked_body" 2000 do
  let raw := "POST /chunked HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n6\x0d\n world\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRawTimed "02_ignore_chunked_body/send" raw ignoreHandler
  assertStatusPrefix "02_ignore_chunked_body" response "HTTP/1.1 200"
-- 03: Large fixed-size body ignored by handler (regression for stalled body transfer).
#eval runWithTimeout "03_ignore_large_fixed_body" 2000 do
  let body := String.ofList (List.replicate 8192 'A')
  let raw := s!"POST /large HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 8192\x0d\nConnection: close\x0d\n\x0d\n{body}".toUTF8
  let response ← sendRawTimed "03_ignore_large_fixed_body/send" raw ignoreHandler
  assertStatusPrefix "03_ignore_large_fixed_body" response "HTTP/1.1 200"
-- 04: Read full request body and echo it.
#eval runWithTimeout "04_echo_full_body" 2000 do
  let raw := "POST /echo HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 11\x0d\nConnection: close\x0d\n\x0d\nhello world".toUTF8
  let response ← sendRawTimed "04_echo_full_body/send" raw echoBodyHandler
  assertContains "04_echo_full_body" response "hello world"
-- 05: Read only first chunk and reply (should not deadlock connection).
#eval runWithTimeout "05_read_first_chunk_only" 2000 do
  let raw := "POST /first HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 11\x0d\nConnection: close\x0d\n\x0d\nhello world".toUTF8
  let response ← sendRawTimed "05_read_first_chunk_only/send" raw firstChunkHandler
  assertStatusPrefix "05_read_first_chunk_only" response "HTTP/1.1 200"
  assertContains "05_read_first_chunk_only" response "hello world"
-- 06: Stream many response chunks.
#eval runWithTimeout "06_stream_many_response_chunks" 2000 do
  let raw := "GET /stream HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let response ← sendRawTimed "06_stream_many_response_chunks/send" raw (streamPiecesHandler 40)
  assertStatusPrefix "06_stream_many_response_chunks" response "HTTP/1.1 200"
  assertContains "06_stream_many_response_chunks" response "piece-0;"
  assertContains "06_stream_many_response_chunks" response "piece-39;"
-- 07: Stream response with known fixed size.
#eval runWithTimeout "07_stream_known_size" 2000 do
  let raw := "GET /known HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let response ← sendRawTimed "07_stream_known_size/send" raw (fun _ => do
    let outgoing ← Body.mkStream
    outgoing.setKnownSize (some (.fixed 8))
    background do
      outgoing.send <| Chunk.ofByteArray "abcd".toUTF8
      outgoing.send <| Chunk.ofByteArray "efgh".toUTF8
      outgoing.close
    return Response.ok |>.body outgoing)
  assertStatusPrefix "07_stream_known_size" response "HTTP/1.1 200"
  assertContains "07_stream_known_size" response "Content-Length: 8"
  assertContains "07_stream_known_size" response "abcdefgh"
-- 08: Use interestSelector before sending response data.
#eval runWithTimeout "08_interest_selector_gating" 2000 do
  let raw := "GET /interest HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let response ← sendRawTimed "08_interest_selector_gating/send" raw (fun _ => do
    let outgoing ← Body.mkStream
    background do
      let interested ← Selectable.one #[
        .case outgoing.interestSelector pure
      ]
      if interested then
        outgoing.send <| Chunk.ofByteArray "interest-ok".toUTF8
      outgoing.close
    return Response.ok |>.body outgoing)
  assertStatusPrefix "08_interest_selector_gating" response "HTTP/1.1 200"
  assertContains "08_interest_selector_gating" response "interest-ok"
-- 09: Incomplete sends collapse into one payload.
#eval runWithTimeout "09_incomplete_send_collapse" 2000 do
  let raw := "GET /collapse HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let response ← sendRawTimed "09_incomplete_send_collapse/send" raw (fun _ => do
    let outgoing ← Body.mkStream
    background do
      outgoing.send ({ data := "hello ".toUTF8, extensions := #[] } : Chunk) (incomplete := true)
      outgoing.send ({ data := "wor".toUTF8, extensions := #[] } : Chunk) (incomplete := true)
      outgoing.send ({ data := "ld".toUTF8, extensions := #[] } : Chunk)
      outgoing.close
    return Response.ok |>.body outgoing)
  assertStatusPrefix "09_incomplete_send_collapse" response "HTTP/1.1 200"
  assertContains "09_incomplete_send_collapse" response "hello world"
-- 10: Pipeline fixed-body POST then GET.
#eval runWithTimeout "10_pipeline_fixed_then_get" 2000 do
  let raw := ("POST /one HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 5\x0d\n\x0d\nhello" ++
    "GET /two HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n").toUTF8
  let response ← sendRawTimed "10_pipeline_fixed_then_get/send" raw uriHandler
  assertStatusCount "10_pipeline_fixed_then_get" response 2
  assertContains "10_pipeline_fixed_then_get" response "/one"
  assertContains "10_pipeline_fixed_then_get" response "/two"
-- 11: Pipeline chunked-body POST then GET.
#eval runWithTimeout "11_pipeline_chunked_then_get" 2000 do
  let raw := ("POST /chunk HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n" ++
    "GET /two HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n").toUTF8
  let response ← sendRawTimed "11_pipeline_chunked_then_get/send" raw uriHandler
  assertStatusCount "11_pipeline_chunked_then_get" response 2
  assertContains "11_pipeline_chunked_then_get" response "/chunk"
  assertContains "11_pipeline_chunked_then_get" response "/two"
-- 12: Malformed first request should not loop into second.
#eval runWithTimeout "12_malformed_closes_connection" 2000 do
  let raw := ("GET / HTTP/1.1\x0d\nHost: example.com\x0d\nBadHeader value\x0d\n\x0d\n" ++
    "GET /second HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n").toUTF8
  let response ← sendRawTimed "12_malformed_closes_connection/send" raw uriHandler
  assertStatusPrefix "12_malformed_closes_connection" response "HTTP/1.1 400"
  assertStatusCount "12_malformed_closes_connection" response 1
-- 13: Client closes while server is streaming response.
#eval runWithTimeout "13_client_close_while_streaming" 2000 do
  let raw := "GET /close-stream HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  runClosedClientTimed "13_client_close_while_streaming/run" raw (stressResponseHandler 600)
-- 14: Client closes before sending full body.
#eval runWithTimeout "14_client_close_mid_body" 2000 do
  let raw := "POST /mid-body HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 100\x0d\nConnection: close\x0d\n\x0d\nabcde".toUTF8
  runClosedClientTimed "14_client_close_mid_body/run" raw ignoreHandler
-- 15: Handler throws while request body is present.
#eval runWithTimeout "15_handler_throw_unread_body" 2000 do
  let raw := "POST /throw HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 5\x0d\nConnection: close\x0d\n\x0d\nhello".toUTF8
  let response ← sendRawTimed "15_handler_throw_unread_body/send" raw (fun _ => throw <| IO.userError "boom")
  assertStatusPrefix "15_handler_throw_unread_body" response "HTTP/1.1 500"
-- 16: Many tiny chunked request chunks ignored by handler.
#eval runWithTimeout "16_many_tiny_chunked_ignored" 2000 do
  let body := onesChunked 80
  let raw := s!"POST /tiny HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n{body}".toUTF8
  let response ← sendRawTimed "16_many_tiny_chunked_ignored/send" raw ignoreHandler
  assertStatusPrefix "16_many_tiny_chunked_ignored" response "HTTP/1.1 200"
-- 17: Many tiny chunked request chunks consumed and counted.
#eval runWithTimeout "17_many_tiny_chunked_consumed" 2000 do
  let body := onesChunked 25
  let raw := s!"POST /count HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n{body}".toUTF8
  let response ← sendRawTimed "17_many_tiny_chunked_consumed/send" raw (fun req => do
    let mut count := 0
    for _ in req.body do
      count := count + 1
    Response.ok |>.text (toString count))
  assertStatusPrefix "17_many_tiny_chunked_consumed" response "HTTP/1.1 200"
  assertEndsWith "17_many_tiny_chunked_consumed" response "25"
  pure ()
-- 18: Stress response streaming with many chunks and active client.
#eval runWithTimeout "18_stress_streaming_active_client" 2000 do
  let raw := "GET /stress HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let response ← sendRawTimed "18_stress_streaming_active_client/send" raw (stressResponseHandler 120)
  assertStatusPrefix "18_stress_streaming_active_client" response "HTTP/1.1 200"
  assertContains "18_stress_streaming_active_client" response "x0,"
  assertContains "18_stress_streaming_active_client" response "x119,"
-- 19: Pipeline with large unread first body still processes second request.
#eval runWithTimeout "19_pipeline_large_unread_then_get" 2000 do
  let body := String.ofList (List.replicate 5000 'b')
  let raw := (s!"POST /big HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 5000\x0d\n\x0d\n{body}" ++
    "GET /after HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n").toUTF8
  let response ← sendRawTimed "19_pipeline_large_unread_then_get/send" raw uriHandler
  assertStatusCount "19_pipeline_large_unread_then_get" response 2
  assertContains "19_pipeline_large_unread_then_get" response "/big"
  assertContains "19_pipeline_large_unread_then_get" response "/after"
-- 20: Triple pipeline mixed body styles.
#eval runWithTimeout "20_triple_pipeline_mixed" 2000 do
  let raw := ("POST /a HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 4\x0d\n\x0d\ndata" ++
    "POST /b HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\n\x0d\n3\x0d\nhey\x0d\n0\x0d\n\x0d\n" ++
    "GET /c HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n").toUTF8
  let response ← sendRawTimed "20_triple_pipeline_mixed/send" raw uriHandler
  assertStatusCount "20_triple_pipeline_mixed" response 3
  assertContains "20_triple_pipeline_mixed" response "/a"
  assertContains "20_triple_pipeline_mixed" response "/b"
  assertContains "20_triple_pipeline_mixed" response "/c"
-- 21: Slow/incomplete active body transfer must time out (no connection pinning).
#eval runWithTimeout "21_incomplete_slow_post_times_out" 2000 do
  let raw := "POST /slow HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 100\x0d\nConnection: close\x0d\n\x0d\nabcde".toUTF8
  let response ← sendRawTimed
    "21_incomplete_slow_post_times_out/send"
    raw
    (fun req => do
      let _s : String ← req.body.readAll
      Response.ok |>.text "unreachable")
    (config := { lingeringTimeout := 200, generateDate := false })
  assertStatusPrefix "21_incomplete_slow_post_times_out" response "HTTP/1.1 408"
-- 22: Keep-alive + unknown-size stream flushes once first chunk is available.
#eval runWithTimeout "22_keepalive_unknown_size_flushes_early" 3000 do
  Async.block do
    let (client, server) ← Mock.new
    let handler : TestHandler := fun _ => do
      let outgoing ← Body.mkStream
      background do
        outgoing.send <| Chunk.ofByteArray "aaa".toUTF8
        let sleep ← Sleep.mk 300
        sleep.wait
        outgoing.send <| Chunk.ofByteArray "bbb".toUTF8
        outgoing.close
      return Response.ok |>.body outgoing
    background <| (Std.Http.Server.serveConnection server handler {
      lingeringTimeout := 800
      -- Restored `⟨…⟩` around the value/proof pair dropped during extraction.
      keepAliveTimeout := ⟨1500, by decide⟩
      generateDate := false
    }).run
    client.send "GET /stream HTTP/1.1\x0d\nHost: example.com\x0d\n\x0d\n".toUTF8
    let mut early : Option ByteArray := none
    for _ in [0:5] do
      if early.isNone then
        let sleep ← Sleep.mk 40
        sleep.wait
        early ← client.tryRecv?
    let earlyBytes := early.getD ByteArray.empty
    if earlyBytes.isEmpty then
      throw <| IO.userError "Test '22_keepalive_unknown_size_flushes_early' failed:\nExpected early streamed bytes before producer EOF"
    assertContains "22_keepalive_unknown_size_flushes_early header" earlyBytes "Transfer-Encoding: chunked"
    assertContains "22_keepalive_unknown_size_flushes_early first chunk" earlyBytes "aaa"
    assertNotContains "22_keepalive_unknown_size_flushes_early no second chunk yet" earlyBytes "bbb"
    let sleep ← Sleep.mk 420
    sleep.wait
    let later := (← client.tryRecv?).getD ByteArray.empty
    assertContains "22_keepalive_unknown_size_flushes_early second chunk later" later "bbb"
    client.close

View File

@@ -1,4 +1,5 @@
import Std.Internal.Http.Data.Headers
import Std.Internal.Http.Protocol.H1
open Std.Http
open Std.Http.Header
@@ -338,3 +339,42 @@ info: ("connection", "keep-alive,close")
let c : Header.Connection := #["keep-alive", "close"], by native_decide
let (name, value) := Header.Connection.serialize c
return (name.value, value.value)
/-! ## Aggregate header byte limit (maxHeaderBytes) -/
section HeaderByteLimit
open Std.Http.Protocol.H1
-- Helper: feed all bytes at once and run one step, return the machine state.
private def runMachine (raw : String) (cfg : Config) : Machine .receiving :=
  let machine : Machine .receiving := { config := cfg }
  (machine.feed raw.toUTF8).step.fst
-- With a tight limit (35 bytes), two headers whose combined byte count exceeds
-- the limit should cause the machine to fail.
-- "host" (4) + "example.com" (11) + 4 = 19 bytes for the first header.
-- "x-a" (3) + "somevalue1" (10) + 4 = 17 bytes for the second → total 36 > 35.
#guard
  let raw := "GET / HTTP/1.1\r\nhost: example.com\r\nx-a: somevalue1\r\n\r\n"
  let cfg : Config := { maxHeaderBytes := 35 }
  (runMachine raw cfg).failed
-- With a generous limit the same request succeeds (machine is not failed).
#guard
  let raw := "GET / HTTP/1.1\r\nhost: example.com\r\nx-a: somevalue1\r\n\r\n"
  let cfg : Config := { maxHeaderBytes := 100 }
  !(runMachine raw cfg).failed
-- Exactly at the boundary: 19 bytes for host header alone, limit = 19 → ok.
#guard
  let raw := "GET / HTTP/1.1\r\nhost: example.com\r\n\r\n"
  let cfg : Config := { maxHeaderBytes := 19 }
  !(runMachine raw cfg).failed
-- One byte under the two-header total → second header pushes it over.
#guard
  let raw := "GET / HTTP/1.1\r\nhost: example.com\r\nx-a: somevalue1\r\n\r\n"
  let cfg : Config := { maxHeaderBytes := 19 }
  (runMachine raw cfg).failed
end HeaderByteLimit

View File

@@ -0,0 +1,214 @@
import Std.Internal.Http
import Std.Internal.Async
open Std.Internal.IO Async
open Std Http
abbrev TestHandler := Request Body.Stream ContextAsync (Response Body.Any)
/-- Lift a plain `TestHandler` function into the server's `Handler` class. -/
instance : Std.Http.Server.Handler TestHandler where
  onRequest handler request := handler request
/-- Send `raw` from the mock client, serve the connection to completion, and
return the bytes the client received (`.empty` when nothing arrived). -/
def sendRaw
    (client : Mock.Client)
    (server : Mock.Server)
    (raw : ByteArray)
    (handler : TestHandler)
    (config : Config := { lingeringTimeout := 3000, generateDate := false }) : IO ByteArray := Async.block do
  client.send raw
  Std.Http.Server.serveConnection server handler config |>.run
  let res ← client.recv?
  pure <| res.getD .empty
/-- Serve a pipelined raw request string, recording each URI the handler sees.
When `readBody` is true the handler drains each request body; otherwise the
body is left unread. The client's send channel is closed after sending so the
server observes EOF. Returns the raw response bytes and the observed URIs. -/
def runPipelined
    (raw : String)
    (readBody : Bool)
    (config : Config := { lingeringTimeout := 1000, generateDate := false }) : IO (ByteArray × Array String) := Async.block do
  let (client, server) ← Mock.new
  let seenRef ← IO.mkRef (#[] : Array String)
  let handler : TestHandler := fun req => do
    let uri := toString req.line.uri
    seenRef.modify (·.push uri)
    let body ←
      if readBody then
        req.body.readAll
      else
        pure "<ignored>"
    Response.ok |>.text s!"{uri}:{body}"
  client.send raw.toUTF8
  client.getSendChan.close
  Std.Http.Server.serveConnection server handler config |>.run
  let response ← client.recv?
  let seen ← seenRef.get
  pure (response.getD .empty, seen)
/-- Fail unless the decoded response contains `needle`.
NOTE(review): relies on a substring-matching `String.contains` being in scope
(core Lean's takes a `Char`) — confirm against the imported modules. -/
def assertContains (name : String) (response : ByteArray) (needle : String) : IO Unit := do
  let text := String.fromUTF8! response
  unless text.contains needle do
    throw <| IO.userError s!"Test '{name}' failed:\nExpected to contain {needle.quote}\nGot:\n{text.quote}"
/-- Fail if the decoded response contains `needle` (negative assertion). -/
def assertNotContains (name : String) (response : ByteArray) (needle : String) : IO Unit := do
  let text := String.fromUTF8! response
  if text.contains needle then
    throw <| IO.userError s!"Test '{name}' failed:\nDid not expect {needle.quote}\nGot:\n{text.quote}"
/-- Count non-overlapping occurrences of `needle` in `s`; an empty needle
counts as zero occurrences. -/
def countOccurrences (s : String) (needle : String) : Nat :=
  if needle.isEmpty then
    0
  else
    (s.splitOn needle).length - 1
/-- Fail unless the response contains exactly `expected` "HTTP/1.1 " status
lines — counts pipelined responses in a single byte stream. -/
def assertStatusCount (name : String) (response : ByteArray) (expected : Nat) : IO Unit := do
  let text := String.fromUTF8! response
  let got := countOccurrences text "HTTP/1.1 "
  if got != expected then
    throw <| IO.userError s!"Test '{name}' failed:\nExpected {expected} responses, got {got}\n{text.quote}"
/-- Fail unless the handler was invoked exactly `expected` times (by the URIs
recorded in `seen`). -/
def assertSeenCount (name : String) (seen : Array String) (expected : Nat) : IO Unit := do
  if seen.size != expected then
    throw <| IO.userError s!"Test '{name}' failed:\nExpected {expected} handler calls, got {seen.size}: {seen}"
-- Two sequential requests on the same HTTP/1.1 connection.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let req1 := "GET /first HTTP/1.1\x0d\nHost: example.com\x0d\n\x0d\n"
  let req2 := "GET /second HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
  let response ← sendRaw client server (req1 ++ req2).toUTF8 (fun req =>
    Response.ok |>.text (toString req.line.uri))
  assertStatusCount "Two keep-alive responses" response 2
  assertContains "Two keep-alive first" response "/first"
  assertContains "Two keep-alive second" response "/second"
-- Connection: close on first request blocks pipelined second request.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let req1 := "GET /first HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
  let req2 := "GET /second HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
  let response ← sendRaw client server (req1 ++ req2).toUTF8 (fun req =>
    Response.ok |>.text (toString req.line.uri))
  assertStatusCount "Connection close response count" response 1
  assertContains "Connection close first served" response "/first"
  assertNotContains "Connection close second blocked" response "/second"
-- Disabling keep-alive via config forces one response.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let req1 := "GET /1 HTTP/1.1\x0d\nHost: example.com\x0d\n\x0d\n"
  let req2 := "GET /2 HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
  let response ← sendRaw client server (req1 ++ req2).toUTF8
    (fun req => Response.ok |>.text (toString req.line.uri))
    (config := { lingeringTimeout := 3000, enableKeepAlive := false, generateDate := false })
  assertStatusCount "Keep-alive disabled response count" response 1
  assertContains "Keep-alive disabled first served" response "/1"
  assertNotContains "Keep-alive disabled second blocked" response "/2"
-- maxRequests cap enforces hard limit on responses per connection.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let req0 := "GET /0 HTTP/1.1\x0d\nHost: example.com\x0d\n\x0d\n"
  let req1 := "GET /1 HTTP/1.1\x0d\nHost: example.com\x0d\n\x0d\n"
  let req2 := "GET /2 HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
  let response ← sendRaw client server (req0 ++ req1 ++ req2).toUTF8
    (fun req => Response.ok |>.text (toString req.line.uri))
    (config := { lingeringTimeout := 3000, maxRequests := 2, generateDate := false })
  assertStatusCount "maxRequests response count" response 2
  assertContains "maxRequests /0 served" response "/0"
  assertContains "maxRequests /1 served" response "/1"
  assertNotContains "maxRequests /2 blocked" response "/2"
-- Handler that ignores a fixed-size body still allows next keep-alive request.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let req1 := "POST /ignore HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 5\x0d\n\x0d\nhello"
  let req2 := "GET /after HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
  let response ← sendRaw client server (req1 ++ req2).toUTF8 (fun req =>
    Response.ok |>.text (toString req.line.uri))
  assertStatusCount "Unread CL body keep-alive responses" response 2
  assertContains "Unread CL body first" response "/ignore"
  assertContains "Unread CL body second" response "/after"
-- Handler that ignores chunked body still allows next keep-alive request.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let req1 := "POST /chunked HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n"
  let req2 := "GET /next HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
  let response ← sendRaw client server (req1 ++ req2).toUTF8 (fun req =>
    Response.ok |>.text (toString req.line.uri))
  assertStatusCount "Unread chunked body keep-alive responses" response 2
  assertContains "Unread chunked first" response "/chunked"
  assertContains "Unread chunked second" response "/next"
-- Exact first Content-Length allows pipelined second request.
#eval show IO _ from do
  let req1 := "POST /first HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 3\x0d\n\x0d\nabc"
  let req2 := "GET /second HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
  let (response, seen) ← runPipelined (req1 ++ req2) true
  assertStatusCount "Exact CL pipelined responses" response 2
  assertContains "Exact CL first" response "/first"
  assertContains "Exact CL second" response "/second"
  assertSeenCount "Exact CL seen count" seen 2
-- Incomplete first Content-Length blocks pipelined second request.
#eval show IO _ from do
  let req1 := "POST /first HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 10\x0d\n\x0d\nabc"
  let req2 := "GET /second HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
  let (response, seen) ← runPipelined (req1 ++ req2) true
  assertContains "Incomplete CL first served" response "/first"
  assertNotContains "Incomplete CL second blocked" response "/second"
  assertSeenCount "Incomplete CL seen count" seen 1
-- Incomplete first chunked body blocks pipelined second request.
#eval show IO _ from do
  let req1 := "POST /chunked-first HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\n\x0d\nF\x0d\nhel"
  let req2 := "GET /second HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
  let (response, seen) ← runPipelined (req1 ++ req2) true
  assertNotContains "Incomplete chunked second blocked" response "/second"
  if seen.contains "/second" then
    throw <| IO.userError s!"Test 'Incomplete chunked seen list' failed: {seen}"
-- Content-Length: 0 on first request allows immediate second request.
#eval show IO _ from do
  let req1 := "POST /empty HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 0\x0d\n\x0d\n"
  let req2 := "GET /tail HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
  let (response, seen) ← runPipelined (req1 ++ req2) true
  assertStatusCount "CL=0 pipelined responses" response 2
  assertContains "CL=0 first" response "/empty"
  assertContains "CL=0 second" response "/tail"
  assertSeenCount "CL=0 seen count" seen 2
-- Complete chunked first request allows second request.
#eval show IO _ from do
  let req1 := "POST /chunked HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n"
  let req2 := "GET /tail HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
  let (response, seen) ← runPipelined (req1 ++ req2) true
  assertStatusCount "Complete chunked pipelined responses" response 2
  assertContains "Complete chunked first" response "/chunked"
  assertContains "Complete chunked second" response "/tail"
  assertSeenCount "Complete chunked seen count" seen 2

View File

@@ -0,0 +1,493 @@
import Std.Internal.Http
import Std.Internal.Async
open Std.Internal.IO Async
open Std Http
abbrev TestHandler := Request Body.Stream ContextAsync (Response Body.Any)
/-- Lift a plain `TestHandler` function into the server's `Handler` class. -/
instance : Std.Http.Server.Handler TestHandler where
  onRequest handler request := handler request
/-- Send `raw` from the mock client, serve the connection to completion, and
return the bytes the client received (`.empty` when nothing arrived). -/
def sendRaw
    (client : Mock.Client)
    (server : Mock.Server)
    (raw : ByteArray)
    (handler : TestHandler)
    (config : Config := { lingeringTimeout := 1000, generateDate := false }) : IO ByteArray := Async.block do
  client.send raw
  Std.Http.Server.serveConnection server handler config |>.run
  let res ← client.recv?
  pure <| res.getD .empty
/-- Like `sendRaw`, but closes the client's send channel right after sending
so the server observes EOF on the request stream. -/
def sendRawAndClose
    (client : Mock.Client)
    (server : Mock.Server)
    (raw : ByteArray)
    (handler : TestHandler)
    (config : Config := { lingeringTimeout := 1000, generateDate := false }) : IO ByteArray := Async.block do
  client.send raw
  client.getSendChan.close
  Std.Http.Server.serveConnection server handler config |>.run
  let res ← client.recv?
  pure <| res.getD .empty
/-- Fail unless the decoded response starts with the given status-line prefix. -/
def assertStatus (name : String) (response : ByteArray) (status : String) : IO Unit := do
  let responseStr := String.fromUTF8! response
  unless responseStr.startsWith status do
    throw <| IO.userError s!"Test '{name}' failed:\nExpected status: {status}\nGot:\n{responseStr.quote}"
/-- Fail unless the decoded response equals `expected` exactly. -/
def assertExact (name : String) (response : ByteArray) (expected : String) : IO Unit := do
  let responseStr := String.fromUTF8! response
  if responseStr != expected then
    throw <| IO.userError s!"Test '{name}' failed:\nExpected:\n{expected.quote}\nGot:\n{responseStr.quote}"
/-- Fail unless the decoded response contains `needle`.
NOTE(review): relies on a substring-matching `String.contains` being in scope
(core Lean's takes a `Char`) — confirm against the imported modules. -/
def assertContains (name : String) (response : ByteArray) (needle : String) : IO Unit := do
  let responseStr := String.fromUTF8! response
  unless responseStr.contains needle do
    throw <| IO.userError s!"Test '{name}' failed:\nExpected to contain: {needle.quote}\nGot:\n{responseStr.quote}"
/-- The header section of a raw response: everything before the first blank
CRLF line ("" when the response is empty). -/
def headerSection (response : ByteArray) : String :=
  (String.fromUTF8! response).splitOn "\x0d\n\x0d\n" |>.headD ""
/-- Handler that unconditionally replies 200 with body "ok". -/
def okHandler : TestHandler :=
  fun _ => Response.ok |>.text "ok"
/-- Expected raw bytes of a 400 response with an empty body. -/
def bad400 : String :=
  "HTTP/1.1 400 Bad Request\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 0\x0d\n\x0d\n"
/-- Expected raw bytes of a 505 response with an empty body. -/
def bad505 : String :=
  "HTTP/1.1 505 HTTP Version Not Supported\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 0\x0d\n\x0d\n"
/-- Expected raw bytes of a 200 response with body "ok". -/
def ok200 : String :=
  "HTTP/1.1 200 OK\x0d\nContent-Type: text/plain; charset=utf-8\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 2\x0d\n\x0d\nok"
/-- Same as `ok200` but with the body omitted (HEAD-style response). -/
def ok200Head : String :=
  "HTTP/1.1 200 OK\x0d\nContent-Type: text/plain; charset=utf-8\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 2\x0d\n\x0d\n"
/-- Expected raw bytes of a 501 response with an empty body. -/
def notImplemented : String :=
  "HTTP/1.1 501 Not Implemented\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 0\x0d\n\x0d\n"
-- Client-mode response parsing regressions.
-- Exercises the H1 reader state machine directly (no Mock connection):
-- A) status-line + headers, B) headerless status-line, C) 204 body framing.
#eval show IO _ from do
  -- Initialize reader to needStartLine: for .sending machines the reader starts in .pending
  -- and only advances after send/request is issued. Here we test the reader in isolation.
  let machineA : Protocol.H1.Machine .sending := { config := {}, reader := { state := .needStartLine } }
  let rawA := "HTTP/1.1 200 OK\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\n\x0d\n"
  let (machineA, stepA) := (machineA.feed rawA.toUTF8).step
  -- A: no .failed event may be produced for a well-formed response.
  let failedA := stepA.events.any fun
    | .failed _ => true
    | _ => false
  if failedA then
    throw <| IO.userError s!"Test 'Client mode parses response status-line with headers' failed:\nUnexpected failure events: {repr stepA.events}"
  -- A: the header block must be terminated (an .endHeaders event was emitted).
  let endedA := stepA.events.any fun
    | .endHeaders _ => true
    | _ => false
  unless endedA do
    throw <| IO.userError s!"Test 'Client mode parses response status-line with headers' failed:\nMissing endHeaders event: {repr stepA.events}"
  -- A: status and Content-Length must be reflected in the parsed message head.
  unless machineA.reader.messageHead.status == .ok do
    throw <| IO.userError s!"Test 'Client mode parses response status-line with headers' failed:\nUnexpected status: {repr machineA.reader.messageHead.status}"
  unless machineA.reader.messageHead.headers.hasEntry Header.Name.contentLength (Header.Value.ofString! "0") do
    throw <| IO.userError s!"Test 'Client mode parses response status-line with headers' failed:\nMissing Content-Length header in parsed response"
  -- B: a status-line followed immediately by CRLF CRLF (no headers) must parse cleanly.
  let machineB : Protocol.H1.Machine .sending := { config := {}, reader := { state := .needStartLine } }
  let rawB := "HTTP/1.1 204 No Content\x0d\n\x0d\n"
  let (_machineB, stepB) := (machineB.feed rawB.toUTF8).step
  let failedB := stepB.events.any fun
    | .failed _ => true
    | _ => false
  if failedB then
    throw <| IO.userError s!"Test 'Client mode parses headerless response status-line' failed:\nUnexpected failure events: {repr stepB.events}"
  -- B: the input is complete, so the machine must not ask for more data.
  let needMoreB := stepB.events.any fun
    | .needMoreData _ => true
    | _ => false
  if needMoreB then
    throw <| IO.userError s!"Test 'Client mode parses headerless response status-line' failed:\nUnexpected needMoreData event: {repr stepB.events}"
  let endedB := stepB.events.any fun
    | .endHeaders _ => true
    | _ => false
  unless endedB do
    throw <| IO.userError s!"Test 'Client mode parses headerless response status-line' failed:\nMissing endHeaders event: {repr stepB.events}"
  -- C: 204 never has a body, so a Content-Length: 5 must not cause body consumption.
  let machineC : Protocol.H1.Machine .sending := { config := {}, reader := { state := .needStartLine } }
  let rawC := "HTTP/1.1 204 No Content\x0d\nContent-Length: 5\x0d\n\x0d\nHELLO"
  let (machineC, stepC) := (machineC.feed rawC.toUTF8).step
  let failedC := stepC.events.any fun
    | .failed _ => true
    | _ => false
  if failedC then
    throw <| IO.userError s!"Test 'Client mode ignores body framing on 204' failed:\nUnexpected failure events: {repr stepC.events}"
  -- C: any of these terminal states is acceptable; anything else is a framing bug.
  match machineC.reader.state with
  | .readBody (.fixed 0) =>
    pure ()
  | .complete =>
    pure ()
  | .closed =>
    pure ()
  | _ =>
    throw <| IO.userError s!"Test 'Client mode ignores body framing on 204' failed:\nUnexpected body-reading state: {repr machineC.reader.state}"
  -- C: the 5 body bytes ("HELLO") must remain unread in the input buffer.
  unless machineC.reader.input.remainingBytes == 5 do
    throw <| IO.userError s!"Test 'Client mode ignores body framing on 204' failed:\nExpected 5 unread bytes, got {machineC.reader.input.remainingBytes}"
-- Outgoing response framing regressions for bodyless statuses.
-- 304 must preserve an explicit Content-Length; 204 must carry no framing headers.
#eval show IO _ from do
  let request := "GET /cache HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let machine0 : Protocol.H1.Machine .receiving := { config := {} }
  let (machine1, _step1) := (machine0.feed request).step
  -- 304 Not Modified: the explicit Content-Length describes the entity that
  -- would have been sent and must be forwarded untouched (not rewritten to 0).
  let headers304 := Headers.empty.insert Header.Name.contentLength (Header.Value.ofString! "5")
  let machine304 := machine1.send ({ status := .notModified, headers := headers304 } : Response.Head)
  let (_machine304, step304) := machine304.step
  let text304 := String.fromUTF8! step304.output.toByteArray
  unless text304.contains "HTTP/1.1 304 Not Modified" do
    throw <| IO.userError s!"Test '304 preserves explicit Content-Length' failed:\n{text304.quote}"
  unless text304.contains "Content-Length: 5" do
    throw <| IO.userError s!"Test '304 preserves explicit Content-Length' failed:\n{text304.quote}"
  if text304.contains "Content-Length: 0" then
    throw <| IO.userError s!"Test '304 preserves explicit Content-Length' failed:\nUnexpected rewritten Content-Length: {text304.quote}"
  -- 204 No Content: framing headers must be stripped from the serialized head.
  let headers204 := Headers.empty
    |>.insert Header.Name.contentLength (Header.Value.ofString! "9")
  let machine204 := machine1.send ({ status := .noContent, headers := headers204 } : Response.Head)
  let (_machine204, step204) := machine204.step
  let text204 := String.fromUTF8! step204.output.toByteArray
  unless step204.output.size > 0 do
    throw <| IO.userError "Test '204 strips framing headers' failed:\nExpected serialized response output"
  unless text204.contains "HTTP/1.1 204 No Content" do
    throw <| IO.userError s!"Test '204 strips framing headers' failed:\n{text204.quote}"
  -- Either framing header appearing on a 204 is a failure (boolean `||` was
  -- lost in transit between the two `contains` calls).
  if text204.contains "Content-Length:" || text204.contains "Transfer-Encoding:" then
    throw <| IO.userError s!"Test '204 strips framing headers' failed:\nUnexpected framing headers:\n{text204.quote}"
-- Host header rules.
-- Host header rules: missing/duplicate Host → 400; empty Host and
-- absolute-form authority are accepted. (Restored the `←` binds on IO actions.)
#eval show IO _ from do
  let (clientA, serverA) ← Mock.new
  let missingHost := "GET / HTTP/1.1\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseA ← sendRaw clientA serverA missingHost okHandler
  assertExact "Missing Host header" responseA bad400
  let (clientB, serverB) ← Mock.new
  let emptyHost := "GET / HTTP/1.1\x0d\nHost: \x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB emptyHost okHandler
  -- RFC 9110 §7.2: for origin-form URIs (no authority), an empty Host field value is valid.
  assertExact "Empty Host header allowed" responseB ok200
  let (clientC, serverC) ← Mock.new
  let multiHost := "GET / HTTP/1.1\x0d\nHost: example.com\x0d\nHost: other.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseC ← sendRaw clientC serverC multiHost okHandler
  assertExact "Multiple Host headers" responseC bad400
  let (clientD, serverD) ← Mock.new
  let absoluteIgnoresHost := "GET http://good.example/path HTTP/1.1\x0d\nHost: good.example\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseD ← sendRaw clientD serverD absoluteIgnoresHost okHandler
  assertExact "Absolute-form authority takes precedence over Host" responseD ok200
-- HTTP version handling.
-- Unsupported protocol versions are refused with 505.
#eval show IO _ from do
  let (clientA, serverA) ← Mock.new
  let rawA := "GET / HTTP/2.0\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseA ← sendRaw clientA serverA rawA okHandler
  assertExact "HTTP/2.0 rejected" responseA bad505
-- Request-line parsing failures.
-- Malformed request-lines → 400; a single leading CRLF is tolerated
-- (RFC 9112 §2.2); an empty connection yields no response bytes.
#eval show IO _ from do
  let (clientA, serverA) ← Mock.new
  let missingVersion := "GET /\x0d\nHost: example.com\x0d\n\x0d\n".toUTF8
  let responseA ← sendRaw clientA serverA missingVersion okHandler
  assertExact "Missing version in request-line" responseA bad400
  let (clientB, serverB) ← Mock.new
  let missingUri := "GET HTTP/1.1\x0d\nHost: example.com\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB missingUri okHandler
  assertExact "Missing URI in request-line" responseB bad400
  let (clientC, serverC) ← Mock.new
  let extraSpaces := "GET  /  HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseC ← sendRaw clientC serverC extraSpaces okHandler
  assertExact "Extra spaces in request-line" responseC bad400
  let (clientD, serverD) ← Mock.new
  let emptyLine := "\x0d\n\x0d\n".toUTF8
  let responseD ← sendRaw clientD serverD emptyLine okHandler
  assertExact "Empty request-line" responseD ""
  let (clientE, serverE) ← Mock.new
  let whitespaceOnly := " \x0d\n\x0d\n".toUTF8
  let responseE ← sendRaw clientE serverE whitespaceOnly okHandler
  assertExact "Whitespace-only request-line" responseE bad400
  let (clientF, serverF) ← Mock.new
  let noSpaces := "GETHTTP/1.1\x0d\nHost: example.com\x0d\n\x0d\n".toUTF8
  let responseF ← sendRaw clientF serverF noSpaces okHandler
  assertExact "No spaces in request-line" responseF bad400
  let (clientG, serverG) ← Mock.new
  let leadingCRLF := "\x0d\nGET / HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseG ← sendRaw clientG serverG leadingCRLF okHandler
  assertExact "Leading CRLF before request-line" responseG ok200
  let (clientH, serverH) ← Mock.new
  let garbageAfterVersion := "GET / HTTP/1.1 xxxxxx\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseH ← sendRaw clientH serverH garbageAfterVersion okHandler
  assertExact "Garbage after request-line version" responseH bad400
-- Method rules. Only IANA-registered methods are accepted; all others return 400.
-- Only IANA-registered methods are accepted; anything else → 400.
#eval show IO _ from do
  let (clientA, serverA) ← Mock.new
  let invalidMethod := "FOOBAR / HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseA ← sendRaw clientA serverA invalidMethod okHandler
  assertExact "Unknown method rejected" responseA bad400
  let (clientB, serverB) ← Mock.new
  let lowercaseMethod := "get / HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB lowercaseMethod okHandler
  assertExact "Lowercase method rejected" responseB bad400
  let (clientC, serverC) ← Mock.new
  let nonAsciiMethod := "GÉT / HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseC ← sendRaw clientC serverC nonAsciiMethod okHandler
  assertExact "Non-ASCII method rejected" responseC bad400
  let (clientD, serverD) ← Mock.new
  -- A syntactically valid but unregistered long token is still refused.
  let longMethod := String.ofList (List.replicate 20 'G')
  let rawD := s!"{longMethod} / HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseD ← sendRaw clientD serverD rawD okHandler
  assertExact "Long unrecognized method rejected" responseD bad400
  let (clientE, serverE) ← Mock.new
  let tokenMethod := "X-CUSTOM / HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseE ← sendRaw clientE serverE tokenMethod okHandler
  assertExact "Token method with hyphen rejected" responseE bad400
-- HEAD framing and authority-form rules.
-- HEAD omits body bytes; authority-form is CONNECT-only; asterisk-form is
-- OPTIONS-only.
#eval show IO _ from do
  let (clientA, serverA) ← Mock.new
  let headReq := "HEAD / HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseA ← sendRaw clientA serverA headReq okHandler
  assertExact "HEAD omits body bytes" responseA ok200Head
  let (clientB, serverB) ← Mock.new
  let badAuthority := "GET example.com:443 HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB badAuthority okHandler
  assertExact "GET authority-form rejected" responseB bad400
  let (clientC, serverC) ← Mock.new
  let okAuthority := "CONNECT example.com:443 HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseC ← sendRaw clientC serverC okAuthority okHandler
  assertExact "CONNECT authority-form accepted" responseC ok200
  let (clientD, serverD) ← Mock.new
  -- The authority's port must agree with the Host header's (implicit) port.
  let badAuthorityPort := "CONNECT example.com:444 HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseD ← sendRaw clientD serverD badAuthorityPort okHandler
  assertExact "CONNECT authority-form non-default port mismatch rejected" responseD bad400
  let (clientE, serverE) ← Mock.new
  let getAsterisk := "GET * HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseE ← sendRaw clientE serverE getAsterisk okHandler
  assertExact "Asterisk-form rejected for non-OPTIONS" responseE bad400
  let (clientF, serverF) ← Mock.new
  let optionsAsterisk := "OPTIONS * HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseF ← sendRaw clientF serverF optionsAsterisk okHandler
  assertExact "Asterisk-form accepted for OPTIONS" responseF ok200
-- h11-inspired: GET and HEAD should use the same framing headers.
-- h11-inspired: GET and HEAD must serialize identical framing headers, but
-- only GET carries the body bytes.
#eval show IO _ from do
  let handler : TestHandler := fun _ => Response.ok |>.text "hello"
  let (clientA, serverA) ← Mock.new
  let getReq := "GET /frame HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let getResponse ← sendRaw clientA serverA getReq handler
  let (clientB, serverB) ← Mock.new
  let headReq := "HEAD /frame HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let headResponse ← sendRaw clientB serverB headReq handler
  let getHeaders := headerSection getResponse
  let headHeaders := headerSection headResponse
  if getHeaders != headHeaders then
    throw <| IO.userError s!"Test 'HEAD framing headers parity' failed:\nGET headers:\n{getHeaders.quote}\nHEAD headers:\n{headHeaders.quote}"
  assertContains "GET framing body present" getResponse "hello"
  let headText := String.fromUTF8! headResponse
  if headText.contains "hello" then
    throw <| IO.userError s!"Test 'HEAD framing body omitted' failed:\n{headText.quote}"
-- Header syntax and byte-level validation.
-- Header syntax and byte-level validation: colonless fields, obs-fold,
-- NUL/control bytes, and spaces in names all yield 400.
#eval show IO _ from do
  let (clientA, serverA) ← Mock.new
  let noColon := "GET / HTTP/1.1\x0d\nHost: example.com\x0d\nBadHeader value\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseA ← sendRaw clientA serverA noColon okHandler
  assertExact "Header without colon" responseA bad400
  let (clientB, serverB) ← Mock.new
  let obsFold := "GET / HTTP/1.1\x0d\nHost: example.com\x0d\n X-Bad: folded\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB obsFold okHandler
  assertExact "Leading whitespace header" responseB bad400
  -- Raw NUL/control bytes cannot appear in a Lean string literal position here,
  -- so they are spliced in as ByteArray segments.
  let (clientC, serverC) ← Mock.new
  let beforeName := "GET / HTTP/1.1\x0d\nHost: example.com\x0d\nX-Bad".toUTF8
  let afterName := "Header: value\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseC ← sendRaw clientC serverC (beforeName ++ ByteArray.mk #[0] ++ afterName) okHandler
  assertExact "NUL in header name" responseC bad400
  let (clientD, serverD) ← Mock.new
  let beforeValue := "GET / HTTP/1.1\x0d\nHost: example.com\x0d\nX-Header: bad".toUTF8
  let afterValue := "value\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseD ← sendRaw clientD serverD (beforeValue ++ ByteArray.mk #[0] ++ afterValue) okHandler
  assertExact "NUL in header value" responseD bad400
  let (clientE, serverE) ← Mock.new
  let beforeCtrl := "GET / HTTP/1.1\x0d\nHost: example.com\x0d\nX-Header: bad".toUTF8
  let afterCtrl := "value\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseE ← sendRaw clientE serverE (beforeCtrl ++ ByteArray.mk #[0x01] ++ afterCtrl) okHandler
  assertExact "Control char in header value" responseE bad400
  let (clientF, serverF) ← Mock.new
  let spaceInName := "GET / HTTP/1.1\x0d\nHost: example.com\x0d\nBad Header: value\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseF ← sendRaw clientF serverF spaceInName okHandler
  assertExact "Space in header name" responseF bad400
-- Lenient-but-supported parsing behavior.
-- Lenient-but-supported parsing: bare LF is rejected, while tabs in values,
-- absolute-form URIs, and extra colons in values are accepted.
#eval show IO _ from do
  let (clientA, serverA) ← Mock.new
  let bareLF := "GET / HTTP/1.1\nHost: example.com\nConnection: close\n\n".toUTF8
  let responseA ← sendRaw clientA serverA bareLF okHandler
  assertExact "Bare LF line endings accepted" responseA bad400
  let (clientB, serverB) ← Mock.new
  -- The "injected" line is a legitimate separate header here, not smuggling.
  let splitHeaders := "GET / HTTP/1.1\x0d\nHost: example.com\x0d\nX-Inject: value\x0d\nEvil: injected\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB splitHeaders okHandler
  assertExact "CRLF split into two headers" responseB ok200
  let (clientC, serverC) ← Mock.new
  let tabValue := "GET / HTTP/1.1\x0d\nHost: example.com\x0d\nX-Tab: value\twith\ttabs\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseC ← sendRaw clientC serverC tabValue okHandler
  assertExact "Tab in header value accepted" responseC ok200
  let (clientD, serverD) ← Mock.new
  let absoluteUri := "GET http://example.com/path HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseD ← sendRaw clientD serverD absoluteUri okHandler
  assertExact "Absolute-form URI accepted" responseD ok200
  let (clientE, serverE) ← Mock.new
  let colonValue := "GET / HTTP/1.1\x0d\nHost: example.com\x0d\nBad:Name: value\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseE ← sendRaw clientE serverE colonValue okHandler
  assertExact "Additional colon remains in value" responseE ok200
-- Content-Length validation.
-- Content-Length validation: non-numeric, negative, and conflicting
-- duplicate values are all rejected with 400 (RFC 9112 §6.3).
#eval show IO _ from do
  let (clientA, serverA) ← Mock.new
  let nonNumeric := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: abc\x0d\nConnection: close\x0d\n\x0d\nbody".toUTF8
  let responseA ← sendRaw clientA serverA nonNumeric okHandler
  assertExact "Non-numeric Content-Length" responseA bad400
  let (clientB, serverB) ← Mock.new
  let negative := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: -1\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB negative okHandler
  assertExact "Negative Content-Length" responseB bad400
  let (clientC, serverC) ← Mock.new
  let duplicateCl := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 5\x0d\nContent-Length: 10\x0d\nConnection: close\x0d\n\x0d\nhello".toUTF8
  let responseC ← sendRaw clientC serverC duplicateCl okHandler
  assertExact "Duplicate Content-Length mismatch" responseC bad400
-- Transfer-Encoding normalization.
-- Transfer-Encoding normalization: case-insensitive codings, trailing OWS,
-- duplicate chunked rejection, and visibility of coding chains to handlers.
#eval show IO _ from do
  let (clientA, serverA) ← Mock.new
  let mixedCase := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: Chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseA ← sendRaw clientA serverA mixedCase (fun req => do
    let body : String ← req.body.readAll
    Response.ok |>.text body)
  assertStatus "Mixed-case chunked accepted" responseA "HTTP/1.1 200"
  assertContains "Mixed-case chunked body" responseA "hello"
  let (clientB, serverB) ← Mock.new
  let teOWS := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked \x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB teOWS (fun req => do
    let body : String ← req.body.readAll
    Response.ok |>.text body)
  assertStatus "Transfer-Encoding trailing OWS accepted" responseB "HTTP/1.1 200"
  assertContains "Transfer-Encoding trailing OWS body" responseB "hello"
  let (clientC, serverC) ← Mock.new
  let doubleChunked := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked, chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseC ← sendRaw clientC serverC doubleChunked okHandler
  assertExact "Double chunked rejected" responseC bad400
  let (clientD, serverD) ← Mock.new
  -- Unknown coding before chunked is tolerated; the raw chain is surfaced
  -- to the handler through the request headers.
  let unsupportedTe := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: gzip, chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseD ← sendRaw clientD serverD unsupportedTe (fun req => do
    let seenTe := match req.line.headers.getAll? Header.Name.transferEncoding with
      | some values => String.intercalate "|" (values.map (·.value) |>.toList)
      | none => "<none>"
    Response.ok |>.text seenTe)
  assertStatus "Chunked transfer-coding chain accepted" responseD "HTTP/1.1 200"
  assertContains "Transfer-coding chain is visible to handler" responseD "gzip, chunked"
-- Size limits.
-- Size limits: over-long header names → 400; URI length is enforced against
-- maxStartLineLength, at-limit accepted, over-limit → 414.
#eval show IO _ from do
  let (clientA, serverA) ← Mock.new
  let longName := String.ofList (List.replicate 257 'X')
  let rawA := s!"GET / HTTP/1.1\x0d\nHost: example.com\x0d\n{longName}: value\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseA ← sendRaw clientA serverA rawA okHandler
  assertExact "Header name too long" responseA bad400
  -- Raise the start-line limit so the boundary can be probed precisely.
  let cfg : Config := {
    lingeringTimeout := 1000
    generateDate := false
    maxStartLineLength := 16384
  }
  let (clientB, serverB) ← Mock.new
  let segment := String.ofList (List.replicate 255 'a')
  let maxUri := "/" ++ String.intercalate "/" (List.replicate 32 segment)
  let rawB := s!"GET {maxUri} HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB rawB okHandler (config := cfg)
  assertExact "URI at limit accepted" responseB ok200
  let (clientC, serverC) ← Mock.new
  let longUri := maxUri ++ "/x"
  let rawC := s!"GET {longUri} HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseC ← sendRaw clientC serverC rawC okHandler (config := cfg)
  assertStatus "URI too long returns 414" responseC "HTTP/1.1 414"
-- Empty connection closes silently (no response bytes).
-- An empty connection closes silently: the server writes no response bytes.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let response ← sendRawAndClose client server ByteArray.empty okHandler
  assert! response.size == 0
-- h11-inspired: early invalid-byte detection before CRLF.
-- h11-inspired early rejection: invalid first bytes (NUL, SP, or a TLS
-- ClientHello prefix) are refused with 400 before a full CRLF arrives.
#eval show IO _ from do
  let (clientA, serverA) ← Mock.new
  let responseA ← sendRawAndClose clientA serverA (ByteArray.mk #[0x00]) okHandler
  assertExact "Early invalid NUL" responseA bad400
  let (clientB, serverB) ← Mock.new
  let responseB ← sendRawAndClose clientB serverB (ByteArray.mk #[0x20]) okHandler
  assertExact "Early invalid SP" responseB bad400
  let (clientC, serverC) ← Mock.new
  let responseC ← sendRawAndClose clientC serverC (ByteArray.mk #[0x16, 0x03, 0x01, 0x00, 0xa5]) okHandler
  assertExact "Early invalid TLS prefix" responseC bad400

View File

@@ -0,0 +1,174 @@
import Std.Internal.Http
import Std.Internal.Async
open Std.Internal.IO Async
open Std Http
abbrev TestHandler := Request Body.Stream ContextAsync (Response Body.Any)
/-- Let a bare `TestHandler` function act as a server handler by delegating
`onRequest` directly to the function itself. -/
instance : Std.Http.Server.Handler TestHandler where
  onRequest f req := f req
/--
Drives one mock connection end-to-end: queues `raw` as the client's input,
runs the server connection loop to completion, and returns the bytes the
server wrote back (empty if it produced none).
-/
def sendRaw
    (client : Mock.Client)
    (server : Mock.Server)
    (raw : ByteArray)
    (handler : TestHandler)
    (config : Config := { lingeringTimeout := 1000, generateDate := false }) : IO ByteArray := Async.block do
  client.send raw
  Std.Http.Server.serveConnection server handler config
    |>.run
  -- `recv?` is an IO action and must be bound with `←` (restored).
  let res ← client.recv?
  pure <| res.getD .empty
/-- Throws an `IO.userError` unless the decoded response starts with `status`. -/
def assertStatus (name : String) (response : ByteArray) (status : String) : IO Unit := do
  let decoded := String.fromUTF8! response
  if decoded.startsWith status then
    pure ()
  else
    throw <| IO.userError s!"Test '{name}' failed:\nExpected {status}\nGot:\n{decoded.quote}"
/-- Throws an `IO.userError` unless the decoded response equals `expected` exactly. -/
def assertExact (name : String) (response : ByteArray) (expected : String) : IO Unit := do
  let actual := String.fromUTF8! response
  unless actual == expected do
    throw <| IO.userError s!"Test '{name}' failed:\nExpected:\n{expected.quote}\nGot:\n{actual.quote}"
/-- Throws an `IO.userError` unless the decoded response contains `needle`. -/
def assertContains (name : String) (response : ByteArray) (needle : String) : IO Unit := do
  let haystack := String.fromUTF8! response
  if haystack.contains needle then
    pure ()
  else
    throw <| IO.userError s!"Test '{name}' failed:\nMissing {needle.quote}\nGot:\n{haystack.quote}"
/-- Reads the entire request body and echoes it back as a `200 OK` text
response. `readAll` is an IO action, so it is bound with `←` (restored). -/
def echoHandler : TestHandler :=
  fun req => do
    let body : String ← req.body.readAll
    Response.ok |>.text body
/-- Minimal handler: ignores the request and answers `200 OK` with body `"ok"`. -/
def okHandler : TestHandler := fun _ =>
  (Response.ok).text "ok"
/-- Canonical 400 Bad Request wire response emitted for malformed requests. -/
def bad400 : String :=
  "HTTP/1.1 400 Bad Request\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 0\x0d\n\x0d\n"
/-- Canonical 501 Not Implemented wire response. -/
def bad501 : String :=
  "HTTP/1.1 501 Not Implemented\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 0\x0d\n\x0d\n"
-- Baseline: normal chunked request is accepted and body is delivered.
-- Baseline: a normal chunked request is accepted and its body is delivered.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoHandler
  assertStatus "TE: chunked baseline" response "HTTP/1.1 200"
  assertContains "TE: chunked body delivered" response "hello"
-- TE: gzip alone (chunked not last) → 400.
-- The server can only frame bodies with chunked; an unknown-only coding has no supported framing.
-- TE: gzip alone (chunked not last) → 400: no supported framing exists.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: gzip\x0d\nConnection: close\x0d\n\x0d\nbody".toUTF8
  let response ← sendRaw client server raw okHandler
  assertExact "TE: gzip alone → 400" response bad400
-- TE: deflate alone → 400 (chunked not last).
-- TE: deflate alone → 400 (chunked not last).
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: deflate\x0d\nConnection: close\x0d\n\x0d\nbody".toUTF8
  let response ← sendRaw client server raw okHandler
  assertExact "TE: deflate alone → 400" response bad400
-- TE: identity alone → 400 (chunked not last).
-- TE: identity alone → 400 (chunked not last).
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: identity\x0d\nConnection: close\x0d\n\x0d\nbody".toUTF8
  let response ← sendRaw client server raw okHandler
  assertExact "TE: identity alone → 400" response bad400
-- TE: chunked, gzip → 400. Chunked is not last; Validate rejects this.
-- TE: chunked, gzip → 400: chunked must be the final transfer-coding.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked, gzip\x0d\nConnection: close\x0d\n\x0d\nbody".toUTF8
  let response ← sendRaw client server raw okHandler
  assertExact "TE: chunked not last → 400" response bad400
-- TE: gzip, chunked → 200. Unknown coding before chunked is accepted; body is framed as chunked.
-- The server reads the raw chunked bytes without applying the gzip layer.
-- TE: gzip, chunked → 200: the body is framed as chunked; the gzip layer
-- is not decoded by the server and the raw chunk payload is delivered.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: gzip, chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoHandler
  assertStatus "TE: gzip, chunked → 200" response "HTTP/1.1 200"
  assertContains "TE: gzip, chunked body delivered" response "hello"
-- TE: br, chunked → 200. Same as above with a different preceding coding.
-- TE: br, chunked → 200: same as the gzip case with a different leading coding.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: br, chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoHandler
  assertStatus "TE: br, chunked → 200" response "HTTP/1.1 200"
-- TE + Content-Length → 400 (request smuggling prevention, RFC 9112 §6.1).
-- Transfer-Encoding together with Content-Length → 400
-- (request-smuggling prevention, RFC 9112 §6.1).
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nContent-Length: 5\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw okHandler
  assertExact "TE + Content-Length → 400 (smuggling)" response bad400
-- TE: chunked, chunked → 400 (duplicate chunked, Validate rejects).
-- TE: chunked, chunked → 400 (duplicate chunked is rejected by Validate).
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked, chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw okHandler
  assertExact "TE: chunked, chunked → 400" response bad400
-- Mixed case: Chunked (capital C) is accepted (codings are lowercased when parsed).
-- "Chunked" (capital C) is accepted: codings are lowercased when parsed.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: Chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoHandler
  assertStatus "TE: Chunked (mixed case) → 200" response "HTTP/1.1 200"
  assertContains "TE: Chunked body delivered" response "hello"
-- NUL byte inside TE value → 400 (non-token character).
-- A raw NUL byte inside the TE value (spliced in as bytes) → 400.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let before := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunk".toUTF8
  let after := "ed\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server (before ++ ByteArray.mk #[0x00] ++ after) okHandler
  assertExact "NUL in TE value → 400" response bad400
-- Control character (0x01) inside TE value → 400.
-- A control byte (0x01) inside the TE value → 400.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let before := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunk".toUTF8
  let after := "ed\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server (before ++ ByteArray.mk #[0x01] ++ after) okHandler
  assertExact "Control char in TE value → 400" response bad400
-- Empty TE value → 400 (empty coding list is invalid per TransferEncoding.Validate).
-- An empty TE value is an invalid (empty) coding list → a 4xx error.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: \x0d\nConnection: close\x0d\n\x0d\nbody".toUTF8
  let response ← sendRaw client server raw okHandler
  assertStatus "TE: empty value → error" response "HTTP/1.1 4"
-- TE: chunked with trailing OWS is accepted (OWS is stripped before parsing).
-- Trailing optional whitespace after "chunked" is stripped before parsing.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked \x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoHandler
  assertStatus "TE: chunked with trailing OWS → 200" response "HTTP/1.1 200"

View File

@@ -0,0 +1,272 @@
import Std.Internal.Http
import Std.Internal.Async
open Std.Internal.IO Async
open Std Http
open Std.Http.Internal
abbrev TestHandler := Request Body.Stream ContextAsync (Response Body.Any)
/-- Let a bare `TestHandler` function act as a server handler by delegating
`onRequest` directly to the function itself. -/
instance : Std.Http.Server.Handler TestHandler where
  onRequest f req := f req
/--
Drives one mock connection: queues `raw` as the client's input, runs the
server connection loop to completion, and returns the server's output bytes
(empty when the server wrote nothing).
-/
def sendRaw
    (client : Mock.Client)
    (server : Mock.Server)
    (raw : ByteArray)
    (handler : TestHandler)
    (config : Config := { lingeringTimeout := 3000, generateDate := false }) : IO ByteArray := Async.block do
  client.send raw
  Std.Http.Server.serveConnection server handler config
    |>.run
  -- `recv?` is an IO action and must be bound with `←` (restored).
  let res ← client.recv?
  pure <| res.getD .empty
/--
Like `sendRaw`, but closes the client side immediately after sending so the
server sees EOF on the request stream before the connection loop runs.
-/
def sendRawAndClose
    (client : Mock.Client)
    (server : Mock.Server)
    (raw : ByteArray)
    (handler : TestHandler)
    (config : Config := { lingeringTimeout := 1000, generateDate := false }) : IO ByteArray := Async.block do
  client.send raw
  client.close
  Std.Http.Server.serveConnection server handler config
    |>.run
  -- `recv?` is an IO action and must be bound with `←` (restored).
  let res ← client.recv?
  pure <| res.getD .empty
/-- Throws an `IO.userError` unless the decoded response starts with `status`. -/
def assertStatus (name : String) (response : ByteArray) (status : String) : IO Unit := do
  let decoded := String.fromUTF8! response
  if decoded.startsWith status then
    pure ()
  else
    throw <| IO.userError s!"Test '{name}' failed:\nExpected {status}\nGot:\n{decoded.quote}"
/-- Throws an `IO.userError` unless the decoded response contains `needle`. -/
def assertContains (name : String) (response : ByteArray) (needle : String) : IO Unit := do
  let haystack := String.fromUTF8! response
  if haystack.contains needle then
    pure ()
  else
    throw <| IO.userError s!"Test '{name}' failed:\nMissing {needle.quote}\nGot:\n{haystack.quote}"
/-- Throws an `IO.userError` unless the decoded response equals `expected` exactly. -/
def assertExact (name : String) (response : ByteArray) (expected : String) : IO Unit := do
  let actual := String.fromUTF8! response
  unless actual == expected do
    throw <| IO.userError s!"Test '{name}' failed:\nExpected:\n{expected.quote}\nGot:\n{actual.quote}"
/-- Reads the entire request body and echoes it back as a `200 OK` text
response. `readAll` is an IO action, so it is bound with `←` (restored). -/
def bodyHandler : TestHandler :=
  fun req => do
    let body : String ← req.body.readAll
    Response.ok |>.text body
/-- Canonical 400 Bad Request wire response emitted for malformed requests. -/
def bad400 : String :=
  "HTTP/1.1 400 Bad Request\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 0\x0d\n\x0d\n"
-- Chunked body without trailers.
-- Chunked body without trailers is accepted and delivered to the handler.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw bodyHandler
  assertStatus "Chunked no trailers" response "HTTP/1.1 200"
  assertContains "Chunked no trailers body" response "hello"
-- Single trailer header.
-- A single trailer header after the terminal chunk is accepted.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\nChecksum: abc123\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw bodyHandler
  assertStatus "Single trailer" response "HTTP/1.1 200"
  assertContains "Single trailer body" response "hello"
-- Multiple trailer headers.
-- Multiple trailer headers after the terminal chunk are accepted.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\nChecksum: abc123\x0d\nExpires: Thu, 01 Dec 1994 16:00:00 GMT\x0d\nX-Custom: value\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw bodyHandler
  assertStatus "Multiple trailers" response "HTTP/1.1 200"
  assertContains "Multiple trailers body" response "hello"
-- Terminal chunk extensions can precede trailers.
-- Chunk extensions on the terminal (size-0) chunk may precede the trailers.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0;ext=val\x0d\nX-Trailer: yes\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw bodyHandler
  assertStatus "Terminal chunk extensions + trailers" response "HTTP/1.1 200"
  assertContains "Terminal chunk extensions + trailers body" response "hello"
-- Trailer name and value limits: names up to 256 bytes and values up to
-- 8192 bytes are accepted; one byte over each limit yields `bad400`.
#eval show IO _ from do
  let exactName := String.ofList (List.replicate 256 'X')
  let longName := String.ofList (List.replicate 257 'X')
  let exactValue := String.ofList (List.replicate 8192 'v')
  let longValue := String.ofList (List.replicate 8193 'v')
  -- Name exactly at the limit is accepted.
  let (clientA, serverA) ← Mock.new
  let rawA := s!"POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\n{exactName}: value\x0d\n\x0d\n".toUTF8
  let responseA ← sendRaw clientA serverA rawA bodyHandler
  assertStatus "Trailer name at 256" responseA "HTTP/1.1 200"
  -- Name one byte over the limit is rejected.
  let (clientB, serverB) ← Mock.new
  let rawB := s!"POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\n{longName}: value\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB rawB bodyHandler
  assertExact "Trailer name exceeds 256" responseB bad400
  -- Value exactly at the limit is accepted.
  let (clientC, serverC) ← Mock.new
  let rawC := s!"POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\nX-Exact: {exactValue}\x0d\n\x0d\n".toUTF8
  let responseC ← sendRaw clientC serverC rawC bodyHandler
  assertStatus "Trailer value at 8192" responseC "HTTP/1.1 200"
  -- Value one byte over the limit is rejected.
  let (clientD, serverD) ← Mock.new
  let rawD := s!"POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\nX-Too-Long: {longValue}\x0d\n\x0d\n".toUTF8
  let responseD ← sendRaw clientD serverD rawD bodyHandler
  assertExact "Trailer value exceeds 8192" responseD bad400
-- `maxTrailerHeaders` enforcement: exactly the limit is accepted, one over
-- is rejected; with a limit of 0 any trailer is rejected but a trailer-free
-- chunked body still succeeds.
#eval show IO _ from do
  let config2 : Config := { lingeringTimeout := 3000, maxTrailerHeaders := 2, generateDate := false }
  let (clientA, serverA) ← Mock.new
  let okRaw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\nT1: a\x0d\nT2: b\x0d\n\x0d\n".toUTF8
  let okResponse ← sendRaw clientA serverA okRaw bodyHandler (config := config2)
  assertStatus "maxTrailerHeaders exact limit" okResponse "HTTP/1.1 200"
  let (clientB, serverB) ← Mock.new
  let badRaw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\nT1: a\x0d\nT2: b\x0d\nT3: c\x0d\n\x0d\n".toUTF8
  let badResponse ← sendRaw clientB serverB badRaw bodyHandler (config := config2)
  assertExact "maxTrailerHeaders overflow" badResponse bad400
  let config0 : Config := { lingeringTimeout := 3000, maxTrailerHeaders := 0, generateDate := false }
  let (clientC, serverC) ← Mock.new
  let rejectAny := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\nX-Trailer: rejected\x0d\n\x0d\n".toUTF8
  let responseC ← sendRaw clientC serverC rejectAny bodyHandler (config := config0)
  assertExact "maxTrailerHeaders=0 rejects trailers" responseC bad400
  let (clientD, serverD) ← Mock.new
  let noTrailer := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseD ← sendRaw clientD serverD noTrailer bodyHandler (config := config0)
  assertStatus "maxTrailerHeaders=0 no trailers" responseD "HTTP/1.1 200"
-- Trailer syntax validation: a trailer line without a colon, with leading
-- whitespace (obsolete line folding), or with a space inside the field name
-- must be rejected.
#eval show IO _ from do
  let (clientA, serverA) ← Mock.new
  let noColon := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\nBadTrailer value\x0d\n\x0d\n".toUTF8
  let responseA ← sendRaw clientA serverA noColon bodyHandler
  assertExact "Trailer without colon" responseA bad400
  let (clientB, serverB) ← Mock.new
  let leadingWS := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\n X-Bad: folded\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB leadingWS bodyHandler
  assertExact "Trailer leading whitespace" responseB bad400
  let (clientC, serverC) ← Mock.new
  let spaceName := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\nBad Name: value\x0d\n\x0d\n".toUTF8
  let responseC ← sendRaw clientC serverC spaceName bodyHandler
  assertExact "Trailer name contains space" responseC bad400
-- Trailer byte-level validation: NUL or other control bytes spliced into a
-- trailer name or value (injected via raw `ByteArray` concatenation, since
-- they cannot appear in a string literal path) must be rejected.
#eval show IO _ from do
  let (clientA, serverA) ← Mock.new
  let beforeName := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\nX-Bad".toUTF8
  let afterName := "Name: value\x0d\n\x0d\n".toUTF8
  let responseA ← sendRaw clientA serverA (beforeName ++ ByteArray.mk #[0] ++ afterName) bodyHandler
  assertExact "NUL in trailer name" responseA bad400
  let (clientB, serverB) ← Mock.new
  let beforeValue := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\nX-Header: bad".toUTF8
  let afterValue := "value\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB (beforeValue ++ ByteArray.mk #[0] ++ afterValue) bodyHandler
  assertExact "NUL in trailer value" responseB bad400
  let (clientC, serverC) ← Mock.new
  let responseC ← sendRaw clientC serverC (beforeValue ++ ByteArray.mk #[0x01] ++ afterValue) bodyHandler
  assertExact "Control char in trailer value" responseC bad400
-- Incomplete trailer section (missing the final CRLF) followed by a client
-- close: the server must not emit any response bytes for a truncated request.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\nX-Trailer: value\x0d\n".toUTF8
  let response ← sendRawAndClose client server raw bodyHandler
  assert! response.size == 0
-- Trailer encoding emits terminal chunk plus trailer headers.
-- Names are inserted lowercase and asserted Title-Case on the wire, so the
-- encoder canonicalizes trailer field names.
#eval show IO _ from Async.block do
  let trailer := Trailer.empty
    |>.insert (.mk "checksum") (.mk "abc123")
    |>.insert (.mk "expires") (.mk "Thu, 01 Dec 1994")
  let encoded := (Encode.encode (v := .v11) ChunkedBuffer.empty trailer).toByteArray
  let text := String.fromUTF8! encoded
  assert! text.contains "0\x0d\n"
  assert! text.contains "Checksum: abc123\x0d\n"
  assert! text.contains "Expires: Thu, 01 Dec 1994\x0d\n"
-- Empty trailer encoding is exactly terminal chunk CRLF CRLF:
-- "0\r\n" (terminal chunk) followed by "\r\n" (empty trailer terminator),
-- with no extra bytes.
#eval show IO _ from Async.block do
  let encoded := (Encode.encode (v := .v11) ChunkedBuffer.empty Trailer.empty).toByteArray
  let text := String.fromUTF8! encoded
  assert! text == "0\x0d\n\x0d\n"
-- Trailer injection: forbidden field names must be rejected (RFC 9112 §6.5).
-- A client injecting framing or routing fields via trailers could confuse proxies.
#eval show IO _ from do
  -- content-length in trailer must be rejected
  let (clientA, serverA) ← Mock.new
  let rawA := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\nContent-Length: 1000\x0d\n\x0d\n".toUTF8
  let responseA ← sendRaw clientA serverA rawA bodyHandler
  assertExact "content-length in trailer rejected" responseA bad400
  -- transfer-encoding in trailer must be rejected
  let (clientB, serverB) ← Mock.new
  let rawB := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\nTransfer-Encoding: chunked\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB rawB bodyHandler
  assertExact "transfer-encoding in trailer rejected" responseB bad400
  -- host in trailer must be rejected
  let (clientC, serverC) ← Mock.new
  let rawC := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\nHost: evil.example\x0d\n\x0d\n".toUTF8
  let responseC ← sendRaw clientC serverC rawC bodyHandler
  assertExact "host in trailer rejected" responseC bad400
  -- connection in trailer must be rejected
  let (clientD, serverD) ← Mock.new
  let rawD := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\nConnection: keep-alive\x0d\n\x0d\n".toUTF8
  let responseD ← sendRaw clientD serverD rawD bodyHandler
  assertExact "connection in trailer rejected" responseD bad400
  -- authorization in trailer must be rejected
  let (clientE, serverE) ← Mock.new
  let rawE := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\nAuthorization: Bearer token\x0d\n\x0d\n".toUTF8
  let responseE ← sendRaw clientE serverE rawE bodyHandler
  assertExact "authorization in trailer rejected" responseE bad400
  -- cache-control in trailer must be rejected
  let (clientF, serverF) ← Mock.new
  let rawF := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\nCache-Control: no-cache\x0d\n\x0d\n".toUTF8
  let responseF ← sendRaw clientF serverF rawF bodyHandler
  assertExact "cache-control in trailer rejected" responseF bad400
  -- te in trailer must be rejected
  let (clientG, serverG) ← Mock.new
  let rawG := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\nTE: trailers\x0d\n\x0d\n".toUTF8
  let responseG ← sendRaw clientG serverG rawG bodyHandler
  assertExact "te in trailer rejected" responseG bad400
-- Forbidden trailer field names are rejected regardless of case, and a
-- single forbidden field among otherwise-legal trailers poisons the request.
#eval show IO _ from do
  let (clientA, serverA) ← Mock.new
  let rawA := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\nCONTENT-LENGTH: 0\x0d\n\x0d\n".toUTF8
  let responseA ← sendRaw clientA serverA rawA bodyHandler
  assertExact "CONTENT-LENGTH in trailer rejected (uppercase)" responseA bad400
  let (clientB, serverB) ← Mock.new
  let rawB := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\nContent-Length: 0\x0d\nChecksum: abc\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB rawB bodyHandler
  assertExact "forbidden trailer among others rejected" responseB bad400
-- Non-forbidden custom trailers are still allowed after the fix.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\nChecksum: deadbeef\x0d\nX-Timing: 12ms\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw bodyHandler
  assertStatus "non-forbidden trailers accepted" response "HTTP/1.1 200"
  assertContains "body delivered with custom trailers" response "hello"

View File

@@ -0,0 +1,930 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
import Std.Internal.Http.Data.URI
import Std.Internal.Http.Data.URI.Encoding
open Std.Http
open Std.Http.URI
open Std.Http.URI.Parser
/-!
# URI Tests
Comprehensive tests for URI parsing, encoding, normalization, and manipulation.
This file consolidates tests from multiple URI-related test files.
-/
-- ============================================================================
-- Helper Functions
-- ============================================================================
/--
Runs `parser` over the UTF-8 bytes of `s`, requiring end-of-input, and
lifts the parse result into `IO` (failing with the parser's error).
-/
def runParser (parser : Std.Internal.Parsec.ByteArray.Parser α) (s : String) : IO α :=
  let fullParser := parser <* Std.Internal.Parsec.eof
  IO.ofExcept <| fullParser.run s.toUTF8
/--
Parses `s` as a request target and checks that its `toString` rendering
equals `exact` (defaulting to `s` itself, i.e. a round-trip check).
Throws a descriptive `IO.userError` on mismatch.
-/
def parseCheck (s : String) (exact : String := s) : IO Unit := do
  -- Restored the lost `←`: `runParser` is an `IO` action.
  let result ← runParser parseRequestTarget s
  if toString result = exact then
    pure ()
  else
    throw (.userError s!"expect {exact.quote} but got {(toString result).quote}")
/--
Asserts that parsing `s` as a request target fails; if the parser
unexpectedly succeeds, throws an `IO.userError` showing the parsed value.
-/
def parseCheckFail (s : String) : IO Unit := do
  let outcome := (parseRequestTarget <* Std.Internal.Parsec.eof).run s.toUTF8
  match outcome with
  | .error _ =>
    pure ()
  | .ok r =>
    throw <| .userError
      s!"expected parse failure, but succeeded with {(repr r)}"
-- ============================================================================
-- Percent Encoding Tests (EncodedString)
-- ============================================================================
-- Valid percent encoding validation
/--
info: some "abc"
-/
#guard_msgs in
#eval IO.println (repr (EncodedSegment.ofByteArray? "abc".toUTF8))
/--
info: some "%20"
-/
#guard_msgs in
#eval IO.println (repr (EncodedSegment.ofByteArray? "%20".toUTF8))
/--
info: some "hello%20world"
-/
#guard_msgs in
#eval IO.println (repr (EncodedSegment.ofByteArray? "hello%20world".toUTF8))
/--
info: some "%FF"
-/
#guard_msgs in
#eval IO.println (repr (EncodedSegment.ofByteArray? "%FF".toUTF8))
/--
info: some "%00"
-/
#guard_msgs in
#eval IO.println (repr (EncodedSegment.ofByteArray? "%00".toUTF8))
-- Invalid percent encoding: incomplete
-- `ofByteArray?` returns `none` when a `%` is not followed by two hex digits.
/--
info: none
-/
#guard_msgs in
#eval IO.println (repr (EncodedSegment.ofByteArray? "%".toUTF8))
/--
info: none
-/
#guard_msgs in
#eval IO.println (repr (EncodedSegment.ofByteArray? "hello%".toUTF8))
/--
info: none
-/
#guard_msgs in
#eval IO.println (repr (EncodedSegment.ofByteArray? "%2".toUTF8))
/--
info: none
-/
#guard_msgs in
#eval IO.println (repr (EncodedSegment.ofByteArray? "%A".toUTF8))
-- Invalid percent encoding: non-hex characters
/--
info: none
-/
#guard_msgs in
#eval IO.println (repr (EncodedSegment.ofByteArray? "%GG".toUTF8))
/--
info: none
-/
#guard_msgs in
#eval IO.println (repr (EncodedSegment.ofByteArray? "%2G".toUTF8))
/--
info: none
-/
#guard_msgs in
#eval IO.println (repr (EncodedSegment.ofByteArray? "%G2".toUTF8))
-- ============================================================================
-- Percent Encoding Decode Tests
-- ============================================================================
/--
info: some "abc"
-/
#guard_msgs in
#eval IO.println (repr <| EncodedSegment.decode =<< (EncodedSegment.ofByteArray? "abc".toUTF8))
/--
info: some " "
-/
#guard_msgs in
#eval IO.println (repr <| EncodedSegment.decode =<< (EncodedSegment.ofByteArray? "%20".toUTF8))
/--
info: some "hello world"
-/
#guard_msgs in
#eval IO.println (repr <| EncodedSegment.decode =<< (EncodedSegment.ofByteArray? "hello%20world".toUTF8))
/--
info: some " !"
-/
#guard_msgs in
#eval IO.println (repr <| EncodedSegment.decode =<< (EncodedSegment.ofByteArray? "%20%21".toUTF8))
-- NOTE(review): `%FF` passed syntactic validation above but fails to decode —
-- presumably because the decoded byte is not valid UTF-8; confirm in `decode`.
/--
info: none
-/
#guard_msgs in
#eval IO.println (repr <| EncodedSegment.decode =<< (EncodedSegment.ofByteArray? "%FF".toUTF8))
/--
info: some "\x00"
-/
#guard_msgs in
#eval IO.println (repr <| EncodedSegment.decode =<< (EncodedSegment.ofByteArray? "%00".toUTF8))
-- ============================================================================
-- Query String Encoding Tests
-- ============================================================================
/--
info: some "hello+world"
-/
#guard_msgs in
#eval IO.println (repr (EncodedQueryString.ofByteArray? "hello+world".toUTF8))
/--
info: none
-/
#guard_msgs in
#eval IO.println (repr (EncodedQueryString.ofByteArray? "%".toUTF8))
-- Query decoding maps `+` to a space ("hello+world" → "hello world").
/--
info: some "hello world"
-/
#guard_msgs in
#eval IO.println (repr <| EncodedQueryString.decode =<< (EncodedQueryString.ofByteArray? "hello+world".toUTF8))
/--
info: some " "
-/
#guard_msgs in
#eval IO.println (repr <| EncodedQueryString.decode =<< (EncodedQueryString.ofByteArray? "%20".toUTF8))
-- ============================================================================
-- Request Target Parsing - Basic Tests
-- ============================================================================
-- Round-trip checks: each input must parse and re-render to itself.
#eval parseCheck "///path/with/encoded%20space"
#eval parseCheck "/path/with/encoded%20space"
#eval parseCheck "/path/with/encoded%20space/"
#eval parseCheck "*"
#eval parseCheck "/api/search?q=hello%20world&category=tech%2Bgames"
#eval parseCheck "/"
#eval parseCheck "/api/v1/users/123/posts/456/comments/789"
#eval parseCheck "/files/../etc/passwd"
#eval parseCheck "example.com:8080"
#eval parseCheck "https://example.com:8080/ata"
#eval parseCheck "https://example.com:8080////./ata"
#eval parseCheck "192.168.1.1:3000"
#eval parseCheck "[::1]:8080"
#eval parseCheck "http://example.com/path/to/resource?query=value"
#eval parseCheck "https://api.example.com:443/v1/users?limit=10"
#eval parseCheck "http://[2001:db8::1]:8080/path"
#eval parseCheck "https://xn--nxasmq6b.xn--o3cw4h/path"
#eval parseCheck "localhost:65535"
#eval parseCheck "http:80"
#eval parseCheck "https://user:pass@secure.example.com/private"
#eval parseCheck "/double//slash//path"
#eval parseCheck "http://user%40example:pass%3Aword@host.com"
#eval parseCheck "http://[::ffff:192.168.1.1]/path"
#eval parseCheck "http://example.com:/"
#eval parseCheck "http://example.com:/?q=1"
#eval parseCheck "///////"
-- `&` in a key must be percent-encoded so toRawString round-trips correctly.
#guard
  let query := URI.Query.empty.insert "a&b" "1"
  query.toRawString == "a%26b=1"
-- `=` in a key must be percent-encoded so re-parsing preserves the key.
#guard
  let query := URI.Query.empty.insert "a=b" "1"
  query.toRawString == "a%3Db=1"
-- `&` in a value must be percent-encoded.
#guard
  let query := URI.Query.empty.insert "key" "a&b"
  query.toRawString == "key=a%26b"
-- `=` in a value is technically safe (parser uses first `=`), but encoding it
-- is still correct and keeps representation unambiguous.
#guard
  let query := URI.Query.empty.insert "key" "a=b"
  query.toRawString == "key=a%3Db"
-- Round-trip: insert → toRawString → re-parse should preserve the parameter.
#guard
  let original := URI.Query.empty.insert "a&b" "c=d"
  let raw := original.toRawString
  -- Parse via a synthetic origin-form request target
  match (URI.Parser.parseRequestTarget <* Std.Internal.Parsec.eof).run
      s!"/path?{raw}".toUTF8 with
  | .ok result =>
    (result.query.get "a&b" == some "c=d")
  | .error _ => false
-- A bare `scheme:port`-looking input parses as authority form.
#guard
  match (parseRequestTarget <* Std.Internal.Parsec.eof).run "http:80".toUTF8 with
  | .ok (.authorityForm _) => true
  | _ => false
-- Parse failure tests: inputs the request-target parser must reject
-- (raw spaces, bad percent escapes, unterminated IPv6, fragments, controls).
#eval parseCheckFail "/path with space"
#eval parseCheckFail "/path/%"
#eval parseCheckFail "/path/%2"
#eval parseCheckFail "/path/%ZZ"
#eval parseCheckFail ""
#eval parseCheckFail "[::1"
#eval parseCheckFail "[:::1]:80"
#eval parseCheckFail "http://exa_mple.com/path"
#eval parseCheckFail "http://[fe80::1%25eth0]/path"
#eval parseCheckFail "#frag"
#eval parseCheckFail "/path/\n"
#eval parseCheckFail "/path/\u0000"
#eval parseCheckFail "/page#section"
#eval parseCheckFail "/api/v1/users/[id]:action"
-- maxPathSegments should apply to trailing empty segments as well.
#guard
  match (parseURI { maxPathSegments := 1 } <* Std.Internal.Parsec.eof).run
      "http://example.com/a/".toUTF8 with
  | .error _ => true
  | .ok _ => false
-- ============================================================================
-- Request Target Parsing - Detailed Output Tests
-- ============================================================================
-- Each test binds the parse result with `←` (restored) and checks the exact
-- `repr` against the `#guard_msgs` expectation.
/--
info: Std.Http.RequestTarget.originForm { segments := #["path", "with", "encoded%20space"], absolute := true } none
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/path/with/encoded%20space"
  IO.println (repr result)
/--
info: Std.Http.RequestTarget.originForm { segments := #["", "", "path", "with", "encoded%20space"], absolute := true } none
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "///path/with/encoded%20space"
  IO.println (repr result)
/--
info: Std.Http.RequestTarget.asteriskForm
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "*"
  IO.println (repr result)
/--
info: #[("q", some "hello%20world"), ("category", some "tech%2Bgames")]
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/api/search?q=hello%20world&category=tech%2Bgames"
  IO.println (repr result.query)
/--
info: Std.Http.RequestTarget.originForm { segments := #[], absolute := true } none
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/"
  IO.println (repr result)
/--
info: Std.Http.RequestTarget.authorityForm
  { userInfo := none, host := Std.Http.URI.Host.name "example.com", port := Std.Http.URI.Port.value 8080 }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "example.com:8080"
  IO.println (repr result)
/--
info: Std.Http.RequestTarget.authorityForm
  { userInfo := none, host := Std.Http.URI.Host.ipv4 192.168.1.1, port := Std.Http.URI.Port.value 3000 }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "192.168.1.1:3000"
  IO.println (repr result)
/--
info: Std.Http.RequestTarget.authorityForm
  { userInfo := none, host := Std.Http.URI.Host.ipv6 ::1, port := Std.Http.URI.Port.value 8080 }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "[::1]:8080"
  IO.println (repr result)
/--
info: Std.Http.RequestTarget.absoluteForm
  { scheme := "https",
    authority := some { userInfo := none,
                        host := Std.Http.URI.Host.name "example.com",
                        port := Std.Http.URI.Port.value 8080 },
    path := { segments := #["ata"], absolute := true },
    query := #[],
    fragment := none }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "https://example.com:8080/ata"
  IO.println (repr result)
/--
info: Std.Http.RequestTarget.absoluteForm
  { scheme := "http",
    authority := some { userInfo := none,
                        host := Std.Http.URI.Host.ipv6 2001:db8::1,
                        port := Std.Http.URI.Port.value 8080 },
    path := { segments := #["path"], absolute := true },
    query := #[],
    fragment := none }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "http://[2001:db8::1]:8080/path"
  IO.println (repr result)
/--
info: Std.Http.RequestTarget.absoluteForm
  { scheme := "https",
    authority := some { userInfo := some { username := "user%20b", password := some "pass" },
                        host := Std.Http.URI.Host.name "secure.example.com",
                        port := Std.Http.URI.Port.omitted },
    path := { segments := #["private"], absolute := true },
    query := #[],
    fragment := none }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "https://user%20b:pass@secure.example.com/private"
  IO.println (repr result)
-- ============================================================================
-- IPv6 Host Tests
-- ============================================================================
-- Binds the parse result with `←` (restored) and inspects only the host.
/--
info: Std.Http.URI.Host.ipv6 ::1
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "[::1]:8080"
  match result.authority? with
  | some auth => IO.println (repr auth.host)
  | none => IO.println "no authority"
/--
info: Std.Http.URI.Host.ipv6 2001:db8::8a2e:370:7334
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "http://[2001:db8::8a2e:370:7334]:8080/api"
  match result.authority? with
  | some auth => IO.println (repr auth.host)
  | none => IO.println "no authority"
/--
info: Std.Http.URI.Host.ipv6 ::
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "http://[::]/path"
  match result.authority? with
  | some auth => IO.println (repr auth.host)
  | none => IO.println "no authority"
-- ============================================================================
-- UserInfo Tests
-- ============================================================================
-- Binds the parse result with `←` (restored) and inspects only the userInfo.
/--
info: some { username := "user", password := some "pass" }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "https://user:pass@example.com/private"
  match result.authority? with
  | some auth => IO.println (repr auth.userInfo)
  | none => IO.println "no authority"
/--
info: some { username := "user%20only", password := none }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "https://user%20only@example.com/path"
  match result.authority? with
  | some auth => IO.println (repr auth.userInfo)
  | none => IO.println "no authority"
/--
info: some { username := "", password := some "pass" }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "https://:pass@example.com/path"
  match result.authority? with
  | some auth => IO.println (repr auth.userInfo)
  | none => IO.println "no authority"
/--
info: some { username := "user", password := some "p%40ss%3Aw0rd" }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "https://user:p%40ss%3Aw0rd@example.com/"
  match result.authority? with
  | some auth => IO.println (repr auth.userInfo)
  | none => IO.println "no authority"
-- ============================================================================
-- Path.normalize Tests (RFC 3986 Section 5.2.4)
-- ============================================================================
/--
info: /a/b
-/
#guard_msgs in
#eval IO.println <| toString (URI.parse! "http://example.com/a/./b").path.normalize
/--
info: /a
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a/b/..").path.normalize
/--
info: /a/g
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a/b/c/./../../g").path.normalize
-- `..` above the root is dropped rather than escaping it.
/--
info: /g
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/../../../g").path.normalize
/--
info: /a/c
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a/b/../c").path.normalize
/--
info: /a/
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a/b/c/../.././").path.normalize
/--
info: /
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a/b/../../..").path.normalize
/--
info: /
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/../../../").path.normalize
/--
info: /a/b/c
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/./a/./b/./c/.").path.normalize
/--
info: /c
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a/../b/../c").path.normalize
-- ============================================================================
-- Path.parent Tests
-- ============================================================================
/--
info: /a/b
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a/b/c").path.parent
/--
info: /a
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a/b").path.parent
/--
info: /
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a").path.parent
-- The root is its own parent.
/--
info: /
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/").path.parent
-- ============================================================================
-- Path.join Tests
-- ============================================================================
-- Joining a relative path appends its segments.
/--
info: /a/b/c/d
-/
#guard_msgs in
#eval do
  let p1 := (URI.parse! "http://example.com/a/b").path
  let p2 : URI.Path := { segments := #[URI.EncodedString.encode "c", URI.EncodedString.encode "d"], absolute := false }
  IO.println (p1.join p2)
-- Joining an absolute path replaces the base entirely.
/--
info: /x/y
-/
#guard_msgs in
#eval do
  let p1 := (URI.parse! "http://example.com/a/b").path
  let p2 : URI.Path := { segments := #[URI.EncodedString.encode "x", URI.EncodedString.encode "y"], absolute := true }
  IO.println (p1.join p2)
-- ============================================================================
-- Path.isEmpty Tests
-- ============================================================================
#guard (URI.parse! "http://example.com").path.isEmpty = true
#guard (URI.parse! "http://example.com/").path.absolute = true
#guard (URI.parse! "http://example.com/a").path.isEmpty = false
#guard (URI.parse! "http://example.com/a").path.absolute = true
-- ============================================================================
-- URI Modification Helpers
-- ============================================================================
-- `withScheme!` lowercases the scheme it is given.
#guard ((URI.parse! "http://example.com").withScheme! "htTps" |>.scheme) == "https"
#guard ((URI.parse! "http://example.com").withScheme! "ftP" |>.scheme) == "ftp"
/--
info: http://example.com/#section1
-/
#guard_msgs in
#eval IO.println ((URI.parse! "http://example.com/").withFragment (some (toString (URI.EncodedString.encode "section1" : URI.EncodedFragment))))
/--
info: http://example.com/?key=value
-/
#guard_msgs in
#eval do
  let uri := URI.parse! "http://example.com/"
  let query := URI.Query.empty.insert "key" "value"
  IO.println (uri.withQuery query)
/--
info: http://example.com/new/path
-/
#guard_msgs in
#eval do
  let uri := URI.parse! "http://example.com/old/path"
  let newPath : URI.Path := { segments := #[URI.EncodedString.encode "new", URI.EncodedString.encode "path"], absolute := true }
  IO.println (uri.withPath newPath)
-- ============================================================================
-- URI.normalize Tests (RFC 3986 Section 6)
-- ============================================================================
-- Normalization lowercases scheme and host, and removes dot segments;
-- the path's own case is preserved.
#guard (URI.parse! "HTTP://example.com").normalize.scheme == "http"
#guard (URI.parse! "HtTpS://example.com").normalize.scheme == "https"
/--
info: http://example.com/
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://EXAMPLE.COM/").normalize
/--
info: http://example.com/
-/
#guard_msgs in
#eval IO.println (URI.parse! "HTTP://Example.COM/").normalize
/--
info: http://example.com/a/c
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a/b/../c").normalize
/--
info: http://example.com/a/g
-/
#guard_msgs in
#eval IO.println (URI.parse! "HTTP://EXAMPLE.COM/a/b/c/./../../g").normalize
/--
info: https://www.example.com/PATH
-/
#guard_msgs in
#eval IO.println (URI.parse! "HTTPS://WWW.EXAMPLE.COM/PATH").normalize
-- ============================================================================
-- Query Parameter Tests
-- ============================================================================
-- Query with duplicate keys (each parse result is bound with `←`, restored).
/--
info: 3
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/search?tag=a&tag=b&tag=c"
  let all := result.query.findAll "tag"
  IO.println all.size
/--
info: #[some "a", some "b", some "c"]
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/search?tag=a&tag=b&tag=c"
  let all := result.query.findAll "tag"
  IO.println (repr all)
-- `find?` returns the first binding of a duplicated key.
/--
info: some (some "a")
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/search?key=a&key=b&key=c"
  IO.println (repr (result.query.find? "key"))
-- Empty value vs no value
/--
info: some (some "")
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/api?key="
  IO.println (repr (result.query.find? "key"))
/--
info: some none
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/api?key"
  IO.println (repr (result.query.find? "key"))
/--
info: some (some "value")
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/api?key=value"
  IO.println (repr (result.query.find? "key"))
-- Raw lookup APIs should not alias with pre-encoded key spellings.
#guard
  match (parseRequestTarget <* Std.Internal.Parsec.eof).run "/api?%61=1&a=2".toUTF8 with
  | .ok result =>
    let encodedA? := EncodedQueryParam.fromString? "%61"
    ((result.query.find? "a" |>.bind id |>.bind EncodedQueryParam.decode) == some "2") &&
    (result.query.find? "%61").isNone &&
    result.query.contains "a" &&
    !result.query.contains "%61" &&
    (match encodedA? with
     | some encodedA =>
       ((result.query.findEncoded? encodedA |>.bind id |>.bind EncodedQueryParam.decode) == some "1") &&
       result.query.containsEncoded encodedA
     | none => false)
  | .error _ => false
#guard
  match (parseRequestTarget <* Std.Internal.Parsec.eof).run "/api?%61=1&a=2".toUTF8 with
  | .ok result =>
    match EncodedQueryParam.fromString? "%61" with
    | some encodedA =>
      let erasedRaw := result.query.erase "a"
      let erasedEncoded := result.query.eraseEncoded encodedA
      !erasedRaw.contains "a" &&
      erasedRaw.containsEncoded encodedA &&
      !erasedEncoded.containsEncoded encodedA &&
      erasedEncoded.contains "a"
    | none => false
  | .error _ => false
-- ============================================================================
-- Query Operations
-- ============================================================================
-- An inserted key is reported present; a key that was never inserted is not.
#guard (URI.Query.empty.insert "a" "1" |>.contains "a") = true
#guard (URI.Query.empty.contains "nonexistent") = false
-- Inserted parameters serialize in insertion order.
/--
info: a=1&b=2
-/
#guard_msgs in
#eval do
  let q := (URI.Query.empty.insert "a" "1").insert "b" "2"
  IO.println q.toRawString
-- `erase` drops the named key; only the remaining pair is serialized.
/--
info: b=2
-/
#guard_msgs in
#eval do
  let q := ((URI.Query.empty.insert "a" "1").insert "b" "2").erase "a"
  IO.println q.toRawString
-- `set` replaces an existing binding instead of appending a duplicate.
/--
info: key=new
-/
#guard_msgs in
#eval do
  let q := (URI.Query.empty.insert "key" "old").set "key" "new"
  IO.println q.toRawString
-- ============================================================================
-- URI Builder Tests
-- ============================================================================
-- Domain names longer than 255 characters are rejected.
#guard
  let seg := String.ofList (List.replicate 63 'a')
  -- four 63-char labels plus dots (incl. trailing dot) = 256 chars, over cap
  (URI.DomainName.ofString? s!"{seg}.{seg}.{seg}.{seg}.").isNone
-- The builder's fallible host setter applies the same 255-character limit.
#guard
  let seg := String.ofList (List.replicate 63 'a')
  (URI.Builder.empty.setHost? s!"{seg}.{seg}.{seg}.{seg}.").isNone
-- Scheme, host, path segments, and a query parameter assemble into one URI.
/--
info: https://example.com/api/users?page=1
-/
#guard_msgs in
#eval do
  let b := URI.Builder.empty.setScheme! "https"
  let b := b.setHost! "example.com"
  let b := b.appendPathSegment "api" |>.appendPathSegment "users"
  let b := b.addQueryParam "page" "1"
  IO.println b.build
-- An explicit port is rendered after the host; an empty path prints as "/".
/--
info: http://localhost:8080/
-/
#guard_msgs in
#eval do
  let b := URI.Builder.empty.setScheme! "http"
  let b := b.setHost! "localhost" |>.setPort 8080
  IO.println b.build
-- User-info (user:password) is rendered before the host.
/--
info: https://user:pass@secure.example.com/private
-/
#guard_msgs in
#eval do
  let b := URI.Builder.empty.setScheme! "https"
  let b := b.setUserInfo "user" (some "pass")
  let b := b.setHost! "secure.example.com" |>.appendPathSegment "private"
  IO.println b.build
-- ============================================================================
-- Encoded Path Segment Tests
-- ============================================================================
-- Percent-encoded slashes stay encoded inside a single path segment.
/--
info: Std.Http.RequestTarget.originForm { segments := #["path%2Fwith%2Fslashes"], absolute := true } none
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/path%2Fwith%2Fslashes"
  IO.println (repr result)
-- A percent-encoded space stays encoded in the parsed segment.
/--
info: Std.Http.RequestTarget.originForm { segments := #["file%20name.txt"], absolute := true } none
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/file%20name.txt"
  IO.println (repr result)
-- Percent-encoded UTF-8 bytes ("café") stay encoded in the parsed segment.
/--
info: Std.Http.RequestTarget.originForm { segments := #["caf%C3%A9"], absolute := true } none
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/caf%C3%A9"
  IO.println (repr result)
-- ============================================================================
-- Authority Form Tests
-- ============================================================================
-- CONNECT-style authority form: host name plus explicit port.
/--
info: Std.Http.RequestTarget.authorityForm
  { userInfo := none, host := Std.Http.URI.Host.name "proxy.example.com", port := Std.Http.URI.Port.value 3128 }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "proxy.example.com:3128"
  IO.println (repr result)
-- An IPv4 literal host is recognized as `Host.ipv4`, not a name.
/--
info: Std.Http.RequestTarget.authorityForm
  { userInfo := none, host := Std.Http.URI.Host.ipv4 127.0.0.1, port := Std.Http.URI.Port.value 8080 }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "127.0.0.1:8080"
  IO.println (repr result)
-- A host label may start with a digit and still parse as a name.
/--
info: Std.Http.RequestTarget.authorityForm
  { userInfo := none, host := Std.Http.URI.Host.name "1example.com", port := Std.Http.URI.Port.value 8080 }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "1example.com:8080"
  IO.println (repr result)
-- Absolute form with a leading-digit host label.
/--
info: Std.Http.RequestTarget.absoluteForm
  { scheme := "http",
    authority := some { userInfo := none,
                        host := Std.Http.URI.Host.name "1example.com",
                        port := Std.Http.URI.Port.omitted },
    path := { segments := #["path"], absolute := true },
    query := #[],
    fragment := none }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "http://1example.com/path"
  IO.println (repr result)
-- Absolute form with a digit-prefixed (but not all-digit) host label.
/--
info: Std.Http.RequestTarget.absoluteForm
  { scheme := "http",
    authority := some { userInfo := none,
                        host := Std.Http.URI.Host.name "123abc.example.com",
                        port := Std.Http.URI.Port.omitted },
    path := { segments := #["page"], absolute := true },
    query := #[],
    fragment := none }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "http://123abc.example.com/page"
  IO.println (repr result)