Compare commits

...

626 Commits

Author SHA1 Message Date
Sofia Rodrigues
af40af987c fix: tests 2026-03-16 10:45:35 -03:00
Sofia Rodrigues
65da1ee047 feat: client 2026-03-16 00:01:33 -03:00
Sofia Rodrigues
d4884cde14 fix: client uri 2026-03-14 00:48:51 -03:00
Sofia Rodrigues
49da0f2d9c Merge branch 'sofia/async-http-server' into sofia/async-http-client 2026-03-13 23:58:02 -03:00
Sofia Rodrigues
7fbecca6f0 fix: test 2026-03-13 23:54:45 -03:00
Sofia Rodrigues
ae5a3d2c8b Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-13 23:51:15 -03:00
Sofia Rodrigues
1a270555ae fix: uri 2026-03-13 23:51:00 -03:00
Sofia Rodrigues
72702c3538 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-03-13 23:47:38 -03:00
Sofia Rodrigues
e86dbf3992 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-03-13 23:46:40 -03:00
Sofia Rodrigues
d71f0bdae7 fix: uri test 2026-03-13 23:46:35 -03:00
Sofia Rodrigues
6ae49d7639 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-03-13 23:13:32 -03:00
Sofia Rodrigues
232d173af3 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-03-13 23:13:19 -03:00
Sofia Rodrigues
3a4a309aed feat: split uri types 2026-03-13 23:13:05 -03:00
Sofia Rodrigues
033b7b537a feat: client 2026-03-13 22:42:43 -03:00
Sofia Rodrigues
9c87a9f044 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-12 15:52:36 -03:00
Sofia Rodrigues
34c9cafc12 fix: type 2026-03-12 15:52:29 -03:00
Sofia Rodrigues
014dd1d263 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-12 15:43:28 -03:00
Sofia Rodrigues
2a7a407875 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-03-12 15:43:06 -03:00
Sofia Rodrigues
e359001026 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-03-12 15:39:08 -03:00
Sofia Rodrigues
72244398dc fix: test 2026-03-12 15:38:59 -03:00
Sofia Rodrigues
c0e60b797c Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-03-12 15:33:48 -03:00
Sofia Rodrigues
400908a2f4 fix: port and fragment 2026-03-12 15:09:49 -03:00
Sofia Rodrigues
394c999c2a fix: uri 2026-03-12 15:03:49 -03:00
Sofia Rodrigues
b7e88dadeb Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-03-12 15:01:03 -03:00
Sofia Rodrigues
a39a0575a0 Merge branch 'master' into sofia/async-http-headers 2026-03-12 14:35:25 -03:00
Sofia Rodrigues
5815f33342 Merge branch 'sofia/fix-native-decide' into sofia/async-http-headers 2026-03-12 14:17:13 -03:00
Sofia Rodrigues
4fdf94ed3d refactor: simplify error 2026-03-10 15:58:40 -03:00
Sofia Rodrigues
66743e80a6 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-10 15:58:20 -03:00
Sofia Rodrigues
2d0d63f5d3 refactor: move logic 2026-03-10 15:58:07 -03:00
Sofia Rodrigues
10951fdb57 refactor: use closewitherror 2026-03-10 15:37:06 -03:00
Sofia Rodrigues
71d3967338 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-10 15:36:54 -03:00
Sofia Rodrigues
34dbcb2ca5 refactor: add close with error 2026-03-10 15:35:34 -03:00
Sofia Rodrigues
abb60e47c8 refactor: make smaller 2026-03-10 15:19:27 -03:00
Sofia Rodrigues
7a852aedb6 fix: squeeze simp and paren 2026-03-10 10:08:22 -03:00
Sofia Rodrigues
1554f57525 fix: import 2026-03-09 21:18:57 -03:00
Sofia Rodrigues
1fa01cdadb style: just removed variable 2026-03-09 20:35:53 -03:00
Sofia Rodrigues
758e5afb07 refactor: simplify 2026-03-09 20:35:53 -03:00
Sofia Rodrigues
11516bbf09 fix: import 2026-03-09 20:35:53 -03:00
Sofia Rodrigues
f76dca5bba fix: proof 2026-03-09 20:35:53 -03:00
Sofia Rodrigues
fe6ac812af fix: panic 2026-03-09 20:35:53 -03:00
Sofia Rodrigues
51a00843ea fix: remove usage of 2026-03-09 20:35:53 -03:00
Sofia Rodrigues
c8c702af8d Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-04 16:40:39 -03:00
Sofia Rodrigues
5b5b0fad70 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-03-04 16:40:33 -03:00
Sofia Rodrigues
eab144bbb2 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-03-04 16:40:27 -03:00
Sofia Rodrigues
cfe282f024 fix: port parse 2026-03-04 16:40:03 -03:00
Sofia Rodrigues
e7f06c8fa2 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-04 16:34:41 -03:00
Sofia Rodrigues
beb85dd6b0 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-03-04 16:34:27 -03:00
Sofia Rodrigues
debafcf0ef Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-03-04 16:29:43 -03:00
Sofia Rodrigues
2668f07808 fix: alpha and isdigit 2026-03-04 16:29:31 -03:00
Sofia Rodrigues
e3928b7b1a Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-04 16:26:56 -03:00
Sofia Rodrigues
2f3a97ed8a Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-03-04 16:26:48 -03:00
Sofia Rodrigues
0315d56389 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-03-04 16:26:38 -03:00
Sofia Rodrigues
b9e489cc8f Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-03-04 16:22:58 -03:00
Sofia Rodrigues
135b049080 Merge branch 'master' into sofia/async-http-headers 2026-03-04 13:22:45 -03:00
Sofia Rodrigues
4005bd027b fix: size 2026-03-04 12:04:53 -03:00
Sofia Rodrigues
fbf03e31f9 Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-03-04 10:21:46 -03:00
Sofia Rodrigues
39ab2b289c test: move test from encode to the uri tests 2026-03-04 10:20:25 -03:00
Sofia Rodrigues
6c6f9a5d83 refactor: change Char.isDigit and Char.isAlpha 2026-03-04 10:00:41 -03:00
Sofia Rodrigues
a7aea9a12d style: format 2026-03-04 09:50:30 -03:00
Sofia Rodrigues
9517b5bc2d fix: h1 informational 2026-03-04 09:27:56 -03:00
Sofia Rodrigues
71debba5a2 refactor: change agentName field 2026-03-03 14:24:46 -03:00
Sofia Rodrigues
a2c5f3c79e Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-03 14:22:47 -03:00
Sofia Rodrigues
fd9117fc12 fix: server name 2026-03-03 14:22:39 -03:00
Sofia Rodrigues
1b6357dc03 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-03 13:59:18 -03:00
Sofia Rodrigues
38cb50d629 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-03-03 13:59:13 -03:00
Sofia Rodrigues
74af777707 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-03-03 13:59:07 -03:00
Sofia Rodrigues
3dfb5e002a Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-03-03 13:59:01 -03:00
Sofia Rodrigues
3075e5091b Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-03-03 13:58:55 -03:00
Sofia Rodrigues
af12f7e9be revert: wrong comment 2026-03-03 13:58:08 -03:00
Sofia Rodrigues
a2f9f74740 refactor: remove h1.0 tests 2026-03-03 13:57:14 -03:00
Sofia Rodrigues
13fb8a5980 fix: h1 discovers the port to host match 2026-03-03 13:56:41 -03:00
Sofia Rodrigues
41d2984f25 refactor: head -> line 2026-03-03 13:56:09 -03:00
Sofia Rodrigues
f63639d42b Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-03 13:42:13 -03:00
Sofia Rodrigues
6df74943e0 fix: http and https host port match 2026-03-03 13:41:30 -03:00
Sofia Rodrigues
865b147a91 fix: http1.0 behavior 2026-03-03 13:30:28 -03:00
Sofia Rodrigues
c2f2b3cf32 fix: refactor changes 2026-03-03 12:50:31 -03:00
Sofia Rodrigues
4173713f94 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-03-03 12:45:10 -03:00
Sofia Rodrigues
53c9277209 test: rename head to line 2026-03-03 12:43:44 -03:00
Sofia Rodrigues
f14977f495 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-03-03 12:42:31 -03:00
Sofia Rodrigues
cfa5cf76fc Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-03-03 12:42:23 -03:00
Sofia Rodrigues
238925a681 style: change field names 2026-03-03 12:38:52 -03:00
Sofia Rodrigues
8cb236e9eb style: remove parenthesis 2026-03-03 12:35:29 -03:00
Sofia Rodrigues
3d039f8dba fix: bugs and code style 2026-03-03 12:34:12 -03:00
Sofia Rodrigues
203d5362d4 Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-03-03 12:12:38 -03:00
Sofia Rodrigues
6189d4c130 Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-03-03 12:10:31 -03:00
Sofia Rodrigues
58f14d34d7 chore: improve comments 2026-03-03 12:10:22 -03:00
Sofia Rodrigues
710eee2b49 refactor: head to line 2026-03-03 12:08:26 -03:00
Sofia Rodrigues
bd4af50d04 Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-03-03 12:03:55 -03:00
Sofia Rodrigues
8cb30347b6 fix: rename head to line 2026-03-03 12:03:47 -03:00
Sofia Rodrigues
d8e6b09b90 Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-03-03 11:58:30 -03:00
Sofia Rodrigues
df8abc2b3f fix: remove token let tchar 2026-03-03 11:58:21 -03:00
Sofia Rodrigues
5a852bdffd Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-03-03 11:57:32 -03:00
Sofia Rodrigues
11d3860c69 fix: remove char testBit 2026-03-03 11:57:20 -03:00
Sofia Rodrigues
5a253001b3 Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-03-03 11:39:12 -03:00
Sofia Rodrigues
083fec29c8 test: fix 2026-03-03 11:38:54 -03:00
Sofia Rodrigues
d41753a5f9 fix: suggestions 2026-03-03 10:11:25 -03:00
Sofia Rodrigues
a086a817e0 feat: v10 2026-03-03 09:26:45 -03:00
Sofia Rodrigues
e434a4d44b feat: v10 2026-03-03 09:26:27 -03:00
Sofia Rodrigues
7295389284 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-03 08:34:31 -03:00
Sofia Rodrigues
f8e1bc685a Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-03-03 08:34:23 -03:00
Sofia Rodrigues
5e1204e70d fix: test 2026-03-03 08:34:18 -03:00
Sofia Rodrigues
a00ec10261 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-03 08:31:51 -03:00
Sofia Rodrigues
cb9b182824 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-03-03 08:31:44 -03:00
Sofia Rodrigues
61d7c151da fix: close status for empty 2026-03-03 08:31:33 -03:00
Sofia Rodrigues
f9f1bdc77b chore: comments 2026-03-03 08:29:55 -03:00
Sofia Rodrigues
f3452c09a9 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-03 08:26:37 -03:00
Sofia Rodrigues
2bed27681a chore: comments 2026-03-03 01:06:23 -03:00
Sofia Rodrigues
5bb3b08698 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-03 00:55:17 -03:00
Sofia Rodrigues
82645d0953 docs: improve comments on h1 machine and errors 2026-03-03 00:55:12 -03:00
Sofia Rodrigues
2ab52fb864 fix: test 2026-03-03 00:46:06 -03:00
Sofia Rodrigues
1bba3082f0 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-03 00:37:22 -03:00
Sofia Rodrigues
7ed7a1b69d fix: rfc 2026-03-03 00:37:13 -03:00
Sofia Rodrigues
bd10d0193e Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-03-03 00:36:46 -03:00
Sofia Rodrigues
67822f4c42 refactor: remove bv 2026-03-03 00:36:38 -03:00
Sofia Rodrigues
e7f6fbb473 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-03 00:35:13 -03:00
Sofia Rodrigues
1cb3d56618 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-03-03 00:34:37 -03:00
Sofia Rodrigues
d99485dd79 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-03-03 00:34:30 -03:00
Sofia Rodrigues
f85b9b8d09 Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-03-03 00:34:21 -03:00
Sofia Rodrigues
5fb254b7ef Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-03-03 00:24:40 -03:00
Sofia Rodrigues
6e202e34a4 feat: all char predicates 2026-03-03 00:24:16 -03:00
Sofia Rodrigues
843c814778 Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-03-03 00:18:49 -03:00
Sofia Rodrigues
c7d4d8d799 Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-03-03 00:17:31 -03:00
Sofia Rodrigues
91c60f801c fix: rstore treeMap tests from master 2026-03-03 00:16:26 -03:00
Sofia Rodrigues
ae30f55728 Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-03-02 20:52:24 -03:00
Sofia Rodrigues
63b0cc17c4 fix: char predicates 2026-03-02 20:51:55 -03:00
Sofia Rodrigues
c9a5111dcc feat: add client states 2026-03-02 20:02:00 -03:00
Sofia Rodrigues
8e12a4181c Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-03-02 19:48:01 -03:00
Sofia Rodrigues
33393a7c00 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-03-02 19:47:55 -03:00
Sofia Rodrigues
7434b97511 Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-03-02 19:47:48 -03:00
Sofia Rodrigues
29c8f8cfa1 Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-03-02 19:44:03 -03:00
Sofia Rodrigues
36b2d99e3d fix: encode 2026-03-02 19:43:57 -03:00
Sofia Rodrigues
4b8a48c817 fix: method parsing 2026-03-02 19:36:08 -03:00
Sofia Rodrigues
e0862a0220 fix: tests 2026-03-02 19:35:36 -03:00
Sofia Rodrigues
10fc7da3fa Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-03-02 12:26:17 -03:00
Sofia Rodrigues
a1f535d9d8 fix: remove treemap 2026-03-02 12:26:13 -03:00
Sofia Rodrigues
993c87dd80 fix: methods 2026-03-02 12:26:00 -03:00
Sofia Rodrigues
742e3080c9 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-03-02 12:02:25 -03:00
Sofia Rodrigues
3de1d21c86 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-03-02 12:01:25 -03:00
Sofia Rodrigues
83a0756b05 fix: remove treemap 2026-03-02 12:01:18 -03:00
Sofia Rodrigues
b8f2cd94aa Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-03-02 12:00:58 -03:00
Sofia Rodrigues
64ff045559 fix: remove treemap 2026-03-02 12:00:55 -03:00
Sofia Rodrigues
109ab8eb68 Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-03-02 11:59:21 -03:00
Sofia Rodrigues
bf09ea8ff5 feat: remove tests temporarily 2026-03-02 11:56:45 -03:00
Sofia Rodrigues
7ce9fe9f97 feat: remove tests temporarily 2026-03-02 11:54:16 -03:00
Sofia Rodrigues
aff9e0c459 refactor: rust-types-rs like method enum with IANA specification 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
a74df33feb fix: method 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
dd63b614eb fix: comments 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
515e6e20c0 fix: test 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
cc45fc9cc2 fix: dots 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
bc9c18f0b0 fix: small changes 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
8ee21a7176 fix: comment 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
92aa9f2b8a fix: RFC checks and small improvements 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
c2243a0ea5 fix: tests 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
efbd23a6d9 fix: format 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
26440fcf6a fix: extension values 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
ac4c5451e4 fix: data char 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
c94c5cb7e4 fix: comments 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
78ca6edc99 feat: specialize quote 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
d92dc22df3 fix: test 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
48ab74f044 fix: status code 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
da68a63902 feat: reason phrase in custom status code 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
db99fd2d7d feat: ignore reasonphrase 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
a61712c962 feat: validation in reasonPhrase 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
ea36555588 fix: reasonPhrase 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
b02bc4d6d2 feat: reason phrase 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
c836fe8723 fix: typos and compareName 2026-03-02 11:53:56 -03:00
Sofia Rodrigues
8068ed317c fix: typos 2026-03-02 11:53:55 -03:00
Sofia Rodrigues
0bd44ab745 fix: comment 2026-03-02 11:53:55 -03:00
Sofia Rodrigues
172d12c75c refactor: move trailers 2026-03-02 11:53:55 -03:00
Sofia Rodrigues
6b6b9fffff feat: add extension handling of quotes and ExtensionName 2026-03-02 11:53:55 -03:00
Sofia Rodrigues
f3fa5c8242 fix: chunked 2026-03-02 11:53:55 -03:00
Sofia Rodrigues
b0c5667f06 fix: import 2026-03-02 11:53:55 -03:00
Sofia Rodrigues
2d262c9755 fix: interpolation 2026-03-02 11:53:55 -03:00
Sofia Rodrigues
571898bf63 fix: extensions 2026-03-02 11:53:55 -03:00
Sofia Rodrigues
0570277a2e feat: add extensions 2026-03-02 11:53:55 -03:00
Sofia Rodrigues
557709d9bb fix: apply suggestions 2026-03-02 11:53:55 -03:00
Sofia Rodrigues
0229508ca7 refactor: remove headers 2026-03-02 11:53:55 -03:00
Sofia Rodrigues
ace10ee42b fix: default size 2026-03-02 11:53:55 -03:00
Sofia Rodrigues
4e36dcc98f fix: apply suggestions
Co-authored-by: Rob23oba <152706811+Rob23oba@users.noreply.github.com>
2026-03-02 11:53:55 -03:00
Sofia Rodrigues
a93ea184fe fix: status and chunkedbuffer 2026-03-02 11:53:55 -03:00
Sofia Rodrigues
c309a3c07e feat: basic headers structure to more structured approach 2026-03-02 11:53:55 -03:00
Sofia Rodrigues
30641c617f feat: data components 2026-03-02 11:53:55 -03:00
Sofia Rodrigues
37fcb2ce55 refactor: comments 2026-03-02 09:22:43 -03:00
Sofia Rodrigues
97cd66afde fix: comments 2026-03-01 16:45:51 -03:00
Sofia Rodrigues
6dbb6b8d0e Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-28 15:41:39 -03:00
Sofia Rodrigues
4306782b93 Merge branch 'sofia/async-http-uri' into sofia/async-http-server 2026-02-28 15:41:35 -03:00
Sofia Rodrigues
6935306439 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-28 15:41:25 -03:00
Sofia Rodrigues
1aa23cd92b Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-28 15:41:18 -03:00
Sofia Rodrigues
0bb4ba72d4 Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-02-28 15:41:11 -03:00
Sofia Rodrigues
57a4d9ad4b Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-02-28 15:41:04 -03:00
Sofia Rodrigues
bfc6617c12 fix: method 2026-02-28 15:40:55 -03:00
Sofia Rodrigues
c1b5b64797 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-28 12:41:44 -03:00
Sofia Rodrigues
9b563220b2 fix: strict method 2026-02-28 12:41:39 -03:00
Sofia Rodrigues
0eb4a6e8c6 fix: timeout and config 2026-02-28 12:40:46 -03:00
Sofia Rodrigues
4614def4cd Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-27 21:14:21 -03:00
Sofia Rodrigues
c97dfe585a feat: headers rfc refs 2026-02-27 21:14:09 -03:00
Sofia Rodrigues
74ecbca430 feat: tests 2026-02-27 21:12:33 -03:00
Sofia Rodrigues
6fa6d2e3f7 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-27 14:58:31 -03:00
Sofia Rodrigues
05c4d9202a fix: comments 2026-02-27 14:58:24 -03:00
Sofia Rodrigues
3a4e9f6eca Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-27 14:52:23 -03:00
Sofia Rodrigues
aa09ab0cd9 fix: function name 2026-02-27 14:52:16 -03:00
Sofia Rodrigues
8affe05767 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-27 14:31:17 -03:00
Sofia Rodrigues
3aa02eede3 fix: comments 2026-02-27 14:31:09 -03:00
Sofia Rodrigues
c86f926d1b Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-02-27 14:22:38 -03:00
Sofia Rodrigues
ff4419357c Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-02-27 14:20:02 -03:00
Sofia Rodrigues
3c131da050 fix: comments 2026-02-27 14:19:32 -03:00
Sofia Rodrigues
5fd94a1e1d Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-27 13:24:18 -03:00
Sofia Rodrigues
fcc4185bb2 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-27 13:24:08 -03:00
Sofia Rodrigues
bae251d15a Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-27 13:23:58 -03:00
Sofia Rodrigues
6edc0c7427 Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-02-27 13:22:51 -03:00
Sofia Rodrigues
563189fec9 Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-02-27 13:04:51 -03:00
Sofia Rodrigues
25d7db2e62 Merge branch 'master' of https://github.com/leanprover/lean4 into sofia/async-http-data 2026-02-27 10:08:06 -03:00
Sofia Rodrigues
e569c9ef64 feat: remove unacurate test 2026-02-27 09:04:24 -03:00
Sofia Rodrigues
c467175336 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-26 18:57:21 -03:00
Sofia Rodrigues
7562c103dd Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-26 18:57:17 -03:00
Sofia Rodrigues
1be8c11cee fix: empty chunk 2026-02-26 18:57:05 -03:00
Sofia Rodrigues
ea6c1e65f6 fix: small changes 2026-02-26 18:56:48 -03:00
Sofia Rodrigues
67300c640c fix: tests 2026-02-26 18:56:16 -03:00
Sofia Rodrigues
625e1c9a32 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-26 17:33:41 -03:00
Sofia Rodrigues
b09946684b Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-26 17:09:33 -03:00
Sofia Rodrigues
beedfa1e4e fix: small comments fix and parameters 2026-02-26 16:49:15 -03:00
Sofia Rodrigues
f68c2420e7 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-26 16:17:48 -03:00
Sofia Rodrigues
cdfd24171a fix: test 2026-02-26 16:15:03 -03:00
Sofia Rodrigues
718e549de3 Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-02-26 16:10:56 -03:00
Sofia Rodrigues
81f76a24d8 fix:lower case scheme 2026-02-26 16:10:46 -03:00
Sofia Rodrigues
292f297006 feat: small comments 2026-02-26 15:41:20 -03:00
Sofia Rodrigues
b7be57272a feat: forIn 2026-02-26 15:24:17 -03:00
Sofia Rodrigues
a0dc1dbbc0 fix: test 2026-02-26 15:18:32 -03:00
Sofia Rodrigues
2e604884dd Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-02-26 15:16:35 -03:00
Sofia Rodrigues
2049542833 feat: order 2026-02-26 15:16:23 -03:00
Sofia Rodrigues
caf19b8458 feat: order 2026-02-26 15:13:56 -03:00
Sofia Rodrigues
c5180b2dfc Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-02-26 01:23:46 -03:00
Sofia Rodrigues
91c5b717f0 Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-02-26 01:23:24 -03:00
Sofia Rodrigues
cb6f540efb fix: test 2026-02-26 01:22:46 -03:00
Sofia Rodrigues
ec833b52ee Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-02-26 01:01:46 -03:00
Sofia Rodrigues
ba36c1dee2 fix: comments 2026-02-26 01:01:41 -03:00
Sofia Rodrigues
5cb510cdf7 fix: precedence 2026-02-26 01:01:24 -03:00
Sofia Rodrigues
a72de461cd Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-02-25 22:34:45 -03:00
Sofia Rodrigues
228f0d24a7 fix: remove unused headers 2026-02-25 22:34:41 -03:00
Sofia Rodrigues
73cf41d7e5 fix: comments 2026-02-25 22:30:43 -03:00
Sofia Rodrigues
819d4c6c1f fix: uri comments 2026-02-25 22:13:19 -03:00
Sofia Rodrigues
4de3e40349 fix: builder 2026-02-25 22:00:11 -03:00
Sofia Rodrigues
03f1d47462 Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-02-25 21:59:08 -03:00
Sofia Rodrigues
a88908572c Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-02-25 21:54:21 -03:00
Sofia Rodrigues
55d357dbb4 fix: dots 2026-02-25 21:54:18 -03:00
Sofia Rodrigues
49d00ae056 fix: comments and small formatting errors 2026-02-25 21:52:12 -03:00
Sofia Rodrigues
e9eed5cbe4 Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-02-25 20:57:02 -03:00
Sofia Rodrigues
2652ae0fb8 fix: small changes 2026-02-25 20:56:19 -03:00
Sofia Rodrigues
3f48ef4af9 fix: comment 2026-02-25 20:45:57 -03:00
Sofia Rodrigues
a9de308aea fix: RFC checks and small improvements 2026-02-25 20:45:22 -03:00
Sofia Rodrigues
405d03aac9 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-25 17:53:29 -03:00
Sofia Rodrigues
d5a819f30f fix: config names 2026-02-25 17:53:25 -03:00
Sofia Rodrigues
81c3e5034a fix: pull 2026-02-25 17:52:59 -03:00
Sofia Rodrigues
c971d3f490 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-25 15:06:39 -03:00
Sofia Rodrigues
26bcd2d065 feat: avoid forbidden trailer headers 2026-02-25 15:06:32 -03:00
Sofia Rodrigues
9c1054adca fix: slow attack 2026-02-25 15:06:19 -03:00
Sofia Rodrigues
cba7bfbbe7 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-25 14:15:51 -03:00
Sofia Rodrigues
2990b41d44 feat: improve incrementality 2026-02-25 13:43:47 -03:00
Sofia Rodrigues
f543206d4a fix: test 2026-02-25 08:53:15 -03:00
Sofia Rodrigues
1cd2cba130 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-25 00:53:38 -03:00
Sofia Rodrigues
a009ad2a68 fix: transfer 2026-02-25 00:53:04 -03:00
Sofia Rodrigues
6a19fc5a21 fix: host 2026-02-25 00:51:41 -03:00
Sofia Rodrigues
91275b3747 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-24 23:24:28 -03:00
Sofia Rodrigues
df80ac720a fix: semaphore 2026-02-24 23:24:16 -03:00
Sofia Rodrigues
6797ca9345 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-24 20:16:10 -03:00
Sofia Rodrigues
c266649454 fix: sleep 2026-02-24 20:16:05 -03:00
Sofia Rodrigues
7160b92bfb fix: semaphore 2026-02-24 20:15:56 -03:00
Sofia Rodrigues
6d1a0ecc8a fix: semaphore 2026-02-24 20:15:22 -03:00
Sofia Rodrigues
fd96be3870 feat: rfc compliance with some features 2026-02-24 19:09:33 -03:00
Sofia Rodrigues
3a3620e8aa fix: tests 2026-02-24 14:48:08 -03:00
Sofia Rodrigues
11fd4c8244 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-24 14:43:49 -03:00
Sofia Rodrigues
2731e1d942 fix: headers 2026-02-24 14:43:44 -03:00
Sofia Rodrigues
0ef3c83ed8 feat: ignore prior crlf 2026-02-24 14:39:02 -03:00
Sofia Rodrigues
edad8a090b Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-24 14:31:54 -03:00
Sofia Rodrigues
74dc55152f fix: test 2026-02-24 14:31:29 -03:00
Sofia Rodrigues
bf2471b8f1 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-24 14:28:32 -03:00
Sofia Rodrigues
21821ef062 fix: encoding duplication 2026-02-24 14:28:14 -03:00
Sofia Rodrigues
5ba3a6d4fc Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-02-24 10:47:43 -03:00
Sofia Rodrigues
8492e58a82 fix: comments 2026-02-24 10:38:11 -03:00
Sofia Rodrigues
e65e20e1cb feat: field content 2026-02-24 09:57:22 -03:00
Sofia Rodrigues
de7c029c9f feat: field content 2026-02-24 09:56:52 -03:00
Sofia Rodrigues
89c992a3c9 Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-02-24 09:26:59 -03:00
Sofia Rodrigues
0b76c3de69 fix: tests 2026-02-24 09:20:52 -03:00
Sofia Rodrigues
ff99979855 Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-02-24 09:09:39 -03:00
Sofia Rodrigues
9ddbb59fe1 fix: format 2026-02-24 09:09:35 -03:00
Sofia Rodrigues
36f87f98f8 fix: char 2026-02-24 09:09:14 -03:00
Sofia Rodrigues
5914fe3a4a Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-02-24 08:54:43 -03:00
Sofia Rodrigues
29f651a89c fix: extension values 2026-02-24 08:53:16 -03:00
Sofia Rodrigues
2e1bdd922e 2026-02-24 08:35:36 -03:00
Sofia Rodrigues
ab5d50cbc3 fix: data char 2026-02-23 22:18:15 -03:00
Sofia Rodrigues
7902db17c2 fix: comments 2026-02-21 01:25:05 -03:00
Sofia Rodrigues
5626ee369c feat: specialize quote 2026-02-21 00:53:15 -03:00
Sofia Rodrigues
682e2b99f3 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-20 22:30:31 -03:00
Sofia Rodrigues
6ed32edec0 feat: reorganize fix EOF 2026-02-20 22:30:24 -03:00
Sofia Rodrigues
662bed5a28 tests: add 2026-02-20 22:28:45 -03:00
Sofia Rodrigues
d0e884dc54 fix: config 2026-02-20 18:28:42 -03:00
Sofia Rodrigues
abf3305397 fix: move test from 200 to 400 2026-02-20 18:27:51 -03:00
Sofia Rodrigues
a6f42abe62 feat: remove lenience 2026-02-20 18:26:57 -03:00
Sofia Rodrigues
7a50344af4 feat: add header max config 2026-02-20 18:24:17 -03:00
Sofia Rodrigues
c7bcd4fbed Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-20 18:22:43 -03:00
Sofia Rodrigues
d367a9fe80 fix: enforce crlf and header bytes 2026-02-20 18:22:34 -03:00
Sofia Rodrigues
0e0578eacb Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-20 18:03:08 -03:00
Sofia Rodrigues
663eec9dc3 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-20 18:03:03 -03:00
Sofia Rodrigues
e62f8d608d Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-20 18:02:56 -03:00
Sofia Rodrigues
0fb57a405f fix: wrong comment 2026-02-20 18:02:46 -03:00
Sofia Rodrigues
ce009e2dca fix: remove double crlf 2026-02-20 18:02:28 -03:00
Sofia Rodrigues
c9cf60f173 fix: timeout on slow connections 2026-02-20 18:02:14 -03:00
Sofia Rodrigues
5263c32ea4 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-20 17:17:50 -03:00
Sofia Rodrigues
89191367b7 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-20 17:11:33 -03:00
Sofia Rodrigues
999ce40ca6 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-20 17:11:27 -03:00
Sofia Rodrigues
bfa18ef30c fix: uri builder 2026-02-20 17:11:05 -03:00
Sofia Rodrigues
a850879adf fix: reason phrase 2026-02-20 17:06:06 -03:00
Sofia Rodrigues
34c5c70ec6 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-20 17:00:21 -03:00
Sofia Rodrigues
81492aa5b2 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-20 17:00:13 -03:00
Sofia Rodrigues
e0efb8aec9 fix: scheme parser 2026-02-20 16:59:24 -03:00
Sofia Rodrigues
530f6865f9 Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-02-20 16:40:06 -03:00
Sofia Rodrigues
f97d86cf4b feat: trim headers 2026-02-20 16:40:00 -03:00
Sofia Rodrigues
781b9f561e Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-02-20 16:30:28 -03:00
Sofia Rodrigues
a9ac33d994 Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-02-20 16:24:06 -03:00
Sofia Rodrigues
c457a98d6a fix: test 2026-02-20 16:23:24 -03:00
Sofia Rodrigues
8d8439bf0b fix: status code 2026-02-20 16:19:55 -03:00
Sofia Rodrigues
7cf419491a Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-20 16:11:33 -03:00
Sofia Rodrigues
4cbdb39211 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-20 16:11:27 -03:00
Sofia Rodrigues
54ac93fb32 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-20 16:11:09 -03:00
Sofia Rodrigues
eddb5e139d Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-02-20 16:10:13 -03:00
Sofia Rodrigues
5a53207723 feat: remove direct access 2026-02-20 16:10:06 -03:00
Sofia Rodrigues
0d3f6e5481 Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-02-20 16:09:16 -03:00
Sofia Rodrigues
96a017262c feat: reason phrase in custom status code 2026-02-20 16:08:26 -03:00
Sofia Rodrigues
04c73b64a5 test: fuzz 2026-02-20 16:05:09 -03:00
Sofia Rodrigues
02adf1fae0 fix: dedup 2026-02-20 16:04:56 -03:00
Sofia Rodrigues
9291e925ff fix: commment 2026-02-20 15:33:14 -03:00
Sofia Rodrigues
1d0e26e494 tests: dedu 2026-02-20 15:33:07 -03:00
Sofia Rodrigues
5528f97c8f fix: dedup 2026-02-20 14:12:00 -03:00
Sofia Rodrigues
32d42b52e9 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-20 14:07:10 -03:00
Sofia Rodrigues
f1ed971f26 fix: omit body 2026-02-20 14:06:15 -03:00
Sofia Rodrigues
b5610a43db feat: test 2026-02-20 14:04:39 -03:00
Sofia Rodrigues
a182a6652e feat: omit body 2026-02-20 14:04:22 -03:00
Sofia Rodrigues
cf51a32ffb fix: space sequence 2026-02-20 14:04:03 -03:00
Sofia Rodrigues
11cc11bc2f fix: test 2026-02-20 13:42:54 -03:00
Sofia Rodrigues
8cef903224 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-20 13:40:46 -03:00
Sofia Rodrigues
f5492db7fa fix: host validation and rreasonphrase 2026-02-20 13:40:39 -03:00
Sofia Rodrigues
cf603cdc7c Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-20 13:05:01 -03:00
Sofia Rodrigues
d07e1a6341 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-20 13:04:56 -03:00
Sofia Rodrigues
549e16f069 feat: add reader 2026-02-20 13:04:51 -03:00
Sofia Rodrigues
2e1406b683 fix: connection handler 2026-02-20 13:04:41 -03:00
Sofia Rodrigues
bfdfabd4a5 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-20 12:33:37 -03:00
Sofia Rodrigues
004c076236 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-20 12:33:30 -03:00
Sofia Rodrigues
93a6ecbbbc feat: reader features to write only things 2026-02-20 12:33:23 -03:00
Sofia Rodrigues
3c877f9604 feat: body 2026-02-20 12:32:51 -03:00
Sofia Rodrigues
d317c0208b feat: body 2026-02-20 12:32:40 -03:00
Sofia Rodrigues
4716725e81 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-20 12:01:57 -03:00
Sofia Rodrigues
4f15fe36e0 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-20 12:01:51 -03:00
Sofia Rodrigues
8bcc838f47 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-20 12:01:46 -03:00
Sofia Rodrigues
462e3d02dd fix: import coe 2026-02-20 12:01:21 -03:00
Sofia Rodrigues
541f9b2dc9 fix: rendezvouz stream 2026-02-20 12:01:02 -03:00
Sofia Rodrigues
86107e2b5a feat: discard reason-phrase 2026-02-20 11:49:30 -03:00
Sofia Rodrigues
5cc0026f3d Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-20 11:46:15 -03:00
Sofia Rodrigues
c5db47444e fix: bodyt ests 2026-02-20 11:46:08 -03:00
Sofia Rodrigues
fffc2b5633 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-20 11:40:58 -03:00
Sofia Rodrigues
637f260529 feat: discard reason-phrase 2026-02-20 11:40:52 -03:00
Sofia Rodrigues
469f466832 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-20 11:39:17 -03:00
Sofia Rodrigues
ecb7480b37 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-20 11:37:42 -03:00
Sofia Rodrigues
42800e4037 feat: body type class 2026-02-20 11:37:35 -03:00
Sofia Rodrigues
b52bbc9ae4 fix: rfc expected 2026-02-19 17:14:53 -03:00
Sofia Rodrigues
eaa1390a36 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-19 14:21:41 -03:00
Sofia Rodrigues
b38f01ef51 feat: ignore reasonphrase 2026-02-19 14:01:21 -03:00
Sofia Rodrigues
73bf2b5e04 Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-02-19 13:36:29 -03:00
Sofia Rodrigues
c8c92fcf92 Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-02-19 13:13:28 -03:00
Sofia Rodrigues
cf6b159da5 feat: validation in reasonPhrase 2026-02-19 13:13:22 -03:00
Sofia Rodrigues
330e1c5340 fix: config 2026-02-19 11:45:49 -03:00
Sofia Rodrigues
b40bc2e89c Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-19 11:39:36 -03:00
Sofia Rodrigues
e8347e9e9b fix: comments 2026-02-19 11:25:44 -03:00
Sofia Rodrigues
d051b967ed fix: method limit and comments 2026-02-19 11:18:18 -03:00
Sofia Rodrigues
cf4776ef92 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-19 10:51:52 -03:00
Sofia Rodrigues
b1ff312ef5 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-19 10:51:39 -03:00
Sofia Rodrigues
319214cfb3 feat: config URI 2026-02-19 10:51:32 -03:00
Sofia Rodrigues
e75049b604 feat: remove identity 2026-02-19 10:19:06 -03:00
Sofia Rodrigues
836cdf47a5 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-18 21:19:51 -03:00
Sofia Rodrigues
01f9c257e8 fix: comments 2026-02-18 21:19:29 -03:00
Sofia Rodrigues
3d07f4fd56 fix: comments 2026-02-18 21:19:08 -03:00
Sofia Rodrigues
7dc97a02fd fix: comments 2026-02-18 21:18:51 -03:00
Sofia Rodrigues
afd2f12242 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-18 20:56:42 -03:00
Sofia Rodrigues
5faf0572f6 feat: improve manyItems 2026-02-18 20:56:35 -03:00
Sofia Rodrigues
8d349ccbaa Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-18 19:46:46 -03:00
Sofia Rodrigues
9c35a91e0f feat: better validation in parser 2026-02-18 19:43:57 -03:00
Sofia Rodrigues
2da4e1b572 feat: connection lmit 2026-02-18 19:25:07 -03:00
Sofia Rodrigues
5368b134bb fix: test 2026-02-18 14:14:58 -03:00
Sofia Rodrigues
d1f090ee98 fix: test 2026-02-18 13:48:45 -03:00
Sofia Rodrigues
f311c9594f feat: unsuppoted method 2026-02-18 12:22:50 -03:00
Sofia Rodrigues
c6a3ab0a77 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-18 12:22:26 -03:00
Sofia Rodrigues
ba25ab3490 fix: method validation 2026-02-18 12:22:12 -03:00
Sofia Rodrigues
1095ebbeed fix: config 2026-02-18 11:49:45 -03:00
Sofia Rodrigues
299b15c8e9 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-18 11:47:39 -03:00
Sofia Rodrigues
091cb00ab9 feat: add max bytes 2026-02-18 11:47:33 -03:00
Sofia Rodrigues
2b408d2699 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-18 11:18:34 -03:00
Sofia Rodrigues
702efcacca refactor: remove duplication 2026-02-18 11:18:25 -03:00
Sofia Rodrigues
98ba01dc49 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-18 11:04:55 -03:00
Sofia Rodrigues
e1225efa03 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-18 11:03:40 -03:00
Sofia Rodrigues
37c7b1e22c feat: reuse char 2026-02-18 11:03:35 -03:00
Sofia Rodrigues
eea8e06d6b Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-18 10:57:12 -03:00
Sofia Rodrigues
c4234961bc Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-18 10:56:29 -03:00
Sofia Rodrigues
42cfda23f3 Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-02-18 10:56:23 -03:00
Sofia Rodrigues
78316b9ade fix: isToken 2026-02-18 10:55:55 -03:00
Sofia Rodrigues
dd09289d2b fix: duplication 2026-02-18 10:36:17 -03:00
Sofia Rodrigues
10a66e9f9a fix: uri duplication checkers 2026-02-18 10:35:04 -03:00
Sofia Rodrigues
ad4719399d fix: trailing peridos 2026-02-18 10:11:47 -03:00
Sofia Rodrigues
892ab921b7 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-18 09:46:00 -03:00
Sofia Rodrigues
6551c32f6b Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-18 09:45:50 -03:00
Sofia Rodrigues
b8eac648ab Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-18 09:45:13 -03:00
Sofia Rodrigues
53fb1a25b3 Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-02-18 09:45:02 -03:00
Sofia Rodrigues
3fdaf2df0c Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-02-18 09:44:01 -03:00
Sofia Rodrigues
4ba722f51c fix: reasonPhrase 2026-02-18 09:40:25 -03:00
Sofia Rodrigues
42b726c376 fix: misleading comment 2026-02-18 09:26:59 -03:00
Sofia Rodrigues
8bec5f4b98 fix: comments 2026-02-18 09:25:49 -03:00
Sofia Rodrigues
9a8bc523c5 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-18 08:53:23 -03:00
Sofia Rodrigues
59253973ce Merge branch 'sofia/async-http-body' into sofia/async-http-server 2026-02-18 08:53:19 -03:00
Sofia Rodrigues
205149a884 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-18 08:53:10 -03:00
Sofia Rodrigues
a89a69e7da fix: queue test 2026-02-18 08:52:59 -03:00
Sofia Rodrigues
9bb429d4e7 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-18 08:43:58 -03:00
Sofia Rodrigues
542a3a4e71 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-18 08:43:49 -03:00
Sofia Rodrigues
3646590506 feat: queue 2026-02-18 08:43:33 -03:00
Sofia Rodrigues
cf87c9594c feat: failure gate 2026-02-18 08:37:59 -03:00
Sofia Rodrigues
71420f6c81 feat: tests 2026-02-18 08:37:46 -03:00
Sofia Rodrigues
b6fdd8adc3 feat: failure gate 2026-02-18 08:37:13 -03:00
Sofia Rodrigues
45747bd2ef Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-17 23:46:33 -03:00
Sofia Rodrigues
69c75c1b56 fix: incomplete fixed chung 2026-02-17 23:46:28 -03:00
Sofia Rodrigues
bed5d8567c feat: incomplete chunks 2026-02-17 23:42:53 -03:00
Sofia Rodrigues
0c5d25a763 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-17 23:37:36 -03:00
Sofia Rodrigues
c324ee8347 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-17 23:37:29 -03:00
Sofia Rodrigues
193bbddb4e feat: incomlpete chunks 2026-02-17 23:37:16 -03:00
Sofia Rodrigues
6821bb82db feat: close after generate 2026-02-17 21:25:06 -03:00
Sofia Rodrigues
1cbd0569eb Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-17 21:09:13 -03:00
Sofia Rodrigues
14dbb661f8 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-17 21:08:50 -03:00
Sofia Rodrigues
ea5a986693 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-17 21:08:38 -03:00
Sofia Rodrigues
37ec94e2f0 Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-02-17 21:08:30 -03:00
Sofia Rodrigues
157e3b032d Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-02-17 21:08:21 -03:00
Sofia Rodrigues
910c71954e feat: reason phrase 2026-02-17 21:07:48 -03:00
Sofia Rodrigues
27107066e3 fix: small issues with framing 2026-02-17 21:03:00 -03:00
Sofia Rodrigues
fd1843e120 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-17 12:12:11 -03:00
Sofia Rodrigues
dd2ab67d2b Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-17 11:51:41 -03:00
Sofia Rodrigues
9dd5634759 Merge branch 'sofia/async-http-headers' into sofia/async-http-uri 2026-02-17 11:47:57 -03:00
Sofia Rodrigues
a521ba3abd fix: typos and connection token 2026-02-17 11:47:46 -03:00
Sofia Rodrigues
6b0f05d075 Merge branch 'sofia/async-http-data' into sofia/async-http-headers 2026-02-17 11:12:22 -03:00
Sofia Rodrigues
61d6c02ecd fix: typos and compareName 2026-02-17 11:12:13 -03:00
Sofia Rodrigues
b7d4e12fbf Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-17 03:02:56 -03:00
Sofia Rodrigues
dc6d015870 fix: get-size 2026-02-17 03:02:53 -03:00
Sofia Rodrigues
07a05a3995 fix: 100-expect 2026-02-17 03:02:44 -03:00
Sofia Rodrigues
182625774d Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-17 03:00:43 -03:00
Sofia Rodrigues
b4684a2406 fix: 100-expect case, quoted parser and te 2026-02-17 03:00:14 -03:00
Sofia Rodrigues
ecc0ec05bd fix: typos 2026-02-17 02:55:21 -03:00
Sofia Rodrigues
5193b739ca fix: typos 2026-02-17 02:54:19 -03:00
Sofia Rodrigues
70c0a902f4 fix: api 2026-02-17 02:05:42 -03:00
Sofia Rodrigues
7f29fd0fcd Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-17 02:01:27 -03:00
Sofia Rodrigues
239536f1d8 fix: transfer-encoding gzip 2026-02-17 02:00:11 -03:00
Sofia Rodrigues
71be391dd3 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-17 01:55:33 -03:00
Sofia Rodrigues
df738acaa4 fix: direction 2026-02-17 01:55:26 -03:00
Sofia Rodrigues
8ed56677e5 feat: stale 2026-02-17 01:26:21 -03:00
Sofia Rodrigues
60d0b7c97a feat: server 2026-02-17 01:25:53 -03:00
Sofia Rodrigues
17a2c9e0c2 feat: server 2026-02-17 01:25:42 -03:00
Sofia Rodrigues
7ee37564d3 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-16 23:56:03 -03:00
Sofia Rodrigues
2ee7513f80 feat: pull-based body 2026-02-16 23:55:58 -03:00
Sofia Rodrigues
7d6505d296 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-16 22:33:45 -03:00
Sofia Rodrigues
8722e50897 feat: pull-based body 2026-02-16 22:33:35 -03:00
Sofia Rodrigues
fa8d76fa37 fix: frameCancellation with error 2026-02-16 01:22:00 -03:00
Sofia Rodrigues
c50fca363a fix: comments 2026-02-16 01:12:49 -03:00
Sofia Rodrigues
e8ff308154 fix: handler 2026-02-16 01:10:19 -03:00
Sofia Rodrigues
cdcb9db4ba Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-14 06:14:38 -03:00
Sofia Rodrigues
a8e405ac5d fix: knownsize 2026-02-14 06:13:39 -03:00
Sofia Rodrigues
b6705cceb2 fix: server 2026-02-14 05:59:54 -03:00
Sofia Rodrigues
af58b4f286 Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-14 05:51:28 -03:00
Sofia Rodrigues
02dc048ad2 feat: improve h1 2026-02-14 05:51:03 -03:00
Sofia Rodrigues
a981d91552 Merge branch 'sofia/async-http-body' of https://github.com/leanprover/lean4 into sofia/async-http-h1 2026-02-13 11:58:23 -03:00
Sofia Rodrigues
96ffa3e354 Merge branch 'sofia/async-http-uri' of https://github.com/leanprover/lean4 into sofia/async-http-body 2026-02-13 11:56:52 -03:00
Sofia Rodrigues
1c564ed5f7 fix: comment 2026-02-13 11:56:44 -03:00
Sofia Rodrigues
9dd5f62e0e Merge branch 'sofia/async-http-uri' of https://github.com/leanprover/lean4 into sofia/async-http-body 2026-02-13 11:53:16 -03:00
Sofia Rodrigues
c4737fb66a fix: parse ab-empty 2026-02-13 11:52:53 -03:00
Sofia Rodrigues
43d3b2df91 merge: 'sofia/async-http-headers' 2026-02-13 11:43:55 -03:00
Sofia Rodrigues
87c5488c20 merge: 'sofia/async-http-headers' 2026-02-13 11:39:09 -03:00
Sofia Rodrigues
e0d5596e63 fix: typo 2026-02-13 11:37:08 -03:00
Sofia Rodrigues
1f2671db3d merge: branch 'sofia/async-http-data' 2026-02-13 11:35:33 -03:00
Sofia Rodrigues
940ab9bdb5 fix: typos 2026-02-13 11:33:18 -03:00
Sofia Rodrigues
8017d39c4e Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-13 10:29:22 -03:00
Sofia Rodrigues
25bb4ee812 feat: protocol 2026-02-13 10:28:48 -03:00
Sofia Rodrigues
7c1aff34e2 Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-h1 2026-02-13 10:28:13 -03:00
Sofia Rodrigues
28670d4420 Merge branch 'sofia/async-http-body' of https://github.com/leanprover/lean4 into sofia/async-http-h1 2026-02-13 10:26:13 -03:00
Sofia Rodrigues
30f3a3520e Merge branch 'sofia/async-http-body' of https://github.com/leanprover/lean4 into sofia/async-http-h1 2026-02-13 10:22:47 -03:00
Sofia Rodrigues
9acca40aaf revert: h1 2026-02-13 10:21:57 -03:00
Sofia Rodrigues
bf2ed2c87a revert: h1 2026-02-13 10:20:35 -03:00
Sofia Rodrigues
3561d58203 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-13 10:14:03 -03:00
Sofia Rodrigues
1d80616068 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-13 10:13:55 -03:00
Sofia Rodrigues
61c93a7f57 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-13 10:13:40 -03:00
Sofia Rodrigues
b042b8efbd fix: parser path 2026-02-13 10:13:00 -03:00
Sofia Rodrigues
8c00ba48ae fix: parser 2026-02-13 10:12:22 -03:00
Sofia Rodrigues
991a27b7f2 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-13 10:11:30 -03:00
Sofia Rodrigues
69e38e9495 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-13 10:11:23 -03:00
Sofia Rodrigues
16d0162ef0 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-13 10:11:10 -03:00
Sofia Rodrigues
d07f5c502f feat: specialize encodedstrings 2026-02-13 10:10:34 -03:00
Sofia Rodrigues
5b1493507d feat: body channel should close on completion 2026-02-13 02:53:16 -03:00
Sofia Rodrigues
1180572926 fix: test 2026-02-13 02:29:55 -03:00
Sofia Rodrigues
6dc19ef871 Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-13 02:23:20 -03:00
Sofia Rodrigues
4a641fc498 revert: bytearray parser 2026-02-13 02:22:43 -03:00
Sofia Rodrigues
2a04014fa7 Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-13 02:18:18 -03:00
Sofia Rodrigues
4f20a815ec fix: extension name 2026-02-13 02:18:09 -03:00
Sofia Rodrigues
4906e14e51 Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-13 02:13:50 -03:00
Sofia Rodrigues
c9296c7371 Merge branch 'sofia/async-http-body' of https://github.com/leanprover/lean4 into sofia/async-http-h1 2026-02-13 02:12:07 -03:00
Sofia Rodrigues
4db36b214b feat: improve parser 2026-02-13 02:11:38 -03:00
Sofia Rodrigues
a6d94c7504 Merge branch 'sofia/async-http-body' of https://github.com/leanprover/lean4 into sofia/async-http-h1 2026-02-13 01:58:16 -03:00
Sofia Rodrigues
045abb48bb Merge branch 'sofia/async-http-uri' of https://github.com/leanprover/lean4 into sofia/async-http-body 2026-02-13 01:57:40 -03:00
Sofia Rodrigues
10337c620b fix: test 2026-02-13 01:57:23 -03:00
Sofia Rodrigues
698f557aa3 Merge branch 'sofia/async-http-uri' of https://github.com/leanprover/lean4 into sofia/async-http-body 2026-02-13 01:56:37 -03:00
Sofia Rodrigues
692c7c1a09 fix: test 2026-02-13 01:56:29 -03:00
Sofia Rodrigues
1bdfdcdb38 Merge branch 'sofia/async-http-uri' of https://github.com/leanprover/lean4 into sofia/async-http-body 2026-02-13 01:55:05 -03:00
Sofia Rodrigues
cacfe00c1d fix: test 2026-02-13 01:54:52 -03:00
Sofia Rodrigues
0fd0fa9c73 fix: test 2026-02-13 01:54:26 -03:00
Sofia Rodrigues
52fdc0f734 Merge branch 'sofia/async-http-uri' of https://github.com/leanprover/lean4 into sofia/async-http-body 2026-02-13 01:52:49 -03:00
Sofia Rodrigues
451c11d5a1 fix: make strict 2026-02-13 01:52:04 -03:00
Sofia Rodrigues
e92fcf6d46 Merge branch 'sofia/async-http-headers' of https://github.com/leanprover/lean4 into sofia/async-http-uri 2026-02-13 01:41:20 -03:00
Sofia Rodrigues
07140aceb8 Merge branch 'sofia/async-http-uri' of https://github.com/leanprover/lean4 into sofia/async-http-body 2026-02-13 01:39:32 -03:00
Sofia Rodrigues
2cc32928a4 feat: add parser features to path 2026-02-13 01:39:12 -03:00
Sofia Rodrigues
153513d5e2 fix: typos 2026-02-13 01:29:12 -03:00
Sofia Rodrigues
94308408a9 Merge branch 'sofia/async-http-data' of https://github.com/leanprover/lean4 into sofia/async-http-headers 2026-02-13 01:20:31 -03:00
Sofia Rodrigues
1ae6970b77 fix: comment 2026-02-13 01:19:51 -03:00
Sofia Rodrigues
0704f877f5 fix: tests 2026-02-13 01:07:32 -03:00
Sofia Rodrigues
7ff0e6f9c0 feat: 100-continue 2026-02-13 00:56:08 -03:00
Sofia Rodrigues
5b4498ac9d Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-13 00:47:12 -03:00
Sofia Rodrigues
976cc79b0c feat: 100-continue 2026-02-13 00:45:38 -03:00
Sofia Rodrigues
8d6ff0d727 feat: handler 2026-02-13 00:19:36 -03:00
Sofia Rodrigues
26c0e4dac4 feat: date header 2026-02-13 00:06:41 -03:00
Sofia Rodrigues
9ce1821be0 feat: add trailers some type of headers 2026-02-12 12:46:15 -03:00
Sofia Rodrigues
eeff4847fe Merge branch 'sofia/async-http-data' of https://github.com/leanprover/lean4 into sofia/async-http-headers 2026-02-12 12:31:30 -03:00
Sofia Rodrigues
2956f88050 refactor: move trailers 2026-02-12 12:30:59 -03:00
Sofia Rodrigues
26d9c1c07b feat: add extension handling of quotes and ExtensionName 2026-02-12 12:21:47 -03:00
Sofia Rodrigues
73af014cbd fix: documentation 2026-02-12 11:55:15 -03:00
Sofia Rodrigues
d206f437ef Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-12 10:47:19 -03:00
Sofia Rodrigues
d099586632 Merge branch 'sofia/async-http-body' of https://github.com/leanprover/lean4 into sofia/async-http-h1 2026-02-12 10:46:56 -03:00
Sofia Rodrigues
058d95e441 feat: maximum size in readAll 2026-02-12 10:46:43 -03:00
Sofia Rodrigues
b40ac55755 Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-11 19:42:01 -03:00
Sofia Rodrigues
43aa88e5a6 Merge branch 'sofia/async-http-body' of https://github.com/leanprover/lean4 into sofia/async-http-h1 2026-02-11 19:40:58 -03:00
Sofia Rodrigues
8fe2d519d2 revert: chunk changes 2026-02-11 19:40:34 -03:00
Sofia Rodrigues
07ed645f45 Merge branch 'sofia/async-http-body' of https://github.com/leanprover/lean4 into sofia/async-http-h1 2026-02-11 19:35:43 -03:00
Sofia Rodrigues
9485e8f5eb revert: add toString head 2026-02-11 19:35:31 -03:00
Sofia Rodrigues
dc96616781 Merge branch 'sofia/async-http-uri' of https://github.com/leanprover/lean4 into sofia/async-http-body 2026-02-11 19:30:41 -03:00
Sofia Rodrigues
0c44b4ae05 Merge branch 'sofia/async-http-headers' of https://github.com/leanprover/lean4 into sofia/async-http-uri 2026-02-11 19:29:54 -03:00
Sofia Rodrigues
3568464ca7 Merge branch 'sofia/async-http-data' of https://github.com/leanprover/lean4 into sofia/async-http-headers 2026-02-11 19:23:59 -03:00
Sofia Rodrigues
8e5296c71a fix: chunked 2026-02-11 19:22:30 -03:00
Sofia Rodrigues
eee971e3ef Merge branch 'sofia/async-http-body' of https://github.com/leanprover/lean4 into sofia/async-http-h1 2026-02-11 19:10:02 -03:00
Sofia Rodrigues
7a1f8b2d30 fix: readAll 2026-02-11 19:09:45 -03:00
Sofia Rodrigues
157e122891 Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-11 18:51:21 -03:00
Sofia Rodrigues
b12ab7eae4 Merge branch 'sofia/async-http-body' of https://github.com/leanprover/lean4 into sofia/async-http-h1 2026-02-11 18:49:17 -03:00
Sofia Rodrigues
10c8a923e6 feat: readAll functions 2026-02-11 18:48:10 -03:00
Sofia Rodrigues
2b91589750 Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-11 18:08:30 -03:00
Sofia Rodrigues
3e9674eaa9 feat: avoid more than one host 2026-02-11 18:08:16 -03:00
Sofia Rodrigues
d902c6a9f4 fix: mock double close 2026-02-11 18:07:58 -03:00
Sofia Rodrigues
04a17e8c55 fix: fail event should end everything 2026-02-11 18:06:16 -03:00
Sofia Rodrigues
1b6cd457d3 Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-11 17:40:54 -03:00
Sofia Rodrigues
2bc2080fbe fix: bad request behavior 2026-02-11 17:40:19 -03:00
Sofia Rodrigues
6b6425e8d7 fix: close mock bidirectionaly and fix test 2026-02-11 17:39:48 -03:00
Sofia Rodrigues
fb0e95d8ce fix: avoid gate errors 2026-02-11 17:25:26 -03:00
Sofia Rodrigues
4e4702a31f Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-11 17:22:53 -03:00
Sofia Rodrigues
5a2ad22f97 fix: selectable.one can used to change register/unregister order causing double register 2026-02-11 17:22:37 -03:00
Sofia Rodrigues
f02139f7ce fix: skipBytes 2026-02-11 17:14:52 -03:00
Sofia Rodrigues
d004e175e2 fix: error message 2026-02-11 17:03:27 -03:00
Sofia Rodrigues
7928a95c34 tests: add more tests 2026-02-11 16:53:11 -03:00
Sofia Rodrigues
202e6c5228 fix: transport, add explicit close that is no-op for tp 2026-02-11 14:54:08 -03:00
Sofia Rodrigues
0aeaa5e71d Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-10 17:30:08 -03:00
Sofia Rodrigues
9ad4ee304b fix: imports 2026-02-10 17:29:04 -03:00
Sofia Rodrigues
5bd280553d Merge branch 'sofia/async-http-body' of https://github.com/leanprover/lean4 into sofia/async-http-h1 2026-02-10 17:25:08 -03:00
Sofia Rodrigues
7e215c8220 Merge branch 'sofia/async-http-uri' of https://github.com/leanprover/lean4 into sofia/async-http-body 2026-02-10 17:24:00 -03:00
Sofia Rodrigues
2c23680163 fix: imports 2026-02-10 17:23:14 -03:00
Sofia Rodrigues
c4f179daa0 Merge branch 'sofia/async-http-headers' of https://github.com/leanprover/lean4 into sofia/async-http-uri 2026-02-10 17:15:31 -03:00
Sofia Rodrigues
c2f657a15a Merge branch 'sofia/async-http-data' of https://github.com/leanprover/lean4 into sofia/async-http-headers 2026-02-10 17:12:45 -03:00
Sofia Rodrigues
9332081875 fix: import 2026-02-10 17:12:20 -03:00
Sofia Rodrigues
1cec97568b fix: imports 2026-02-10 17:11:11 -03:00
Sofia Rodrigues
b567713641 Merge branch 'sofia/async-http-data' of https://github.com/leanprover/lean4 into sofia/async-http-headers 2026-02-10 17:03:44 -03:00
Sofia Rodrigues
de776c1f32 fix: interpolation 2026-02-10 17:03:02 -03:00
Sofia Rodrigues
c498ea74ec Merge branch 'sofia/async-http-headers' of https://github.com/leanprover/lean4 into sofia/async-http-uri 2026-02-10 16:47:08 -03:00
Sofia Rodrigues
f4aad3a494 Merge branch 'sofia/async-http-data' of https://github.com/leanprover/lean4 into sofia/async-http-headers 2026-02-10 16:46:48 -03:00
Sofia Rodrigues
1cebf576c3 Merge branch 'master' of https://github.com/leanprover/lean4 into sofia/async-http-data 2026-02-10 16:44:20 -03:00
Sofia Rodrigues
25dac2e239 fix: test 2026-02-09 22:25:14 -03:00
Sofia Rodrigues
4a9de7094c feat: new body 2026-02-09 22:20:05 -03:00
Sofia Rodrigues
c4eab3b677 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-09 21:58:26 -03:00
Sofia Rodrigues
dd125c7999 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-09 21:57:42 -03:00
Sofia Rodrigues
5e3dce8088 fix: chunk stream will only deal with content-size of the chunks not with the wireFormatSize 2026-02-09 21:57:26 -03:00
Sofia Rodrigues
4c64f2c2e8 fix: suggestions 2026-02-09 21:55:38 -03:00
Sofia Rodrigues
aa6e11dfc0 Merge branch 'sofia/async-http-body' of https://github.com/leanprover/lean4 into sofia/async-http-h1 2026-02-09 21:25:02 -03:00
Sofia Rodrigues
e7d1e7dd54 Merge branch 'sofia/async-http-uri' of https://github.com/leanprover/lean4 into sofia/async-http-body 2026-02-09 21:09:55 -03:00
Sofia Rodrigues
03843fd3f0 fix: suggestions 2026-02-09 21:09:38 -03:00
Sofia Rodrigues
294e9900ea feat: unify all in stream 2026-02-09 20:29:18 -03:00
Sofia Rodrigues
f13651979e fix: wireFormatSize 2026-02-09 19:31:41 -03:00
Sofia Rodrigues
3d8ba4d09b Merge branch 'sofia/async-http-headers' of https://github.com/leanprover/lean4 into sofia/async-http-body 2026-02-09 12:30:01 -03:00
Sofia Rodrigues
63984c8dda fix: header value 2026-02-09 12:01:25 -03:00
Sofia Rodrigues
e2fd8a5835 Merge branch 'sofia/async-http-data' of https://github.com/leanprover/lean4 into sofia/async-http-headers 2026-02-09 11:28:52 -03:00
Sofia Rodrigues
a0263870b9 fix: extensions 2026-02-09 11:24:48 -03:00
Sofia Rodrigues
3c4ae58aff feat: add extensions 2026-02-09 11:18:24 -03:00
Sofia Rodrigues
5965707575 fix: apply suggestions 2026-02-09 11:17:03 -03:00
Sofia Rodrigues
dbe0140578 fix: enforce validations 2026-02-09 10:28:43 -03:00
Sofia Rodrigues
bc21289793 feat: http docs 2026-01-25 12:48:06 -03:00
Sofia Rodrigues
f11bd0928d feat: server basics 2026-01-25 12:48:06 -03:00
Sofia Rodrigues
6ffd5ad2a4 fix: incremental parsing 2026-01-25 12:42:29 -03:00
Sofia Rodrigues
7ce8cbc01c feat: remove toString instances 2026-01-25 12:42:29 -03:00
Sofia Rodrigues
12a7603c77 fix: orphan module 2026-01-25 12:42:29 -03:00
Sofia Rodrigues
53a6355074 feat: H1 protocol 2026-01-25 12:42:29 -03:00
Sofia Rodrigues
f8ad249e42 test: wrong test 2026-01-25 12:40:41 -03:00
Sofia Rodrigues
3c41d3961e feat: empty body and constructors 2026-01-25 12:39:43 -03:00
Sofia Rodrigues
18bc715bad feat: remove useless functions 2026-01-25 12:39:43 -03:00
Sofia Rodrigues
3349d20663 feat: body 2026-01-25 12:39:41 -03:00
Sofia Rodrigues
bad70e3eab feat: request type has request target 2026-01-25 12:36:36 -03:00
Sofia Rodrigues
21286eb163 fix: domain name comment 2026-01-25 12:36:36 -03:00
Sofia Rodrigues
0e5f07558c feat: introduce data type for HTTP 2026-01-25 12:36:36 -03:00
Sofia Rodrigues
6e26b901e4 fix: encoding 2026-01-25 12:33:07 -03:00
Sofia Rodrigues
81c67c8f12 revert: levenshtein test 2026-01-25 12:29:37 -03:00
Sofia Rodrigues
990e21eefc fix: namespace 2026-01-25 12:27:16 -03:00
Sofia Rodrigues
7141144a2f fix: remove native_decide 2026-01-25 12:27:16 -03:00
Sofia Rodrigues
8c343501c1 fix: apply suggestions
Co-authored-by: Rob23oba <152706811+Rob23oba@users.noreply.github.com>
2026-01-25 12:27:16 -03:00
Sofia Rodrigues
44f08686cd feat: connection values 2026-01-25 12:27:16 -03:00
Sofia Rodrigues
65883f8c2a fix: levenshtein test is using the new Decidable instance 2026-01-25 12:27:16 -03:00
Sofia Rodrigues
bd28a8fad5 fix: tests and type class 2026-01-25 12:27:16 -03:00
Sofia Rodrigues
8ba86c2c67 fix: case and usage of native_decide 2026-01-25 12:24:52 -03:00
Sofia Rodrigues
d3cddf9e44 fix: Headers.Basic comment 2026-01-25 12:24:52 -03:00
Sofia Rodrigues
5f3babee5c feat: headers data structure 2026-01-25 12:24:51 -03:00
Sofia Rodrigues
26dfc9a872 refactor: remove headers 2026-01-25 12:16:13 -03:00
Sofia Rodrigues
e47439e8be fix: default size 2026-01-25 11:26:03 -03:00
Sofia Rodrigues
1ef53758be fix: apply suggestions
Co-authored-by: Rob23oba <152706811+Rob23oba@users.noreply.github.com>
2026-01-25 11:21:06 -03:00
Sofia Rodrigues
8544042789 fix: status and chunkedbuffer 2026-01-25 11:19:34 -03:00
Sofia Rodrigues
f564d43d98 feat: basic headers structure to more structured approach 2026-01-23 17:58:44 -03:00
Sofia Rodrigues
32fa0666c9 feat: data components 2026-01-23 17:14:53 -03:00
75 changed files with 20708 additions and 136 deletions

View File

@@ -230,7 +230,7 @@ Examples:
* `"empty".isEmpty = false`
* `" ".isEmpty = false`
-/
@[inline] def isEmpty (s : String) : Bool :=
@[inline, expose] def isEmpty (s : String) : Bool :=
s.utf8ByteSize == 0
@[export lean_string_isempty]

View File

@@ -57,4 +57,14 @@ theorem length_map {f : Char → Char} {s : String} : (s.map f).length = s.lengt
theorem map_eq_empty {f : Char → Char} {s : String} : s.map f = "" ↔ s = "" := by
simp [← toList_eq_nil_iff]
@[simp]
theorem map_map {f g : Char → Char} {s : String} : String.map g (String.map f s) = String.map (g ∘ f) s := by
apply String.ext
simp [List.map_map]
@[simp]
theorem map_id {s : String} : String.map id s = s := by
apply String.ext
simp [List.map_id]
end String

View File

@@ -229,7 +229,7 @@ Examples:
* `"Orange".toLower = "orange"`
* `"ABc123".toLower = "abc123"`
-/
@[inline] def toLower (s : String) : String :=
@[inline, expose] def toLower (s : String) : String :=
s.map Char.toLower
/--

View File

@@ -10,6 +10,8 @@ public import Init.Data.Random
public import Std.Internal.Async.Basic
import Init.Data.ByteArray.Extra
import Init.Data.Array.Lemmas
public import Std.Sync.Mutex
public import Std.Sync.Barrier
import Init.Omega
public section
@@ -132,6 +134,8 @@ partial def Selectable.one (selectables : Array (Selectable α)) : Async α := d
let gen := mkStdGen seed
let selectables := shuffleIt selectables gen
let gate ← IO.Promise.new
for selectable in selectables do
if let some val ← selectable.selector.tryFn then
let result ← selectable.cont val
@@ -141,6 +145,9 @@ partial def Selectable.one (selectables : Array (Selectable α)) : Async α := d
let promise ← IO.Promise.new
for selectable in selectables do
if finished.get then
break
let waiterPromise ← IO.Promise.new
let waiter := Waiter.mk finished waiterPromise
selectable.selector.registerFn waiter
@@ -157,18 +164,20 @@ partial def Selectable.one (selectables : Array (Selectable α)) : Async α := d
let async : Async _ :=
try
let res ← IO.ofExcept res
discard <| await gate.result?
for selectable in selectables do
selectable.selector.unregisterFn
let contRes ← selectable.cont res
promise.resolve (.ok contRes)
promise.resolve (.ok (← selectable.cont res))
catch e =>
promise.resolve (.error e)
async.toBaseIO
Async.ofPromise (pure promise)
gate.resolve ()
let result ← Async.ofPromise (pure promise)
return result
/--
Performs fair and data-loss free non-blocking multiplexing on the `Selectable`s in `selectables`.
@@ -224,6 +233,8 @@ def Selectable.combine (selectables : Array (Selectable α)) : IO (Selector α)
let derivedWaiter := Waiter.mk waiter.finished waiterPromise
selectable.selector.registerFn derivedWaiter
let barrier ← IO.Promise.new
discard <| IO.bindTask (t := waiterPromise.result?) fun res? => do
match res? with
| none => return (Task.pure (.ok ()))
@@ -231,6 +242,7 @@ def Selectable.combine (selectables : Array (Selectable α)) : IO (Selector α)
let async : Async _ := do
let mainPromise := waiter.promise
await barrier
for selectable in selectables do
selectable.selector.unregisterFn

View File

@@ -6,4 +6,189 @@ Authors: Sofia Rodrigues
module
prelude
public import Std.Internal.Http.Data
public import Std.Internal.Http.Server
public import Std.Internal.Http.Client
public section
/-!
# HTTP Library
A low-level HTTP/1.1 server implementation for Lean. This library provides a pure,
sans-I/O protocol implementation that can be used with the `Async` library or with
custom connection handlers.
## Overview
This module provides a complete HTTP/1.1 server implementation with support for:
- Request/response handling with directional streaming bodies
- Keep-alive connections
- Chunked transfer encoding
- Header validation and management
- Configurable timeouts and limits
**Sans I/O Architecture**: The core protocol logic doesn't perform any actual I/O itself -
it just defines how data should be processed. This separation allows the protocol implementation
to remain pure and testable, while different transports (TCP sockets, mock clients) handle
the actual reading and writing of bytes.
## Quick Start
The main entry point is `Server.serve`, which starts an HTTP/1.1 server. Implement the
`Server.Handler` type class to define how the server handles requests, errors, and
`Expect: 100-continue` headers:
```lean
import Std.Internal.Http
open Std Internal IO Async
open Std Http Server
structure MyHandler
instance : Handler MyHandler where
onRequest _ req := do
Response.ok |>.text "Hello, World!"
def main : IO Unit := Async.block do
let addr : Net.SocketAddress := .v4 ⟨.ofParts 127 0 0 1, 8080⟩
let server ← Server.serve addr MyHandler.mk
server.waitShutdown
```
## Working with Requests
Incoming requests are represented by `Request Body.Incoming`, which bundles the request
line, parsed headers, and a lazily-consumed body. Headers are available immediately,
while the body can be streamed or collected on demand, allowing handlers to process both
small and large payloads efficiently.
### Reading Headers
```lean
def handler (req : Request Body.Incoming) : ContextAsync (Response Body.Outgoing) := do
-- Access request method and URI
let method := req.head.method -- Method.get, Method.post, etc.
let uri := req.head.uri -- RequestTarget
-- Read a specific header
if let some contentType := req.head.headers.get? (.mk "content-type") then
IO.println s!"Content-Type: {contentType}"
Response.ok |>.text "OK"
```
### URI Query Semantics
`RequestTarget.query` is parsed using form-style key/value conventions (`k=v&...`), and `+` is decoded as a
space in query components. If you need RFC 3986 opaque query handling, use the raw request target string
(`toString req.head.uri`) and parse it with custom logic.
### Reading the Request Body
The request body is exposed as `Body.Incoming`, which can be consumed incrementally or
collected into memory. The `readAll` method reads the entire body, with an optional size
limit to protect against unbounded payloads.
```lean
def handler (req : Request Body.Incoming) : ContextAsync (Response Body.Outgoing) := do
-- Collect entire body as a String
let bodyStr : String ← req.body.readAll
-- Or with a maximum size limit
let bodyStr : String ← req.body.readAll (maximumSize := some 1024)
Response.ok |>.text s!"Received: {bodyStr}"
```
## Building Responses
Responses are constructed using a builder API that starts from a status code and adds
headers and a body. Common helpers exist for text, HTML, JSON, and binary responses, while
still allowing full control over status codes and header values.
Response builders produce `Async (Response Body.Outgoing)`.
```lean
-- Text response
Response.ok |>.text "Hello!"
-- HTML response
Response.ok |>.html "<h1>Hello!</h1>"
-- JSON response
Response.ok |>.json "{\"key\": \"value\"}"
-- Binary response
Response.ok |>.bytes someByteArray
-- Custom status
Response.new |>.status .created |>.text "Resource created"
-- With custom headers
Response.ok
|>.header! "X-Custom-Header" "value"
|>.header! "Cache-Control" "no-cache"
|>.text "Response with headers"
```
### Streaming Responses
For large responses or server-sent events, use streaming:
```lean
def handler (req : Request Body.Incoming) : ContextAsync (Response Body.Outgoing) := do
Response.ok
|>.header! "Content-Type" "text/plain"
|>.stream fun stream => do
for i in [0:10] do
stream.send { data := s!"chunk {i}\n".toUTF8 }
Async.sleep 1000
stream.close
```
## Server Configuration
Configure server behavior with `Config`:
```lean
def config : Config := {
maxRequests := 10000000,
lingeringTimeout := 5000,
}
let server ← Server.serve addr MyHandler.mk config
```
## Handler Type Class
Implement `Server.Handler` to define how the server processes events. The class has three
methods, all with default implementations:
- `onRequest` — called for each incoming request; returns a response inside `ContextAsync`
- `onFailure` — called when an error occurs while processing a request
- `onContinue` — called when a request includes an `Expect: 100-continue` header; return
`true` to accept the body or `false` to reject it
```lean
structure MyHandler where
greeting : String
instance : Handler MyHandler where
onRequest self req := do
Response.ok |>.text self.greeting
onFailure self err := do
IO.eprintln s!"Error: {err}"
```
The handler methods operate in the following monads:
- `onRequest` uses `ContextAsync` — an asynchronous monad (`ReaderT CancellationContext Async`) that provides:
- Full access to `Async` operations (spawning tasks, sleeping, concurrent I/O)
- A `CancellationContext` tied to the client connection — when the client disconnects, the
context is cancelled, allowing your handler to detect this and stop work early
- `onFailure` uses `Async`
- `onContinue` uses `Async`
-/

View File

@@ -0,0 +1,304 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Client.Pool
public section
namespace Std.Http
set_option linter.all true
open Std Internal IO Async TCP Protocol
open Time
/-!
# Client
A top-level HTTP client backed by a connection pool, similar to `reqwest::Client`.
Use `Client.builder` to construct, then `client.get "https://..."` etc.
```lean
let client ← Client.builder
|>.proxy! "http://proxy.example.com:8080"
|>.build
let res ← client.get "https://api.example.com/data"
|>.header! "Accept" "application/json"
|>.send
```
-/
/--
A top-level HTTP client backed by a connection pool.
-/
abbrev Client := Client.Agent.Pool
/--
Builder for `Client`. Chain configuration setters then call `.build`.
-/
public structure Client.Builder where
/--
Configuration applied to all sessions created by this client.
-/
config : Config := {}
/--
Maximum number of pooled connections per host.
-/
maxPerHost : Nat := 4
namespace Client.Builder
/--
Routes all connections through a proxy.
`host` is the proxy hostname, `port` is the proxy port.
Only HTTP proxies are supported.
-/
def proxy (b : Client.Builder) (host : String) (port : UInt16) : Client.Builder :=
  -- Store the proxy endpoint in the shared session configuration.
  let newConfig := { b.config with proxy := some (host, port) }
  { b with config := newConfig }
/--
Routes all connections through a proxy specified as a URL string.
Returns `none` if the URL is invalid or has no authority component.
Only HTTP proxies are supported. The scheme determines the default port
when no explicit port is specified (`http` → 80, `https` → 443). TLS
(HTTPS proxy CONNECT tunnels) is not supported.
-/
def proxy? (b : Client.Builder) (url : String) : Option Client.Builder := do
  -- `URI.parse?` and `authority` both yield `Option`, so either failure
  -- short-circuits the whole builder update to `none`.
  let uri ← URI.parse? url
  let auth ← uri.authority
  let host := toString auth.host
  -- An explicit port in the URL wins; otherwise fall back to the scheme's default.
  let port : UInt16 := match auth.port with
    | .value p => p
    | _ => URI.Scheme.defaultPort uri.scheme
  pure { b with config := { b.config with proxy := some (host, port) } }
/--
Sets the request timeout (send + receive).
DNS resolution and TCP connect are not covered by this timeout;
use the OS-level socket timeout for those.
-/
def timeout (b : Client.Builder) (ms : Time.Millisecond.Offset) : Client.Builder :=
  -- `requestTimeout` is a subtype `{ x // 0 < x }`, so the update carries the
  -- positivity proof `h`; non-positive values leave the builder unchanged.
  if h : 0 < ms then
    { b with config := { b.config with requestTimeout := ⟨ms, h⟩ } }
  else b
/--
Sets the `User-Agent` header sent with every request.
-/
def userAgent (b : Client.Builder) (ua : String) : Client.Builder :=
  -- `Header.Value.ofString?` validates the string; an invalid value stores `none`.
  let value := Header.Value.ofString? ua
  { b with config := { b.config with userAgent := value } }
/--
Sets the maximum number of pooled connections per host.
-/
def maxConnectionsPerHost (b : Client.Builder) (n : Nat) : Client.Builder :=
  { b with maxPerHost := n }

/--
Sets the maximum number of redirects to follow automatically.
-/
def maxRedirects (b : Client.Builder) (n : Nat) : Client.Builder :=
  let newConfig := { b.config with maxRedirects := n }
  { b with config := newConfig }
/--
Sets the predicate that decides whether a response status is acceptable.
When set, the final response status is passed to `f`; if `f` returns `false`
an `IO.Error` is thrown with the numeric status code.
-/
def validateStatus (b : Client.Builder) (f : Status → Bool) : Client.Builder :=
  { b with config := { b.config with validateStatus := some f } }
/--
Builds the `Client`.
-/
def build (b : Client.Builder) : Async Client :=
  -- A `Client` is just a connection pool configured by this builder.
  Agent.Pool.new b.config b.maxPerHost
end Builder
/--
A request builder bound to a `Client`. Build up headers, query parameters, and body,
then dispatch with one of the `send*` methods.
-/
public structure RequestBuilder where
/--
The client that will dispatch this request.
-/
client : Client
/--
Resolved hostname for this request.
-/
host : URI.Host
/--
Target port.
-/
port : UInt16
/--
The underlying request builder.
-/
builder : Request.Builder
namespace RequestBuilder
/--
Injects a `Host` header if not already present.
-/
private def withHostHeader (rb : RequestBuilder) : RequestBuilder :=
  -- Respect an explicit Host header set by the caller.
  if rb.builder.line.headers.contains Header.Name.host then
    rb
  else
    -- Derive the scheme from the port so the default port can be elided.
    let scheme := URI.Scheme.ofPort rb.port
    let hostValue :=
      if rb.port == URI.Scheme.defaultPort scheme then
        toString rb.host
      else
        s!"{rb.host}:{rb.port}"
    { rb with builder := rb.builder.header! "Host" hostValue }
/--
Adds a typed header to the request.
-/
def header (rb : RequestBuilder) (key : Header.Name) (value : Header.Value) : RequestBuilder :=
  { rb with builder := rb.builder.header key value }

/--
Adds a header to the request. Panics if the name or value is invalid.
-/
def header! (rb : RequestBuilder) (key : String) (value : String) : RequestBuilder :=
  { rb with builder := rb.builder.header! key value }

/--
Adds a header to the request. Returns `none` if the name or value is invalid.
-/
def header? (rb : RequestBuilder) (key : String) (value : String) : Option RequestBuilder := do
  -- `Request.Builder.header?` returns `none` on invalid input; propagate it.
  let builder ← rb.builder.header? key value
  pure { rb with builder }
/--
Sets the request URI from a string. Panics if the string is not a valid request target.
-/
def uri! (rb : RequestBuilder) (u : String) : RequestBuilder :=
  { rb with builder := rb.builder.uri! u }

/--
Adds a query parameter to the request URI.
-/
def queryParam (rb : RequestBuilder) (key : String) (value : String) : RequestBuilder :=
  -- Only origin-form and absolute-form targets carry a query component;
  -- every other target form is returned unchanged.
  let updated : RequestTarget :=
    match rb.builder.line.uri with
    | .originForm o =>
      let query := (o.query.getD URI.Query.empty).insert key value
      .originForm { o with query := some query }
    | .absoluteForm af =>
      .absoluteForm { af with query := af.query.insert key value }
    | other => other
  { rb with builder := { rb.builder with line := { rb.builder.line with uri := updated } } }
/--
Sends the request with an empty body.
-/
def send (rb : RequestBuilder) : Async (Response Body.Incoming) := do
  -- Inject a Host header (if absent) before dispatching through the pool.
  let rb := rb.withHostHeader
  rb.client.send rb.host rb.port (← rb.builder.blank)

/--
Sends the request with a plain-text body. Sets `Content-Type: text/plain; charset=utf-8`.
-/
def text (rb : RequestBuilder) (content : String) : Async (Response Body.Incoming) := do
  let rb := rb.withHostHeader
  rb.client.send rb.host rb.port (← rb.builder.text content)

/--
Sends the request with a JSON body. Sets `Content-Type: application/json`.
-/
def json (rb : RequestBuilder) (content : String) : Async (Response Body.Incoming) := do
  let rb := rb.withHostHeader
  rb.client.send rb.host rb.port (← rb.builder.json content)

/--
Sends the request with a raw binary body. Sets `Content-Type: application/octet-stream`.
-/
def bytes (rb : RequestBuilder) (content : ByteArray) : Async (Response Body.Incoming) := do
  let rb := rb.withHostHeader
  rb.client.send rb.host rb.port (← rb.builder.bytes content)

/--
Sends the request with a streaming body produced by `gen`.
-/
def sendStream (rb : RequestBuilder) (gen : Body.Outgoing → Async Unit) : Async (Response Body.Incoming) := do
  let rb := rb.withHostHeader
  rb.client.send rb.host rb.port (← rb.builder.stream gen)
end RequestBuilder
/--
Returns a `Client.Builder` with default configuration.
-/
def new : Client.Builder := {}
/--
Parses `url` into `(host, port, origin-form target)`.
Returns `none` if the URL is invalid or has no authority component.
-/
private def mkRequest
    (method : Request.Builder → Request.Builder)
    (client : Client) (url : URI.AuthorityForm) : Client.RequestBuilder :=
  -- Pooled requests use an origin-form target; an empty query is dropped
  -- entirely rather than serialized as a bare trailing `?`.
  let target : RequestTarget :=
    .originForm (RequestTarget.PathAndQuery.mk url.path (if url.query.isEmpty then none else some url.query))
  { client, host := url.host, port := url.port,
    builder := method (Request.new |>.uri target) }
/--
Creates a GET request builder for `url`.
-/
def get (client : Client) (url : URI.AuthorityForm) : Client.RequestBuilder :=
  mkRequest (fun b => b.method .get) client url

/--
Creates a POST request builder for `url`.
-/
def post (client : Client) (url : URI.AuthorityForm) : Client.RequestBuilder :=
  mkRequest (fun b => b.method .post) client url

/--
Creates a PUT request builder for `url`.
-/
def put (client : Client) (url : URI.AuthorityForm) : Client.RequestBuilder :=
  mkRequest (fun b => b.method .put) client url

/--
Creates a DELETE request builder for `url`.
-/
def delete (client : Client) (url : URI.AuthorityForm) : Client.RequestBuilder :=
  mkRequest (fun b => b.method .delete) client url

/--
Creates a PATCH request builder for `url`.
-/
def patch (client : Client) (url : URI.AuthorityForm) : Client.RequestBuilder :=
  mkRequest (fun b => b.method .patch) client url

/--
Creates a HEAD request builder for `url`.
-/
def head (client : Client) (url : URI.AuthorityForm) : Client.RequestBuilder :=
  mkRequest (fun b => b.method .head) client url

/--
Creates an OPTIONS request builder for `url`.
-/
def options (client : Client) (url : URI.AuthorityForm) : Client.RequestBuilder :=
  mkRequest (fun b => b.method .options) client url
end Client
end Http
end Std

View File

@@ -0,0 +1,547 @@
/-
Copyright (c) 2026 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Client.Session
public import Std.Internal.Http.Data.Cookie
import Init.Data.Array
public section
namespace Std
namespace Http
namespace Client
set_option linter.all true
open Std Internal IO Async TCP Protocol
open Time
/-!
# Agent
This module defines `Client.Agent`, a transport-agnostic HTTP user-agent that wraps a `Session`
and adds automatic redirect following, cookie jar support, response interceptors, and configurable
retries.
`Agent` is parameterized by the transport type `α` and contains no TCP-specific code.
Use `Agent.ofTransport` to create an `Agent` from any connected transport. Pass a `connectTo`
factory to enable cross-host redirect following and automatic reconnection on error.
On each redirect the `Location` header is parsed as a URI. If the redirect targets a different
host or port the agent closes the current session and opens a new one using `connectTo` (when
available). A `Array URI` tracks every URI visited in the current redirect chain so that cycles
are detected and broken immediately.
When crossing to a different host the `Authorization` header is stripped from the redirected
request to prevent credential leakage.
-/
/--
An HTTP user-agent that manages a connection to a host. It follows redirects, maintains a cookie
jar for automatic cookie handling, applies response interceptors, and retries on connection errors.
-/
public structure Agent (α : Type) where
/--
The underlying HTTP session over the transport.
-/
session : Session α
/--
URI scheme for this connection (e.g., `"http"` or `"https"`).
Used when constructing absolute-form request URIs for proxy requests and some redirects.
-/
scheme : URI.Scheme
/--
The hostname this agent is currently connected to.
-/
host : URI.Host
/--
The port this agent is currently connected to.
-/
port : UInt16
/--
Cookie jar shared across all requests and redirects through this agent.
-/
cookieJar : Cookie.Jar
/--
Response interceptors applied (in order) after every response, including intermediate
redirect responses. Each interceptor receives the response and returns a (possibly
modified) response. Interceptors run before cookie processing and redirect evaluation
so they can, e.g., unwrap envelopes or transparently decompress bodies.
-/
interceptors : Array (Response Body.Incoming Async (Response Body.Incoming)) := #[]
/--
Optional factory for opening a new session to `(host, port)`. Used for:
* Automatic retry after connection errors (`maxRetries`): reconnects to the same host.
* Cross-host redirects: connects to the new host.
`none` for agents created via `Agent.ofTransport` without a factory; cross-host redirects
are not followed and connection errors are not retried automatically for such agents.
-/
connectTo : Option (URI.Host UInt16 Async (Session α)) := none
/--
Called when a connection error is confirmed (i.e., `session.send` threw and all retries
are committed to using a fresh session). Receives the broken session together with the
host and port so the caller can:
* For pool agents: evict every session to that host so the next retry gets a fresh one.
* For standalone agents: close the session's request channel so the background loop exits.
The default closes the session channel; pool agents set this to an eviction function.
-/
onBrokenSession : Session α URI.Host UInt16 Async Unit :=
fun s _ _ => discard <| s.close
namespace Agent
/--
Returns `true` for HTTP methods that are safe to retry on connection failure.
Non-idempotent methods (POST, PATCH) must not be retried automatically because
the server may have already processed the request before the connection dropped.
-/
private def isIdempotentMethod (m : Method) : Bool :=
  -- POST and PATCH are deliberately excluded: the server may have already
  -- processed them before the connection dropped.
  match m with
  | .get | .head | .put | .delete | .options | .trace => true
  | _ => false
/--
Rewrites an origin-form request target to absolute-form for proxy forwarding.
`GET /path?q=1 HTTP/1.1` becomes `GET http://host:port/path?q=1 HTTP/1.1`.
No-op for targets that are already in absolute-form or do not carry a path.
-/
def toAbsoluteFormRequest
    (request : Request Body.AnyBody)
    (scheme : URI.Scheme) (host : URI.Host) (port : UInt16) : Request Body.AnyBody :=
  match request.line.uri with
  | .originForm o =>
    -- Lift the origin-form path/query into an absolute-form target carrying
    -- the explicit scheme, host, and port, as required by proxies.
    let absolute : RequestTarget := .absoluteForm {
      scheme,
      path := o.path,
      query := o.query.getD .empty,
      authority := some { host, port := .value port }
    }
    { request with line := { request.line with uri := absolute } }
  | _ => request
/--
Creates an `Agent` from an already-connected transport `socket`.
Pass a `connectTo` factory to enable automatic reconnection on error and cross-host redirect
following; omit it (or pass `none`) to disable both.
-/
def ofTransport [Transport α] (socket : α) (scheme : URI.Scheme)
    (host : URI.Host) (port : UInt16)
    (connectTo : Option (URI.Host → UInt16 → Async (Session α)) := none)
    (config : Config := {}) : Async (Agent α) := do
  -- The session wraps the transport; the cookie jar starts empty and is
  -- shared across all requests made through this agent.
  let session ← Session.new socket config
  let cookieJar ← Cookie.Jar.new
  pure { session, scheme, host, port, cookieJar, connectTo }
/--
Injects matching cookies from `cookieJar` into the request headers for `host`.
Does nothing if the jar contains no matching cookies.
-/
def injectCookies (cookieJar : Cookie.Jar) (host : URI.Host) (scheme : URI.Scheme)
    (request : Request Body.AnyBody) : Async (Request Body.AnyBody) := do
  -- Respect an explicit Cookie header set by the caller.
  if request.line.headers.contains .cookie then
    return request
  let path :=
    match request.line.uri with
    | .originForm o => o.path
    | .absoluteForm af => af.path
    | _ => URI.Path.parseOrRoot "/"
  -- Secure cookies are only offered when the connection scheme is https.
  let secure := scheme.val == "https"
  match cookieJar.cookiesFor host path (secure := secure) with
  | none => return request
  | some cookieValue =>
    let headers := request.line.headers.insert .cookie cookieValue
    return { request with line := { request.line with headers } }
/--
Reads all `Set-Cookie` headers from `responseHeaders` and stores the cookies in `cookieJar`.
-/
def processCookies (cookieJar : Cookie.Jar) (host : URI.Host)
    (responseHeaders : Headers) : BaseIO Unit := do
  -- No Set-Cookie headers means nothing to store.
  let some values := responseHeaders.getAll? Header.Name.setCookie
    | return ()
  for v in values do
    cookieJar.processSetCookie host v.value
/--
Applies all response interceptors to `response` in order, returning the final result.
-/
def applyInterceptors
    (interceptors : Array (Response Body.Incoming → Async (Response Body.Incoming)))
    (response : Response Body.Incoming) : Async (Response Body.Incoming) :=
  -- Left fold: interceptors run in registration order, each seeing the
  -- previous interceptor's output.
  interceptors.foldlM (init := response) (fun r f => f r)
/--
Outcome of evaluating whether a response should trigger an automatic redirect.
-/
inductive RedirectDecision where
/--
Response is final, should validate status and return it.
-/
| done
/--
Follow a redirect to `(host, port, scheme)` with `request`, updating `history`.
-/
| follow (host : URI.Host) (port : UInt16) (scheme : URI.Scheme) (request : Request Body.AnyBody)
/--
Inspects `response` and decides whether to follow a redirect.
Returns `.done` when:
- `remaining` is 0 or the response is not a redirection,
- the `Location` header is absent, or
- the `Location` value cannot be parsed.
Returns `.follow` with the rewritten request (method, body, and headers adjusted per
RFC 9110 §15.4, including `Authorization` stripped on cross-origin hops) when a valid
redirect target is found. The response body is drained (up to `drainLimit` bytes) before
returning `.follow`; if the body exceeds `drainLimit` the incoming channel is closed and
the connection is left to recover or time out.
-/
def decideRedirect
    (remaining : Nat)
    (currentHost : URI.Host) (currentPort : UInt16) (currentScheme : URI.Scheme)
    (request : Request Body.AnyBody) (response : Response Body.Incoming)
    (drainLimit : Nat)
    : Async RedirectDecision := do
  -- Out of redirect budget, or the response is not a redirect at all.
  if remaining == 0 || !response.line.status.isRedirection then
    return .done
  let some locationValue := response.line.headers.get? .location
    | return .done
  let locationStr := locationValue.value
  let some target := RequestTarget.parse? locationStr
    | return .done
  -- Drain the intermediate response body (bounded by `drainLimit`) so the
  -- connection can be reused; on overflow the incoming channel is closed.
  discard <| ContextAsync.run do
    try
      discard <| response.body.readAll (α := ByteArray) (maximumSize := some drainLimit.toUInt64)
    catch _ =>
      response.body.close
  -- RFC 9110 §15.4: 303 always switches to GET; 301/302 switch POST to GET.
  let newMethod := match response.line.status with
    | .seeOther => .get
    | .movedPermanently | .found =>
      if request.line.method == .post then .get else request.line.method
    | _ => request.line.method
  let bodyIsStreaming := match request.body with | .outgoing _ => true | _ => false
  -- Streaming bodies cannot be replayed, and a method rewrite drops the body.
  let newBody : Body.AnyBody :=
    if newMethod == .get || newMethod == .head || newMethod != request.line.method then .empty {}
    else if bodyIsStreaming then .empty {}
    else request.body
  let (newHost, newPort, newScheme) := match target with
    | .absoluteForm af =>
      let h := af.authority.map URI.Authority.host |>.getD currentHost
      let p : UInt16 :=
        match af.authority with
        | some auth => match auth.port with
          | URI.Port.value v => v
          | _ => URI.Scheme.defaultPort af.scheme
        | none => URI.Scheme.defaultPort af.scheme
      (h, p, af.scheme)
    | _ => (currentHost, currentPort, currentScheme)
  -- Refuse to follow redirects to non-HTTP schemes (SSRF hardening).
  if newScheme.val != "http" && newScheme.val != "https" then
    return .done
  -- Strip credentials and cookies on cross-origin hops to avoid leakage.
  let isCrossOrigin := newHost != currentHost || newPort != currentPort || newScheme != currentScheme
  let newHeaders :=
    if isCrossOrigin then
      request.line.headers
      |>.erase Header.Name.authorization
      |>.erase Header.Name.proxyAuthorization
      |>.erase Header.Name.cookie
    else request.line.headers
  return .follow newHost newPort newScheme
    { line := { request.line with uri := target, method := newMethod, headers := newHeaders }
      body := newBody
      extensions := request.extensions }
private partial def sendWithRedirects [Transport α]
    (agent : Agent α) (request : Request Body.AnyBody)
    (remaining : Nat) (retriesLeft : Nat)
    (history : Array (URI.Host × UInt16 × String) := #[]) : Async (Response Body.Incoming) := do
  -- Record the current URL in the history and detect redirect cycles.
  let currentKey := (agent.host, agent.port, toString request.line.uri)
  let history := history.push currentKey
  -- Rewrite to absolute-form when a proxy is configured.
  let request :=
    if agent.session.config.proxy.isSome then
      toAbsoluteFormRequest request agent.scheme agent.host agent.port
    else
      request
  let request ← injectCookies agent.cookieJar agent.host agent.scheme request
  let response ← try agent.session.send request
    catch err => do
      -- The session is broken; notify the owner so it can evict/close it.
      agent.onBrokenSession agent.session agent.host agent.port
      let bodyIsReplayable := match request.body with | .outgoing _ => false | _ => true
      -- Retry only idempotent methods with replayable bodies, and only when a
      -- factory exists to open a fresh session.
      if retriesLeft > 0 && isIdempotentMethod request.line.method && bodyIsReplayable then
        if let some factory := agent.connectTo then
          sleep agent.session.config.retryDelay
          let newSession ← factory agent.host agent.port
          return ← sendWithRedirects { agent with session := newSession } request remaining (retriesLeft - 1) history
      throw err
  let response ← applyInterceptors agent.interceptors response
  processCookies agent.cookieJar agent.host response.line.headers
  match ← decideRedirect remaining agent.host agent.port agent.scheme request response
      agent.session.config.redirectBodyDrainLimit with
  | .done =>
    if let some validate := agent.session.config.validateStatus then
      if !validate response.line.status then
        throw (.userError s!"unexpected HTTP status: {response.line.status.toCode}")
    return response
  | .follow newHost newPort newScheme newRequest =>
    -- A user-supplied redirect policy may veto the hop.
    if let some policy := agent.session.config.redirectPolicy then
      if !policy newHost newPort then
        return response
    -- Break redirect cycles by consulting the visited-URL history.
    let nextKey := (newHost, newPort, toString newRequest.line.uri)
    if history.contains nextKey then
      return response
    if newHost != agent.host || newPort != agent.port then
      -- For custom transports without a connectTo factory we cannot open a new
      -- connection to a different host; return the redirect response as-is.
      let some factory := agent.connectTo
        | return response
      let newSession ← factory newHost newPort
      sendWithRedirects
        { session := newSession
          scheme := newScheme
          host := newHost
          port := newPort
          cookieJar := agent.cookieJar
          interceptors := agent.interceptors
          connectTo := some factory
          onBrokenSession := agent.onBrokenSession }
        newRequest (remaining - 1) retriesLeft history
    else
      sendWithRedirects agent newRequest (remaining - 1) retriesLeft history
/--
Send a request, automatically following redirects up to `config.maxRedirects` hops and
retrying on connection errors up to `config.maxRetries` times.
For cross-host redirects the agent reconnects using its `connectTo` factory (if set).
Cookies are automatically injected from the jar and `Set-Cookie` responses are stored.
Response interceptors are applied after every response.
-/
def send {β : Type} [Coe β Body.AnyBody] [Transport α] (agent : Agent α) (request : Request β) : Async (Response Body.Incoming) :=
  -- Coerce the body to `Body.AnyBody` once, then delegate to the redirect loop
  -- with the configured redirect and retry budgets.
  let coerced : Request Body.AnyBody :=
    { line := request.line, body := (request.body : Body.AnyBody), extensions := request.extensions }
  sendWithRedirects agent coerced
    agent.session.config.maxRedirects
    agent.session.config.maxRetries
end Agent
/-!
# Agent.RequestBuilder
A fluent builder that attaches an `Agent` to a `Request.Builder`, letting callers chain header
and query-parameter setters before dispatching with a typed `send*` terminal.
```lean
let response ←
agent.get "/api/items"
|>.header! "Accept" "application/json"
|>.queryParam "page" "2"
|>.send
```
-/
/--
A `Request.Builder` bound to a specific `Agent`. Build up headers, query parameters, and body,
then call one of the `send*` methods to dispatch the request.
-/
public structure Agent.RequestBuilder (α : Type) where
/--
The agent that will send this request.
-/
agent : Agent α
/--
The underlying request builder.
-/
builder : Request.Builder
namespace Agent.RequestBuilder
/--
Injects a `Host` header derived from the agent's `host` and `port` if no `Host` header
is already present.
-/
private def withHostHeader [Transport α] (rb : Agent.RequestBuilder α) : Agent.RequestBuilder α :=
  -- An explicit Host header set by the caller wins.
  if rb.builder.line.headers.contains Header.Name.host then
    rb
  else
    -- Elide the port when it matches the scheme's default.
    let isDefaultPort := rb.agent.port == URI.Scheme.defaultPort rb.agent.scheme
    let hostValue :=
      if isDefaultPort then
        toString rb.agent.host
      else
        s!"{rb.agent.host}:{rb.agent.port}"
    { rb with builder := rb.builder.header! "Host" hostValue }
/--
Prepares the builder by injecting the `Host` header, then calls `f` to build and send the
request. Cookie injection is handled by `Agent.injectCookies` inside `sendWithRedirects`.
-/
private def prepare [Transport α] (rb : Agent.RequestBuilder α)
    (f : Agent.RequestBuilder α → Async (Response Body.Incoming)) : Async (Response Body.Incoming) :=
  -- Inject the Host header, then hand the finished builder to the dispatcher.
  f rb.withHostHeader
/--
Adds a typed header to the request.
-/
def header [Transport α] (rb : Agent.RequestBuilder α) (key : Header.Name) (value : Header.Value) : Agent.RequestBuilder α :=
  { rb with builder := rb.builder.header key value }

/--
Adds a header to the request. Panics if the name or value is invalid.
-/
def header! [Transport α] (rb : Agent.RequestBuilder α) (key : String) (value : String) : Agent.RequestBuilder α :=
  { rb with builder := rb.builder.header! key value }

/--
Adds a header to the request. Returns `none` if the name or value is invalid.
-/
def header? [Transport α] (rb : Agent.RequestBuilder α) (key : String) (value : String) : Option (Agent.RequestBuilder α) := do
  -- `Request.Builder.header?` returns `none` on invalid input; propagate it.
  let builder ← rb.builder.header? key value
  pure { rb with builder }

/--
Sets the request URI from a string. Panics if the string is not a valid request target.
-/
def uri! [Transport α] (rb : Agent.RequestBuilder α) (u : String) : Agent.RequestBuilder α :=
  { rb with builder := rb.builder.uri! u }
/--
Adds a query parameter to the request URI.
Works for both origin-form (e.g. set by `agent.get "/path"`) and absolute-form targets.
-/
def queryParam [Transport α] (rb : Agent.RequestBuilder α) (key : String) (value : String) : Agent.RequestBuilder α :=
  -- Only origin-form and absolute-form targets carry a query component;
  -- every other target form is returned unchanged.
  let updated : RequestTarget :=
    match rb.builder.line.uri with
    | .originForm o =>
      let query := (o.query.getD URI.Query.empty).insert key value
      .originForm { o with query := some query }
    | .absoluteForm af =>
      .absoluteForm { af with query := af.query.insert key value }
    | other => other
  { rb with builder := { rb.builder with line := { rb.builder.line with uri := updated } } }
/--
Sends the request with an empty body.
-/
def send [Transport α] (rb : Agent.RequestBuilder α) : Async (Response Body.Incoming) :=
  rb.prepare fun rb => do rb.agent.send (← rb.builder.blank)

/--
Sends the request with a plain-text body.
Sets `Content-Type: text/plain; charset=utf-8`.
-/
def text [Transport α] (rb : Agent.RequestBuilder α) (content : String) : Async (Response Body.Incoming) :=
  rb.prepare fun rb => do rb.agent.send (← rb.builder.text content)

/--
Sends the request with a JSON body.
Sets `Content-Type: application/json`.
-/
def json [Transport α] (rb : Agent.RequestBuilder α) (content : String) : Async (Response Body.Incoming) :=
  rb.prepare fun rb => do rb.agent.send (← rb.builder.json content)

/--
Sends the request with a raw binary body.
Sets `Content-Type: application/octet-stream`.
-/
def bytes [Transport α] (rb : Agent.RequestBuilder α) (content : ByteArray) : Async (Response Body.Incoming) :=
  rb.prepare fun rb => do rb.agent.send (← rb.builder.bytes content)

/--
Sends the request with a streaming body produced by `gen`.
-/
def sendStream [Transport α]
    (rb : Agent.RequestBuilder α)
    (gen : Body.Outgoing → Async Unit) : Async (Response Body.Incoming) :=
  rb.prepare fun rb => do rb.agent.send (← rb.builder.stream gen)
end Agent.RequestBuilder
namespace Agent
/--
Creates a GET request builder for the given path or URL
-/
def get [Transport α] (agent : Agent α) (path : String) : Agent.RequestBuilder α :=
  let target := RequestTarget.parse! path
  { agent, builder := Request.get target }

/--
Creates a POST request builder for the given path or URL.
-/
def post [Transport α] (agent : Agent α) (path : String) : Agent.RequestBuilder α :=
  let target := RequestTarget.parse! path
  { agent, builder := Request.post target }

/--
Creates a PUT request builder for the given path or URL.
-/
def put [Transport α] (agent : Agent α) (path : String) : Agent.RequestBuilder α :=
  let target := RequestTarget.parse! path
  { agent, builder := Request.put target }

/--
Creates a DELETE request builder for the given path or URL.
-/
def delete [Transport α] (agent : Agent α) (path : String) : Agent.RequestBuilder α :=
  let target := RequestTarget.parse! path
  { agent, builder := Request.delete target }

/--
Creates a PATCH request builder for the given path or URL.
-/
def patch [Transport α] (agent : Agent α) (path : String) : Agent.RequestBuilder α :=
  let target := RequestTarget.parse! path
  { agent, builder := Request.patch target }

/--
Creates a HEAD request builder for the given path or URL.
-/
def headReq [Transport α] (agent : Agent α) (path : String) : Agent.RequestBuilder α :=
  let target := RequestTarget.parse! path
  { agent, builder := Request.head target }

/--
Creates an OPTIONS request builder for the given path or URL.
-/
def options [Transport α] (agent : Agent α) (path : String) : Agent.RequestBuilder α :=
  let target := RequestTarget.parse! path
  { agent, builder := Request.options target }
end Std.Http.Client.Agent

View File

@@ -0,0 +1,156 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Time
public import Std.Internal.Http.Protocol.H1
public section
/-!
# Config
This module exposes the `Config` structure describing timeouts, connection,
and header configurations for an HTTP client.
-/
namespace Std.Http.Client
set_option linter.all true
/--
Client connection configuration with validation.
-/
structure Config where
  /--
  Maximum number of requests per connection (for keep-alive).
  -/
  maxRequestsPerConnection : Nat := 1000
  /--
  Maximum number of headers allowed per response.
  -/
  maxResponseHeaders : Nat := 200
  /--
  Maximum size of a single header name in bytes.
  -/
  maxHeaderNameSize : Nat := 256
  /--
  Maximum size of a single header value in bytes.
  -/
  maxHeaderValueSize : Nat := 16384
  /--
  Maximum waiting time for additional data before timing out.
  -/
  readTimeout : Time.Millisecond.Offset := 30000
  /--
  Timeout duration for keep-alive connections.
  -/
  -- `⟨…⟩` restored: the subtype value needs an anonymous constructor with a
  -- `by decide` proof of positivity.
  keepAliveTimeout : { x : Time.Millisecond.Offset // 0 < x } := ⟨60000, by decide⟩
  /--
  Timeout for the request lifecycle (send + receive) per connection.
  DNS resolution and TCP connect are not covered by this timeout.
  -/
  requestTimeout : { x : Time.Millisecond.Offset // 0 < x } := ⟨120000, by decide⟩
  /--
  Whether to enable keep-alive connections.
  -/
  enableKeepAlive : Bool := true
  /--
  Maximum number of bytes to receive in a single read call.
  -/
  maxRecvChunkSize : Nat := 16384
  /--
  Default buffer size for request payloads.
  -/
  defaultRequestBufferSize : Nat := 16384
  /--
  The user-agent string to send by default.
  -/
  userAgent : Option Header.Value := some (.mk "lean-http/1.1")
  /--
  Maximum number of redirects to follow automatically.
  Set to 0 to disable automatic redirect following.
  -/
  maxRedirects : Nat := 10
  /--
  Maximum number of times to retry a request after a connection error.
  Set to 0 to disable automatic retries.
  -/
  maxRetries : Nat := 3
  /--
  Delay in milliseconds between successive retry attempts.
  -/
  retryDelay : Time.Millisecond.Offset := 1000
  /--
  Optional HTTP proxy address as `(host, port)`.
  When set, all TCP connections are routed through this proxy and
  request URIs are rewritten to absolute-form (`GET http://host/path HTTP/1.1`).
  -/
  proxy : Option (String × UInt16) := none
  /--
  Maximum number of bytes allowed in a single response body.
  When `some n`, reading more than `n` bytes from the body resolves the current
  request with an error and closes the connection.
  `none` (default) imposes no limit.
  -/
  maxResponseBodySize : Option Nat := none
  /--
  Optional predicate that decides whether a response status is acceptable.
  When `none`, all status codes are accepted (no error is thrown).
  When `some f`, the final response status is passed to `f`; if `f` returns `false`
  an `IO.Error` is thrown with the numeric status code.
  Only applied to the final (non-redirect) response, not intermediate `3xx` responses.
  Example — reject anything outside 2xx:
  ```lean
  validateStatus := some (fun s => s.toCode / 100 == 2)
  ```
  -/
  -- `→` restored: this is a predicate from `Status` to `Bool`.
  validateStatus : Option (Status → Bool) := none
  /--
  Maximum number of bytes drained from an intermediate redirect response body before
  the connection is abandoned instead of being reused for the next hop.
  -/
  redirectBodyDrainLimit : Nat := 1024 * 1024
  /--
  Optional predicate called before following each redirect.
  -/
  -- `→` restored: the policy receives the target host and port.
  redirectPolicy : Option (URI.Host → UInt16 → Bool) := none
namespace Config
/--
Converts this client configuration into the equivalent HTTP/1.1 protocol
configuration, mapping each client limit onto its protocol-level counterpart.
-/
def toH1Config (config : Config) : Std.Http.Protocol.H1.Config where
  maxMessages := config.maxRequestsPerConnection
  maxHeaders := config.maxResponseHeaders
  maxHeaderNameLength := config.maxHeaderNameSize
  maxHeaderValueLength := config.maxHeaderValueSize
  enableKeepAlive := config.enableKeepAlive
  agentName := config.userAgent
end Config
end Std.Http.Client

View File

@@ -0,0 +1,608 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Async.TCP
public import Std.Internal.Async.ContextAsync
public import Std.Internal.Http.Transport
public import Std.Internal.Http.Protocol.H1
public import Std.Internal.Http.Client.Config
public import Std.Sync.Watch
public section
namespace Std.Http.Client
open Std Internal IO Async TCP Protocol
open Time
/--
Type-erased body operations for use in the request pipeline.
Captures `Reader` and `Writer` methods as closures so the connection state
is not parameterized by the body type.
-/
structure Body.Operations where
  /--
  Selector that resolves when a chunk is available, or with `none` once the
  body reaches EOF.
  -/
  recvSelector : Selector (Option Chunk)
  /--
  Returns `true` when the body is closed for reading.
  -/
  isClosed : Async Bool
  /--
  Closes the body for reading.
  -/
  close : Async Unit
  /--
  Returns the known content length if available (used to set `Content-Length`).
  -/
  getKnownSize : Async (Option Body.Length)
namespace Body.Operations

/--
Erases a concrete body type into `Body.Operations`, capturing its
`Body.Reader` and `Body.Writer` methods as closures.
-/
def of [Body.Reader β] [Body.Writer β] (body : β) : Body.Operations :=
  { recvSelector := Body.Reader.recvSelector body
    isClosed := Body.Reader.isClosed body
    close := Body.Reader.close body
    getKnownSize := Body.Writer.getKnownSize body }

end Body.Operations
/-!
# Connection
This module defines the `Connection.handle` loop, used to manage one persistent HTTP/1.1 client
connection and handle sequential request/response exchanges over it.
-/
set_option linter.all true
/--
A request packet queued to the background connection loop.
Pairs the request with the promise and optional progress channels that the
caller uses to observe its outcome.
-/
structure RequestPacket where
  /--
  The request to send, with its body type-erased to `Body.Operations`.
  -/
  request : Request Body.Operations
  /--
  Promise resolved with the eventual response, or with an `Error` on failure.
  -/
  responsePromise : IO.Promise (Except Error (Response Body.Incoming))
  /--
  Watch channel updated with the cumulative number of request-body bytes sent.
  `none` when the caller does not need upload-progress tracking.
  -/
  uploadProgress : Option (Watch UInt64) := none
  /--
  Watch channel updated with the cumulative number of response bytes received.
  `none` when the caller does not need download-progress tracking.
  -/
  downloadProgress : Option (Watch UInt64) := none
namespace RequestPacket

/--
Fails the packet by resolving its promise with `error`.
-/
def onError (packet : RequestPacket) (error : Error) : BaseIO Unit :=
  discard (packet.responsePromise.resolve (.error error))

/--
Completes the packet by resolving its promise with `response`.
-/
def onResponse (packet : RequestPacket) (response : Response Body.Incoming) : BaseIO Unit :=
  discard (packet.responsePromise.resolve (.ok response))

end RequestPacket
namespace Connection
/--
Events produced by the async select loop in `pollNextEvent`.
Each variant corresponds to one possible outcome of waiting for I/O.
-/
private inductive Recv
  /-- Bytes arrived from the socket; `none` signals EOF. -/
  | bytes (x : Option ByteArray)
  /-- A chunk of the outgoing request body is ready; `none` signals the body ended. -/
  | requestBody (x : Option Chunk)
  /-- The response-body channel signalled whether it can accept more pulled data. -/
  | bodyInterest (x : Bool)
  /-- A new request was dequeued from the channel; `none` signals the channel closed. -/
  | packet (x : Option RequestPacket)
  /-- The active timeout (request, read, or keep-alive) elapsed, or the context deadline fired. -/
  | timeout
  /-- The connection's cancellation context was cancelled (non-deadline reason). -/
  | shutdown
  /-- A transport/select error occurred; the connection must close. -/
  | close
/--
The set of I/O sources to wait on during a single poll iteration.
Each `Option` field is `none` when that source is not currently active.
-/
private structure PollSources (α : Type) where
  /-- Transport to read response bytes from, when socket input is wanted. -/
  socket : Option α
  /-- Number of bytes the parser expects next, used to size the socket read. -/
  expect : Option Nat
  /-- Request body to pump chunks from, when one is active. -/
  requestBody : Option Body.Operations
  /-- Channel of queued request packets, polled only while idle. -/
  requestChannel : Option (Std.CloseableChannel RequestPacket)
  /-- Response-body writer to watch for interest (backpressure) signals. -/
  responseBody : Option Body.Outgoing
  /-- Timeout for this poll iteration. -/
  timeout : Millisecond.Offset
  /-- Keep-alive timeout; when set it is used instead of `timeout`. -/
  keepAliveTimeout : Option Millisecond.Offset
  /-- Cancellation context whose completion triggers shutdown or deadline timeout. -/
  connectionContext : CancellationContext
/--
All mutable state carried through the connection processing loop.
Bundled into a struct so it can be passed to and returned from helper functions.
-/
private structure ConnectionState where
  /-- The client-side (`.sending`) HTTP/1.1 state machine. -/
  machine : H1.Machine .sending
  /-- Timeout applied to the next poll iteration. -/
  currentTimeout : Millisecond.Offset
  /-- Keep-alive timeout; set only while idle between requests. -/
  keepAliveTimeout : Option Millisecond.Offset
  /-- The in-flight request packet, if any. -/
  currentRequest : Option RequestPacket
  /-- Body currently being pumped into the request, if any. -/
  requestBody : Option Body.Operations
  /-- Writer side of the channel carrying the response body to the caller. -/
  responseOutgoing : Option Body.Outgoing
  /-- Reader side handed to the caller as the response body. -/
  responseIncoming : Option Body.Incoming
  /-- `true` when the parser asked for more input from the socket. -/
  requiresData : Bool
  /-- Number of bytes the parser expects next, when known. -/
  expectData : Option Nat
  /-- `true` while idle and ready to accept the next queued request. -/
  waitingForRequest : Bool
  /-- `true` while the current response head is an interim 1xx response. -/
  isInformationalResponse : Bool
  /-- `true` while the request body is held back awaiting `100 Continue`. -/
  waitingForContinue : Bool
  /-- Body deferred until the server authorises it with `100 Continue`. -/
  pendingRequestBody : Option Body.Operations
  /-- Watch updated with cumulative request-body bytes sent, when tracking. -/
  uploadProgress : Option (Watch UInt64) := none
  /-- Cumulative request-body bytes sent for the current request. -/
  uploadBytes : UInt64 := 0
  /-- Watch updated with cumulative response bytes received, when tracking. -/
  downloadProgress : Option (Watch UInt64) := none
  /-- Cumulative raw socket bytes received for the current response. -/
  downloadBytes : UInt64 := 0
  /-- Cumulative response-body bytes pulled from the machine for the current response. -/
  downloadBodyBytes : UInt64 := 0
@[inline]
private def requestHasExpectContinue (request : Request Body.Operations) : Bool :=
  -- The expectation only counts when exactly one `Expect` header is present
  -- and it parses as `100-continue`.
  if let some #[value] := request.line.headers.getAll? Header.Name.expect then
    match Header.Expect.parse value with
    | some res => res.expect
    | none => false
  else
    false
/--
Waits for the next I/O event across all active sources described by `sources`.
Computes the socket recv size from `config`, then races all active selectables.
Returns `.close` on transport errors.
-/
private def pollNextEvent
    [Transport α]
    (config : Config) (sources : PollSources α) : Async Recv := do
  -- Size the read from the parser's expectation, bounded by the configured
  -- chunk limit; fall back to the default request buffer size.
  let expectedBytes := sources.expect
    |>.getD config.defaultRequestBufferSize
    |>.min config.maxRecvChunkSize
    |>.toUInt64
  let mut selectables : Array (Selectable Recv) := #[
    .case sources.connectionContext.doneSelector (fun _ => do
      -- `←` restored: the cancellation reason is read monadically.
      let reason ← sources.connectionContext.getCancellationReason
      match reason with
      | some .deadline => pure .timeout
      | _ => pure .shutdown)
  ]
  if let some socket := sources.socket then
    selectables := selectables.push (.case (Transport.recvSelector socket expectedBytes) (Recv.bytes · |> pure))
  -- The keep-alive timeout, when active, replaces the per-iteration timeout.
  if let some keepAliveTimeout := sources.keepAliveTimeout then
    selectables := selectables.push (.case (← Selector.sleep keepAliveTimeout) (fun _ => pure .timeout))
  else
    selectables := selectables.push (.case (← Selector.sleep sources.timeout) (fun _ => pure .timeout))
  if let some requestBody := sources.requestBody then
    selectables := selectables.push (.case requestBody.recvSelector (Recv.requestBody · |> pure))
  if let some requestChannel := sources.requestChannel then
    selectables := selectables.push (.case requestChannel.recvSelector (Recv.packet · |> pure))
  if let some responseBody := sources.responseBody then
    selectables := selectables.push (.case (Body.Writer.interestSelector responseBody) (Recv.bodyInterest · |> pure))
  -- Any select failure is mapped to `.close` so the caller tears the connection down.
  try Selectable.one selectables catch _ => pure .close
/--
Processes all H1 events from a single machine step, updating the connection state.
Handles keep-alive resets, body-size tracking, `Expect: 100-continue`, and parse errors.
Returns the updated state and `true` if a parse failure was encountered.
-/
private def processH1Events
    (config : Config)
    (events : Array (H1.Event .sending))
    (state : ConnectionState) : Async (ConnectionState × Bool) := do
  let mut st := state
  let mut sawFailure := false
  for event in events do
    match event with
    | .needMoreData expect =>
      st := { st with requiresData := true, expectData := expect }
    -- `.needAnswer` is emitted by processWrite when the writer is in `waitingHeaders`
    -- state in `.sending` mode, signalling that the client machine needs the next request.
    -- The client loop tracks this through `waitingForRequest` instead, so this event
    -- is intentionally a no-op here.
    | .needAnswer => pure ()
    | .endHeaders head =>
      if head.status.isInformational then
        -- Informational (1xx) responses are interim; do not resolve the caller's
        -- promise. The machine loops back to read the real response.
        st := { st with isInformationalResponse := true }
        -- A `100 Continue` response authorises the body: move it from the
        -- pending slot into `requestBody` so the pump loop starts sending.
        if head.status == .continue && st.waitingForContinue then
          st := { st with
            requestBody := st.pendingRequestBody
            pendingRequestBody := none
            waitingForContinue := false
          }
      else
        st := { st with
          isInformationalResponse := false
          currentTimeout := config.readTimeout
          keepAliveTimeout := none
        }
        -- A non-informational response while we were still waiting for
        -- `100 Continue`: the server rejected (or bypassed) the expectation.
        -- Discard the pending body — it must not be sent.
        if st.waitingForContinue then
          if let some body := st.pendingRequestBody then
            if !(← body.isClosed) then body.close
          st := { st with pendingRequestBody := none, waitingForContinue := false }
        if let some body := st.responseOutgoing then
          if let some length := head.getSize true then
            Body.Writer.setKnownSize body (some length)
        if let some packet := st.currentRequest then
          if let some incoming := st.responseIncoming then
            packet.onResponse { line := head, body := incoming }
    | .closeBody =>
      -- Skip closing for informational (1xx) responses; the channel stays
      -- open for the real response body that follows.
      if !st.isInformationalResponse then
        if let some body := st.responseOutgoing then
          if ¬(← Body.Writer.isClosed body) then Body.Writer.close body
    | .next =>
      -- Reset all per-request state for the next pipelined request.
      if let some body := st.requestBody then
        if ¬(← body.isClosed) then body.close
      if let some body := st.pendingRequestBody then
        if ¬(← body.isClosed) then body.close
      if let some body := st.responseOutgoing then
        if ¬(← Body.Writer.isClosed body) then Body.Writer.close body
      if let some w := st.uploadProgress then Watch.close w
      if let some w := st.downloadProgress then Watch.close w
      st := { st with
        requestBody := none
        pendingRequestBody := none
        waitingForContinue := false
        responseOutgoing := none
        responseIncoming := none
        currentRequest := none
        isInformationalResponse := false
        waitingForRequest := true
        keepAliveTimeout := some config.keepAliveTimeout.val
        currentTimeout := config.keepAliveTimeout.val
        uploadProgress := none
        uploadBytes := 0
        downloadProgress := none
        downloadBytes := 0
        downloadBodyBytes := 0
      }
    | .failed err =>
      if let some packet := st.currentRequest then
        packet.onError (.userError (toString err))
      sawFailure := true
    | .«continue» => pure ()
    | .close => pure ()
  return (st, sawFailure)
/--
Computes the active `PollSources` for the current connection state.
Determines which I/O sources need attention and whether to include the socket.
-/
private def buildPollSources
    [Transport α]
    (socket : α) (requestChannel : Std.CloseableChannel RequestPacket)
    (connectionContext : CancellationContext) (state : ConnectionState)
    : Async (PollSources α) := do
  -- `←` restored on both bindings: `isClosed` is monadic.
  let requestBodySource ←
    if let some body := state.requestBody then
      if ¬(← body.isClosed) then pure (some body) else pure none
    else
      pure none
  let responseBodySource ←
    if state.machine.canPullBodyNow then
      if let some body := state.responseOutgoing then
        if ¬(← Body.Writer.isClosed body) then pure (some body) else pure none
      else
        pure none
    else
      pure none
  -- Poll the socket whenever the machine can still make progress with more
  -- input. `∨` restored between the clauses.
  let pollSocket :=
    state.requiresData
    ∨ state.machine.writer.sentMessage
    ∨ !state.waitingForRequest
    ∨ requestBodySource.isSome
    ∨ state.machine.canPullBody
  return {
    socket := if pollSocket then some socket else none
    expect := state.expectData
    requestBody := requestBodySource
    requestChannel := if state.waitingForRequest then some requestChannel else none
    responseBody := responseBodySource
    timeout := state.currentTimeout
    keepAliveTimeout := state.keepAliveTimeout
    connectionContext := connectionContext
  }
/--
Processes a single async I/O event and updates the connection state.
Returns the updated state and `true` if the connection should be closed immediately.
-/
private def handleRecvEvent
    (config : Config)
    (event : Recv) (state : ConnectionState) : Async (ConnectionState × Bool) := do
  match event with
  | .bytes (some bytes) =>
    let newDownloadBytes := state.downloadBytes + bytes.size.toUInt64
    if let some w := state.downloadProgress then
      Watch.send w newDownloadBytes
    return ({ state with machine := state.machine.feed bytes, downloadBytes := newDownloadBytes }, false)
  | .bytes none =>
    -- Socket EOF: let the machine drain what it already has.
    return ({ state with machine := state.machine.noMoreInput }, false)
  | .requestBody (some chunk) =>
    let newUploadBytes := state.uploadBytes + chunk.data.size.toUInt64
    if let some w := state.uploadProgress then
      Watch.send w newUploadBytes
    return ({ state with machine := state.machine.sendData #[chunk], uploadBytes := newUploadBytes }, false)
  | .requestBody none =>
    if let some body := state.requestBody then
      if ¬(← body.isClosed) then body.close
    return ({ state with machine := state.machine.userClosedBody, requestBody := none }, false)
  | .bodyInterest interested =>
    if interested then
      let (newMachine, pulledChunk) := state.machine.pullBody
      let mut st := { state with machine := newMachine }
      if let some pulled := pulledChunk then
        let newBodyBytes := st.downloadBodyBytes + pulled.chunk.data.size.toUInt64
        st := { st with downloadBodyBytes := newBodyBytes }
        -- Enforce the response body size limit before writing data to the caller.
        if let some maxSize := config.maxResponseBodySize then
          if newBodyBytes > maxSize.toUInt64 then
            if let some packet := st.currentRequest then
              packet.onError (.userError "response body exceeds maximum allowed size")
            if let some body := st.responseOutgoing then
              if ¬(← Body.Writer.isClosed body) then Body.Writer.close body
            if let some w := st.downloadProgress then Watch.close w
            return ({ st with
              machine := st.machine.closeWriter.closeReader.noMoreInput
              currentRequest := none
              responseOutgoing := none
              downloadProgress := none
            }, false)
        if let some body := st.responseOutgoing then
          -- If the caller has dropped/closed the incoming side, the write fails.
          -- Silently swallowing the error is correct: the loop must continue pulling
          -- wire bytes to keep the connection in a valid state for reuse.
          try Body.Writer.send body pulled.chunk pulled.incomplete
          catch _ => pure ()
          if pulled.final then
            if ¬(← Body.Writer.isClosed body) then Body.Writer.close body
            st := { st with responseOutgoing := none }
      return (st, false)
    else
      return (state, false)
  | .packet (some packet) =>
    let mut machine := state.machine.send packet.request.line
    let mut requestBody : Option Body.Operations := none
    let mut pendingRequestBody : Option Body.Operations := none
    let mut waitingForContinue := false
    if requestHasExpectContinue packet.request then
      -- Defer body pumping until the server sends `100 Continue`, but still
      -- set the known size so that `Content-Length` is included in the request
      -- headers (required by RFC 9112; servers need it to fire checkContinue).
      if let some size ← packet.request.body.getKnownSize then
        machine := machine.setKnownSize size
      waitingForContinue := true
      pendingRequestBody := some packet.request.body
    else
      if let some size ← packet.request.body.getKnownSize then
        machine := machine.setKnownSize size
      requestBody := some packet.request.body
    -- `←` restored: the response body channel is created monadically.
    let (responseOutgoing, responseIncoming) ← Body.mkChannel
    return ({ state with
      machine := machine
      currentRequest := some packet
      waitingForRequest := false
      currentTimeout := config.requestTimeout.val
      keepAliveTimeout := none
      requestBody := requestBody
      pendingRequestBody := pendingRequestBody
      waitingForContinue := waitingForContinue
      responseOutgoing := some responseOutgoing
      responseIncoming := some responseIncoming
      uploadProgress := packet.uploadProgress
      uploadBytes := 0
      downloadProgress := packet.downloadProgress
      downloadBytes := 0
    }, false)
  | .packet none => return (state, true)
  | .close => return (state, true)
  | .timeout =>
    if let some packet := state.currentRequest then
      packet.onError (.userError "request timeout")
    if let some body := state.responseOutgoing then
      if ¬(← Body.Writer.isClosed body) then Body.Writer.close body
    if let some w := state.uploadProgress then Watch.close w
    if let some w := state.downloadProgress then Watch.close w
    return ({ state with
      machine := state.machine.closeWriter.closeReader.noMoreInput
      currentRequest := none
      responseOutgoing := none
      uploadProgress := none
      downloadProgress := none
    }, false)
  | .shutdown =>
    if let some packet := state.currentRequest then
      packet.onError (.userError "connection shutdown")
    if let some body := state.responseOutgoing then
      if ¬(← Body.Writer.isClosed body) then Body.Writer.close body
    if let some w := state.uploadProgress then Watch.close w
    if let some w := state.downloadProgress then Watch.close w
    return ({ state with
      machine := state.machine.closeWriter.closeReader.noMoreInput
      currentRequest := none
      responseOutgoing := none
      uploadProgress := none
      downloadProgress := none
    }, false)
/--
Runs the main request/response processing loop for a single connection.
Drives the HTTP/1.1 state machine through four phases each iteration:
close finished readers, send buffered output, process H1 events, poll for I/O.
-/
protected def handle
    [Transport α]
    (socket : α)
    (machine : H1.Machine .sending)
    (config : Config)
    (connectionContext : CancellationContext)
    (requestChannel : Std.CloseableChannel RequestPacket) : Async Unit := do
  let mut state : ConnectionState := {
    machine := machine
    currentTimeout := config.keepAliveTimeout.val
    keepAliveTimeout := some config.keepAliveTimeout.val
    currentRequest := none
    requestBody := none
    responseOutgoing := none
    responseIncoming := none
    requiresData := false
    expectData := none
    waitingForRequest := true
    isInformationalResponse := false
    waitingForContinue := false
    pendingRequestBody := none
  }
  while ¬state.machine.halted do
    -- Phase 1: close any reader that the user has signalled is done.
    if let some body := state.requestBody then
      if ← body.isClosed then
        state := { state with machine := state.machine.userClosedBody, requestBody := none }
    -- Phase 2: advance the state machine and flush any output.
    let (newMachine, step) := state.machine.step
    state := { state with machine := newMachine }
    if step.output.size > 0 then
      try Transport.sendAll socket #[step.output.toByteArray]
      catch _ =>
        if let some packet := state.currentRequest then
          packet.onError (.userError "connection write failed")
        if let some body := state.responseOutgoing then
          if ¬(← Body.Writer.isClosed body) then Body.Writer.close body
        state := { state with
          machine := state.machine.closeWriter.closeReader.noMoreInput
          currentRequest := none
          responseOutgoing := none
        }
        break
    -- Phase 3: process all events emitted by this step.
    let (newState, sawFailure) ← processH1Events config step.events state
    state := newState
    if sawFailure then break
    -- Phase 4: wait for the next IO event when any source needs attention.
    if state.requiresData ∨ state.waitingForRequest ∨ state.currentRequest.isSome ∨ state.requestBody.isSome ∨ state.machine.canPullBody then
      let sources ← buildPollSources socket requestChannel connectionContext state
      state := { state with requiresData := false }
      let event ← pollNextEvent config sources
      let (newState, shouldClose) ← handleRecvEvent config event state
      state := newState
      if shouldClose then break
  -- Clean up: notify any in-flight request and close all open streams.
  if let some packet := state.currentRequest then
    packet.onError (.userError "connection closed")
  if let some w := state.uploadProgress then
    Watch.close w
  if let some w := state.downloadProgress then
    Watch.close w
  if let some body := state.responseOutgoing then
    if ¬(← Body.Writer.isClosed body) then Body.Writer.close body
  if let some body := state.requestBody then
    if ¬(← body.isClosed) then body.close
  if let some body := state.pendingRequestBody then
    if ¬(← body.isClosed) then body.close
  discard <| EIO.toBaseIO requestChannel.close
  -- Drain any remaining queued packets.
  repeat do
    match ← requestChannel.tryRecv with
    | some packet => packet.onError (.userError "connection closed")
    | none => break
  Transport.close socket
end Connection
end Std.Http.Client

View File

@@ -0,0 +1,204 @@
/-
Copyright (c) 2026 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Client.Agent
import Std.Internal.Async.DNS
import Std.Data.HashMap
import Init.Data.Array
public section
namespace Std
namespace Http
namespace Client
set_option linter.all true
open Std Internal IO Async TCP Protocol
open Time
/-!
# Agent.Pool
A connection pool that maintains multiple reusable sessions per `(host, port)` pair,
enabling parallel request pipelines to the same host.
Use `Pool.new` to create a pool with a shared configuration and cookie jar, then call
`pool.send` to dispatch requests through managed sessions.
```lean
let pool ← Agent.Pool.new (maxPerHost := 4)
-- requests are distributed across up to 4 connections per host
let r1 ← pool.send "api.example.com" 80
(Request.get (.originForm! "/a") |>.header! "Host" "api.example.com" |>.blank)
```
-/
/--
Resolves `host` via DNS, opens a TCP socket to `port`, and creates an HTTP session.
When `config.proxy` is set the TCP connection is made to the proxy address instead.
-/
private def createTcpSession (host : URI.Host) (port : UInt16) (config : Config) : Async (Session Socket.Client) := do
  let (connectHost, connectPort) := config.proxy.getD (toString host, port)
  let addrs ← DNS.getAddrInfo connectHost (toString connectPort)
  if addrs.isEmpty then
    throw (IO.userError s!"could not resolve host: {connectHost.quote}")
  -- Try each resolved address in order; return on first successful connect.
  -- This handles hosts that resolve to both IPv6 (::1) and IPv4 (127.0.0.1).
  let mut lastErr : IO.Error := IO.userError s!"could not connect to {connectHost.quote}:{connectPort}"
  for ipAddr in addrs do
    -- `⟨…⟩` restored: socket addresses pair the IP with the port.
    let socketAddr : Std.Net.SocketAddress := match ipAddr with
      | .v4 ip => .v4 ⟨ip, connectPort⟩
      | .v6 ip => .v6 ⟨ip, connectPort⟩
    try
      let socket ← Socket.Client.mk
      let _ ← socket.connect socketAddr
      return ← Session.new socket config
    catch err =>
      lastErr := err
  throw lastErr
/--
A connection pool that manages multiple sessions per `(host, port)` pair.
Each value in the map is an array of live sessions paired with a round-robin counter.
-/
public structure Agent.Pool where
  /--
  Per-host session lists and round-robin counters, guarded by a mutex.
  -/
  state : Mutex (Std.HashMap (String × UInt16) (Array (Session Socket.Client) × Nat))
  /--
  Maximum number of sessions (connections) per host.
  -/
  maxPerHost : Nat
  /--
  Configuration used when creating new sessions.
  -/
  config : Config
  /--
  Cookie jar shared across all sessions in the pool.
  -/
  cookieJar : Cookie.Jar
  /--
  Monotonically increasing counter used to assign unique IDs to pooled sessions.
  -/
  nextId : Mutex UInt64
  /--
  Response interceptors applied (in order) after every response from any session in the pool.
  -/
  -- `→` restored: an interceptor maps a response to an async response.
  interceptors : Array (Response Body.Incoming → Async (Response Body.Incoming)) := #[]
namespace Agent.Pool
/--
Creates a new, empty connection pool.
-/
def new (config : Config := {}) (maxPerHost : Nat := 4) : Async Agent.Pool := do
  -- `←` restored on each binding; `∅` restored as the initial empty map.
  let state ← Mutex.new (∅ : Std.HashMap (String × UInt16) (Array (Session Socket.Client) × Nat))
  let cookieJar ← Cookie.Jar.new
  let nextId ← Mutex.new (1 : UInt64)
  pure { state, maxPerHost, config, cookieJar, nextId }
/--
Returns a session for `(host, port)`, reusing an existing one when available or
creating a new one when the pool has room. Uses round-robin scheduling.
-/
def getOrCreateSession (pool : Agent.Pool) (host : URI.Host) (port : UInt16) : Async (Session Socket.Client) := do
  -- Fast path: pick an existing session round-robin.
  let maybeSession ← pool.state.atomically do
    let st ← MonadState.get
    let (sessions, idx) := (st.get? (toString host, port)).getD (#[], 0)
    match sessions[idx % sessions.size]? with
    | none => return none
    | some selected =>
      MonadState.set (st.insert (toString host, port) (sessions, idx + 1))
      return some selected
  if let some session := maybeSession then
    return session
  -- Slow path: create a new session and register it.
  let session ← createTcpSession host port pool.config
  let newId ← pool.nextId.atomically do
    let id ← MonadState.get
    MonadState.set (id + 1)
    return id
  let session := { session with id := newId }
  pool.state.atomically do
    let st ← MonadState.get
    let (sessions, idx) := (st.get? (toString host, port)).getD (#[], 0)
    -- Respect maxPerHost: only register if we are still under the limit.
    if sessions.size < pool.maxPerHost then
      MonadState.set (st.insert (toString host, port) (sessions.push session, idx))
    -- If over the limit (concurrent creation race), this session is still
    -- returned for the current request but not stored for future reuse.
  return session
/--
Removes a single broken session from the pool by its unique ID.
Healthy sibling sessions to the same host are preserved.
-/
private def evictSession (pool : Agent.Pool) (host : URI.Host) (port : UInt16) (sessionId : UInt64) : Async Unit := do
  pool.state.atomically do
    let st ← MonadState.get
    match st.get? (toString host, port) with
    | none => pure ()
    | some (sessions, idx) =>
      -- Keep every session except the broken one; the round-robin index is kept.
      let sessions' := sessions.filter (fun s => s.id != sessionId)
      MonadState.set (st.insert (toString host, port) (sessions', idx))
/--
Sends a request through a pooled session for `(host, port)`, injecting cookies from the
shared jar, applying response interceptors, storing any `Set-Cookie` responses, following
redirects up to `config.maxRedirects` hops, and evicting dead sessions on connection
failure (retrying up to `config.maxRetries` times).
-/
def send {β : Type} [Coe β Body.AnyBody]
    (pool : Agent.Pool) (host : URI.Host) (port : UInt16)
    (request : Request β) : Async (Response Body.Incoming) := do
  let session ← pool.getOrCreateSession host port
  Agent.send {
    session
    scheme := URI.Scheme.ofPort port
    host := host
    port := port
    cookieJar := pool.cookieJar
    interceptors := pool.interceptors
    connectTo := some pool.getOrCreateSession
    onBrokenSession := fun brokenSession h p => pool.evictSession h p brokenSession.id
  } request
end Agent.Pool
namespace Agent
/--
Resolves `host` via DNS and establishes a TCP connection on `port`, returning a new
`Agent Socket.Client`. Throws if DNS resolution returns no addresses.
When `config.proxy` is set every connection (including cross-host redirects) is routed
through the proxy.
-/
def connect (host : URI.Host) (port : UInt16) (config : Config := {}) : Async (Agent Socket.Client) := do
  let session ← createTcpSession host port config
  let cookieJar ← Cookie.Jar.new
  let scheme := URI.Scheme.ofPort port
  pure { session, scheme, host, port, cookieJar, connectTo := some (createTcpSession · · config) }
end Std.Http.Client.Agent

View File

@@ -0,0 +1,107 @@
/-
Copyright (c) 2026 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Client.Connection
public section
namespace Std.Http.Client
open Std Internal IO Async TCP Protocol
open Time
set_option linter.all true
/-!
# Session
This module defines `Client.Session`, an HTTP/1.1 client session that manages a single
persistent connection and dispatches sequential request/response exchanges over it.
A background task drives the `Connection` loop; callers interact through a channel.
-/
/--
An HTTP client session that sends sequential requests over a persistent connection.
A background task drives the connection loop; callers interact through the channel.
-/
public structure Session (α : Type) where
  /--
  Queue of requests sent by users, consumed by the background connection loop.
  -/
  requestChannel : Std.CloseableChannel RequestPacket
  /--
  Resolves when the background loop exits.
  -/
  shutdown : IO.Promise Unit
  /--
  Configuration for this session.
  -/
  config : Config
  /--
  Unique identifier assigned by the pool when this session is registered.
  Zero for sessions created outside a pool.
  -/
  id : UInt64 := 0
namespace Session
/--
Queues a request on the session's channel and awaits its response.
Throws when the channel is closed or when the connection loop resolves the
request with an error.
-/
def send [Transport α] {β : Type} [Body.Reader β] [Body.Writer β]
    (session : Session α) (request : Request β) : Async (Response Body.Incoming) := do
  let responsePromise ← IO.Promise.new
  -- The body is type-erased so the connection loop does not depend on `β`.
  let task ← session.requestChannel.send {
    request := { line := request.line, body := Body.Operations.of request.body, extensions := request.extensions }
    responsePromise
  }
  let .ok _ ← await task
    | throw (.userError "connection closed, cannot send more requests")
  match ← await responsePromise.result! with
  | .ok response => pure response
  | .error error => throw error
/--
Waits until the background connection loop has exited (i.e. the `shutdown`
promise is resolved).
-/
def waitShutdown (session : Session α) : Async Unit := do
  await session.shutdown.result!
/--
Closes the session's request channel so no further requests can be queued.
Closing an already-closed channel is a no-op (the error is discarded).
-/
def close (session : Session α) : Async Unit := do
  discard <| EIO.toBaseIO session.requestChannel.close
/--
Creates an HTTP client session over the given transport and starts its background loop.
The loop resolves `shutdown` when it terminates, even on error.
-/
def new [Transport t] (client : t) (config : Config := {}) : Async (Session t) := do
  let requestChannel ← Std.CloseableChannel.new
  let shutdown ← IO.Promise.new
  let context ← CancellationContext.new
  background do
    try
      Std.Http.Client.Connection.handle client
        ({ config := config.toH1Config } : H1.Machine .sending)
        config context requestChannel
    finally
      -- Always signal shutdown so `waitShutdown` cannot hang.
      discard <| shutdown.resolve ()
  pure { requestChannel, shutdown, config }
end Session
end Std.Http.Client

View File

@@ -12,6 +12,10 @@ public import Std.Internal.Http.Data.Request
public import Std.Internal.Http.Data.Response
public import Std.Internal.Http.Data.Status
public import Std.Internal.Http.Data.Chunk
public import Std.Internal.Http.Data.Headers
public import Std.Internal.Http.Data.URI
public import Std.Internal.Http.Data.Body
public import Std.Internal.Http.Data.Cookie
/-!
# HTTP Data Types

View File

@@ -0,0 +1,17 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Async.ContextAsync
public import Std.Internal.Http.Data.Headers
public import Std.Internal.Http.Data.Body.Basic
public import Std.Internal.Http.Data.Body.Length
public import Std.Internal.Http.Data.Body.Reader
public import Std.Internal.Http.Data.Body.Writer
public import Std.Internal.Http.Data.Body.Stream
public import Std.Internal.Http.Data.Body.Empty
public import Std.Internal.Http.Data.Body.Full

View File

@@ -0,0 +1,61 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Async.ContextAsync
public import Std.Internal.Http.Data.Headers
public import Std.Internal.Http.Data.Body.Length
public section
/-!
# Body.Basic
This module defines shared types for HTTP body handling.
-/
namespace Std.Http.Body
set_option linter.all true
/--
Typeclass for types that can be converted to a `ByteArray`.
-/
class ToByteArray (α : Type) where
  /--
  Transforms into a `ByteArray`.
  -/
  toByteArray : α → ByteArray

instance : ToByteArray ByteArray where
  toByteArray := id

instance : ToByteArray String where
  toByteArray := String.toUTF8

/--
Typeclass for types that can be decoded from a `ByteArray`. The conversion may fail with an error
message if the bytes are not valid for the target type.
-/
class FromByteArray (α : Type) where
  /--
  Attempts to decode a `ByteArray` into the target type, returning an error message on failure.
  -/
  fromByteArray : ByteArray → Except String α

instance : FromByteArray ByteArray where
  fromByteArray := .ok

instance : FromByteArray String where
  fromByteArray bs :=
    match String.fromUTF8? bs with
    | some s => .ok s
    | none => .error "invalid UTF-8 encoding"
end Std.Http.Body

View File

@@ -0,0 +1,103 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Async
public import Std.Internal.Http.Data.Request
public import Std.Internal.Http.Data.Response
public import Std.Internal.Http.Data.Body.Length
public import Std.Internal.Http.Data.Body.Reader
public import Std.Internal.Http.Data.Chunk
public section
/-!
# Body.Empty
Represents an always-empty, already-closed body handle.
-/
namespace Std.Http.Body
open Std Internal IO Async
set_option linter.all true
/--
An empty body handle. Carries no state; all operations report an
already-closed, end-of-stream body.
-/
structure Empty where
  deriving Inhabited
namespace Empty

/--
Receives from an empty body, always returning end-of-stream.
-/
@[inline]
def recv (_ : Empty) : Async (Option Chunk) :=
  return none

/--
Closes an empty body (no-op).
-/
@[inline]
def close (_ : Empty) : Async Unit :=
  return ()

/--
Empty bodies are always closed for reading.
-/
@[inline]
def isClosed (_ : Empty) : Async Bool :=
  return true

open Internal.IO.Async in
/--
Selector that immediately resolves with end-of-stream for an empty body.
-/
@[inline]
def recvSelector (_ : Empty) : Selector (Option Chunk) where
  tryFn := return some none
  registerFn waiter :=
    -- Resolve immediately with EOF; losing the race needs no cleanup.
    waiter.race (pure ()) fun promise => promise.resolve (.ok none)
  unregisterFn := return ()

end Empty
-- `Empty` can be consumed through the generic `Reader` interface; every
-- operation is a no-op reporting a closed, empty stream.
instance : Reader Empty where
  recv := Empty.recv
  close := Empty.close
  isClosed := Empty.isClosed
  recvSelector := Empty.recvSelector
end Std.Http.Body
namespace Std.Http.Request.Builder
open Internal.IO.Async

/--
Builds a request with an empty body.
-/
def blank (builder : Builder) : Async (Request Body.Empty) :=
  return builder.body {}

end Std.Http.Request.Builder

namespace Std.Http.Response.Builder
open Internal.IO.Async

/--
Builds a response with an empty body.
-/
def blank (builder : Builder) : Async (Response Body.Empty) :=
  return builder.body {}

end Std.Http.Response.Builder

View File

@@ -0,0 +1,266 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Sync
public import Std.Internal.Async
public import Std.Internal.Http.Data.Request
public import Std.Internal.Http.Data.Response
public import Std.Internal.Http.Data.Body.Length
public import Std.Internal.Http.Data.Body.Reader
public import Std.Internal.Http.Data.Chunk
public import Init.Data.ByteArray
public section
/-!
# Body.Full
A body backed by a fixed `ByteArray` held in a `Mutex`.
The byte array is consumed at most once: the first call to `recv` or `tryRecv` atomically
takes the data and returns it as a single chunk; subsequent calls return `none` (end-of-stream).
Closing the body discards any unconsumed data.
`Full` implements `Body.Writer`. The `Writer` instance is a no-op for sends since the content is
fixed at construction; it is provided so that `Full` can substitute for a streaming channel in
contexts that require a writable body handle.
-/
namespace Std.Http.Body
open Std Internal IO Async
set_option linter.all true
/--
A body backed by a fixed, mutex-protected `ByteArray`.
The data is consumed on the first read. Once consumed (or explicitly closed), the body
behaves as a closed, empty channel.
-/
structure Full where
  private mk ::
  -- `some data` = unconsumed bytes; `none` = consumed or closed.
  private state : Mutex (Option ByteArray)
  deriving Nonempty
namespace Full

-- Atomically takes the stored bytes and returns them as one chunk.
-- Returns `none` when already consumed/closed, or when the stored array is
-- empty (empty data is treated as immediate end-of-stream).
private def takeChunk [Monad m] [MonadLiftT (ST IO.RealWorld) m] :
    AtomicT (Option ByteArray) m (Option Chunk) := do
  match ← get with
  | none =>
    pure none
  | some data =>
    set (none : Option ByteArray)
    if data.isEmpty then
      pure none
    else
      pure (some (Chunk.ofByteArray data))

/--
Creates a `Full` body from a `ByteArray`.
-/
def ofByteArray (data : ByteArray) : Async Full := do
  let state ← Mutex.new (some data)
  return { state }

/--
Creates a `Full` body from a `String`.
-/
def ofString (data : String) : Async Full := do
  let state ← Mutex.new (some data.toUTF8)
  return { state }

/--
Atomically takes the byte array and returns it as a chunk.
Returns `none` if the data has already been consumed or the body is closed.
-/
def tryRecv (full : Full) : Async (Option Chunk) :=
  full.state.atomically do
    takeChunk

/--
Receives the body data. Returns the full byte array on the first call as a single chunk,
then `none` on all subsequent calls.
-/
def recv (full : Full) : Async (Option Chunk) :=
  full.tryRecv

/--
No-op send for a fixed full body.
-/
@[inline]
def send (_ : Full) (_ : Chunk) (_incomplete : Bool := false) : Async Unit :=
  pure ()

/--
Closes the body, discarding any unconsumed data.
-/
def close (full : Full) : Async Unit :=
  full.state.atomically do
    set (none : Option ByteArray)

/--
Returns `true` when the data has been consumed or the body has been closed.
-/
def isClosed (full : Full) : Async Bool :=
  full.state.atomically do
    return (← get).isNone

/--
A fixed full body never has consumer interest.
-/
@[inline]
def hasInterest (_ : Full) : Async Bool :=
  pure false

/--
Returns known-size metadata based on current remaining bytes.
-/
def getKnownSize (full : Full) : Async (Option Body.Length) :=
  full.state.atomically do
    match ← get with
    | none => pure (some (.fixed 0))
    | some data => pure (some (.fixed data.size))

/--
No-op metadata setter for a fixed full body.
-/
@[inline]
def setKnownSize (_ : Full) (_ : Option Body.Length) : Async Unit :=
  pure ()

open Internal.IO.Async in
/--
Selector that immediately resolves to `false` for interest.
-/
def interestSelector (_ : Full) : Selector Bool where
  tryFn := pure (some false)
  registerFn waiter := do
    let lose := pure ()
    let win promise := do
      promise.resolve (.ok false)
    waiter.race lose win
  unregisterFn := pure ()

open Internal.IO.Async in
/--
Selector that immediately resolves to the remaining chunk (or EOF).
-/
def recvSelector (full : Full) : Selector (Option Chunk) where
  tryFn := do
    let chunk ← full.state.atomically do
      takeChunk
    pure (some chunk)
  registerFn waiter := do
    -- Take the data eagerly; if the race is lost the data is dropped, which is
    -- consistent with the consume-at-most-once contract of `Full`.
    let chunk ← full.state.atomically do
      takeChunk
    let lose := pure ()
    let win promise := do
      promise.resolve (.ok chunk)
    waiter.race lose win
  unregisterFn := pure ()

end Full
-- `Full` can be consumed through the generic `Reader` interface.
instance : Reader Full where
  recv := Full.recv
  close := Full.close
  isClosed := Full.isClosed
  recvSelector := Full.recvSelector
end Std.Http.Body
namespace Std.Http.Request.Builder
open Internal.IO.Async

-- Attaches `content` to the builder as a `Body.Full` without touching headers.
private def fromBytesCore
    (builder : Builder)
    (content : ByteArray) :
    Async (Request Body.Full) := do
  return builder.body (← Body.Full.ofByteArray content)

/--
Builds a request from raw bytes.
-/
def fromBytes (builder : Builder) (content : ByteArray) : Async (Request Body.Full) :=
  fromBytesCore builder content

/--
Builds a request with a binary body.
-/
def bytes (builder : Builder) (content : ByteArray) : Async (Request Body.Full) := do
  let builder := builder.header Header.Name.contentType (Header.Value.ofString! "application/octet-stream")
  fromBytesCore builder content

/--
Builds a request with a text body.
-/
def text (builder : Builder) (content : String) : Async (Request Body.Full) := do
  let builder := builder.header Header.Name.contentType (Header.Value.ofString! "text/plain; charset=utf-8")
  fromBytesCore builder content.toUTF8

/--
Builds a request with a JSON body.
-/
def json (builder : Builder) (content : String) : Async (Request Body.Full) := do
  let builder := builder.header Header.Name.contentType (Header.Value.ofString! "application/json")
  fromBytesCore builder content.toUTF8

/--
Builds a request with an HTML body.
-/
def html (builder : Builder) (content : String) : Async (Request Body.Full) := do
  let builder := builder.header Header.Name.contentType (Header.Value.ofString! "text/html; charset=utf-8")
  fromBytesCore builder content.toUTF8

end Std.Http.Request.Builder
namespace Std.Http.Response.Builder
open Internal.IO.Async

-- Attaches `content` to the builder as a `Body.Full` without touching headers.
private def fromBytesCore
    (builder : Builder)
    (content : ByteArray) :
    Async (Response Body.Full) := do
  return builder.body (← Body.Full.ofByteArray content)

/--
Builds a response from raw bytes.
-/
def fromBytes (builder : Builder) (content : ByteArray) : Async (Response Body.Full) :=
  fromBytesCore builder content

/--
Builds a response with a binary body.
-/
def bytes (builder : Builder) (content : ByteArray) : Async (Response Body.Full) := do
  let builder := builder.header Header.Name.contentType (Header.Value.ofString! "application/octet-stream")
  fromBytesCore builder content

/--
Builds a response with a text body.
-/
def text (builder : Builder) (content : String) : Async (Response Body.Full) := do
  let builder := builder.header Header.Name.contentType (Header.Value.ofString! "text/plain; charset=utf-8")
  fromBytesCore builder content.toUTF8

/--
Builds a response with a JSON body.
-/
def json (builder : Builder) (content : String) : Async (Response Body.Full) := do
  let builder := builder.header Header.Name.contentType (Header.Value.ofString! "application/json")
  fromBytesCore builder content.toUTF8

/--
Builds a response with an HTML body.
-/
def html (builder : Builder) (content : String) : Async (Response Body.Full) := do
  let builder := builder.header Header.Name.contentType (Header.Value.ofString! "text/html; charset=utf-8")
  fromBytesCore builder content.toUTF8

end Std.Http.Response.Builder

View File

@@ -0,0 +1,49 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Init.Data.Repr
public section
/-!
# Body.Length
This module defines the `Length` type, that represents the Content-Length or Transfer-Encoding
of an HTTP request or response.
-/
namespace Std.Http.Body
set_option linter.all true
/--
Size of the body of a response or request.
-/
inductive Length
  /--
  Indicates that the HTTP message body uses **chunked transfer encoding**.
  -/
  | chunked
  /--
  Indicates that the HTTP message body has a **fixed, known length**, as specified by the
  `Content-Length` header.
  -/
  | fixed (n : Nat)
  deriving Repr, BEq
namespace Length
/--
Checks if the `Length` is chunked.
-/
def isChunked : Length → Bool
  | .chunked => true
  | _ => false
end Std.Http.Body.Length

View File

@@ -0,0 +1,63 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Async
public import Std.Internal.Http.Data.Chunk
public import Std.Internal.Http.Data.Body.Basic
public import Std.Internal.Http.Data.Body.Stream
public section
/-!
# Body.Reader
Reader typeclass for body-like values that can be consumed as chunk streams.
-/
namespace Std.Http.Body
open Std Internal IO Async
set_option linter.all true
/--
Typeclass for values that can be read as HTTP body streams.
-/
class Reader (α : Type) where
  /--
  Receives the next body chunk. Returns `none` at end-of-stream.
  -/
  recv : α → Async (Option Chunk)
  /--
  Closes the reader stream.
  -/
  close : α → Async Unit
  /--
  Returns `true` when the reader stream is closed.
  -/
  isClosed : α → Async Bool
  /--
  Selector that resolves when a chunk is available or EOF is reached.
  -/
  recvSelector : α → Selector (Option Chunk)
instance : Reader Incoming where
  recv := Incoming.recv
  close := Incoming.close
  isClosed := Incoming.isClosed
  recvSelector := Incoming.recvSelector

-- An `Outgoing` handle shares state with its `Incoming` face, so reading
-- through it just reinterprets the handle as the receive side.
instance : Reader Outgoing where
  recv body := Reader.recv (Body.Internal.outgoingToIncoming body)
  close body := Reader.close (Body.Internal.outgoingToIncoming body)
  isClosed body := Reader.isClosed (Body.Internal.outgoingToIncoming body)
  recvSelector body := Reader.recvSelector (Body.Internal.outgoingToIncoming body)
end Std.Http.Body

View File

@@ -0,0 +1,665 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Sync
public import Std.Internal.Async
public import Std.Internal.Http.Data.Request
public import Std.Internal.Http.Data.Response
public import Std.Internal.Http.Data.Chunk
public import Std.Internal.Http.Data.Body.Basic
public import Std.Internal.Http.Data.Body.Length
public import Init.Data.ByteArray
public section
/-!
# Body.Stream
This module defines a zero-buffer rendezvous body channel split into two faces:
- `Body.Outgoing`: producer side (send chunks)
- `Body.Incoming`: consumer side (receive chunks)
There is no queue and no capacity. A send waits for a receiver and a receive waits for a sender.
At most one blocked producer and one blocked consumer are supported.
-/
namespace Std.Http.Body
open Std Internal IO Async
set_option linter.all true
namespace Channel

open Internal.IO.Async in
-- A blocked consumer: either a plain `recv` promise, or a selector waiter that
-- may have already lost a race elsewhere.
private inductive Consumer where
  | normal (promise : IO.Promise (Option Chunk))
  | select (finished : Waiter (Option Chunk))

-- Delivers `x` to the consumer. Returns `false` only for a selector waiter
-- whose race was already won elsewhere — in that case `x` was NOT delivered.
private def Consumer.resolve (c : Consumer) (x : Option Chunk) : BaseIO Bool := do
  match c with
  | .normal promise =>
    promise.resolve x
    return true
  | .select waiter =>
    let lose := return false
    let win promise := do
      promise.resolve (.ok x)
      return true
    waiter.race lose win

-- A blocked producer holding the chunk it wants to hand off.
private structure Producer where
  chunk : Chunk
  /--
  Resolved with `true` when consumed by a receiver, `false` when the channel closes.
  -/
  done : IO.Promise Bool

open Internal.IO.Async in
-- Resolves an interest-selector waiter with `x`; returns `false` when the
-- waiter already lost its race (nothing was delivered).
private def resolveInterestWaiter (waiter : Waiter Bool) (x : Bool) : BaseIO Bool := do
  let lose := return false
  let win promise := do
    promise.resolve (.ok x)
    return true
  waiter.race lose win

private structure State where
  /--
  A single blocked producer waiting for a receiver.
  -/
  pendingProducer : Option Producer
  /--
  A single blocked consumer waiting for a producer.
  -/
  pendingConsumer : Option Consumer
  /--
  A waiter for `Outgoing.interestSelector`.
  -/
  interestWaiter : Option (Internal.IO.Async.Waiter Bool)
  /--
  Whether the channel is closed.
  -/
  closed : Bool
  /--
  Known size of the stream if available.
  -/
  knownSize : Option Body.Length
  /--
  Buffered partial chunk data accumulated from `Outgoing.send ... (incomplete := true)`.
  These partial pieces are collapsed and emitted as a single chunk on the next complete send.
  -/
  pendingIncompleteChunk : Option Chunk := none
  deriving Nonempty

end Channel
/--
Receive-side face of a body channel.
-/
structure Incoming where
  private mk ::
  -- Shared with the `Outgoing` face created by the same `mkChannel` call.
  private state : Mutex Channel.State
  deriving Nonempty, TypeName

/--
Send-side face of a body channel.
-/
structure Outgoing where
  private mk ::
  -- Shared with the `Incoming` face created by the same `mkChannel` call.
  private state : Mutex Channel.State
  deriving Nonempty, TypeName
/--
Creates a rendezvous body channel, returning the send and receive faces that
share a single mutex-protected state cell.
-/
def mkChannel : Async (Outgoing × Incoming) := do
  let state ← Mutex.new {
    pendingProducer := none
    pendingConsumer := none
    interestWaiter := none
    closed := false
    knownSize := none
  }
  return ({ state }, { state })
namespace Channel

-- Decrements a fixed known size by a delivered chunk's size (Nat subtraction
-- saturates at 0); chunked/unknown sizes are left untouched.
private def decreaseKnownSize (knownSize : Option Body.Length) (chunk : Chunk) : Option Body.Length :=
  match knownSize with
  | some (.fixed res) => some (Body.Length.fixed (res - chunk.data.size))
  | _ => knownSize

-- Drops the consumer/interest waiters whose selector race already finished, so
-- their slots can be reused by later operations.
private def pruneFinishedWaiters [Monad m] [MonadLiftT (ST IO.RealWorld) m] :
    AtomicT State m Unit := do
  let st ← get
  let pendingConsumer ←
    match st.pendingConsumer with
    | some (.select waiter) =>
      if ← waiter.checkFinished then
        pure none
      else
        pure st.pendingConsumer
    | _ =>
      pure st.pendingConsumer
  let interestWaiter ←
    match st.interestWaiter with
    | some waiter =>
      if ← waiter.checkFinished then
        pure none
      else
        pure st.interestWaiter
    | none =>
      pure none
  set { st with pendingConsumer, interestWaiter }

-- Wakes a pending `interestSelector` waiter with `true` (a consumer appeared).
private def signalInterest [Monad m] [MonadLiftT (ST IO.RealWorld) m] [MonadLiftT BaseIO m] :
    AtomicT State m Unit := do
  let st ← get
  if let some waiter := st.interestWaiter then
    discard <| resolveInterestWaiter waiter true
    set { st with interestWaiter := none }

-- `true` when `recv` would not block: a producer is waiting or the channel closed.
private def recvReady' [Monad m] [MonadLiftT (ST IO.RealWorld) m] :
    AtomicT State m Bool := do
  let st ← get
  return st.pendingProducer.isSome || st.closed

-- `true` when a consumer is currently blocked waiting for data.
private def hasInterest' [Monad m] [MonadLiftT (ST IO.RealWorld) m] :
    AtomicT State m Bool := do
  let st ← get
  return st.pendingConsumer.isSome

-- Takes a pending producer's chunk if one is waiting, completing its handoff
-- and updating the known-size bookkeeping.
private def tryRecv' [Monad m] [MonadLiftT (ST IO.RealWorld) m] [MonadLiftT BaseIO m] :
    AtomicT State m (Option Chunk) := do
  let st ← get
  if let some producer := st.pendingProducer then
    set {
      st with
      pendingProducer := none
      knownSize := decreaseKnownSize st.knownSize producer.chunk
    }
    -- Wake the producer: its chunk was consumed.
    discard <| producer.done.resolve true
    return some producer.chunk
  else
    return none

-- Closes the channel: wakes a blocked consumer with EOF, the interest waiter
-- with `false`, and a blocked producer with `false` (= not delivered), then
-- clears all pending state. Idempotent.
private def close' [Monad m] [MonadLiftT (ST IO.RealWorld) m] [MonadLiftT BaseIO m] :
    AtomicT State m Unit := do
  let st ← get
  if st.closed then
    return ()
  if let some consumer := st.pendingConsumer then
    discard <| consumer.resolve none
  if let some waiter := st.interestWaiter then
    discard <| resolveInterestWaiter waiter false
  if let some producer := st.pendingProducer then
    discard <| producer.done.resolve false
  set {
    st with
    pendingProducer := none
    pendingConsumer := none
    interestWaiter := none
    pendingIncompleteChunk := none
    closed := true
  }

end Channel
namespace Incoming

/--
Attempts to receive a chunk from the channel without blocking.
Returns `some chunk` only when a producer is already waiting.
-/
def tryRecv (incoming : Incoming) : Async (Option Chunk) :=
  incoming.state.atomically do
    Channel.pruneFinishedWaiters
    Channel.tryRecv'

-- Core of `recv`: returns an already-resolved task when a chunk or EOF is
-- immediately available; otherwise installs this caller as the single blocked
-- consumer and returns a task tied to its promise.
private def recv' (incoming : Incoming) : BaseIO (AsyncTask (Option Chunk)) := do
  incoming.state.atomically do
    Channel.pruneFinishedWaiters
    if let some chunk ← Channel.tryRecv' then
      return AsyncTask.pure (some chunk)
    let st ← get
    if st.closed then
      return AsyncTask.pure none
    if st.pendingConsumer.isSome then
      return Task.pure (.error (IO.Error.userError "only one blocked consumer is allowed"))
    let promise ← IO.Promise.new
    set { st with pendingConsumer := some (.normal promise) }
    Channel.signalInterest
    return promise.result?.map (sync := true) fun
      | none => .error (IO.Error.userError "the promise linked to the consumer was dropped")
      | some res => .ok res

/--
Receives a chunk from the channel. Blocks until a producer sends one.
Returns `none` if the channel is closed and no producer is waiting.
-/
def recv (incoming : Incoming) : Async (Option Chunk) := do
  Async.ofAsyncTask (← recv' incoming)

/--
Closes the channel.
-/
def close (incoming : Incoming) : Async Unit :=
  incoming.state.atomically do
    Channel.close'

/--
Checks whether the channel is closed.
-/
@[always_inline, inline]
def isClosed (incoming : Incoming) : Async Bool :=
  incoming.state.atomically do
    return (← get).closed

/--
Gets the known size if available.
-/
@[always_inline, inline]
def getKnownSize (incoming : Incoming) : Async (Option Body.Length) :=
  incoming.state.atomically do
    return (← get).knownSize

/--
Sets known size metadata.
-/
@[always_inline, inline]
def setKnownSize (incoming : Incoming) (size : Option Body.Length) : Async Unit :=
  incoming.state.atomically do
    modify fun st => { st with knownSize := size }

open Internal.IO.Async in
/--
Creates a selector that resolves when a producer is waiting (or the channel closes).
-/
def recvSelector (incoming : Incoming) : Selector (Option Chunk) where
  tryFn := do
    incoming.state.atomically do
      Channel.pruneFinishedWaiters
      if ← Channel.recvReady' then
        return some (← Channel.tryRecv')
      else
        return none
  registerFn waiter := do
    incoming.state.atomically do
      Channel.pruneFinishedWaiters
      if ← Channel.recvReady' then
        let lose := return ()
        let win promise := do
          promise.resolve (.ok (← Channel.tryRecv'))
        waiter.race lose win
      else
        let st ← get
        if st.pendingConsumer.isSome then
          throw (.userError "only one blocked consumer is allowed")
        set { st with pendingConsumer := some (.select waiter) }
        Channel.signalInterest
  unregisterFn := do
    incoming.state.atomically do
      Channel.pruneFinishedWaiters

/--
Iterates over chunks until the channel closes.
-/
@[inline]
protected partial def forIn
    {β : Type} (incoming : Incoming) (acc : β)
    (step : Chunk → β → Async (ForInStep β)) : Async β := do
  let rec @[specialize] loop (incoming : Incoming) (acc : β) : Async β := do
    if let some chunk ← incoming.recv then
      match ← step chunk acc with
      | .done res => return res
      | .yield res => loop incoming res
    else
      return acc
  loop incoming acc

/--
Context-aware iteration over chunks until the channel closes or the context is done.
-/
@[inline]
protected partial def forIn'
    {β : Type} (incoming : Incoming) (acc : β)
    (step : Chunk → β → ContextAsync (ForInStep β)) : ContextAsync β := do
  let rec @[specialize] loop (incoming : Incoming) (acc : β) : ContextAsync β := do
    -- Race the body channel against context cancellation.
    let data ← Selectable.one #[
      .case incoming.recvSelector pure,
      .case (← ContextAsync.doneSelector) (fun _ => pure none),
    ]
    if let some chunk := data then
      match ← step chunk acc with
      | .done res => return res
      | .yield res => loop incoming res
    else
      return acc
  loop incoming acc

/--
Reads all remaining chunks and decodes them into `α`.
Throws when `maximumSize` is exceeded or when decoding fails.
-/
partial def readAll
    [FromByteArray α]
    (incoming : Incoming)
    (maximumSize : Option UInt64 := none) :
    ContextAsync α := do
  let rec loop (result : ByteArray) : ContextAsync ByteArray := do
    let data ← Selectable.one #[
      .case incoming.recvSelector pure,
      .case (← ContextAsync.doneSelector) (fun _ => pure none),
    ]
    match data with
    | none => return result
    | some chunk =>
      let result := result ++ chunk.data
      if let some max := maximumSize then
        if result.size.toUInt64 > max then
          throw (.userError s!"body exceeded maximum size of {max} bytes")
      loop result
  let result ← loop ByteArray.empty
  match FromByteArray.fromByteArray result with
  | .ok a => return a
  | .error msg => throw (.userError msg)

end Incoming
namespace Outgoing

-- Merges buffered incomplete pieces with `chunk` under the state lock.
-- `.ok none` = merged data stays buffered (`incomplete := true`);
-- `.ok (some merged)` = caller should deliver the merged chunk;
-- `.error` = the channel is closed.
private def collapseForSend
    (outgoing : Outgoing)
    (chunk : Chunk)
    (incomplete : Bool) : BaseIO (Except IO.Error (Option Chunk)) := do
  outgoing.state.atomically do
    Channel.pruneFinishedWaiters
    let st ← get
    if st.closed then
      return .error (.userError "channel closed")
    let merged := match st.pendingIncompleteChunk with
      | some pending =>
        {
          data := pending.data ++ chunk.data
          -- The first non-empty extension set wins.
          extensions := if pending.extensions.isEmpty then chunk.extensions else pending.extensions
        }
      | none => chunk
    if incomplete then
      set { st with pendingIncompleteChunk := some merged }
      return .ok none
    else
      set { st with pendingIncompleteChunk := none }
      return .ok (some merged)

-- Delivers `chunk` to a consumer, retrying when a selector-based consumer lost
-- its race, or installing this caller as the single blocked producer.
-- Inner transaction result: `some true` = delivered directly, `some false` =
-- consumer race lost (retry), `none` = producer installed, await `done`.
private partial def send' (outgoing : Outgoing) (chunk : Chunk) : Async Unit := do
  let done ← IO.Promise.new
  while true do
    let result : Except IO.Error (Option Bool) ← outgoing.state.atomically do
      Channel.pruneFinishedWaiters
      let st ← get
      if st.closed then
        return .error (IO.Error.userError "channel closed")
      if let some consumer := st.pendingConsumer then
        let success ← consumer.resolve (some chunk)
        if success then
          set {
            st with
            pendingConsumer := none
            knownSize := Channel.decreaseKnownSize st.knownSize chunk
          }
          return .ok (some true)
        else
          -- The consumer's selector already finished elsewhere: drop it and retry.
          set { st with pendingConsumer := none }
          return .ok (some false)
      else if st.pendingProducer.isSome then
        return .error (IO.Error.userError "only one blocked producer is allowed")
      else
        set { st with pendingProducer := some { chunk, done } }
        return .ok none
    match result with
    | .error err =>
      throw err
    | .ok (some true) =>
      return ()
    | .ok (some false) =>
      -- Retry via the surrounding loop. (Recursing here as well would deliver
      -- the chunk twice, once from the recursive call and once from the next
      -- loop iteration.)
      pure ()
    | .ok none =>
      match ← await done.result? with
      | some true => return ()
      | _ => throw (IO.Error.userError "channel closed")

/--
Sends a chunk.
If `incomplete := true`, the chunk is buffered and collapsed with subsequent chunks, and is not
delivered to the receiver yet.
If `incomplete := false`, any buffered incomplete pieces are collapsed with this chunk and the
single merged chunk is sent.
-/
def send (outgoing : Outgoing) (chunk : Chunk) (incomplete : Bool := false) : Async Unit := do
  match (← collapseForSend outgoing chunk incomplete) with
  | .error err => throw err
  | .ok none => pure ()
  | .ok (some toSend) =>
    -- Nothing to deliver: skip the rendezvous for fully-empty chunks.
    if toSend.data.isEmpty && toSend.extensions.isEmpty then
      return ()
    send' outgoing toSend

/--
Closes the channel.
-/
def close (outgoing : Outgoing) : Async Unit :=
  outgoing.state.atomically do
    Channel.close'

/--
Checks whether the channel is closed.
-/
@[always_inline, inline]
def isClosed (outgoing : Outgoing) : Async Bool :=
  outgoing.state.atomically do
    return (← get).closed

/--
Returns `true` when a consumer is currently blocked waiting for data.
-/
def hasInterest (outgoing : Outgoing) : Async Bool :=
  outgoing.state.atomically do
    Channel.pruneFinishedWaiters
    Channel.hasInterest'

/--
Gets the known size if available.
-/
@[always_inline, inline]
def getKnownSize (outgoing : Outgoing) : Async (Option Body.Length) :=
  outgoing.state.atomically do
    return (← get).knownSize

/--
Sets known size metadata.
-/
@[always_inline, inline]
def setKnownSize (outgoing : Outgoing) (size : Option Body.Length) : Async Unit :=
  outgoing.state.atomically do
    modify fun st => { st with knownSize := size }

open Internal.IO.Async in
/--
Creates a selector that resolves when consumer interest is present.
Returns `true` when a consumer is waiting, `false` when the channel closes first.
-/
def interestSelector (outgoing : Outgoing) : Selector Bool where
  tryFn := do
    outgoing.state.atomically do
      Channel.pruneFinishedWaiters
      let st ← get
      if st.pendingConsumer.isSome then
        return some true
      else if st.closed then
        return some false
      else
        return none
  registerFn waiter := do
    outgoing.state.atomically do
      Channel.pruneFinishedWaiters
      let st ← get
      if st.pendingConsumer.isSome then
        let lose := return ()
        let win promise := do
          promise.resolve (.ok true)
        waiter.race lose win
      else if st.closed then
        let lose := return ()
        let win promise := do
          promise.resolve (.ok false)
        waiter.race lose win
      else if st.interestWaiter.isSome then
        throw (.userError "only one blocked interest selector is allowed")
      else
        set { st with interestWaiter := some waiter }
  unregisterFn := do
    outgoing.state.atomically do
      Channel.pruneFinishedWaiters

end Outgoing
/- Internal conversions between channel faces.
Use these only in HTTP internals where body direction must be adapted. -/
namespace Internal

/--
Reinterprets the receive-side handle as a send-side handle over the same channel.
-/
@[always_inline, inline]
def incomingToOutgoing (incoming : Incoming) : Outgoing :=
  { state := incoming.state }

/--
Reinterprets the send-side handle as a receive-side handle over the same channel.
-/
@[always_inline, inline]
def outgoingToIncoming (outgoing : Outgoing) : Incoming :=
  { state := outgoing.state }

end Internal
/--
Creates a body from a producer function.
Returns the receive-side handle immediately and runs `gen` in a detached task.
The channel is always closed when `gen` returns or throws.
Errors from `gen` are not rethrown here; consumers observe end-of-stream via `recv = none`.
-/
def stream (gen : Outgoing → Async Unit) : Async Incoming := do
  let (outgoing, incoming) ← mkChannel
  background <| do
    try
      gen outgoing
    finally
      outgoing.close
  return incoming
/--
Creates a body from a fixed byte array.
-/
def fromBytes (content : ByteArray) : Async Incoming := do
  stream fun outgoing => do
    outgoing.setKnownSize (some (.fixed content.size))
    -- Empty content sends nothing; `stream` closes the channel when `gen` returns.
    if content.size > 0 then
      outgoing.send (Chunk.ofByteArray content)
/--
Creates an empty body: known size zero and already closed, so the first `recv`
returns `none`.
-/
def empty : Async Incoming := do
  let (outgoing, incoming) ← mkChannel
  outgoing.setKnownSize (some (.fixed 0))
  outgoing.close
  return incoming
-- `for chunk in incoming do ...` support in plain and context-aware async code.
instance : ForIn Async Incoming Chunk where
  forIn := Incoming.forIn

instance : ForIn ContextAsync Incoming Chunk where
  forIn := Incoming.forIn'
end Std.Http.Body
namespace Std.Http.Request.Builder
open Internal.IO.Async

/--
Builds a request with a streaming body generator.
-/
def stream
    (builder : Builder)
    (gen : Body.Outgoing → Async Unit) :
    Async (Request Body.Outgoing) := do
  let incoming ← Body.stream gen
  return Request.Builder.body builder (Body.Internal.incomingToOutgoing incoming)

end Std.Http.Request.Builder
namespace Std.Http.Response.Builder
open Internal.IO.Async

/--
Builds a response with a streaming body generator.
-/
def stream
    (builder : Builder)
    (gen : Body.Outgoing → Async Unit) :
    Async (Response Body.Outgoing) := do
  let incoming ← Body.stream gen
  return Response.Builder.body builder (Body.Internal.incomingToOutgoing incoming)

end Std.Http.Response.Builder

View File

@@ -0,0 +1,227 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Async
public import Std.Internal.Http.Data.Body.Length
public import Std.Internal.Http.Data.Chunk
public import Std.Internal.Http.Data.Body.Stream
public import Std.Internal.Http.Data.Body.Full
public import Std.Internal.Http.Data.Body.Empty
public section
/-!
# Body.Writer
Writer typeclass for body-like values that can produce chunk streams.
-/
namespace Std.Http.Body
open Std Internal IO Async
set_option linter.all true
/--
Typeclass for values that can be written as HTTP body streams.
-/
class Writer (α : Type) where
  /--
  Sends a body chunk. The `Bool` flag marks the chunk as incomplete, letting
  implementations buffer it and collapse it with subsequent pieces.
  -/
  send : α → Chunk → Bool → Async Unit
  /--
  Closes the writer stream.
  -/
  close : α → Async Unit
  /--
  Returns `true` when the writer stream is closed.
  -/
  isClosed : α → Async Bool
  /--
  Returns `true` when a consumer is waiting for data.
  -/
  hasInterest : α → Async Bool
  /--
  Gets known stream size metadata, if available.
  -/
  getKnownSize : α → Async (Option Body.Length)
  /--
  Sets known stream size metadata.
  -/
  setKnownSize : α → Option Body.Length → Async Unit
  /--
  Selector that resolves when consumer interest appears.
  -/
  interestSelector : α → Selector Bool
namespace Writer

/--
Writes `chunk` to `body` as a complete chunk, i.e. with the incomplete
flag cleared (`incomplete := false`).
-/
@[inline]
def writeChunk [Writer α] (body : α) (chunk : Chunk) : Async Unit :=
  send body chunk false

end Writer
/--
Union of writer-capable body variants. Collapses the three concrete body
types into a single sum so handlers can be stored and dispatched uniformly.
-/
inductive AnyBody where
  /--
  Channel-backed streaming body writer.
  -/
  | outgoing (body : Outgoing)
  /--
  Fixed full-body writer handle.
  -/
  | full (body : Full)
  /--
  Always-empty writer handle.
  -/
  | empty (body : Empty)
-- Injections of the concrete body types into the `AnyBody` sum.
instance : Coe Outgoing AnyBody := ⟨.outgoing⟩

instance : Coe Full AnyBody := ⟨.full⟩

instance : Coe Empty AnyBody := ⟨.empty⟩

-- Upcast a typed response to a response over `AnyBody`, reusing the body
-- coercions above via structure-update elaboration.
instance : Coe (Response Empty) (Response AnyBody) := ⟨fun r => { r with }⟩

instance : Coe (Response Full) (Response AnyBody) := ⟨fun r => { r with }⟩

instance : Coe (Response Outgoing) (Response AnyBody) := ⟨fun r => { r with }⟩
-- Lift the response coercions through `ContextAsync`/`Async`: run the action,
-- then upcast the resulting response body to `AnyBody`.
instance : Coe (ContextAsync (Response Empty)) (ContextAsync (Response AnyBody)) where
  coe action := do
    let response ← action
    pure (response : Response AnyBody)

instance : Coe (ContextAsync (Response Full)) (ContextAsync (Response AnyBody)) where
  coe action := do
    let response ← action
    pure (response : Response AnyBody)

instance : Coe (ContextAsync (Response Outgoing)) (ContextAsync (Response AnyBody)) where
  coe action := do
    let response ← action
    pure (response : Response AnyBody)

instance : Coe (Async (Response Empty)) (ContextAsync (Response AnyBody)) where
  coe action := do
    let response ← action
    pure (response : Response AnyBody)

instance : Coe (Async (Response Full)) (ContextAsync (Response AnyBody)) where
  coe action := do
    let response ← action
    pure (response : Response AnyBody)

instance : Coe (Async (Response Outgoing)) (ContextAsync (Response AnyBody)) where
  coe action := do
    let response ← action
    pure (response : Response AnyBody)
/--
`Writer` for `Outgoing`: each operation delegates to the corresponding
channel-backed primitive on `Outgoing`.
-/
instance : Writer Outgoing where
  send body chunk incomplete := Outgoing.send body chunk incomplete
  close := Outgoing.close
  isClosed := Outgoing.isClosed
  hasInterest := Outgoing.hasInterest
  getKnownSize := Outgoing.getKnownSize
  setKnownSize := Outgoing.setKnownSize
  interestSelector := Outgoing.interestSelector

/--
`Writer` for `Full`: each operation delegates to the corresponding
fixed-body primitive on `Full`.
-/
instance : Writer Full where
  send body chunk incomplete := Full.send body chunk incomplete
  close := Full.close
  isClosed := Full.isClosed
  hasInterest := Full.hasInterest
  getKnownSize := Full.getKnownSize
  setKnownSize := Full.setKnownSize
  interestSelector := Full.interestSelector
/--
`Writer` for `Empty`: the stream is permanently closed with a fixed known
size of zero, never has consumer interest, and rejects any send attempt.
-/
instance : Writer Empty where
  send _ _ _ := throw <| .userError "cannot send"
  close _ := pure ()
  isClosed _ := pure true
  hasInterest _ := pure false
  getKnownSize _ := pure (some (.fixed 0))
  setKnownSize _ _ := pure ()
  interestSelector _ := {
    tryFn := pure (some false)
    -- Interest can never appear on an empty body, so any registered waiter
    -- is answered immediately with `false` instead of being parked.
    registerFn waiter := do
      let lose := pure ()
      let win promise := do
        promise.resolve (.ok false)
      waiter.race lose win
    unregisterFn := pure ()
  }
/--
`Writer` for `AnyBody`: pattern-matches on the variant and dispatches to the
underlying body's `Writer` instance.
-/
instance : Writer AnyBody where
  send
    | .outgoing body, chunk, incomplete => Writer.send body chunk incomplete
    | .full body, chunk, incomplete => Writer.send body chunk incomplete
    | .empty body, chunk, incomplete => Writer.send body chunk incomplete
  close
    | .outgoing body => Writer.close body
    | .full body => Writer.close body
    | .empty body => Writer.close body
  isClosed
    | .outgoing body => Writer.isClosed body
    | .full body => Writer.isClosed body
    | .empty body => Writer.isClosed body
  hasInterest
    | .outgoing body => Writer.hasInterest body
    | .full body => Writer.hasInterest body
    | .empty body => Writer.hasInterest body
  getKnownSize
    | .outgoing body => Writer.getKnownSize body
    | .full body => Writer.getKnownSize body
    | .empty body => Writer.getKnownSize body
  setKnownSize
    | .outgoing body, size => Writer.setKnownSize body size
    | .full body, size => Writer.setKnownSize body size
    | .empty body, size => Writer.setKnownSize body size
  interestSelector
    | .outgoing body => Writer.interestSelector body
    | .full body => Writer.interestSelector body
    | .empty body => Writer.interestSelector body

/--
`Reader` for `AnyBody`: pattern-matches on the variant and dispatches to the
underlying body's `Reader` instance.
-/
instance : Reader AnyBody where
  recv
    | .outgoing body => Reader.recv body
    | .full body => Reader.recv body
    | .empty body => Reader.recv body
  close
    | .outgoing body => Reader.close body
    | .full body => Reader.close body
    | .empty body => Reader.close body
  isClosed
    | .outgoing body => Reader.isClosed body
    | .full body => Reader.isClosed body
    | .empty body => Reader.isClosed body
  recvSelector
    | .outgoing body => Reader.recvSelector body
    | .full body => Reader.recvSelector body
    | .empty body => Reader.recvSelector body
end Std.Http.Body

View File

@@ -7,6 +7,7 @@ module
prelude
public import Std.Internal.Http.Internal
public import Std.Internal.Http.Data.Headers
public meta import Std.Internal.Http.Internal.String
public section
@@ -20,8 +21,7 @@ Reference: https://www.rfc-editor.org/rfc/rfc9112.html#section-7.1
-/
namespace Std.Http
open Internal
open Internal Char
set_option linter.all true
@@ -207,3 +207,114 @@ instance : Encode .v11 Chunk where
buffer.append #[size, exts.toUTF8, "\r\n".toUTF8, chunk.data, "\r\n".toUTF8]
end Chunk
/--
Trailer headers sent after the final chunk in HTTP/1.1 chunked transfer encoding.
Per RFC 9112 §7.1.2, trailers allow the sender to include additional metadata after
the message body, such as message integrity checks or digital signatures.
-/
structure Trailer where
  /--
  The trailer header fields as key-value pairs.
  -/
  headers : Headers
  deriving Inhabited
namespace Trailer

/--
Creates an empty trailer with no headers.
-/
def empty : Trailer :=
  { headers := .empty }

/--
Inserts a trailer header field.
-/
@[inline]
def insert (trailer : Trailer) (name : Header.Name) (value : Header.Value) : Trailer :=
  { headers := trailer.headers.insert name value }

/--
Inserts a trailer header field from string name and value, panicking if either is invalid.
-/
@[inline]
def insert! (trailer : Trailer) (name : String) (value : String) : Trailer :=
  { headers := trailer.headers.insert! name value }

/--
Retrieves the first value for the given trailer header name.
Returns `none` if absent.
-/
@[inline]
def get? (trailer : Trailer) (name : Header.Name) : Option Header.Value :=
  trailer.headers.get? name

/--
Retrieves all values for the given trailer header name.
Returns `none` if absent.
-/
@[inline]
def getAll? (trailer : Trailer) (name : Header.Name) : Option (Array Header.Value) :=
  trailer.headers.getAll? name

/--
Checks if a trailer header with the given name exists.
-/
@[inline]
def contains (trailer : Trailer) (name : Header.Name) : Bool :=
  trailer.headers.contains name

/--
Removes a trailer header with the given name.
-/
@[inline]
def erase (trailer : Trailer) (name : Header.Name) : Trailer :=
  { headers := trailer.headers.erase name }

/--
Gets the number of trailer headers.
-/
@[inline]
def size (trailer : Trailer) : Nat :=
  trailer.headers.size

/--
Checks if the trailer has no headers.
-/
@[inline]
def isEmpty (trailer : Trailer) : Bool :=
  trailer.headers.isEmpty

/--
Merges two trailers, accumulating values for duplicate keys from both.
-/
def merge (t1 t2 : Trailer) : Trailer :=
  { headers := t1.headers.merge t2.headers }

/--
Converts the trailer headers to a list of key-value pairs.
-/
def toList (trailer : Trailer) : List (Header.Name × Header.Value) :=
  trailer.headers.toList

/--
Converts the trailer headers to an array of key-value pairs.
-/
def toArray (trailer : Trailer) : Array (Header.Name × Header.Value) :=
  trailer.headers.toArray

/--
Folds over all key-value pairs in the trailer headers.
-/
def fold (trailer : Trailer) (init : α) (f : α → Header.Name → Header.Value → α) : α :=
  trailer.headers.fold init f

instance : Encode .v11 Trailer where
  encode buffer trailer :=
    -- "0\r\n" is the zero-length final chunk that precedes the trailer
    -- section in chunked transfer encoding (RFC 9112 §7.1.2).
    buffer.write "0\r\n".toUTF8
      |> (Encode.encode .v11 · trailer.headers)
      |>.write "\r\n".toUTF8

end Trailer

View File

@@ -0,0 +1,349 @@
/-
Copyright (c) 2026 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Sync.Mutex
public import Std.Internal.Http.Data.URI
public import Std.Internal.Http.Data.Cookie.Parser
public import Std.Internal.Http.Data.Headers
public import Init.Data.String
public import Init.Data.Array.Basic
public import Init.Data.List.Basic
public section
/-!
# Cookie
This module defines the `Cookie` and `Jar` types, a minimal RFC 6265-compliant
implementation for managing HTTP cookies.
Cookies are parsed from `Set-Cookie` response headers, stored in a thread-safe jar, and
injected as a `Cookie` request header on outgoing requests.
Supported `Set-Cookie` attributes: `Domain`, `Path`, `Secure`.
Unsupported: `Expires`, `Max-Age`, `HttpOnly`, `SameSite`. All cookies persist for the
lifetime of the jar regardless of any expiry directives.
Reference: https://www.rfc-editor.org/rfc/rfc6265
-/
namespace Std.Http
set_option linter.all true
open Internal Char
namespace Cookie

/--
Proposition asserting that a string is a valid cookie name: a non-empty HTTP token.
Cookie names are case-sensitive.

Reference: https://www.rfc-editor.org/rfc/rfc6265#section-4.1.1
-/
abbrev IsValidCookieName (s : String) : Prop :=
  isToken s

/--
A validated HTTP cookie name. Cookie names are case-sensitive HTTP tokens.

Reference: https://www.rfc-editor.org/rfc/rfc6265#section-4.1.1
-/
@[ext]
structure Name where
  /--
  The cookie name string.
  -/
  value : String
  /--
  Proof that the name is a valid HTTP token.
  -/
  isValidCookieName : IsValidCookieName value := by decide
  deriving BEq, DecidableEq, Repr

namespace Name

instance : Inhabited Name where
  default := ⟨"_", by decide⟩

/--
Attempts to create a `Cookie.Name` from a `String`, returning `none` if the string is
not a valid HTTP token or is empty.
-/
def ofString? (s : String) : Option Name :=
  let val := s.trimAscii.toString
  if h : IsValidCookieName val then
    some ⟨val, h⟩
  else
    none

/--
Creates a `Cookie.Name` from a string, panicking if the string is not a valid HTTP token.
-/
def ofString! (s : String) : Name :=
  match ofString? s with
  | some res => res
  | none => panic! s!"invalid cookie name: {s.quote}"

instance : ToString Name where
  toString n := n.value

end Name

/--
`cookie-octet = %x21 / %x23-2B / %x2D-3A / %x3C-5B / %x5D-7E`
US-ASCII visible characters excluding SP, DQUOTE, comma, semicolon, and backslash.

Reference: https://www.rfc-editor.org/rfc/rfc6265#section-4.1.1
-/
def isCookieOctet (c : Char) : Bool :=
  c = '!'
    ∨ ('#' ≤ c ∧ c ≤ '+')
    ∨ ('-' ≤ c ∧ c ≤ ':')
    ∨ ('<' ≤ c ∧ c ≤ '[')
    ∨ (']' ≤ c ∧ c ≤ '~')

/--
Proposition asserting that a string is a valid cookie value: all characters are
`cookie-octet` characters. Empty values are permitted.

Reference: https://www.rfc-editor.org/rfc/rfc6265#section-4.1.1
-/
abbrev IsValidCookieValue (s : String) : Prop :=
  s.toList.all isCookieOctet

/--
A validated HTTP cookie value. Empty values are permitted.

Reference: https://www.rfc-editor.org/rfc/rfc6265#section-4.1.1
-/
@[ext]
structure Value where
  /--
  The cookie value string.
  -/
  value : String
  /--
  Proof that the value contains only valid cookie-octet characters.
  -/
  isValidCookieValue : IsValidCookieValue value := by decide
  deriving BEq, DecidableEq, Repr

namespace Value

instance : Inhabited Value where
  default := ⟨"", by decide⟩

/--
Attempts to create a `Cookie.Value` from a `String`, returning `none` if the string
contains characters not permitted in cookie values.
-/
def ofString? (s : String) : Option Value :=
  let val := s.trimAscii.toString
  if h : IsValidCookieValue val then
    some ⟨val, h⟩
  else
    none

/--
Creates a `Cookie.Value` from a string, panicking if the string contains characters not
permitted in cookie values.
-/
def ofString! (s : String) : Value :=
  match ofString? s with
  | some res => res
  | none => panic! s!"invalid cookie value: {s.quote}"

instance : ToString Value where
  toString v := v.value

end Value

end Cookie
/--
An HTTP cookie with its matching attributes.

Reference: https://www.rfc-editor.org/rfc/rfc6265#section-4.1
-/
structure Cookie where
  /--
  The cookie name.
  -/
  name : Cookie.Name
  /--
  The cookie value.
  -/
  value : Cookie.Value
  /--
  The effective domain for this cookie. When `Set-Cookie` carries no `Domain` attribute this
  equals the origin host and `hostOnly` is `true` — only that exact host will receive the
  cookie. When `Domain` is set, `hostOnly` is `false` and subdomains also match.
  -/
  domain : URI.Host
  /--
  `true` when the cookie must only be sent to the exact origin host (no subdomain matching).
  -/
  hostOnly : Bool
  /--
  Path prefix for which the cookie is valid. Defaults to `"/"`.
  -/
  path : URI.Path
  /--
  When `true` the cookie must only be sent over a secure (HTTPS) channel.
  -/
  secure : Bool
  /--
  When `true` the cookie must not be exposed to non-HTTP APIs.
  Stored for completeness; no client-side script enforcement applies here.
  -/
  httpOnly : Bool
/--
A HTTP cookie jar: a thread-safe, mutex-guarded store of cookies accumulated
from `Set-Cookie` headers. Construct with `Jar.new`; the constructor and the
backing array are private so all mutation goes through the jar's API.

Reference: https://www.rfc-editor.org/rfc/rfc6265#section-5
-/
structure Cookie.Jar where
  private mk ::
  private cookies : Mutex (Array Cookie)
namespace Cookie.Jar

/--
Creates an empty cookie jar.
-/
def new : BaseIO Jar := do
  let cookies ← Mutex.new #[]
  return .mk cookies

/--
Domain matching per RFC 6265 §5.1.3: host-only cookies require an exact host
match; otherwise the host matches when equal to the cookie domain or when it is
a subdomain of it (string-suffix test on `"." ++ domain`).
-/
private def domainMatches (cookieDomain : URI.Host) (hostOnly : Bool) (host : URI.Host) : Bool :=
  if hostOnly then
    host == cookieDomain
  else
    let d := cookieDomain
    host == d || (toString host).endsWith ("." ++ toString d)

/--
Path matching per RFC 6265 §5.1.4.

A request path matches a cookie path when they are identical, or when the cookie path is a
strict segment-wise prefix of the request path. Segment boundaries correspond to `/`, so
`/foo` never prefix-matches `/foobar` (different segments).

A trailing `/` in the cookie path is normalised away before the prefix test; this covers
both RFC conditions:
- cookie-path ends with `/` → its meaningful segments are a strict prefix of request-path.
- first char after prefix is `/` → satisfied automatically at segment boundaries.
-/
private def pathMatches (cookiePath : URI.Path) (requestPath : URI.Path) : Bool :=
  requestPath == cookiePath ||
    let cp :=
      if cookiePath.hasTrailingSlash && !cookiePath.isEmpty
        then cookiePath.segments.pop
        else cookiePath.segments
    requestPath.segments.size > cp.size &&
      requestPath.startsWith { cookiePath with segments := cp }

/--
Parses a single `Set-Cookie` header value and stores the resulting cookie.

`host` is the origin host of the response (used as the effective domain when no
`Domain` attribute is present).

Reference: https://www.rfc-editor.org/rfc/rfc6265#section-5.2
-/
def processSetCookie (jar : Jar) (host : URI.Host) (headerValue : String) : BaseIO Unit := do
  -- Any parse or validation failure silently discards the cookie (RFC 6265 §5.2).
  let .ok parsed := Cookie.Parser.parseSetCookie.run headerValue.toUTF8
    | return ()
  let some cookieName := Cookie.Name.ofString? parsed.name
    | return ()
  let some cookieValue := Cookie.Value.ofString? parsed.value
    | return ()
  let cookiePath : URI.Path :=
    if let some p := parsed.path then URI.Path.parseOrRoot p
    else URI.Path.parseOrRoot "/"
  -- RFC 6265 §5.2.3: resolve domain; missing or invalid Domain → host-only
  let (domain, hostOnly) :=
    match parsed.domain with
    | some d =>
      match URI.DomainName.ofString? d with
      | some name => (URI.Host.name name, false)
      | none => (host, true)
    | none => (host, true)
  -- RFC 6265 §5.3 step 6: if domain attribute is set, the origin host must domain-match it.
  -- This prevents a server at api.example.com from setting Domain=evil.com or Domain=com.
  if !hostOnly && !domainMatches domain false host then
    return ()
  -- RFC 6265 §5.2.2: Max-Age ≤ 0 signals deletion — remove any matching cookie and stop.
  if let some maxAgeVal := parsed.maxAge then
    if maxAgeVal ≤ 0 then
      jar.cookies.atomically do
        let cs ← get
        set (cs.filter fun c => !(c.name == cookieName && c.domain == domain && c.path == cookiePath))
      return ()
  let cookie : Cookie := {
    name := cookieName
    value := cookieValue
    domain
    hostOnly
    path := cookiePath
    secure := parsed.secure
    httpOnly := parsed.httpOnly
  }
  -- Limit the total cookie count to prevent unbounded memory growth.
  -- RFC 6265 §6.1 recommends supporting at least 3000 cookies total.
  let maxCookies := 3000
  jar.cookies.atomically do
    let cs ← get
    -- Replace any existing cookie with the same (name, domain, path) identity.
    let cs := cs.filter fun c => !(c.name == cookie.name && c.domain == cookie.domain && c.path == cookie.path)
    if cs.size < maxCookies then
      set (cs.push cookie)

/--
Returns the `Cookie` header value for all cookies that should be sent for a request to `host`
at `path`. Pass `secure := true` when the request channel is HTTPS. Returns `none` when no
cookies match.

Reference: https://www.rfc-editor.org/rfc/rfc6265#section-5.4
-/
def cookiesFor
    (jar : Jar) (host : URI.Host) (path : URI.Path)
    (secure : Bool := false) : BaseIO (Option Header.Value) :=
  jar.cookies.atomically do
    let cs ← get
    let matching := cs.filter fun c =>
      domainMatches c.domain c.hostOnly host &&
      pathMatches c.path path &&
      -- Secure-only cookies are withheld from insecure channels.
      (!c.secure || secure)
    if matching.isEmpty then
      return none
    else
      return Header.Value.ofString? (String.intercalate "; " (matching.map (fun c => c.name.value ++ "=" ++ c.value.value)).toList)

end Std.Http.Cookie.Jar

View File

@@ -0,0 +1,225 @@
/-
Copyright (c) 2026 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
import Init.Data.String
public import Std.Internal.Parsec
public import Std.Internal.Parsec.ByteArray
public import Std.Internal.Http.Internal.Char
public section
/-!
# Cookie Parser
This module provides a `Set-Cookie` response-header parser following RFC 6265 §4.1. The
`parseSetCookie` combinator returns a `Parsed` structure with raw `String` fields; callers are
responsible for validating cookie-name and cookie-value semantics (e.g. via `Cookie.Name.ofString?`
and `Cookie.Value.ofString?`).
On parse failure the cookie is silently discarded per RFC 6265 §5.2.
Reference: https://www.rfc-editor.org/rfc/rfc6265#section-4.1
-/
namespace Std.Http.Cookie.Parser
set_option linter.all true
open Std Internal Parsec ByteArray Internal.Char
/-
cookie-octet = %x21 / %x23-2B / %x2D-3A / %x3C-5B / %x5D-7E
; US-ASCII visible characters excluding SP, DQUOTE,
; comma, semicolon, and backslash.
; Reference: https://www.rfc-editor.org/rfc/rfc6265#section-4.1.1
-/
@[inline]
private def isCookieOctetByte (c : UInt8) : Bool :=
  c == 0x21 ||
  (0x23 ≤ c && c ≤ 0x2B) ||
  (0x2D ≤ c && c ≤ 0x3A) ||
  (0x3C ≤ c && c ≤ 0x5B) ||
  (0x5D ≤ c && c ≤ 0x7E)
/-
av-octet = %x20-3A / %x3C-7E
; any CHAR except CTLs or ";"
; Reference: https://www.rfc-editor.org/rfc/rfc6265#section-4.1.1
-/
@[inline]
private def isAvOctetByte (c : UInt8) : Bool :=
  (0x20 ≤ c && c ≤ 0x3A) || (0x3C ≤ c && c ≤ 0x7E)
/-
token = 1*tchar
tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." /
"^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA
; Reference: https://www.rfc-editor.org/rfc/rfc9110#section-5.6.2
-/
-- Consumes between 1 and `limit` tchar bytes (RFC 9110 token characters)
-- and returns them as a byte slice; fails on an empty match.
@[inline]
private def parseToken (limit : Nat) : Parser ByteSlice :=
  takeWhile1AtMost (fun c => tchar (Char.ofUInt8 c)) limit
/--
Parsed result of a `Set-Cookie` header value, prior to semantic validation.

Cookie-name and cookie-value are raw strings that callers must validate
(e.g. via `Cookie.Name.ofString?` and `Cookie.Value.ofString?`).

- `domain`: the `Domain` attribute value with any leading `.` already stripped;
  `none` if the attribute is absent.
- `path`: the `Path` attribute value (guaranteed to start with `/`);
  `none` if the attribute is absent or does not start with `/`.
- `secure`: `true` when the `Secure` attribute is present.
- `httpOnly`: `true` when the `HttpOnly` attribute is present.
- `maxAge`: the `Max-Age` attribute value in seconds, or `none` if absent or unparseable.
-/
structure Parsed where
  /--
  Raw cookie name (an HTTP token).
  -/
  name : String
  /--
  Raw cookie value (`*cookie-octet` or double-quoted).
  -/
  value : String
  /--
  `Domain` attribute value with any leading `.` stripped, or `none` if absent.
  -/
  domain : Option String
  /--
  `Path` attribute value starting with `/`, or `none` if absent or invalid.
  -/
  path : Option String
  /--
  `true` when the `Secure` attribute is present.
  -/
  secure : Bool
  /-- `true` when the `HttpOnly` attribute is present. -/
  httpOnly : Bool
  /-- `Max-Age` attribute value in seconds, or `none` if absent or unparseable.
  Values ≤ 0 signal cookie deletion per RFC 6265 §5.2.2. -/
  maxAge : Option Int := none
-- cookie-name = token
private def parseCookieName : Parser String := do
  let bytes ← parseToken 4096
  let some str := String.fromUTF8? bytes.toByteArray
    | fail "invalid cookie name encoding"
  return str
/-
cookie-value = *cookie-octet / ( DQUOTE *cookie-octet DQUOTE )
-/
private def parseCookieValue : Parser String := do
  let bytes ←
    -- A double-quoted value keeps the quotes out of the stored value.
    if (← peekWhen? (· == '"'.toUInt8)).isSome then
      skipByte '"'.toUInt8
      let inner ← takeWhileAtMost isCookieOctetByte 4096
      skipByte '"'.toUInt8
      pure inner
    else
      takeWhileAtMost isCookieOctetByte 4096
  let some str := String.fromUTF8? bytes.toByteArray
    | fail "invalid cookie value encoding"
  return str
-- av-name = token (parsed case-insensitively)
private def parseAttrName : Parser String := do
  let bytes ← takeWhileAtMost (fun c => tchar (Char.ofUInt8 c)) 256
  -- Token bytes are pure ASCII, but decode defensively with `fromUTF8?`
  -- (matching the sibling parsers) instead of panicking via `fromUTF8!`.
  let some str := String.fromUTF8? bytes.toByteArray
    | fail "invalid attribute name encoding"
  return str.toLower
-- av-value = *av-octet (anything except CTLs and ";"); capped at 4096 bytes.
private def parseAttrValue : Parser String := do
  let bytes ← takeWhileAtMost isAvOctetByte 4096
  let some str := String.fromUTF8? bytes.toByteArray
    | fail "invalid attribute value encoding"
  return str
/-
cookie-av = expires-av / max-age-av / domain-av / path-av / secure-av /
httponly-av / extension-av
domain-av = "Domain=" domain-value
domain-value = <subdomain> ; as per RFC 1034, Section 3.5
path-av = "Path=" path-value
path-value = *av-octet
secure-av = "Secure"
httponly-av = "HttpOnly"
extension-av = *av-octet
-/
-- Parses one cookie-av: a lowercased attribute name, optionally followed by
-- "=" and a value. Value-less attributes (e.g. Secure, HttpOnly) yield `none`.
private def parseCookieAv : Parser (String × Option String) := do
  let name ← parseAttrName
  let value ← optional (attempt (skipByte '='.toUInt8 *> parseAttrValue))
  return (name, value)
/-
set-cookie-string = cookie-pair *( ";" SP cookie-av )
cookie-pair = cookie-name "=" cookie-value
-/
/--
Parses a `Set-Cookie` header value and returns a `Parsed` result.

Attribute processing follows RFC 6265 §5.2:
- `Domain`: leading `.` is stripped; invalid domain strings set `domain` to `none`.
- `Path`: values not starting with `/` set `path` to `none` (caller uses the default `/`).
- `Secure`: sets `secure` to `true` regardless of whether a value follows the attribute name.
- `HttpOnly`: sets `httpOnly` to `true`.
- `Max-Age`: parsed as an optional leading `-` followed by digits into `maxAge`.
- All other attributes (including `Expires` and `SameSite`) are ignored.
-/
public def parseSetCookie : Parser Parsed := do
  let name ← parseCookieName
  skipByte '='.toUInt8
  let value ← parseCookieValue
  -- *( ";" SP cookie-av )
  let attrs ← many (attempt do
    skipByte ';'.toUInt8
    let _ ← optional (skipByte ' '.toUInt8)
    parseCookieAv)
  let mut domain : Option String := none
  let mut path : Option String := none
  let mut secure := false
  let mut httpOnly := false
  let mut maxAge : Option Int := none
  for (attrName, attrVal) in attrs do
    match attrName with
    | "domain" =>
      let v := (attrVal.getD "").trimAscii.toString
      -- RFC 6265 §5.2.3: ignore a leading U+002E FULL STOP character
      let v := if v.startsWith "." then (v.drop 1).toString else v
      if !v.isEmpty then domain := some v
    | "path" =>
      let v := (attrVal.getD "").trimAscii.toString
      -- RFC 6265 §5.2.4: if av-value is empty or does not start with "/", use default
      if !v.isEmpty && v.startsWith "/" then path := some v
    | "secure" => secure := true
    | "httponly" => httpOnly := true
    | "max-age" =>
      -- RFC 6265 §5.2.2: parse an optional leading '-' followed by one or more digits.
      if let some v := attrVal then
        let s := v.trimAscii.toString
        let (neg, digits) := if s.startsWith "-" then (true, s.drop 1) else (false, s)
        if !digits.isEmpty && digits.all Char.isDigit then
          let absVal : Nat := digits.foldl (fun acc c => acc * 10 + (c.toNat - '0'.toNat)) 0
          maxAge := some (if neg then -(absVal : Int) else (absVal : Int))
    | _ => pure ()
  return { name, value, domain, path, secure, httpOnly, maxAge }
end Std.Http.Cookie.Parser

View File

@@ -0,0 +1,268 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Data.Headers.Basic
public import Std.Internal.Http.Data.Headers.Name
public import Std.Internal.Http.Data.Headers.Value
public section
/-!
# Headers
This module defines the `Headers` type, which represents a collection of HTTP header name-value pairs.
Reference: https://www.rfc-editor.org/rfc/rfc9110.html#section-5
-/
namespace Std.Http
set_option linter.all true
open Internal
/--
A structure for managing HTTP headers as key-value pairs.

Reference: https://www.rfc-editor.org/rfc/rfc9110.html#section-5
-/
structure Headers where
  /--
  The underlying multimap that stores headers.
  -/
  map : MultiMap Header.Name Header.Value
  deriving Inhabited, Repr

-- Membership (`name ∈ headers`) delegates to the underlying multimap,
-- and is decidable because multimap membership is.
instance : Membership Header.Name Headers where
  mem headers name := name ∈ headers.map

instance (name : Header.Name) (h : Headers) : Decidable (name ∈ h) :=
  inferInstanceAs (Decidable (name ∈ h.map))
namespace Headers

/--
Retrieves the first `Header.Value` for the given key.
-/
@[inline]
def get (headers : Headers) (name : Header.Name) (h : name ∈ headers) : Header.Value :=
  headers.map.get name h

/--
Retrieves all `Header.Value` entries for the given key.
-/
@[inline]
def getAll (headers : Headers) (name : Header.Name) (h : name ∈ headers) : Array Header.Value :=
  headers.map.getAll name h

/--
Like `getAll`, but returns `none` instead of requiring a membership proof.
Returns `none` if the header is absent.
-/
@[inline]
def getAll? (headers : Headers) (name : Header.Name) : Option (Array Header.Value) :=
  headers.map.getAll? name

/--
Retrieves the first `Header.Value` for the given key.
Returns `none` if the header is absent.
-/
@[inline]
def get? (headers : Headers) (name : Header.Name) : Option Header.Value :=
  headers.map.get? name

/--
Checks if the entry is present in the `Headers`.
-/
@[inline]
def hasEntry (headers : Headers) (name : Header.Name) (value : Header.Value) : Bool :=
  headers.map.hasEntry name value

/--
Retrieves the last header value for the given key.
Returns `none` if the header is absent.
-/
@[inline]
def getLast? (headers : Headers) (name : Header.Name) : Option Header.Value :=
  headers.map.getLast? name

/--
Like `get?`, but returns a default value if absent.
-/
@[inline]
def getD (headers : Headers) (name : Header.Name) (d : Header.Value) : Header.Value :=
  headers.map.getD name d

/--
Like `get?`, but panics if absent.
-/
@[inline]
def get! (headers : Headers) (name : Header.Name) : Header.Value :=
  headers.map.get! name

/--
Inserts a new key-value pair into the headers.
-/
@[inline]
def insert (headers : Headers) (key : Header.Name) (value : Header.Value) : Headers :=
  { map := headers.map.insert key value }

/--
Adds a header from string name and value, panicking if either is invalid.
-/
@[inline]
def insert! (headers : Headers) (name : String) (value : String) : Headers :=
  headers.insert (Header.Name.ofString! name) (Header.Value.ofString! value)

/--
Adds a header from string name and value.
Returns `none` if either the header name or value is invalid.
-/
@[inline]
def insert? (headers : Headers) (name : String) (value : String) : Option Headers := do
  let name ← Header.Name.ofString? name
  let value ← Header.Value.ofString? value
  pure <| headers.insert name value

/--
Inserts a new key with an array of values.
-/
@[inline]
def insertMany (headers : Headers) (key : Header.Name) (values : Array Header.Value) : Headers :=
  { map := headers.map.insertMany key values }

/--
Creates empty headers.
-/
def empty : Headers :=
  { map := ∅ }

/--
Creates headers from a list of key-value pairs.
-/
def ofList (pairs : List (Header.Name × Header.Value)) : Headers :=
  { map := MultiMap.ofList pairs }

/--
Checks if a header with the given name exists.
-/
@[inline]
def contains (headers : Headers) (name : Header.Name) : Bool :=
  headers.map.contains name

/--
Removes a header with the given name.
-/
@[inline]
def erase (headers : Headers) (name : Header.Name) : Headers :=
  { map := headers.map.erase name }

/--
Gets the number of headers.
-/
@[inline]
def size (headers : Headers) : Nat :=
  headers.map.size

/--
Checks if the headers are empty.
-/
@[inline]
def isEmpty (headers : Headers) : Bool :=
  headers.map.isEmpty

/--
Merges two headers, accumulating values for duplicate keys from both.
-/
def merge (headers1 headers2 : Headers) : Headers :=
  { map := headers1.map ∪ headers2.map }

/--
Converts the headers to a list of key-value pairs (flattened). Each header with multiple values produces
multiple pairs.
-/
def toList (headers : Headers) : List (Header.Name × Header.Value) :=
  headers.map.toList

/--
Converts the headers to an array of key-value pairs (flattened). Each header with multiple values
produces multiple pairs.
-/
def toArray (headers : Headers) : Array (Header.Name × Header.Value) :=
  headers.map.toArray

/--
Folds over all key-value pairs in the headers.
-/
def fold (headers : Headers) (init : α) (f : α → Header.Name → Header.Value → α) : α :=
  headers.map.toArray.foldl (fun acc (k, v) => f acc k v) init

/--
Maps a function over all header values, producing new headers.
-/
def mapValues (headers : Headers) (f : Header.Name → Header.Value → Header.Value) : Headers :=
  let pairs := headers.map.toArray.map (fun (k, v) => (k, f k v))
  { map := pairs.foldl (fun acc (k, v) => acc.insert k v) MultiMap.empty }

/--
Filters and maps over header key-value pairs. Returns only the pairs for which the function returns `some`.
-/
def filterMap (headers : Headers) (f : Header.Name → Header.Value → Option Header.Value) : Headers :=
  let pairs := headers.map.toArray.filterMap (fun (k, v) =>
    match f k v with
    | some v' => some (k, v')
    | none => none)
  { map := pairs.foldl (fun acc (k, v) => acc.insert k v) MultiMap.empty }

/--
Filters header key-value pairs, keeping only those that satisfy the predicate.
-/
def filter (headers : Headers) (f : Header.Name → Header.Value → Bool) : Headers :=
  headers.filterMap (fun k v => if f k v then some v else none)

/--
Updates all the values of a header if it exists.
-/
def update (headers : Headers) (name : Header.Name) (f : Header.Value → Header.Value) : Headers :=
  { map := headers.map.update name f }

/--
Replaces the last value for the given header name.
If the header is absent, returns the headers unchanged.
-/
@[inline]
def replaceLast (headers : Headers) (name : Header.Name) (value : Header.Value) : Headers :=
  { map := headers.map.replaceLast name value }

instance : ToString Headers where
  toString headers :=
    let pairs := headers.map.toArray.map (fun (k, v) => s!"{k}: {v}")
    String.intercalate "\r\n" pairs.toList

instance : Encode .v11 Headers where
  encode buffer headers :=
    headers.fold buffer (fun buf name value =>
      buf.writeString s!"{name}: {value}\r\n")

instance : EmptyCollection Headers :=
  ⟨Headers.empty⟩

instance : Singleton (Header.Name × Header.Value) Headers :=
  ⟨fun ⟨a, b⟩ => (∅ : Headers).insert a b⟩

instance : Insert (Header.Name × Header.Value) Headers :=
  ⟨fun ⟨a, b⟩ s => s.insert a b⟩

instance : Union Headers :=
  ⟨merge⟩

instance [Monad m] : ForIn m Headers (Header.Name × Header.Value) where
  forIn headers b f := forIn headers.map.entries b f

end Std.Http.Headers

View File

@@ -0,0 +1,311 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Data.URI
public import Std.Internal.Http.Data.Headers.Name
public import Std.Internal.Http.Data.Headers.Value
public import Std.Internal.Parsec.Basic
public section
/-!
# Header Typeclass and Common Headers
This module defines the `Header` typeclass for typed HTTP headers and some common header parsers.
Reference: https://www.rfc-editor.org/rfc/rfc9110.html#name-representation-data-and-met
-/
namespace Std.Http
set_option linter.all true
open Internal
/--
Typeclass for typed HTTP headers that can be parsed from and serialized to header values.
-/
class Header (α : Type) where
  /--
  Parses a header value into the typed representation.
  Returns `none` when the raw value does not conform to the header's grammar.
  -/
  parse : Header.Value → Option α
  /--
  Serializes the typed representation back to a name-value pair.
  -/
  serialize : α → Header.Name × Header.Value
/--
Any typed header can be encoded onto the HTTP/1.1 wire in the standard
`name: value\r\n` form via its `Header.serialize`.
-/
instance [h : Header α] : Encode .v11 α where
  encode buffer a :=
    let (name, value) := h.serialize a
    buffer.writeString s!"{name}: {value}\r\n"
namespace Header
-- Splits a comma-separated header value into trimmed, lowercased tokens.
-- Fails (returns `none`) when any item is empty, e.g. "a,,b" or a trailing comma.
private def parseTokenList (v : Value) : Option (Array String) := do
  let rawParts := v.value.split (· == ',')
  let parts := rawParts.map (·.trimAscii)
  guard (parts.all (¬·.isEmpty))
  return parts.toArray.map (fun s => s.toString.toLower)
/--
The `Content-Length` header, representing the size of the message body in bytes.
Parses only valid natural number values.
Reference: https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-2
-/
structure ContentLength where
  /--
  The content length in bytes.
  -/
  length : Nat
  deriving BEq, Repr
namespace ContentLength

/--
Parses a content length header value. Accepts only a valid natural number.
-/
def parse (v : Value) : Option ContentLength :=
  v.value.toNat?.map (.mk)

/--
Serializes a content length header back to a name-value pair.
-/
def serialize (h : ContentLength) : Name × Value :=
  (Header.Name.contentLength, Value.ofString! (toString h.length))

instance : Header ContentLength := ⟨parse, serialize⟩

end ContentLength
/--
Validates the chunked placement rules for the Transfer Encoding header. Returns `false` if the
encoding list violates the constraints.
Reference: https://www.rfc-editor.org/rfc/rfc9112#section-6.1
-/
@[expose]
def TransferEncoding.Validate (codings : Array String) : Bool :=
  -- Empty lists and codings that are not valid tokens are rejected outright.
  if codings.isEmpty || codings.any (fun coding => !isToken coding) then
    false
  else
    let chunkedCount := codings.filter (· == "chunked") |>.size
    -- the sender MUST either apply chunked as the final transfer coding
    let lastIsChunked := codings.back? == some "chunked"
    -- "chunked" may appear at most once, and if present must be last.
    if chunkedCount > 1 then
      false
    else if chunkedCount == 1 && !lastIsChunked then
      false
    else
      true
/--
The `Transfer-Encoding` header, representing the list of transfer codings applied to the message body.
Validation rules (RFC 9112 Section 6.1):
- "chunked" may appear at most once.
- If "chunked" is present, it must be the last encoding in the list.
Reference: https://www.rfc-editor.org/rfc/rfc9112#section-6.1
-/
structure TransferEncoding where
  /--
  The ordered list of transfer codings.
  -/
  codings : Array String
  /--
  Proof that the transfer codings satisfy the chunked placement rules.
  -/
  isValid : TransferEncoding.Validate codings = true
  deriving Repr
namespace TransferEncoding

/--
Returns `true` if the transfer encoding ends with chunked.
-/
def isChunked (te : TransferEncoding) : Bool :=
  te.codings.back? == some "chunked"

/--
Parses a comma-separated list of transfer codings from a header value, validating chunked placement.
-/
def parse (v : Value) : Option TransferEncoding := do
  let codings ← parseTokenList v
  if h : TransferEncoding.Validate codings then
    some ⟨codings, h⟩
  else
    none

/--
Serializes a transfer encoding back to a comma-separated header value.
-/
def serialize (te : TransferEncoding) : Header.Name × Header.Value :=
  let value := ",".intercalate (te.codings.toList)
  (Header.Name.transferEncoding, Value.ofString! value)

instance : Header TransferEncoding := ⟨parse, serialize⟩

end TransferEncoding
/--
The `Connection` header, represented as a list of connection option tokens.
Reference: https://www.rfc-editor.org/rfc/rfc9110.html#name-connection
-/
structure Connection where
  /--
  The normalized (trimmed, lowercased) connection-option tokens.
  -/
  tokens : Array String
  /--
  Proof that all tokens satisfy `isToken`.
  -/
  valid : tokens.all isToken = true
  deriving Repr
namespace Connection

/--
Checks whether a specific token is present in the `Connection` header value.
The probe token is normalized (trimmed, lowercased) before comparison.
-/
def containsToken (connection : Connection) (token : String) : Bool :=
  let token := token.trimAscii.toString.toLower
  connection.tokens.any (· == token)

/--
Checks whether the `Connection` header requests connection close semantics.
-/
def shouldClose (connection : Connection) : Bool :=
  connection.containsToken "close"

/--
Parses a `Connection` header value into normalized tokens.
-/
def parse (v : Value) : Option Connection := do
  let tokens ← parseTokenList v
  if h : tokens.all isToken = true then
    some ⟨tokens, h⟩
  else
    none

/--
Serializes a `Connection` header back to a comma-separated value.
-/
def serialize (connection : Connection) : Header.Name × Header.Value :=
  let value := ",".intercalate connection.tokens.toList
  (Header.Name.connection, Value.ofString! value)

instance : Header Connection := ⟨parse, serialize⟩

end Connection
/--
The `Host` header.
Represents the authority component of a URI:
host [ ":" port ]
Reference: https://www.rfc-editor.org/rfc/rfc9110.html#name-host-and-authority
-/
structure Host where
  /--
  Host name (reg-name, IPv4, or IPv6 literal).
  -/
  host : URI.Host
  /--
  Optional port.
  -/
  port : URI.Port
  deriving Repr, BEq
namespace Host

/--
Parses a `Host` header value, requiring the whole value to be consumed.
-/
def parse (v : Value) : Option Host :=
  let parsed := (Std.Http.URI.Parser.parseHostHeader <* Std.Internal.Parsec.eof).run v.value.toUTF8
  match parsed with
  | .ok ⟨host, port⟩ => some ⟨host, port⟩
  | .error _ => none

/--
Serializes a `Host` header back to a name and a value.
-/
def serialize (host : Host) : Header.Name × Header.Value :=
  -- An explicitly-empty port renders as a trailing ':'; an omitted port renders nothing.
  let value := match host.port with
    | .value port => Header.Value.ofString! s!"{host.host}:{port}"
    | .empty => Header.Value.ofString! s!"{host.host}:"
    | .omitted => Header.Value.ofString! <| toString host.host
  (.mk "host", value)

instance : Header Host := ⟨parse, serialize⟩

end Host
/--
The `Expect` header.
Represents an expectation token.
The only standardized expectation is `100-continue`.
Reference: https://www.rfc-editor.org/rfc/rfc9110.html#name-expect
-/
structure Expect where
  /--
  True if the client expects `100-continue`.
  -/
  expect : Bool
  deriving Repr, BEq
namespace Expect

/--
Parses an `Expect` header.
Succeeds only if the value is exactly `100-continue`
(case-insensitive, trimmed).
-/
def parse (v : Value) : Option Expect :=
  let normalized := v.value.trimAscii.toString.toLower
  if normalized == "100-continue" then
    some ⟨true⟩
  else
    none

/--
Serializes an `Expect` header. A `false` expectation serializes to an empty value.
-/
def serialize (e : Expect) : Header.Name × Header.Value :=
  if e.expect then
    (Header.Name.expect, Value.ofString! "100-continue")
  else
    (Header.Name.expect, Value.ofString! "")

instance : Header Expect := ⟨parse, serialize⟩

end Expect
end Std.Http.Header

View File

@@ -0,0 +1,200 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Init.Data.ToString
public import Std.Internal.Http.Internal
public section
/-!
# Header Names
This module defines the `Name` type, which represents validated HTTP header names that conform to
HTTP standards.
Reference: https://www.rfc-editor.org/rfc/rfc9110.html#section-5
-/
namespace Std.Http.Header
set_option linter.all true
open Internal Char
/--
Proposition asserting that a string is a valid HTTP header name: all characters are valid token
characters and the string is non-empty (both enforced by `isToken`).
Reference: https://www.rfc-editor.org/rfc/rfc9110.html#name-field-names
-/
abbrev IsValidHeaderName (s : String) : Prop :=
  isToken s
/--
A validated HTTP header name that ensures all characters conform to HTTP standards. Header names are
case-insensitive according to HTTP specifications, so the value is stored lowercased.
Reference: https://www.rfc-editor.org/rfc/rfc9110.html#name-field-names
-/
@[ext]
structure Name where
  /--
  The lowercased normalized header name string.
  -/
  value : String
  /--
  The proof that it's a valid header name.
  -/
  -- NOTE(review): field is named `isValidHeaderValue` but holds the *name* validity proof —
  -- consider renaming to `isValidHeaderName` in a follow-up (interface change).
  isValidHeaderValue : IsValidHeaderName value := by decide
  /--
  The proof that the header name is stored as a lowercase string.
  -/
  isLowerCase : IsLowerCase value := by decide
  deriving Repr, DecidableEq
namespace Name
/-- Equality of names is equality of their (already lowercased) string values. -/
instance : BEq Name where
  beq a b := a.value = b.value

/-- Hash a name by hashing its string value, so `BEq`/`Hashable` agree. -/
instance : Hashable Name where
  hash x := Hashable.hash x.value

-- NOTE(review): declared inside `namespace Name`, so the full name is `Name.Name.beq_eq`;
-- kept as-is since local `simp`/`grind` calls below reference it by that name.
theorem Name.beq_eq {x y : Name} : (x == y) = (x.value == y.value) :=
  rfl

instance : LawfulBEq Name where
  rfl {x} := by simp [Name.beq_eq]
  eq_of_beq {x y} := by grind [Name.beq_eq, Name.ext]

instance : LawfulHashable Name := inferInstance

instance : Inhabited Name where
  default := ⟨"_", by decide, by decide⟩
/--
Attempts to create a `Name` from a `String`, returning `none` if the string contains invalid
characters for HTTP header names or is empty.
-/
def ofString? (s : String) : Option Name :=
  -- Normalize first: header names are case-insensitive, so trim and lowercase.
  let val := s.trimAscii.toString.toLower
  if h : IsValidHeaderName val ∧ IsLowerCase val then
    some ⟨val, h.left, h.right⟩
  else
    none
/--
Creates a `Name` from a string, panicking with an error message if the string contains invalid
characters for HTTP header names or is empty. Prefer `ofString?` for untrusted input.
-/
def ofString! (s : String) : Name :=
  match ofString? s with
  | some res => res
  | none => panic! s!"invalid header name: {s.quote}"
/--
Converts the header name to title case (e.g., "Content-Type") by capitalizing each
hyphen-separated segment.
Note: some well-known headers have unconventional casing (e.g., "WWW-Authenticate"),
but since HTTP header names are case-insensitive, this always uses simple capitalization.
-/
@[inline]
def toCanonical (name : Name) : String :=
  let it := name.value.splitOn "-"
    |>.map (·.capitalize)
  String.intercalate "-" it
/--
Performs a case-insensitive comparison between a `Name` and a `String`. Returns `true` if they match.
The stored value is already lowercase, so only the probe string is lowered.
-/
@[expose]
def is (name : Name) (s : String) : Bool :=
  name.value == s.toLower

/-- Names render in canonical title case (e.g., "Content-Type"). -/
instance : ToString Name where
  toString name := name.toCanonical
/--
Standard `Content-Type` header name.
-/
def contentType : Header.Name := .mk "content-type"
/--
Standard `Content-Length` header name.
-/
def contentLength : Header.Name := .mk "content-length"
/--
Standard `Host` header name.
-/
def host : Header.Name := .mk "host"
/--
Standard `Authorization` header name.
-/
def authorization : Header.Name := .mk "authorization"
/--
Standard `User-Agent` header name.
-/
def userAgent : Header.Name := .mk "user-agent"
/--
Standard `Accept` header name.
-/
def accept : Header.Name := .mk "accept"
/--
Standard `Connection` header name.
-/
def connection : Header.Name := .mk "connection"
/--
Standard `Transfer-Encoding` header name.
-/
def transferEncoding : Header.Name := .mk "transfer-encoding"
/--
Standard `Server` header name.
-/
def server : Header.Name := .mk "server"
/--
Standard `Date` header name.
-/
def date : Header.Name := .mk "date"
/--
Standard `Expect` header name.
-/
def expect : Header.Name := .mk "expect"
/--
Standard `Cookie` header name (client → server).
-/
def cookie : Header.Name := .mk "cookie"
/--
Standard `Set-Cookie` header name (server → client).
-/
def setCookie : Header.Name := .mk "set-cookie"
/--
Standard `Location` header name.
-/
def location : Header.Name := .mk "location"
/--
Standard `Proxy-Authorization` header name.
-/
def proxyAuthorization : Header.Name := .mk "proxy-authorization"
end Std.Http.Header.Name

View File

@@ -0,0 +1,103 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Init.Data.ToString
public import Std.Internal.Http.Internal
public section
/-!
# Header Values
This module defines the `Value` type, which represents validated HTTP header values that conform to HTTP
standards.
Reference: https://www.rfc-editor.org/rfc/rfc9110.html#name-field-values
-/
namespace Std.Http.Header
set_option linter.all true
open Internal
/--
Proposition that asserts all characters in a string are valid for HTTP header values,
and that the first and last characters (if present) are `field-vchar` (not SP/HTAB).
field-value = *field-content
field-content = field-vchar
[ 1*( SP / HTAB / field-vchar ) field-vchar ]
Reference: https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5
-/
abbrev IsValidHeaderValue (s : String) : Prop :=
  let s := s.toList
  s.all Char.fieldContent ∧
    (s.head?.map Char.fieldVchar |>.getD true) ∧
    (s.getLast?.map Char.fieldVchar |>.getD true)
/--
A validated HTTP header value that ensures all characters conform to HTTP standards.
-/
structure Value where
  /--
  The string data.
  -/
  value : String
  /--
  The proof that it's a valid header value.
  -/
  isValidHeaderValue : IsValidHeaderValue value := by decide
  deriving BEq, DecidableEq, Repr
/-- Hash a value by hashing its underlying string, so `BEq`/`Hashable` agree. -/
instance : Hashable Value where
  hash := Hashable.hash ∘ Value.value

/-- The empty string is a valid header value and serves as the default. -/
instance : Inhabited Value where
  default := ⟨"", by decide⟩
namespace Value
/--
Attempts to create a `Value` from a `String`, returning `none` if the string contains invalid characters
for HTTP header values.
-/
@[expose]
def ofString? (s : String) : Option Value :=
  -- A field value does not include leading or trailing whitespace.
  let val := s.trimAscii.toString
  if h : IsValidHeaderValue val then
    some ⟨val, h⟩
  else
    none
/--
Creates a `Value` from a string, panicking with an error message if the string contains invalid
characters for HTTP header values. Prefer `ofString?` for untrusted input.
-/
@[expose]
def ofString! (s : String) : Value :=
  match ofString? s with
  | some res => res
  | none => panic! s!"invalid header value: {s.quote}"
/--
Performs a case-insensitive comparison between a `Value` and a `String`. Returns `true` if they match.
Both sides are lowered, since values are stored case-preserved.
-/
@[expose]
def is (s : Value) (h : String) : Bool :=
  s.value.toLower == h.toLower

/-- Values render as their raw string data. -/
instance : ToString Value where
  toString v := v.value
end Std.Http.Header.Value

View File

@@ -9,6 +9,8 @@ prelude
public import Std.Internal.Http.Data.Extensions
public import Std.Internal.Http.Data.Method
public import Std.Internal.Http.Data.Version
public import Std.Internal.Http.Data.Headers
public import Std.Internal.Http.Data.URI
public section
@@ -49,7 +51,12 @@ structure Request.Head where
/--
The raw request-target string (commonly origin-form path/query, `"*"`, or authority-form).
-/
uri : String
uri : RequestTarget := .asteriskForm
/--
Collection of HTTP headers for the request (Content-Type, Authorization, etc.).
-/
headers : Headers := .empty
deriving Inhabited, Repr
/--
@@ -79,7 +86,7 @@ structure Request.Builder where
/--
The request-line of an HTTP request.
-/
line : Head := { method := .get, version := .v11, uri := "*" }
line : Head := { method := .get, version := .v11, uri := .asteriskForm }
/--
Optional dynamic metadata attached to the request.
@@ -93,6 +100,8 @@ instance : ToString Head where
toString req.method ++ " " ++
toString req.uri ++ " " ++
toString req.version ++
"\r\n" ++
toString req.headers ++
"\r\n"
open Internal in
@@ -100,9 +109,11 @@ instance : Encode .v11 Head where
encode buffer req :=
let buffer := Encode.encode (v := .v11) buffer req.method
let buffer := buffer.writeChar ' '
let buffer := buffer.writeString req.uri
let buffer := Encode.encode (v := .v11) buffer req.uri
let buffer := buffer.writeChar ' '
let buffer := Encode.encode (v := .v11) buffer req.version
let buffer := buffer.writeString "\r\n"
let buffer := Encode.encode (v := .v11) buffer req.headers
buffer.writeString "\r\n"
/--
@@ -134,9 +145,53 @@ def version (builder : Builder) (version : Version) : Builder :=
/--
Sets the request target/URI for the request being built.
-/
def uri (builder : Builder) (uri : String) : Builder :=
def uri (builder : Builder) (uri : RequestTarget) : Builder :=
{ builder with line := { builder.line with uri := uri } }
/--
Sets the request target/URI for the request being built
-/
def uri! (builder : Builder) (uri : String) : Builder :=
let uri := RequestTarget.parse! uri
{ builder with line := { builder.line with uri } }
/--
Sets the headers for the request being built
-/
def headers (builder : Builder) (headers : Headers) : Builder :=
{ builder with line := { builder.line with headers } }
/--
Adds a single header to the request being built.
-/
def header (builder : Builder) (key : Header.Name) (value : Header.Value) : Builder :=
{ builder with line := { builder.line with headers := builder.line.headers.insert key value } }
/--
Adds a single header to the request being built, panics if the header is invalid.
-/
def header! (builder : Builder) (key : String) (value : String) : Builder :=
let key := Header.Name.ofString! key
let value := Header.Value.ofString! value
{ builder with line := { builder.line with headers := builder.line.headers.insert key value } }
/--
Adds a single header to the request being built.
Returns `none` if the header name or value is invalid.
-/
def header? (builder : Builder) (key : String) (value : String) : Option Builder := do
let key Header.Name.ofString? key
let value Header.Value.ofString? value
pure <| { builder with line := { builder.line with headers := builder.line.headers.insert key value } }
/--
Adds a header to the request being built only if the Option Header.Value is some.
-/
def headerOpt (builder : Builder) (key : Header.Name) (value : Option Header.Value) : Builder :=
match value with
| some v => builder.header key v
| none => builder
/--
Inserts a typed extension value into the request being built.
-/
@@ -154,7 +209,7 @@ end Builder
/--
Creates a new HTTP GET Request with the specified URI.
-/
def get (uri : String) : Builder :=
def get (uri : RequestTarget) : Builder :=
new
|>.method .get
|>.uri uri
@@ -162,7 +217,7 @@ def get (uri : String) : Builder :=
/--
Creates a new HTTP POST Request builder with the specified URI.
-/
def post (uri : String) : Builder :=
def post (uri : RequestTarget) : Builder :=
new
|>.method .post
|>.uri uri
@@ -170,7 +225,7 @@ def post (uri : String) : Builder :=
/--
Creates a new HTTP PUT Request builder with the specified URI.
-/
def put (uri : String) : Builder :=
def put (uri : RequestTarget) : Builder :=
new
|>.method .put
|>.uri uri
@@ -178,7 +233,7 @@ def put (uri : String) : Builder :=
/--
Creates a new HTTP DELETE Request builder with the specified URI.
-/
def delete (uri : String) : Builder :=
def delete (uri : RequestTarget) : Builder :=
new
|>.method .delete
|>.uri uri
@@ -186,7 +241,7 @@ def delete (uri : String) : Builder :=
/--
Creates a new HTTP PATCH Request builder with the specified URI.
-/
def patch (uri : String) : Builder :=
def patch (uri : RequestTarget) : Builder :=
new
|>.method .patch
|>.uri uri
@@ -194,25 +249,25 @@ def patch (uri : String) : Builder :=
/--
Creates a new HTTP HEAD Request builder with the specified URI.
-/
def head (uri : String) : Builder :=
def head (uri : RequestTarget) : Builder :=
new
|>.method .head
|>.uri uri
/--
Creates a new HTTP OPTIONS Request builder with the specified URI.
Use `Request.options "*"` for server-wide OPTIONS.
Use `Request.options (RequestTarget.asteriskForm)` for server-wide OPTIONS.
-/
def options (uri : String) : Builder :=
def options (uri : RequestTarget) : Builder :=
new
|>.method .options
|>.uri uri
/--
Creates a new HTTP CONNECT Request builder with the specified URI.
Typically used with authority-form URIs such as `"example.com:443"` for tunneling.
Typically used with `RequestTarget.authorityForm` for tunneling.
-/
def connect (uri : String) : Builder :=
def connect (uri : RequestTarget) : Builder :=
new
|>.method .connect
|>.uri uri
@@ -220,7 +275,7 @@ def connect (uri : String) : Builder :=
/--
Creates a new HTTP TRACE Request builder with the specified URI.
-/
def trace (uri : String) : Builder :=
def trace (uri : RequestTarget) : Builder :=
new
|>.method .trace
|>.uri uri

View File

@@ -9,6 +9,7 @@ prelude
public import Std.Internal.Http.Data.Extensions
public import Std.Internal.Http.Data.Status
public import Std.Internal.Http.Data.Version
public import Std.Internal.Http.Data.Headers
public section
@@ -36,6 +37,12 @@ structure Response.Head where
The HTTP protocol version used in the response, e.g. `HTTP/1.1`.
-/
version : Version := .v11
/--
The set of response headers, providing metadata such as `Content-Type`,
`Content-Length`, and caching directives.
-/
headers : Headers := .empty
deriving Inhabited, Repr
/--
@@ -78,7 +85,9 @@ instance : ToString Head where
toString r :=
toString r.version ++ " " ++
toString r.status.toCode ++ " " ++
r.status.reasonPhrase ++ "\r\n"
r.status.reasonPhrase ++ "\r\n" ++
toString r.headers ++
"\r\n"
open Internal in
instance : Encode .v11 Head where
@@ -88,6 +97,8 @@ instance : Encode .v11 Head where
let buffer := buffer.writeString (toString r.status.toCode)
let buffer := buffer.writeChar ' '
let buffer := buffer.writeString r.status.reasonPhrase
let buffer := buffer.writeString "\r\n"
let buffer := Encode.encode (v := .v11) buffer r.headers
buffer.writeString "\r\n"
/--
@@ -108,6 +119,35 @@ Sets the HTTP status code for the response being built.
def status (builder : Builder) (status : Status) : Builder :=
{ builder with line := { builder.line with status := status } }
/--
Sets the headers for the response being built.
-/
def headers (builder : Builder) (headers : Headers) : Builder :=
{ builder with line := { builder.line with headers } }
/--
Adds a single header to the response being built.
-/
def header (builder : Builder) (key : Header.Name) (value : Header.Value) : Builder :=
{ builder with line := { builder.line with headers := builder.line.headers.insert key value } }
/--
Adds a single header to the response being built, panics if the header is invalid.
-/
def header! (builder : Builder) (key : String) (value : String) : Builder :=
let key := Header.Name.ofString! key
let value := Header.Value.ofString! value
{ builder with line := { builder.line with headers := builder.line.headers.insert key value } }
/--
Adds a single header to the response being built.
Returns `none` if the header name or value is invalid.
-/
def header? (builder : Builder) (key : String) (value : String) : Option Builder := do
let key Header.Name.ofString? key
let value Header.Value.ofString? value
pure <| { builder with line := { builder.line with headers := builder.line.headers.insert key value } }
/--
Inserts a typed extension value into the response being built.
-/

View File

@@ -0,0 +1,173 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Data.URI.Basic
public import Std.Internal.Http.Data.URI.Parser
public section
/-!
# URI
This module defines the `URI` and `RequestTarget` types that represent and manipulate components of
URIs as defined by RFC 3986. It provides parsing, rendering, and normalization utilities for working
with URIs and request targets in HTTP messages.
References:
* https://www.rfc-editor.org/rfc/rfc3986.html
* https://www.rfc-editor.org/rfc/rfc9112.html#section-3.3
-/
namespace Std.Http.RequestTarget
set_option linter.all true
/--
Attempts to parse a `RequestTarget` from the given string, requiring the whole input to be consumed.
-/
@[inline]
def parse? (string : String) : Option RequestTarget :=
  (URI.Parser.parseRequestTarget <* Std.Internal.Parsec.eof).run string.toUTF8 |>.toOption
/--
Parses a `RequestTarget` from the given string. Panics if parsing fails. Use `parse?`
if you need a safe option-returning version.
-/
@[inline]
def parse! (string : String) : RequestTarget :=
  match parse? string with
  | some res => res
  | none => panic! "invalid request target"
/--
Creates an origin-form request target from a path string.
The path should start with '/' (e.g., "/api/users" or "/search?q=test").
Panics if the string is not a valid origin-form request target.
-/
@[inline]
def originForm! (path : String) : RequestTarget :=
  -- Reuse the full parser but accept only the origin-form constructor.
  match parse? path with
  | some (.originForm o) => .originForm o
  | _ => panic! s!"invalid origin-form request target: {path}"
namespace PathAndQuery

/--
Attempts to parse an origin-form request target from a string such as `"/path?key=value"`.
Returns `none` if the string is not a valid origin-form target.
-/
@[inline]
def parse? (s : String) : Option RequestTarget.PathAndQuery := do
  match ← RequestTarget.parse? s with
  | .originForm o => some o
  | _ => none

/--
Parses an origin-form request target from a string. Panics if parsing fails.
Use `parse?` if you need a safe option-returning version.
-/
@[inline]
def parse! (s : String) : RequestTarget.PathAndQuery :=
  match parse? s with
  | some o => o
  | none => panic! s!"invalid origin-form request target: {s.quote}"

end PathAndQuery
namespace Absolute

/--
Attempts to parse an absolute-form request target from a string such as `"http://host/path"`.
Returns `none` if the string is not a valid absolute-form target.
-/
@[inline]
def parse? (s : String) : Option RequestTarget.Absolute := do
  match ← RequestTarget.parse? s with
  | .absoluteForm af => some af
  | _ => none

/--
Parses an absolute-form request target from a string. Panics if parsing fails.
Use `parse?` if you need a safe option-returning version.
-/
@[inline]
def parse! (s : String) : RequestTarget.Absolute :=
  match parse? s with
  | some af => af
  | none => panic! s!"invalid absolute-form request target: {s.quote}"

end Absolute
end RequestTarget
namespace URI
/--
Attempts to parse a `URI` from the given string, requiring the whole input to be consumed.
-/
@[inline]
def parse? (string : String) : Option URI :=
  (Parser.parseURI <* Std.Internal.Parsec.eof).run string.toUTF8 |>.toOption
/--
Parses a `URI` from the given string. Panics if parsing fails. Use `parse?` if you need a safe
option-returning version.
-/
@[inline]
def parse! (string : String) : URI :=
  match parse? string with
  | some res => res
  | none => panic! "invalid URI"
namespace Path
/--
Attempts to parse a URI path from the given string.
Returns `none` if the string is not a valid path.
-/
@[inline]
def parse? (s : String) : Option Std.Http.URI.Path :=
  (Std.Http.URI.Parser.parsePath {} true true <* Std.Internal.Parsec.eof).run s.toUTF8 |>.toOption
/--
Parses a URI path from the given string. Returns the root path `"/"` (an absolute path with no
segments) if parsing fails.
-/
@[inline]
def parseOrRoot (s : String) : Std.Http.URI.Path :=
  parse? s |>.getD { segments := #[], absolute := true }
end Std.Http.URI.Path
namespace Std.Http.URI.AuthorityForm

/--
Attempts to parse a URL with a required authority component.
Accepts absolute URLs such as `"http://host:8080/path?k=v"`.
The port defaults to the scheme's default (via `URI.Scheme.defaultPort`) if omitted.
Returns `none` if the URL is invalid or has no authority.
-/
@[inline]
def parse? (s : String) : Option Std.Http.URI.AuthorityForm := do
  let uri ← Std.Http.URI.parse? s
  let auth ← uri.authority
  let port : UInt16 := match auth.port with
    | .value p => p
    | _ => URI.Scheme.defaultPort uri.scheme
  some { scheme := uri.scheme, host := auth.host, port, path := uri.path, query := uri.query }

/--
Parses a URL with a required authority component. Panics if parsing fails.
Use `parse?` if you need a safe option-returning version.
-/
@[inline]
def parse! (s : String) : Std.Http.URI.AuthorityForm :=
  match parse? s with
  | some af => af
  | none => panic! s!"invalid URL (expected scheme://host/path): {s.quote}"

end Std.Http.URI.AuthorityForm

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,80 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Init.Data.Nat
public section
/-!
# URI Parser Configuration
This module defines `URI.Config`, which controls per-component size limits used during URI
parsing. All limits default to the values previously hardcoded in the parser, except
`maxQueryLength`, which is raised from 1024 to 8192 to match `H1.Config.maxUriLength` and
accommodate real-world query strings.
-/
namespace Std.Http.URI
set_option linter.all true
/--
Per-component size limits for the URI parser.
-/
structure Config where
  /--
  Maximum length of the URI scheme component (e.g. `https`), in bytes.
  The scheme grammar requires at least one leading ALPHA; the remaining budget is
  `max(0, maxSchemeLength - 1)` additional characters.
  -/
  maxSchemeLength : Nat := 13
  /--
  Maximum length of the host `reg-name` component, in bytes.
  -/
  maxHostLength : Nat := 253
  /--
  Maximum length of the userinfo component (username and password each), in bytes.
  -/
  maxUserInfoLength : Nat := 1024
  /--
  Maximum length of a single path segment, in bytes.
  -/
  maxSegmentLength : Nat := 256
  /--
  Maximum length of the query string, in bytes.
  Raised from the previously hardcoded 1024 to 8192 to match `H1.Config.maxUriLength`
  and allow real-world query strings.
  -/
  maxQueryLength : Nat := 8192
  /--
  Maximum length of the fragment component, in bytes.
  -/
  maxFragmentLength : Nat := 1024
  /--
  Maximum number of path segments.
  Prevents excessive segment counts that could arise from paths like `/a/b/c/…` repeated many times.
  -/
  maxPathSegments : Nat := 128
  /--
  Maximum total byte length of the path (all segments combined, including separating slashes).
  -/
  maxTotalPathLength : Nat := 8192
  /--
  Maximum number of query parameters (key-value pairs separated by `&`).
  -/
  maxQueryParams : Nat := 100
end Std.Http.URI

View File

@@ -0,0 +1,688 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
import Init.Grind
import Init.While
import Init.Data.SInt.Lemmas
import Init.Data.UInt.Lemmas
import Init.Data.UInt.Bitwise
import Init.Data.Array.Lemmas
public import Init.Data.String
public import Std.Internal.Http.Internal.Char
public section
/-!
# URI Encoding
This module provides utilities for percent-encoding URI components according to RFC 3986. It includes
character validation, encoding/decoding functions, and types that maintain encoding invariants through
Lean's dependent type system.
Reference: https://www.rfc-editor.org/rfc/rfc3986.html#section-2.1
-/
namespace Std.Http.URI
set_option linter.all true
open Internal Char
/--
Checks if a byte is a valid character in a percent-encoded URI component. Valid characters are
ASCII bytes that either satisfy the supplied `rule`, are hex digits, or are the percent sign
(for escape sequences).
-/
def isEncodedChar (rule : UInt8 → Bool) (c : UInt8) : Bool :=
  isAsciiByte c ∧ (rule c ∨ isHexDigitByte c ∨ c = '%'.toUInt8)
/--
Checks if a byte is valid in a percent-encoded query string component. Extends `isEncodedChar` to also
allow '+' which represents space in application/x-www-form-urlencoded format.
-/
def isEncodedQueryChar (rule : UInt8 → Bool) (c : UInt8) : Bool :=
  isEncodedChar rule c ∨ c = '+'.toUInt8
/--
Checks if all characters in a `ByteArray` are allowed in an encoded URI component. This is a fast check
that only verifies the character set, not full encoding validity.
-/
@[inline]
abbrev IsAllowedEncodedChars (rule : UInt8 Bool) (s : ByteArray) : Prop :=
s.data.all (isEncodedChar rule)
instance : Decidable (IsAllowedEncodedChars r s) :=
inferInstanceAs (Decidable (s.data.all (isEncodedChar r) = true))
/--
Checks if all characters in a `ByteArray` are allowed in an encoded query parameter. Allows '+' as an
alternative encoding for space (application/x-www-form-urlencoded).
-/
@[inline]
abbrev IsAllowedEncodedQueryChars (rule : UInt8 Bool) (s : ByteArray) : Prop :=
  s.data.all (isEncodedQueryChar rule)

-- Decidability follows by unfolding to the underlying `Bool`-valued `Array.all`.
instance : Decidable (IsAllowedEncodedQueryChars r s) :=
  inferInstanceAs (Decidable (s.data.all (isEncodedQueryChar r) = true))
/--
Validates that all percent signs in a byte array are followed by exactly two hexadecimal digits.
This ensures proper percent-encoding according to RFC 3986.
For example:
- `%20` is valid (percent followed by two hex digits)
- `%` is invalid (percent with no following digits)
- `%2` is invalid (percent followed by only one digit)
- `%GG` is invalid (percent followed by non-hex characters)
-/
def isValidPercentEncoding (ba : ByteArray) : Bool :=
  -- Left-to-right scan: on '%', require two in-bounds hex digits and jump past them;
  -- any other byte just advances by one. Terminates because `i` strictly increases.
  let rec loop (i : Nat) : Bool :=
    if h : i < ba.size then
      let c := ba[i]'h
      if c = '%'.toUInt8 then
        if h₂ : i + 2 < ba.size then
          let d1 := ba[i + 1]'(by omega)
          let d2 := ba[i + 2]'h₂
          if isHexDigitByte d1 && isHexDigitByte d2 then
            loop (i + 3)
          else false
        else false
      else loop (i + 1)
    else true
  termination_by ba.size - i
  loop 0
/--
Converts a nibble (4-bit value, 0-15) to its hexadecimal digit representation. Returns '0'-'9' for
values 0-9, and 'A'-'F' for values 10-15. Inputs ≥ 16 are not meaningful (no clamping is done).
-/
def hexDigit (n : UInt8) : UInt8 :=
  if n < 10 then (n + '0'.toUInt8)
  else (n - 10 + 'A'.toUInt8)
/--
Converts a hexadecimal digit character to its numeric value (0-15).
Accepts both lowercase and uppercase letters.
Returns `none` if the character is not a valid hex digit.
-/
def hexDigitToUInt8? (c : UInt8) : Option UInt8 :=
  if c ≥ '0'.toUInt8 && c ≤ '9'.toUInt8 then
    some (c - '0'.toUInt8)
  else if c ≥ 'a'.toUInt8 && c ≤ 'f'.toUInt8 then
    some (c - 'a'.toUInt8 + 10)
  else if c ≥ 'A'.toUInt8 && c ≤ 'F'.toUInt8 then
    some (c - 'A'.toUInt8 + 10)
  else
    none
-- Appending one allowed byte preserves the "all bytes allowed" invariant;
-- `Array.all_push` splits the check into the existing prefix and the new element.
private theorem IsAllowedEncodedChars.push {bs : ByteArray} (h : IsAllowedEncodedChars r bs) (h₁ : isEncodedChar r c) :
    IsAllowedEncodedChars r (bs.push c) := by
  simpa [IsAllowedEncodedChars, ByteArray.push, Array.all_push, And.intro h h₁]

-- Same statement for the query-character variant of the invariant.
private theorem IsAllowedEncodedQueryChars.push {bs : ByteArray} (h : IsAllowedEncodedQueryChars r bs) (h₁ : isEncodedQueryChar r c) :
    IsAllowedEncodedQueryChars r (bs.push c) := by
  simpa [IsAllowedEncodedQueryChars, ByteArray.push, Array.all_push, And.intro h h₁]
-- Every byte accepted by `isEncodedChar` is ASCII: the predicate conjoins `isAsciiByte`,
-- so the result is just the left projection.
private theorem isEncodedChar_isAscii (c : UInt8) (h : isEncodedChar r c) : isAsciiByte c := by
  simp [isEncodedChar, isAsciiByte] at *
  exact h.left

-- Query characters are ASCII as well: either an encoded char (previous lemma)
-- or the literal '+' (handled by `decide`).
private theorem isEncodedQueryChar_isAscii (c : UInt8) (h : isEncodedQueryChar r c) : isAsciiByte c := by
  unfold isEncodedQueryChar isAsciiByte at *
  simp at h
  rcases h
  next h => exact isEncodedChar_isAscii c h
  next h => subst_vars; decide
-- `hexDigit` of any nibble (< 16) produces a byte that `isHexDigitByte` accepts.
-- NOTE(review): some Unicode operators (`≤`, anonymous-constructor brackets) appear to
-- have been stripped from this proof in transit; the `have`/`refine` lines below should
-- be re-checked against the original source before relying on them.
private theorem hexDigit_isHexDigit (h₀ : x < 16) : isHexDigitByte (hexDigit x) := by
  unfold hexDigit isHexDigitByte
  have h₁ : x.toNat < 16 := h₀
  split <;> simp
  -- Case x < 10: the result is x + '0' (0x30), i.e. in the ASCII range 48-57.
  next p =>
    have h₂ : x.toNat < 10 := p
    have h₂ : 48 x.toNat + 48 := by omega
    have h₃ : x.toNat + 48 57 := by omega
    have h₄ : x.toNat + 48 < 256 := by omega
    refine Or.inl (Or.inl ?_, ?_)
    · exact (UInt8.ofNat_le_iff_le (by decide) h₄ |>.mpr h₂)
    · exact (UInt8.ofNat_le_iff_le h₄ (by decide) |>.mpr h₃)
  -- Case 10 ≤ x: the result is x - 10 + 'A' (0x41), i.e. in the ASCII range 65-70.
  next p =>
    have h₂ : ¬(x.toNat < 10) := p
    have h₃ : 65 x.toNat - 10 + 65 := by omega
    have h₅ : x.toNat - 10 + 65 70 := by omega
    have h₄ : x.toNat - 10 + 65 < 256 := by omega
    refine Or.inr ?_, ?_
    · simpa [UInt8.ofNat_sub (by omega : 10 x.toNat)] using
        UInt8.ofNat_le_iff_le (by decide : 65 < 256) h₄ |>.mpr h₃
    · simpa [UInt8.ofNat_add, UInt8.ofNat_sub (by omega : 10 x.toNat)] using
        UInt8.ofNat_le_iff_le h₄ (by decide : 70 < 256) |>.mpr h₅
-- Hex digit bytes ('0'-'9', 'a'-'f', 'A'-'F') are all below 0x80, hence ASCII.
-- NOTE(review): the `rcases` pattern below appears to have lost its anonymous-constructor
-- brackets in transit; re-check against the original source.
private theorem isHexDigit_isAscii {c : UInt8} (h : isHexDigitByte c) : isAsciiByte c := by
  simp [isHexDigitByte, isAsciiByte] at *
  rcases h with h1, h2 | h1, h2
  · exact UInt8.lt_of_le_of_lt h2 (by decide)
  next h => exact UInt8.lt_of_le_of_lt h.right (by decide)
  · exact UInt8.lt_of_le_of_lt h2 (by decide)
-- A hex digit is an allowed encoded character: it is ASCII (previous lemma) and sits
-- in the hex-digit disjunct of `isEncodedChar`.
private theorem isHexDigit_isEncodedChar {c : UInt8} (h : isHexDigitByte c) : isEncodedChar r c := by
  unfold isEncodedChar
  simp at *
  exact And.intro (isHexDigit_isAscii h) (Or.inr (Or.inl h))

-- Same fact lifted through the query-character disjunction.
private theorem isHexDigit_isEncodedQueryChar {c : UInt8} (h : isHexDigitByte c) : isEncodedQueryChar r c := by
  unfold isEncodedQueryChar isEncodedChar
  simp at *
  exact Or.inl (And.intro (isHexDigit_isAscii h) (Or.inr (Or.inl h)))
/--
Monotonicity of `Array.all` over a byte array: if every byte satisfies `p` and `p`
implies `q` pointwise, then every byte satisfies `q`.
-/
theorem all_of_all_of_imp {b : ByteArray} (h : b.data.all p) (imp : ∀ c, p c → q c) : b.data.all q := by
  rw [Array.all_eq] at *
  simp at *
  intro i x
  exact (imp b.data[i]) (h i x)
-- UTF-8 encoding a list of ASCII bytes (each viewed as a character) is the identity:
-- each byte encodes to itself as a one-byte sequence.
private theorem autf8EncodeChar_flatMap_ascii {a : List UInt8}
    (is_ascii_list : ∀ (x : UInt8), x ∈ a → x < 128) :
    List.flatMap (fun a => String.utf8EncodeChar (Char.ofUInt8 a)) a = a := by
  -- An ASCII byte encodes to the singleton list containing itself.
  have h_encode {i : UInt8} (h : i < 128) : String.utf8EncodeChar (Char.ofUInt8 i) = [i] := by
    simp [Char.ofUInt8, String.utf8EncodeChar, show ¬127 < i.toNat from Nat.not_lt_of_le (Nat.le_pred_of_lt h)]
  induction a with
  | nil => simp
  | cons head tail ih =>
    simp [List.flatMap_cons]
    rw [h_encode]
    · simp
      rw [ih]
      intro x hx
      exact is_ascii_list x (by simp [hx])
    · exact is_ascii_list head (by simp)
-- `List.toByteArray.loop` produces the accumulator's data followed by the remaining list.
private theorem List.toByteArray_loop_eq (xs : List UInt8) (acc : ByteArray) :
    (List.toByteArray.loop xs acc).data = acc.data ++ xs.toArray := by
  induction xs generalizing acc with
  | nil => simp [List.toByteArray.loop]
  | cons x xs ih => simp [List.toByteArray.loop, ih, Array.push]

-- Round-trip: converting a byte array to a list and back yields the same byte array.
private theorem ByteArray.toList_toByteArray (ba : ByteArray) :
    ba.data.toList.toByteArray = ba := by
  cases ba with
  | mk data =>
    simp [List.toByteArray]
    apply ByteArray.ext
    simp [List.toByteArray_loop_eq, ByteArray.empty]
    decide
/--
A byte array containing only ASCII bytes is valid UTF-8: each byte is exactly the
UTF-8 encoding of the corresponding ASCII character.
-/
theorem isValidUTF8_of_isAsciiByte (ba : ByteArray) (s : ba.data.all isAsciiByte) : ByteArray.IsValidUTF8 ba := by
  -- The witness decoding is the byte list viewed character-by-character.
  refine ⟨ba.data.toList.map Char.ofUInt8, ?_⟩
  rw [List.utf8Encode]
  simp only [List.flatMap_map]
  have is_ascii : ∀ (x : UInt8), x ∈ ba.data.toList → x < 128 := by
    let is_ascii := Array.all_eq_true_iff_forall_mem.mp s
    simp [isAsciiByte] at is_ascii
    intro x hx
    exact is_ascii x (by simp_all)
  rw [autf8EncodeChar_flatMap_ascii is_ascii]
  exact ByteArray.toList_toByteArray ba |>.symm
/--
A percent-encoded URI component with a compile-time proof that it contains only valid encoded characters.
This provides type-safe URI encoding without runtime validation.
The invariant guarantees that the string contains only bytes accepted by `isEncodedChar r`
(ASCII bytes satisfying `r`, hex digits, and percent signs for escape sequences).
-/
structure EncodedString (r : UInt8 → Bool) where
  private mk ::
  /--
  The underlying byte array containing the percent-encoded data.
  -/
  toByteArray : ByteArray
  /--
  Proof that all characters in the byte array are valid encoded characters.
  -/
  valid : IsAllowedEncodedChars r toByteArray
namespace EncodedString
/--
Creates an empty encoded string. The invariant holds vacuously for zero bytes.
-/
def empty : EncodedString r :=
  ⟨.empty, by simp []; exact fun i h => by contradiction⟩

instance : Inhabited (EncodedString r) where
  default := EncodedString.empty
/--
Appends a single byte to an encoded string. The proof obligation `h` guarantees the
pushed byte is an allowed encoded character, so the invariant is maintained.
-/
private def push (s : EncodedString r) (c : UInt8) (h : isEncodedChar r c) : EncodedString r :=
  ⟨s.toByteArray.push c, IsAllowedEncodedChars.push s.valid h⟩
/--
Appends the percent-encoded hexadecimal representation (%XX) of a byte to `s`.
For example, a space character (0x20) becomes "%20".
-/
private def byteToHex (b : UInt8) (s : EncodedString r) : EncodedString r :=
  -- Emit '%', then the high nibble, then the low nibble.
  let ba := s.toByteArray.push '%'.toUInt8
    |>.push (hexDigit (b >>> 4))
    |>.push (hexDigit (b &&& 0xF))
  let valid := by
    have h1 : isEncodedChar r '%'.toUInt8 :=
      by simp [isEncodedChar]; decide
    have h2 : isEncodedChar r (hexDigit (b >>> 4)) :=
      let h₀ := hexDigit_isHexDigit (BitVec.toNat_ushiftRight_lt b.toBitVec 4 (by decide))
      isHexDigit_isEncodedChar h₀
    have h3 : isEncodedChar r (hexDigit (b &&& 0xF)) :=
      let h₀ := hexDigit_isHexDigit (@UInt8.and_lt_add_one b 0xF (by decide))
      isHexDigit_isEncodedChar h₀
    exact IsAllowedEncodedChars.push (IsAllowedEncodedChars.push (IsAllowedEncodedChars.push s.valid h1) h2) h3
  ⟨ba, valid⟩
/--
Encodes a raw string into an `EncodedString` with automatic proof construction. Bytes
that are ASCII and accepted by `r` are kept as-is, while all other bytes are percent-encoded.
-/
def encode (s : String) : EncodedString r :=
  s.toUTF8.foldl (init := EncodedString.empty) fun acc c =>
    if h : isAsciiByte c ∧ r c then
      acc.push c (by simp [isEncodedChar]; exact And.intro h.left (Or.inl h.right))
    else
      byteToHex c acc
/--
Attempts to create an `EncodedString` from a `ByteArray`. Returns `some` if the byte array contains only
valid encoded characters and all percent signs are followed by exactly two hex digits, `none` otherwise.
-/
def ofByteArray? (ba : ByteArray) : Option (EncodedString r) :=
  if h : IsAllowedEncodedChars r ba then
    if isValidPercentEncoding ba then some ⟨ba, h⟩ else none
  else none
/--
Creates an `EncodedString` from a `ByteArray`. Panics when the bytes are not a valid
percent-encoded sequence; use `ofByteArray?` for the non-panicking variant.
-/
def ofByteArray! (ba : ByteArray) : EncodedString r :=
  if let some es := ofByteArray? ba then
    es
  else
    panic! "invalid encoded string"
/--
Creates an `EncodedString` from a `String` by checking if it's already a valid percent-encoded string
(validation runs on the string's UTF-8 bytes). Returns `some` if valid, `none` otherwise.
-/
def ofString? (s : String) : Option (EncodedString r) :=
  ofByteArray? s.toUTF8

/--
Creates an `EncodedString` from a `String`, panicking if the string is not a valid percent-encoded string.
-/
def ofString! (s : String) : EncodedString r :=
  ofByteArray! s.toUTF8
/--
Creates an `EncodedString` from a `ByteArray` with compile-time proofs.
Use this when you have proofs that the byte array is valid.
-/
def new (ba : ByteArray) (valid : IsAllowedEncodedChars r ba) (_validEncoding : isValidPercentEncoding ba) : EncodedString r :=
  ⟨ba, valid⟩
instance : ToString (EncodedString r) where
  -- All stored bytes are ASCII (hence valid UTF-8), so the `String` can be built
  -- directly from the byte array together with its validity proof.
  toString es :=
    ⟨es.toByteArray, isValidUTF8_of_isAsciiByte es.toByteArray (all_of_all_of_imp es.valid (fun c h => by simp [isEncodedChar] at h; exact h.left))⟩
/--
Decodes an `EncodedString` back to a regular `String`. Converts percent-encoded sequences (e.g., "%20")
back to their original characters. Returns `none` if the decoded bytes are not valid UTF-8.
Malformed escapes (a '%' not followed by two hex digits) are copied through verbatim rather than rejected.
-/
def decode (es : EncodedString r) : Option String := Id.run do
  let mut decoded : ByteArray := ByteArray.empty
  let rawBytes := es.toByteArray
  let len := rawBytes.size
  let mut i := 0
  let percent := '%'.toNat.toUInt8
  while h : i < len do
    let c := rawBytes[i]
    (decoded, i) := if h₁ : c == percent ∧ i + 1 < len then
      let h1 := rawBytes[i + 1]
      if let some hd1 := hexDigitToUInt8? h1 then
        if h₂ : i + 2 < len then
          let h2 := rawBytes[i + 2]
          if let some hd2 := hexDigitToUInt8? h2 then
            -- Valid %XX escape: emit the decoded byte.
            (decoded.push (hd1 * 16 + hd2), i + 3)
          else
            -- Second digit invalid: copy the three bytes through unchanged.
            (((decoded.push c).push h1).push h2, i + 3)
        else
          ((decoded.push c).push h1, i + 2)
      else
        -- Byte after '%' is not a hex digit: copy both through unchanged.
        ((decoded.push c).push h1, i + 2)
    else
      (decoded.push c, i + 1)
  return String.fromUTF8? decoded
instance : Repr (EncodedString r) where
  reprPrec es n := reprPrec (toString es) n

-- Equality and hashing are defined on the raw encoded bytes.
instance : BEq (EncodedString r) where
  beq x y := x.toByteArray = y.toByteArray

instance : Hashable (EncodedString r) where
  hash x := Hashable.hash x.toByteArray
end EncodedString
/--
A percent-encoded query string component with a compile-time proof that it contains only valid encoded
query characters. Extends `EncodedString` to support the '+' character for spaces, following the
application/x-www-form-urlencoded format.
This type is specifically designed for encoding query parameters where spaces can be represented as '+'
instead of "%20".
-/
structure EncodedQueryString (r : UInt8 → Bool) where
  private mk ::
  /--
  The underlying byte array containing the percent-encoded query data.
  -/
  toByteArray : ByteArray
  /--
  Proof that all characters in the byte array are valid encoded query characters.
  -/
  valid : IsAllowedEncodedQueryChars r toByteArray
namespace EncodedQueryString
/--
Creates an empty encoded query string. The invariant holds vacuously for zero bytes.
-/
def empty : EncodedQueryString r :=
  ⟨.empty, by simp; intro a h; contradiction⟩

instance : Inhabited (EncodedQueryString r) where
  default := EncodedQueryString.empty
/--
Appends a single encoded query character to an encoded query string; the proof
obligation `h` keeps the invariant intact.
-/
private def push (s : EncodedQueryString r) (c : UInt8) (h : isEncodedQueryChar r c) : EncodedQueryString r :=
  ⟨s.toByteArray.push c, IsAllowedEncodedQueryChars.push s.valid h⟩
/--
Attempts to create an `EncodedQueryString` from a `ByteArray`. Returns `some` if the byte array contains
only valid encoded query characters and all percent signs are followed by exactly two hex digits, `none` otherwise.
-/
def ofByteArray? (ba : ByteArray) (r : UInt8 → Bool := isQueryChar) : Option (EncodedQueryString r) :=
  if h : IsAllowedEncodedQueryChars r ba then
    if isValidPercentEncoding ba then some ⟨ba, h⟩ else none
  else none
/--
Creates an `EncodedQueryString` from a `ByteArray`, panicking if the byte array is invalid.
Use `ofByteArray?` for the non-panicking variant.
-/
def ofByteArray! (ba : ByteArray) (r : UInt8 → Bool := isQueryChar) : EncodedQueryString r :=
  match ofByteArray? ba r with
  | some es => es
  | none => panic! "invalid encoded query string"
/--
Creates an `EncodedQueryString` from a `String` by checking if it's already a valid percent-encoded string
(validation runs on the string's UTF-8 bytes). Returns `some` if valid, `none` otherwise.
-/
def ofString? (s : String) (r : UInt8 → Bool := isQueryChar) : Option (EncodedQueryString r) :=
  ofByteArray? s.toUTF8 r

/--
Creates an `EncodedQueryString` from a `String`, panicking if the string is not a valid percent-encoded string.
-/
def ofString! (s : String) (r : UInt8 → Bool := isQueryChar) : EncodedQueryString r :=
  ofByteArray! s.toUTF8 r
/--
Creates an `EncodedQueryString` from a `ByteArray` with compile-time proofs.
Use this when you have proofs that the byte array is valid.
-/
def new (ba : ByteArray) (valid : IsAllowedEncodedQueryChars r ba) (_validEncoding : isValidPercentEncoding ba) : EncodedQueryString r :=
  ⟨ba, valid⟩
/--
Appends the percent-encoded hexadecimal representation (%XX) of a byte to `s`.
For example, a space character (0x20) becomes "%20".
-/
private def byteToHex (b : UInt8) (s : EncodedQueryString r) : EncodedQueryString r :=
  -- Emit '%', then the high nibble, then the low nibble.
  let ba := s.toByteArray.push '%'.toUInt8
    |>.push (hexDigit (b >>> 4))
    |>.push (hexDigit (b &&& 0xF))
  let valid := by
    have h1 : isEncodedQueryChar r '%'.toUInt8 := by
      simp [isEncodedQueryChar, isEncodedChar]; decide
    have h2 : isEncodedQueryChar r (hexDigit (b >>> 4)) :=
      isHexDigit_isEncodedQueryChar (hexDigit_isHexDigit (BitVec.toNat_ushiftRight_lt b.toBitVec 4 (by decide)))
    have h3 : isEncodedQueryChar r (hexDigit (b &&& 0xF)) :=
      isHexDigit_isEncodedQueryChar (hexDigit_isHexDigit (@UInt8.and_lt_add_one b 0xF (by decide)))
    exact IsAllowedEncodedQueryChars.push (IsAllowedEncodedQueryChars.push (IsAllowedEncodedQueryChars.push s.valid h1) h2) h3
  ⟨ba, valid⟩
/--
Encodes a raw string into an `EncodedQueryString` with automatic proof construction. Bytes accepted
by `r` are kept as-is, spaces are encoded as '+', and all other bytes are percent-encoded.
-/
def encode (s : String) (r : UInt8 → Bool := isQueryChar) : EncodedQueryString r :=
  s.toUTF8.foldl (init := EncodedQueryString.empty) fun acc c =>
    if h : isAsciiByte c ∧ r c then
      acc.push c (by simp [isEncodedQueryChar, isEncodedChar]; exact Or.inl (And.intro h.left (Or.inl h.right)))
    else if _ : c = ' '.toUInt8 then
      acc.push '+'.toUInt8 (by simp [isEncodedQueryChar])
    else
      byteToHex c acc
/--
Converts an `EncodedQueryString` to a `String`. All stored bytes are ASCII
(by `isEncodedQueryChar_isAscii`), so they form valid UTF-8.
-/
def toString (es : EncodedQueryString r) : String :=
  ⟨es.toByteArray, isValidUTF8_of_isAsciiByte es.toByteArray (all_of_all_of_imp es.valid (fun c h => isEncodedQueryChar_isAscii c h))⟩
/--
Decodes an `EncodedQueryString` back to a regular `String`. Converts percent-encoded sequences and '+'
signs back to their original characters. Returns `none` if the decoded bytes are not valid UTF-8.
This is almost the same code from `System.Uri.UriEscape.decodeUri`, but with `Option` instead.
-/
def decode (es : EncodedQueryString r) : Option String := Id.run do
  let mut decoded : ByteArray := ByteArray.empty
  let rawBytes := es.toByteArray
  let len := rawBytes.size
  let mut i := 0
  let percent := '%'.toNat.toUInt8
  let plus := '+'.toNat.toUInt8
  while h : i < len do
    let c := rawBytes[i]
    (decoded, i) := if c == plus then
      -- '+' is the form-urlencoded representation of a space.
      (decoded.push ' '.toNat.toUInt8, i + 1)
    else if h₁ : c == percent ∧ i + 1 < len then
      let h1 := rawBytes[i + 1]
      if let some hd1 := hexDigitToUInt8? h1 then
        if h₂ : i + 2 < len then
          let h2 := rawBytes[i + 2]
          if let some hd2 := hexDigitToUInt8? h2 then
            -- Valid %XX escape: emit the decoded byte.
            (decoded.push (hd1 * 16 + hd2), i + 3)
          else
            -- Malformed escape: copy the bytes through unchanged.
            (((decoded.push c).push h1).push h2, i + 3)
        else
          ((decoded.push c).push h1, i + 2)
      else
        ((decoded.push c).push h1, i + 2)
    else
      (decoded.push c, i + 1)
  return String.fromUTF8? decoded
end EncodedQueryString
instance : ToString (EncodedQueryString r) where
  toString := EncodedQueryString.toString

instance : Repr (EncodedQueryString r) where
  reprPrec es n := reprPrec (toString es) n

-- Equality and hashing are defined on the raw encoded bytes.
instance : BEq (EncodedQueryString r) where
  beq x y := x.toByteArray = y.toByteArray

instance : Hashable (EncodedQueryString r) where
  hash x := Hashable.hash x.toByteArray

-- Prefix a distinct tag byte so `some x` and `none` hash differently by construction.
instance : Hashable (Option (EncodedQueryString r)) where
  hash
    | some x => Hashable.hash ((ByteArray.mk #[1] ++ x.toByteArray))
    | none => Hashable.hash (ByteArray.mk #[0])
/--
A percent-encoded URI path segment. Valid characters are `pchar` (unreserved, sub-delims, ':', '@').
-/
abbrev EncodedSegment := EncodedString isPChar

namespace EncodedSegment

/--
Encodes a raw string into an encoded path segment, percent-escaping anything that is not a `pchar`.
-/
def encode (s : String) : EncodedSegment :=
  EncodedString.encode (r := isPChar) s

/--
Attempts to create an encoded path segment from raw bytes; `none` on invalid encoding.
-/
def ofByteArray? (ba : ByteArray) : Option EncodedSegment :=
  EncodedString.ofByteArray? (r := isPChar) ba

/--
Creates an encoded path segment from raw bytes, panicking on invalid encoding.
-/
def ofByteArray! (ba : ByteArray) : EncodedSegment :=
  EncodedString.ofByteArray! (r := isPChar) ba

/--
Decodes an encoded path segment back to a UTF-8 string (`none` if the decoded bytes are not valid UTF-8).
-/
def decode (segment : EncodedSegment) : Option String :=
  EncodedString.decode segment

end EncodedSegment
/--
A percent-encoded URI fragment component. Valid characters are `pchar / "/" / "?"`.
-/
abbrev EncodedFragment := EncodedString isFragmentChar

namespace EncodedFragment

/--
Encodes a raw string into an encoded fragment component, percent-escaping disallowed bytes.
-/
def encode (s : String) : EncodedFragment :=
  EncodedString.encode (r := isFragmentChar) s

/--
Attempts to create an encoded fragment component from raw bytes; `none` on invalid encoding.
-/
def ofByteArray? (ba : ByteArray) : Option EncodedFragment :=
  EncodedString.ofByteArray? (r := isFragmentChar) ba

/--
Creates an encoded fragment component from raw bytes, panicking on invalid encoding.
-/
def ofByteArray! (ba : ByteArray) : EncodedFragment :=
  EncodedString.ofByteArray! (r := isFragmentChar) ba

/--
Decodes an encoded fragment component back to a UTF-8 string (`none` if not valid UTF-8).
-/
def decode (fragment : EncodedFragment) : Option String :=
  EncodedString.decode fragment

end EncodedFragment
/--
A percent-encoded URI userinfo component. Valid characters are `unreserved / sub-delims / ":"`.
-/
abbrev EncodedUserInfo := EncodedString isUserInfoChar

namespace EncodedUserInfo

/--
Encodes a raw string into an encoded userinfo component, percent-escaping disallowed bytes.
-/
def encode (s : String) : EncodedUserInfo :=
  EncodedString.encode (r := isUserInfoChar) s

/--
Attempts to create an encoded userinfo component from raw bytes; `none` on invalid encoding.
-/
def ofByteArray? (ba : ByteArray) : Option EncodedUserInfo :=
  EncodedString.ofByteArray? (r := isUserInfoChar) ba

/--
Creates an encoded userinfo component from raw bytes, panicking on invalid encoding.
-/
def ofByteArray! (ba : ByteArray) : EncodedUserInfo :=
  EncodedString.ofByteArray! (r := isUserInfoChar) ba

/--
Decodes an encoded userinfo component back to a UTF-8 string (`none` if not valid UTF-8).
-/
def decode (userInfo : EncodedUserInfo) : Option String :=
  EncodedString.decode userInfo

end EncodedUserInfo
/--
A percent-encoded URI query parameter. Valid characters are `pchar / "/" / "?"` (minus the
structural separators '&' and '=') with '+' for spaces.
-/
abbrev EncodedQueryParam := EncodedQueryString isQueryDataChar

namespace EncodedQueryParam

/--
Encodes a raw string into an encoded query parameter (spaces become '+').
-/
def encode (s : String) : EncodedQueryParam :=
  EncodedQueryString.encode (r := isQueryDataChar) s

/--
Attempts to create an encoded query parameter from raw bytes; `none` on invalid encoding.
-/
def ofByteArray? (ba : ByteArray) : Option EncodedQueryParam :=
  EncodedQueryString.ofByteArray? (r := isQueryDataChar) ba

/--
Creates an encoded query parameter from raw bytes, panicking on invalid encoding.
-/
def ofByteArray! (ba : ByteArray) : EncodedQueryParam :=
  EncodedQueryString.ofByteArray! (r := isQueryDataChar) ba

/--
Attempts to create an encoded query parameter from an already-encoded string.
-/
def fromString? (s : String) : Option EncodedQueryParam :=
  EncodedQueryString.ofString? (r := isQueryDataChar) s

/--
Decodes an encoded query parameter back to a UTF-8 string (`none` if not valid UTF-8).
-/
def decode (param : EncodedQueryParam) : Option String :=
  EncodedQueryString.decode param

end EncodedQueryParam
end Std.Http.URI

View File

@@ -0,0 +1,432 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
import Init.While
public import Init.Data.String
public import Std.Internal.Parsec
public import Std.Internal.Parsec.ByteArray
public import Std.Internal.Http.Data.URI.Basic
public import Std.Internal.Http.Data.URI.Config
public section
/-!
# URI Parser
This module provides parsers for HTTP request targets and HTTP-oriented URIs aligned with RFC 3986.
It handles parsing of schemes, authorities, paths, queries, and fragments.
Notable intentional constraints:
* hosts are limited to IPv4, bracketed IPv6, and DNS-style domain names
* IPvFuture (`v...`) inside `IP-literal` is currently rejected
References:
* https://www.rfc-editor.org/rfc/rfc3986.html
* https://www.rfc-editor.org/rfc/rfc9110.html#name-uri-references
* https://www.rfc-editor.org/rfc/rfc9112.html#section-3.3
-/
namespace Std.Http.URI.Parser
set_option linter.all true
open Internal Char
open Std Internal Parsec ByteArray
/-- Runs `p` with backtracking, yielding `none` instead of failing. -/
@[inline]
private def tryOpt (p : Parser α) : Parser (Option α) :=
  optional (attempt p)

/-- Returns `true` when the next byte satisfies `p`, without consuming input. -/
@[inline]
private def peekIs (p : UInt8 → Bool) : Parser Bool := do
  return (← peekWhen? p).isSome
-- scheme = ALPHA *( ALPHA / DIGIT / "+" / "-" / "." )
private def parseScheme (config : URI.Config) : Parser URI.Scheme := do
  if config.maxSchemeLength = 0 then
    fail "scheme length limit is 0 (no scheme allowed)"
  let first : UInt8 ← satisfy (fun b : UInt8 => Internal.Char.isAlphaByte b)
  let rest ← takeWhileAtMost
    (fun c =>
      isAlphaNum c ∨
        c = '+'.toUInt8 ∨ c = '-'.toUInt8 ∨ c = '.'.toUInt8)
    (config.maxSchemeLength - 1)
  -- Schemes are case-insensitive; normalize to lowercase before validating.
  let schemeBytes := ByteArray.empty.push first ++ rest.toByteArray
  let str := String.fromUTF8! schemeBytes |>.toLower
  if h : URI.IsValidScheme str then
    return ⟨str, h⟩
  else
    fail "invalid scheme"
-- port = 1*DIGIT
private def parsePortNumber : Parser UInt16 := do
  -- At most 5 digits: 65535 is the largest valid port.
  let portBytes ← takeWhileAtMost isDigitByte 5
  let portStr := String.fromUTF8! portBytes.toByteArray
  let some portNum := String.toNat? portStr
    | fail s!"invalid port number: {portStr}"
  if portNum > 65535 then
    fail s!"port number too large: {portNum}"
  return portNum.toUInt16
-- userinfo = *( unreserved / pct-encoded / sub-delims / ":" )
private def parseUserInfo (config : URI.Config) : Parser URI.UserInfo := do
  -- Everything before ':' is the user name; stop there so the password can be split off.
  let userBytesName ← takeWhileAtMost
    (fun x =>
      x ≠ ':'.toUInt8 ∧
        (isUserInfoChar x ∨ x = '%'.toUInt8))
    config.maxUserInfoLength
  let some userNameEncoded := URI.EncodedUserInfo.ofByteArray? userBytesName.toByteArray
    | fail "invalid percent encoding in user info"
  let userPassEncoded ← if ← peekIs (· == ':'.toUInt8) then
    skip
    let userBytesPass ← takeWhileAtMost
      (fun x => isUserInfoChar x ∨ x = '%'.toUInt8)
      config.maxUserInfoLength
    let some userPassEncoded := URI.EncodedUserInfo.ofByteArray? userBytesPass.toByteArray
      | fail "invalid percent encoding in user info"
    pure <| some userPassEncoded
  else
    pure none
  return ⟨userNameEncoded, userPassEncoded⟩
-- Parses bracketed IPv6 literals.
-- Note: RFC 3986 also allows IPvFuture inside `IP-literal`; this parser
-- currently rejects IPvFuture.
private def parseIPv6 : Parser Net.IPv6Addr := do
  skipByte '['.toUInt8
  let result ← takeWhile1AtMost
    (fun x => x = ':'.toUInt8 ∨ x = '.'.toUInt8 ∨ isHexDigitByte x)
    256
  skipByte ']'.toUInt8
  let ipv6Str := String.fromUTF8! result.toByteArray
  let some ipv6Addr := Std.Net.IPv6Addr.ofString ipv6Str
    | fail s!"invalid IPv6 address: {ipv6Str}"
  return ipv6Addr
-- IPv4address = dec-octet "." dec-octet "." dec-octet "." dec-octet
private def parseIPv4 : Parser Net.IPv4Addr := do
  let result ← takeWhile1AtMost
    (fun x => x = '.'.toUInt8 ∨ isDigitByte x)
    256
  let ipv4Str := String.fromUTF8! result.toByteArray
  let some ipv4Addr := Std.Net.IPv4Addr.ofString ipv4Str
    | fail s!"invalid IPv4 address: {ipv4Str}"
  return ipv4Addr
-- host = IP-literal / IPv4address / reg-name
-- Note: RFC 1123 allows domain labels to start with digits, so we must try IPv4
-- first and fall back to reg-name parsing if it fails.
private def parseHost (config : URI.Config) : Parser URI.Host := do
  if (← peekWhen? (· == '['.toUInt8)).isSome then
    return .ipv6 (← parseIPv6)
  else
    if (← peekWhen? isDigitByte).isSome then
      if let some ipv4 ← tryOpt parseIPv4 then
        return .ipv4 ipv4
    -- It needs to be a legal DNS label, so it differs from reg-name.
    let some str := String.fromUTF8? (← takeWhile1AtMost
      (fun x => isAlphaNum x ∨ x = '-'.toUInt8 ∨ x = '.'.toUInt8)
      config.maxHostLength).toByteArray
      | fail s!"invalid host"
    -- Host names are case-insensitive; normalize before validating.
    let lower := str.toLower
    if h : URI.IsValidDomainName lower ∧ ¬lower.isEmpty then
      return .name ⟨lower, .isLowerCase_toLower, h⟩
    else
      fail s!"invalid domain name: {str}"
-- authority = [ userinfo "@" ] host [ ":" port ]
private def parseAuthority (config : URI.Config) : Parser URI.Authority := do
  -- userinfo is only present when terminated by '@'; backtrack otherwise.
  let userInfo ← tryOpt do
    let ui ← parseUserInfo config
    skipByte '@'.toUInt8
    return ui
  let host ← parseHost config
  let port : URI.Port ←
    if ← peekIs (· == ':'.toUInt8) then
      skipByte ':'.toUInt8
      if (← peekWhen? isDigitByte).isSome then
        pure (.value (← parsePortNumber))
      else
        -- "host:" followed by end-of-input or a path/query/fragment delimiter is an empty port.
        let next ← peek?
        if next.isNone || next.any (fun c => c = '/'.toUInt8 ∨ c = '?'.toUInt8 ∨ c = '#'.toUInt8) then
          pure .empty
        else
          fail "invalid port number"
    else
      pure .omitted
  return { userInfo, host, port }
-- segment = *pchar (plus '%' so percent-escapes pass through to validation)
private def parseSegment (config : URI.Config) : Parser ByteSlice := do
  takeWhileAtMost (fun c => isPChar c ∨ c = '%'.toUInt8) config.maxSegmentLength
/-
path = path-abempty ; begins with "/" or is empty
  / path-absolute ; begins with "/" but not "//"
  / path-noscheme ; begins with a non-colon segment
  / path-rootless ; begins with a segment
  / path-empty ; zero characters
path-abempty = *( "/" segment )
path-absolute = "/" [ segment-nz *( "/" segment ) ]
path-noscheme = segment-nz-nc *( "/" segment )
path-rootless = segment-nz *( "/" segment )
path-empty = 0<pchar>
-/
/--
Parses a URI path with combined parsing and validation.
-/
def parsePath (config : URI.Config) (forceAbsolute : Bool) (allowEmpty : Bool) : Parser URI.Path := do
  let isPathDelimiter : UInt8 → Bool := fun c => c = '?'.toUInt8 ∨ c = '#'.toUInt8
  let mut isAbsolute := false
  let mut segments : Array _ := #[]
  let mut totalLength := 0
  let isSegmentOrSlash ←
    peekIs (fun c => isPChar c ∨ c = '%'.toUInt8 ∨ c = '/'.toUInt8)
  if ¬allowEmpty ∧ ((← isEof) ∨ ¬isSegmentOrSlash) then
    fail "need a path"
  -- Check if path is absolute
  if ← peekIs (· == '/'.toUInt8) then
    isAbsolute := true
    totalLength := totalLength + 1
    skip
  else if forceAbsolute then
    if allowEmpty ∧ ((← isEof) ∨ ¬isSegmentOrSlash) then
      return { segments := segments, absolute := isAbsolute }
    else
      fail "require '/' in path"
  else
    pure ()
  -- Parse segments
  while (← peek?).isSome do
    let some next := (← peek?) | break
    if isPathDelimiter next then
      break
    if ¬(next = '/'.toUInt8 ∨ isPChar next ∨ next = '%'.toUInt8) then
      break
    if segments.size >= config.maxPathSegments then
      fail s!"too many path segments (limit: {config.maxPathSegments})"
    let segmentBytes ← parseSegment config
    let some segmentStr := URI.EncodedSegment.ofByteArray? segmentBytes.toByteArray
      | fail "invalid percent encoding in path segment"
    totalLength := totalLength + segmentBytes.size
    if totalLength > config.maxTotalPathLength then
      fail s!"path too long (limit: {config.maxTotalPathLength} bytes)"
    segments := segments.push segmentStr
    if (← peek?).any (· == '/'.toUInt8) then
      totalLength := totalLength + 1
      if totalLength > config.maxTotalPathLength then
        fail s!"path too long (limit: {config.maxTotalPathLength} bytes)"
      skip
      -- If path ends with '/', add empty segment
      let next ← peek?
      if next.isNone || next.any isPathDelimiter then
        if segments.size >= config.maxPathSegments then
          fail s!"too many path segments (limit: {config.maxPathSegments})"
        segments := segments.push (URI.EncodedString.empty)
    else
      break
  return { segments := segments, absolute := isAbsolute }
-- query = *( pchar / "/" / "?" )
private def parseQuery (config : URI.Config) : Parser URI.Query := do
  let queryBytes ←
    takeWhileAtMost (fun c => isQueryChar c ∨ c = '%'.toUInt8) config.maxQueryLength
  let some queryStr := String.fromUTF8? queryBytes.toByteArray
    | fail "invalid query string"
  if queryStr.isEmpty then
    return URI.Query.empty
  let rawPairs := queryStr.splitOn "&"
  if rawPairs.length > config.maxQueryParams then
    fail s!"too many query parameters (limit: {config.maxQueryParams})"
  -- Build the query map inside `Option`: any invalid pair collapses the result to `none`.
  let pairs : Option URI.Query := rawPairs.foldlM (init := URI.Query.empty) fun acc pair => do
    match pair.splitOn "=" with
    | [key] =>
      let key ← URI.EncodedQueryParam.fromString? key
      pure (acc.insertEncoded key none)
    | key :: value =>
      let key ← URI.EncodedQueryParam.fromString? key
      -- A value may itself contain '='; re-join the remaining pieces.
      let value ← URI.EncodedQueryParam.fromString? (String.intercalate "=" value)
      pure (acc.insertEncoded key (some value))
    | [] => pure acc -- unreachable: splitOn always returns at least one element
  if let some pairs := pairs then
    return pairs
  else
    fail "invalid query string"
-- fragment = *( pchar / "/" / "?" )
private def parseFragment (config : URI.Config) : Parser URI.EncodedFragment := do
  let fragmentBytes ←
    takeWhileAtMost (fun c => isFragmentChar c ∨ c = '%'.toUInt8) config.maxFragmentLength
  let some fragmentStr := URI.EncodedFragment.ofByteArray? fragmentBytes.toByteArray
    | fail "invalid percent encoding in fragment"
  return fragmentStr
-- hier-part = "//" authority path-abempty / path-absolute / path-rootless / path-empty
private def parseHierPart (config : URI.Config) : Parser (Option URI.Authority × URI.Path) := do
  -- Check for "//" authority path-abempty
  if (← tryOpt (skipString "//")).isSome then
    let authority ← parseAuthority config
    let path ← parsePath config true true -- path-abempty (must start with "/" or be empty)
    return (some authority, path)
  else
    -- path-absolute / path-rootless / path-empty
    let path ← parsePath config false true
    return (none, path)
/--
Parses a URI (Uniform Resource Identifier).

URI = scheme ":" hier-part [ "?" query ] [ "#" fragment ]
hier-part = "//" authority path-abempty / path-absolute / path-rootless / path-empty
-/
public def parseURI (config : URI.Config := {}) : Parser URI := do
  let scheme ← parseScheme config
  skipByte ':'.toUInt8
  let (authority, path) ← parseHierPart config
  let query ← optional (skipByteChar '?' *> parseQuery config)
  let query := query.getD .empty
  let fragment ← optional do
    -- The fragment is stored decoded; reject it when the escapes are not valid UTF-8.
    let some result := (← (skipByteChar '#' *> parseFragment config)) |>.decode
      | fail "invalid fragment parse encoding"
    return result
  return { scheme, authority, path, query, fragment }
/--
Parses a request target with combined parsing and validation.
-/
public def parseRequestTarget (config : URI.Config := {}) : Parser RequestTarget :=
  asterisk <|> origin <|> absoluteHttp <|> authority <|> absolute
where
  -- The asterisk form
  asterisk : Parser RequestTarget := do
    skipByte '*'.toUInt8
    return .asteriskForm
  -- origin-form = absolute-path [ "?" query ]
  -- absolute-path = 1*( "/" segment )
  origin : Parser RequestTarget := attempt do
    if ← peekIs (· == '/'.toUInt8) then
      let path ← parsePath config true true
      let query ← optional (skipByte '?'.toUInt8 *> parseQuery config)
      return .originForm { path, query }
    else
      fail "not origin"
  -- Shared tail of the absolute forms: everything after the scheme.
  -- Note: no fragment is parsed — HTTP request targets do not carry fragments.
  absoluteFromScheme (scheme : URI.Scheme) : Parser RequestTarget := do
    skipByte ':'.toUInt8
    let (auth, path) ← parseHierPart config
    let query ← optional (skipByteChar '?' *> parseQuery config)
    let query := query.getD URI.Query.empty
    return .absoluteForm { path, scheme, authority := auth, query }
  -- Prefer absolute-form for explicit HTTP(S) scheme targets.
  -- This avoids misclassifying full URIs like `http://host/path` as authority-form.
  -- Fixed: previously this branch parsed the hier-part and query and then ran
  -- `parseURI` again on the already-consumed input, so it could never succeed;
  -- now the already-parsed scheme is checked and `absoluteFromScheme` is reused.
  absoluteHttp : Parser RequestTarget := attempt do
    let scheme ← parseScheme config
    let schemeStr : String := scheme
    if schemeStr = "http" || schemeStr = "https" then
      absoluteFromScheme scheme
    else
      fail "not http absolute uri"
  -- absolute-URI = scheme ":" hier-part [ "?" query ]
  absolute : Parser RequestTarget := attempt do
    let scheme ← parseScheme config
    absoluteFromScheme scheme
  -- authority-form = host ":" port
  authority : Parser RequestTarget := attempt do
    let host ← parseHost config
    skipByteChar ':'
    let port ← parsePortNumber
    return .authorityForm { host, port := .value port }
/--
Parses an HTTP `Host` header value: a host, an optional `":" port`, and nothing else
(the whole input must be consumed).
-/
public def parseHostHeader (config : URI.Config := {}) : Parser (URI.Host × URI.Port) := do
  let host ← parseHost config
  let port : URI.Port ←
    if ← peekIs (· == ':'.toUInt8) then
      skipByte ':'.toUInt8
      if (← peekWhen? isDigitByte).isSome then
        pure (.value (← parsePortNumber))
      else
        -- "host:" with nothing after the colon is an empty port.
        let next ← peek?
        if next.isNone then
          pure .empty
        else
          fail "invalid host header port"
    else
      pure .omitted
  if ¬(← isEof) then
    fail "invalid host header"
  return (host, port)
end Std.Http.URI.Parser

View File

@@ -6,7 +6,10 @@ Authors: Sofia Rodrigues
module
prelude
public import Std.Internal.Http.Internal.Char
public import Std.Internal.Http.Internal.ChunkedBuffer
public import Std.Internal.Http.Internal.LowerCase
public import Std.Internal.Http.Internal.MultiMap
public import Std.Internal.Http.Internal.Encode
public import Std.Internal.Http.Internal.String
public import Std.Internal.Http.Internal.Char

View File

@@ -299,4 +299,15 @@ that provides it.
def isUserInfoChar (c : UInt8) : Bool :=
  -- Raw byte classes of `userinfo = *( unreserved / pct-encoded / sub-delims / ":" )`
  -- (RFC 3986 §3.2.1); percent-encoded triplets are presumably handled by the caller — confirm.
  isUnreserved c || isSubDelims c || c = ':'.toUInt8
/--
Accepts a byte in a query *data* position (a key or a value).

Based on `query = *( pchar / "/" / "?" )` from RFC 3986, but the two bytes `&`
and `=` are excluded so callers can treat them as structural key/value separators.
-/
@[inline, expose]
def isQueryDataChar (c : UInt8) : Bool :=
  isQueryChar c && !(c = '&'.toUInt8 || c = '='.toUInt8)
end Std.Http.Internal.Char

View File

@@ -0,0 +1,68 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
import Init.Grind
import Init.Data.Int.OfNat
import Init.Data.UInt.Lemmas
public import Init.Data.String
@[expose]
public section
/-!
# LowerCase
This module provides predicates and normalization functions for handling ASCII case-insensitivity.
It includes proofs of idempotency for lowercase transformations, as well as utilities for validating
the lowercase state of a String.
-/
namespace Std.Http.Internal

set_option linter.all true

/--
Predicate asserting that a string is in lowercase form.
-/
@[expose] def IsLowerCase (s : String) : Prop :=
  s.toLower = s

-- `c.toLower = c` exactly when `c` is not an ASCII uppercase letter.
private theorem Char.toLower_eq_self_iff {c : Char} : c.toLower = c ↔ c.isUpper = false := by
  simp only [Char.toLower, Char.isUpper]
  split <;> rename_i h <;> simpa [UInt32.le_iff_toNat_le, Char.ext_iff] using h

-- Lift the per-character fact to whole strings via their character lists.
private theorem String.toLower_eq_self_iff {s : String} : s.toLower = s ↔ s.toList.any Char.isUpper = false := by
  simp only [String.toLower, String.toList_inj, String.toList_map]
  rw (occs := [2]) [← List.map_id s.toList]
  rw [List.map_eq_map_iff]
  simp [Char.toLower_eq_self_iff]

instance : Decidable (IsLowerCase s) :=
  decidable_of_decidable_of_iff (p := s.toList.any Char.isUpper = false)
    (by exact String.toLower_eq_self_iff.symm)

namespace IsLowerCase

private theorem Char.toLower_idempotent (c : Char) : c.toLower.toLower = c.toLower := by
  grind [Char.toLower]

/--
Proof that applying `toLower` to any string results in a string that satisfies the `IsLowerCase`
predicate.
-/
theorem isLowerCase_toLower {s : String} : IsLowerCase s.toLower := by
  unfold IsLowerCase String.toLower
  rw [String.map_map, Function.comp_def]
  simp [Char.toLower_idempotent]

/--
The empty string is trivially lowercase.
-/
theorem isLowerCase_empty : IsLowerCase "" := by
  simp [IsLowerCase, String.toLower]

end IsLowerCase
end Std.Http.Internal

View File

@@ -0,0 +1,282 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Init.Grind
public import Init.Data.Int.OfNat
public import Std.Data.HashMap
public section
/-!
# MultiMap
This module defines a generic `MultiMap` type that maps keys to multiple values.
The implementation stores entries in a flat array for iteration and an index HashMap
for fast key lookups. Each key always has at least one associated value.
-/
namespace Std

open Std Internal

set_option linter.all true

/--
A structure for managing ordered key-value pairs where each key can have multiple values.
-/
structure MultiMap (α : Type u) (β : Type v) [BEq α] [Hashable α] where
  /--
  Flat array of all key-value entries in insertion order.
  -/
  entries : Array (α × β)
  /--
  Maps each key to its indices in `entries`. Each array is non-empty.
  -/
  indexes : HashMap α (Array Nat)
  /--
  Invariant: every key in `indexes` maps to a non-empty array of valid indices into `entries`.
  -/
  validity : ∀ k : α, (p : k ∈ indexes) →
    let idx := (indexes.get k p);
    idx.size > 0 ∧ (∀ i ∈ idx, i < entries.size)
  deriving Repr

instance [BEq α] [Hashable α] [Inhabited α] [Inhabited β] : Inhabited (MultiMap α β) where
  default := ⟨#[], .emptyWithCapacity, by intro h p; simp at p⟩

namespace MultiMap

variable {α : Type u} {β : Type v} [BEq α] [Hashable α]

instance : Membership α (MultiMap α β) where
  mem map key := key ∈ map.indexes

instance (key : α) (map : MultiMap α β) : Decidable (key ∈ map) :=
  inferInstanceAs (Decidable (key ∈ map.indexes))

/--
Retrieves all values for the given key.
-/
@[inline]
def getAll (map : MultiMap α β) (key : α) (h : key ∈ map) : Array β :=
  -- Map each stored index back into `entries`; the validity invariant supplies the bounds proof.
  let entries := map.indexes.get key h |>.mapFinIdx fun idx _ h₁ =>
    let proof := map.validity key h |>.right _ (Array.getElem_mem h₁)
    map.entries[(map.indexes.get key h)[idx]]'proof |>.snd
  entries

/--
Retrieves the first value for the given key.
-/
@[inline]
def get (map : MultiMap α β) (key : α) (h : key ∈ map) : β :=
  let ⟨nonEmpty, isIn⟩ := map.validity key h
  let entry := ((map.indexes.get key h)[0]'nonEmpty)
  let proof := map.validity key h |>.right
    entry
    (by simp only [entry, HashMap.get_eq_getElem, Array.getElem_mem])
  map.entries[entry]'proof |>.snd

/--
Retrieves all values for the given key, or `none` if the key is absent.
-/
@[inline]
def getAll? (map : MultiMap α β) (key : α) : Option (Array β) :=
  if h : key ∈ map then
    some (map.getAll key h)
  else
    none

/--
Retrieves the first value for the given key, or `none` if the key is absent.
-/
@[inline]
def get? (map : MultiMap α β) (key : α) : Option β :=
  if h : key ∈ map then
    some (map.get key h)
  else
    none

/--
Checks if the key-value pair is present in the map.
-/
@[inline]
def hasEntry (map : MultiMap α β) [BEq β] (key : α) (value : β) : Bool :=
  map.getAll? key
    |>.bind (fun arr => arr.find? (· == value))
    |>.isSome

/--
Retrieves the last value for the given key.
Returns `none` if the key is absent.
-/
@[inline]
def getLast? (map : MultiMap α β) (key : α) : Option β :=
  match map.getAll? key with
  | none => none
  | some idxs => idxs.back?

/--
Like `get?`, but returns a default value if absent.
-/
@[inline]
def getD (map : MultiMap α β) (key : α) (d : β) : β :=
  map.get? key |>.getD d

/--
Like `get?`, but panics if absent.
-/
@[inline]
def get! [Inhabited β] (map : MultiMap α β) (key : α) : β :=
  map.get? key |>.get!

/--
Inserts a new key-value pair into the map.
If the key already exists, appends the value to existing values.
-/
@[inline]
def insert [EquivBEq α] [LawfulHashable α] (map : MultiMap α β) (key : α) (value : β) : MultiMap α β :=
  let i := map.entries.size
  let entries := map.entries.push (key, value)
  let f := fun
    | some idxs => some (idxs.push i)
    | none => some #[i]
  let indexes := map.indexes.alter key f
  { entries, indexes, validity := ?_ }
where finally
  have _ := map.validity
  grind

/--
Inserts multiple values for a given key, appending to any existing values.
-/
@[inline]
def insertMany [EquivBEq α] [LawfulHashable α] (map : MultiMap α β) (key : α) (values : Array β) : MultiMap α β :=
  values.foldl (insert · key) map

/--
Creates an empty multimap.
-/
def empty : MultiMap α β :=
  ⟨#[], .emptyWithCapacity, by intro h p; simp at p⟩

/--
Creates a multimap from a list of key-value pairs.
-/
def ofList [EquivBEq α] [LawfulHashable α] (pairs : List (α × β)) : MultiMap α β :=
  pairs.foldl (fun acc (k, v) => acc.insert k v) empty

/--
Checks if a key exists in the map.
-/
@[inline]
def contains (map : MultiMap α β) (key : α) : Bool :=
  map.indexes.contains key

/--
Updates all values associated with `key` by applying `f` to each one.
If the key is absent, returns the map unchanged.
-/
@[inline]
def update [EquivBEq α] [LawfulHashable α] (map : MultiMap α β) (key : α) (f : β → β) : MultiMap α β :=
  if key ∉ map then
    map
  else
    -- Rebuild the whole map so the index invariant is re-established by `insert`.
    map.entries.foldl (fun acc (k, v) => acc.insert k (if k == key then f v else v)) empty

/--
Replaces the last value associated with `key` with `value`.
If the key is absent, returns the map unchanged.
-/
@[inline]
def replaceLast (map : MultiMap α β) (key : α) (value : β) : MultiMap α β :=
  if h : key ∈ map then
    let idxs := map.indexes.get key h
    let ⟨nonEmpty, isIn⟩ := map.validity key h
    let lastPos : Fin idxs.size := ⟨idxs.size - 1, Nat.sub_lt nonEmpty (by omega)⟩
    let lastIdx : Nat := idxs[lastPos]
    have lastIdxValid : lastIdx < map.entries.size := isIn lastIdx (Array.getElem_mem lastPos.isLt)
    let entries := map.entries.set (Fin.mk lastIdx lastIdxValid) (key, value)
    { map with entries, validity := ?_ }
  else
    map
where finally
  have _ := map.validity
  grind

/--
Removes a key and all its values from the map. This function rebuilds the entire
`entries` array and `indexes` map from scratch by filtering out all pairs whose
key matches, then re-inserting the survivors.
-/
@[inline]
def erase [EquivBEq α] [LawfulHashable α] (map : MultiMap α β) (key : α) : MultiMap α β :=
  if key ∉ map then
    map
  else
    map.entries.filter (fun (k, _) => !(k == key))
      |>.foldl (fun acc (k, v) => acc.insert k v) empty

/--
Gets the number of entries in the map.
-/
@[inline]
def size (map : MultiMap α β) : Nat :=
  map.entries.size

/--
Checks if the map is empty.
-/
@[inline]
def isEmpty (map : MultiMap α β) : Bool :=
  map.entries.isEmpty

/--
Converts the multimap to an array of key-value pairs (flattened).
-/
def toArray (map : MultiMap α β) : Array (α × β) :=
  map.entries

/--
Converts the multimap to a list of key-value pairs (flattened).
-/
def toList (map : MultiMap α β) : List (α × β) :=
  map.entries.toList

/--
Merges two multimaps, combining values for shared keys.
`m2`'s entries are appended after `m1`'s, preserving insertion order.
-/
def merge [EquivBEq α] [LawfulHashable α] (m1 m2 : MultiMap α β) : MultiMap α β :=
  m2.entries.foldl (fun acc (k, v) => acc.insert k v) m1

instance : EmptyCollection (MultiMap α β) :=
  ⟨MultiMap.empty⟩

instance [EquivBEq α] [LawfulHashable α] : Singleton (α × β) (MultiMap α β) :=
  ⟨fun ⟨a, b⟩ => (∅ : MultiMap α β).insert a b⟩

instance [EquivBEq α] [LawfulHashable α] : Insert (α × β) (MultiMap α β) :=
  ⟨fun ⟨a, b⟩ m => m.insert a b⟩

instance [EquivBEq α] [LawfulHashable α] : Union (MultiMap α β) :=
  ⟨merge⟩

instance [Monad m] : ForIn m (MultiMap α β) (α × β) where
  forIn map b f := forIn map.entries b f

end MultiMap
end Std

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,134 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Data
public import Std.Internal.Http.Internal
public section
/-!
# HTTP/1.1 Configuration
This module defines the configuration options for HTTP/1.1 protocol processing,
including connection limits, header constraints, and various size limits.
-/
namespace Std.Http.Protocol.H1
set_option linter.all true
open Std Internal Parsec ByteArray
open Internal
/--
Connection limits and parser bounds configuration.
All limits are hard caps: exceeding them fails the parse or the connection
rather than truncating input.
-/
structure Config where
  /--
  Maximum number of requests (server) or responses (client) per connection.
  -/
  maxMessages : Nat := 100
  /--
  Maximum number of headers allowed per message.
  -/
  maxHeaders : Nat := 100
  /--
  Maximum aggregate byte size of all header field lines in a single message
  (name + value bytes plus 4 bytes per line for `: ` and `\r\n`). Default: 64 KiB.
  -/
  maxHeaderBytes : Nat := 65536
  /--
  Whether to enable keep-alive connections by default.
  -/
  enableKeepAlive : Bool := true
  /--
  The `Server` header value injected into outgoing responses (receiving mode) or the
  `User-Agent` header value injected into outgoing requests (sending mode).
  `none` suppresses the header entirely.
  -/
  agentName : Option Header.Value := none
  /--
  Maximum length of request URI (default: 8192 bytes).
  -/
  maxUriLength : Nat := 8192
  /--
  Maximum number of bytes consumed while parsing request/status start-lines (default: 8192 bytes).
  -/
  maxStartLineLength : Nat := 8192
  /--
  Maximum length of header field name (default: 256 bytes).
  -/
  maxHeaderNameLength : Nat := 256
  /--
  Maximum length of header field value (default: 8192 bytes).
  -/
  maxHeaderValueLength : Nat := 8192
  /--
  Maximum number of spaces in delimiter sequences, e.g. optional whitespace
  around header values (default: 16).
  -/
  maxSpaceSequence : Nat := 16
  /--
  Maximum number of leading empty lines (bare CRLF) to skip before a request-line
  (RFC 9112 §2.2 robustness). Default: 8.
  -/
  maxLeadingEmptyLines : Nat := 8
  /--
  Maximum number of extensions on a single chunk-size line (default: 16).
  -/
  maxChunkExtensions : Nat := 16
  /--
  Maximum length of chunk extension name (default: 256 bytes).
  -/
  maxChunkExtNameLength : Nat := 256
  /--
  Maximum length of chunk extension value (default: 256 bytes).
  -/
  maxChunkExtValueLength : Nat := 256
  /--
  Maximum number of bytes consumed while parsing one chunk-size line with extensions (default: 8192 bytes).
  -/
  maxChunkLineLength : Nat := 8192
  /--
  Maximum allowed chunk payload size in bytes (default: 8 MiB).
  -/
  maxChunkSize : Nat := 8 * 1024 * 1024
  /--
  Maximum allowed total body size per message in bytes (default: 64 MiB).
  This limit applies across all body framing modes. For chunked transfer encoding,
  chunk-size lines (including extensions) and the trailer section also count toward
  this limit, so the total wire bytes consumed by the body cannot exceed this value.
  -/
  maxBodySize : Nat := 64 * 1024 * 1024
  /--
  Maximum length of reason phrase (default: 512 bytes).
  -/
  maxReasonPhraseLength : Nat := 512
  /--
  Maximum number of trailer headers (default: 20).
  -/
  maxTrailerHeaders : Nat := 20
end Std.Http.Protocol.H1

View File

@@ -0,0 +1,110 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Time
public import Std.Internal.Http.Data
public import Std.Internal.Http.Internal
public import Std.Internal.Http.Protocol.H1.Parser
public import Std.Internal.Http.Protocol.H1.Config
public import Std.Internal.Http.Protocol.H1.Message
public section
/-!
# HTTP/1.1 Errors
This module defines the error types for HTTP/1.1 protocol processing,
including parsing errors, timeout errors, and connection errors.
-/
namespace Std.Http.Protocol.H1
set_option linter.all true
/--
Specific HTTP processing errors with detailed information.
Constructors map onto protocol-level failure modes; see `ToString Error`
for the human-readable rendering.
-/
inductive Error
  /--
  Malformed start line (request-line or status-line).
  -/
  | invalidStatusLine
  /--
  Invalid or malformed header.
  -/
  | invalidHeader
  /--
  Request timeout occurred.
  -/
  | timeout
  /--
  Request entity too large.
  -/
  | entityTooLarge
  /--
  Request URI is too long.
  -/
  | uriTooLong
  /--
  Unsupported HTTP version.
  -/
  | unsupportedVersion
  /--
  Invalid chunk encoding.
  -/
  | invalidChunk
  /--
  Connection closed.
  -/
  | connectionClosed
  /--
  Bad request or response message.
  -/
  | badMessage
  /--
  The number of header fields in the message exceeds the configured limit.
  Maps to HTTP 431 Request Header Fields Too Large.
  -/
  | tooManyHeaders
  /--
  The aggregate byte size of all header fields exceeds the configured limit.
  Maps to HTTP 431 Request Header Fields Too Large.
  -/
  | headersTooLarge
  /--
  Generic error with message.
  -/
  | other (message : String)
  deriving Repr, BEq
-- Human-readable rendering of each `Error` constructor.
instance : ToString Error where
  toString e :=
    match e with
    | .invalidStatusLine => "Invalid status line"
    | .invalidHeader => "Invalid header"
    | .timeout => "Timeout"
    | .entityTooLarge => "Entity too large"
    | .uriTooLong => "URI too long"
    | .unsupportedVersion => "Unsupported version"
    | .invalidChunk => "Invalid chunk"
    | .connectionClosed => "Connection closed"
    | .badMessage => "Bad message"
    | .tooManyHeaders => "Too many headers"
    | .headersTooLarge => "Headers too large"
    | .other msg => s!"Other error: {msg}"

View File

@@ -0,0 +1,73 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Time
public import Std.Internal.Http.Data
public import Std.Internal.Http.Internal
public import Std.Internal.Http.Protocol.H1.Parser
public import Std.Internal.Http.Protocol.H1.Config
public import Std.Internal.Http.Protocol.H1.Message
public import Std.Internal.Http.Protocol.H1.Error
public section
/-!
# HTTP/1.1 Events
This module defines the events that can occur during HTTP/1.1 message processing,
including header completion and control/error signals.
-/
namespace Std.Http.Protocol.H1
set_option linter.all true
/--
Events emitted during HTTP message processing.
Parameterized by the message `Direction` so `endHeaders` carries the matching
head type (`Request.Head` when receiving, `Response.Head` when sending).
-/
inductive Event (dir : Direction)
  /--
  Indicates that all headers have been successfully parsed.
  -/
  | endHeaders (head : Message.Head dir)
  /--
  Signals that additional input data is required to continue processing.
  `size` is a hint for how many bytes are wanted, when known.
  -/
  | needMoreData (size : Option Nat)
  /--
  Indicates a failure during parsing or processing.
  -/
  | failed (err : Error)
  /--
  Requests that the connection be closed.
  -/
  | close
  /--
  The body should be closed.
  -/
  | closeBody
  /--
  Indicates that a response is required.
  -/
  | needAnswer
  /--
  Indicates readiness to process the next message.
  -/
  | next
  /--
  Signals that an `Expect: 100-continue` decision is pending.
  -/
  | «continue»
  deriving Inhabited, Repr

View File

@@ -0,0 +1,139 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
import Init.Data.Array
public import Std.Internal.Http.Data
public section
/-!
# Message
This module provides types and operations for HTTP/1.1 messages, centered around the `Direction`
type which models the server's role in message exchange: `Direction.receiving` for parsing incoming
requests from clients, and `Direction.sending` for generating outgoing responses to clients.
The `Message.Head` type is parameterized by `Direction` and resolves to `Request.Head` or
`Response.Head` accordingly, enabling generic code that works uniformly across both phases
while exposing common operations such as headers, version, and `shouldKeepAlive`
-/
namespace Std.Http.Protocol.H1
set_option linter.all true
/--
Direction of message flow from the server's perspective.
-/
inductive Direction
  /--
  Receiving and parsing incoming requests from clients.
  -/
  | receiving
  /--
  Client perspective: writing outgoing requests and reading incoming responses.
  NOTE(review): this is phrased from the client's perspective while the type doc
  says "server's perspective" (sending = generating outgoing responses) — confirm wording.
  -/
  | sending
  deriving BEq
/--
Inverts the message direction: `receiving ↦ sending` and `sending ↦ receiving`.
-/
@[expose]
abbrev Direction.swap : Direction → Direction
  | .receiving => .sending
  | .sending => .receiving
/--
Gets the message head type based on direction:
`Request.Head` when receiving, `Response.Head` when sending.
-/
@[expose]
def Message.Head : Direction → Type
  | .receiving => Request.Head
  | .sending => Response.Head
/--
Gets the headers of a `Message`.
Dispatches on `dir` since `Message.Head dir` unfolds to a request or response head.
-/
def Message.Head.headers (m : Message.Head dir) : Headers :=
  match dir with
  | .receiving => Request.Head.headers m
  | .sending => Response.Head.headers m
/--
Gets the version of a `Message`.
Dispatches on `dir` like `Message.Head.headers`.
-/
def Message.Head.version (m : Message.Head dir) : Version :=
  match dir with
  | .receiving => Request.Head.version m
  | .sending => Response.Head.version m
/--
Determines the message body size based on the `Content-Length` header and the `Transfer-Encoding` (chunked) flag.
Returns `none` for any ambiguous framing (multiple `Content-Length` values,
mixed `Transfer-Encoding` + `Content-Length`, multiple `Transfer-Encoding`
headers), which callers should treat as a malformed message.
-/
def Message.Head.getSize (message : Message.Head dir) (allowEOFBody : Bool) : Option Body.Length :=
  let contentLength := message.headers.getAll? .contentLength
  match message.headers.getAll? .transferEncoding with
  | none =>
    match contentLength with
    | some #[cl] => .fixed <$> cl.value.toNat?
    | some _ => none -- To avoid request smuggling with malformed/multiple content-length headers.
    -- NOTE(review): no Content-Length with `allowEOFBody` yields `.fixed 0` — confirm that
    -- EOF-delimited bodies are represented elsewhere and this is the intended encoding.
    | none => if allowEOFBody then some (.fixed 0) else none
  -- Single transfer-encoding header.
  | some #[header] =>
    let te := Header.TransferEncoding.parse header
    match Header.TransferEncoding.isChunked <$> te, contentLength with
    | some true, none =>
      -- HTTP/1.0 does not define chunked transfer encoding (RFC 2068 §19.4.6).
      -- A server MUST NOT use chunked with an HTTP/1.0 peer; likewise, an
      -- HTTP/1.0 request carrying Transfer-Encoding: chunked is malformed.
      if message.version == .v10 then none else some .chunked
    | _, _ => none -- To avoid request smuggling when TE and CL are mixed.
  -- We disallow multiple transfer-encoding headers.
  | some _ => none
/--
Checks whether the message indicates that the connection should be kept alive.

HTTP/1.1 defaults to keep-alive unless a `Connection: close` token is present;
otherwise (e.g. HTTP/1.0) keep-alive requires an explicit `keep-alive` token.
An unparsable `Connection` header yields `false` (close — the safe default).
-/
def Message.Head.shouldKeepAlive (message : Message.Head dir) : Bool :=
  -- Collect tokens from every `Connection` header value; `none` marks a parse failure.
  let tokens? : Option (Array String) :=
    match message.headers.getAll? .connection with
    | none => some #[]
    | some values =>
      values.foldl (fun acc raw => do
        let acc ← acc
        let parsed ← Header.Connection.parse raw
        pure (acc ++ parsed.tokens)
      ) (some #[])
  match tokens? with
  | none => false
  | some tokens =>
    if message.version == .v11 then
      !tokens.any (· == "close")
    else
      tokens.any (· == "keep-alive")
-- Instances for `Message.Head` delegate to the underlying request/response head
-- by case analysis on the direction.
instance : Repr (Message.Head dir) :=
  match dir with
  | .receiving => inferInstanceAs (Repr Request.Head)
  | .sending => inferInstanceAs (Repr Response.Head)
instance : Internal.Encode .v11 (Message.Head dir) :=
  match dir with
  | .receiving => inferInstanceAs (Internal.Encode .v11 Request.Head)
  | .sending => inferInstanceAs (Internal.Encode .v11 Response.Head)
-- The empty receiving head defaults to `GET` with HTTP/1.1.
instance : EmptyCollection (Message.Head dir) where
  emptyCollection :=
    match dir with
    | .receiving => { method := .get, version := .v11 }
    | .sending => {}

View File

@@ -0,0 +1,548 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Parsec
public import Std.Internal.Http.Data
public import Std.Internal.Parsec.ByteArray
public import Std.Internal.Http.Protocol.H1.Config
/-!
This module defines parsers for HTTP/1.1 request and response lines, headers, and body framing. The
reference used is https://httpwg.org/specs/rfc9112.html.
-/
namespace Std.Http.Protocol.H1
open Std Internal Parsec ByteArray Internal Internal.Char
set_option linter.all true
/--
Checks if a byte may appear inside a field value.
This parser enforces strict ASCII-only field values and allows only `field-content`
(`HTAB / SP / VCHAR`).
-/
@[inline]
def isFieldVChar (c : UInt8) : Bool :=
  -- Delegates to the char-level predicate; `Char.ofUInt8` maps the raw byte.
  fieldContent (Char.ofUInt8 c)
/--
Checks if a byte may appear unescaped inside a quoted-string value.
Allows `HTAB / SP / %x21 / %x23-5B / %x5D-7E` (strict ASCII-only; no obs-text).
-/
@[inline]
def isQdText (c : UInt8) : Bool :=
  qdtext (Char.ofUInt8 c)
/--
Checks if a byte is optional whitespace (`OWS = SP / HTAB`, RFC 9110 §5.6.3).
-/
@[inline]
def isOwsByte (c : UInt8) : Bool :=
  ows (Char.ofUInt8 c)
-- Parser blocks
/--
Repeatedly applies `parser` until it returns `none` or the `maxCount` limit is
exceeded. Returns the collected results as an array.
-/
partial def manyItems {α : Type} (parser : Parser (Option α)) (maxCount : Nat) : Parser (Array α) := do
  let rec go (acc : Array α) : Parser (Array α) := do
    -- Turn the `Option`-returning step into a backtrackable parser failure so
    -- `optional` cleanly distinguishes "no more items" from a hard error.
    let step ← optional <| attempt do
      match ← parser with
      | none => fail "end of items"
      | some x => return x
    match step with
    | none =>
      return acc
    | some x =>
      let acc := acc.push x
      if acc.size > maxCount then
        fail s!"too many items: {acc.size} > {maxCount}"
      go acc
  go #[]

/--
Lifts an `Option` into the parser monad, failing with a generic message if the value is `none`.
-/
def liftOption (x : Option α) : Parser α :=
  if let some res := x then
    return res
  else
    fail "expected value but got none"
/--
Parses an HTTP token (RFC 9110 §5.6.2): one or more token characters, up to `limit` bytes.
Fails if the input starts with a non-token character or is empty.
-/
@[inline]
def parseToken (limit : Nat) : Parser ByteSlice :=
  takeWhileUpTo1 (fun c => tchar (Char.ofUInt8 c)) limit

/--
Parses a line terminator.
-/
@[inline]
def crlf : Parser Unit := do
  skipBytes "\r\n".toUTF8

/--
Consumes and ignores empty lines (`CRLF`) that appear before a request-line.
https://httpwg.org/specs/rfc9112.html#rfc.section.2.2:
"In the interest of robustness, a server that is expecting to receive and parse a request-line SHOULD
ignore at least one empty line (CRLF) received prior to the request-line."
-/
def skipLeadingRequestEmptyLines (limits : H1.Config) : Parser Unit := do
  let mut count := 0
  while (← peekWhen? (· == '\r'.toUInt8)).isSome do
    if count >= limits.maxLeadingEmptyLines then
      fail "too many leading empty lines"
    crlf
    count := count + 1

/--
Parses a single space (SP, 0x20).
-/
@[inline]
def sp : Parser Unit :=
  skipByte ' '.toUInt8

/--
Parses optional whitespace (OWS = *(SP / HTAB), RFC 9110 §5.6.3), bounded by
`limits.maxSpaceSequence`. Fails if more whitespace follows the limit, so oversized
padding is rejected rather than silently truncated.
-/
@[inline]
def ows (limits : H1.Config) : Parser Unit := do
  discard <| takeWhileUpTo isOwsByte limits.maxSpaceSequence
  if (← peekWhen? isOwsByte) |>.isSome then
    fail "invalid space sequence"
  else
    pure ()
/--
Parses a single ASCII hex digit and returns its numeric value (`0`–`15`).
-/
def hexDigit : Parser UInt8 := do
  let b ← any
  if isHexDigitByte b then
    if b ≥ '0'.toUInt8 && b ≤ '9'.toUInt8 then return b - '0'.toUInt8
    else if b ≥ 'A'.toUInt8 && b ≤ 'F'.toUInt8 then return b - 'A'.toUInt8 + 10
    else return b - 'a'.toUInt8 + 10
  else fail s!"invalid hex digit {Char.ofUInt8 b |>.quote}"

/--
Parses a hexadecimal integer (one or more hex digits, up to 16 digits).
Used for chunk-size lines in chunked transfer encoding.
-/
partial def hex : Parser Nat := do
  let rec go (acc : Nat) (count : Nat) : Parser Nat := do
    match ← optional (attempt hexDigit) with
    | some d =>
      if count + 1 > 16 then
        fail "chunk size too large"
      else
        go (acc * 16 + d.toNat) (count + 1)
    | none =>
      if count = 0 then
        -- Preserve EOF as incremental chunk-size parsing can request more data.
        -- For non-EOF invalid bytes, keep the specific parse failure.
        let _ ← peek!
        fail "expected hex digit"
      else
        return acc
  go 0 0
-- Actual parsers
/--
Parses `HTTP-version = HTTP-name "/" DIGIT "." DIGIT` and returns the major and
minor version numbers as a pair.
-/
def parseHttpVersionNumber : Parser (Nat × Nat) := do
  skipBytes "HTTP/".toUTF8
  let major ← digit
  skipByte '.'.toUInt8
  let minor ← digit
  -- `digit` yields the raw character; subtract '0' (48) to get the numeric value.
  pure ((major.toNat - 48), (minor.toNat - 48))

/--
Parses an HTTP version string and returns the corresponding `Version` value.
Fails if the version is not recognized by `Version.ofNumber?`.
-/
def parseHttpVersion : Parser Version := do
  let (major, minor) ← parseHttpVersionNumber
  liftOption <| Version.ofNumber? major minor
/-
method = token
Every branch is wrapped in `attempt` so that `<|>` always backtracks on
failure, even after consuming bytes. This is strictly necessary only for the
P-group (POST / PUT / PATCH) which share a common first byte, but wrapping
all alternatives keeps the parser defensively correct if new methods are
added in the future.
-/
def parseMethod : Parser Method :=
  (attempt <| skipBytes "GET".toUTF8 <&> fun _ => Method.get)
  <|> (attempt <| skipBytes "HEAD".toUTF8 <&> fun _ => Method.head)
  <|> (attempt <| skipBytes "DELETE".toUTF8 <&> fun _ => Method.delete)
  <|> (attempt <| skipBytes "TRACE".toUTF8 <&> fun _ => Method.trace)
  <|> (attempt <| skipBytes "ACL".toUTF8 <&> fun _ => Method.acl)
  <|> (attempt <| skipBytes "QUERY".toUTF8 <&> fun _ => Method.query)
  <|> (attempt <| skipBytes "SEARCH".toUTF8 <&> fun _ => Method.search)
  <|> (attempt <| skipBytes "BASELINE-CONTROL".toUTF8 <&> fun _ => Method.baselineControl)
  <|> (attempt <| skipBytes "BIND".toUTF8 <&> fun _ => Method.bind)
  <|> (attempt <| skipBytes "CONNECT".toUTF8 <&> fun _ => Method.connect)
  <|> (attempt <| skipBytes "CHECKIN".toUTF8 <&> fun _ => Method.checkin)
  <|> (attempt <| skipBytes "CHECKOUT".toUTF8 <&> fun _ => Method.checkout)
  <|> (attempt <| skipBytes "COPY".toUTF8 <&> fun _ => Method.copy)
  <|> (attempt <| skipBytes "LABEL".toUTF8 <&> fun _ => Method.label)
  <|> (attempt <| skipBytes "LINK".toUTF8 <&> fun _ => Method.link)
  <|> (attempt <| skipBytes "LOCK".toUTF8 <&> fun _ => Method.lock)
  <|> (attempt <| skipBytes "MERGE".toUTF8 <&> fun _ => Method.merge)
  <|> (attempt <| skipBytes "MKACTIVITY".toUTF8 <&> fun _ => Method.mkactivity)
  <|> (attempt <| skipBytes "MKCALENDAR".toUTF8 <&> fun _ => Method.mkcalendar)
  <|> (attempt <| skipBytes "MKCOL".toUTF8 <&> fun _ => Method.mkcol)
  <|> (attempt <| skipBytes "MKREDIRECTREF".toUTF8 <&> fun _ => Method.mkredirectref)
  <|> (attempt <| skipBytes "MKWORKSPACE".toUTF8 <&> fun _ => Method.mkworkspace)
  <|> (attempt <| skipBytes "MOVE".toUTF8 <&> fun _ => Method.move)
  <|> (attempt <| skipBytes "OPTIONS".toUTF8 <&> fun _ => Method.options)
  <|> (attempt <| skipBytes "ORDERPATCH".toUTF8 <&> fun _ => Method.orderpatch)
  <|> (attempt <| skipBytes "POST".toUTF8 <&> fun _ => Method.post)
  <|> (attempt <| skipBytes "PUT".toUTF8 <&> fun _ => Method.put)
  <|> (attempt <| skipBytes "PATCH".toUTF8 <&> fun _ => Method.patch)
  <|> (attempt <| skipBytes "PRI".toUTF8 <&> fun _ => Method.pri)
  <|> (attempt <| skipBytes "PROPFIND".toUTF8 <&> fun _ => Method.propfind)
  <|> (attempt <| skipBytes "PROPPATCH".toUTF8 <&> fun _ => Method.proppatch)
  <|> (attempt <| skipBytes "REBIND".toUTF8 <&> fun _ => Method.rebind)
  <|> (attempt <| skipBytes "REPORT".toUTF8 <&> fun _ => Method.report)
  <|> (attempt <| skipBytes "UNBIND".toUTF8 <&> fun _ => Method.unbind)
  <|> (attempt <| skipBytes "UNCHECKOUT".toUTF8 <&> fun _ => Method.uncheckout)
  <|> (attempt <| skipBytes "UNLINK".toUTF8 <&> fun _ => Method.unlink)
  <|> (attempt <| skipBytes "UNLOCK".toUTF8 <&> fun _ => Method.unlock)
  <|> (attempt <| skipBytes "UPDATEREDIRECTREF".toUTF8 <&> fun _ => Method.updateredirectref)
  <|> (attempt <| skipBytes "UPDATE".toUTF8 <&> fun _ => Method.update)
  <|> (attempt <| skipBytes "VERSION-CONTROL".toUTF8 <&> fun _ => Method.versionControl)
  -- Fallback: consume the method token (bounded) so the failure points past the word.
  <|> (parseToken 64 *> fail "unrecognized method")
/--
Parses a request-target URI, up to `limits.maxUriLength` bytes.
Fails with `"uri too long"` if the target exceeds the configured limit.
-/
def parseURI (limits : H1.Config) : Parser ByteArray := do
  let uri ← takeUntilUpTo (· == ' '.toUInt8) limits.maxUriLength
  if uri.size == limits.maxUriLength then
    -- Exactly at the limit: only acceptable if the very next byte is the SP delimiter.
    if (← peekWhen? (· != ' '.toUInt8)) |>.isSome then
      fail "uri too long"
  return uri.toByteArray

/--
Shared core for request-line parsing: parses `request-target SP HTTP-version CRLF`
and returns the `RequestTarget` together with the raw major/minor version numbers.
Both `parseRequestLine` and `parseRequestLineRawVersion` call this after consuming
the method token, keeping URI validation and version parsing in one place.
-/
private def parseRequestLineBody (limits : H1.Config) : Parser (RequestTarget × Nat × Nat) := do
  let rawUri ← parseURI limits <* sp
  let uri ← match (Std.Http.URI.Parser.parseRequestTarget <* eof).run rawUri with
    | .ok res => pure res
    | .error res => fail res
  let versionPair ← parseHttpVersionNumber <* crlf
  return (uri, versionPair)

/--
Parses a request line and returns a fully-typed `Request.Head`.
`request-line = method SP request-target SP HTTP-version`
Only HTTP/1.1 and HTTP/1.0 are accepted; other versions fail.
-/
public def parseRequestLine (limits : H1.Config) : Parser Request.Head := do
  skipLeadingRequestEmptyLines limits
  let method ← parseMethod <* sp
  let (uri, (major, minor)) ← parseRequestLineBody limits
  if major == 1 ∧ minor == 1 then
    return ⟨method, .v11, uri, .empty⟩
  else if major == 1 ∧ minor == 0 then
    return ⟨method, .v10, uri, .empty⟩
  else
    fail "unsupported HTTP version"

/--
Parses a request line and returns the recognized HTTP method and version when available.
request-line = method SP request-target SP HTTP-version
Unlike `parseRequestLine`, an unknown version yields `none` instead of failing.
-/
public def parseRequestLineRawVersion (limits : H1.Config) : Parser (Method × RequestTarget × Option Version) := do
  skipLeadingRequestEmptyLines limits
  let method ← parseMethod <* sp
  let (uri, (major, minor)) ← parseRequestLineBody limits
  return (method, uri, Version.ofNumber? major minor)
/--
Parses a single header field line.
`field-line = field-name ":" OWS field-value OWS`
The value is decoded as UTF-8 and trailing ASCII whitespace is stripped.
-/
def parseFieldLine (limits : H1.Config) : Parser (String × String) := do
  let name ← parseToken limits.maxHeaderNameLength
  let value ← skipByte ':'.toUInt8 *> ows limits *> optional (takeWhileUpTo isFieldVChar limits.maxHeaderValueLength) <* ows limits
  let name ← liftOption <| String.fromUTF8? name.toByteArray
  -- An absent value (immediately OWS/CRLF) decodes to the empty string.
  let value ← liftOption <| String.fromUTF8? <| value.map (·.toByteArray) |>.getD .empty
  let value := value.trimAsciiEnd.toString
  return (name, value)

/--
Parses a single header field line, or returns `none` when it sees the blank line that
terminates the header section.
```
field-line = field-name ":" OWS field-value OWS CRLF
```
-/
public def parseSingleHeader (limits : H1.Config) : Parser (Option (String × String)) := do
  let next ← peek?
  if next == some '\r'.toUInt8 ∨ next == some '\n'.toUInt8 then
    crlf
    pure none
  else
    some <$> (parseFieldLine limits <* crlf)
/--
Parses a backslash-escaped character inside a quoted-string.
`quoted-pair = "\" ( HTAB / SP / VCHAR )` — strict ASCII-only (no obs-text).
-/
def parseQuotedPair : Parser UInt8 := do
  skipByte '\\'.toUInt8
  let b ← any
  if quotedPairChar (Char.ofUInt8 b) then
    return b
  else
    fail s!"invalid quoted-pair byte: {Char.ofUInt8 b |>.quote}"
/--
Parses a quoted-string value, unescaping quoted-pairs.
`quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE`
-/
partial def parseQuotedString (maxLength : Nat) : Parser String := do
  skipByte '"'.toUInt8
  -- `length` counts unescaped bytes accumulated so far; it is checked against
  -- `maxLength` before every push so the buffer never exceeds the limit.
  let rec loop (buf : ByteArray) (length : Nat) : Parser ByteArray := do
    let b ← any
    if b == '"'.toUInt8 then
      -- Closing DQUOTE: the accumulated bytes form the string value.
      return buf
    else if b == '\\'.toUInt8 then
      -- quoted-pair: the next byte is taken literally if it is HTAB / SP / VCHAR.
      let next ← any
      if quotedPairChar (Char.ofUInt8 next)
      then
        let length := length + 1
        if length > maxLength then
          fail "quoted-string too long"
        else
          loop (buf.push next) length
      else fail s!"invalid quoted-pair byte: {Char.ofUInt8 next |>.quote}"
    else if isQdText b then
      let length := length + 1
      if length > maxLength then
        fail "quoted-string too long"
      else
        loop (buf.push b) length
    else
      fail s!"invalid qdtext byte: {Char.ofUInt8 b |>.quote}"
  liftOption <| String.fromUTF8? (← loop .empty 0)
/--
Parses a single chunk extension.
`chunk-ext = *( BWS ";" BWS chunk-ext-name [ BWS "=" BWS chunk-ext-val ] )`
-/
def parseChunkExt (limits : H1.Config) : Parser (Chunk.ExtensionName × Option Chunk.ExtensionValue) := do
  ows limits *> skipByte ';'.toUInt8 *> ows limits
  let name ← (liftOption =<< String.fromUTF8? <$> ByteSlice.toByteArray <$> parseToken limits.maxChunkExtNameLength) <* ows limits
  let some name := Chunk.ExtensionName.ofString? name
    | fail "invalid extension name"
  if (← peekWhen? (· == '='.toUInt8)) |>.isSome then
    -- RFC 9112 §7.1.1: BWS is allowed around "=".
    -- The `<* ows limits` after the name already consumed any trailing whitespace,
    -- so these ows calls are no-ops in practice, but kept for explicit grammar correspondence.
    ows limits *> skipByte '='.toUInt8 *> ows limits
    -- The extension value is either a quoted-string or a bare token.
    let value ← ows limits *> (parseQuotedString limits.maxChunkExtValueLength <|> liftOption =<< (String.fromUTF8? <$> ByteSlice.toByteArray <$> parseToken limits.maxChunkExtValueLength))
    let some value := Chunk.ExtensionValue.ofString? value
      | fail "invalid extension value"
    return (name, some value)
  return (name, none)
/--
Parses the size and extensions of a chunk.
`chunk-size = 1*HEXDIG` followed by optional chunk extensions and the terminating CRLF.
-/
public def parseChunkSize (limits : H1.Config) : Parser (Nat × Array (Chunk.ExtensionName × Option Chunk.ExtensionValue)) := do
  let size ← hex
  -- `attempt` backtracks on a partial match so a malformed extension does not
  -- consume bytes that belong to the CRLF.
  let ext ← manyItems (optional (attempt (parseChunkExt limits))) limits.maxChunkExtensions
  crlf
  return (size, ext)
/--
Result of parsing partial or complete information.
-/
public inductive TakeResult
/-- Every requested byte was available; `data` holds the complete span. -/
| complete (data : ByteSlice)
/-- Input ran out early; `data` holds what was read and `remaining` counts the bytes still owed. -/
| incomplete (data : ByteSlice) (remaining : Nat)
/--
Parses a single chunk in chunked transfer encoding. Returns `none` for the final
zero-sized chunk (the trailer section follows it), otherwise the chunk size, its
extensions, and its payload bytes.
-/
public def parseChunkPartial (limits : H1.Config) : Parser (Option (Nat × Array (Chunk.ExtensionName × Option Chunk.ExtensionValue) × ByteSlice)) := do
  let (size, ext) ← parseChunkSize limits
  if size == 0 then
    return none
  else
    let data ← take size
    return some ⟨size, ext, data⟩
/--
Parses fixed-size data that can be incomplete.
-/
public def parseFixedSizeData (size : Nat) : Parser TakeResult := fun it =>
-- No bytes buffered at all: report EOF so the caller can wait for more input.
if it.remainingBytes = 0 then
.error it .eof
-- Fewer bytes than requested: hand back everything available plus the deficit.
else if it.remainingBytes < size then
.success (it.forward it.remainingBytes) (.incomplete it.array[it.idx...(it.idx+it.remainingBytes)] (size - it.remainingBytes))
-- Enough bytes: consume exactly `size` and return the complete slice.
else
.success (it.forward size) (.complete (it.array[it.idx...(it.idx+size)]))
/--
Parses fixed-size chunk data that can be incomplete. When the data is complete,
the CRLF that terminates the chunk payload is consumed as well; an incomplete
read leaves the CRLF for a later call.
-/
public def parseChunkSizedData (size : Nat) : Parser TakeResult := do
  match ← parseFixedSizeData size with
  | .complete data => crlf *> return .complete data
  | .incomplete data res => return .incomplete data res
/--
Returns `true` if `name` (compared case-insensitively) is a field that MUST NOT appear in HTTP/1.1
trailer sections per RFC 9112 §6.5. Forbidden fields are those required for message framing
(`content-length`, `transfer-encoding`), routing (`host`), or connection management (`connection`).
-/
private def isForbiddenTrailerField (name : String) : Bool :=
  -- Membership test over the fixed deny-list, after case-folding the input.
  [ "content-length", "transfer-encoding", "host",
    "connection", "expect", "te",
    "authorization", "max-forwards", "cache-control",
    "content-encoding", "upgrade", "trailer"
  ].contains name.toLower
/--
Parses a trailer header (used after a chunked body), rejecting forbidden field names per RFC 9112
§6.5. Fields used for message framing (`content-length`, `transfer-encoding`), routing (`host`),
or connection management (`connection`, `te`, `upgrade`) are rejected to prevent trailer injection
attacks where a downstream proxy might re-interpret them.
-/
def parseTrailerHeader (limits : H1.Config) : Parser (Option (String × String)) := do
  let result ← parseSingleHeader limits
  if let some (name, _) := result then
    if isForbiddenTrailerField name then
      fail s!"forbidden trailer field: {name}"
  return result
/--
Parses trailer headers after a chunked body and returns them as an array of name-value pairs.
This is exposed for callers that need the trailer values directly (e.g. clients). The
internal protocol machine uses `parseLastChunkBody` instead, which discards trailer values.
-/
public def parseTrailers (limits : H1.Config) : Parser (Array (String × String)) := do
  let trailers ← manyItems (parseTrailerHeader limits) limits.maxTrailerHeaders
  crlf
  return trailers
/--
Returns `true` if `c` is a valid reason-phrase byte (`HTAB / SP / VCHAR`, strict ASCII-only).
-/
@[inline]
def isReasonPhraseByte (c : UInt8) : Bool :=
-- Reason-phrase shares the field-content byte set, so delegate to `fieldContent`.
fieldContent (Char.ofUInt8 c)
/--
Parses a reason phrase (text after status code).
Allows only `HTAB / SP / VCHAR` bytes (strict ASCII-only).
-/
def parseReasonPhrase (limits : H1.Config) : Parser String := do
  let bytes ← takeWhileUpTo isReasonPhraseByte limits.maxReasonPhraseLength
  liftOption <| String.fromUTF8? bytes.toByteArray
/--
Parses a status-code (3 decimal digits), the following reason phrase, and the
terminating CRLF; returns a typed `Status`.
-/
def parseStatusCode (limits : H1.Config) : Parser Status := do
  let d1 ← digit
  let d2 ← digit
  let d3 ← digit
  -- ASCII digits: subtract '0' (48) from each byte to get its numeric value.
  let code := (d1.toNat - 48) * 100 + (d2.toNat - 48) * 10 + (d3.toNat - 48)
  sp
  let phrase ← parseReasonPhrase limits <* crlf
  if h : IsValidReasonPhrase phrase then
    if let some status := Status.ofCode (some ⟨phrase, h⟩) code.toUInt16 then
      return status
  fail "invalid status code"
/--
Parses a status line and returns a fully-typed `Response.Head`.
`status-line = HTTP-version SP status-code SP [ reason-phrase ]`
Accepts only HTTP/1.1. For parsing where the version may be unrecognized and must be
mapped to an error event, use `parseStatusLineRawVersion`.
-/
public def parseStatusLine (limits : H1.Config) : Parser Response.Head := do
  let (major, minor) ← parseHttpVersionNumber <* sp
  let status ← parseStatusCode limits
  if major == 1 ∧ minor == 1 then
    return { status, version := .v11, headers := .empty }
  else if major == 1 ∧ minor == 0 then
    return { status, version := .v10, headers := .empty }
  else
    fail "unsupported HTTP version"
/--
Parses a status line and returns the status code plus recognized HTTP version when available.
Consumes and discards the reason phrase.
`status-line = HTTP-version SP status-code SP [ reason-phrase ] CRLF`
-/
public def parseStatusLineRawVersion (limits : H1.Config) : Parser (Status × Option Version) := do
  let (major, minor) ← parseHttpVersionNumber <* sp
  let status ← parseStatusCode limits
  -- Unrecognized versions map to `none` so the caller can emit an error event.
  return (status, Version.ofNumber? major minor)
/--
Parses the trailer section that follows the last chunk size line (`0\r\n`).
-/
public def parseLastChunkBody (limits : H1.Config) : Parser Unit := do
-- Trailer values are validated (forbidden fields rejected) but discarded;
-- callers needing the values should use `parseTrailers` instead.
discard <| manyItems (parseTrailerHeader limits) limits.maxTrailerHeaders
-- Final CRLF ends the chunked body.
crlf
end Std.Http.Protocol.H1

View File

@@ -0,0 +1,319 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Time
public import Std.Internal.Http.Data
public import Std.Internal.Http.Internal
public import Std.Internal.Http.Protocol.H1.Parser
public import Std.Internal.Http.Protocol.H1.Config
public import Std.Internal.Http.Protocol.H1.Message
public import Std.Internal.Http.Protocol.H1.Error
public section
/-!
# HTTP/1.1 Reader
This module defines the reader state machine for parsing incoming HTTP/1.1 messages.
It tracks the parsing state including start line, headers, and body handling for
both fixed-length and chunked transfer encodings.
-/
namespace Std.Http.Protocol.H1
set_option linter.all true
/--
The body-framing sub-state of the `Reader` state machine.
-/
inductive Reader.BodyState where
/--
Parse fixed-length body bytes, tracking the number of bytes remaining.
-/
| fixed (remaining : Nat)
/--
Parse the next chunk-size line in chunked transfer encoding.
-/
| chunkedSize
/--
Parse chunk data for the current chunk.
-/
| chunkedBody (ext : Array (Chunk.ExtensionName × Option Chunk.ExtensionValue)) (remaining : Nat)
/--
Parse body bytes until EOF (connection close).
-/
| closeDelimited
deriving Inhabited, Repr, BEq
/--
The state of the `Reader` state machine.
-/
inductive Reader.State (dir : Direction) : Type
  /--
  Initial state waiting for HTTP start line.
  -/
  | needStartLine : State dir
  /--
  State waiting for HTTP headers, tracking number of headers parsed.
  -/
  | needHeader : Nat → State dir
  /--
  Unified body-reading state.
  -/
  | readBody : Reader.BodyState → State dir
  /--
  Paused waiting for a `canContinue` decision, carrying the next state.
  -/
  | «continue» : State dir → State dir
  /--
  State waiting to be able to read new data.
  -/
  | pending : State dir
  /--
  State that it completed a single request or response and can go to the next one.
  -/
  | complete
  /--
  State that it has completed and cannot process more data.
  -/
  | closed
  /--
  The input is malformed.
  -/
  | failed (error : Error) : State dir
  deriving Inhabited, Repr, BEq
/--
Manages the reading state of the HTTP parsing and processing machine.
-/
structure Reader (dir : Direction) where
/--
The current state of the machine.
-/
state : Reader.State dir := match dir with | .receiving => .needStartLine | .sending => .pending
/--
The input byte array.
-/
input : ByteArray.Iterator := ByteArray.emptyWithCapacity 4096 |>.iter
/--
The incoming message head.
-/
messageHead : Message.Head dir := {}
/--
Count of messages that this connection has already parsed.
-/
messageCount : Nat := 0
/--
Number of body bytes read for the current message.
-/
bodyBytesRead : Nat := 0
/--
Number of header bytes accumulated for the current message.
Counts name + value bytes plus 4 bytes per line for `: ` and `\r\n`.
-/
headerBytesRead : Nat := 0
/--
Set when no further input bytes will arrive (the remote end has closed the connection).
-/
noMoreInput : Bool := false
namespace Reader
/--
Checks if the reader is in a closed state and cannot process more messages.
-/
@[inline]
def isClosed (reader : Reader dir) : Bool :=
match reader.state with
| .closed => true
| _ => false
/--
Checks if the reader has completed parsing the current message.
-/
@[inline]
def isComplete (reader : Reader dir) : Bool :=
match reader.state with
| .complete => true
| _ => false
/--
Checks if the reader has encountered an error.
-/
@[inline]
def hasFailed (reader : Reader dir) : Bool :=
match reader.state with
| .failed _ => true
| _ => false
/--
Feeds new data into the reader's input buffer.
If the current input is exhausted, replaces it; otherwise compacts the buffer
by discarding already-parsed bytes before appending.
-/
@[inline]
def feed (data : ByteArray) (reader : Reader dir) : Reader dir :=
{ reader with input :=
if reader.input.atEnd
then data.iter
else (reader.input.array.extract reader.input.pos reader.input.array.size ++ data).iter }
/--
Replaces the reader's input iterator with a new one.
-/
@[inline]
def setInput (input : ByteArray.Iterator) (reader : Reader dir) : Reader dir :=
{ reader with input }
/--
Updates the message head being constructed.
-/
@[inline]
def setMessageHead (messageHead : Message.Head dir) (reader : Reader dir) : Reader dir :=
{ reader with messageHead }
/--
Adds a header to the current message head.
-/
@[inline]
def addHeader (name : Header.Name) (value : Header.Value) (reader : Reader dir) : Reader dir :=
match dir with
| .sending | .receiving => { reader with messageHead := { reader.messageHead with headers := reader.messageHead.headers.insert name value } }
/--
Closes the reader, transitioning to the closed state.
-/
@[inline]
def close (reader : Reader dir) : Reader dir :=
{ reader with state := .closed, noMoreInput := true }
/--
Marks the current message as complete and prepares for the next message.
-/
@[inline]
def markComplete (reader : Reader dir) : Reader dir :=
{ reader with
state := .complete
messageCount := reader.messageCount + 1 }
/--
Transitions the reader to a failed state with the given error.
-/
@[inline]
def fail (error : Error) (reader : Reader dir) : Reader dir :=
{ reader with state := .failed error }
/--
Resets the reader to parse a new message on the same connection.
-/
@[inline]
def reset (reader : Reader dir) : Reader dir :=
{ reader with
state := .needStartLine
bodyBytesRead := 0
headerBytesRead := 0
messageHead := {} }
/--
Checks if more input is needed to continue parsing.
-/
@[inline]
def needsMoreInput (reader : Reader dir) : Bool :=
reader.input.atEnd && !reader.noMoreInput &&
match reader.state with
| .complete | .closed | .failed _ | .«continue» _ => false
| _ => true
/--
Returns the current parse error if the reader has failed.
-/
@[inline]
def getError (reader : Reader dir) : Option Error :=
match reader.state with
| .failed err => some err
| _ => none
/--
Gets the number of bytes remaining in the input buffer.
-/
@[inline]
def remainingBytes (reader : Reader dir) : Nat :=
reader.input.array.size - reader.input.pos
/--
Advances the input iterator by n bytes.
-/
@[inline]
def advance (n : Nat) (reader : Reader dir) : Reader dir :=
{ reader with input := reader.input.forward n }
/--
Transitions to the state for reading headers.
-/
@[inline]
def startHeaders (reader : Reader dir) : Reader dir :=
{ reader with state := .needHeader 0, bodyBytesRead := 0, headerBytesRead := 0 }
/--
Adds body bytes parsed for the current message.
-/
@[inline]
def addBodyBytes (n : Nat) (reader : Reader dir) : Reader dir :=
{ reader with bodyBytesRead := reader.bodyBytesRead + n }
/--
Adds header bytes accumulated for the current message.
-/
@[inline]
def addHeaderBytes (n : Nat) (reader : Reader dir) : Reader dir :=
{ reader with headerBytesRead := reader.headerBytesRead + n }
/--
Transitions to the state for reading a fixed-length body.
-/
@[inline]
def startFixedBody (size : Nat) (reader : Reader dir) : Reader dir :=
{ reader with state := .readBody (.fixed size) }
/--
Transitions to the state for reading chunked transfer encoding.
-/
@[inline]
def startChunkedBody (reader : Reader dir) : Reader dir :=
{ reader with state := .readBody .chunkedSize }
/--
Marks that no more input will be provided (connection closed).
-/
@[inline]
def markNoMoreInput (reader : Reader dir) : Reader dir :=
{ reader with noMoreInput := true }
/--
Checks if the connection should be kept alive for the next message.
-/
def shouldKeepAlive (reader : Reader dir) : Bool :=
reader.messageHead.shouldKeepAlive
end Reader

View File

@@ -0,0 +1,284 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Time
public import Std.Internal.Http.Data
public import Std.Internal.Http.Internal
public import Std.Internal.Http.Protocol.H1.Parser
public import Std.Internal.Http.Protocol.H1.Config
public import Std.Internal.Http.Protocol.H1.Message
public import Std.Internal.Http.Protocol.H1.Error
public section
/-!
# HTTP/1.1 Writer
This module defines the writer state machine for generating outgoing HTTP/1.1 messages.
It handles encoding headers and body content for both fixed-length and chunked
transfer encodings.
-/
namespace Std.Http.Protocol.H1
set_option linter.all true
open Internal
/--
The state of the `Writer` state machine.
-/
inductive Writer.State
/--
Initial state before any outgoing message has been prepared.
-/
| pending
/--
Waiting for the application to provide the outgoing message head via `send`.
-/
| waitingHeaders
/--
The message head has been provided; waiting for `shouldFlush` to become true before
serializing headers to output.
-/
| waitingForFlush
/--
Reserved; not currently entered by the state machine.
-/
| writingHeaders
/--
Writing the body output (either fixed-length or chunked).
-/
| writingBody (mode : Body.Length)
/--
Waiting for all buffered output to drain before transitioning to `complete`.
-/
| shuttingDown
/--
Completed writing a single message and ready to begin the next one.
-/
| complete
/--
Closed; no further data can be written.
-/
| closed
deriving Inhabited, Repr, BEq
/--
Manages the writing state of the HTTP generating and writing machine.
-/
structure Writer (dir : Direction) where
/--
Body chunks supplied by the user, accumulated before being flushed to output.
-/
userData : Array Chunk := .empty
/--
All the data produced by the writer, ready to be sent to the socket.
-/
outputData : ChunkedBuffer := .empty
/--
The state of the writer machine.
-/
state : Writer.State := match dir with | .receiving => .pending | .sending => .waitingHeaders
/--
When the user specifies the exact body size upfront, `Content-Length` framing is
used instead of chunked transfer encoding.
-/
knownSize : Option Body.Length := none
/--
The outgoing message that will be written to the output.
-/
messageHead : Message.Head dir.swap := {}
/--
Whether the user has called `send` to provide the outgoing message head.
-/
sentMessage : Bool := false
/--
Set when the user has finished sending body data, allowing fixed-size framing
to be determined upfront.
-/
userClosedBody : Bool := false
/--
When `true`, body bytes are intentionally omitted from the wire for this message
(e.g. HEAD responses), while headers/framing metadata may still describe the
hypothetical representation.
-/
omitBody : Bool := false
namespace Writer
/--
Returns `true` when no more user body data will arrive: either the user called
`closeBody`, or the writer has already transitioned to `complete` or `closed`.
Note: this does **not** mean the wire is ready to accept new bytes — a `closed`
writer cannot send anything. Use this to decide whether to flush pending body
data rather than to check writability.
-/
@[inline]
def noMoreUserData {dir} (writer : Writer dir) : Bool :=
match writer.state with
| .closed | .complete => true
| _ => writer.userClosedBody
/--
Checks if the writer is closed (cannot process more data).
-/
@[inline]
def isClosed (writer : Writer dir) : Bool :=
match writer.state with
| .closed => true
| _ => false
/--
Checks if the writer has completed processing a request.
-/
@[inline]
def isComplete (writer : Writer dir) : Bool :=
match writer.state with
| .complete => true
| _ => false
/--
Checks if the writer can accept more data from the user.
-/
@[inline]
def canAcceptData (writer : Writer dir) : Bool :=
match writer.state with
| .waitingHeaders => true
| .waitingForFlush => true
| .writingBody _ => !writer.userClosedBody
| _ => false
/--
Marks the body as closed, indicating no more user data will be added.
-/
@[inline]
def closeBody (writer : Writer dir) : Writer dir :=
{ writer with userClosedBody := true }
/--
Determines the transfer encoding mode based on explicit setting, body closure state, or defaults to chunked.
-/
def determineTransferMode (writer : Writer dir) : Body.Length :=
if let some mode := writer.knownSize then
mode
else if writer.userClosedBody then
let size := writer.userData.foldl (fun x y => x + y.data.size) 0
.fixed size
else
.chunked
/--
Adds user data chunks to the writer's buffer if the writer can accept data.
-/
@[inline]
def addUserData (data : Array Chunk) (writer : Writer dir) : Writer dir :=
if writer.canAcceptData then
{ writer with userData := writer.userData ++ data }
else
writer
/--
Writes accumulated user data to output using fixed-size encoding.
-/
def writeFixedBody (writer : Writer dir) (limitSize : Nat) : Writer dir × Nat :=
if writer.userData.size = 0 then
(writer, limitSize)
else
let (chunks, pending, totalSize) := writer.userData.foldl (fun (state : Array ByteArray × Array Chunk × Nat) chunk =>
let (acc, pending, size) := state
if size >= limitSize then
(acc, pending.push chunk, size)
else
let remaining := limitSize - size
let takeSize := min chunk.data.size remaining
let dataPart := chunk.data.extract 0 takeSize
let acc := if takeSize = 0 then acc else acc.push dataPart
let size := size + takeSize
if takeSize < chunk.data.size then
let pendingChunk : Chunk := { chunk with data := chunk.data.extract takeSize chunk.data.size }
(acc, pending.push pendingChunk, size)
else
(acc, pending, size)
) (#[], #[], 0)
let outputData := writer.outputData.append (ChunkedBuffer.ofArray chunks)
let remaining := limitSize - totalSize
({ writer with userData := pending, outputData }, remaining)
/--
Writes accumulated user data to output using chunked transfer encoding.
-/
def writeChunkedBody (writer : Writer dir) : Writer dir :=
if writer.userData.size = 0 then
writer
else
let data := writer.userData
{ writer with userData := #[], outputData := data.foldl (Encode.encode .v11) writer.outputData }
/--
Writes the final chunk terminator (0\r\n\r\n) and transitions to complete state.
-/
def writeFinalChunk (writer : Writer dir) : Writer dir :=
let writer := writer.writeChunkedBody
{ writer with
outputData := writer.outputData.write "0\r\n\r\n".toUTF8
state := .complete
}
/--
Extracts all accumulated output data and returns it with a cleared output buffer.
-/
@[inline]
def takeOutput (writer : Writer dir) : Option (Writer dir × ByteArray) :=
let output := writer.outputData.toByteArray
some ({ writer with outputData := ChunkedBuffer.empty }, output)
/--
Updates the writer's state machine to a new state.
-/
@[inline]
def setState (state : Writer.State) (writer : Writer dir) : Writer dir :=
{ writer with state }
/--
Writes the message headers to the output buffer.
-/
private def writeHeaders (messageHead : Message.Head dir.swap) (writer : Writer dir) : Writer dir :=
{ writer with outputData := Internal.Encode.encode (v := .v11) writer.outputData messageHead }
/--
Checks if the connection should be kept alive based on the Connection header.
-/
def shouldKeepAlive (writer : Writer dir) : Bool :=
writer.messageHead.headers.get? .connection
|>.map (fun v => v.value.toLower != "close")
|>.getD true
/--
Closes the writer, transitioning to the closed state.
-/
@[inline]
def close (writer : Writer dir) : Writer dir :=
{ writer with state := .closed }
end Writer

View File

@@ -0,0 +1,188 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Async
public import Std.Internal.Async.TCP
public import Std.Sync.CancellationToken
public import Std.Sync.Semaphore
public import Std.Internal.Http.Server.Config
public import Std.Internal.Http.Server.Handler
public import Std.Internal.Http.Server.Connection
public section
/-!
# HTTP Server
This module defines a simple, asynchronous HTTP/1.1 server implementation.
It provides the `Std.Http.Server` structure, which encapsulates all server state, and functions for
starting, managing, and gracefully shutting down the server.
The server runs entirely using `Async` and uses a shared `CancellationContext` to signal shutdowns.
Each active client connection is tracked, and the server automatically resolves its shutdown
promise once all connections have closed.
-/
namespace Std.Http
open Std.Internal.IO.Async TCP
set_option linter.all true
/--
The `Server` structure holds all state required to manage the lifecycle of an HTTP server, including
connection tracking and shutdown coordination.
-/
structure Server where
/--
The context used for shutting down all connections and the server.
-/
context : Std.CancellationContext
/--
Active HTTP connections
-/
activeConnections : Std.Mutex UInt64
/--
Semaphore used to enforce the maximum number of simultaneous active connections.
`none` means no connection limit.
-/
connectionLimit : Option Std.Semaphore
/--
Indicates when the server has successfully shut down.
-/
shutdownPromise : Std.Channel Unit
/--
Configuration of the server
-/
config : Std.Http.Config
namespace Server
/--
Create a new `Server` structure with an optional configuration.
-/
def new (config : Std.Http.Config := {}) : IO Server := do
  let context ← Std.CancellationContext.new
  let activeConnections ← Std.Mutex.new 0
  -- `maxConnections = 0` means "unlimited": no semaphore is created at all.
  let connectionLimit ←
    if config.maxConnections = 0 then
      pure none
    else
      some <$> Std.Semaphore.new config.maxConnections
  let shutdownPromise ← Std.Channel.new
  return { context, activeConnections, connectionLimit, shutdownPromise, config }
/--
Triggers cancellation of all requests and the accept loop in the server. This function should be used
in conjunction with `waitShutdown` to properly coordinate the shutdown sequence.
-/
@[inline]
def shutdown (s : Server) : Async Unit :=
s.context.cancel .shutdown
/--
Waits for the server to shut down. Blocks until another task or async operation calls the `shutdown` function.
-/
@[inline]
def waitShutdown (s : Server) : Async Unit := do
  Async.ofAsyncTask ((← s.shutdownPromise.recv).map Except.ok)
/--
Returns a `Selector` that waits for the server to shut down.
-/
@[inline]
def waitShutdownSelector (s : Server) : Selector Unit :=
s.shutdownPromise.recvSelector
/--
Triggers cancellation of all requests and the accept loop, then waits for the server to fully shut down.
This is a convenience function combining `shutdown` and then `waitShutdown`.
-/
@[inline]
def shutdownAndWait (s : Server) : Async Unit := do
s.context.cancel .shutdown
s.waitShutdown
/--
Runs `action` as one tracked connection: bumps the active-connection counter before
running and decrements it afterwards (even on failure), optionally releasing a
connection-limit permit. When the counter reaches zero while the server context is
cancelled, the shutdown promise is resolved so `waitShutdown` can return.
-/
@[inline]
private def frameCancellation (s : Server) (releaseConnectionPermit : Bool := false)
    (action : ContextAsync α) : ContextAsync α := do
  s.activeConnections.atomically (modify (· + 1))
  try
    action
  finally
    if releaseConnectionPermit then
      if let some limit := s.connectionLimit then
        limit.release
    s.activeConnections.atomically do
      modify (· - 1)
      -- Last connection gone AND shutdown requested: signal full shutdown.
      if (← get) = 0 ∧ (← s.context.isCancelled) then
        discard <| s.shutdownPromise.send ()
/--
Start a new HTTP/1.1 server on the given socket address. This function uses `Async` to handle tasks
and TCP connections, and returns a `Server` structure that can be used to cancel the server.
-/
def serve {σ : Type} [Handler σ]
    (addr : Net.SocketAddress)
    (handler : σ)
    (config : Config := {}) (backlog : UInt32 := 1024) : Async Server := do
  let httpServer ← Server.new config
  let server ← Socket.Server.mk
  server.bind addr
  server.listen backlog
  server.noDelay
  let runServer := do
    frameCancellation httpServer (action := do
      while true do
        -- When a connection limit is configured, block until a permit is available
        -- before accepting; remember whether we hold one so it can be released.
        let permitAcquired ←
          if let some limit := httpServer.connectionLimit then
            let permit ← limit.acquire
            await permit
            pure true
          else
            pure false
        -- Race the accept against cancellation of the server context.
        let result ← Selectable.one #[
          .case (server.acceptSelector) (fun x => pure <| some x),
          .case (← ContextAsync.doneSelector) (fun _ => pure none)
        ]
        match result with
        | some client =>
          -- Best-effort peer address; a failing getPeerName yields empty extensions.
          let extensions ← do
            match (← EIO.toBaseIO client.getPeerName) with
            | .ok addr => pure <| Extensions.empty.insert (Server.RemoteAddr.mk addr)
            | .error _ => pure Extensions.empty
          ContextAsync.background
            (frameCancellation httpServer (releaseConnectionPermit := permitAcquired)
              (action := do
                serveConnection client handler config extensions))
        | none =>
          -- Cancelled while holding an unused permit: give it back before exiting.
          if permitAcquired then
            if let some limit := httpServer.connectionLimit then
              limit.release
          break
      )
  background (runServer httpServer.context)
  return httpServer
end Std.Http.Server

View File

@@ -0,0 +1,196 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Time
public import Std.Internal.Http.Protocol.H1
public section
/-!
# Config
This module exposes the `Config`, a structure that describes timeout, request and headers
configuration of an HTTP Server.
-/
namespace Std.Http
set_option linter.all true
/--
Connection limits configuration with validation.
-/
structure Config where
  /--
  Maximum number of simultaneous active connections (default: 1024).
  Setting this to `0` disables the limit entirely: the server will accept any number of
  concurrent connections and no semaphore-based cap is enforced. Use with care — an
  unconstrained server may exhaust file descriptors or memory under adversarial load.
  -/
  maxConnections : Nat := 1024
  /--
  Maximum number of requests per connection.
  -/
  maxRequests : Nat := 100
  /--
  Maximum number of headers allowed per request.
  -/
  maxHeaders : Nat := 50
  /--
  Maximum aggregate byte size of all header field lines in a single message
  (name + value bytes plus 4 bytes per line for `: ` and `\r\n`). Default: 64 KiB.
  -/
  maxHeaderBytes : Nat := 65536
  /--
  Timeout (in milliseconds) for receiving additional data while a request is actively being
  processed (e.g. reading the request body). Applies after the request headers have been parsed
  and replaces the keep-alive timeout for the duration of the request.
  -/
  lingeringTimeout : Time.Millisecond.Offset := 10000
  /--
  Timeout for keep-alive connections.
  -/
  keepAliveTimeout : { x : Time.Millisecond.Offset // 0 < x } := ⟨12000, by decide⟩
  /--
  Maximum time (in milliseconds) allowed to receive the complete request headers after the first
  byte of a new request arrives. This prevents Slowloris-style attacks where a client sends bytes
  at a slow rate to hold a connection slot open without completing a request. Once a request starts,
  each individual read must complete within this window. Default: 5 seconds.
  -/
  headerTimeout : Time.Millisecond.Offset := 5000
  /--
  Whether to enable keep-alive connections by default.
  -/
  enableKeepAlive : Bool := true
  /--
  The maximum size that the connection can receive in a single recv call.
  -/
  maximumRecvSize : Nat := 8192
  /--
  Default buffer size for the connection.
  -/
  defaultPayloadBytes : Nat := 8192
  /--
  Whether to automatically generate the `Date` header in responses.
  -/
  generateDate : Bool := true
  /--
  The `Server` header value injected into outgoing responses.
  `none` suppresses the header entirely.
  -/
  serverName : Option Header.Value := some (.mk "LeanHTTP/1.1")
  /--
  Maximum length of request URI (default: 8192 bytes).
  -/
  maxUriLength : Nat := 8192
  /--
  Maximum number of bytes consumed while parsing request start-lines (default: 8192 bytes).
  -/
  maxStartLineLength : Nat := 8192
  /--
  Maximum length of header field name (default: 256 bytes).
  -/
  maxHeaderNameLength : Nat := 256
  /--
  Maximum length of header field value (default: 8192 bytes).
  -/
  maxHeaderValueLength : Nat := 8192
  /--
  Maximum number of spaces in delimiter sequences (default: 16).
  -/
  maxSpaceSequence : Nat := 16
  /--
  Maximum number of leading empty lines (bare CRLF) to skip before a request-line
  (RFC 9112 §2.2 robustness). Default: 8.
  -/
  maxLeadingEmptyLines : Nat := 8
  /--
  Maximum length of chunk extension name (default: 256 bytes).
  -/
  maxChunkExtNameLength : Nat := 256
  /--
  Maximum length of chunk extension value (default: 256 bytes).
  -/
  maxChunkExtValueLength : Nat := 256
  /--
  Maximum number of bytes consumed while parsing one chunk-size line with extensions (default: 8192 bytes).
  -/
  maxChunkLineLength : Nat := 8192
  /--
  Maximum allowed chunk payload size in bytes (default: 8 MiB).
  -/
  maxChunkSize : Nat := 8 * 1024 * 1024
  /--
  Maximum allowed total body size per request in bytes (default: 64 MiB).
  -/
  maxBodySize : Nat := 64 * 1024 * 1024
  /--
  Maximum length of reason phrase (default: 512 bytes).
  -/
  maxReasonPhraseLength : Nat := 512
  /--
  Maximum number of trailer headers (default: 20).
  -/
  maxTrailerHeaders : Nat := 20
  /--
  Maximum number of extensions on a single chunk-size line (default: 16).
  -/
  maxChunkExtensions : Nat := 16
namespace Config
/--
Converts this server configuration into the equivalent HTTP/1.1
protocol configuration consumed by `Protocol.H1`.
-/
def toH1Config (config : Config) : Protocol.H1.Config :=
  { maxMessages := config.maxRequests,
    maxHeaders := config.maxHeaders,
    maxHeaderBytes := config.maxHeaderBytes,
    enableKeepAlive := config.enableKeepAlive,
    agentName := config.serverName,
    maxUriLength := config.maxUriLength,
    maxStartLineLength := config.maxStartLineLength,
    maxHeaderNameLength := config.maxHeaderNameLength,
    maxHeaderValueLength := config.maxHeaderValueLength,
    maxSpaceSequence := config.maxSpaceSequence,
    maxLeadingEmptyLines := config.maxLeadingEmptyLines,
    maxChunkExtensions := config.maxChunkExtensions,
    maxChunkExtNameLength := config.maxChunkExtNameLength,
    maxChunkExtValueLength := config.maxChunkExtValueLength,
    maxChunkLineLength := config.maxChunkLineLength,
    maxChunkSize := config.maxChunkSize,
    maxBodySize := config.maxBodySize,
    maxReasonPhraseLength := config.maxReasonPhraseLength,
    maxTrailerHeaders := config.maxTrailerHeaders }
end Std.Http.Config

View File

@@ -0,0 +1,530 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Async.TCP
public import Std.Internal.Async.ContextAsync
public import Std.Internal.Http.Transport
public import Std.Internal.Http.Protocol.H1
public import Std.Internal.Http.Server.Config
public import Std.Internal.Http.Server.Handler
public section
namespace Std
namespace Http
namespace Server
open Std Internal IO Async TCP Protocol
open Time
/-!
# Connection
This module defines `Server.Connection`, a structure used to handle a single HTTP connection with
possibly multiple requests.
-/
set_option linter.all true
/--
Represents the remote address of a client connection.
Stored in each request's `Extensions` so handlers can look up the peer address.
-/
public structure RemoteAddr where
  /--
  The socket address of the remote client (IP address and port).
  -/
  addr : Net.SocketAddress
  -- `TypeName` is derived so the value can be stored/retrieved in a typed `Extensions` map.
  deriving TypeName
/-- Renders a `RemoteAddr` as `"<ip>:<port>"`. -/
instance : ToString RemoteAddr where
  toString remote := s!"{remote.addr.ipAddr}:{remote.addr.port}"
/--
A single HTTP connection, possibly carrying multiple pipelined requests.
-/
public structure Connection (α : Type) where
  /--
  The client connection (any `Transport` implementation, e.g. a TCP socket).
  -/
  socket : α
  /--
  The processing machine for HTTP/1.1, in the receiving state.
  -/
  machine : H1.Machine .receiving
  /--
  Extensions to attach to each request (e.g., remote address).
  -/
  extensions : Extensions := .empty
namespace Connection
/--
Events produced by the async select loop in `receiveWithTimeout`.
Each variant corresponds to one possible outcome of waiting for I/O.
-/
private inductive Recv (β : Type)
  /-- Bytes read from the transport; `none` means no more input is available. -/
  | bytes (x : Option ByteArray)
  /-- Next chunk of the handler's response body; `none` means the body stream ended. -/
  | responseBody (x : Option Chunk)
  /-- The request-body writer's interest changed; `true` means it wants more data. -/
  | bodyInterest (x : Bool)
  /-- The dispatched handler finished with either a response or an error. -/
  | response (x : (Except Error (Response β)))
  /-- A timer fired (keep-alive, header deadline, or generic timeout). -/
  | timeout
  /-- The connection context was cancelled for a non-deadline reason (e.g. server shutdown). -/
  | shutdown
  /-- The transport failed; the connection loop should terminate immediately. -/
  | close
/--
The set of I/O sources to wait on during a single poll iteration.
Each `Option` field is `none` when that source is not currently active.
-/
private structure PollSources (α β : Type) where
  /-- Transport to read request bytes from; `none` when the socket should not be polled. -/
  socket : Option α
  /-- Parser's hint for how many bytes it expects next; used to size the recv. -/
  expect : Option Nat
  /-- Channel carrying the handler's result, present once a handler is dispatched. -/
  response : Option (Std.Channel (Except Error (Response β)))
  /-- Response body stream currently being drained, if any. -/
  responseBody : Option β
  /-- Outgoing request-body writer to watch for back-pressure interest. -/
  requestBody : Option Body.Outgoing
  /-- Generic fallback timeout for this iteration. -/
  timeout : Millisecond.Offset
  /-- Idle keep-alive timeout, active while waiting between requests. -/
  keepAliveTimeout : Option Millisecond.Offset
  /-- Absolute deadline for receiving the complete request head. -/
  headerTimeout : Option Timestamp
  /-- Cancellation context of the whole connection. -/
  connectionContext : CancellationContext
/--
Waits for the next I/O event across all active sources described by `sources`.
Computes the socket recv size from `config`, then races all active selectables.
Calls `Handler.onFailure` and returns `.close` on transport errors.
-/
-- NOTE(review): the extraction of this file dropped non-ASCII glyphs; the `←` binds
-- below were restored — confirm against the original revision.
private def pollNextEvent
    {σ β : Type} [Transport α] [Handler σ] [Body.Reader β]
    (config : Config) (handler : σ) (sources : PollSources α β)
    : Async (Recv β) := do
  -- Clamp the parser's expectation to the configured per-recv maximum.
  let expectedBytes := sources.expect
    |>.getD config.defaultPayloadBytes
    |>.min config.maximumRecvSize
    |>.toUInt64
  let mut selectables : Array (Selectable (Recv β)) := #[
    .case sources.connectionContext.doneSelector (fun _ => do
      -- A deadline cancellation is reported as a timeout; anything else is a shutdown.
      let reason ← sources.connectionContext.getCancellationReason
      match reason with
      | some .deadline => pure .timeout
      | _ => pure .shutdown)
  ]
  if let some socket := sources.socket then
    selectables := selectables.push (.case (Transport.recvSelector socket expectedBytes) (Recv.bytes · |> pure))
  -- Timer priority: idle keep-alive, then the absolute header deadline, then the generic timeout.
  if let some keepAliveTimeout := sources.keepAliveTimeout then
    selectables := selectables.push (.case (← Selector.sleep keepAliveTimeout) (fun _ => pure .timeout))
  else if let some timeout := sources.headerTimeout then
    selectables := selectables.push (.case (← Selector.sleep (timeout - (← Timestamp.now)).toMilliseconds) (fun _ => pure .timeout))
  else
    selectables := selectables.push (.case (← Selector.sleep sources.timeout) (fun _ => pure .timeout))
  if let some responseBody := sources.responseBody then
    selectables := selectables.push (.case (Body.Reader.recvSelector responseBody) (Recv.responseBody · |> pure))
  if let some requestBody := sources.requestBody then
    selectables := selectables.push (.case (Body.Writer.interestSelector requestBody) (Recv.bodyInterest · |> pure))
  if let some response := sources.response then
    selectables := selectables.push (.case response.recvSelector (Recv.response · |> pure))
  try Selectable.one selectables
  catch e =>
    Handler.onFailure handler e
    pure .close
/--
Handles the `Expect: 100-continue` protocol for a pending request head.
Races between the handler's decision (`Handler.onContinue`), the connection being
cancelled, and a lingering timeout. Returns the updated machine and whether
`pendingHead` should be cleared (i.e. when the request is rejected).
-/
private def handleContinueEvent
    {σ : Type} [Handler σ]
    (handler : σ) (machine : H1.Machine .receiving) (head : Request.Head)
    (config : Config) (connectionContext : CancellationContext)
    : Async (H1.Machine .receiving × Bool) := do
  let continueChannel : Std.Channel Bool ← Std.Channel.new
  let continueTask ← Handler.onContinue handler head |>.asTask
  -- Route the handler's decision through a channel; a failed handler counts as a rejection.
  BaseIO.chainTask continueTask fun
    | .ok v => discard <| continueChannel.send v
    | .error _ => discard <| continueChannel.send false
  let canContinue ← Selectable.one #[
    .case continueChannel.recvSelector pure,
    .case connectionContext.doneSelector (fun _ => pure false),
    .case (← Selector.sleep config.lingeringTimeout) (fun _ => pure false)
  ]
  let status := if canContinue then Status.«continue» else Status.expectationFailed
  -- On rejection the caller clears the pending head (second component `true`).
  return (machine.canContinue status, !canContinue)
/--
Injects a `Date` header into a response head if `Config.generateDate` is set
and the response does not already include one. Otherwise returns the head unchanged.
-/
private def prepareResponseHead (config : Config) (head : Response.Head) : Async Response.Head := do
  if config.generateDate ∧ ¬head.headers.contains Header.Name.date then
    let now ← Std.Time.DateTime.now (tz := .UTC)
    return { head with headers := head.headers.insert Header.Name.date (Header.Value.ofString! now.toRFC822String) }
  else
    return head
/--
Applies a successful handler response to the machine.
Optionally injects a `Date` header, records the known body size, and sends the
response head. Returns the updated machine and the body stream to drain, or `none`
when the body should be omitted (e.g., for HEAD requests).
-/
private def applyResponse
    {β : Type} [Body.Reader β] [Body.Writer β]
    (config : Config) (machine : H1.Machine .receiving) (res : Response β)
    : Async (H1.Machine .receiving × Option β) := do
  let size ← Body.Writer.getKnownSize res.body
  -- Record the content length on the machine when the body size is known up front.
  let machineSized :=
    if let some knownSize := size then machine.setKnownSize knownSize
    else machine
  let responseHead ← prepareResponseHead config res.line
  let machineWithHead := machineSized.send responseHead
  if machineWithHead.writer.omitBody then
    -- The protocol layer decided no body should be written; release the stream.
    if ¬(← Body.Reader.isClosed res.body) then
      Body.Reader.close res.body
    return (machineWithHead, none)
  else
    return (machineWithHead, some res.body)
/--
All mutable state carried through the connection processing loop.
Bundled into a struct so it can be passed to and returned from helper functions.
-/
private structure ConnectionState (β : Type) where
  /-- The HTTP/1.1 state machine for this connection. -/
  machine : H1.Machine .receiving
  /-- Writer side of the current request body (fed from parsed input). -/
  requestOutgoing : Body.Outgoing
  /-- Reader side of the current request body (handed to the handler). -/
  requestIncoming : Body.Incoming
  /-- Idle keep-alive timeout; `none` while a request is in flight. -/
  keepAliveTimeout : Option Millisecond.Offset
  /-- Generic timeout used when no more specific timer applies. -/
  currentTimeout : Millisecond.Offset
  /-- Absolute deadline for receiving the current request head, if armed. -/
  headerTimeout : Option Timestamp
  /-- Channel on which the dispatched handler delivers its result. -/
  response : Std.Channel (Except Error (Response β))
  /-- Response body currently being streamed to the client, if any. -/
  respStream : Option β
  /-- Whether the machine asked for more input on the last step. -/
  requiresData : Bool
  /-- How many bytes the parser expects next, if it stated a size. -/
  expectData : Option Nat
  /-- Whether a handler task is currently running for this request. -/
  handlerDispatched : Bool
  /-- A fully parsed request head waiting to be dispatched to the handler. -/
  pendingHead : Option Request.Head
/--
Processes all H1 events from a single machine step, updating the connection state.
Handles keep-alive resets, body-size tracking, `Expect: 100-continue`, and parse errors.
Returns the updated state; stops early on `.failed`.
-/
private def processH1Events
    {σ β : Type} [Handler σ] [Body.Reader β]
    (handler : σ) (config : Config) (connectionContext : CancellationContext)
    (events : Array (H1.Event .receiving))
    (state : ConnectionState β)
    : Async (ConnectionState β) := do
  let mut st := state
  for event in events do
    match event with
    | .needMoreData expect =>
      st := { st with requiresData := true, expectData := expect }
    | .needAnswer => pure ()
    | .endHeaders head =>
      -- Sets the pending head and removes the KeepAlive or Header timeout.
      st := { st with
        currentTimeout := config.lingeringTimeout
        keepAliveTimeout := none
        headerTimeout := none
        pendingHead := some head
      }
      if let some length := head.getSize true then
        -- Sets the size of the body that is going out of the connection.
        Body.Writer.setKnownSize st.requestOutgoing (some length)
    | .«continue» =>
      if let some head := st.pendingHead then
        let (newMachine, clearPending) ← handleContinueEvent handler st.machine head config connectionContext
        st := { st with machine := newMachine }
        if clearPending then
          st := { st with pendingHead := none }
    | .next =>
      -- Reset all per-request state for the next pipelined request.
      if ¬(← Body.Writer.isClosed st.requestOutgoing) then
        Body.Writer.close st.requestOutgoing
      if let some res := st.respStream then
        if ¬(← Body.Reader.isClosed res) then
          Body.Reader.close res
      let (newOut, newIn) ← Body.mkChannel
      st := { st with
        requestOutgoing := newOut
        requestIncoming := newIn
        response := ← Std.Channel.new
        respStream := none
        keepAliveTimeout := some config.keepAliveTimeout.val
        currentTimeout := config.keepAliveTimeout.val
        headerTimeout := none
        handlerDispatched := false
      }
    | .failed err =>
      -- Parse failure: report, close the request body, and stop processing events.
      Handler.onFailure handler (toString err)
      if ¬(← Body.Writer.isClosed st.requestOutgoing) then
        Body.Writer.close st.requestOutgoing
      st := { st with requiresData := false, pendingHead := none }
      break
    | .closeBody =>
      if ¬(← Body.Writer.isClosed st.requestOutgoing) then
        Body.Writer.close st.requestOutgoing
    | .close => pure ()
  return st
/--
Dispatches a pending request head to the handler if one is waiting.
Spawns the handler as an async task and routes its result back through `state.response`.
Returns the updated state with `pendingHead` cleared and `handlerDispatched` set.
-/
private def dispatchPendingRequest
    {σ : Type} [Handler σ]
    (handler : σ) (extensions : Extensions) (connectionContext : CancellationContext)
    (state : ConnectionState (Handler.ResponseBody σ))
    : Async (ConnectionState (Handler.ResponseBody σ)) := do
  if let some line := state.pendingHead then
    let task ← Handler.onRequest handler { line, body := state.requestIncoming, extensions } connectionContext
      |>.asTask
    -- NOTE(review): restored `∘` (dropped by extraction) — the task result is forwarded
    -- into the response channel, discarding the send's own result.
    BaseIO.chainTask task (discard ∘ state.response.send)
    return { state with pendingHead := none, handlerDispatched := true }
  else
    return state
/--
Processes a single async I/O event and updates the connection state.
Returns the updated state and `true` if the connection should be closed immediately.
-/
private def handleRecvEvent
    {σ β : Type} [Handler σ] [Body.Reader β] [Body.Writer β]
    (handler : σ) (config : Config)
    (event : Recv β) (state : ConnectionState β)
    : Async (ConnectionState β × Bool) := do
  match event with
  | .bytes (some bs) =>
    let mut st := state
    -- After the first byte after idle we switch from keep-alive timeout to per-request header timeout.
    if st.keepAliveTimeout.isSome then
      st := { st with
        keepAliveTimeout := none
        headerTimeout := some <| (← Timestamp.now) + config.headerTimeout
      }
    return ({ st with machine := st.machine.feed bs }, false)
  | .bytes none =>
    return ({ state with machine := state.machine.noMoreInput }, false)
  | .responseBody (some chunk) =>
    return ({ state with machine := state.machine.sendData #[chunk] }, false)
  | .responseBody none =>
    -- Response body stream finished: close it and tell the machine the user is done.
    if let some res := state.respStream then
      if ¬(← Body.Reader.isClosed res) then Body.Reader.close res
    return ({ state with machine := state.machine.userClosedBody, respStream := none }, false)
  | .bodyInterest interested =>
    if interested then
      let (newMachine, pulledChunk) := state.machine.pullBody
      let mut st := { state with machine := newMachine }
      if let some pulled := pulledChunk then
        try Body.Writer.send st.requestOutgoing pulled.chunk pulled.incomplete
        catch e => Handler.onFailure handler e
        if pulled.final then
          if ¬(← Body.Writer.isClosed st.requestOutgoing) then
            Body.Writer.close st.requestOutgoing
      return (st, false)
    else
      return (state, false)
  | .close => return (state, true)
  | .timeout =>
    Handler.onFailure handler "request header timeout"
    return ({ state with machine := state.machine.closeWithError .requestTimeout, handlerDispatched := false }, false)
  | .shutdown =>
    return ({ state with machine := state.machine.closeWithError .serviceUnavailable, handlerDispatched := false }, false)
  | .response (.error err) =>
    Handler.onFailure handler err
    return ({ state with machine := state.machine.closeWithError .internalServerError, handlerDispatched := false }, false)
  | .response (.ok res) =>
    if state.machine.failed then
      -- The machine already failed; drop the late response body.
      if ¬(← Body.Reader.isClosed res.body) then Body.Reader.close res.body
      return ({ state with handlerDispatched := false }, false)
    else
      let (newMachine, newRespStream) ← applyResponse config state.machine res
      return ({ state with machine := newMachine, handlerDispatched := false, respStream := newRespStream }, false)
/--
Computes the active `PollSources` for the current connection state.
Determines which IO sources need attention and whether to include the socket.
-/
-- NOTE(review): boolean connectives (`∧`/`∨`) below were restored after being dropped
-- by extraction — confirm the exact grouping against the original revision.
private def buildPollSources
    {α β : Type} [Transport α]
    (socket : α) (connectionContext : CancellationContext) (state : ConnectionState β)
    : Async (PollSources α β) := do
  let requestBodyOpen ←
    if state.machine.canPullBody then pure !(← Body.Writer.isClosed state.requestOutgoing)
    else pure false
  let requestBodyInterested ←
    if state.machine.canPullBody ∧ requestBodyOpen then Body.Writer.hasInterest state.requestOutgoing
    else pure false
  let requestBody ←
    if state.machine.canPullBodyNow ∧ requestBodyOpen then pure (some state.requestOutgoing)
    else pure none
  -- Include the socket only when there is more to do than waiting for the handler alone.
  let pollSocket :=
    state.requiresData ∨ !state.handlerDispatched ∨ state.respStream.isSome ∨
    state.machine.writer.sentMessage ∨ (state.machine.canPullBody ∧ requestBodyInterested)
  return {
    socket := if pollSocket then some socket else none
    expect := state.expectData
    response := if state.handlerDispatched then some state.response else none
    responseBody := state.respStream
    requestBody := requestBody
    timeout := state.currentTimeout
    keepAliveTimeout := state.keepAliveTimeout
    headerTimeout := state.headerTimeout
    connectionContext := connectionContext
  }
/--
Runs the main request/response processing loop for a single connection.
Drives the HTTP/1.1 state machine through four phases each iteration:
send buffered output, process H1 events, dispatch pending requests, poll for I/O.
-/
private def handle
    {σ : Type} [Transport α] [h : Handler σ]
    (connection : Connection α)
    (config : Config)
    (connectionContext : CancellationContext)
    (handler : σ) : Async Unit := do
  let _ : Body.Reader (Handler.ResponseBody σ) := Handler.responseBodyReader
  let _ : Body.Writer (Handler.ResponseBody σ) := Handler.responseBodyWriter
  let socket := connection.socket
  let (initOut, initIn) ← Body.mkChannel
  let mut state : ConnectionState (Handler.ResponseBody σ) := {
    machine := connection.machine
    requestOutgoing := initOut
    requestIncoming := initIn
    keepAliveTimeout := some config.keepAliveTimeout.val
    currentTimeout := config.keepAliveTimeout.val
    headerTimeout := none
    response := ← Std.Channel.new
    respStream := none
    requiresData := false
    expectData := none
    handlerDispatched := false
    pendingHead := none
  }
  while ¬state.machine.halted do
    -- Phase 1: advance the state machine and flush any output.
    let (newMachine, step) := state.machine.step
    state := { state with machine := newMachine }
    if step.output.size > 0 then
      try Transport.sendAll socket step.output.data
      catch e =>
        Handler.onFailure handler e
        break
    -- Phase 2: process all events emitted by this step.
    state ← processH1Events handler config connectionContext step.events state
    -- Phase 3: dispatch any newly parsed request to the handler.
    state ← dispatchPendingRequest handler connection.extensions connectionContext state
    -- Phase 4: wait for the next IO event when any source needs attention.
    if state.requiresData ∨ state.handlerDispatched ∨ state.respStream.isSome ∨ state.machine.canPullBody then
      state := { state with requiresData := false }
      let sources ← buildPollSources socket connectionContext state
      let event ← pollNextEvent config handler sources
      let (newState, shouldClose) ← handleRecvEvent handler config event state
      state := newState
      if shouldClose then break
  -- Clean up: close all open streams and the socket.
  if ¬(← Body.Writer.isClosed state.requestOutgoing) then
    Body.Writer.close state.requestOutgoing
  if let some res := state.respStream then
    if ¬(← Body.Reader.isClosed res) then Body.Reader.close res
  Transport.close socket
end Connection
/--
Handles request/response processing for a single connection using an `Async` handler.
The library-level entry point for running a server is `Server.serve`.
This function can be used with a `TCP.Socket` or any other type that implements
`Transport` to build custom server loops.
# Example
```lean
-- Create a TCP socket server instance
let server ← Socket.Server.mk
server.bind addr
server.listen backlog
-- Enter an infinite loop to handle incoming client connections
while true do
  let client ← server.accept
  background (serveConnection client handler config)
```
-/
def serveConnection
    {σ : Type} [Transport t] [Handler σ]
    (client : t) (handler : σ)
    (config : Config) (extensions : Extensions := .empty) : ContextAsync Unit := do
  (Connection.mk client { config := config.toH1Config } extensions)
    |>.handle config (← ContextAsync.getContext) handler
end Std.Http.Server

View File

@@ -0,0 +1,65 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Async
public import Std.Internal.Http.Data
public import Std.Internal.Async.ContextAsync
public section
namespace Std.Http.Server
open Std.Internal.IO.Async
set_option linter.all true
/--
A type class for handling HTTP server requests. Implement this class to define how the server
responds to incoming requests, failures, and `Expect: 100-continue` headers.
-/
class Handler (σ : Type) where
  /--
  Concrete body type produced by `onRequest`.
  Defaults to `Body.AnyBody`, but handlers may override it with any reader/writer-compatible body.
  -/
  ResponseBody : Type := Body.AnyBody
  /--
  Reader instance required by the connection loop for sending response chunks.
  -/
  [responseBodyReader : Body.Reader ResponseBody]
  /--
  Writer instance used for known-size metadata and protocol integration.
  -/
  [responseBodyWriter : Body.Writer ResponseBody]
  /--
  Called for each incoming HTTP request. Must produce the full response
  (head and body stream) for the connection loop to send back to the client.
  -/
  onRequest (self : σ) (request : Request Body.Incoming) : ContextAsync (Response ResponseBody)
  /--
  Called when an I/O or transport error occurs while processing a request (e.g. broken socket,
  handler exception). This is a **notification only**: the connection will close regardless of
  the handler's response. Use this for logging and metrics. The default implementation does nothing.
  -/
  onFailure (self : σ) (error : IO.Error) : Async Unit :=
    pure ()
  /--
  Called when a request includes an `Expect: 100-continue` header. Return `true` to send a
  `100 Continue` response and accept the body. If `false` is returned the server sends
  `417 Expectation Failed`, disables keep-alive, and closes the request body reader.
  This function is guarded by `Config.lingeringTimeout` and may be cancelled on server shutdown.
  The default implementation always returns `true`.
  -/
  onContinue (self : σ) (request : Request.Head) : Async Bool :=
    pure true
end Std.Http.Server

View File

@@ -0,0 +1,249 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Protocol.H1
public section
/-!
# Transport
This module exposes a `Transport` type class that is used to represent different transport mechanisms
that can be used with an HTTP connection.
-/
namespace Std.Http
open Std Internal IO Async TCP
set_option linter.all true
/--
Generic HTTP interface that abstracts over different transport mechanisms.
-/
class Transport (α : Type) where
  /--
  Receive data from the client connection, up to the expected size.
  Returns `none` if the connection is closed or no data is available.
  -/
  recv : α → UInt64 → Async (Option ByteArray)
  /--
  Send all data through the client connection.
  -/
  sendAll : α → Array ByteArray → Async Unit
  /--
  Get a selector for receiving data asynchronously.
  -/
  recvSelector : α → UInt64 → Selector (Option ByteArray)
  /--
  Close the transport connection.
  The default implementation is a no-op; override this for transports that require explicit teardown.
  For `Socket.Client`, the runtime closes the file descriptor when the object is finalized.
  -/
  close : α → IO Unit := fun _ => pure ()
/--
`Transport` backed by a real TCP socket. `close` keeps the class default no-op:
per the note on `Transport.close`, the runtime closes the descriptor on finalization.
-/
instance : Transport Socket.Client where
  recv client expect := client.recv? expect
  sendAll client data := client.sendAll data
  recvSelector client expect := client.recvSelector expect
open Internal.IO.Async in
/--
Shared state for a bidirectional mock connection.
Both mock endpoints hold the same pair of channels, one per direction.
-/
private structure MockLink.SharedState where
  /--
  Client to server direction.
  -/
  clientToServer : Std.CloseableChannel ByteArray
  /--
  Server to client direction.
  -/
  serverToClient : Std.CloseableChannel ByteArray
/--
Mock client endpoint for testing.
-/
structure Mock.Client where
  /-- Channel pair shared with the paired `Mock.Server`. -/
  private shared : MockLink.SharedState
/--
Mock server endpoint for testing.
-/
structure Mock.Server where
  /-- Channel pair shared with the paired `Mock.Client`. -/
  private shared : MockLink.SharedState
namespace Mock
/--
Creates a mock server and client that are connected to each other and share the
same underlying state, enabling bidirectional communication.
-/
def new : BaseIO (Mock.Client × Mock.Server) := do
  let clientToServer ← Std.CloseableChannel.new
  let serverToClient ← Std.CloseableChannel.new
  -- Both endpoints wrap the same channel pair (anonymous constructors restored
  -- after extraction dropped the `⟨⟩` brackets).
  return (⟨⟨clientToServer, serverToClient⟩⟩, ⟨⟨clientToServer, serverToClient⟩⟩)
/--
Receives data from a channel, joining all available data up to the expected size. First does a
blocking recv, then greedily consumes available data with tryRecv until `expect` bytes are reached.
-/
def recvJoined (recvChan : Std.CloseableChannel ByteArray) (expect : Option UInt64) : Async (Option ByteArray) := do
  match ← await (← recvChan.recv) with
  | none => return none
  | some first =>
    let mut result := first
    repeat
      if let some expect := expect then
        -- Stop once we have gathered at least the requested number of bytes.
        if result.size.toUInt64 ≥ expect then break
      match ← recvChan.tryRecv with
      | none => break
      | some chunk => result := result ++ chunk
    return some result
/--
Sends a single ByteArray through a channel.
Channel errors are surfaced as `IO.userError` via the mapped task.
-/
def send (sendChan : Std.CloseableChannel ByteArray) (data : ByteArray) : Async Unit := do
  Async.ofAsyncTask ((← sendChan.send data) |>.map (Except.mapError (IO.userError ∘ toString)))
/--
Sends every `ByteArray` in `data` through the channel, preserving order.
-/
def sendAll (sendChan : Std.CloseableChannel ByteArray) (data : Array ByteArray) : Async Unit :=
  data.forM (send sendChan)
/--
Creates a selector for receiving from a channel.
Thin wrapper over `CloseableChannel.recvSelector` so mocks satisfy `Transport.recvSelector`.
-/
def recvSelector (recvChan : Std.CloseableChannel ByteArray) : Selector (Option ByteArray) :=
  recvChan.recvSelector
end Mock
namespace Mock.Client
/--
Gets the receive channel for a client (server to client direction).
-/
def getRecvChan (client : Mock.Client) : Std.CloseableChannel ByteArray :=
  client.shared.serverToClient
/--
Gets the send channel for a client (client to server direction).
-/
def getSendChan (client : Mock.Client) : Std.CloseableChannel ByteArray :=
  client.shared.clientToServer
/--
Sends a single ByteArray from the client towards the server.
-/
def send (client : Mock.Client) (data : ByteArray) : Async Unit :=
  Mock.send (getSendChan client) data
/--
Receives data sent by the server, joining all available chunks
(see `Mock.recvJoined` for the joining behavior).
-/
def recv? (client : Mock.Client) (expect : Option UInt64 := none) : Async (Option ByteArray) :=
  Mock.recvJoined (getRecvChan client) expect
/--
Tries to receive data without blocking, joining all immediately available chunks.
Returns `none` if no data is available.
-/
def tryRecv? (client : Mock.Client) (_expect : UInt64 := 0) : BaseIO (Option ByteArray) := do
  match ← (getRecvChan client).tryRecv with
  | none => return none
  | some first =>
    let mut result := first
    -- Drain everything that is immediately available into one buffer.
    repeat
      match ← (getRecvChan client).tryRecv with
      | none => break
      | some chunk => result := result ++ chunk
    return some result
/--
Closes both directions of the mock link, which terminates the server side too.
Already-closed channels are left untouched.
-/
def close (client : Mock.Client) : IO Unit := do
  if !(← client.shared.clientToServer.isClosed) then client.shared.clientToServer.close
  if !(← client.shared.serverToClient.isClosed) then client.shared.serverToClient.close
end Mock.Client
namespace Mock.Server
/--
Gets the receive channel for a server (client to server direction).
-/
def getRecvChan (server : Mock.Server) : Std.CloseableChannel ByteArray :=
  server.shared.clientToServer
/--
Gets the send channel for a server (server to client direction).
-/
def getSendChan (server : Mock.Server) : Std.CloseableChannel ByteArray :=
  server.shared.serverToClient
/--
Sends a single ByteArray from the server towards the client.
-/
def send (server : Mock.Server) (data : ByteArray) : Async Unit :=
  Mock.send (getSendChan server) data
/--
Receives data sent by the client, joining all available chunks
(see `Mock.recvJoined` for the joining behavior).
-/
def recv? (server : Mock.Server) (expect : Option UInt64 := none) : Async (Option ByteArray) :=
  Mock.recvJoined (getRecvChan server) expect
/--
Tries to receive data without blocking, joining all immediately available chunks. Returns `none` if no
data is available.
-/
def tryRecv? (server : Mock.Server) (_expect : UInt64 := 0) : BaseIO (Option ByteArray) := do
  match ← (getRecvChan server).tryRecv with
  | none => return none
  | some first =>
    let mut result := first
    -- Drain everything that is immediately available into one buffer.
    repeat
      match ← (getRecvChan server).tryRecv with
      | none => break
      | some chunk => result := result ++ chunk
    return some result
/--
Closes both directions of the mock link, which terminates the client side too.
Already-closed channels are left untouched.
-/
def close (server : Mock.Server) : IO Unit := do
  if !(← server.shared.clientToServer.isClosed) then server.shared.clientToServer.close
  if !(← server.shared.serverToClient.isClosed) then server.shared.serverToClient.close
end Mock.Server
/-- `Transport` over the in-memory mock client endpoint; reads join buffered chunks. -/
instance : Transport Mock.Client where
  recv client expect := Mock.recvJoined (Mock.Client.getRecvChan client) (some expect)
  sendAll client data := Mock.sendAll (Mock.Client.getSendChan client) data
  recvSelector client _ := Mock.recvSelector (Mock.Client.getRecvChan client)
  close client := client.close
/-- `Transport` over the in-memory mock server endpoint; reads join buffered chunks. -/
instance : Transport Mock.Server where
  recv server expect := Mock.recvJoined (Mock.Server.getRecvChan server) (some expect)
  sendAll server data := Mock.sendAll (Mock.Server.getSendChan server) data
  recvSelector server _ := Mock.recvSelector (Mock.Server.getRecvChan server)
  close server := server.close
end Std.Http

View File

@@ -44,8 +44,15 @@ protected def Parser.run (p : Parser α) (arr : ByteArray) : Except String α :=
Parse a single byte equal to `b`, fails if different.
-/
-- Resolved diff residue: this span interleaved the removed `attempt do` version
-- with its replacement; only the replacement is kept.
@[inline]
def pbyte (b : UInt8) : Parser UInt8 := fun it =>
  if h : it.hasNext then
    let got := it.curr' h
    if got = b then
      .success (it.next' h) got
    else
      .error it (.other s!"expected: '{b}'")
  else
    .error it .eof
/--
Skip a single byte equal to `b`, fails if different.
@@ -57,16 +64,29 @@ def skipByte (b : UInt8) : Parser Unit :=
/--
Skip a sequence of bytes equal to the given `ByteArray`.
Fails with `.eof` when input ends early, or `.other` on the first mismatching byte.
-/
def skipBytes (arr : ByteArray) : Parser Unit := fun it =>
  let rec go (idx : Nat) (it : ByteArray.Iterator) : ParseResult Unit ByteArray.Iterator :=
    if h : idx < arr.size then
      if hnext : it.hasNext then
        let got := it.curr' hnext
        let want := arr[idx]
        if got = want then
          go (idx + 1) (it.next' hnext)
        else
          .error it (.other s!"expected byte {want}, got {got}")
      else
        .error it .eof
    else
      .success it ()
  go 0 it
/--
Parse a string by matching its UTF-8 bytes, returns the string on success.
-/
@[inline]
def pstring (s : String) : Parser String := do
  let utf8 := s.toUTF8
  skipBytes utf8
  return s
/--
@@ -193,19 +213,47 @@ def take (n : Nat) : Parser ByteSlice := fun it =>
else
.success (it.forward n) (it.array[it.idx...(it.idx+n)])
/--
Scans while `pred` is satisfied. Returns `(count, iterator, hitEof)`.
`hitEof` is `true` when input ended while the predicate still held.
-/
private partial def scanWhile (pred : UInt8 → Bool) (count : Nat) (iter : ByteArray.Iterator) :
    Nat × ByteArray.Iterator × Bool :=
  if h : iter.hasNext then
    if pred (iter.curr' h) then
      scanWhile pred (count + 1) (iter.next' h)
    else
      (count, iter, false)
  else
    (count, iter, true)
/--
Scans while `pred` is satisfied, bounded by `limit`.
Returns `(count, iterator, hitEof)`; hitting the limit is not reported as EOF.
-/
private partial def scanWhileUpTo (pred : UInt8 → Bool) (limit : Nat) (count : Nat)
    (iter : ByteArray.Iterator) : Nat × ByteArray.Iterator × Bool :=
  if count ≥ limit then
    (count, iter, false)
  else if h : iter.hasNext then
    if pred (iter.curr' h) then
      scanWhileUpTo pred limit (count + 1) (iter.next' h)
    else
      (count, iter, false)
  else
    (count, iter, true)
/--
Parses while a predicate is satisfied.
Fails with `.eof` if input ends while the predicate still holds.
-/
@[inline]
partial def takeWhile (pred : UInt8 → Bool) : Parser ByteSlice :=
  fun it =>
    -- Resolved diff residue: the removed inline `findEnd` loop is superseded by `scanWhile`.
    let (length, newIt, hitEof) := scanWhile pred 0 it
    if hitEof then
      .error newIt .eof
    else
      .success newIt (it.array[it.idx...(it.idx + length)])
/--
Parses until a predicate is satisfied (exclusive).
@@ -216,16 +264,16 @@ def takeUntil (pred : UInt8 → Bool) : Parser ByteSlice :=
/--
Skips while a predicate is satisfied.
Fails with `.eof` if input ends while the predicate still holds.
-/
@[inline]
partial def skipWhile (pred : UInt8 → Bool) : Parser Unit :=
  fun it =>
    -- Resolved diff residue: the removed inline `findEnd` loop is superseded by `scanWhile`.
    let (_, newIt, hitEof) := scanWhile pred 0 it
    if hitEof then
      .error newIt .eof
    else
      .success newIt ()
/--
Skips until a predicate is satisfied.
@@ -236,34 +284,31 @@ def skipUntil (pred : UInt8 → Bool) : Parser Unit :=
/--
Parses while a predicate is satisfied, up to a given limit.
Fails with `.eof` if input ends before stopping or reaching the limit.
-/
@[inline]
partial def takeWhileUpTo (pred : UInt8 → Bool) (limit : Nat) : Parser ByteSlice :=
  fun it =>
    -- Resolved diff residue: the removed inline `findEnd` loop is superseded by `scanWhileUpTo`.
    let (length, newIt, hitEof) := scanWhileUpTo pred limit 0 it
    if hitEof then
      .error newIt .eof
    else
      .success newIt (it.array[it.idx...(it.idx + length)])
/--
Parses while a predicate is satisfied, up to a given limit, requiring at least one byte.
Fails with `.eof` if input ends before stopping or reaching the limit.
-/
@[inline]
def takeWhileUpTo1 (pred : UInt8 → Bool) (limit : Nat) : Parser ByteSlice :=
  fun it =>
    -- Resolved diff residue: the removed inline `findEnd` loop is superseded by `scanWhileUpTo`.
    let (length, newIt, hitEof) := scanWhileUpTo pred limit 0 it
    if hitEof then
      .error newIt .eof
    else if length = 0 then
      .error it (.other "expected at least one char")
    else
      .success newIt (it.array[it.idx...(it.idx + length)])
@@ -274,19 +319,42 @@ Parses until a predicate is satisfied (exclusive), up to a given limit.
def takeUntilUpTo (pred : UInt8 Bool) (limit : Nat) : Parser ByteSlice :=
takeWhileUpTo (fun b => ¬pred b) limit
/--
Parses while a predicate is satisfied, consuming at most `limit` bytes.
Unlike `takeWhileUpTo`, succeeds even if input ends before the predicate stops holding.
-/
@[inline]
def takeWhileAtMost (pred : UInt8 Bool) (limit : Nat) : Parser ByteSlice :=
fun it =>
let (length, newIt, _) := scanWhileUpTo pred limit 0 it
.success newIt (it.array[it.idx...(it.idx + length)])
/--
Parses while a predicate is satisfied, consuming at most `limit` bytes, requiring at least one.
Unlike `takeWhileUpTo1`, succeeds even if input ends before the predicate stops holding.
-/
@[inline]
def takeWhile1AtMost (pred : UInt8 Bool) (limit : Nat) : Parser ByteSlice :=
fun it =>
let (length, newIt, _) := scanWhileUpTo pred limit 0 it
if length = 0 then
.error it (.other "expected at least one char")
else
.success newIt (it.array[it.idx...(it.idx + length)])
/--
Skips while a predicate is satisfied, up to a given limit.
Fails with `.eof` if input ends before stopping or reaching the limit.
-/
@[inline]
partial def skipWhileUpTo (pred : UInt8 Bool) (limit : Nat) : Parser Unit :=
fun it =>
let rec findEnd (count : Nat) (iter : ByteArray.Iterator) : ByteArray.Iterator :=
if count limit then iter
else if ¬iter.hasNext then iter
else if pred iter.curr then findEnd (count + 1) iter.next
else iter
let (_, newIt, hitEof) := scanWhileUpTo pred limit 0 it
.success (findEnd 0 it) ()
if hitEof then
.error newIt .eof
else
.success newIt ()
/--
Skips until a predicate is satisfied, up to a given limit.

View File

@@ -11,11 +11,13 @@ public import Std.Sync.Channel
public import Std.Sync.Mutex
public import Std.Sync.RecursiveMutex
public import Std.Sync.Barrier
public import Std.Sync.Semaphore
public import Std.Sync.SharedMutex
public import Std.Sync.Notify
public import Std.Sync.Broadcast
public import Std.Sync.StreamMap
public import Std.Sync.CancellationToken
public import Std.Sync.CancellationContext
public import Std.Sync.Watch
@[expose] public section

View File

@@ -0,0 +1,96 @@
/-
Copyright (c) 2026 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Lean FRO Contributors
-/
module
prelude
public import Init.Data.Queue
public import Init.System.Promise
public import Std.Sync.Mutex
public section
namespace Std
/--
Internal state of a `Semaphore`: the number of free permits plus the FIFO queue
of promises handed out to acquirers that are still waiting for a permit.
-/
private structure SemaphoreState where
  /-- Number of permits currently available for immediate acquisition. -/
  permits : Nat
  /-- Waiting acquirers; `release` resolves at most one entry per call. -/
  waiters : Std.Queue (IO.Promise Unit) := ∅
deriving Nonempty
/--
Counting semaphore.

`Semaphore.acquire` returns a promise that is resolved once a permit is available.
If a permit is currently available, the returned promise is already resolved.
`Semaphore.release` either resolves one waiting promise or increments the available permits.
-/
structure Semaphore where private mk ::
  -- All permit bookkeeping lives behind this mutex; see `SemaphoreState`.
  private lock : Mutex SemaphoreState
/--
Creates a promise that is already resolved with `a`. Used by `acquire` to hand
back an immediately-available permit without making the caller wait.
-/
private def mkResolvedPromise [Nonempty α] (a : α) : BaseIO (IO.Promise α) := do
  let promise ← IO.Promise.new
  promise.resolve a
  return promise
/--
Creates a new semaphore with `permits` initially available permits.
-/
def Semaphore.new (permits : Nat) : BaseIO Semaphore := do
  return { lock := ← Mutex.new { permits } }
/--
Requests one permit.

Returns a promise that resolves once the permit is acquired. If a permit is
free, it is consumed immediately and the returned promise is already resolved;
otherwise the promise is enqueued and resolved by a later `release`.
-/
def Semaphore.acquire (sem : Semaphore) : BaseIO (IO.Promise Unit) := do
  sem.lock.atomically do
    let st ← get
    if st.permits > 0 then
      -- Fast path: take a permit and return an already-resolved promise.
      set { st with permits := st.permits - 1 }
      mkResolvedPromise ()
    else
      -- Slow path: park the caller at the end of the waiter queue.
      let promise ← IO.Promise.new
      set { st with waiters := st.waiters.enqueue promise }
      return promise
/--
Tries to acquire a permit without blocking. Returns `true` on success.
-/
def Semaphore.tryAcquire (sem : Semaphore) : BaseIO Bool := do
  sem.lock.atomically do
    let st ← get
    if st.permits > 0 then
      set { st with permits := st.permits - 1 }
      return true
    else
      return false
/--
Releases one permit and resolves one waiting acquirer, if any.

The dequeued waiter is resolved *outside* the critical section so that any
continuations attached to the promise do not run while the state lock is held.
-/
def Semaphore.release (sem : Semaphore) : BaseIO Unit := do
  let waiter? ← sem.lock.atomically do
    let st ← get
    match st.waiters.dequeue? with
    | some (waiter, waiters) =>
      -- Hand the permit directly to the longest-waiting acquirer.
      set { st with waiters }
      return some waiter
    | none =>
      -- Nobody is waiting: make the permit available to future acquirers.
      set { st with permits := st.permits + 1 }
      return none
  if let some waiter := waiter? then
    waiter.resolve ()
/--
Returns the number of currently available permits.
-/
def Semaphore.availablePermits (sem : Semaphore) : BaseIO Nat :=
  sem.lock.atomically do
    return (← get).permits
end Std

319
src/Std/Sync/Watch.lean Normal file
View File

@@ -0,0 +1,319 @@
/-
Copyright (c) 2026 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Init.Data.Queue
public import Std.Sync.Mutex
public import Std.Internal.Async.Select
public section
open Std.Internal.IO.Async
/-!
This module contains the implementation of `Std.Watch`. `Std.Watch` is a single-value watch
channel, inspired by [tokio's watch implementation](https://github.com/tokio-rs/tokio/blob/master/tokio/src/sync/watch.rs).
A watch channel holds a single value that can be updated by the sender. Multiple receivers
can independently observe the current value and wait for it to change.
Unlike `Std.Channel`, a watch channel:
- Retains only the latest value, not a queue of messages.
- Allows receivers to read the current value without consuming it.
- Notifies all receivers when the value changes (broadcast semantics on change).
- Returns an error on `changed` if the sender has been dropped.
-/
namespace Std
/--
Errors that may be thrown while interacting with the watch channel API.
-/
inductive Watch.Error where
  /--
  The sender was dropped, so no new values will ever be sent.
  -/
  | closed
deriving Repr, DecidableEq, Hashable
/-- Human-readable rendering of `Watch.Error`, used by the `MonadLift` instance below. -/
instance : ToString Watch.Error where
  toString
    | .closed => "watch channel sender was dropped"
/-- Lifts `EIO Watch.Error` into `IO`, converting the error into an `IO.userError`
via its `ToString` instance. -/
instance : MonadLift (EIO Watch.Error) IO where
  monadLift x := EIO.toIO (.userError <| toString ·) x
/-- A parked receiver: either a plain promise (enqueued by `changed`) or a
select-capable waiter (enqueued by `changedSelector.registerFn`). -/
private inductive Watch.Waiter (α : Type) where
  | normal (promise : IO.Promise α)
  | select (waiter : Internal.IO.Async.Waiter α)
/--
Delivers `x` to a parked waiter. Returns `true` if the value was delivered.
For a `.select` waiter the delivery races against the other arms of the select;
`false` means the select had already completed and `x` was dropped.
-/
private def Watch.Waiter.resolve (c : Watch.Waiter α) (x : α) : BaseIO Bool := do
  match c with
  | .normal promise =>
    promise.resolve x
    return true
  | .select waiter =>
    -- NOTE(review): relies on `Waiter.race` running the first action when the
    -- select is already finished, and otherwise handing us the promise —
    -- confirm against `Std.Internal.Async.Select`.
    waiter.race (return false) fun promise => do
      promise.resolve (.ok x)
      return true
/--
The shared state of a watch channel.
-/
private structure Watch.State (α : Type) where
  /--
  The current value held by the watch channel.
  -/
  value : α
  /--
  Monotonically increasing version. Incremented on every `send`.
  -/
  version : Nat
  /--
  Whether the sender has been dropped (closed).
  -/
  closed : Bool
  /--
  Receivers waiting for the value to change. Drained — with every entry
  resolved — by `send` and `close`.
  -/
  waiters : Std.Queue (Watch.Waiter (Except Watch.Error Unit))
deriving Nonempty
/--
A watch channel sender. Holds a reference to the shared state and can update the value.
-/
structure Watch (α : Type) where
  private mk ::
  -- Shared mutable state; every `Receiver` aliases the same mutex.
  private state : Mutex (Watch.State α)
deriving Nonempty
/--
A watch channel receiver. Each receiver independently tracks the version it last observed.
-/
structure Watch.Receiver (α : Type) where
  private mk ::
  -- Same shared state as the sender.
  private state : Mutex (Watch.State α)
  -- Version this receiver last observed (compared against `State.version`).
  private lastSeen : IO.Ref Nat
deriving Nonempty
namespace Watch
/--
Creates a new watch channel with an initial value. Returns the sender and a receiver.
-/
def new (initial : α) : BaseIO (Watch α × Watch.Receiver α) := do
  let state ← Mutex.new {
    value := initial
    version := 0
    closed := false
    waiters := ∅
  }
  -- The fresh receiver counts the initial value as already seen (version 0),
  -- so only a subsequent `send` wakes its `changed`.
  let lastSeen ← IO.mkRef 0
  return (⟨state⟩, ⟨state, lastSeen⟩)
/--
Sends a new value, updating the watched value and notifying all waiting receivers.
-/
def send (w : Watch α) (v : α) : BaseIO Unit := do
  w.state.atomically do
    let st ← get
    let newVersion := st.version + 1
    -- Install the new value and clear the queue in one step; the pre-clear
    -- snapshot `st.waiters` is then woken below.
    set { st with value := v, version := newVersion, waiters := ∅ }
    for waiter in st.waiters.toArray do
      -- A `false` result means a select waiter already completed elsewhere.
      discard <| waiter.resolve (.ok ())
/--
Closes the watch channel, signaling to receivers that no more values will be sent.

Waiting receivers will be woken up and their `changed` call will return
`Watch.Error.closed`.
-/
def close (w : Watch α) : BaseIO Unit := do
  w.state.atomically do
    let st ← get
    set { st with closed := true, waiters := ∅ }
    for waiter in st.waiters.toArray do
      discard <| waiter.resolve (.error .closed)
/--
Returns `true` if the sender has been closed.
-/
def isClosed (w : Watch α) : BaseIO Bool :=
  w.state.atomically do
    return (← get).closed
/--
Returns the current value held by the watch channel, as seen from the sender side.
-/
def current (w : Watch α) : BaseIO α :=
  w.state.atomically do
    return (← MonadState.get).value
namespace Receiver
/--
Borrow the current value without marking it as seen.
-/
def borrow (r : Watch.Receiver α) : BaseIO α :=
  r.state.atomically do
    return (← get).value
/--
Borrow the current value and mark the current version as seen, so that
the next `changed` call will only wake on a strictly newer value.
-/
def borrowAndUpdate (r : Watch.Receiver α) : BaseIO α := do
  r.state.atomically do
    let st ← get
    r.lastSeen.set st.version
    return st.value
/--
Returns `true` if the watched value has changed since this receiver last called
`borrowAndUpdate` or `changed`.
-/
def hasChanged (r : Watch.Receiver α) : BaseIO Bool := do
  r.state.atomically do
    let st ← get
    let seen ← r.lastSeen.get
    return st.version > seen
/--
Wait until the watched value changes relative to the version last seen by this receiver.

Returns `ok ()` on success or `error Watch.Error.closed` if the sender was dropped.
After a successful return the new value can be retrieved with `borrow` or `borrowAndUpdate`.
-/
partial def changed (r : Watch.Receiver α) : BaseIO (Task (Except Watch.Error Unit)) := do
  r.state.atomically do
    let st ← get
    let seen ← r.lastSeen.get
    if st.version > seen then
      -- Already newer than what we saw: consume the observation immediately.
      r.lastSeen.set st.version
      return .pure <| .ok ()
    else if st.closed then
      return .pure <| .error .closed
    else
      -- Park until `send`/`close` resolves the promise.
      let promise ← IO.Promise.new
      modify fun s => { s with waiters := s.waiters.enqueue (.normal promise) }
      BaseIO.bindTask promise.result? fun
        | none => return .pure <| .error .closed
        | some (Except.error e) => return .pure <| .error e
        | some (Except.ok ()) =>
          /- A notification arrived; recurse so `lastSeen` is updated atomically. -/
          r.changed
/--
Creates a `Selector` that resolves when the watched value changes.
-/
def changedSelector (r : Watch.Receiver α) : Selector (Except Watch.Error Unit) where
  tryFn := do
    r.state.atomically do
      let st ← get
      let seen ← r.lastSeen.get
      if st.version > seen then
        r.lastSeen.set st.version
        return some (.ok ())
      else if st.closed then
        return some (.error .closed)
      else
        return none
  registerFn waiter := do
    r.state.atomically do
      let st ← get
      let seen ← r.lastSeen.get
      if st.version > seen || st.closed then
        let result : Except Watch.Error Unit :=
          if st.version > seen then .ok () else .error .closed
        -- NOTE(review): `lastSeen` is advanced even if this waiter loses the
        -- select race below — confirm the dropped observation is intended.
        if st.version > seen then r.lastSeen.set st.version
        waiter.race (return ()) fun promise =>
          promise.resolve (.ok result)
      else
        modify fun s => { s with waiters := s.waiters.enqueue (.select waiter) }
  unregisterFn := do
    r.state.atomically do
      let st ← get
      -- Drop select waiters whose race already finished; keep plain promises.
      let waiters ← st.waiters.filterM fun
        | .normal _ => return true
        | .select w => return !(← w.checkFinished)
      set { st with waiters }
end Receiver
/--
A sync wrapper around the `Watch` sender for blocking use.
-/
@[expose] def Sync (α : Type) : Type := Watch α
/--
A sync wrapper around `Watch.Receiver` for blocking use.
-/
@[expose] def Sync.Receiver (α : Type) : Type := Watch.Receiver α
namespace Sync
/--
Creates a new watch channel with an initial value. Returns the sender and a sync receiver.
-/
@[inline]
def new (initial : α) : BaseIO (Sync α × Sync.Receiver α) :=
  Watch.new initial
/--
Sends a new value, updating the watched value and notifying all waiting receivers.
-/
@[inline]
def send (w : Sync α) (v : α) : BaseIO Unit :=
  Watch.send w v
/--
Closes the watch channel.
-/
@[inline]
def close (w : Sync α) : BaseIO Unit :=
  Watch.close w
/--
Returns `true` if the sender has been closed.
-/
@[inline]
def isClosed (w : Sync α) : BaseIO Bool :=
  Watch.isClosed w
namespace Receiver
/--
Borrow the current value without marking it as seen.
-/
@[inline]
def borrow (r : Sync.Receiver α) : BaseIO α :=
  Watch.Receiver.borrow r
/--
Borrow the current value and mark it as seen.
-/
@[inline]
def borrowAndUpdate (r : Sync.Receiver α) : BaseIO α :=
  Watch.Receiver.borrowAndUpdate r
/--
Returns `true` if the watched value has changed since last seen.
-/
@[inline]
def hasChanged (r : Sync.Receiver α) : BaseIO Bool :=
  Watch.Receiver.hasChanged r
/--
Block until the watched value changes. Returns `ok ()` or `error Watch.Error.closed`.
-/
def changed (r : Sync.Receiver α) : BaseIO (Except Watch.Error Unit) := do
  IO.wait (← Watch.Receiver.changed r)
end Receiver
end Sync
end Watch
end Std

View File

@@ -0,0 +1,488 @@
import Std.Internal.Http.Data.Body
open Std.Internal.IO Async
open Std.Http
open Std.Http.Body
/-! ## Channel tests -/
-- Test send and recv on channel
def channelSendRecv : Async Unit := do
let (outgoing, incoming) Body.mkChannel
let chunk := Chunk.ofByteArray "hello".toUTF8
let sendTask async (t := AsyncTask) <| outgoing.send chunk
let result incoming.recv
assert! result.isSome
assert! result.get!.data == "hello".toUTF8
await sendTask
#eval channelSendRecv.block
-- Test tryRecv on empty channel returns none
def channelTryRecvEmpty : Async Unit := do
let (_outgoing, incoming) Body.mkChannel
let result incoming.tryRecv
assert! result.isNone
#eval channelTryRecvEmpty.block
-- Test tryRecv consumes a waiting producer
def channelTryRecvWithPendingSend : Async Unit := do
let (outgoing, incoming) Body.mkChannel
let sendTask async (t := AsyncTask) <| outgoing.send (Chunk.ofByteArray "data".toUTF8)
let mut result := none
let mut fuel := 100
while result.isNone && fuel > 0 do
result incoming.tryRecv
if result.isNone then
let _ Selectable.one #[
.case ( Selector.sleep 1) pure
]
fuel := fuel - 1
assert! result.isSome
assert! result.get!.data == "data".toUTF8
await sendTask
#eval channelTryRecvWithPendingSend.block
-- Test close sets closed flag
def channelClose : Async Unit := do
let (outgoing, incoming) Body.mkChannel
assert! !( outgoing.isClosed)
outgoing.close
assert! ( incoming.isClosed)
#eval channelClose.block
-- Test recv on closed channel returns none
def channelRecvAfterClose : Async Unit := do
let (outgoing, incoming) Body.mkChannel
outgoing.close
let result incoming.recv
assert! result.isNone
#eval channelRecvAfterClose.block
-- Test Body.stream runs producer concurrently and transfers chunks
def bodyStreamSends : Async Unit := do
let incoming Body.stream fun outgoing => do
outgoing.send (Chunk.ofByteArray "x".toUTF8)
let first incoming.recv
assert! first.isSome
assert! first.get!.data == "x".toUTF8
let done incoming.recv
assert! done.isNone
#eval bodyStreamSends.block
-- Test Body.stream closes channel when generator throws
def bodyStreamThrowCloses : Async Unit := do
let incoming Body.stream fun _ => do
throw (.userError "boom")
let result incoming.recv
assert! result.isNone
#eval bodyStreamThrowCloses.block
-- Test for-in iteration collects chunks until close
def channelForIn : Async Unit := do
let (outgoing, incoming) Body.mkChannel
let producer async (t := AsyncTask) <| do
outgoing.send (Chunk.ofByteArray "a".toUTF8)
outgoing.send (Chunk.ofByteArray "b".toUTF8)
outgoing.close
let mut acc : ByteArray := .empty
for chunk in incoming do
acc := acc ++ chunk.data
assert! acc == "ab".toUTF8
await producer
#eval channelForIn.block
-- Test chunk extensions are preserved
def channelExtensions : Async Unit := do
let (outgoing, incoming) Body.mkChannel
let chunk := { data := "hello".toUTF8, extensions := #[(.mk "key", some (Chunk.ExtensionValue.ofString! "value"))] : Chunk }
let sendTask async (t := AsyncTask) <| outgoing.send chunk
let result incoming.recv
assert! result.isSome
assert! result.get!.extensions.size == 1
assert! result.get!.extensions[0]! == (Chunk.ExtensionName.mk "key", some <| .ofString! "value")
await sendTask
#eval channelExtensions.block
-- Test incomplete sends are collapsed before delivery
def channelCollapseIncompleteChunks : Async Unit := do
let (outgoing, incoming) Body.mkChannel
let first : Chunk := { data := "aaaaaaaaaa".toUTF8, extensions := #[(.mk "part", some <| .ofString! "first")] }
let second : Chunk := { data := "bbbbbbbbbb".toUTF8, extensions := #[(.mk "part", some <| .ofString! "second")] }
let last : Chunk := { data := "cccccccccccccccccccc".toUTF8, extensions := #[(.mk "part", some <| .ofString! "last")] }
outgoing.send first (incomplete := true)
outgoing.send second (incomplete := true)
let noChunkYet incoming.tryRecv
assert! noChunkYet.isNone
let sendFinal async (t := AsyncTask) <| outgoing.send last
let result incoming.recv
assert! result.isSome
let merged := result.get!
assert! merged.data == "aaaaaaaaaabbbbbbbbbbcccccccccccccccccccc".toUTF8
assert! merged.data.size == 40
assert! merged.extensions == #[(.mk "part", some <| .ofString! "first")]
await sendFinal
#eval channelCollapseIncompleteChunks.block
-- Test known size metadata
def channelKnownSize : Async Unit := do
let (outgoing, incoming) Body.mkChannel
outgoing.setKnownSize (some (.fixed 100))
let size incoming.getKnownSize
assert! size == some (.fixed 100)
#eval channelKnownSize.block
-- Test known size decreases when a chunk is consumed
def channelKnownSizeDecreases : Async Unit := do
let (outgoing, incoming) Body.mkChannel
outgoing.setKnownSize (some (.fixed 5))
let sendTask async (t := AsyncTask) <| outgoing.send (Chunk.ofByteArray "hello".toUTF8)
let _ incoming.recv
await sendTask
let size incoming.getKnownSize
assert! size == some (.fixed 0)
#eval channelKnownSizeDecreases.block
-- Test only one blocked producer is allowed
def channelSingleProducerRule : Async Unit := do
let (outgoing, incoming) Body.mkChannel
let send1 async (t := AsyncTask) <| outgoing.send (Chunk.ofByteArray "one".toUTF8)
-- Yield so `send1` can occupy the single pending-producer slot.
let _ Selectable.one #[
.case ( Selector.sleep 5) pure
]
let send2Failed
try
outgoing.send (Chunk.ofByteArray "two".toUTF8)
pure false
catch _ =>
pure true
assert! send2Failed
let first incoming.recv
assert! first.isSome
assert! first.get!.data == "one".toUTF8
await send1
#eval channelSingleProducerRule.block
-- Test only one blocked consumer is allowed
def channelSingleConsumerRule : Async Unit := do
let (outgoing, incoming) Body.mkChannel
let recv1 async (t := AsyncTask) <| incoming.recv
let hasInterest Selectable.one #[
.case outgoing.interestSelector pure
]
assert! hasInterest
let recv2Failed
try
let _ incoming.recv
pure false
catch _ =>
pure true
assert! recv2Failed
let sendTask async (t := AsyncTask) <| outgoing.send (Chunk.ofByteArray "ok".toUTF8)
let r1 await recv1
assert! r1.isSome
assert! r1.get!.data == "ok".toUTF8
await sendTask
#eval channelSingleConsumerRule.block
-- Test hasInterest reflects blocked receiver state
def channelHasInterest : Async Unit := do
let (outgoing, incoming) Body.mkChannel
assert! !( outgoing.hasInterest)
let recvTask async (t := AsyncTask) <| incoming.recv
let hasInterest Selectable.one #[
.case outgoing.interestSelector pure
]
assert! hasInterest
assert! ( outgoing.hasInterest)
let sendTask async (t := AsyncTask) <| outgoing.send (Chunk.ofByteArray "x".toUTF8)
let _ await recvTask
await sendTask
assert! !( outgoing.hasInterest)
#eval channelHasInterest.block
-- Test interestSelector resolves false when channel closes first
def channelInterestSelectorClose : Async Unit := do
let (outgoing, _incoming) Body.mkChannel
let waitInterest async (t := AsyncTask) <|
Selectable.one #[
.case outgoing.interestSelector pure
]
outgoing.close
let interested await waitInterest
assert! interested == false
#eval channelInterestSelectorClose.block
/-! ## Full tests -/
-- Test Full.recv returns content once then EOF
def fullRecvConsumesOnce : Async Unit := do
let full Body.Full.ofString "hello"
let first full.recv
let second full.recv
assert! first.isSome
assert! first.get!.data == "hello".toUTF8
assert! second.isNone
#eval fullRecvConsumesOnce.block
-- Test Full known-size metadata tracks consumption
def fullKnownSizeLifecycle : Async Unit := do
let data := ByteArray.mk #[0x01, 0x02, 0x03, 0x04]
let full Body.Full.ofByteArray data
assert! ( full.getKnownSize) == some (.fixed 4)
let chunk full.tryRecv
assert! chunk.isSome
assert! chunk.get!.data == data
assert! ( full.getKnownSize) == some (.fixed 0)
#eval fullKnownSizeLifecycle.block
-- Test Full.close discards remaining content
def fullClose : Async Unit := do
let full Body.Full.ofString "bye"
assert! !( full.isClosed)
full.close
assert! ( full.isClosed)
assert! ( full.tryRecv).isNone
#eval fullClose.block
-- Test Full interest API always reports no consumer interest
def fullInterest : Async Unit := do
let full Body.Full.ofString "x"
assert! !( full.hasInterest)
let interested Selectable.one #[
.case full.interestSelector pure
]
assert! interested == false
#eval fullInterest.block
/-! ## Empty tests -/
-- Test Empty writer metadata and interest behavior
def emptyWriterBasics : Async Unit := do
let body : Body.Empty := {}
assert! ( Writer.getKnownSize body) == some (.fixed 0)
assert! ( Writer.isClosed body)
assert! !( Writer.hasInterest body)
Writer.setKnownSize body (some (.fixed 99))
assert! ( Writer.getKnownSize body) == some (.fixed 0)
Writer.close body
let interested Selectable.one #[
.case (Writer.interestSelector body) pure
]
assert! interested == false
#eval emptyWriterBasics.block
-- Test Empty writer rejects send
def emptyWriterSendFails : Async Unit := do
let body : Body.Empty := {}
let failed
try
Writer.send body (Chunk.ofByteArray "x".toUTF8) false
pure false
catch _ =>
pure true
assert! failed
#eval emptyWriterSendFails.block
/-! ## Request.Builder body tests -/
private def recvBuiltBody (body : Body.Full) : Async (Option Chunk) :=
body.recv
private def emptyBodyKnownSize (body : Body.Empty) : Async (Option Body.Length) :=
Writer.getKnownSize body
-- Test Request.Builder.text sets correct headers
def requestBuilderText : Async Unit := do
let req Request.post (.originForm! "/api")
|>.text "Hello, World!"
assert! req.line.headers.get? Header.Name.contentType == some (Header.Value.ofString! "text/plain; charset=utf-8")
assert! req.line.headers.get? Header.Name.contentLength == none
let body recvBuiltBody req.body
assert! body.isSome
assert! body.get!.data == "Hello, World!".toUTF8
#eval requestBuilderText.block
-- Test Request.Builder.json sets correct headers
def requestBuilderJson : Async Unit := do
let req Request.post (.originForm! "/api")
|>.json "{\"key\": \"value\"}"
assert! req.line.headers.get? Header.Name.contentType == some (Header.Value.ofString! "application/json")
assert! req.line.headers.get? Header.Name.contentLength == none
let body recvBuiltBody req.body
assert! body.isSome
assert! body.get!.data == "{\"key\": \"value\"}".toUTF8
#eval requestBuilderJson.block
-- Test Request.Builder.fromBytes sets body
def requestBuilderFromBytes : Async Unit := do
let data := ByteArray.mk #[0x01, 0x02, 0x03]
let req Request.post (.originForm! "/api")
|>.fromBytes data
assert! req.line.headers.get? Header.Name.contentLength == none
let body recvBuiltBody req.body
assert! body.isSome
assert! body.get!.data == data
#eval requestBuilderFromBytes.block
-- Test Request.Builder.blank creates empty body
def requestBuilderNoBody : Async Unit := do
let req Request.get (.originForm! "/api")
|>.blank
assert! ( emptyBodyKnownSize req.body) == some (.fixed 0)
#eval requestBuilderNoBody.block
/-! ## Response.Builder body tests -/
-- Test Response.Builder.text sets correct headers
def responseBuilderText : Async Unit := do
let res Response.ok
|>.text "Hello, World!"
assert! res.line.headers.get? Header.Name.contentType == some (Header.Value.ofString! "text/plain; charset=utf-8")
assert! res.line.headers.get? Header.Name.contentLength == none
let body recvBuiltBody res.body
assert! body.isSome
assert! body.get!.data == "Hello, World!".toUTF8
#eval responseBuilderText.block
-- Test Response.Builder.json sets correct headers
def responseBuilderJson : Async Unit := do
let res Response.ok
|>.json "{\"status\": \"ok\"}"
assert! res.line.headers.get? Header.Name.contentType == some (Header.Value.ofString! "application/json")
assert! res.line.headers.get? Header.Name.contentLength == none
let body recvBuiltBody res.body
assert! body.isSome
assert! body.get!.data == "{\"status\": \"ok\"}".toUTF8
#eval responseBuilderJson.block
-- Test Response.Builder.fromBytes sets body
def responseBuilderFromBytes : Async Unit := do
let data := ByteArray.mk #[0xaa, 0xbb]
let res Response.ok
|>.fromBytes data
assert! res.line.headers.get? Header.Name.contentLength == none
let body recvBuiltBody res.body
assert! body.isSome
assert! body.get!.data == data
#eval responseBuilderFromBytes.block
-- Test Response.Builder.blank creates empty body
def responseBuilderNoBody : Async Unit := do
let res Response.ok
|>.blank
assert! ( emptyBodyKnownSize res.body) == some (.fixed 0)
#eval responseBuilderNoBody.block

View File

@@ -0,0 +1,335 @@
import Std.Internal.Http
import Std.Internal.Async
open Std.Internal.IO Async
open Std Http
/-- A test handler: a plain function from an incoming request to a response action. -/
abbrev TestHandler := Request Body.Incoming → ContextAsync (Response Body.AnyBody)
/-- Treat any `TestHandler` function as a server handler by applying it to the request. -/
instance : Std.Http.Server.Handler TestHandler where
  onRequest handler request := handler request
/-- Coerce a context action producing an incoming-body response into one producing
an outgoing `AnyBody` response, so handlers can return proxied bodies directly. -/
instance : Coe (ContextAsync (Response Body.Incoming)) (ContextAsync (Response Body.AnyBody)) where
  coe action := do
    let response ← action
    pure { response with body := Body.Internal.incomingToOutgoing response.body }
/-- Same coercion for plain `Async` response actions. -/
instance : Coe (Async (Response Body.Incoming)) (ContextAsync (Response Body.AnyBody)) where
  coe action := do
    let response ← action
    pure { response with body := Body.Internal.incomingToOutgoing response.body }
/--
Sends raw bytes through the mock client, runs the server's connection loop with
`handler`, and returns the raw response bytes (empty if nothing was received).
-/
def sendRaw
    (client : Mock.Client)
    (server : Mock.Server)
    (raw : ByteArray)
    (handler : TestHandler)
    (config : Config := { lingeringTimeout := 1000, generateDate := false }) : IO ByteArray := Async.block do
  client.send raw
  Std.Http.Server.serveConnection server handler config
    |>.run
  let res ← client.recv?
  pure (res.getD .empty)
/--
Like `sendRaw`, but closes the client's send channel first so the server
observes EOF after `raw` — used for truncated-request tests.
-/
def sendRawAndClose
    (client : Mock.Client)
    (server : Mock.Server)
    (raw : ByteArray)
    (handler : TestHandler)
    (config : Config := { lingeringTimeout := 1000, generateDate := false }) : IO ByteArray := Async.block do
  client.send raw
  client.getSendChan.close
  Std.Http.Server.serveConnection server handler config
    |>.run
  let res ← client.recv?
  pure (res.getD .empty)
/-- Decodes a raw HTTP response as UTF-8 for string-based assertions. -/
def responseText (response : ByteArray) : String :=
  response |> String.fromUTF8!
/-- Fails the test `name` unless the decoded response starts with `prefix_`. -/
def assertStatusPrefix (name : String) (response : ByteArray) (prefix_ : String) : IO Unit := do
  let text := responseText response
  if text.startsWith prefix_ then
    pure ()
  else
    throw <| IO.userError s!"Test '{name}' failed:\nExpected status prefix {prefix_.quote}\nGot:\n{text.quote}"
/-- Fails the test `name` unless the decoded response contains `needle`. -/
-- NOTE(review): `String.contains` classically takes a `Char` needle; confirm a
-- String-needle overload (or project extension) is in scope here.
def assertContains (name : String) (response : ByteArray) (needle : String) : IO Unit := do
  let text := responseText response
  unless text.contains needle do
    throw <| IO.userError s!"Test '{name}' failed:\nMissing {needle.quote}\nGot:\n{text.quote}"
/-- Fails the test `name` if the decoded response contains `needle`. -/
-- NOTE(review): same `String.contains` needle-type caveat as `assertContains`.
def assertNotContains (name : String) (response : ByteArray) (needle : String) : IO Unit := do
  let text := responseText response
  if text.contains needle then
    throw <| IO.userError s!"Test '{name}' failed:\nUnexpected {needle.quote}\nGot:\n{text.quote}"
/-- Fails the test `name` unless the decoded response equals `expected` exactly. -/
def assertExact (name : String) (response : ByteArray) (expected : String) : IO Unit := do
  let text := responseText response
  unless text == expected do
    throw <| IO.userError s!"Test '{name}' failed:\nExpected:\n{expected.quote}\nGot:\n{text.quote}"
/-- Counts non-overlapping occurrences of `needle` in `s`; an empty needle counts as zero. -/
def countOccurrences (s : String) (needle : String) : Nat :=
  match needle.isEmpty with
  | true => 0
  | false => (s.splitOn needle |>.length) - 1
/-- Fails the test `name` unless the response contains exactly `expected`
HTTP/1.1 status lines (counted via the "HTTP/1.1 " marker). -/
def assertStatusCount (name : String) (response : ByteArray) (expected : Nat) : IO Unit := do
  let text := responseText response
  let got := countOccurrences text "HTTP/1.1 "
  if got != expected then
    throw <| IO.userError s!"Test '{name}' failed:\nExpected {expected} responses but saw {got}\n{text.quote}"
/-- The server's canonical 400 Bad Request response, byte-exact for `assertExact`. -/
def bad400 : String :=
  "HTTP/1.1 400 Bad Request\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 0\x0d\n\x0d\n"
/-- The server's canonical 501 Not Implemented response, byte-exact for `assertExact`. -/
def notImplemented : String :=
  "HTTP/1.1 501 Not Implemented\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 0\x0d\n\x0d\n"
/-- Handler that reads the whole request body and echoes it back as a 200 text response. -/
def echoBodyHandler : TestHandler := fun req => do
  let body : String ← req.body.readAll
  Response.ok |>.text body
-- Content-Length body is read exactly.
#eval show IO _ from do
let (client, server) Mock.new
let raw := "POST /echo HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 5\x0d\nConnection: close\x0d\n\x0d\nhello".toUTF8
let response sendRaw client server raw echoBodyHandler
assertStatusPrefix "CL body accepted" response "HTTP/1.1 200"
assertContains "CL body echoed" response "hello"
-- Chunked body baseline.
#eval show IO _ from do
let (client, server) Mock.new
let raw := "POST /chunked HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
let response sendRaw client server raw echoBodyHandler
assertStatusPrefix "Chunked baseline" response "HTTP/1.1 200"
assertContains "Chunked baseline body" response "hello"
-- Uppercase and leading-zero chunk-size are accepted.
#eval show IO _ from do
let (client, server) Mock.new
let raw := "POST /chunked HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n000A\x0d\n0123456789\x0d\n0\x0d\n\x0d\n".toUTF8
let response sendRaw client server raw echoBodyHandler
assertStatusPrefix "Chunk-size uppercase+leading-zero" response "HTTP/1.1 200"
assertContains "Chunk-size uppercase+leading-zero body" response "0123456789"
-- Chunk extensions with token and quoted value are accepted.
-- NOTE(review): restored the monadic bind arrows (`←`) that were lost in extraction;
-- without them none of these `do` blocks parse.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST /chunked HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;ext=value;quoted=\"ok\"\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertStatusPrefix "Chunk extensions accepted" response "HTTP/1.1 200"
  assertContains "Chunk extensions body" response "hello"

-- h11-inspired: invalid chunk-size token is rejected.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST /bad HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\nG\x0d\nabc\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertExact "Invalid chunk-size token" response bad400

-- h11-inspired: reject bad bytes where chunk terminator must be CRLF.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST /smuggle HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nxxx__1a\x0d\n".toUTF8
  let response ← sendRawAndClose client server raw echoBodyHandler
  assertExact "Chunk terminator bytes validated" response bad400

-- Missing terminal 0-chunk is rejected once EOF arrives.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST /incomplete HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n".toUTF8
  let response ← sendRawAndClose client server raw echoBodyHandler
  assertExact "Missing terminal zero chunk" response bad400

-- TE+CL mixed framing is rejected.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST /mix HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nContent-Length: 5\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertExact "TE+CL rejected" response bad400

-- Duplicate chunked coding is rejected.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST /dup HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked, chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertExact "Duplicate chunked coding" response bad400

-- Duplicate Transfer-Encoding lines with unsupported coding are rejected.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST /dup-lines HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nTransfer-Encoding: gzip\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertExact "Duplicate TE headers with gzip" response bad400

-- Transfer-coding chains that end in chunked are accepted as chunked framing.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST /gzip HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: gzip, chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertStatusPrefix "gzip, chunked accepted as chunked framing" response "HTTP/1.1 200"
  assertContains "gzip, chunked body delivered" response "hello"

-- Unsupported transfer codings without chunked framing are rejected.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST /identity HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: identity\x0d\nConnection: close\x0d\n\x0d\nhello".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertExact "identity transfer-coding rejected with 400" response bad400

-- Malformed Transfer-Encoding token list is rejected.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST /bad-te-list HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: ,chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertExact "Malformed Transfer-Encoding token list" response bad400
-- Strict chunk-extension name/value limits.
-- NOTE(review): restored the `←` binds lost in extraction.
#eval show IO _ from do
  let config : Config := {
    lingeringTimeout := 1000
    generateDate := false
    maxChunkExtNameLength := 4
    maxChunkExtValueLength := 4
  }
  let (clientA, serverA) ← Mock.new
  let okName := "POST /ok-name HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;name=1\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseA ← sendRaw clientA serverA okName echoBodyHandler (config := config)
  assertStatusPrefix "Chunk ext name at limit" responseA "HTTP/1.1 200"
  let (clientB, serverB) ← Mock.new
  let badName := "POST /bad-name HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;toolong=1\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB badName echoBodyHandler (config := config)
  assertExact "Chunk ext name too long" responseB bad400
  let (clientC, serverC) ← Mock.new
  let okValue := "POST /ok-value HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;name=abcd\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseC ← sendRaw clientC serverC okValue echoBodyHandler (config := config)
  assertStatusPrefix "Chunk ext value at limit" responseC "HTTP/1.1 200"
  let (clientD, serverD) ← Mock.new
  let badQuoted := "POST /bad-value HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;name=\"abcde\"\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseD ← sendRaw clientD serverD badQuoted echoBodyHandler (config := config)
  assertExact "Quoted chunk ext value too long" responseD bad400
  let (clientE, serverE) ← Mock.new
  let badToken := "POST /bad-token HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;na@e=1\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseE ← sendRaw clientE serverE badToken echoBodyHandler (config := config)
  assertExact "Invalid chunk ext token char" responseE bad400
  let (clientF, serverF) ← Mock.new
  let mixed := "POST /mixed-ext HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;a=1;b=2;toolong=3\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseF ← sendRaw clientF serverF mixed echoBodyHandler (config := config)
  assertExact "Mixed valid/invalid chunk extensions" responseF bad400

-- maxChunkExtensions boundary is enforced.
#eval show IO _ from do
  let config : Config := {
    lingeringTimeout := 1000
    generateDate := false
    maxChunkExtensions := 2
  }
  let (clientA, serverA) ← Mock.new
  let okRaw := "POST /ext-count HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;a=1;b=2\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let okResponse ← sendRaw clientA serverA okRaw echoBodyHandler (config := config)
  assertStatusPrefix "maxChunkExtensions at limit" okResponse "HTTP/1.1 200"
  let (clientB, serverB) ← Mock.new
  let badRaw := "POST /ext-count-overflow HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;a=1;b=2;c=3\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let badResponse ← sendRaw clientB serverB badRaw echoBodyHandler (config := config)
  assertExact "maxChunkExtensions overflow" badResponse bad400
-- Content-Length with leading zeros is accepted.
-- NOTE(review): restored the `←` binds lost in extraction.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST /leading-zeros HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 005\x0d\nConnection: close\x0d\n\x0d\nhello".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertStatusPrefix "Content-Length leading zeros" response "HTTP/1.1 200"
  assertContains "Content-Length leading zeros body" response "hello"

-- Very large Content-Length is rejected with 413.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST /too-large HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 99999999999999999999\x0d\nConnection: close\x0d\n\x0d\nhello".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertStatusPrefix "Huge Content-Length" response "HTTP/1.1 413"

-- Duplicate Content-Length (same and different values) are rejected.
#eval show IO _ from do
  let (clientA, serverA) ← Mock.new
  let same := "POST /dup-cl-same HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 5\x0d\nContent-Length: 5\x0d\nConnection: close\x0d\n\x0d\nhello".toUTF8
  let responseA ← sendRaw clientA serverA same echoBodyHandler
  assertExact "Duplicate Content-Length same" responseA bad400
  let (clientB, serverB) ← Mock.new
  let diff := "POST /dup-cl-diff HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 3\x0d\nContent-Length: 5\x0d\nConnection: close\x0d\n\x0d\nabc".toUTF8
  let responseB ← sendRaw clientB serverB diff echoBodyHandler
  assertExact "Duplicate Content-Length different" responseB bad400

-- Chunk-size line trailing whitespace is rejected.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST /bad-space HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5 \x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertExact "Chunk-size trailing space" response bad400

-- Transfer-Encoding trailing OWS is currently accepted.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST /te-ows HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked \x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertStatusPrefix "Transfer-Encoding trailing OWS" response "HTTP/1.1 200"
  assertContains "Transfer-Encoding trailing OWS body" response "hello"

-- h11-inspired: early invalid-byte detection before CRLF.
#eval show IO _ from do
  let (clientA, serverA) ← Mock.new
  let responseA ← sendRawAndClose clientA serverA (ByteArray.mk #[0x00]) echoBodyHandler
  assertExact "Early invalid NUL" responseA bad400
  let (clientB, serverB) ← Mock.new
  let responseB ← sendRawAndClose clientB serverB (ByteArray.mk #[0x20]) echoBodyHandler
  assertExact "Early invalid SP" responseB bad400
  let (clientC, serverC) ← Mock.new
  let responseC ← sendRawAndClose clientC serverC (ByteArray.mk #[0x16, 0x03, 0x01, 0x00, 0xa5]) echoBodyHandler
  assertExact "Early invalid TLS prefix" responseC bad400

-- h11-inspired: reject garbage after request-line version token.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "GET / HTTP/1.1 xxxxxx\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertExact "Garbage after request-line" response bad400

-- Extra bytes beyond Content-Length become the next pipelined request.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw :=
    ("POST / HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 5\x0d\n\x0d\nhello" ++
      "GET /second HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n").toUTF8
  let response ← sendRaw client server raw (fun req => do
    let mut body := ByteArray.empty
    for chunk in req.body do
      body := body ++ chunk.data
    Response.ok |>.text s!"{toString req.line.uri}:{String.fromUTF8! body}")
  assertStatusCount "Pipelined parse after exact CL" response 2
  assertContains "Pipelined first response" response "/:hello"
  assertContains "Pipelined second response" response "/second:"
  assertNotContains "No parse confusion" response "/second:hello"

View File

@@ -0,0 +1,199 @@
import Std.Internal.Http
import Std.Internal.Async
open Std.Internal.IO Async
open Std Http
/-- A test handler maps an incoming-body request to an any-body response.
(Restored the `→` and the two `←` binds that were dropped in extraction.) -/
abbrev TestHandler := Request Body.Incoming → ContextAsync (Response Body.AnyBody)

instance : Std.Http.Server.Handler TestHandler where
  onRequest handler request := handler request

/-- Coerce a response with an incoming body into one with an outgoing/any body. -/
instance : Coe (ContextAsync (Response Body.Incoming)) (ContextAsync (Response Body.AnyBody)) where
  coe action := do
    let response ← action
    pure { response with body := Body.Internal.incomingToOutgoing response.body }

instance : Coe (Async (Response Body.Incoming)) (ContextAsync (Response Body.AnyBody)) where
  coe action := do
    let response ← action
    pure { response with body := Body.Internal.incomingToOutgoing response.body }
/--
Drives one request/response exchange over an in-memory mock connection:
writes `raw` from the client side, runs `serveConnection` with `handler`
under `config`, then returns the server's reply bytes (empty if none).
(Restored the `←` bind on `client.recv?` lost in extraction.)
-/
def sendRaw
    (client : Mock.Client)
    (server : Mock.Server)
    (raw : ByteArray)
    (handler : TestHandler)
    (config : Config := { lingeringTimeout := 1000, generateDate := false }) : IO ByteArray := Async.block do
  client.send raw
  Std.Http.Server.serveConnection server handler config
    |>.run
  let res ← client.recv?
  pure <| res.getD .empty
/-- Fail with `IO.userError` unless `response` decodes to text beginning with `status`. -/
def assertStatus (name : String) (response : ByteArray) (status : String) : IO Unit := do
  let decoded := String.fromUTF8! response
  if decoded.startsWith status then
    pure ()
  else
    throw <| IO.userError s!"Test '{name}' failed:\nExpected {status}\nGot:\n{decoded.quote}"
/-- Fail with `IO.userError` unless `response` decodes to exactly `expected`. -/
def assertExact (name : String) (response : ByteArray) (expected : String) : IO Unit := do
  let actual := String.fromUTF8! response
  unless actual == expected do
    throw <| IO.userError s!"Test '{name}' failed:\nExpected:\n{expected.quote}\nGot:\n{actual.quote}"
/-- Echoes the full request body back as a 200 text response.
(Restored the `←` bind on `req.body.readAll` lost in extraction.) -/
def echoHandler : TestHandler :=
  fun req => do
    let body : String ← req.body.readAll
    Response.ok |>.text body
/-- Byte-exact 400 response the server emits for malformed requests;
test cases compare whole responses against this via `assertExact`. -/
def bad400 : String :=
  "HTTP/1.1 400 Bad Request\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 0\x0d\n\x0d\n"
-- Chunk extension name length limits.
-- Default maxChunkExtNameLength = 256.
-- NOTE(review): restored the `←` binds lost in extraction.
#eval show IO _ from do
  let name256 := String.ofList (List.replicate 256 'a')
  let name257 := String.ofList (List.replicate 257 'a')
  -- Name at exactly the limit → accepted.
  let (clientA, serverA) ← Mock.new
  let rawA := s!"POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;{name256}\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseA ← sendRaw clientA serverA rawA echoHandler
  assertStatus "Ext name at 256 → accepted" responseA "HTTP/1.1 200"
  -- Name one byte over the limit → rejected.
  let (clientB, serverB) ← Mock.new
  let rawB := s!"POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;{name257}\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB rawB echoHandler
  assertExact "Ext name at 257 → rejected" responseB bad400

-- Chunk extension value length limits (unquoted token value).
-- Default maxChunkExtValueLength = 256.
#eval show IO _ from do
  let val256 := String.ofList (List.replicate 256 'v')
  let val257 := String.ofList (List.replicate 257 'v')
  -- Token value at exactly the limit → accepted.
  let (clientA, serverA) ← Mock.new
  let rawA := s!"POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;ext={val256}\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseA ← sendRaw clientA serverA rawA echoHandler
  assertStatus "Ext token value at 256 → accepted" responseA "HTTP/1.1 200"
  -- Token value one byte over the limit → rejected.
  let (clientB, serverB) ← Mock.new
  let rawB := s!"POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;ext={val257}\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB rawB echoHandler
  assertExact "Ext token value at 257 → rejected" responseB bad400

-- Chunk extension value length limits (quoted string value).
-- The limit applies to the unquoted content length.
#eval show IO _ from do
  let val256 := String.ofList (List.replicate 256 'v')
  let val257 := String.ofList (List.replicate 257 'v')
  -- Quoted value at exactly the limit → accepted.
  let (clientA, serverA) ← Mock.new
  let rawA := s!"POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;ext=\"{val256}\"\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseA ← sendRaw clientA serverA rawA echoHandler
  assertStatus "Ext quoted value at 256 → accepted" responseA "HTTP/1.1 200"
  -- Quoted value one byte over the limit → rejected.
  let (clientB, serverB) ← Mock.new
  let rawB := s!"POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;ext=\"{val257}\"\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB rawB echoHandler
  assertExact "Ext quoted value at 257 → rejected" responseB bad400

-- Chunk extension count limits.
-- Default maxChunkExtensions = 16.
#eval show IO _ from do
  -- Build extension lists: ";ext0;ext1;...;extN"
  let exts16 := (List.range 16).foldl (fun acc i => acc ++ s!";e{i}") ""
  let exts17 := (List.range 17).foldl (fun acc i => acc ++ s!";e{i}") ""
  -- Exactly 16 extensions → accepted.
  let (clientA, serverA) ← Mock.new
  let rawA := s!"POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5{exts16}\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseA ← sendRaw clientA serverA rawA echoHandler
  assertStatus "16 extensions → accepted" responseA "HTTP/1.1 200"
  -- 17 extensions → rejected.
  let (clientB, serverB) ← Mock.new
  let rawB := s!"POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5{exts17}\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB rawB echoHandler
  assertExact "17 extensions → rejected" responseB bad400
-- maxChunkExtensions config override.
-- NOTE(review): restored the `←` binds lost in extraction.
#eval show IO _ from do
  let cfg1 : Config := { lingeringTimeout := 1000, maxChunkExtensions := 1, generateDate := false }
  -- 1 extension with limit=1 → accepted.
  let (clientA, serverA) ← Mock.new
  let rawA := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;ext1\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseA ← sendRaw clientA serverA rawA echoHandler (config := cfg1)
  assertStatus "1 ext with limit=1 → accepted" responseA "HTTP/1.1 200"
  -- 2 extensions with limit=1 → rejected.
  let (clientB, serverB) ← Mock.new
  let rawB := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;ext1;ext2\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB rawB echoHandler (config := cfg1)
  assertExact "2 exts with limit=1 → rejected" responseB bad400
  -- 0 extensions with limit=1 → accepted (no extensions is always fine).
  let (clientC, serverC) ← Mock.new
  let rawC := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseC ← sendRaw clientC serverC rawC echoHandler (config := cfg1)
  assertStatus "0 exts with limit=1 → accepted" responseC "HTTP/1.1 200"

-- Extension with no value (name-only extension token).
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;novalue\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoHandler
  assertStatus "Ext with no value → accepted" response "HTTP/1.1 200"
  -- Body is still correctly delivered.
  let text := String.fromUTF8! response
  unless text.contains "hello" do
    throw <| IO.userError "Body not delivered with name-only extension"

-- Extensions on the terminal (last) chunk are accepted.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0;final-ext=done\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoHandler
  assertStatus "Extension on last chunk → accepted" response "HTTP/1.1 200"

-- Extension with name and quoted-string value.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;ext=\"hello world\"\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoHandler
  assertStatus "Ext with quoted-string value → accepted" response "HTTP/1.1 200"

-- Extension with non-token character in name → rejected.
#eval show IO _ from do
  let (client, server) ← Mock.new
  -- '@' is not a valid token character
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;bad@name\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoHandler
  assertExact "Ext with non-token name char → rejected" response bad400

-- Multiple extensions with name=value pairs.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;a=1;b=2;c=3\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoHandler
  assertStatus "Multiple name=value extensions → accepted" response "HTTP/1.1 200"

View File

@@ -0,0 +1,435 @@
import Std.Internal.Http
import Std.Internal.Async
import Std.Internal.Async.Timer
open Std.Internal.IO Async
open Std Http
/-!
# HTTP Client Security Tests
Tests for security properties of the HTTP client:
- `Authorization` is stripped on cross-scheme redirects (same host+port, different scheme).
  Before the fix `crossOrigin` checked host+port only; an http→https redirect to the same
  host+port would silently keep the credential header.
- Streaming (`.outgoing`) request bodies must not be retried on connection failure.
A channel-backed body is consumed on first use; retrying would send an empty body.
-/
/--
Runs `action` as a background task and polls it every 10ms; fails the test with
a timeout error if it has not finished within `timeoutMs` milliseconds.
Errors raised by `action` are re-thrown on the calling thread.
(Restored the `←` binds on `IO.asTask`, `IO.getTaskState`, `IO.wait` lost in extraction.)
-/
private def runWithTimeout (name : String) (timeoutMs : Nat := 3000) (action : IO Unit) : IO Unit := do
  let task ← IO.asTask action
  let ticks := (timeoutMs + 9) / 10
  let rec loop (remaining : Nat) : IO Unit := do
    if (← IO.getTaskState task) == .finished then
      match (← IO.wait task) with
      | .ok x => pure x
      | .error err => throw err
    else
      match remaining with
      | 0 =>
        IO.cancel task
        throw <| IO.userError s!"Test '{name}' timed out after {timeoutMs}ms"
      | n + 1 =>
        IO.sleep 10
        loop n
  loop ticks
-- Build a raw HTTP/1.1 response byte string: status line, rendered headers,
-- blank line, then the body, UTF-8 encoded.
private def rawResp
    (status : String) (hdrs : Array (String × String)) (body : String) : ByteArray :=
  let renderHeader := fun (pair : String × String) => s!"{pair.1}: {pair.2}\r\n"
  let hdrLines := String.join (hdrs.toList.map renderHeader)
  s!"HTTP/1.1 {status}\r\n{hdrLines}\r\n{body}".toUTF8
-- ============================================================
-- Redirect: Authorization stripped on scheme-change redirect
-- ============================================================
-- A 302 redirect from http://example.com:443/ to https://example.com:443/r has the
-- same host and port but a different scheme. crossOrigin must be true so that the
-- Authorization header is stripped before the redirect request is sent.
-- NOTE(review): restored the `←` binds lost in extraction.
-- ============================================================
#eval show IO _ from runWithTimeout "scheme-change strips Authorization" 3000 <| Async.block do
  let (mockClient, mockServer) ← Mock.new
  let session ← Client.Session.new mockServer (config := {})
  let cookieJar ← Cookie.Jar.new
  let some domain := URI.DomainName.ofString? "example.com"
    | throw (IO.userError "DomainName parse failed")
  -- Agent with scheme=http on port 443. Redirect target https://example.com:443/r
  -- has same host+port but different scheme → crossOrigin must be true after the fix.
  let agent : Client.Agent Mock.Server := {
    session
    scheme := URI.Scheme.ofString! "http"
    host := .name domain
    port := 443
    cookieJar
  }
  let request ← Request.new
    |>.method .get
    |>.uri! "/"
    |>.header! "Host" "example.com:443"
    |>.header! "Authorization" "Bearer secret-token"
    |>.blank
  let resultPromise : IO.Promise (Except String (Response Body.Incoming)) ← IO.Promise.new
  background do
    let result : Except String (Response Body.Incoming) ← try
      let resp ← Client.Agent.send agent request
      pure (Except.ok resp)
    catch e => pure (Except.error (toString e))
    discard <| resultPromise.resolve result
  -- First exchange: drain the request, reply with 302 redirecting to HTTPS same host+port.
  let _ ← mockClient.recv?
  mockClient.send (rawResp "302 Found"
    #[("Location", "https://example.com:443/redirected"),
      ("Content-Length", "0"),
      ("Connection", "keep-alive")] "")
  -- Second exchange: receive the redirected request and capture its bytes.
  let some redirectBytes ← mockClient.recv?
    | throw (IO.userError "Test failed: no redirect request received")
  mockClient.send (rawResp "200 OK"
    #[("Content-Length", "2"), ("Connection", "close")] "ok")
  -- Wait for the agent to finish.
  match ← await resultPromise.result! with
  | Except.error e => throw (IO.userError s!"agent error: {e}")
  | Except.ok _ => pure ()
  let redirectText := String.fromUTF8! redirectBytes
  if redirectText.contains "Authorization:" then
    throw <| IO.userError
      s!"Test 'scheme-change strips Authorization' FAILED: \
      Authorization header present in redirect request\n{redirectText.quote}"
-- ============================================================
-- Redirect: Authorization preserved on same-origin redirect
-- ============================================================
-- A 302 redirect to the same scheme, host, and port is a same-origin redirect.
-- The Authorization header must NOT be stripped in this case.
-- NOTE(review): restored the `←` binds lost in extraction.
-- ============================================================
#eval show IO _ from runWithTimeout "same-origin preserves Authorization" 3000 <| Async.block do
  let (mockClient, mockServer) ← Mock.new
  let session ← Client.Session.new mockServer (config := {})
  let cookieJar ← Cookie.Jar.new
  let some domain := URI.DomainName.ofString? "example.com"
    | throw (IO.userError "DomainName parse failed")
  let agent : Client.Agent Mock.Server := {
    session
    scheme := URI.Scheme.ofString! "http"
    host := .name domain
    port := 80
    cookieJar
  }
  let request ← Request.new
    |>.method .get
    |>.uri! "/"
    |>.header! "Host" "example.com"
    |>.header! "Authorization" "Bearer secret-token"
    |>.blank
  let resultPromise : IO.Promise (Except String (Response Body.Incoming)) ← IO.Promise.new
  background do
    let result : Except String (Response Body.Incoming) ← try
      let resp ← Client.Agent.send agent request
      pure (Except.ok resp)
    catch e => pure (Except.error (toString e))
    discard <| resultPromise.resolve result
  -- First exchange: reply with 302 to same scheme+host+port.
  let _ ← mockClient.recv?
  mockClient.send (rawResp "302 Found"
    #[("Location", "http://example.com/same-origin"),
      ("Content-Length", "0"),
      ("Connection", "keep-alive")] "")
  -- Second exchange: receive the redirected request.
  let some redirectBytes ← mockClient.recv?
    | throw (IO.userError "Test failed: no redirect request received")
  mockClient.send (rawResp "200 OK"
    #[("Content-Length", "2"), ("Connection", "close")] "ok")
  match ← await resultPromise.result! with
  | Except.error e => throw (IO.userError s!"agent error: {e}")
  | Except.ok _ => pure ()
  let redirectText := String.fromUTF8! redirectBytes
  unless redirectText.contains "Authorization:" do
    throw <| IO.userError
      s!"Test 'same-origin preserves Authorization' FAILED: \
      Authorization header was stripped on same-origin redirect\n{redirectText.quote}"
-- ============================================================
-- Body replay classification
-- ============================================================
-- Verifies that only `.outgoing` (channel-backed streaming) bodies are classified as
-- non-replayable, while `.full` (fixed bytes) and `.empty` bodies are safe to retry.
-- This mirrors the `bodyIsReplayable` check added to the retry guard in Agent.lean.
-- NOTE(review): restored the `←` binds lost in extraction.
-- ============================================================
#eval show IO _ from Async.block do
  -- .outgoing: channel is consumed on first send, must not be retried.
  let (out, _) ← Body.mkChannel
  let streamBody : Body.AnyBody := .outgoing out
  let replayable := match streamBody with | .outgoing _ => false | _ => true
  if replayable then
    throw <| IO.userError "Test 'outgoing body is not replayable' FAILED"
  -- .full: fixed ByteArray, safe to send again.
  let fullBody : Body.AnyBody := .full (← Body.Full.ofByteArray "hello".toUTF8)
  let replayable2 := match fullBody with | .outgoing _ => false | _ => true
  unless replayable2 do
    throw <| IO.userError "Test 'full body is replayable' FAILED"
  -- .empty: trivially safe.
  let emptyBody : Body.AnyBody := .empty {}
  let replayable3 := match emptyBody with | .outgoing _ => false | _ => true
  unless replayable3 do
    throw <| IO.userError "Test 'empty body is replayable' FAILED"
-- ============================================================
-- Redirect: non-HTTP/HTTPS scheme in Location is not followed
-- ============================================================
-- A 302 response with Location: ftp://internal-host/secret must not be followed.
-- Before the fix, decideRedirect accepted any scheme that RequestTarget.parse? could
-- parse and would try to connect to the ftp host on port 80 (SSRF).
-- After the fix, only http:// and https:// redirect targets are followed; everything
-- else returns the 3xx response as-is.
-- NOTE(review): restored the `←` binds lost in extraction.
-- ============================================================
#eval show IO _ from runWithTimeout "ftp:// redirect not followed" 3000 <| Async.block do
  let (mockClient, mockServer) ← Mock.new
  let session ← Client.Session.new mockServer (config := {})
  let cookieJar ← Cookie.Jar.new
  let some domain := URI.DomainName.ofString? "example.com"
    | throw (IO.userError "DomainName parse failed")
  let agent : Client.Agent Mock.Server := {
    session
    scheme := URI.Scheme.ofString! "http"
    host := .name domain
    port := 80
    cookieJar
  }
  let request ← Request.new
    |>.method .get
    |>.uri! "/"
    |>.header! "Host" "example.com"
    |>.blank
  let resultPromise : IO.Promise (Except String (Response Body.Incoming)) ← IO.Promise.new
  background do
    let result ← try
      let resp ← Client.Agent.send agent request
      pure (Except.ok resp)
    catch e => pure (Except.error (toString e))
    discard <| resultPromise.resolve result
  -- Server replies with a redirect to ftp:// (non-HTTP scheme).
  let _ ← mockClient.recv?
  mockClient.send (rawResp "302 Found"
    #[("Location", "ftp://internal-host/secret"),
      ("Content-Length", "0")] "")
  match ← await resultPromise.result! with
  | Except.error e => throw (IO.userError s!"agent error: {e}")
  | Except.ok resp =>
    -- The agent must return the 302 as-is, not follow it.
    unless resp.line.status == .found do
      throw <| IO.userError
        s!"Test 'ftp:// redirect not followed' FAILED: expected 302, got {resp.line.status.toCode}"
-- file:// redirect must likewise be returned as-is, not followed.
-- NOTE(review): restored the `←` binds lost in extraction.
#eval show IO _ from runWithTimeout "file:// redirect not followed" 3000 <| Async.block do
  let (mockClient, mockServer) ← Mock.new
  let session ← Client.Session.new mockServer (config := {})
  let cookieJar ← Cookie.Jar.new
  let some domain := URI.DomainName.ofString? "example.com"
    | throw (IO.userError "DomainName parse failed")
  let agent : Client.Agent Mock.Server := {
    session
    scheme := URI.Scheme.ofString! "http"
    host := .name domain
    port := 80
    cookieJar
  }
  let request ← Request.new
    |>.method .get
    |>.uri! "/"
    |>.header! "Host" "example.com"
    |>.blank
  let resultPromise : IO.Promise (Except String (Response Body.Incoming)) ← IO.Promise.new
  background do
    let result ← try
      let resp ← Client.Agent.send agent request
      pure (Except.ok resp)
    catch e => pure (Except.error (toString e))
    discard <| resultPromise.resolve result
  let _ ← mockClient.recv?
  mockClient.send (rawResp "301 Moved Permanently"
    #[("Location", "file:///etc/passwd"),
      ("Content-Length", "0")] "")
  match ← await resultPromise.result! with
  | Except.error e => throw (IO.userError s!"agent error: {e}")
  | Except.ok resp =>
    unless resp.line.status == .movedPermanently do
      throw <| IO.userError
        s!"Test 'file:// redirect not followed' FAILED: expected 301, got {resp.line.status.toCode}"
-- ============================================================
-- Redirect: https:// redirect IS followed (sanity check)
-- ============================================================
-- Verify that the scheme restriction doesn't accidentally block legitimate
-- https:// redirects (same host, different scheme from http to https).
-- NOTE(review): restored the `←` binds lost in extraction.
-- ============================================================
#eval show IO _ from runWithTimeout "https:// redirect is followed" 3000 <| Async.block do
  let (mockClient, mockServer) ← Mock.new
  let session ← Client.Session.new mockServer (config := {})
  let cookieJar ← Cookie.Jar.new
  let some domain := URI.DomainName.ofString? "example.com"
    | throw (IO.userError "DomainName parse failed")
  -- Agent with connectTo = none; cross-host redirects return the 3xx as-is.
  -- We use the same-host case: http://example.com:80/target (same host+port, scheme changes).
  let agent : Client.Agent Mock.Server := {
    session
    scheme := URI.Scheme.ofString! "http"
    host := .name domain
    port := 80
    cookieJar
  }
  let request ← Request.new
    |>.method .get
    |>.uri! "/"
    |>.header! "Host" "example.com"
    |>.blank
  let resultPromise : IO.Promise (Except String (Response Body.Incoming)) ← IO.Promise.new
  background do
    let result ← try
      let resp ← Client.Agent.send agent request
      pure (Except.ok resp)
    catch e => pure (Except.error (toString e))
    discard <| resultPromise.resolve result
  -- 302 to https://example.com/page — same host+port, scheme differs from http.
  -- No connectTo factory means cross-origin redirect returns 302 as-is, but at least
  -- the scheme check must not block it before that point; the 302 must be attempted.
  let _ ← mockClient.recv?
  mockClient.send (rawResp "302 Found"
    #[("Location", "https://example.com/page"),
      ("Content-Length", "0")] "")
  -- https://example.com/page resolves to port 443, which differs from port 80, so
  -- this is a cross-origin redirect. With connectTo = none the agent returns the 302
  -- as-is without issuing a second request. Run the optional mock service in the
  -- background so the main fiber is not blocked when no second request arrives.
  background do
    let redirectReqOpt ← mockClient.recv?
    if redirectReqOpt.isSome then
      mockClient.send (rawResp "200 OK"
        #[("Content-Length", "2"), ("Connection", "close")] "ok")
  match ← await resultPromise.result! with
  | Except.error e => throw (IO.userError s!"agent error: {e}")
  | Except.ok resp =>
    -- We accept either 302 (no connectTo for cross-host) or 200 (same-session follow).
    let code := resp.line.status.toCode
    unless code == 200 || code == 302 do
      throw <| IO.userError
        s!"Test 'https:// redirect is followed' FAILED: unexpected status {code}"
-- ============================================================
-- Redirect: streaming body dropped on method-preserving redirect
-- ============================================================
-- A 307 redirect preserves the original method and body. When the body is a
-- streaming channel (.outgoing) that has already been consumed by the first
-- request, it cannot be replayed. The redirect request must send no body
-- (Content-Length: 0) rather than silently retransmitting whatever bytes remain
-- in the channel (none, so it would be empty anyway — but the fix must explicitly
-- classify .outgoing as non-replayable so future semantics stay correct).
-- ============================================================
-- NOTE(review): stripped `←` binders restored on every monadic `let`.
#eval show IO _ from runWithTimeout "streaming body dropped on 307 redirect" 3000 <| Async.block do
  let (mockClient, mockServer) ← Mock.new
  let session ← Client.Session.new mockServer (config := {})
  let cookieJar ← Cookie.Jar.new
  let some domain := URI.DomainName.ofString? "example.com"
    | throw (IO.userError "DomainName parse failed")
  let agent : Client.Agent Mock.Server := {
    session
    scheme := URI.Scheme.ofString! "http"
    host := .name domain
    port := 80
    cookieJar
  }
  -- A one-shot streaming (.outgoing) body: consumed by the first send, not replayable.
  let request ← Request.new
    |>.method .put
    |>.uri! "/upload"
    |>.header! "Host" "example.com"
    |>.stream (fun out => do
      Body.Writer.send out { data := "payload".toUTF8 } false
      Body.Writer.close out)
  let resultPromise : IO.Promise (Except String (Response Body.Incoming)) ← IO.Promise.new
  background do
    let result ← try
      let resp ← Client.Agent.send agent request
      pure (Except.ok resp)
    catch e => pure (Except.error (toString e))
    discard <| resultPromise.resolve result
  -- First request: drain it completely before replying with 307.
  -- The body is Transfer-Encoding: chunked; loop until the terminating 0\r\n\r\n
  -- chunk arrives so the second recv? captures only the redirect request.
  let mut firstBytes := ByteArray.empty
  repeat
    let some chunk ← mockClient.recv?
      | throw (IO.userError "Test failed: connection closed before first request")
    firstBytes := firstBytes ++ chunk
    if (String.fromUTF8! firstBytes).endsWith "0\r\n\r\n" then break
  mockClient.send (rawResp "307 Temporary Redirect"
    #[("Location", "/new-upload"),
      ("Content-Length", "0")] "")
  -- Second request: the redirect. The streaming body is already consumed so
  -- the client must send no body (Content-Length: 0 or absent, no body bytes).
  let some redirectBytes ← mockClient.recv?
    | throw (IO.userError "Test failed: no redirect request received")
  mockClient.send (rawResp "200 OK"
    #[("Content-Length", "2"), ("Connection", "close")] "ok")
  match ← await resultPromise.result! with
  | Except.error e => throw (IO.userError s!"agent error: {e}")
  | Except.ok _ => pure ()
  let redirectText := String.fromUTF8! redirectBytes
  -- The redirect request must target /new-upload.
  unless redirectText.contains "PUT /new-upload" do
    throw <| IO.userError
      s!"Test 'streaming body dropped on 307 redirect' FAILED: expected PUT /new-upload\n{redirectText.quote}"
  -- The body must be empty: Content-Length 0 (or no body bytes after blank line).
  -- We check that "payload" does not appear in the redirect request.
  if redirectText.contains "payload" then
    throw <| IO.userError
      s!"Test 'streaming body dropped on 307 redirect' FAILED: \
        streaming body payload present in redirect request\n{redirectText.quote}"

View File

@@ -0,0 +1,630 @@
import Std.Internal.Http
import Std.Internal.Async
import Std.Internal.Async.Timer
open Std.Internal.IO Async
open Std Http
/-- A test handler is a plain function from an incoming request to a response.
NOTE(review): the scraped source lost the `→`; restored here. -/
abbrev TestHandler := Request Body.Incoming → ContextAsync (Response Body.AnyBody)
/-- Any `TestHandler` function acts as a `Server.Handler` by direct application. -/
instance : Std.Http.Server.Handler TestHandler where
  onRequest handler request := handler request
/-- Coerce a context action yielding an incoming body into one yielding `AnyBody`,
by re-wrapping the body. NOTE(review): stripped `←` restored. -/
instance : Coe (ContextAsync (Response Body.Incoming)) (ContextAsync (Response Body.AnyBody)) where
  coe action := do
    let response ← action
    pure { response with body := Body.Internal.incomingToOutgoing response.body }
/-- Same coercion as above, for plain `Async` actions. NOTE(review): stripped `←` restored. -/
instance : Coe (Async (Response Body.Incoming)) (ContextAsync (Response Body.AnyBody)) where
  coe action := do
    let response ← action
    pure { response with body := Body.Internal.incomingToOutgoing response.body }
/-- A single HTTP server test: a request, the handler that serves it, and the
exact raw response bytes expected on the wire.
NOTE(review): the `handler` field type lost its `→` in the scrape; restored. -/
structure TestCase where
  /-- Descriptive name for the test -/
  name : String
  /-- The HTTP request to send -/
  request : Request (Array Chunk)
  /-- Handler function to process the request -/
  handler : Request Body.Incoming → ContextAsync (Response Body.AnyBody)
  /-- Expected response string -/
  expected : String
  /-- Whether to use chunked encoding -/
  chunked : Bool := false
  deriving Inhabited
/-- Serialize a request head plus all of its body chunks to raw bytes.
When `chunked` is set, a zero-length terminator chunk is appended. -/
def toByteArray (req : Request (Array Chunk)) (chunked := false) : IO ByteArray := Async.block do
  -- Local helper renamed from the original inner `toByteArray`, which shadowed
  -- this definition's own name.
  let encodeChunk (buffer : Internal.ChunkedBuffer) (part : Chunk) :=
    Internal.Encode.encode .v11 buffer part
  let mut buffer := Internal.Encode.encode (v := .v11) .empty req.line
  for part in req.body do
    buffer := encodeChunk buffer part
  if chunked then
    buffer := encodeChunk buffer (Chunk.mk .empty .empty)
  return buffer.toByteArray
/-- Send multiple requests through a mock connection and return the response data.
NOTE(review): stripped `←` binders and the `→` in `onRequest`'s type restored. -/
def sendRequests (client : Mock.Client) (server : Mock.Server) (reqs : Array (Request (Array Chunk)))
    (onRequest : Request Body.Incoming → ContextAsync (Response Body.AnyBody))
    (chunked : Bool := false) : IO ByteArray := Async.block do
  let mut data := .empty
  for req in reqs do data := data ++ (← toByteArray req chunked)
  client.send data
  Std.Http.Server.serveConnection server onRequest { lingeringTimeout := 3000, generateDate := false }
    |>.run
  let res ← client.recv?
  pure <| res.getD .empty
/-- Run a single test case, comparing actual response against expected response.
Throws an `IO` error naming the test when the raw bytes differ.
NOTE(review): stripped `←`/`→` restored and the interpolated string literal
rejoined to its `s!` prefix (the scrape had split them across lines). -/
def runTest (name : String) (client : Mock.Client) (server : Mock.Server) (req : Request (Array Chunk))
    (handler : Request Body.Incoming → ContextAsync (Response Body.AnyBody)) (expected : String) (chunked : Bool := false) :
    IO Unit := do
  let response ← sendRequests client server #[req] handler chunked
  let responseData := String.fromUTF8! response
  if responseData != expected then
    throw <| IO.userError s!"Test '{name}' failed:\n\
      Expected:\n{expected.quote}\n\
      Got:\n{responseData.quote}"
/-- Allocate a fresh mock connection pair and run one `TestCase` on it.
NOTE(review): stripped `←` restored. -/
def runTestCase (tc : TestCase) : IO Unit := do
  let (client, server) ← Mock.new
  Async.block <| runTest tc.name client server tc.request tc.handler tc.expected tc.chunked
-- Request Predicates
/-- Check if request is a basic GET request to the specified URI and host.
NOTE(review): the `&&` connectives were lost in the scrape; restored so the
definition is a single conjunction again. -/
def isBasicGetRequest (req : Request Body.Incoming) (uri : String) (host : String) : Bool :=
  req.line.method == .get &&
  req.line.version == .v11 &&
  toString req.line.uri = uri &&
  req.line.headers.hasEntry (.mk "host") (.ofString! host)
/-- Check if request has a specific Content-Length header.
`length` is compared as the literal header value string, not as a number. -/
def hasContentLength (req : Request Body.Incoming) (length : String) : Bool :=
  -- Header names are normalized lowercase in this API.
  req.line.headers.hasEntry (.mk "content-length") (.ofString! length)
/-- Check if request uses chunked transfer encoding.
Returns `false` when the header is absent or fails to parse. -/
def isChunkedRequest (req : Request Body.Incoming) : Bool :=
  match req.line.headers.get? (.mk "transfer-encoding") with
  | some value =>
    match Header.TransferEncoding.parse value with
    | some encoding => encoding.isChunked
    | none => false
  | none => false
/-- Check if request has a specific header with a specific value.
Returns `false` when `name` is not a valid header name. -/
def hasHeader (req : Request Body.Incoming) (name : String) (value : String) : Bool :=
  match Header.Name.ofString? name with
  | some headerName => req.line.headers.hasEntry headerName (.ofString! value)
  | none => false
/-- Check if request method matches the expected method. -/
def hasMethod (req : Request Body.Incoming) (method : Method) : Bool :=
  -- Plain BEq comparison on the parsed request line's method.
  req.line.method == method
/-- Check if request URI matches the expected URI string. -/
def hasUri (req : Request Body.Incoming) (uri : String) : Bool :=
  -- Compares the rendered URI text, not the structured URI value.
  toString req.line.uri = uri
-- Tests
-- NOTE(review): the `&&` connectives in the handler conditions below were lost
-- in the scrape (HTML-entity mangling); restored on every two-predicate `if`.
#eval runTestCase {
  name := "GET with Content-Length"
  request := Request.new
    |>.method .get
    |>.uri! "/"
    |>.header! "Host" "example.com"
    |>.header! "Connection" "close"
    |>.header! "Content-Length" "7"
    |>.body #[.mk "survive".toUTF8 #[]]
  handler := fun req => do
    if isBasicGetRequest req "/" "example.com" && hasContentLength req "7"
    then Response.ok |>.text "ok"
    else Response.badRequest |>.text "closed"
  expected := "HTTP/1.1 200 OK\x0d\nContent-Type: text/plain; charset=utf-8\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 2\x0d\n\x0d\nok"
}
#eval runTestCase {
  name := "Simple GET request"
  request := Request.new
    |>.method .get
    |>.uri! "/api/users"
    |>.header! "Host" "api.example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun req => do
    if hasMethod req .get && hasUri req "/api/users"
    then Response.ok |>.text "users list"
    else Response.notFound |>.text ""
  expected := "HTTP/1.1 200 OK\x0d\nContent-Type: text/plain; charset=utf-8\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 10\x0d\n\x0d\nusers list"
}
#eval runTestCase {
  name := "POST with body"
  request := Request.new
    |>.method .post
    |>.uri! "/api/users"
    |>.header! "Host" "api.example.com"
    |>.header! "Content-Type" "application/json"
    |>.header! "Content-Length" "16"
    |>.header! "Connection" "close"
    |>.body #[.mk "{\"name\":\"Alice\"}".toUTF8 #[]]
  handler := fun req => do
    if hasMethod req .post && hasHeader req "Content-Type" "application/json"
    then Response.new |>.status .created |>.text "Created"
    else Response.badRequest |>.text ""
  expected := "HTTP/1.1 201 Created\x0d\nContent-Type: text/plain; charset=utf-8\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 7\x0d\n\x0d\nCreated"
}
#eval runTestCase {
  name := "DELETE request"
  request := Request.new
    |>.method .delete
    |>.uri! "/api/users/123"
    |>.header! "Host" "api.example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun req => do
    if hasMethod req .delete && hasUri req "/api/users/123"
    then Response.new |>.status .noContent |>.text ""
    else Response.notFound |>.text ""
  expected := "HTTP/1.1 204 No Content\x0d\nContent-Type: text/plain; charset=utf-8\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\n\x0d\n"
}
#eval runTestCase {
  name := "HEAD request"
  request := Request.new
    |>.method .head
    |>.uri! "/api/users"
    |>.header! "Host" "api.example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun req => do
    if hasMethod req .head
    then Response.ok |>.text ""
    else Response.notFound |>.text ""
  expected := "HTTP/1.1 200 OK\x0d\nContent-Type: text/plain; charset=utf-8\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 0\x0d\n\x0d\n"
}
#eval runTestCase {
  name := "OPTIONS request"
  request := Request.new
    |>.method .options
    |>.uri! "*"
    |>.header! "Host" "api.example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun req => do
    if hasMethod req .options
    then Response.new
      |>.status .ok
      |>.header! "Allow" "GET, POST, PUT, DELETE, OPTIONS"
      |>.text ""
    else Response.badRequest |>.text ""
  expected := "HTTP/1.1 200 OK\x0d\nAllow: GET, POST, PUT, DELETE, OPTIONS\x0d\nContent-Type: text/plain; charset=utf-8\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 0\x0d\n\x0d\n"
}
#eval runTestCase {
  name := "Request with multiple headers"
  request := Request.new
    |>.method .get
    |>.uri! "/api/data"
    |>.header! "Host" "api.example.com"
    |>.header! "Accept" "application/json"
    |>.header! "User-Agent" "TestClient/1.0"
    |>.header! "Authorization" "Bearer token123"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun req => do
    if hasHeader req "Authorization" "Bearer token123" && hasHeader req "Accept" "application/json"
    then Response.ok |>.text "authenticated"
    else Response.new |>.status .unauthorized |>.text "unauthorized"
  expected := "HTTP/1.1 200 OK\x0d\nContent-Type: text/plain; charset=utf-8\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 13\x0d\n\x0d\nauthenticated"
}
#eval runTestCase {
  name := "Request with query parameters"
  request := Request.new
    |>.method .get
    |>.uri! "/api/search?q=test&limit=10"
    |>.header! "Host" "api.example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun req => do
    if hasUri req "/api/search?q=test&limit=10"
    then Response.ok |>.text "search results"
    else Response.notFound |>.text ""
  expected := "HTTP/1.1 200 OK\x0d\nContent-Type: text/plain; charset=utf-8\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 14\x0d\n\x0d\nsearch results"
}
#eval runTestCase {
  name := "POST with empty body"
  request := Request.new
    |>.method .post
    |>.uri! "/api/trigger"
    |>.header! "Host" "api.example.com"
    |>.header! "Content-Length" "0"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun req => do
    if hasMethod req .post && hasContentLength req "0"
    then Response.new |>.status .accepted |>.text "triggered"
    else Response.badRequest |>.text ""
  expected := "HTTP/1.1 202 Accepted\x0d\nContent-Type: text/plain; charset=utf-8\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 9\x0d\n\x0d\ntriggered"
}
#eval runTestCase {
  name := "Large response body"
  request := Request.new
    |>.method .get
    |>.uri! "/api/large"
    |>.header! "Host" "api.example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun _ => do
    let largeBody := String.ofList (List.replicate 1000 'X')
    Response.ok |>.text largeBody
  expected := s!"HTTP/1.1 200 OK\x0d\nContent-Type: text/plain; charset=utf-8\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 1000\x0d\n\x0d\n{String.ofList (List.replicate 1000 'X')}"
}
#eval runTestCase {
  name := "Custom status code"
  request := Request.new
    |>.method .get
    |>.uri! "/api/teapot"
    |>.header! "Host" "api.example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun _ => do
    Response.new
      |>.status .imATeapot
      |>.text "I'm a teapot"
  expected := "HTTP/1.1 418 I'm a teapot\x0d\nContent-Type: text/plain; charset=utf-8\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 12\x0d\n\x0d\nI'm a teapot"
}
#eval runTestCase {
  name := "Request with special characters in URI"
  request := Request.new
    |>.method .get
    |>.uri! "/api/users/%C3%A9"
    |>.header! "Host" "api.example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun req => do
    if hasUri req "/api/users/%C3%A9"
    then Response.ok |>.text "found"
    else Response.notFound |>.text ""
  expected := "HTTP/1.1 200 OK\x0d\nContent-Type: text/plain; charset=utf-8\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 5\x0d\n\x0d\nfound"
}
#eval runTestCase {
  name := "Response with custom headers"
  request := Request.new
    |>.method .get
    |>.uri! "/api/data"
    |>.header! "Host" "api.example.com"
    |>.header! "Connection" "close"
    |>.header! "Cache-Control" "no-cache"
    |>.body #[]
  handler := fun _ => do
    Response.new
      |>.status .ok
      |>.header! "Cache-Control" "no-cache"
      |>.header! "X-Custom-Header" "custom-value"
      |>.text "data"
  expected := "HTTP/1.1 200 OK\x0d\nCache-Control: no-cache\x0d\nX-Custom-Header: custom-value\x0d\nContent-Type: text/plain; charset=utf-8\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 4\x0d\n\x0d\ndata"
}
#eval runTestCase {
  name := "Request with Content-Type and body"
  request := Request.new
    |>.method .post
    |>.uri! "/api/xml"
    |>.header! "Host" "api.example.com"
    |>.header! "Content-Type" "application/xml"
    |>.header! "Content-Length" "17"
    |>.header! "Connection" "close"
    |>.body #[.mk "<data>test</data>".toUTF8 #[]]
  handler := fun req => do
    if hasHeader req "Content-Type" "application/xml"
    then Response.ok |>.text "processed xml"
    else Response.new |>.status .unsupportedMediaType |>.text "unsupported"
  expected := "HTTP/1.1 200 OK\x0d\nContent-Type: text/plain; charset=utf-8\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 13\x0d\n\x0d\nprocessed xml"
}
-- Limits
-- Each case below drives one of the server's parser limits past its maximum.
-- The handler's success path must never be reflected on the wire: the server
-- answers 400 / 431 itself.
#eval
  -- 257-byte header name made of 'A' (65) bytes — one past a 256-byte limit,
  -- presumably; TODO confirm the configured maximum.
  let bigString := String.fromUTF8! (ByteArray.mk (Array.ofFn (n := 257) (fun _ => 65)))
  runTestCase {
    name := "Huge String request"
    request := Request.new
      |>.method .head
      |>.uri! "/api/users"
      |>.header! "Host" "api.example.com"
      |>.header! bigString "a"
      |>.header! "Connection" "close"
      |>.body #[]
    handler := fun req => do
      if hasMethod req .head
      then Response.ok
        |>.header (.ofString! bigString) (.ofString! "ata")
        |>.text ""
      else Response.notFound |>.text ""
    expected := "HTTP/1.1 400 Bad Request\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 0\x0d\n\x0d\n"
  }
#eval runTestCase {
  name := "Request line too long"
  -- 2000-character path overflows the request-line limit.
  request :=
    Request.new
      |>.method .get
      |>.uri (.originForm (.mk #[URI.EncodedString.encode <| String.ofList (List.replicate 2000 'a')] true), none)
      |>.header! "Host" "api.example.com"
      |>.header! "Connection" "close"
      |>.body #[]
  handler := fun req => do
    Response.ok |>.text (toString (toString req.line.uri).length)
  expected := "HTTP/1.1 400 Bad Request\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 0\x0d\n\x0d\n"
}
#eval runTestCase {
  name := "Header long"
  -- Single 8230-byte header value overflows the per-header limit.
  request :=
    Request.new
      |>.method .get
      |>.uri (.originForm (.mk #[URI.EncodedString.encode <| String.ofList (List.replicate 200 'a')] true), none)
      |>.header! "Host" (String.ofList (List.replicate 8230 'a'))
      |>.header! "Connection" "close"
      |>.body #[]
  handler := fun req => do
    Response.ok |>.text (toString (toString req.line.uri).length)
  expected := "HTTP/1.1 400 Bad Request\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 0\x0d\n\x0d\n"
}
#eval runTestCase {
  name := "Too many headers"
  -- 101 extra headers exceeds the header-count limit → 431.
  request := Id.run do
    let mut req := Request.new
      |>.method .get
      |>.uri! "/api/data"
      |>.header! "Host" "api.example.com"
      |>.header! "Connection" "close"
    for i in [0:101] do
      req := req |>.header! s!"X-Header-{i}" s!"value{i}"
    return req |>.body #[]
  handler := fun _ => do
    Response.ok |>.text "success"
  expected := "HTTP/1.1 431 Request Header Fields Too Large\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 0\x0d\n\x0d\n"
}
#eval runTestCase {
  name := "Header value too long"
  request := Request.new
    |>.method .get
    |>.uri! "/api/test"
    |>.header! "Host" "api.example.com"
    |>.header! "X-Long-Value" (String.ofList (List.replicate 9000 'x'))
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun _ => do
    Response.ok |>.text "ok"
  expected := "HTTP/1.1 400 Bad Request\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 0\x0d\n\x0d\n"
}
#eval runTestCase {
  name := "Total headers size too large"
  -- 200 × 200-byte values: each header is within limits but the total is not → 431.
  request := Id.run do
    let mut req := Request.new
      |>.method .get
      |>.uri! "/api/data"
      |>.header! "Host" "api.example.com"
      |>.header! "Connection" "close"
    for i in [0:200] do
      req := req |>.header! s!"X-Header-{i}" (String.ofList (List.replicate 200 'a'))
    return req |>.body #[]
  handler := fun _ => do
    Response.ok |>.text "success"
  expected := "HTTP/1.1 431 Request Header Fields Too Large\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 0\x0d\n\x0d\n"
}
-- Streaming response tests
-- NOTE(review): stripped `←` binders restored before `Body.mkChannel` and `Sleep.mk`.
#eval runTestCase {
  name := "Streaming response with fixed Content-Length"
  request := Request.new
    |>.method .get
    |>.uri! "/stream"
    |>.header! "Host" "example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun _ => do
    let (stream, _incoming) ← Body.mkChannel
    background do
      for i in [0:3] do
        let sleep ← Sleep.mk 5
        sleep.wait
        stream.send <| Chunk.ofByteArray s!"chunk{i}\n".toUTF8
      stream.close
    return Response.ok
      |>.header (.mk "content-length") (.mk "21")
      |>.body stream
  expected := "HTTP/1.1 200 OK\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 21\x0d\n\x0d\nchunk0\nchunk1\nchunk2\n"
}
#eval runTestCase {
  name := "Streaming response with setKnownSize fixed"
  request := Request.new
    |>.method .get
    |>.uri! "/stream-sized"
    |>.header! "Host" "example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun _ => do
    let (stream, _incoming) ← Body.mkChannel
    stream.setKnownSize (some (.fixed 15))
    background do
      for i in [0:3] do
        stream.send <| Chunk.ofByteArray s!"data{i}".toUTF8
      stream.close
    return Response.ok
      |>.body stream
  expected := "HTTP/1.1 200 OK\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 15\x0d\n\x0d\ndata0data1data2"
}
#eval runTestCase {
  name := "Streaming response with chunked encoding"
  request := Request.new
    |>.method .get
    |>.uri! "/stream-chunked"
    |>.header! "Host" "example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun _ => do
    let (stream, _incoming) ← Body.mkChannel
    background do
      stream.send <| Chunk.ofByteArray "hello".toUTF8
      stream.send <| Chunk.ofByteArray "world".toUTF8
      stream.close
    return Response.ok
      |>.body stream
  expected := "HTTP/1.1 200 OK\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nTransfer-Encoding: chunked\x0d\n\x0d\n5\x0d\nhello\x0d\n5\x0d\nworld\x0d\n0\x0d\n\x0d\n"
}
#eval runTestCase {
  name := "Chunked request with streaming response"
  request := Request.new
    |>.method .post
    |>.uri! "/"
    |>.header! "Host" "example.com"
    |>.header! "Transfer-Encoding" "chunked"
    |>.header! "Connection" "close"
    |>.body #[
      .mk "data1".toUTF8 #[],
      .mk "data2".toUTF8 #[]
    ]
  handler := fun req => do
    let (stream, _incoming) ← Body.mkChannel
    if isChunkedRequest req
    then
      background do
        for i in [0:2] do
          stream.send <| Chunk.ofByteArray s!"response{i}".toUTF8
        stream.close
      return Response.ok
        |>.header (.mk "content-length") (.mk "18")
        |>.body stream
    else
      stream.send <| Chunk.ofByteArray "not chunked".toUTF8
      stream.close
      return Response.badRequest
        |>.body stream
  expected := "HTTP/1.1 200 OK\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 18\x0d\n\x0d\nresponse0response1"
  chunked := true
}
#eval runTestCase {
  name := "Fixed-length response overflow closes connection"
  request := Request.new
    |>.method .get
    |>.uri! "/overflow"
    |>.header! "Host" "example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun _ => do
    let (stream, _incoming) ← Body.mkChannel
    background do
      -- Sends 6 bytes against a declared Content-Length of 3; the server must
      -- stop at the header and close rather than overflow the framing.
      stream.send <| Chunk.ofByteArray "abcdef".toUTF8
      stream.close
    return Response.ok
      |>.header (.mk "content-length") (.mk "3")
      |>.body stream
  expected := "HTTP/1.1 200 OK\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 3\x0d\n\x0d\n"
}

View File

@@ -0,0 +1,255 @@
import Std.Internal.Http.Data.Cookie
open Std.Http
/-!
# Cookie Parser Tests
Tests for `Set-Cookie` header parsing following RFC 6265 §4.1.
-/
/-- Parse a `Set-Cookie` header value, throwing an `IO` error on parse failure. -/
def parseCookie (s : String) : IO Cookie.Parser.Parsed :=
  IO.ofExcept (Cookie.Parser.parseSetCookie.run s.toUTF8)
/-- Assert that parsing `s` as a `Set-Cookie` value fails; `label` names the test
in the error message when it unexpectedly succeeds. -/
def parseShouldFail (label : String) (s : String) : IO Unit := do
  if let .ok _ := Cookie.Parser.parseSetCookie.run s.toUTF8 then
    throw <| IO.userError s!"Test '{label}' failed: expected parse failure but succeeded"
-- ============================================================================
-- Basic cookie-pair
-- NOTE(review): stripped `←` binders restored on every `parseCookie` bind below.
-- ============================================================================
#eval show IO _ from do
  -- Minimal name=value
  let p ← parseCookie "foo=bar"
  unless p.name == "foo" do
    throw <| IO.userError s!"Test 'basic name' failed: expected 'foo', got {p.name.quote}"
  unless p.value == "bar" do
    throw <| IO.userError s!"Test 'basic value' failed: expected 'bar', got {p.value.quote}"
  unless p.domain == none do
    throw <| IO.userError s!"Test 'basic domain absent' failed: expected none, got {repr p.domain}"
  unless p.path == none do
    throw <| IO.userError s!"Test 'basic path absent' failed: expected none, got {repr p.path}"
  unless p.secure == false do
    throw <| IO.userError "Test 'basic secure absent' failed: expected false"
  -- Empty value is allowed (cookie-value = *cookie-octet)
  let pEmpty ← parseCookie "session="
  unless pEmpty.value == "" do
    throw <| IO.userError s!"Test 'empty value' failed: expected '', got {pEmpty.value.quote}"
  -- tchar includes DIGIT, so "123" is a valid token name
  let pNum ← parseCookie "123=abc"
  unless pNum.name == "123" do
    throw <| IO.userError s!"Test 'numeric name' failed: expected '123', got {pNum.name.quote}"
-- ============================================================================
-- Quoted cookie values
-- ============================================================================
#eval show IO _ from do
  -- Double-quoted value: quotes are stripped, inner value is returned
  let p ← parseCookie "id=\"abc123\""
  unless p.value == "abc123" do
    throw <| IO.userError s!"Test 'quoted value' failed: expected 'abc123', got {p.value.quote}"
  -- Empty quoted value
  let pEq ← parseCookie "id=\"\""
  unless pEq.value == "" do
    throw <| IO.userError s!"Test 'empty quoted value' failed: expected '', got {pEq.value.quote}"
  -- Quoted value with all valid cookie-octets (excluding DQUOTE, SP, comma, semicolon, backslash)
  let pOctets ← parseCookie "t=\"!#$%&'*+-.^_`|~\""
  unless pOctets.value == "!#$%&'*+-.^_`|~" do
    throw <| IO.userError s!"Test 'quoted cookie-octets' failed: got {pOctets.value.quote}"
-- ============================================================================
-- Domain attribute
-- ============================================================================
#eval show IO _ from do
  -- Domain present
  let p ← parseCookie "x=y; Domain=example.com"
  unless p.domain == some "example.com" do
    throw <| IO.userError s!"Test 'domain' failed: expected some \"example.com\", got {repr p.domain}"
  -- Leading dot is stripped per RFC 6265 §5.2.3
  let pDot ← parseCookie "x=y; Domain=.example.com"
  unless pDot.domain == some "example.com" do
    throw <| IO.userError s!"Test 'domain leading dot stripped' failed: expected some \"example.com\", got {repr pDot.domain}"
  -- Empty domain attribute → domain is none
  let pEmpty ← parseCookie "x=y; Domain="
  unless pEmpty.domain == none do
    throw <| IO.userError s!"Test 'empty domain' failed: expected none, got {repr pEmpty.domain}"
  -- Dot-only domain → stripped to empty → domain is none
  let pDotOnly ← parseCookie "x=y; Domain=."
  unless pDotOnly.domain == none do
    throw <| IO.userError s!"Test 'dot-only domain' failed: expected none, got {repr pDotOnly.domain}"
-- ============================================================================
-- Path attribute
-- ============================================================================
#eval show IO _ from do
  -- Valid path starting with /
  let p ← parseCookie "x=y; Path=/foo/bar"
  unless p.path == some "/foo/bar" do
    throw <| IO.userError s!"Test 'path' failed: expected some \"/foo/bar\", got {repr p.path}"
  -- Root path
  let pRoot ← parseCookie "x=y; Path=/"
  unless pRoot.path == some "/" do
    throw <| IO.userError s!"Test 'root path' failed: expected some \"/\", got {repr pRoot.path}"
  -- Path not starting with / → none per RFC 6265 §5.2.4
  let pNoSlash ← parseCookie "x=y; Path=noslash"
  unless pNoSlash.path == none do
    throw <| IO.userError s!"Test 'path without leading slash' failed: expected none, got {repr pNoSlash.path}"
  -- Empty path → none
  let pEmpty ← parseCookie "x=y; Path="
  unless pEmpty.path == none do
    throw <| IO.userError s!"Test 'empty path' failed: expected none, got {repr pEmpty.path}"
-- ============================================================================
-- Secure attribute
-- ============================================================================
#eval show IO _ from do
  -- Secure present
  let p ← parseCookie "x=y; Secure"
  unless p.secure == true do
    throw <| IO.userError "Test 'secure' failed: expected true"
  -- Secure absent
  let pNo ← parseCookie "x=y"
  unless pNo.secure == false do
    throw <| IO.userError "Test 'secure absent' failed: expected false"
  -- Secure= (with a value — treated as Secure since we match the attr name)
  let pVal ← parseCookie "x=y; Secure=true"
  unless pVal.secure == true do
    throw <| IO.userError "Test 'secure with value' failed: expected true"
-- ============================================================================
-- Combined attributes
-- ============================================================================
#eval show IO _ from do
  let p ← parseCookie "sessionId=abc123; Domain=example.com; Path=/app; Secure"
  unless p.name == "sessionId" do
    throw <| IO.userError s!"Test 'combined name' failed: got {p.name.quote}"
  unless p.value == "abc123" do
    throw <| IO.userError s!"Test 'combined value' failed: got {p.value.quote}"
  unless p.domain == some "example.com" do
    throw <| IO.userError s!"Test 'combined domain' failed: got {repr p.domain}"
  unless p.path == some "/app" do
    throw <| IO.userError s!"Test 'combined path' failed: got {repr p.path}"
  unless p.secure == true do
    throw <| IO.userError "Test 'combined secure' failed: expected true"
-- ============================================================================
-- Case-insensitive attribute names (RFC 6265 §5.2)
-- ============================================================================
#eval show IO _ from do
  -- Uppercase attribute names must be recognized
  let p ← parseCookie "x=y; DOMAIN=example.com; PATH=/; SECURE"
  unless p.domain == some "example.com" do
    throw <| IO.userError s!"Test 'uppercase domain' failed: got {repr p.domain}"
  unless p.path == some "/" do
    throw <| IO.userError s!"Test 'uppercase path' failed: got {repr p.path}"
  unless p.secure == true do
    throw <| IO.userError "Test 'uppercase secure' failed: expected true"
  -- Mixed-case attribute names
  let pMixed ← parseCookie "x=y; Domain=a.com; Secure; Path=/x"
  unless pMixed.domain == some "a.com" do
    throw <| IO.userError s!"Test 'mixed-case domain' failed: got {repr pMixed.domain}"
  unless pMixed.secure == true do
    throw <| IO.userError "Test 'mixed-case secure' failed: expected true"
  unless pMixed.path == some "/x" do
    throw <| IO.userError s!"Test 'mixed-case path' failed: got {repr pMixed.path}"
-- ============================================================================
-- Unknown attributes are silently ignored
-- ============================================================================
#eval show IO _ from do
  -- HttpOnly is silently ignored
  let p ← parseCookie "x=y; HttpOnly"
  unless p.name == "x" && p.value == "y" do
    throw <| IO.userError s!"Test 'HttpOnly ignored' failed: name={p.name.quote} value={p.value.quote}"
  -- Expires is silently ignored
  let pExp ← parseCookie "x=y; Expires=Thu, 01 Jan 2026 00:00:00 GMT"
  unless pExp.name == "x" && pExp.value == "y" do
    throw <| IO.userError s!"Test 'Expires ignored' failed"
  -- SameSite is silently ignored
  let pSS ← parseCookie "x=y; SameSite=Strict"
  unless pSS.name == "x" && pSS.value == "y" do
    throw <| IO.userError s!"Test 'SameSite ignored' failed"
  -- Max-Age is silently ignored
  let pMaxAge ← parseCookie "x=y; Max-Age=3600"
  unless pMaxAge.name == "x" && pMaxAge.value == "y" do
    throw <| IO.userError s!"Test 'Max-Age ignored' failed"
  -- Multiple unknown attributes
  let pMulti ← parseCookie "x=y; Foo=bar; HttpOnly; Baz; Path=/p"
  unless pMulti.path == some "/p" do
    throw <| IO.userError s!"Test 'unknown attrs + path' failed: got {repr pMulti.path}"
-- ============================================================================
-- Duplicate attribute handling (last-write-wins is fine, RFC 6265 §5.3)
-- ============================================================================
#eval show IO _ from do
  -- Last Domain wins
  let p ← parseCookie "x=y; Domain=first.com; Domain=second.com"
  unless p.domain == some "second.com" do
    throw <| IO.userError s!"Test 'duplicate domain last wins' failed: got {repr p.domain}"
  -- Last Path wins
  let pPath ← parseCookie "x=y; Path=/first; Path=/second"
  unless pPath.path == some "/second" do
    throw <| IO.userError s!"Test 'duplicate path last wins' failed: got {repr pPath.path}"
-- ============================================================================
-- Semicolon spacing: RFC 6265 §4.1 allows optional SP after ";"
-- ============================================================================
#eval show IO _ from do
  -- No space after semicolon
  let pNoSp ← parseCookie "x=y;Secure"
  unless pNoSp.secure == true do
    throw <| IO.userError "Test 'no space after semicolon' failed: expected Secure=true"
  -- Space after semicolon (standard)
  let pSp ← parseCookie "x=y; Secure"
  unless pSp.secure == true do
    throw <| IO.userError "Test 'space after semicolon' failed: expected Secure=true"
-- ============================================================================
-- Invalid cookie names → parse failure
-- ============================================================================
#eval show IO _ from do
  -- Empty name is not a valid token (token = 1*tchar, requires at least one char)
  parseShouldFail "empty name" "=value"
  -- Space in name (space is not a tchar)
  parseShouldFail "space in name" "foo bar=value"
  -- Semicolon in name (not a tchar)
  parseShouldFail "semicolon in name" "foo;bar=value"
  -- Missing '=' separator
  parseShouldFail "missing equals" "nameonly"

View File

@@ -119,48 +119,69 @@ info: "999 Unknown"
/-! ## Request.line encoding -/
/--
info: "GET /path HTTP/1.1\x0d\n"
info: ""
-/
#guard_msgs in
#eval encodeStr ({ method := .get, version := .v11, uri := "/path" } : Request.Head)
#eval encodeStr Headers.empty
/--
info: "POST /submit HTTP/1.1\x0d\n"
info: "Content-Type: text/html\x0d\n"
-/
#guard_msgs in
#eval encodeStr ({ method := .post, version := .v11, uri := "/submit" } : Request.Head)
#eval encodeStr (Headers.empty.insert! "content-type" "text/html")
/--
info: "PUT /resource HTTP/2.0\x0d\n"
info: "X-Custom-Header: value\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Headers.empty.insert! "x-custom-header" "value")
/--
info: "GET /path HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr ({ method := .get, version := .v11, uri := .parse! "/path" } : Request.Head)
/--
info: "POST /submit HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr ({ method := .post, version := .v11, uri := .parse! "/submit" } : Request.Head)
/--
info: "PUT /resource HTTP/2.0\x0d\nContent-Type: application/json\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr ({
method := .put
version := .v20
uri := "/resource"
uri := .parse! "/resource"
headers := Headers.empty.insert! "content-type" "application/json"
} : Request.Head)
/-! ## Response.line encoding -/
/--
info: "HTTP/1.1 200 OK\x0d\n"
info: "HTTP/1.1 200 OK\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr ({ status := .ok, version := .v11 } : Response.Head)
/--
info: "HTTP/1.1 404 Not Found\x0d\n"
info: "HTTP/1.1 404 Not Found\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr ({ status := .notFound, version := .v11 } : Response.Head)
/--
info: "HTTP/2.0 500 Internal Server Error\x0d\n"
info: "HTTP/2.0 500 Internal Server Error\x0d\nContent-Type: text/plain\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr ({
status := .internalServerError
version := .v20
headers := Headers.empty.insert! "content-type" "text/plain"
} : Response.Head)
/-! ## Chunk encoding -/
@@ -198,150 +219,150 @@ info: "a\x0d\n0123456789\x0d\n"
/-! ## Request builder -/
/--
info: "GET /index.html HTTP/1.1\x0d\n"
info: "GET /index.html HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Request.get "/index.html" |>.body ()).line
#eval encodeStr (Request.get (.parse! "/index.html") |>.body ()).line
/--
info: "POST /api/data HTTP/1.1\x0d\n"
info: "POST /api/data HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Request.post "/api/data" |>.body ()).line
#eval encodeStr (Request.post (.parse! "/api/data") |>.body ()).line
/--
info: "PUT /resource HTTP/1.1\x0d\n"
info: "PUT /resource HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Request.put "/resource" |>.body ()).line
#eval encodeStr (Request.put (.parse! "/resource") |>.body ()).line
/--
info: "DELETE /item HTTP/1.1\x0d\n"
info: "DELETE /item HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Request.delete "/item" |>.body ()).line
#eval encodeStr (Request.delete (.parse! "/item") |>.body ()).line
/--
info: "PATCH /update HTTP/1.1\x0d\n"
info: "PATCH /update HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Request.patch "/update" |>.body ()).line
#eval encodeStr (Request.patch (.parse! "/update") |>.body ()).line
/--
info: "HEAD /check HTTP/1.1\x0d\n"
info: "HEAD /check HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Request.head "/check" |>.body ()).line
#eval encodeStr (Request.head (.parse! "/check") |>.body ()).line
/--
info: "OPTIONS * HTTP/1.1\x0d\n"
info: "OPTIONS * HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Request.options "*" |>.body ()).line
#eval encodeStr (Request.options (.parse! "*") |>.body ()).line
/--
info: "CONNECT proxy:8080 HTTP/1.1\x0d\n"
info: "CONNECT proxy:8080 HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Request.connect "proxy:8080" |>.body ()).line
#eval encodeStr (Request.connect (.parse! "proxy:8080") |>.body ()).line
/--
info: "TRACE /debug HTTP/1.1\x0d\n"
info: "TRACE /debug HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Request.trace "/debug" |>.body ()).line
#eval encodeStr (Request.trace (.parse! "/debug") |>.body ()).line
/--
info: "POST /v2 HTTP/2.0\x0d\n"
info: "POST /v2 HTTP/2.0\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Request.new |>.method .post |>.uri "/v2" |>.version .v20 |>.body ()).line
#eval encodeStr (Request.new |>.method .post |>.uri (.parse! "/v2") |>.version .v20 |>.body ()).line
/-! ## Response builder -/
/--
info: "HTTP/1.1 200 OK\x0d\n"
info: "HTTP/1.1 200 OK\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Response.ok |>.body ()).line
/--
info: "HTTP/1.1 404 Not Found\x0d\n"
info: "HTTP/1.1 404 Not Found\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Response.notFound |>.body ()).line
/--
info: "HTTP/1.1 500 Internal Server Error\x0d\n"
info: "HTTP/1.1 500 Internal Server Error\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Response.internalServerError |>.body ()).line
/--
info: "HTTP/1.1 400 Bad Request\x0d\n"
info: "HTTP/1.1 400 Bad Request\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Response.badRequest |>.body ()).line
/--
info: "HTTP/1.1 201 Created\x0d\n"
info: "HTTP/1.1 201 Created\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Response.created |>.body ()).line
/--
info: "HTTP/1.1 202 Accepted\x0d\n"
info: "HTTP/1.1 202 Accepted\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Response.accepted |>.body ()).line
/--
info: "HTTP/1.1 401 Unauthorized\x0d\n"
info: "HTTP/1.1 401 Unauthorized\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Response.unauthorized |>.body ()).line
/--
info: "HTTP/1.1 403 Forbidden\x0d\n"
info: "HTTP/1.1 403 Forbidden\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Response.forbidden |>.body ()).line
/--
info: "HTTP/1.1 409 Conflict\x0d\n"
info: "HTTP/1.1 409 Conflict\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Response.conflict |>.body ()).line
/--
info: "HTTP/1.1 503 Service Unavailable\x0d\n"
info: "HTTP/1.1 503 Service Unavailable\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Response.serviceUnavailable |>.body ()).line
/--
info: "HTTP/1.1 418 I'm a teapot\x0d\n"
info: "HTTP/1.1 418 I'm a teapot\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Response.withStatus .imATeapot |>.body ()).line
/-! ## Edge cases: Status encoding -/
-- Status.other 0: minimum possible value
-- Status.other 104: minimum valid non-known code (100103 are all named)
/--
info: "999 Unknown"
-/
#guard_msgs in
#eval encodeStr (Status.other 999, "Unknown", by decide, by decide, by decide)
-- Status.other that overlaps with a named status (100 = Continue)
-- Status.other 209: non-named code between two known blocks
/--
info: "888 Unknown"
-/
#guard_msgs in
#eval encodeStr (Status.other 888, "Unknown", by decide, by decide, by decide)
-- Status.other max UInt16
-- Status.other 999: maximum valid code
/--
info: "999 Unknown"
-/
@@ -440,65 +461,154 @@ info: "3;a;b=1\x0d\nabc\x0d\n"
#guard_msgs in
#eval encodeStr ({ data := "abc".toUTF8, extensions := #[(Chunk.ExtensionName.mk "a", none), (Chunk.ExtensionName.mk "b", some (.ofString! "1"))] } : Chunk)
/-! ## Trailer encoding -/
-- Empty trailer: terminal chunk + CRLF
/--
info: "0\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr Trailer.empty
-- Trailer with a single header
/--
info: "0\x0d\nChecksum: abc123\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Trailer.empty.insert! "checksum" "abc123")
-- Trailer with a single header
/--
info: "0\x0d\nChecksum: abc 123\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Trailer.empty.insert! "checksum" "abc 123")
-- Trailer with multiple headers
/--
info: "0\x0d\nChecksum: abc123\x0d\nExpires: Thu, 01 Dec 2025 16:00:00 GMT\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Trailer.empty.insert! "checksum" "abc123" |>.insert! "expires" "Thu, 01 Dec 2025 16:00:00 GMT")
/-! ## Edge cases: Trailer validation -/
-- Empty header name is rejected
/--
info: true
-/
#guard_msgs in
#eval (Header.Name.ofString? "" |>.isNone : Bool)
-- Header name with spaces is rejected
/--
info: true
-/
#guard_msgs in
#eval (Header.Name.ofString? "bad name" |>.isNone : Bool)
-- Header name with colon is rejected
/--
info: true
-/
#guard_msgs in
#eval (Header.Name.ofString? "bad:name" |>.isNone : Bool)
-- Header name with newline is rejected
/--
info: true
-/
#guard_msgs in
#eval (Header.Name.ofString? "bad\nname" |>.isNone : Bool)
-- Header value with newline is rejected
/--
info: true
-/
#guard_msgs in
#eval (Header.Value.ofString? "bad\nvalue" |>.isNone : Bool)
-- Header value with null byte is rejected
/--
info: true
-/
#guard_msgs in
#eval (Header.Value.ofString? "bad\x00value" |>.isNone : Bool)
-- Header value with carriage return is rejected
/--
info: true
-/
#guard_msgs in
#eval (Header.Value.ofString? "bad\rvalue" |>.isNone : Bool)
-- Valid header name succeeds
/--
info: true
-/
#guard_msgs in
#eval (Header.Name.ofString? "content-type" |>.isSome : Bool)
-- Valid header value with tab succeeds (tab is allowed per RFC)
/--
info: true
-/
#guard_msgs in
#eval (Header.Value.ofString? "value\twith-tab" |>.isSome : Bool)
-- Empty header value is valid
/--
info: true
-/
#guard_msgs in
#eval (Header.Value.ofString? "" |>.isSome : Bool)
-- Header value with DEL character (0x7F) is rejected
/--
info: true
-/
#guard_msgs in
#eval (Header.Value.ofString? (String.ofList [Char.ofNat 0x7F]) |>.isNone : Bool)
/-! ## Edge cases: Request URI encoding -/
-- URI with query parameters
/--
info: "GET /search?q=hello&lang=en HTTP/1.1\x0d\n"
info: "GET /search?q=hello&lang=en HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr ({ method := .get, version := .v11, uri := "/search?q=hello&lang=en" } : Request.Head)
-- URI with fragment
/--
info: "GET /page#section HTTP/1.1\x0d\n"
-/
#guard_msgs in
#eval encodeStr ({ method := .get, version := .v11, uri := "/page#section" } : Request.Head)
#eval encodeStr ({ method := .get, version := .v11, uri := .parse! "/search?q=hello&lang=en" } : Request.Head)
-- URI with percent-encoded characters
/--
info: "GET /path%20with%20spaces HTTP/1.1\x0d\n"
info: "GET /path%20with%20spaces HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr ({ method := .get, version := .v11, uri := "/path%20with%20spaces" } : Request.Head)
-- URI with special characters (brackets, colons)
/--
info: "GET /api/v1/users/[id]:action HTTP/1.1\x0d\n"
-/
#guard_msgs in
#eval encodeStr ({ method := .get, version := .v11, uri := "/api/v1/users/[id]:action" } : Request.Head)
-- Empty URI
/--
info: "GET HTTP/1.1\x0d\n"
-/
#guard_msgs in
#eval encodeStr ({ method := .get, version := .v11, uri := "" } : Request.Head)
#eval encodeStr ({ method := .get, version := .v11, uri :=.parse! "/path%20with%20spaces" } : Request.Head)
/-! ## Edge cases: Response with unusual statuses -/
/--
info: "HTTP/1.1 100 Continue\x0d\n"
info: "HTTP/1.1 100 Continue\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr ({ status := .«continue», version := .v11 } : Response.Head)
/--
info: "HTTP/1.1 204 No Content\x0d\n"
info: "HTTP/1.1 204 No Content\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr ({ status := .noContent, version := .v11 } : Response.Head)
/--
info: "HTTP/1.1 301 Moved Permanently\x0d\n"
info: "HTTP/1.1 301 Moved Permanently\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr ({ status := .movedPermanently, version := .v11 } : Response.Head)
/--
info: "HTTP/3.0 200 OK\x0d\n"
info: "HTTP/3.0 200 OK\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr ({ status := .ok, version := .v30 } : Response.Head)

View File

@@ -0,0 +1,213 @@
import Std.Internal.Http
import Std.Internal.Async
open Std.Internal.IO Async
open Std Http
abbrev TestHandler := Request Body.Incoming ContextAsync (Response Body.AnyBody)
instance : Std.Http.Server.Handler TestHandler where
onRequest handler request := handler request
instance : Coe (ContextAsync (Response Body.Incoming)) (ContextAsync (Response Body.AnyBody)) where
coe action := do
let response action
pure { response with body := Body.Internal.incomingToOutgoing response.body }
instance : Coe (Async (Response Body.Incoming)) (ContextAsync (Response Body.AnyBody)) where
coe action := do
let response action
pure { response with body := Body.Internal.incomingToOutgoing response.body }
structure RejectContinueHandler where
onRequestCalls : IO.Ref Nat
instance : Std.Http.Server.Handler RejectContinueHandler where
onRequest self _request := do
self.onRequestCalls.modify (· + 1)
Response.ok |>.text "request-ran"
onContinue _self _head :=
pure false
structure AcceptContinueHandler where
onRequestCalls : IO.Ref Nat
instance : Std.Http.Server.Handler AcceptContinueHandler where
onRequest self request := do
self.onRequestCalls.modify (· + 1)
let body : String request.body.readAll
Response.ok |>.text s!"accepted:{body}"
onContinue _self _head :=
pure true
def sendRaw {σ : Type} [Std.Http.Server.Handler σ]
(client : Mock.Client)
(server : Mock.Server)
(raw : ByteArray)
(handler : σ)
(config : Config := { lingeringTimeout := 3000, generateDate := false }) : IO ByteArray := Async.block do
client.send raw
Std.Http.Server.serveConnection server handler config
|>.run
let res client.recv?
pure (res.getD .empty)
def assertContains (name : String) (response : ByteArray) (needle : String) : IO Unit := do
let text := String.fromUTF8! response
unless text.contains needle do
throw <| IO.userError s!"Test '{name}' failed:\nExpected to contain: {needle.quote}\nGot:\n{text.quote}"
def assertNotContains (name : String) (response : ByteArray) (needle : String) : IO Unit := do
let text := String.fromUTF8! response
if text.contains needle then
throw <| IO.userError s!"Test '{name}' failed:\nDid not expect: {needle.quote}\nGot:\n{text.quote}"
def assertCallCount (name : String) (calls : IO.Ref Nat) (expected : Nat) : IO Unit := do
let got calls.get
if got != expected then
throw <| IO.userError s!"Test '{name}' failed:\nExpected onRequest calls {expected}, got {got}"
def countOccurrences (s : String) (needle : String) : Nat :=
if needle.isEmpty then
0
else
(s.splitOn needle).length - 1
def assertOccurrenceCount (name : String) (response : ByteArray) (needle : String) (expected : Nat) : IO Unit := do
let text := String.fromUTF8! response
let got := countOccurrences text needle
if got != expected then
throw <| IO.userError s!"Test '{name}' failed:\nExpected {expected} occurrences of {needle.quote}, got {got}\n{text.quote}"
-- Rejecting Expect returns 417 and does not execute user handler.
#eval show IO _ from do
let (client, server) Mock.new
let calls IO.mkRef 0
let handler : RejectContinueHandler := { onRequestCalls := calls }
let raw := "POST /upload HTTP/1.1\x0d\nHost: example.com\x0d\nExpect: 100-continue\x0d\nContent-Length: 5\x0d\nConnection: close\x0d\n\x0d\nhello".toUTF8
let response sendRaw client server raw handler
assertContains "Expect rejected status" response "HTTP/1.1 417 Expectation Failed"
assertNotContains "Expect rejected no 100 Continue" response "100 Continue"
assertNotContains "Expect rejected no handler body" response "request-ran"
assertOccurrenceCount "Expect rejected single response" response "HTTP/1.1 " 1
assertCallCount "Expect rejected call count" calls 0
-- Rejected Expect closes the exchange and blocks pipelined follow-up requests.
#eval show IO _ from do
let (client, server) Mock.new
let calls IO.mkRef 0
let handler : RejectContinueHandler := { onRequestCalls := calls }
let req1 := "POST /first HTTP/1.1\x0d\nHost: example.com\x0d\nExpect: 100-continue\x0d\nContent-Length: 5\x0d\n\x0d\nhello"
let req2 := "GET /second HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
let response sendRaw client server (req1 ++ req2).toUTF8 handler
assertContains "Expect rejected still 417" response "HTTP/1.1 417 Expectation Failed"
assertNotContains "Expect rejected no second request" response "/second"
assertCallCount "Expect rejected pipelined call count" calls 0
-- Accepted Expect emits 100 Continue followed by final 200.
#eval show IO _ from do
let (client, server) Mock.new
let calls IO.mkRef 0
let handler : AcceptContinueHandler := { onRequestCalls := calls }
let raw := "POST /ok HTTP/1.1\x0d\nHost: example.com\x0d\nExpect: 100-continue\x0d\nContent-Length: 5\x0d\nConnection: close\x0d\n\x0d\nhello".toUTF8
let response sendRaw client server raw handler
assertContains "Expect accepted has 100" response "HTTP/1.1 100 Continue"
assertContains "Expect accepted has final 200" response "HTTP/1.1 200 OK"
assertContains "Expect accepted body" response "accepted:hello"
assertOccurrenceCount "Expect accepted exactly one 100" response "HTTP/1.1 100 Continue" 1
assertOccurrenceCount "Expect accepted exactly one 200" response "HTTP/1.1 200 OK" 1
assertCallCount "Expect accepted call count" calls 1
-- Non-100 Expect token proceeds as a normal request.
#eval show IO _ from do
let (client, server) Mock.new
let calls IO.mkRef 0
let handler : RejectContinueHandler := { onRequestCalls := calls }
let raw := "POST /odd HTTP/1.1\x0d\nHost: example.com\x0d\nExpect: something-else\x0d\nContent-Length: 5\x0d\nConnection: close\x0d\n\x0d\nhello".toUTF8
let response sendRaw client server raw handler
assertContains "Non-100 Expect status" response "HTTP/1.1 200 OK"
assertContains "Non-100 Expect handler ran" response "request-ran"
assertNotContains "Non-100 Expect no 100 Continue" response "100 Continue"
assertCallCount "Non-100 Expect call count" calls 1
-- h11-inspired: Expect token matching is case-insensitive.
#eval show IO _ from do
let (client, server) Mock.new
let calls IO.mkRef 0
let handler : AcceptContinueHandler := { onRequestCalls := calls }
let raw := "POST /case HTTP/1.1\x0d\nHost: example.com\x0d\nExpect: 100-CONTINUE\x0d\nContent-Length: 5\x0d\nConnection: close\x0d\n\x0d\nhello".toUTF8
let response sendRaw client server raw handler
assertContains "Case-insensitive Expect has 100" response "HTTP/1.1 100 Continue"
assertContains "Case-insensitive Expect final 200" response "HTTP/1.1 200 OK"
assertCallCount "Case-insensitive Expect call count" calls 1
-- Normal requests without Expect do not emit 100 Continue.
#eval show IO _ from do
let (client, server) Mock.new
let calls IO.mkRef 0
let handler : AcceptContinueHandler := { onRequestCalls := calls }
let raw := "POST /no-expect HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 5\x0d\nConnection: close\x0d\n\x0d\nhello".toUTF8
let response sendRaw client server raw handler
assertContains "No Expect status" response "HTTP/1.1 200 OK"
assertContains "No Expect body" response "accepted:hello"
assertNotContains "No Expect no interim 100" response "100 Continue"
assertOccurrenceCount "No Expect exactly one 200" response "HTTP/1.1 200 OK" 1
assertCallCount "No Expect call count" calls 1
-- Date header is generated when enabled.
#eval show IO _ from do
let (client, server) Mock.new
let raw := "GET /date HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
let handler : TestHandler := fun _ => Response.ok |>.text "hello"
let response sendRaw client server raw handler (config := { lingeringTimeout := 3000, generateDate := true })
assertContains "Date generated status" response "HTTP/1.1 200 OK"
assertContains "Date generated header" response "Date: "
-- Date header is absent when disabled.
#eval show IO _ from do
let (client, server) Mock.new
let raw := "GET /no-date HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
let handler : TestHandler := fun _ => Response.ok |>.text "hello"
let response sendRaw client server raw handler (config := { lingeringTimeout := 3000, generateDate := false })
assertContains "Date disabled status" response "HTTP/1.1 200 OK"
assertNotContains "Date disabled header" response "Date: "
-- User-specified Date header is preserved and not duplicated.
#eval show IO _ from do
let (client, server) Mock.new
let raw := "GET /custom-date HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
let handler : TestHandler := fun _ =>
Response.ok
|>.header! "Date" "Mon, 01 Jan 2024 00:00:00 GMT"
|>.text "hello"
let response sendRaw client server raw handler (config := { lingeringTimeout := 3000, generateDate := true })
assertContains "User Date preserved" response "Date: Mon, 01 Jan 2024 00:00:00 GMT"
let text := String.fromUTF8! response
let count := countOccurrences text "Date: "
if count != 1 then
throw <| IO.userError s!"Test 'User Date not duplicated' failed:\nExpected one Date header, got {count}\n{text.quote}"

View File

@@ -0,0 +1,652 @@
import Std.Internal.Http
import Std.Internal.Async
import Std.Internal.Async.Timer
open Std.Internal.IO Async
open Std Http
abbrev TestHandler := Request Body.Incoming ContextAsync (Response Body.AnyBody)
instance : Std.Http.Server.Handler TestHandler where
onRequest handler request := handler request
instance : Coe (ContextAsync (Response Body.Incoming)) (ContextAsync (Response Body.AnyBody)) where
coe action := do
let response action
pure { response with body := Body.Internal.incomingToOutgoing response.body }
instance : Coe (Async (Response Body.Incoming)) (ContextAsync (Response Body.AnyBody)) where
coe action := do
let response action
pure { response with body := Body.Internal.incomingToOutgoing response.body }
def defaultConfig : Config :=
{ lingeringTimeout := 1000, generateDate := false }
def bad400 : String :=
"HTTP/1.1 400 Bad Request\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 0\x0d\n\x0d\n"
def runWithTimeout {α : Type} (name : String) (timeoutMs : Nat := 15000) (action : IO α) : IO α := do
let task IO.asTask action
let ticks := (timeoutMs + 9) / 10
let rec loop (remaining : Nat) : IO α := do
if ( IO.getTaskState task) == .finished then
match ( IO.wait task) with
| .ok x => pure x
| .error err => throw err
else
match remaining with
| 0 =>
IO.cancel task
throw <| IO.userError s!"Test '{name}' timed out after {timeoutMs}ms (possible hang/regression)"
| n + 1 =>
IO.sleep 10
loop n
loop ticks
def closeChannelIdempotent {α : Type} (ch : Std.CloseableChannel α) : IO Unit := do
match EIO.toBaseIO ch.close with
| .ok _ => pure ()
| .error .alreadyClosed => pure ()
| .error err => throw <| IO.userError (toString err)
def sendRaw
(client : Mock.Client)
(server : Mock.Server)
(raw : ByteArray)
(handler : TestHandler)
(config : Config := defaultConfig) : IO ByteArray := Async.block do
client.send raw
Std.Http.Server.serveConnection server handler config
|>.run
let res client.recv?
pure (res.getD .empty)
def sendRawAndClose
(client : Mock.Client)
(server : Mock.Server)
(raw : ByteArray)
(handler : TestHandler)
(config : Config := defaultConfig) : IO ByteArray := Async.block do
client.send raw
closeChannelIdempotent client.getSendChan
Std.Http.Server.serveConnection server handler config
|>.run
let res client.recv?
pure (res.getD .empty)
def sendFragmentedAndClose
(client : Mock.Client)
(server : Mock.Server)
(parts : Array ByteArray)
(handler : TestHandler)
(config : Config := defaultConfig) : IO ByteArray := Async.block do
let serverTask async (t := AsyncTask) do
Std.Http.Server.serveConnection server handler config
|>.run
for part in parts do
client.send part
closeChannelIdempotent client.getSendChan
await serverTask
let res client.recv?
pure (res.getD .empty)
def responseText (response : ByteArray) : String :=
String.fromUTF8! response
def responseBody (response : ByteArray) : String :=
let parts := (responseText response).splitOn "\x0d\n\x0d\n"
match parts.drop 1 with
| [] => ""
| body :: _ => body
def assertStatusPrefix (name : String) (response : ByteArray) (prefix_ : String) : IO Unit := do
let text := responseText response
unless text.startsWith prefix_ do
throw <| IO.userError s!"Test '{name}' failed:\nExpected status prefix {prefix_.quote}\nGot:\n{text.quote}"
def assertExact (name : String) (response : ByteArray) (expected : String) : IO Unit := do
let text := responseText response
if text != expected then
throw <| IO.userError s!"Test '{name}' failed:\nExpected:\n{expected.quote}\nGot:\n{text.quote}"
def countOccurrences (s : String) (needle : String) : Nat :=
if needle.isEmpty then
0
else
(s.splitOn needle).length - 1
def assertStatusCount (name : String) (response : ByteArray) (expected : Nat) : IO Unit := do
let text := responseText response
let got := countOccurrences text "HTTP/1.1 "
if got != expected then
throw <| IO.userError s!"Test '{name}' failed:\nExpected {expected} responses but saw {got}\n{text.quote}"
def nextSeed (seed : Nat) : Nat :=
(1664525 * seed + 1013904223) % 4294967296
def randBelow (seed : Nat) (maxExclusive : Nat) : Nat × Nat :=
let seed' := nextSeed seed
if maxExclusive == 0 then
(0, seed')
else
(seed' % maxExclusive, seed')
def randIn (seed : Nat) (low : Nat) (high : Nat) : Nat × Nat :=
if high < low then
(low, seed)
else
let (n, seed') := randBelow seed (high - low + 1)
(low + n, seed')
def randomAsciiBytes (seed : Nat) (len : Nat) : ByteArray × Nat := Id.run do
let mut s := seed
let mut out := ByteArray.empty
for _ in [0:len] do
let (r, s') := randBelow s 38
s := s'
let code :=
if r < 26 then
97 + r
else if r < 36 then
48 + (r - 26)
else if r == 36 then
45
else
95
out := out.push (UInt8.ofNat code)
(out, s)
def randomTokenBytes (seed : Nat) (len : Nat) : ByteArray × Nat := Id.run do
let mut s := seed
let mut out := ByteArray.empty
for _ in [0:len] do
let (r, s') := randBelow s 36
s := s'
let code :=
if r < 26 then
97 + r
else
48 + (r - 26)
out := out.push (UInt8.ofNat code)
(out, s)
def randomSplit (seed : Nat) (data : ByteArray) (maxPart : Nat := 17) : Array ByteArray × Nat := Id.run do
let mut s := seed
let mut out : Array ByteArray := #[]
let mut i := 0
while i < data.size do
let remaining := data.size - i
let upper := Nat.min maxPart remaining
let (partLen, s') := randIn s 1 upper
s := s'
out := out.push (data.extract i (i + partLen))
i := i + partLen
(out, s)
def randomChunkedPayload (seed : Nat) (body : ByteArray) : ByteArray × Nat := Id.run do
let mut s := seed
let mut out := ByteArray.empty
let mut i := 0
while i < body.size do
let remaining := body.size - i
let maxChunk := Nat.min 9 remaining
let (chunkLen, s') := randIn s 1 maxChunk
s := s'
out := out ++ s!"{chunkLen}\x0d\n".toUTF8
out := out ++ body.extract i (i + chunkLen)
out := out ++ "\x0d\n".toUTF8
i := i + chunkLen
out := out ++ "0\x0d\n\x0d\n".toUTF8
(out, s)
def mkContentLengthHead (path : String) (bodySize : Nat) : ByteArray :=
s!"POST {path} HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: {bodySize}\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
def mkChunkedHead (path : String) : ByteArray :=
s!"POST {path} HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
def randomChunkExtensionList (seed : Nat) (count : Nat) : String × Nat := Id.run do
let mut s := seed
let mut ext := ""
for _ in [0:count] do
let (nameLen, s1) := randIn s 1 3
s := s1
let (valueLen, s2) := randIn s 1 3
s := s2
let (nameBytes, s3) := randomTokenBytes s nameLen
s := s3
let (valueBytes, s4) := randomTokenBytes s valueLen
s := s4
let name := String.fromUTF8! nameBytes
let value := String.fromUTF8! valueBytes
ext := ext ++ s!";{name}={value}"
(ext, s)
def randomTrailerLines (seed : Nat) (count : Nat) : String × Nat := Id.run do
let mut s := seed
let mut lines := ""
for i in [0:count] do
let (nameLen, s1) := randIn s 1 4
s := s1
let (valueLen, s2) := randIn s 1 6
s := s2
let (nameBytes, s3) := randomTokenBytes s nameLen
s := s3
let (valueBytes, s4) := randomTokenBytes s valueLen
s := s4
let name := String.fromUTF8! nameBytes
let value := String.fromUTF8! valueBytes
lines := lines ++ s!"X{i}-{name}: {value}\x0d\n"
(lines, s)
def echoBodyHandler : TestHandler := fun req => do
let body : String req.body.readAll
Response.ok |>.text body
def runPipelinedReadAll
(raw : ByteArray)
(config : Config := defaultConfig) : IO (ByteArray × Array String) := Async.block do
let (client, server) Mock.new
let seenRef IO.mkRef (#[] : Array String)
let handler : TestHandler := fun req => do
let uri := toString req.line.uri
seenRef.modify (·.push uri)
let _body : String req.body.readAll
Response.ok |>.text uri
client.send raw
closeChannelIdempotent client.getSendChan
Std.Http.Server.serveConnection server handler config
|>.run
let response client.recv?
let seen seenRef.get
pure (response.getD .empty, seen)
def fuzzContentLengthEcho (iterations : Nat) (seed0 : Nat) : IO Unit := do
let mut seed := seed0
for i in [0:iterations] do
let caseSeed := seed
let (len, seed1) := randIn seed 0 128
seed := seed1
let (body, seed2) := randomAsciiBytes seed len
seed := seed2
let head := mkContentLengthHead s!"/fuzz-cl-{i}" body.size
let (bodyParts, seed3) := randomSplit seed body
seed := seed3
let parts := #[head] ++ bodyParts
let (client, server) Mock.new
let response sendFragmentedAndClose client server parts echoBodyHandler
let expectedBody := String.fromUTF8! body
assertStatusPrefix s!"fuzzContentLengthEcho case={i} seed={caseSeed}" response "HTTP/1.1 200"
let gotBody := responseBody response
if gotBody != expectedBody then
throw <| IO.userError s!"fuzzContentLengthEcho case={i} seed={caseSeed} failed:\nExpected body {expectedBody.quote}\nGot body {gotBody.quote}\nFull response:\n{(responseText response).quote}"
def fuzzContentLengthLeadingZerosAccepted (iterations : Nat) (seed0 : Nat) : IO Unit := do
let mut seed := seed0
for i in [0:iterations] do
let caseSeed := seed
let (len, seed1) := randIn seed 1 96
seed := seed1
let (leadingZeros, seed2) := randIn seed 1 5
seed := seed2
let (body, seed3) := randomAsciiBytes seed len
seed := seed3
let clToken := String.ofList (List.replicate leadingZeros '0') ++ toString len
let raw :=
s!"POST /cl-leading-zeros-{i} HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: {clToken}\x0d\nConnection: close\x0d\n\x0d\n".toUTF8 ++ body
let (client, server) Mock.new
let response sendRaw client server raw echoBodyHandler
let expectedBody := String.fromUTF8! body
assertStatusPrefix s!"fuzzContentLengthLeadingZerosAccepted case={i} seed={caseSeed} len={len} zeros={leadingZeros}" response "HTTP/1.1 200"
let gotBody := responseBody response
if gotBody != expectedBody then
throw <| IO.userError s!"fuzzContentLengthLeadingZerosAccepted case={i} seed={caseSeed} failed:\nExpected body {expectedBody.quote}\nGot body {gotBody.quote}\nFull response:\n{(responseText response).quote}"
def fuzzChunkedEcho (iterations : Nat) (seed0 : Nat) : IO Unit := do
let mut seed := seed0
for i in [0:iterations] do
let caseSeed := seed
let (len, seed1) := randIn seed 0 128
seed := seed1
let (body, seed2) := randomAsciiBytes seed len
seed := seed2
let (chunkedBody, seed3) := randomChunkedPayload seed body
seed := seed3
let head := mkChunkedHead s!"/fuzz-chunked-{i}"
let raw := head ++ chunkedBody
let (client, server) Mock.new
let response sendRaw client server raw echoBodyHandler
let expectedBody := String.fromUTF8! body
assertStatusPrefix s!"fuzzChunkedEcho case={i} seed={caseSeed}" response "HTTP/1.1 200"
let gotBody := responseBody response
if gotBody != expectedBody then
throw <| IO.userError s!"fuzzChunkedEcho case={i} seed={caseSeed} failed:\nExpected body {expectedBody.quote}\nGot body {gotBody.quote}\nFull response:\n{(responseText response).quote}"
/-- Property: a request carrying both `Transfer-Encoding: chunked` and a
`Content-Length` header is always rejected with the exact 400 response,
regardless of the declared length or payload. -/
def fuzzMixedTransferEncodingAndContentLengthRejected (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (len, seed1) := randIn seed 0 96
    seed := seed1
    let (body, seed2) := randomAsciiBytes seed len
    seed := seed2
    let (chunkedBody, seed3) := randomChunkedPayload seed body
    seed := seed3
    -- The declared Content-Length is random and unrelated to the actual payload.
    let (declaredCl, seed4) := randIn seed 0 128
    seed := seed4
    let raw :=
      s!"POST /te-cl-{i} HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nContent-Length: {declaredCl}\x0d\nConnection: close\x0d\n\x0d\n".toUTF8 ++ chunkedBody
    let (client, server) ← Mock.new
    let response ← sendRaw client server raw echoBodyHandler
    assertExact s!"fuzzMixedTransferEncodingAndContentLengthRejected case={i} seed={caseSeed} declaredCl={declaredCl}" response bad400
/-- Property: any non-hex chunk-size token is rejected deterministically with
the exact 400 response. -/
def fuzzInvalidChunkSizeRejected (iterations : Nat) (seed0 : Nat) : IO Unit := do
  -- Tokens that are not valid hexadecimal chunk sizes.
  let badTokens : Array String := #["g", "G", "z", "Z", "@", "!", "x"]
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (idx, seed1) := randBelow seed badTokens.size
    seed := seed1
    let token := badTokens[idx]!
    let raw :=
      s!"POST /bad-size-{i} HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n{token}\x0d\nabc\x0d\n0\x0d\n\x0d\n".toUTF8
    let (client, server) ← Mock.new
    let response ← sendRaw client server raw echoBodyHandler
    assertExact s!"fuzzInvalidChunkSizeRejected case={i} seed={caseSeed} token={token}" response bad400
/-- Property: duplicate `Content-Length` headers are always rejected with the
exact 400 response, whether the two values agree or differ. -/
def fuzzDuplicateContentLengthRejected (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (cl1, seed1) := randIn seed 0 64
    seed := seed1
    -- Half the cases use an identical second value, half a strictly larger one.
    let (same, seed2) := randBelow seed 2
    seed := seed2
    let (delta, seed3) := randIn seed 1 10
    seed := seed3
    let cl2 := if same == 0 then cl1 else cl1 + delta
    let (body, seed4) := randomAsciiBytes seed cl1
    seed := seed4
    let raw :=
      s!"POST /dup-cl-{i} HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: {cl1}\x0d\nContent-Length: {cl2}\x0d\nConnection: close\x0d\n\x0d\n".toUTF8 ++ body
    let (client, server) ← Mock.new
    let response ← sendRaw client server raw echoBodyHandler
    assertExact s!"fuzzDuplicateContentLengthRejected case={i} seed={caseSeed} cl1={cl1} cl2={cl2}" response bad400
/-- Property: chunk-extension name/value length limits (both configured to 4)
are enforced under randomized lengths — within limits → 200, over → 400. -/
def fuzzChunkExtensionLimits (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let config : Config := {
    lingeringTimeout := 1000
    generateDate := false
    maxChunkExtNameLength := 4
    maxChunkExtValueLength := 4
  }
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (nameLen, seed1) := randIn seed 1 8
    seed := seed1
    let (valueLen, seed2) := randIn seed 1 8
    seed := seed2
    let (nameBytes, seed3) := randomTokenBytes seed nameLen
    seed := seed3
    let (valueBytes, seed4) := randomTokenBytes seed valueLen
    seed := seed4
    let name := String.fromUTF8! nameBytes
    let value := String.fromUTF8! valueBytes
    let raw :=
      s!"POST /ext-limit-{i} HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n1;{name}={value}\x0d\nx\x0d\n0\x0d\n\x0d\n".toUTF8
    -- Accepted only when both the name and the value fit within the configured limits.
    let expectOk := nameLen ≤ 4 ∧ valueLen ≤ 4
    let (client, server) ← Mock.new
    let response ← sendRaw client server raw echoBodyHandler (config := config)
    if expectOk then
      assertStatusPrefix s!"fuzzChunkExtensionLimits case={i} seed={caseSeed} nameLen={nameLen} valueLen={valueLen}" response "HTTP/1.1 200"
    else
      assertExact s!"fuzzChunkExtensionLimits case={i} seed={caseSeed} nameLen={nameLen} valueLen={valueLen}" response bad400
/-- Property: the chunk-extension count limit (`maxChunkExtensions := 2`) is
enforced under randomized extension lists — at most 2 → 200, more → 400. -/
def fuzzChunkExtensionCountLimit (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let config : Config := {
    lingeringTimeout := 1000
    generateDate := false
    maxChunkExtensions := 2
  }
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (extCount, seed1) := randIn seed 0 5
    seed := seed1
    let (extList, seed2) := randomChunkExtensionList seed extCount
    seed := seed2
    let raw :=
      s!"POST /ext-count-{i} HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n1{extList}\x0d\nx\x0d\n0\x0d\n\x0d\n".toUTF8
    let (client, server) ← Mock.new
    let response ← sendRaw client server raw echoBodyHandler (config := config)
    if extCount ≤ 2 then
      assertStatusPrefix s!"fuzzChunkExtensionCountLimit case={i} seed={caseSeed} extCount={extCount}" response "HTTP/1.1 200"
    else
      assertExact s!"fuzzChunkExtensionCountLimit case={i} seed={caseSeed} extCount={extCount}" response bad400
/-- Property: the trailer-header count limit (`maxTrailerHeaders := 2`) is
enforced under randomized trailer sections — at most 2 → 200, more → 400. -/
def fuzzTrailerHeaderCountLimit (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let config : Config := {
    lingeringTimeout := 1000
    generateDate := false
    maxTrailerHeaders := 2
  }
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (trailerCount, seed1) := randIn seed 0 5
    seed := seed1
    let (trailers, seed2) := randomTrailerLines seed trailerCount
    seed := seed2
    let raw :=
      s!"POST /trailers-{i} HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n1\x0d\na\x0d\n0\x0d\n{trailers}\x0d\n".toUTF8
    let (client, server) ← Mock.new
    let response ← sendRaw client server raw echoBodyHandler (config := config)
    if trailerCount ≤ 2 then
      assertStatusPrefix s!"fuzzTrailerHeaderCountLimit case={i} seed={caseSeed} trailerCount={trailerCount}" response "HTTP/1.1 200"
    else
      assertExact s!"fuzzTrailerHeaderCountLimit case={i} seed={caseSeed} trailerCount={trailerCount}" response bad400
/-- Property: a complete first request body allows a pipelined follow-up request
to be parsed; both responses arrive and the handler sees both URIs in order. -/
def fuzzCompleteFirstBodyAllowsPipeline (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (len, seed1) := randIn seed 0 32
    seed := seed1
    let (body, seed2) := randomAsciiBytes seed len
    seed := seed2
    let uri1 := s!"/first-complete-{i}"
    let req1 :=
      s!"POST {uri1} HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: {len}\x0d\n\x0d\n".toUTF8 ++ body
    let req2 := "GET /second HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
    let (response, seen) ← runPipelinedReadAll (req1 ++ req2)
    let text := responseText response
    assertStatusCount s!"fuzzCompleteFirstBodyAllowsPipeline case={i} seed={caseSeed}" response 2
    unless text.contains uri1 do
      throw <| IO.userError s!"fuzzCompleteFirstBodyAllowsPipeline case={i} seed={caseSeed} failed:\nMissing first URI {uri1.quote}\n{text.quote}"
    unless text.contains "/second" do
      throw <| IO.userError s!"fuzzCompleteFirstBodyAllowsPipeline case={i} seed={caseSeed} failed:\nMissing second response\n{text.quote}"
    -- The handler must have observed exactly the two requests, in order.
    if seen.size != 2 ∨ seen[0]! != uri1 ∨ seen[1]! != "/second" then
      throw <| IO.userError s!"fuzzCompleteFirstBodyAllowsPipeline case={i} seed={caseSeed} failed:\nExpected seen=[{uri1.quote}, \"/second\"] got {seen}"
-- ============================================================================
-- Run all framing properties (each under a watchdog; see `runWithTimeout`)
-- ============================================================================
-- Property: Content-Length framing is stable across random payloads and random transport splits.
#eval runWithTimeout "fuzz_content_length_echo" 20000 do
  fuzzContentLengthEcho 40 0x00C0FFEE
-- Property: Content-Length with randomized leading zeros is accepted and decoded to exact body length.
#eval runWithTimeout "fuzz_content_length_leading_zeros_accepted" 20000 do
  fuzzContentLengthLeadingZerosAccepted 30 0x00CAB005
-- Property: Chunked framing reconstructs exact bodies under random chunking and transport splits.
#eval runWithTimeout "fuzz_chunked_echo" 20000 do
  fuzzChunkedEcho 40 0x00123456
-- Property: Mixing Transfer-Encoding with Content-Length is always rejected.
#eval runWithTimeout "fuzz_te_cl_mixed_rejected" 20000 do
  fuzzMixedTransferEncodingAndContentLengthRejected 30 0x0010CE11
-- Property: Invalid chunk-size tokens are rejected deterministically with 400.
#eval runWithTimeout "fuzz_invalid_chunk_size_rejected" 20000 do
  fuzzInvalidChunkSizeRejected 30 0x00BAD001
-- Property: Duplicate Content-Length headers are always rejected (same or different values).
#eval runWithTimeout "fuzz_duplicate_content_length_rejected" 20000 do
  fuzzDuplicateContentLengthRejected 30 0x00D0C1A7
-- Property: Chunk extension name/value limits are enforced under randomized lengths.
#eval runWithTimeout "fuzz_chunk_extension_limits" 20000 do
  fuzzChunkExtensionLimits 40 0x00A11CE5
-- Property: Chunk extension count limit is enforced under randomized extension lists.
#eval runWithTimeout "fuzz_chunk_extension_count_limit" 20000 do
  fuzzChunkExtensionCountLimit 35 0x00E77E11
-- Property: Trailer header count limit is enforced under randomized trailer sections.
#eval runWithTimeout "fuzz_trailer_header_count_limit" 20000 do
  fuzzTrailerHeaderCountLimit 35 0x00A71A12
-- Property: Complete first request body allows pipelined follow-up parsing.
#eval runWithTimeout "fuzz_complete_first_body_allows_pipeline" 20000 do
  fuzzCompleteFirstBodyAllowsPipeline 30 0x00777777

View File

@@ -0,0 +1,472 @@
import Std.Internal.Http
import Std.Internal.Async
open Std.Internal.IO Async
open Std Http
/-!
# Limit-enforcement fuzzing for the HTTP/1.1 server.
Tests that every configurable limit in `H1.Config` and `Server.Config` is
correctly enforced under randomized inputs. Inspired by hyper's fuzzing of
size and count limits.
-/
/-- Handler shape used by these tests: an incoming request mapped to an
asynchronous response with an arbitrary body. -/
abbrev TestHandler := Request Body.Incoming → ContextAsync (Response Body.AnyBody)

instance : Std.Http.Server.Handler TestHandler where
  onRequest handler request := handler request

/-- Allow actions producing an incoming-body response wherever an any-body
response is expected, converting the body representation. -/
instance : Coe (ContextAsync (Response Body.Incoming)) (ContextAsync (Response Body.AnyBody)) where
  coe action := do
    let response ← action
    pure { response with body := Body.Internal.incomingToOutgoing response.body }

instance : Coe (Async (Response Body.Incoming)) (ContextAsync (Response Body.AnyBody)) where
  coe action := do
    let response ← action
    pure { response with body := Body.Internal.incomingToOutgoing response.body }
/-- Close a channel, treating a double-close (`alreadyClosed`) as success so
tests can close unconditionally. Any other error is re-raised as an IO error. -/
def closeChannelIdempotent {α : Type} (ch : Std.CloseableChannel α) : IO Unit := do
  match ← EIO.toBaseIO ch.close with
  | .ok _ => pure ()
  | .error .alreadyClosed => pure ()
  | .error err => throw <| IO.userError (toString err)
/-- Send `raw` bytes through the mock client, run the server connection with
`handler`/`config`, and return whatever bytes the client receives back
(empty if the connection produced nothing). -/
def sendRaw
    (client : Mock.Client) (server : Mock.Server) (raw : ByteArray)
    (handler : TestHandler) (config : Config) : IO ByteArray := Async.block do
  client.send raw
  Std.Http.Server.serveConnection server handler config |>.run
  let res ← client.recv?
  pure (res.getD .empty)
/-- Like `sendRaw`, but also closes the client's send channel first so the
server observes end-of-input after `raw`. -/
def sendRawAndClose
    (client : Mock.Client) (server : Mock.Server) (raw : ByteArray)
    (handler : TestHandler) (config : Config) : IO ByteArray := Async.block do
  client.send raw
  closeChannelIdempotent client.getSendChan
  Std.Http.Server.serveConnection server handler config |>.run
  let res ← client.recv?
  pure (res.getD .empty)
/-- Run `action` as a background task, polling its state every 10 ms; if it has
not finished within `timeoutMs` milliseconds, cancel it and throw. -/
def runWithTimeout {α : Type} (name : String) (timeoutMs : Nat := 20000) (action : IO α) : IO α := do
  let task ← IO.asTask action
  -- Round the timeout up to whole 10 ms polling ticks.
  let ticks := (timeoutMs + 9) / 10
  let rec loop (remaining : Nat) : IO α := do
    if (← IO.getTaskState task) == .finished then
      match (← IO.wait task) with
      | .ok x => pure x
      | .error err => throw err
    else
      match remaining with
      | 0 => IO.cancel task; throw <| IO.userError s!"Test '{name}' timed out"
      | n + 1 => IO.sleep 10; loop n
  loop ticks
-- Deterministic PRNG helpers (32-bit linear congruential generator).

/-- Advance the LCG state by one step (Numerical Recipes constants, mod 2^32). -/
def nextSeed (seed : Nat) : Nat :=
  (seed * 1664525 + 1013904223) % 4294967296

/-- Draw a value in `[0, n)` (or 0 when `n = 0`), returning it with the new state. -/
def randBelow (seed : Nat) (n : Nat) : Nat × Nat :=
  let state := nextSeed seed
  if n == 0 then (0, state) else (state % n, state)

/-- Draw a value in the inclusive range `[lo, hi]`; an empty range yields `lo`
with the state unchanged. -/
def randIn (seed : Nat) (lo hi : Nat) : Nat × Nat :=
  if hi < lo then
    (lo, seed)
  else
    let (offset, state) := randBelow seed (hi - lo + 1)
    (lo + offset, state)
/-- Produce `len` random HTTP-token bytes (lowercase letters and digits),
threading the PRNG state. -/
def randomTokenBytes (seed : Nat) (len : Nat) : ByteArray × Nat := Id.run do
  let mut state := seed
  let mut bytes := ByteArray.empty
  for _ in [0:len] do
    let (pick, state') := randBelow state 36
    state := state'
    -- Picks 0-25 map to 'a'-'z'; 26-35 map to '0'-'9'.
    let code := if pick < 26 then 97 + pick else 48 + (pick - 26)
    bytes := bytes.push (UInt8.ofNat code)
  (bytes, state)
/-- Produce `len` random lowercase ASCII letter bytes, threading the PRNG state. -/
def randomAsciiBytes (seed : Nat) (len : Nat) : ByteArray × Nat := Id.run do
  let mut state := seed
  let mut bytes := ByteArray.empty
  for _ in [0:len] do
    let (pick, state') := randBelow state 26
    state := state'
    -- 97 is 'a'; picks cover 'a'-'z'.
    bytes := bytes.push (UInt8.ofNat (97 + pick))
  (bytes, state)
/-- Fuel-bounded helper: prepend the lowercase hex digits of `n` to `acc`. -/
private def toHexAux : Nat → Nat → String → String
  | 0, _, acc => acc
  | fuel + 1, n, acc =>
    if n == 0 then acc
    else
      let d := n % 16
      -- 48 is '0', 87 + 10 is 'a'.
      let c : Char := if d < 10 then Char.ofNat (48 + d) else Char.ofNat (87 + d)
      toHexAux fuel (n / 16) (c.toString ++ acc)

/-- Render `n` as lowercase hexadecimal (no prefix); `0` renders as `"0"`.
16 digits of fuel covers every value used in these tests. -/
def natToHex (n : Nat) : String :=
  if n == 0 then "0" else toHexAux 16 n ""
/-- Fail with a descriptive error unless the UTF-8 decoded response starts with `pfx`. -/
def assertStatusPrefix (name : String) (response : ByteArray) (pfx : String) : IO Unit := do
  let text := String.fromUTF8! response
  if text.startsWith pfx then
    pure ()
  else
    throw <| IO.userError s!"Test '{name}' failed:\nExpected {pfx.quote}\nGot:\n{text.quote}"
/-- Fail with a descriptive error unless the UTF-8 decoded response equals `expected` exactly. -/
def assertExact (name : String) (response : ByteArray) (expected : String) : IO Unit := do
  let text := String.fromUTF8! response
  unless text == expected do
    throw <| IO.userError s!"Test '{name}' failed:\nExpected:\n{expected.quote}\nGot:\n{text.quote}"
/-- Count non-overlapping occurrences of `needle` in `s`; an empty needle counts as 0. -/
def countOccurrences (s needle : String) : Nat :=
  match needle.isEmpty with
  | true => 0
  | false => (s.splitOn needle |>.length) - 1
/-- The full 400 response text used in byte-exact assertions against rejected
requests (status line, Server header, close, zero-length body). -/
def bad400 : String :=
  "HTTP/1.1 400 Bad Request\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 0\x0d\n\x0d\n"
/-- Handler that reads the whole request body and echoes it back as a text response. -/
def echoBodyHandler : TestHandler := fun req => do
  let body : String ← req.body.readAll
  Response.ok |>.text body

/-- Handler that ignores the request and always answers "ok". -/
def okHandler : TestHandler := fun _ => Response.ok |>.text "ok"
-- ============================================================================
-- maxBodySize — Content-Length framing
-- ============================================================================
-- Property: Content-Length body exactly at or below maxBodySize → 200.
--           Content-Length body above maxBodySize → 413 (no body bytes needed).
def fuzzBodySizeLimitContentLength (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let limit : Nat := 64
  let config : Config := { lingeringTimeout := 1000, generateDate := false, maxBodySize := limit }
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    -- Body sizes straddle the limit: 0 .. limit + 20.
    let (bodySize, s1) := randIn seed 0 (limit + 20); seed := s1
    let (bodyBytes, s2) := randomAsciiBytes seed bodySize; seed := s2
    let raw :=
      s!"POST /bl-cl-{i} HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: {bodySize}\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
        ++ bodyBytes
    let (client, server) ← Mock.new
    let response ← sendRaw client server raw (fun req => do
      let _body : String ← req.body.readAll; Response.ok |>.text "ok") config
    if bodySize ≤ limit then
      assertStatusPrefix s!"fuzzBodySizeLimitCL iter={i} seed={caseSeed} size={bodySize}" response "HTTP/1.1 200"
    else
      assertStatusPrefix s!"fuzzBodySizeLimitCL iter={i} seed={caseSeed} size={bodySize}" response "HTTP/1.1 413"
-- ============================================================================
-- maxBodySize — chunked framing
-- ============================================================================
-- Property: chunked bodies with total bytes at or below maxBodySize → 200.
--           Chunked bodies exceeding maxBodySize → 413.
def fuzzBodySizeLimitChunked (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let limit : Nat := 64
  let config : Config := { lingeringTimeout := 1000, generateDate := false, maxBodySize := limit }
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    -- Total body size across 1-4 chunks
    let (totalSize, s1) := randIn seed 0 (limit + 16); seed := s1
    let (numChunks, s2) := randIn seed 1 4; seed := s2
    -- Build chunks that sum to totalSize (ceiling-divided, last chunk may be smaller).
    let chunkSize := (totalSize + numChunks - 1) / numChunks
    let mut chunkedBody := ByteArray.empty
    let mut remaining := totalSize
    for _ in [0:numChunks] do
      if remaining == 0 then break
      let thisChunk := Nat.min chunkSize remaining
      let (chunkBytes, s3) := randomAsciiBytes seed thisChunk; seed := s3
      chunkedBody := chunkedBody ++ s!"{natToHex thisChunk}\x0d\n".toUTF8 ++ chunkBytes ++ "\x0d\n".toUTF8
      remaining := remaining - thisChunk
    chunkedBody := chunkedBody ++ "0\x0d\n\x0d\n".toUTF8
    let raw :=
      s!"POST /bl-ch-{i} HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
        ++ chunkedBody
    let (client, server) ← Mock.new
    let response ← sendRaw client server raw (fun req => do
      let _body : String ← req.body.readAll; Response.ok |>.text "ok") config
    if totalSize ≤ limit then
      assertStatusPrefix s!"fuzzBodySizeLimitChunked iter={i} seed={caseSeed} total={totalSize}" response "HTTP/1.1 200"
    else
      assertStatusPrefix s!"fuzzBodySizeLimitChunked iter={i} seed={caseSeed} total={totalSize}" response "HTTP/1.1 413"
-- ============================================================================
-- maxHeaders — header count limit
-- ============================================================================
-- Property: header count at or below maxHeaders → 200.
--           header count above maxHeaders → 431 (matches the assertion below).
def fuzzHeaderCountLimit (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let limit : Nat := 5
  let config : Config := { lingeringTimeout := 1000, generateDate := false, maxHeaders := limit }
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    -- Host counts as 1 header, Connection as 1, so extra headers = headerCount - 2
    let (headerCount, s1) := randIn seed 2 (limit + 4); seed := s1
    let extraCount := headerCount - 2 -- we always add Host + Connection
    let mut extraHeaders := ""
    let mut s := s1
    for j in [0:extraCount] do
      let (nameLen, s2) := randIn s 1 8; s := s2
      let (nameBytes, s3) := randomTokenBytes s nameLen; s := s3
      let name := String.fromUTF8! nameBytes
      extraHeaders := extraHeaders ++ s!"X-Extra-{j}-{name}: value\x0d\n"
    seed := s
    let raw :=
      s!"GET /hc-{i} HTTP/1.1\x0d\nHost: example.com\x0d\n{extraHeaders}Connection: close\x0d\n\x0d\n".toUTF8
    let (client, server) ← Mock.new
    let response ← sendRawAndClose client server raw okHandler config
    -- headerCount includes Host and Connection (always present)
    if headerCount ≤ limit then
      assertStatusPrefix s!"fuzzHeaderCount iter={i} seed={caseSeed} count={headerCount}" response "HTTP/1.1 200"
    else
      assertStatusPrefix s!"fuzzHeaderCount iter={i} seed={caseSeed} count={headerCount}" response "HTTP/1.1 431"
-- ============================================================================
-- maxHeaderBytes — total header bytes limit
-- ============================================================================
-- Property: headers whose aggregate bytes (name + ": " + value + "\r\n") are at or
-- below maxHeaderBytes are accepted; above it they are rejected with 431.
def fuzzHeaderTotalBytesLimit (iterations : Nat) (seed0 : Nat) : IO Unit := do
  -- Fixed baseline: "Host: example.com\r\n" + "Connection: close\r\n".
  -- We'll add a single large X-Payload header to cross the boundary.
  let limit : Nat := 200
  let config : Config := {
    lingeringTimeout := 1000
    generateDate := false
    maxHeaderBytes := limit
    maxHeaderValueLength := limit + 100 -- allow value longer than total limit for testing
  }
  let baseline := ("Host: example.com\x0d\nConnection: close\x0d\n".toUTF8).size
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    -- Pick a value size that puts total bytes just under or over the limit
    -- Each "X-Payload: " header = name(9) + ": "(2) + value + "\r\n"(2) = value + 13
    let overhead := baseline + 13 -- "X-Payload" (9) + ": " (2) + "\r\n" (2) + baseline
    -- We want value sizes that land on both sides of (limit - overhead)
    let boundary := if limit > overhead then limit - overhead else 0
    let (valueSize, s1) := randIn seed 0 (boundary + 20); seed := s1
    let (valueBytes, s2) := randomAsciiBytes seed valueSize; seed := s2
    let value := String.fromUTF8! valueBytes
    let raw :=
      s!"GET /hb-{i} HTTP/1.1\x0d\nHost: example.com\x0d\nX-Payload: {value}\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
    let (client, server) ← Mock.new
    let response ← sendRawAndClose client server raw okHandler config
    -- Total bytes = baseline + "X-Payload: " + value + "\r\n"
    let totalBytes := baseline + 9 + 2 + valueSize + 2
    if totalBytes ≤ limit then
      assertStatusPrefix s!"fuzzHeaderBytes iter={i} seed={caseSeed} total={totalBytes}" response "HTTP/1.1 200"
    else
      assertStatusPrefix s!"fuzzHeaderBytes iter={i} seed={caseSeed} total={totalBytes}" response "HTTP/1.1 431"
-- ============================================================================
-- maxMessages — requests per connection
-- ============================================================================
-- Property: after maxRequests requests on a single connection, the server
--           closes the connection (disables keep-alive). All served requests
--           receive a valid response; any beyond the limit receive none.
def fuzzMaxMessagesPerConnection (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let limit : Nat := 3
  let config : Config := {
    lingeringTimeout := 1000
    generateDate := false
    maxRequests := limit
  }
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (reqCount, s1) := randIn seed 1 (limit + 3); seed := s1
    -- Build reqCount keep-alive requests followed by close
    let mut raw := ByteArray.empty
    for j in [0:reqCount] do
      let connHeader :=
        if j + 1 == reqCount then "Connection: close\x0d\n" else "Connection: keep-alive\x0d\n"
      raw := raw ++ s!"GET /msg-{i}-{j} HTTP/1.1\x0d\nHost: example.com\x0d\n{connHeader}\x0d\n".toUTF8
    let (client, server) ← Mock.new
    let response ← sendRawAndClose client server raw okHandler config
    let text := String.fromUTF8! response
    let seen := countOccurrences text "HTTP/1.1 200"
    -- At most `limit` requests are answered per connection.
    let expected := Nat.min reqCount limit
    if seen != expected then
      throw <| IO.userError
        s!"fuzzMaxMessages iter={i} seed={caseSeed} reqCount={reqCount}: expected {expected} responses, got {seen}\n{text.quote}"
-- ============================================================================
-- maxLeadingEmptyLines — leading CRLF before request-line
-- ============================================================================
-- Property: at most maxLeadingEmptyLines bare CRLFs before the request-line are tolerated.
--           More than that → 400.
def fuzzLeadingEmptyLinesLimit (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let limit : Nat := 4
  let config : Config := { lingeringTimeout := 1000, generateDate := false, maxLeadingEmptyLines := limit }
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (lineCount, s1) := randIn seed 0 (limit + 4); seed := s1
    let leadingCRLF := (List.replicate lineCount "\x0d\n").foldl (· ++ ·) ""
    let raw :=
      (leadingCRLF ++ s!"GET /le-{i} HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n").toUTF8
    let (client, server) ← Mock.new
    let response ← sendRawAndClose client server raw okHandler config
    if lineCount ≤ limit then
      assertStatusPrefix s!"fuzzLeadingEmptyLines iter={i} seed={caseSeed} count={lineCount}" response "HTTP/1.1 200"
    else
      assertStatusPrefix s!"fuzzLeadingEmptyLines iter={i} seed={caseSeed} count={lineCount}" response "HTTP/1.1 400"
-- ============================================================================
-- maxSpaceSequence — OWS (optional whitespace) limit
-- ============================================================================
-- Property: OWS sequences at or below maxSpaceSequence are accepted.
--           OWS sequences exceeding the limit → 400.
def fuzzOWSSequenceLimit (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let limit : Nat := 4
  let config : Config := { lingeringTimeout := 1000, generateDate := false, maxSpaceSequence := limit }
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (spaceCount, s1) := randIn seed 0 (limit + 4); seed := s1
    let spaces := String.ofList (List.replicate spaceCount ' ')
    -- OWS appears between ':' and value in header fields
    let raw :=
      s!"GET /ows-{i} HTTP/1.1\x0d\nHost: example.com\x0d\nX-OWS:{spaces}value\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
    let (client, server) ← Mock.new
    let response ← sendRawAndClose client server raw okHandler config
    if spaceCount ≤ limit then
      assertStatusPrefix s!"fuzzOWSLimit iter={i} seed={caseSeed} spaces={spaceCount}" response "HTTP/1.1 200"
    else
      assertStatusPrefix s!"fuzzOWSLimit iter={i} seed={caseSeed} spaces={spaceCount}" response "HTTP/1.1 400"
-- ============================================================================
-- maxStartLineLength — request-line length limit
-- ============================================================================
-- Property: request lines at or below maxStartLineLength → 200.
--           Request lines above maxStartLineLength → 414 (URI too long) or 400.
def fuzzStartLineLengthLimit (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let limit : Nat := 64
  let config : Config := {
    lingeringTimeout := 1000
    generateDate := false
    maxStartLineLength := limit
    maxUriLength := limit
  }
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    -- "GET " (4) + path + " HTTP/1.1\r\n" (11) → budget limit - 16 is conservative by one byte
    let pathBudget := if limit > 16 then limit - 16 else 1
    let (pathLen, s1) := randIn seed 1 (pathBudget + 10); seed := s1
    let path := "/" ++ String.ofList (List.replicate (pathLen - 1) 'a')
    let raw :=
      s!"GET {path} HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
    let (client, server) ← Mock.new
    let response ← sendRawAndClose client server raw okHandler config
    -- start-line = "GET " + path + " HTTP/1.1\r\n"
    let lineLen := 4 + pathLen + 11
    if lineLen ≤ limit then
      assertStatusPrefix s!"fuzzStartLineLen iter={i} seed={caseSeed} len={lineLen}" response "HTTP/1.1 200"
    else
      -- Either 414 (URI too long) or 400
      let text := String.fromUTF8! response
      unless text.startsWith "HTTP/1.1 414" || text.startsWith "HTTP/1.1 400" do
        throw <| IO.userError
          s!"fuzzStartLineLen iter={i} seed={caseSeed} len={lineLen}: expected 414 or 400, got {text.quote}"
-- ============================================================================
-- maxHeaderNameLength and maxHeaderValueLength
-- ============================================================================
-- Property: header names exceeding maxHeaderNameLength → 400.
def fuzzHeaderNameLengthLimit (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let limit : Nat := 16
  let config : Config := { lingeringTimeout := 1000, generateDate := false, maxHeaderNameLength := limit }
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (nameLen, s1) := randIn seed 1 (limit + 8); seed := s1
    let (nameBytes, s2) := randomTokenBytes seed nameLen; seed := s2
    let name := String.fromUTF8! nameBytes
    let raw :=
      s!"GET /hnl-{i} HTTP/1.1\x0d\nHost: example.com\x0d\n{name}: value\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
    let (client, server) ← Mock.new
    let response ← sendRawAndClose client server raw okHandler config
    if nameLen ≤ limit then
      assertStatusPrefix s!"fuzzHeaderNameLen iter={i} seed={caseSeed} len={nameLen}" response "HTTP/1.1 200"
    else
      assertStatusPrefix s!"fuzzHeaderNameLen iter={i} seed={caseSeed} len={nameLen}" response "HTTP/1.1 400"
-- Property: header values exceeding maxHeaderValueLength → 400.
def fuzzHeaderValueLengthLimit (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let limit : Nat := 32
  let config : Config := {
    lingeringTimeout := 1000
    generateDate := false
    maxHeaderValueLength := limit
    maxHeaderBytes := 65536 -- don't let total bytes limit interfere
  }
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (valueLen, s1) := randIn seed 0 (limit + 8); seed := s1
    let (valueBytes, s2) := randomAsciiBytes seed valueLen; seed := s2
    let value := String.fromUTF8! valueBytes
    let raw :=
      s!"GET /hvl-{i} HTTP/1.1\x0d\nHost: example.com\x0d\nX-Long: {value}\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
    let (client, server) ← Mock.new
    let response ← sendRawAndClose client server raw okHandler config
    if valueLen ≤ limit then
      assertStatusPrefix s!"fuzzHeaderValueLen iter={i} seed={caseSeed} len={valueLen}" response "HTTP/1.1 200"
    else
      assertStatusPrefix s!"fuzzHeaderValueLen iter={i} seed={caseSeed} len={valueLen}" response "HTTP/1.1 400"
-- ============================================================================
-- Run all properties
-- ============================================================================
-- Each property runs under a 30-second watchdog via `runWithTimeout`.
-- Property: maxBodySize enforced for Content-Length framing.
#eval runWithTimeout "fuzz_body_limit_content_length" 30000 do
  fuzzBodySizeLimitContentLength 50 0x00B0DEC0
-- Property: maxBodySize enforced for chunked framing.
#eval runWithTimeout "fuzz_body_limit_chunked" 30000 do
  fuzzBodySizeLimitChunked 40 0x00C8BE11
-- Property: maxHeaders count limit is enforced.
#eval runWithTimeout "fuzz_header_count_limit" 30000 do
  fuzzHeaderCountLimit 50 0x00AA55AA
-- Property: maxHeaderBytes aggregate limit is enforced.
#eval runWithTimeout "fuzz_header_bytes_limit" 30000 do
  fuzzHeaderTotalBytesLimit 40 0x00FACE77
-- Property: maxMessages per connection closes keep-alive after the configured count.
#eval runWithTimeout "fuzz_max_messages_per_connection" 30000 do
  fuzzMaxMessagesPerConnection 30 0x00123ABC
-- Property: maxLeadingEmptyLines limit is enforced.
#eval runWithTimeout "fuzz_leading_empty_lines_limit" 30000 do
  fuzzLeadingEmptyLinesLimit 50 0x00EEF00D
-- Property: maxSpaceSequence (OWS) limit is enforced.
#eval runWithTimeout "fuzz_ows_sequence_limit" 30000 do
  fuzzOWSSequenceLimit 50 0x00ABE5AB
-- Property: maxStartLineLength / maxUriLength is enforced, returning 414 or 400.
#eval runWithTimeout "fuzz_start_line_length_limit" 30000 do
  fuzzStartLineLengthLimit 50 0x00C0FFEE
-- Property: maxHeaderNameLength is enforced.
#eval runWithTimeout "fuzz_header_name_length_limit" 30000 do
  fuzzHeaderNameLengthLimit 50 0x00DEAD01
-- Property: maxHeaderValueLength is enforced.
#eval runWithTimeout "fuzz_header_value_length_limit" 30000 do
  fuzzHeaderValueLengthLimit 50 0x00BEEF02

View File

@@ -0,0 +1,304 @@
import Std.Internal.Http
import Std.Internal.Async
open Std.Internal.IO Async
open Std Http
/-!
# Random-byte fuzzing for the HTTP/1.1 parser.
Inspired by hyper's `fuzz_h1_req` libFuzzer target. The core property: any byte
sequence fed to the server must be handled without panicking, hanging, or
producing a malformed response. The server must either:
- Send no bytes (connection closed before a complete request arrives), or
- Send a response that starts with "HTTP/1.1 ".
-/
/-- Handler shape used by these tests: an incoming request mapped to an
asynchronous response with an arbitrary body. -/
abbrev TestHandler := Request Body.Incoming → ContextAsync (Response Body.AnyBody)

instance : Std.Http.Server.Handler TestHandler where
  onRequest handler request := handler request

/-- Allow actions producing an incoming-body response wherever an any-body
response is expected, converting the body representation. -/
instance : Coe (ContextAsync (Response Body.Incoming)) (ContextAsync (Response Body.AnyBody)) where
  coe action := do
    let response ← action
    pure { response with body := Body.Internal.incomingToOutgoing response.body }

instance : Coe (Async (Response Body.Incoming)) (ContextAsync (Response Body.AnyBody)) where
  coe action := do
    let response ← action
    pure { response with body := Body.Internal.incomingToOutgoing response.body }
/-- Baseline server configuration for the fuzz runs: short lingering timeout,
no generated Date header (keeps responses deterministic for exact matching). -/
def defaultConfig : Config :=
  { lingeringTimeout := 1000, generateDate := false }
/-- Close a channel, treating a double-close (`alreadyClosed`) as success so
tests can close unconditionally. Any other error is re-raised as an IO error. -/
def closeChannelIdempotent {α : Type} (ch : Std.CloseableChannel α) : IO Unit := do
  match ← EIO.toBaseIO ch.close with
  | .ok _ => pure ()
  | .error .alreadyClosed => pure ()
  | .error err => throw <| IO.userError (toString err)
/-- Send `raw` through the mock client, close the client's send channel so the
server observes end-of-input, run the connection, and return whatever bytes
the client receives back (empty if nothing was produced). -/
def sendRawAndClose
    (client : Mock.Client) (server : Mock.Server) (raw : ByteArray)
    (handler : TestHandler) (config : Config := defaultConfig) : IO ByteArray := Async.block do
  client.send raw
  closeChannelIdempotent client.getSendChan
  Std.Http.Server.serveConnection server handler config |>.run
  let res ← client.recv?
  pure (res.getD .empty)
/-- Run `action` as a background task, polling its state every 10 ms; if it has
not finished within `timeoutMs` milliseconds, cancel it and throw. -/
def runWithTimeout {α : Type} (name : String) (timeoutMs : Nat := 20000) (action : IO α) : IO α := do
  let task ← IO.asTask action
  -- Round the timeout up to whole 10 ms polling ticks.
  let ticks := (timeoutMs + 9) / 10
  let rec loop (remaining : Nat) : IO α := do
    if (← IO.getTaskState task) == .finished then
      match (← IO.wait task) with
      | .ok x => pure x
      | .error err => throw err
    else
      match remaining with
      | 0 => IO.cancel task; throw <| IO.userError s!"Test '{name}' timed out"
      | n + 1 => IO.sleep 10; loop n
  loop ticks
-- Deterministic PRNG helpers (32-bit linear congruential generator).

/-- Advance the LCG state by one step (Numerical Recipes constants, mod 2^32). -/
def nextSeed (seed : Nat) : Nat :=
  (seed * 1664525 + 1013904223) % 4294967296

/-- Draw a value in `[0, n)` (or 0 when `n = 0`), returning it with the new state. -/
def randBelow (seed : Nat) (n : Nat) : Nat × Nat :=
  let state := nextSeed seed
  if n == 0 then (0, state) else (state % n, state)

/-- Draw a value in the inclusive range `[lo, hi]`; an empty range yields `lo`
with the state unchanged. -/
def randIn (seed : Nat) (lo hi : Nat) : Nat × Nat :=
  if hi < lo then
    (lo, seed)
  else
    let (offset, state) := randBelow seed (hi - lo + 1)
    (lo + offset, state)
-- All 256 byte values
/-- Produce `len` fully-random bytes (any value 0-255), threading the PRNG state. -/
def randomFullBytes (seed : Nat) (len : Nat) : ByteArray × Nat := Id.run do
  let mut state := seed
  let mut bytes := ByteArray.empty
  for _ in [0:len] do
    let (pick, state') := randBelow state 256
    state := state'
    bytes := bytes.push (UInt8.ofNat pick)
  (bytes, state)
/-- Handler that ignores the request and always answers "ok". -/
def okHandler : TestHandler := fun _request => (Response.ok).text "ok"
-- Server-generated responses are always valid ASCII. Verify the response is
-- either empty or starts with the HTTP/1.1 status-line prefix.
def assertValidHttpOrEmpty (name : String) (response : ByteArray) : IO Unit := do
  -- An empty response means the connection closed before a full request arrived.
  if response.size == 0 then
    return ()
  let pfx := "HTTP/1.1 ".toUTF8
  if response.size >= pfx.size && response.extract 0 pfx.size == pfx then
    return ()
  -- Show at most the first 200 bytes, decoded if possible.
  let display :=
    match String.fromUTF8? (response.extract 0 (Nat.min 200 response.size)) with
    | some s => s.quote
    | none => "(non-UTF-8 bytes)"
  throw <| IO.userError
    s!"Test '{name}' failed:\nResponse is neither empty nor valid HTTP/1.1:\n{display}"
-- Property: any fully-random byte sequence never causes a panic or malformed response.
-- Direct analogue of hyper's fuzz_h1_req libFuzzer target.
def fuzzRandomBytesNoPanic (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (len, s1) := randIn seed 0 96; seed := s1
    let (bytes, s2) := randomFullBytes seed len; seed := s2
    let (client, server) ← Mock.new
    let response ← sendRawAndClose client server bytes okHandler
    assertValidHttpOrEmpty s!"fuzzRandomBytesNoPanic iter={i} seed={caseSeed} len={len}" response
-- Property: flipping a single bit in any valid request must not cause a panic.
/-- Pick a valid request from a small corpus, flip one random bit, and require
the server response to be empty or a valid HTTP/1.1 status line. -/
def fuzzBitFlipOnValidRequests (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let corpus : Array ByteArray := #[
    "GET / HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8,
    "POST /submit HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 5\x0d\nConnection: close\x0d\n\x0d\nhello".toUTF8,
    "POST /chunked HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8,
    "OPTIONS * HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8,
    "CONNECT example.com:443 HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8,
    "DELETE /resource HTTP/1.1\x0d\nHost: api.example.com\x0d\nAuthorization: Bearer token123\x0d\nConnection: close\x0d\n\x0d\n".toUTF8,
  ]
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (idx, s1) := randBelow seed corpus.size; seed := s1
    let req := corpus[idx]!
    let (pos, s2) := randBelow seed req.size; seed := s2
    let (bit, s3) := randBelow seed 8; seed := s3
    -- XOR a single bit at a random byte position.
    let orig := req[pos]!
    let mask : UInt8 := (1 : UInt8) <<< bit.toUInt8
    let flipped := orig ^^^ mask
    let mutated := (req.extract 0 pos).push flipped ++ req.extract (pos + 1) req.size
    let (client, server) ← Mock.new
    let response ← sendRawAndClose client server mutated okHandler
    assertValidHttpOrEmpty
      s!"fuzzBitFlip iter={i} seed={caseSeed} reqIdx={idx} pos={pos} bit={bit}" response
-- Property: truncating a valid request at any byte boundary must not cause a panic.
/-- Truncate a valid request at a random byte offset and require the server
response to be empty or a valid HTTP/1.1 status line. -/
def fuzzTruncatedRequests (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let corpus : Array ByteArray := #[
    "GET / HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8,
    "POST /data HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 4\x0d\nConnection: close\x0d\n\x0d\ndata".toUTF8,
    "POST /chunked HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n4\x0d\ndata\x0d\n0\x0d\n\x0d\n".toUTF8,
    "HEAD /resource HTTP/1.1\x0d\nHost: example.com\x0d\nAccept: application/json\x0d\nConnection: close\x0d\n\x0d\n".toUTF8,
  ]
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (idx, s1) := randBelow seed corpus.size; seed := s1
    let req := corpus[idx]!
    let (truncLen, s2) := randBelow seed req.size; seed := s2
    let truncated := req.extract 0 truncLen
    let (client, server) ← Mock.new
    let response ← sendRawAndClose client server truncated okHandler
    assertValidHttpOrEmpty
      s!"fuzzTruncated iter={i} seed={caseSeed} reqIdx={idx} truncLen={truncLen}" response
-- Property: a valid HTTP method prefix followed by garbage must not cause a panic.
/-- Prepend a known method token (or nothing) to up to 64 random bytes and
require the server response to be empty or a valid HTTP/1.1 status line. -/
def fuzzMethodPrefixWithGarbage (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let methods : Array ByteArray := #[
    "GET ".toUTF8, "POST ".toUTF8, "PUT ".toUTF8, "DELETE ".toUTF8,
    "HEAD ".toUTF8, "OPTIONS ".toUTF8, "PATCH ".toUTF8, "CONNECT ".toUTF8,
    "HTTP/1.1 ".toUTF8,
    ByteArray.empty,
  ]
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (mIdx, s1) := randBelow seed methods.size; seed := s1
    let pfx := methods[mIdx]!
    let (gLen, s2) := randIn seed 0 64; seed := s2
    let (garbage, s3) := randomFullBytes seed gLen; seed := s3
    let request := pfx ++ garbage
    let (client, server) ← Mock.new
    let response ← sendRawAndClose client server request okHandler
    assertValidHttpOrEmpty
      s!"fuzzMethodPrefix iter={i} seed={caseSeed} mIdx={mIdx} gLen={gLen}" response
-- Property: high-byte (0x80-0xFF, non-ASCII) sequences must not cause a panic.
/-- Feed sequences made exclusively of bytes in `[0x80, 0xFF]` and require the
server response to be empty or a valid HTTP/1.1 status line. -/
def fuzzHighByteValues (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (len, s1) := randIn seed 0 48; seed := s1
    let mut out := ByteArray.empty
    let mut s := s1
    for _ in [0:len] do
      -- randBelow yields [0, 128); shifting by 128 lands in [0x80, 0xFF].
      let (r, s') := randBelow s 128; s := s'
      out := out.push (UInt8.ofNat (r + 128))
    seed := s
    let (client, server) ← Mock.new
    let response ← sendRawAndClose client server out okHandler
    assertValidHttpOrEmpty s!"fuzzHighBytes iter={i} seed={caseSeed} len={len}" response
-- Property: garbage appended after a complete request must not cause a panic.
/-- Append up to 32 random bytes after a complete valid request and require the
server response to be empty or a valid HTTP/1.1 status line. -/
def fuzzGarbageAfterCompleteRequest (iterations : Nat) (seed0 : Nat) : IO Unit := do
  let validReq :=
    "GET /check HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let mut seed := seed0
  for i in [0:iterations] do
    let caseSeed := seed
    let (gLen, s1) := randIn seed 0 32; seed := s1
    let (garbage, s2) := randomFullBytes seed gLen; seed := s2
    let request := validReq ++ garbage
    let (client, server) ← Mock.new
    let response ← sendRawAndClose client server request okHandler
    assertValidHttpOrEmpty
      s!"fuzzGarbageAfter iter={i} seed={caseSeed} gLen={gLen}" response
-- Property: every single-byte input is handled safely (all 256 values).
/-- Exhaustively send each of the 256 possible single-byte inputs and require
the server response to be empty or a valid HTTP/1.1 status line. -/
def fuzzSingleByteInputs : IO Unit := do
  for b in List.range 256 do
    let bytes := ByteArray.mk #[b.toUInt8]
    let (client, server) ← Mock.new
    let response ← sendRawAndClose client server bytes okHandler
    assertValidHttpOrEmpty s!"fuzzSingleByte byte={b}" response
-- Property: known attack patterns and real-world malformed inputs are handled safely.
-- This is the Lean analogue of hyper's denial-of-service and smuggling corpus.
/-- Send each known-malicious payload and require the server response to be
empty or a valid HTTP/1.1 status line — never a panic or garbage bytes. -/
def fuzzKnownMaliciousPatterns : IO Unit := do
  let patterns : Array ByteArray := #[
    -- TLS 1.2 client hello prefix
    ByteArray.mk #[0x16, 0x03, 0x01, 0x00, 0xa5, 0x01, 0x00, 0x00],
    -- TLS 1.3 client hello prefix
    ByteArray.mk #[0x16, 0x03, 0x03, 0x00, 0x7c, 0x01, 0x00, 0x00],
    -- HTTP/2 connection preface
    "PRI * HTTP/2.0\x0d\n\x0d\nSM\x0d\n\x0d\n".toUTF8,
    -- Bare LF line endings
    "GET / HTTP/1.1\nHost: example.com\n\n".toUTF8,
    -- CR-only line endings
    "GET / HTTP/1.1\x0dHost: example.com\x0d\x0d".toUTF8,
    -- Null bytes everywhere
    ByteArray.mk #[0x00, 0x00, 0x00, 0x00],
    -- CRLF injection attempt in request-line
    "GET /path%0d%0aInjected: header HTTP/1.1\x0d\nHost: example.com\x0d\n\x0d\n".toUTF8,
    -- Unicode in path (raw multibyte UTF-8)
    "GET /caf\xc3\xa9 HTTP/1.1\x0d\nHost: example.com\x0d\n\x0d\n".toUTF8,
    -- HTTP response sent as a request (should fail, not panic)
    "HTTP/1.1 200 OK\x0d\nContent-Length: 2\x0d\n\x0d\nok".toUTF8,
    -- Request smuggling: CL.TE
    "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 6\x0d\nTransfer-Encoding: chunked\x0d\n\x0d\n0\x0d\n\x0d\nX".toUTF8,
    -- Request smuggling: TE.CL
    "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nContent-Length: 3\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8,
    -- Duplicate chunked coding
    "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked, chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8,
    -- TE with tab (whitespace obfuscation)
    "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding:\x09chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8,
    -- TE with null byte injection
    "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunk\x00ed\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8,
    -- Extremely long method token
    (String.ofList (List.replicate 8192 'A') ++ " / HTTP/1.1\x0d\nHost: h\x0d\n\x0d\n").toUTF8,
    -- SSRF-like absolute-form URI targeting internal host
    "GET http://169.254.169.254/latest/meta-data/ HTTP/1.1\x0d\nHost: example.com\x0d\n\x0d\n".toUTF8,
    -- Chunk size with non-hex chars
    "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\nGG\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8,
    -- Chunk size overflow attempt (16+ hex digits)
    "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\nffffffffffffffff1\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8,
    -- Header with embedded CRLF in value
    "GET / HTTP/1.1\x0d\nHost: example.com\x0d\nX-Inject: foo\x0d\nEvil: injected\x0d\nConnection: close\x0d\n\x0d\n".toUTF8,
    -- Multiple Host headers (smuggling attempt)
    "GET / HTTP/1.1\x0d\nHost: example.com\x0d\nHost: evil.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8,
    -- Absolute-form URI with bad host overrides Host header
    "GET http://evil.internal/steal HTTP/1.1\x0d\nHost: public.example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8,
    -- Folded header (obs-fold, rejected per RFC 9112)
    "GET / HTTP/1.1\x0d\nHost: example.com\x0d\nX-Folded: value\x0d\n continuation\x0d\nConnection: close\x0d\n\x0d\n".toUTF8,
  ]
  for i in [:patterns.size] do
    let pattern := patterns[i]!
    let (client, server) ← Mock.new
    let response ← sendRawAndClose client server pattern okHandler
    assertValidHttpOrEmpty s!"fuzzKnownMalicious pattern={i}" response
-- ============================================================================
-- Run all properties
-- ============================================================================
-- Each property runs under a wall-clock timeout so a wedged server task cannot
-- hang the suite; iteration counts and seeds are fixed for reproducibility.
-- Property: any random byte sequence is handled safely (core libFuzzer equivalent).
#eval runWithTimeout "fuzz_random_bytes_no_panic" 30000 do
  fuzzRandomBytesNoPanic 200 0x00FACADE
-- Property: single bit mutations on valid requests are handled safely.
#eval runWithTimeout "fuzz_bit_flip_valid_requests" 30000 do
  fuzzBitFlipOnValidRequests 150 0x00B1FF10
-- Property: truncation at any byte boundary is handled safely.
#eval runWithTimeout "fuzz_truncated_requests" 30000 do
  fuzzTruncatedRequests 150 0x00DEAD42
-- Property: HTTP method prefix followed by random garbage is handled safely.
#eval runWithTimeout "fuzz_method_prefix_with_garbage" 30000 do
  fuzzMethodPrefixWithGarbage 100 0x00CA7500
-- Property: high-byte (non-ASCII) sequences are handled safely.
#eval runWithTimeout "fuzz_high_byte_values" 30000 do
  fuzzHighByteValues 120 0x00FF8000
-- Property: garbage appended after a complete request is handled safely.
#eval runWithTimeout "fuzz_garbage_after_complete_request" 30000 do
  fuzzGarbageAfterCompleteRequest 100 0x00A1B2C3
-- Property: every single-byte input is handled safely (all 256 cases).
#eval runWithTimeout "fuzz_single_byte_inputs" 30000 do
  fuzzSingleByteInputs
-- Property: known attack patterns and malformed inputs are handled safely.
#eval runWithTimeout "fuzz_known_malicious_patterns" 30000 do
  fuzzKnownMaliciousPatterns

View File

@@ -0,0 +1,419 @@
import Std.Internal.Http
open Std Http
open Std.Http.Protocol.H1
/-- Throw an `IO.userError` naming the failing test when `cond` is false. -/
private def ensure (name : String) (cond : Bool) (msg : String) : IO Unit := do
  if cond then
    pure ()
  else
    throw <| IO.userError s!"Test '{name}' failed:\n{msg}"
/-- True when at least one `.failed` event is present. -/
private def hasFailedEvent (events : Array (Event .receiving)) : Bool :=
  events.any fun e =>
    match e with
    | .failed _ => true
    | _ => false
/-- True when at least one `.needMoreData` event is present. -/
private def hasNeedMoreDataEvent (events : Array (Event .receiving)) : Bool :=
  events.any fun e =>
    match e with
    | .needMoreData _ => true
    | _ => false
/-- True when at least one `.endHeaders` event is present. -/
private def hasEndHeadersEvent (events : Array (Event .receiving)) : Bool :=
  events.any fun e =>
    match e with
    | .endHeaders _ => true
    | _ => false
/-- True when at least one `.closeBody` event is present. -/
private def hasCloseBodyEvent (events : Array (Event .receiving)) : Bool :=
  events.any fun e =>
    match e with
    | .closeBody => true
    | _ => false
/-- True when at least one `.continue` (100-continue) event is present. -/
private def hasContinueEvent (events : Array (Event .receiving)) : Bool :=
  events.any fun e =>
    match e with
    | .«continue» => true
    | _ => false
/-- Number of `.needAnswer` events in `events`. -/
private def countNeedAnswerEvents (events : Array (Event .receiving)) : Nat :=
  (events.filter fun e =>
    match e with
    | .needAnswer => true
    | _ => false).size
/-- Number of `.failed` events in `events`. -/
private def countFailedEvents (events : Array (Event .receiving)) : Nat :=
  (events.filter fun e =>
    match e with
    | .failed _ => true
    | _ => false).size
/-- Concatenate the raw data of all pulled body chunks, in order. -/
private def pulledBodyBytes (chunks : Array PulledChunk) : ByteArray := Id.run do
  let mut acc := ByteArray.empty
  for c in chunks do
    acc := acc ++ c.chunk.data
  return acc
/-- Split `data` into single-byte fragments, preserving order. -/
private def splitEveryByte (data : ByteArray) : Array ByteArray :=
  (Array.range data.size).map fun i => data.extract i (i + 1)
/-- One LCG step (Numerical Recipes constants) over 32-bit state. -/
private def nextSeed (seed : Nat) : Nat :=
  (seed * 1664525 + 1013904223) % 4294967296
/-- Advance the PRNG and reduce into `[0, maxExclusive)`; a bound of zero
yields zero. Returns `(value, nextState)`. -/
private def randBelow (seed : Nat) (maxExclusive : Nat) : Nat × Nat :=
  let s := nextSeed seed
  (if maxExclusive = 0 then 0 else s % maxExclusive, s)
/-- Uniform pseudo-random value in `[low, high]`; degenerate ranges return
`low` without consuming the seed. -/
private def randIn (seed : Nat) (low : Nat) (high : Nat) : Nat × Nat :=
  if low ≤ high then
    let (n, seed') := randBelow seed (high - low + 1)
    (low + n, seed')
  else
    (low, seed)
/-- Produce `len` pseudo-random bytes from a 38-symbol alphabet:
lowercase letters, digits, `-`, and `_`. -/
private def randomAsciiBytes (seed : Nat) (len : Nat) : ByteArray × Nat := Id.run do
  let mut state := seed
  let mut buf := ByteArray.empty
  for _ in [0:len] do
    let (roll, state') := randBelow state 38
    state := state'
    -- 0-25 → 'a'-'z', 26-35 → '0'-'9', 36 → '-', 37 → '_'
    let code :=
      if roll < 26 then 97 + roll
      else if roll < 36 then 48 + (roll - 26)
      else if roll = 36 then 45
      else 95
    buf := buf.push (UInt8.ofNat code)
  return (buf, state)
/-- Split `data` into consecutive fragments of pseudo-random length in
`[1, maxPart]`. Each fragment is at least one byte, so the loop always
makes progress and terminates. Returns the fragments plus the advanced seed. -/
private def randomSplit (seed : Nat) (data : ByteArray) (maxPart : Nat := 13) : Array ByteArray × Nat := Id.run do
  let mut s := seed
  let mut out : Array ByteArray := #[]
  let mut i := 0
  while i < data.size do
    let remaining := data.size - i
    -- Never take more than what is left; lower bound 1 guarantees progress.
    let upper := Nat.min maxPart remaining
    let (partLen, s') := randIn s 1 upper
    s := s'
    out := out.push (data.extract i (i + partLen))
    i := i + partLen
  (out, s)
/-- Encode `body` as an HTTP/1.1 chunked transfer-coding payload with
pseudo-random chunk sizes in `[1, 9]`, terminated by the `0\r\n\r\n`
last-chunk marker. Returns the payload plus the advanced seed. -/
private def randomChunkedPayload (seed : Nat) (body : ByteArray) : ByteArray × Nat := Id.run do
  let mut s := seed
  let mut out := ByteArray.empty
  let mut i := 0
  while i < body.size do
    let remaining := body.size - i
    let upper := Nat.min 9 remaining
    let (chunkLen, s') := randIn s 1 upper
    s := s'
    -- chunk-size line (sizes ≤ 9 never need multi-digit hex), data, CRLF.
    out := out ++ s!"{chunkLen}\r\n".toUTF8
    out := out ++ body.extract i (i + chunkLen)
    out := out ++ "\r\n".toUTF8
    i := i + chunkLen
  out := out ++ "0\r\n\r\n".toUTF8
  (out, s)
/-- Build a keep-alive POST request carrying `body` with a Content-Length header. -/
private def mkContentLengthRequest (path : String) (body : ByteArray) : ByteArray :=
  let head :=
    s!"POST {path} HTTP/1.1\r\nHost: example.com\r\nContent-Length: {body.size}\r\nConnection: keep-alive\r\n\r\n"
  head.toUTF8 ++ body
/-- Build a keep-alive POST request carrying an already-encoded chunked payload. -/
private def mkChunkedRequest (path : String) (chunkedPayload : ByteArray) : ByteArray :=
  let head :=
    s!"POST {path} HTTP/1.1\r\nHost: example.com\r\nTransfer-Encoding: chunked\r\nConnection: keep-alive\r\n\r\n"
  head.toUTF8 ++ chunkedPayload
/-- Build only the head of a chunked POST request, with no body bytes. -/
private def mkChunkedHead (path : String) : ByteArray :=
  let head :=
    s!"POST {path} HTTP/1.1\r\nHost: example.com\r\nTransfer-Encoding: chunked\r\nConnection: keep-alive\r\n\r\n"
  head.toUTF8
/-- Accumulated observable state from driving a receiving `Machine` incrementally. -/
private structure IncrementalTrace where
  -- Final machine state after all parts were fed and ready body chunks drained.
  machine : Machine .receiving
  -- Every event emitted across all steps and body pulls, in emission order.
  events : Array (Event .receiving) := #[]
  -- Concatenated wire output produced by the machine's steps.
  output : ByteArray := .empty
  -- Body chunks pulled from the machine, in arrival order.
  pulled : Array PulledChunk := #[]
/-- Feed `parts` to a fresh receiving machine one fragment at a time, stepping
and pulling at most one body chunk after each fragment, then drain any
remaining ready chunks. Collects events, output bytes, and pulled chunks. -/
private def runIncrementalReceiving
    (parts : Array ByteArray)
    (config : Protocol.H1.Config := {}) : IncrementalTrace := Id.run do
  let mut machine : Machine .receiving := { config := config }
  let mut events : Array (Event .receiving) := #[]
  let mut output := ByteArray.empty
  let mut pulled : Array PulledChunk := #[]
  for part in parts do
    machine := machine.feed part
    let (machine', step) := machine.step
    machine := machine'
    events := events ++ step.events
    output := output ++ step.output.toByteArray
    -- Pull at most one body chunk per input part to simulate streaming callers.
    -- Guard on buffered bytes to avoid calling into body parsing on an empty buffer.
    if machine.canPullBodyNow && machine.reader.input.remainingBytes > 0 then
      let (machine', pulledNow?) := machine.pullBody
      let (machine', pullEvents) := machine'.takeEvents
      machine := machine'
      if let some pulledNow := pulledNow? then
        pulled := pulled.push pulledNow
      -- Events from the pull are recorded even when no chunk was produced.
      events := events ++ pullEvents
    else
      pure ()
  -- One final step so state transitions pending after the last fragment run.
  let (machine', finalStep) := machine.step
  machine := machine'
  events := events ++ finalStep.events
  output := output ++ finalStep.output.toByteArray
  -- After all input has arrived, drain the remaining ready body chunks.
  -- Fuel bounds the drain loop so a non-advancing machine cannot hang the test.
  let mut fuel := 4096
  while fuel > 0 && machine.canPullBodyNow && machine.reader.input.remainingBytes > 0 do
    fuel := fuel - 1
    let (machine', pulledNow?) := machine.pullBody
    let (machine', pullEvents) := machine'.takeEvents
    machine := machine'
    events := events ++ pullEvents
    match pulledNow? with
    | some pulledNow =>
      pulled := pulled.push pulledNow
    | none =>
      break
  return { machine, events, output, pulled }
/-- Assert that a trace represents a fully-successful parse whose pulled body
equals `expectedBody`; optionally require a `needMoreData` event to have
occurred (expected whenever input arrived in more than one fragment). -/
private def assertIncrementalSuccess
    (name : String)
    (trace : IncrementalTrace)
    (expectedBody : ByteArray)
    (expectNeedMoreData : Bool := true) : IO Unit := do
  ensure name (!trace.machine.failed) s!"machine ended failed with events:\n{repr trace.events}"
  ensure name (!hasFailedEvent trace.events) s!"unexpected failure events:\n{repr trace.events}"
  ensure name (hasEndHeadersEvent trace.events) s!"missing endHeaders event:\n{repr trace.events}"
  if expectNeedMoreData then
    ensure name (hasNeedMoreDataEvent trace.events) s!"expected at least one needMoreData event:\n{repr trace.events}"
  let actual := pulledBodyBytes trace.pulled
  ensure name (actual == expectedBody)
    s!"body mismatch:\nexpected={String.fromUTF8! expectedBody |>.quote}\nactual={String.fromUTF8! actual |>.quote}"
  -- Pull-related signals are only required when a body was expected or observed.
  if expectedBody.size > 0 || trace.pulled.size > 0 then
    ensure name (hasCloseBodyEvent trace.events) s!"missing closeBody event:\n{repr trace.events}"
    ensure name (trace.pulled.any (·.final)) "expected at least one final pulled chunk"
    ensure name ((trace.pulled.back?.map (·.final)).getD false) "expected last pulled chunk to be final"
/-- Feed the entire payload into a fresh receiving machine and take one step. -/
private def runOneStepReceiving
    (payload : ByteArray)
    (config : Protocol.H1.Config := {}) :
    Machine .receiving × StepResult .receiving :=
  let fresh : Machine .receiving := { config := config }
  let fed := fresh.feed payload
  fed.step
/-- Assert that one step over `payload` emits a failure event and leaves the
machine with exactly the `expected` error. -/
private def assertFailedWith
    (name : String)
    (payload : ByteArray)
    (expected : Error)
    (config : Protocol.H1.Config := {}) : IO Unit := do
  let (endState, step) := runOneStepReceiving payload config
  ensure name (hasFailedEvent step.events)
    s!"expected failure event, events:\n{repr step.events}"
  ensure name (endState.error == some expected)
    s!"expected error {repr expected}, got {repr endState.error}"
-- Deterministic: one-byte incremental content-length request.
#eval show IO Unit from do
  let body := "hello".toUTF8
  let request := mkContentLengthRequest "/inc-every-byte" body
  let trace := runIncrementalReceiving (splitEveryByte request)
  assertIncrementalSuccess "CL one-byte incremental parse" trace body
-- Deterministic: fragmented chunked request with boundaries through chunk metadata and payload.
#eval show IO Unit from do
  let body := "abcXYZ".toUTF8
  let payload := "3\r\nabc\r\n3\r\nXYZ\r\n0\r\n\r\n".toUTF8
  let request := mkChunkedRequest "/inc-chunked" payload
  -- Fixed offsets place fragment boundaries inside distinct message sections
  -- (request-line, headers, chunk metadata/data, and the last-chunk marker).
  let parts : Array ByteArray := #[
    request.extract 0 17,
    request.extract 17 39,
    request.extract 39 58,
    request.extract 58 (request.size - 4),
    request.extract (request.size - 4) request.size
  ]
  let trace := runIncrementalReceiving parts
  assertIncrementalSuccess "Chunked fragmented parse" trace body
-- Regression: calling `pullBody` in chunked mode before any chunk-size byte arrives
-- must request more data rather than failing the machine.
#eval show IO Unit from do
  let headOnly := mkChunkedHead "/wait-for-chunk-size"
  let machine0 : Machine .receiving := { config := {} }
  let (machine1, step1) := (machine0.feed headOnly).step
  ensure "Chunked pull on empty input (setup)" (!machine1.failed) s!"unexpected setup failure events:\n{repr step1.events}"
  ensure "Chunked pull on empty input (setup)" (hasEndHeadersEvent step1.events) "expected endHeaders in setup"
  ensure "Chunked pull on empty input (setup)" machine1.canPullBodyNow "expected body state to be pullable"
  -- Pulling with an empty buffer must be a no-op signal, not a failure.
  let (machine2, pulled?) := machine1.pullBody
  let (machine2, pullEvents) := machine2.takeEvents
  ensure "Chunked pull on empty input" pulled?.isNone "expected no pulled body chunk"
  ensure "Chunked pull on empty input" (!machine2.failed) s!"unexpected machine failure:\n{repr pullEvents}"
  ensure "Chunked pull on empty input" (hasNeedMoreDataEvent pullEvents)
    s!"expected needMoreData after empty pull:\n{repr pullEvents}"
-- Regression: unread buffered input must stay bounded to avoid memory blowups
-- when upper layers stall request-body consumption.
#eval show IO Unit from do
  let cfg : Protocol.H1.Config := {
    maxBodySize := 32
    maxHeaderBytes := 16
    maxStartLineLength := 16
    maxChunkLineLength := 16
  }
  -- NOTE(review): the buffer cap appears to be the sum of the per-section
  -- limits, so one byte past that sum must fail — confirm against Machine.feed.
  let cap := cfg.maxBodySize + cfg.maxHeaderBytes + cfg.maxStartLineLength + cfg.maxChunkLineLength
  let payload := ByteArray.mk (Array.replicate (cap + 1) (UInt8.ofNat 97))
  let machine0 : Machine .receiving := { config := cfg }
  let machine1 := machine0.feed payload
  ensure "Buffered input cap triggers failure" machine1.failed "expected machine to fail on oversized buffered input"
  ensure "Buffered input cap triggers entityTooLarge" (machine1.error == some .entityTooLarge)
    s!"expected entityTooLarge failure, got {repr machine1.error}"
-- Property-style: randomized content-length body and randomized split boundaries.
#eval show IO Unit from do
  let mut seed : Nat := 0x21436587
  for i in [0:120] do
    let (bodyLen, s1) := randIn seed 0 128
    seed := s1
    let (body, s2) := randomAsciiBytes seed bodyLen
    seed := s2
    let request := mkContentLengthRequest s!"/prop-cl-{i}" body
    let (parts, s3) := randomSplit seed request 11
    seed := s3
    let trace := runIncrementalReceiving parts
    -- A single-fragment delivery may complete without ever needing more data.
    assertIncrementalSuccess s!"Property CL #{i}" trace body (expectNeedMoreData := parts.size > 1)
-- Property-style: randomized chunked payload and randomized split boundaries.
#eval show IO Unit from do
  let mut seed : Nat := 0x89abcdef
  for i in [0:120] do
    let (bodyLen, s1) := randIn seed 0 128
    seed := s1
    let (body, s2) := randomAsciiBytes seed bodyLen
    seed := s2
    let (payload, s3) := randomChunkedPayload seed body
    seed := s3
    let request := mkChunkedRequest s!"/prop-chunked-{i}" payload
    let (parts, s4) := randomSplit seed request 9
    seed := s4
    let trace := runIncrementalReceiving parts
    assertIncrementalSuccess s!"Property chunked #{i}" trace body (expectNeedMoreData := parts.size > 1)
-- Edge cases: each malformed or limit-violating request must map to the
-- specific `Error` value the server layer uses to pick a status code.
-- Edge case: unsupported HTTP version in request-line.
#eval show IO Unit from do
  assertFailedWith
    "Unsupported HTTP version"
    "GET / HTTP/2.0\r\nHost: example.com\r\n\r\n".toUTF8
    .unsupportedVersion
-- Edge case: URI length limit must map to uriTooLong.
#eval show IO Unit from do
  let cfg : Protocol.H1.Config := { maxUriLength := 8, maxStartLineLength := 256 }
  assertFailedWith
    "URI too long"
    "GET /this/path/is/way/too/long HTTP/1.1\r\nHost: example.com\r\n\r\n".toUTF8
    .uriTooLong
    cfg
-- Edge case: header count limit exceeded.
#eval show IO Unit from do
  let cfg : Protocol.H1.Config := { maxHeaders := 1 }
  assertFailedWith
    "Too many headers"
    "GET / HTTP/1.1\r\nHost: example.com\r\nX-Test: 1\r\n\r\n".toUTF8
    .tooManyHeaders
    cfg
-- Edge case: aggregate header bytes limit exceeded.
#eval show IO Unit from do
  let cfg : Protocol.H1.Config := { maxHeaderBytes := 8 }
  assertFailedWith
    "Headers too large"
    "GET / HTTP/1.1\r\nHost: example.com\r\n\r\n".toUTF8
    .headersTooLarge
    cfg
-- Edge case: duplicate Host headers rejected.
#eval show IO Unit from do
  assertFailedWith
    "Duplicate Host rejected"
    "GET / HTTP/1.1\r\nHost: a.example\r\nHost: b.example\r\n\r\n".toUTF8
    .badMessage
-- Edge case: mixed Content-Length and Transfer-Encoding rejected.
#eval show IO Unit from do
  assertFailedWith
    "TE and CL mixed"
    "POST / HTTP/1.1\r\nHost: example.com\r\nContent-Length: 4\r\nTransfer-Encoding: chunked\r\n\r\n4\r\nWiki\r\n0\r\n\r\n".toUTF8
    .badMessage
-- Edge case: CONNECT must use authority-form target.
#eval show IO Unit from do
  assertFailedWith
    "CONNECT non-authority target rejected"
    "CONNECT / HTTP/1.1\r\nHost: example.com\r\n\r\n".toUTF8
    .badMessage
-- Edge case: asterisk-form reserved for OPTIONS.
#eval show IO Unit from do
  assertFailedWith
    "GET * rejected"
    "GET * HTTP/1.1\r\nHost: example.com\r\n\r\n".toUTF8
    .badMessage
-- Edge case: Expect: 100-continue pauses reader and can be accepted/rejected.
#eval show IO Unit from do
  let request :=
    "POST /expect HTTP/1.1\r\nHost: example.com\r\nExpect: 100-continue\r\nContent-Length: 4\r\n\r\n".toUTF8
  let machine0 : Machine .receiving := { config := {} }
  let (machine1, step1) := (machine0.feed request).step
  ensure "Expect setup not failed" (!machine1.failed) s!"unexpected failure:\n{repr step1.events}"
  ensure "Expect emitted continue event" (hasContinueEvent step1.events) s!"missing continue event:\n{repr step1.events}"
  ensure "Expect reader paused"
    (match machine1.reader.state with
      | .«continue» _ => true
      | _ => false)
    s!"expected continue state, got {repr machine1.reader.state}"
  -- Accept path: answering with 100-continue resumes the fixed-length body read.
  let machine2 := machine1.canContinue .«continue»
  ensure "Expect accepted"
    (match machine2.reader.state with
      | .readBody (.fixed 4) => true
      | _ => false)
    s!"expected fixed(4) body state, got {repr machine2.reader.state}"
  -- Reject path: NOTE(review) `.ok` here appears to mean a final non-continue
  -- answer, which skips the body and closes the reader — confirm `canContinue` docs.
  let machine3 := machine1.canContinue .ok
  ensure "Expect rejected closes reader"
    (match machine3.reader.state with
      | .closed => true
      | _ => false)
    s!"expected closed reader, got {repr machine3.reader.state}"
  ensure "Expect rejected disables keep-alive" (!machine3.keepAlive) "keepAlive should be disabled after reject"
-- Regression: receiving path should emit needAnswer only once per message.
#eval show IO Unit from do
  let payload := "GET / HTTP/1.1\r\nHost: example.com\r\n\r\n".toUTF8
  let (_machine, step) := runOneStepReceiving payload
  ensure "Single needAnswer event" (countNeedAnswerEvents step.events == 1)
    s!"expected exactly one needAnswer event, got events:\n{repr step.events}"
-- Regression: malformed request should emit failed only once.
#eval show IO Unit from do
  let payload := "GET / HTTP/2.0\r\nHost: example.com\r\n\r\n".toUTF8
  let (_machine, step) := runOneStepReceiving payload
  ensure "Single failed event" (countFailedEvents step.events == 1)
    s!"expected exactly one failed event, got events:\n{repr step.events}"

View File

@@ -0,0 +1,202 @@
import Std.Internal.Http
import Std.Internal.Http.Protocol.H1.Parser
open Std Internal Parsec ByteArray
open Std.Http.Protocol.H1
/-- Throw an `IO.userError` prefixed with the test name when `cond` is false. -/
private def ensure (name : String) (cond : Bool) (msg : String) : IO Unit := do
  if !cond then
    throw <| IO.userError s!"{name}: {msg}"
/-- Run `p` over the UTF-8 bytes of `s`, requiring all input to be consumed. -/
private def runParser (p : Parser α) (s : String) : Except String α :=
  match (p <* eof).run s.toUTF8 with
  | .error e => .error e
  | .ok x => .ok x
/-- Uniform random value in `[0, maxExclusive)` via `randNat`; a bound of
zero yields zero without advancing the generator. -/
private def randBelow (gen : StdGen) (maxExclusive : Nat) : Nat × StdGen :=
  match maxExclusive with
  | 0 => (0, gen)
  | n => randNat gen 0 (n - 1)
/-- Select a uniformly random element of `xs` (panics on an empty array). -/
private def pick! [Inhabited α] (gen : StdGen) (xs : Array α) : α × StdGen :=
  let (idx, gen') := randBelow gen xs.size
  (xs[idx]!, gen')
/-- Produce a random token of `len` characters over a 38-symbol alphabet:
lowercase letters, digits, `-`, and `_`. -/
private def randomToken (gen : StdGen) (len : Nat) : String × StdGen := Id.run do
  let mut rng := gen
  let mut acc := ""
  for _ in [0:len] do
    let (roll, rng') := randBelow rng 38
    rng := rng'
    -- 0-25 → 'a'-'z', 26-35 → '0'-'9', 36 → '-', 37 → '_'
    let c :=
      if roll < 26 then Char.ofNat (97 + roll)
      else if roll < 36 then Char.ofNat (48 + (roll - 26))
      else if roll = 36 then '-'
      else '_'
    acc := acc.push c
  return (acc, rng)
/-- Produce a random reason-phrase of up to `len` characters (uppercase letters
and spaces), with surrounding ASCII whitespace trimmed. -/
private def randomReason (gen : StdGen) (len : Nat) : String × StdGen := Id.run do
  let mut rng := gen
  let mut acc := ""
  for _ in [0:len] do
    let (roll, rng') := randBelow rng 30
    rng := rng'
    -- Rolls 26-29 produce spaces so trimming can shorten the phrase.
    let c := if roll < 26 then Char.ofNat (65 + roll) else ' '
    acc := acc.push c
  return (acc.trimAscii.toString, rng)
/-- Left-pad `n` with zeros to at least three digits (e.g. `7 → "007"`). -/
private def pad3 (n : Nat) : String :=
  let digits := toString n
  "".pushn '0' (3 - digits.length) ++ digits
/-- Fail the test unless `s` parses as a valid request-line. -/
private def expectRequestOk (s : String) : IO Unit := do
  if let .error e := runParser (parseRequestLine {}) s then
    throw <| IO.userError s!"expected request-line success for {s.quote}, got: {e}"
/-- Fail the test if `s` unexpectedly parses as a valid request-line. -/
private def expectRequestFail (s : String) : IO Unit := do
  if let .ok _ := runParser (parseRequestLine {}) s then
    throw <| IO.userError s!"expected request-line failure for {s.quote}"
/-- Fail the test unless `s` parses as a valid status-line. -/
private def expectStatusOk (s : String) : IO Unit := do
  if let .error e := runParser (parseStatusLine {}) s then
    throw <| IO.userError s!"expected status-line success for {s.quote}, got: {e}"
/-- Fail the test if `s` unexpectedly parses as a valid status-line. -/
private def expectStatusFail (s : String) : IO Unit := do
  if let .ok _ := runParser (parseStatusLine {}) s then
    throw <| IO.userError s!"expected status-line failure for {s.quote}"
/-- Run `p` on `s` and return the parsed value, failing the test on error. -/
private def expectOk {α} (name : String) (p : Parser α) (s : String) : IO α := do
  match runParser p s with
  | .error e => throw <| IO.userError s!"{name}: expected success for {s.quote}, got {e}"
  | .ok x => pure x
/-- Run `p` on `s` and fail the test if the parse unexpectedly succeeds. -/
private def expectFail {α} (name : String) (p : Parser α) (s : String) : IO Unit := do
  if let .ok _ := runParser p s then
    throw <| IO.userError s!"{name}: expected failure for {s.quote}"
-- Fuzz: every method/target/version combination parses, and three systematic
-- mutations of each generated line are rejected.
#eval show IO Unit from do
  let methods : Array String := #["GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS", "HEAD", "CONNECT"]
  let targets : Array String := #["/", "/a", "/a/b", "/a/b?q=1", "*", "http://example.com", "example.com:443"]
  let versions : Array String := #["HTTP/1.1", "HTTP/1.0"]
  let mut gen : StdGen := StdGen.mk 0x5eed1234 0x12345
  for i in [0:400] do
    let (m, g1) := pick! gen methods
    let (t, g2) := pick! g1 targets
    let (v, g3) := pick! g2 versions
    gen := g3
    let line := s!"{m} {t} {v}\r\n"
    expectRequestOk line
    -- Mutation 1: drop the first space
    expectRequestFail s!"{m}{t} {v}\r\n"
    -- Mutation 2: invalid version token
    expectRequestFail s!"{m} {t} HTTP/2.0\r\n"
    -- Mutation 3: bad method character
    expectRequestFail s!"{m}! {t} {v}\r\n"
    ensure "request fuzz progress" (i < 100000) "unreachable safety check"
-- Fuzz: valid status-lines with random reason phrases parse, and three
-- systematic mutations of each generated line are rejected.
#eval show IO Unit from do
  let knownCodes : Array Nat := #[200, 201, 204, 301, 400, 404, 500, 503]
  let mut gen : StdGen := StdGen.mk 0xabcde123 0x777
  for _ in [0:400] do
    let (code, g1) := pick! gen knownCodes
    let (len, g2) := randBelow g1 20
    let (reasonRaw, g3) := randomReason g2 (len + 1)
    gen := g3
    -- Trimming may produce an empty phrase; substitute a fixed one.
    let reason := if reasonRaw.isEmpty then "OK" else reasonRaw
    let line := s!"HTTP/1.1 {pad3 code} {reason}\r\n"
    expectStatusOk line
    -- Mutation 1: unsupported version
    expectStatusFail s!"HTTP/2.0 {pad3 code} {reason}\r\n"
    -- Mutation 2: non-digit in status code
    expectStatusFail s!"HTTP/1.1 A{(pad3 code).drop 1} {reason}\r\n"
    -- Mutation 3: illegal reason byte (DEL)
    expectStatusFail s!"HTTP/1.1 {pad3 code} bad{Char.ofNat 127}\r\n"
#eval show IO Unit from do
  -- Randomized malformed gibberish smoke: parser must simply return error or success,
  -- but never crash/panic.
  let mut gen : StdGen := StdGen.mk 0x31415926 0x27182818
  for _ in [0:300] do
    let (len, g1) := randBelow gen 80
    let (tok, g2) := randomToken g1 (len + 1)
    gen := g2
    -- Results are intentionally discarded: only panics would fail this test.
    let _ := runParser (parseRequestLine {}) (tok ++ "\r\n")
    let _ := runParser (parseStatusLine {}) (tok ++ "\r\n")
  pure ()
-- Component tests for individual parser parts.
-- (Restored the `←` binds that were missing on each `expectOk` call.)
#eval show IO Unit from do
  -- parseSingleHeader
  let sh1 ← expectOk "parseSingleHeader some" (parseSingleHeader {} <* eof) "Host: x\r\n"
  ensure "parseSingleHeader some present" sh1.isSome "expected some header"
  let sh2 ← expectOk "parseSingleHeader none" (parseSingleHeader {} <* eof) "\r\n"
  ensure "parseSingleHeader none present" sh2.isNone "expected header terminator"
  -- parseChunkSize / parseChunkPartial
  let (n1, ext1) ← expectOk "parseChunkSize bare" (parseChunkSize {} <* eof) "A\r\n"
  ensure "parseChunkSize value" (n1 == 10) "chunk-size mismatch"
  ensure "parseChunkSize ext empty" (ext1.isEmpty) "expected no extensions"
  let (n2, ext2) ← expectOk "parseChunkSize ext" (parseChunkSize {} <* eof) "4;foo=bar;baz=\"qux\"\r\n"
  ensure "parseChunkSize ext value" (n2 == 4) "chunk-size mismatch with ext"
  ensure "parseChunkSize ext count" (ext2.size == 2) "expected 2 extensions"
  let cp1 ← expectOk "parseChunkPartial some" (parseChunkPartial {} <* eof) "4\r\nWiki"
  ensure "parseChunkPartial some isSome" cp1.isSome "expected chunk data"
  ensure "parseChunkPartial some size" ((cp1.map (fun (n, _, _) => n)).getD 0 == 4) "size mismatch"
  let cp0 ← expectOk "parseChunkPartial none" (parseChunkPartial {} <* eof) "0\r\n"
  ensure "parseChunkPartial none isNone" cp0.isNone "expected last-chunk marker"
  -- parseFixedSizeData / parseChunkSizedData
  let fs1 ← expectOk "parseFixedSizeData complete" (parseFixedSizeData 4 <* eof) "Wiki"
  ensure "parseFixedSizeData complete shape"
    (match fs1 with | .complete _ => true | _ => false)
    "expected complete result"
  let fs2 ← expectOk "parseFixedSizeData incomplete" (parseFixedSizeData 4 <* eof) "Wi"
  ensure "parseFixedSizeData incomplete shape"
    (match fs2 with | .incomplete _ 2 => true | _ => false)
    "expected incomplete result with remaining=2"
  let cs1 ← expectOk "parseChunkSizedData complete" (parseChunkSizedData 4 <* eof) "Wiki\r\n"
  ensure "parseChunkSizedData complete shape"
    (match cs1 with | .complete _ => true | _ => false)
    "expected complete chunk-sized result"
  let cs2 ← expectOk "parseChunkSizedData incomplete" (parseChunkSizedData 4 <* eof) "Wi"
  ensure "parseChunkSizedData incomplete shape"
    (match cs2 with | .incomplete _ 2 => true | _ => false)
    "expected incomplete chunk-sized result with remaining=2"
  -- parseTrailers
  let trailers ← expectOk "parseTrailers ok" (parseTrailers {} <* eof) "X-Test: a\r\nY-Test: b\r\n\r\n"
  ensure "parseTrailers count" (trailers.size == 2) "expected 2 trailers"
  expectFail "parseTrailers forbidden" (parseTrailers {} <* eof) "Content-Length: 1\r\n\r\n"
  -- parseRequestLineRawVersion / parseStatusLineRawVersion
  let (m1, _, v1) ← expectOk "parseRequestLineRawVersion" (parseRequestLineRawVersion {} <* eof) "GET / HTTP/1.1\r\n"
  ensure "parseRequestLineRawVersion method" (m1 == Std.Http.Method.get) "method mismatch"
  ensure "parseRequestLineRawVersion version" (v1 == some Std.Http.Version.v11) "expected recognized v11"
  let (_, rv) ← expectOk "parseStatusLineRawVersion" (parseStatusLineRawVersion {} <* eof) "HTTP/1.1 204 No Content\r\n"
  ensure "parseStatusLineRawVersion recognized" (rv == some Std.Http.Version.v11) "expected v11"
  -- parseRequestLine / parseStatusLine failures
  expectFail "parseRequestLine invalid version" (parseRequestLine {} <* eof) "GET / HTTP/2.0\r\n"
  expectFail "parseStatusLine invalid version" (parseStatusLine {} <* eof) "HTTP/2.0 200 OK\r\n"

View File

@@ -0,0 +1,376 @@
import Std.Internal.Http
import Std.Internal.Async
import Std.Internal.Async.Timer
open Std.Internal.IO Async
open Std Http
/-- Handlers under test: a function from an incoming request to an async response.
    (Restored the stripped `→` in the abbrev and the `←` binds in the coercions.) -/
abbrev TestHandler := Request Body.Incoming → ContextAsync (Response Body.AnyBody)

instance : Std.Http.Server.Handler TestHandler where
  onRequest handler request := handler request

/-- Allow returning an incoming-body response where an any-body response is expected. -/
instance : Coe (ContextAsync (Response Body.Incoming)) (ContextAsync (Response Body.AnyBody)) where
  coe action := do
    let response ← action
    pure { response with body := Body.Internal.incomingToOutgoing response.body }

instance : Coe (Async (Response Body.Incoming)) (ContextAsync (Response Body.AnyBody)) where
  coe action := do
    let response ← action
    pure { response with body := Body.Internal.incomingToOutgoing response.body }
/-- Baseline server config for these tests: a short 500 ms lingering timeout and
    no generated `Date` header, keeping response bytes deterministic. -/
def defaultConfig : Config :=
  { lingeringTimeout := 500
    generateDate := false }
/-- Run `action` in a background task, polling its state every 10 ms; cancel it and
    fail with a descriptive error if it does not finish within `timeoutMs`.
    (Restored the stripped `←` binds on `IO.asTask`, `IO.getTaskState`, `IO.wait`.) -/
def runWithTimeout {α : Type} (name : String) (timeoutMs : Nat := 2000) (action : IO α) : IO α := do
  let task ← IO.asTask action
  -- Number of 10 ms polling ticks, rounded up.
  let ticks := (timeoutMs + 9) / 10
  let rec loop (remaining : Nat) : IO α := do
    if (← IO.getTaskState task) == .finished then
      match (← IO.wait task) with
      | .ok x => pure x
      | .error err => throw err
    else
      match remaining with
      | 0 =>
        IO.cancel task
        throw <| IO.userError s!"Test '{name}' timed out after {timeoutMs}ms (possible hang/loop)"
      | n + 1 =>
        IO.sleep 10
        loop n
  loop ticks
/-- Feed `raw` bytes to the mock client, serve the connection with `handler`,
    and return whatever bytes the server wrote back (empty if none).
    (Restored the stripped `←` bind on `client.recv?`.) -/
def sendRaw (client : Mock.Client) (server : Mock.Server) (raw : ByteArray)
    (handler : TestHandler) (config : Config := defaultConfig) : IO ByteArray := Async.block do
  client.send raw
  Std.Http.Server.serveConnection server handler config
    |>.run
  let res ← client.recv?
  pure <| res.getD .empty
/-- `sendRaw` against a fresh mock connection pair, guarded by a 2 s timeout.
    (Restored the stripped `←` bind on `Mock.new`.) -/
def sendRawTimed (name : String) (raw : ByteArray)
    (handler : TestHandler) (config : Config := defaultConfig) : IO ByteArray :=
  runWithTimeout name 2000 do
    let (client, server) ← Mock.new
    sendRaw client server raw handler config
/-- Send `raw`, immediately close the client side, then serve the connection —
    exercises server behavior against an already-closed peer, under a 2 s timeout.
    (Restored the stripped `←` bind on `Mock.new`.) -/
def runClosedClientTimed (name : String) (raw : ByteArray)
    (handler : TestHandler) (config : Config := defaultConfig) : IO Unit :=
  runWithTimeout name 2000 do
    Async.block do
      let (client, server) ← Mock.new
      client.send raw
      client.close
      Std.Http.Server.serveConnection server handler config
        |>.run
/-- Count non-overlapping occurrences of `needle` in `s`.
    An empty needle counts as zero occurrences. -/
def countOccurrences (s : String) (needle : String) : Nat :=
  match needle.isEmpty with
  | true => 0
  | false => (s.splitOn needle).length - 1
/-- Fail the named test unless the UTF-8 decoded `response` starts with `prefix_`. -/
def assertStatusPrefix (name : String) (response : ByteArray) (prefix_ : String) : IO Unit := do
  let text := String.fromUTF8! response
  if text.startsWith prefix_ then
    return ()
  throw <| IO.userError s!"Test '{name}' failed:\nExpected prefix: {prefix_.quote}\nGot:\n{text.quote}"
/-- Fail the named test unless the UTF-8 decoded `response` contains `needle`. -/
def assertContains (name : String) (response : ByteArray) (needle : String) : IO Unit := do
  let text := String.fromUTF8! response
  -- NOTE(review): relies on a substring `String.contains` overload being in scope;
  -- core's `String.contains` takes a `Char` — confirm which declaration resolves here.
  unless text.contains needle do
    throw <| IO.userError s!"Test '{name}' failed:\nMissing: {needle.quote}\nGot:\n{text.quote}"
/-- Fail the named test if the UTF-8 decoded `response` contains `needle`. -/
def assertNotContains (name : String) (response : ByteArray) (needle : String) : IO Unit := do
  let text := String.fromUTF8! response
  -- NOTE(review): same substring `contains` assumption as `assertContains` — confirm.
  if text.contains needle then
    throw <| IO.userError s!"Test '{name}' failed:\nDid not expect {needle.quote}\nGot:\n{text.quote}"
/-- Fail the named test unless the UTF-8 decoded `response` ends with `suffix_`. -/
def assertEndsWith (name : String) (response : ByteArray) (suffix_ : String) : IO Unit := do
  let text := String.fromUTF8! response
  if text.endsWith suffix_ then
    return ()
  throw <| IO.userError s!"Test '{name}' failed:\nExpected suffix: {suffix_.quote}\nGot:\n{text.quote}"
/-- Count "HTTP/1.1 " status-line prefixes in `response` and fail unless the
    count matches `expected`. -/
def assertStatusCount (name : String) (response : ByteArray) (expected : Nat) : IO Unit := do
  let text := String.fromUTF8! response
  let got := countOccurrences text "HTTP/1.1 "
  if got == expected then
    return ()
  throw <| IO.userError s!"Test '{name}' failed:\nExpected {expected} HTTP responses, got {got}\n{text.quote}"
/-- Build a chunked-encoded body of `n` decimal chunks ("0", "1", …), each
    prefixed by its digit length, terminated by the last-chunk marker. -/
def onesChunked (n : Nat) : String :=
  let chunk (i : Nat) : String :=
    let digits := toString i
    s!"{digits.length}\x0d\n{digits}\x0d\n"
  (List.range n).foldl (fun acc i => acc ++ chunk i) "" ++ "0\x0d\n\x0d\n"
/-- Handler that ignores the request entirely and answers 200 with body "ok". -/
def ignoreHandler : TestHandler := fun _ => Response.ok |>.text "ok"
/-- Handler that echoes the request-target URI as the response body. -/
def uriHandler : TestHandler := fun req => Response.ok |>.text (toString req.line.uri)
/-- Handler that drains the whole request body and echoes it back as text. -/
def echoBodyHandler : TestHandler := fun req => do
  let mut body := ByteArray.empty
  -- Iterates the incoming body chunk by chunk until it is exhausted.
  for chunk in req.body do
    body := body ++ chunk.data
  Response.ok |>.text (String.fromUTF8! body)
/-- Handler that reads only the first body chunk (or "none" when the body is
    already exhausted) and replies with it, leaving the rest unread.
    (Restored the stripped `←` bind on `req.body.recv`.) -/
def firstChunkHandler : TestHandler := fun req => do
  let first ← req.body.recv
  let text :=
    match first with
    | some chunk => String.fromUTF8! chunk.data
    | none => "none"
  Response.ok |>.text text
/-- Handler that streams `n` "piece-{i};" chunks through a body channel from a
    background task, then closes the channel.
    (Restored the stripped `←` bind on `Body.mkChannel`.) -/
def streamPiecesHandler (n : Nat) : TestHandler := fun _ => do
  let (outgoing, _incoming) ← Body.mkChannel
  background do
    for i in [0:n] do
      outgoing.send <| Chunk.ofByteArray s!"piece-{i};".toUTF8
    outgoing.close
  return Response.ok
    |>.body outgoing
/-- Handler that streams `n` small "x{i}," chunks for stress tests.
    (Restored the stripped `←` bind on `Body.mkChannel`.) -/
def stressResponseHandler (n : Nat) : TestHandler := fun _ => do
  let (outgoing, _incoming) ← Body.mkChannel
  background do
    for i in [0:n] do
      outgoing.send <| Chunk.ofByteArray s!"x{i},".toUTF8
    outgoing.close
  return Response.ok
    |>.body outgoing
-- Tests 01-07: body consumption and response streaming basics.
-- (Restored the stripped `←` binds on each monadic `let`.)

-- 01: Ignore fixed-size request body and respond immediately.
#eval runWithTimeout "01_ignore_fixed_length_body" 2000 do
  let raw := "POST /fixed HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 10\x0d\nConnection: close\x0d\n\x0d\n0123456789".toUTF8
  let response ← sendRawTimed "01_ignore_fixed_length_body/send" raw ignoreHandler
  assertStatusPrefix "01_ignore_fixed_length_body" response "HTTP/1.1 200"

-- 02: Ignore chunked request body and respond immediately.
#eval runWithTimeout "02_ignore_chunked_body" 2000 do
  let raw := "POST /chunked HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n6\x0d\n world\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRawTimed "02_ignore_chunked_body/send" raw ignoreHandler
  assertStatusPrefix "02_ignore_chunked_body" response "HTTP/1.1 200"

-- 03: Large fixed-size body ignored by handler (regression for stalled body transfer).
#eval runWithTimeout "03_ignore_large_fixed_body" 2000 do
  let body := String.ofList (List.replicate 8192 'A')
  let raw := s!"POST /large HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 8192\x0d\nConnection: close\x0d\n\x0d\n{body}".toUTF8
  let response ← sendRawTimed "03_ignore_large_fixed_body/send" raw ignoreHandler
  assertStatusPrefix "03_ignore_large_fixed_body" response "HTTP/1.1 200"

-- 04: Read full request body and echo it.
#eval runWithTimeout "04_echo_full_body" 2000 do
  let raw := "POST /echo HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 11\x0d\nConnection: close\x0d\n\x0d\nhello world".toUTF8
  let response ← sendRawTimed "04_echo_full_body/send" raw echoBodyHandler
  assertContains "04_echo_full_body" response "hello world"

-- 05: Read only first chunk and reply (should not deadlock connection).
#eval runWithTimeout "05_read_first_chunk_only" 2000 do
  let raw := "POST /first HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 11\x0d\nConnection: close\x0d\n\x0d\nhello world".toUTF8
  let response ← sendRawTimed "05_read_first_chunk_only/send" raw firstChunkHandler
  assertStatusPrefix "05_read_first_chunk_only" response "HTTP/1.1 200"
  assertContains "05_read_first_chunk_only" response "hello world"

-- 06: Stream many response chunks.
#eval runWithTimeout "06_stream_many_response_chunks" 2000 do
  let raw := "GET /stream HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let response ← sendRawTimed "06_stream_many_response_chunks/send" raw (streamPiecesHandler 40)
  assertStatusPrefix "06_stream_many_response_chunks" response "HTTP/1.1 200"
  assertContains "06_stream_many_response_chunks" response "piece-0;"
  assertContains "06_stream_many_response_chunks" response "piece-39;"

-- 07: Stream response with known fixed size.
#eval runWithTimeout "07_stream_known_size" 2000 do
  let raw := "GET /known HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let response ← sendRawTimed "07_stream_known_size/send" raw (fun _ => do
    let (outgoing, _incoming) ← Body.mkChannel
    outgoing.setKnownSize (some (.fixed 8))
    background do
      outgoing.send <| Chunk.ofByteArray "abcd".toUTF8
      outgoing.send <| Chunk.ofByteArray "efgh".toUTF8
      outgoing.close
    return Response.ok
      |>.body outgoing)
  assertStatusPrefix "07_stream_known_size" response "HTTP/1.1 200"
  assertContains "07_stream_known_size" response "Content-Length: 8"
  assertContains "07_stream_known_size" response "abcdefgh"
-- Tests 08-12: backpressure gating, write coalescing, pipelining, malformed input.
-- (Restored the stripped `←` binds on each monadic `let`.)

-- 08: Use interestSelector before sending response data.
#eval runWithTimeout "08_interest_selector_gating" 2000 do
  let raw := "GET /interest HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let response ← sendRawTimed "08_interest_selector_gating/send" raw (fun _ => do
    let (outgoing, _incoming) ← Body.mkChannel
    background do
      let interested ← Selectable.one #[
        .case outgoing.interestSelector pure
      ]
      if interested then
        outgoing.send <| Chunk.ofByteArray "interest-ok".toUTF8
      outgoing.close
    return Response.ok
      |>.body outgoing)
  assertStatusPrefix "08_interest_selector_gating" response "HTTP/1.1 200"
  assertContains "08_interest_selector_gating" response "interest-ok"

-- 09: Incomplete sends collapse into one payload.
#eval runWithTimeout "09_incomplete_send_collapse" 2000 do
  let raw := "GET /collapse HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let response ← sendRawTimed "09_incomplete_send_collapse/send" raw (fun _ => do
    let (outgoing, _incoming) ← Body.mkChannel
    background do
      outgoing.send ({ data := "hello ".toUTF8, extensions := #[] } : Chunk) (incomplete := true)
      outgoing.send ({ data := "wor".toUTF8, extensions := #[] } : Chunk) (incomplete := true)
      outgoing.send ({ data := "ld".toUTF8, extensions := #[] } : Chunk)
      outgoing.close
    return Response.ok
      |>.body outgoing)
  assertStatusPrefix "09_incomplete_send_collapse" response "HTTP/1.1 200"
  assertContains "09_incomplete_send_collapse" response "hello world"

-- 10: Pipeline fixed-body POST then GET.
#eval runWithTimeout "10_pipeline_fixed_then_get" 2000 do
  let raw := ("POST /one HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 5\x0d\n\x0d\nhello" ++
    "GET /two HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n").toUTF8
  let response ← sendRawTimed "10_pipeline_fixed_then_get/send" raw uriHandler
  assertStatusCount "10_pipeline_fixed_then_get" response 2
  assertContains "10_pipeline_fixed_then_get" response "/one"
  assertContains "10_pipeline_fixed_then_get" response "/two"

-- 11: Pipeline chunked-body POST then GET.
#eval runWithTimeout "11_pipeline_chunked_then_get" 2000 do
  let raw := ("POST /chunk HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n" ++
    "GET /two HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n").toUTF8
  let response ← sendRawTimed "11_pipeline_chunked_then_get/send" raw uriHandler
  assertStatusCount "11_pipeline_chunked_then_get" response 2
  assertContains "11_pipeline_chunked_then_get" response "/chunk"
  assertContains "11_pipeline_chunked_then_get" response "/two"

-- 12: Malformed first request should not loop into second.
#eval runWithTimeout "12_malformed_closes_connection" 2000 do
  let raw := ("GET / HTTP/1.1\x0d\nHost: example.com\x0d\nBadHeader value\x0d\n\x0d\n" ++
    "GET /second HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n").toUTF8
  let response ← sendRawTimed "12_malformed_closes_connection/send" raw uriHandler
  assertStatusPrefix "12_malformed_closes_connection" response "HTTP/1.1 400"
  assertStatusCount "12_malformed_closes_connection" response 1
-- Tests 13-19: early client close, handler errors, many-chunk bodies, pipelining.
-- (Restored the stripped `←` binds on each monadic `let`.)

-- 13: Client closes while server is streaming response.
#eval runWithTimeout "13_client_close_while_streaming" 2000 do
  let raw := "GET /close-stream HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  runClosedClientTimed "13_client_close_while_streaming/run" raw (stressResponseHandler 600)

-- 14: Client closes before sending full body.
#eval runWithTimeout "14_client_close_mid_body" 2000 do
  let raw := "POST /mid-body HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 100\x0d\nConnection: close\x0d\n\x0d\nabcde".toUTF8
  runClosedClientTimed "14_client_close_mid_body/run" raw ignoreHandler

-- 15: Handler throws while request body is present.
#eval runWithTimeout "15_handler_throw_unread_body" 2000 do
  let raw := "POST /throw HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 5\x0d\nConnection: close\x0d\n\x0d\nhello".toUTF8
  let response ← sendRawTimed "15_handler_throw_unread_body/send" raw (fun _ => throw <| IO.userError "boom")
  assertStatusPrefix "15_handler_throw_unread_body" response "HTTP/1.1 500"

-- 16: Many tiny chunked request chunks ignored by handler.
#eval runWithTimeout "16_many_tiny_chunked_ignored" 2000 do
  let body := onesChunked 80
  let raw := s!"POST /tiny HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n{body}".toUTF8
  let response ← sendRawTimed "16_many_tiny_chunked_ignored/send" raw ignoreHandler
  assertStatusPrefix "16_many_tiny_chunked_ignored" response "HTTP/1.1 200"

-- 17: Many tiny chunked request chunks consumed and counted.
#eval runWithTimeout "17_many_tiny_chunked_consumed" 2000 do
  let body := onesChunked 25
  let raw := s!"POST /count HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n{body}".toUTF8
  let response ← sendRawTimed "17_many_tiny_chunked_consumed/send" raw (fun req => do
    let mut count := 0
    for _ in req.body do
      count := count + 1
    Response.ok |>.text (toString count))
  assertStatusPrefix "17_many_tiny_chunked_consumed" response "HTTP/1.1 200"
  assertEndsWith "17_many_tiny_chunked_consumed" response "25"
  pure ()

-- 18: Stress response streaming with many chunks and active client.
#eval runWithTimeout "18_stress_streaming_active_client" 2000 do
  let raw := "GET /stress HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let response ← sendRawTimed "18_stress_streaming_active_client/send" raw (stressResponseHandler 120)
  assertStatusPrefix "18_stress_streaming_active_client" response "HTTP/1.1 200"
  assertContains "18_stress_streaming_active_client" response "x0,"
  assertContains "18_stress_streaming_active_client" response "x119,"

-- 19: Pipeline with large unread first body still processes second request.
#eval runWithTimeout "19_pipeline_large_unread_then_get" 2000 do
  let body := String.ofList (List.replicate 5000 'b')
  let raw := (s!"POST /big HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 5000\x0d\n\x0d\n{body}" ++
    "GET /after HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n").toUTF8
  let response ← sendRawTimed "19_pipeline_large_unread_then_get/send" raw uriHandler
  assertStatusCount "19_pipeline_large_unread_then_get" response 2
  assertContains "19_pipeline_large_unread_then_get" response "/big"
  assertContains "19_pipeline_large_unread_then_get" response "/after"
-- Tests 20-22: mixed pipelining, lingering timeout, keep-alive early flush.
-- (Restored the stripped `←` binds and the `⟨…⟩` anonymous constructor.)

-- 20: Triple pipeline mixed body styles.
#eval runWithTimeout "20_triple_pipeline_mixed" 2000 do
  let raw := ("POST /a HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 4\x0d\n\x0d\ndata" ++
    "POST /b HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\n\x0d\n3\x0d\nhey\x0d\n0\x0d\n\x0d\n" ++
    "GET /c HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n").toUTF8
  let response ← sendRawTimed "20_triple_pipeline_mixed/send" raw uriHandler
  assertStatusCount "20_triple_pipeline_mixed" response 3
  assertContains "20_triple_pipeline_mixed" response "/a"
  assertContains "20_triple_pipeline_mixed" response "/b"
  assertContains "20_triple_pipeline_mixed" response "/c"

-- 21: Slow/incomplete active body transfer must time out (no connection pinning).
#eval runWithTimeout "21_incomplete_slow_post_times_out" 2000 do
  let raw := "POST /slow HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 100\x0d\nConnection: close\x0d\n\x0d\nabcde".toUTF8
  let response ← sendRawTimed
    "21_incomplete_slow_post_times_out/send"
    raw
    (fun req => do
      let _s : String ← req.body.readAll
      Response.ok |>.text "unreachable")
    (config := { lingeringTimeout := 200, generateDate := false })
  assertStatusPrefix "21_incomplete_slow_post_times_out" response "HTTP/1.1 408"

-- 22: Keep-alive + unknown-size stream flushes once first chunk is available.
#eval runWithTimeout "22_keepalive_unknown_size_flushes_early" 3000 do
  Async.block do
    let (client, server) ← Mock.new
    let handler : TestHandler := fun _ => do
      let (outgoing, _incoming) ← Body.mkChannel
      background do
        outgoing.send <| Chunk.ofByteArray "aaa".toUTF8
        let sleep ← Sleep.mk 300
        sleep.wait
        outgoing.send <| Chunk.ofByteArray "bbb".toUTF8
        outgoing.close
      return Response.ok
        |>.body outgoing
    background <| (Std.Http.Server.serveConnection server handler {
      lingeringTimeout := 800
      -- Restored anonymous constructor: the field carries a `by decide` proof.
      keepAliveTimeout := ⟨1500, by decide⟩
      generateDate := false
    }).run
    client.send "GET /stream HTTP/1.1\x0d\nHost: example.com\x0d\n\x0d\n".toUTF8
    let mut early : Option ByteArray := none
    for _ in [0:5] do
      if early.isNone then
        let sleep ← Sleep.mk 40
        sleep.wait
        early ← client.tryRecv?
    let earlyBytes := early.getD ByteArray.empty
    if earlyBytes.isEmpty then
      throw <| IO.userError "Test '22_keepalive_unknown_size_flushes_early' failed:\nExpected early streamed bytes before producer EOF"
    assertContains "22_keepalive_unknown_size_flushes_early header" earlyBytes "Transfer-Encoding: chunked"
    assertContains "22_keepalive_unknown_size_flushes_early first chunk" earlyBytes "aaa"
    assertNotContains "22_keepalive_unknown_size_flushes_early no second chunk yet" earlyBytes "bbb"
    let sleep ← Sleep.mk 420
    sleep.wait
    let later := (← client.tryRecv?).getD ByteArray.empty
    assertContains "22_keepalive_unknown_size_flushes_early second chunk later" later "bbb"
    client.close

View File

@@ -0,0 +1,380 @@
import Std.Internal.Http.Data.Headers
import Std.Internal.Http.Protocol.H1
open Std.Http
open Std.Http.Header
/-! ## Header.Name tests -/
-- Valid header names
#guard (Name.ofString? "content-type").isSome
#guard (Name.ofString? "host").isSome
#guard (Name.ofString? "x-custom-header").isSome
#guard (Name.ofString? "accept").isSome
-- Trimming: leading/trailing whitespace is stripped
#guard (Name.ofString? " content-type ").isSome
#guard (Name.ofString? " content-type ") == Name.ofString? "content-type"
#guard (Name.ofString? " host ").isSome
-- Invalid header names (empty, spaces, control chars)
#guard (Name.ofString? "").isNone
#guard (Name.ofString? " ").isNone
#guard (Name.ofString? "invalid header").isNone
#guard (Name.ofString? "bad\nname").isNone
#guard (Name.ofString? "bad\x00name").isNone
-- RFC 9110 token separators are rejected as name characters
#guard (Name.ofString? "bad(name").isNone
#guard (Name.ofString? "bad)name").isNone
#guard (Name.ofString? "bad,name").isNone
#guard (Name.ofString? "bad;name").isNone
#guard (Name.ofString? "bad[name").isNone
#guard (Name.ofString? "bad]name").isNone
#guard (Name.ofString? "bad{name").isNone
#guard (Name.ofString? "bad}name").isNone
#guard (Name.ofString? "bad\"name").isNone
-- Case normalization: `value` is the lowercase internal form
/--
info: "content-type"
-/
#guard_msgs in
#eval (Name.ofString! "Content-Type").value
/--
info: "content-type"
-/
#guard_msgs in
#eval (Name.ofString! "CONTENT-TYPE").value
-- Canonical form: `toString` renders the capitalized wire form
/--
info: "Content-Type"
-/
#guard_msgs in
#eval toString (Name.ofString! "content-type")
/--
info: "X-Custom-Header"
-/
#guard_msgs in
#eval toString (Name.ofString! "x-custom-header")
/--
info: "Host"
-/
#guard_msgs in
#eval toString (Name.ofString! "host")
-- Name.is case-insensitive comparison
#guard (Name.ofString! "content-type").is "Content-Type"
#guard (Name.ofString! "content-type").is "CONTENT-TYPE"
#guard (Name.ofString! "content-type").is "content-type"
#guard !(Name.ofString! "content-type").is "host"
-- Predefined names
#guard Name.contentType.value == "content-type"
#guard Name.contentLength.value == "content-length"
#guard Name.host.value == "host"
#guard Name.authorization.value == "authorization"
#guard Name.userAgent.value == "user-agent"
#guard Name.accept.value == "accept"
#guard Name.connection.value == "connection"
#guard Name.transferEncoding.value == "transfer-encoding"
#guard Name.server.value == "server"
-- Name equality is case-insensitive because construction normalizes case
#guard Name.ofString! "content-type" == Name.ofString! "Content-Type"
#guard Name.ofString! "HOST" == Name.ofString! "host"
#guard !(Name.ofString! "content-type" == Name.ofString! "host")
/-! ## Header.Value tests -/
-- Trimming: leading/trailing whitespace is stripped
#guard (Value.ofString? " text/html ") == Value.ofString? "text/html"
#guard (Value.ofString? " value ") == Value.ofString? "value"
-- Valid header values (printable ASCII, tab, space)
#guard (Value.ofString? "text/html").isSome
#guard (Value.ofString? "application/json; charset=utf-8").isSome
#guard (Value.ofString? "").isSome
#guard (Value.ofString? "value with spaces").isSome
#guard (Value.ofString? "value\twith\ttabs").isSome
-- Invalid header values (control characters except tab)
#guard (Value.ofString? "bad\x00value").isNone
#guard (Value.ofString? "bad\nvalue").isNone
#guard (Value.ofString? "bad\rvalue").isNone
-- Value.is case-insensitive comparison
#guard (Value.ofString! "text/html").is "TEXT/HTML"
#guard (Value.ofString! "text/html").is "text/html"
#guard !(Value.ofString! "text/html").is "application/json"
-- Value toString
/--
info: "text/html"
-/
#guard_msgs in
#eval toString (Value.ofString! "text/html")
/-! ## Headers collection tests -/
-- Empty headers
#guard Headers.empty.isEmpty
#guard Headers.empty.size == 0
-- Add and retrieve: `insert!` panics on invalid input, `insert?` returns Option
#guard (Headers.empty.insert! "content-type" "text/html").size == 1
#guard !(Headers.empty.insert! "content-type" "text/html").isEmpty
#guard (Headers.empty.insert! "content-type" "text/html").contains (Name.ofString! "content-type")
#guard (Headers.empty.insert? "content-type" "text/html").isSome
#guard (Headers.empty.insert? "bad header name" "text/html").isNone
#guard (Headers.empty.insert? "content-type" "bad\nvalue").isNone
-- get? retrieves the value
/--
info: "text/html"
-/
#guard_msgs in
#eval do
  let h := Headers.empty.insert! "content-type" "text/html"
  return (h.get? (Name.ofString! "content-type")).get!.value
-- get? returns none for missing headers
#guard (Headers.empty.get? (Name.ofString! "content-type")).isNone
-- Multiple headers
#guard
  let h := Headers.empty
    |>.insert! "content-type" "text/html"
    |>.insert! "host" "example.com"
    |>.insert! "accept" "application/json"
  h.size == 3
#guard
  let h := Headers.empty.insert! "host" "example.com"
  h.contains (Name.ofString! "host") && !h.contains (Name.ofString! "accept")
#guard
  let h := Headers.empty
    |>.insert! "content-type" "text/html"
    |>.insert! "host" "example.com"
  let h' := h.erase (Name.ofString! "content-type")
  !h'.contains (Name.ofString! "content-type") && h'.contains (Name.ofString! "host")
#guard
  let h := Headers.empty
    |>.insert! "content-type" "text/html"
    |>.insert! "host" "example.com"
  (h.erase (Name.ofString! "content-type")).size == 1
-- hasEntry: checks an exact (name, value) pair
#guard
  let h := Headers.empty.insert! "content-type" "text/html"
  h.hasEntry (Name.ofString! "content-type") (Value.ofString! "text/html")
#guard
  let h := Headers.empty.insert! "content-type" "text/html"
  !h.hasEntry (Name.ofString! "content-type") (Value.ofString! "application/json")
-- update existing
/--
info: "TEXT/HTML"
-/
#guard_msgs in
#eval do
  let h := Headers.empty.insert! "content-type" "text/html"
  let h' := h.update (Name.ofString! "content-type") (fun v => Value.ofString! v.value.toUpper)
  return (h'.get? (Name.ofString! "content-type")).get!.value
-- ofList
#guard
  let h := Headers.ofList [
    (Name.ofString! "host", Value.ofString! "example.com"),
    (Name.ofString! "accept", Value.ofString! "*/*")
  ]
  h.size == 2 && h.contains (Name.ofString! "host")
-- merge
#guard
  let h1 := Headers.empty.insert! "content-type" "text/html"
  let h2 := Headers.empty.insert! "host" "example.com"
  let merged := h1.merge h2
  merged.contains (Name.ofString! "content-type") && merged.contains (Name.ofString! "host")
-- filter
#guard
  let h := Headers.empty
    |>.insert! "content-type" "text/html"
    |>.insert! "host" "example.com"
    |>.insert! "accept" "application/json"
  let filtered := h.filter (fun name _ => name.is "host")
  filtered.size == 1 && filtered.contains (Name.ofString! "host")
-- fold
/--
info: 3
-/
#guard_msgs in
#eval do
  let h := Headers.empty
    |>.insert! "a" "1"
    |>.insert! "b" "2"
    |>.insert! "c" "3"
  return h.fold 0 (fun acc _ _ => acc + 1)
-- getD with default
/--
info: "fallback"
-/
#guard_msgs in
#eval do
  let h := Headers.empty
  return (h.getD (Name.ofString! "missing") (Value.ofString! "fallback")).value
-- mapValues
/--
info: "TEXT/HTML"
-/
#guard_msgs in
#eval do
  let h := Headers.empty.insert! "content-type" "text/html"
  let h' := h.mapValues (fun _ v => Value.ofString! v.value.toUpper)
  return (h'.get? (Name.ofString! "content-type")).get!.value
-- toArray preserves insertion order
#guard
  let h := Headers.empty
    |>.insert! "content-type" "text/html"
    |>.insert! "host" "example.com"
    |>.insert! "accept" "application/json"
  h.toArray.map (fun (n, v) => (n.value, v.value)) ==
    #[("content-type", "text/html"), ("host", "example.com"), ("accept", "application/json")]
-- toArray with duplicate keys: both values appear in insertion order
#guard
  let h := Headers.empty
    |>.insert! "accept" "text/html"
    |>.insert! "accept" "application/json"
  h.toArray.map (fun (n, v) => (n.value, v.value)) ==
    #[("accept", "text/html"), ("accept", "application/json")]
-- toArray after erase preserves remaining insertion order
#guard
  let h := Headers.empty
    |>.insert! "content-type" "text/html"
    |>.insert! "host" "example.com"
    |>.insert! "accept" "application/json"
  (h.erase (Name.ofString! "host")).toArray.map (fun (n, v) => (n.value, v.value)) ==
    #[("content-type", "text/html"), ("accept", "application/json")]
/-! ## Header typeclass tests -/
-- ContentLength parse
#guard
  match Header.ContentLength.parse (Value.ofString! "42") with
  | some cl => cl.length == 42
  | none => false
#guard
  match Header.ContentLength.parse (Value.ofString! "0") with
  | some cl => cl.length == 0
  | none => false
-- Fixed: use `ofString?` so the `Option` bind type-checks, matching the line below.
#guard (Header.ContentLength.parse =<< (Value.ofString? "abc")).isNone
#guard (Header.ContentLength.parse =<< (Value.ofString? "")).isNone
/--
info: ("content-length", "42")
-/
#guard_msgs in
#eval do
  let (name, value) := Header.ContentLength.serialize 42
  return (name.value, value.value)
#guard
  match Header.TransferEncoding.parse (Value.ofString! "chunked") with
  | some te => te.isChunked
  | none => false
#guard
  match Header.TransferEncoding.parse (Value.ofString! "gzip, chunked") with
  | some te => te.isChunked && te.codings.size == 2
  | none => false
#guard
  match Header.TransferEncoding.parse (Value.ofString! "gzip") with
  | some te => !te.isChunked
  | none => false
-- "chunked" must be last, must not repeat, and codings must be valid tokens
#guard (Header.TransferEncoding.parse =<< (Value.ofString? "chunked, gzip")).isNone
#guard (Header.TransferEncoding.parse =<< (Value.ofString? "chunked, chunked")).isNone
#guard (Header.TransferEncoding.parse =<< (Value.ofString? "")).isNone
#guard (Header.TransferEncoding.parse =<< (Value.ofString? ",")).isNone
#guard (Header.TransferEncoding.parse =<< (Value.ofString? " , , ")).isNone
#guard (Header.TransferEncoding.parse =<< (Value.ofString? "g zip")).isNone
#guard (Header.TransferEncoding.parse =<< (Value.ofString? "\"chunked\"")).isNone
/--
info: ("transfer-encoding", "gzip,chunked")
-/
#guard_msgs in
#eval do
  -- Restored anonymous constructor: the structure carries a validity proof.
  let te : Header.TransferEncoding := ⟨#["gzip", "chunked"], by native_decide⟩
  let (name, value) := Header.TransferEncoding.serialize te
  return (name.value, value.value)
#guard
  match Header.Connection.parse (Value.ofString! "keep-alive, Close") with
  | some c => c.containsToken "close" && c.shouldClose
  | none => false
#guard (Header.Connection.parse =<< (Value.ofString? "keep alive")).isNone
/--
info: ("connection", "keep-alive,close")
-/
#guard_msgs in
#eval do
  let c : Header.Connection := ⟨#["keep-alive", "close"], by native_decide⟩
  let (name, value) := Header.Connection.serialize c
  return (name.value, value.value)
/-! ## Aggregate header byte limit (maxHeaderBytes) -/
section HeaderByteLimit

open Std.Http.Protocol.H1

-- Helper: feed all bytes at once and run one step, return the machine state.
private def runMachine (raw : String) (cfg : Config) : Machine .receiving :=
  let machine : Machine .receiving := { config := cfg }
  (machine.feed raw.toUTF8).step.fst

-- With a tight limit (35 bytes), two headers whose combined byte count exceeds
-- the limit should cause the machine to fail.
-- "host" (4) + "example.com" (11) + 4 = 19 bytes for the first header.
-- "x-a" (3) + "somevalue1" (10) + 4 = 17 bytes for the second → total 36 > 35.
#guard
  let raw := "GET / HTTP/1.1\r\nhost: example.com\r\nx-a: somevalue1\r\n\r\n"
  let cfg : Config := { maxHeaderBytes := 35 }
  (runMachine raw cfg).failed

-- With a generous limit the same request succeeds (machine is not failed).
#guard
  let raw := "GET / HTTP/1.1\r\nhost: example.com\r\nx-a: somevalue1\r\n\r\n"
  let cfg : Config := { maxHeaderBytes := 100 }
  !(runMachine raw cfg).failed

-- Exactly at the boundary: 19 bytes for host header alone, limit = 19 → ok.
#guard
  let raw := "GET / HTTP/1.1\r\nhost: example.com\r\n\r\n"
  let cfg : Config := { maxHeaderBytes := 19 }
  !(runMachine raw cfg).failed

-- One byte under the two-header total → second header pushes it over.
#guard
  let raw := "GET / HTTP/1.1\r\nhost: example.com\r\nx-a: somevalue1\r\n\r\n"
  let cfg : Config := { maxHeaderBytes := 19 }
  (runMachine raw cfg).failed

end HeaderByteLimit

View File

@@ -0,0 +1,224 @@
import Std.Internal.Http
import Std.Internal.Async
open Std.Internal.IO Async
open Std Http
/-- Handlers under test: a function from an incoming request to an async response.
    (Restored the stripped `→` in the abbrev and the `←` binds in the coercions.) -/
abbrev TestHandler := Request Body.Incoming → ContextAsync (Response Body.AnyBody)

instance : Std.Http.Server.Handler TestHandler where
  onRequest handler request := handler request

/-- Allow returning an incoming-body response where an any-body response is expected. -/
instance : Coe (ContextAsync (Response Body.Incoming)) (ContextAsync (Response Body.AnyBody)) where
  coe action := do
    let response ← action
    pure { response with body := Body.Internal.incomingToOutgoing response.body }

instance : Coe (Async (Response Body.Incoming)) (ContextAsync (Response Body.AnyBody)) where
  coe action := do
    let response ← action
    pure { response with body := Body.Internal.incomingToOutgoing response.body }
/-- Feed `raw` to the mock client, serve the connection with `handler`, and
    return the server's reply bytes (empty if none).
    (Restored the stripped `←` bind on `client.recv?`.) -/
def sendRaw
    (client : Mock.Client)
    (server : Mock.Server)
    (raw : ByteArray)
    (handler : TestHandler)
    (config : Config := { lingeringTimeout := 3000, generateDate := false }) : IO ByteArray := Async.block do
  client.send raw
  Std.Http.Server.serveConnection server handler config
    |>.run
  let res ← client.recv?
  pure <| res.getD .empty
/-- Serve a pipelined request string against a recording handler. Returns the raw
    response bytes and the URIs the handler saw, in order. When `readBody` is
    false each request body is left unread.
    (Restored the stripped `←` binds throughout the do-block.) -/
def runPipelined
    (raw : String)
    (readBody : Bool)
    (config : Config := { lingeringTimeout := 1000, generateDate := false }) : IO (ByteArray × Array String) := Async.block do
  let (client, server) ← Mock.new
  let seenRef ← IO.mkRef (#[] : Array String)
  let handler : TestHandler := fun req => do
    let uri := toString req.line.uri
    seenRef.modify (·.push uri)
    let body ←
      if readBody then
        req.body.readAll
      else
        pure "<ignored>"
    Response.ok |>.text s!"{uri}:{body}"
  client.send raw.toUTF8
  -- Close the send side so the server sees EOF after the pipelined bytes.
  client.getSendChan.close
  Std.Http.Server.serveConnection server handler config
    |>.run
  let response ← client.recv?
  let seen ← seenRef.get
  pure (response.getD .empty, seen)
/-- Fail with a descriptive error unless `response` contains `needle`. -/
def assertContains (name : String) (response : ByteArray) (needle : String) : IO Unit := do
  let text := String.fromUTF8! response
  unless text.contains needle do
    throw <| IO.userError s!"Test '{name}' failed:\nExpected to contain {needle.quote}\nGot:\n{text.quote}"
/-- Fail with a descriptive error if `response` contains `needle`. -/
def assertNotContains (name : String) (response : ByteArray) (needle : String) : IO Unit := do
  let text := String.fromUTF8! response
  if text.contains needle then
    throw <| IO.userError s!"Test '{name}' failed:\nDid not expect {needle.quote}\nGot:\n{text.quote}"
/-- Count non-overlapping occurrences of `needle` in `s`; `0` for an empty needle. -/
def countOccurrences (s : String) (needle : String) : Nat :=
  if needle.isEmpty then
    0
  else
    (s.splitOn needle).length - 1
/-- Assert that `response` contains exactly `expected` HTTP/1.1 status lines. -/
def assertStatusCount (name : String) (response : ByteArray) (expected : Nat) : IO Unit := do
  let text := String.fromUTF8! response
  let got := countOccurrences text "HTTP/1.1 "
  if got != expected then
    throw <| IO.userError s!"Test '{name}' failed:\nExpected {expected} responses, got {got}\n{text.quote}"
/-- Assert that the handler was invoked exactly `expected` times. -/
def assertSeenCount (name : String) (seen : Array String) (expected : Nat) : IO Unit := do
  if seen.size != expected then
    throw <| IO.userError s!"Test '{name}' failed:\nExpected {expected} handler calls, got {seen.size}: {seen}"
-- Two sequential requests on the same HTTP/1.1 connection.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let req1 := "GET /first HTTP/1.1\x0d\nHost: example.com\x0d\n\x0d\n"
  let req2 := "GET /second HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
  let response ← sendRaw client server (req1 ++ req2).toUTF8 (fun req =>
    Response.ok |>.text (toString req.line.uri))
  assertStatusCount "Two keep-alive responses" response 2
  assertContains "Two keep-alive first" response "/first"
  assertContains "Two keep-alive second" response "/second"

-- Connection: close on first request blocks pipelined second request.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let req1 := "GET /first HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
  let req2 := "GET /second HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
  let response ← sendRaw client server (req1 ++ req2).toUTF8 (fun req =>
    Response.ok |>.text (toString req.line.uri))
  assertStatusCount "Connection close response count" response 1
  assertContains "Connection close first served" response "/first"
  assertNotContains "Connection close second blocked" response "/second"

-- Disabling keep-alive via config forces one response.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let req1 := "GET /1 HTTP/1.1\x0d\nHost: example.com\x0d\n\x0d\n"
  let req2 := "GET /2 HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
  let response ← sendRaw client server (req1 ++ req2).toUTF8
    (fun req => Response.ok |>.text (toString req.line.uri))
    (config := { lingeringTimeout := 3000, enableKeepAlive := false, generateDate := false })
  assertStatusCount "Keep-alive disabled response count" response 1
  assertContains "Keep-alive disabled first served" response "/1"
  assertNotContains "Keep-alive disabled second blocked" response "/2"
-- maxRequests cap enforces hard limit on responses per connection.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let req0 := "GET /0 HTTP/1.1\x0d\nHost: example.com\x0d\n\x0d\n"
  let req1 := "GET /1 HTTP/1.1\x0d\nHost: example.com\x0d\n\x0d\n"
  let req2 := "GET /2 HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
  let response ← sendRaw client server (req0 ++ req1 ++ req2).toUTF8
    (fun req => Response.ok |>.text (toString req.line.uri))
    (config := { lingeringTimeout := 3000, maxRequests := 2, generateDate := false })
  assertStatusCount "maxRequests response count" response 2
  assertContains "maxRequests /0 served" response "/0"
  assertContains "maxRequests /1 served" response "/1"
  assertNotContains "maxRequests /2 blocked" response "/2"

-- Handler that ignores a fixed-size body still allows next keep-alive request.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let req1 := "POST /ignore HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 5\x0d\n\x0d\nhello"
  let req2 := "GET /after HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
  let response ← sendRaw client server (req1 ++ req2).toUTF8 (fun req =>
    Response.ok |>.text (toString req.line.uri))
  assertStatusCount "Unread CL body keep-alive responses" response 2
  assertContains "Unread CL body first" response "/ignore"
  assertContains "Unread CL body second" response "/after"

-- Handler that ignores chunked body still allows next keep-alive request.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let req1 := "POST /chunked HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n"
  let req2 := "GET /next HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
  let response ← sendRaw client server (req1 ++ req2).toUTF8 (fun req =>
    Response.ok |>.text (toString req.line.uri))
  assertStatusCount "Unread chunked body keep-alive responses" response 2
  assertContains "Unread chunked first" response "/chunked"
  assertContains "Unread chunked second" response "/next"
-- Exact first Content-Length allows pipelined second request.
#eval show IO _ from do
  let req1 := "POST /first HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 3\x0d\n\x0d\nabc"
  let req2 := "GET /second HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
  let (response, seen) ← runPipelined (req1 ++ req2) true
  assertStatusCount "Exact CL pipelined responses" response 2
  assertContains "Exact CL first" response "/first"
  assertContains "Exact CL second" response "/second"
  assertSeenCount "Exact CL seen count" seen 2

-- Incomplete first Content-Length blocks pipelined second request.
#eval show IO _ from do
  let req1 := "POST /first HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 10\x0d\n\x0d\nabc"
  let req2 := "GET /second HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
  let (response, seen) ← runPipelined (req1 ++ req2) true
  assertContains "Incomplete CL first served" response "/first"
  assertNotContains "Incomplete CL second blocked" response "/second"
  assertSeenCount "Incomplete CL seen count" seen 1

-- Incomplete first chunked body blocks pipelined second request.
#eval show IO _ from do
  let req1 := "POST /chunked-first HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\n\x0d\nF\x0d\nhel"
  let req2 := "GET /second HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
  let (response, seen) ← runPipelined (req1 ++ req2) true
  assertNotContains "Incomplete chunked second blocked" response "/second"
  if seen.contains "/second" then
    throw <| IO.userError s!"Test 'Incomplete chunked seen list' failed: {seen}"
-- Content-Length: 0 on first request allows immediate second request.
#eval show IO _ from do
  let req1 := "POST /empty HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 0\x0d\n\x0d\n"
  let req2 := "GET /tail HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
  let (response, seen) ← runPipelined (req1 ++ req2) true
  assertStatusCount "CL=0 pipelined responses" response 2
  assertContains "CL=0 first" response "/empty"
  assertContains "CL=0 second" response "/tail"
  assertSeenCount "CL=0 seen count" seen 2

-- Complete chunked first request allows second request.
#eval show IO _ from do
  let req1 := "POST /chunked HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n"
  let req2 := "GET /tail HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
  let (response, seen) ← runPipelined (req1 ++ req2) true
  assertStatusCount "Complete chunked pipelined responses" response 2
  assertContains "Complete chunked first" response "/chunked"
  assertContains "Complete chunked second" response "/tail"
  assertSeenCount "Complete chunked seen count" seen 2

View File

@@ -0,0 +1,503 @@
import Std.Internal.Http
import Std.Internal.Async
open Std.Internal.IO Async
open Std Http
/-- A test handler is a plain function from incoming request to async response. -/
abbrev TestHandler := Request Body.Incoming → ContextAsync (Response Body.AnyBody)

instance : Std.Http.Server.Handler TestHandler where
  onRequest handler request := handler request
/-- Coerce an incoming-body response action into an any-body response action. -/
instance : Coe (ContextAsync (Response Body.Incoming)) (ContextAsync (Response Body.AnyBody)) where
  coe action := do
    let response ← action
    pure { response with body := Body.Internal.incomingToOutgoing response.body }
/-- Coerce a plain `Async` incoming-body response into an any-body context action. -/
instance : Coe (Async (Response Body.Incoming)) (ContextAsync (Response Body.AnyBody)) where
  coe action := do
    let response ← action
    pure { response with body := Body.Internal.incomingToOutgoing response.body }
/-- Send `raw` bytes through the mock client, serve one connection, and return the raw response bytes. -/
def sendRaw
    (client : Mock.Client)
    (server : Mock.Server)
    (raw : ByteArray)
    (handler : TestHandler)
    (config : Config := { lingeringTimeout := 1000, generateDate := false }) : IO ByteArray := Async.block do
  client.send raw
  Std.Http.Server.serveConnection server handler config
    |>.run
  let res ← client.recv?
  pure <| res.getD .empty
/-- Like `sendRaw`, but also closes the client's send channel (simulates client EOF). -/
def sendRawAndClose
    (client : Mock.Client)
    (server : Mock.Server)
    (raw : ByteArray)
    (handler : TestHandler)
    (config : Config := { lingeringTimeout := 1000, generateDate := false }) : IO ByteArray := Async.block do
  client.send raw
  client.getSendChan.close
  Std.Http.Server.serveConnection server handler config
    |>.run
  let res ← client.recv?
  pure <| res.getD .empty
/-- Fail unless the response starts with the given status-line prefix. -/
def assertStatus (name : String) (response : ByteArray) (status : String) : IO Unit := do
  let responseStr := String.fromUTF8! response
  unless responseStr.startsWith status do
    throw <| IO.userError s!"Test '{name}' failed:\nExpected status: {status}\nGot:\n{responseStr.quote}"
/-- Fail unless the response is byte-for-byte equal to `expected`. -/
def assertExact (name : String) (response : ByteArray) (expected : String) : IO Unit := do
  let responseStr := String.fromUTF8! response
  if responseStr != expected then
    throw <| IO.userError s!"Test '{name}' failed:\nExpected:\n{expected.quote}\nGot:\n{responseStr.quote}"
/-- Fail with a descriptive error unless `response` contains `needle`. -/
def assertContains (name : String) (response : ByteArray) (needle : String) : IO Unit := do
  let responseStr := String.fromUTF8! response
  unless responseStr.contains needle do
    throw <| IO.userError s!"Test '{name}' failed:\nExpected to contain: {needle.quote}\nGot:\n{responseStr.quote}"
/-- Extract the header section of a raw response (everything before the first blank line). -/
def headerSection (response : ByteArray) : String :=
  (String.fromUTF8! response).splitOn "\x0d\n\x0d\n" |>.headD ""
/-- Handler that always replies 200 with body "ok". -/
def okHandler : TestHandler :=
  fun _ => Response.ok |>.text "ok"
-- Canonical serialized responses used by the exact-match assertions below.
def bad400 : String :=
  "HTTP/1.1 400 Bad Request\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 0\x0d\n\x0d\n"

def bad505 : String :=
  "HTTP/1.1 505 HTTP Version Not Supported\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 0\x0d\n\x0d\n"

def ok200 : String :=
  "HTTP/1.1 200 OK\x0d\nContent-Type: text/plain; charset=utf-8\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 2\x0d\n\x0d\nok"

-- Same as `ok200` but without the body bytes, as required for HEAD responses.
def ok200Head : String :=
  "HTTP/1.1 200 OK\x0d\nContent-Type: text/plain; charset=utf-8\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 2\x0d\n\x0d\n"

def notImplemented : String :=
  "HTTP/1.1 501 Not Implemented\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 0\x0d\n\x0d\n"
-- Client-mode response parsing regressions.
#eval show IO _ from do
  -- Initialize reader to needStartLine: for .sending machines the reader starts in .pending
  -- and only advances after send/request is issued. Here we test the reader in isolation.
  let machineA : Protocol.H1.Machine .sending := { config := {}, reader := { state := .needStartLine } }
  let rawA := "HTTP/1.1 200 OK\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\n\x0d\n"
  let (machineA, stepA) := (machineA.feed rawA.toUTF8).step
  let failedA := stepA.events.any fun
    | .failed _ => true
    | _ => false
  if failedA then
    throw <| IO.userError s!"Test 'Client mode parses response status-line with headers' failed:\nUnexpected failure events: {repr stepA.events}"
  let endedA := stepA.events.any fun
    | .endHeaders _ => true
    | _ => false
  unless endedA do
    throw <| IO.userError s!"Test 'Client mode parses response status-line with headers' failed:\nMissing endHeaders event: {repr stepA.events}"
  unless machineA.reader.messageHead.status == .ok do
    throw <| IO.userError s!"Test 'Client mode parses response status-line with headers' failed:\nUnexpected status: {repr machineA.reader.messageHead.status}"
  unless machineA.reader.messageHead.headers.hasEntry Header.Name.contentLength (Header.Value.ofString! "0") do
    throw <| IO.userError s!"Test 'Client mode parses response status-line with headers' failed:\nMissing Content-Length header in parsed response"
  -- A headerless 204 must still produce endHeaders without asking for more data.
  let machineB : Protocol.H1.Machine .sending := { config := {}, reader := { state := .needStartLine } }
  let rawB := "HTTP/1.1 204 No Content\x0d\n\x0d\n"
  let (_machineB, stepB) := (machineB.feed rawB.toUTF8).step
  let failedB := stepB.events.any fun
    | .failed _ => true
    | _ => false
  if failedB then
    throw <| IO.userError s!"Test 'Client mode parses headerless response status-line' failed:\nUnexpected failure events: {repr stepB.events}"
  let needMoreB := stepB.events.any fun
    | .needMoreData _ => true
    | _ => false
  if needMoreB then
    throw <| IO.userError s!"Test 'Client mode parses headerless response status-line' failed:\nUnexpected needMoreData event: {repr stepB.events}"
  let endedB := stepB.events.any fun
    | .endHeaders _ => true
    | _ => false
  unless endedB do
    throw <| IO.userError s!"Test 'Client mode parses headerless response status-line' failed:\nMissing endHeaders event: {repr stepB.events}"
  -- 204 ignores body framing: the 5 body bytes must remain unread.
  let machineC : Protocol.H1.Machine .sending := { config := {}, reader := { state := .needStartLine } }
  let rawC := "HTTP/1.1 204 No Content\x0d\nContent-Length: 5\x0d\n\x0d\nHELLO"
  let (machineC, stepC) := (machineC.feed rawC.toUTF8).step
  let failedC := stepC.events.any fun
    | .failed _ => true
    | _ => false
  if failedC then
    throw <| IO.userError s!"Test 'Client mode ignores body framing on 204' failed:\nUnexpected failure events: {repr stepC.events}"
  match machineC.reader.state with
  | .readBody (.fixed 0) =>
    pure ()
  | .complete =>
    pure ()
  | .closed =>
    pure ()
  | _ =>
    throw <| IO.userError s!"Test 'Client mode ignores body framing on 204' failed:\nUnexpected body-reading state: {repr machineC.reader.state}"
  unless machineC.reader.input.remainingBytes == 5 do
    throw <| IO.userError s!"Test 'Client mode ignores body framing on 204' failed:\nExpected 5 unread bytes, got {machineC.reader.input.remainingBytes}"
-- Outgoing response framing regressions for bodyless statuses.
#eval show IO _ from do
  let request := "GET /cache HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let machine0 : Protocol.H1.Machine .receiving := { config := {} }
  let (machine1, _step1) := (machine0.feed request).step
  -- 304 must keep an explicit Content-Length (it describes the suppressed body).
  let headers304 := Headers.empty.insert Header.Name.contentLength (Header.Value.ofString! "5")
  let machine304 := machine1.send ({ status := .notModified, headers := headers304 } : Response.Head)
  let (_machine304, step304) := machine304.step
  let text304 := String.fromUTF8! step304.output.toByteArray
  unless text304.contains "HTTP/1.1 304 Not Modified" do
    throw <| IO.userError s!"Test '304 preserves explicit Content-Length' failed:\n{text304.quote}"
  unless text304.contains "Content-Length: 5" do
    throw <| IO.userError s!"Test '304 preserves explicit Content-Length' failed:\n{text304.quote}"
  if text304.contains "Content-Length: 0" then
    throw <| IO.userError s!"Test '304 preserves explicit Content-Length' failed:\nUnexpected rewritten Content-Length: {text304.quote}"
  -- 204 must strip all framing headers from the serialized response.
  let headers204 := Headers.empty
    |>.insert Header.Name.contentLength (Header.Value.ofString! "9")
  let machine204 := machine1.send ({ status := .noContent, headers := headers204 } : Response.Head)
  let (_machine204, step204) := machine204.step
  let text204 := String.fromUTF8! step204.output.toByteArray
  unless step204.output.size > 0 do
    throw <| IO.userError "Test '204 strips framing headers' failed:\nExpected serialized response output"
  unless text204.contains "HTTP/1.1 204 No Content" do
    throw <| IO.userError s!"Test '204 strips framing headers' failed:\n{text204.quote}"
  if text204.contains "Content-Length:" || text204.contains "Transfer-Encoding:" then
    throw <| IO.userError s!"Test '204 strips framing headers' failed:\nUnexpected framing headers:\n{text204.quote}"
-- Host header rules.
#eval show IO _ from do
  let (clientA, serverA) ← Mock.new
  let missingHost := "GET / HTTP/1.1\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseA ← sendRaw clientA serverA missingHost okHandler
  assertExact "Missing Host header" responseA bad400
  let (clientB, serverB) ← Mock.new
  let emptyHost := "GET / HTTP/1.1\x0d\nHost: \x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB emptyHost okHandler
  -- RFC 9110 §7.2: for origin-form URIs (no authority), an empty Host field value is valid.
  assertExact "Empty Host header allowed" responseB ok200
  let (clientC, serverC) ← Mock.new
  let multiHost := "GET / HTTP/1.1\x0d\nHost: example.com\x0d\nHost: other.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseC ← sendRaw clientC serverC multiHost okHandler
  assertExact "Multiple Host headers" responseC bad400
  let (clientD, serverD) ← Mock.new
  let absoluteIgnoresHost := "GET http://good.example/path HTTP/1.1\x0d\nHost: good.example\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseD ← sendRaw clientD serverD absoluteIgnoresHost okHandler
  assertExact "Absolute-form authority takes precedence over Host" responseD ok200

-- HTTP version handling.
#eval show IO _ from do
  let (clientA, serverA) ← Mock.new
  let rawA := "GET / HTTP/2.0\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseA ← sendRaw clientA serverA rawA okHandler
  assertExact "HTTP/2.0 rejected" responseA bad505
-- Request-line parsing failures.
#eval show IO _ from do
  let (clientA, serverA) ← Mock.new
  let missingVersion := "GET /\x0d\nHost: example.com\x0d\n\x0d\n".toUTF8
  let responseA ← sendRaw clientA serverA missingVersion okHandler
  assertExact "Missing version in request-line" responseA bad400
  let (clientB, serverB) ← Mock.new
  let missingUri := "GET HTTP/1.1\x0d\nHost: example.com\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB missingUri okHandler
  assertExact "Missing URI in request-line" responseB bad400
  let (clientC, serverC) ← Mock.new
  let extraSpaces := "GET  /  HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseC ← sendRaw clientC serverC extraSpaces okHandler
  assertExact "Extra spaces in request-line" responseC bad400
  let (clientD, serverD) ← Mock.new
  let emptyLine := "\x0d\n\x0d\n".toUTF8
  let responseD ← sendRaw clientD serverD emptyLine okHandler
  assertExact "Empty request-line" responseD ""
  let (clientE, serverE) ← Mock.new
  let whitespaceOnly := " \x0d\n\x0d\n".toUTF8
  let responseE ← sendRaw clientE serverE whitespaceOnly okHandler
  assertExact "Whitespace-only request-line" responseE bad400
  let (clientF, serverF) ← Mock.new
  let noSpaces := "GETHTTP/1.1\x0d\nHost: example.com\x0d\n\x0d\n".toUTF8
  let responseF ← sendRaw clientF serverF noSpaces okHandler
  assertExact "No spaces in request-line" responseF bad400
  let (clientG, serverG) ← Mock.new
  let leadingCRLF := "\x0d\nGET / HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseG ← sendRaw clientG serverG leadingCRLF okHandler
  assertExact "Leading CRLF before request-line" responseG ok200
  let (clientH, serverH) ← Mock.new
  let garbageAfterVersion := "GET / HTTP/1.1 xxxxxx\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseH ← sendRaw clientH serverH garbageAfterVersion okHandler
  assertExact "Garbage after request-line version" responseH bad400
-- Method rules. Only IANA-registered methods are accepted; all others return 400.
#eval show IO _ from do
  let (clientA, serverA) ← Mock.new
  let invalidMethod := "FOOBAR / HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseA ← sendRaw clientA serverA invalidMethod okHandler
  assertExact "Unknown method rejected" responseA bad400
  let (clientB, serverB) ← Mock.new
  let lowercaseMethod := "get / HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB lowercaseMethod okHandler
  assertExact "Lowercase method rejected" responseB bad400
  let (clientC, serverC) ← Mock.new
  let nonAsciiMethod := "GÉT / HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseC ← sendRaw clientC serverC nonAsciiMethod okHandler
  assertExact "Non-ASCII method rejected" responseC bad400
  let (clientD, serverD) ← Mock.new
  let longMethod := String.ofList (List.replicate 20 'G')
  let rawD := s!"{longMethod} / HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseD ← sendRaw clientD serverD rawD okHandler
  assertExact "Long unrecognized method rejected" responseD bad400
  let (clientE, serverE) ← Mock.new
  let tokenMethod := "X-CUSTOM / HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseE ← sendRaw clientE serverE tokenMethod okHandler
  assertExact "Token method with hyphen rejected" responseE bad400
-- HEAD framing and authority-form rules.
#eval show IO _ from do
  let (clientA, serverA) ← Mock.new
  let headReq := "HEAD / HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseA ← sendRaw clientA serverA headReq okHandler
  assertExact "HEAD omits body bytes" responseA ok200Head
  let (clientB, serverB) ← Mock.new
  let badAuthority := "GET example.com:443 HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB badAuthority okHandler
  assertExact "GET authority-form rejected" responseB bad400
  let (clientC, serverC) ← Mock.new
  let okAuthority := "CONNECT example.com:443 HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseC ← sendRaw clientC serverC okAuthority okHandler
  assertExact "CONNECT authority-form accepted" responseC ok200
  let (clientD, serverD) ← Mock.new
  let badAuthorityPort := "CONNECT example.com:444 HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseD ← sendRaw clientD serverD badAuthorityPort okHandler
  assertExact "CONNECT authority-form non-default port mismatch rejected" responseD bad400
  let (clientE, serverE) ← Mock.new
  let getAsterisk := "GET * HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseE ← sendRaw clientE serverE getAsterisk okHandler
  assertExact "Asterisk-form rejected for non-OPTIONS" responseE bad400
  let (clientF, serverF) ← Mock.new
  let optionsAsterisk := "OPTIONS * HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseF ← sendRaw clientF serverF optionsAsterisk okHandler
  assertExact "Asterisk-form accepted for OPTIONS" responseF ok200
-- h11-inspired: GET and HEAD should use the same framing headers.
#eval show IO _ from do
  let handler : TestHandler := fun _ => Response.ok |>.text "hello"
  let (clientA, serverA) ← Mock.new
  let getReq := "GET /frame HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let getResponse ← sendRaw clientA serverA getReq handler
  let (clientB, serverB) ← Mock.new
  let headReq := "HEAD /frame HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let headResponse ← sendRaw clientB serverB headReq handler
  let getHeaders := headerSection getResponse
  let headHeaders := headerSection headResponse
  if getHeaders != headHeaders then
    throw <| IO.userError s!"Test 'HEAD framing headers parity' failed:\nGET headers:\n{getHeaders.quote}\nHEAD headers:\n{headHeaders.quote}"
  assertContains "GET framing body present" getResponse "hello"
  let headText := String.fromUTF8! headResponse
  if headText.contains "hello" then
    throw <| IO.userError s!"Test 'HEAD framing body omitted' failed:\n{headText.quote}"
-- Header syntax and byte-level validation.
#eval show IO _ from do
  let (clientA, serverA) ← Mock.new
  let noColon := "GET / HTTP/1.1\x0d\nHost: example.com\x0d\nBadHeader value\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseA ← sendRaw clientA serverA noColon okHandler
  assertExact "Header without colon" responseA bad400
  let (clientB, serverB) ← Mock.new
  let obsFold := "GET / HTTP/1.1\x0d\nHost: example.com\x0d\n X-Bad: folded\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB obsFold okHandler
  assertExact "Leading whitespace header" responseB bad400
  let (clientC, serverC) ← Mock.new
  let beforeName := "GET / HTTP/1.1\x0d\nHost: example.com\x0d\nX-Bad".toUTF8
  let afterName := "Header: value\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseC ← sendRaw clientC serverC (beforeName ++ ByteArray.mk #[0] ++ afterName) okHandler
  assertExact "NUL in header name" responseC bad400
  let (clientD, serverD) ← Mock.new
  let beforeValue := "GET / HTTP/1.1\x0d\nHost: example.com\x0d\nX-Header: bad".toUTF8
  let afterValue := "value\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseD ← sendRaw clientD serverD (beforeValue ++ ByteArray.mk #[0] ++ afterValue) okHandler
  assertExact "NUL in header value" responseD bad400
  let (clientE, serverE) ← Mock.new
  let beforeCtrl := "GET / HTTP/1.1\x0d\nHost: example.com\x0d\nX-Header: bad".toUTF8
  let afterCtrl := "value\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseE ← sendRaw clientE serverE (beforeCtrl ++ ByteArray.mk #[0x01] ++ afterCtrl) okHandler
  assertExact "Control char in header value" responseE bad400
  let (clientF, serverF) ← Mock.new
  let spaceInName := "GET / HTTP/1.1\x0d\nHost: example.com\x0d\nBad Header: value\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseF ← sendRaw clientF serverF spaceInName okHandler
  assertExact "Space in header name" responseF bad400
-- Lenient-but-supported parsing behavior.
#eval show IO _ from do
  let (clientA, serverA) ← Mock.new
  let bareLF := "GET / HTTP/1.1\nHost: example.com\nConnection: close\n\n".toUTF8
  let responseA ← sendRaw clientA serverA bareLF okHandler
  assertExact "Bare LF line endings accepted" responseA bad400
  let (clientB, serverB) ← Mock.new
  let splitHeaders := "GET / HTTP/1.1\x0d\nHost: example.com\x0d\nX-Inject: value\x0d\nEvil: injected\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB splitHeaders okHandler
  assertExact "CRLF split into two headers" responseB ok200
  let (clientC, serverC) ← Mock.new
  let tabValue := "GET / HTTP/1.1\x0d\nHost: example.com\x0d\nX-Tab: value\twith\ttabs\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseC ← sendRaw clientC serverC tabValue okHandler
  assertExact "Tab in header value accepted" responseC ok200
  let (clientD, serverD) ← Mock.new
  let absoluteUri := "GET http://example.com/path HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseD ← sendRaw clientD serverD absoluteUri okHandler
  assertExact "Absolute-form URI accepted" responseD ok200
  let (clientE, serverE) ← Mock.new
  let colonValue := "GET / HTTP/1.1\x0d\nHost: example.com\x0d\nBad:Name: value\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseE ← sendRaw clientE serverE colonValue okHandler
  assertExact "Additional colon remains in value" responseE ok200
-- Content-Length validation.
#eval show IO _ from do
  let (clientA, serverA) ← Mock.new
  let nonNumeric := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: abc\x0d\nConnection: close\x0d\n\x0d\nbody".toUTF8
  let responseA ← sendRaw clientA serverA nonNumeric okHandler
  assertExact "Non-numeric Content-Length" responseA bad400
  let (clientB, serverB) ← Mock.new
  let negative := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: -1\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB negative okHandler
  assertExact "Negative Content-Length" responseB bad400
  let (clientC, serverC) ← Mock.new
  let duplicateCl := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 5\x0d\nContent-Length: 10\x0d\nConnection: close\x0d\n\x0d\nhello".toUTF8
  let responseC ← sendRaw clientC serverC duplicateCl okHandler
  assertExact "Duplicate Content-Length mismatch" responseC bad400
-- Transfer-Encoding normalization.
#eval show IO _ from do
  let (clientA, serverA) ← Mock.new
  let mixedCase := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: Chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseA ← sendRaw clientA serverA mixedCase (fun req => do
    let body : String ← req.body.readAll
    Response.ok |>.text body)
  assertStatus "Mixed-case chunked accepted" responseA "HTTP/1.1 200"
  assertContains "Mixed-case chunked body" responseA "hello"
  let (clientB, serverB) ← Mock.new
  let teOWS := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked \x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB teOWS (fun req => do
    let body : String ← req.body.readAll
    Response.ok |>.text body)
  assertStatus "Transfer-Encoding trailing OWS accepted" responseB "HTTP/1.1 200"
  assertContains "Transfer-Encoding trailing OWS body" responseB "hello"
  let (clientC, serverC) ← Mock.new
  let doubleChunked := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked, chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseC ← sendRaw clientC serverC doubleChunked okHandler
  assertExact "Double chunked rejected" responseC bad400
  let (clientD, serverD) ← Mock.new
  let unsupportedTe := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: gzip, chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseD ← sendRaw clientD serverD unsupportedTe (fun req => do
    let seenTe := match req.line.headers.getAll? Header.Name.transferEncoding with
      | some values => String.intercalate "|" (values.map (·.value) |>.toList)
      | none => "<none>"
    Response.ok |>.text seenTe)
  assertStatus "Chunked transfer-coding chain accepted" responseD "HTTP/1.1 200"
  assertContains "Transfer-coding chain is visible to handler" responseD "gzip, chunked"
-- Size limits.
#eval show IO _ from do
  let (clientA, serverA) ← Mock.new
  let longName := String.ofList (List.replicate 257 'X')
  let rawA := s!"GET / HTTP/1.1\x0d\nHost: example.com\x0d\n{longName}: value\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseA ← sendRaw clientA serverA rawA okHandler
  assertExact "Header name too long" responseA bad400
  let cfg : Config := {
    lingeringTimeout := 1000
    generateDate := false
    maxStartLineLength := 16384
  }
  let (clientB, serverB) ← Mock.new
  let segment := String.ofList (List.replicate 255 'a')
  let maxUri := "/" ++ String.intercalate "/" (List.replicate 32 segment)
  let rawB := s!"GET {maxUri} HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB rawB okHandler (config := cfg)
  assertExact "URI at limit accepted" responseB ok200
  let (clientC, serverC) ← Mock.new
  let longUri := maxUri ++ "/x"
  let rawC := s!"GET {longUri} HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let responseC ← sendRaw clientC serverC rawC okHandler (config := cfg)
  assertStatus "URI too long returns 414" responseC "HTTP/1.1 414"
-- Empty connection closes silently (no response bytes).
#eval show IO _ from do
  let (client, server) ← Mock.new
  let response ← sendRawAndClose client server ByteArray.empty okHandler
  assert! response.size == 0

-- h11-inspired: early invalid-byte detection before CRLF.
#eval show IO _ from do
  let (clientA, serverA) ← Mock.new
  let responseA ← sendRawAndClose clientA serverA (ByteArray.mk #[0x00]) okHandler
  assertExact "Early invalid NUL" responseA bad400
  let (clientB, serverB) ← Mock.new
  let responseB ← sendRawAndClose clientB serverB (ByteArray.mk #[0x20]) okHandler
  assertExact "Early invalid SP" responseB bad400
  let (clientC, serverC) ← Mock.new
  let responseC ← sendRawAndClose clientC serverC (ByteArray.mk #[0x16, 0x03, 0x01, 0x00, 0xa5]) okHandler
  assertExact "Early invalid TLS prefix" responseC bad400

View File

@@ -0,0 +1,184 @@
import Std.Internal.Http
import Std.Internal.Async
open Std.Internal.IO Async
open Std Http
/-- The handler shape used by these tests: an incoming-body request mapped to an any-body response. -/
abbrev TestHandler := Request Body.Incoming → ContextAsync (Response Body.AnyBody)

/-- Any `TestHandler` function acts directly as a server handler. -/
instance : Std.Http.Server.Handler TestHandler where
  onRequest handler request := handler request

/-- Lift a context action producing an incoming body into one producing `AnyBody`. -/
instance : Coe (ContextAsync (Response Body.Incoming)) (ContextAsync (Response Body.AnyBody)) where
  coe action := do
    let response ← action
    pure { response with body := Body.Internal.incomingToOutgoing response.body }

/-- Same lifting for plain `Async` actions. -/
instance : Coe (Async (Response Body.Incoming)) (ContextAsync (Response Body.AnyBody)) where
  coe action := do
    let response ← action
    pure { response with body := Body.Internal.incomingToOutgoing response.body }

/--
Sends `raw` through the mock client, serves a single connection with `handler`,
and returns the bytes the server wrote back (empty when nothing was produced).
-/
def sendRaw
    (client : Mock.Client)
    (server : Mock.Server)
    (raw : ByteArray)
    (handler : TestHandler)
    (config : Config := { lingeringTimeout := 1000, generateDate := false }) : IO ByteArray := Async.block do
  client.send raw
  Std.Http.Server.serveConnection server handler config
    |>.run
  let res ← client.recv?
  pure <| res.getD .empty
/-- Fails with a descriptive error unless the response text starts with `status`. -/
def assertStatus (name : String) (response : ByteArray) (status : String) : IO Unit := do
  let text := String.fromUTF8! response
  unless text.startsWith status do
    throw <| IO.userError s!"Test '{name}' failed:\nExpected {status}\nGot:\n{text.quote}"

/-- Fails unless the response text is exactly `expected`. -/
def assertExact (name : String) (response : ByteArray) (expected : String) : IO Unit := do
  let text := String.fromUTF8! response
  if text != expected then
    throw <| IO.userError s!"Test '{name}' failed:\nExpected:\n{expected.quote}\nGot:\n{text.quote}"

/-- Fails unless the response text contains `needle`. -/
def assertContains (name : String) (response : ByteArray) (needle : String) : IO Unit := do
  let text := String.fromUTF8! response
  unless text.contains needle do
    throw <| IO.userError s!"Test '{name}' failed:\nMissing {needle.quote}\nGot:\n{text.quote}"

/-- Echoes the request body back as a text response. -/
def echoHandler : TestHandler :=
  fun req => do
    let body : String ← req.body.readAll
    Response.ok |>.text body

/-- Ignores the request and replies with "ok". -/
def okHandler : TestHandler :=
  fun _ => Response.ok |>.text "ok"

/-- Expected serialization of a 400 response with `Connection: close`. -/
def bad400 : String :=
  "HTTP/1.1 400 Bad Request\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 0\x0d\n\x0d\n"

/-- Expected serialization of a 501 response with `Connection: close`. -/
def bad501 : String :=
  "HTTP/1.1 501 Not Implemented\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 0\x0d\n\x0d\n"
-- Baseline: normal chunked request is accepted and body is delivered.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoHandler
  assertStatus "TE: chunked baseline" response "HTTP/1.1 200"
  assertContains "TE: chunked body delivered" response "hello"

-- TE: gzip alone (chunked not last) → 400.
-- The server can only frame bodies with chunked; an unknown-only coding has no supported framing.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: gzip\x0d\nConnection: close\x0d\n\x0d\nbody".toUTF8
  let response ← sendRaw client server raw okHandler
  assertExact "TE: gzip alone → 400" response bad400

-- TE: deflate alone → 400 (chunked not last).
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: deflate\x0d\nConnection: close\x0d\n\x0d\nbody".toUTF8
  let response ← sendRaw client server raw okHandler
  assertExact "TE: deflate alone → 400" response bad400

-- TE: identity alone → 400 (chunked not last).
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: identity\x0d\nConnection: close\x0d\n\x0d\nbody".toUTF8
  let response ← sendRaw client server raw okHandler
  assertExact "TE: identity alone → 400" response bad400

-- TE: chunked, gzip → 400. Chunked is not last; Validate rejects this.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked, gzip\x0d\nConnection: close\x0d\n\x0d\nbody".toUTF8
  let response ← sendRaw client server raw okHandler
  assertExact "TE: chunked not last → 400" response bad400

-- TE: gzip, chunked → 200. Unknown coding before chunked is accepted; body is framed as chunked.
-- The server reads the raw chunked bytes without applying the gzip layer.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: gzip, chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoHandler
  assertStatus "TE: gzip, chunked → 200" response "HTTP/1.1 200"
  assertContains "TE: gzip, chunked body delivered" response "hello"
-- TE: br, chunked → 200. Same as above with a different preceding coding.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: br, chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoHandler
  assertStatus "TE: br, chunked → 200" response "HTTP/1.1 200"

-- TE + Content-Length → 400 (request smuggling prevention, RFC 9112 §6.1).
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nContent-Length: 5\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw okHandler
  assertExact "TE + Content-Length → 400 (smuggling)" response bad400

-- TE: chunked, chunked → 400 (duplicate chunked, Validate rejects).
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked, chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw okHandler
  assertExact "TE: chunked, chunked → 400" response bad400

-- Mixed case: Chunked (capital C) is accepted (codings are lowercased when parsed).
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: Chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoHandler
  assertStatus "TE: Chunked (mixed case) → 200" response "HTTP/1.1 200"
  assertContains "TE: Chunked body delivered" response "hello"
-- NUL byte inside TE value → 400 (non-token character).
#eval show IO _ from do
  let (client, server) ← Mock.new
  let before := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunk".toUTF8
  let after := "ed\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server (before ++ ByteArray.mk #[0x00] ++ after) okHandler
  assertExact "NUL in TE value → 400" response bad400

-- Control character (0x01) inside TE value → 400.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let before := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunk".toUTF8
  let after := "ed\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server (before ++ ByteArray.mk #[0x01] ++ after) okHandler
  assertExact "Control char in TE value → 400" response bad400

-- Empty TE value → 400 (empty coding list is invalid per TransferEncoding.Validate).
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: \x0d\nConnection: close\x0d\n\x0d\nbody".toUTF8
  let response ← sendRaw client server raw okHandler
  assertStatus "TE: empty value → error" response "HTTP/1.1 4"

-- TE: chunked with trailing OWS is accepted (OWS is stripped before parsing).
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked \x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoHandler
  assertStatus "TE: chunked with trailing OWS → 200" response "HTTP/1.1 200"

View File

@@ -0,0 +1,282 @@
import Std.Internal.Http
import Std.Internal.Async
open Std.Internal.IO Async
open Std Http
open Std.Http.Internal
/-- The handler shape used by these tests: an incoming-body request mapped to an any-body response. -/
abbrev TestHandler := Request Body.Incoming → ContextAsync (Response Body.AnyBody)

/-- Any `TestHandler` function acts directly as a server handler. -/
instance : Std.Http.Server.Handler TestHandler where
  onRequest handler request := handler request

/-- Lift a context action producing an incoming body into one producing `AnyBody`. -/
instance : Coe (ContextAsync (Response Body.Incoming)) (ContextAsync (Response Body.AnyBody)) where
  coe action := do
    let response ← action
    pure { response with body := Body.Internal.incomingToOutgoing response.body }

/-- Same lifting for plain `Async` actions. -/
instance : Coe (Async (Response Body.Incoming)) (ContextAsync (Response Body.AnyBody)) where
  coe action := do
    let response ← action
    pure { response with body := Body.Internal.incomingToOutgoing response.body }

/--
Sends `raw` through the mock client, serves a single connection with `handler`,
and returns the bytes the server wrote back (empty when nothing was produced).
-/
def sendRaw
    (client : Mock.Client)
    (server : Mock.Server)
    (raw : ByteArray)
    (handler : TestHandler)
    (config : Config := { lingeringTimeout := 3000, generateDate := false }) : IO ByteArray := Async.block do
  client.send raw
  Std.Http.Server.serveConnection server handler config
    |>.run
  let res ← client.recv?
  pure <| res.getD .empty

/--
Like `sendRaw`, but closes the client side after sending so the server sees
EOF on the connection; used for truncated-input tests.
-/
def sendRawAndClose
    (client : Mock.Client)
    (server : Mock.Server)
    (raw : ByteArray)
    (handler : TestHandler)
    (config : Config := { lingeringTimeout := 1000, generateDate := false }) : IO ByteArray := Async.block do
  client.send raw
  client.close
  Std.Http.Server.serveConnection server handler config
    |>.run
  let res ← client.recv?
  pure <| res.getD .empty
/-- Fails with a descriptive error unless the response text starts with `status`. -/
def assertStatus (name : String) (response : ByteArray) (status : String) : IO Unit := do
  let text := String.fromUTF8! response
  unless text.startsWith status do
    throw <| IO.userError s!"Test '{name}' failed:\nExpected {status}\nGot:\n{text.quote}"

/-- Fails unless the response text contains `needle`. -/
def assertContains (name : String) (response : ByteArray) (needle : String) : IO Unit := do
  let text := String.fromUTF8! response
  unless text.contains needle do
    throw <| IO.userError s!"Test '{name}' failed:\nMissing {needle.quote}\nGot:\n{text.quote}"

/-- Fails unless the response text is exactly `expected`. -/
def assertExact (name : String) (response : ByteArray) (expected : String) : IO Unit := do
  let text := String.fromUTF8! response
  if text != expected then
    throw <| IO.userError s!"Test '{name}' failed:\nExpected:\n{expected.quote}\nGot:\n{text.quote}"

/-- Echoes the request body back as a text response. -/
def bodyHandler : TestHandler :=
  fun req => do
    let body : String ← req.body.readAll
    Response.ok |>.text body

/-- Expected serialization of a 400 response with `Connection: close`. -/
def bad400 : String :=
  "HTTP/1.1 400 Bad Request\x0d\nServer: LeanHTTP/1.1\x0d\nConnection: close\x0d\nContent-Length: 0\x0d\n\x0d\n"
-- Chunked body without trailers.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw bodyHandler
  assertStatus "Chunked no trailers" response "HTTP/1.1 200"
  assertContains "Chunked no trailers body" response "hello"

-- Single trailer header.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\nChecksum: abc123\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw bodyHandler
  assertStatus "Single trailer" response "HTTP/1.1 200"
  assertContains "Single trailer body" response "hello"

-- Multiple trailer headers.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\nChecksum: abc123\x0d\nExpires: Thu, 01 Dec 1994 16:00:00 GMT\x0d\nX-Custom: value\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw bodyHandler
  assertStatus "Multiple trailers" response "HTTP/1.1 200"
  assertContains "Multiple trailers body" response "hello"

-- Terminal chunk extensions can precede trailers.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0;ext=val\x0d\nX-Trailer: yes\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw bodyHandler
  assertStatus "Terminal chunk extensions + trailers" response "HTTP/1.1 200"
  assertContains "Terminal chunk extensions + trailers body" response "hello"
-- Trailer name and value limits: 256-byte names and 8192-byte values are the
-- inclusive maxima; one byte more is rejected with 400.
#eval show IO _ from do
  let exactName := String.ofList (List.replicate 256 'X')
  let longName := String.ofList (List.replicate 257 'X')
  let exactValue := String.ofList (List.replicate 8192 'v')
  let longValue := String.ofList (List.replicate 8193 'v')
  let (clientA, serverA) ← Mock.new
  let rawA := s!"POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\n{exactName}: value\x0d\n\x0d\n".toUTF8
  let responseA ← sendRaw clientA serverA rawA bodyHandler
  assertStatus "Trailer name at 256" responseA "HTTP/1.1 200"
  let (clientB, serverB) ← Mock.new
  let rawB := s!"POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\n{longName}: value\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB rawB bodyHandler
  assertExact "Trailer name exceeds 256" responseB bad400
  let (clientC, serverC) ← Mock.new
  let rawC := s!"POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\nX-Exact: {exactValue}\x0d\n\x0d\n".toUTF8
  let responseC ← sendRaw clientC serverC rawC bodyHandler
  assertStatus "Trailer value at 8192" responseC "HTTP/1.1 200"
  let (clientD, serverD) ← Mock.new
  let rawD := s!"POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\nX-Too-Long: {longValue}\x0d\n\x0d\n".toUTF8
  let responseD ← sendRaw clientD serverD rawD bodyHandler
  assertExact "Trailer value exceeds 8192" responseD bad400
-- maxTrailerHeaders enforcement: at-limit accepted, over-limit rejected,
-- and a limit of 0 rejects any trailer while still allowing none.
#eval show IO _ from do
  let config2 : Config := { lingeringTimeout := 3000, maxTrailerHeaders := 2, generateDate := false }
  let (clientA, serverA) ← Mock.new
  let okRaw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\nT1: a\x0d\nT2: b\x0d\n\x0d\n".toUTF8
  let okResponse ← sendRaw clientA serverA okRaw bodyHandler (config := config2)
  assertStatus "maxTrailerHeaders exact limit" okResponse "HTTP/1.1 200"
  let (clientB, serverB) ← Mock.new
  let badRaw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\nT1: a\x0d\nT2: b\x0d\nT3: c\x0d\n\x0d\n".toUTF8
  let badResponse ← sendRaw clientB serverB badRaw bodyHandler (config := config2)
  assertExact "maxTrailerHeaders overflow" badResponse bad400
  let config0 : Config := { lingeringTimeout := 3000, maxTrailerHeaders := 0, generateDate := false }
  let (clientC, serverC) ← Mock.new
  let rejectAny := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\nX-Trailer: rejected\x0d\n\x0d\n".toUTF8
  let responseC ← sendRaw clientC serverC rejectAny bodyHandler (config := config0)
  assertExact "maxTrailerHeaders=0 rejects trailers" responseC bad400
  let (clientD, serverD) ← Mock.new
  let noTrailer := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\n\x0d\n".toUTF8
  let responseD ← sendRaw clientD serverD noTrailer bodyHandler (config := config0)
  assertStatus "maxTrailerHeaders=0 no trailers" responseD "HTTP/1.1 200"
-- Trailer syntax validation: missing colon, obs-fold-style leading whitespace,
-- and spaces inside a field name are all 400s.
#eval show IO _ from do
  let (clientA, serverA) ← Mock.new
  let noColon := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\nBadTrailer value\x0d\n\x0d\n".toUTF8
  let responseA ← sendRaw clientA serverA noColon bodyHandler
  assertExact "Trailer without colon" responseA bad400
  let (clientB, serverB) ← Mock.new
  let leadingWS := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\n X-Bad: folded\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB leadingWS bodyHandler
  assertExact "Trailer leading whitespace" responseB bad400
  let (clientC, serverC) ← Mock.new
  let spaceName := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\nBad Name: value\x0d\n\x0d\n".toUTF8
  let responseC ← sendRaw clientC serverC spaceName bodyHandler
  assertExact "Trailer name contains space" responseC bad400
-- Trailer byte-level validation: NUL or control bytes inside trailer names or
-- values are rejected with 400.
#eval show IO _ from do
  let (clientA, serverA) ← Mock.new
  let beforeName := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\nX-Bad".toUTF8
  let afterName := "Name: value\x0d\n\x0d\n".toUTF8
  let responseA ← sendRaw clientA serverA (beforeName ++ ByteArray.mk #[0] ++ afterName) bodyHandler
  assertExact "NUL in trailer name" responseA bad400
  let (clientB, serverB) ← Mock.new
  let beforeValue := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\nX-Header: bad".toUTF8
  let afterValue := "value\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB (beforeValue ++ ByteArray.mk #[0] ++ afterValue) bodyHandler
  assertExact "NUL in trailer value" responseB bad400
  let (clientC, serverC) ← Mock.new
  let responseC ← sendRaw clientC serverC (beforeValue ++ ByteArray.mk #[0x01] ++ afterValue) bodyHandler
  assertExact "Control char in trailer value" responseC bad400
-- Incomplete trailer section with client close yields no response bytes.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\nX-Trailer: value\x0d\n".toUTF8
  let response ← sendRawAndClose client server raw bodyHandler
  assert! response.size == 0
-- Trailer encoding emits terminal chunk plus trailer headers.
#eval show IO _ from Async.block do
  let trailer := Trailer.empty
    |>.insert (.mk "checksum") (.mk "abc123")
    |>.insert (.mk "expires") (.mk "Thu, 01 Dec 1994")
  let encoded := (Encode.encode (v := .v11) ChunkedBuffer.empty trailer).toByteArray
  let text := String.fromUTF8! encoded
  assert! text.contains "0\x0d\n"
  assert! text.contains "Checksum: abc123\x0d\n"
  assert! text.contains "Expires: Thu, 01 Dec 1994\x0d\n"

-- Empty trailer encoding is exactly terminal chunk CRLF CRLF.
#eval show IO _ from Async.block do
  let encoded := (Encode.encode (v := .v11) ChunkedBuffer.empty Trailer.empty).toByteArray
  let text := String.fromUTF8! encoded
  assert! text == "0\x0d\n\x0d\n"
-- Trailer injection: forbidden field names must be rejected (RFC 9112 §6.5).
-- A client injecting framing or routing fields via trailers could confuse proxies.
#eval show IO _ from do
  -- content-length in trailer must be rejected
  let (clientA, serverA) ← Mock.new
  let rawA := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\nContent-Length: 1000\x0d\n\x0d\n".toUTF8
  let responseA ← sendRaw clientA serverA rawA bodyHandler
  assertExact "content-length in trailer rejected" responseA bad400
  -- transfer-encoding in trailer must be rejected
  let (clientB, serverB) ← Mock.new
  let rawB := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\nTransfer-Encoding: chunked\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB rawB bodyHandler
  assertExact "transfer-encoding in trailer rejected" responseB bad400
  -- host in trailer must be rejected
  let (clientC, serverC) ← Mock.new
  let rawC := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\nHost: evil.example\x0d\n\x0d\n".toUTF8
  let responseC ← sendRaw clientC serverC rawC bodyHandler
  assertExact "host in trailer rejected" responseC bad400
  -- connection in trailer must be rejected
  let (clientD, serverD) ← Mock.new
  let rawD := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\nConnection: keep-alive\x0d\n\x0d\n".toUTF8
  let responseD ← sendRaw clientD serverD rawD bodyHandler
  assertExact "connection in trailer rejected" responseD bad400
  -- authorization in trailer must be rejected
  let (clientE, serverE) ← Mock.new
  let rawE := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\nAuthorization: Bearer token\x0d\n\x0d\n".toUTF8
  let responseE ← sendRaw clientE serverE rawE bodyHandler
  assertExact "authorization in trailer rejected" responseE bad400
  -- cache-control in trailer must be rejected
  let (clientF, serverF) ← Mock.new
  let rawF := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\nCache-Control: no-cache\x0d\n\x0d\n".toUTF8
  let responseF ← sendRaw clientF serverF rawF bodyHandler
  assertExact "cache-control in trailer rejected" responseF bad400
  -- te in trailer must be rejected
  let (clientG, serverG) ← Mock.new
  let rawG := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\nTE: trailers\x0d\n\x0d\n".toUTF8
  let responseG ← sendRaw clientG serverG rawG bodyHandler
  assertExact "te in trailer rejected" responseG bad400
-- Forbidden trailer field names are rejected regardless of case.
#eval show IO _ from do
  let (clientA, serverA) ← Mock.new
  let rawA := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\nCONTENT-LENGTH: 0\x0d\n\x0d\n".toUTF8
  let responseA ← sendRaw clientA serverA rawA bodyHandler
  assertExact "CONTENT-LENGTH in trailer rejected (uppercase)" responseA bad400
  let (clientB, serverB) ← Mock.new
  let rawB := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\nContent-Length: 0\x0d\nChecksum: abc\x0d\n\x0d\n".toUTF8
  let responseB ← sendRaw clientB serverB rawB bodyHandler
  assertExact "forbidden trailer among others rejected" responseB bad400

-- Non-forbidden custom trailers are still allowed after the fix.
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\nChecksum: deadbeef\x0d\nX-Timing: 12ms\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw bodyHandler
  assertStatus "non-forbidden trailers accepted" response "HTTP/1.1 200"
  assertContains "body delivered with custom trailers" response "hello"

View File

@@ -0,0 +1,928 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
import Std.Internal.Http.Data.URI
import Std.Internal.Http.Data.URI.Encoding
open Std.Http
open Std.Http.URI
open Std.Http.URI.Parser
/-!
# URI Tests
Comprehensive tests for URI parsing, encoding, normalization, and manipulation.
This file consolidates tests from multiple URI-related test files.
-/
-- ============================================================================
-- Helper Functions
-- ============================================================================
/-- Runs `parser` (requiring end-of-input) on the UTF-8 bytes of `s`, throwing on parse failure. -/
def runParser (parser : Std.Internal.Parsec.ByteArray.Parser α) (s : String) : IO α :=
  IO.ofExcept ((parser <* Std.Internal.Parsec.eof).run s.toUTF8)
/--
Parses `s` as a request target and checks that its string rendering equals
`exact` (which defaults to `s`, i.e. an exact round-trip).
-/
def parseCheck (s : String) (exact : String := s) : IO Unit := do
  let result ← runParser parseRequestTarget s
  if toString result = exact then
    pure ()
  else
    throw (.userError s!"expect {exact.quote} but got {(toString result).quote}")
/-- Asserts that parsing `s` as a request target fails. -/
def parseCheckFail (s : String) : IO Unit := do
  match (parseRequestTarget <* Std.Internal.Parsec.eof).run s.toUTF8 with
  | .ok r =>
    throw <| .userError
      s!"expected parse failure, but succeeded with {(repr r)}"
  | .error _ =>
    pure ()
-- ============================================================================
-- Percent Encoding Tests (EncodedString)
-- ============================================================================
-- Valid percent encoding validation
-- `EncodedSegment.ofByteArray?` returns `some` for well-formed input
-- (plain bytes or `%` followed by two hex digits) and `none` otherwise.
/--
info: some "abc"
-/
#guard_msgs in
#eval IO.println (repr (EncodedSegment.ofByteArray? "abc".toUTF8))
/--
info: some "%20"
-/
#guard_msgs in
#eval IO.println (repr (EncodedSegment.ofByteArray? "%20".toUTF8))
/--
info: some "hello%20world"
-/
#guard_msgs in
#eval IO.println (repr (EncodedSegment.ofByteArray? "hello%20world".toUTF8))
/--
info: some "%FF"
-/
#guard_msgs in
#eval IO.println (repr (EncodedSegment.ofByteArray? "%FF".toUTF8))
/--
info: some "%00"
-/
#guard_msgs in
#eval IO.println (repr (EncodedSegment.ofByteArray? "%00".toUTF8))
-- Invalid percent encoding: incomplete (a `%` must be followed by exactly two hex digits)
/--
info: none
-/
#guard_msgs in
#eval IO.println (repr (EncodedSegment.ofByteArray? "%".toUTF8))
/--
info: none
-/
#guard_msgs in
#eval IO.println (repr (EncodedSegment.ofByteArray? "hello%".toUTF8))
/--
info: none
-/
#guard_msgs in
#eval IO.println (repr (EncodedSegment.ofByteArray? "%2".toUTF8))
/--
info: none
-/
#guard_msgs in
#eval IO.println (repr (EncodedSegment.ofByteArray? "%A".toUTF8))
-- Invalid percent encoding: non-hex characters
/--
info: none
-/
#guard_msgs in
#eval IO.println (repr (EncodedSegment.ofByteArray? "%GG".toUTF8))
/--
info: none
-/
#guard_msgs in
#eval IO.println (repr (EncodedSegment.ofByteArray? "%2G".toUTF8))
/--
info: none
-/
#guard_msgs in
#eval IO.println (repr (EncodedSegment.ofByteArray? "%G2".toUTF8))
-- ============================================================================
-- Percent Encoding Decode Tests
-- ============================================================================
-- Decoding a validated segment resolves `%XX` escapes; `%FF` decodes to `none`
-- here since the resulting byte is not valid UTF-8 on its own.
/--
info: some "abc"
-/
#guard_msgs in
#eval IO.println (repr <| EncodedSegment.decode =<< (EncodedSegment.ofByteArray? "abc".toUTF8))
/--
info: some " "
-/
#guard_msgs in
#eval IO.println (repr <| EncodedSegment.decode =<< (EncodedSegment.ofByteArray? "%20".toUTF8))
/--
info: some "hello world"
-/
#guard_msgs in
#eval IO.println (repr <| EncodedSegment.decode =<< (EncodedSegment.ofByteArray? "hello%20world".toUTF8))
/--
info: some " !"
-/
#guard_msgs in
#eval IO.println (repr <| EncodedSegment.decode =<< (EncodedSegment.ofByteArray? "%20%21".toUTF8))
/--
info: none
-/
#guard_msgs in
#eval IO.println (repr <| EncodedSegment.decode =<< (EncodedSegment.ofByteArray? "%FF".toUTF8))
/--
info: some "\x00"
-/
#guard_msgs in
#eval IO.println (repr <| EncodedSegment.decode =<< (EncodedSegment.ofByteArray? "%00".toUTF8))
-- ============================================================================
-- Query String Encoding Tests
-- ============================================================================
-- Query strings additionally allow `+`; decoding turns `+` into a space.
/--
info: some "hello+world"
-/
#guard_msgs in
#eval IO.println (repr (EncodedQueryString.ofByteArray? "hello+world".toUTF8))
/--
info: none
-/
#guard_msgs in
#eval IO.println (repr (EncodedQueryString.ofByteArray? "%".toUTF8))
/--
info: some "hello world"
-/
#guard_msgs in
#eval IO.println (repr <| EncodedQueryString.decode =<< (EncodedQueryString.ofByteArray? "hello+world".toUTF8))
/--
info: some " "
-/
#guard_msgs in
#eval IO.println (repr <| EncodedQueryString.decode =<< (EncodedQueryString.ofByteArray? "%20".toUTF8))
-- ============================================================================
-- Request Target Parsing - Basic Tests
-- ============================================================================
-- Each `parseCheck` asserts the input parses and round-trips to itself:
-- origin-form paths, asterisk-form, authority-form (host:port), and
-- absolute-form URIs with schemes, userinfo, IPv4/IPv6 hosts, and queries.
#eval parseCheck "///path/with/encoded%20space"
#eval parseCheck "/path/with/encoded%20space"
#eval parseCheck "/path/with/encoded%20space/"
#eval parseCheck "*"
#eval parseCheck "/api/search?q=hello%20world&category=tech%2Bgames"
#eval parseCheck "/"
#eval parseCheck "/api/v1/users/123/posts/456/comments/789"
#eval parseCheck "/files/../etc/passwd"
#eval parseCheck "example.com:8080"
#eval parseCheck "https://example.com:8080/ata"
#eval parseCheck "https://example.com:8080////./ata"
#eval parseCheck "192.168.1.1:3000"
#eval parseCheck "[::1]:8080"
#eval parseCheck "http://example.com/path/to/resource?query=value"
#eval parseCheck "https://api.example.com:443/v1/users?limit=10"
#eval parseCheck "http://[2001:db8::1]:8080/path"
#eval parseCheck "https://xn--nxasmq6b.xn--o3cw4h/path"
#eval parseCheck "localhost:65535"
#eval parseCheck "http:80"
#eval parseCheck "https://user:pass@secure.example.com/private"
#eval parseCheck "/double//slash//path"
#eval parseCheck "http://user%40example:pass%3Aword@host.com"
#eval parseCheck "http://[::ffff:192.168.1.1]/path"
#eval parseCheck "http://example.com:/"
#eval parseCheck "http://example.com:/?q=1"
#eval parseCheck "///////"
-- `&` in a key must be percent-encoded so toRawString round-trips correctly.
#guard
  let query := URI.Query.empty.insert "a&b" "1"
  query.toRawString == "a%26b=1"

-- `=` in a key must be percent-encoded so re-parsing preserves the key.
#guard
  let query := URI.Query.empty.insert "a=b" "1"
  query.toRawString == "a%3Db=1"

-- `&` in a value must be percent-encoded.
#guard
  let query := URI.Query.empty.insert "key" "a&b"
  query.toRawString == "key=a%26b"

-- `=` in a value is technically safe (parser uses first `=`), but encoding it
-- is still correct and keeps representation unambiguous.
#guard
  let query := URI.Query.empty.insert "key" "a=b"
  query.toRawString == "key=a%3Db"

-- Round-trip: insert → toRawString → re-parse should preserve the parameter.
#guard
  let original := URI.Query.empty.insert "a&b" "c=d"
  let raw := original.toRawString
  -- Parse via a synthetic origin-form request target
  match (URI.Parser.parseRequestTarget <* Std.Internal.Parsec.eof).run
      s!"/path?{raw}".toUTF8 with
  | .ok result =>
    (result.query.get "a&b" == some "c=d")
  | .error _ => false

-- `http:80` parses as authority-form (host "http", port 80), not absolute-form.
#guard
  match (parseRequestTarget <* Std.Internal.Parsec.eof).run "http:80".toUTF8 with
  | .ok (.authorityForm _) => true
  | _ => false
-- Parse failure tests: unencoded spaces, truncated or non-hex escapes, empty
-- input, malformed IPv6 literals, zone identifiers, fragments, and control bytes.
#eval parseCheckFail "/path with space"
#eval parseCheckFail "/path/%"
#eval parseCheckFail "/path/%2"
#eval parseCheckFail "/path/%ZZ"
#eval parseCheckFail ""
#eval parseCheckFail "[::1"
#eval parseCheckFail "[:::1]:80"
#eval parseCheckFail "http://exa_mple.com/path"
#eval parseCheckFail "http://[fe80::1%25eth0]/path"
#eval parseCheckFail "#frag"
#eval parseCheckFail "/path/\n"
#eval parseCheckFail "/path/\u0000"
#eval parseCheckFail "/page#section"
#eval parseCheckFail "/api/v1/users/[id]:action"

-- maxPathSegments should apply to trailing empty segments as well.
#guard
  match (parseURI { maxPathSegments := 1 } <* Std.Internal.Parsec.eof).run
      "http://example.com/a/".toUTF8 with
  | .error _ => true
  | .ok _ => false
-- ============================================================================
-- Request Target Parsing - Detailed Output Tests
-- ============================================================================
/--
info: Std.Http.RequestTarget.originForm
{ path := { segments := #["path", "with", "encoded%20space"], absolute := true }, query := none }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/path/with/encoded%20space"
  IO.println (repr result)
/--
info: Std.Http.RequestTarget.originForm
{ path := { segments := #["", "", "path", "with", "encoded%20space"], absolute := true }, query := none }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "///path/with/encoded%20space"
  IO.println (repr result)
/--
info: Std.Http.RequestTarget.asteriskForm
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "*"
  IO.println (repr result)
/--
info: #[("q", some "hello%20world"), ("category", some "tech%2Bgames")]
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/api/search?q=hello%20world&category=tech%2Bgames"
  IO.println (repr result.query)
/--
info: Std.Http.RequestTarget.originForm { path := { segments := #[], absolute := true }, query := none }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/"
  IO.println (repr result)
/--
info: Std.Http.RequestTarget.authorityForm
{ userInfo := none, host := Std.Http.URI.Host.name "example.com", port := Std.Http.URI.Port.value 8080 }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "example.com:8080"
  IO.println (repr result)
/--
info: Std.Http.RequestTarget.authorityForm
{ userInfo := none, host := Std.Http.URI.Host.ipv4 192.168.1.1, port := Std.Http.URI.Port.value 3000 }
-/
#guard_msgs in
#eval show IO _ from do
let result runParser parseRequestTarget "192.168.1.1:3000"
IO.println (repr result)
/--
info: Std.Http.RequestTarget.authorityForm
{ userInfo := none, host := Std.Http.URI.Host.ipv6 ::1, port := Std.Http.URI.Port.value 8080 }
-/
#guard_msgs in
#eval show IO _ from do
let result runParser parseRequestTarget "[::1]:8080"
IO.println (repr result)
/--
info: Std.Http.RequestTarget.absoluteForm
{ scheme := "https",
authority := some { userInfo := none,
host := Std.Http.URI.Host.name "example.com",
port := Std.Http.URI.Port.value 8080 },
path := { segments := #["ata"], absolute := true },
query := #[] }
-/
#guard_msgs in
#eval show IO _ from do
let result runParser parseRequestTarget "https://example.com:8080/ata"
IO.println (repr result)
/--
info: Std.Http.RequestTarget.absoluteForm
{ scheme := "http",
authority := some { userInfo := none,
host := Std.Http.URI.Host.ipv6 2001:db8::1,
port := Std.Http.URI.Port.value 8080 },
path := { segments := #["path"], absolute := true },
query := #[] }
-/
#guard_msgs in
#eval show IO _ from do
let result runParser parseRequestTarget "http://[2001:db8::1]:8080/path"
IO.println (repr result)
/--
info: Std.Http.RequestTarget.absoluteForm
{ scheme := "https",
authority := some { userInfo := some { username := "user%20b", password := some "pass" },
host := Std.Http.URI.Host.name "secure.example.com",
port := Std.Http.URI.Port.omitted },
path := { segments := #["private"], absolute := true },
query := #[] }
-/
#guard_msgs in
#eval show IO _ from do
let result runParser parseRequestTarget "https://user%20b:pass@secure.example.com/private"
IO.println (repr result)
-- ============================================================================
-- IPv6 Host Tests
-- ============================================================================
-- Each test binds the parse result with `←` (the bind arrow was missing,
-- which is a syntax error inside `do`).

-- Loopback address in an authority-form target.
/--
info: Std.Http.URI.Host.ipv6 ::1
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "[::1]:8080"
  match result.authority? with
  | some auth => IO.println (repr auth.host)
  | none => IO.println "no authority"

-- Compressed IPv6 address inside an absolute-form URI.
/--
info: Std.Http.URI.Host.ipv6 2001:db8::8a2e:370:7334
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "http://[2001:db8::8a2e:370:7334]:8080/api"
  match result.authority? with
  | some auth => IO.println (repr auth.host)
  | none => IO.println "no authority"

-- The unspecified address "::" is accepted as a host.
/--
info: Std.Http.URI.Host.ipv6 ::
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "http://[::]/path"
  match result.authority? with
  | some auth => IO.println (repr auth.host)
  | none => IO.println "no authority"
-- ============================================================================
-- UserInfo Tests
-- ============================================================================
-- Each test binds the parse result with `←` (the bind arrow was missing,
-- which is a syntax error inside `do`).

-- "user:pass@" splits into username and password.
/--
info: some { username := "user", password := some "pass" }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "https://user:pass@example.com/private"
  match result.authority? with
  | some auth => IO.println (repr auth.userInfo)
  | none => IO.println "no authority"

-- No ':' means no password; percent-encoding is preserved.
/--
info: some { username := "user%20only", password := none }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "https://user%20only@example.com/path"
  match result.authority? with
  | some auth => IO.println (repr auth.userInfo)
  | none => IO.println "no authority"

-- An empty username with a password is accepted.
/--
info: some { username := "", password := some "pass" }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "https://:pass@example.com/path"
  match result.authority? with
  | some auth => IO.println (repr auth.userInfo)
  | none => IO.println "no authority"

-- Percent-encoded characters in the password stay encoded.
/--
info: some { username := "user", password := some "p%40ss%3Aw0rd" }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "https://user:p%40ss%3Aw0rd@example.com/"
  match result.authority? with
  | some auth => IO.println (repr auth.userInfo)
  | none => IO.println "no authority"
-- ============================================================================
-- Path.normalize Tests (RFC 3986 Section 5.2.4)
-- ============================================================================
-- Note: the first test previously went through `IO.println <| toString …`;
-- it now uses `IO.println (…)` directly like every sibling test (IO.println
-- already prints via ToString, so the output is unchanged).

-- "." segments are removed.
/--
info: /a/b
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a/./b").path.normalize
-- ".." removes the preceding segment.
/--
info: /a
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a/b/..").path.normalize
/--
info: /a/g
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a/b/c/./../../g").path.normalize
-- ".." cannot climb above the root.
/--
info: /g
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/../../../g").path.normalize
/--
info: /a/c
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a/b/../c").path.normalize
-- A trailing "./" keeps the trailing slash.
/--
info: /a/
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a/b/c/../.././").path.normalize
-- Everything cancels out down to the root.
/--
info: /
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a/b/../../..").path.normalize
/--
info: /
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/../../../").path.normalize
-- A trailing bare "." is dropped.
/--
info: /a/b/c
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/./a/./b/./c/.").path.normalize
/--
info: /c
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a/../b/../c").path.normalize
-- ============================================================================
-- Path.parent Tests
-- ============================================================================
-- `parent` drops the last path segment; for a single-segment path and for
-- the root itself the parent is "/".
/--
info: /a/b
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a/b/c").path.parent
/--
info: /a
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a/b").path.parent
/--
info: /
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a").path.parent
/--
info: /
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/").path.parent
-- ============================================================================
-- Path.join Tests
-- ============================================================================
-- Joining a relative path appends its segments to the base path.
/--
info: /a/b/c/d
-/
#guard_msgs in
#eval do
  let p1 := (URI.parse! "http://example.com/a/b").path
  let p2 : URI.Path := { segments := #[URI.EncodedString.encode "c", URI.EncodedString.encode "d"], absolute := false }
  IO.println (p1.join p2)
-- Joining an absolute path replaces the base path entirely.
/--
info: /x/y
-/
#guard_msgs in
#eval do
  let p1 := (URI.parse! "http://example.com/a/b").path
  let p2 : URI.Path := { segments := #[URI.EncodedString.encode "x", URI.EncodedString.encode "y"], absolute := true }
  IO.println (p1.join p2)
-- ============================================================================
-- Path.isEmpty Tests
-- ============================================================================
-- A URI without a path component has an empty path; any "/…" path (including
-- the bare root) is absolute. `#guard` takes the Bool directly, so the
-- comparisons against `true`/`false` are written as plain/negated conditions.
#guard (URI.parse! "http://example.com").path.isEmpty
#guard (URI.parse! "http://example.com/").path.absolute
#guard !(URI.parse! "http://example.com/a").path.isEmpty
#guard (URI.parse! "http://example.com/a").path.absolute
-- ============================================================================
-- URI Modification Helpers
-- ============================================================================
-- `withScheme!` lower-cases the scheme it is given.
#guard ((URI.parse! "http://example.com").withScheme! "htTps" |>.scheme) == "https"
#guard ((URI.parse! "http://example.com").withScheme! "ftP" |>.scheme) == "ftp"
-- `withFragment` takes an already-encoded fragment string.
/--
info: http://example.com/#section1
-/
#guard_msgs in
#eval IO.println ((URI.parse! "http://example.com/").withFragment (some (toString (URI.EncodedString.encode "section1" : URI.EncodedFragment))))
-- `withQuery` replaces the query component.
/--
info: http://example.com/?key=value
-/
#guard_msgs in
#eval do
  let uri := URI.parse! "http://example.com/"
  let query := URI.Query.empty.insert "key" "value"
  IO.println (uri.withQuery query)
-- `withPath` replaces the path component.
/--
info: http://example.com/new/path
-/
#guard_msgs in
#eval do
  let uri := URI.parse! "http://example.com/old/path"
  let newPath : URI.Path := { segments := #[URI.EncodedString.encode "new", URI.EncodedString.encode "path"], absolute := true }
  IO.println (uri.withPath newPath)
-- ============================================================================
-- URI.normalize Tests (RFC 3986 Section 6)
-- ============================================================================
-- The scheme is lower-cased.
#guard (URI.parse! "HTTP://example.com").normalize.scheme == "http"
#guard (URI.parse! "HtTpS://example.com").normalize.scheme == "https"
-- The host is lower-cased.
/--
info: http://example.com/
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://EXAMPLE.COM/").normalize
/--
info: http://example.com/
-/
#guard_msgs in
#eval IO.println (URI.parse! "HTTP://Example.COM/").normalize
-- Dot segments are removed from the path.
/--
info: http://example.com/a/c
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a/b/../c").normalize
/--
info: http://example.com/a/g
-/
#guard_msgs in
#eval IO.println (URI.parse! "HTTP://EXAMPLE.COM/a/b/c/./../../g").normalize
-- Path case is preserved; only scheme and host are lower-cased.
/--
info: https://www.example.com/PATH
-/
#guard_msgs in
#eval IO.println (URI.parse! "HTTPS://WWW.EXAMPLE.COM/PATH").normalize
-- ============================================================================
-- Query Parameter Tests
-- ============================================================================
-- Each test binds the parse result with `←` (the bind arrow was missing,
-- which is a syntax error inside `do`).

-- Query with duplicate keys: `findAll` returns every value, in order.
/--
info: 3
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/search?tag=a&tag=b&tag=c"
  let all := result.query.findAll "tag"
  IO.println all.size

/--
info: #[some "a", some "b", some "c"]
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/search?tag=a&tag=b&tag=c"
  let all := result.query.findAll "tag"
  IO.println (repr all)

-- `find?` returns the first binding of a repeated key.
/--
info: some (some "a")
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/search?key=a&key=b&key=c"
  IO.println (repr (result.query.find? "key"))

-- Empty value vs no value: "key=" carries an empty string, bare "key"
-- carries no value at all.
/--
info: some (some "")
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/api?key="
  IO.println (repr (result.query.find? "key"))

/--
info: some none
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/api?key"
  IO.println (repr (result.query.find? "key"))

/--
info: some (some "value")
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/api?key=value"
  IO.println (repr (result.query.find? "key"))
-- Raw lookup APIs should not alias with pre-encoded key spellings.
-- `%61` is the percent-encoding of `a`: raw lookups ("a") and encoded
-- lookups (`%61`) must address distinct parameters.
#guard
  match (parseRequestTarget <* Std.Internal.Parsec.eof).run "/api?%61=1&a=2".toUTF8 with
  | .ok result =>
    let encodedA? := EncodedQueryParam.fromString? "%61"
    -- Raw "a" only sees the literally-spelled `a=2` parameter …
    ((result.query.find? "a" |>.bind id |>.bind EncodedQueryParam.decode) == some "2") &&
    (result.query.find? "%61").isNone &&
    result.query.contains "a" &&
    !result.query.contains "%61" &&
    -- … while the encoded lookup sees the `%61=1` parameter.
    (match encodedA? with
     | some encodedA =>
       ((result.query.findEncoded? encodedA |>.bind id |>.bind EncodedQueryParam.decode) == some "1") &&
       result.query.containsEncoded encodedA
     | none => false)
  | .error _ => false
-- `erase` / `eraseEncoded` likewise remove only their own spelling.
#guard
  match (parseRequestTarget <* Std.Internal.Parsec.eof).run "/api?%61=1&a=2".toUTF8 with
  | .ok result =>
    match EncodedQueryParam.fromString? "%61" with
    | some encodedA =>
      let erasedRaw := result.query.erase "a"
      let erasedEncoded := result.query.eraseEncoded encodedA
      !erasedRaw.contains "a" &&
      erasedRaw.containsEncoded encodedA &&
      !erasedEncoded.containsEncoded encodedA &&
      erasedEncoded.contains "a"
    | none => false
  | .error _ => false
-- ============================================================================
-- Query Operations
-- ============================================================================
-- `insert` adds a parameter; `contains` performs a raw (unencoded) lookup.
#guard (URI.Query.empty.insert "a" "1" |>.contains "a") = true
#guard (URI.Query.empty.contains "nonexistent") = false
-- `toRawString` renders parameters in insertion order.
/--
info: a=1&b=2
-/
#guard_msgs in
#eval do
  let query := URI.Query.empty
    |>.insert "a" "1"
    |>.insert "b" "2"
  IO.println query.toRawString
-- `erase` removes the binding(s) for the key.
/--
info: b=2
-/
#guard_msgs in
#eval do
  let query := URI.Query.empty
    |>.insert "a" "1"
    |>.insert "b" "2"
    |>.erase "a"
  IO.println query.toRawString
-- `set` replaces the existing value for the key.
/--
info: key=new
-/
#guard_msgs in
#eval do
  let query := URI.Query.empty
    |>.insert "key" "old"
    |>.set "key" "new"
  IO.println query.toRawString
-- ============================================================================
-- URI Builder Tests
-- ============================================================================
-- Domain names longer than 255 characters are rejected.
-- Four 63-character labels plus the four dots total 256 characters,
-- one over the limit.
#guard
  let label := String.ofList (List.replicate 63 'a')
  let longDomain := s!"{label}.{label}.{label}.{label}."
  (URI.DomainName.ofString? longDomain).isNone
-- The builder's checked host setter rejects the same over-long name.
#guard
  let label := String.ofList (List.replicate 63 'a')
  let longDomain := s!"{label}.{label}.{label}.{label}."
  (URI.Builder.empty.setHost? longDomain).isNone
-- The builder assembles scheme, host, path segments and query parameters.
/--
info: https://example.com/api/users?page=1
-/
#guard_msgs in
#eval do
  let uri := URI.Builder.empty
    |>.setScheme! "https"
    |>.setHost! "example.com"
    |>.appendPathSegment "api"
    |>.appendPathSegment "users"
    |>.addQueryParam "page" "1"
    |>.build
  IO.println uri
-- An explicit port is rendered after the host.
/--
info: http://localhost:8080/
-/
#guard_msgs in
#eval do
  let uri := URI.Builder.empty
    |>.setScheme! "http"
    |>.setHost! "localhost"
    |>.setPort 8080
    |>.build
  IO.println uri
-- User info is rendered before the host.
/--
info: https://user:pass@secure.example.com/private
-/
#guard_msgs in
#eval do
  let uri := URI.Builder.empty
    |>.setScheme! "https"
    |>.setUserInfo "user" (some "pass")
    |>.setHost! "secure.example.com"
    |>.appendPathSegment "private"
    |>.build
  IO.println uri
-- ============================================================================
-- Encoded Path Segment Tests
-- ============================================================================
-- Each test binds the parse result with `←` (the bind arrow was missing,
-- which is a syntax error inside `do`).

-- %2F stays encoded, so it does not split the segment.
/--
info: Std.Http.RequestTarget.originForm
{ path := { segments := #["path%2Fwith%2Fslashes"], absolute := true }, query := none }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/path%2Fwith%2Fslashes"
  IO.println (repr result)

/--
info: Std.Http.RequestTarget.originForm { path := { segments := #["file%20name.txt"], absolute := true }, query := none }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/file%20name.txt"
  IO.println (repr result)

-- Multi-byte UTF-8 percent-encoding is preserved as-is.
/--
info: Std.Http.RequestTarget.originForm { path := { segments := #["caf%C3%A9"], absolute := true }, query := none }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/caf%C3%A9"
  IO.println (repr result)
-- ============================================================================
-- Authority Form Tests
-- ============================================================================
-- Each test binds the parse result with `←` (the bind arrow was missing,
-- which is a syntax error inside `do`).

-- CONNECT-style host:port target.
/--
info: Std.Http.RequestTarget.authorityForm
{ userInfo := none, host := Std.Http.URI.Host.name "proxy.example.com", port := Std.Http.URI.Port.value 3128 }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "proxy.example.com:3128"
  IO.println (repr result)

/--
info: Std.Http.RequestTarget.authorityForm
{ userInfo := none, host := Std.Http.URI.Host.ipv4 127.0.0.1, port := Std.Http.URI.Port.value 8080 }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "127.0.0.1:8080"
  IO.println (repr result)

-- A registered name may start with a digit and is not confused with IPv4.
/--
info: Std.Http.RequestTarget.authorityForm
{ userInfo := none, host := Std.Http.URI.Host.name "1example.com", port := Std.Http.URI.Port.value 8080 }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "1example.com:8080"
  IO.println (repr result)

/--
info: Std.Http.RequestTarget.absoluteForm
{ scheme := "http",
authority := some { userInfo := none,
host := Std.Http.URI.Host.name "1example.com",
port := Std.Http.URI.Port.omitted },
path := { segments := #["path"], absolute := true },
query := #[] }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "http://1example.com/path"
  IO.println (repr result)

/--
info: Std.Http.RequestTarget.absoluteForm
{ scheme := "http",
authority := some { userInfo := none,
host := Std.Http.URI.Host.name "123abc.example.com",
port := Std.Http.URI.Port.omitted },
path := { segments := #["page"], absolute := true },
query := #[] }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "http://123abc.example.com/page"
  IO.println (repr result)