Compare commits

...

353 Commits

Author SHA1 Message Date
Sofia Rodrigues
e006edd7a1 feat: make tests smaller, add functions 2026-01-21 19:41:01 -03:00
Sofia Rodrigues
017c99b581 feat: onError 2026-01-21 15:16:01 -03:00
Sofia Rodrigues
b2cdef129f fix: small fixes 2026-01-21 00:08:57 -03:00
Sofia Rodrigues
1caabef943 fix: make url behavior srtict 2026-01-19 23:07:29 -03:00
Sofia Rodrigues
0617cc2cac fix: test 2026-01-18 14:39:31 -03:00
Sofia Rodrigues
9686680591 fix: small comment changes 2026-01-18 14:05:23 -03:00
Sofia Rodrigues
e12db84dc0 fix: small issues 2026-01-18 13:51:09 -03:00
Sofia Rodrigues
185d2be818 fix: remove useless files 2026-01-18 13:43:08 -03:00
Sofia Rodrigues
21cdf34d6c fix: typos 2026-01-18 13:42:49 -03:00
Sofia Rodrigues
d92122c8c4 fix: URI parsing was wrong for authorityForm
localhost:8080 now is parsed as a scheme and a path, to work with the URI RFC and HTTP1.1 RFC
2026-01-18 13:36:52 -03:00
Sofia Rodrigues
14129a736f fix: docs 2026-01-16 18:25:58 -03:00
Sofia Rodrigues
a1c1995076 fix: comments 2026-01-16 12:11:33 -03:00
Sofia Rodrigues
00f41fb152 feat: headers 2026-01-14 15:37:23 -03:00
Sofia Rodrigues
1458a61f7f feat: chunked as last encoding 2026-01-12 16:36:18 -03:00
Sofia Rodrigues
8cb626c8db Merge branch 'sofia/async-http' of https://github.com/leanprover/lean4 into sofia/async-http 2026-01-12 15:22:08 -03:00
Sofia Rodrigues
8fc12a44eb feat: headers normal form 2026-01-12 15:20:51 -03:00
Sofia Rodrigues
abae28d28b feat: more uri helpers and theorems 2026-01-09 20:02:07 -03:00
Sofia Rodrigues
aafc5f5f1f feat: uri decidable 2026-01-09 20:02:07 -03:00
Sofia Rodrigues
c94865d221 feat: request smuggling check 2026-01-09 20:02:07 -03:00
Sofia Rodrigues
71d7c96e82 fix: test with wrong content-length order 2026-01-09 20:02:06 -03:00
Sofia Rodrigues
64c58c5b2b feat: add limit to bodycollect 2026-01-09 20:02:06 -03:00
Sofia Rodrigues
4f0fa598c2 feat: add property in headers 2026-01-09 20:02:06 -03:00
Sofia Rodrigues
1e08ec5e8d feat: add more tests for uri, specialize the userinfo type 2026-01-09 20:02:06 -03:00
Sofia Rodrigues
522f08d212 feat: change parsing of uri 2026-01-09 20:02:06 -03:00
Sofia Rodrigues
7f3178941c fix: recv selector of bytestream 2026-01-09 20:02:06 -03:00
Sofia Rodrigues
dcb58be1b7 feat: HasAll in headers 2026-01-09 20:02:06 -03:00
Sofia Rodrigues
d2706fd156 feat: keep alive timeout 2026-01-09 20:02:06 -03:00
Sofia Rodrigues
02c736eb4d reverse: files 2026-01-09 20:02:06 -03:00
Sofia Rodrigues
f2d280160f fix: server timeout 2026-01-09 20:02:06 -03:00
Sofia Rodrigues
4af4420c64 feat: change build functions 2026-01-09 20:02:06 -03:00
Sofia Rodrigues
81f3a88511 reverse: signal 2026-01-09 20:02:06 -03:00
Sofia Rodrigues
865c3953a4 remove: useless files 2026-01-09 20:02:06 -03:00
Sofia Rodrigues
26d5bc7a74 feat: future 2026-01-09 20:02:05 -03:00
Sofia Rodrigues
c5577d6d3b fix: tests 2026-01-09 20:02:05 -03:00
Sofia Rodrigues
9b59503854 fix: tests 2026-01-09 20:02:05 -03:00
Sofia Rodrigues
5cd8b6fce4 feat: doc 2026-01-09 20:02:05 -03:00
Sofia Rodrigues
762c328ec3 fix: connection fork 2026-01-09 20:02:05 -03:00
Sofia Rodrigues
aa693c18fa feat: more tests 2026-01-09 20:02:05 -03:00
Sofia Rodrigues
0a71777aee test: add test 2026-01-09 20:02:05 -03:00
Sofia Rodrigues
79343b87c0 fix: context 2026-01-09 20:02:05 -03:00
Sofia Rodrigues
29f1d178ab fix: cancellationctx 2026-01-09 20:02:05 -03:00
Sofia Rodrigues
397d67a0b4 fix: keep alive behavior 2026-01-09 20:02:05 -03:00
Sofia Rodrigues
0f7390582d feat: cancellation backwards 2026-01-09 20:02:05 -03:00
Sofia Rodrigues
1fd8d038c6 fix: comments 2026-01-09 20:02:05 -03:00
Sofia Rodrigues
e7efa58e6e fix: docs 2026-01-09 20:02:05 -03:00
Sofia Rodrigues
aa4f133c74 feat: add more tests 2026-01-09 20:02:05 -03:00
Sofia Rodrigues
a0a0f45f38 feat: contextual fixes 2026-01-09 20:02:05 -03:00
Sofia Rodrigues
13c6c4994c fix: context 2026-01-09 20:02:05 -03:00
Sofia Rodrigues
59f4c09c21 fix: ext chunk 2026-01-09 20:02:05 -03:00
Sofia Rodrigues
146419bd63 fix: gramatic issues 2026-01-09 20:02:05 -03:00
Sofia Rodrigues
36bcbb093a fix: grammatical issues 2026-01-09 20:02:04 -03:00
Sofia Rodrigues
3a26a540ca fix: parser 2026-01-09 20:02:04 -03:00
Sofia Rodrigues
39c035cad7 fix: trailer 2026-01-09 20:02:04 -03:00
Sofia Rodrigues
f87c952296 feat: parse chunk 2026-01-09 20:02:04 -03:00
Sofia Rodrigues
21ac5a6c80 fix: trailers parsing 2026-01-09 20:02:04 -03:00
Sofia Rodrigues
a726a11ed6 fix: string changes 2026-01-09 20:02:04 -03:00
Sofia Rodrigues
a2b322387b fix: multiple small problems 2026-01-09 20:02:04 -03:00
Sofia Rodrigues
92fe0967b0 fix: weird behaviro when message as not sent :) 2026-01-09 20:02:04 -03:00
Sofia Rodrigues
3d39609878 fix: test 2026-01-09 20:02:04 -03:00
Sofia Rodrigues
83c51ead75 fix: details of connection: close 2026-01-09 20:02:04 -03:00
Sofia Rodrigues
fc231fc42d fix: remove client related 2026-01-09 20:02:04 -03:00
Sofia Rodrigues
25d950b50b fix: tests 2026-01-09 20:02:04 -03:00
Sofia Rodrigues
98602cd8ce fix: bug fixes and improvement in performance 2026-01-09 20:02:04 -03:00
Sofia Rodrigues
c1007723a7 fix: test 2026-01-09 20:02:04 -03:00
Sofia Rodrigues
cfd5ca67aa feat: header changes 2026-01-09 20:02:04 -03:00
Sofia Rodrigues
b974d4ca4d feat: small improvements in tests and client 2026-01-09 20:02:04 -03:00
Sofia Rodrigues
4ef5e0f8e5 fix: import structure 2026-01-09 20:02:04 -03:00
Sofia Rodrigues
34f848225c fix: test 2026-01-09 20:02:04 -03:00
Sofia Rodrigues
5ec62e008c fix: copyright header 2026-01-09 20:02:04 -03:00
Sofia Rodrigues
3f9cfbce83 feat: limits and tests 2026-01-09 20:02:04 -03:00
Sofia Rodrigues
8992b70fb9 feat: header detection and duplication 2026-01-09 20:02:04 -03:00
Sofia Rodrigues
f1f2d98fc7 fix: loop 2026-01-09 20:02:04 -03:00
Sofia Rodrigues
04c5d3ae47 fix: remove small useless things 2026-01-09 20:02:04 -03:00
Sofia Rodrigues
a1f8a5fddf feat: remove useless variable 2026-01-09 20:02:04 -03:00
Sofia Rodrigues
83cfe853fe fix: small issues with chunks and comments 2026-01-09 20:02:03 -03:00
Sofia Rodrigues
97c8a3bf1a fix: protocol 2026-01-09 20:02:03 -03:00
Sofia Rodrigues
818f915362 fix: headers 2026-01-09 20:02:03 -03:00
Sofia Rodrigues
b7a3496999 refactor: part of the connection API 2026-01-09 20:02:03 -03:00
Sofia Rodrigues
a542f73cdf feat: big refactor 2026-01-09 20:02:03 -03:00
Sofia Rodrigues
e7247e1312 fix: remove log 2026-01-09 20:02:03 -03:00
Sofia Rodrigues
470b49f573 feat: improve chunk extensions and handling of shutdown 2026-01-09 20:02:03 -03:00
Sofia Rodrigues
d6ee4c539f fix: comment 2026-01-09 20:02:03 -03:00
Sofia Rodrigues
0ff829a10b fix: remove macro 2026-01-09 20:02:03 -03:00
Sofia Rodrigues
f1a4f3ee60 fix: copyright header 2026-01-09 20:02:03 -03:00
Sofia Rodrigues
a0bcb4f1c7 fix: behavior of the client 2026-01-09 20:02:03 -03:00
Sofia Rodrigues
65d5ef42c1 feat: closePeerConnection 2026-01-09 20:02:03 -03:00
Sofia Rodrigues
2673fe3575 feat: small changes 2026-01-09 20:02:03 -03:00
Sofia Rodrigues
9036480e07 fix: selectors 2026-01-09 20:02:03 -03:00
Sofia Rodrigues
ee126cb28b fix: remove orphaned modules 2026-01-09 20:02:03 -03:00
Sofia Rodrigues
f62fb4ff38 fix: http 2026-01-09 20:02:03 -03:00
Sofia Rodrigues
33bd907d4f fix: update to master 2026-01-09 20:02:03 -03:00
Sofia Rodrigues
6537fdc559 feat: small changes 2026-01-09 20:02:03 -03:00
Sofia Rodrigues
6d209bcfa6 fix: style changes 2026-01-09 20:02:03 -03:00
Sofia Rodrigues
7a35339254 fix: comments 2026-01-09 20:02:03 -03:00
Sofia Rodrigues
a27778b261 fix: comments 2026-01-09 20:02:03 -03:00
Sofia Rodrigues
3590ac0a8a fix: comment and bytebuffer 2026-01-09 20:02:03 -03:00
Sofia Rodrigues
a813e216f9 feat: small changes 2026-01-09 20:02:03 -03:00
Sofia Rodrigues
aab4877222 feat: add header value validation 2026-01-09 20:02:03 -03:00
Sofia Rodrigues
b2adbc96ff fix: bytestream 2026-01-09 20:02:03 -03:00
Sofia Rodrigues
1930fd26ff feat: tests and small changes 2026-01-09 20:02:02 -03:00
Sofia Rodrigues
08daa78cc3 fix: funnel imports 2026-01-09 20:02:02 -03:00
Sofia Rodrigues
99e8939d9b fix: remove orphaned module 2026-01-09 20:02:02 -03:00
Sofia Rodrigues
62e76bc384 feat: http 2026-01-09 20:02:02 -03:00
Sofia Rodrigues
1a8aeca374 fix: async 2026-01-09 20:02:02 -03:00
Sofia Rodrigues
9c983fa7ca fix: streammap 2026-01-09 20:02:02 -03:00
Sofia Rodrigues
0274659e18 feat: cancellation 2026-01-09 20:02:02 -03:00
Sofia Rodrigues
7c2f56f66c fix: simplify 2026-01-09 20:02:02 -03:00
Sofia Rodrigues
e1ebd0e5a5 fix: async 2026-01-09 20:02:02 -03:00
Sofia Rodrigues
271ff7ae0a fix: small comments 2026-01-09 20:02:02 -03:00
Sofia Rodrigues
e146c0e61f fix: async 2026-01-09 20:02:02 -03:00
Sofia Rodrigues
de669dbcb7 fix: streammap 2026-01-09 20:02:02 -03:00
Sofia Rodrigues
9363e42c21 fix: async stream 2026-01-09 20:02:02 -03:00
Sofia Rodrigues
7d81615fc5 fix: channel 2026-01-09 20:02:02 -03:00
Sofia Rodrigues
f074c51af9 fix: stream map 2026-01-09 20:02:02 -03:00
Sofia Rodrigues
bec683ca35 feat: basics stream map 2026-01-09 20:02:02 -03:00
Sofia Rodrigues
69681b56a5 fix: channel 2026-01-09 20:02:02 -03:00
Sofia Rodrigues
ef9df58907 fix: wrong function 2026-01-09 20:02:02 -03:00
Sofia Rodrigues
7ba79053b3 fix: small fixes 2026-01-09 20:02:02 -03:00
Sofia Rodrigues
3a253893d6 feat: async traits 2026-01-09 20:02:02 -03:00
Sofia Rodrigues
4222c6cebd fix: stream can only return one type 2026-01-09 20:02:02 -03:00
Sofia Rodrigues
449112b3da fix: remove useless function 2026-01-09 20:02:02 -03:00
Sofia Rodrigues
8f22819d68 feat: remove outparams 2026-01-09 20:02:02 -03:00
Sofia Rodrigues
b430c5456e feat: async type classes 2026-01-09 20:02:02 -03:00
Sofia Rodrigues
801548be14 feat: http client 2026-01-09 20:02:02 -03:00
Sofia Rodrigues
6cb476d464 refactor: http 2026-01-09 20:02:02 -03:00
Sofia Rodrigues
d850276573 feat: rename 2026-01-09 20:02:02 -03:00
Sofia Rodrigues
1c18ca11b5 fix: test 2026-01-09 20:02:02 -03:00
Sofia Rodrigues
9bc9b49261 fix: timeout 2026-01-09 20:02:02 -03:00
Sofia Rodrigues
fba7b5ad8f fix: update to master 2026-01-09 20:02:02 -03:00
Sofia Rodrigues
75dbf7df97 feat: small changes 2026-01-09 20:02:02 -03:00
Sofia Rodrigues
c7595e1aba fix: small changes 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
965be635a2 feat: improve comment of serveConnection 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
1bc51eb0dd fix: style changes 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
24091da04f fix: comments 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
559885014e fix: bytestream comments 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
94b5ee5836 fix: comments 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
d86fc27254 fix: comment and bytebuffer 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
c09a0f70ea fix: url parser 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
7705e951a6 feat: small changes 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
51b67327de fix: test and small comments 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
c512fa1b49 fix: merge 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
ea2305b174 feat: add header value validation 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
e623949488 fix: remove useless coe 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
aab7d16cf4 fix: bytestream 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
e9ed6a9204 feat: tests and small changes 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
cd63928177 fix: imports 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
6383021cec fix: funnel imports 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
514f1bb20d fix: remove orphaned module 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
7d8bf08fd9 fix: imports 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
7b5a9d662c fix: coe option 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
63365564b0 fix: copyright notice 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
1cf73d1e66 feat: http 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
546b79e481 fix: async 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
c876d64c55 fix: streammap 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
39606ece6e feat: cancellation 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
7df95d7e01 fix: simplify 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
089a9eb254 fix: async 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
1bd391b450 fix: small comments 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
ebb4d1f2c3 fix: async 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
ffc9b3cb14 fix: streammap 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
29bdadea39 fix: async stream 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
d6ff24fdf3 fix: channel 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
dd79fda420 fix: stream map 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
4ff1146b19 feat: basics stream map 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
55692a55ae fix: channel 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
acb0af7da9 fix: wrong function 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
068fdb4842 fix: small fixes 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
2dd2d6cfc5 feat: async traits 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
8f0e8ffd77 fix: stream can only return one type 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
fe99ae3a37 fix: remove useless function 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
5952dc115a feat: remove outparams 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
48ea7def07 feat: async type classes 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
1800a079b6 fix: function names 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
08c14ffafd fix: notes and concurrently 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
2f03e145d4 feat: countAliveTokens and background 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
695cf3c9e9 fix: name and remove backgroudn 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
a934bc0acd test: async context test 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
46fd70864b feat: add selector.cancelled function 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
58a4d61e99 fix: comments 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
0f429d4ed2 feat: add contextual monad 2026-01-09 20:02:01 -03:00
Sofia Rodrigues
392e15075f fix: comment 2026-01-09 20:02:00 -03:00
Sofia Rodrigues
c6f80c2a11 fix: comments 2026-01-09 20:02:00 -03:00
Sofia Rodrigues
8cf20cdeb4 feat: context 2026-01-09 20:02:00 -03:00
Sofia Rodrigues
6313f22b1e feat: future 2026-01-09 20:01:19 -03:00
Sofia Rodrigues
d429561512 feat: more uri helpers and theorems 2026-01-09 19:58:14 -03:00
Sofia Rodrigues
00e569fe62 Merge branch 'master' of https://github.com/leanprover/lean4 into sofia/async-http 2025-12-29 13:21:09 -03:00
Sofia Rodrigues
59031e2908 feat: uri decidable 2025-12-26 19:58:47 -03:00
Sofia Rodrigues
207c7776a6 feat: request smuggling check 2025-12-26 17:20:54 -03:00
Sofia Rodrigues
c6b0245929 fix: test with wrong content-length order 2025-12-26 17:11:24 -03:00
Sofia Rodrigues
d2ec777a4e feat: add limit to bodycollect 2025-12-26 17:01:00 -03:00
Sofia Rodrigues
1fb0ab22c9 feat: add property in headers 2025-12-26 16:33:34 -03:00
Sofia Rodrigues
e1ece4838e feat: add more tests for uri, specialize the userinfo type 2025-12-26 14:48:36 -03:00
Sofia Rodrigues
e024d60260 feat: change parsing of uri 2025-12-24 17:51:50 -03:00
Sofia Rodrigues
ebd0056bfa fix: recv selector of bytestream 2025-12-23 21:40:30 -03:00
Sofia Rodrigues
998b7a98ad feat: HasAll in headers 2025-12-19 21:14:21 -03:00
Sofia Rodrigues
be1b96fa27 feat: keep alive timeout 2025-12-18 03:08:16 -03:00
Sofia Rodrigues
d4a378b50f reverse: files 2025-12-18 02:39:30 -03:00
Sofia Rodrigues
e8e5902f9a fix: server timeout 2025-12-18 02:39:04 -03:00
Sofia Rodrigues
eae30712cf feat: change build functions 2025-12-17 22:53:41 -03:00
Sofia Rodrigues
2eeb73bfae reverse: signal 2025-12-17 21:07:44 -03:00
Sofia Rodrigues
4493731335 remove: useless files 2025-12-17 21:00:54 -03:00
Sofia Rodrigues
5103fd944b feat: future 2025-12-17 20:59:20 -03:00
Sofia Rodrigues
c2ea05793f fix: tests 2025-12-17 20:57:20 -03:00
Sofia Rodrigues
9756d5946f fix: tests 2025-12-17 20:57:20 -03:00
Sofia Rodrigues
6a889eda3a feat: doc 2025-12-17 20:57:20 -03:00
Sofia Rodrigues
9e9688d021 fix: connection fork 2025-12-17 20:57:20 -03:00
Sofia Rodrigues
876547d404 feat: more tests 2025-12-17 20:57:20 -03:00
Sofia Rodrigues
4f1795248f test: add test 2025-12-17 20:57:20 -03:00
Sofia Rodrigues
3efb149d85 fix: context 2025-12-17 20:57:20 -03:00
Sofia Rodrigues
b4bee04324 fix: cancellationctx 2025-12-17 20:57:20 -03:00
Sofia Rodrigues
c5930d8284 fix: keep alive behavior 2025-12-17 20:57:20 -03:00
Sofia Rodrigues
09287a574c feat: cancellation backwards 2025-12-17 20:57:20 -03:00
Sofia Rodrigues
24a776e8a0 fix: comments 2025-12-17 20:57:20 -03:00
Sofia Rodrigues
778289c5e6 fix: docs 2025-12-17 20:57:20 -03:00
Sofia Rodrigues
597beb0a48 feat: add more tests 2025-12-17 20:57:20 -03:00
Sofia Rodrigues
7f492f75a5 feat: contextual fixes 2025-12-17 20:57:20 -03:00
Sofia Rodrigues
f26bf10cf1 fix: context 2025-12-17 20:57:20 -03:00
Sofia Rodrigues
f135a4830a fix: ext chunk 2025-12-17 20:57:20 -03:00
Sofia Rodrigues
31b8ed5157 fix: gramatic issues 2025-12-17 20:57:20 -03:00
Sofia Rodrigues
b9f9518d15 fix: grammatical issues 2025-12-17 20:57:20 -03:00
Sofia Rodrigues
2bb0503dc0 fix: parser 2025-12-17 20:57:20 -03:00
Sofia Rodrigues
e11e1b9937 fix: trailer 2025-12-17 20:57:20 -03:00
Sofia Rodrigues
7cca5e070b feat: parse chunk 2025-12-17 20:57:20 -03:00
Sofia Rodrigues
2a3ec888ee fix: trailers parsing 2025-12-17 20:57:20 -03:00
Sofia Rodrigues
a3a970e553 fix: string changes 2025-12-17 20:57:20 -03:00
Sofia Rodrigues
e5bcdfe020 fix: multiple small problems 2025-12-17 20:57:20 -03:00
Sofia Rodrigues
12f33eebd4 fix: weird behaviro when message as not sent :) 2025-12-17 20:57:18 -03:00
Sofia Rodrigues
5fd23f0878 fix: test 2025-12-17 20:57:09 -03:00
Sofia Rodrigues
7afd19513b fix: details of connection: close 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
342e614f0f fix: remove client related 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
55645fa51b fix: tests 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
3aeb1cce3c fix: bug fixes and improvement in performance 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
c1683ec5bc fix: test 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
52e6863ad0 feat: header changes 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
bfc7cb1b27 feat: small improvements in tests and client 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
8d6baae17b fix: import structure 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
d169f2606d fix: test 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
25634a78f1 fix: copyright header 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
3e969253b3 feat: limits and tests 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
60f16b7532 feat: header detection and duplication 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
3b7381d326 fix: loop 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
b4f919468b fix: remove small useless things 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
0d90d45418 feat: remove useless variable 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
6326c7d3b3 fix: small issues with chunks and comments 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
2c08b50839 fix: protocol 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
595db1cf64 fix: headers 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
2f86695614 refactor: part of the connection API 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
7b269375f6 feat: big refactor 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
83b17191d2 fix: remove log 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
123c5209d1 feat: improve chunk extensions and handling of shutdown 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
ee98ed7535 fix: comment 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
ad3b3b6fd8 fix: remove macro 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
0577251413 fix: copyright header 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
2abcad5e6e fix: behavior of the client 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
78ec06d3cc feat: closePeerConnection 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
36c1416ac9 feat: small changes 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
9042768059 fix: selectors 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
183086a5bd fix: remove orphaned modules 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
4b68a44976 fix: http 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
2d29158568 fix: update to master 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
0a4823baab feat: small changes 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
f431c97297 fix: style changes 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
ce0a6a99a6 fix: comments 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
94d17e0ca3 fix: comments 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
f054f7d679 fix: comment and bytebuffer 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
dd701fd809 feat: small changes 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
5244656a14 feat: add header value validation 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
183ce44d55 fix: bytestream 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
9b58239b4b feat: tests and small changes 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
60eca9a897 fix: funnel imports 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
c89e865a41 fix: remove orphaned module 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
cbfa6ed78c feat: http 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
426b21b9ab fix: async 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
7ab3b6fed8 fix: streammap 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
15066ffb64 feat: cancellation 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
3b70b4e74a fix: simplify 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
2e48a50fb6 fix: async 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
78de8de671 fix: small comments 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
021a56f4a7 fix: async 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
4586e4acbf fix: streammap 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
b6b2fd7a87 fix: async stream 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
ef7f651f2d fix: channel 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
9bbe37b656 fix: stream map 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
26b208abe7 feat: basics stream map 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
97fb044377 fix: channel 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
8f302823b7 fix: wrong function 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
448988cdb4 fix: small fixes 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
9f1f701cef feat: async traits 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
a2c5207f9d fix: stream can only return one type 2025-12-17 20:57:08 -03:00
Sofia Rodrigues
b499386bc1 fix: remove useless function 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
4f6bfcca78 feat: remove outparams 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
ff27df47cb feat: async type classes 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
5b3ce9d804 feat: http client 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
e83ecd5e1b refactor: http 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
2b130c09ff feat: rename 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
a33134efa9 fix: test 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
6aefaeea6a fix: timeout 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
b7a209a10e fix: update to master 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
abe970846f feat: small changes 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
8dcfd41ea2 fix: small changes 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
5632fc881c feat: improve comment of serveConnection 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
a5d327fa44 fix: style changes 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
16c660ffe1 fix: comments 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
9a1417add3 fix: bytestream comments 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
97945791e6 fix: comments 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
93a14968ee fix: comment and bytebuffer 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
4cee3d3eaf fix: url parser 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
ca246c5923 feat: small changes 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
dfd0ed400e fix: test and small comments 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
850be6c8e8 fix: merge 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
8a586a8b8d feat: add header value validation 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
491be56c1f fix: remove useless coe 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
95152c8ca0 fix: bytestream 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
942c4ba3a0 feat: tests and small changes 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
537105018d fix: imports 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
4244af7ddc fix: funnel imports 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
ddb0e62764 fix: remove orphaned module 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
97b22ecf92 fix: imports 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
3b1cfca2d1 fix: coe option 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
1e0d86ebcc fix: copyright notice 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
7944ec0160 feat: http 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
278b46398e fix: async 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
7b052b02ab fix: streammap 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
96668994d4 feat: cancellation 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
bb74e1bccf fix: simplify 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
a7cd25f1ff fix: async 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
f88c758b0e fix: small comments 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
e2c331ab7e fix: async 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
1ed788ab9e fix: streammap 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
2ac72964e2 fix: async stream 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
969bd9242f fix: channel 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
19d88b8a92 fix: stream map 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
1729756bcc feat: basics stream map 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
c9a753b55d fix: channel 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
3ec6c66cec fix: wrong function 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
0936da3b78 fix: small fixes 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
31adf6837f feat: async traits 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
6efb2f8e59 fix: stream can only return one type 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
b98472e911 fix: remove useless function 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
3d4f30c232 feat: remove outparams 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
179d51f13d feat: async type classes 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
a59e0b48ba fix: function names 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
5e0ede4f8d fix: notes and concurrently 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
718b9741b7 feat: countAliveTokens and background 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
514e3ba0e8 fix: name and remove backgroudn 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
45d1fbc5ae test: async context test 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
dfa8bf7a81 feat: add selector.cancelled function 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
0495667e6f fix: comments 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
5a1aa2ea06 feat: add contextual monad 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
71abb2a3e8 fix: comment 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
fcfe7e98b9 fix: comments 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
a6e7210793 feat: context 2025-12-17 20:57:07 -03:00
Sofia Rodrigues
40f26ec349 feat: future 2025-12-17 19:28:01 -03:00
52 changed files with 11816 additions and 25 deletions

View File

@@ -57,4 +57,10 @@ theorem length_map {f : Char → Char} {s : String} : (s.map f).length = s.lengt
theorem map_eq_empty {f : Char → Char} {s : String} : s.map f = "" ↔ s = "" := by
simp [← toList_eq_nil_iff]
@[simp]
theorem map_idempotent {f : Char → Char} {s : String} (h : ∀ (c : Char), f (f c) = f c) : (s.map f |>.map f) = s.map f := by
apply String.ext
simp [String.toList_map, List.map_map]
exact fun c _ => h c
end String

View File

@@ -230,7 +230,7 @@ Examples:
* `"Orange".toLower = "orange"`
* `"ABc123".toLower = "abc123"`
-/
@[inline] def toLower (s : String) : String :=
@[inline, expose] def toLower (s : String) : String :=
s.map Char.toLower
/--

View File

@@ -9,6 +9,7 @@ prelude
public import Std.Internal.Async
public import Std.Internal.Parsec
public import Std.Internal.UV
public import Std.Internal.Http
@[expose] public section

View File

@@ -753,6 +753,10 @@ instance : MonadLift (EIO ε) (EAsync ε) where
instance : MonadLift BaseAsync (EAsync ε) where
monadLift x := .mk <| x.map (.ok)
instance : MonadAttach BaseAsync := .trivial
instance : MonadAttach (EAsync ε) := .trivial
@[inline]
protected partial def forIn
{β : Type} (init : β)

186
src/Std/Internal/Http.lean Normal file
View File

@@ -0,0 +1,186 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Server
public section
/-!
# HTTP Library
A low-level HTTP/1.1 server implementation for Lean. This library provides a pure,
sans-I/O protocol implementation that can be used with the `Async` library or with
custom connection handlers.
## Overview
This module provides a complete HTTP/1.1 server implementation with support for:
- Request/response handling with streaming bodies
- Keep-alive connections
- Chunked transfer encoding
- Header validation and management
- Configurable timeouts and limits
**Sans I/O Architecture**: The core protocol logic doesn't perform any actual I/O itself -
it just defines how data should be processed. This separation allows the protocol implementation
to remain pure and testable, while different transports (TCP sockets, mock clients) handle
the actual reading and writing of bytes.
## Quick Start
The main entry point is `HTTP.Server.serve`, which starts an HTTP/1.1 server:
```lean
import Std.Internal.Http
import Std.Internal.Async
open Std.Internal.IO.Async
open Std Http
def handler (req : Request Body) : ContextAsync (Response Body) := do
-- Return a simple text response
return Response.ok
|>.text "Hello, World!"
def main : IO Unit := do
let address := .v4 (.mk (.ofParts 127 0 0 1) 8080)
let server ← (Server.serve address handler (IO.eprintln ·)).block
server.waitShutdown.block
```
## Working with Requests
Incoming requests are represented by `Request Body`, which bundles together the
request line, parsed headers, and a lazily-consumed body. Headers are available
immediately, while the body can be streamed or collected on demand, allowing handlers
to efficiently process both small and large requests.
### Reading Headers
```lean
def handler (req : Request Body) : ContextAsync (Response Body) := do
-- Access request method and URI
let method := req.head.method -- Method.get, Method.post, etc.
let uri := req.head.uri -- RequestTarget
-- Read a specific header
if let some contentType := req.head.headers.get? (.new "content-type") then
IO.println s!"Content-Type: {contentType.value}"
return Response.ok |>.text "OK"
```
### Reading Request Body
The request body is exposed as a stream, which can be consumed incrementally or collected into memory.
Helper functions are provided to decode the body as UTF-8 text or raw bytes, with optional size limits
to protect against unbounded payloads.
```lean
def handler (req : Request Body) : ContextAsync (Response Body) := do
-- Collect entire body as string (with optional size limit)
let some bodyStr ← req.body.collectString (maxBytes := some 1024)
| return Response.badRequest |>.text "Invalid UTF-8 or body too large"
-- Or collect as raw bytes
let bodyBytes ← req.body.collectByteArray
return Response.ok |>.text s!"Received: {bodyStr}"
```
## Building Responses
Responses are constructed using an API that starts from a status code and adds headers and a body.
Common helpers exist for text, HTML, and binary responses, while still allowing full control over status
codes and header values.
```lean
-- Text response
Response.ok |>.text "Hello!"
-- HTML response
Response.ok |>.html "<h1>Hello!</h1>"
-- Binary response
Response.ok |>.binary someByteArray
-- Custom status
Response.withStatus .created |>.text "Resource created"
-- With custom headers
Response.ok
|>.header! "X-Custom-Header" "value"
|>.header! "Cache-Control" "no-cache"
|>.text "Response with headers"
```
### Streaming Responses
For large responses or server-sent events, use streaming:
```lean
def handler (req : Request Body) : ContextAsync (Response Body) := do
Response.ok
|>.header! "Content-Type" "text/plain"
|>.stream fun stream => do
for i in [0:10] do
stream.writeChunk { data := s!"chunk {i}\n".toUTF8 }
-- Optionally add delays for SSE-like behavior
stream.close
```
## Server Configuration
Configure server behavior with `Server.Config`:
```lean
def config : Std.Http.Config := {
keepAliveTimeout := ⟨30000, by decide⟩,
lingeringTimeout := 5000,
maximumRecvSize := 65536,
defaultPayloadBytes := 8192,
}
let server ← Server.serve address handler (IO.eprintln ·) config
```
## Architecture
### Request/Response Types
- `Request Body` - HTTP request with headers and body
- `Response Body` - HTTP response with status, headers, and body
- `Body` - Request/response body (empty, bytes, or stream)
- `Headers` - Collection of header name-value pairs
### Handler Signature
```lean
Request Body → ContextAsync (Response Body)
```
`ContextAsync` provides:
- Asynchronous I/O via the `Async` monad
- Cancellation context to monitor connection status
### Transport Layer
`Transport` is a type class abstracting the network layer. Implementations:
- `TCP.Socket.Client` - Standard TCP sockets for production
- `Mock.Client` - In-memory mock for testing
### Low-Level API
For custom connection handling, use `Server.serveConnection`:
```lean
-- Handle a single connection with custom transport
Server.serveConnection client handler config
```
-/

View File

@@ -0,0 +1,23 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Data.Body
public import Std.Internal.Http.Data.Headers
public import Std.Internal.Http.Data.Method
public import Std.Internal.Http.Data.Version
public import Std.Internal.Http.Data.Request
public import Std.Internal.Http.Data.Response
public import Std.Internal.Http.Data.URI
public import Std.Internal.Http.Data.Status
/-!
# HTTP Data Types
This module re-exports all HTTP data types including request/response structures,
headers, methods, status codes, URIs, and body handling.
-/

View File

@@ -0,0 +1,132 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Async.ContextAsync
public import Std.Internal.Http.Data.Headers
public import Std.Internal.Http.Data.Body.Length
public import Std.Internal.Http.Data.Body.ByteStream
public section
/-!
# Body
This module defines the `Body` type, which represents the body of an HTTP request or response.
-/
namespace Std.Http
set_option linter.all true
open Std Internal IO Async
/--
Type that represents the body of a request or response with streams of byte arrays or byte arrays of fixed
size.
-/
inductive Body where
/--
Empty body with no content
-/
| empty
/--
Body containing raw byte data stored in memory
-/
| bytes (data : ByteArray)
/--
Body containing streaming data from a byte stream channel
-/
| stream (channel : Body.ByteStream)
deriving Inhabited
namespace Body
/--
Returns the length of the body when it can be determined: `.fixed 0` for an empty body,
the buffer size for an in-memory body, and the stream's known size for a streaming body
(falling back to `.chunked` when the stream does not know its size).
-/
def getContentLength (body : Body) : Async Length :=
  match body with
  | .empty => pure (.fixed 0)
  | .bytes data => pure (.fixed data.size)
  | .stream s => do
    let known ← s.getKnownSize
    return known.getD .chunked
/--
Close the body and release any associated resources. For streaming bodies, this closes the underlying
channel. For other body types, this is a no-op.
-/
def close (body : Body) : Async Unit := do
  if let .stream channel := body then
    channel.close
instance : Coe String Body where
coe s := .bytes (String.toUTF8 s)
instance : Coe ByteArray Body where
coe := .bytes
instance : Coe Body.ByteStream Body where
coe := .stream
instance : Coe Unit Body where
coe _ := Body.empty
instance : EmptyCollection Body where
emptyCollection := Body.empty
/--
Iterate over a `Body` in the `Async` monad: an `empty` body yields no chunks, a `bytes`
body yields exactly one extension-free chunk, and a `stream` body yields each received chunk.
-/
instance : ForIn Async Body Chunk where
  forIn body acc step :=
    match body with
    | .empty => pure acc
    | .bytes data => do return (← step (Chunk.mk data #[]) acc).value
    | .stream stream' => ByteStream.forIn stream' acc step

/--
Iterate over a `Body` in the `ContextAsync` monad; streaming bodies additionally stop
when the cancellation context completes (via `ByteStream.forIn'`).
-/
instance : ForIn ContextAsync Body Chunk where
  forIn body acc step :=
    match body with
    | .empty => pure acc
    | .bytes data => do return (← step (Chunk.mk data #[]) acc).value
    | .stream stream' => ByteStream.forIn' stream' acc step
/--
Collect all data from the body into a single `ByteArray`. This reads the entire body content into memory
and consumes significant memory for large bodies. If `maxBytes` is provided, throws an error if the body
exceeds that limit.
-/
def collectByteArray (body : Body) (maxBytes : Option Nat := none) : Async ByteArray := do
  -- Fast path: when the total length is known up front, reject before reading anything.
  if let some maxBytes := maxBytes then
    if let .fixed size ← body.getContentLength then
      if size > maxBytes then
        throw <| IO.userError s!"body exceeds limit ({maxBytes} bytes)"
  let mut result := ByteArray.empty
  let mut size := 0
  for x in body do
    let chunk := x.data
    let newSize := size + chunk.size
    -- Enforce the limit incrementally so a chunked body cannot grow unbounded in memory.
    if let some maxBytes := maxBytes then
      if newSize > maxBytes then
        throw <| IO.userError s!"body exceeds limit ({maxBytes} bytes)"
    result := result ++ chunk
    size := newSize
  return result
/--
Collect all data from the body into a single `String`. This reads the entire body content into memory
and consumes significant memory for large bodies. If `maxBytes` is provided, throws an error if the body
exceeds that limit. Returns `some` if the data is valid UTF-8, otherwise `none`.
-/
def collectString (body : Body) (maxBytes : Option Nat := none) : Async (Option String) := do
  let res ← collectByteArray body maxBytes
  return String.fromUTF8? res

View File

@@ -0,0 +1,390 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Sync
public import Std.Internal.Async
public import Std.Internal.Http.Data.Chunk
public import Std.Internal.Http.Data.Body.Length
public import Init.Data.Queue
public section
/-!
# ByteStream
A `ByteStream` represents an asynchronous channel for streaming byte data in chunks. It provides an
interface for producers and consumers to exchange byte arrays with optional chunk metadata (extensions),
making it suitable for HTTP chunked transfer encoding and other streaming scenarios.
-/
namespace Std.Http.Body
open Std Internal IO Async
set_option linter.all true
namespace ByteStream
open Internal.IO.Async in
private inductive Consumer where
| normal (promise : IO.Promise (Option Chunk))
| select (finished : Waiter (Option Chunk))
private def Consumer.resolve (c : Consumer) (x : Option Chunk) : BaseIO Bool := do
match c with
| .normal promise =>
promise.resolve x
return true
| .select waiter =>
let lose := return false
let win promise := do
promise.resolve (.ok x)
return true
waiter.race lose win
private structure Producer where
chunk : Chunk
promise : IO.Promise Bool
private structure State where
/--
Chunks pushed into the stream that are waiting to be consumed.
-/
values : Std.Queue Chunk
/--
Current amount of chunks buffered in the stream.
-/
amount : Nat
/--
Maximum capacity of chunks allowed in the buffer. Writers block when amount ≥ capacity.
-/
capacity : Nat
/--
Consumers that are blocked on a producer providing them a chunk. They will be resolved to `none`
if the stream closes.
-/
consumers : Std.Queue Consumer
/--
Producers that are blocked waiting for buffer space to become available.
-/
producers : Std.Queue Producer
/--
Whether the stream is closed already.
-/
closed : Bool
/--
Known size of the stream if available.
-/
knownSize : Option Body.Length
deriving Nonempty
end ByteStream
/--
A channel for byte arrays with support for chunk extensions.
-/
structure ByteStream where
private mk ::
private state : Mutex ByteStream.State
deriving Nonempty
namespace ByteStream
/--
Creates a new `ByteStream` with a specified capacity. The capacity bounds how many
chunks may sit in the buffer before writers block.
-/
def emptyWithCapacity (capacity : Nat := 128) : Async ByteStream := do
  return {
    state := ← Mutex.new {
      values := ∅
      consumers := ∅
      producers := ∅
      amount := 0
      capacity
      closed := false
      knownSize := none
    }
  }
/--
Creates a new `ByteStream` with the default capacity (see `emptyWithCapacity`).
-/
@[always_inline, inline]
def empty : Async ByteStream :=
  emptyWithCapacity
-- Shrinks a known fixed length as a chunk is consumed; `chunked` or an absent size is
-- returned unchanged. `Nat` subtraction truncates at zero.
-- NOTE(review): this subtracts `chunk.size` (which counts extension framing too), not
-- `chunk.data.size` — confirm that is intended.
private def decreaseKnownSize (knownSize : Option Body.Length) (chunk : Chunk) : Option Body.Length :=
  if let some (.fixed res) := knownSize then
    some (.fixed (res - chunk.size))
  else
    knownSize
/--
If buffer space is available, moves one blocked producer's chunk into the buffer and
resolves its promise with `true`. No-op when the buffer is full or no producer waits.
-/
private def tryWakeProducer [Monad m] [MonadLiftT (ST IO.RealWorld) m] [MonadLiftT BaseIO m] :
    AtomicT State m Unit := do
  let st ← get
  -- Try to wake a producer if we have space
  if st.amount < st.capacity then
    if let some (producer, producers) := st.producers.dequeue? then
      let chunk := producer.chunk
      if st.amount + 1 <= st.capacity then
        -- We have space for this chunk
        set { st with
          values := st.values.enqueue chunk,
          amount := st.amount + 1,
          producers
        }
        producer.promise.resolve true
      else
        -- Still not enough space, put it back
        set { st with producers := producers.enqueue producer }
/--
Dequeues one buffered chunk, updating the known size and buffered amount, then tries to
wake a blocked producer with the freed space. Returns `none` when the buffer is empty.
-/
private def tryRecv' [Monad m] [MonadLiftT (ST IO.RealWorld) m] [MonadLiftT BaseIO m] :
    AtomicT State m (Option Chunk) := do
  let st ← get
  if let some (chunk, values) := st.values.dequeue? then
    let newKnownSize := decreaseKnownSize st.knownSize chunk
    let newAmount := st.amount - 1
    set { st with values, knownSize := newKnownSize, amount := newAmount }
    -- Try to wake a blocked producer now that we have space
    tryWakeProducer
    return some chunk
  else
    return none
/--
Attempts to receive a chunk from the stream. Returns `some` with a chunk when data is available, or `none`
when the stream is closed or no data is available.
-/
def tryRecv (stream : ByteStream) : Async (Option Chunk) :=
  stream.state.atomically tryRecv'
/--
Core receive: returns an already-completed task when a chunk is buffered or the stream
is closed; otherwise registers a consumer promise that a future `send`/`close` resolves.
-/
private def recv' (stream : ByteStream) : BaseIO (Task (Option Chunk)) := do
  stream.state.atomically do
    if let some chunk ← tryRecv' then
      return .pure <| some chunk
    else if (← get).closed then
      return .pure none
    else
      let promise ← IO.Promise.new
      modify fun st => { st with consumers := st.consumers.enqueue (.normal promise) }
      -- A dropped promise resolves `result?` to `none`, which we flatten to "no chunk".
      return promise.result?.map (sync := true) (·.bind id)
/--
Receives (reads) a chunk from the stream. Returns `none` if the stream is closed and no data is available.
-/
def recv (stream : ByteStream) : Async (Option Chunk) := do
  Async.ofTask (← recv' stream)
/--
Receives a chunk and returns only its data, discarding extensions. Returns `none` if the stream is
closed and no data is available.
-/
@[always_inline, inline]
def recvBytes (stream : ByteStream) : Async (Option ByteArray) := do
  let chunk? ← stream.recv
  return chunk?.map (·.data)
/--
Attempts to deliver a chunk: hands it directly to a waiting consumer when possible
(skipping consumers that lost their select race), otherwise buffers it if space remains.
Returns `false` only when the buffer is full and no consumer took the chunk.
-/
private def trySend' (chunk : Chunk) : AtomicT State BaseIO Bool := do
  while true do
    let st ← get
    if let some (consumer, consumers) := st.consumers.dequeue? then
      let newKnownSize := decreaseKnownSize st.knownSize chunk
      let success ← consumer.resolve (some chunk)
      set { st with consumers, knownSize := newKnownSize }
      -- A `select` consumer may have already been claimed by another race; retry with
      -- the next consumer in that case.
      if success then
        break
    else
      if st.amount + 1 <= st.capacity then
        set { st with
          values := st.values.enqueue chunk,
          amount := st.amount + 1
        }
        return true
      else
        return false
  return true
/--
Attempts to send a chunk, returning `false` when the stream is closed or the buffer is
full with no waiting consumer.
-/
private def trySend (stream : ByteStream) (chunk : Chunk) : BaseIO Bool := do
  stream.state.atomically do
    if (← get).closed then
      return false
    else
      trySend' chunk
/--
Blocking send: fails on a closed stream, delivers immediately when possible, and
otherwise parks the chunk on the producer queue, retrying once space is granted.
-/
private partial def send' (stream : ByteStream) (chunk : Chunk) : BaseIO (Task (Except IO.Error Unit)) := do
  stream.state.atomically do
    if (← get).closed then
      return .pure <| .error (.userError "channel closed")
    else if ← trySend' chunk then
      return .pure <| .ok ()
    else
      let promise ← IO.Promise.new
      let producer : Producer := { chunk, promise }
      modify fun st => { st with producers := st.producers.enqueue producer }
      -- `tryWakeProducer` may have enqueued the chunk already when resolving with `true`;
      -- re-entering `send'` is still required when the wake-up raced with other writers.
      BaseIO.bindTask promise.result? fun res => do
        if res.getD false then
          send' stream chunk
        else
          return .pure <| .error (.userError "channel closed")
/--
Writes data to the stream as a chunk with optional extensions. Empty data is ignored,
since a zero-length chunk would terminate HTTP chunked framing. Blocks while the
buffer is full; throws if the stream is closed.
-/
def write (stream : ByteStream) (data : ByteArray) (extensions : Array (String × Option String) := #[]) : Async Unit := do
  if data.isEmpty then
    return
  let chunk := { data := data, extensions := extensions }
  let res : AsyncTask _ ← send' stream chunk
  await res
/--
Writes a complete chunk with extensions to the stream. Unlike `write`, this does not
block: it throws when the chunk cannot be delivered or buffered immediately.
-/
def writeChunk (stream : ByteStream) (chunk : Chunk) : Async Unit := do
  if ¬(← trySend stream chunk) then
    throw (IO.userError "trying to write to an already closed stream")
/--
Gets the known size of the stream if available. Returns `none` if the size is not known.
-/
@[always_inline, inline]
def getKnownSize (stream : ByteStream) : Async (Option Body.Length) := do
  stream.state.atomically do
    return (← get).knownSize
/--
Sets the known size of the stream. Use this when the total expected size is known ahead of time.
-/
@[always_inline, inline]
def setKnownSize (stream : ByteStream) (size : Option Body.Length) : Async Unit := do
  stream.state.atomically do
    modify ({ · with knownSize := size })
/--
Closes the stream, preventing further writes and causing pending/future
recv operations to return `none` when no data is available.
-/
def close (stream : ByteStream) : Async Unit := do
  stream.state.atomically do
    let st ← get
    if st.closed then return ()
    -- Wake every blocked consumer with `none` so they observe the close.
    for consumer in st.consumers.toArray do
      discard <| consumer.resolve none
    -- NOTE(review): blocked producers are not resolved here; their `send'` relies on the
    -- dropped promise resolving `result?` to `none` — confirm this is intentional.
    set { st with consumers := ∅, closed := true }
/--
Checks if the stream is closed.
-/
@[always_inline, inline]
def isClosed (stream : ByteStream) : Async Bool := do
  stream.state.atomically do
    return (← get).closed
@[inline]
private def recvReady' [Monad m] [MonadLiftT (ST IO.RealWorld) m] :
    AtomicT State m Bool := do
  -- A receive can complete without blocking when a chunk is buffered, or when the
  -- stream is closed (in which case it resolves to `none`).
  let st ← get
  return !st.values.isEmpty || st.closed
open Internal.IO.Async in
/--
Creates a `Selector` that resolves once the `ByteStream` has data available and provides that data.
-/
def recvSelector (stream : ByteStream) : Selector (Option Chunk) where
  tryFn := do
    stream.state.atomically do
      if ← recvReady' then
        let val ← tryRecv'
        return some val
      else
        return none
  registerFn waiter := do
    stream.state.atomically do
      if ← recvReady' then
        -- Data is already available: race to claim the waiter and hand the chunk over.
        let lose := return ()
        let win promise := do
          promise.resolve (.ok (← tryRecv'))
        waiter.race lose win
      else
        modify fun st => { st with consumers := st.consumers.enqueue (.select waiter) }
  unregisterFn := do
    stream.state.atomically do
      let st ← get
      -- Drop select-consumers whose race already finished; plain consumers are kept.
      let consumers ← st.consumers.filterM
        fun
          | .normal .. => return true
          | .select waiter => return !(← waiter.checkFinished)
      set { st with consumers }
/--
Iterate over the body content in chunks, processing each chunk with the given step function.
Iteration ends when the stream is closed and drained, or when `step` returns `.done`.
-/
@[inline]
protected partial def forIn
    {β : Type} (stream : ByteStream) (acc : β)
    (step : Chunk → β → Async (ForInStep β)) : Async β := do
  let rec @[specialize] loop (stream : ByteStream) (acc : β) : Async β := do
    if let some chunk ← stream.recv then
      match ← step chunk acc with
      | .done res => return res
      | .yield res => loop stream res
    else
      return acc
  loop stream acc
/--
Iterate over the body content in chunks, processing each chunk with the given step function.
Also stops (returning the accumulator) when the surrounding `ContextAsync` context is done.
-/
@[inline]
protected partial def forIn'
    {β : Type} (stream : ByteStream) (acc : β)
    (step : Chunk → β → ContextAsync (ForInStep β)) : ContextAsync β := do
  let rec @[specialize] loop (stream : ByteStream) (acc : β) : ContextAsync β := do
    -- Race the next chunk against cancellation of the context.
    let data ← Selectable.one #[
      .case (stream.recvSelector) pure,
      .case (← ContextAsync.doneSelector) (fun _ => pure none),
    ]
    if let some chunk := data then
      match ← step chunk acc with
      | .done res => return res
      | .yield res => loop stream res
    else
      return acc
  loop stream acc
/-- `for chunk in stream` support in the plain `Async` monad. -/
instance : ForIn Async ByteStream Chunk where
  forIn := Std.Http.Body.ByteStream.forIn

/-- `for chunk in stream` support in `ContextAsync`; stops early when the context is done. -/
instance : ForIn ContextAsync ByteStream Chunk where
  forIn := Std.Http.Body.ByteStream.forIn'

/-- Reading from a `ByteStream` yields `Option Chunk`; `none` signals a closed, drained stream. -/
instance : IO.AsyncRead ByteStream (Option Chunk) where
  read stream := stream.recv

/-- Writing raw bytes sends them as a single chunk without extensions (see `write`). -/
instance : IO.AsyncWrite ByteStream ByteArray where
  write stream data := do discard <| stream.write data
end ByteStream
end Std.Http.Body

View File

@@ -0,0 +1,49 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Init.Data.Repr
public section
/-!
# Length
This module defines the `Length` type, that represents the Content-Length or Transfer-Encoding
of an HTTP request or response.
-/
namespace Std.Http.Body
set_option linter.all true
/--
Size of the body of a response or request.
-/
inductive Length
/--
Indicates that the HTTP message body uses **chunked transfer encoding**.
-/
| chunked
/--
Indicates that the HTTP message body has a **fixed, known length**, as specified by the
`Content-Length` header.
-/
| fixed (n : Nat)
deriving Repr, BEq
namespace Length
/--
Checks if the `Length` is chunked.
-/
def isChunked : Length → Bool
  | .chunked => true
  | _ => false
end Std.Http.Body.Length

View File

@@ -0,0 +1,73 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Init.Data.String
public import Std.Internal.Http.Internal
public section
/-!
# Chunk
This module defines the `Chunk` type, which represents a chunk of data from a request or response.
-/
namespace Std.Http
open Internal
set_option linter.all true
/--
Represents a chunk of data with optional extensions (key-value pairs).
-/
structure Chunk where
/--
The byte data contained in this chunk.
-/
data : ByteArray
/--
Optional metadata associated with this chunk as key-value pairs. Keys are strings, values are
optional strings.
-/
extensions : Array (String × Option String) := #[]
deriving Inhabited
namespace Chunk
/--
Creates a simple chunk without extensions.
-/
def ofByteArray (data : ByteArray) : Chunk :=
  { data }

/--
Adds an extension to a chunk.
-/
def withExtension (chunk : Chunk) (key : String) (value : String) : Chunk :=
  let exts := chunk.extensions.push (key, some value)
  { chunk with extensions := exts }
/--
Returns the total size of the chunk including data and formatted extensions. Each extension
is counted as `;name` plus `=value` when a value is present, and 2 bytes (`\r\n`) are added
only when at least one extension exists.
NOTE(review): the hex size prefix of the wire format is not counted here, and the 2-byte
terminator is skipped for extension-free chunks — confirm this matches the encoder below.
-/
def size (chunk : Chunk) : Nat :=
  let extensionsSize := chunk.extensions.foldl (fun acc (name, value) => acc + name.length + (value.map (fun v => v.length + 1) |>.getD 0) + 1) 0
  chunk.data.size + extensionsSize + (if extensionsSize > 0 then 2 else 0)
/--
Encodes a chunk in HTTP/1.1 chunked transfer framing: hexadecimal payload size,
`;name[=value]` extensions, CRLF, the payload bytes, and a trailing CRLF.
-/
instance : Encode .v11 Chunk where
  encode buffer chunk :=
    -- Size covers the payload only, rendered base-16 as ASCII bytes.
    let chunkLen := chunk.data.size
    let exts := chunk.extensions.foldl (fun acc (name, value) => acc ++ ";" ++ name ++ (value.map (fun x => "=" ++ x) |>.getD "")) ""
    let size := Nat.toDigits 16 chunkLen |>.toArray |>.map Char.toUInt8 |> ByteArray.mk
    buffer.append #[size, exts.toUTF8, "\r\n".toUTF8, chunk.data, "\r\n".toUTF8]
end Chunk

View File

@@ -0,0 +1,10 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Data.Header.Name
public import Std.Internal.Http.Data.URI

View File

@@ -0,0 +1,283 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Init.Data.String
public import Std.Internal.Http.Internal
public section
/-!
# Header Names and Values
This module defines the `Name` and `Value` types, which represent validated
HTTP header names and values that conform to HTTP standards.
-/
namespace Std.Http.Header
open Internal.String
set_option linter.all true
/--
Checks if a character is valid for use in an HTTP header value: visible ASCII
(0x21–0x7E), horizontal tab, or space.
-/
@[expose]
def isValidHeaderChar (c : Char) : Bool :=
  (0x21 ≤ c.val ∧ c.val ≤ 0x7E) ∨ c.val = 0x09 ∨ c.val = 0x20
/--
Proposition that asserts all characters in a string are valid for HTTP header values.
-/
@[expose]
abbrev isValidHeaderValue (s : String) : Prop :=
s.toList.all isValidHeaderChar
/--
A validated HTTP header value that ensures all characters conform to HTTP standards.
-/
structure Value where
/--
The string data
-/
value : String
/--
The proof that it's a valid header value
-/
validHeaderValue : isValidHeaderValue value
deriving BEq, DecidableEq, Repr
namespace Value
/-- Hashing delegates to the underlying string. -/
instance : Hashable Value where
  hash v := Hashable.hash v.value
/-- The empty string is a valid header value. -/
instance : Inhabited Value := ⟨"", by native_decide⟩
/--
Creates a new `Value` from a string with an optional proof of validity.
If no proof is provided, it attempts to prove validity automatically.
-/
@[expose]
def new (s : String) (h : s.toList.all isValidHeaderChar := by decide) : Value :=
  ⟨s, h⟩
/--
Attempts to create a `Value` from a `String`, returning `none` if the string
contains invalid characters for HTTP header values.
-/
@[expose]
def ofString? (s : String) : Option Value :=
  if h : s.toList.all isValidHeaderChar then
    some ⟨s, h⟩
  else
    none
/--
Creates a `Value` from a string, panicking with an error message if the
string contains invalid characters for HTTP header values.
-/
@[expose]
def ofString! (s : String) : Value :=
  if h : s.toList.all isValidHeaderChar then
    ⟨s, h⟩
  else
    panic! s!"invalid header value: {s.quote}"
/--
Performs a case-insensitive comparison between a `Value` and a `String`.
Returns `true` if they match.
-/
@[expose]
def is (s : Value) (h : String) : Bool :=
  String.toLower s.value == String.toLower h

/-- A `Value` displays as its raw string. -/
instance : ToString Value where
  toString := Value.value
end Value
/--
Checks if a character is valid for use in an HTTP header name: visible ASCII
(0x21–0x7E), excluding a fixed set of delimiter characters
(`"`, `(`, `)`, `,`, `;`, `[`, `]`, `{`, `}`).
-/
@[expose]
def isValidHeaderNameChar (c : Char) : Bool :=
  let v := c.val
  0x21 ≤ v && v ≤ 0x7E
    && v != 0x22 && v != 0x28 && v != 0x29 && v != 0x2C && v != 0x3B
    && v != 0x5B && v != 0x5D && v != 0x7B && v != 0x7D
/--
Proposition that asserts all characters in a string are valid for HTTP header names,
and that the name is non-empty.
-/
@[expose]
abbrev isValidHeaderName (s : String) : Prop :=
  s.toList.all isValidHeaderNameChar ∧ !s.toList.isEmpty
/--
Proposition that a header name is in the internal normal form, meaning it has been
normalized by lowercasing.
-/
@[expose]
abbrev isNormalForm (s : String) : Prop :=
s = s.toLower
/--
A validated HTTP header name that ensures all characters conform to HTTP standards.
Header names are case-insensitive according to HTTP specifications.
-/
structure Name where
/--
The lowercased normalized header name string.
-/
value : String
/--
The proof that it's a valid header name
-/
validHeaderName : isValidHeaderName value
/--
The proof that we stored the header name in normal form
-/
normalForm : isNormalForm value
deriving Repr, DecidableEq, BEq
namespace Name
/--
Hash is based on the stored (already lowercased) value, so it is case-insensitive.
-/
instance : Hashable Name where
  hash x := Hashable.hash x.value

/--
Equality compares the normalized values and is therefore case-insensitive.
-/
instance : BEq Name where
  beq x y := x.value == y.value

/-- `"a"` is a valid, normalized header name. -/
instance : Inhabited Name where default := ⟨"a", ⟨by decide, by decide⟩, by native_decide⟩
/--
Creates a new `Name` from a string with an optional proof of validity.
If no proof is provided, it attempts to prove validity automatically.
-/
@[expose]
def new (s : String) (h : isValidHeaderName s := by decide) (h₁ : isNormalForm s := by native_decide) : Name :=
  ⟨s, h, h₁⟩
/--
Attempts to create a `Name` from a `String`, returning `none` if the string
contains invalid characters for HTTP header names or is empty. The input is
lowercased first so the stored value is in normal form.
-/
@[expose]
def ofString? (s : String) : Option Name :=
  let val := s.toLower
  if h : isValidHeaderName val ∧ isNormalForm val then
    some ⟨val, h.left, h.right⟩
  else
    none
/--
Creates a `Name` from a string, panicking with an error message if the
string contains invalid characters for HTTP header names or is empty.
The input is lowercased first so the stored value is in normal form.
-/
@[expose]
def ofString! (s : String) : Name :=
  let val := s.toLower
  if h : isValidHeaderName val ∧ isNormalForm val then
    ⟨val, h.left, h.right⟩
  else
    panic! s!"invalid header name: {s.quote}"
/--
Converts the header name to canonical HTTP title case (e.g., "Content-Type"):
each `-`-separated segment is capitalized and the segments are rejoined.
-/
@[inline]
def toCanonical (name : Name) : String :=
  String.intercalate "-" (name.value.splitOn "-" |>.map (·.capitalize))
/--
Performs a case-insensitive comparison between a `Name` and a `String`.
Returns `true` if they match. The stored value is already lowercase,
so only the argument needs lowering.
-/
@[expose]
def is (name : Name) (s : String) : Bool :=
  String.toLower s == name.value

/-- A `Name` displays in canonical title case (e.g. "Content-Type"). -/
instance : ToString Name where
  toString := Name.toCanonical
/--
Standard Content-Type header name
-/
def contentType : Header.Name := .new "content-type"
/--
Standard Content-Length header name
-/
def contentLength : Header.Name := .new "content-length"
/--
Standard Host header name
-/
def host : Header.Name := .new "host"
/--
Standard Authorization header name
-/
def authorization : Header.Name := .new "authorization"
/--
Standard User-Agent header name
-/
def userAgent : Header.Name := .new "user-agent"
/--
Standard Accept header name
-/
def accept : Header.Name := .new "accept"
/--
Standard Connection header name
-/
def connection : Header.Name := .new "connection"
/--
Standard Transfer-Encoding header name
-/
def transferEncoding : Header.Name := .new "transfer-encoding"
/--
Standard Server header name
-/
def server : Header.Name := .new "server"
end Name
namespace Value
/--
Standard "close" header value for Connection header
-/
def close : Header.Value := .new "close"
/--
Standard "chunked" header value for Transfer-Encoding header
-/
def chunked : Header.Value := .new "chunked"
end Value
end Std.Http.Header

View File

@@ -0,0 +1,335 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Data.Header.Basic
public import Std.Internal.Http.Internal
public section
/-!
# Headers
This module defines the `Headers` type, which represents an efficient collection of HTTP header
name-value pairs. The implementation is built on top of the generic `MultiMap` structure,
optimized for fast lookups and insertions while providing a convenient interface for managing
HTTP headers in both requests and responses.
-/
namespace Std.Http
open Std Internal
set_option linter.all true
/--
A structure for managing HTTP headers as key-value pairs.
Built on top of `MultiMap` for efficient multi-value header support.
-/
structure Headers where
/--
The underlying multimap that stores headers.
-/
map : MultiMap Header.Name Header.Value
deriving Inhabited, Repr
/-- A header name is a member of `Headers` when it is a key of the underlying map. -/
instance : Membership Header.Name Headers where
  mem s h := h ∈ s.map

/-- Membership is decidable, inherited from the underlying map. -/
instance (name : Header.Name) (h : Headers) : Decidable (name ∈ h) :=
  inferInstanceAs (Decidable (name ∈ h.map))
namespace Headers
/--
Proposition that a string corresponds to a valid header name present in the headers.
An invalid name (one `Header.Name.ofString?` rejects) is never `In` any headers.
-/
abbrev In (s : String) (h : Headers) : Prop :=
  match Header.Name.ofString? s with
  | some name => name ∈ h
  | none => False

instance {s : String} {h : Headers} : Decidable (In s h) := by
  unfold In
  cases headerName : Header.Name.ofString? s
  all_goals exact inferInstance
/--
Retrieves the first `Header.Value` for the given key, given a proof that the key is present.
-/
@[inline]
def get (headers : Headers) (name : Header.Name) (h : name ∈ headers) : Header.Value :=
  headers.map.get name h
/--
Retrieves all `Header.Value` entries for the given key, given a proof that the key is present.
-/
@[inline]
def getAll (headers : Headers) (name : Header.Name) (h : name ∈ headers) : Array Header.Value :=
  headers.map.getAll name h
/--
Retrieves all `Header.Value` entries for the given key.
Returns `none` if the header is absent.
-/
@[inline]
def getAll? (headers : Headers) (name : Header.Name) : Option (Array Header.Value) :=
headers.map.getAll? name
/--
Retrieves the first `Header.Value` for the given key.
Returns `none` if the header is absent.
-/
@[inline]
def get? (headers : Headers) (name : Header.Name) : Option Header.Value :=
headers.map.get? name
/--
Checks if the entry is present in the `Headers`.
-/
@[inline]
def hasEntry (headers : Headers) (name : Header.Name) (value : Header.Value) : Bool :=
headers.map.data.get? name
|>.bind (fun x => x.val.find? (· == value))
|>.isSome
/--
Retrieves the last header value for the given key.
Returns `none` if the header is absent.
-/
@[inline]
def getLast? (headers : Headers) (name : Header.Name) : Option Header.Value :=
headers.map.getLast? name
/--
Like `get?`, but returns a default value if absent.
-/
@[inline]
def getD (headers : Headers) (name : Header.Name) (d : Header.Value) : Header.Value :=
headers.map.getD name d
/--
Like `get?`, but panics if absent.
-/
@[inline]
def get! (headers : Headers) (name : Header.Name) : Header.Value :=
headers.map.get! name
/--
Inserts a new key-value pair into the headers.
-/
@[inline]
def insert (headers : Headers) (key : Header.Name) (value : Header.Value) : Headers :=
{ map := headers.map.insert key value }
/--
Inserts a new key with an array of values.
-/
@[inline]
def insertMany (headers : Headers) (key : Header.Name) (value : Array Header.Value) (p : value.size > 0) : Headers :=
{ map := headers.map.insertMany key value p }
/--
Creates empty headers.
-/
def empty : Headers :=
  { map := ∅ }

/--
Creates headers from a list of key-value pairs.
-/
def ofList (pairs : List (Header.Name × Header.Value)) : Headers :=
  { map := MultiMap.ofList pairs }
/--
Checks if a header with the given name exists.
-/
@[inline]
def contains (headers : Headers) (name : Header.Name) : Bool :=
headers.map.contains name
/--
Removes a header with the given name.
-/
@[inline]
def erase (headers : Headers) (name : Header.Name) : Headers :=
{ map := headers.map.erase name }
/--
Gets the number of headers.
-/
@[inline]
def size (headers : Headers) : Nat :=
headers.map.size
/--
Checks if the headers are empty.
-/
@[inline]
def isEmpty (headers : Headers) : Bool :=
headers.map.isEmpty
/--
Merges two headers, with the second taking precedence for duplicate keys.
-/
def merge (headers1 headers2 : Headers) : Headers :=
  { map := headers1.map ∪ headers2.map }

/--
Converts the headers to a list of key-value pairs (flattened).
Each header with multiple values produces multiple pairs.
-/
def toList (headers : Headers) : List (Header.Name × Header.Value) :=
  headers.map.toList

/--
Converts the headers to an array of key-value pairs (flattened).
Each header with multiple values produces multiple pairs.
-/
def toArray (headers : Headers) : Array (Header.Name × Header.Value) :=
  headers.map.toArray

/--
Folds over all key-value pairs in the headers.
-/
def fold (headers : Headers) (init : α) (f : α → Header.Name → Header.Value → α) : α :=
  headers.map.toArray.foldl (fun acc (k, v) => f acc k v) init
/--
Maps a function over all header values, producing new headers.
-/
def mapValues (headers : Headers) (f : Header.Name → Header.Value → Header.Value) : Headers :=
  let pairs := headers.map.toArray.map (fun (k, v) => (k, f k v))
  { map := pairs.foldl (fun acc (k, v) => acc.insert k v) MultiMap.empty }

/--
Filters and maps over header key-value pairs.
Returns only the pairs for which the function returns `some`.
-/
def filterMap (headers : Headers) (f : Header.Name → Header.Value → Option Header.Value) : Headers :=
  let pairs := headers.map.toArray.filterMap (fun (k, v) =>
    match f k v with
    | some v' => some (k, v')
    | none => none)
  { map := pairs.foldl (fun acc (k, v) => acc.insert k v) MultiMap.empty }

/--
Filters header key-value pairs, keeping only those that satisfy the predicate.
-/
def filter (headers : Headers) (f : Header.Name → Header.Value → Bool) : Headers :=
  headers.filterMap (fun k v => if f k v then some v else none)

/--
Updates the first value of a header if it exists, or inserts if it doesn't.
Replaces all existing values for that header with the new value.
-/
def update (headers : Headers) (name : Header.Name) (f : Option Header.Value → Header.Value) : Headers :=
  let newValue := f (headers.get? name)
  { map := headers.map.erase name |>.insert name newValue }
/-- Renders headers as CRLF-separated `Name: value` lines (canonical name casing). -/
instance : ToString Headers where
  toString headers :=
    let pairs := headers.map.toArray.map (fun (k, v) => s!"{k}: {v.value}")
    String.intercalate "\r\n" pairs.toList

/-- Encoding writes the same textual rendering as `ToString`. -/
instance : Encode .v11 Headers where
  encode buffer headers := buffer.writeString (toString headers)

instance : EmptyCollection Headers :=
  ⟨Headers.empty⟩

instance : Singleton (Header.Name × Header.Value) Headers :=
  ⟨fun ⟨a, b⟩ => (∅ : Headers).insert a b⟩

instance : Insert (Header.Name × Header.Value) Headers :=
  ⟨fun ⟨a, b⟩ s => s.insert a b⟩

instance : Union Headers :=
  ⟨merge⟩
/--
Proposition that all strings in a list are present in the headers.
Used as a precondition for `HasAll.get`/`HasAll.getAll`, so that header lookups
by plain string names are statically justified.
-/
inductive HasAll : (h : Headers) (l : List String) Prop where
/--
The empty list is trivially present in any headers.
-/
| nil : HasAll h []
/--
If a string is in headers and the rest of the list satisfies HasAll,
then the whole list satisfies HasAll.
-/
| cons (member : In s h) (tail : HasAll h rest) : HasAll h (s :: rest)
namespace HasAll
/--
If `name` occurs in `list`, and every element of `list` is present in
`headers`, then `name` itself is present in `headers`.
-/
theorem in_of_hasall (name : String) (inList : name list) (hasAll : HasAll headers list) : In name headers :=
match hasAll with
| .nil => by contradiction
| @HasAll.cons s _ _ member tail =>
if eq : s = name then
eq member
else
in_of_hasall name (List.mem_of_ne_of_mem (Ne.intro (fun x => eq x.symm)) inList) tail
/--
A name that is present in the headers has a valid lowercased header name.
-/
theorem in_implies_valid (h : In name headers) : Header.isValidHeaderName name.toLower :=
if h₀ : Header.isValidHeaderName name.toLower then h₀ else by
unfold In Header.Name.ofString? at h
simp [h₀] at h
/--
Any element of a list covered by `HasAll` has a valid lowercased header name.
Combines `in_of_hasall` with `in_implies_valid`.
-/
theorem mem_implies_valid (name : String) (inList : name list) (hasAll : HasAll headers list) : Header.isValidHeaderName name.toLower :=
in_implies_valid (in_of_hasall name inList hasAll)
/--
Upgrades plain membership into a proof-carrying `Header.Name`: from `In nn headers`
we can extract validity and normal-form proofs for `nn.toLower`, together with a
membership proof for the name constructed from them.
-/
theorem in_implies_mem (h : In nn headers) : p : (Header.isValidHeaderName nn.toLower Header.isNormalForm nn.toLower), Header.Name.mk nn.toLower p.left p.right headers := by
simp [In, Header.Name.ofString?] at h
if h2 : Header.isValidHeaderName nn.toLower Header.isNormalForm nn.toLower then
simp [eq_true h2] at h
exact h2, h
else
simp [eq_false h2] at h
/--
`HasAll` is preserved when dropping the head of the list.
-/
theorem tail (hasAll : HasAll headers (h :: t)) : HasAll headers t := by
cases hasAll with
| cons _ tail => exact tail
/--
The head of a list covered by `HasAll` is present in the headers.
-/
theorem head : (hasAll : HasAll headers (h :: t)) In h headers
| cons member _ => member
/--
Decision procedure for `HasAll`.
-/
def decidable : Decidable (HasAll h l) :=
match l with
| [] => isTrue HasAll.nil
| head :: tail =>
if headMember : In head h then
match @decidable h tail with
| isTrue tailHasAll => Decidable.isTrue (HasAll.cons headMember tailHasAll)
| isFalse notTailHasAll => Decidable.isFalse fun hasAll => notTailHasAll hasAll.tail
else
Decidable.isFalse fun hasAll => headMember hasAll.head
/--
Gets the value of a header by name.
-/
def get (hasAll : HasAll headers l) (name : String) (h : (name l) := by get_elem_tactic) : Header.Value :=
let h2 := in_implies_mem (in_of_hasall name h hasAll)
headers.get (Header.Name.mk name.toLower h2.choose.left h2.choose.right) h2.choose_spec
/--
Gets all values of a header by name.
-/
def getAll (hasAll : HasAll headers l) (name : String) (h : (name l) := by get_elem_tactic) : Array Header.Value :=
let h2 := in_implies_mem (in_of_hasall name h hasAll)
headers.getAll (Header.Name.mk name.toLower h2.choose.left h2.choose.right) h2.choose_spec
-- `HasAll` can be decided via the procedure above.
instance : Decidable (HasAll h l) := decidable
end HasAll
end Headers
end Std.Http

View File

@@ -0,0 +1,120 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Init.Data.Repr
public import Std.Internal.Http.Internal
public section
/-!
# Method
This module provides the `Method` type, that represents HTTP request methods. It defines the
standard set of HTTP methods (e.g. `GET`, `POST`, `PUT`, `DELETE`).
-/
namespace Std.Http
open Internal
set_option linter.all true
/--
A method is a verb that describes the action to be performed.
The nine constructors cover the methods of RFC 9110 plus `PATCH` (RFC 5789).
* Reference: https://httpwg.org/specs/rfc9110.html#methods
-/
inductive Method where
/--
Retrieve a resource.
-/
| get
/--
Retrieve headers for a resource, without the body.
-/
| head
/--
Submit data to be processed (e.g., form submission).
-/
| post
/--
Replace a resource with new data.
-/
| put
/--
Remove a resource.
-/
| delete
/--
Establish a tunnel to a server (often for TLS).
-/
| connect
/--
Describe communication options for a resource.
-/
| options
/--
Perform a message loop-back test.
-/
| trace
/--
Apply partial modifications to a resource.
Source: https://www.rfc-editor.org/rfc/rfc5789.html
-/
| patch
deriving Repr, Inhabited, BEq, DecidableEq
namespace Method
/--
Converts a `String` into a `Method`. Only the nine uppercase method tokens are
recognized; every other string yields `none`.
-/
def ofString? (s : String) : Option Method :=
  match s with
  | "GET" => some .get
  | "HEAD" => some .head
  | "POST" => some .post
  | "PUT" => some .put
  | "DELETE" => some .delete
  | "CONNECT" => some .connect
  | "OPTIONS" => some .options
  | "TRACE" => some .trace
  | "PATCH" => some .patch
  | _ => none
/--
Converts a `String` into a `Method`, panicking when the string is not a valid
uppercase method token. Prefer `ofString?` for a safe variant.
-/
def ofString! (s : String) : Method :=
  if let some m := ofString? s then
    m
  else
    panic! s!"invalid HTTP method: {s.quote}"
-- Renders the canonical uppercase token for each method; the inverse of `ofString?`.
instance : ToString Method where
toString
| .get => "GET"
| .head => "HEAD"
| .post => "POST"
| .put => "PUT"
| .delete => "DELETE"
| .connect => "CONNECT"
| .options => "OPTIONS"
| .trace => "TRACE"
| .patch => "PATCH"
-- Encodes a method for the HTTP/1.1 wire format by writing its token.
instance : Encode .v11 Method where
encode buffer := buffer.writeString toString
end Std.Http.Method

View File

@@ -0,0 +1,265 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Internal
public import Std.Internal.Http.Data.Body
public import Std.Internal.Http.Data.Headers
public import Std.Internal.Http.Data.Method
public import Std.Internal.Http.Data.Version
public import Std.Internal.Http.Data.URI
public section
/-!
# Request
This module provides the `Request` type, which represents an HTTP request. It also defines ways
to build a `Request` using functions that make it easier.
-/
namespace Std.Http
set_option linter.all true
open Lean
/--
The main parts of a request containing the HTTP method, version, URI, and headers.
-/
structure Request.Head where
/--
The HTTP method (GET, POST, PUT, DELETE, etc.) for the request. Defaults to `GET`.
-/
method : Method := .get
/--
The HTTP protocol version (HTTP/1.0, HTTP/1.1, HTTP/2.0, etc.). Defaults to HTTP/1.1.
-/
version : Version := .v11
/--
The request target/URI indicating the resource being requested. Defaults to the
asterisk form.
-/
uri : RequestTarget := .asteriskForm
/--
Collection of HTTP headers for the request (Content-Type, Authorization, etc.).
Defaults to the empty collection.
-/
headers : Headers := .empty
deriving Inhabited, Repr
/--
HTTP request structure parameterized by the body type `t`.
-/
structure Request (t : Type) where
/--
The request headers and metadata (method, version, target, headers).
-/
head : Request.Head
/--
The request body content of type `t`.
-/
body : t
deriving Inhabited
/--
Builds a HTTP Request.
-/
structure Request.Builder where
/--
The head of the request accumulated so far.
-/
head : Head := {}
namespace Request
-- Renders the request head: the request line (`METHOD SP target SP version`)
-- followed by the header lines and the terminating blank line.
instance : ToString Head where
toString req :=
toString req.method ++ " " ++
toString req.uri ++ " " ++
toString req.version ++
"\r\n" ++
toString req.headers ++ "\r\n\r\n"
/--
Creates a new HTTP Request builder with default head (method: GET, version: HTTP/1.1, asterisk URI,
empty headers)
-/
def new : Builder := { }
namespace Builder
/--
Creates a new HTTP Request builder with default head (method: GET, version: HTTP/1.1, asterisk URI,
empty headers)
-/
def empty : Builder := { }
/--
Sets the HTTP method for the request being built.
-/
def method (builder : Builder) (method : Method) : Builder :=
{ builder with head := { builder.head with method := method } }
/--
Sets the HTTP version for the request being built.
-/
def version (builder : Builder) (version : Version) : Builder :=
{ builder with head := { builder.head with version := version } }
/--
Sets the request target/URI for the request being built.
-/
def uri (builder : Builder) (uri : RequestTarget) : Builder :=
{ builder with head := { builder.head with uri := uri } }
/--
Sets the request target/URI for the request being built from a string.
Panics (via `RequestTarget.parse!`) if the string is not a valid request target.
-/
@[inline]
def uri! (builder : Builder) (uri : String) : Builder :=
let uri := RequestTarget.parse! uri
{ builder with head := { builder.head with uri } }
/--
Adds a single header to the request being built.
-/
def header (builder : Builder) (key : Header.Name) (value : Header.Value) : Builder :=
{ builder with head := { builder.head with headers := builder.head.headers.insert key value } }
/--
Adds a single header to the request being built, panics if the header name or value is invalid.
-/
def header! (builder : Builder) (key : String) (value : String) : Builder :=
let key := Header.Name.ofString! key
let value := Header.Value.ofString! value
{ builder with head := { builder.head with headers := builder.head.headers.insert key value } }
/--
Adds a header to the request being built only if the `Option Header.Value` is `some`.
-/
def headerOpt (builder : Builder) (key : Header.Name) (value : Option Header.Value) : Builder :=
match value with
| some v => builder.header key v
| none => builder
/--
Builds and returns the final HTTP Request with the specified body.
-/
def body (builder : Builder) (body : t) : Request t :=
{ head := builder.head, body := body }
/--
Builds and returns the final HTTP Request with an empty body.
-/
def build (builder : Builder) : Request Body :=
{ head := builder.head, body := .empty }
/--
Builds and returns the final HTTP Request with the specified body as binary data
(sets `content-type: application/octet-stream`).
-/
def binary (builder : Builder) (bytes : ByteArray) : Request Body :=
builder
|>.header (.new "content-type") (.new "application/octet-stream")
|>.body (Body.bytes bytes)
/--
Builds and returns the final HTTP Request with the specified body as plain text
(sets `content-type: text/plain; charset=utf-8`).
-/
def text (builder : Builder) (body : String) : Request Body :=
builder
|>.header (.new "content-type") (.new "text/plain; charset=utf-8")
|>.body (body.toUTF8 |> Body.bytes)
/--
Builds and returns the final HTTP Request with the specified body as HTML
(sets `content-type: text/html; charset=utf-8`).
-/
def html (builder : Builder) (body : String) : Request Body :=
builder
|>.header (.new "content-type") (.new "text/html; charset=utf-8")
|>.body (body.toUTF8 |> Body.bytes)
end Builder
/--
Creates a new HTTP GET Request builder with the specified URI.
-/
def get (uri : RequestTarget) : Builder :=
new
|>.method .get
|>.uri uri
/--
Creates a new HTTP POST Request builder with the specified URI.
-/
def post (uri : RequestTarget) : Builder :=
new
|>.method .post
|>.uri uri
/--
Creates a new HTTP PUT Request builder with the specified URI.
-/
def put (uri : RequestTarget) : Builder :=
new
|>.method .put
|>.uri uri
/--
Creates a new HTTP DELETE Request builder with the specified URI.
-/
def delete (uri : RequestTarget) : Builder :=
new
|>.method .delete
|>.uri uri
/--
Creates a new HTTP PATCH Request builder with the specified URI.
-/
def patch (uri : RequestTarget) : Builder :=
new
|>.method .patch
|>.uri uri
/--
Creates a new HTTP HEAD Request builder with the specified URI.
Named `head'` to avoid conflict with the `head` field accessor.
-/
def head' (uri : RequestTarget) : Builder :=
new
|>.method .head
|>.uri uri
/--
Creates a new HTTP OPTIONS Request builder with the specified URI.
Use `Request.options (RequestTarget.asteriskForm)` for server-wide OPTIONS.
-/
def options (uri : RequestTarget) : Builder :=
new
|>.method .options
|>.uri uri
/--
Creates a new HTTP CONNECT Request builder with the specified URI.
Typically used with `RequestTarget.authorityForm` for tunneling.
-/
def connect (uri : RequestTarget) : Builder :=
new
|>.method .connect
|>.uri uri
/--
Creates a new HTTP TRACE Request builder with the specified URI.
-/
def trace (uri : RequestTarget) : Builder :=
new
|>.method .trace
|>.uri uri
end Std.Http.Request

View File

@@ -0,0 +1,261 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Async
public import Std.Internal.Http.Data.Body
public import Std.Internal.Http.Data.Status
public import Std.Internal.Http.Data.Headers
public import Std.Internal.Http.Data.Version
public section
/-!
# Response
This module provides the `Response` type, which represents an HTTP response. It also defines
builder functions and convenience methods for constructing responses with various content types.
-/
namespace Std.Http
set_option linter.all true
open Std.Internal.IO.Async
open Internal Lean
/--
The main parts of a response.
-/
structure Response.Head where
/--
The HTTP status code and reason phrase, indicating the result of the request.
For example, `.ok` corresponds to `200 OK`. Defaults to `.ok`.
-/
status : Status := .ok
/--
The HTTP protocol version used in the response, e.g. `HTTP/1.1`. Defaults to HTTP/1.1.
-/
version : Version := .v11
/--
The set of response headers, providing metadata such as `Content-Type`,
`Content-Length`, and caching directives. Defaults to the empty collection.
-/
headers : Headers := .empty
deriving Inhabited, Repr
/--
HTTP response structure parameterized by the body type `t`.
-/
structure Response (t : Type) where
/--
The information of the status-line of the response.
-/
head : Response.Head := {}
/--
The content of the response.
-/
body : t
deriving Inhabited
/--
Builds a HTTP Response.
-/
structure Response.Builder where
/--
The information of the status-line of the response accumulated so far.
-/
head : Head := {}
namespace Response
-- Renders the status line (`version SP code SP reason`) followed by the
-- header lines and the terminating blank line.
instance : ToString Head where
toString r :=
toString r.version ++ " " ++
toString r.status.toCode ++ " " ++
toString r.status ++ "\r\n" ++
toString r.headers ++ "\r\n\r\n"
/--
Creates a new HTTP Response builder with default head (status: 200 OK, version: HTTP/1.1, empty headers)
-/
def new : Builder := { }
namespace Builder
/--
Creates a new HTTP Response builder with default head (status: 200 OK, version: HTTP/1.1, empty headers)
-/
def empty : Builder := { }
/--
Sets the HTTP status code for the response being built.
-/
def status (builder : Builder) (status : Status) : Builder :=
{ builder with head := { builder.head with status := status } }
/--
Sets the headers for the response being built, replacing any previously set headers.
-/
def headers (builder : Builder) (headers : Headers) : Builder :=
{ builder with head := { builder.head with headers } }
/--
Adds a single header to the response being built.
-/
def header (builder : Builder) (key : Header.Name) (value : Header.Value) : Builder :=
{ builder with head := { builder.head with headers := builder.head.headers.insert key value } }
/--
Adds a single header to the response being built, panics if the header name or value is invalid.
-/
def header! (builder : Builder) (key : String) (value : String) : Builder :=
let key := Header.Name.ofString! key
let value := Header.Value.ofString! value
{ builder with head := { builder.head with headers := builder.head.headers.insert key value } }
/--
Builds and returns the final HTTP Response with the specified body.
-/
def body (builder : Builder) (body : t) : Response t :=
{ head := builder.head, body := body }
/--
Builds and returns the final HTTP Response with a stream builder.
The producer `body` is started in the background (via `background`) and is
handed the response's byte stream to write into.
-/
def stream (builder : Builder) (body : Body.ByteStream ContextAsync Unit) : ContextAsync (Response Body) := do
let stream Body.ByteStream.empty
background (body stream)
return { head := builder.head, body := stream }
/--
Builds and returns the final HTTP Response with an empty (`{}`) body.
-/
def build [EmptyCollection t] (builder : Builder) : Response t :=
{ head := builder.head, body := {} }
/--
Builds and returns the final HTTP Response with the specified body as binary data
(sets `content-type: application/octet-stream`).
-/
def binary (builder : Builder) (bytes : ByteArray) : Response Body :=
builder
|>.header (.new "content-type") (.new "application/octet-stream")
|>.body (Body.bytes bytes)
/--
Builds and returns the final HTTP Response with the specified body as plain text
(sets `content-type: text/plain; charset=utf-8`).
-/
def text (builder : Builder) (body : String) : Response Body :=
builder
|>.header (.new "content-type") (.new "text/plain; charset=utf-8")
|>.body (body.toUTF8 |> Body.bytes)
/--
Builds and returns the final HTTP Response with the specified body as HTML
(sets `content-type: text/html; charset=utf-8`).
-/
def html (builder : Builder) (body : String) : Response Body :=
builder
|>.header (.new "content-type") (.new "text/html; charset=utf-8")
|>.body (body.toUTF8 |> Body.bytes)
end Builder
-- Convenience constructors: each returns a fresh builder pre-populated with
-- the corresponding status code.
/--
Creates a new HTTP Response builder with the 200 status code.
-/
def ok : Builder :=
.empty |>.status .ok
/--
Creates a new HTTP Response builder with the provided status.
-/
def withStatus (status : Status) : Builder :=
.empty |>.status status
/--
Creates a new HTTP Response builder with the 404 status code.
-/
def notFound : Builder :=
.empty |>.status .notFound
/--
Creates a new HTTP Response builder with the 500 status code.
-/
def internalServerError : Builder :=
.empty |>.status .internalServerError
/--
Creates a new HTTP Response builder with the 400 status code.
-/
def badRequest : Builder :=
.empty |>.status .badRequest
/--
Creates a new HTTP Response builder with the 201 status code.
-/
def created : Builder :=
.empty |>.status .created
/--
Creates a new HTTP Response builder with the 202 status code.
-/
def accepted : Builder :=
.empty |>.status .accepted
/--
Creates a new HTTP Response builder with the 401 status code.
-/
def unauthorized : Builder :=
.empty |>.status .unauthorized
/--
Creates a new HTTP Response builder with the 403 status code.
-/
def forbidden : Builder :=
.empty |>.status .forbidden
/--
Creates a new HTTP Response builder with the 409 status code.
-/
def conflict : Builder :=
.empty |>.status .conflict
/--
Creates a new HTTP Response builder with the 503 status code.
-/
def serviceUnavailable : Builder :=
.empty |>.status .serviceUnavailable
/--
Creates a redirect response with a configurable status code.
Use `permanent := true` for 301 Moved Permanently, `permanent := false` for 302 Found.
-/
def redirectWith (location : String) (permanent : Bool) : Builder :=
  Builder.empty
  |>.status (if permanent then .movedPermanently else .found)
  |>.header! "Location" location
/--
Creates a redirect response with the 302 Found status code (temporary redirect).
-/
def redirect (location : String) : Builder :=
  redirectWith location (permanent := false)
/--
Creates a redirect response with the 301 Moved Permanently status code (permanent redirect).
-/
def redirectPermanent (location : String) : Builder :=
  redirectWith location (permanent := true)
end Response

View File

@@ -0,0 +1,627 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Internal
public section
/-!
# Status
This module defines the `Status` type, which is a representation of HTTP status codes. Status codes are three-digit
integer codes that describe the result of an HTTP request. In this implementation we do not treat status
code as extensible.
* Reference: https://httpwg.org/specs/rfc9110.html#status.codes
-/
namespace Std.Http
set_option linter.all true
open Internal
/--
HTTP Status codes. Status codes are three-digit integer codes that describe the result of an
HTTP request. In this implementation we do not treat status code as extensible.
* Reference: https://httpwg.org/specs/rfc9110.html#status.codes
-/
inductive Status where
/--
100 Continue
-/
| «continue»
/--
101 Switching Protocols
-/
| switchingProtocols
/--
102 Processing
-/
| processing
/--
103 Early Hints
-/
| earlyHints
/--
200 OK
-/
| ok
/--
201 Created
-/
| created
/--
202 Accepted
-/
| accepted
/--
203 Non-Authoritative Information
-/
| nonAuthoritativeInformation
/--
204 No Content
-/
| noContent
/--
205 Reset Content
-/
| resetContent
/--
206 Partial Content
-/
| partialContent
/--
207 Multi-Status
-/
| multiStatus
/--
208 Already Reported
-/
| alreadyReported
/--
226 IM Used
-/
| imUsed
/--
300 Multiple Choices
-/
| multipleChoices
/--
301 Moved Permanently
-/
| movedPermanently
/--
302 Found
-/
| found
/--
303 See Other
-/
| seeOther
/--
304 Not Modified
-/
| notModified
/--
305 Use Proxy
-/
| useProxy
/--
306 Unused
-/
| unused
/--
307 Temporary Redirect
-/
| temporaryRedirect
/--
308 Permanent Redirect
-/
| permanentRedirect
/--
400 Bad Request
-/
| badRequest
/--
401 Unauthorized
-/
| unauthorized
/--
402 Payment Required
-/
| paymentRequired
/--
403 Forbidden
-/
| forbidden
/--
404 Not Found
-/
| notFound
/--
405 Method Not Allowed
-/
| methodNotAllowed
/--
406 Not Acceptable
-/
| notAcceptable
/--
407 Proxy Authentication Required
-/
| proxyAuthenticationRequired
/--
408 Request Timeout
-/
| requestTimeout
/--
409 Conflict
-/
| conflict
/--
410 Gone
-/
| gone
/--
411 Length Required
-/
| lengthRequired
/--
412 Precondition Failed
-/
| preconditionFailed
/--
413 Payload Too Large
-/
| payloadTooLarge
/--
414 URI Too Long
-/
| uriTooLong
/--
415 Unsupported Media Type
-/
| unsupportedMediaType
/--
416 Range Not Satisfiable
-/
| rangeNotSatisfiable
/--
417 Expectation Failed
-/
| expectationFailed
/--
418 I'm a teapot
-/
| imATeapot
/--
421 Misdirected Request
-/
| misdirectedRequest
/--
422 Unprocessable Entity
-/
| unprocessableEntity
/--
423 Locked
-/
| locked
/--
424 Failed Dependency
-/
| failedDependency
/--
425 Too Early
-/
| tooEarly
/--
426 Upgrade Required
-/
| upgradeRequired
/--
428 Precondition Required
-/
| preconditionRequired
/--
429 Too Many Requests
-/
| tooManyRequests
/--
431 Request Header Fields Too Large
-/
| requestHeaderFieldsTooLarge
/--
451 Unavailable For Legal Reasons
-/
| unavailableForLegalReasons
/--
500 Internal Server Error
-/
| internalServerError
/--
501 Not Implemented
-/
| notImplemented
/--
502 Bad Gateway
-/
| badGateway
/--
503 Service Unavailable
-/
| serviceUnavailable
/--
504 Gateway Timeout
-/
| gatewayTimeout
/--
505 HTTP Version Not Supported
-/
| httpVersionNotSupported
/--
506 Variant Also Negotiates
-/
| variantAlsoNegotiates
/--
507 Insufficient Storage
-/
| insufficientStorage
/--
508 Loop Detected
-/
| loopDetected
/--
510 Not Extended
-/
| notExtended
/--
511 Network Authentication Required
-/
| networkAuthenticationRequired
/--
Other. Any status code not covered by the named constructors above.
NOTE(review): `toCode`/`ofCode` only round-trip `other n` when `n` has no named
constructor; e.g. `other 200` normalizes to `ok` under `ofCode`.
-/
| other (number : UInt16)
deriving Repr, Inhabited, BEq
-- Reason phrases matching the codes in `Status.toCode`; `other n` falls back
-- to rendering the bare numeric code.
instance : ToString Status where
toString
| .«continue» => "Continue"
| .switchingProtocols => "Switching Protocols"
| .processing => "Processing"
| .earlyHints => "Early Hints"
| .ok => "OK"
| .created => "Created"
| .accepted => "Accepted"
| .nonAuthoritativeInformation => "Non-Authoritative Information"
| .noContent => "No Content"
| .resetContent => "Reset Content"
| .partialContent => "Partial Content"
| .multiStatus => "Multi-Status"
| .alreadyReported => "Already Reported"
| .imUsed => "IM Used"
| .multipleChoices => "Multiple Choices"
| .movedPermanently => "Moved Permanently"
| .found => "Found"
| .seeOther => "See Other"
| .notModified => "Not Modified"
| .useProxy => "Use Proxy"
| .unused => "Unused"
| .temporaryRedirect => "Temporary Redirect"
| .permanentRedirect => "Permanent Redirect"
| .badRequest => "Bad Request"
| .unauthorized => "Unauthorized"
| .paymentRequired => "Payment Required"
| .forbidden => "Forbidden"
| .notFound => "Not Found"
| .methodNotAllowed => "Method Not Allowed"
| .notAcceptable => "Not Acceptable"
| .proxyAuthenticationRequired => "Proxy Authentication Required"
| .requestTimeout => "Request Timeout"
| .conflict => "Conflict"
| .gone => "Gone"
| .lengthRequired => "Length Required"
| .preconditionFailed => "Precondition Failed"
| .payloadTooLarge => "Payload Too Large"
| .uriTooLong => "URI Too Long"
| .unsupportedMediaType => "Unsupported Media Type"
| .rangeNotSatisfiable => "Range Not Satisfiable"
| .expectationFailed => "Expectation Failed"
| .imATeapot => "I'm a teapot"
| .misdirectedRequest => "Misdirected Request"
| .unprocessableEntity => "Unprocessable Entity"
| .locked => "Locked"
| .failedDependency => "Failed Dependency"
| .tooEarly => "Too Early"
| .upgradeRequired => "Upgrade Required"
| .preconditionRequired => "Precondition Required"
| .tooManyRequests => "Too Many Requests"
| .requestHeaderFieldsTooLarge => "Request Header Fields Too Large"
| .unavailableForLegalReasons => "Unavailable For Legal Reasons"
| .internalServerError => "Internal Server Error"
| .notImplemented => "Not Implemented"
| .badGateway => "Bad Gateway"
| .serviceUnavailable => "Service Unavailable"
| .gatewayTimeout => "Gateway Timeout"
| .httpVersionNotSupported => "HTTP Version Not Supported"
| .variantAlsoNegotiates => "Variant Also Negotiates"
| .insufficientStorage => "Insufficient Storage"
| .loopDetected => "Loop Detected"
| .notExtended => "Not Extended"
| .networkAuthenticationRequired => "Network Authentication Required"
| .other n => toString n
namespace Status
/--
Convert a Status to a numeric code. This is useful for sending the status code in a response.
For `other n`, the stored code `n` is returned unchanged.
-/
def toCode : Status UInt16
| «continue» => 100
| switchingProtocols => 101
| processing => 102
| earlyHints => 103
| ok => 200
| created => 201
| accepted => 202
| nonAuthoritativeInformation => 203
| noContent => 204
| resetContent => 205
| partialContent => 206
| multiStatus => 207
| alreadyReported => 208
| imUsed => 226
| multipleChoices => 300
| movedPermanently => 301
| found => 302
| seeOther => 303
| notModified => 304
| useProxy => 305
| unused => 306
| temporaryRedirect => 307
| permanentRedirect => 308
| badRequest => 400
| unauthorized => 401
| paymentRequired => 402
| forbidden => 403
| notFound => 404
| methodNotAllowed => 405
| notAcceptable => 406
| proxyAuthenticationRequired => 407
| requestTimeout => 408
| conflict => 409
| gone => 410
| lengthRequired => 411
| preconditionFailed => 412
| payloadTooLarge => 413
| uriTooLong => 414
| unsupportedMediaType => 415
| rangeNotSatisfiable => 416
| expectationFailed => 417
| imATeapot => 418
| misdirectedRequest => 421
| unprocessableEntity => 422
| locked => 423
| failedDependency => 424
| tooEarly => 425
| upgradeRequired => 426
| preconditionRequired => 428
| tooManyRequests => 429
| requestHeaderFieldsTooLarge => 431
| unavailableForLegalReasons => 451
| internalServerError => 500
| notImplemented => 501
| badGateway => 502
| serviceUnavailable => 503
| gatewayTimeout => 504
| httpVersionNotSupported => 505
| variantAlsoNegotiates => 506
| insufficientStorage => 507
| loopDetected => 508
| notExtended => 510
| networkAuthenticationRequired => 511
| other n => n
/--
Converts a `UInt16` to `Status`.
Codes without a named constructor are wrapped in `other`.
-/
def ofCode : UInt16 Status
| 100 => .«continue»
| 101 => .switchingProtocols
| 102 => .processing
| 103 => .earlyHints
| 200 => .ok
| 201 => .created
| 202 => .accepted
| 203 => .nonAuthoritativeInformation
| 204 => .noContent
| 205 => .resetContent
| 206 => .partialContent
| 207 => .multiStatus
| 208 => .alreadyReported
| 226 => .imUsed
| 300 => .multipleChoices
| 301 => .movedPermanently
| 302 => .found
| 303 => .seeOther
| 304 => .notModified
| 305 => .useProxy
| 306 => .unused
| 307 => .temporaryRedirect
| 308 => .permanentRedirect
| 400 => .badRequest
| 401 => .unauthorized
| 402 => .paymentRequired
| 403 => .forbidden
| 404 => .notFound
| 405 => .methodNotAllowed
| 406 => .notAcceptable
| 407 => .proxyAuthenticationRequired
| 408 => .requestTimeout
| 409 => .conflict
| 410 => .gone
| 411 => .lengthRequired
| 412 => .preconditionFailed
| 413 => .payloadTooLarge
| 414 => .uriTooLong
| 415 => .unsupportedMediaType
| 416 => .rangeNotSatisfiable
| 417 => .expectationFailed
| 418 => .imATeapot
| 421 => .misdirectedRequest
| 422 => .unprocessableEntity
| 423 => .locked
| 424 => .failedDependency
| 425 => .tooEarly
| 426 => .upgradeRequired
| 428 => .preconditionRequired
| 429 => .tooManyRequests
| 431 => .requestHeaderFieldsTooLarge
| 451 => .unavailableForLegalReasons
| 500 => .internalServerError
| 501 => .notImplemented
| 502 => .badGateway
| 503 => .serviceUnavailable
| 504 => .gatewayTimeout
| 505 => .httpVersionNotSupported
| 506 => .variantAlsoNegotiates
| 507 => .insufficientStorage
| 508 => .loopDetected
| 510 => .notExtended
| 511 => .networkAuthenticationRequired
| n => .other n
/--
Checks if the type of the status code is informational, meaning that the request was received
and the process is continuing. Informational codes are exactly the 1xx range; in particular,
an out-of-range `other` code below 100 is not informational.
* Reference: https://httpwg.org/specs/rfc9110.html#status.codes
-/
@[inline]
def isInformational (c : Status) : Bool :=
  -- Bounded on both sides for consistency with `isSuccess`/`isRedirection`/etc.;
  -- the previous `c.toCode < 200` misclassified `other n` for `n < 100`.
  100 ≤ c.toCode ∧ c.toCode < 200
/--
Checks if the type of the status code is success, meaning that the request was successfully received,
understood, and accepted.
* Reference: https://httpwg.org/specs/rfc9110.html#status.codes
-/
-- 2xx: bounded range check on the numeric code.
@[inline]
def isSuccess (c : Status) : Bool :=
200 c.toCode c.toCode < 300
/--
Checks if the type of the status code is redirection, meaning that further action needs to be taken
to complete the request.
* Reference: https://httpwg.org/specs/rfc9110.html#status.codes
-/
-- 3xx: bounded range check on the numeric code.
@[inline]
def isRedirection (c : Status) : Bool :=
300 c.toCode c.toCode < 400
/--
Checks if the type of the status code is a client error, meaning that the request contains bad syntax
or cannot be fulfilled.
* Reference: https://httpwg.org/specs/rfc9110.html#status.codes
-/
-- 4xx: bounded range check on the numeric code.
@[inline]
def isClientError (c : Status) : Bool :=
400 c.toCode c.toCode < 500
/--
Checks if the type of the status code is a server error, meaning that the server failed to fulfill
an apparently valid request.
* Reference: https://httpwg.org/specs/rfc9110.html#status.codes
-/
-- 5xx: bounded range check on the numeric code.
@[inline]
def isServerError (c : Status) : Bool :=
500 c.toCode c.toCode < 600
/--
Checks if the status code indicates an error (either client error 4xx or server error 5xx).
* Reference: https://httpwg.org/specs/rfc9110.html#status.codes
-/
@[inline]
def isError (c : Status) : Bool :=
c.isClientError c.isServerError
-- Encodes the status for the HTTP/1.1 status line as `code SP reason-phrase`.
instance : Encode .v11 Status where
encode buffer status := buffer
|>.writeString (toString <| toCode status)
|>.writeChar ' '
|>.writeString (toString status)
end Std.Http.Status

View File

@@ -0,0 +1,75 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Data.URI.Basic
public import Std.Internal.Http.Data.URI.Parser
public section
/-!
# URI
This module defines the `URI` and `RequestTarget` types that represent and manipulate components of
URIs as defined by RFC 3986. It provides parsing, rendering, and normalization utilities for working
with URIs and request targets in HTTP messages.
-/
namespace Std.Http.RequestTarget
set_option linter.all true
/--
Attempts to parse a `RequestTarget` from the given string.
The whole string must be consumed (`<* eof`); trailing input makes the parse fail.
-/
@[inline]
def parse? (string : String) : Option RequestTarget :=
(URI.Parser.parseRequestTarget <* Std.Internal.Parsec.eof).run string.toUTF8 |>.toOption
/--
Parses a `RequestTarget` from the given string. Panics if parsing fails. Use `parse?`
if you need a safe option-returning version.
-/
@[inline]
def parse! (string : String) : RequestTarget :=
  match parse? string with
  | some res => res
  -- Include the offending input, consistent with `originForm!` and `Method.ofString!`.
  | none => panic! s!"invalid request target: {string}"
/--
Creates an origin-form request target from a path string.
The path should start with '/' (e.g., "/api/users" or "/search?q=test").
Panics if the string does not parse, or parses to any form other than
origin-form (e.g. absolute-form or authority-form).
-/
@[inline]
def originForm! (path : String) : RequestTarget :=
match parse? path with
| some (.originForm p q f) => .originForm p q f
| _ => panic! s!"invalid origin-form request target: {path}"
end RequestTarget
namespace URI
/--
Attempts to parse a `URI` from the given string.
The whole string must be consumed (`<* eof`); trailing input makes the parse fail.
-/
@[inline]
def parse? (string : String) : Option URI :=
(Parser.parseURI <* Std.Internal.Parsec.eof).run string.toUTF8 |>.toOption
/--
Parses a `URI` from the given string. Panics if parsing fails. Use `parse?` if you need a safe
option-returning version.
-/
@[inline]
def parse! (string : String) : URI :=
  match parse? string with
  | some res => res
  -- Include the offending input, consistent with `RequestTarget.originForm!`.
  | none => panic! s!"invalid URI: {string}"
end URI

View File

@@ -0,0 +1,724 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Net
public import Std.Internal.Http.Data.URI.Encoding
public import Std.Internal.Http.Internal
public section
/-!
# URI Structure
This module defines the complete URI structure following RFC 3986, including schemes, authorities,
paths, queries, fragments, and request targets.
All text components use the encoding types from `Std.Http.URI.Encoding` to ensure proper
percent-encoding is maintained throughout.
-/
namespace Std.Http
set_option linter.all true
open Internal
namespace URI
/--
URI scheme identifier (e.g., "http", "https", "ftp").
-/
abbrev Scheme := { s : String // String.IsLowerCase s }
instance : Inhabited Scheme where
  -- The empty string is trivially lowercase, witnessed by `empty_isLowerCase`.
  -- (Anonymous-constructor brackets restored: the subtype value needs `⟨value, proof⟩`.)
  default := ⟨"", .empty_isLowerCase⟩
/--
User information component containing the username and optional password. Both fields store decoded
(unescaped) values.
-/
structure UserInfo where
/--
The username (decoded).
-/
username : String
/--
The optional password (decoded).
-/
password : Option String
deriving Inhabited, Repr
/--
Host component of a URI, supporting domain names and IP addresses.
-/
inductive Host
/--
A domain name (lowercase-normalized).
-/
| name (name : String) (valid : String.IsLowerCase name)
/--
An IPv4 address.
-/
| ipv4 (ipv4 : Net.IPv4Addr)
/--
An IPv6 address.
-/
| ipv6 (ipv6 : Net.IPv6Addr)
deriving Inhabited
instance : Repr Host where
  reprPrec x prec :=
    -- Mirrors the code emitted by `deriving Repr`: tighter nesting at application precedence.
    -- NOTE(review): the comparison operator was lost in extraction; `≥ 1024` matches the
    -- derived-Repr convention — confirm against the original source.
    let nestPrec := (if prec ≥ 1024 then 1 else 2)
    let name := "Std.Http.URI.Host"
    let repr (ctr : String) a :=
      Repr.addAppParen (Format.nest nestPrec (.text s!"{name}.{ctr}" ++ .line ++ a)).group prec
    match x with
    | Host.name a _ => repr "name" (reprArg a)
    | Host.ipv4 a => repr "ipv4" (toString a)
    | Host.ipv6 a => repr "ipv6" (toString a)
instance : ToString Host where
toString
| .name n _ => n
| .ipv4 addr => toString addr
| .ipv6 addr => s!"[{toString addr}]"
/--
TCP port number.
-/
abbrev Port := UInt16
/--
The authority component of a URI, identifying the network location of the resource.
Reference: https://www.rfc-editor.org/rfc/rfc3986.html#section-3.2
-/
structure Authority where
/--
Optional user information (username and password).
-/
userInfo : Option UserInfo := none
/--
The host identifying the network location.
-/
host : Host
/--
Optional port number for connecting to the host.
-/
port : Option Port := none
deriving Inhabited, Repr
instance : ToString Authority where
  toString auth :=
    -- Renders `userinfo@host:port`, omitting the parts that are absent.
    -- (Anonymous-constructor patterns restored: `UserInfo` is destructured with `⟨_, _⟩`.)
    let userPart := match auth.userInfo with
      | none => ""
      | some ⟨name, some pass⟩ => s!"{toString name}:{toString pass}@"
      | some ⟨name, none⟩ => s!"{toString name}@"
    let hostPart := toString auth.host
    let portPart := match auth.port with
      | none => ""
      | some p => s!":{p}"
    s!"{userPart}{hostPart}{portPart}"
namespace Authority
end Authority
/--
Hierarchical path component of a URI. Each segment is stored as an `EncodedString` to maintain
proper percent-encoding.
-/
structure Path where
/--
The path segments making up the hierarchical structure (each segment is percent-encoded).
-/
segments : Array EncodedString
/--
Whether the path is absolute (begins with '/') or relative.
-/
absolute : Bool
deriving Inhabited, Repr
instance : ToString Path where
  -- Joins the encoded segments with '/', adding a leading slash for absolute paths.
  toString path :=
    let joined := String.intercalate "/" (path.segments.toList.map toString)
    if path.absolute then "/" ++ joined else joined
namespace Path
/--
Returns true if the path has no segments.
-/
def isEmpty (p : Path) : Bool := p.segments.isEmpty
/--
Returns the parent path by removing the last segment. If the path is empty, returns the path unchanged.
-/
def parent (p : Path) : Path :=
if p.segments.isEmpty then p
else { p with segments := p.segments.pop }
/--
Joins two paths. If the second path is absolute, it is returned as-is. Otherwise, the second path's
segments are appended to the first path.
-/
def join (p1 : Path) (p2 : Path) : Path :=
if p2.absolute then p2
else { p1 with segments := p1.segments ++ p2.segments }
/--
Appends a single segment to the path. The segment will be percent-encoded.
-/
def append (p : Path) (segment : String) : Path :=
{ p with segments := p.segments.push (EncodedString.encode segment) }
/--
Appends an already-encoded segment to the path.
-/
def appendEncoded (p : Path) (segment : EncodedString) : Path :=
{ p with segments := p.segments.push segment }
/--
Removes dot segments from the path according to RFC 3986 Section 5.2.4. This handles "."
(current directory) and ".." (parent directory) segments.
-/
def normalize (p : Path) : Path :=
let rec loop (input : List EncodedString) (output : List EncodedString) : List EncodedString :=
match input with
| [] =>
output.reverse
| segStr :: rest =>
if toString segStr == "." then
loop rest output
else if toString segStr == ".." then
match output with
| [] => loop rest []
| _ :: tail => loop rest tail
else
loop rest (segStr :: output)
{ p with segments := (loop p.segments.toList []).toArray }
/--
Returns the path segments as decoded strings.
Segments that cannot be decoded as UTF-8 are returned as their raw encoded form.
-/
def toDecodedSegments (p : Path) : Array String :=
p.segments.map fun seg =>
seg.decode.getD (toString seg)
end Path
/--
Query string represented as an array of key-value pairs. Both keys and values are stored as
`EncodedQueryString` for proper application/x-www-form-urlencoded encoding. Values are optional to
support parameters without values (e.g., "?flag"). Order is preserved based on insertion order.
-/
@[expose]
def Query := Array (EncodedQueryString × Option EncodedQueryString)
deriving Repr, Inhabited
namespace Query
/--
Extracts all unique query parameter names, preserving first-occurrence order.
-/
@[expose]
def names (query : Query) : Array EncodedQueryString :=
  -- Round-trips through `List` because `eraseDups` is a list operation.
  ((query.map Prod.fst).toList.eraseDups).toArray
/--
Extracts all query parameter values.
-/
@[expose]
def values (query : Query) : Array (Option EncodedQueryString) :=
query.map (fun p => p.snd)
/--
Returns the query as an array of (key, value) pairs. This is an identity function since Query is
already an array of pairs.
-/
@[expose]
def toArray (query : Query) : Array (EncodedQueryString × Option EncodedQueryString) :=
query
/--
Formats a query parameter as a string in the format "key" or "key=value". The key and value are
already percent-encoded as `EncodedQueryString`.
-/
def formatQueryParam (key : EncodedQueryString) (value : Option EncodedQueryString) : String :=
  match value with
  | some v => s!"{toString key}={toString v}"
  | none => toString key
/--
Finds the first value of a query parameter by key name. Returns `none` if the key is not found.
The value remains encoded as `EncodedQueryString`.
-/
def find? (query : Query) (key : String) : Option (Option EncodedQueryString) :=
  -- Compare on the raw bytes of the encoded key so equality matches the stored representation.
  let target := (EncodedQueryString.encode key).toByteArray
  (query.find? fun entry => entry.fst.toByteArray = target).map Prod.snd
/--
Finds all values of a query parameter by key name. Returns an empty array if the key is not found.
The values remain encoded as `EncodedQueryString`.
-/
def findAll (query : Query) (key : String) : Array (Option EncodedQueryString) :=
  let target := (EncodedQueryString.encode key).toByteArray
  query.filterMap fun entry =>
    if entry.fst.toByteArray = target then some entry.snd else none
/--
Adds a query parameter to the query string.
-/
def insert (query : Query) (key : String) (value : String) : Query :=
let encodedKey := EncodedQueryString.encode key
let encodedValue := EncodedQueryString.encode value
query.push (encodedKey, some encodedValue)
/--
Adds a query parameter to the query string.
-/
def insertEncoded (query : Query) (key : EncodedQueryString) (value : Option EncodedQueryString) : Query :=
query.push (key, value)
/--
Creates an empty query string.
-/
def empty : Query := #[]
/--
Creates a query string from a list of key-value pairs.
-/
def ofList (pairs : List (EncodedQueryString × Option EncodedQueryString)) : Query :=
pairs.toArray
/--
Checks if a query parameter exists.
-/
def contains (query : Query) (key : String) : Bool :=
  let target := (EncodedQueryString.encode key).toByteArray
  query.any fun entry => entry.fst.toByteArray = target
/--
Removes all occurrences of a query parameter by key name.
-/
def erase (query : Query) (key : String) : Query :=
  let encodedKey := EncodedQueryString.encode key
  -- Keep only entries whose key differs from the target. (The `≠` operator was lost in
  -- extraction; without it the filter would keep matches instead of removing them.)
  query.filter (fun x => x.fst.toByteArray ≠ encodedKey.toByteArray)
/--
Gets the first value of a query parameter by key name, decoded as a string.
Returns `none` if the key is not found or if the value cannot be decoded as UTF-8.
-/
def get (query : Query) (key : String) : Option String :=
match query.find? key with
| none => none
| some none => some "" -- Key exists but has no value
| some (some encoded) => encoded.decode
/--
Gets the first value of a query parameter by key name, decoded as a string.
Returns the default value if the key is not found or if the value cannot be decoded.
-/
def getD (query : Query) (key : String) (default : String) : String :=
query.get key |>.getD default
/--
Sets a query parameter, replacing all existing values for that key.
Both key and value will be automatically percent-encoded.
-/
def set (query : Query) (key : String) (value : String) : Query :=
query.erase key |>.insert key value
/--
Converts the query to a properly encoded query string format.
Example: "key1=value1&key2=value2&flag"
-/
def toRawString (query : Query) : String :=
  String.intercalate "&" (query.toList.map fun (k, v) => formatQueryParam k v)
-- Anonymous-constructor brackets restored: the instance wraps `Query.empty` in `⟨⟩`.
instance : EmptyCollection Query :=
  ⟨Query.empty⟩
-- Anonymous-constructor brackets restored around both the instance value and the pair pattern.
instance : Singleton (String × String) Query :=
  ⟨fun ⟨k, v⟩ => Query.empty.insert k v⟩
-- Anonymous-constructor brackets restored around both the instance value and the pair pattern.
instance : Insert (String × String) Query :=
  ⟨fun ⟨k, v⟩ q => q.insert k v⟩
instance : ToString Query where
toString q :=
if q.isEmpty then "" else
let encodedParams := q.toList.map fun (key, value) =>
Query.formatQueryParam key value
"?" ++ String.intercalate "&" encodedParams
end Query
end URI
/--
Complete URI structure following RFC 3986. All text components use encoded string types to ensure
proper percent-encoding.
Reference: https://www.rfc-editor.org/rfc/rfc3986.html
-/
structure URI where
/--
The URI scheme (e.g., "http", "https", "ftp").
-/
scheme : URI.Scheme
/--
Optional authority component (user info, host, and port).
-/
authority : Option URI.Authority
/--
The hierarchical path component.
-/
path : URI.Path
/--
Optional query string as key-value pairs.
-/
query : URI.Query
/--
Optional fragment identifier (the part after '#'), percent-encoded.
-/
fragment : Option String
deriving Repr, Inhabited
instance : ToString URI where
toString uri :=
let schemePart := uri.scheme
let authorityPart := match uri.authority with
| none => ""
| some auth => s!"//{toString auth}"
let pathPart := toString uri.path
let queryPart := toString uri.query
let fragmentPart := uri.fragment.map (fun f => "#" ++ toString (URI.EncodedString.encode f)) |>.getD ""
s!"{schemePart}:{authorityPart}{pathPart}{queryPart}{fragmentPart}"
namespace URI
/--
Fluent builder for constructing URIs. Takes raw (unencoded) strings and handles encoding
automatically when building the final URI.
-/
structure Builder where
/--
The URI scheme (e.g., "http", "https").
-/
scheme : Option String := none
/--
User information (username and optional password).
-/
userInfo : Option UserInfo := none
/--
The host component.
-/
host : Option Host := none
/--
The port number.
-/
port : Option URI.Port := none
/--
Path segments (will be encoded when building).
-/
pathSegments : Array String := #[]
/--
Query parameters as (key, optional value) pairs (will be encoded when building).
-/
query : Array (String × Option String) := #[]
/--
Fragment identifier (will be encoded when building).
-/
fragment : Option String := none
deriving Inhabited
namespace Builder
/--
Creates an empty URI builder.
-/
def empty : Builder := {}
/--
Sets the URI scheme (e.g., "http", "https").
-/
def setScheme (b : Builder) (scheme : String) : Builder :=
{ b with scheme := some scheme }
/--
Sets the user information with username and optional password.
The strings will be automatically percent-encoded.
-/
def setUserInfo (b : Builder) (username : String) (password : Option String := none) : Builder :=
{ b with userInfo := some {
username := username
password := password
}
}
/--
Sets the host as a domain name.
The domain name will be automatically percent-encoded.
-/
def setHost (b : Builder) (name : String) : Builder :=
{ b with host := some (Host.name name.toLower String.IsLowerCase.lower_isLowerCase) }
/--
Sets the host as an IPv4 address.
-/
def setHostIPv4 (b : Builder) (addr : Net.IPv4Addr) : Builder :=
{ b with host := some (Host.ipv4 addr) }
/--
Sets the host as an IPv6 address.
-/
def setHostIPv6 (b : Builder) (addr : Net.IPv6Addr) : Builder :=
{ b with host := some (Host.ipv6 addr) }
/--
Sets the port number.
-/
def setPort (b : Builder) (port : Port) : Builder :=
{ b with port := some port }
/--
Replaces all path segments. Segments will be automatically percent-encoded when building.
-/
def setPath (b : Builder) (segments : Array String) : Builder :=
{ b with pathSegments := segments }
/--
Appends a single segment to the path. The segment will be automatically percent-encoded when building.
-/
def appendPathSegment (b : Builder) (segment : String) : Builder :=
{ b with pathSegments := b.pathSegments.push segment }
/--
Adds a query parameter with a value. Both key and value will be automatically percent-encoded when
building.
-/
def addQueryParam (b : Builder) (key : String) (value : String) : Builder :=
{ b with query := b.query.push (key, some value) }
/--
Adds a query parameter without a value (flag parameter). The key will be automatically
percent-encoded when building.
-/
def addQueryFlag (b : Builder) (key : String) : Builder :=
{ b with query := b.query.push (key, none) }
/--
Replaces all query parameters. Keys and values will be automatically percent-encoded when building.
-/
def setQuery (b : Builder) (query : Array (String × Option String)) : Builder :=
{ b with query := query }
/--
Sets the fragment identifier. The fragment will be automatically percent-encoded when building.
-/
def setFragment (b : Builder) (fragment : String) : Builder :=
{ b with fragment := some fragment }
/--
Builds a complete URI from the builder state, encoding all components. Defaults to "https" scheme if
none is specified.
-/
def build (b : Builder) : URI :=
  let scheme := b.scheme.getD "https"
  -- An authority is emitted only when a host was set; user info / port alone are not enough.
  let authority :=
    if b.host.isSome then
      some {
        userInfo := b.userInfo
        host := b.host.getD default
        port := b.port
      }
    else none
  -- Builder paths are always rendered as absolute; each raw segment is percent-encoded here.
  let path : Path := {
    segments := b.pathSegments.map EncodedString.encode
    absolute := true
  }
  let query :=
    b.query.map fun (k, v) =>
      (EncodedQueryString.encode k, v.map EncodedQueryString.encode)
  let query := URI.Query.ofList query.toList
  {
    -- Schemes are stored lowercase; `lower_isLowerCase` witnesses the subtype invariant.
    -- (Anonymous-constructor brackets restored around the `Scheme` subtype value.)
    scheme := ⟨scheme.toLower, String.IsLowerCase.lower_isLowerCase⟩
    authority := authority
    path
    query := query
    fragment := b.fragment
  }
end Builder
end URI
namespace URI
/--
Returns a new URI with the scheme replaced. The scheme is lowercased to maintain the
`Scheme` subtype invariant.
-/
def withScheme (uri : URI) (scheme : String) : URI :=
  -- Anonymous-constructor brackets restored around the `Scheme` subtype value.
  { uri with scheme := ⟨scheme.toLower, String.IsLowerCase.lower_isLowerCase⟩ }
/--
Returns a new URI with the authority replaced.
-/
def withAuthority (uri : URI) (authority : Option URI.Authority) : URI :=
{ uri with authority }
/--
Returns a new URI with the path replaced.
-/
def withPath (uri : URI) (path : URI.Path) : URI :=
{ uri with path }
/--
Returns a new URI with the query replaced.
-/
def withQuery (uri : URI) (query : URI.Query) : URI :=
{ uri with query }
/--
Returns a new URI with the fragment replaced.
-/
def withFragment (uri : URI) (fragment : Option String) : URI :=
{ uri with fragment }
/--
Normalizes a URI according to RFC 3986 Section 6 by removing dot segments from the path.
Scheme and authority are already stored in normal (lowercase) form, so only the path changes.
-/
def normalize (uri : URI) : URI :=
  { uri with path := uri.path.normalize }
end URI
/--
HTTP request target forms as defined in RFC 7230 Section 5.3.
Reference: https://www.rfc-editor.org/rfc/rfc7230.html#section-5.3
-/
inductive RequestTarget where
/--
Origin-form request target (most common for HTTP requests). Consists of a path, optional query string,
and optional fragment.
Example: `/path/to/resource?key=value#section`
-/
| originForm (path : URI.Path) (query : Option URI.Query) (fragment : Option String)
/--
Absolute-form request target containing a complete URI. Used when making requests through a proxy.
Example: `http://example.com:8080/path?key=value`
-/
| absoluteForm (uri : URI)
/--
Authority-form request target (used for CONNECT requests).
Example: `example.com:443`
-/
| authorityForm (authority : URI.Authority)
/--
Asterisk-form request target (used with OPTIONS requests).
Example: `*`
-/
| asteriskForm
deriving Inhabited, Repr
namespace RequestTarget
/--
Extracts the path component from a request target, if available.
Returns an empty relative path for targets without a path.
-/
-- Function-arrow restored in the signature (lost in extraction).
def path : RequestTarget → URI.Path
  | .originForm p _ _ => p
  | .absoluteForm u => u.path
  | _ => { segments := #[], absolute := false }
/--
Extracts the query component from a request target, if available.
Returns an empty array for targets without a query.
-/
-- Function-arrow restored in the signature (lost in extraction).
def query : RequestTarget → URI.Query
  | .originForm _ q _ => q.getD URI.Query.empty
  | .absoluteForm u => u.query
  | _ => URI.Query.empty
/--
Extracts the authority component from a request target, if available.
-/
-- Function-arrow restored in the signature (lost in extraction).
def authority? : RequestTarget → Option URI.Authority
  | .authorityForm a => some a
  | .absoluteForm u => u.authority
  | _ => none
/--
Extracts the fragment component from a request target, if available.
-/
-- Function-arrow restored in the signature (lost in extraction).
def fragment? : RequestTarget → Option String
  | .originForm _ _ frag => frag
  | .absoluteForm u => u.fragment
  | _ => none
/--
Extracts the full URI if the request target is in absolute form.
-/
-- Function-arrow restored in the signature (lost in extraction).
def uri? : RequestTarget → Option URI
  | .absoluteForm u => some u
  | _ => none
instance : ToString RequestTarget where
toString
| .originForm path query frag =>
let pathStr := toString path
let queryStr := query.map toString |>.getD ""
let frag := frag.map (fun f => "#" ++ toString (URI.EncodedString.encode f)) |>.getD ""
s!"{pathStr}{queryStr}{frag}"
| .absoluteForm uri => toString uri
| .authorityForm auth => toString auth
| .asteriskForm => "*"
end Std.Http.RequestTarget

View File

@@ -0,0 +1,733 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Init.Data.String
public section
namespace Std.Http.URI
set_option linter.all true
/-!
# URI Encoding
This module provides utilities for percent-encoding URI components according to RFC 3986. It includes
character validation, encoding/decoding functions, and types that maintain encoding invariants through
Lean's dependent type system.
-/
/--
Checks if a byte represents an ASCII character (value < 128).
-/
def isAscii (c : UInt8) : Bool :=
c < 128
/--
Checks if all bytes in a `ByteArray` are ASCII characters.
-/
abbrev isAsciiByteArray (c : ByteArray) : Bool :=
c.data.all isAscii
instance : Decidable (isAsciiByteArray s) :=
inferInstanceAs (Decidable (s.data.all isAscii = true))
/--
Checks if a byte is a hexadecimal digit (0-9, a-f, or A-F). Note: This accepts both lowercase and
uppercase hex digits.
-/
-- Range comparison operators (≥/≤) restored; they were lost in extraction.
def isHexDigit (c : UInt8) : Bool :=
  (c ≥ '0'.toUInt8 && c ≤ '9'.toUInt8) ||
  (c ≥ 'a'.toUInt8 && c ≤ 'f'.toUInt8) ||
  (c ≥ 'A'.toUInt8 && c ≤ 'F'.toUInt8)
/--
Checks if a byte is an alphanumeric digit (0-9, a-z, or A-Z).
-/
-- Range comparison operators (≥/≤) restored; they were lost in extraction.
def isAlphaNum (c : UInt8) : Bool :=
  (c ≥ '0'.toUInt8 && c ≤ '9'.toUInt8) ||
  (c ≥ 'a'.toUInt8 && c ≤ 'z'.toUInt8) ||
  (c ≥ 'A'.toUInt8 && c ≤ 'Z'.toUInt8)
/--
Checks if a byte is an unreserved character according to RFC 3986. Unreserved characters are:
alphanumeric, hyphen, period, underscore, and tilde.
-/
def isUnreserved (c : UInt8) : Bool :=
isAlphaNum c ||
(c = '-'.toUInt8 || c = '.'.toUInt8 || c = '_'.toUInt8 || c = '~'.toUInt8)
/--
Checks if a byte is allowed in a URI path component (RFC 3986 pchar). Includes unreserved characters
plus common sub-delimiters and gen-delimiters used in paths.
-/
def isPathAllowed (c : UInt8) : Bool :=
isUnreserved c
|| c = ':'.toUInt8 || c = '@'.toUInt8 || c = '!'.toUInt8 || c = '$'.toUInt8
|| c = '&'.toUInt8 || c = '\''.toUInt8 || c = '('.toUInt8 || c = ')'.toUInt8
|| c = '*'.toUInt8 || c = '+'.toUInt8 || c = ','.toUInt8 || c = ';'.toUInt8
|| c = '='.toUInt8
/--
Checks if a byte is a valid character in a percent-encoded URI component. Valid characters are
unreserved characters or the percent sign (for escape sequences).
-/
def isEncodedChar (c : UInt8) : Bool :=
isUnreserved c || c = '%'.toUInt8
/--
Checks if a byte is valid in a percent-encoded query string component. Extends `isEncodedChar` to also
allow '+' which represents space in application/x-www-form-urlencoded format.
-/
def isEncodedQueryChar (c : UInt8) : Bool :=
isEncodedChar c || c = '+'.toUInt8
/--
Checks if all characters in a `ByteArray` are allowed in an encoded URI component. This is a fast check
that only verifies the character set, not full encoding validity.
-/
@[inline]
abbrev isAllowedEncodedChars (s : ByteArray) : Prop :=
s.data.all isEncodedChar
instance : Decidable (isAllowedEncodedChars s) :=
inferInstanceAs (Decidable (s.data.all isEncodedChar = true))
/--
Checks if all characters in a `ByteArray` are allowed in an encoded query parameter. Allows '+' as an
alternative encoding for space (application/x-www-form-urlencoded).
-/
@[inline]
abbrev isAllowedEncodedQueryChars (s : ByteArray) : Prop :=
s.data.all isEncodedQueryChar
instance : Decidable (isAllowedEncodedQueryChars s) :=
inferInstanceAs (Decidable (s.data.all isEncodedQueryChar = true))
/--
Validates that all percent signs in a byte array are followed by exactly two hexadecimal digits.
This ensures proper percent-encoding according to RFC 3986.
For example:
- `%20` is valid (percent followed by two hex digits)
- `%` is invalid (percent with no following digits)
- `%2` is invalid (percent followed by only one digit)
- `%GG` is invalid (percent followed by non-hex characters)
-/
def isValidPercentEncoding (ba : ByteArray) : Bool :=
let rec loop (i : Nat) : Bool :=
if h : i < ba.size then
let c := ba[i]'h
if c = '%'.toUInt8 then
-- Check that there are at least 2 more bytes
if h₁ : i + 1 < ba.size then
if h₂ : i + 2 < ba.size then
let d1 := ba[i + 1]'h₁
let d2 := ba[i + 2]'h₂
-- Check both are hex digits
if isHexDigit d1 && isHexDigit d2 then
loop (i + 3)
else
false
else
false
else
false
else
loop (i + 1)
else
true
termination_by ba.size - i
loop 0
/--
Converts a nibble (4-bit value, 0-15) to its hexadecimal digit representation. Returns '0'-'9' for
values 0-9, and 'A'-'F' for values 10-15.
-/
def hexDigit (n : UInt8) : UInt8 :=
if n < 10 then (n + '0'.toUInt8)
else (n - 10 + 'A'.toUInt8)
/--
Converts a hexadecimal digit character to its numeric value (0-15).
Returns `none` if the character is not a valid hex digit.
-/
-- Range comparison operators (≥/≤) restored; they were lost in extraction.
def hexDigitToUInt8? (c : UInt8) : Option UInt8 :=
  if c ≥ '0'.toUInt8 && c ≤ '9'.toUInt8 then
    some (c - '0'.toUInt8)
  else if c ≥ 'a'.toUInt8 && c ≤ 'f'.toUInt8 then
    some (c - 'a'.toUInt8 + 10)
  else if c ≥ 'A'.toUInt8 && c ≤ 'F'.toUInt8 then
    some (c - 'A'.toUInt8 + 10)
  else
    none
theorem isAsciiByteArray.push {bs : ByteArray} (h : isAsciiByteArray bs) (h₁ : isAscii c) :
isAsciiByteArray (bs.push c) := by
simpa [isAsciiByteArray, ByteArray.push, Array.all_push, And.intro h h₁]
theorem isAllowedEncodedChars.push {bs : ByteArray} (h : isAllowedEncodedChars bs) (h₁ : isEncodedChar c) :
isAllowedEncodedChars (bs.push c) := by
simpa [isAllowedEncodedChars, ByteArray.push, Array.all_push, And.intro h h₁]
theorem isAllowedEncodedQueryChars.push {bs : ByteArray} (h : isAllowedEncodedQueryChars bs) (h₁ : isEncodedQueryChar c) :
isAllowedEncodedQueryChars (bs.push c) := by
simpa [isAllowedEncodedQueryChars, ByteArray.push, Array.all_push, And.intro h h₁]
theorem add_sub_assoc {w : Nat} {n m k : BitVec w} :
n + (m - k) = n - k + m := by
rw [BitVec.sub_eq_add_neg, BitVec.add_comm m, BitVec.add_assoc, BitVec.sub_eq_add_neg]
-- Alphanumeric bytes are ASCII: each of the three ranges in `isAlphaNum` tops out below 128.
-- NOTE(review): the `rcases` pattern below lost characters in extraction (likely
-- `⟨h1, h2⟩ | h | h` for the three disjuncts of `isAlphaNum`) — restore before compiling.
theorem isAlphaNum_isAscii {c : UInt8} (h : isAlphaNum c) : isAscii c := by
unfold isAlphaNum isAscii at *
simp at h
rcases h with h1, h2
next => simp; exact Nat.lt_of_le_of_lt h2 (by decide)
next h => simp; exact Nat.lt_of_le_of_lt h.2 (by decide)
next h => simp; exact Nat.lt_of_le_of_lt h.2 (by decide)
theorem isHexDigit_isAscii {c : UInt8} (h : isHexDigit c) : isAscii c := by
unfold isHexDigit isAscii at *
simp at h
rcases h with h1, h2
next => simp; exact Nat.lt_of_le_of_lt h2 (by decide)
next h => simp; exact Nat.lt_of_le_of_lt h.2 (by decide)
next h => simp; exact Nat.lt_of_le_of_lt h.2 (by decide)
-- Encoded-URI characters are ASCII: unreserved characters are ASCII and so is '%'.
-- NOTE(review): the `rcases` pattern below lost characters in extraction (a disjunction
-- over the punctuation cases of `isUnreserved` plus '%') — restore before compiling.
theorem isEncodedChar_isAscii (c : UInt8) (h : isEncodedChar c) : isAscii c := by
unfold isEncodedChar isUnreserved at *
cases h' : isAlphaNum c
· simp [h'] at *; rcases h with h, h | h | h | h <;> (subst_vars; decide)
· simp [h'] at h; exact (isAlphaNum_isAscii h')
theorem isEncodedQueryChar_isAscii (c : UInt8) (h : isEncodedQueryChar c) : isAscii c := by
unfold isEncodedQueryChar isAscii at *
simp at h
rcases h
next h => exact isEncodedChar_isAscii c h
next h => subst_vars; decide
-- `hexDigit` maps any nibble (< 16) into the hex-digit character set: '0'..'9' for 0-9
-- and 'A'..'F' for 10-15, so the result satisfies `isHexDigit`.
-- NOTE(review): several relational operators (≤) and anonymous-constructor brackets (⟨⟩)
-- were lost in extraction throughout this proof (e.g. `48 ≤ x.toNat + 48`,
-- `refine Or.inl (Or.inl ⟨?_, ?_⟩)`, `(by omega : 10 ≤ x.toNat)`) — restore before compiling.
theorem hexDigit_isHexDigit (h₀ : x < 16) : isHexDigit (hexDigit x) := by
unfold hexDigit isHexDigit
have h₁ : x.toNat < 16 := h₀
split <;> simp [Char.toUInt8]
next p =>
have h₂ : x.toNat < 10 := p
have h₂ : 48 x.toNat + 48 := by omega
have h₃ : x.toNat + 48 57 := by omega
have h₄ : x.toNat + 48 < 256 := by omega
refine Or.inl (Or.inl ?_, ?_)
· exact (UInt8.ofNat_le_iff_le (by decide) h₄ |>.mpr h₂)
· exact (UInt8.ofNat_le_iff_le h₄ (by decide) |>.mpr h₃)
next p =>
have h₂ : ¬(x.toNat < 10) := p
have h₃ : 65 x.toNat - 10 + 65 := by omega
have h₅ : x.toNat - 10 + 65 70 := by omega
have h₄ : x.toNat - 10 + 65 < 256 := by omega
refine Or.inr ?_, ?_
· simpa [UInt8.ofNat_sub (by omega : 10 x.toNat)] using
UInt8.ofNat_le_iff_le (by decide : 65 < 256) h₄ |>.mpr h₃
· simpa [UInt8.ofNat_add, UInt8.ofNat_sub (by omega : 10 x.toNat)] using
UInt8.ofNat_le_iff_le h₄ (by decide : 70 < 256) |>.mpr h₅
-- Hex digits are alphanumeric: each hex range ('0'-'9', 'a'-'f', 'A'-'F') is contained
-- in the corresponding alphanumeric range.
-- NOTE(review): anonymous-constructor brackets (⟨⟩) in the `rcases` pattern and the
-- `Or.inl`/`Or.inr` witnesses were lost in extraction — restore before compiling.
theorem isHexDigit_isAlphaNum {c : UInt8} (h : isHexDigit c) : isAlphaNum c := by
unfold isHexDigit isAlphaNum at *
simp at h
rcases h with h1, h2
next => exact Or.inl (Or.inl h1, h2)
next h => exact Or.inl (Or.inr h.1, Nat.le_trans h.2 (by decide))
next h => exact Or.inr h.1, Nat.le_trans h.2 (by decide)
theorem isAlphaNum_isEncodedChar {c : UInt8} (h : isAlphaNum c) : isEncodedChar c := by
unfold isEncodedChar isUnreserved
simp at *
exact Or.inl (Or.inl h)
theorem isAlphaNum_isEncodedQueryChar {c : UInt8} (h : isAlphaNum c) : isEncodedQueryChar c := by
unfold isEncodedQueryChar isEncodedChar isUnreserved
simp at *
exact Or.inl (Or.inl (Or.inl h))
theorem isHexDigit_isEncodedChar {c : UInt8} (h : isHexDigit c) : isEncodedChar c :=
isAlphaNum_isEncodedChar (isHexDigit_isAlphaNum h)
theorem isHexDigit_isEncodedQueryChar {c : UInt8} (h : isHexDigit c) : isEncodedQueryChar c :=
isAlphaNum_isEncodedQueryChar (isHexDigit_isAlphaNum h)
-- An unreserved character is never '%': proved by assuming equality and refuting it by
-- evaluation. (The `≠` in the statement was lost in extraction and is restored here.)
theorem isUnreserved_ne_percent {c : UInt8} (h : isUnreserved c) : c ≠ '%'.toUInt8 := by
  intro heq
  unfold isUnreserved isAlphaNum at h
  simp [heq, Char.toUInt8] at h
-- Monotonicity of `Array.all` over a byte array: if every byte satisfies `p` and `p`
-- implies `q` pointwise, then every byte satisfies `q`.
-- (The `∀` and `→` in the hypothesis were lost in extraction and are restored here.)
theorem all_of_all_of_imp {b : ByteArray} (h : b.data.all p) (imp : ∀ c, p c → q c) : b.data.all q := by
  rw [Array.all_eq] at *
  simp at *
  intro i x
  exact (imp b.data[i]) (h i x)
-- UTF-8 encoding is the identity on a list of ASCII bytes: each byte < 128 encodes to
-- exactly itself, so the `flatMap` over single-byte encodings reproduces the list.
-- (The `∀`, `∈`, and `→` in the hypothesis were lost in extraction and are restored here.)
theorem autf8EncodeChar_flatMap_ascii {a : List UInt8}
    (is_ascii_list : ∀ (x : UInt8), x ∈ a → x < 128) :
    List.flatMap (fun a => String.utf8EncodeChar (Char.ofUInt8 a)) a = a := by
  -- A single ASCII byte encodes to the one-element list containing itself.
  have h_encode {i : UInt8} (h : i < 128) : String.utf8EncodeChar (Char.ofUInt8 i) = [i] := by
    simp [Char.ofUInt8, String.utf8EncodeChar, show ¬127 < i.toNat from Nat.not_lt_of_le (Nat.le_pred_of_lt h)]
  induction a with
  | nil => simp
  | cons head tail ih =>
    simp [List.flatMap_cons]
    rw [h_encode]
    · simp
      rw [ih]
      intro x hx
      exact is_ascii_list x (by simp [hx])
    · exact is_ascii_list head (by simp)
theorem List.toByteArray_loop_eq (xs : List UInt8) (acc : ByteArray) :
(List.toByteArray.loop xs acc).data = acc.data ++ xs.toArray := by
induction xs generalizing acc with
| nil => simp [List.toByteArray.loop]
| cons x xs ih => simp [List.toByteArray.loop, ih, Array.push]
theorem ByteArray.toList_toByteArray (ba : ByteArray) :
ba.data.toList.toByteArray = ba := by
cases ba with
| mk data =>
simp [List.toByteArray]
apply ByteArray.ext
simp [List.toByteArray_loop_eq, ByteArray.empty]
decide
-- A byte array containing only ASCII bytes is valid UTF-8: the witness decoding is the
-- bytes themselves read as one character each.
-- (The `⟨⟩`, `∀`, `∈`, and `→` tokens were lost in extraction and are restored here.)
theorem ascii_is_valid_utf8 (ba : ByteArray) (s : ba.data.all isAscii) : ByteArray.IsValidUTF8 ba := by
  refine ⟨ba.data.toList.map Char.ofUInt8, ?_⟩
  rw [List.utf8Encode]
  simp only [List.flatMap_map]
  have is_ascii : ∀ (x : UInt8), x ∈ ba.data.toList → x < 128 := by
    let is_ascii := Array.all_eq_true_iff_forall_mem.mp s
    simp [isAscii] at is_ascii
    intro x hx
    exact is_ascii x (by simp_all)
  rw [autf8EncodeChar_flatMap_ascii is_ascii]
  exact ByteArray.toList_toByteArray ba |>.symm
/-! ### Percent Encoding Preservation Lemmas -/
/--
Size of a pushed ByteArray.
-/
theorem ByteArray.size_push' {ba : ByteArray} {c : UInt8} : (ba.push c).size = ba.size + 1 := by
simp only [ ByteArray.size_data, ByteArray.data_push, Array.size_push]
/--
If index i is in bounds for ba, then i is also in bounds for ba.push c.
-/
theorem ByteArray.lt_size_of_lt_size_push {ba : ByteArray} {c : UInt8} {i : Nat}
(h : i < ba.size) : i < (ba.push c).size := by
rw [ByteArray.size_push']
omega
/--
Accessing an index less than the original size in a pushed array gives the original value.
-/
theorem ByteArray.getElem_push_lt' {ba : ByteArray} {c : UInt8} {i : Nat} (h : i < ba.size)
(h' : i < (ba.push c).size := ByteArray.lt_size_of_lt_size_push h) :
(ba.push c)[i]'h' = ba[i]'h := by
show (ba.push c).get i h' = ba.get i h
cases ba with
| mk data =>
show (data.push c)[i] = data[i]
exact Array.getElem_push_lt h
/--
Accessing the last index of a pushed array gives the pushed value.
-/
theorem ByteArray.getElem_push_eq' {ba : ByteArray} {c : UInt8}
(h : ba.size < (ba.push c).size := by rw [ByteArray.size_push']; omega) :
(ba.push c)[ba.size]'h = c := by
show (ba.push c).get ba.size h = c
cases ba with
| mk data =>
show (data.push c)[data.size] = c
exact Array.getElem_push_eq
/--
A generalized version of the percent-encoding validation loop that can be reasoned about independently.
This is equivalent to `isValidPercentEncoding.loop` but defined at the top level.
-/
def isValidPercentEncodingFrom (ba : ByteArray) (i : Nat) : Bool :=
if h : i < ba.size then
let c := ba[i]'h
if c = '%'.toUInt8 then
if h₁ : i + 1 < ba.size then
if h₂ : i + 2 < ba.size then
let d1 := ba[i + 1]'h₁
let d2 := ba[i + 2]'h₂
if isHexDigit d1 && isHexDigit d2 then
isValidPercentEncodingFrom ba (i + 3)
else
false
else
false
else
false
else
isValidPercentEncodingFrom ba (i + 1)
else
true
termination_by ba.size - i
/--
The internal loop of isValidPercentEncoding is equivalent to isValidPercentEncodingFrom.
-/
theorem isValidPercentEncoding_eq_from (ba : ByteArray) :
    isValidPercentEncoding ba = isValidPercentEncodingFrom ba 0 := by
  unfold isValidPercentEncoding
  -- Generalize over the starting index; the `∀` binder was lost in extraction and is restored.
  suffices h : ∀ i, isValidPercentEncoding.loop ba i = isValidPercentEncodingFrom ba i by exact h 0
  intro i
  -- Strong induction on the remaining length `ba.size - i` matches the loop's termination measure.
  induction h : ba.size - i using Nat.strongRecOn generalizing i with
  | ind n ih =>
    unfold isValidPercentEncoding.loop isValidPercentEncodingFrom
    split
    · rename_i hi
      split
      · rename_i h1
        split
        · rename_i h2
          by_cases hpct : ba[i] = '%'.toUInt8
          · simp only [hpct]
            by_cases hhex : isHexDigit ba[i + 1] && isHexDigit ba[i + 2]
            · simp [hhex]
              exact ih (ba.size - (i + 3)) (by omega) (i + 3) rfl
            · simp [Bool.eq_false_iff.mpr hhex]
          · simp [hpct]
            exact ih (ba.size - (i + 1)) (by omega) (i + 1) rfl
        · by_cases hpct : ba[i] = '%'.toUInt8
          · simp [hpct]
          · simp [hpct]
            exact ih (ba.size - (i + 1)) (by omega) (i + 1) rfl
      · by_cases hpct : ba[i] = '%'.toUInt8
        · simp [hpct]
        · simp [hpct]
          exact ih (ba.size - (i + 1)) (by omega) (i + 1) rfl
    · rfl
/--
If i ≥ ba.size, then isValidPercentEncodingFrom returns true.
-/
-- The `≥` in the hypothesis was lost in extraction; `Nat.not_lt.mpr h` requires `ba.size ≤ i`.
theorem isValidPercentEncodingFrom_ge {ba : ByteArray} {i : Nat} (h : i ≥ ba.size) :
    isValidPercentEncodingFrom ba i = true := by
  unfold isValidPercentEncodingFrom
  simp [Nat.not_lt.mpr h]
/--
A percent-encoded URI component with a compile-time proof that it contains only valid encoded characters.
This provides type-safe URI encoding without runtime validation.

The invariant guarantees that the string contains only unreserved characters (alphanumeric, hyphen, period,
underscore, tilde) and percent signs (for escape sequences).
-/
structure EncodedString where
  private mk ::
  /--
  The underlying byte array containing the percent-encoded data.
  -/
  toByteArray : ByteArray
  /--
  Proof that all characters in the byte array are valid encoded characters.
  -/
  valid : isAllowedEncodedChars toByteArray
namespace EncodedString

/--
Creates an empty encoded string.
-/
def empty : EncodedString :=
  ⟨.empty, by native_decide⟩

instance : Inhabited EncodedString where
  default := EncodedString.empty

/--
Appends a single encoded character to an encoded string.
Requires that the character satisfies `isEncodedChar` to maintain the percent-encoding invariant.
-/
private def push (s : EncodedString) (c : UInt8) (h : isEncodedChar c) : EncodedString :=
  ⟨s.toByteArray.push c, isAllowedEncodedChars.push s.valid h⟩

/--
Converts a byte to its percent-encoded hexadecimal representation (%XX). For example, a space
character (0x20) becomes "%20".
-/
private def byteToHex (b : UInt8) (s : EncodedString) : EncodedString :=
  let ba := s.toByteArray.push '%'.toUInt8
    |>.push (hexDigit (b >>> 4))
    |>.push (hexDigit (b &&& 0xF))
  let valid := by
    have h1 : isEncodedChar '%'.toUInt8 := by decide
    have h2 : isEncodedChar (hexDigit (b >>> 4)) :=
      isHexDigit_isEncodedChar (hexDigit_isHexDigit (BitVec.toNat_ushiftRight_lt b.toBitVec 4 (by decide)))
    have h3 : isEncodedChar (hexDigit (b &&& 0xF)) :=
      isHexDigit_isEncodedChar (hexDigit_isHexDigit (@UInt8.and_lt_add_one b 0xF (by decide)))
    exact isAllowedEncodedChars.push (isAllowedEncodedChars.push (isAllowedEncodedChars.push s.valid h1) h2) h3
  ⟨ba, valid⟩

/--
Encodes a raw string into an `EncodedString` with automatic proof construction. Unreserved characters
(alphanumeric, hyphen, period, underscore, tilde) are kept as-is, while all other characters are percent-encoded.
-/
def encode (s : String) : EncodedString :=
  s.toUTF8.foldl (init := EncodedString.empty) fun acc c =>
    if h : isUnreserved c then
      acc.push c (by simp [isEncodedChar]; exact Or.inl h)
    else
      byteToHex c acc

/--
Attempts to create an `EncodedString` from a `ByteArray`. Returns `some` if the byte array contains only
valid encoded characters and all percent signs are followed by exactly two hex digits, `none` otherwise.
-/
def ofByteArray? (ba : ByteArray) : Option EncodedString :=
  if h : isAllowedEncodedChars ba then
    if isValidPercentEncoding ba then some ⟨ba, h⟩ else none
  else none

/--
Creates an `EncodedString` from a `ByteArray`, panicking if the byte array is invalid.
-/
def ofByteArray! (ba : ByteArray) : EncodedString :=
  match ofByteArray? ba with
  | some es => es
  | none => panic! "invalid encoded string"

/--
Creates an `EncodedString` from a `String` by checking if it's already a valid percent-encoded string.
Returns `some` if valid, `none` otherwise.
-/
def ofString? (s : String) : Option EncodedString :=
  ofByteArray? s.toUTF8

/--
Creates an `EncodedString` from a `String`, panicking if the string is not a valid percent-encoded string.
-/
def ofString! (s : String) : EncodedString :=
  ofByteArray! s.toUTF8

/--
Creates an `EncodedString` from a `ByteArray` with compile-time proofs.
Use this when you have proofs that the byte array is valid.
-/
def new (ba : ByteArray) (valid : isAllowedEncodedChars ba) (_validEncoding : isValidPercentEncoding ba) : EncodedString :=
  ⟨ba, valid⟩

instance : ToString EncodedString where
  toString es := ⟨es.toByteArray, ascii_is_valid_utf8 es.toByteArray (all_of_all_of_imp es.valid isEncodedChar_isAscii)⟩

/--
Decodes an `EncodedString` back to a regular `String`. Converts percent-encoded sequences (e.g., "%20")
back to their original characters. Returns `none` if the decoded bytes are not valid UTF-8.
-/
def decode (es : EncodedString) : Option String := Id.run do
  let mut decoded : ByteArray := ByteArray.empty
  let rawBytes := es.toByteArray
  let len := rawBytes.size
  let mut i := 0
  let percent := '%'.toNat.toUInt8
  while h : i < len do
    let c := rawBytes[i]
    (decoded, i) := if h₁ : c == percent ∧ i + 1 < len then
      let h1 := rawBytes[i + 1]
      if let some hd1 := hexDigitToUInt8? h1 then
        if h₂ : i + 2 < len then
          let h2 := rawBytes[i + 2]
          if let some hd2 := hexDigitToUInt8? h2 then
            (decoded.push (hd1 * 16 + hd2), i + 3)
          else
            -- Second hex digit invalid: keep the three raw bytes verbatim.
            (((decoded.push c).push h1).push h2, i + 3)
        else
          ((decoded.push c).push h1, i + 2)
      else
        -- First hex digit invalid: keep the raw bytes verbatim.
        ((decoded.push c).push h1, i + 2)
    else
      (decoded.push c, i + 1)
  return String.fromUTF8? decoded

instance : Repr EncodedString where
  reprPrec es := reprPrec (toString es)

instance : BEq EncodedString where
  beq x y := x.toByteArray == y.toByteArray

instance : Hashable EncodedString where
  hash x := Hashable.hash x.toByteArray

end EncodedString
/--
A percent-encoded query string component with a compile-time proof that it contains only valid encoded
query characters. Extends `EncodedString` to support the '+' character for spaces, following the
application/x-www-form-urlencoded format.

This type is specifically designed for encoding query parameters where spaces can be represented as '+'
instead of "%20".
-/
structure EncodedQueryString where
  private mk ::
  /--
  The underlying byte array containing the percent-encoded query data.
  -/
  toByteArray : ByteArray
  /--
  Proof that all characters in the byte array are valid encoded query characters.
  -/
  valid : isAllowedEncodedQueryChars toByteArray
namespace EncodedQueryString

/--
Creates an empty encoded query string.
-/
def empty : EncodedQueryString :=
  ⟨.empty, by native_decide⟩

instance : Inhabited EncodedQueryString where
  default := EncodedQueryString.empty

/--
Appends a single encoded query character to an encoded query string.
-/
private def push (s : EncodedQueryString) (c : UInt8) (h : isEncodedQueryChar c) : EncodedQueryString :=
  ⟨s.toByteArray.push c, isAllowedEncodedQueryChars.push s.valid h⟩

/--
Attempts to create an `EncodedQueryString` from a `ByteArray`. Returns `some` if the byte array contains
only valid encoded query characters and all percent signs are followed by exactly two hex digits, `none` otherwise.
-/
def ofByteArray? (ba : ByteArray) : Option EncodedQueryString :=
  if h : isAllowedEncodedQueryChars ba then
    if isValidPercentEncoding ba then some ⟨ba, h⟩ else none
  else none

/--
Creates an `EncodedQueryString` from a `ByteArray`, panicking if the byte array is invalid.
-/
def ofByteArray! (ba : ByteArray) : EncodedQueryString :=
  match ofByteArray? ba with
  | some es => es
  | none => panic! "invalid encoded query string"

/--
Creates an `EncodedQueryString` from a `String` by checking if it's already a valid percent-encoded string.
Returns `some` if valid, `none` otherwise.
-/
def ofString? (s : String) : Option EncodedQueryString :=
  ofByteArray? s.toUTF8

/--
Creates an `EncodedQueryString` from a `String`, panicking if the string is not a valid percent-encoded string.
-/
def ofString! (s : String) : EncodedQueryString :=
  ofByteArray! s.toUTF8

/--
Creates an `EncodedQueryString` from a `ByteArray` with compile-time proofs.
Use this when you have proofs that the byte array is valid.
-/
def new (ba : ByteArray) (valid : isAllowedEncodedQueryChars ba) (_validEncoding : isValidPercentEncoding ba) : EncodedQueryString :=
  ⟨ba, valid⟩

/--
Converts a byte to its percent-encoded hexadecimal representation (%XX). For example, a space character
(0x20) becomes "%20".
-/
private def byteToHex (b : UInt8) (s : EncodedQueryString) : EncodedQueryString :=
  let ba := s.toByteArray.push '%'.toUInt8
    |>.push (hexDigit (b >>> 4))
    |>.push (hexDigit (b &&& 0xF))
  let valid := by
    have h1 : isEncodedQueryChar '%'.toUInt8 := by decide
    have h2 : isEncodedQueryChar (hexDigit (b >>> 4)) :=
      isHexDigit_isEncodedQueryChar (hexDigit_isHexDigit (BitVec.toNat_ushiftRight_lt b.toBitVec 4 (by decide)))
    have h3 : isEncodedQueryChar (hexDigit (b &&& 0xF)) :=
      isHexDigit_isEncodedQueryChar (hexDigit_isHexDigit (@UInt8.and_lt_add_one b 0xF (by decide)))
    exact isAllowedEncodedQueryChars.push (isAllowedEncodedQueryChars.push (isAllowedEncodedQueryChars.push s.valid h1) h2) h3
  ⟨ba, valid⟩

/--
Encodes a raw string into an `EncodedQueryString` with automatic proof construction. Unreserved characters
are kept as-is, spaces are encoded as '+', and all other characters are percent-encoded.
-/
def encode (s : String) : EncodedQueryString :=
  s.toUTF8.foldl (init := EncodedQueryString.empty) fun acc c =>
    if h : isUnreserved c then
      acc.push c (by simp [isEncodedQueryChar, isEncodedChar]; exact Or.inl (Or.inl h))
    else if _ : c = ' '.toUInt8 then
      acc.push '+'.toUInt8 (by simp [isEncodedQueryChar])
    else
      byteToHex c acc

instance : ToString EncodedQueryString where
  toString es := ⟨es.toByteArray, ascii_is_valid_utf8 es.toByteArray (all_of_all_of_imp es.valid isEncodedQueryChar_isAscii)⟩

/--
Decodes an `EncodedQueryString` back to a regular `String`. Converts percent-encoded sequences and '+'
signs back to their original characters. Returns `none` if the decoded bytes are not valid UTF-8.
This is almost the same code from `System.Uri.UriEscape.decodeUri`, but with `Option` instead.
-/
def decode (es : EncodedQueryString) : Option String := Id.run do
  let mut decoded : ByteArray := ByteArray.empty
  let rawBytes := es.toByteArray
  let len := rawBytes.size
  let mut i := 0
  let percent := '%'.toNat.toUInt8
  let plus := '+'.toNat.toUInt8
  while h : i < len do
    let c := rawBytes[i]
    (decoded, i) := if c == plus then
      -- '+' decodes to a space in the x-www-form-urlencoded format.
      (decoded.push ' '.toNat.toUInt8, i + 1)
    else if h₁ : c == percent ∧ i + 1 < len then
      let h1 := rawBytes[i + 1]
      if let some hd1 := hexDigitToUInt8? h1 then
        if h₂ : i + 2 < len then
          let h2 := rawBytes[i + 2]
          if let some hd2 := hexDigitToUInt8? h2 then
            (decoded.push (hd1 * 16 + hd2), i + 3)
          else
            (((decoded.push c).push h1).push h2, i + 3)
        else
          ((decoded.push c).push h1, i + 2)
      else
        ((decoded.push c).push h1, i + 2)
    else
      (decoded.push c, i + 1)
  return String.fromUTF8? decoded

end EncodedQueryString
instance : Repr EncodedQueryString where
  reprPrec es := reprPrec (toString es)

instance : BEq EncodedQueryString where
  beq x y := x.toByteArray = y.toByteArray

instance : Hashable EncodedQueryString where
  hash x := Hashable.hash x.toByteArray

-- Prefix `some`/`none` with a distinct tag byte so that `some EncodedQueryString.empty`
-- and `none` hash to different values.
instance : Hashable (Option EncodedQueryString) where
  hash
    | some x => Hashable.hash ((ByteArray.mk #[1] ++ x.toByteArray))
    | none => Hashable.hash (ByteArray.mk #[0])
end Std.Http.URI

View File

@@ -0,0 +1,399 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Init.Data.String
public import Std.Internal.Parsec
public import Std.Internal.Parsec.ByteArray
public import Std.Internal.Http.Data.URI.Basic
public section
/-!
# URI Parser
This module provides parsers for URIs and request targets according to RFC 3986.
It handles parsing of schemes, authorities, paths, queries, and fragments.
-/
namespace Std.Http.URI.Parser
set_option linter.all true
open Std Internal Parsec ByteArray
-- Character-class predicates following the ABNF of RFC 3986 §1.3 and §2.

@[inline]
private def isDigit (c : UInt8) : Bool :=
  c >= '0'.toUInt8 && c <= '9'.toUInt8

@[inline]
private def isHexDigit (c : UInt8) : Bool :=
  isDigit c || (c >= 'A'.toUInt8 && c <= 'F'.toUInt8) || (c >= 'a'.toUInt8 && c <= 'f'.toUInt8)

@[inline]
private def isAlpha (c : UInt8) : Bool :=
  (c >= 'A'.toUInt8 && c <= 'Z'.toUInt8) || (c >= 'a'.toUInt8 && c <= 'z'.toUInt8)

@[inline]
private def isAlphaNum (c : UInt8) : Bool :=
  isAlpha c || isDigit c

@[inline]
private def isUnreserved (c : UInt8) : Bool :=
  isAlphaNum c || c == '-'.toUInt8 || c == '.'.toUInt8 || c == '_'.toUInt8 || c == '~'.toUInt8

@[inline]
private def isSubDelims (c : UInt8) : Bool :=
  c == '!'.toUInt8 || c == '$'.toUInt8 || c == '&'.toUInt8 || c == '\''.toUInt8 ||
  c == '('.toUInt8 || c == ')'.toUInt8 || c == '*'.toUInt8 || c == '+'.toUInt8 ||
  c == ','.toUInt8 || c == ';'.toUInt8 || c == '='.toUInt8

@[inline]
private def isGenDelims (c : UInt8) : Bool :=
  c == ':'.toUInt8 || c == '/'.toUInt8 || c == '?'.toUInt8 || c == '#'.toUInt8 ||
  c == '['.toUInt8 || c == ']'.toUInt8 || c == '@'.toUInt8

@[inline]
private def isReserved (c : UInt8) : Bool :=
  isGenDelims c || isSubDelims c

@[inline]
private def isPChar (c : UInt8) : Bool :=
  isUnreserved c || isSubDelims c || c == ':'.toUInt8 || c == '@'.toUInt8 || c == '%'.toUInt8

@[inline]
private def isRegNameChar (c : UInt8) : Bool :=
  isUnreserved c || isSubDelims c || c == '%'.toUInt8

@[inline]
private def isSchemeChar (c : UInt8) : Bool :=
  isAlphaNum c || c == '+'.toUInt8 || c == '-'.toUInt8 || c == '.'.toUInt8

@[inline]
private def isQueryChar (c : UInt8) : Bool :=
  isPChar c || c == '/'.toUInt8 || c == '?'.toUInt8

@[inline]
private def isFragmentChar (c : UInt8) : Bool :=
  isPChar c || c == '/'.toUInt8 || c == '?'.toUInt8

@[inline]
private def isUserInfoChar (c : UInt8) : Bool :=
  isUnreserved c || isSubDelims c || c == '%'.toUInt8 || c == ':'.toUInt8
/-- Runs `p`, backtracking on failure, and returns `none` instead of failing. -/
@[inline]
private def tryOpt (p : Parser α) : Parser (Option α) :=
  optional (attempt p)

/-- Lifts an `Except String` result into the parser, failing with the carried message. -/
@[inline]
private def ofExcept (p : Except String α) : Parser α :=
  match p with
  | .ok res => pure res
  | .error err => fail err

/-- Returns `true` when the next byte (without consuming it) satisfies `p`. -/
@[inline]
private def peekIs (p : UInt8 → Bool) : Parser Bool := do
  return (← peekWhen? p).isSome

/-- Converts an ASCII hex digit to its numeric value. Assumes the input already satisfies `isHexDigit`. -/
private def hexToByte (digit : UInt8) : UInt8 :=
  if digit <= '9'.toUInt8 then digit - '0'.toUInt8
  else if digit <= 'F'.toUInt8 then digit - 'A'.toUInt8 + 10
  else digit - 'a'.toUInt8 + 10

/-- Parses a `%XX` percent-encoded byte and returns the decoded value. -/
private def parsePctEncoded : Parser UInt8 := do
  skipByte '%'.toUInt8
  let hi ← hexToByte <$> satisfy isHexDigit
  let lo ← hexToByte <$> satisfy isHexDigit
  return (hi <<< 4) ||| lo
-- scheme = ALPHA *( ALPHA / DIGIT / "+" / "-" / "." )
private def parseScheme : Parser URI.Scheme := do
  let schemeBytes ← takeWhileUpTo1 isSchemeChar 63
  -- Schemes are case-insensitive; normalize to lowercase and carry the proof.
  return ⟨String.fromUTF8! schemeBytes.toByteArray |>.toLower, .lower_isLowerCase⟩

-- port = *DIGIT
private def parsePortNumber : Parser UInt16 := do
  let portBytes ← takeWhileUpTo isDigit 5
  if portBytes.size = 0 then fail "empty port number"
  let portStr := String.fromUTF8! portBytes.toByteArray
  let some portNum := String.toNat? portStr
    | fail s!"invalid port number: {portStr}"
  if portNum > 65535 then
    fail s!"port number too large: {portNum}"
  return portNum.toUInt16
-- userinfo = *( unreserved / pct-encoded / sub-delims / ":" )
private def parseUserInfo : Parser URI.UserInfo := do
  -- The username part stops at the first ':' (which starts the password part).
  let userBytesName ← takeWhileUpTo (fun x => x != ':'.toUInt8 && isUserInfoChar x) 1024
  let some userName := URI.EncodedString.ofByteArray? userBytesName.toByteArray
    | fail "invalid percent encoding in user info"
  let userPass ← if (← peekIs (· == ':'.toUInt8)) then
      skip
      let userBytesPass ← takeWhileUpTo isUserInfoChar 1024
      let some userStrPass := URI.EncodedString.ofByteArray? userBytesPass.toByteArray >>= URI.EncodedString.decode
        | fail "invalid percent encoding in user info"
      pure <| some userStrPass
    else
      pure none
  let some userName := userName.decode
    | fail "invalid username"
  return ⟨userName, userPass⟩

-- IP-literal = "[" ( IPv6address / IPvFuture ) "]"
private def parseIPv6 : Parser Net.IPv6Addr := do
  skipByte '['.toUInt8
  let result ← takeWhileUpTo1 (fun x => x == ':'.toUInt8 || isHexDigit x) 256
  skipByte ']'.toUInt8
  let ipv6Str := String.fromUTF8! result.toByteArray
  let some ipv6Addr := Std.Net.IPv6Addr.ofString ipv6Str
    | fail s!"invalid IPv6 address: {ipv6Str}"
  return ipv6Addr

-- IPv4address = dec-octet "." dec-octet "." dec-octet "." dec-octet
private def parseIPv4 : Parser Net.IPv4Addr := do
  let result ← takeWhileUpTo1 (fun x => x == '.'.toUInt8 || isDigit x) 256
  let ipv4Str := String.fromUTF8! result.toByteArray
  let some ipv4Addr := Std.Net.IPv4Addr.ofString ipv4Str
    | fail s!"invalid IPv4 address: {ipv4Str}"
  return ipv4Addr

-- host = IP-literal / IPv4address / reg-name
private def parseHost : Parser URI.Host := do
  if (← peekWhen? (· == '['.toUInt8)).isSome then
    return .ipv6 (← parseIPv6)
  else if (← peekWhen? isDigit).isSome then
    return .ipv4 (← parseIPv4)
  else
    let isHostName x := isUnreserved x || x == '%'.toUInt8 || isSubDelims x
    let some str := URI.EncodedString.ofByteArray? (← takeWhileUpTo1 isHostName 1024).toByteArray >>= URI.EncodedString.decode
      | fail s!"invalid host"
    -- Host names are case-insensitive; normalize to lowercase and carry the proof.
    return .name ⟨str.toLower, .lower_isLowerCase⟩

-- authority = [ userinfo "@" ] host [ ":" port ]
private def parseAuthority : Parser URI.Authority := do
  let userinfo ← tryOpt do
    let ui ← parseUserInfo
    skipByte '@'.toUInt8
    return ui
  let host ← parseHost
  let port ← optional do
    skipByte ':'.toUInt8
    parsePortNumber
  return { userInfo := userinfo, host := host, port := port }
-- segment = *pchar
private def parseSegment : Parser ByteSlice := do
  takeWhileUpTo isPChar 256

/-
path = path-abempty ; begins with "/" or is empty
/ path-absolute ; begins with "/" but not "//"
/ path-noscheme ; begins with a non-colon segment
/ path-rootless ; begins with a segment
/ path-empty ; zero characters

path-abempty = *( "/" segment )
path-absolute = "/" [ segment-nz *( "/" segment ) ]
path-noscheme = segment-nz-nc *( "/" segment )
path-rootless = segment-nz *( "/" segment )
path-empty = 0<pchar>
-/

/--
Parses a URI path with combined parsing and validation.
-/
def parsePath (forceAbsolute : Bool) (allowEmpty : Bool) : Parser URI.Path := do
  let mut isAbsolute := false
  let mut segments : Array _ := #[]
  let isSegmentOrSlash ← peekIs (fun c => isPChar c || c == '/'.toUInt8)
  if !allowEmpty && ((← isEof) || !isSegmentOrSlash) then
    fail "need a path"
  -- Check if path is absolute
  if (← peekIs (· == '/'.toUInt8)) then
    isAbsolute := true
    skip
    if (← peekIs (· == '/'.toUInt8)) then
      fail "it's a scheme starter"
  else if forceAbsolute then
    if allowEmpty && ((← isEof) || !isSegmentOrSlash) then
      return { segments := segments, absolute := isAbsolute }
    else
      fail "require '/' in path"
  else
    pure ()
  -- Parse segments
  while (← peek?).isSome do
    let segmentBytes ← parseSegment
    let some segmentStr := URI.EncodedString.ofByteArray? segmentBytes.toByteArray
      | fail "invalid percent encoding in path segment"
    segments := segments.push segmentStr
    if (← peek?).any (· == '/'.toUInt8) then
      skip
      -- If path ends with '/', add empty segment
      if (← peek?).isNone then
        segments := segments.push (URI.EncodedString.empty)
    else
      break
  return { segments := segments, absolute := isAbsolute }
-- query = *( pchar / "/" / "?" )
private def parseQuery : Parser URI.Query := do
  let queryBytes ← takeWhileUpTo isQueryChar 1024
  let some queryStr := String.fromUTF8? queryBytes.toByteArray
    | fail "invalid query string"
  -- Split on '&' into pairs; each pair may be `key` alone or `key=value`
  -- (extra '=' characters are kept inside the value).
  let pairs : Option URI.Query := queryStr.splitOn "&" |>.foldlM (init := URI.Query.empty) fun acc pair => do
    match pair.splitOn "=" with
    | [key] =>
      let key ← URI.EncodedQueryString.ofString? key
      pure (acc.insertEncoded key none)
    | key :: value =>
      let key ← URI.EncodedQueryString.ofString? key
      let value ← URI.EncodedQueryString.ofString? (String.intercalate "=" value)
      pure (acc.insertEncoded key (some value))
    | [] => pure acc
  if let some pairs := pairs then
    return pairs
  else
    fail "invalid query string"

-- fragment = *( pchar / "/" / "?" )
private def parseFragment : Parser URI.EncodedString := do
  let fragmentBytes ← takeWhileUpTo isFragmentChar 1024
  let some fragmentStr := URI.EncodedString.ofByteArray? fragmentBytes.toByteArray
    | fail "invalid percent encoding in fragment"
  return fragmentStr

private def parseHierPart : Parser (Option URI.Authority × URI.Path) := do
  -- Check for "//" authority path-abempty
  if (← tryOpt (skipString "//")).isSome then
    let authority ← parseAuthority
    let path ← parsePath true true -- path-abempty (must start with "/" or be empty)
    return (some authority, path)
  else
    -- path-absolute / path-rootless / path-empty
    let path ← parsePath false true
    return (none, path)

/--
Parses a URI (Uniform Resource Identifier).

URI = scheme ":" hier-part [ "?" query ] [ "#" fragment ]
hier-part = "//" authority path-abempty / path-absolute / path-rootless / path-empty
-/
public def parseURI : Parser URI := do
  let scheme ← parseScheme
  skipByte ':'.toUInt8
  let (authority, path) ← parseHierPart
  let query ← optional (skipByteChar '?' *> parseQuery)
  let query := query.getD .empty
  let fragment ← optional do
    let some result := (← (skipByteChar '#' *> parseFragment)) |>.decode
      | fail "invalid fragment parse encoding"
    return result
  return { scheme, authority, path, query, fragment }
/--
Parses a request target with combined parsing and validation.
-/
public def parseRequestTarget : Parser RequestTarget :=
  asterisk <|> origin <|> authority <|> absolute
where
  -- The asterisk form
  asterisk : Parser RequestTarget := do
    skipByte '*'.toUInt8
    return .asteriskForm

  -- origin-form = absolute-path [ "?" query ]
  -- absolute-path = 1*( "/" segment )
  origin : Parser RequestTarget := attempt do
    if (← peekIs (· == '/'.toUInt8)) then
      let path ← parsePath true true
      let query ← optional (skipByte '?'.toUInt8 *> parseQuery)
      let frag ← optional do
        let some result := (← (skipByteChar '#' *> parseFragment)) |>.decode
          | fail "invalid fragment parse encoding"
        return result
      return .originForm path query frag
    else
      fail "not origin"

  -- absolute-URI = scheme ":" hier-part [ "?" query ]
  absolute : Parser RequestTarget := attempt do
    let scheme ← parseScheme
    skipByte ':'.toUInt8
    let (authority, path) ← parseHierPart
    let query ← optional (skipByteChar '?' *> parseQuery)
    let query := query.getD URI.Query.empty
    let fragment ← optional do
      let some result := (← (skipByteChar '#' *> parseFragment)) |>.decode
        | fail "invalid fragment parse encoding"
      return result
    return .absoluteForm { path, scheme, authority, query, fragment }

  -- authority-form = host ":" port
  authority : Parser RequestTarget := attempt do
    let host ← parseHost
    skipByteChar ':'
    let port ← parsePortNumber
    return .authorityForm { host, port := some port }

/--
Parses an HTTP `Host` header value.
-/
public def parseHostHeader : Parser (URI.Host × Option UInt16) := do
  let host ← parseHost
  let port ← optional do
    skipByte ':'.toUInt8
    parsePortNumber
  -- The header must be fully consumed; trailing bytes are an error.
  if !(← isEof) then
    fail "invalid host header"
  return (host, port)
end Std.Http.URI.Parser

View File

@@ -0,0 +1,91 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Init.Data.String
public section
/-!
# Version
The `Version` structure represents an HTTP version with a major and minor number. It includes several
standard versions of the HTTP protocol, such as HTTP/1.1, HTTP/2.0, and HTTP/3.0.
* Reference: https://httpwg.org/specs/rfc9110.html#protocol.version
-/
namespace Std.Http
set_option linter.all true
/--
The `Version` structure represents an HTTP version with a major and minor number. It includes several
standard versions of the HTTP protocol, such as HTTP/1.1, HTTP/2.0, and HTTP/3.0.

* Reference: https://httpwg.org/specs/rfc9110.html#protocol.version
-/
inductive Version
  /--
  `HTTP/1.1`
  -/
  | v11
  /--
  `HTTP/2.0`
  -/
  | v20
  /--
  `HTTP/3.0`
  -/
  | v30
  deriving Repr, Inhabited, BEq, DecidableEq

namespace Version

/--
Converts a pair of `Nat` to the corresponding `Version`.
-/
def ofNumber? : Nat → Nat → Option Version
  | 1, 1 => some .v11
  | 2, 0 => some .v20
  | 3, 0 => some .v30
  | _, _ => none

/--
Converts `String` to the corresponding `Version`.
-/
def ofString? : String → Option Version
  | "HTTP/1.1" => some .v11
  | "HTTP/2.0" => some .v20
  | "HTTP/3.0" => some .v30
  | _ => none

/--
Converts `String` to the corresponding `Version`, panics if invalid.
-/
def ofString! (s : String) : Version :=
  match ofString? s with
  | some v => v
  | none => panic! s!"invalid HTTP version: {s.quote}"

/--
Converts a `Version` to its corresponding major and minor numbers as a pair.
-/
def toNumber : Version → (Nat × Nat)
  | .v11 => (1, 1)
  | .v20 => (2, 0)
  | .v30 => (3, 0)

instance : ToString Version where
  toString
    | .v11 => "HTTP/1.1"
    | .v20 => "HTTP/2.0"
    | .v30 => "HTTP/3.0"

end Std.Http.Version

View File

@@ -0,0 +1,21 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Internal.Map
public import Std.Internal.Http.Internal.LowerCase
public import Std.Internal.Http.Internal.Encode
public import Std.Internal.Http.Internal.ChunkedBuffer
public section
/-!
# HTTP Internal Utilities
This module re-exports internal utilities used by the HTTP library including
data structures, encoding functions, and buffer management.
-/

View File

@@ -0,0 +1,135 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Init.Data.String
public import Init.Data.ByteArray
public section
/-!
# ChunkedBuffer
This module provides an efficient way to concatenate multiple `ByteArray`s by deferring the actual
concatenation until necessary. This is particularly useful in HTTP response building and streaming
scenarios where data is accumulated incrementally.
-/
namespace Std.Http.Internal
set_option linter.all true
/--
A structure that accumulates multiple `ByteArray`s efficiently by tracking them in an array and
maintaining the total size. This allows building large buffers without repeated allocations and copies.
-/
structure ChunkedBuffer where
  /--
  The accumulated byte arrays.
  -/
  data : Array ByteArray
  /--
  The total size in bytes of all accumulated arrays.
  -/
  size : Nat
  deriving Inhabited

namespace ChunkedBuffer

/--
An empty `ChunkedBuffer`.
-/
@[inline]
def empty : ChunkedBuffer :=
  { data := #[], size := 0 }

/--
Append a single `ByteArray` to the `ChunkedBuffer`.
-/
@[inline]
def push (c : ChunkedBuffer) (b : ByteArray) : ChunkedBuffer :=
  { data := c.data.push b, size := c.size + b.size }

/--
Writes a `ByteArray` to the `ChunkedBuffer`. Alias for `push`.
-/
@[inline]
def write (buffer : ChunkedBuffer) (data : ByteArray) : ChunkedBuffer :=
  buffer.push data

/--
Writes a `Char` to the `ChunkedBuffer` as a single byte.
-/
@[inline]
def writeChar (buffer : ChunkedBuffer) (data : Char) : ChunkedBuffer :=
  -- NOTE(review): `Char.toUInt8` keeps only the low byte of the code point, so this
  -- is only correct for ASCII characters — confirm callers never pass non-ASCII here.
  buffer.push (ByteArray.mk #[data.toUInt8])

/--
Writes a `String` to the `ChunkedBuffer` as its UTF-8 bytes.
-/
@[inline]
def writeString (buffer : ChunkedBuffer) (data : String) : ChunkedBuffer :=
  buffer.push data.toUTF8

/--
Append many ByteArrays at once.
-/
@[inline]
def append (c : ChunkedBuffer) (d : ChunkedBuffer) : ChunkedBuffer :=
  { data := c.data ++ d.data, size := c.size + d.size }

/--
Turn the combined structure into a single contiguous ByteArray.
-/
@[inline]
def toByteArray (c : ChunkedBuffer) : ByteArray :=
  -- Fast path: a single chunk can be returned as-is without copying.
  if h : 1 = c.data.size then
    c.data[0]'(Nat.le_of_eq h)
  else
    -- Pre-allocate the full capacity so the fold avoids repeated reallocation.
    c.data.foldl (· ++ ·) (.emptyWithCapacity c.size)

/--
Build from a ByteArray directly.
-/
@[inline]
def ofByteArray (bs : ByteArray) : ChunkedBuffer :=
  { data := #[bs], size := bs.size }

/--
Build from an array of ByteArrays directly. The cached `size` is computed
as the sum of the sizes of all chunks.
-/
@[inline]
def ofArray (bs : Array ByteArray) : ChunkedBuffer :=
  { data := bs, size := bs.foldl (· + ·.size) 0 }

/--
Check if the buffer contains zero bytes in total.
-/
@[inline]
def isEmpty (bb : ChunkedBuffer) : Bool :=
  bb.size = 0

instance : EmptyCollection ChunkedBuffer where
  emptyCollection := empty

instance : HAppend ChunkedBuffer ChunkedBuffer ChunkedBuffer where
  hAppend := append

instance : Coe ByteArray ChunkedBuffer where
  coe := ofByteArray

instance : Coe (Array ByteArray) ChunkedBuffer where
  coe := ofArray

instance : Append ChunkedBuffer where
  append := append

instance : Repr ChunkedBuffer where
  reprPrec bb _ := s!"ChunkedBuffer.ofArray {bb.data}"

end Std.Http.Internal.ChunkedBuffer

View File

@@ -0,0 +1,38 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Internal.ChunkedBuffer
public import Std.Internal.Http.Data.Version
public section
/-!
# Encode
Serializes types to a `ChunkedBuffer` containing their canonical HTTP representation for a specific
protocol version.
-/
namespace Std.Http.Internal
set_option linter.all true
/--
Serializes a type `t` to a `ChunkedBuffer` containing its canonical HTTP representation for protocol
version `v`.
-/
class Encode (v : Version) (t : Type) where
  /--
  Encodes a value of type `t` onto the end of a `ChunkedBuffer`.
  -/
  encode : ChunkedBuffer → t → ChunkedBuffer

instance : Encode .v11 Version where
  -- A version is serialized as its textual form, e.g. "HTTP/1.1".
  encode buffer := buffer.writeString ∘ toString
end Std.Http.Internal

View File

@@ -0,0 +1,115 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Init.Data.String
public section
/-!
# Case-Insensitive Utilities
This module provides predicates and normalization functions to handle ASCII case-insensitivity. It
includes proofs of idempotency for lowercase transformations and utilities for validating lowercase
state in both `String` and `ByteArray` types.
These utilities are foundational for protocol elements (like HTTP headers) that require
case-insensitive handling.
-/
namespace Std.Http.Internal
set_option linter.all true
/--
Predicate asserting that a string is already in lowercase normal form.
-/
abbrev String.IsLowerCase (s : String) : Prop :=
  s.toLower = s

namespace String.IsLowerCase

-- `Char.toLower` is idempotent: lowering an already-lowered character is a no-op.
private theorem Char.toLower_toLower (c : Char) : c.toLower.toLower = c.toLower := by
  unfold Char.toLower
  have _hSub : ('a'.val - 'A'.val).toNat = 32 := by native_decide
  have _hZ : 'Z'.val.toNat = 90 := by native_decide
  have _hA : 'A'.val.toNat = 65 := by native_decide
  split <;> rename_i h
  · simp only [UInt32.le_iff_toNat_le] at h
    split <;> rename_i h'
    · simp only [UInt32.le_iff_toNat_le, UInt32.toNat_add] at h'; omega
    · rfl
  · rfl

/--
Proof that applying `toLower` to any string results in a string that
satisfies the `IsLowerCase` predicate.
-/
theorem lower_isLowerCase {s : String} : IsLowerCase s.toLower := by
  unfold IsLowerCase String.toLower
  exact String.map_idempotent Char.toLower_toLower

/-- The empty string is trivially in lowercase normal form. -/
theorem empty_isLowerCase : IsLowerCase "" := by
  native_decide

instance (x : String) : Decidable (IsLowerCase x) :=
  inferInstanceAs (Decidable (x.toLower = x))

end String.IsLowerCase
/--
Returns the lowercase version of an ASCII byte.
If the byte is not an uppercase ASCII letter (A-Z), it returns the byte unchanged.
-/
@[inline]
def toLower (c : UInt8) : UInt8 :=
  -- 0x41-0x5A is 'A'-'Z'; adding 0x20 (= 0x61 - 0x41) maps it to 'a'-'z'.
  if c ≥ 0x41 ∧ c ≤ 0x5A then c + (0x61 - 0x41) else c

namespace ByteArray

/--
Returns `true` if the byte is not an uppercase ASCII letter.
-/
def isLower (c : UInt8) : Bool :=
  ¬(c ≥ 0x41 ∧ c ≤ 0x5A)

/--
Theorem proving that the result of `toLower` always satisfies the `isLower` predicate.
-/
theorem toLower_isLower {x : UInt8} : isLower (toLower x) := by
  unfold isLower toLower
  split <;> rename_i h
  · have h₀ : 65 ≤ x.toNat := UInt8.ofNat_le_iff (by decide) |>.mp h.left
    have h₁ : x.toNat ≤ 90 := UInt8.le_ofNat_iff (by decide) |>.mp h.right
    have h₄ : 90 < x.toNat + 32 := by omega
    have h₅ : x.toNat + 32 < 256 := by omega
    simp
    exact Or.inr (UInt8.lt_ofNat_iff h₅ |>.mpr h₄)
  · simp [h]
/--
Predicate asserting that all bytes in a `ByteArray` satisfy `isLower`.
-/
abbrev IsLowerCase (s : ByteArray) : Prop :=
  s.data.all isLower

/-- The empty byte array is trivially lowercase. -/
theorem IsLowerCase.empty : IsLowerCase .empty := by
  native_decide

/-- Pushing a lowercase byte preserves the `IsLowerCase` invariant. -/
theorem IsLowerCase.push {bs : ByteArray} (h : IsLowerCase bs) (h₁ : isLower c) :
    IsLowerCase (bs.push c) := by
  simpa [IsLowerCase, ByteArray.push, Array.all_push, And.intro h h₁]

/--
Transforms a `ByteArray` into a lowercase version, returning a `Subtype`
containing the new array and a proof that it satisfies `IsLowerCase`.
-/
def IsLowerCase.toLowerCase (x : ByteArray) : { s : ByteArray // IsLowerCase s } :=
  x.foldl (fun ⟨b, p⟩ c => ⟨b.push (toLower c), push p (toLower_isLower)⟩) ⟨ByteArray.empty, IsLowerCase.empty⟩

end ByteArray
end Std.Http.Internal

View File

@@ -0,0 +1,215 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Data.HashMap
public section
/-!
# MultiMap
This module defines a generic `MultiMap` type that maps keys to multiple values.
The implementation is optimized for fast lookups and insertions while ensuring
that each key always has at least one associated value.
-/
namespace Std
open Std Internal
set_option linter.all true
/--
A structure for managing key-value pairs where each key can have multiple values.

Invariant: each key must have at least one value.
-/
structure MultiMap (α : Type u) (β : Type v) [BEq α] [Hashable α] where
  /--
  The internal hashmap that stores all the data.
  Each key maps to a non-empty array of values.
  -/
  data : HashMap α { arr : Array β // arr.size > 0 }
  deriving Inhabited, Repr
namespace MultiMap
variable {α : Type u} {β : Type v} [BEq α] [Hashable α]
/-- Membership in a `MultiMap` is membership in the underlying `HashMap`. -/
instance : Membership α (MultiMap α β) where
  mem map key := key ∈ map.data

/-- Key membership is decidable, inherited from `HashMap`. -/
instance (key : α) (map : MultiMap α β) : Decidable (key ∈ map) :=
  inferInstanceAs (Decidable (key ∈ map.data))
/--
Retrieves the first value for the given key.
-/
@[inline]
def get (map : MultiMap α β) (key : α) (h : key ∈ map) : β :=
  let arr := map.data.get key h
  -- The non-emptiness proof carried by the subtype justifies index 0.
  arr.val[0]'(arr.property)

/--
Retrieves all values for the given key.
-/
@[inline]
def getAll (map : MultiMap α β) (key : α) (h : key ∈ map) : Array β :=
  map.data.get key h

/--
Retrieves all values for the given key.
Returns `none` if the key is absent.
-/
@[inline]
def getAll? (map : MultiMap α β) (key : α) : Option (Array β) :=
  if h : key ∈ map then
    some (map.getAll key h)
  else
    none

/--
Retrieves the first value for the given key.
Returns `none` if the key is absent.
-/
@[inline]
def get? (map : MultiMap α β) (key : α) : Option β :=
  if h : key ∈ map then
    some (map.get key h)
  else
    none
/--
Checks if the key-value pair is present in the map.
-/
@[inline]
def hasEntry (map : MultiMap α β) [BEq β] (key : α) (value : β) : Bool :=
  -- `Array.any` expresses the existence test directly instead of
  -- materializing the found element via `find? … |>.isSome`.
  map.data.get? key
    |>.map (fun x => x.val.any (· == value))
    |>.getD false

/--
Retrieves the last value for the given key.
Returns `none` if the key is absent.
-/
@[inline]
def getLast? (map : MultiMap α β) (key : α) : Option β :=
  map.data.get? key
    |>.bind (fun x => x.val.back?)

/--
Like `get?`, but returns a default value if absent.
-/
@[inline]
def getD (map : MultiMap α β) (key : α) (d : β) : β :=
  map.get? key |>.getD d

/--
Like `get?`, but panics if absent.
-/
@[inline]
def get! [Inhabited β] (map : MultiMap α β) (key : α) : β :=
  map.get? key |>.get!
/--
Inserts a new key-value pair into the map.
If the key already exists, appends the value to existing values.
-/
@[inline]
def insert (map : MultiMap α β) (key : α) (value : β) : MultiMap α β :=
  if let some existingValues := map.data.get? key then
    let newArr := existingValues.val.push value
    { data := map.data.insert key ⟨newArr, by unfold newArr; simp⟩ }
  else
    { data := map.data.insert key ⟨#[value], by simp⟩ }

/--
Inserts a key with an array of values; `p` guarantees the array is non-empty,
preserving the `MultiMap` invariant.
-/
@[inline]
def insertMany (map : MultiMap α β) (key : α) (values : Array β) (p : values.size > 0) : MultiMap α β :=
  if let some existingValues := map.data.get? key then
    let newArr := existingValues.val ++ values
    { data := map.data.insert key ⟨newArr, by unfold newArr; simp [Array.size_append]; omega⟩ }
  else
    -- `p` already witnesses non-emptiness, so no runtime size check is needed
    -- (the previous version re-tested `values.size > 0` and dead-coded a
    -- branch returning `map` unchanged).
    { data := map.data.insert key ⟨values, p⟩ }
/--
Creates an empty multimap.
-/
def empty : MultiMap α β :=
  { data := ∅ }

/--
Creates a multimap from a list of key-value pairs.
NOTE(review): duplicate keys keep only the last pair (`HashMap.ofList`
semantics) rather than accumulating values — confirm this is intended.
-/
def ofList (pairs : List (α × β)) : MultiMap α β :=
  { data := HashMap.ofList (pairs.map (fun (k, v) => (k, ⟨#[v], by simp⟩))) }
/--
Checks if a key exists in the map.
-/
@[inline]
def contains (map : MultiMap α β) (key : α) : Bool :=
  map.data.contains key

/--
Removes a key and all its values from the map.
-/
@[inline]
def erase (map : MultiMap α β) (key : α) : MultiMap α β :=
  { data := map.data.erase key }

/--
Gets the number of keys in the map.
Counts distinct keys, not the total number of stored values.
-/
@[inline]
def size (map : MultiMap α β) : Nat :=
  map.data.size

/--
Checks if the map is empty.
-/
@[inline]
def isEmpty (map : MultiMap α β) : Bool :=
  map.data.isEmpty

/--
Merges two multimaps by appending the values of `map2` onto those of `map1`,
key by key. Note: neither map "takes precedence" — values for a shared key
are concatenated, `map1`'s first.
-/
def merge (map1 map2 : MultiMap α β) : MultiMap α β :=
  map2.data.fold (fun acc k v => acc.insertMany k v.val v.property) map1

/--
Converts the multimap to an array of key-value pairs (flattened).
-/
def toArray (map : MultiMap α β) : Array (α × β) :=
  map.data.toArray.flatMap (fun (k, vs) => vs.val.map (k, ·))

/--
Converts the multimap to a list of key-value pairs (flattened).
-/
def toList (map : MultiMap α β) : List (α × β) :=
  map.toArray.toList
/-- `{}`/`∅` denotes the empty multimap. -/
instance : EmptyCollection (MultiMap α β) :=
  ⟨MultiMap.empty⟩

/-- `{(a, b)}` builds a one-entry multimap. -/
instance : Singleton (α × β) (MultiMap α β) :=
  ⟨fun ⟨a, b⟩ => (∅ : MultiMap α β).insert a b⟩

/-- `insert (a, b) m` appends `b` to the values of `a`. -/
instance : Insert (α × β) (MultiMap α β) :=
  ⟨fun ⟨a, b⟩ m => m.insert a b⟩

/-- `m₁ ∪ m₂` concatenates values per key via `merge`. -/
instance : Union (MultiMap α β) :=
  ⟨merge⟩
end MultiMap
end Std

View File

@@ -0,0 +1,652 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Time
public import Std.Internal.Http.Data
public import Std.Internal.Http.Internal
public import Std.Internal.Http.Protocol.H1.Parser
public import Std.Internal.Http.Protocol.H1.Config
public import Std.Internal.Http.Protocol.H1.Message
public import Std.Internal.Http.Protocol.H1.Reader
public import Std.Internal.Http.Protocol.H1.Writer
public import Std.Internal.Http.Protocol.H1.Event
public section
/-!
# HTTP/1.1 Protocol State Machine
This module implements the core HTTP/1.1 protocol state machine that handles
parsing requests/responses and generating output. The machine is direction-aware,
supporting both server mode (receiving requests) and client mode (receiving responses).
-/
namespace Std.Http.Protocol.H1
set_option linter.all true
open Std Internal Parsec ByteArray
open Internal
/--
Results from a single step of the state machine.
-/
structure StepResult (dir : Direction) where
  /--
  Events that occurred during this step (e.g., headers received, data available, errors).
  -/
  events : Array (Event dir) := #[]
  /--
  Output data ready to be sent to the socket.
  -/
  output : ChunkedBuffer := .empty

/--
The HTTP 1.1 protocol state machine.
-/
structure Machine (dir : Direction) where
  /--
  The state of the reader.
  -/
  reader : Reader dir := {}
  /--
  The state of the writer.
  -/
  writer : Writer dir := {}
  /--
  The configuration.
  -/
  config : Config
  /--
  Events that happened during reading and writing.
  -/
  events : Array (Event dir) := #[]
  /--
  Error thrown by the machine.
  -/
  error : Option Error := none
  /--
  The timestamp for the `Date` header.
  -/
  instant : Option (Std.Time.DateTime .UTC) := none
  /--
  If the connection will be kept alive after the message.
  Defaults to the configured keep-alive policy.
  -/
  keepAlive : Bool := config.enableKeepAlive
  /--
  Whether a forced flush has been requested by the user.
  -/
  forcedFlush : Bool := false
  /--
  Host header. Used to populate a missing `Host` header when serializing an
  outgoing message head in `setHeaders`.
  -/
  host : Option Header.Value := none
namespace Machine
/-- Apply `fn` to the writer, keeping the rest of the machine unchanged. -/
@[inline]
private def modifyWriter (machine : Machine dir) (fn : Writer dir → Writer dir) : Machine dir :=
  { machine with writer := fn machine.writer }

/-- Apply `fn` to the reader, keeping the rest of the machine unchanged. -/
@[inline]
private def modifyReader (machine : Machine dir) (fn : Reader dir → Reader dir) : Machine dir :=
  { machine with reader := fn machine.reader }

/-- Overwrite the reader's state field. -/
@[inline]
private def setReaderState (machine : Machine dir) (state : Reader.State dir) : Machine dir :=
  machine.modifyReader ({ · with state })

/-- Overwrite the writer's state field. -/
@[inline]
private def setWriterState (machine : Machine dir) (state : Writer.State) : Machine dir :=
  machine.modifyWriter ({ · with state })

/-- Append an event to the machine's event queue. -/
@[inline]
private def addEvent (machine : Machine dir) (event : Event dir) : Machine dir :=
  { machine with events := machine.events.push event }

/-- Append an optional event; `none` is a no-op. -/
@[inline]
private def setEvent (machine : Machine dir) (event : Option (Event dir)) : Machine dir :=
  match event with
  | some event => machine.addEvent event
  | none => machine

/-- Record an error on the machine. -/
@[inline]
private def setError (machine : Machine dir) (error : Error) : Machine dir :=
  { machine with error := some error }

/-- Force the connection to close after the current message. -/
@[inline]
private def disableKeepAlive (machine : Machine dir) : Machine dir :=
  { machine with keepAlive := false }

/-- Emit a `failed` event, move the reader to `failed`, and record the error. -/
@[inline]
private def setFailure (machine : Machine dir) (error : H1.Error) : Machine dir :=
  machine
    |>.addEvent (.failed error)
    |>.setReaderState (.failed error)
    |>.setError error

/-- Conjoin a new keep-alive verdict: keep-alive can only be turned off,
never back on (`&&` is monotone downwards). -/
@[inline]
private def updateKeepAlive (machine : Machine dir) (should : Bool) : Machine dir :=
  { machine with keepAlive := machine.keepAlive && should }
-- Helper Functions

/-- Validate `Transfer-Encoding`: `some true` iff the final coding is
`chunked`, `some false` if the header is absent, `none` on invalid
configurations (duplicate `chunked`, or `chunked` not last per RFC 9112). -/
private def isChunked (headers : Headers) : Option Bool :=
  if let some res := headers.get? Header.Name.transferEncoding then
    let encodings := res.value.split "," |>.toArray.map (·.trimAscii.toString.toLower)
    if encodings.isEmpty then
      none
    else
      let chunkedCount := encodings.filter (· == "chunked") |>.size
      let lastIsChunked := encodings.back? == some "chunked"
      if chunkedCount > 1 then
        none
      else if chunkedCount = 1 ∧ ¬lastIsChunked then
        none
      else
        some lastIsChunked
  else
    some false

/-- Derive the body length from `Content-Length`/`Transfer-Encoding`;
`none` when neither yields a valid, unambiguous length. -/
private def extractBodyLengthFromHeaders (headers : Headers) : Option Body.Length :=
  match (headers.get? Header.Name.contentLength, isChunked headers) with
  | (some cl, some false) => Body.Length.fixed <$> cl.value.toNat?
  | (_, some true) => some Body.Length.chunked
  | _ => none

/-- Validate a message head and compute its body length. Incoming requests
must carry a `Host` header; `HEAD`/`CONNECT` requests are treated as bodyless. -/
private def checkMessageHead (message : Message.Head dir) : Option Body.Length := do
  match dir with
  | .receiving => guard (message.headers.get? Header.Name.host |>.isSome)
  | .sending => pure ()
  if let .receiving := dir then
    if message.method == .head || message.method == .connect then
      return .fixed 0
  message.getSize
-- State Checks

/--
Returns `true` if the reader is in a failed state.
-/
@[inline]
def failed (machine : Machine dir) : Bool :=
  match machine.reader.state with
  | .failed _ => true
  | _ => false

/--
Returns `true` if the reader has completed successfully.
-/
@[inline]
def isReaderComplete (machine : Machine dir) : Bool :=
  match machine.reader.state with
  | .complete => true
  | _ => false

/--
Returns `true` if the reader is closed.
-/
@[inline]
def isReaderClosed (machine : Machine dir) : Bool :=
  match machine.reader.state with
  | .closed => true
  | _ => false
/--
Returns `true` if the machine should flush buffered output: on failure, on a
closed reader, when the writer is ready to send, or once the body size is
known (so length headers can be emitted).
-/
@[inline]
def shouldFlush (machine : Machine dir) : Bool :=
  machine.failed
    || machine.reader.state == .closed
    || machine.writer.isReadyToSend
    || machine.writer.knownSize.isSome
/--
Returns `true` if the writer is waiting for headers of a new message,
i.e. it is in `waitingHeaders` and no head has been sent yet.
-/
@[inline]
def isWaitingMessage (machine : Machine dir) : Bool :=
  machine.writer.state == .waitingHeaders ∧ ¬machine.writer.sentMessage
/--
Returns `true` if both reader and writer are closed and no output remains
to be drained by `takeOutput`.
-/
@[inline]
def halted (machine : Machine dir) : Bool :=
  match machine.reader.state, machine.writer.state with
  | .closed, .closed => machine.writer.outputData.isEmpty
  | _, _ => false
/-- Run `parser` over the reader's buffered input. On success the consumed
input is committed. On EOF, fail with `connectionClosed` if no more input is
coming, with `badMessage` if the bytes consumed before stalling reached
`limit`, otherwise emit `needMoreData expect`. Any other parse error is a
`badMessage`. -/
private def parseWith (machine : Machine dir) (parser : Parser α) (limit : Option Nat)
    (expect : Option Nat := none) : Machine dir × Option α :=
  let remaining := machine.reader.input.remainingBytes
  match parser machine.reader.input with
  | .success buffer result =>
    ({ machine with reader := machine.reader.setInput buffer }, some result)
  | .error it .eof =>
    let usedBytesUntilFailure := remaining - it.remainingBytes
    if machine.reader.noMoreInput then
      (machine.setFailure .connectionClosed, none)
    else if let some limit := limit then
      if usedBytesUntilFailure ≥ limit
      then (machine.setFailure .badMessage, none)
      else (machine.addEvent (.needMoreData expect), none)
    else
      (machine.addEvent (.needMoreData expect), none)
  | .error _ _ =>
    (machine.setFailure .badMessage, none)
-- Message Processing

/-- Reset reader and writer for the next message on a kept-alive connection
(preserving unread input and unflushed output, bumping `messageCount` and
emitting `next`); otherwise emit `close` and close both sides. -/
private def resetForNextMessage (machine : Machine ty) : Machine ty :=
  if machine.keepAlive then
    { machine with
      reader := {
        state := .needStartLine,
        input := machine.reader.input,
        messageHead := {},
        messageCount := machine.reader.messageCount + 1
      },
      writer := {
        userData := .empty,
        outputData := machine.writer.outputData,
        state := .pending,
        knownSize := none,
        messageHead := {},
        userClosedBody := false,
        sentMessage := false
      },
      events := machine.events.push .next,
      error := none
    }
  else
    machine.addEvent .close
      |>.setWriterState .closed
      |>.setReaderState .closed
/-
This function processes the message we are receiving: updates keep-alive
(message budget plus the head's own verdict), validates the head, derives
the body length, emits `endHeaders`, and moves the reader into the matching
body state while asking the user for an answer.
-/
private def processHeaders (machine : Machine dir) : Machine dir :=
  let machine := machine.updateKeepAlive (machine.reader.messageCount + 1 < machine.config.maxMessages)
  let shouldKeepAlive : Bool := machine.reader.messageHead.shouldKeepAlive
  let machine := updateKeepAlive machine shouldKeepAlive
  match checkMessageHead machine.reader.messageHead with
  | none => machine.setFailure .badMessage
  | some size =>
    -- Translate the body length into the reader state that consumes it.
    let size := match size with
      | .fixed n => .needFixedBody n
      | .chunked => .needChunkedSize
    let machine := machine.addEvent (.endHeaders machine.reader.messageHead)
    machine.setReaderState size
      |>.setWriterState .waitingHeaders
      |>.addEvent .needAnswer
/--
This processes the message we are sending: fills in derived headers (`Host`,
identity, `Connection: close`, body-length) and serializes the head into the
writer's output buffer, transitioning the writer to `writingBody`.
-/
def setHeaders (messageHead : Message.Head dir.swap) (machine : Machine dir) : Machine dir :=
  let machine := machine.updateKeepAlive (machine.reader.messageCount + 1 < machine.config.maxMessages)
  let shouldKeepAlive := messageHead.shouldKeepAlive
  let machine := machine.updateKeepAlive shouldKeepAlive
  let size := Writer.determineTransferMode machine.writer
  -- Add a Host header when the message lacks one and the machine knows one.
  let headers :=
    if messageHead.headers.contains Header.Name.host then
      messageHead.headers
    else if let some host := machine.host then
      messageHead.headers.insert Header.Name.host host
    else
      messageHead.headers
  -- Add identity header based on direction
  let headers :=
    let identityOpt := machine.config.identityHeader
    match dir, identityOpt with
    | .receiving, some server => headers.insert Header.Name.server server
    | .sending, some userAgent => headers.insert Header.Name.userAgent userAgent
    | _, none => headers
  -- Add Connection: close if needed
  let headers :=
    if !machine.keepAlive && !(headers.hasEntry Header.Name.connection Header.Value.close) then
      headers.insert Header.Name.connection Header.Value.close
    else
      headers
  -- Add Content-Length or Transfer-Encoding if needed
  let headers :=
    if !(headers.contains Header.Name.contentLength || headers.contains Header.Name.transferEncoding) then
      match size with
      | .fixed n => headers.insert Header.Name.contentLength (.ofString! <| toString n)
      | .chunked => headers.insert Header.Name.transferEncoding Header.Value.chunked
    else
      headers
  let state := Writer.State.writingBody size
  -- The match on `dir` lets `toString` pick the request/response instance.
  let messageHead :=
    match dir, messageHead with
    | .receiving, messageHead => toString { messageHead with headers }
    | .sending, messageHead => toString { messageHead with headers }
  machine.modifyWriter (fun writer => {
    writer with
    outputData := writer.outputData.append messageHead.toUTF8,
    state
  })
/-- Put some data inside the input of the machine. Ignored once the reader
is closed. -/
@[inline]
def feed (machine : Machine ty) (data : ByteArray) : Machine ty :=
  if machine.isReaderClosed then
    machine
  else
    { machine with reader := machine.reader.feed data }

/-- Signal that reader is not going to receive any more messages. -/
@[inline]
def closeReader (machine : Machine dir) : Machine dir :=
  machine.modifyReader ({ · with noMoreInput := true })

/-- Signal that the writer cannot send more messages because the socket closed. -/
@[inline]
def closeWriter (machine : Machine dir) : Machine dir :=
  machine.modifyWriter ({ · with state := .closed, userClosedBody := true })

/-- Signal that the user is not sending data anymore. -/
@[inline]
def userClosedBody (machine : Machine dir) : Machine dir :=
  machine.modifyWriter ({ · with userClosedBody := true })

/-- Signal that the socket is not sending data anymore.
NOTE(review): identical to `closeReader` — consider consolidating. -/
@[inline]
def noMoreInput (machine : Machine dir) : Machine dir :=
  machine.modifyReader ({ · with noMoreInput := true })

/-- Set a known size for the message body. The first value wins: an already
set `knownSize` is kept (`Option.or`). -/
@[inline]
def setKnownSize (machine : Machine dir) (size : Body.Length) : Machine dir :=
  machine.modifyWriter (fun w => { w with knownSize := w.knownSize.or (some size) })
/-- Send the head of a message to the machine. Ignored unless the writer is
waiting for a new message head. -/
@[inline]
def send (machine : Machine dir) (message : Message.Head dir.swap) : Machine dir :=
  if machine.isWaitingMessage then
    let machine := machine.modifyWriter ({ · with messageHead := message, sentMessage := true })
    -- Derive the body length from the headers when the user did not set one.
    let machine :=
      if machine.writer.knownSize.isNone then
        match extractBodyLengthFromHeaders message.headers with
        | some size => machine.setKnownSize size
        | none => machine
      else
        machine
    machine.setWriterState .waitingForFlush
  else
    machine

/-- Send data to the socket. Empty batches are a no-op. -/
@[inline]
def sendData (machine : Machine dir) (data : Array Chunk) : Machine dir :=
  if data.isEmpty then
    machine
  else
    machine.modifyWriter (fun writer => { writer with userData := writer.userData ++ data })

/-- Get all the events of the machine, clearing the internal queue. -/
@[inline]
def takeEvents (machine : Machine dir) : Machine dir × Array (Event dir) :=
  ({ machine with events := #[] }, machine.events)

/-- Take all the accumulated output to send to the socket, leaving the
writer's buffer empty. -/
@[inline]
def takeOutput (machine : Machine dir) : Machine dir × ChunkedBuffer :=
  let output := machine.writer.outputData
  ({ machine with writer := { machine.writer with outputData := .empty } }, output)
/-- Process the writer part of the machine: advance the writer state as far
as possible, serializing body data and handling completion/keep-alive. -/
partial def processWrite (machine : Machine dir) : Machine dir :=
  match machine.writer.state with
  | .pending =>
    -- Nothing to write yet; mirror a closed reader by closing the writer.
    if machine.reader.isClosed then
      machine.closeWriter
    else
      machine
  | .waitingHeaders =>
    machine.addEvent .needAnswer
  | .waitingForFlush =>
    if machine.shouldFlush then
      machine.setHeaders machine.writer.messageHead
        |> processWrite
    else
      machine
  | .writingHeaders =>
    machine.setWriterState (.writingBody (Writer.determineTransferMode machine.writer))
      |> processWrite
  | .writingBody (.fixed n) =>
    -- NOTE(review): a boolean connective appears to have been lost from this
    -- condition in rendering — confirm against upstream.
    if machine.writer.userData.size > 0 machine.writer.isReadyToSend then
      let (writer, remaining) := Writer.writeFixedBody machine.writer n
      let machine := { machine with writer }
      if machine.writer.isReadyToSend remaining = 0 then
        machine.setWriterState .complete |> processWrite
      else
        machine.setWriterState (.writingBody (.fixed remaining))
    else
      machine
  | .writingBody .chunked =>
    if machine.writer.userClosedBody then
      -- Terminate the chunked stream with the final zero-length chunk.
      machine.modifyWriter Writer.writeFinalChunk
        |>.setWriterState .complete
        |> processWrite
    else if machine.writer.userData.size > 0 machine.writer.isReadyToSend then
      machine.modifyWriter Writer.writeChunkedBody
        |> processWrite
    else
      machine
  | .shuttingDown =>
    if machine.writer.outputData.isEmpty then
      machine.setWriterState .complete |> processWrite
    else
      machine
  | .complete =>
    -- Either recycle the connection for the next message or close it.
    if machine.isReaderComplete then
      if machine.keepAlive then
        resetForNextMessage machine
      else
        machine.setWriterState .closed
          |>.addEvent .close
    else if machine.isReaderClosed then
      machine.setWriterState .closed
        |>.addEvent .close
    else
      if machine.keepAlive then
        machine
      else
        machine.setWriterState .closed
  | .closed =>
    machine
/-- Handle the failed state for the reader. In server mode (`receiving`) a
`400 Bad Request` head is queued with keep-alive disabled before closing;
in client mode the reader is simply closed. -/
private def handleReaderFailed (machine : Machine dir) (error : H1.Error) : Machine dir :=
  let machine : Machine dir :=
    match dir with
    | .receiving => machine
        |>.setWriterState .waitingHeaders
        |>.disableKeepAlive
        |>.send { status := .badRequest } |>.userClosedBody
    | .sending => machine
  machine
    |>.setReaderState .closed
    |>.addEvent (.failed error)
    |>.setError error
/-- Process the reader part of the machine: parse as much of the start line,
headers, and body as the buffered input allows, emitting events along the way. -/
partial def processRead (machine : Machine dir) : Machine dir :=
  match machine.reader.state with
  | .needStartLine =>
    -- NOTE(review): a boolean connective appears to have been lost from this
    -- condition in rendering — confirm against upstream.
    if machine.reader.noMoreInput machine.reader.input.atEnd then
      machine.setReaderState .closed
    else if machine.reader.input.atEnd then
      machine.addEvent (.needMoreData none)
    else
      -- Requests get a request line, responses a status line (8 KiB cap).
      let (machine, result) : Machine dir × Option (Message.Head dir) :=
        match dir with
        | .receiving => parseWith machine (parseRequestLine machine.config) (limit := some 8192)
        | .sending => parseWith machine (parseStatusLine machine.config) (limit := some 8192)
      if let some head := result then
        if head.version != .v11 then
          machine.setFailure .unsupportedVersion
        else
          machine
            |>.modifyReader (.setMessageHead head)
            |>.setReaderState (.needHeader 0)
            |> processRead
      else
        machine
  | .needHeader headerCount =>
    let (machine, result) := parseWith machine
      (parseSingleHeader machine.config) (limit := none)
    if headerCount > machine.config.maxHeaders then
      machine |>.setFailure .badMessage
    else
      if let some result := result then
        if let some (name, value) := result then
          -- `some (name, value)` is one header line; validate both parts.
          if let some (name, headerValue) := Prod.mk <$> Header.Name.ofString? name <*> Header.Value.ofString? value then
            machine
              |>.modifyReader (.addHeader name headerValue)
              |>.setReaderState (.needHeader (headerCount + 1))
              |> processRead
          else
            machine.setFailure .badMessage
        else
          -- `none` result means the blank line: headers are finished.
          processHeaders machine
            |> processRead
      else
        machine
  | .needChunkedSize =>
    let (machine, result) := parseWith machine (parseChunkSize machine.config) (limit := some 128)
    match result with
    | some (size, ext) =>
      machine
        |>.setReaderState (.needChunkedBody ext size)
        |> processRead
    | none =>
      machine
  | .needChunkedBody ext 0 =>
    -- Size zero marks the last chunk; consume its terminator and finish.
    let (machine, result) := parseWith machine (parseLastChunkBody machine.config) (limit := some 2)
    match result with
    | some _ =>
      machine
        |>.setReaderState .complete
        |>.addEvent (.gotData true ext .empty)
        |> processRead
    | none =>
      machine
  | .needChunkedBody ext size =>
    let (machine, result) := parseWith machine
      (parseChunkedSizedData size) (limit := none) (some size)
    if let some body := result then
      match body with
      | .complete body =>
        machine
          |>.setReaderState .needChunkedSize
          |>.addEvent (.gotData false ext body)
          |> processRead
      | .incomplete body remaining =>
        machine
          |>.setReaderState (.needChunkedBody ext remaining)
          |>.addEvent (.gotData false ext body)
    else
      machine
  | .needFixedBody 0 =>
    machine
      |>.setReaderState .complete
      |>.addEvent (.gotData true #[] .empty)
      |> processRead
  | .needFixedBody size =>
    let (machine, result) := parseWith machine (parseFixedSizeData size) (limit := none) (some size)
    if let some body := result then
      match body with
      | .complete body =>
        machine
          |>.setReaderState .complete
          |>.addEvent (.gotData true #[] body)
          |> processRead
      | .incomplete body remaining =>
        machine
          |>.setReaderState (.needFixedBody remaining)
          |>.addEvent (.gotData false #[] body)
    else
      machine
  | .complete =>
    -- NOTE(review): connectives lost in rendering here as well — confirm.
    if (machine.reader.noMoreInput machine.reader.input.atEnd) ¬machine.keepAlive then
      machine.setReaderState .closed
    else
      machine
  | .closed =>
    machine
  | .failed error =>
    handleReaderFailed machine error
/--
Execute one step of the state machine: drive the reader then the writer,
then hand back the drained events and pending output alongside the
updated machine.
-/
def step (machine : Machine dir) : Machine dir × StepResult dir :=
  let advanced := machine.processRead |>.processWrite
  let (afterEvents, events) := advanced.takeEvents
  let (finalMachine, output) := afterEvents.takeOutput
  (finalMachine, { events, output })
end Std.Http.Protocol.H1.Machine

View File

@@ -0,0 +1,95 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Data
public import Std.Internal.Http.Internal
public section
/-!
# HTTP/1.1 Configuration
This module defines the configuration options for HTTP/1.1 protocol processing,
including connection limits, header constraints, and various size limits.
-/
namespace Std.Http.Protocol.H1
set_option linter.all true
open Std Internal Parsec ByteArray
open Internal
/--
Connection limits configuration with validation. All limits bound attacker-
controlled input sizes during HTTP/1.1 parsing.
-/
structure Config where
  /--
  Maximum number of messages per connection.
  -/
  maxMessages : Nat := 100
  /--
  Maximum number of headers allowed per message.
  -/
  maxHeaders : Nat := 100
  /--
  Whether to enable keep-alive connections by default.
  -/
  enableKeepAlive : Bool := true
  /--
  The server name (for sending responses) or user agent (for sending requests).
  `none` disables the identity header entirely.
  -/
  identityHeader : Option Header.Value := some (.new "LeanServer")
  /--
  Maximum length of HTTP method token (default: 16 bytes)
  -/
  maxMethodLength : Nat := 16
  /--
  Maximum length of request URI (default: 8192 bytes)
  -/
  maxUriLength : Nat := 8192
  /--
  Maximum length of header field name (default: 256 bytes)
  -/
  maxHeaderNameLength : Nat := 256
  /--
  Maximum length of header field value (default: 8192 bytes)
  -/
  maxHeaderValueLength : Nat := 8192
  /--
  Maximum number of spaces in delimiter sequences (default: 256)
  -/
  maxSpaceSequence : Nat := 256
  /--
  Maximum length of chunk extension name (default: 256 bytes)
  -/
  maxChunkExtNameLength : Nat := 256
  /--
  Maximum length of chunk extension value (default: 256 bytes)
  -/
  maxChunkExtValueLength : Nat := 256
  /--
  Maximum length of reason phrase (default: 512 bytes)
  -/
  maxReasonPhraseLength : Nat := 512
  /--
  Maximum number of trailer headers (default: 100)
  -/
  maxTrailerHeaders : Nat := 100

View File

@@ -0,0 +1,98 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Time
public import Std.Internal.Http.Data
public import Std.Internal.Http.Internal
public import Std.Internal.Http.Protocol.H1.Parser
public import Std.Internal.Http.Protocol.H1.Config
public import Std.Internal.Http.Protocol.H1.Message
public section
/-!
# HTTP/1.1 Errors
This module defines the error types for HTTP/1.1 protocol processing,
including parsing errors, timeout errors, and connection errors.
-/
namespace Std.Http.Protocol.H1
set_option linter.all true
/--
Specific HTTP processing errors with detailed information.
-/
inductive Error
  /--
  Malformed request line or status line.
  -/
  | invalidStatusLine
  /--
  Invalid or malformed header.
  -/
  | invalidHeader
  /--
  Request timeout occurred.
  -/
  | timeout
  /--
  Request entity too large.
  -/
  | entityTooLarge
  /--
  Unsupported HTTP method.
  -/
  | unsupportedMethod
  /--
  Unsupported HTTP version.
  -/
  | unsupportedVersion
  /--
  Invalid chunk encoding.
  -/
  | invalidChunk
  /--
  Connection closed.
  -/
  | connectionClosed
  /--
  Bad request/response.
  -/
  | badMessage
  /--
  Generic error with message.
  -/
  | other (message : String)
  deriving Repr, BEq
/-- Human-readable rendering of `Error`, suitable for logs. -/
instance : ToString Error where
  toString
    | .invalidStatusLine => "Invalid status line"
    | .invalidHeader => "Invalid header"
    | .timeout => "Timeout"
    | .entityTooLarge => "Entity too large"
    | .unsupportedMethod => "Unsupported method"
    | .unsupportedVersion => "Unsupported version"
    | .invalidChunk => "Invalid chunk"
    | .connectionClosed => "Connection closed"
    | .badMessage => "Bad message"
    | .other msg => s!"Other error: {msg}"

/-- Render a `ByteSlice` via its underlying byte array.
NOTE(review): this instance is unrelated to `Error` — consider moving it
next to the `ByteSlice` definition. -/
instance : Repr ByteSlice where
  reprPrec x := reprPrec x.toByteArray.data

View File

@@ -0,0 +1,73 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Time
public import Std.Internal.Http.Data
public import Std.Internal.Http.Internal
public import Std.Internal.Http.Protocol.H1.Parser
public import Std.Internal.Http.Protocol.H1.Config
public import Std.Internal.Http.Protocol.H1.Message
public import Std.Internal.Http.Protocol.H1.Error
public section
/-!
# HTTP/1.1 Events
This module defines the events that can occur during HTTP/1.1 message processing,
including header completion, data arrival, and error conditions.
-/
namespace Std.Http.Protocol.H1
set_option linter.all true
/--
Events emitted during HTTP message processing.
-/
inductive Event (dir : Direction)
  /--
  Indicates that all headers have been successfully parsed.
  -/
  | endHeaders (head : Message.Head dir)
  /--
  Carries a chunk of message body data. `final` marks the end of the body;
  `ext` carries chunk extensions (empty for fixed-length bodies).
  -/
  | gotData (final : Bool) (ext : Array (String × Option String)) (data : ByteSlice)
  /--
  Signals that additional input data is required to continue processing.
  `size` is a hint of how many bytes are expected, when known.
  -/
  | needMoreData (size : Option Nat)
  /--
  Indicates a failure during parsing or processing.
  -/
  | failed (err : Error)
  /--
  Requests that the connection be closed.
  -/
  | close
  /--
  Indicates that a response is required.
  -/
  | needAnswer
  /--
  Indicates that a message body is required.
  -/
  | needBody
  /--
  Indicates readiness to process the next message.
  -/
  | next
  deriving Inhabited, Repr

View File

@@ -0,0 +1,129 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Data
public section
/-!
# Message
This module provides types and operations for HTTP/1.1 messages, centered around the `Direction`
type which models the server's role in message exchange: `Direction.receiving` for parsing incoming
requests from clients, and `Direction.sending` for generating outgoing responses to clients.
The `Message.Head` type is parameterized by `Direction` and resolves to `Request.Head` or
`Response.Head` accordingly, enabling generic code that works uniformly across both phases
while exposing common operations such as headers, version, and `shouldKeepAlive`
-/
namespace Std.Http.Protocol.H1
set_option linter.all true
/--
Direction of message flow from the server's perspective.
-/
inductive Direction
  /--
  Receiving and parsing incoming requests from clients.
  -/
  | receiving
  /--
  Generating and sending outgoing responses to clients.
  -/
  | sending
  deriving BEq
/--
Inverts the direction of the requests.
-/
@[expose]
abbrev Direction.swap : Direction → Direction
  | .receiving => .sending
  | .sending => .receiving

/--
Gets the message head type based on direction: `Request.Head` when
receiving, `Response.Head` when sending.
-/
@[expose]
def Message.Head : Direction → Type
  | .receiving => Request.Head
  | .sending => Response.Head
/--
Gets the headers of a `Message`, dispatching on the direction.
-/
def Message.Head.headers (m : Message.Head dir) : Headers :=
  match dir with
  | .receiving => Request.Head.headers m
  | .sending => Response.Head.headers m

/--
Gets the version of a `Message`, dispatching on the direction.
-/
def Message.Head.version (m : Message.Head dir) : Version :=
  match dir with
  | .receiving => Request.Head.version m
  | .sending => Response.Head.version m
/--
Checks whether the message indicates that the connection should be kept
alive: the version is HTTP/1.1 and no `Connection: close` entry is present.
-/
@[inline]
def Message.Head.shouldKeepAlive (message : Message.Head dir) : Bool :=
  ¬message.headers.hasEntry (.new "connection") (.new "close")
    ∧ message.version = .v11
/-- `Repr` dispatches to the underlying head type's instance. -/
instance : Repr (Message.Head dir) :=
  match dir with
  | .receiving => inferInstanceAs (Repr Request.Head)
  | .sending => inferInstanceAs (Repr Response.Head)

/-- `ToString` dispatches to the underlying head type's instance. -/
instance : ToString (Message.Head dir) :=
  match dir with
  | .receiving => inferInstanceAs (ToString Request.Head)
  | .sending => inferInstanceAs (ToString Response.Head)

/-- The empty head of the corresponding direction. -/
instance : EmptyCollection (Message.Head dir) where
  emptyCollection :=
    match dir with
    | .receiving => {}
    | .sending => {}
/-- Validate the `Transfer-Encoding` header: `some true` iff the final
coding is `chunked`, `some false` when the header is absent, `none` on
invalid configurations (duplicate `chunked`, or `chunked` not last).
NOTE(review): duplicated in the `Machine` module — consider sharing. -/
private def isChunked (message : Message.Head dir) : Option Bool :=
  let headers := message.headers
  if let some res := headers.get? (.new "transfer-encoding") then
    let encodings := res.value.split "," |>.toArray.map (·.trimAscii.toString.toLower)
    if encodings.isEmpty then
      none
    else
      let chunkedCount := encodings.filter (· == "chunked") |>.size
      let lastIsChunked := encodings.back? == some "chunked"
      if chunkedCount > 1 then
        none
      else if chunkedCount = 1 ∧ ¬lastIsChunked then
        none
      else
        some lastIsChunked
  else
    some false
/--
Determines the message body size based on the `Content-Length` header and the
`Transfer-Encoding` (chunked) flag. Returns `none` when the combination is
invalid or ambiguous.
-/
@[inline]
def Message.Head.getSize (message : Message.Head dir) : Option Body.Length :=
  match (message.headers.getAll? (.new "content-length"), isChunked message) with
  | (some #[cl], some false) => .fixed <$> cl.value.toNat?
  | (none, some false) => some (.fixed 0)
  | (none, some true) => some .chunked
  | (some _, some _) => none -- To avoid request smuggling with multiple content-length headers.
  | (_, none) => none -- Error validating the chunked encoding

View File

@@ -0,0 +1,314 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Parsec
public import Std.Internal.Http.Data
public import Std.Internal.Parsec.ByteArray
public import Std.Internal.Http.Protocol.H1.Config
/-!
This module defines a parser for HTTP/1.1 requests. The reference used is https://httpwg.org/specs/rfc9112.html.
-/
namespace Std.Http.Protocol.H1
open Std Internal Parsec ByteArray Internal
set_option linter.all true
/-- `DIGIT`: ASCII `0`–`9`. -/
@[inline]
def isDigit (c : UInt8) : Bool :=
  c ≥ '0'.toUInt8 ∧ c ≤ '9'.toUInt8

/-- `ALPHA`: ASCII letters, either case. -/
@[inline]
def isAlpha (c : UInt8) : Bool :=
  (c ≥ 'a'.toUInt8 ∧ c ≤ 'z'.toUInt8) ∨ (c ≥ 'A'.toUInt8 ∧ c ≤ 'Z'.toUInt8)

/-- `VCHAR`: visible US-ASCII characters, `%x21-7E`. -/
@[inline]
def isVChar (c : UInt8) : Bool :=
  c ≥ 0x21 ∧ c ≤ 0x7E

/-- `tchar` from RFC 9110: the characters permitted in a token. -/
def isTokenCharacter (c : UInt8) : Bool :=
  isDigit c ∨ isAlpha c ∨ c == '!'.toUInt8 ∨ c == '#'.toUInt8 ∨ c == '$'.toUInt8 ∨ c == '%'.toUInt8
    ∨ c == '&'.toUInt8 ∨ c == '\''.toUInt8 ∨ c == '*'.toUInt8 ∨ c == '+'.toUInt8 ∨ c == '-'.toUInt8
    ∨ c == '.'.toUInt8 ∨ c == '^'.toUInt8 ∨ c == '_'.toUInt8 ∨ c == '`'.toUInt8 ∨ c == '|'.toUInt8
    ∨ c == '~'.toUInt8

/-- `obs-text`: high-bit bytes `%x80-FF`. -/
@[inline]
def isObsChar (c : UInt8) : Bool :=
  c ≥ 0x80 ∧ c ≤ 0xFF

/-- Bytes allowed inside a header field value: `field-vchar` plus SP/HTAB. -/
@[inline]
def isFieldVChar (c : UInt8) : Bool :=
  isVChar c ∨ isObsChar c ∨ c = ' '.toUInt8 ∨ c = '\t'.toUInt8

-- HTAB / SP / %x21 / %x23-5B / %x5D-7E / obs-text
/-- `qdtext` from RFC 9110: bytes allowed in a quoted string. -/
@[inline]
def isQdText (c : UInt8) : Bool :=
  c == '\t'.toUInt8
    ∨ c == ' '.toUInt8
    ∨ c == '!'.toUInt8
    ∨ (c ≥ '#'.toUInt8 ∧ c ≤ '['.toUInt8)
    ∨ (c ≥ ']'.toUInt8 ∧ c ≤ '~'.toUInt8)
    ∨ isObsChar c
-- Parser blocks

/--
Runs `parser` repeatedly, collecting the `some` results until it yields `none`.
Fails when more than `maxCount` items are collected.
-/
def manyItems {α : Type} (parser : Parser (Option α)) (maxCount : Nat) : Parser (Array α) := do
  let items ← many (attempt <| parser.bind (fun item => match item with
    | some x => return x
    | none => fail "end of items"))
  if items.size > maxCount then
    fail s!"Too many items: {items.size} > {maxCount}"
  return items

/-- Unwraps an `Option`, failing the parse when it is `none`. -/
def opt (x : Option α) : Parser α :=
  if let some res := x then
    return res
  else
    fail "expected value but got none"

/-- Parses a `token`: one or more `tchar` bytes, bounded by `limit`. -/
@[inline]
def token (limit : Nat) : Parser ByteSlice :=
  takeWhileUpTo1 isTokenCharacter limit

/-- Parses a line terminator: LF optionally preceded by CR. -/
@[inline]
def crlf : Parser Unit := do
  discard <| optional (skipByte '\r'.toUInt8)
  skipByte '\n'.toUInt8

/-- Skips one or more spaces (required separator), bounded by `limits.maxSpaceSequence`. -/
@[inline]
def rsp (limits : H1.Config) : Parser Unit :=
  discard <| takeWhileUpTo1 (· == ' '.toUInt8) limits.maxSpaceSequence

/-- Skips zero or more spaces (optional separator), bounded by `limits.maxSpaceSequence`. -/
@[inline]
def osp (limits : H1.Config) : Parser Unit :=
  discard <| takeWhileUpTo (· == ' '.toUInt8) limits.maxSpaceSequence

/-- Parses a single decimal digit, returned as its raw byte (callers subtract 48 for the value). -/
@[inline]
def uint8 : Parser UInt8 := do
  let d ← digit
  return d.toUInt8

/-- Parses a single hexadecimal digit and returns its numeric value (0–15). -/
def hexDigit : Parser UInt8 := do
  let b ← any
  if b ≥ '0'.toUInt8 && b ≤ '9'.toUInt8 then return b - '0'.toUInt8
  else if b ≥ 'A'.toUInt8 && b ≤ 'F'.toUInt8 then return b - 'A'.toUInt8 + 10
  else if b ≥ 'a'.toUInt8 && b ≤ 'f'.toUInt8 then return b - 'a'.toUInt8 + 10
  else fail s!"Invalid hex digit {Char.ofUInt8 b |>.quote}"

/-- Parses one or more hexadecimal digits into a `Nat`, most significant digit first. -/
@[inline]
def hex : Parser Nat := do
  let hexDigits ← many1 (attempt hexDigit)
  return (hexDigits.foldl (fun acc cur => acc * 16 + cur.toNat) 0)
-- Actual parsers

-- HTTP-version = HTTP-name "/" DIGIT "." DIGIT
-- HTTP-name = %s"HTTP"
/-- Parses the HTTP version (e.g. `HTTP/1.1`); fails on versions `Version.ofNumber?` rejects. -/
def parseHttpVersion : Parser Version := do
  skipBytes "HTTP/".toUTF8
  let major ← uint8
  skipByte '.'.toUInt8
  let minor ← uint8
  -- `uint8` yields raw ASCII bytes, so subtract 48 to obtain numeric digits.
  opt <| Version.ofNumber? (major - 48 |>.toNat) (minor - 48 |>.toNat)
-- method = token
/--
Parses a known HTTP method name.

`POST` and `PUT` are wrapped in `attempt` so that a failed match after consuming the
shared `P` prefix can backtrack and try `PATCH` at the end of the alternative chain.
-/
def parseMethod : Parser Method :=
  (skipBytes "GET".toUTF8 <&> fun _ => Method.get)
  <|> (skipBytes "HEAD".toUTF8 <&> fun _ => Method.head)
  <|> (attempt <| skipBytes "POST".toUTF8 <&> fun _ => Method.post)
  <|> (attempt <| skipBytes "PUT".toUTF8 <&> fun _ => Method.put)
  <|> (skipBytes "DELETE".toUTF8 <&> fun _ => Method.delete)
  <|> (skipBytes "CONNECT".toUTF8 <&> fun _ => Method.connect)
  <|> (skipBytes "OPTIONS".toUTF8 <&> fun _ => Method.options)
  <|> (skipBytes "TRACE".toUTF8 <&> fun _ => Method.trace)
  <|> (skipBytes "PATCH".toUTF8 <&> fun _ => Method.patch)
/-- Consumes the raw request-target: every byte up to the next space, bounded by `limits.maxUriLength`. -/
def parseURI (limits : H1.Config) : Parser ByteArray := do
  let uri ← takeUntilUpTo (· == ' '.toUInt8) limits.maxUriLength
  return uri.toByteArray
/--
Parses a request line.

request-line = method SP request-target SP HTTP-version
-/
public def parseRequestLine (limits : H1.Config) : Parser Request.Head := do
  let method ← parseMethod <* rsp limits
  let uri ← parseURI limits <* rsp limits
  -- Re-parse the raw target with the URI grammar; `eof` forces the whole slice to be consumed.
  let uri ← match (Std.Http.URI.Parser.parseRequestTarget <* eof).run uri with
    | .ok res => pure res
    | .error res => fail res
  let version ← parseHttpVersion <* crlf
  return ⟨method, version, uri, .empty⟩
-- field-line = field-name ":" OWS field-value OWS
/-- Parses one header field line into its name and value; both must decode as UTF-8. -/
def parseFieldLine (limits : H1.Config) : Parser (String × String) := do
  let name ← token limits.maxHeaderNameLength
  let value ← skipByte ':'.toUInt8 *> osp limits *> takeWhileUpTo1 isFieldVChar limits.maxHeaderValueLength <* osp limits
  let name ← opt <| String.fromUTF8? name.toByteArray
  let value ← opt <| String.fromUTF8? value.toByteArray
  return (name, value)
/--
Parses a single header.

field-line CRLF / CRLF

Returns `none` when the bare CRLF that terminates the header section is found.
-/
public def parseSingleHeader (limits : H1.Config) : Parser (Option (String × String)) := do
  let next ← peek?
  if next == some '\r'.toUInt8 ∨ next == some '\n'.toUInt8 then
    crlf
    pure none
  else
    some <$> (parseFieldLine limits <* crlf)
-- quoted-pair = "\" ( HTAB / SP / VCHAR / obs-text )
/-- Parses a backslash escape inside a quoted string, returning the escaped byte. -/
def parseQuotedPair : Parser UInt8 := do
  skipByte '\\'.toUInt8
  let b ← any
  if b == '\t'.toUInt8 ∨ b == ' '.toUInt8 ∨ isVChar b ∨ isObsChar b then
    return b
  else
    fail s!"invalid quoted-pair byte: {Char.ofUInt8 b |>.quote}"
-- quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE
/-- Parses a `quoted-string`, unescaping quoted-pairs; the unescaped bytes must be valid UTF-8. -/
partial def parseQuotedString : Parser String := do
  skipByte '"'.toUInt8
  let rec loop (buf : ByteArray) : Parser ByteArray := do
    let b ← any
    if b == '"'.toUInt8 then
      return buf
    else if b == '\\'.toUInt8 then
      -- Inlined quoted-pair handling: only HTAB, SP, VCHAR, or obs-text may be escaped.
      let next ← any
      if next == '\t'.toUInt8 ∨ next == ' '.toUInt8 ∨ isVChar next ∨ isObsChar next
        then loop (buf.push next)
        else fail s!"invalid quoted-pair byte: {Char.ofUInt8 next |>.quote}"
    else if isQdText b then
      loop (buf.push b)
    else
      fail s!"invalid qdtext byte: {Char.ofUInt8 b |>.quote}"
  opt <| String.fromUTF8? (← loop .empty)
-- chunk-ext = *( BWS ";" BWS chunk-ext-name [ BWS "=" BWS chunk-ext-val] )
/-- Parses one chunk extension: `;name`, optionally followed by `=value` (token or quoted string). -/
def parseChunkExt (limits : H1.Config) : Parser (String × Option String) := do
  osp limits *> skipByte ';'.toUInt8 *> osp limits
  let name ← (opt =<< String.fromUTF8? <$> ByteSlice.toByteArray <$> token limits.maxChunkExtNameLength) <* osp limits
  if (← peekWhen? (· == '='.toUInt8)) |>.isSome then
    osp limits *> skipByte '='.toUInt8 *> osp limits
    let value ← osp limits *> (parseQuotedString <|> opt =<< (String.fromUTF8? <$> ByteSlice.toByteArray <$> token limits.maxChunkExtValueLength))
    return (name, some value)
  return (name, none)
/--
Parses the chunk-size line: a hexadecimal size, any chunk extensions, and the terminating CRLF.
-/
public def parseChunkSize (limits : H1.Config) : Parser (Nat × Array (String × Option String)) := do
  let size ← hex
  let ext ← many (parseChunkExt limits)
  crlf
  return (size, ext)
/--
Result of parsing partial or complete information.
-/
public inductive TakeResult
  /-- All requested bytes were available. -/
  | complete (data : ByteSlice)
  /-- Input ran out: `data` holds what was read and `remaining` how many bytes are still missing. -/
  | incomplete (data : ByteSlice) (remaining : Nat)
/--
Parses a single chunk in chunked transfer encoding.

Returns `none` for the zero-sized chunk that marks the end of the body.
-/
public def parseChunk (limits : H1.Config) : Parser (Option (Nat × Array (String × Option String) × ByteSlice)) := do
  let (size, ext) ← parseChunkSize limits
  if size == 0 then
    return none
  else
    let data ← take size
    return some ⟨size, ext, data⟩
/--
Reads `size` bytes of body data directly from the iterator. If fewer bytes are buffered,
everything available is consumed and the missing byte count is reported via
`TakeResult.incomplete`; fails with `eof` when no bytes are buffered at all.
-/
public def parseFixedSizeData (size : Nat) : Parser TakeResult := fun it =>
  if it.remainingBytes = 0 then
    .error it .eof
  else if it.remainingBytes < size then
    .success (it.forward it.remainingBytes) (.incomplete it.array[it.idx...(it.idx+it.remainingBytes)] (size - it.remainingBytes))
  else
    .success (it.forward size) (.complete (it.array[it.idx...(it.idx+size)]))
/--
Parses chunk body data of the given size; a completely read chunk must be followed by
its CRLF terminator, while an incomplete read is returned as-is to be resumed later.
-/
public def parseChunkedSizedData (size : Nat) : Parser TakeResult := do
  match ← parseFixedSizeData size with
  | .complete data => crlf *> return .complete data
  | .incomplete data res => return .incomplete data res
/--
Parses a single trailer field (used after a chunked body); `none` signals the
terminating empty line, mirroring `parseSingleHeader`.
-/
def parseTrailerHeader (limits : H1.Config) : Parser (Option (String × String)) := parseSingleHeader limits
/--
Parses the trailer headers after a chunked body, followed by the final CRLF.
-/
public def parseTrailers (limits : H1.Config) : Parser (Array (String × String)) := do
  let trailers ← manyItems (parseTrailerHeader limits) limits.maxTrailerHeaders
  crlf
  return trailers
/--
Parses HTTP status code (3 digits).
-/
def parseStatusCode : Parser Status := do
  let d1 ← digit
  let d2 ← digit
  let d3 ← digit
  -- `digit` yields raw ASCII bytes; subtract 48 to get numeric values.
  let code := (d1.toNat - 48) * 100 + (d2.toNat - 48) * 10 + (d3.toNat - 48)
  return Status.ofCode code.toUInt16
/--
Parses the reason phrase (free text after the status code, up to the CR);
the bytes must decode as UTF-8.
-/
def parseReasonPhrase (limits : H1.Config) : Parser String := do
  let bytes ← takeWhileUpTo (fun c => c != '\r'.toUInt8) limits.maxReasonPhraseLength
  opt <| String.fromUTF8? bytes.toByteArray
/--
Parses a status line.

status-line = HTTP-version SP status-code SP [ reason-phrase ]
-/
public def parseStatusLine (limits : H1.Config) : Parser Response.Head := do
  let version ← parseHttpVersion <* rsp limits
  let status ← parseStatusCode <* rsp limits
  -- The reason phrase is parsed only for framing and deliberately discarded.
  discard <| parseReasonPhrase limits <* crlf
  return ⟨status, version, .empty⟩
/--
Consumes the section following the last chunk: the trailer fields (discarded) and
the final CRLF that ends the message body.
-/
public def parseLastChunkBody (limits : H1.Config) : Parser Unit := do
  discard <| manyItems (parseTrailerHeader limits) limits.maxTrailerHeaders
  crlf
end Std.Http.Protocol.H1

View File

@@ -0,0 +1,269 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Time
public import Std.Internal.Http.Data
public import Std.Internal.Http.Internal
public import Std.Internal.Http.Protocol.H1.Parser
public import Std.Internal.Http.Protocol.H1.Config
public import Std.Internal.Http.Protocol.H1.Message
public import Std.Internal.Http.Protocol.H1.Error
public section
/-!
# HTTP/1.1 Reader
This module defines the reader state machine for parsing incoming HTTP/1.1 messages.
It tracks the parsing state including start line, headers, and body handling for
both fixed-length and chunked transfer encodings.
-/
namespace Std.Http.Protocol.H1
set_option linter.all true
/--
The state of the `Reader` state machine.
-/
inductive Reader.State (dir : Direction) : Type
  /--
  Initial state waiting for HTTP start line.
  -/
  | needStartLine : State dir
  /--
  State waiting for HTTP headers, tracking number of headers parsed.
  -/
  | needHeader : Nat → State dir
  /--
  State waiting for chunk size in chunked transfer encoding.
  -/
  | needChunkedSize : State dir
  /--
  State waiting for chunk body data of specified size, carrying the chunk's extensions.
  -/
  | needChunkedBody : Array (String × Option String) → Nat → State dir
  /--
  State waiting for fixed-length body data of specified size.
  -/
  | needFixedBody : Nat → State dir
  /--
  State that it completed a single request or response and can go to the next one.
  -/
  | complete
  /--
  State that it has completed and cannot process more data.
  -/
  | closed
  /--
  The input is malformed.
  -/
  | failed (error : Error) : State dir
  deriving Inhabited, Repr, BEq
/--
Manages the reading state of the HTTP parsing and processing machine.
-/
structure Reader (dir : Direction) where
  /--
  The current state of the machine.
  -/
  state : Reader.State dir := .needStartLine
  /--
  The input byte array, wrapped in an iterator tracking the current parse position.
  -/
  input : ByteArray.Iterator := ByteArray.emptyWithCapacity 4096 |>.iter
  /--
  The incoming message head being accumulated.
  -/
  messageHead : Message.Head dir := {}
  /--
  Count of messages that this connection already parsed.
  -/
  messageCount : Nat := 0
  /--
  Flag that says that it cannot receive more input (the socket disconnected).
  -/
  noMoreInput : Bool := false
namespace Reader

/--
Checks if the reader is in a closed state and cannot process more messages.
-/
@[inline]
def isClosed (reader : Reader dir) : Bool :=
  reader.state matches .closed

/--
Checks if the reader has completed parsing the current message.
-/
@[inline]
def isComplete (reader : Reader dir) : Bool :=
  reader.state matches .complete

/--
Checks if the reader has encountered an error.
-/
@[inline]
def hasFailed (reader : Reader dir) : Bool :=
  reader.state matches .failed _
/--
Feeds new data into the reader's input buffer.
If the current input is exhausted, replaces it; otherwise appends.
-/
@[inline]
def feed (data : ByteArray) (reader : Reader dir) : Reader dir :=
  { reader with input :=
      if reader.input.atEnd
        then data.iter
        -- Append while keeping the current read position.
        else { reader.input with array := reader.input.array ++ data } }

/--
Replaces the reader's input iterator with a new one.
-/
@[inline]
def setInput (input : ByteArray.Iterator) (reader : Reader dir) : Reader dir :=
  { reader with input }

/--
Updates the message head being constructed.
-/
@[inline]
def setMessageHead (messageHead : Message.Head dir) (reader : Reader dir) : Reader dir :=
  { reader with messageHead }

/--
Adds a header to the current message head.
-/
@[inline]
def addHeader (name : Header.Name) (value : Header.Value) (reader : Reader dir) : Reader dir :=
  -- NOTE(review): both arms are textually identical; the match on `dir` presumably exists
  -- only so the dependently-typed `Message.Head dir` elaborates per direction — confirm
  -- before simplifying to a single body.
  match dir with
  | .sending => { reader with messageHead := { reader.messageHead with headers := reader.messageHead.headers.insert name value } }
  | .receiving => { reader with messageHead := { reader.messageHead with headers := reader.messageHead.headers.insert name value } }
/--
Closes the reader, transitioning to the closed state. Also marks the input as
finished so no further reads are attempted.
-/
@[inline]
def close (reader : Reader dir) : Reader dir :=
  { reader with state := .closed, noMoreInput := true }

/--
Marks the current message as complete and prepares for the next message.
-/
@[inline]
def markComplete (reader : Reader dir) : Reader dir :=
  { reader with
      state := .complete
      messageCount := reader.messageCount + 1 }

/--
Transitions the reader to a failed state with the given error.
-/
@[inline]
def fail (error : Error) (reader : Reader dir) : Reader dir :=
  { reader with state := .failed error }

/--
Resets the reader to parse a new message on the same connection.
The input buffer and message count are intentionally preserved.
-/
@[inline]
def reset (reader : Reader dir) : Reader dir :=
  { reader with
      state := .needStartLine
      messageHead := {} }
/--
Checks if more input is needed to continue parsing: the buffer is drained, the peer
may still send data, and the machine is in a state that consumes input.
-/
@[inline]
def needsMoreInput (reader : Reader dir) : Bool :=
  reader.input.atEnd && !reader.noMoreInput &&
    match reader.state with
    | .complete | .closed | .failed _ => false
    | _ => true

/--
Returns the current parse error if the reader has failed.
-/
@[inline]
def getError (reader : Reader dir) : Option Error :=
  match reader.state with
  | .failed err => some err
  | _ => none

/--
Gets the number of bytes remaining in the input buffer.
-/
@[inline]
def remainingBytes (reader : Reader dir) : Nat :=
  reader.input.array.size - reader.input.pos

/--
Advances the input iterator by n bytes.
-/
@[inline]
def advance (n : Nat) (reader : Reader dir) : Reader dir :=
  { reader with input := reader.input.forward n }
/--
Transitions to the state for reading headers, starting the header count at zero.
-/
@[inline]
def startHeaders (reader : Reader dir) : Reader dir :=
  { reader with state := .needHeader 0 }

/--
Transitions to the state for reading a fixed-length body of `size` bytes.
-/
@[inline]
def startFixedBody (size : Nat) (reader : Reader dir) : Reader dir :=
  { reader with state := .needFixedBody size }

/--
Transitions to the state for reading chunked transfer encoding (expecting a chunk size line).
-/
@[inline]
def startChunkedBody (reader : Reader dir) : Reader dir :=
  { reader with state := .needChunkedSize }

/--
Marks that no more input will be provided (connection closed).
-/
@[inline]
def markNoMoreInput (reader : Reader dir) : Reader dir :=
  { reader with noMoreInput := true }
/--
Checks if the connection should be kept alive for the next message.

Per RFC 9112 §9.3 an HTTP/1.1 connection is persistent unless the `Connection`
header contains `close`; checking for `close` (rather than requiring an explicit
`keep-alive`) also keeps this consistent with `Writer.shouldKeepAlive`.
-/
def shouldKeepAlive (reader : Reader dir) : Bool :=
  match reader.messageHead.headers.get? (.new "connection") with
  | some val => val.value.toLower != "close"
  | none => true

end Reader

View File

@@ -0,0 +1,265 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Time
public import Std.Internal.Http.Data
public import Std.Internal.Http.Internal
public import Std.Internal.Http.Protocol.H1.Parser
public import Std.Internal.Http.Protocol.H1.Config
public import Std.Internal.Http.Protocol.H1.Message
public import Std.Internal.Http.Protocol.H1.Error
public section
/-!
# HTTP/1.1 Writer
This module defines the writer state machine for generating outgoing HTTP/1.1 messages.
It handles encoding headers and body content for both fixed-length and chunked
transfer encodings.
-/
namespace Std.Http.Protocol.H1
set_option linter.all true
open Internal
/--
The state of the `Writer` state machine.
-/
inductive Writer.State
  /--
  It starts writing only when part of the request is received.
  -/
  | pending
  /--
  Ready to write the message.
  -/
  | waitingHeaders
  /--
  This is the state that we wait for a forced flush. This happens and causes the writer to
  start actually writing to the outputData.
  -/
  | waitingForFlush
  /--
  Writing the headers.
  -/
  | writingHeaders
  /--
  Writing the body, using the given length mode (fixed size or chunked).
  -/
  | writingBody (mode : Body.Length)
  /--
  It will flush all the remaining data and cause it to shutdown the machine.
  -/
  | shuttingDown
  /--
  State that it completed a single request and can go to the next one.
  -/
  | complete
  /--
  State that it has completed and cannot process more data.
  -/
  | closed
  deriving Inhabited, Repr, BEq
/--
Manages the writing state of the HTTP generating and writing machine.
-/
structure Writer (dir : Direction) where
  /--
  This is all the data that the user is sending that is being accumulated.
  -/
  userData : Array Chunk := .empty
  /--
  All the data that is produced by the writer.
  -/
  outputData : ChunkedBuffer := .empty
  /--
  The state of the writer machine.
  -/
  state : Writer.State := .pending
  /--
  When the user specifies the exact size upfront, we can use Content-Length
  instead of chunked transfer encoding for streaming.
  -/
  knownSize : Option Body.Length := none
  /--
  The outgoing message that will be written to the output. Note the direction is
  swapped: a receiving machine writes the opposite-direction message.
  -/
  messageHead : Message.Head dir.swap := {}
  /--
  The user sent the message.
  -/
  sentMessage : Bool := false
  /--
  This flags that the body stream is closed so if we start to write the body we know exactly the size.
  -/
  userClosedBody : Bool := false
namespace Writer

/--
Checks if the writer is ready to send data to the output.
-/
@[inline]
def isReadyToSend {dir} (writer : Writer dir) : Bool :=
  match writer.state with
  | .closed | .complete => true
  -- In any other state, flushing is possible once the user closed the body
  -- (the total size is then determined).
  | _ => writer.userClosedBody

/--
Checks if the writer is closed (cannot process more data).
-/
@[inline]
def isClosed (writer : Writer dir) : Bool :=
  match writer.state with
  | .closed => true
  | _ => false

/--
Checks if the writer has completed processing a request.
-/
@[inline]
def isComplete (writer : Writer dir) : Bool :=
  match writer.state with
  | .complete => true
  | _ => false

/--
Checks if the writer can accept more data from the user.
-/
@[inline]
def canAcceptData (writer : Writer dir) : Bool :=
  match writer.state with
  | .waitingHeaders => true
  | .waitingForFlush => true
  -- While writing the body, data is accepted only until the user closes it.
  | .writingBody _ => !writer.userClosedBody
  | _ => false

/--
Marks the body as closed, indicating no more user data will be added.
-/
@[inline]
def closeBody (writer : Writer dir) : Writer dir :=
  { writer with userClosedBody := true }
/--
Determines the transfer encoding mode: an explicitly known size always wins; otherwise,
if the user already closed the body, the exact size of the buffered chunks is used as a
fixed length; otherwise the body must be streamed with chunked encoding.
-/
def determineTransferMode (writer : Writer dir) : Body.Length :=
  match writer.knownSize with
  | some mode => mode
  | none =>
    if writer.userClosedBody then
      .fixed (writer.userData.foldl (fun acc chunk => acc + chunk.size) 0)
    else
      .chunked
/--
Adds user data chunks to the writer's buffer if the writer can accept data.
-/
@[inline]
def addUserData (data : Array Chunk) (writer : Writer dir) : Writer dir :=
  if writer.canAcceptData then
    { writer with userData := writer.userData ++ data }
  else
    -- Data is silently dropped when the writer cannot accept it.
    writer

/--
Writes accumulated user data to output using fixed-size encoding.
Returns the writer together with how many of the `limitSize` bytes remain unused.
-/
def writeFixedBody (writer : Writer dir) (limitSize : Nat) : Writer dir × Nat :=
  if writer.userData.size = 0 then
    (writer, limitSize)
  else
    let data := writer.userData.map Chunk.data
    -- Copy chunk payloads into the output until `limitSize` bytes have been gathered.
    let (chunks, totalSize) := data.foldl (fun (acc, size) ba =>
      if size >= limitSize then
        (acc, size)
      else
        let remaining := limitSize - size
        let takeSize := min ba.size remaining
        let chunk := ba.extract 0 takeSize
        (acc.push chunk, size + takeSize)
      ) (#[], 0)
    let outputData := writer.outputData.append (ChunkedBuffer.mk chunks totalSize)
    let remaining := limitSize - totalSize
    -- NOTE(review): `userData` is cleared wholesale, so bytes beyond `limitSize` are
    -- discarded here — confirm callers always pass a `limitSize` covering all buffered data.
    ({ writer with userData := #[], outputData }, remaining)

/--
Writes accumulated user data to output using chunked transfer encoding.
-/
def writeChunkedBody (writer : Writer dir) : Writer dir :=
  if writer.userData.size = 0 then
    writer
  else
    let data := writer.userData
    -- Each chunk is framed by the HTTP/1.1 `Encode` instance.
    { writer with userData := #[], outputData := data.foldl (Encode.encode .v11) writer.outputData }

/--
Writes the final chunk terminator (0\r\n\r\n) and transitions to complete state.
Any still-buffered user data is flushed as chunks first.
-/
def writeFinalChunk (writer : Writer dir) : Writer dir :=
  let writer := writer.writeChunkedBody
  { writer with
      outputData := writer.outputData.append "0\r\n\r\n".toUTF8
      state := .complete
  }
/--
Extracts all accumulated output data and returns it with a cleared output buffer.
-/
@[inline]
def takeOutput (writer : Writer dir) : Option (Writer dir × ByteArray) :=
  -- NOTE(review): this always returns `some`; the `Option` appears to exist only for
  -- interface compatibility — confirm whether callers rely on a `none` case.
  let output := writer.outputData.toByteArray
  some ({ writer with outputData := ChunkedBuffer.empty }, output)

/--
Updates the writer's state machine to a new state.
-/
@[inline]
def setState (state : Writer.State) (writer : Writer dir) : Writer dir :=
  { writer with state }

/--
Writes the message headers to the output buffer, rendered via the head's `ToString` instance.
-/
private def writeHeaders (messageHead : Message.Head dir.swap) (writer : Writer dir) : Writer dir :=
  { writer with outputData := writer.outputData.push (toString messageHead).toUTF8 }

/--
Checks if the connection should be kept alive based on the Connection header:
keep alive unless the header's value is `close` (absent header defaults to keep-alive).
-/
def shouldKeepAlive (writer : Writer dir) : Bool :=
  writer.messageHead.headers.get? (.new "connection")
    |>.map (fun v => v.value.toLower != "close")
    |>.getD true

/--
Closes the writer, transitioning to the closed state.
-/
@[inline]
def close (writer : Writer dir) : Writer dir :=
  { writer with state := .closed }

end Writer

View File

@@ -0,0 +1,150 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Async
public import Std.Internal.Async.TCP
public import Std.Sync.CancellationToken
public import Std.Internal.Http.Server.Config
public import Std.Internal.Http.Server.Connection
public section
/-!
# HTTP Server
This module defines a simple, asynchronous HTTP/1.1 server implementation.
It provides the `Std.Http.Server` structure, which encapsulates all server state, and functions for
starting, managing, and gracefully shutting down the server.
The server runs entirely using `Async` and uses a shared `CancellationContext` to signal shutdowns.
Each active client connection is tracked, and the server automatically resolves its shutdown
promise once all connections have closed.
-/
namespace Std.Http
open Std.Internal.IO.Async TCP
set_option linter.all true
/--
The `Server` structure holds all state required to manage the lifecycle of an HTTP server, including
connection tracking and shutdown coordination.
-/
structure Server where
  /--
  The context used for shutting down all connections and the server.
  -/
  context : Std.CancellationContext
  /--
  Number of currently active HTTP connections (plus the accept loop itself).
  -/
  activeConnections : Std.Mutex UInt64
  /--
  Indicates when the server has successfully shutdown.
  -/
  shutdownPromise : Std.Future Unit
  /--
  Configuration of the server.
  -/
  config : Std.Http.Config
namespace Server

/--
Create a new `Server` structure with an optional configuration.
-/
def new (config : Std.Http.Config := {}) : IO Server := do
  let context ← Std.CancellationContext.new
  let activeConnections ← Std.Mutex.new 0
  let shutdownPromise ← Std.Future.new
  return { context, activeConnections, shutdownPromise, config }
/--
Triggers cancellation of all requests and the accept loop in the server. This function should be used
in conjunction with `waitShutdown` to properly coordinate the shutdown sequence.
-/
@[inline]
def shutdown (s : Server) : Async Unit :=
  s.context.cancel .shutdown

/--
Waits for the server to shut down. Blocks until another task or async operation calls the `shutdown` function.
-/
@[inline]
def waitShutdown (s : Server) : Async Unit := do
  Async.ofAsyncTask ((← s.shutdownPromise.get).map Except.ok)

/--
Returns a `Selector` that waits for the server to shut down.
-/
@[inline]
def waitShutdownSelector (s : Server) : Selector Unit :=
  s.shutdownPromise.selector

/--
Triggers cancellation of all requests and the accept loop, then waits for the server to fully shut down.
This is a convenience function combining `shutdown` and then `waitShutdown`.
-/
@[inline]
def shutdownAndWait (s : Server) : Async Unit := do
  s.context.cancel .shutdown
  s.waitShutdown
/--
Runs `action` while counting it as an active connection; when the last active task
finishes after cancellation, resolves the shutdown promise.
-/
@[inline]
private def frameCancellation (s : Server) (action : ContextAsync α) : ContextAsync α := do
  s.activeConnections.atomically (modify (· + 1))
  -- NOTE(review): if `action` throws, the counter is never decremented — confirm
  -- `ContextAsync` error semantics or wrap in try/finally.
  let result ← action
  s.activeConnections.atomically do
    modify (· - 1)
    -- Last active task after cancellation: signal that shutdown has completed.
    if (← get) = 0 ∧ (← s.context.isCancelled) then
      discard <| s.shutdownPromise.resolve ()
  return result
/--
Start a new HTTP/1.1 server on the given socket address. This function uses `Async` to handle tasks
and TCP connections, and returns a `Server` structure that can be used to cancel the server.
-/
def serve
    (addr : Net.SocketAddress)
    (onRequest : Request Body → ContextAsync (Response Body))
    (onError : IO.Error → Async Unit)
    (config : Config := {}) (backlog : UInt32 := 128) : Async Server := do
  let httpServer ← Server.new config
  let server ← Socket.Server.mk
  server.bind addr
  server.listen backlog
  let runServer := do
    frameCancellation httpServer do
      while true do
        -- Wait for either an incoming client or cancellation of the server context.
        let result ← Selectable.one #[
          .case (server.acceptSelector) (fun x => pure <| some x),
          .case (← ContextAsync.doneSelector) (fun _ => pure none)
        ]
        match result with
        | some client => ContextAsync.background (frameCancellation httpServer (serveConnection client onRequest onError config))
        | none => break
  background (runServer httpServer.context)
  return httpServer
end Std.Http.Server

View File

@@ -0,0 +1,139 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Time
public import Std.Internal.Http.Protocol.H1
public section
/-!
# Config
This module exposes the `Config`, a structure that describes timeout, request and headers
configuration of an HTTP Server.
-/
namespace Std.Http
set_option linter.all true
/--
Connection limits configuration with validation.
-/
structure Config where
  /--
  Maximum number of requests per connection.
  -/
  maxRequests : Nat := 100
  /--
  Maximum number of headers allowed per request.
  -/
  maxHeaders : Nat := 50
  /--
  Maximum waiting time for more data.
  -/
  lingeringTimeout : Time.Millisecond.Offset := 10000
  /--
  Timeout for keep-alive connections (must be positive).
  -/
  keepAliveTimeout : { x : Time.Millisecond.Offset // 0 < x } := ⟨12000, by decide⟩
  /--
  Maximum time for requesting more data (must be positive).
  -/
  requestTimeout : { x : Time.Millisecond.Offset // 0 < x } := ⟨13000, by decide⟩
  /--
  Whether to enable keep-alive connections by default.
  -/
  enableKeepAlive : Bool := true
  /--
  The maximum size that the connection can receive in a single recv call.
  -/
  maximumRecvSize : Nat := 8192
  /--
  Default buffer size for the connection.
  -/
  defaultPayloadBytes : Nat := 8192
  /--
  The server name.
  -/
  serverName : Option Header.Value := some (.new "LeanHTTP/1.1")
  /--
  Maximum length of HTTP method token (default: 16 bytes)
  -/
  maxMethodLength : Nat := 16
  /--
  Maximum length of request URI (default: 8192 bytes)
  -/
  maxUriLength : Nat := 8192
  /--
  Maximum length of header field name (default: 256 bytes)
  -/
  maxHeaderNameLength : Nat := 256
  /--
  Maximum length of header field value (default: 8192 bytes)
  -/
  maxHeaderValueLength : Nat := 8192
  /--
  Maximum number of spaces in delimiter sequences (default: 256)
  -/
  maxSpaceSequence : Nat := 256
  /--
  Maximum length of chunk extension name (default: 256 bytes)
  -/
  maxChunkExtNameLength : Nat := 256
  /--
  Maximum length of chunk extension value (default: 256 bytes)
  -/
  maxChunkExtValueLength : Nat := 256
  /--
  Maximum length of reason phrase (default: 512 bytes)
  -/
  maxReasonPhraseLength : Nat := 512
  /--
  Maximum number of trailer headers (default: 100)
  -/
  maxTrailerHeaders : Nat := 100
namespace Config

/--
Converts to HTTP 1.1 config. Timeout-related fields are not forwarded: they are
enforced at the connection level, not by the H1 parser.
-/
def toH1Config (config : Config) : Protocol.H1.Config where
  maxMessages := config.maxRequests
  maxHeaders := config.maxHeaders
  maxMethodLength := config.maxMethodLength
  maxUriLength := config.maxUriLength
  maxHeaderNameLength := config.maxHeaderNameLength
  maxHeaderValueLength := config.maxHeaderValueLength
  maxSpaceSequence := config.maxSpaceSequence
  maxChunkExtNameLength := config.maxChunkExtNameLength
  maxChunkExtValueLength := config.maxChunkExtValueLength
  maxReasonPhraseLength := config.maxReasonPhraseLength
  maxTrailerHeaders := config.maxTrailerHeaders
  enableKeepAlive := config.enableKeepAlive
  identityHeader := config.serverName

end Std.Http.Config

View File

@@ -0,0 +1,301 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Async.TCP
public import Std.Internal.Async.ContextAsync
public import Std.Internal.Http.Transport
public import Std.Internal.Http.Protocol.H1
public import Std.Internal.Http.Server.Config
public section
namespace Std
namespace Http
namespace Server
open Std Internal IO Async TCP Protocol
open Time
/-!
# Connection
This module defines `Server.Connection`, a structure used to handle a single HTTP connection with
possibly multiple requests.
-/
set_option linter.all true
/--
A single HTTP connection.
-/
public structure Connection (α : Type) where
  /--
  The client connection (any transport, e.g. a TCP socket).
  -/
  socket : α
  /--
  The processing machine for HTTP 1.1, in receiving (server) mode.
  -/
  machine : H1.Machine .receiving
namespace Connection
/-- The possible outcomes of waiting for the next connection event. -/
private inductive Recv
  /-- Bytes arrived from the socket (`none` when the peer closed the stream). -/
  | bytes (x : Option ByteArray)
  /-- A chunk from the response body stream (`none` when the stream finished). -/
  | channel (x : Option Chunk)
  /-- The request handler finished producing a response (or failed). -/
  | response (x : (Except Error (Response Body)))
  /-- A timeout elapsed while waiting. -/
  | timeout
  /-- The server is shutting down. -/
  | shutdown
  /-- The connection must be closed. -/
  | close
/--
Waits for the first of several possible connection events: cancellation, incoming socket
bytes, a response body chunk, the handler's response, or a timeout. Each optional source
adds one selectable case.
-/
private def receiveWithTimeout
    [Transport α]
    (socket : Option α)
    (expect : UInt64)
    (channel : Option Body.ByteStream)
    (response : Option (Std.Future (Except Error (Response Body))))
    (timeoutMs : Millisecond.Offset)
    (keepAliveTimeoutMs : Option Millisecond.Offset)
    (connectionContext : CancellationContext) : Async Recv := do
  let mut baseSelectables := #[
    .case connectionContext.doneSelector (fun _ => do
      let reason ← connectionContext.getCancellationReason
      match reason with
      | some .deadline => pure .timeout
      | _ => pure .shutdown)
  ]
  if let some socket := socket then
    baseSelectables := baseSelectables.push (.case (Transport.recvSelector socket expect) (Recv.bytes · |> pure))
  -- Timeouts are only applied if we are not expecting data from the user.
  if channel.isNone ∧ response.isNone then
    if let some keepAliveTimeout := keepAliveTimeoutMs then
      baseSelectables := baseSelectables.push (.case (← Selector.sleep keepAliveTimeout) (fun _ => pure .timeout))
    else
      baseSelectables := baseSelectables.push (.case (← Selector.sleep timeoutMs) (fun _ => pure .timeout))
  if let some channel := channel then
    baseSelectables := baseSelectables.push (.case channel.recvSelector (Recv.channel · |> pure))
  if let some response := response then
    baseSelectables := baseSelectables.push (.case response.selector (Recv.response · |> pure))
  Selectable.one baseSelectables
/--
Waits for the next connection event, clamping the requested read size to the configured
per-recv maximum. Any exception while waiting is mapped to `Recv.close`.
-/
private def processNeedMoreData
    [Transport α]
    (config : Config)
    (socket : Option α)
    (expect : Option Nat)
    (response : Option (Std.Future (Except Error (Response Body))))
    (channel : Option Body.ByteStream)
    (timeout : Millisecond.Offset)
    (keepAliveTimeout : Option Millisecond.Offset)
    (connectionContext : CancellationContext) : Async Recv := do
  try
    -- Fall back to the default payload size, then clamp to the recv ceiling.
    let expectedBytes := expect
      |>.getD config.defaultPayloadBytes
      |>.min config.maximumRecvSize
      |>.toUInt64
    receiveWithTimeout socket expectedBytes channel response timeout keepAliveTimeout connectionContext
  catch _ =>
    -- Treat any failure while waiting as the peer closing the connection.
    pure .close
/--
Reacts to a protocol-level error. When the machine can still emit a response, sends the
given `status` with `Connection: close` and shuts down both body and reader; otherwise
only closes the writer. Returns the updated machine and the new waiting-response flag.
-/
private def handleError (machine : H1.Machine .receiving) (status : Status) (waitingResponse : Bool) : H1.Machine .receiving × Bool :=
  if machine.isWaitingMessage waitingResponse then
    let machine := machine.send { status, headers := .empty |>.insert (.new "connection") (.new "close") }
      |>.userClosedBody
      |>.closeReader
      |>.noMoreInput
    (machine, false)
  else
    (machine.closeWriter.noMoreInput, waitingResponse)
private def handle
[Transport α]
(connection : Connection α)
(config : Config)
(connectionContext : CancellationContext)
(onError : Error Async Unit)
(handler : Request Body ContextAsync (Response Body)) : Async Unit := do
let mut machine := connection.machine
let socket := connection.socket
let mut requestStream Body.ByteStream.emptyWithCapacity
let mut keepAliveTimeout := some config.keepAliveTimeout.val
let mut currentTimeout := config.keepAliveTimeout.val
let mut response Std.Future.new
let mut respStream := none
let mut requiresData := false
let mut needBody := false
let mut expectData := none
let mut waitingResponse := false
while ¬machine.halted do
let (newMachine, step) := machine.step
machine := newMachine
if step.output.size > 0 then
try Transport.sendAll socket step.output.data catch _ => break
for event in step.events do
match event with
| .needMoreData expect => do
requiresData := true
expectData := expect
| .needBody => do
needBody := true
| .needAnswer =>
pure ()
| .endHeaders head =>
waitingResponse := true
currentTimeout := config.lingeringTimeout
keepAliveTimeout := none
if let some length := head.getSize then
requestStream.setKnownSize (some length)
let newResponse := handler { head, body := (.stream requestStream) } connectionContext
let task newResponse.asTask
BaseIO.chainTask task fun x => discard <| response.resolve x
| .gotData final ext data =>
try
requestStream.writeChunk { data := data.toByteArray, extensions := ext }
if final then
requestStream.close
catch _ =>
pure ()
| .next => do
requestStream Body.ByteStream.emptyWithCapacity
response Std.Future.new
if let some res := respStream then
if ¬ ( res.isClosed) then res.close
respStream := none
keepAliveTimeout := some config.keepAliveTimeout.val
currentTimeout := config.keepAliveTimeout.val
waitingResponse := false
| .failed _ =>
pure ()
| .close =>
pure ()
if requiresData waitingResponse respStream.isSome then
let socket := some socket
let answer := if waitingResponse then some response else none
requiresData := false
needBody := false
let event processNeedMoreData config socket expectData answer respStream currentTimeout keepAliveTimeout connectionContext
match event with
| .bytes (some bs) =>
machine := machine.feed bs
| .bytes none =>
machine := machine.noMoreInput
| .channel (some chunk) =>
machine := machine.sendData #[chunk]
| .channel none =>
machine := machine.userClosedBody
if let some res := respStream then
if ¬( res.isClosed) then res.close
respStream := none
| .close =>
break
| .timeout =>
machine := machine.closeReader
let (newMachine, newWaitingResponse) := handleError machine .requestTimeout waitingResponse
machine := newMachine
waitingResponse := newWaitingResponse
| .shutdown =>
let (newMachine, newWaitingResponse) := handleError machine .serviceUnavailable waitingResponse
machine := newMachine
waitingResponse := newWaitingResponse
| .response (.error err) =>
onError err
let (newMachine, newWaitingResponse) := handleError machine .internalServerError waitingResponse
machine := newMachine
waitingResponse := newWaitingResponse
| .response (.ok res) =>
machine := machine.send res.head
waitingResponse := false
match res.body with
| .bytes data => machine := machine.sendData #[Chunk.mk data #[]] |>.userClosedBody
| .empty => machine := machine.userClosedBody
| .stream stream => do
let size stream.getKnownSize
machine := machine.setKnownSize (size.getD .chunked)
respStream := some stream
if ¬ ( requestStream.isClosed) then
requestStream.close
if let some res := respStream then
if ¬( res.isClosed) then
res.close
end Connection
/--
This is the entry point of the library. It is used to receive and send requests using an `Async`
handler for a single connection. It can be used with a `TCP.Socket` or any other type that implements
`Transport` to create a simple HTTP server capable of handling multiple connections concurrently.
# Example
```lean
-- Create a TCP socket server instance
let server ← Socket.Server.mk
server.bind addr
server.listen backlog
-- Enter an infinite loop to handle incoming client connections
while true do
let client ← server.accept
background (serveConnection client onRequest onError config)
```
-/
def serveConnection
[Transport t] (client : t) (onRequest : Request Body ContextAsync (Response Body))
(onError : Error Async Unit) (config : Config) : ContextAsync Unit := do
Connection.mk client { config := config.toH1Config }
|>.handle config ( ContextAsync.getContext) onError onRequest
end Std.Http.Server

View File

@@ -0,0 +1,231 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Protocol.H1
public section
/-!
# Transport
This module exposes a `Transport` type class that is used to represent different transport mechanisms
that can be used with a HTTP connection.
-/
namespace Std.Http
open Std Internal IO Async TCP
set_option linter.all true
/--
Generic HTTP interface that abstracts over different transport mechanisms.
-/
class Transport (α : Type) where
/--
Receive data from the client connection, up to the expected size.
Returns None if the connection is closed or no data is available.
-/
recv : α UInt64 Async (Option ByteArray)
/--
Send all data through the client connection.
-/
sendAll : α Array ByteArray Async Unit
/--
Get a selector for receiving data asynchronously.
-/
recvSelector : α UInt64 Selector (Option ByteArray)
instance : Transport Socket.Client where
recv client expect := client.recv? expect
sendAll client data := client.sendAll data
recvSelector client expect := client.recvSelector expect
open Internal.IO.Async in
/--
Shared state for a bidirectional mock connection.
-/
private structure MockLink.SharedState where
/--
Client to server direction.
-/
clientToServer : Std.CloseableChannel ByteArray
/--
Server to client direction.
-/
serverToClient : Std.CloseableChannel ByteArray
/--
Mock client endpoint for testing.
-/
structure Mock.Client where
private shared : MockLink.SharedState
/--
Mock server endpoint for testing.
-/
structure Mock.Server where
private shared : MockLink.SharedState
namespace Mock
/--
Create a mock server and client that are connected to each other and share the
same underlying state, enabling bidirectional communication.
-/
def new : BaseIO (Mock.Client × Mock.Server) := do
let first Std.CloseableChannel.new
let second Std.CloseableChannel.new
return (first, second, first, second)
/--
Receive data from a channel, joining all available data up to the expected size. First does a
blocking recv, then greedily consumes available data with tryRecv until `expect` bytes are reached.
-/
def recvJoined (recvChan : Std.CloseableChannel ByteArray) (expect : Option UInt64) : Async (Option ByteArray) := do
match await ( recvChan.recv) with
| none => return none
| some first =>
let mut result := first
repeat
if let some expect := expect then
if result.size.toUInt64 expect then break
match recvChan.tryRecv with
| none => break
| some chunk => result := result ++ chunk
return some result
/--
Send a single ByteArray through a channel.
-/
def send (sendChan : Std.CloseableChannel ByteArray) (data : ByteArray) : Async Unit := do
Async.ofAsyncTask (( sendChan.send data) |>.map (Except.mapError (IO.userError toString)))
/--
Send ByteArrays through a channel.
-/
def sendAll (sendChan : Std.CloseableChannel ByteArray) (data : Array ByteArray) : Async Unit := do
for chunk in data do
send sendChan chunk
/--
Create a selector for receiving from a channel with joining behavior.
-/
def recvSelector (recvChan : Std.CloseableChannel ByteArray) : Selector (Option ByteArray) :=
recvChan.recvSelector
end Mock
namespace Mock.Client
/--
Get the receive channel for a client (server to client direction).
-/
def getRecvChan (client : Mock.Client) : Std.CloseableChannel ByteArray :=
client.shared.serverToClient
/--
Get the send channel for a client (client to server direction).
-/
def getSendChan (client : Mock.Client) : Std.CloseableChannel ByteArray :=
client.shared.clientToServer
/--
Send a single ByteArray.
-/
def send (client : Mock.Client) (data : ByteArray) : Async Unit :=
Mock.send (getSendChan client) data
/--
Receive data, joining all available chunks.
-/
def recv? (client : Mock.Client) (expect : Option UInt64 := none) : Async (Option ByteArray) :=
Mock.recvJoined (getRecvChan client) expect
/--
Try to receive data without blocking, joining all immediately available chunks.
Returns `none` if no data is available.
-/
def tryRecv? (client : Mock.Client) (_expect : UInt64 := 0) : BaseIO (Option ByteArray) := do
match (getRecvChan client).tryRecv with
| none => return none
| some first =>
let mut result := first
repeat
match (getRecvChan client).tryRecv with
| none => break
| some chunk => result := result ++ chunk
return some result
/--
Close the mock client, closing both directions of the connection.
-/
def close (client : Mock.Client) : IO Unit := do
client.shared.clientToServer.close
end Mock.Client
namespace Mock.Server
/--
Get the receive channel for a server (client to server direction).
-/
def getRecvChan (server : Mock.Server) : Std.CloseableChannel ByteArray :=
server.shared.clientToServer
/--
Get the send channel for a server (server to client direction).
-/
def getSendChan (server : Mock.Server) : Std.CloseableChannel ByteArray :=
server.shared.serverToClient
/--
Send a single ByteArray.
-/
def send (server : Mock.Server) (data : ByteArray) : Async Unit :=
Mock.send (getSendChan server) data
/--
Receive data, joining all available chunks.
-/
def recv? (server : Mock.Server) (expect : Option UInt64 := none) : Async (Option ByteArray) :=
Mock.recvJoined (getRecvChan server) expect
/--
Try to receive data without blocking, joining all immediately available chunks. Returns `none` if no
data is available.
-/
def tryRecv? (server : Mock.Server) (_expect : UInt64 := 0) : BaseIO (Option ByteArray) := do
match (getRecvChan server).tryRecv with
| none => return none
| some first =>
let mut result := first
repeat
match (getRecvChan server).tryRecv with
| none => break
| some chunk => result := result ++ chunk
return some result
end Mock.Server
instance : Transport Mock.Client where
recv client expect := Mock.recvJoined (Mock.Client.getRecvChan client) (some expect)
sendAll client data := Mock.sendAll (Mock.Client.getSendChan client) data
recvSelector client _ := Mock.recvSelector (Mock.Client.getRecvChan client)
instance : Transport Mock.Server where
recv server expect := Mock.recvJoined (Mock.Server.getRecvChan server) (some expect)
sendAll server data := Mock.sendAll (Mock.Server.getSendChan server) data
recvSelector server _ := Mock.recvSelector (Mock.Server.getRecvChan server)
end Std.Http

View File

@@ -56,16 +56,26 @@ def skipByte (b : UInt8) : Parser Unit :=
/--
Skip a sequence of bytes equal to the given `ByteArray`.
-/
def skipBytes (arr : ByteArray) : Parser Unit := do
for b in arr do
skipByte b
def skipBytes (arr : ByteArray) : Parser Unit := fun it =>
if it.remainingBytes < arr.size then
.error it .eof
else
let rec go (idx : Nat) (it : ByteArray.Iterator) : ParseResult Unit ByteArray.Iterator :=
if h : idx < arr.size then
match skipByte arr[idx] it with
| .success it' _ => go (idx + 1) it'
| .error it' err => .error it' err
else
.success it ()
go 0 it
/--
Parse a string by matching its UTF-8 bytes, returns the string on success.
-/
@[inline]
def pstring (s : String) : Parser String := do
skipBytes s.toUTF8
let utf8 := s.toUTF8
skipBytes utf8
return s
/--

View File

@@ -17,5 +17,6 @@ public import Std.Sync.Broadcast
public import Std.Sync.StreamMap
public import Std.Sync.CancellationToken
public import Std.Sync.CancellationContext
public import Std.Sync.Future
@[expose] public section

158
src/Std/Sync/Future.lean Normal file
View File

@@ -0,0 +1,158 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Sync.Mutex
public import Std.Internal.Async.IO
public section
/-!
This module contains the implementation of `Std.Future` that is a write-once container for a value of type `α`.
Once resolved with a value, it cannot be changed or resolved again. It's similar to an `IO.Promise` but it exists
in order to make `Seletor` work correctly.
-/
namespace Std
open Internal.IO.Async
private inductive Consumer (α : Type) where
| normal (promise : IO.Promise α)
| select (finished : Waiter α)
private def Consumer.resolve (c : Consumer α) (x : α) : BaseIO Bool := do
match c with
| .normal promise =>
promise.resolve x
return true
| .select waiter =>
let lose := return false
let win promise := do
promise.resolve (.ok x)
return true
waiter.race lose win
/--
A `Future` is a write-once container for a value of type `α`. Once resolved with a value, it cannot be
changed or resolved again.
-/
structure Future (α : Type) where
private mk ::
private state : Mutex (Option α)
private consumers : Mutex (Array (Consumer α))
private nonEmpty : Nonempty α
namespace Future
/--
Create a new unresolved `Future`.
-/
def new [h : Nonempty α] : BaseIO (Future α) := do
return {
state := Mutex.new none
consumers := Mutex.new #[]
nonEmpty := h
}
/--
Attempt to resolve the future with the given value. Returns `true` if the future was successfully resolved
(was not already resolved). Returns `false` if the future was already resolved. When resolved, all
waiting consumers will be notified.
-/
def resolve (p : Future α) (value : α) : BaseIO Bool := do
let consumersToNotify p.state.atomically do
let current get
match current with
| some _ =>
return none
| none =>
set (some value)
let cs p.consumers.atomically do
let cs get
MonadState.set #[]
return some cs
return cs
match consumersToNotify with
| none =>
return false
| some consumers =>
if consumers.isEmpty then
return true
for consumer in consumers do
discard <| consumer.resolve value
return true
/--
Check if the future has been resolved.
-/
def isResolved (p : Future α) : BaseIO Bool := do
p.state.atomically do
return ( get).isSome
/--
Get the value if the future is resolved, otherwise return `none`.
-/
def tryGet (p : Future α) : BaseIO (Option α) := do
p.state.atomically do
return ( get)
/--
Wait for the future to be resolved and return its value. Returns a task that will complete once the
future is resolved.
-/
def get [Inhabited α] (p : Future α) : BaseIO (Task α) := do
p.state.atomically do
match MonadState.get with
| some value =>
return .pure value
| none =>
let promise IO.Promise.new
p.consumers.atomically do
modify (·.push (.normal promise))
BaseIO.bindTask promise.result? fun res =>
match res with
| some res => pure (Task.pure res)
| none => unreachable!
/--
Creates a `Selector` that resolves once the future is resolved.
-/
def selector (p : Future α) : Selector α where
tryFn := p.tryGet
registerFn waiter := do
p.state.atomically do
match MonadState.get with
| some value =>
let lose := return ()
let win promise := promise.resolve (.ok value)
waiter.race lose win
| none =>
p.consumers.atomically do
modify (·.push (.select waiter))
unregisterFn := do
p.consumers.atomically do
let cs MonadState.get
let filtered cs.filterM fun
| .normal .. => return true
| .select waiter => return !( waiter.checkFinished)
set filtered
def ofPromise (promise : IO.Promise α) : BaseIO (Std.Future (Option α)) := do
let stdFuture Std.Future.new
BaseIO.chainTask promise.result? (fun x => discard <| stdFuture.resolve x)
return stdFuture
end Future
end Std

View File

@@ -101,16 +101,19 @@ extern "C" LEAN_EXPORT lean_obj_res lean_uv_signal_mk(uint32_t signum_obj, uint8
extern "C" LEAN_EXPORT lean_obj_res lean_uv_signal_next(b_obj_arg obj) {
lean_uv_signal_object * signal = lean_to_uv_signal(obj);
auto setup_signal = [obj, signal]() {
lean_assert(signal->m_promise == NULL);
auto create_promise = []() {
return lean_io_promise_new();
};
lean_object* promise = lean_io_promise_new();
signal->m_promise = promise;
auto setup_signal = [create_promise, obj, signal]() {
lean_assert(signal->m_promise == NULL);
signal->m_promise = create_promise();
signal->m_state = SIGNAL_STATE_RUNNING;
// The event loop must keep the signal alive for the duration of the run time.
lean_inc(obj);
lean_inc(promise);
event_loop_lock(&global_ev);
int result;
if (signal->m_repeating) {
@@ -127,19 +130,17 @@ extern "C" LEAN_EXPORT lean_obj_res lean_uv_signal_next(b_obj_arg obj) {
);
}
event_loop_unlock(&global_ev);
if (result != 0) {
lean_dec(obj);
lean_dec(promise);
event_loop_unlock(&global_ev);
return lean_io_result_mk_error(lean_decode_uv_error(result, NULL));
} else {
lean_inc(signal->m_promise);
return lean_io_result_mk_ok(signal->m_promise);
}
event_loop_unlock(&global_ev);
return lean_io_result_mk_ok(promise);
};
event_loop_lock(&global_ev);
if (signal->m_repeating) {
switch (signal->m_state) {
case SIGNAL_STATE_INITIAL:
@@ -153,23 +154,20 @@ extern "C" LEAN_EXPORT lean_obj_res lean_uv_signal_next(b_obj_arg obj) {
lean_dec(signal->m_promise);
}
signal->m_promise = lean_io_promise_new();
signal->m_promise = create_promise();
}
lean_inc(signal->m_promise);
event_loop_unlock(&global_ev);
return lean_io_result_mk_ok(signal->m_promise);
}
case SIGNAL_STATE_FINISHED:
{
if (signal->m_promise == NULL) {
lean_object* finished_promise = lean_io_promise_new();
event_loop_unlock(&global_ev);
lean_object* finished_promise = create_promise();
return lean_io_result_mk_ok(finished_promise);
}
lean_inc(signal->m_promise);
event_loop_unlock(&global_ev);
return lean_io_result_mk_ok(signal->m_promise);
}
}
@@ -178,11 +176,9 @@ extern "C" LEAN_EXPORT lean_obj_res lean_uv_signal_next(b_obj_arg obj) {
return setup_signal();
} else if (signal->m_promise != NULL) {
lean_inc(signal->m_promise);
event_loop_unlock(&global_ev);
return lean_io_result_mk_ok(signal->m_promise);
} else {
lean_object* finished_promise = lean_io_promise_new();
event_loop_unlock(&global_ev);
lean_object* finished_promise = create_promise();
return lean_io_result_mk_ok(finished_promise);
}
}
@@ -233,6 +229,7 @@ extern "C" LEAN_EXPORT lean_obj_res lean_uv_signal_cancel(b_obj_arg obj) {
lean_dec(signal->m_promise);
signal->m_promise = NULL;
signal->m_state = SIGNAL_STATE_INITIAL;
lean_dec(obj);
}
}

View File

@@ -0,0 +1,309 @@
import Std.Internal.Http
import Std.Internal.Async
open Std.Internal.IO.Async
open Std Http
-- ============================================================================
-- collectByteArray tests
-- ============================================================================
def testCollectByteArrayExactMax : IO Unit := do
let res Async.block do
let data := ByteArray.mk (List.replicate 100 65 |>.toArray)
let body := Body.bytes data
let collected body.collectByteArray (some 100)
return collected.size
IO.println res
/--
info: 100
-/
#guard_msgs in
#eval testCollectByteArrayExactMax
def testCollectByteArrayOverLimit : IO Unit := do
let res Async.block do
let data := ByteArray.mk (List.replicate 101 65 |>.toArray)
let body := Body.bytes data
let _ body.collectByteArray (some 100)
return "should not reach here"
IO.println res
/--
error: body exceeds limit (100 bytes)
-/
#guard_msgs in
#eval testCollectByteArrayOverLimit
def testCollectByteArrayUnderLimit : IO Unit := do
let res Async.block do
let data := ByteArray.mk (List.replicate 50 65 |>.toArray)
let body := Body.bytes data
let collected body.collectByteArray (some 100)
return collected.size
IO.println res
/--
info: 50
-/
#guard_msgs in
#eval testCollectByteArrayUnderLimit
def testCollectByteArrayNoLimit : IO Unit := do
let res Async.block do
let data := ByteArray.mk (List.replicate 1000 65 |>.toArray)
let body := Body.bytes data
let collected body.collectByteArray none
return collected.size
IO.println res
/--
info: 1000
-/
#guard_msgs in
#eval testCollectByteArrayNoLimit
-- ============================================================================
-- collectString tests
-- ============================================================================
def testCollectStringValid : IO Unit := do
let res Async.block do
let body : Body := "hello"
body.collectString none
IO.println (repr res)
/--
info: some "hello"
-/
#guard_msgs in
#eval testCollectStringValid
def testCollectStringInvalidUtf8 : IO Unit := do
let res Async.block do
let invalidUtf8 := ByteArray.mk #[0xFF, 0xFE, 0x00, 0x01]
let body := Body.bytes invalidUtf8
body.collectString none
IO.println (repr res)
/--
info: none
-/
#guard_msgs in
#eval testCollectStringInvalidUtf8
def testCollectStringEmpty : IO Unit := do
let res Async.block do
let body := Body.empty
body.collectString none
IO.println (repr res)
/--
info: some ""
-/
#guard_msgs in
#eval testCollectStringEmpty
-- ============================================================================
-- Streaming body tests
-- ============================================================================
def testStreamingBody : IO Unit := do
let res Async.block do
let stream Body.ByteStream.empty
background do
discard <| stream.write "hello ".toUTF8
discard <| stream.write "world".toUTF8
stream.close
let body := Body.stream stream
let result body.collectByteArray none
return String.fromUTF8! result
IO.println <| res.quote
/--
info: "hello world"
-/
#guard_msgs in
#eval testStreamingBody
def testStreamingMultipleChunks : IO Unit := do
let count Async.block do
let stream Body.ByteStream.empty
background do
for i in [0:3] do
discard <| stream.write s!"chunk{i}".toUTF8
stream.close
let body := Body.stream stream
let mut count := 0
for _ in body do
count := count + 1
return count
IO.println s!"collected {count} chunks"
/--
info: collected 3 chunks
-/
#guard_msgs in
#eval testStreamingMultipleChunks
def testStreamingTotalSize : IO Unit := do
let size Async.block do
let stream Body.ByteStream.empty
background do
discard <| stream.write "aaaaa".toUTF8
discard <| stream.write "bbbbb".toUTF8
discard <| stream.write "ccccc".toUTF8
stream.close
let body := Body.stream stream
let collected body.collectByteArray none
return collected.size
IO.println size
/--
info: 15
-/
#guard_msgs in
#eval testStreamingTotalSize
-- ============================================================================
-- Empty body tests
-- ============================================================================
def testEmptyBodySize : IO Unit := do
let size Async.block do
let body := Body.empty
let collected body.collectByteArray none
return collected.size
IO.println size
/--
info: 0
-/
#guard_msgs in
#eval testEmptyBodySize
def testEmptyBodyLength : IO Unit := do
let isZero Async.block do
let body := Body.empty
let len body.getContentLength
return (len == .fixed 0)
IO.println isZero
/--
info: true
-/
#guard_msgs in
#eval testEmptyBodyLength
-- ============================================================================
-- Content length tests
-- ============================================================================
def testContentLengthFixed : IO Unit := do
let len Async.block do
let body : Body := "hello"
body.getContentLength
IO.println (repr len)
/--
info: Std.Http.Body.Length.fixed 5
-/
#guard_msgs in
#eval testContentLengthFixed
def testContentLengthEmpty : IO Unit := do
let len Async.block do
let body := Body.empty
body.getContentLength
IO.println (repr len)
/--
info: Std.Http.Body.Length.fixed 0
-/
#guard_msgs in
#eval testContentLengthEmpty
-- ============================================================================
-- Body coercions
-- ============================================================================
def testStringCoercion : IO Unit := do
let size Async.block do
let body : Body := "hello"
let collected body.collectByteArray none
return collected.size
IO.println size
/--
info: 5
-/
#guard_msgs in
#eval testStringCoercion
def testByteArrayCoercion : IO Unit := do
let size Async.block do
let data := ByteArray.mk #[1, 2, 3]
let body : Body := data
let collected body.collectByteArray none
return collected.size
IO.println size
/--
info: 3
-/
#guard_msgs in
#eval testByteArrayCoercion
def testUnitCoercion : IO Unit := do
let size Async.block do
let body : Body := ()
let collected body.collectByteArray none
return collected.size
IO.println size
/--
info: 0
-/
#guard_msgs in
#eval testUnitCoercion
-- ============================================================================
-- Body iteration
-- ============================================================================
def testBytesBodyIteration : IO Unit := do
let count Async.block do
let body : Body := "hello"
let mut count := 0
for _ in body do
count := count + 1
return count
IO.println count
/--
info: 1
-/
#guard_msgs in
#eval testBytesBodyIteration
def testEmptyBodyIteration : IO Unit := do
let count Async.block do
let body := Body.empty
let mut count := 0
for _ in body do
count := count + 1
return count
IO.println count
/--
info: 0
-/
#guard_msgs in
#eval testEmptyBodyIteration

View File

@@ -0,0 +1,687 @@
import Std.Internal.Http
import Std.Internal.Async
import Std.Internal.Async.Timer
open Std.Internal.IO Async
open Std Http
structure TestCase where
/-- Descriptive name for the test -/
name : String
/-- The HTTP request to send -/
request : Request (Array Chunk)
/-- Handler function to process the request -/
handler : Request Body ContextAsync (Response Body)
/-- Expected response string -/
expected : String
/-- Whether to use chunked encoding -/
chunked : Bool := false
deriving Inhabited
def toByteArray (req : Request (Array Chunk)) (chunked := false) : IO ByteArray := Async.block do
let mut data := String.toUTF8 <| toString req.head
let toByteArray (part : Chunk) := Internal.Encode.encode .v11 .empty part |>.toByteArray
for part in req.body do data := data ++ (if chunked then toByteArray part else part.data)
if chunked then data := data ++ toByteArray (Chunk.mk .empty .empty)
return data
/-- Send multiple requests through a mock connection and return the response data. -/
def sendRequests (client : Mock.Client) (server : Mock.Server) (reqs : Array (Request (Array Chunk)))
(onRequest : Request Body ContextAsync (Response Body))
(chunked : Bool := false) : IO ByteArray := Async.block do
let mut data := .empty
for req in reqs do data := data ++ ( toByteArray req chunked)
client.send data
Std.Http.Server.serveConnection server onRequest (fun _ => pure ()) (config := { lingeringTimeout := 3000 })
|>.run
let res client.recv?
pure <| res.getD .empty
/-- Run a single test case, comparing actual response against expected response. -/
def runTest (name : String) (client : Mock.Client) (server : Mock.Server) (req : Request (Array Chunk))
(handler : Request Body ContextAsync (Response Body)) (expected : String) (chunked : Bool := false) :
IO Unit := do
let response sendRequests client server #[req] handler chunked
let responseData := String.fromUTF8! response
if responseData != expected then
throw <| IO.userError s!
"Test '{name}' failed:\n\
Expected:\n{expected.quote}\n\
Got:\n{responseData.quote}"
def runTestCase (tc : TestCase) : IO Unit := do
let (client, server) Mock.new
runTest tc.name client server tc.request tc.handler tc.expected tc.chunked
-- Request Predicates
/-- Check if request is a basic GET requests to the specified URI and host. -/
def isBasicGetRequest (req : Request Body) (uri : String) (host : String) : Bool :=
req.head.method == .get
req.head.version == .v11
toString req.head.uri = uri
req.head.headers.hasEntry (.new "host") (.ofString! host)
/-- Check if request has a specific Content-Length header. -/
def hasContentLength (req : Request Body) (length : String) : Bool :=
req.head.headers.hasEntry (.new "content-length") (.ofString! length)
/-- Check if request uses chunked transfer encoding. -/
def isChunkedRequest (req : Request Body) : Bool :=
let headers := req.head.headers
if let some res := headers.get? (.new "transfer-encoding") then
let encodings := res.value.split "," |>.toArray.map (·.trimAscii.toString.toLower)
if encodings.isEmpty then
false
else
let chunkedCount := encodings.filter (· == "chunked") |>.size
let lastIsChunked := encodings.back? == some "chunked"
if chunkedCount > 1 then
false
else if chunkedCount = 1 ¬lastIsChunked then
false
else
lastIsChunked
else
false
/-- Check if request has a specific header with a specific value. -/
def hasHeader (req : Request Body) (name : String) (value : String) : Bool :=
if let some name := Header.Name.ofString? name then
req.head.headers.hasEntry name (.ofString! value)
else
false
/-- Check if request method matches the expected method. -/
def hasMethod (req : Request Body) (method : Method) : Bool :=
req.head.method == method
/-- Check if request URI matches the expected URI string. -/
def hasUri (req : Request Body) (uri : String) : Bool :=
toString req.head.uri = uri
-- Tests
#eval runTestCase {
name := "GET with Content-Length"
request := Request.new
|>.method .get
|>.uri! "/"
|>.header! "Host" "example.com"
|>.header! "Connection" "close"
|>.header! "Content-Length" "7"
|>.body #[.mk "survive".toUTF8 #[]]
handler := fun req => do
if isBasicGetRequest req "/" "example.com" hasContentLength req "7"
then return Response.ok |>.body "ok"
else return Response.badRequest |>.body "invalid"
expected := "HTTP/1.1 200 OK\x0d\nContent-Length: 2\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\nok"
}
#eval runTestCase {
name := "Simple GET request"
request := Request.new
|>.method .get
|>.uri! "/api/users"
|>.header! "Host" "api.example.com"
|>.header! "Connection" "close"
|>.body #[]
handler := fun req => do
if hasMethod req .get hasUri req "/api/users"
then return Response.ok |>.body "users list"
else return Response.notFound |>.body ()
expected := "HTTP/1.1 200 OK\x0d\nContent-Length: 10\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\nusers list"
}
#eval runTestCase {
name := "POST with body"
request := Request.new
|>.method .post
|>.uri! "/api/users"
|>.header! "Host" "api.example.com"
|>.header! "Content-Type" "application/json"
|>.header! "Content-Length" "16"
|>.header! "Connection" "close"
|>.body #[.mk "{\"name\":\"Alice\"}".toUTF8 #[]]
handler := fun req => do
if hasMethod req .post hasHeader req "Content-Type" "application/json"
then return Response.new |>.status .created |>.body "Created"
else return Response.badRequest |>.body ()
expected := "HTTP/1.1 201 Created\x0d\nContent-Length: 7\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\nCreated"
}
#eval runTestCase {
name := "DELETE request"
request := Request.new
|>.method .delete
|>.uri! "/api/users/123"
|>.header! "Host" "api.example.com"
|>.header! "Connection" "close"
|>.body #[]
handler := fun req => do
if hasMethod req .delete hasUri req "/api/users/123"
then return Response.new |>.status .noContent |>.body ""
else return Response.notFound |>.body ()
expected := "HTTP/1.1 204 No Content\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
}
-- A HEAD request is routed by method and answered with an empty body.
#eval runTestCase {
  name := "HEAD request"
  request := Request.new
    |>.method .head
    |>.uri! "/api/users"
    |>.header! "Host" "api.example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun req => do
    if hasMethod req .head
    then return Response.ok |>.body ""
    else return Response.notFound |>.body ()
  expected := "HTTP/1.1 200 OK\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
}

-- An OPTIONS request against the asterisk-form target ("*") advertises the
-- allowed methods via the Allow header.
#eval runTestCase {
  name := "OPTIONS request"
  request := Request.new
    |>.method .options
    |>.uri! "*"
    |>.header! "Host" "api.example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun req => do
    if hasMethod req .options
    then return Response.new
      |>.status .ok
      |>.header! "Allow" "GET, POST, PUT, DELETE, OPTIONS"
      |>.body ""
    else return Response.badRequest |>.body ()
  expected := "HTTP/1.1 200 OK\x0d\nAllow: GET, POST, PUT, DELETE, OPTIONS\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
}

-- Several request headers matched at once.
-- NOTE(review): the boolean connective between the two hasHeader checks was
-- lost in extraction; `&&` restored — confirm against the original file.
#eval runTestCase {
  name := "Request with multiple headers"
  request := Request.new
    |>.method .get
    |>.uri! "/api/data"
    |>.header! "Host" "api.example.com"
    |>.header! "Accept" "application/json"
    |>.header! "User-Agent" "TestClient/1.0"
    |>.header! "Authorization" "Bearer token123"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun req => do
    if hasHeader req "Authorization" "Bearer token123" && hasHeader req "Accept" "application/json"
    then return Response.ok |>.body "authenticated"
    else return Response.new |>.status .unauthorized |>.body "unauthorized"
  expected := "HTTP/1.1 200 OK\x0d\nContent-Length: 13\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\nauthenticated"
}

-- The query string is kept verbatim as part of the request target.
#eval runTestCase {
  name := "Request with query parameters"
  request := Request.new
    |>.method .get
    |>.uri! "/api/search?q=test&limit=10"
    |>.header! "Host" "api.example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun req => do
    if hasUri req "/api/search?q=test&limit=10"
    then return Response.ok |>.body "search results"
    else return Response.notFound |>.body ()
  expected := "HTTP/1.1 200 OK\x0d\nContent-Length: 14\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\nsearch results"
}
-- POST with a declared Content-Length of 0 and no body bytes.
-- NOTE(review): the boolean connective between the two checks was lost in
-- extraction; `&&` restored — confirm against the original file.
#eval runTestCase {
  name := "POST with empty body"
  request := Request.new
    |>.method .post
    |>.uri! "/api/trigger"
    |>.header! "Host" "api.example.com"
    |>.header! "Content-Length" "0"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun req => do
    if hasMethod req .post && hasContentLength req "0"
    then return Response.new |>.status .accepted |>.body "triggered"
    else return Response.badRequest |>.body ()
  expected := "HTTP/1.1 202 Accepted\x0d\nContent-Length: 9\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\ntriggered"
}

-- A 1000-byte response body gets the matching Content-Length.
#eval runTestCase {
  name := "Large response body"
  request := Request.new
    |>.method .get
    |>.uri! "/api/large"
    |>.header! "Host" "api.example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun _ => do
    let largeBody := String.ofList (List.replicate 1000 'X')
    return Response.ok |>.body largeBody
  expected := s!"HTTP/1.1 200 OK\x0d\nContent-Length: 1000\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n{String.ofList (List.replicate 1000 'X')}"
}

-- A non-standard status code (RFC 2324's 418) is serialized as-is.
#eval runTestCase {
  name := "Custom status code"
  request := Request.new
    |>.method .get
    |>.uri! "/api/teapot"
    |>.header! "Host" "api.example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun _ => do
    return Response.new
      |>.status .imATeapot
      |>.body "I'm a teapot"
  expected := "HTTP/1.1 418 I'm a teapot\x0d\nContent-Length: 12\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\nI'm a teapot"
}

-- Percent-encoded bytes in the URI are not decoded before matching.
#eval runTestCase {
  name := "Request with special characters in URI"
  request := Request.new
    |>.method .get
    |>.uri! "/api/users/%C3%A9"
    |>.header! "Host" "api.example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun req => do
    if hasUri req "/api/users/%C3%A9"
    then return Response.ok |>.body "found"
    else return Response.notFound |>.body ()
  expected := "HTTP/1.1 200 OK\x0d\nContent-Length: 5\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\nfound"
}

-- Custom response headers survive serialization alongside the server defaults.
#eval runTestCase {
  name := "Response with custom headers"
  request := Request.new
    |>.method .get
    |>.uri! "/api/data"
    |>.header! "Host" "api.example.com"
    |>.header! "Connection" "close"
    |>.header! "Cache-Control" "no-cache"
    |>.body #[]
  handler := fun _ => do
    return Response.new
      |>.status .ok
      |>.header! "Cache-Control" "no-cache"
      |>.header! "X-Custom-Header" "custom-value"
      |>.body "data"
  expected := "HTTP/1.1 200 OK\x0d\nX-Custom-Header: custom-value\x0d\nContent-Length: 4\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\nCache-Control: no-cache\x0d\n\x0d\ndata"
}
-- A request body together with a Content-Type the handler dispatches on.
#eval runTestCase {
  name := "Request with Content-Type and body"
  request := Request.new
    |>.method .post
    |>.uri! "/api/xml"
    |>.header! "Host" "api.example.com"
    |>.header! "Content-Type" "application/xml"
    |>.header! "Content-Length" "17"
    |>.header! "Connection" "close"
    |>.body #[.mk "<data>test</data>".toUTF8 #[]]
  handler := fun req => do
    if hasHeader req "Content-Type" "application/xml"
    then return Response.ok |>.body "processed xml"
    else return Response.new |>.status .unsupportedMediaType |>.body "unsupported"
  expected := "HTTP/1.1 200 OK\x0d\nContent-Length: 13\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\nprocessed xml"
}

-- NOTE(review): this test is an exact duplicate of the previous one (same
-- name, request, handler, and expectation); consider removing it or giving it
-- a distinct scenario.
#eval runTestCase {
  name := "Request with Content-Type and body"
  request := Request.new
    |>.method .post
    |>.uri! "/api/xml"
    |>.header! "Host" "api.example.com"
    |>.header! "Content-Type" "application/xml"
    |>.header! "Content-Length" "17"
    |>.header! "Connection" "close"
    |>.body #[.mk "<data>test</data>".toUTF8 #[]]
  handler := fun req => do
    if hasHeader req "Content-Type" "application/xml"
    then return Response.ok |>.body "processed xml"
    else return Response.new |>.status .unsupportedMediaType |>.body "unsupported"
  expected := "HTTP/1.1 200 OK\x0d\nContent-Length: 13\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\nprocessed xml"
}
-- Limits: every test below sends an over-limit request and expects the server
-- to reject it with 400 Bad Request before invoking the handler's happy path.

-- Header name longer than the 256-byte limit (257 'A' bytes).
#eval
  let bigString := String.fromUTF8! (ByteArray.mk (Array.ofFn (n := 257) (fun _ => 65)))
  runTestCase {
    name := "Huge String request"
    request := Request.new
      |>.method .head
      |>.uri! "/api/users"
      |>.header! "Host" "api.example.com"
      |>.header! bigString "a"
      |>.header! "Connection" "close"
      |>.body #[]
    handler := fun req => do
      if hasMethod req .head
      then return Response.ok
        |>.header (.ofString! bigString) (.ofString! "ata")
        |>.body ""
      else return Response.notFound
        |>.body ()
    expected := "HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
  }

-- Request line over the limit: a 2000-character path segment.
#eval runTestCase {
  name := "Request line too long"
  request :=
    Request.new
      |>.method .get
      |>.uri (.originForm (.mk #[URI.EncodedString.encode <| String.ofList (List.replicate 2000 'a')] true) none none)
      |>.header! "Host" "api.example.com"
      |>.header! "Connection" "close"
      |>.body #[]
  handler := fun req => do
    return Response.ok
      |>.body (toString (toString req.head.uri).length)
  expected := "HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
}

-- A single header line (Host) over the per-header size limit.
#eval runTestCase {
  name := "Header long"
  request :=
    Request.new
      |>.method .get
      |>.uri (.originForm (.mk #[URI.EncodedString.encode <| String.ofList (List.replicate 200 'a')] true) none none)
      |>.header! "Host" (String.ofList (List.replicate 8230 'a'))
      |>.header! "Connection" "close"
      |>.body #[]
  handler := fun req => do
    return Response.ok
      |>.body (toString (toString req.head.uri).length)
  expected := "HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
}

-- 101 extra headers exceed the header-count limit.
#eval runTestCase {
  name := "Too many headers"
  request := Id.run do
    let mut req := Request.new
      |>.method .get
      |>.uri! "/api/data"
      |>.header! "Host" "api.example.com"
      |>.header! "Connection" "close"
    for i in [0:101] do
      req := req |>.header! s!"X-Header-{i}" s!"value{i}"
    return req |>.body #[]
  handler := fun _ => do
    return Response.ok
      |>.body "success"
  expected := "HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
}

-- A single header value of 9000 bytes exceeds the value-size limit.
#eval runTestCase {
  name := "Header value too long"
  request := Request.new
    |>.method .get
    |>.uri! "/api/test"
    |>.header! "Host" "api.example.com"
    |>.header! "X-Long-Value" (String.ofList (List.replicate 9000 'x'))
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun _ => do
    return Response.ok
      |>.body "ok"
  expected := "HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
}

-- 200 headers of ~200 bytes each exceed the total header-section size limit.
#eval runTestCase {
  name := "Total headers size too large"
  request := Id.run do
    let mut req := Request.new
      |>.method .get
      |>.uri! "/api/data"
      |>.header! "Host" "api.example.com"
      |>.header! "Connection" "close"
    for i in [0:200] do
      req := req |>.header! s!"X-Header-{i}" (String.ofList (List.replicate 200 'a'))
    return req |>.body #[]
  handler := fun _ => do
    return Response.ok
      |>.body "success"
  expected := "HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
}
-- Tests: streaming bodies. The `←` binds on `let stream`/`let sleep` were
-- lost in extraction and are restored below.

-- A streamed body with an explicit Content-Length is sent unframed.
#eval runTestCase {
  name := "Streaming response with fixed Content-Length"
  request := Request.new
    |>.method .get
    |>.uri! "/stream"
    |>.header! "Host" "example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun req => do
    let stream ← Body.ByteStream.empty
    background do
      for i in [0:3] do
        let sleep ← Sleep.mk 5
        sleep.wait
        discard <| stream.write s!"chunk{i}\n".toUTF8
      stream.close
    return Response.ok
      |>.header (.new "content-length") (.new "21")
      |>.body stream
  expected := "HTTP/1.1 200 OK\x0d\nContent-Length: 21\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\nchunk0\nchunk1\nchunk2\n"
}

-- setKnownSize on the stream yields a Content-Length instead of chunking.
#eval runTestCase {
  name := "Streaming response with setKnownSize fixed"
  request := Request.new
    |>.method .get
    |>.uri! "/stream-sized"
    |>.header! "Host" "example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun _ => do
    let stream ← Body.ByteStream.empty
    stream.setKnownSize (some (.fixed 15))
    background do
      for i in [0:3] do
        discard <| stream.write s!"data{i}".toUTF8
      stream.close
    return Response.ok
      |>.body stream
  expected := "HTTP/1.1 200 OK\x0d\nContent-Length: 15\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\ndata0data1data2"
}

-- A stream of unknown size falls back to chunked transfer coding.
#eval runTestCase {
  name := "Streaming response with chunked encoding"
  request := Request.new
    |>.method .get
    |>.uri! "/stream-chunked"
    |>.header! "Host" "example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun _ => do
    let stream ← Body.ByteStream.empty
    background do
      discard <| stream.write "hello".toUTF8
      discard <| stream.write "world".toUTF8
      stream.close
    return Response.ok
      |>.body stream
  expected := "HTTP/1.1 200 OK\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n5\x0d\nhello\x0d\n5\x0d\nworld\x0d\n0\x0d\n\x0d\n"
}

-- A chunked request body answered with a streamed fixed-length response.
#eval runTestCase {
  name := "Chunked request with streaming response"
  request := Request.new
    |>.method .post
    |>.uri! "/"
    |>.header! "Host" "example.com"
    |>.header! "Transfer-Encoding" "chunked"
    |>.header! "Connection" "close"
    |>.body #[
      .mk "data1".toUTF8 #[],
      .mk "data2".toUTF8 #[]
    ]
  handler := fun req => do
    if isChunkedRequest req
    then
      let stream ← Body.ByteStream.empty
      background do
        for i in [0:2] do
          discard <| stream.write s!"response{i}".toUTF8
        stream.close
      return Response.ok
        |>.header (.new "content-length") (.new "18")
        |>.body stream
    else
      return Response.badRequest |>.body "not chunked"
  expected := "HTTP/1.1 200 OK\x0d\nContent-Length: 18\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\nresponse0response1"
  chunked := true
}

-- NOTE(review): duplicate of the previous test (same name and behavior);
-- consider removing it or giving it a distinct scenario.
#eval runTestCase {
  name := "Chunked request with streaming response"
  request := Request.new
    |>.method .post
    |>.uri! "/"
    |>.header! "Host" "example.com"
    |>.header! "Transfer-Encoding" "chunked"
    |>.header! "Connection" "close"
    |>.body #[
      .mk "data1".toUTF8 #[],
      .mk "data2".toUTF8 #[]
    ]
  handler := fun req => do
    if isChunkedRequest req
    then
      let stream ← Body.ByteStream.empty
      background do
        for i in [0:2] do
          discard <| stream.write s!"response{i}".toUTF8
        stream.close
      return Response.ok
        |>.header (.new "content-length") (.new "18")
        |>.body stream
    else
      return Response.badRequest
        |>.body "not chunked"
  expected := "HTTP/1.1 200 OK\x0d\nContent-Length: 18\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\nresponse0response1"
  chunked := true
}

-- "gzip, chunked" still counts as a chunked request (chunked is last).
#eval runTestCase {
  name := "Chunked request with streaming response and other encodings"
  request := Request.new
    |>.method .post
    |>.uri! "/"
    |>.header! "Host" "example.com"
    |>.header! "Transfer-Encoding" "gzip, chunked"
    |>.header! "Connection" "close"
    |>.body #[
      .mk "data1".toUTF8 #[],
      .mk "data2".toUTF8 #[]
    ]
  handler := fun req => do
    if isChunkedRequest req
    then
      let stream ← Body.ByteStream.empty
      background do
        for i in [0:2] do
          discard <| stream.write s!"response{i}".toUTF8
        stream.close
      return Response.ok
        |>.header (.new "content-length") (.new "18")
        |>.body stream
    else
      return Response.badRequest
        |>.body "not chunked"
  expected := "HTTP/1.1 200 OK\x0d\nContent-Length: 18\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\nresponse0response1"
  chunked := true
}

View File

@@ -0,0 +1,358 @@
import Std.Internal.Http
import Std.Internal.Async
import Std.Internal.Async.Timer
open Std.Internal.IO Async
open Std Http
/-- Test cancelling a slow request handler when the client never sends data:
the server should observe the cancellation and produce no response at all. -/
def testCancelSlowHandlerNotSendingData : IO Unit := do
  let res ← Async.block do
    let (client, server) ← Mock.new
    let ctx ← CancellationContext.new
    -- Start the server in the background
    let op := ContextAsync.background do
      Std.Http.Server.serveConnection server (fun _req => do
        -- Simulate a slow handler that should be cancelled
        Async.sleep 10000
        return Response.ok
          |>.body "should not complete"
      ) (config := { lingeringTimeout := 1000, keepAliveTimeout := 1000, by decide }) (fun _ => pure ())
    client.getRecvChan.close
    op ctx
    -- Wait a bit for the request to start processing
    Async.sleep 2000
    -- Cancel the context
    ctx.cancel .cancel
    -- Try to receive response - should get nothing or partial response
    -- The important thing is that the handler was cancelled
    client.recv?
  IO.println <| res.map (String.fromUTF8! · |>.quote)

/--
info: none
-/
#guard_msgs in
#eval testCancelSlowHandlerNotSendingData
/-- Test cancelling a slow request handler after a request was received:
the server answers 503 Service Unavailable for the in-flight request. -/
def testCancelSlowHandler : IO Unit := do
  let res ← Async.block do
    let (client, server) ← Mock.new
    let ctx ← CancellationContext.new
    -- Start the server in the background
    let op := ContextAsync.background do
      Std.Http.Server.serveConnection server (fun _req => do
        -- Simulate a slow handler that should be cancelled
        Async.sleep 10000
        return Response.ok
          |>.body "should not complete"
      ) (config := { lingeringTimeout := 1000, keepAliveTimeout := 1000, by decide }) (fun _ => pure ())
    client.getRecvChan.close
    -- Send a simple request
    client.send (String.toUTF8 "GET / HTTP/1.1\r\nHost: example.com\r\nConnection: close\r\n\r\n")
    op ctx
    -- Wait a bit for the request to start processing
    Async.sleep 2000
    -- Cancel the context
    ctx.cancel .cancel
    -- Try to receive response - should get nothing or partial response
    -- The important thing is that the handler was cancelled
    client.recv?
  IO.println <| res.map (String.fromUTF8! · |>.quote)

/--
info: (some ("HTTP/1.1 503 Service Unavailable\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"))
-/
#guard_msgs in
#eval testCancelSlowHandler
/-- Test server shutdown during request processing: cancelling with the
`.shutdown` reason while a handler is blocked yields a 503 response. -/
def testServerShutdownDuringRequest : IO Unit := do
  let res ← Async.block do
    let (client, server) ← Mock.new
    let ctx ← CancellationContext.new
    client.send (String.toUTF8 "GET / HTTP/1.1\r\nHost: example.com\r\nConnection: close\r\n\r\n")
    let op := ContextAsync.background do
      Std.Http.Server.serveConnection server (fun _req => do
        Async.sleep 100000
        return Response.ok |>.body "should not complete"
      ) (config := { lingeringTimeout := 5000 }) (fun _ => pure ())
    op.runIn ctx
    Async.sleep 1000
    ctx.cancel .shutdown
    client.recv?
  IO.println <| res.map (String.fromUTF8! · |>.quote)

/--
info: (some ("HTTP/1.1 503 Service Unavailable\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"))
-/
#guard_msgs in
#eval testServerShutdownDuringRequest
/-- Test cancelling during response streaming: the streaming callback polls
its context and stops writing chunks once cancellation is observed. -/
def testCancelDuringStreaming : IO Unit := Async.block do
  let (client, server) ← Mock.new
  let ctx ← CancellationContext.new
  -- Start the server with a streaming handler
  let op := ContextAsync.background do
    Std.Http.Server.serveConnection server (fun _req => do
      Response.new
        |>.status .ok
        |>.stream (fun s => do
          -- Write some chunks
          for i in [0:100] do
            let ctx ← ContextAsync.getContext
            -- Check if we were cancelled
            if ctx.isCancelled then
              break
            s.writeChunk (Chunk.mk s!"chunk {i}\n".toUTF8 #[])
            Async.sleep 50
          s.close
        )
    ) (config := { lingeringTimeout := 3000 }) (fun _ => pure ())
  op ctx
  -- Send request
  client.send (String.toUTF8 "GET / HTTP/1.1\r\nHost: example.com\r\nConnection: close\r\n\r\n")
  -- Wait for streaming to start
  Async.sleep 200
  -- Cancel while streaming
  ctx.cancel .cancel
  -- Try to receive remaining data
  let _ ← client.recv?

#eval testCancelDuringStreaming
/-- Test that CancellationContext.fork creates cancellable child contexts:
cancelling the parent context cancels the handler running in the fork. -/
def testContextFork : IO Unit := Async.block do
  let (client, server) ← Mock.new
  let parentCtx ← CancellationContext.new
  -- Start the server with forked contexts
  let op := ContextAsync.background do
    Std.Http.Server.serveConnection server (fun _req => do
      -- This runs in a forked context
      Async.sleep 10000
      return Response.ok
        |>.body "should not complete"
    ) (config := { lingeringTimeout := 3000 }) (fun _ => pure ())
  op parentCtx
  -- Send request
  client.send (String.toUTF8 "GET / HTTP/1.1\r\nHost: example.com\r\nConnection: close\r\n\r\n")
  -- Wait a bit
  Async.sleep 100
  -- Cancel parent context - should cancel forked child
  parentCtx.cancel .cancel
  let _ ← client.recv?

#eval testContextFork
/-- Test race with cancellation: the fast branch of a `race` wins and a normal
response is delivered before any cancellation can occur. -/
def testRaceWithCancellation : IO Unit := Async.block do
  let (client, server) ← Mock.new
  let ctx ← CancellationContext.new
  -- Start server with a race condition
  let op := ContextAsync.background do
    Std.Http.Server.serveConnection server (fun _req => do
      -- Race two operations, one should win before cancellation
      ContextAsync.race
        (do Async.sleep 50; return Response.ok |>.body "fast")
        (do Async.sleep 10000; return Response.ok |>.body "slow")
    ) (config := { lingeringTimeout := 3000 }) (fun _ => pure ())
  op ctx
  -- Send request
  client.send (String.toUTF8 "GET / HTTP/1.1\r\nHost: example.com\r\nConnection: close\r\n\r\n")
  -- Wait for the fast operation to complete
  Async.sleep 200
  -- The fast operation should have won
  let response ← client.recv?
  let responseData := String.fromUTF8! (response.getD .empty)
  -- Check that we got a response (not cancelled)
  if !responseData.contains "fast" then
    throw <| IO.userError s!"Expected response with 'fast', got: {responseData}"

#eval testRaceWithCancellation
/-- Test handler that checks for cancellation: the handler polls
`ContextAsync.isCancelled` inside its loop and bails out early with 503. -/
def testHandlerChecksCancellation : IO Unit := Async.block do
  let (client, server) ← Mock.new
  let ctx ← CancellationContext.new
  -- Start server with handler that checks cancellation
  let op := ContextAsync.background do
    Std.Http.Server.serveConnection server (fun _req => do
      -- Loop that checks for cancellation
      for _ in [0:100] do
        if ContextAsync.isCancelled then
          -- Handler detected cancellation and exits early
          return Response.new |>.status .serviceUnavailable |>.body "cancelled"
        Async.sleep 50
      return Response.ok |>.body "completed"
    ) (config := { lingeringTimeout := 3000 }) (fun _ => pure ())
  op ctx
  -- Send request
  client.send (String.toUTF8 "GET / HTTP/1.1\r\nHost: example.com\r\nConnection: close\r\n\r\n")
  -- Wait a bit
  Async.sleep 100
  -- Cancel
  ctx.cancel .cancel
  -- The handler should have detected cancellation
  let _ ← client.recv?

#eval testHandlerChecksCancellation
/-- Test multiple concurrent requests with cancellation: two connections share
one context, and a single cancel stops both in-flight handlers. -/
def testMultipleConcurrentRequestsWithCancel : IO Unit := Async.block do
  let (client1, server1) ← Mock.new
  let (client2, server2) ← Mock.new
  let ctx ← CancellationContext.new
  -- Start two server connections
  let op := ContextAsync.background do
    Std.Http.Server.serveConnection server1 (fun _req => do
      Async.sleep 10000
      return Response.ok |>.body "server1"
    ) (config := { lingeringTimeout := 3000 }) (fun _ => pure ())
  op ctx
  let op := ContextAsync.background do
    Std.Http.Server.serveConnection server2 (fun _req => do
      Async.sleep 10000
      return Response.ok |>.body "server2"
    ) (config := { lingeringTimeout := 3000 }) (fun _ => pure ())
  op ctx
  -- Send requests to both
  client1.send (String.toUTF8 "GET /1 HTTP/1.1\r\nHost: example.com\r\nConnection: close\r\n\r\n")
  client2.send (String.toUTF8 "GET /2 HTTP/1.1\r\nHost: example.com\r\nConnection: close\r\n\r\n")
  -- Wait a bit
  Async.sleep 100
  -- Cancel all - both should be cancelled
  ctx.cancel .cancel
  let _ ← client1.recv?
  let _ ← client2.recv?

#eval testMultipleConcurrentRequestsWithCancel
/-- Test deadline-based cancellation: cancelling with the `.deadline` reason
is observable through `getCancellationReason`. -/
def testDeadlineCancellation : IO Unit := Async.block do
  let (client, server) ← Mock.new
  let ctx ← CancellationContext.new
  -- Start server
  let op := ContextAsync.background do
    Std.Http.Server.serveConnection server (fun _req => do
      Async.sleep 10000
      return Response.ok |>.body "should timeout"
    ) (config := { lingeringTimeout := 3000 }) (fun _ => pure ())
  op ctx
  -- Send request
  client.send (String.toUTF8 "GET / HTTP/1.1\r\nHost: example.com\r\nConnection: close\r\n\r\n")
  -- Wait a bit
  Async.sleep 100
  -- Cancel with deadline reason (simulating timeout)
  ctx.cancel .deadline
  -- Should get cancellation reason
  let reason ← ctx.getCancellationReason
  if reason != some .deadline then
    throw <| IO.userError s!"Expected deadline cancellation, got: {reason}"
  let _ ← client.recv?

#eval testDeadlineCancellation
/-- Test that completed requests don't get affected by cancellation: a fast
handler finishes and its 200 OK is received before the context is cancelled. -/
def testCompletedRequestNotAffected : IO Unit := Async.block do
  let (client, server) ← Mock.new
  let ctx ← CancellationContext.new
  -- Start server with fast handler
  let op := ContextAsync.background do
    Std.Http.Server.serveConnection server (fun _req => do
      -- Fast handler that completes before cancellation
      return Response.ok |>.body "completed"
    ) (config := { lingeringTimeout := 3000 }) (fun _ => pure ())
  op ctx
  -- Send request
  client.send (String.toUTF8 "GET / HTTP/1.1\r\nHost: example.com\r\nConnection: close\r\n\r\n")
  -- Wait for completion
  Async.sleep 200
  -- Get response before cancellation
  let response ← client.recv?
  let responseData := String.fromUTF8! (response.getD .empty)
  -- Now cancel (should not affect already completed request)
  ctx.cancel .cancel
  -- Verify we got the expected response
  if !responseData.contains "200 OK" then
    throw <| IO.userError s!"Expected 200 OK response, got: {responseData}"

#eval testCompletedRequestNotAffected

View File

@@ -0,0 +1,209 @@
import Std.Internal.Http.Data.Headers
open Std Http
-- ============================================================================
-- Headers.merge tests
-- ============================================================================

/--
info: 2
-/
#guard_msgs in
#eval do
  -- Merge with overlapping keys - multimap behavior keeps both values
  let h1 := Headers.empty.insert (.ofString! "content-type") (.ofString! "text/plain")
  let h2 := Headers.empty.insert (.ofString! "content-type") (.ofString! "application/json")
  let merged := h1.merge h2
  -- After merge, content-type should have both values
  IO.println (merged.getAll? (.ofString! "content-type")).get!.size

/--
info: 3
-/
#guard_msgs in
#eval do
  -- Merge with non-overlapping keys
  let h1 := Headers.empty.insert (.ofString! "x-custom-1") (.ofString! "value1")
  let h2 := Headers.empty
    |>.insert (.ofString! "x-custom-2") (.ofString! "value2")
    |>.insert (.ofString! "x-custom-3") (.ofString! "value3")
  let merged := h1.merge h2
  IO.println merged.size

-- ============================================================================
-- Headers.getAll tests (multi-value headers)
-- ============================================================================

/--
info: 3
-/
#guard_msgs in
#eval do
  -- Multiple values for same header
  let headers := Headers.empty
    |>.insert (.ofString! "accept") (.ofString! "text/html")
    |>.insert (.ofString! "accept") (.ofString! "application/json")
    |>.insert (.ofString! "accept") (.ofString! "text/plain")
  if let some values := headers.getAll? (.ofString! "accept") then
    IO.println values.size
  else
    IO.println "not found"

-- ============================================================================
-- Case-insensitive header lookup
-- ============================================================================

#guard
  let headers := Headers.empty.insert (.ofString! "content-type") (.ofString! "text/plain")
  -- All these should find the same header (case-insensitive)
  headers.contains (.ofString! "content-type") &&
  headers.contains (.ofString! "Content-Type") &&
  headers.contains (.ofString! "CONTENT-TYPE")

/--
info: text/plain
-/
#guard_msgs in
#eval do
  let headers := Headers.empty.insert (.ofString! "Content-Type") (.ofString! "text/plain")
  if let some v := headers.get? (.ofString! "content-type") then
    IO.println v.value
  else
    IO.println "not found"

-- ============================================================================
-- Invalid header name characters
-- ============================================================================

#guard (Header.Name.ofString? "valid-name").isSome
#guard (Header.Name.ofString? "").isNone -- empty
#guard (Header.Name.ofString? "has space").isNone -- space invalid
#guard (Header.Name.ofString? "has(paren").isNone -- parentheses invalid
#guard (Header.Name.ofString? "has,comma").isNone -- comma invalid

-- ============================================================================
-- Header value validation
-- ============================================================================

#guard (Header.Value.ofString? "valid value").isSome
#guard (Header.Value.ofString? "value with tab\t").isSome -- tab is valid
-- ============================================================================
-- HasAll proofs
-- ============================================================================

/--
info: true
-/
#guard_msgs in
#eval do
  let headers := Headers.empty
    |>.insert (.ofString! "content-type") (.ofString! "application/json")
    |>.insert (.ofString! "host") (.ofString! "example.com")
    |>.insert (.ofString! "accept") (.ofString! "text/plain")
  -- Check HasAll for a subset of headers
  let hasAll : Bool := match Headers.HasAll.decidable (h := headers) (l := ["content-type", "host"]) with
    | isTrue _ => true
    | isFalse _ => false
  IO.println hasAll

/--
info: false
-/
#guard_msgs in
#eval do
  let headers := Headers.empty
    |>.insert (.ofString! "content-type") (.ofString! "application/json")
  -- Check HasAll for headers not present
  let hasAll : Bool := match Headers.HasAll.decidable (h := headers) (l := ["content-type", "missing-header"]) with
    | isTrue _ => true
    | isFalse _ => false
  IO.println hasAll

-- ============================================================================
-- Headers iteration (toArray, toList)
-- ============================================================================

/--
info: 2
-/
#guard_msgs in
#eval do
  let headers := Headers.empty
    |>.insert (.ofString! "a") (.ofString! "1")
    |>.insert (.ofString! "b") (.ofString! "2")
  IO.println headers.toArray.size

/--
info: 2
-/
#guard_msgs in
#eval do
  let headers := Headers.empty
    |>.insert (.ofString! "x") (.ofString! "y")
    |>.insert (.ofString! "z") (.ofString! "w")
  IO.println headers.toList.length

-- ============================================================================
-- Header name constants
-- ============================================================================

#guard Header.Name.contentType == .ofString! "content-type"
#guard Header.Name.contentLength == .ofString! "content-length"
#guard Header.Name.host == .ofString! "host"
#guard Header.Name.authorization == .ofString! "authorization"
#guard Header.Name.userAgent == .ofString! "user-agent"
#guard Header.Name.accept == .ofString! "accept"
#guard Header.Name.connection == .ofString! "connection"
#guard Header.Name.transferEncoding == .ofString! "transfer-encoding"

-- ============================================================================
-- Using header name constants in practice
-- ============================================================================

/--
info: application/json
-/
#guard_msgs in
#eval do
  let headers := Headers.empty
    |>.insert Header.Name.contentType (.ofString! "application/json")
  if let some v := headers.get? Header.Name.contentType then
    IO.println v.value
  else
    IO.println "not found"

-- ============================================================================
-- Headers filter and map
-- ============================================================================

/--
info: 1
-/
#guard_msgs in
#eval do
  let headers := Headers.empty
    |>.insert (.ofString! "x-custom") (.ofString! "keep")
    |>.insert (.ofString! "x-remove") (.ofString! "remove")
  let filtered := headers.filter (fun name _ => name.is "x-custom")
  IO.println filtered.size

-- ============================================================================
-- Headers update
-- ============================================================================

/--
info: updated
-/
#guard_msgs in
#eval do
  let headers := Headers.empty
    |>.insert (.ofString! "x-value") (.ofString! "original")
  let updated := headers.update (.ofString! "x-value") (fun _ => .ofString! "updated")
  if let some v := updated.get? (.ofString! "x-value") then
    IO.println v.value
  else
    IO.println "not found"

View File

@@ -0,0 +1,512 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
import Std.Internal.Http.Protocol.H1.Parser
open Std.Http.Protocol
/-!
# HTTP/1.1 Parser Tests
Comprehensive tests for H1 protocol parsing including chunks, headers,
request lines, status lines, and edge cases.
-/
/-- Runs `parser` over the UTF-8 bytes of `s`, lifting parse failure into an
`IO` error via `IO.ofExcept`. -/
def runParser (parser : Std.Internal.Parsec.ByteArray.Parser α) (s : String) : IO α :=
  IO.ofExcept (parser.run s.toUTF8)
-- ============================================================================
-- Chunk Parsing Tests
-- ============================================================================

/--
info: 16 / #[] / "adasdssdabcdabde"
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser (H1.parseChunk {}) "10\r\nadasdssdabcdabde"
  match result with
  | some (size, ext, body) => IO.println s!"{size} / {ext} / {String.fromUTF8! body.toByteArray |>.quote}"
  | none => IO.println "end chunk"

/--
info: end chunk
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser (H1.parseChunk {}) "0\r\n"
  match result with
  | some (size, ext, body) => IO.println s!"{size} / {ext} / {String.fromUTF8! body.toByteArray |>.quote}"
  | none => IO.println "end chunk"

/--
info: 255 / #[] / "This is a test chunk with exactly 255 bytes of data. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris lorem ipsu."
-/
#guard_msgs in
#eval show IO _ from do
  let testData := "This is a test chunk with exactly 255 bytes of data. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris lorem ipsu."
  let result ← runParser (H1.parseChunk {}) s!"FF\r\n{testData}"
  match result with
  | some (size, ext, body) => IO.println s!"{size} / {ext} / {String.fromUTF8! body.toByteArray |>.quote}"
  | none => IO.println "end chunk"

-- ============================================================================
-- Chunk Size Parsing Tests
-- ============================================================================

/--
info: 16 / #[(abc, none), (def, none), (g, (some h))]
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser (H1.parseChunkSize {}) "10;abc;def;g=h\r\n"
  IO.println s!"{result.1} / {result.2}"

/--
info: 0 / #[(abc, none), (def, none), (g, (some h))]
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser (H1.parseChunkSize {}) "0;abc;def;g=h\r\n"
  IO.println s!"{result.1} / {result.2}"

/--
info: 4095 / #[]
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser (H1.parseChunkSize {}) "FFF\r\n"
  IO.println s!"{result.1} / {result.2}"

/--
info: 1 / #[(name, (some (value with spaces)))]
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser (H1.parseChunkSize {}) "1;name=\"value with spaces\"\r\n"
  IO.println s!"{result.1} / {result.2}"

/--
info: 0 / #[]
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser (H1.parseChunkSize {}) "0\r\n"
  IO.println s!"{result.1} / {result.2}"

/--
info: 16 / #[]
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser (H1.parseChunkSize {}) "10\r\n"
  IO.println s!"{result.1} / {result.2}"

/--
info: 255 / #[]
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser (H1.parseChunkSize {}) "FF\r\n"
  IO.println s!"{result.1} / {result.2}"

-- Hex digits are accepted in either case.
/--
info: 255 / #[]
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser (H1.parseChunkSize {}) "ff\r\n"
  IO.println s!"{result.1} / {result.2}"

-- ============================================================================
-- Chunk Extension Tests
-- ============================================================================

/--
info: 10 / #[(ext1, none)]
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser (H1.parseChunkSize {}) "A;ext1\r\n"
  IO.println s!"{result.1} / {result.2}"

/--
info: 10 / #[(name, (some value))]
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser (H1.parseChunkSize {}) "A;name=value\r\n"
  IO.println s!"{result.1} / {result.2}"

/--
info: 10 / #[(name, (some (value with spaces)))]
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser (H1.parseChunkSize {}) "A;name=\"value with spaces\"\r\n"
  IO.println s!"{result.1} / {result.2}"
-- ============================================================================
-- Single Header Parsing Tests
-- ============================================================================
/--
info: User-Agent / "Mozilla/5.0 (X11; Linux x86_64; rv:143.0) Gecko/20100101 Firefox/143.0"
-/
#guard_msgs in
#eval show IO _ from do
let result runParser (H1.parseSingleHeader {}) "User-Agent: Mozilla/5.0 (X11; Linux x86_64; rv:143.0) Gecko/20100101 Firefox/143.0\r\n"
match result with
| some (k, v) => IO.println s!"{k} / {v.quote}"
| none => IO.println "end"
/--
info: Content-Type / "application/json; charset=utf-8"
-/
#guard_msgs in
#eval show IO _ from do
let result runParser (H1.parseSingleHeader {}) "Content-Type: application/json; charset=utf-8\r\n"
match result with
| some (k, v) => IO.println s!"{k} / {v.quote}"
| none => IO.println "end"
/--
info: Authorization / Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9
-/
#guard_msgs in
#eval show IO _ from do
let result runParser (H1.parseSingleHeader {}) "Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9\r\n"
match result with
| some (k, v) => IO.println s!"{k} / {v}"
| none => IO.println "end"
/--
info: end
-/
#guard_msgs in
#eval show IO _ from do
let result runParser (H1.parseSingleHeader {}) "\r\n"
match result with
| some (k, v) => IO.println s!"{k} / {v}"
| none => IO.println "end"
/--
info: X-Custom-Header / value with multiple spaces
-/
#guard_msgs in
#eval show IO _ from do
let result runParser (H1.parseSingleHeader {}) "X-Custom-Header: value with multiple spaces\r\n"
match result with
| some (k, v) => IO.println s!"{k} / {v}"
| none => IO.println "end"
/--
info: Valid-Name / value
-/
#guard_msgs in
#eval show IO _ from do
let result runParser (H1.parseSingleHeader {}) "Valid-Name: value\r\n"
match result with
| some (k, v) => IO.println s!"{k} / {v}"
| none => IO.println "end"
/--
info: X-Custom-123 / test
-/
#guard_msgs in
#eval show IO _ from do
let result runParser (H1.parseSingleHeader {}) "X-Custom-123: test\r\n"
match result with
| some (k, v) => IO.println s!"{k} / {v}"
| none => IO.println "end"
/--
info: X-Special / value with spaces and !@#$%^&*()
-/
#guard_msgs in
#eval show IO _ from do
let result runParser (H1.parseSingleHeader {}) "X-Special: value with spaces and !@#$%^&*()\r\n"
match result with
| some (k, v) => IO.println s!"{k} / {v}"
| none => IO.println "end"
-- ============================================================================
-- Header Edge Cases
-- ============================================================================
-- Empty header value requires at least one character and fails
/--
error: offset 8: expected at least one char
-/
#guard_msgs in
#eval show IO _ from do
let _ runParser (H1.parseSingleHeader {}) "X-Empty:\r\n"
IO.println "should not reach"
-- Tab character is preserved in header value
/--
info: X-Tab / "\t"
-/
#guard_msgs in
#eval show IO _ from do
let result runParser (H1.parseSingleHeader {}) "X-Tab:\t\r\n"
match result with
| some (k, v) => IO.println s!"{k} / {v.quote}"
| none => IO.println "end"
-- Long header values (near limit of 8192)
/--
info: X-Long / 8000 chars
-/
#guard_msgs in
#eval show IO _ from do
let longValue := String.ofList (List.replicate 8000 'x')
let result runParser (H1.parseSingleHeader {}) s!"X-Long: {longValue}\r\n"
match result with
| some (k, v) => IO.println s!"{k} / {v.length} chars"
| none => IO.println "end"
-- ============================================================================
-- Trailer Parsing Tests
-- ============================================================================
/--
error: offset 0: unexpected end of input
-/
#guard_msgs in
#eval show IO _ from do
let result runParser (H1.parseTrailers {}) ""
IO.println s!"{result}"
/--
info: #[]
-/
#guard_msgs in
#eval show IO _ from do
let result runParser (H1.parseTrailers {}) "\r\n"
IO.println s!"{result}"
/--
info: #[(X-Checksum, abc123), (X-Timestamp, 2023-01-01T12:00:00Z)]
-/
#guard_msgs in
#eval show IO _ from do
let result runParser (H1.parseTrailers {}) "X-Checksum: abc123\r\nX-Timestamp: 2023-01-01T12:00:00Z\r\n\r\n"
IO.println s!"{result}"
/--
info: #[(X-Checksum, abc123)]
-/
#guard_msgs in
#eval show IO _ from do
let result runParser (H1.parseTrailers {}) "X-Checksum: abc123\r\n\r\n"
IO.println s!"{result}"
/--
info: #[(X-First, value1), (X-Second, value2)]
-/
#guard_msgs in
#eval show IO _ from do
let result runParser (H1.parseTrailers {}) "X-First: value1\r\nX-Second: value2\r\n\r\n"
IO.println s!"{result}"
-- ============================================================================
-- Request Line Parsing Tests
-- ============================================================================
/--
info: Std.Http.Method.get / Std.Http.RequestTarget.originForm { segments := #["ata", ""], absolute := true } none none / Std.Http.Version.v11
-/
#guard_msgs in
#eval show IO _ from do
let result runParser (H1.parseRequestLine {}) "GET /ata/ HTTP/1.1\r\n"
IO.println s!"{repr result.method} / {repr result.uri} / {repr result.version}"
/--
info: Std.Http.Method.post / Std.Http.RequestTarget.originForm { segments := #["api", "v1", "users"], absolute := true } none none / Std.Http.Version.v11
-/
#guard_msgs in
#eval show IO _ from do
let result runParser (H1.parseRequestLine {}) "POST /api/v1/users HTTP/1.1\r\n"
IO.println s!"{repr result.method} / {repr result.uri} / {repr result.version}"
/--
info: Std.Http.Method.put / Std.Http.RequestTarget.originForm
{ segments := #["data"], absolute := true }
(some #[("param1", some "value1"), ("param2", some "value2")])
none / Std.Http.Version.v11
-/
#guard_msgs in
#eval show IO _ from do
let result runParser (H1.parseRequestLine {}) "PUT /data?param1=value1&param2=value2 HTTP/1.1\r\n"
IO.println s!"{repr result.method} / {repr result.uri} / {repr result.version}"
/--
info: Std.Http.Method.delete / Std.Http.RequestTarget.originForm { segments := #["items", "123"], absolute := true } none none / Std.Http.Version.v11
-/
#guard_msgs in
#eval show IO _ from do
let result runParser (H1.parseRequestLine {}) "DELETE /items/123 HTTP/1.1\r\n"
IO.println s!"{repr result.method} / {repr result.uri} / {repr result.version}"
/--
info: Std.Http.Method.head / Std.Http.RequestTarget.originForm { segments := #[], absolute := true } none none / Std.Http.Version.v11
-/
#guard_msgs in
#eval show IO _ from do
let result runParser (H1.parseRequestLine {}) "HEAD / HTTP/1.1\r\n"
IO.println s!"{repr result.method} / {repr result.uri} / {repr result.version}"
/--
info: Std.Http.Method.options / Std.Http.RequestTarget.asteriskForm / Std.Http.Version.v11
-/
#guard_msgs in
#eval show IO _ from do
let result runParser (H1.parseRequestLine {}) "OPTIONS * HTTP/1.1\r\n"
IO.println s!"{repr result.method} / {repr result.uri} / {repr result.version}"
/--
info: Std.Http.Method.get / Std.Http.Version.v11
-/
#guard_msgs in
#eval show IO _ from do
let result runParser (H1.parseRequestLine {}) "GET / HTTP/1.1\r\n"
IO.println s!"{repr result.method} / {repr result.version}"
-- ============================================================================
-- All Standard HTTP Methods
-- ============================================================================
/--
info: Std.Http.Method.head
-/
#guard_msgs in
#eval show IO _ from do
let result runParser (H1.parseRequestLine {}) "HEAD / HTTP/1.1\r\n"
IO.println s!"{repr result.method}"
/--
info: Std.Http.Method.put
-/
#guard_msgs in
#eval show IO _ from do
let result runParser (H1.parseRequestLine {}) "PUT /resource HTTP/1.1\r\n"
IO.println s!"{repr result.method}"
/--
info: Std.Http.Method.patch
-/
#guard_msgs in
#eval show IO _ from do
let result runParser (H1.parseRequestLine {}) "PATCH /resource HTTP/1.1\r\n"
IO.println s!"{repr result.method}"
/--
info: Std.Http.Method.options
-/
#guard_msgs in
#eval show IO _ from do
let result runParser (H1.parseRequestLine {}) "OPTIONS * HTTP/1.1\r\n"
IO.println s!"{repr result.method}"
/--
info: Std.Http.Method.trace
-/
#guard_msgs in
#eval show IO _ from do
let result runParser (H1.parseRequestLine {}) "TRACE / HTTP/1.1\r\n"
IO.println s!"{repr result.method}"
/--
info: Std.Http.Method.connect
-/
#guard_msgs in
#eval show IO _ from do
let result runParser (H1.parseRequestLine {}) "CONNECT example.com:443 HTTP/1.1\r\n"
IO.println s!"{repr result.method}"
-- ============================================================================
-- Invalid HTTP Versions
-- ============================================================================
/--
error: offset 14: expected value but got none
-/
#guard_msgs in
#eval show IO _ from do
let _ runParser (H1.parseRequestLine {}) "GET / HTTP/1.0\r\n"
IO.println "should not reach"
/--
info: Std.Http.Method.get / Std.Http.Version.v20
-/
#guard_msgs in
#eval show IO _ from do
let result runParser (H1.parseRequestLine {}) "GET / HTTP/2.0\r\n"
IO.println s!"{repr result.method} / {repr result.version}"
/--
error: offset 14: expected value but got none
-/
#guard_msgs in
#eval show IO _ from do
let _ runParser (H1.parseRequestLine {}) "GET / HTTP/3.1\r\n"
IO.println "should not reach"
-- ============================================================================
-- Case-Sensitive Method Names
-- ============================================================================
/--
error: offset 0: expected: '80'
-/
#guard_msgs in
#eval show IO _ from do
let _ runParser (H1.parseRequestLine {}) "get / HTTP/1.1\r\n"
IO.println "should not reach"
/--
error: offset 1: expected: '69'
-/
#guard_msgs in
#eval show IO _ from do
let _ runParser (H1.parseRequestLine {}) "Get / HTTP/1.1\r\n"
IO.println "should not reach"
/--
error: offset 1: expected: '65'
-/
#guard_msgs in
#eval show IO _ from do
let _ runParser (H1.parseRequestLine {}) "Post / HTTP/1.1\r\n"
IO.println "should not reach"
-- ============================================================================
-- Status Line Parsing Tests
-- ============================================================================
/--
info: Std.Http.Status.ok / Std.Http.Version.v11
-/
#guard_msgs in
#eval show IO _ from do
let result runParser (H1.parseStatusLine {}) "HTTP/1.1 200 OK\r\n"
IO.println s!"{repr result.status} / {repr result.version}"
/--
info: Std.Http.Status.notFound / Std.Http.Version.v11
-/
#guard_msgs in
#eval show IO _ from do
let result runParser (H1.parseStatusLine {}) "HTTP/1.1 404 Not Found\r\n"
IO.println s!"{repr result.status} / {repr result.version}"
/--
info: Std.Http.Status.internalServerError / Std.Http.Version.v11
-/
#guard_msgs in
#eval show IO _ from do
let result runParser (H1.parseStatusLine {}) "HTTP/1.1 500 Internal Server Error\r\n"
IO.println s!"{repr result.status} / {repr result.version}"

View File

@@ -0,0 +1,110 @@
import Std.Internal.Http
import Std.Internal.Async.TCP
import Std.Time
import Std.Data.Iterators
open Std.Internal.IO.Async
open Std.Http
open Std Iterators
/--
Waits one second, then yields a small payload.
The payload is exactly 2 bytes ("?\n"), which is why the streaming tests
expect chunk-size `2` in the chunked response.
-/
def theTimeInTheFuture : Async ByteArray := do
  -- `←` restores the bind dropped by the page extraction: `Sleep.mk` returns the timer.
  (← Sleep.mk 1000).wait
  return s!"?\n".toUTF8
/-- Endless iterator that produces one `theTimeInTheFuture` payload per step. -/
def tick :=
  Iter.repeat (fun _ => ()) () |>.mapM (fun _ => theTimeInTheFuture)
/--
Drains at most `count` items from the iterator `i` into the body stream `s`,
tagging each chunk with a `time` extension carrying its index, then closes
the stream so the chunked encoder emits the final `0`-size chunk.
-/
def writeToStream (s : Body.ByteStream) {α : Type} [Iterator α Async ByteArray] [IteratorLoop α Async Async]
    (i : Std.IterM (α := α) Async ByteArray) (count : Nat) : Async Unit := do
  let mut n := 0
  -- `tick` is infinite, so the loop needs `allowNontermination`; `break` bounds it.
  for b in i.allowNontermination do
    if n >= count then break
    s.writeChunk (Chunk.mk b #[("time", some (toString n))])
    n := n + 1
  s.close
/-- Convert an HTTP request to a byte array: the serialized head followed by
the raw data of every body chunk (no chunked framing). -/
def requestToByteArray (req : Request (Array Chunk)) : IO ByteArray := Async.block do
  let mut data := String.toUTF8 <| toString req.head
  for part in req.body do data := data ++ part.data
  return data
/-- Send a request through a mock connection and return the response data. -/
def sendRequest (client : Mock.Client) (server : Mock.Server) (req : Request (Array Chunk))
    (onRequest : Request Body → ContextAsync (Response Body)) : IO ByteArray := Async.block do
  let data ← requestToByteArray req
  client.send data
  -- NOTE(review): the record field that `by decide` proves lost its name in the
  -- page extraction — restore the original field from upstream before compiling.
  Std.Http.Server.serveConnection server onRequest (fun _ => pure ()) (config := { lingeringTimeout := 3000, keepAliveTimeout := 1000, by decide })
    |>.run
  let res ← client.recv?
  pure <| res.getD .empty
/--
End-to-end streaming test: a GET with `Connection: close` is answered by a
handler that streams three ticks, and the response must be a chunked
HTTP/1.1 200 with a headers/body split.
-/
def testStreamingResponse : IO Unit := do
  let pair ← Mock.new
  let (client, server) := pair
  let request := Request.new
    |>.method .get
    |>.uri! "/stream"
    |>.header! "Host" "localhost"
    |>.header! "Connection" "close"
    |>.body #[]
  let response ← sendRequest client server request handle
  let responseData := String.fromUTF8! response
  IO.println responseData.quote
  -- Check that response starts with correct HTTP status line
  if !responseData.startsWith "HTTP/1.1 200 OK\x0d\n" then
    throw <| IO.userError "Response should start with HTTP/1.1 200 OK"
  -- Check that Transfer-Encoding header is present (for streaming)
  if !responseData.contains "Transfer-Encoding: chunked" then
    throw <| IO.userError "Response should use chunked transfer encoding"
  -- Check that we got multiple chunks (at least 3 time stamps)
  let bodyStart := responseData.splitOn "\x0d\n\x0d\n"
  if bodyStart.length < 2 then
    throw <| IO.userError "Response should have headers and body"
where
  -- Streams three chunks from `tick` into the response body.
  handle (_req : Request Body) : ContextAsync (Response Body) :=
    Response.new
      |>.status .ok
      |>.stream (writeToStream · tick 3)

/--
info: "HTTP/1.1 200 OK\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n2;time=0\x0d\n?\n\x0d\n2;time=1\x0d\n?\n\x0d\n2;time=2\x0d\n?\n\x0d\n0\x0d\n\x0d\n"
-/
#guard_msgs in
#eval testStreamingResponse
/-- Test that without Connection: close, the server waits and times out -/
def testTimeout : IO Unit := do
  let pair ← Mock.new
  let (client, server) := pair
  -- Request WITHOUT Connection: close header
  let request := Request.new
    |>.method .get
    |>.uri! "/stream"
    |>.header! "Host" "localhost"
    |>.body #[]
  let response ← sendRequest client server request handle
  let responseData := String.fromUTF8! response
  IO.println responseData.quote
where
  -- Plain empty 200 response; the keep-alive timeout then closes the connection.
  handle (_req : Request Body) : ContextAsync (Response Body) :=
    return Response.new
      |>.status .ok
      |>.build

/--
info: "HTTP/1.1 200 OK\x0d\nContent-Length: 0\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval testTimeout

View File

@@ -0,0 +1,511 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
import Std.Internal.Http
import Std.Internal.Async
open Std.Internal.IO.Async
open Std Http
/-!
# HTTP Server Tests
Comprehensive tests for HTTP server compliance, security, and request handling.
Tests raw byte handling, request smuggling prevention, and protocol compliance.
-/
-- ============================================================================
-- Helper Functions
-- ============================================================================
/--
Serializes a request to raw bytes: head first, then each body chunk.
With `chunked := true` every chunk is framed with the HTTP/1.1 chunked
encoder and a terminating zero-size chunk is appended.
-/
def requestToByteArray (req : Request (Array Chunk)) (chunked := false) : IO ByteArray := Async.block do
  let mut data := String.toUTF8 <| toString req.head
  let toByteArray (part : Chunk) := Internal.Encode.encode .v11 .empty part |>.toByteArray
  for part in req.body do data := data ++ (if chunked then toByteArray part else part.data)
  if chunked then data := data ++ toByteArray (Chunk.mk .empty .empty)
  return data
/--
Feeds the given raw byte fragments into a mock connection, runs the server
loop with `onRequest`, and returns everything the server wrote back
(empty if the server sent nothing).
-/
def sendRawBytes (data : Array ByteArray)
    (onRequest : Request Body → ContextAsync (Response Body))
    (config : Config := { lingeringTimeout := 3000 }) : IO ByteArray := Async.block do
  let (client, server) ← Mock.new
  for d in data do
    client.send d
  client.close
  Std.Http.Server.serveConnection server onRequest (fun _ => pure ()) config |>.run
  let res ← client.recv?
  pure <| res.getD .empty
/-- Handler that collects the whole request body and echoes it back with 200 OK. -/
def echoHandler (req : Request Body) : ContextAsync (Response Body) := do
  let mut data := ByteArray.empty
  for chunk in req.body do
    data := data ++ chunk.data
  return Response.new
    |>.status .ok
    |>.body data
/--
Echo handler with a body-size cap: accumulates chunks and answers
413 Payload Too Large (closing the connection) as soon as the running
total exceeds `maxSize`; otherwise echoes the body with 200 OK.
-/
def maximumSizeHandlerEcho (maxSize : Nat) (req : Request Body) : ContextAsync (Response Body) := do
  let mut size := 0
  let mut data := ByteArray.empty
  for i in req.body do
    size := size + i.size
    data := data ++ i.data
    -- NOTE(review): original indentation was stripped by extraction; the check is
    -- placed inside the loop (early bail-out while streaming) — confirm upstream.
    if size > maxSize then
      return Response.new
        |>.status .payloadTooLarge
        |>.header! "Connection" "close"
        |>.body .empty
  return Response.new
    |>.status .ok
    |>.body data
-- ============================================================================
-- Fragmented Request Tests
-- ============================================================================

/--
info: "HTTP/1.1 200 OK\x0d\nContent-Length: 1\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\na"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["POST /ata/po HTTP/1.1\r\nCont".toUTF8, "ent-Length: 1\r\nHost: ata\r\n\r\na".toUTF8] (maximumSizeHandlerEcho 150)
  IO.println <| String.quote <| String.fromUTF8! response

/--
info: "HTTP/1.1 200 OK\x0d\nContent-Length: 5\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\nhello"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["PO".toUTF8, "ST / HTTP/1.1\r\nContent-Length: 5\r\nHost: test\r\n\r\nhello".toUTF8] (maximumSizeHandlerEcho 150)
  IO.println <| String.quote <| String.fromUTF8! response

/--
info: "HTTP/1.1 200 OK\x0d\nContent-Length: 4\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\ntest"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["POST /path/to/".toUTF8, "resource HTTP/1.1\r\nContent-Length: 4\r\nHost: test\r\n\r\ntest".toUTF8] (maximumSizeHandlerEcho 150)
  IO.println <| String.quote <| String.fromUTF8! response

/--
info: "HTTP/1.1 200 OK\x0d\nContent-Length: 10\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\nhelloworld"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["POST / HTTP/1.1\r\nContent-Length: 10\r\nHost: test\r\n\r\nhello".toUTF8, "world".toUTF8] (maximumSizeHandlerEcho 150)
  IO.println <| String.quote <| String.fromUTF8! response

/--
info: "HTTP/1.1 200 OK\x0d\nContent-Length: 2\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\nok"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["P".toUTF8, "O".toUTF8, "ST / HTTP/1.1\r\nContent-Length: 2\r\nHost: test\r\n\r\n".toUTF8, "o".toUTF8, "k".toUTF8] (maximumSizeHandlerEcho 150)
  IO.println <| String.quote <| String.fromUTF8! response

-- ============================================================================
-- Basic HTTP Methods
-- ============================================================================

/--
info: "HTTP/1.1 200 OK\x0d\nContent-Length: 0\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["GET / HTTP/1.1\r\nHost: test\r\n\r\n".toUTF8] echoHandler
  IO.println <| String.quote <| String.fromUTF8! response

/--
info: "HTTP/1.1 200 OK\x0d\nContent-Length: 0\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["POST / HTTP/1.1\r\nContent-Length: 0\r\nHost: test\r\n\r\n".toUTF8] (maximumSizeHandlerEcho 150)
  IO.println <| String.quote <| String.fromUTF8! response

-- ============================================================================
-- Chunked Encoding Tests
-- ============================================================================

/--
info: "HTTP/1.1 200 OK\x0d\nContent-Length: 11\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\nhello world"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["POST / HTTP/1.1\r\nTransfer-Encoding: chunked\r\nHost: test\r\n\r\n5\r\nhello\r\n6\r\n world\r\n0\r\n\r\n".toUTF8] (maximumSizeHandlerEcho 150)
  IO.println <| String.quote <| String.fromUTF8! response

/--
info: "HTTP/1.1 200 OK\x0d\nContent-Length: 4\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\ntest"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["POST / HTTP/1.1\r\nTransfer-Encoding: chunked\r\nHost: test\r\n\r\n4;ext=value\r\ntest\r\n0\r\n\r\n".toUTF8] (maximumSizeHandlerEcho 150)
  IO.println <| String.quote <| String.fromUTF8! response

/--
info: "HTTP/1.1 200 OK\x0d\nContent-Length: 3\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\nfoo"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["POST / HTTP/1.1\r\nTransfer-Encoding: chunked\r\nHost: test\r\n\r\n3\r\nfoo\r\n0\r\nX-Trailer: value\r\n\r\n".toUTF8] (maximumSizeHandlerEcho 150)
  IO.println <| String.quote <| String.fromUTF8! response

/--
info: "HTTP/1.1 200 OK\x0d\nContent-Length: 0\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["POST / HTTP/1.1\r\nTransfer-Encoding: chunked\r\nHost: test\r\n\r\n0\r\n\r\n".toUTF8] (maximumSizeHandlerEcho 150)
  IO.println <| String.quote <| String.fromUTF8! response

/--
info: "HTTP/1.1 200 OK\x0d\nContent-Length: 5\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\nhello"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["POST / HTTP/1.1\r\nTransfer-Encoding: gzip, chunked\r\nHost: test\r\n\r\n5\r\nhello\r\n0\r\n\r\n".toUTF8] echoHandler
  IO.println <| String.quote <| String.fromUTF8! response
-- ============================================================================
-- CL.CL Attack Prevention (Duplicate Content-Length)
-- ============================================================================

/--
info: "HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["POST / HTTP/1.1\r\nContent-Length: 5\r\nContent-Length: 5\r\nHost: test\r\n\r\nhello".toUTF8] echoHandler
  IO.println <| String.quote <| String.fromUTF8! response

/--
info: "HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["POST / HTTP/1.1\r\nContent-Length: 5\r\nContent-Length: 10\r\nHost: test\r\n\r\nhello".toUTF8] echoHandler
  IO.println <| String.quote <| String.fromUTF8! response

/--
info: "HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["POST / HTTP/1.1\r\nContent-Length: 5, 10\r\nHost: test\r\n\r\nhello".toUTF8] echoHandler
  IO.println <| String.quote <| String.fromUTF8! response

-- ============================================================================
-- CL.TE Attack Prevention (Content-Length + Transfer-Encoding)
-- ============================================================================

/--
info: "HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["POST / HTTP/1.1\r\nContent-Length: 100\r\nTransfer-Encoding: chunked\r\nHost: test\r\n\r\n5\r\nhello\r\n0\r\n\r\n".toUTF8] echoHandler
  IO.println <| String.quote <| String.fromUTF8! response

/--
info: "HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["POST / HTTP/1.1\r\nTransfer-Encoding: chunked\r\nContent-Length: 100\r\nHost: test\r\n\r\n5\r\nhello\r\n0\r\n\r\n".toUTF8] echoHandler
  IO.println <| String.quote <| String.fromUTF8! response

-- ============================================================================
-- Transfer-Encoding Validation
-- ============================================================================

/--
info: "HTTP/1.1 200 OK\x0d\nContent-Length: 5\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\nhello"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["POST / HTTP/1.1\r\nTransfer-Encoding: chunked\r\nTransfer-Encoding: chunked\r\nHost: test\r\n\r\n5\r\nhello\r\n0\r\n\r\n".toUTF8] echoHandler
  IO.println <| String.quote <| String.fromUTF8! response

-- ============================================================================
-- Invalid Methods
-- ============================================================================

/--
info: "HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["INVALID / HTTP/1.1\r\nHost: test\r\n\r\n".toUTF8] echoHandler
  IO.println <| String.quote <| String.fromUTF8! response

/--
info: "HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["get / HTTP/1.1\r\nHost: test\r\n\r\n".toUTF8] echoHandler
  IO.println <| String.quote <| String.fromUTF8! response

-- ============================================================================
-- Invalid HTTP Versions
-- ============================================================================

/--
info: "HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["GET / HTTP/1.0\r\nHost: test\r\n\r\n".toUTF8] echoHandler
  IO.println <| String.quote <| String.fromUTF8! response

/--
info: "HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["GET / HTTP/2.0\r\nHost: test\r\n\r\n".toUTF8] echoHandler
  IO.println <| String.quote <| String.fromUTF8! response

-- ============================================================================
-- Missing Host Header
-- ============================================================================

/--
info: "HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["GET / HTTP/1.1\r\n\r\n".toUTF8] echoHandler
  IO.println <| String.quote <| String.fromUTF8! response

-- ============================================================================
-- Malformed Request Line
-- ============================================================================

/--
info: "HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["GET /\r\nHost: test\r\n\r\n".toUTF8] echoHandler
  IO.println <| String.quote <| String.fromUTF8! response
-- ============================================================================
-- Header Injection Prevention
-- ============================================================================

/--
info: "HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["POST / HTTP/1.1\r\nX-Custom".toUTF8, ByteArray.mk #[0], ": value\r\nHost: test\r\n\r\n".toUTF8] echoHandler
  IO.println <| String.quote <| String.fromUTF8! response

/--
info: "HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["POST / HTTP/1.1\r\nX-Custom\t: value\r\nHost: test\r\n\r\n".toUTF8] echoHandler
  IO.println <| String.quote <| String.fromUTF8! response

/--
info: "HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["POST / HTTP/1.1\r\nX-Custom : value\r\nHost: test\r\n\r\n".toUTF8] echoHandler
  IO.println <| String.quote <| String.fromUTF8! response

/--
info: "HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["POST / HTTP/1.1\r\n: value\r\nHost: test\r\n\r\n".toUTF8] echoHandler
  IO.println <| String.quote <| String.fromUTF8! response

/--
info: "HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["POST / HTTP/1.1\r\nInvalid Header: value\r\nHost: test\r\n\r\n".toUTF8] echoHandler
  IO.println <| String.quote <| String.fromUTF8! response

-- ============================================================================
-- Invalid Chunked Encoding
-- ============================================================================

/--
info: "HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["POST / HTTP/1.1\r\nTransfer-Encoding: chunked\r\nHost: test\r\n\r\nZZZ\r\ndata\r\n0\r\n\r\n".toUTF8] echoHandler
  IO.println <| String.quote <| String.fromUTF8! response

/--
info: "HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["POST / HTTP/1.1\r\nTransfer-Encoding: chunked\r\nHost: test\r\n\r\n5hello\r\n0\r\n\r\n".toUTF8] echoHandler
  IO.println <| String.quote <| String.fromUTF8! response

/--
info: "HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["POST / HTTP/1.1\r\nTransfer-Encoding: chunked\r\nHost: test\r\n\r\n5\r\nhello\r\n".toUTF8] echoHandler
  IO.println <| String.quote <| String.fromUTF8! response

/--
info: "HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["POST / HTTP/1.1\r\nTransfer-Encoding: chunked\r\nHost: test\r\n\r\nA\r\nhello\r\n0\r\n\r\n".toUTF8] echoHandler
  IO.println <| String.quote <| String.fromUTF8! response

-- ============================================================================
-- Content-Length Validation
-- ============================================================================

/--
info: "HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["POST / HTTP/1.1\r\nContent-Length: -5\r\nHost: test\r\n\r\n".toUTF8] echoHandler
  IO.println <| String.quote <| String.fromUTF8! response

/--
info: "HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["POST / HTTP/1.1\r\nContent-Length: abc\r\nHost: test\r\n\r\n".toUTF8] echoHandler
  IO.println <| String.quote <| String.fromUTF8! response

/--
info: "HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["POST / HTTP/1.1\r\nContent-Length: 100\r\nHost: test\r\n\r\nshort".toUTF8] echoHandler
  IO.println <| String.quote <| String.fromUTF8! response

/--
info: "HTTP/1.1 200 OK\x0d\nContent-Length: 5\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\nhello"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["POST / HTTP/1.1\r\nContent-Length: 5\r\nHost: test\r\n\r\nhello".toUTF8] echoHandler
  IO.println <| String.quote <| String.fromUTF8! response

/--
info: "HTTP/1.1 200 OK\x0d\nContent-Length: 5\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\nexact"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["POST / HTTP/1.1\r\nContent-Length: 5\r\nHost: test\r\n\r\nexact".toUTF8] echoHandler
  IO.println <| String.quote <| String.fromUTF8! response
-- ============================================================================
-- Keep-Alive / Pipelining Tests
-- ============================================================================

/--
info: "HTTP/1.1 200 OK\x0d\nContent-Length: 5\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\nfirstHTTP/1.1 200 OK\x0d\nContent-Length: 6\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\nsecond"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["POST / HTTP/1.1\r\nContent-Length: 5\r\nHost: test\r\n\r\nfirstPOST / HTTP/1.1\r\nContent-Length: 6\r\nHost: test\r\n\r\nsecond".toUTF8] echoHandler
  IO.println <| String.quote <| String.fromUTF8! response

/--
info: "HTTP/1.1 200 OK\x0d\nContent-Length: 4\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\ndata"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["POST / HTTP/1.1\r\nContent-Length: 4\r\nConnection: close\r\nHost: test\r\n\r\ndata".toUTF8] echoHandler
  IO.println <| String.quote <| String.fromUTF8! response

/--
info: "HTTP/1.1 200 OK\x0d\nContent-Length: 9\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\nkeepalive"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["POST / HTTP/1.1\r\nContent-Length: 9\r\nConnection: keep-alive\r\nHost: test\r\n\r\nkeepalive".toUTF8] echoHandler
  IO.println <| String.quote <| String.fromUTF8! response

-- ============================================================================
-- Multiple Headers
-- ============================================================================

/--
info: "HTTP/1.1 200 OK\x0d\nContent-Length: 4\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\ndata"
-/
#guard_msgs in
#eval show IO Unit from do
  let response ← sendRawBytes #["POST / HTTP/1.1\r\nContent-Length: 4\r\nX-Custom: value1\r\nX-Custom: value2\r\nHost: test\r\n\r\ndata".toUTF8] echoHandler
  IO.println <| String.quote <| String.fromUTF8! response

/--
info: "HTTP/1.1 200 OK\x0d\nContent-Length: 2\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\nok"
-/
#guard_msgs in
#eval show IO Unit from do
  let longValue := String.join (List.replicate 1000 "x")
  let response ← sendRawBytes #[s!"POST / HTTP/1.1\r\nX-Long: {longValue}\r\nContent-Length: 2\r\nHost: test\r\n\r\nok".toUTF8] echoHandler
  IO.println <| String.quote <| String.fromUTF8! response
-- ============================================================================
-- Extra Data After Body
-- ============================================================================
/--
info: "HTTP/1.1 200 OK\x0d\nContent-Length: 5\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\nhelloHTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval show IO Unit from do
let response sendRawBytes #["POST / HTTP/1.1\r\nContent-Length: 5\r\nHost: test\r\n\r\nhello extra data here".toUTF8] echoHandler
IO.println <| String.quote <| String.fromUTF8! response
-- ============================================================================
-- Header Folding (Obsolete, should be rejected)
-- ============================================================================
/--
info: "HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval show IO Unit from do
let response sendRawBytes #["POST / HTTP/1.1\r\nContent-Length: 5 \r\nHost: test\r\n\r\nhello".toUTF8] echoHandler
IO.println <| String.quote <| String.fromUTF8! response
/--
info: "HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval show IO Unit from do
let response sendRawBytes #["POST / HTTP/1.1\r\nX-Custom: line1\r\n continuation\r\nHost: test\r\n\r\n".toUTF8] echoHandler
IO.println <| String.quote <| String.fromUTF8! response
-- ============================================================================
-- Control Characters in Path
-- ============================================================================
/--
info: "HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval show IO Unit from do
let response sendRawBytes #["POST /path\x0Btest HTTP/1.1\r\nHost: test\r\n\r\n".toUTF8] echoHandler
IO.println <| String.quote <| String.fromUTF8! response

View File

@@ -0,0 +1,826 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
import Std.Internal.Http.Data.URI
import Std.Internal.Http.Data.URI.Encoding
import Std.Internal.Http.Protocol.H1.Parser
open Std.Http
open Std.Http.URI
open Std.Http.URI.Parser
/-!
# URI Tests
Comprehensive tests for URI parsing, encoding, normalization, and manipulation.
This file consolidates tests from multiple URI-related test files.
-/
-- ============================================================================
-- Helper Functions
-- ============================================================================
/-- Runs `parser` on the UTF-8 bytes of `s`, requiring that the whole input is consumed. -/
def runParser (parser : Std.Internal.Parsec.ByteArray.Parser α) (s : String) : IO α := do
  let complete := parser <* Std.Internal.Parsec.eof
  IO.ofExcept (complete.run s.toUTF8)
/--
Parses `s` as a request target and checks that rendering the result back to a
string yields `exact` (which defaults to `s`, i.e. a round-trip check).
Throws an `IO.userError` on mismatch.
-/
def parseCheck (s : String) (exact : String := s) : IO Unit := do
  -- `←` was required here: `runParser` is an `IO` action, not a pure value.
  let result ← runParser parseRequestTarget s
  if toString result = exact then
    pure ()
  else
    throw (.userError s!"expect {exact.quote} but got {(toString result).quote}")
/-- Asserts that parsing `s` as a request target fails; succeeds silently on parse error. -/
def parseCheckFail (s : String) : IO Unit := do
  if let .ok r := (parseRequestTarget <* Std.Internal.Parsec.eof).run s.toUTF8 then
    throw <| .userError
      s!"expected parse failure, but succeeded with {(repr r)}"
-- ============================================================================
-- Percent Encoding Tests (EncodedString)
-- ============================================================================
-- Valid percent encoding validation
/--
info: some "abc"
-/
#guard_msgs in
#eval IO.println (repr (EncodedString.ofByteArray? "abc".toUTF8))
/--
info: some "%20"
-/
#guard_msgs in
#eval IO.println (repr (EncodedString.ofByteArray? "%20".toUTF8))
/--
info: some "hello%20world"
-/
#guard_msgs in
#eval IO.println (repr (EncodedString.ofByteArray? "hello%20world".toUTF8))
-- Any two hex digits are accepted by validation, even `%FF`/`%00`.
/--
info: some "%FF"
-/
#guard_msgs in
#eval IO.println (repr (EncodedString.ofByteArray? "%FF".toUTF8))
/--
info: some "%00"
-/
#guard_msgs in
#eval IO.println (repr (EncodedString.ofByteArray? "%00".toUTF8))
-- Invalid percent encoding: incomplete
/--
info: none
-/
#guard_msgs in
#eval IO.println (repr (EncodedString.ofByteArray? "%".toUTF8))
/--
info: none
-/
#guard_msgs in
#eval IO.println (repr (EncodedString.ofByteArray? "hello%".toUTF8))
/--
info: none
-/
#guard_msgs in
#eval IO.println (repr (EncodedString.ofByteArray? "%2".toUTF8))
/--
info: none
-/
#guard_msgs in
#eval IO.println (repr (EncodedString.ofByteArray? "%A".toUTF8))
-- Invalid percent encoding: non-hex characters
/--
info: none
-/
#guard_msgs in
#eval IO.println (repr (EncodedString.ofByteArray? "%GG".toUTF8))
/--
info: none
-/
#guard_msgs in
#eval IO.println (repr (EncodedString.ofByteArray? "%2G".toUTF8))
/--
info: none
-/
#guard_msgs in
#eval IO.println (repr (EncodedString.ofByteArray? "%G2".toUTF8))
-- ============================================================================
-- Percent Encoding Decode Tests
-- ============================================================================
/--
info: some "abc"
-/
#guard_msgs in
#eval IO.println (repr <| EncodedString.decode =<< (EncodedString.ofByteArray? "abc".toUTF8))
/--
info: some " "
-/
#guard_msgs in
#eval IO.println (repr <| EncodedString.decode =<< (EncodedString.ofByteArray? "%20".toUTF8))
/--
info: some "hello world"
-/
#guard_msgs in
#eval IO.println (repr <| EncodedString.decode =<< (EncodedString.ofByteArray? "hello%20world".toUTF8))
/--
info: some " !"
-/
#guard_msgs in
#eval IO.println (repr <| EncodedString.decode =<< (EncodedString.ofByteArray? "%20%21".toUTF8))
-- `%FF` passes validation (above) but `decode` fails — presumably because a
-- lone 0xFF byte is not valid UTF-8 (NOTE(review): confirm against `decode`).
/--
info: none
-/
#guard_msgs in
#eval IO.println (repr <| EncodedString.decode =<< (EncodedString.ofByteArray? "%FF".toUTF8))
/--
info: some "\x00"
-/
#guard_msgs in
#eval IO.println (repr <| EncodedString.decode =<< (EncodedString.ofByteArray? "%00".toUTF8))
-- ============================================================================
-- Query String Encoding Tests
-- ============================================================================
-- In query strings `+` is kept verbatim by validation but decodes to a space.
/--
info: some "hello+world"
-/
#guard_msgs in
#eval IO.println (repr (EncodedQueryString.ofByteArray? "hello+world".toUTF8))
/--
info: none
-/
#guard_msgs in
#eval IO.println (repr (EncodedQueryString.ofByteArray? "%".toUTF8))
/--
info: some "hello world"
-/
#guard_msgs in
#eval IO.println (repr <| EncodedQueryString.decode =<< (EncodedQueryString.ofByteArray? "hello+world".toUTF8))
/--
info: some " "
-/
#guard_msgs in
#eval IO.println (repr <| EncodedQueryString.decode =<< (EncodedQueryString.ofByteArray? "%20".toUTF8))
-- ============================================================================
-- Request Target Parsing - Basic Tests
-- ============================================================================
-- `parseCheck s` asserts that `s` parses and round-trips back to itself.
#eval parseCheck "/path/with/encoded%20space"
#eval parseCheck "/path/with/encoded%20space/"
#eval parseCheck "*"
#eval parseCheck "https://ata/b?ata=be#lol%F0%9F%94%A5"
#eval parseCheck "/api/search?q=hello%20world&category=tech%2Bgames"
#eval parseCheck "/"
#eval parseCheck "/api/v1/users/123/posts/456/comments/789"
#eval parseCheck "/files/../etc/passwd"
#eval parseCheck "example.com:8080"
#eval parseCheck "https://example.com:8080/ata"
#eval parseCheck "192.168.1.1:3000"
#eval parseCheck "[::1]:8080"
#eval parseCheck "http://example.com/path/to/resource?query=value"
#eval parseCheck "https://api.example.com:443/v1/users?limit=10"
#eval parseCheck "http://[2001:db8::1]:8080/path"
#eval parseCheck "https://example.com/page#section1"
#eval parseCheck "https://xn--nxasmq6b.xn--o3cw4h/path"
#eval parseCheck "localhost:65535"
#eval parseCheck "https://user:pass@secure.example.com/private"
#eval parseCheck "/double//slash//path"
-- Parse failure tests
#eval parseCheckFail "/path with space"
#eval parseCheckFail "/path/%"
#eval parseCheckFail "/path/%2"
#eval parseCheckFail "/path/%ZZ"
#eval parseCheckFail ""
#eval parseCheckFail "[::1"
#eval parseCheckFail "[:::1]:80"
#eval parseCheckFail "#frag"
#eval parseCheckFail "/path/\n"
#eval parseCheckFail "/path/\u0000"
-- ============================================================================
-- Request Target Parsing - Detailed Output Tests
-- ============================================================================

/--
info: Std.Http.RequestTarget.originForm { segments := #["path", "with", "encoded%20space"], absolute := true } none none
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/path/with/encoded%20space"
  IO.println (repr result)

/--
info: Std.Http.RequestTarget.asteriskForm
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "*"
  IO.println (repr result)

/--
info: some "lol🔥"
-/
#guard_msgs in
#eval show IO _ from do
  -- Fragment is percent-decoded on access.
  let result ← runParser parseRequestTarget "https://ata/b?ata=be#lol%F0%9F%94%A5"
  IO.println (repr (result.fragment?))

/--
info: #[("q", some "hello%20world"), ("category", some "tech%2Bgames")]
-/
#guard_msgs in
#eval show IO _ from do
  -- Query pairs keep their raw (still-encoded) values.
  let result ← runParser parseRequestTarget "/api/search?q=hello%20world&category=tech%2Bgames"
  IO.println (repr result.query)

/--
info: Std.Http.RequestTarget.originForm { segments := #[], absolute := true } none none
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/"
  IO.println (repr result)

/--
info: Std.Http.RequestTarget.authorityForm
{ userInfo := none, host := Std.Http.URI.Host.name "example.com", port := some 8080 }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "example.com:8080"
  IO.println (repr result)

/--
info: Std.Http.RequestTarget.authorityForm { userInfo := none, host := Std.Http.URI.Host.ipv4 192.168.1.1, port := some 3000 }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "192.168.1.1:3000"
  IO.println (repr result)

/--
info: Std.Http.RequestTarget.authorityForm { userInfo := none, host := Std.Http.URI.Host.ipv6 ::1, port := some 8080 }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "[::1]:8080"
  IO.println (repr result)

/--
info: Std.Http.RequestTarget.absoluteForm
{ scheme := "https",
authority := some { userInfo := none, host := Std.Http.URI.Host.name "example.com", port := some 8080 },
path := { segments := #["ata"], absolute := true },
query := #[],
fragment := none }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "https://example.com:8080/ata"
  IO.println (repr result)

/--
info: Std.Http.RequestTarget.absoluteForm
{ scheme := "http",
authority := some { userInfo := none, host := Std.Http.URI.Host.ipv6 2001:db8::1, port := some 8080 },
path := { segments := #["path"], absolute := true },
query := #[],
fragment := none }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "http://[2001:db8::1]:8080/path"
  IO.println (repr result)

/--
info: Std.Http.RequestTarget.absoluteForm
{ scheme := "https",
authority := some { userInfo := some { username := "user b", password := some "pass" },
host := Std.Http.URI.Host.name "secure.example.com",
port := none },
path := { segments := #["private"], absolute := true },
query := #[],
fragment := none }
-/
#guard_msgs in
#eval show IO _ from do
  -- `%20` in the userinfo is decoded to a space in `username`.
  let result ← runParser parseRequestTarget "https://user%20b:pass@secure.example.com/private"
  IO.println (repr result)
-- ============================================================================
-- IPv6 Host Tests
-- ============================================================================

/--
info: Std.Http.URI.Host.ipv6 ::1
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "[::1]:8080"
  match result.authority? with
  | some auth => IO.println (repr auth.host)
  | none => IO.println "no authority"

/--
info: Std.Http.URI.Host.ipv6 2001:db8::8a2e:370:7334
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "http://[2001:db8::8a2e:370:7334]:8080/api"
  match result.authority? with
  | some auth => IO.println (repr auth.host)
  | none => IO.println "no authority"

/--
info: Std.Http.URI.Host.ipv6 ::
-/
#guard_msgs in
#eval show IO _ from do
  -- The unspecified address `::` is a valid IPv6 host.
  let result ← runParser parseRequestTarget "http://[::]/path"
  match result.authority? with
  | some auth => IO.println (repr auth.host)
  | none => IO.println "no authority"

-- ============================================================================
-- UserInfo Tests
-- ============================================================================

/--
info: some { username := "user", password := some "pass" }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "https://user:pass@example.com/private"
  match result.authority? with
  | some auth => IO.println (repr auth.userInfo)
  | none => IO.println "no authority"

/--
info: some { username := "user only", password := none }
-/
#guard_msgs in
#eval show IO _ from do
  -- Username without a password; `%20` decodes to a space.
  let result ← runParser parseRequestTarget "https://user%20only@example.com/path"
  match result.authority? with
  | some auth => IO.println (repr auth.userInfo)
  | none => IO.println "no authority"

/--
info: some { username := "", password := some "pass" }
-/
#guard_msgs in
#eval show IO _ from do
  -- Empty username with a password is allowed.
  let result ← runParser parseRequestTarget "https://:pass@example.com/path"
  match result.authority? with
  | some auth => IO.println (repr auth.userInfo)
  | none => IO.println "no authority"

/--
info: some { username := "user", password := some "p@ss:w0rd" }
-/
#guard_msgs in
#eval show IO _ from do
  -- `%40` / `%3A` in the password decode to `@` / `:`.
  let result ← runParser parseRequestTarget "https://user:p%40ss%3Aw0rd@example.com/"
  match result.authority? with
  | some auth => IO.println (repr auth.userInfo)
  | none => IO.println "no authority"
-- ============================================================================
-- Path.normalize Tests (RFC 3986 Section 5.2.4)
-- ============================================================================
/--
info: /a/b
-/
#guard_msgs in
#eval IO.println <| toString (URI.parse! "http://example.com/a/./b").path.normalize
/--
info: /a
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a/b/..").path.normalize
/--
info: /a/g
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a/b/c/./../../g").path.normalize
-- `..` never climbs above the root.
/--
info: /g
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/../../../g").path.normalize
/--
info: /a/c
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a/b/../c").path.normalize
/--
info: /a/
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a/b/c/../.././").path.normalize
/--
info: /
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a/b/../../..").path.normalize
/--
info: /
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/../../../").path.normalize
/--
info: /a/b/c
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/./a/./b/./c/.").path.normalize
/--
info: /c
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a/../b/../c").path.normalize
-- ============================================================================
-- Path.parent Tests
-- ============================================================================
/--
info: /a/b
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a/b/c").path.parent
/--
info: /a
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a/b").path.parent
/--
info: /
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a").path.parent
-- The root is its own parent.
/--
info: /
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/").path.parent
-- ============================================================================
-- Path.join Tests
-- ============================================================================
-- Joining a relative path appends its segments.
/--
info: /a/b/c/d
-/
#guard_msgs in
#eval do
  let p1 := (URI.parse! "http://example.com/a/b").path
  let p2 : URI.Path := { segments := #[URI.EncodedString.encode "c", URI.EncodedString.encode "d"], absolute := false }
  IO.println (p1.join p2)
-- Joining an absolute path replaces the base entirely.
/--
info: /x/y
-/
#guard_msgs in
#eval do
  let p1 := (URI.parse! "http://example.com/a/b").path
  let p2 : URI.Path := { segments := #[URI.EncodedString.encode "x", URI.EncodedString.encode "y"], absolute := true }
  IO.println (p1.join p2)
-- ============================================================================
-- Path.isEmpty Tests
-- ============================================================================
#guard (URI.parse! "http://example.com").path.isEmpty = true
#guard (URI.parse! "http://example.com/").path.absolute = true
#guard (URI.parse! "http://example.com/a").path.isEmpty = false
#guard (URI.parse! "http://example.com/a").path.absolute = true
-- ============================================================================
-- URI Modification Helpers
-- ============================================================================
-- `withScheme` lower-cases the scheme.
#guard ((URI.parse! "http://example.com").withScheme "htTps" |>.scheme) == "https"
#guard ((URI.parse! "http://example.com").withScheme "ftP" |>.scheme) == "ftp"
/--
info: http://example.com/#section1
-/
#guard_msgs in
#eval IO.println ((URI.parse! "http://example.com/").withFragment (some (toString <| URI.EncodedString.encode "section1")))
/--
info: http://example.com/?key=value
-/
#guard_msgs in
#eval do
  let uri := URI.parse! "http://example.com/"
  let query := URI.Query.empty.insert "key" "value"
  IO.println (uri.withQuery query)
/--
info: http://example.com/new/path
-/
#guard_msgs in
#eval do
  let uri := URI.parse! "http://example.com/old/path"
  let newPath : URI.Path := { segments := #[URI.EncodedString.encode "new", URI.EncodedString.encode "path"], absolute := true }
  IO.println (uri.withPath newPath)
-- ============================================================================
-- URI.normalize Tests (RFC 3986 Section 6)
-- ============================================================================
-- Scheme and host are case-normalized; the path keeps its case.
#guard (URI.parse! "HTTP://example.com").normalize.scheme == "http"
#guard (URI.parse! "HtTpS://example.com").normalize.scheme == "https"
/--
info: http://example.com/
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://EXAMPLE.COM/").normalize
/--
info: http://example.com/
-/
#guard_msgs in
#eval IO.println (URI.parse! "HTTP://Example.COM/").normalize
/--
info: http://example.com/a/c
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a/b/../c").normalize
/--
info: http://example.com/a/g
-/
#guard_msgs in
#eval IO.println (URI.parse! "HTTP://EXAMPLE.COM/a/b/c/./../../g").normalize
/--
info: https://www.example.com/PATH
-/
#guard_msgs in
#eval IO.println (URI.parse! "HTTPS://WWW.EXAMPLE.COM/PATH").normalize
-- ============================================================================
-- Query Parameter Tests
-- ============================================================================
-- Query with duplicate keys

/--
info: 3
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/search?tag=a&tag=b&tag=c"
  let all := result.query.findAll "tag"
  IO.println all.size

/--
info: #[some "a", some "b", some "c"]
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/search?tag=a&tag=b&tag=c"
  let all := result.query.findAll "tag"
  IO.println (repr all)

-- `find?` returns the first occurrence.
/--
info: some (some "a")
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/search?key=a&key=b&key=c"
  IO.println (repr (result.query.find? "key"))

-- Empty value vs no value

/--
info: some (some "")
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/api?key="
  IO.println (repr (result.query.find? "key"))

/--
info: some none
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/api?key"
  IO.println (repr (result.query.find? "key"))

/--
info: some (some "value")
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/api?key=value"
  IO.println (repr (result.query.find? "key"))

-- ============================================================================
-- Query Operations
-- ============================================================================

#guard (URI.Query.empty.insert "a" "1" |>.contains "a") = true
#guard (URI.Query.empty.contains "nonexistent") = false

/--
info: a=1&b=2
-/
#guard_msgs in
#eval do
  let query := URI.Query.empty
    |>.insert "a" "1"
    |>.insert "b" "2"
  IO.println query.toRawString

/--
info: b=2
-/
#guard_msgs in
#eval do
  let query := URI.Query.empty
    |>.insert "a" "1"
    |>.insert "b" "2"
    |>.erase "a"
  IO.println query.toRawString

/--
info: key=new
-/
#guard_msgs in
#eval do
  let query := URI.Query.empty
    |>.insert "key" "old"
    |>.set "key" "new"
  IO.println query.toRawString

-- ============================================================================
-- Fragment Tests
-- ============================================================================

/--
info: some "section/subsection"
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "https://example.com/page#section%2Fsubsection"
  IO.println (repr result.fragment?)

/--
info: some "heading with spaces"
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "https://example.com/page#heading%20with%20spaces"
  IO.println (repr result.fragment?)

/--
info: none
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/path"
  IO.println (repr result.fragment?)

-- A bare `#` yields an empty (but present) fragment.
/--
info: some ""
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/path#"
  IO.println (repr result.fragment?)
-- ============================================================================
-- URI Builder Tests
-- ============================================================================

/--
info: https://example.com/api/users?page=1
-/
#guard_msgs in
#eval do
  let uri := URI.Builder.empty
    |>.setScheme "https"
    |>.setHost "example.com"
    |>.appendPathSegment "api"
    |>.appendPathSegment "users"
    |>.addQueryParam "page" "1"
    |>.build
  IO.println uri

/--
info: http://localhost:8080/
-/
#guard_msgs in
#eval do
  let uri := URI.Builder.empty
    |>.setScheme "http"
    |>.setHost "localhost"
    |>.setPort 8080
    |>.build
  IO.println uri

/--
info: https://user:pass@secure.example.com/private
-/
#guard_msgs in
#eval do
  let uri := URI.Builder.empty
    |>.setScheme "https"
    |>.setUserInfo "user" (some "pass")
    |>.setHost "secure.example.com"
    |>.appendPathSegment "private"
    |>.build
  IO.println uri

-- ============================================================================
-- Encoded Path Segment Tests
-- ============================================================================
-- Encoded separators stay encoded, so they do not split segments.

/--
info: Std.Http.RequestTarget.originForm { segments := #["path%2Fwith%2Fslashes"], absolute := true } none none
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/path%2Fwith%2Fslashes"
  IO.println (repr result)

/--
info: Std.Http.RequestTarget.originForm { segments := #["file%20name.txt"], absolute := true } none none
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/file%20name.txt"
  IO.println (repr result)

/--
info: Std.Http.RequestTarget.originForm { segments := #["caf%C3%A9"], absolute := true } none none
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/caf%C3%A9"
  IO.println (repr result)

-- ============================================================================
-- Authority Form Tests
-- ============================================================================

/--
info: Std.Http.RequestTarget.authorityForm
{ userInfo := none, host := Std.Http.URI.Host.name "proxy.example.com", port := some 3128 }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "proxy.example.com:3128"
  IO.println (repr result)

/--
info: Std.Http.RequestTarget.authorityForm { userInfo := none, host := Std.Http.URI.Host.ipv4 127.0.0.1, port := some 8080 }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "127.0.0.1:8080"
  IO.println (repr result)

View File

@@ -0,0 +1,76 @@
import Std.Sync
open Std
/-- Throws an `IO.userError` describing the mismatch unless `is == should`. -/
def assertBEq [BEq α] [ToString α] (is should : α) : IO Unit := do
  if is != should then
    throw <| .userError s!"{is} should be {should}"
/-- A future resolves exactly once: the second `resolve` returns `false` and keeps the first value. -/
def resolveOnce (f : Future Nat) : IO Unit := do
  assertBEq (← f.isResolved) false
  assertBEq (← f.tryGet) none
  assertBEq (← f.resolve 42) true
  assertBEq (← f.isResolved) true
  assertBEq (← f.tryGet) (some 42)
  assertBEq (← f.resolve 43) false
  assertBEq (← f.tryGet) (some 42)
/-- `get` after `resolve` yields a task carrying the resolved value. -/
def getAfterResolve (f : Future Nat) : IO Unit := do
  assertBEq (← f.resolve 37) true
  let task ← f.get
  assertBEq (← IO.wait task) 37
/-- `get` before `resolve` still yields the value once the future is resolved. -/
def getBeforeResolve (f : Future Nat) : IO Unit := do
  let task ← f.get
  assertBEq (← f.resolve 37) true
  assertBEq (← IO.wait task) 37
/-- Multiple `get` tasks registered before resolution all observe the same value. -/
def multipleGets (f : Future Nat) : IO Unit := do
  let task1 ← f.get
  let task2 ← f.get
  let task3 ← f.get
  assertBEq (← f.resolve 99) true
  assertBEq (← IO.wait task1) 99
  assertBEq (← IO.wait task2) 99
  assertBEq (← IO.wait task3) 99
/-- Of several concurrent `resolve` attempts exactly one succeeds, and the stored
value is one of the attempted values. -/
def concurrentResolve (f : Future Nat) : IO Unit := do
  let resolveTask1 ← IO.asTask (f.resolve 10)
  let resolveTask2 ← IO.asTask (f.resolve 20)
  let resolveTask3 ← IO.asTask (f.resolve 30)
  let result1 ← IO.ofExcept =<< IO.wait resolveTask1
  let result2 ← IO.ofExcept =<< IO.wait resolveTask2
  let result3 ← IO.ofExcept =<< IO.wait resolveTask3
  let successCount := [result1, result2, result3].filter id |>.length
  assertBEq successCount 1
  let value ← f.tryGet
  assertBEq (value.isSome) true
  assertBEq ([10, 20, 30].contains value.get!) true
/-- `get` tasks created before and after `resolve` all observe the resolved value. -/
def concurrentGetResolve (f : Future Nat) : IO Unit := do
  let getTask1 ← f.get
  let getTask2 ← f.get
  let resolveTask ← f.resolve 55
  let getTask3 ← f.get
  let value1 ← IO.wait getTask1
  let value2 ← IO.wait getTask2
  let value3 ← IO.wait getTask3
  assertBEq resolveTask true
  assertBEq value1 55
  assertBEq value2 55
  assertBEq value3 55
/-- Runs every future test, each against a fresh `Future`. -/
def suite : IO Unit := do
  resolveOnce (← Future.new)
  getAfterResolve (← Future.new)
  getBeforeResolve (← Future.new)
  multipleGets (← Future.new)
  concurrentResolve (← Future.new)
  concurrentGetResolve (← Future.new)

#eval suite