Compare commits

...

156 Commits

Author SHA1 Message Date
Sofia Rodrigues
892336336e Merge branch 'sofia/openssl-socket' of https://github.com/leanprover/lean4 into sofia/async-http-client-ssl 2026-02-14 18:52:22 -03:00
Sofia Rodrigues
48293bb323 fix: recv selector 2026-02-14 18:48:35 -03:00
Sofia Rodrigues
00f1bb1be2 feat: openssl server 2026-02-14 18:48:11 -03:00
Sofia Rodrigues
79c77c0804 Merge branch 'sofia/openssl-socket' into sofia/async-http-client-ssl 2026-02-14 17:46:07 -03:00
Sofia Rodrigues
adab6fefa0 feat: openssl socket 2026-02-14 17:45:23 -03:00
Sofia Rodrigues
12796e60bf Merge branch 'master' of https://github.com/leanprover/lean4 into sofia/openssl 2026-02-14 06:22:05 -03:00
Sofia Rodrigues
c782aa6e7a feat: client 2026-02-13 10:52:48 -03:00
Sofia Rodrigues
8404f9cdd2 feat: client 2026-02-13 10:32:00 -03:00
Sofia Rodrigues
8017d39c4e Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-13 10:29:22 -03:00
Sofia Rodrigues
25bb4ee812 feat: protocol 2026-02-13 10:28:48 -03:00
Sofia Rodrigues
7c1aff34e2 Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-h1 2026-02-13 10:28:13 -03:00
Sofia Rodrigues
28670d4420 Merge branch 'sofia/async-http-body' of https://github.com/leanprover/lean4 into sofia/async-http-h1 2026-02-13 10:26:13 -03:00
Sofia Rodrigues
30f3a3520e Merge branch 'sofia/async-http-body' of https://github.com/leanprover/lean4 into sofia/async-http-h1 2026-02-13 10:22:47 -03:00
Sofia Rodrigues
9acca40aaf revert: h1 2026-02-13 10:21:57 -03:00
Sofia Rodrigues
bf2ed2c87a revert: h1 2026-02-13 10:20:35 -03:00
Sofia Rodrigues
3561d58203 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-13 10:14:03 -03:00
Sofia Rodrigues
1d80616068 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-13 10:13:55 -03:00
Sofia Rodrigues
61c93a7f57 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-13 10:13:40 -03:00
Sofia Rodrigues
b042b8efbd fix: parser path 2026-02-13 10:13:00 -03:00
Sofia Rodrigues
8c00ba48ae fix: parser 2026-02-13 10:12:22 -03:00
Sofia Rodrigues
991a27b7f2 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-13 10:11:30 -03:00
Sofia Rodrigues
69e38e9495 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-13 10:11:23 -03:00
Sofia Rodrigues
16d0162ef0 Merge branch 'sofia/async-http-uri' into sofia/async-http-body 2026-02-13 10:11:10 -03:00
Sofia Rodrigues
d07f5c502f feat: specialize encodedstrings 2026-02-13 10:10:34 -03:00
Sofia Rodrigues
5b1493507d feat: body channel should close on completion 2026-02-13 02:53:16 -03:00
Sofia Rodrigues
1180572926 fix: test 2026-02-13 02:29:55 -03:00
Sofia Rodrigues
6dc19ef871 Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-13 02:23:20 -03:00
Sofia Rodrigues
4a641fc498 revert: bytearray parser 2026-02-13 02:22:43 -03:00
Sofia Rodrigues
2a04014fa7 Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-13 02:18:18 -03:00
Sofia Rodrigues
4f20a815ec fix: extension name 2026-02-13 02:18:09 -03:00
Sofia Rodrigues
4906e14e51 Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-13 02:13:50 -03:00
Sofia Rodrigues
c9296c7371 Merge branch 'sofia/async-http-body' of https://github.com/leanprover/lean4 into sofia/async-http-h1 2026-02-13 02:12:07 -03:00
Sofia Rodrigues
4db36b214b feat: improve parser 2026-02-13 02:11:38 -03:00
Sofia Rodrigues
a6d94c7504 Merge branch 'sofia/async-http-body' of https://github.com/leanprover/lean4 into sofia/async-http-h1 2026-02-13 01:58:16 -03:00
Sofia Rodrigues
045abb48bb Merge branch 'sofia/async-http-uri' of https://github.com/leanprover/lean4 into sofia/async-http-body 2026-02-13 01:57:40 -03:00
Sofia Rodrigues
10337c620b fix: test 2026-02-13 01:57:23 -03:00
Sofia Rodrigues
698f557aa3 Merge branch 'sofia/async-http-uri' of https://github.com/leanprover/lean4 into sofia/async-http-body 2026-02-13 01:56:37 -03:00
Sofia Rodrigues
692c7c1a09 fix: test 2026-02-13 01:56:29 -03:00
Sofia Rodrigues
1bdfdcdb38 Merge branch 'sofia/async-http-uri' of https://github.com/leanprover/lean4 into sofia/async-http-body 2026-02-13 01:55:05 -03:00
Sofia Rodrigues
cacfe00c1d fix: test 2026-02-13 01:54:52 -03:00
Sofia Rodrigues
0fd0fa9c73 fix: test 2026-02-13 01:54:26 -03:00
Sofia Rodrigues
52fdc0f734 Merge branch 'sofia/async-http-uri' of https://github.com/leanprover/lean4 into sofia/async-http-body 2026-02-13 01:52:49 -03:00
Sofia Rodrigues
451c11d5a1 fix: make strict 2026-02-13 01:52:04 -03:00
Sofia Rodrigues
e92fcf6d46 Merge branch 'sofia/async-http-headers' of https://github.com/leanprover/lean4 into sofia/async-http-uri 2026-02-13 01:41:20 -03:00
Sofia Rodrigues
07140aceb8 Merge branch 'sofia/async-http-uri' of https://github.com/leanprover/lean4 into sofia/async-http-body 2026-02-13 01:39:32 -03:00
Sofia Rodrigues
2cc32928a4 feat: add parser features to path 2026-02-13 01:39:12 -03:00
Sofia Rodrigues
153513d5e2 fix: typos 2026-02-13 01:29:12 -03:00
Sofia Rodrigues
94308408a9 Merge branch 'sofia/async-http-data' of https://github.com/leanprover/lean4 into sofia/async-http-headers 2026-02-13 01:20:31 -03:00
Sofia Rodrigues
1ae6970b77 fix: comment 2026-02-13 01:19:51 -03:00
Sofia Rodrigues
0704f877f5 fix: tests 2026-02-13 01:07:32 -03:00
Sofia Rodrigues
7ff0e6f9c0 feat: 100-continue 2026-02-13 00:56:08 -03:00
Sofia Rodrigues
5b4498ac9d Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-13 00:47:12 -03:00
Sofia Rodrigues
976cc79b0c feat: 100-continue 2026-02-13 00:45:38 -03:00
Sofia Rodrigues
8d6ff0d727 feat: handler 2026-02-13 00:19:36 -03:00
Sofia Rodrigues
26c0e4dac4 feat: date header 2026-02-13 00:06:41 -03:00
Sofia Rodrigues
9ce1821be0 feat: add trailers some type of headers 2026-02-12 12:46:15 -03:00
Sofia Rodrigues
eeff4847fe Merge branch 'sofia/async-http-data' of https://github.com/leanprover/lean4 into sofia/async-http-headers 2026-02-12 12:31:30 -03:00
Sofia Rodrigues
2956f88050 refactor: move trailers 2026-02-12 12:30:59 -03:00
Sofia Rodrigues
26d9c1c07b feat: add extension handling of quotes and ExtensionName 2026-02-12 12:21:47 -03:00
Sofia Rodrigues
73af014cbd fix: documentation 2026-02-12 11:55:15 -03:00
Sofia Rodrigues
d206f437ef Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-12 10:47:19 -03:00
Sofia Rodrigues
d099586632 Merge branch 'sofia/async-http-body' of https://github.com/leanprover/lean4 into sofia/async-http-h1 2026-02-12 10:46:56 -03:00
Sofia Rodrigues
058d95e441 feat: maximum size in readAll 2026-02-12 10:46:43 -03:00
Sofia Rodrigues
b40ac55755 Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-11 19:42:01 -03:00
Sofia Rodrigues
43aa88e5a6 Merge branch 'sofia/async-http-body' of https://github.com/leanprover/lean4 into sofia/async-http-h1 2026-02-11 19:40:58 -03:00
Sofia Rodrigues
8fe2d519d2 revert: chunk changes 2026-02-11 19:40:34 -03:00
Sofia Rodrigues
07ed645f45 Merge branch 'sofia/async-http-body' of https://github.com/leanprover/lean4 into sofia/async-http-h1 2026-02-11 19:35:43 -03:00
Sofia Rodrigues
9485e8f5eb revert: add toString head 2026-02-11 19:35:31 -03:00
Sofia Rodrigues
dc96616781 Merge branch 'sofia/async-http-uri' of https://github.com/leanprover/lean4 into sofia/async-http-body 2026-02-11 19:30:41 -03:00
Sofia Rodrigues
0c44b4ae05 Merge branch 'sofia/async-http-headers' of https://github.com/leanprover/lean4 into sofia/async-http-uri 2026-02-11 19:29:54 -03:00
Sofia Rodrigues
3568464ca7 Merge branch 'sofia/async-http-data' of https://github.com/leanprover/lean4 into sofia/async-http-headers 2026-02-11 19:23:59 -03:00
Sofia Rodrigues
8e5296c71a fix: chunked 2026-02-11 19:22:30 -03:00
Sofia Rodrigues
eee971e3ef Merge branch 'sofia/async-http-body' of https://github.com/leanprover/lean4 into sofia/async-http-h1 2026-02-11 19:10:02 -03:00
Sofia Rodrigues
7a1f8b2d30 fix: readAll 2026-02-11 19:09:45 -03:00
Sofia Rodrigues
157e122891 Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-11 18:51:21 -03:00
Sofia Rodrigues
b12ab7eae4 Merge branch 'sofia/async-http-body' of https://github.com/leanprover/lean4 into sofia/async-http-h1 2026-02-11 18:49:17 -03:00
Sofia Rodrigues
10c8a923e6 feat: readAll functions 2026-02-11 18:48:10 -03:00
Sofia Rodrigues
2b91589750 Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-11 18:08:30 -03:00
Sofia Rodrigues
3e9674eaa9 feat: avoid more than one host 2026-02-11 18:08:16 -03:00
Sofia Rodrigues
d902c6a9f4 fix: mock double close 2026-02-11 18:07:58 -03:00
Sofia Rodrigues
04a17e8c55 fix: fail event should end everything 2026-02-11 18:06:16 -03:00
Sofia Rodrigues
1b6cd457d3 Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-11 17:40:54 -03:00
Sofia Rodrigues
2bc2080fbe fix: bad request behavior 2026-02-11 17:40:19 -03:00
Sofia Rodrigues
6b6425e8d7 fix: close mock bidirectionaly and fix test 2026-02-11 17:39:48 -03:00
Sofia Rodrigues
fb0e95d8ce fix: avoid gate errors 2026-02-11 17:25:26 -03:00
Sofia Rodrigues
4e4702a31f Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-11 17:22:53 -03:00
Sofia Rodrigues
5a2ad22f97 fix: selectable.one can used to change register/unregister order causing double register 2026-02-11 17:22:37 -03:00
Sofia Rodrigues
f02139f7ce fix: skipBytes 2026-02-11 17:14:52 -03:00
Sofia Rodrigues
d004e175e2 fix: error message 2026-02-11 17:03:27 -03:00
Sofia Rodrigues
7928a95c34 tests: add more tests 2026-02-11 16:53:11 -03:00
Sofia Rodrigues
202e6c5228 fix: transport, add explicit close that is no-op for tp 2026-02-11 14:54:08 -03:00
Sofia Rodrigues
0aeaa5e71d Merge branch 'sofia/async-http-h1' of https://github.com/leanprover/lean4 into sofia/async-http-server 2026-02-10 17:30:08 -03:00
Sofia Rodrigues
9ad4ee304b fix: imports 2026-02-10 17:29:04 -03:00
Sofia Rodrigues
5bd280553d Merge branch 'sofia/async-http-body' of https://github.com/leanprover/lean4 into sofia/async-http-h1 2026-02-10 17:25:08 -03:00
Sofia Rodrigues
7e215c8220 Merge branch 'sofia/async-http-uri' of https://github.com/leanprover/lean4 into sofia/async-http-body 2026-02-10 17:24:00 -03:00
Sofia Rodrigues
2c23680163 fix: imports 2026-02-10 17:23:14 -03:00
Sofia Rodrigues
c4f179daa0 Merge branch 'sofia/async-http-headers' of https://github.com/leanprover/lean4 into sofia/async-http-uri 2026-02-10 17:15:31 -03:00
Sofia Rodrigues
c2f657a15a Merge branch 'sofia/async-http-data' of https://github.com/leanprover/lean4 into sofia/async-http-headers 2026-02-10 17:12:45 -03:00
Sofia Rodrigues
9332081875 fix: import 2026-02-10 17:12:20 -03:00
Sofia Rodrigues
1cec97568b fix: imports 2026-02-10 17:11:11 -03:00
Sofia Rodrigues
b567713641 Merge branch 'sofia/async-http-data' of https://github.com/leanprover/lean4 into sofia/async-http-headers 2026-02-10 17:03:44 -03:00
Sofia Rodrigues
de776c1f32 fix: interpolation 2026-02-10 17:03:02 -03:00
Sofia Rodrigues
c498ea74ec Merge branch 'sofia/async-http-headers' of https://github.com/leanprover/lean4 into sofia/async-http-uri 2026-02-10 16:47:08 -03:00
Sofia Rodrigues
f4aad3a494 Merge branch 'sofia/async-http-data' of https://github.com/leanprover/lean4 into sofia/async-http-headers 2026-02-10 16:46:48 -03:00
Sofia Rodrigues
1cebf576c3 Merge branch 'master' of https://github.com/leanprover/lean4 into sofia/async-http-data 2026-02-10 16:44:20 -03:00
Sofia Rodrigues
25dac2e239 fix: test 2026-02-09 22:25:14 -03:00
Sofia Rodrigues
4a9de7094c feat: new body 2026-02-09 22:20:05 -03:00
Sofia Rodrigues
c4eab3b677 Merge branch 'sofia/async-http-h1' into sofia/async-http-server 2026-02-09 21:58:26 -03:00
Sofia Rodrigues
dd125c7999 Merge branch 'sofia/async-http-body' into sofia/async-http-h1 2026-02-09 21:57:42 -03:00
Sofia Rodrigues
5e3dce8088 fix: chunk stream will only deal with content-size of the chunks not with the wireFormatSize 2026-02-09 21:57:26 -03:00
Sofia Rodrigues
4c64f2c2e8 fix: suggestions 2026-02-09 21:55:38 -03:00
Sofia Rodrigues
aa6e11dfc0 Merge branch 'sofia/async-http-body' of https://github.com/leanprover/lean4 into sofia/async-http-h1 2026-02-09 21:25:02 -03:00
Sofia Rodrigues
e7d1e7dd54 Merge branch 'sofia/async-http-uri' of https://github.com/leanprover/lean4 into sofia/async-http-body 2026-02-09 21:09:55 -03:00
Sofia Rodrigues
03843fd3f0 fix: suggestions 2026-02-09 21:09:38 -03:00
Sofia Rodrigues
294e9900ea feat: unify all in stream 2026-02-09 20:29:18 -03:00
Sofia Rodrigues
f13651979e fix: wireFormatSize 2026-02-09 19:31:41 -03:00
Sofia Rodrigues
3d8ba4d09b Merge branch 'sofia/async-http-headers' of https://github.com/leanprover/lean4 into sofia/async-http-body 2026-02-09 12:30:01 -03:00
Sofia Rodrigues
63984c8dda fix: header value 2026-02-09 12:01:25 -03:00
Sofia Rodrigues
e2fd8a5835 Merge branch 'sofia/async-http-data' of https://github.com/leanprover/lean4 into sofia/async-http-headers 2026-02-09 11:28:52 -03:00
Sofia Rodrigues
a0263870b9 fix: extensions 2026-02-09 11:24:48 -03:00
Sofia Rodrigues
3c4ae58aff feat: add extensions 2026-02-09 11:18:24 -03:00
Sofia Rodrigues
5965707575 fix: apply suggestions 2026-02-09 11:17:03 -03:00
Sofia Rodrigues
dbe0140578 fix: enforce validations 2026-02-09 10:28:43 -03:00
Sofia Rodrigues
bc21289793 feat: http docs 2026-01-25 12:48:06 -03:00
Sofia Rodrigues
f11bd0928d feat: server basics 2026-01-25 12:48:06 -03:00
Sofia Rodrigues
6ffd5ad2a4 fix: incremental parsing 2026-01-25 12:42:29 -03:00
Sofia Rodrigues
7ce8cbc01c feat: remove toString instances 2026-01-25 12:42:29 -03:00
Sofia Rodrigues
12a7603c77 fix: orphan module 2026-01-25 12:42:29 -03:00
Sofia Rodrigues
53a6355074 feat: H1 protocol 2026-01-25 12:42:29 -03:00
Sofia Rodrigues
f8ad249e42 test: wrong test 2026-01-25 12:40:41 -03:00
Sofia Rodrigues
3c41d3961e feat: empty body and constructors 2026-01-25 12:39:43 -03:00
Sofia Rodrigues
18bc715bad feat: remove useless functions 2026-01-25 12:39:43 -03:00
Sofia Rodrigues
3349d20663 feat: body 2026-01-25 12:39:41 -03:00
Sofia Rodrigues
bad70e3eab feat: request type has request target 2026-01-25 12:36:36 -03:00
Sofia Rodrigues
21286eb163 fix: domain name comment 2026-01-25 12:36:36 -03:00
Sofia Rodrigues
0e5f07558c feat: introduce data type for HTTP 2026-01-25 12:36:36 -03:00
Sofia Rodrigues
6e26b901e4 fix: encoding 2026-01-25 12:33:07 -03:00
Sofia Rodrigues
81c67c8f12 revert: levenshtein test 2026-01-25 12:29:37 -03:00
Sofia Rodrigues
990e21eefc fix: namespace 2026-01-25 12:27:16 -03:00
Sofia Rodrigues
7141144a2f fix: remove native_decide 2026-01-25 12:27:16 -03:00
Sofia Rodrigues
8c343501c1 fix: apply suggestions
Co-authored-by: Rob23oba <152706811+Rob23oba@users.noreply.github.com>
2026-01-25 12:27:16 -03:00
Sofia Rodrigues
44f08686cd feat: connection values 2026-01-25 12:27:16 -03:00
Sofia Rodrigues
65883f8c2a fix: levenshtein test is using the new Decidable instance 2026-01-25 12:27:16 -03:00
Sofia Rodrigues
bd28a8fad5 fix: tests and type class 2026-01-25 12:27:16 -03:00
Sofia Rodrigues
8ba86c2c67 fix: case and usage of native_decide 2026-01-25 12:24:52 -03:00
Sofia Rodrigues
d3cddf9e44 fix: Headers.Basic comment 2026-01-25 12:24:52 -03:00
Sofia Rodrigues
5f3babee5c feat: headers data structure 2026-01-25 12:24:51 -03:00
Sofia Rodrigues
26dfc9a872 refactor: remove headers 2026-01-25 12:16:13 -03:00
Sofia Rodrigues
e47439e8be fix: default size 2026-01-25 11:26:03 -03:00
Sofia Rodrigues
1ef53758be fix: apply suggestions
Co-authored-by: Rob23oba <152706811+Rob23oba@users.noreply.github.com>
2026-01-25 11:21:06 -03:00
Sofia Rodrigues
8544042789 fix: status and chunkedbuffer 2026-01-25 11:19:34 -03:00
Sofia Rodrigues
f564d43d98 feat: basic headers structure to more structured approach 2026-01-23 17:58:44 -03:00
Sofia Rodrigues
32fa0666c9 feat: data components 2026-01-23 17:14:53 -03:00
Sofia Rodrigues
9e27f77a45 feat: openssl nix 2026-01-16 19:04:34 -03:00
Sofia Rodrigues
4a26fe317d fix: remove tls 2026-01-16 18:54:46 -03:00
Sofia Rodrigues
23797245eb feat: start openssl 2026-01-15 16:10:09 -03:00
71 changed files with 15091 additions and 13 deletions

View File

@@ -24,7 +24,7 @@
stdenv = pkgs.overrideCC pkgs.stdenv llvmPackages.clang;
} ({
buildInputs = with pkgs; [
cmake gmp libuv ccache pkg-config
cmake gmp libuv ccache pkg-config openssl
llvmPackages.bintools # wrapped lld
llvmPackages.llvm # llvm-symbolizer for asan/lsan
gdb

View File

@@ -347,6 +347,35 @@ if(NOT LEAN_STANDALONE)
string(APPEND LEAN_EXTRA_LINKER_FLAGS " ${LIBUV_LDFLAGS}")
endif()
# OpenSSL
if("${CMAKE_SYSTEM_NAME}" MATCHES "Emscripten")
# Only on WebAssembly we compile OpenSSL ourselves
set(OPENSSL_EMSCRIPTEN_FLAGS "${EMSCRIPTEN_SETTINGS}")
# OpenSSL needs to be configured for Emscripten using their configuration system
ExternalProject_add(openssl
PREFIX openssl
GIT_REPOSITORY https://github.com/openssl/openssl
# Sync version with flake.nix if applicable
GIT_TAG openssl-3.0.15
CONFIGURE_COMMAND <SOURCE_DIR>/Configure linux-generic32 no-shared no-dso no-engine no-tests --prefix=<INSTALL_DIR> CC=${CMAKE_C_COMPILER} CXX=${CMAKE_CXX_COMPILER} AR=${CMAKE_AR} CFLAGS=${OPENSSL_EMSCRIPTEN_FLAGS}
BUILD_COMMAND emmake make -j
INSTALL_COMMAND emmake make install_sw
BUILD_IN_SOURCE ON)
set(OPENSSL_INCLUDE_DIR "${CMAKE_BINARY_DIR}/openssl/include")
set(OPENSSL_CRYPTO_LIBRARY "${CMAKE_BINARY_DIR}/openssl/lib/libcrypto.a")
set(OPENSSL_SSL_LIBRARY "${CMAKE_BINARY_DIR}/openssl/lib/libssl.a")
set(OPENSSL_LIBRARIES "${OPENSSL_SSL_LIBRARY} ${OPENSSL_CRYPTO_LIBRARY}")
else()
find_package(OpenSSL REQUIRED)
set(OPENSSL_LIBRARIES ${OPENSSL_SSL_LIBRARY} ${OPENSSL_CRYPTO_LIBRARY})
endif()
include_directories(${OPENSSL_INCLUDE_DIR})
if(NOT LEAN_STANDALONE)
string(JOIN " " OPENSSL_LIBRARIES ${OPENSSL_LIBRARIES})
string(APPEND LEAN_EXTRA_LINKER_FLAGS " ${OPENSSL_LIBRARIES}")
endif()
# Windows SDK (for ICU)
if(CMAKE_SYSTEM_NAME MATCHES "Windows")
# Pass 'tools' to skip MSVC version check (as MSVC/Visual Studio is not necessarily installed)
@@ -721,9 +750,9 @@ if(STAGE GREATER 1)
endif()
else()
add_subdirectory(runtime)
if(CMAKE_SYSTEM_NAME MATCHES "Emscripten")
add_dependencies(leanrt libuv)
add_dependencies(leanrt_initial-exec libuv)
if("${CMAKE_SYSTEM_NAME}" MATCHES "Emscripten")
add_dependencies(leanrt libuv openssl)
add_dependencies(leanrt_initial-exec libuv openssl)
endif()
add_subdirectory(util)

View File

@@ -230,7 +230,7 @@ Examples:
* `"empty".isEmpty = false`
* `" ".isEmpty = false`
-/
@[inline] def isEmpty (s : String) : Bool :=
@[inline, expose] def isEmpty (s : String) : Bool :=
s.utf8ByteSize == 0
@[export lean_string_isempty]

View File

@@ -57,4 +57,14 @@ theorem length_map {f : Char → Char} {s : String} : (s.map f).length = s.lengt
theorem map_eq_empty {f : Char → Char} {s : String} : s.map f = "" ↔ s = "" := by
simp [← toList_eq_nil_iff]
@[simp]
theorem map_map {f g : Char → Char} {s : String} : String.map g (String.map f s) = String.map (g ∘ f) s := by
apply String.ext
simp [List.map_map]
@[simp]
theorem map_id {s : String} : String.map id s = s := by
apply String.ext
simp [List.map_id]
end String

View File

@@ -229,7 +229,7 @@ Examples:
* `"Orange".toLower = "orange"`
* `"ABc123".toLower = "abc123"`
-/
@[inline] def toLower (s : String) : String :=
@[inline, expose] def toLower (s : String) : String :=
s.map Char.toLower
/--

View File

@@ -21,6 +21,9 @@ opaque maxSmallNatFn : Unit → Nat
@[extern "lean_libuv_version"]
opaque libUVVersionFn : Unit → Nat
@[extern "lean_openssl_version"]
opaque openSSLVersionFn : Unit → Nat
def closureMaxArgs : Nat :=
closureMaxArgsFn ()
@@ -30,4 +33,7 @@ def maxSmallNat : Nat :=
def libUVVersion : Nat :=
libUVVersionFn ()
def openSSLVersion : Nat :=
openSSLVersionFn ()
end Lean

View File

@@ -7,8 +7,10 @@ module
prelude
public import Std.Internal.Async
public import Std.Internal.Http
public import Std.Internal.Parsec
public import Std.Internal.UV
public import Std.Internal.SSL
@[expose] public section

View File

@@ -10,6 +10,7 @@ public import Std.Internal.Async.Basic
public import Std.Internal.Async.ContextAsync
public import Std.Internal.Async.Timer
public import Std.Internal.Async.TCP
public import Std.Internal.Async.TCP.SSL
public import Std.Internal.Async.UDP
public import Std.Internal.Async.DNS
public import Std.Internal.Async.Select
@@ -17,3 +18,4 @@ public import Std.Internal.Async.Process
public import Std.Internal.Async.System
public import Std.Internal.Async.Signal
public import Std.Internal.Async.IO
public import Std.Internal.SSL

View File

@@ -10,6 +10,8 @@ public import Init.Data.Random
public import Std.Internal.Async.Basic
import Init.Data.ByteArray.Extra
import Init.Data.Array.Lemmas
public import Std.Sync.Mutex
public import Std.Sync.Barrier
import Init.Omega
public section
@@ -132,6 +134,8 @@ partial def Selectable.one (selectables : Array (Selectable α)) : Async α := d
let gen := mkStdGen seed
let selectables := shuffleIt selectables gen
let gate ← IO.Promise.new
for selectable in selectables do
if let some val ← selectable.selector.tryFn then
let result ← selectable.cont val
@@ -141,6 +145,9 @@ partial def Selectable.one (selectables : Array (Selectable α)) : Async α := d
let promise ← IO.Promise.new
for selectable in selectables do
if finished.get then
break
let waiterPromise ← IO.Promise.new
let waiter := Waiter.mk finished waiterPromise
selectable.selector.registerFn waiter
@@ -157,18 +164,20 @@ partial def Selectable.one (selectables : Array (Selectable α)) : Async α := d
let async : Async _ :=
try
let res ← IO.ofExcept res
discard <| await gate.result?
for selectable in selectables do
selectable.selector.unregisterFn
let contRes ← selectable.cont res
promise.resolve (.ok contRes)
promise.resolve (.ok (← selectable.cont res))
catch e =>
promise.resolve (.error e)
async.toBaseIO
Async.ofPromise (pure promise)
gate.resolve ()
let result ← Async.ofPromise (pure promise)
return result
/--
Performs fair and data-loss free non-blocking multiplexing on the `Selectable`s in `selectables`.
@@ -224,6 +233,8 @@ def Selectable.combine (selectables : Array (Selectable α)) : IO (Selector α)
let derivedWaiter := Waiter.mk waiter.finished waiterPromise
selectable.selector.registerFn derivedWaiter
let barrier ← IO.Promise.new
discard <| IO.bindTask (t := waiterPromise.result?) fun res? => do
match res? with
| none => return (Task.pure (.ok ()))
@@ -231,6 +242,7 @@ def Selectable.combine (selectables : Array (Selectable α)) : IO (Selector α)
let async : Async _ := do
let mainPromise := waiter.promise
await barrier
for selectable in selectables do
selectable.selector.unregisterFn

View File

@@ -0,0 +1,417 @@
/-
Copyright (c) 2026 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Time
public import Std.Internal.UV.TCP
public import Std.Internal.Async.Timer
public import Std.Internal.Async.Select
public import Std.Internal.SSL
public section
namespace Std
namespace Internal
namespace IO
namespace Async
namespace TCP
namespace SSL
open Std.Net
/--
Default chunk size used by TLS I/O loops.
-/
def ioChunkSize : UInt64 := 16 * 1024
/--
Represents a TLS-enabled TCP server socket.
-/
structure Server where
private ofNative ::
native : Internal.UV.TCP.Socket
/--
Represents a TLS-enabled TCP client socket.
-/
structure Client where
private ofNative ::
native : Internal.UV.TCP.Socket
ssl : Std.Internal.SSL.Session
/--
Feeds one chunk of encrypted bytes received from the socket into the TLS session.
Empty chunks are ignored. Fails if the session does not consume the whole buffer,
since a short write indicates a broken TLS input state.
-/
@[inline]
private def feedEncryptedChunk (ssl : Std.Internal.SSL.Session) (encrypted : ByteArray) : Async Unit := do
  if encrypted.size == 0 then
    return ()
  let consumed ← ssl.feedEncrypted encrypted
  -- The input BIO should always accept everything we hand it.
  if consumed.toNat != encrypted.size then
    throw <| IO.userError s!"TLS input short write: consumed {consumed} / {encrypted.size} bytes"
/--
Drains all pending encrypted TLS output from the session and sends it over the
TCP socket, looping until the output buffer is empty.
-/
@[inline]
private partial def flushEncrypted (native : Internal.UV.TCP.Socket) (ssl : Std.Internal.SSL.Session) : Async Unit := do
  let out ← ssl.drainEncrypted
  if out.size == 0 then
    return ()
  Async.ofPromise <| native.send #[out]
  flushEncrypted native ssl
/--
Runs the TLS handshake loop until the handshake is established.

Alternates between flushing encrypted handshake output to the socket and feeding
received encrypted data back into the session, recursing until the session
reports the handshake as done. Fails if the peer closes before completion.
-/
private partial def handshake (native : Internal.UV.TCP.Socket) (ssl : Std.Internal.SSL.Session) (chunkSize : UInt64 := ioChunkSize) : Async Unit := do
  let done ← ssl.handshake
  flushEncrypted native ssl
  if done then
    return ()
  let encrypted? ← Async.ofPromise <| native.recv? chunkSize
  match encrypted? with
  | none =>
    throw <| IO.userError "peer closed connection before TLS handshake completed"
  | some encrypted =>
    feedEncryptedChunk ssl encrypted
    handshake native ssl chunkSize
namespace Server
/--
Configures the shared TLS server context with PEM certificate and private key files.
-/
@[inline]
def configureContext (certFile keyFile : String) : IO Unit :=
Std.Internal.SSL.configureServerContext certFile keyFile
/--
Creates a new TLS-enabled TCP server socket.
-/
@[inline]
def mk : IO Server := do
  let native ← Internal.UV.TCP.Socket.new
  return Server.ofNative native
/--
Binds the server socket to the specified address.
-/
@[inline]
def bind (s : Server) (addr : SocketAddress) : IO Unit :=
s.native.bind addr
/--
Listens for incoming connections with the given backlog.
-/
@[inline]
def listen (s : Server) (backlog : UInt32) : IO Unit :=
s.native.listen backlog
/-- Wraps a freshly accepted TCP socket in a `Client` with a new server-side TLS session. -/
@[inline] private def mkServerClient (native : Internal.UV.TCP.Socket) : IO Client := do
  let ssl ← Std.Internal.SSL.Session.mkServer
  return Client.ofNative native ssl
/--
Accepts an incoming TLS client connection and performs the TLS handshake.
-/
@[inline]
def accept (s : Server) (chunkSize : UInt64 := ioChunkSize) : Async Client := do
  let native ← Async.ofPromise <| s.native.accept
  let client ← mkServerClient native
  SSL.handshake client.native client.ssl chunkSize
  return client
/--
Tries to accept an incoming TLS client connection.

Returns `none` when no connection is pending. Note that, unlike `accept`, no TLS
handshake is performed here; the returned client carries a fresh server session.
-/
@[inline]
def tryAccept (s : Server) : IO (Option Client) := do
  let res ← s.native.tryAccept
  let socket ← IO.ofExcept res
  match socket with
  | none => return none
  | some native => return some (← mkServerClient native)
/--
Creates a `Selector` that resolves once `s` has a connection available.
-/
def acceptSelector (s : TCP.SSL.Server) : Selector Client :=
  {
    tryFn :=
      s.tryAccept
    registerFn waiter := do
      let task ← s.native.accept
      -- If we get cancelled the promise will be dropped so prepare for that
      IO.chainTask (t := task.result?) fun res => do
        match res with
        | none => return ()
        | some res =>
          let lose := return ()
          let win promise := do
            try
              let native ← IO.ofExcept res
              let ssl ← Std.Internal.SSL.Session.mkServer
              promise.resolve (.ok (Client.ofNative native ssl))
            catch e =>
              promise.resolve (.error e)
          waiter.race lose win
    unregisterFn := s.native.cancelAccept
  }
/--
Gets the local address of the server socket.
-/
@[inline]
def getSockName (s : Server) : IO SocketAddress :=
s.native.getSockName
/--
Enables the Nagle algorithm for all client sockets accepted by this server socket.
-/
@[inline]
def noDelay (s : Server) : IO Unit :=
s.native.noDelay
/--
Enables TCP keep-alive for all client sockets accepted by this server socket.
The delay must be at least one second.
-/
@[inline]
def keepAlive (s : Server) (enable : Bool) (delay : Std.Time.Second.Offset) (_ : delay.val ≥ 1 := by decide) : IO Unit :=
  s.native.keepAlive enable.toInt8 delay.val.toNat.toUInt32
end Server
namespace Client
/--
Configures the shared TLS client context.
`caFile` may be empty to use default trust settings.
-/
@[inline]
def configureContext (caFile := "") (verifyPeer := true) : IO Unit :=
Std.Internal.SSL.configureClientContext caFile verifyPeer
/--
Creates a new TLS-enabled TCP client socket with a client-side TLS session.
-/
@[inline]
def mk : IO Client := do
  let native ← Internal.UV.TCP.Socket.new
  let ssl ← Std.Internal.SSL.Session.mkClient
  return Client.ofNative native ssl
/--
Binds the client socket to the specified address.
-/
@[inline]
def bind (s : Client) (addr : SocketAddress) : IO Unit :=
s.native.bind addr
/--
Sets SNI server name used during the TLS handshake.
-/
@[inline]
def setServerName (s : Client) (host : String) : IO Unit :=
s.ssl.setServerName host
/--
Performs the TLS handshake on an established TCP connection.
-/
@[inline]
def handshake (s : Client) (chunkSize : UInt64 := ioChunkSize) : Async Unit :=
SSL.handshake s.native s.ssl chunkSize
/--
Connects the client socket to the given address and performs the TLS handshake.
-/
@[inline]
def connect (s : Client) (addr : SocketAddress) (chunkSize : UInt64 := ioChunkSize) : Async Unit := do
Async.ofPromise <| s.native.connect addr
s.handshake chunkSize
/--
Attempts to write plaintext data into TLS. Returns true when accepted.
Any encrypted TLS output generated is flushed to the socket.
-/
@[inline]
def write (s : Client) (data : ByteArray) : Async Bool := do
  let accepted ← s.ssl.write data
  flushEncrypted s.native s.ssl
  return accepted
/--
Sends data through a TLS-enabled socket.
Fails if OpenSSL reports the write as pending additional I/O.
-/
@[inline]
def send (s : Client) (data : ByteArray) : Async Unit := do
  if ← s.write data then
    return ()
  else
    throw <| IO.userError "TLS write is pending additional I/O; call `recv?` or retry later"
/--
Sends multiple data buffers through the TLS-enabled socket.
-/
@[inline]
def sendAll (s : Client) (data : Array ByteArray) : Async Unit :=
data.forM (s.send ·)
/--
Receives decrypted plaintext data from TLS.
If no plaintext is immediately available, this function pulls encrypted data from TCP first.
Returns `none` once the peer has closed the connection.
-/
partial def recv? (s : Client) (size : UInt64) (chunkSize : UInt64 := ioChunkSize) : Async (Option ByteArray) := do
  match ← s.ssl.read? size with
  | some plain =>
    flushEncrypted s.native s.ssl
    return some plain
  | none =>
    -- No plaintext yet: flush any pending output, then pull more encrypted
    -- bytes from the socket and retry.
    flushEncrypted s.native s.ssl
    let encrypted? ← Async.ofPromise <| s.native.recv? chunkSize
    match encrypted? with
    | none =>
      return none
    | some encrypted =>
      feedEncryptedChunk s.ssl encrypted
      recv? s size chunkSize
/--
Tries to receive decrypted plaintext data without blocking.
Returns `some (some data)` if plaintext is available, `some none` if the peer closed,
or `none` if no data is ready yet.
-/
partial def tryRecv (s : Client) (size : UInt64) (chunkSize : UInt64 := ioChunkSize) : Async (Option (Option ByteArray)) := do
  let pending ← s.ssl.pendingPlaintext
  if pending > 0 then
    let res ← s.recv? size
    return some res
  else
    let readableWaiter ← s.native.waitReadable
    flushEncrypted s.native s.ssl
    if readableWaiter.isResolved then
      let encrypted? ← Async.ofPromise <| s.native.recv? ioChunkSize
      match encrypted? with
      | none =>
        return none
      | some encrypted =>
        feedEncryptedChunk s.ssl encrypted
        tryRecv s size chunkSize
    else
      -- Nothing readable right now: drop the pending read registration.
      s.native.cancelRecv
      return none
/--
Feeds encrypted socket data into SSL until plaintext is pending.
Resolves the returned promise once plaintext is available. If no more socket data
is available (or the promise gets dropped), it exits without resolving.
-/
partial def waitReadable (s : Client) (chunkSize : UInt64 := ioChunkSize) : Async Unit := do
  if (← s.ssl.pendingPlaintext) > 0 then
    return ()
  let rec go : Async Unit := do
    let readable ← Async.ofPromise <| s.native.waitReadable
    if !readable then
      return ()
    let encrypted? ← Async.ofPromise <| s.native.recv? chunkSize
    match encrypted? with
    | none =>
      return ()
    | some encrypted =>
      feedEncryptedChunk s.ssl encrypted
      flushEncrypted s.native s.ssl
      if (← s.ssl.pendingPlaintext) > 0 then
        return ()
      else
        go
  go
/--
Creates a `Selector` that resolves once `s` has plaintext data available.
-/
def recvSelector (s : TCP.SSL.Client) (size : UInt64) : Selector (Option ByteArray) :=
  {
    tryFn := s.tryRecv size
    registerFn waiter := do
      let readableWaiter ← s.waitReadable.asTask
      -- If we get cancelled the promise will be dropped so prepare for that
      discard <| IO.mapTask (t := readableWaiter) fun res => do
        match res with
        | .error _ => return ()
        | .ok _ =>
          let lose := return ()
          let win promise := do
            try
              -- We know that this read should not block.
              let result ← (s.recv? size).block
              promise.resolve (.ok result)
            catch e =>
              promise.resolve (.error e)
          waiter.race lose win
    unregisterFn := s.native.cancelRecv
  }
/--
Shuts down the write side of the client socket.
-/
@[inline]
def shutdown (s : Client) : Async Unit :=
  Async.ofPromise <| s.native.shutdown
/--
Gets the remote address of the client socket.
-/
@[inline]
def getPeerName (s : Client) : IO SocketAddress :=
  s.native.getPeerName
/--
Gets the local address of the client socket.
-/
@[inline]
def getSockName (s : Client) : IO SocketAddress :=
  s.native.getSockName
/--
Returns the X.509 verification result code for this TLS session.
-/
@[inline]
def verifyResult (s : Client) : IO UInt64 :=
  s.ssl.verifyResult
/--
Enables `TCP_NODELAY` on the client socket, which disables Nagle's algorithm so
small writes are sent immediately instead of being coalesced.
-/
@[inline]
def noDelay (s : Client) : IO Unit :=
  s.native.noDelay
/--
Enables (or disables) TCP keep-alive with a specified delay for the client socket.
`delay` must be non-negative; the proof is discharged by `decide` for literals.
-/
@[inline]
def keepAlive (s : Client) (enable : Bool) (delay : Std.Time.Second.Offset) (_ : delay.val ≥ 0 := by decide) : IO Unit :=
  s.native.keepAlive enable.toInt8 delay.val.toNat.toUInt32
end Client
end SSL
end TCP
end Async
end IO
end Internal
end Std

186
src/Std/Internal/Http.lean Normal file
View File

@@ -0,0 +1,186 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Client
public import Std.Internal.Http.Server
public section
/-!
# HTTP Library
A low-level HTTP/1.1 client and server implementation for Lean. This library provides a pure,
sans-I/O protocol implementation that can be used with the `Async` library or with
custom connection handlers.
## Overview
This module provides a complete HTTP/1.1 server implementation with support for:
- Request/response handling with streaming bodies
- Keep-alive connections
- Chunked transfer encoding
- Header validation and management
- Configurable timeouts and limits
**Sans I/O Architecture**: The core protocol logic doesn't perform any actual I/O itself -
it just defines how data should be processed. This separation allows the protocol implementation
to remain pure and testable, while different transports (TCP sockets, mock clients) handle
the actual reading and writing of bytes.
## Quick Start
The main entry point is `Server.serve`, which starts an HTTP/1.1 server. Implement the
`Server.Handler` type class to define how the server handles requests, errors, and
`Expect: 100-continue` headers:
```lean
import Std.Internal.Http
open Std Internal IO Async
open Std Http Server
structure MyHandler
instance : Handler MyHandler where
onRequest _ req := do
Response.ok |>.text "Hello, World!"
def main : IO Unit := Async.block do
let addr : Net.SocketAddress := .v4 ⟨.ofParts 127 0 0 1, 8080⟩
let server ← Server.serve addr MyHandler.mk
server.waitShutdown
```
## Working with Requests
Incoming requests are represented by `Request Body.Stream`, which bundles together the
request line, parsed headers, and a lazily-consumed body. Headers are available
immediately, while the body can be streamed or collected on demand, allowing handlers
to efficiently process both small and large requests.
### Reading Headers
```lean
def handler (req : Request Body.Stream) : ContextAsync (Response Body.Stream) := do
-- Access request method and URI
let method := req.head.method -- Method.get, Method.post, etc.
let uri := req.head.uri -- RequestTarget
-- Read a specific header
if let some contentType := req.head.headers.get? (.mk "content-type") then
IO.println s!"Content-Type: {contentType}"
Response.ok |>.text "OK"
```
### Reading the Request Body
The request body is exposed as a `Body.Stream`, which can be consumed incrementally or
collected into memory. The `readAll` method reads the entire body, with an optional size
limit to protect against unbounded payloads.
```lean
def handler (req : Request Body.Stream) : ContextAsync (Response Body.Stream) := do
-- Collect entire body as a String
let bodyStr : String ← req.body.readAll
-- Or with a maximum size limit
let bodyStr : String ← req.body.readAll (maximumSize := some 1024)
Response.ok |>.text s!"Received: {bodyStr}"
```
## Building Responses
Responses are constructed using a builder API that starts from a status code and adds
headers and a body. Common helpers exist for text, HTML, JSON, and binary responses, while
still allowing full control over status codes and header values.
```lean
-- Text response
Response.ok |>.text "Hello!"
-- HTML response
Response.ok |>.html "<h1>Hello!</h1>"
-- JSON response
Response.ok |>.json "{\"key\": \"value\"}"
-- Binary response
Response.ok |>.bytes someByteArray
-- Custom status
Response.new |>.status .created |>.text "Resource created"
-- With custom headers
Response.ok
|>.header! "X-Custom-Header" "value"
|>.header! "Cache-Control" "no-cache"
|>.text "Response with headers"
```
### Streaming Responses
For large responses or server-sent events, use streaming:
```lean
def handler (req : Request Body.Stream) : ContextAsync (Response Body.Stream) := do
Response.ok
|>.header! "Content-Type" "text/plain"
|>.stream fun stream => do
for i in [0:10] do
stream.send { data := s!"chunk {i}\n".toUTF8 }
Async.sleep 1000
stream.close
```
## Server Configuration
Configure server behavior with `Config`:
```lean
def config : Config := {
maxRequests := 10000000,
lingeringTimeout := 5000,
}
let server ← Server.serve addr MyHandler.mk config
```
## Handler Type Class
Implement `Server.Handler` to define how the server processes events. The class has three
methods, all with default implementations:
- `onRequest` — called for each incoming request; returns a response inside `ContextAsync`
- `onFailure` — called when an error occurs while processing a request
- `onContinue` — called when a request includes an `Expect: 100-continue` header; return
`true` to accept the body or `false` to reject it
```lean
structure MyHandler where
greeting : String
instance : Handler MyHandler where
onRequest self req := do
Response.ok |>.text self.greeting
onFailure self err := do
IO.eprintln s!"Error: {err}"
```
The handler methods operate in the following monads:
- `onRequest` uses `ContextAsync` — an asynchronous monad (`ReaderT CancellationContext Async`) that provides:
- Full access to `Async` operations (spawning tasks, sleeping, concurrent I/O)
- A `CancellationContext` tied to the client connection — when the client disconnects, the
context is cancelled, allowing your handler to detect this and stop work early
- `onFailure` uses `Async`
- `onContinue` uses `Async`
-/

View File

@@ -0,0 +1,12 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Client.Config
public import Std.Internal.Http.Client.Connection
public section

View File

@@ -0,0 +1,99 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Time
public import Std.Internal.Http.Protocol.H1
public section
/-!
# Config
This module exposes the `Config` structure describing timeouts, connection,
and header configurations for an HTTP client.
-/
namespace Std.Http.Client
set_option linter.all true
/--
Client connection configuration with validation.
-/
structure Config where
  /--
  Maximum number of requests per connection (for keep-alive).
  -/
  maxRequestsPerConnection : Nat := 1000
  /--
  Maximum number of headers allowed per response.
  -/
  maxResponseHeaders : Nat := 200
  /--
  Maximum size of a single header value in bytes.
  -/
  maxHeaderValueSize : Nat := 16384
  /--
  Maximum waiting time for additional data before timing out.
  -/
  readTimeout : Time.Millisecond.Offset := 30000
  /--
  Timeout duration for keep-alive connections. Must be positive (enforced by the subtype).
  -/
  keepAliveTimeout : { x : Time.Millisecond.Offset // 0 < x } := ⟨60000, by decide⟩
  /--
  Timeout for the entire request lifecycle (connect + read + write). Must be positive.
  -/
  requestTimeout : { x : Time.Millisecond.Offset // 0 < x } := ⟨120000, by decide⟩
  /--
  Whether to enable keep-alive connections.
  -/
  enableKeepAlive : Bool := true
  /--
  Output buffer flush threshold in bytes.
  -/
  writeBufferHighWatermark : Nat := 4096
  /--
  Maximum number of bytes to receive in a single read call.
  -/
  maxRecvChunkSize : Nat := 16384
  /--
  Default buffer size for request payloads.
  -/
  defaultRequestBufferSize : Nat := 16384
  /--
  The user-agent string to send by default.
  -/
  userAgent : Option Header.Value := some (.mk "lean-http/1.1")
namespace Config
/--
Convert this client config into an HTTP/1.1 protocol configuration.
-/
def toH1Config (config : Config) : Std.Http.Protocol.H1.Config :=
  { maxMessages := config.maxRequestsPerConnection
    maxHeaders := config.maxResponseHeaders
    -- NOTE(review): header *name* length is capped by the value-size setting,
    -- since `Config` has no separate name-size field — confirm this is intended.
    maxHeaderNameLength := config.maxHeaderValueSize
    maxHeaderValueLength := config.maxHeaderValueSize
    enableKeepAlive := config.enableKeepAlive
    identityHeader := config.userAgent
  }
end Config
end Std.Http.Client

View File

@@ -0,0 +1,397 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Async.TCP
public import Std.Internal.Async.ContextAsync
public import Std.Internal.Http.Transport
public import Std.Internal.Http.Protocol.H1
public import Std.Internal.Http.Client.Config
public section
namespace Std
namespace Http
namespace Client
open Std Internal IO Async TCP Protocol
open Time
/-!
# Connection
This module defines a `Client.Connection` that is a structure used to handle a single HTTP connection with
possibly multiple requests/responses from the client side.
-/
set_option linter.all true
/--
A single HTTP client connection.
-/
public structure Connection (α : Type) where
  /--
  The underlying transport for this connection.
  -/
  socket : α
  /--
  The processing machine for HTTP 1.1
  -/
  machine : H1.Machine .sending
/--
A request packet to be sent through the persistent connection channel.
-/
structure RequestPacket where
  /--
  The request to send.
  -/
  request : Request Body.Stream
  /--
  Promise to resolve with the response.
  -/
  responsePromise : IO.Promise (Except Error (Response Body.Stream))

namespace RequestPacket

/--
Resolve the response promise with an error.
-/
def onError (packet : RequestPacket) (error : Error) : BaseIO Unit :=
  discard <| packet.responsePromise.resolve (.error error)

/--
Resolve the response promise with a successful response.
-/
def onResponse (packet : RequestPacket) (response : Response Body.Stream) : BaseIO Unit :=
  discard <| packet.responsePromise.resolve (.ok response)

end RequestPacket
/--
A persistent HTTP client connection that handles multiple sequential requests.
-/
public structure PersistentConnection (α : Type) where
  /--
  The underlying connection.
  -/
  connection : Connection α
  /--
  Channel for sending new requests.
  -/
  requestChannel : Std.CloseableChannel RequestPacket
  /--
  Resolves when the connection task exits.
  -/
  shutdown : IO.Promise Unit
namespace PersistentConnection
/--
Send a request through the persistent connection.
Enqueues the request and suspends until the connection loop resolves the
response promise; throws if the channel is already closed or the loop failed.
-/
def send [Transport α] (pc : PersistentConnection α) (request : Request Body.Stream) : Async (Response Body.Stream) := do
  let responsePromise ← IO.Promise.new
  let task ← pc.requestChannel.send { request, responsePromise }
  let .ok _ ← await task
    | throw (.userError "connection closed, cannot send more requests")
  match ← await responsePromise.result! with
  | .ok response =>
    pure response
  | .error e =>
    throw e
/--
Wait for the background connection task to terminate.
-/
def waitShutdown (pc : PersistentConnection α) : Async Unit := do
  await pc.shutdown.result!
/--
Close the persistent connection by closing the request channel.
The background connection loop observes the closed channel and shuts down.
-/
def close (pc : PersistentConnection α) : Async Unit := do
  discard <| EIO.toBaseIO pc.requestChannel.close
end PersistentConnection
namespace Connection
/-- Outcome of waiting on the socket, the request body stream, the request channel, and timers. -/
private inductive Recv
  /-- Bytes arrived from the socket (`none` means EOF). -/
  | bytes (x : Option ByteArray)
  /-- A body chunk arrived from the outgoing request stream (`none` means the user closed it). -/
  | channel (x : Option Chunk)
  /-- A new request packet arrived (`none` means the request channel was closed). -/
  | packet (x : Option RequestPacket)
  /-- A read/keep-alive timeout fired, or cancellation with a deadline reason. -/
  | timeout
  /-- The connection context was cancelled for a non-deadline reason. -/
  | shutdown
  /-- Receiving failed; the connection should be torn down. -/
  | close
/--
Races all event sources for the connection loop (cancellation, socket bytes,
timeouts, outgoing body chunks, new request packets) and returns the first
winner as a `Recv` value.
-/
private def receiveWithTimeout
    [Transport α]
    (socket : Option α)
    (expect : UInt64)
    (requestStream : Option Body.Stream)
    (requestChannel : Option (Std.CloseableChannel RequestPacket))
    (timeoutMs : Millisecond.Offset)
    (keepAliveTimeoutMs : Option Millisecond.Offset)
    (connectionContext : CancellationContext) : Async Recv := do
  let mut baseSelectables : Array (Selectable Recv) := #[
    .case connectionContext.doneSelector (fun _ => do
      let reason ← connectionContext.getCancellationReason
      match reason with
      | some .deadline => pure .timeout
      | _ => pure .shutdown)
  ]
  if let some socket := socket then
    baseSelectables := baseSelectables.push (.case (Transport.recvSelector socket expect) (Recv.bytes · |> pure))
  -- Timeouts are only applied if we are not waiting on the user.
  if requestStream.isNone && requestChannel.isNone then
    if let some keepAliveTimeout := keepAliveTimeoutMs then
      baseSelectables := baseSelectables.push (.case (← Selector.sleep keepAliveTimeout) (fun _ => pure .timeout))
    else
      baseSelectables := baseSelectables.push (.case (← Selector.sleep timeoutMs) (fun _ => pure .timeout))
  if let some requestStream := requestStream then
    baseSelectables := baseSelectables.push (.case requestStream.recvSelector (Recv.channel · |> pure))
  if let some requestChannel := requestChannel then
    baseSelectables := baseSelectables.push (.case requestChannel.recvSelector (Recv.packet · |> pure))
  Selectable.one baseSelectables
/--
Waits for the next connection event, clamping the expected read size to
`config.maxRecvChunkSize`. Any failure while selecting is reported as `.close`.
-/
private def processNeedMoreData
    [Transport α]
    (config : Config)
    (socket : Option α)
    (expect : Option Nat)
    (requestChannel : Option (Std.CloseableChannel RequestPacket))
    (requestStream : Option Body.Stream)
    (timeout : Millisecond.Offset)
    (keepAliveTimeout : Option Millisecond.Offset)
    (connectionContext : CancellationContext) : Async Recv := do
  try
    -- Fall back to the default buffer size, then clamp to the per-read maximum.
    let expectedBytes := expect
      |>.getD config.defaultRequestBufferSize
      |>.min config.maxRecvChunkSize
      |>.toUInt64
    receiveWithTimeout socket expectedBytes requestStream requestChannel timeout keepAliveTimeout connectionContext
  catch _ =>
    pure .close
/--
Fails the in-flight request (if any) with `error` and returns a fully closed
machine together with `none` as the new current request.
-/
private def handleError
    (machine : H1.Machine .sending)
    (currentRequest : Option RequestPacket)
    (error : Error) : BaseIO (H1.Machine .sending × Option RequestPacket) := do
  if let some packet := currentRequest then
    packet.onError error
  pure (machine.closeWriter.closeReader.noMoreInput, none)
/--
Main client connection loop: drives the HTTP/1.1 machine, forwards its output
to the transport, dispatches machine events, and multiplexes socket input,
outgoing body chunks, new request packets, timeouts, and cancellation.
On exit it fails/closes all outstanding state and drains queued requests.
-/
private def handle
    [Transport α]
    (connection : Connection α)
    (config : Config)
    (connectionContext : CancellationContext)
    (requestChannel : Std.CloseableChannel RequestPacket) : Async Unit := do
  let mut machine := connection.machine
  let socket := connection.socket
  let mut currentTimeout := config.keepAliveTimeout.val
  let mut keepAliveTimeout := some config.keepAliveTimeout.val
  let mut currentRequest : Option RequestPacket := none
  let mut requestStream : Option Body.Stream := none
  let mut responseStream : Option Body.Stream := none
  let mut requiresData := false
  let mut expectData := none
  let mut waitingForRequest := true
  while ¬machine.halted do
    let (newMachine, step) := machine.step
    machine := newMachine
    -- Flush whatever the machine produced; a transport error ends the loop.
    if step.output.size > 0 then
      try Transport.sendAll socket #[step.output.toByteArray] catch _ => break
    for event in step.events do
      match event with
      | .needMoreData expect => do
        requiresData := true
        expectData := expect
      | .needBody =>
        pure ()
      | .needAnswer =>
        pure ()
      | .«continue» =>
        pure ()
      | .endHeaders head => do
        -- Response headers arrived: switch from keep-alive to read timeout.
        currentTimeout := config.readTimeout
        keepAliveTimeout := none
        if let some stream := responseStream then
          if let some length := head.getSize true then
            stream.setKnownSize (some length)
          if let some packet := currentRequest then
            packet.onResponse { head, body := stream }
      | .gotData final ext data =>
        if let some stream := responseStream then
          try
            stream.send { data := data.toByteArray, extensions := ext }
            if final then
              stream.close
          catch _ =>
            pure ()
      | .closeBody =>
        if let some stream := requestStream then
          if ¬(← stream.isClosed) then stream.close
      | .next => do
        -- Response finished: reset per-request state and go back to keep-alive.
        if let some stream := requestStream then
          if ¬(← stream.isClosed) then stream.close
        requestStream := none
        responseStream := none
        currentRequest := none
        waitingForRequest := true
        keepAliveTimeout := some config.keepAliveTimeout.val
        currentTimeout := config.keepAliveTimeout.val
      | .failed err =>
        if let some packet := currentRequest then
          packet.onError (.userError (toString err))
      | .close =>
        pure ()
    if requiresData || waitingForRequest || requestStream.isSome then
      let socket := some socket
      let nextRequest := if waitingForRequest then some requestChannel else none
      requiresData := false
      let event ← processNeedMoreData
        config socket expectData nextRequest requestStream currentTimeout keepAliveTimeout connectionContext
      match event with
      | .bytes (some bs) =>
        machine := machine.feed bs
      | .bytes none =>
        machine := machine.noMoreInput
      | .channel (some chunk) =>
        machine := machine.sendData #[chunk]
      | .channel none =>
        machine := machine.userClosedBody
        if let some stream := requestStream then
          if ¬(← stream.isClosed) then stream.close
        requestStream := none
      | .packet (some packet) =>
        -- A new request starts: arm the request timeout and feed the head.
        currentRequest := some packet
        waitingForRequest := false
        currentTimeout := config.requestTimeout.val
        keepAliveTimeout := none
        machine := machine.send packet.request.head
        let stream := packet.request.body
        if let some size ← stream.getKnownSize then
          machine := machine.setKnownSize size
        requestStream := some stream
        responseStream := some (← Body.Stream.emptyWithCapacity)
      | .packet none =>
        break
      | .close =>
        break
      | .timeout =>
        let (newMachine, newCurrentRequest) ← handleError machine currentRequest (.userError "request timeout")
        machine := newMachine
        currentRequest := newCurrentRequest
      | .shutdown =>
        let (newMachine, newCurrentRequest) ← handleError machine currentRequest (.userError "connection shutdown")
        machine := newMachine
        currentRequest := newCurrentRequest
  -- Teardown: fail the in-flight request, close both streams, and drain the channel.
  if let some packet := currentRequest then
    packet.onError (.userError "connection closed")
  if let some stream := responseStream then
    if ¬(← stream.isClosed) then
      stream.close
  if let some stream := requestStream then
    if ¬(← stream.isClosed) then
      stream.close
  discard <| EIO.toBaseIO requestChannel.close
  let mut keepDraining := true
  while keepDraining do
    match ← requestChannel.tryRecv with
    | some packet =>
      packet.onError (.userError "connection closed")
    | none =>
      keepDraining := false
  Transport.close socket
end Connection
/--
Create a persistent connection that can handle multiple sequential requests.
This is the entry point for creating a client connection. It can be used with a `TCP.Socket` or any
other type that implements `Transport` to create an HTTP client capable of handling multiple
sequential requests on a single connection.
-/
def createPersistentConnection [Transport t] (client : t) (config : Config := {}) : Async (PersistentConnection t) := do
  let requestChannel ← Std.CloseableChannel.new
  let shutdown ← IO.Promise.new
  let connection := Connection.mk client { config := config.toH1Config }
  let ctx ← CancellationContext.new
  -- Run the connection loop off the current task; always signal shutdown on exit.
  background do
    try
      Std.Http.Client.Connection.handle connection config ctx requestChannel
    finally
      discard <| shutdown.resolve ()
  pure { connection, requestChannel, shutdown }
end Std.Http.Client

View File

@@ -0,0 +1,24 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Data.Method
public import Std.Internal.Http.Data.Version
public import Std.Internal.Http.Data.Request
public import Std.Internal.Http.Data.Response
public import Std.Internal.Http.Data.Status
public import Std.Internal.Http.Data.Chunk
public import Std.Internal.Http.Data.Headers
public import Std.Internal.Http.Data.URI
public import Std.Internal.Http.Data.Body
/-!
# HTTP Data Types
This module re-exports all HTTP data types including request/response structures,
headers, methods, status codes, URIs, and body handling.
-/

View File

@@ -0,0 +1,13 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Async.ContextAsync
public import Std.Internal.Http.Data.Headers
public import Std.Internal.Http.Data.Body.Basic
public import Std.Internal.Http.Data.Body.Length
public import Std.Internal.Http.Data.Body.Stream

View File

@@ -0,0 +1,61 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Async.ContextAsync
public import Std.Internal.Http.Data.Headers
public import Std.Internal.Http.Data.Body.Length
public section
/-!
# Body
This module defines shared types for HTTP body handling.
-/
namespace Std.Http.Body
set_option linter.all true
/--
Typeclass for types that can be converted to a `ByteArray`.
-/
class ToByteArray (α : Type) where
  /--
  Transforms into a `ByteArray`
  -/
  toByteArray : α → ByteArray

instance : ToByteArray ByteArray where
  toByteArray := id

instance : ToByteArray String where
  toByteArray := String.toUTF8
/--
Typeclass for types that can be decoded from a `ByteArray`. The conversion may fail with an error
message if the bytes are not valid for the target type.
-/
class FromByteArray (α : Type) where
  /--
  Attempts to decode a `ByteArray` into the target type, returning an error message on failure.
  -/
  fromByteArray : ByteArray → Except String α

instance : FromByteArray ByteArray where
  fromByteArray := .ok

instance : FromByteArray String where
  fromByteArray bs :=
    match String.fromUTF8? bs with
    | some s => .ok s
    | none => .error "invalid UTF-8 encoding"
end Std.Http.Body

View File

@@ -0,0 +1,49 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Init.Data.Repr
public section
/-!
# Length
This module defines the `Length` type, which represents the `Content-Length` or `Transfer-Encoding`
of an HTTP request or response.
-/
namespace Std.Http.Body
set_option linter.all true
/--
Size of the body of a response or request.
-/
inductive Length
  /--
  Indicates that the HTTP message body uses **chunked transfer encoding**.
  -/
  | chunked
  /--
  Indicates that the HTTP message body has a **fixed, known length**, as specified by the
  `Content-Length` header.
  -/
  | fixed (n : Nat)
  deriving Repr, BEq
namespace Length
/--
Checks if the `Length` is chunked.
-/
def isChunked : Length → Bool
  | .chunked => true
  | _ => false
end Std.Http.Body.Length

View File

@@ -0,0 +1,557 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Sync
public import Std.Internal.Async
public import Std.Internal.Http.Data.Request
public import Std.Internal.Http.Data.Response
public import Std.Internal.Http.Data.Chunk
public import Std.Internal.Http.Data.Body.Basic
public import Std.Internal.Http.Data.Body.Length
public import Init.Data.Queue
public import Init.Data.ByteArray
public section
/-!
# Body.Stream
A `Stream` represents an asynchronous channel for streaming data in chunks. It provides an
interface for producers and consumers to exchange chunks with optional metadata (extensions),
making it suitable for HTTP chunked transfer encoding and other streaming scenarios.
-/
namespace Std.Http.Body
open Std Internal IO Async
set_option linter.all true
namespace Stream
open Internal.IO.Async in
private inductive Consumer where
  /-- A plain blocked receiver, resolved directly through a promise. -/
  | normal (promise : IO.Promise (Option Chunk))
  /-- A receiver registered through a selector; it may already have lost its race. -/
  | select (finished : Waiter (Option Chunk))

/--
Delivers `x` to the consumer. Returns `true` if the consumer actually took the value,
`false` if a `select` consumer had already completed through another selector.
-/
private def Consumer.resolve (c : Consumer) (x : Option Chunk) : BaseIO Bool := do
  match c with
  | .normal promise =>
    promise.resolve x
    return true
  | .select waiter =>
    let lose := return false
    let win promise := do
      promise.resolve (.ok x)
      return true
    waiter.race lose win
private structure Producer where
  /-- The chunk this producer is waiting to enqueue. -/
  chunk : Chunk
  /-- Resolved with `true` once the chunk is accepted, `false` if the stream closes first. -/
  promise : IO.Promise Bool

private structure State where
  /--
  Chunks pushed into the stream that are waiting to be consumed.
  -/
  values : Std.Queue Chunk
  /--
  Current number of chunks buffered in the stream.
  -/
  amount : Nat
  /--
  Maximum number of chunks allowed in the buffer. Writers block when amount ≥ capacity.
  -/
  capacity : Nat
  /--
  Consumers that are blocked on a producer providing them a chunk. They will be resolved to `none`
  if the stream closes.
  -/
  consumers : Std.Queue Consumer
  /--
  Producers that are blocked waiting for buffer space to become available.
  -/
  producers : Std.Queue Producer
  /--
  Whether the stream is closed already.
  -/
  closed : Bool
  /--
  Known size of the stream if available.
  -/
  knownSize : Option Body.Length
  deriving Nonempty
end Stream
/--
A channel for chunks with support for chunk extensions.
-/
structure Stream where
  private mk ::
  /-- Mutex-protected channel state (buffer, waiters, close flag, known size). -/
  private state : Mutex Stream.State
  deriving Nonempty, TypeName
namespace Stream
/--
Creates a new Stream with a specified capacity.
-/
def emptyWithCapacity (capacity : Nat := 128) : Async Stream := do
  return {
    state := ← Mutex.new {
      values := ∅
      consumers := ∅
      producers := ∅
      amount := 0
      capacity
      closed := false
      knownSize := none
    }
  }
/--
Creates a new Stream with default capacity.
-/
@[always_inline, inline]
def empty : Async Stream :=
  emptyWithCapacity
/--
Subtracts a consumed chunk's size from a `fixed` known length; chunked or unknown
lengths pass through unchanged. (`Nat` subtraction truncates at zero.)
-/
private def decreaseKnownSize (knownSize : Option Body.Length) (chunk : Chunk) : Option Body.Length :=
  match knownSize with
  | some (.fixed res) => some (Body.Length.fixed (res - chunk.data.size))
  | _ => knownSize
/--
If the buffer has room, moves one blocked producer's chunk into the buffer and
resolves its promise with `true`; otherwise re-enqueues the producer.
-/
private def tryWakeProducer [Monad m] [MonadLiftT (ST IO.RealWorld) m] [MonadLiftT BaseIO m] :
    AtomicT State m Unit := do
  let st ← get
  -- Try to wake a producer if we have space
  if st.amount < st.capacity then
    if let some (producer, producers) := st.producers.dequeue? then
      let chunk := producer.chunk
      if st.amount + 1 <= st.capacity then
        set { st with
          values := st.values.enqueue chunk,
          amount := st.amount + 1,
          producers
        }
        producer.promise.resolve true
      else
        set { st with producers := producers.enqueue producer }
/--
Dequeues one buffered chunk (if any), updating the known size and waking a
blocked producer to refill the freed slot. Returns `none` when the buffer is empty.
-/
private def tryRecv' [Monad m] [MonadLiftT (ST IO.RealWorld) m] [MonadLiftT BaseIO m] :
    AtomicT State m (Option Chunk) := do
  let st ← get
  if let some (chunk, values) := st.values.dequeue? then
    let newKnownSize := decreaseKnownSize st.knownSize chunk
    let newAmount := st.amount - 1
    set { st with values, knownSize := newKnownSize, amount := newAmount }
    tryWakeProducer
    return some chunk
  else
    return none
/--
Attempts to receive a chunk from the stream. Returns `some` with a chunk when data is available, or `none`
when the stream is closed or no data is available.
-/
def tryRecv (stream : Stream) : Async (Option Chunk) :=
  stream.state.atomically do
    tryRecv'
/--
Returns a task yielding the next chunk: immediately if buffered, `none` if the
stream is closed, otherwise a task backed by a freshly enqueued consumer promise.
-/
private def recv' (stream : Stream) : BaseIO (Task (Option Chunk)) := do
  stream.state.atomically do
    if let some chunk ← tryRecv' then
      return .pure <| some chunk
    else if (← get).closed then
      return .pure none
    else
      let promise ← IO.Promise.new
      modify fun st => { st with consumers := st.consumers.enqueue (.normal promise) }
      -- A dropped promise yields `none`, same as a closed stream.
      return promise.result?.map (sync := true) (·.bind id)
/--
Receives a chunk from the stream. Blocks if no data is available yet. Returns `none` if the stream
is closed and no data is available. The amount parameter is ignored for chunk streams.
-/
def recv (stream : Stream) (_count : Option UInt64) : Async (Option Chunk) := do
  Async.ofTask (← recv' stream)
/--
Attempts to deliver `chunk`: hands it directly to a waiting consumer (retrying
past consumers that already lost their select race), otherwise buffers it if
there is room. Returns `false` only when the buffer is full.
-/
private def trySend' (chunk : Chunk) : AtomicT State BaseIO Bool := do
  while true do
    let st ← get
    if let some (consumer, consumers) := st.consumers.dequeue? then
      let newKnownSize := decreaseKnownSize st.knownSize chunk
      let success ← consumer.resolve (some chunk)
      set { st with consumers, knownSize := newKnownSize }
      if success then
        break
    else
      if st.amount + 1 <= st.capacity then
        set { st with
          values := st.values.enqueue chunk,
          amount := st.amount + 1
        }
        return true
      else
        return false
  return true
/-- Non-blocking send; fails (returns `false`) when the stream is closed or full. -/
private def trySend (stream : Stream) (chunk : Chunk) : BaseIO Bool := do
  stream.state.atomically do
    if (← get).closed then
      return false
    else
      trySend' chunk
/--
Returns a task completing when `chunk` is accepted: immediately on direct
delivery/buffering, via a blocked-producer promise when the buffer is full,
or with an error when the stream is closed.
-/
private def send' (stream : Stream) (chunk : Chunk) : BaseIO (Task (Except IO.Error Unit)) := do
  stream.state.atomically do
    if (← get).closed then
      return .pure <| .error (.userError "channel closed")
    else if ← trySend' chunk then
      return .pure <| .ok ()
    else
      let promise ← IO.Promise.new
      let producer : Producer := { chunk, promise }
      modify fun st => { st with producers := st.producers.enqueue producer }
      return promise.result?.map (sync := true) fun res =>
        if res.getD false then .ok () else .error (.userError "channel closed")
/--
Sends a chunk to the stream. Blocks if the buffer is full.
Empty chunks are dropped silently.
-/
def send (stream : Stream) (chunk : Chunk) : Async Unit := do
  if chunk.data.isEmpty then
    return
  let res : AsyncTask _ ← send' stream chunk
  await res
/--
Gets the known size of the stream if available. Returns `none` if the size is not known.
-/
@[always_inline, inline]
def getKnownSize (stream : Stream) : Async (Option Body.Length) := do
  stream.state.atomically do
    return (← get).knownSize
/--
Sets the known size of the stream. Use this when the total expected size is known ahead of time.
-/
@[always_inline, inline]
def setKnownSize (stream : Stream) (size : Option Body.Length) : Async Unit := do
  stream.state.atomically do
    modify fun st => { st with knownSize := size }
/--
Closes the stream, preventing further sends and causing pending/future
recv operations to return `none` when no data is available.
Blocked consumers receive `none`; blocked producers are failed with `false`.
-/
def close (stream : Stream) : Async Unit := do
  stream.state.atomically do
    let st ← get
    if st.closed then return ()
    for consumer in st.consumers.toArray do
      discard <| consumer.resolve none
    for producer in st.producers.toArray do
      producer.promise.resolve false
    set { st with consumers := ∅, producers := ∅, closed := true }
/--
Checks if the stream is closed.
-/
@[always_inline, inline]
def isClosed (stream : Stream) : Async Bool := do
  stream.state.atomically do
    return (← get).closed
/-- `true` when a receive would not block: data is buffered or the stream is closed. -/
@[inline]
private def recvReady' [Monad m] [MonadLiftT (ST IO.RealWorld) m] :
    AtomicT State m Bool := do
  let st ← get
  return !st.values.isEmpty || st.closed
open Internal.IO.Async in
/--
Creates a `Selector` that resolves once the `Stream` has data available and provides that data.
-/
def recvSelector (stream : Stream) : Selector (Option Chunk) where
  tryFn := do
    stream.state.atomically do
      if ← recvReady' then
        let val ← tryRecv'
        return some val
      else
        return none
  registerFn waiter := do
    stream.state.atomically do
      if ← recvReady' then
        -- Already ready: try to win the race immediately.
        let lose := return ()
        let win promise := do
          promise.resolve (.ok (← tryRecv'))
        waiter.race lose win
      else
        modify fun st => { st with consumers := st.consumers.enqueue (.select waiter) }
  unregisterFn := do
    stream.state.atomically do
      let st ← get
      -- Drop select-consumers whose race has already finished.
      let consumers ← st.consumers.filterM
        fun
          | .normal .. => return true
          | .select waiter => return !(← waiter.checkFinished)
      set { st with consumers }
/--
Sends data to the stream and writes a chunk to it.
-/
def writeChunk (stream : Stream) (chunk : Chunk) : Async Unit :=
  stream.send chunk
/--
Iterate over the stream content in chunks, processing each chunk with the given step function.
-/
@[inline]
protected partial def forIn
    {β : Type} (stream : Stream) (acc : β)
    (step : Chunk → β → Async (ForInStep β)) : Async β := do
  let rec @[specialize] loop (stream : Stream) (acc : β) : Async β := do
    if let some chunk ← stream.recv none then
      match ← step chunk acc with
      | .done res => return res
      | .yield res => loop stream res
    else
      return acc
  loop stream acc
/--
Iterate over the stream content in chunks, processing each chunk with the given step function.
Stops early (yielding the accumulator) when the surrounding context is cancelled.
-/
@[inline]
protected partial def forIn'
    {β : Type} (stream : Stream) (acc : β)
    (step : Chunk → β → ContextAsync (ForInStep β)) : ContextAsync β := do
  let rec @[specialize] loop (stream : Stream) (acc : β) : ContextAsync β := do
    -- Race the stream against context cancellation.
    let data ← Selectable.one #[
      .case (stream.recvSelector) pure,
      .case (← ContextAsync.doneSelector) (fun _ => pure none),
    ]
    if let some chunk := data then
      match ← step chunk acc with
      | .done res => return res
      | .yield res => loop stream res
    else
      return acc
  loop stream acc
instance : ForIn Async Stream Chunk where
  forIn := Std.Http.Body.Stream.forIn

instance : ForIn ContextAsync Stream Chunk where
  forIn := Std.Http.Body.Stream.forIn'
/--
Reads all remaining chunks from the stream and returns the concatenated data as a `ByteArray`.
Blocks until the stream is closed. If `maximumSize` is provided, throws an `IO.Error` if the
total data exceeds that limit.
-/
partial def readAll [FromByteArray α] (stream : Stream) (maximumSize : Option UInt64 := none) : ContextAsync α := do
  let mut result := ByteArray.empty
  for chunk in stream do
    result := result ++ chunk.data
    -- Check after each chunk so we abort as soon as the limit is exceeded.
    if let some max := maximumSize then
      if result.size.toUInt64 > max then
        throw (.userError s!"body exceeded maximum size of {max} bytes")
  match FromByteArray.fromByteArray result with
  | .ok a => return a
  | .error msg => throw (.userError msg)
end Std.Http.Body.Stream
namespace Std.Http.Request.Builder

open Internal.IO.Async

/--
Builds a request with a streaming body. The generator function receives the `Stream` and
can write chunks to it asynchronously.
-/
def stream (builder : Builder) (gen : Body.Stream → Async Unit) : Async (Request Body.Stream) := do
  let body ← Body.Stream.empty
  -- Run the generator in the background so the request can be returned immediately.
  background (gen body)
  return { head := builder.head, body }

/--
Builds a request with an empty body.
-/
def blank (builder : Builder) : Async (Request Body.Stream) := do
  let body ← Body.Stream.empty
  body.setKnownSize (some (.fixed 0))
  body.close
  return { head := builder.head, body }

/--
Builds a request whose body is the fixed byte payload `bytes`, setting the `Content-Type`
header to `mime` and `Content-Length` automatically. Shared by the concrete body builders.
-/
private def fixedBody (builder : Builder) (mime : String) (bytes : ByteArray) :
    Async (Request Body.Stream) := do
  let body ← Body.Stream.empty
  body.setKnownSize (some (.fixed bytes.size))
  body.send (Chunk.ofByteArray bytes)
  body.close
  let headers := builder.head.headers
    |>.insert Header.Name.contentType (Header.Value.ofString! mime)
    |>.insert Header.Name.contentLength (Header.Value.ofString! (toString bytes.size))
  return { head := { builder.head with headers }, body }

/--
Builds a request with a text body. Sets Content-Type to text/plain and Content-Length automatically.
-/
def text (builder : Builder) (content : String) : Async (Request Body.Stream) :=
  fixedBody builder "text/plain; charset=utf-8" content.toUTF8

/--
Builds a request with a binary body. Sets Content-Type to application/octet-stream and Content-Length automatically.
-/
def bytes (builder : Builder) (content : ByteArray) : Async (Request Body.Stream) :=
  fixedBody builder "application/octet-stream" content

/--
Builds a request with a JSON body. Sets Content-Type to application/json and Content-Length automatically.
-/
def json (builder : Builder) (content : String) : Async (Request Body.Stream) :=
  fixedBody builder "application/json" content.toUTF8

/--
Builds a request with an HTML body. Sets Content-Type to text/html and Content-Length automatically.
-/
def html (builder : Builder) (content : String) : Async (Request Body.Stream) :=
  fixedBody builder "text/html; charset=utf-8" content.toUTF8

/--
Builds a request with an empty body (alias for blank).
-/
def noBody (builder : Builder) : Async (Request Body.Stream) :=
  builder.blank

end Std.Http.Request.Builder
namespace Std.Http.Response.Builder

open Internal.IO.Async

/--
Builds a response with a streaming body. The generator function receives the `Stream` and
can write chunks to it asynchronously.
-/
def stream (builder : Builder) (gen : Body.Stream → Async Unit) : Async (Response Body.Stream) := do
  let body ← Body.Stream.empty
  -- Run the generator in the background so the response can be returned immediately.
  background (gen body)
  return { head := builder.head, body }

/--
Builds a response with an empty body.
-/
def blank (builder : Builder) : Async (Response Body.Stream) := do
  let body ← Body.Stream.empty
  body.setKnownSize (some (.fixed 0))
  body.close
  return { head := builder.head, body }

/--
Builds a response whose body is the fixed byte payload `bytes`, setting the `Content-Type`
header to `mime` and `Content-Length` automatically. Shared by the concrete body builders.
-/
private def fixedBody (builder : Builder) (mime : String) (bytes : ByteArray) :
    Async (Response Body.Stream) := do
  let body ← Body.Stream.empty
  body.setKnownSize (some (.fixed bytes.size))
  body.send (Chunk.ofByteArray bytes)
  body.close
  let headers := builder.head.headers
    |>.insert Header.Name.contentType (Header.Value.ofString! mime)
    |>.insert Header.Name.contentLength (Header.Value.ofString! (toString bytes.size))
  return { head := { builder.head with headers }, body }

/--
Builds a response with a text body. Sets Content-Type to text/plain and Content-Length automatically.
-/
def text (builder : Builder) (content : String) : Async (Response Body.Stream) :=
  fixedBody builder "text/plain; charset=utf-8" content.toUTF8

/--
Builds a response with a binary body. Sets Content-Type to application/octet-stream and Content-Length automatically.
-/
def bytes (builder : Builder) (content : ByteArray) : Async (Response Body.Stream) :=
  fixedBody builder "application/octet-stream" content

/--
Builds a response with a JSON body. Sets Content-Type to application/json and Content-Length automatically.
-/
def json (builder : Builder) (content : String) : Async (Response Body.Stream) :=
  fixedBody builder "application/json" content.toUTF8

/--
Builds a response with an HTML body. Sets Content-Type to text/html and Content-Length automatically.
-/
def html (builder : Builder) (content : String) : Async (Response Body.Stream) :=
  fixedBody builder "text/html; charset=utf-8" content.toUTF8

/--
Builds a response with an empty body (alias for blank).
-/
def noBody (builder : Builder) : Async (Response Body.Stream) :=
  builder.blank

end Std.Http.Response.Builder

View File

@@ -0,0 +1,260 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Init.Data.String
public import Std.Data.HashMap
public import Std.Internal.Http.Internal
public import Std.Internal.Http.Data.Headers
public section
/-!
# Chunk
This module defines the `Chunk` type, which represents a chunk of data from a request or response.
-/
namespace Std.Http
open Internal
set_option linter.all true
/--
Checks if a character is a valid HTTP token character per RFC 9110 §5.6.2.
Token characters include alphanumerics and the following: `!#$%&'*+-.^_`|~`
-/
def isTokenCharacter (c : Char) : Bool :=
  -- The constant is a 128-bit mask with one bit set per allowed ASCII code point.
  c.toNat < 128 && Nat.testBit 0x57ffffffc7fffffe03ff6cfa00000000 c.toNat
/--
Proposition that asserts all characters in a string are valid token characters and that it is
non-empty.
-/
abbrev IsValidExtensionName (s : String) : Prop :=
  s.toList.all isTokenCharacter ∧ ¬s.isEmpty
/--
A validated chunk extension name that ensures all characters conform to HTTP token standards
per RFC 9110 §5.6.2. Extension names appear in chunked transfer encoding as key-value metadata.
-/
structure ExtensionName where
  /--
  The extension name string.
  -/
  value : String
  /--
  The proof that it's a valid extension name.
  -/
  validExtensionName : IsValidExtensionName value := by decide
  deriving Repr, DecidableEq, BEq

namespace ExtensionName

instance : Hashable ExtensionName where
  hash x := Hashable.hash x.value

instance : Inhabited ExtensionName where
  default := ⟨"x", by native_decide⟩

/--
Attempts to create an `ExtensionName` from a `String`, returning `none` if the string contains
invalid characters or is empty.
-/
def ofString? (s : String) : Option ExtensionName :=
  if h : IsValidExtensionName s then
    some ⟨s, h⟩
  else
    none

/--
Creates an `ExtensionName` from a string, panicking with an error message if the string contains
invalid characters or is empty.
-/
def ofString! (s : String) : ExtensionName :=
  if h : IsValidExtensionName s then
    ⟨s, h⟩
  else
    panic! ("invalid extension name: " ++ s.quote)

instance : ToString ExtensionName where
  toString name := name.value

end ExtensionName
/--
Represents a chunk of data with optional extensions (key-value pairs).
-/
structure Chunk where
  /--
  The byte data contained in this chunk.
  -/
  data : ByteArray
  /--
  Optional metadata associated with this chunk as key-value pairs. Keys are strings, values are
  optional strings.
  -/
  extensions : Array (ExtensionName × Option String) := #[]
  deriving Inhabited

namespace Chunk

/--
Quotes an extension value if it contains non-token characters, otherwise returns it as-is.
-/
def quoteExtensionValue (s : String) : String :=
  if s.any (fun c => !isTokenCharacter c) then s.quote else s

/--
An empty chunk with no data and no extensions.
-/
def empty : Chunk :=
  { data := .empty, extensions := #[] }

/--
Creates a simple chunk without extensions.
-/
def ofByteArray (data : ByteArray) : Chunk :=
  { data := data, extensions := #[] }

/--
Adds an extension to a chunk.
-/
def withExtension (chunk : Chunk) (key : ExtensionName) (value : String) : Chunk :=
  { chunk with extensions := chunk.extensions.push (key, some value) }

/--
Interprets the chunk data as a UTF-8 encoded string.
-/
def toString? (chunk : Chunk) : Option String :=
  String.fromUTF8? chunk.data

instance : Encode .v11 Chunk where
  encode buffer chunk :=
    let chunkLen := chunk.data.size
    -- Extensions are serialized as `;name` or `;name=value` after the chunk size.
    let exts := chunk.extensions.foldl (fun acc (name, value) =>
      acc ++ ";" ++ name.value.toLower ++ (value.elim "" (fun x => "=" ++ quoteExtensionValue x))) ""
    -- The chunk size is written in hexadecimal per RFC 9112 §7.1.
    let size := Nat.toDigits 16 chunkLen |>.toArray |>.map Char.toUInt8 |> ByteArray.mk
    buffer.append #[size, exts.toUTF8, "\r\n".toUTF8, chunk.data, "\r\n".toUTF8]

end Chunk
/--
Trailer headers sent after the final chunk in HTTP/1.1 chunked transfer encoding.
Per RFC 9112 §7.1.2, trailers allow the sender to include additional metadata after
the message body, such as message integrity checks or digital signatures.
-/
structure Trailer where
  /--
  The trailer header fields as key-value pairs.
  -/
  headers : Headers
  deriving Inhabited

namespace Trailer

/--
Creates an empty trailer with no headers.
-/
def empty : Trailer :=
  { headers := .empty }

/--
Inserts a trailer header field.
-/
@[inline]
def insert (trailer : Trailer) (name : Header.Name) (value : Header.Value) : Trailer :=
  { headers := trailer.headers.insert name value }

/--
Inserts a trailer header field from string name and value, panicking if either is invalid.
-/
@[inline]
def insert! (trailer : Trailer) (name : String) (value : String) : Trailer :=
  { headers := trailer.headers.insert! name value }

/--
Retrieves the first value for the given trailer header name.
Returns `none` if absent.
-/
@[inline]
def get? (trailer : Trailer) (name : Header.Name) : Option Header.Value :=
  trailer.headers.get? name

/--
Retrieves all values for the given trailer header name.
Returns `none` if absent.
-/
@[inline]
def getAll? (trailer : Trailer) (name : Header.Name) : Option (Array Header.Value) :=
  trailer.headers.getAll? name

/--
Checks if a trailer header with the given name exists.
-/
@[inline]
def contains (trailer : Trailer) (name : Header.Name) : Bool :=
  trailer.headers.contains name

/--
Removes a trailer header with the given name.
-/
@[inline]
def erase (trailer : Trailer) (name : Header.Name) : Trailer :=
  { headers := trailer.headers.erase name }

/--
Gets the number of trailer headers.
-/
@[inline]
def size (trailer : Trailer) : Nat :=
  trailer.headers.size

/--
Checks if the trailer has no headers.
-/
@[inline]
def isEmpty (trailer : Trailer) : Bool :=
  trailer.headers.isEmpty

/--
Merges two trailers, accumulating values for duplicate keys from both.
-/
def merge (t1 t2 : Trailer) : Trailer :=
  { headers := t1.headers.merge t2.headers }

/--
Converts the trailer headers to a list of key-value pairs.
-/
def toList (trailer : Trailer) : List (Header.Name × Header.Value) :=
  trailer.headers.toList

/--
Converts the trailer headers to an array of key-value pairs.
-/
def toArray (trailer : Trailer) : Array (Header.Name × Header.Value) :=
  trailer.headers.toArray

/--
Folds over all key-value pairs in the trailer headers.
-/
def fold (trailer : Trailer) (init : α) (f : α → Header.Name → Header.Value → α) : α :=
  trailer.headers.fold init f

instance : Encode .v11 Trailer where
  encode buffer trailer :=
    -- A trailer section is preceded by the terminating zero-length chunk.
    buffer.write "0\r\n".toUTF8
    |> (Encode.encode .v11 · trailer.headers)
    |>.write "\r\n".toUTF8

end Trailer

View File

@@ -0,0 +1,83 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Init.Core
public import Init.Dynamic
public import Std.Data.TreeMap
open Lean
public section
/-!
# Extensions
This module provides the `Extensions` type, a dynamically-typed map for storing optional metadata
on HTTP requests and responses. It can be used by parsers, middleware, or other processing stages
to attach arbitrary typed data.
-/
namespace Std.Http
set_option linter.all true
/--
A dynamic typed map for optional metadata that can be attached to HTTP requests and responses.
Extensions allow storing arbitrary typed data keyed by type name, useful for middleware-style
metadata such as parsed socket information or custom processing data.
-/
structure Extensions where
  private mk ::
  /--
  The underlying tree map storing dynamic values keyed by their type name.
  Note: We cannot use `Name.quickCmp` here.
  -/
  private data : TreeMap Name Dynamic (compare ·.hash.toNat ·.hash.toNat) := .empty
  deriving Inhabited

namespace Extensions

/--
An empty extensions map with no data.
-/
def empty : Extensions :=
  ⟨.empty⟩

/--
Retrieves a value of type `α` from the extensions, if present.
-/
@[inline]
def get (x : Extensions) (α : Type) [TypeName α] : Option α := do
  let dyn ← x.data.get? (TypeName.typeName α)
  dyn.get? α

/--
Inserts a value into the extensions, keyed by its type name.
If a value of the same type already exists, it is replaced.
-/
@[inline]
def insert (x : Extensions) [TypeName α] (data : α) : Extensions :=
  let dyn := Dynamic.mk data
  ⟨x.data.insert dyn.typeName dyn⟩

/--
Removes the value of type `α` from the extensions.
-/
@[inline]
def remove (x : Extensions) (α : Type) [TypeName α] : Extensions :=
  ⟨x.data.erase (TypeName.typeName α)⟩

/--
Checks whether the extensions contain a value of type `α`.
-/
@[inline]
def contains (x : Extensions) (α : Type) [TypeName α] : Bool :=
  x.data.contains (TypeName.typeName α)

end Std.Http.Extensions

View File

@@ -0,0 +1,250 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Init.Data.String
public import Std.Data.HashMap
public import Std.Internal.Http.Internal
public import Std.Internal.Http.Data.Headers.Basic
public import Std.Internal.Http.Data.Headers.Name
public import Std.Internal.Http.Data.Headers.Value
public section
/-!
# Headers
This module defines the `Headers` type, which represents a collection of HTTP header name-value pairs.
-/
namespace Std.Http
set_option linter.all true
open Internal
/--
A structure for managing HTTP headers as key-value pairs.
-/
structure Headers where
  /--
  The underlying multimap that stores headers.
  -/
  map : MultiMap Header.Name Header.Value
  deriving Inhabited, Repr

instance : Membership Header.Name Headers where
  mem s h := h ∈ s.map

instance (name : Header.Name) (h : Headers) : Decidable (name ∈ h) :=
  inferInstanceAs (Decidable (name ∈ h.map))
namespace Headers

/--
Retrieves the first `Header.Value` for the given key.
-/
@[inline]
def get (headers : Headers) (name : Header.Name) (h : name ∈ headers) : Header.Value :=
  headers.map.get name h

/--
Retrieves all `Header.Value` entries for the given key.
-/
@[inline]
def getAll (headers : Headers) (name : Header.Name) (h : name ∈ headers) : Array Header.Value :=
  headers.map.getAll name h

/--
Retrieves all `Header.Value` entries for the given key.
Returns `none` if the header is absent.
-/
@[inline]
def getAll? (headers : Headers) (name : Header.Name) : Option (Array Header.Value) :=
  headers.map.getAll? name

/--
Retrieves the first `Header.Value` for the given key.
Returns `none` if the header is absent.
-/
@[inline]
def get? (headers : Headers) (name : Header.Name) : Option Header.Value :=
  headers.map.get? name

/--
Checks if the entry is present in the `Headers`.
-/
@[inline]
def hasEntry (headers : Headers) (name : Header.Name) (value : Header.Value) : Bool :=
  headers.map.data.get? name
  |>.bind (fun x => x.val.find? (· == value))
  |>.isSome

/--
Retrieves the last header value for the given key.
Returns `none` if the header is absent.
-/
@[inline]
def getLast? (headers : Headers) (name : Header.Name) : Option Header.Value :=
  headers.map.getLast? name

/--
Like `get?`, but returns a default value if absent.
-/
@[inline]
def getD (headers : Headers) (name : Header.Name) (d : Header.Value) : Header.Value :=
  headers.map.getD name d

/--
Like `get?`, but panics if absent.
-/
@[inline]
def get! (headers : Headers) (name : Header.Name) : Header.Value :=
  headers.map.get! name

/--
Inserts a new key-value pair into the headers.
-/
@[inline]
def insert (headers : Headers) (key : Header.Name) (value : Header.Value) : Headers :=
  { map := headers.map.insert key value }

/--
Adds a header from string name and value, panicking if either is invalid.
-/
@[inline]
def insert! (headers : Headers) (name : String) (value : String) : Headers :=
  headers.insert (Header.Name.ofString! name) (Header.Value.ofString! value)

/--
Inserts a new key with an array of values.
-/
@[inline]
def insertMany (headers : Headers) (key : Header.Name) (value : Array Header.Value) (p : value.size > 0) : Headers :=
  { map := headers.map.insertMany key value p }

/--
Creates empty headers.
-/
def empty : Headers :=
  { map := ∅ }
/--
Creates headers from a list of key-value pairs.
-/
def ofList (pairs : List (Header.Name × Header.Value)) : Headers :=
  { map := MultiMap.ofList pairs }

/--
Checks if a header with the given name exists.
-/
@[inline]
def contains (headers : Headers) (name : Header.Name) : Bool :=
  headers.map.contains name

/--
Removes a header with the given name.
-/
@[inline]
def erase (headers : Headers) (name : Header.Name) : Headers :=
  { map := headers.map.erase name }

/--
Gets the number of headers.
-/
@[inline]
def size (headers : Headers) : Nat :=
  headers.map.size

/--
Checks if the headers are empty.
-/
@[inline]
def isEmpty (headers : Headers) : Bool :=
  headers.map.isEmpty

/--
Merges two headers, accumulating values for duplicate keys from both.
-/
def merge (headers1 headers2 : Headers) : Headers :=
  { map := headers1.map ∪ headers2.map }

/--
Converts the headers to a list of key-value pairs (flattened). Each header with multiple values produces
multiple pairs.
-/
def toList (headers : Headers) : List (Header.Name × Header.Value) :=
  headers.map.toList

/--
Converts the headers to an array of key-value pairs (flattened). Each header with multiple values
produces multiple pairs.
-/
def toArray (headers : Headers) : Array (Header.Name × Header.Value) :=
  headers.map.toArray

/--
Folds over all key-value pairs in the headers.
-/
def fold (headers : Headers) (init : α) (f : α → Header.Name → Header.Value → α) : α :=
  headers.map.toArray.foldl (fun acc (k, v) => f acc k v) init

/--
Maps a function over all header values, producing new headers.
-/
def mapValues (headers : Headers) (f : Header.Name → Header.Value → Header.Value) : Headers :=
  let pairs := headers.map.toArray.map (fun (k, v) => (k, f k v))
  { map := pairs.foldl (fun acc (k, v) => acc.insert k v) MultiMap.empty }

/--
Filters and maps over header key-value pairs. Returns only the pairs for which the function returns `some`.
-/
def filterMap (headers : Headers) (f : Header.Name → Header.Value → Option Header.Value) : Headers :=
  let pairs := headers.map.toArray.filterMap (fun (k, v) =>
    match f k v with
    | some v' => some (k, v')
    | none => none)
  { map := pairs.foldl (fun acc (k, v) => acc.insert k v) MultiMap.empty }

/--
Filters header key-value pairs, keeping only those that satisfy the predicate.
-/
def filter (headers : Headers) (f : Header.Name → Header.Value → Bool) : Headers :=
  headers.filterMap (fun k v => if f k v then some v else none)

/--
Updates the first value of a header if it exists, or inserts if it doesn't. Replaces all existing values
for that header with the new value.
-/
def update (headers : Headers) (name : Header.Name) (f : Option Header.Value → Header.Value) : Headers :=
  let newValue := f (headers.get? name)
  { map := headers.map.erase name |>.insert name newValue }
instance : ToString Headers where
  toString headers :=
    let pairs := headers.map.toArray.map (fun (k, v) => s!"{k}: {v.value}")
    String.intercalate "\r\n" pairs.toList

instance : Encode .v11 Headers where
  encode buffer headers :=
    headers.fold buffer (fun buf name value =>
      buf.writeString s!"{name}: {value}\r\n")

instance : EmptyCollection Headers :=
  ⟨Headers.empty⟩

instance : Singleton (Header.Name × Header.Value) Headers :=
  ⟨fun ⟨a, b⟩ => (∅ : Headers).insert a b⟩

instance : Insert (Header.Name × Header.Value) Headers :=
  ⟨fun ⟨a, b⟩ s => s.insert a b⟩

instance : Union Headers :=
  ⟨merge⟩

end Std.Http.Headers

View File

@@ -0,0 +1,148 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Data.Headers.Name
public import Std.Internal.Http.Data.Headers.Value
public import Std.Internal.Http.Internal
public section
/-!
# Header Typeclass and Common Headers
This module defines the `Header` typeclass for typed HTTP headers and some common header parsers.
-/
namespace Std.Http
set_option linter.all true
open Internal
/--
Typeclass for typed HTTP headers that can be parsed from and serialized to header values.
-/
class Header (α : Type) where
  /--
  Parse a header value into the typed representation.
  -/
  parse : Header.Value → Option α
  /--
  Serialize the typed representation back to a name-value pair.
  -/
  serialize : α → Header.Name × Header.Value

/--
An `Encode` instance can be derived from any `Header` instance by serializing to the wire format
`Name: Value\r\n`.
-/
instance [h : Header α] : Encode .v11 α where
  encode buffer a :=
    let (name, value) := h.serialize a
    buffer.writeString s!"{name}: {value}\r\n"
namespace Header

/--
The `Content-Length` header, representing the size of the message body in bytes.
Parses only valid natural number values.
-/
structure ContentLength where
  /--
  The content length in bytes.
  -/
  length : Nat
  deriving BEq, Repr

namespace ContentLength

/--
Parses a content length header from a name and value.
-/
def parse (v : Value) : Option ContentLength :=
  v.value.toNat?.map (.mk)

/--
Serializes a content length header back to a name-value pair.
-/
def serialize (h : ContentLength) : Name × Value :=
  (Header.Name.contentLength, Value.ofString! (toString h.length))

instance : Header ContentLength := ⟨parse, serialize⟩

end ContentLength
/--
Validates the chunked placement rules. Returns `false` if the encoding list violates the constraints.
-/
@[expose]
def TransferEncoding.Validate (codings : Array String) : Bool :=
  if codings.isEmpty || codings.any (· == "") then
    false
  else
    let chunkedCount := codings.filter (· == "chunked") |>.size
    let lastIsChunked := codings.back? == some "chunked"
    if chunkedCount > 1 then
      false
    else if chunkedCount == 1 && !lastIsChunked then
      false
    else
      true

/--
The `Transfer-Encoding` header, representing the list of transfer codings applied to the message body.
Validation rules (RFC 9112 Section 6.1):
- "chunked" may appear at most once.
- If "chunked" is present, it must be the last encoding in the list.
-/
structure TransferEncoding where
  /--
  The ordered list of transfer codings.
  -/
  codings : Array String
  /--
  Valid encodings.
  -/
  valid : TransferEncoding.Validate codings = true
  deriving Repr

namespace TransferEncoding

/--
Returns `true` if the transfer encoding ends with chunked.
-/
def isChunked (te : TransferEncoding) : Bool :=
  te.codings.back? == some "chunked"

/--
Parses a comma-separated list of transfer codings from a header value, validating chunked placement.
-/
def parse (v : Value) : Option TransferEncoding :=
  let codings := v.value.split (· == ',') |>.toArray.map (·.trimAscii.toString.toLower)
  if h : TransferEncoding.Validate codings then
    some ⟨codings, h⟩
  else
    none

/--
Serializes a transfer encoding back to a comma-separated header value.
-/
def serialize (te : TransferEncoding) : Header.Name × Header.Value :=
  let value := ",".intercalate (te.codings.toList)
  (Header.Name.transferEncoding, Value.ofString! value)

instance : Header TransferEncoding := ⟨parse, serialize⟩

end Std.Http.Header.TransferEncoding

View File

@@ -0,0 +1,172 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Init.Data.String
public import Init.Data.ToString
public import Std.Internal.Http.Internal
public section
/-!
# Header Names and Values
This module defines the `Name` and `Value` types, which represent validated HTTP header names and
values that conform to HTTP standards.
-/
namespace Std.Http.Header
set_option linter.all true
open Internal
/--
Checks if a character is valid for use in an HTTP header name.
-/
@[expose]
def isValidHeaderNameChar (c : Char) : Bool :=
  -- 128-bit mask with one bit set per allowed ASCII code point (token characters).
  c.toNat < 128 && Nat.testBit 0x57ffffffc7fffffe03ff6cfa00000000 c.toNat

/--
Proposition that asserts all characters in a string are valid and that it is non-empty for HTTP header names.
-/
abbrev IsValidHeaderName (s : String) : Prop :=
  s.toList.all isValidHeaderNameChar ∧ ¬s.isEmpty
/--
A validated HTTP header name that ensures all characters conform to HTTP standards. Header names are
case-insensitive according to HTTP specifications.
-/
structure Name where
  /--
  The lowercased normalized header name string.
  -/
  value : String
  /--
  The proof that it's a valid header name.
  -/
  validHeaderName : IsValidHeaderName value := by decide
  /--
  The proof that we stored the header name in normal form.
  -/
  normalForm : IsLowerCase value := by decide
  deriving Repr, DecidableEq, BEq

namespace Name

instance : Hashable Name where
  hash x := Hashable.hash x.value

instance : Inhabited Name where
  default := ⟨"_", by decide, by decide⟩

/--
Attempts to create a `Name` from a `String`, returning `none` if the string contains invalid
characters for HTTP header names or is empty.
-/
@[expose]
def ofString? (s : String) : Option Name :=
  let val := s.toLower
  if h : IsValidHeaderName val ∧ IsLowerCase val then
    some ⟨val, h.left, h.right⟩
  else
    none

/--
Creates a `Name` from a string, panicking with an error message if the
string contains invalid characters for HTTP header names or is empty.
-/
@[expose]
def ofString! (s : String) : Name :=
  let val := s.toLower
  if h : IsValidHeaderName val ∧ IsLowerCase val then
    ⟨val, h.left, h.right⟩
  else
    panic! s!"invalid header name: {s.quote}"
/--
Converts the header name to title case (e.g., "Content-Type").
Note: some well-known headers have unconventional casing (e.g., "WWW-Authenticate"),
but since HTTP header names are case-insensitive, this always uses simple capitalization.
-/
@[inline]
def toCanonical (name : Name) : String :=
  let it := name.value.splitOn "-"
    |>.map (·.capitalize)
  String.intercalate "-" it

/--
Performs a case-insensitive comparison between a `Name` and a `String`. Returns `true` if they match.
-/
@[expose]
def is (name : Name) (s : String) : Bool :=
  name.value == s.toLower

instance : ToString Name where
  toString name := name.toCanonical

/--
Standard Content-Type header name
-/
def contentType : Header.Name := .mk "content-type"

/--
Standard Content-Length header name
-/
def contentLength : Header.Name := .mk "content-length"

/--
Standard Host header name
-/
def host : Header.Name := .mk "host"

/--
Standard Authorization header name
-/
def authorization : Header.Name := .mk "authorization"

/--
Standard User-Agent header name
-/
def userAgent : Header.Name := .mk "user-agent"

/--
Standard Accept header name
-/
def accept : Header.Name := .mk "accept"

/--
Standard Connection header name
-/
def connection : Header.Name := .mk "connection"

/--
Standard Transfer-Encoding header name
-/
def transferEncoding : Header.Name := .mk "transfer-encoding"

/--
Standard Server header name
-/
def server : Header.Name := .mk "server"

/--
Standard Date header name
-/
def date : Header.Name := .mk "date"

/--
Standard Expect header name
-/
def expect : Header.Name := .mk "expect"

end Std.Http.Header.Name

View File

@@ -0,0 +1,105 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Init.Data.String
public import Init.Data.ToString
public import Std.Internal.Http.Internal
public section
/-!
# Header Values
This module defines the `Value` type, which represents validated HTTP header values that conform to HTTP
standards.
-/
namespace Std.Http.Header
set_option linter.all true
open Internal
/--
Checks if a character is valid for use in an HTTP header value: visible ASCII,
horizontal tab, or the obs-text range (0x80–0xFF).
-/
abbrev isValidHeaderChar (c : Char) : Bool :=
  ((' ' ≤ c ∧ c ≤ '~') ∨ c = '\t') ∨ ('\u0080' ≤ c ∧ c ≤ '\u00FF')

/--
Proposition that asserts all characters in a string are valid for HTTP header values.
-/
abbrev IsValidHeaderValue (s : String) : Prop :=
  s.toList.all isValidHeaderChar
/--
A validated HTTP header value that ensures all characters conform to HTTP standards.
-/
structure Value where
  /--
  The string data.
  -/
  value : String
  /--
  The proof that it's a valid header value.
  -/
  validHeaderValue : IsValidHeaderValue value := by decide
  deriving BEq, DecidableEq, Repr

instance : Hashable Value where
  hash := Hashable.hash ∘ Value.value

instance : Inhabited Value where
  default := ⟨"_", by decide⟩
namespace Value

/--
Attempts to create a `Value` from a `String`, returning `none` if the string contains invalid characters
for HTTP header values.
-/
@[expose]
def ofString? (s : String) : Option Value :=
  if h : IsValidHeaderValue s then
    some ⟨s, h⟩
  else
    none

/--
Creates a `Value` from a string, panicking with an error message if the string contains invalid
characters for HTTP header values.
-/
@[expose]
def ofString! (s : String) : Value :=
  if h : IsValidHeaderValue s then
    ⟨s, h⟩
  else
    panic! s!"invalid header value: {s.quote}"

/--
Performs a case-insensitive comparison between a `Value` and a `String`. Returns `true` if they match.
-/
@[expose]
def is (s : Value) (h : String) : Bool :=
  s.value.toLower == h.toLower

instance : ToString Value where
  toString v := v.value

/--
Standard close header value
-/
def close : Header.Value := .mk "close"

/--
Standard chunked header value
-/
def chunked : Header.Value := .mk "chunked"

end Std.Http.Header.Value

View File

@@ -0,0 +1,121 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Init.Data.Repr
public import Init.Data.ToString
public import Std.Internal.Http.Internal
public section
/-!
# Method
This module provides the `Method` type, that represents HTTP request methods. It defines the
standard set of HTTP methods (e.g. `GET`, `POST`, `PUT`, `DELETE`).
-/
namespace Std.Http
set_option linter.all true
open Internal
/--
A method is a verb that describes the action to be performed.
* Reference: https://httpwg.org/specs/rfc9110.html#methods
-/
inductive Method where
/--
Retrieve a resource.
-/
| get
/--
Retrieve headers for a resource, without the body.
-/
| head
/--
Submit data to be processed (e.g., form submission).
-/
| post
/--
Replace a resource with new data.
-/
| put
/--
Remove a resource.
-/
| delete
/--
Establish a tunnel to a server (often for TLS).
-/
| connect
/--
Describe communication options for a resource.
-/
| options
/--
Perform a message loop-back test.
-/
| trace
/--
Apply partial modifications to a resource.
Source: https://www.rfc-editor.org/rfc/rfc5789.html
-/
| patch
deriving Repr, Inhabited, BEq, DecidableEq
namespace Method
/--
Converts a `String` into a `Method`. Method names are case-sensitive per RFC 9110,
so only the canonical uppercase spellings are accepted; anything else yields `none`.
-/
def ofString? : String → Option Method
  | "GET" => some .get
  | "HEAD" => some .head
  | "POST" => some .post
  | "PUT" => some .put
  | "DELETE" => some .delete
  | "CONNECT" => some .connect
  | "OPTIONS" => some .options
  | "TRACE" => some .trace
  | "PATCH" => some .patch
  | _ => none
/--
Converts a `String` into a `Method`, panics if invalid.
-/
def ofString! (s : String) : Method :=
  -- Delegate validation to `ofString?`; only the failure path panics.
  if let some method := ofString? s then
    method
  else
    panic! s!"invalid HTTP method: {s.quote}"
instance : ToString Method where
toString
| .get => "GET"
| .head => "HEAD"
| .post => "POST"
| .put => "PUT"
| .delete => "DELETE"
| .connect => "CONNECT"
| .options => "OPTIONS"
| .trace => "TRACE"
| .patch => "PATCH"
instance : Encode .v11 Method where
  -- Wire format of a method is just its canonical name (the `ToString` rendering).
  -- (Restored: the scraped `buffer.writeString toString` lost the function-composition
  -- operator; written here as an explicit application, which is eta-equivalent.)
  encode buffer method := buffer.writeString (toString method)
end Std.Http.Method

View File

@@ -0,0 +1,266 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Data.Extensions
public import Std.Internal.Http.Data.Method
public import Std.Internal.Http.Data.Version
public import Std.Internal.Http.Data.Headers
public import Std.Internal.Http.Data.URI
public section
/-!
# Request
This module provides the `Request` type, which represents an HTTP request. It also defines ways
to build a `Request` using functions that make it easier.
-/
namespace Std.Http
set_option linter.all true
/--
The main parts of a request containing the HTTP method, version, URI, and headers.
-/
structure Request.Head where
/--
The HTTP method (GET, POST, PUT, DELETE, etc.) for the request
-/
method : Method := .get
/--
The HTTP protocol version (HTTP/1.0, HTTP/1.1, HTTP/2.0, etc.)
-/
version : Version := .v11
/--
The request target/URI indicating the resource being requested
-/
uri : RequestTarget := .asteriskForm
/--
Collection of HTTP headers for the request (Content-Type, Authorization, etc.)
-/
headers : Headers := .empty
deriving Inhabited, Repr
/--
HTTP request structure parameterized by body type
-/
structure Request (t : Type) where
/--
The request headers and metadata
-/
head : Request.Head
/--
The request body content of type t
-/
body : t
/--
Optional dynamic metadata attached to the request.
-/
extensions : Extensions := .empty
deriving Inhabited
/--
Builds an HTTP Request
-/
structure Request.Builder where
/--
The head of the request
-/
head : Head := {}
/--
Optional dynamic metadata attached to the request.
-/
extensions : Extensions := .empty
namespace Request
instance : ToString Head where
toString req :=
toString req.method ++ " " ++
toString req.uri ++ " " ++
toString req.version ++
"\r\n" ++
toString req.headers ++
"\r\n"
open Internal in
instance : Encode .v11 Head where
encode buffer req :=
let buffer := Encode.encode (v := .v11) buffer req.method
let buffer := buffer.writeChar ' '
let buffer := Encode.encode (v := .v11) buffer req.uri
let buffer := buffer.writeChar ' '
let buffer := Encode.encode (v := .v11) buffer req.version
let buffer := buffer.writeString "\r\n"
let buffer := Encode.encode (v := .v11) buffer req.headers
buffer.writeString "\r\n"
/--
Creates a new HTTP Request builder with default head (method: GET, version: HTTP/1.1, asterisk URI,
empty headers)
-/
def new : Builder := { }
namespace Builder
/--
Creates a new HTTP Request builder with default head (method: GET, version: HTTP/1.1, asterisk URI,
empty headers)
-/
def empty : Builder := { }
/--
Sets the HTTP method for the request being built
-/
def method (builder : Builder) (method : Method) : Builder :=
{ builder with head := { builder.head with method := method } }
/--
Sets the HTTP version for the request being built
-/
def version (builder : Builder) (version : Version) : Builder :=
{ builder with head := { builder.head with version := version } }
/--
Sets the request target/URI for the request being built
-/
def uri (builder : Builder) (uri : RequestTarget) : Builder :=
{ builder with head := { builder.head with uri := uri } }
/--
Sets the request target/URI for the request being built
-/
def uri! (builder : Builder) (uri : String) : Builder :=
let uri := RequestTarget.parse! uri
{ builder with head := { builder.head with uri } }
/--
Sets the headers for the request being built
-/
def headers (builder : Builder) (headers : Headers) : Builder :=
{ builder with head := { builder.head with headers } }
/--
Adds a single header to the request being built
-/
def header (builder : Builder) (key : Header.Name) (value : Header.Value) : Builder :=
{ builder with head := { builder.head with headers := builder.head.headers.insert key value } }
/--
Adds a single header to the request being built, panics if the header is invalid
-/
def header! (builder : Builder) (key : String) (value : String) : Builder :=
let key := Header.Name.ofString! key
let value := Header.Value.ofString! value
{ builder with head := { builder.head with headers := builder.head.headers.insert key value } }
/--
Adds a header to the request being built only if the Option Header.Value is some
-/
def headerOpt (builder : Builder) (key : Header.Name) (value : Option Header.Value) : Builder :=
match value with
| some v => builder.header key v
| none => builder
/--
Inserts a typed extension value into the request being built.
-/
def extension (builder : Builder) [TypeName α] (data : α) : Builder :=
{ builder with extensions := builder.extensions.insert data }
/--
Builds and returns the final HTTP Request with the specified body
-/
def body (builder : Builder) (body : t) : Request t :=
{ head := builder.head, body := body, extensions := builder.extensions }
end Builder
/--
Creates a new HTTP GET Request with the specified URI
-/
def get (uri : RequestTarget) : Builder :=
new
|>.method .get
|>.uri uri
/--
Creates a new HTTP POST Request builder with the specified URI.
-/
def post (uri : RequestTarget) : Builder :=
new
|>.method .post
|>.uri uri
/--
Creates a new HTTP PUT Request builder with the specified URI.
-/
def put (uri : RequestTarget) : Builder :=
new
|>.method .put
|>.uri uri
/--
Creates a new HTTP DELETE Request builder with the specified URI
-/
def delete (uri : RequestTarget) : Builder :=
new
|>.method .delete
|>.uri uri
/--
Creates a new HTTP PATCH Request builder with the specified URI
-/
def patch (uri : RequestTarget) : Builder :=
new
|>.method .patch
|>.uri uri
/--
Creates a new HTTP HEAD Request builder with the specified URI.
Named `head'` to avoid conflict with the `head` field accessor.
-/
def head' (uri : RequestTarget) : Builder :=
new
|>.method .head
|>.uri uri
/--
Creates a new HTTP OPTIONS Request builder with the specified URI.
Use `Request.options (RequestTarget.asteriskForm)` for server-wide OPTIONS.
-/
def options (uri : RequestTarget) : Builder :=
new
|>.method .options
|>.uri uri
/--
Creates a new HTTP CONNECT Request builder with the specified URI.
Typically used with `RequestTarget.authorityForm` for tunneling.
-/
def connect (uri : RequestTarget) : Builder :=
new
|>.method .connect
|>.uri uri
/--
Creates a new HTTP TRACE Request builder with the specified URI
-/
def trace (uri : RequestTarget) : Builder :=
new
|>.method .trace
|>.uri uri
end Std.Http.Request

View File

@@ -0,0 +1,230 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Async
public import Std.Internal.Http.Data.Extensions
public import Std.Internal.Http.Data.Status
public import Std.Internal.Http.Data.Version
public import Std.Internal.Http.Data.Headers
public section
/-!
# Response
This module provides the `Response` type, which represents an HTTP response. It also defines
builder functions and convenience methods for constructing responses with various content types.
-/
namespace Std.Http
set_option linter.all true
open Std.Internal.IO.Async
/--
The main parts of a response.
-/
structure Response.Head where
/--
The HTTP status code and reason phrase, indicating the result of the request.
For example, `.ok` corresponds to `200 OK`.
-/
status : Status := .ok
/--
The HTTP protocol version used in the response, e.g. `HTTP/1.1`.
-/
version : Version := .v11
/--
The set of response headers, providing metadata such as `Content-Type`,
`Content-Length`, and caching directives.
-/
headers : Headers := .empty
deriving Inhabited, Repr
/--
HTTP response structure parameterized by body type
-/
structure Response (t : Type) where
/--
The information of the status-line of the response
-/
head : Response.Head := {}
/--
The content of the response.
-/
body : t
/--
Optional dynamic metadata attached to the response.
-/
extensions : Extensions := .empty
deriving Inhabited
/--
Builds an HTTP Response.
-/
structure Response.Builder where
/--
The information of the status-line of the response
-/
head : Head := {}
/--
Optional dynamic metadata attached to the response.
-/
extensions : Extensions := .empty
namespace Response
instance : ToString Head where
toString r :=
toString r.version ++ " " ++
toString r.status.toCode ++ " " ++
toString r.status ++ "\r\n" ++
toString r.headers ++
"\r\n"
open Internal in
instance : Encode .v11 Head where
encode buffer r :=
let buffer := Encode.encode (v := .v11) buffer r.version
let buffer := buffer.writeChar ' '
let buffer := Encode.encode (v := .v11) buffer r.status
let buffer := buffer.writeString "\r\n"
let buffer := Encode.encode (v := .v11) buffer r.headers
buffer.writeString "\r\n"
/--
Creates a new HTTP Response builder with default head (status: 200 OK, version: HTTP/1.1, empty headers)
-/
def new : Builder := { }
namespace Builder
/--
Creates a new HTTP Response builder with default head (status: 200 OK, version: HTTP/1.1, empty headers)
-/
def empty : Builder := { }
/--
Sets the HTTP status code for the response being built
-/
def status (builder : Builder) (status : Status) : Builder :=
{ builder with head := { builder.head with status := status } }
/--
Sets the headers for the response being built
-/
def headers (builder : Builder) (headers : Headers) : Builder :=
{ builder with head := { builder.head with headers } }
/--
Adds a single header to the response being built
-/
def header (builder : Builder) (key : Header.Name) (value : Header.Value) : Builder :=
{ builder with head := { builder.head with headers := builder.head.headers.insert key value } }
/--
Adds a single header to the response being built, panics if the header is invalid
-/
def header! (builder : Builder) (key : String) (value : String) : Builder :=
let key := Header.Name.ofString! key
let value := Header.Value.ofString! value
{ builder with head := { builder.head with headers := builder.head.headers.insert key value } }
/--
Inserts a typed extension value into the response being built.
-/
def extension (builder : Builder) [TypeName α] (data : α) : Builder :=
{ builder with extensions := builder.extensions.insert data }
/--
Builds and returns the final HTTP Response with the specified body
-/
def body (builder : Builder) (body : t) : Response t :=
{ head := builder.head, body := body, extensions := builder.extensions }
/--
Builds and returns the final HTTP Response.
-/
def build [EmptyCollection t] (builder : Builder) : Response t :=
{ head := builder.head, body := {}, extensions := builder.extensions }
end Builder
/--
Creates a new HTTP Response builder with the 200 status code.
-/
def ok : Builder :=
.empty |>.status .ok
/--
Creates a new HTTP Response builder with the provided status.
-/
def withStatus (status : Status) : Builder :=
.empty |>.status status
/--
Creates a new HTTP Response builder with the 404 status code.
-/
def notFound : Builder :=
.empty |>.status .notFound
/--
Creates a new HTTP Response builder with the 500 status code.
-/
def internalServerError : Builder :=
.empty |>.status .internalServerError
/--
Creates a new HTTP Response builder with the 400 status code.
-/
def badRequest : Builder :=
.empty |>.status .badRequest
/--
Creates a new HTTP Response builder with the 201 status code.
-/
def created : Builder :=
.empty |>.status .created
/--
Creates a new HTTP Response builder with the 202 status code.
-/
def accepted : Builder :=
.empty |>.status .accepted
/--
Creates a new HTTP Response builder with the 401 status code.
-/
def unauthorized : Builder :=
.empty |>.status .unauthorized
/--
Creates a new HTTP Response builder with the 403 status code.
-/
def forbidden : Builder :=
.empty |>.status .forbidden
/--
Creates a new HTTP Response builder with the 409 status code.
-/
def conflict : Builder :=
.empty |>.status .conflict
/--
Creates a new HTTP Response builder with the 503 status code.
-/
def serviceUnavailable : Builder :=
.empty |>.status .serviceUnavailable
end Response

View File

@@ -0,0 +1,634 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Internal
public section
/-!
# Status
This module defines the `Status` type, which is a representation of HTTP status codes. Status codes are three-digit
integer codes that describe the result of an HTTP request. In this implementation we do not treat status
code as extensible.
* Reference: https://httpwg.org/specs/rfc9110.html#status.codes
-/
namespace Std.Http
set_option linter.all true
open Internal
/--
HTTP Status codes. Status codes are three-digit integer codes that describe the result of an
HTTP request. In this implementation we do not treat status code as extensible.
* Reference: https://httpwg.org/specs/rfc9110.html#status.codes
-/
inductive Status where
/--
100 Continue
-/
| «continue»
/--
101 Switching Protocols
-/
| switchingProtocols
/--
102 Processing
-/
| processing
/--
103 Early Hints
-/
| earlyHints
/--
200 OK
-/
| ok
/--
201 Created
-/
| created
/--
202 Accepted
-/
| accepted
/--
203 Non-Authoritative Information
-/
| nonAuthoritativeInformation
/--
204 No Content
-/
| noContent
/--
205 Reset Content
-/
| resetContent
/--
206 Partial Content
-/
| partialContent
/--
207 Multi-Status
-/
| multiStatus
/--
208 Already Reported
-/
| alreadyReported
/--
226 IM Used
-/
| imUsed
/--
300 Multiple Choices
-/
| multipleChoices
/--
301 Moved Permanently
-/
| movedPermanently
/--
302 Found
-/
| found
/--
303 See Other
-/
| seeOther
/--
304 Not Modified
-/
| notModified
/--
305 Use Proxy
-/
| useProxy
/--
306 Unused
-/
| unused
/--
307 Temporary Redirect
-/
| temporaryRedirect
/--
308 Permanent Redirect
-/
| permanentRedirect
/--
400 Bad Request
-/
| badRequest
/--
401 Unauthorized
-/
| unauthorized
/--
402 Payment Required
-/
| paymentRequired
/--
403 Forbidden
-/
| forbidden
/--
404 Not Found
-/
| notFound
/--
405 Method Not Allowed
-/
| methodNotAllowed
/--
406 Not Acceptable
-/
| notAcceptable
/--
407 Proxy Authentication Required
-/
| proxyAuthenticationRequired
/--
408 Request Timeout
-/
| requestTimeout
/--
409 Conflict
-/
| conflict
/--
410 Gone
-/
| gone
/--
411 Length Required
-/
| lengthRequired
/--
412 Precondition Failed
-/
| preconditionFailed
/--
413 Payload Too Large
-/
| payloadTooLarge
/--
414 URI Too Long
-/
| uriTooLong
/--
415 Unsupported Media Type
-/
| unsupportedMediaType
/--
416 Range Not Satisfiable
-/
| rangeNotSatisfiable
/--
417 Expectation Failed
-/
| expectationFailed
/--
418 I'm a teapot
-/
| imATeapot
/--
421 Misdirected Request
-/
| misdirectedRequest
/--
422 Unprocessable Entity
-/
| unprocessableEntity
/--
423 Locked
-/
| locked
/--
424 Failed Dependency
-/
| failedDependency
/--
425 Too Early
-/
| tooEarly
/--
426 Upgrade Required
-/
| upgradeRequired
/--
428 Precondition Required
-/
| preconditionRequired
/--
429 Too Many Requests
-/
| tooManyRequests
/--
431 Request Header Fields Too Large
-/
| requestHeaderFieldsTooLarge
/--
451 Unavailable For Legal Reasons
-/
| unavailableForLegalReasons
/--
500 Internal Server Error
-/
| internalServerError
/--
501 Not Implemented
-/
| notImplemented
/--
502 Bad Gateway
-/
| badGateway
/--
503 Service Unavailable
-/
| serviceUnavailable
/--
504 Gateway Timeout
-/
| gatewayTimeout
/--
505 HTTP Version Not Supported
-/
| httpVersionNotSupported
/--
506 Variant Also Negotiates
-/
| variantAlsoNegotiates
/--
507 Insufficient Storage
-/
| insufficientStorage
/--
508 Loop Detected
-/
| loopDetected
/--
510 Not Extended
-/
| notExtended
/--
511 Network Authentication Required
-/
| networkAuthenticationRequired
/--
Other
-/
| other (number : UInt16)
deriving Repr, Inhabited, BEq
namespace Status
/--
Convert a Status to a numeric code. This is useful for sending the status code in a response.
-/
def toCode : Status → UInt16
  | «continue» => 100
  | switchingProtocols => 101
  | processing => 102
  | earlyHints => 103
  | ok => 200
  | created => 201
  | accepted => 202
  | nonAuthoritativeInformation => 203
  | noContent => 204
  | resetContent => 205
  | partialContent => 206
  | multiStatus => 207
  | alreadyReported => 208
  | imUsed => 226
  | multipleChoices => 300
  | movedPermanently => 301
  | found => 302
  | seeOther => 303
  | notModified => 304
  | useProxy => 305
  | unused => 306
  | temporaryRedirect => 307
  | permanentRedirect => 308
  | badRequest => 400
  | unauthorized => 401
  | paymentRequired => 402
  | forbidden => 403
  | notFound => 404
  | methodNotAllowed => 405
  | notAcceptable => 406
  | proxyAuthenticationRequired => 407
  | requestTimeout => 408
  | conflict => 409
  | gone => 410
  | lengthRequired => 411
  | preconditionFailed => 412
  | payloadTooLarge => 413
  | uriTooLong => 414
  | unsupportedMediaType => 415
  | rangeNotSatisfiable => 416
  | expectationFailed => 417
  | imATeapot => 418
  | misdirectedRequest => 421
  | unprocessableEntity => 422
  | locked => 423
  | failedDependency => 424
  | tooEarly => 425
  | upgradeRequired => 426
  | preconditionRequired => 428
  | tooManyRequests => 429
  | requestHeaderFieldsTooLarge => 431
  | unavailableForLegalReasons => 451
  | internalServerError => 500
  | notImplemented => 501
  | badGateway => 502
  | serviceUnavailable => 503
  | gatewayTimeout => 504
  | httpVersionNotSupported => 505
  | variantAlsoNegotiates => 506
  | insufficientStorage => 507
  | loopDetected => 508
  | notExtended => 510
  | networkAuthenticationRequired => 511
  | other n => n
/--
Converts a `UInt16` to `Status`. Unrecognized codes are preserved via the `other`
constructor, so `ofCode (toCode s) = s` for every status except that known codes
written as `other n` normalize to their named constructor.
-/
def ofCode : UInt16 → Status
  | 100 => .«continue»
  | 101 => .switchingProtocols
  | 102 => .processing
  | 103 => .earlyHints
  | 200 => .ok
  | 201 => .created
  | 202 => .accepted
  | 203 => .nonAuthoritativeInformation
  | 204 => .noContent
  | 205 => .resetContent
  | 206 => .partialContent
  | 207 => .multiStatus
  | 208 => .alreadyReported
  | 226 => .imUsed
  | 300 => .multipleChoices
  | 301 => .movedPermanently
  | 302 => .found
  | 303 => .seeOther
  | 304 => .notModified
  | 305 => .useProxy
  | 306 => .unused
  | 307 => .temporaryRedirect
  | 308 => .permanentRedirect
  | 400 => .badRequest
  | 401 => .unauthorized
  | 402 => .paymentRequired
  | 403 => .forbidden
  | 404 => .notFound
  | 405 => .methodNotAllowed
  | 406 => .notAcceptable
  | 407 => .proxyAuthenticationRequired
  | 408 => .requestTimeout
  | 409 => .conflict
  | 410 => .gone
  | 411 => .lengthRequired
  | 412 => .preconditionFailed
  | 413 => .payloadTooLarge
  | 414 => .uriTooLong
  | 415 => .unsupportedMediaType
  | 416 => .rangeNotSatisfiable
  | 417 => .expectationFailed
  | 418 => .imATeapot
  | 421 => .misdirectedRequest
  | 422 => .unprocessableEntity
  | 423 => .locked
  | 424 => .failedDependency
  | 425 => .tooEarly
  | 426 => .upgradeRequired
  | 428 => .preconditionRequired
  | 429 => .tooManyRequests
  | 431 => .requestHeaderFieldsTooLarge
  | 451 => .unavailableForLegalReasons
  | 500 => .internalServerError
  | 501 => .notImplemented
  | 502 => .badGateway
  | 503 => .serviceUnavailable
  | 504 => .gatewayTimeout
  | 505 => .httpVersionNotSupported
  | 506 => .variantAlsoNegotiates
  | 507 => .insufficientStorage
  | 508 => .loopDetected
  | 510 => .notExtended
  | 511 => .networkAuthenticationRequired
  | n => .other n
/--
Checks if the type of the status code is informational (1xx), meaning that the request was received
and the process is continuing.
-/
@[inline]
def isInformational (c : Status) : Bool :=
  100 ≤ c.toCode ∧ c.toCode < 200
/--
Checks if the type of the status code is success (2xx), meaning that the request was successfully
received, understood, and accepted.
* Reference: https://httpwg.org/specs/rfc9110.html#status.codes
-/
@[inline]
def isSuccess (c : Status) : Bool :=
  200 ≤ c.toCode ∧ c.toCode < 300
/--
Checks if the type of the status code is redirection (3xx), meaning that further action needs to be
taken to complete the request.
* Reference: https://httpwg.org/specs/rfc9110.html#status.codes
-/
@[inline]
def isRedirection (c : Status) : Bool :=
  300 ≤ c.toCode ∧ c.toCode < 400
/--
Checks if the type of the status code is a client error (4xx), meaning that the request contains
bad syntax or cannot be fulfilled.
* Reference: https://httpwg.org/specs/rfc9110.html#status.codes
-/
@[inline]
def isClientError (c : Status) : Bool :=
  400 ≤ c.toCode ∧ c.toCode < 500
/--
Checks if the type of the status code is a server error (5xx), meaning that the server failed to
fulfill an apparently valid request.
* Reference: https://httpwg.org/specs/rfc9110.html#status.codes
-/
@[inline]
def isServerError (c : Status) : Bool :=
  500 ≤ c.toCode ∧ c.toCode < 600
/--
Checks if the status code indicates an error (either client error 4xx or server error 5xx).
* Reference: https://httpwg.org/specs/rfc9110.html#status.codes
-/
@[inline]
def isError (c : Status) : Bool :=
  c.isClientError ∨ c.isServerError
/--
Returns the standard reason phrase for an HTTP status code as defined in RFC 9110.
For known status codes this returns the canonical phrase (e.g., "OK" for 200).
For unknown codes (`other n`), returns the numeric code as a string.
-/
def reasonPhrase : Status → String
  | .«continue» => "Continue"
  | .switchingProtocols => "Switching Protocols"
  | .processing => "Processing"
  | .earlyHints => "Early Hints"
  | .ok => "OK"
  | .created => "Created"
  | .accepted => "Accepted"
  | .nonAuthoritativeInformation => "Non-Authoritative Information"
  | .noContent => "No Content"
  | .resetContent => "Reset Content"
  | .partialContent => "Partial Content"
  | .multiStatus => "Multi-Status"
  | .alreadyReported => "Already Reported"
  | .imUsed => "IM Used"
  | .multipleChoices => "Multiple Choices"
  | .movedPermanently => "Moved Permanently"
  | .found => "Found"
  | .seeOther => "See Other"
  | .notModified => "Not Modified"
  | .useProxy => "Use Proxy"
  | .unused => "Unused"
  | .temporaryRedirect => "Temporary Redirect"
  | .permanentRedirect => "Permanent Redirect"
  | .badRequest => "Bad Request"
  | .unauthorized => "Unauthorized"
  | .paymentRequired => "Payment Required"
  | .forbidden => "Forbidden"
  | .notFound => "Not Found"
  | .methodNotAllowed => "Method Not Allowed"
  | .notAcceptable => "Not Acceptable"
  | .proxyAuthenticationRequired => "Proxy Authentication Required"
  | .requestTimeout => "Request Timeout"
  | .conflict => "Conflict"
  | .gone => "Gone"
  | .lengthRequired => "Length Required"
  | .preconditionFailed => "Precondition Failed"
  | .payloadTooLarge => "Payload Too Large"
  | .uriTooLong => "URI Too Long"
  | .unsupportedMediaType => "Unsupported Media Type"
  | .rangeNotSatisfiable => "Range Not Satisfiable"
  | .expectationFailed => "Expectation Failed"
  | .imATeapot => "I'm a teapot"
  | .misdirectedRequest => "Misdirected Request"
  | .unprocessableEntity => "Unprocessable Entity"
  | .locked => "Locked"
  | .failedDependency => "Failed Dependency"
  | .tooEarly => "Too Early"
  | .upgradeRequired => "Upgrade Required"
  | .preconditionRequired => "Precondition Required"
  | .tooManyRequests => "Too Many Requests"
  | .requestHeaderFieldsTooLarge => "Request Header Fields Too Large"
  | .unavailableForLegalReasons => "Unavailable For Legal Reasons"
  | .internalServerError => "Internal Server Error"
  | .notImplemented => "Not Implemented"
  | .badGateway => "Bad Gateway"
  | .serviceUnavailable => "Service Unavailable"
  | .gatewayTimeout => "Gateway Timeout"
  | .httpVersionNotSupported => "HTTP Version Not Supported"
  | .variantAlsoNegotiates => "Variant Also Negotiates"
  | .insufficientStorage => "Insufficient Storage"
  | .loopDetected => "Loop Detected"
  | .notExtended => "Not Extended"
  | .networkAuthenticationRequired => "Network Authentication Required"
  | .other n => Nat.repr n.toNat
instance : ToString Status where
toString := reasonPhrase
instance : Encode .v11 Status where
encode buffer status := buffer
|>.writeString (toString <| toCode status)
|>.writeChar ' '
|>.writeString status.reasonPhrase
end Std.Http.Status

View File

@@ -0,0 +1,75 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Data.URI.Basic
public import Std.Internal.Http.Data.URI.Parser
public section
/-!
# URI
This module defines the `URI` and `RequestTarget` types that represent and manipulate components of
URIs as defined by RFC 3986. It provides parsing, rendering, and normalization utilities for working
with URIs and request targets in HTTP messages.
-/
namespace Std.Http.RequestTarget
set_option linter.all true
/--
Attempts to parse a `RequestTarget` from the given string.
-/
@[inline]
def parse? (string : String) : Option RequestTarget :=
(URI.Parser.parseRequestTarget <* Std.Internal.Parsec.eof).run string.toUTF8 |>.toOption
/--
Parses a `RequestTarget` from the given string. Panics if parsing fails. Use `parse?`
if you need a safe option-returning version.
-/
@[inline]
def parse! (string : String) : RequestTarget :=
match parse? string with
| some res => res
| none => panic! "invalid request target"
/--
Creates an origin-form request target from a path string.
The path should start with '/' (e.g., "/api/users" or "/search?q=test").
Panics if the string is not a valid origin-form request target.
-/
@[inline]
def originForm! (path : String) : RequestTarget :=
match parse? path with
| some (.originForm p q) => .originForm p q
| _ => panic! s!"invalid origin-form request target: {path}"
end RequestTarget
namespace URI
/--
Attempts to parse a `URI` from the given string.
-/
@[inline]
def parse? (string : String) : Option URI :=
(Parser.parseURI <* Std.Internal.Parsec.eof).run string.toUTF8 |>.toOption
/--
Parses a `URI` from the given string. Panics if parsing fails. Use `parse?` if you need a safe
option-returning version.
-/
@[inline]
def parse! (string : String) : URI :=
match parse? string with
| some res => res
| none => panic! "invalid URI"
end URI

View File

@@ -0,0 +1,764 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
import Init.Data.ToString
public import Std.Net
public import Std.Internal.Http.Data.URI.Encoding
public import Std.Internal.Http.Internal
public section
/-!
# URI Structure
This module defines the complete URI structure following RFC 3986, including schemes, authorities,
paths, queries, fragments, and request targets.
All text components use the encoding types from `Std.Http.URI.Encoding` to ensure proper
percent-encoding is maintained throughout.
-/
namespace Std.Http
set_option linter.all true
open Internal
namespace URI
/--
URI scheme identifier (e.g., "http", "https", "ftp").
-/
abbrev Scheme := { s : String // IsLowerCase s }
instance : Inhabited Scheme where
  -- The empty string is vacuously lowercase; `isLowerCase_empty` supplies the proof.
  default := ⟨"", .isLowerCase_empty⟩
/--
User information component containing the username and optional password. Both fields store decoded
(unescaped) values.
-/
structure UserInfo where
/--
The username (decoded).
-/
username : String
/--
The optional password (decoded).
-/
password : Option String
deriving Inhabited, Repr
/--
Checks if a character is valid for use in a domain name.
Valid characters are ASCII alphanumeric, hyphens, and dots.
-/
def isValidDomainNameChar (c : Char) : Bool :=
c.isAlphanum || c == '-' || c == '.'
/--
Proposition that asserts all characters in a string are valid domain name characters.
-/
abbrev IsValidDomainName (s : String) : Prop :=
s.toList.all isValidDomainNameChar
/--
A domain name represented as a validated, lowercase-normalized string.
Only ASCII alphanumeric characters, hyphens, and dots are allowed.
Internationalized domain names must be converted to punycode before use.
-/
abbrev DomainName := { s : String // IsLowerCase s ∧ IsValidDomainName s }
/--
Host component of a URI, supporting domain names and IP addresses.
-/
inductive Host
/--
A domain name (lowercase-normalized).
-/
| name (name : DomainName)
/--
An IPv4 address.
-/
| ipv4 (ipv4 : Net.IPv4Addr)
/--
An IPv6 address.
-/
| ipv6 (ipv6 : Net.IPv6Addr)
deriving Inhabited
instance : Repr Host where
  reprPrec x prec :=
    -- NOTE(review): the scraped source read `if prec 1024` — the comparison operator
    -- was lost in extraction; `≥ 1024` (Lean's max application precedence) is the
    -- conventional threshold in hand-written `Repr` instances. Confirm against upstream.
    let nestPrec := (if prec ≥ 1024 then 1 else 2)
    let name := "Std.Http.URI.Host"
    let repr (ctr : String) a :=
      Repr.addAppParen (Format.nest nestPrec (.text s!"{name}.{ctr}" ++ .line ++ a)).group prec
    match x with
    | Host.name a => repr "name" (reprArg a)
    | Host.ipv4 a => repr "ipv4" (toString a)
    | Host.ipv6 a => repr "ipv6" (toString a)
instance : ToString Host where
toString
| .name n => n
| .ipv4 addr => toString addr
| .ipv6 addr => s!"[{toString addr}]"
/--
TCP port number.
-/
abbrev Port := UInt16
/--
The authority component of a URI, identifying the network location of the resource.
Reference: https://www.rfc-editor.org/rfc/rfc3986.html#section-3.2
-/
structure Authority where
/--
Optional user information (username and password).
-/
userInfo : Option UserInfo := none
/--
The host identifying the network location.
-/
host : Host
/--
Optional port number for connecting to the host.
-/
port : Option Port := none
deriving Inhabited, Repr
instance : ToString Authority where
  toString auth :=
    -- userinfo renders as `user[:pass]@`; both parts are percent-encoded on output.
    -- (Restored the anonymous-constructor brackets `⟨ ⟩` lost in extraction.)
    let userPart := match auth.userInfo with
      | none => ""
      | some ⟨name, some pass⟩ => s!"{toString (EncodedString.encode name (r := isUnreserved))}:{toString (EncodedString.encode pass (r := isUnreserved))}@"
      | some ⟨name, none⟩ => s!"{toString (EncodedString.encode name (r := isUnreserved))}@"
    let hostPart := toString auth.host
    let portPart := match auth.port with
      | none => ""
      | some p => s!":{p}"
    s!"{userPart}{hostPart}{portPart}"
namespace Authority
end Authority
/--
Hierarchical path component of a URI. Each segment is stored as an `EncodedSegment` to maintain
proper percent-encoding.
-/
structure Path where
/--
The path segments making up the hierarchical structure (each segment is percent-encoded).
-/
segments : Array (EncodedSegment)
/--
Whether the path is absolute (begins with '/') or relative.
-/
absolute : Bool
deriving Inhabited, Repr
-- Renders segments joined by '/', with a leading '/' for absolute paths.
instance : ToString Path where
toString path :=
let result := String.intercalate "/" (path.segments.map toString).toList
if path.absolute then "/" ++ result else result
namespace Path
/--
Returns true if the path has no segments.
-/
def isEmpty (p : Path) : Bool := p.segments.isEmpty
/--
Returns the parent path by removing the last segment. If the path is empty, returns the path unchanged.
-/
def parent (p : Path) : Path :=
if p.segments.isEmpty then p
else { p with segments := p.segments.pop }
/--
Joins two paths. If the second path is absolute, it is returned as-is. Otherwise, the second path's
segments are appended to the first path.
-/
def join (p1 : Path) (p2 : Path) : Path :=
if p2.absolute then p2
else { p1 with segments := p1.segments ++ p2.segments }
/--
Appends a single segment to the path. The segment will be percent-encoded.
-/
def append (p : Path) (segment : String) : Path :=
{ p with segments := p.segments.push (EncodedSegment.encode segment) }
/--
Appends an already-encoded segment to the path.
-/
def appendEncoded (p : Path) (segment : EncodedSegment) : Path :=
{ p with segments := p.segments.push segment }
/--
Removes dot segments from the path according to RFC 3986 Section 5.2.4. This handles "."
(current directory) and ".." (parent directory) segments.
-/
def normalize (p : Path) : Path :=
-- `output` is built in reverse; reversed once at the end.
let rec loop (input : List (EncodedSegment)) (output : List (EncodedSegment)) : List (EncodedSegment) :=
match input with
| [] =>
output.reverse
| segStr :: rest =>
if toString segStr == "." then
loop rest output
else if toString segStr == ".." then
-- ".." with no accumulated segments is dropped (cannot go above root).
match output with
| [] => loop rest []
| _ :: tail => loop rest tail
else
loop rest (segStr :: output)
{ p with segments := (loop p.segments.toList []).toArray }
/--
Returns the path segments as decoded strings.
Segments that cannot be decoded as UTF-8 are returned as their raw encoded form.
-/
def toDecodedSegments (p : Path) : Array String :=
p.segments.map fun seg =>
seg.decode.getD (toString seg)
end Path
/--
Query string represented as an array of key-value pairs. Both keys and values are stored as
`EncodedQueryParam` for proper application/x-www-form-urlencoded encoding. Values are optional to
support parameters without values (e.g., "?flag"). Order is preserved based on insertion order.
-/
@[expose]
def Query := Array (EncodedQueryParam × Option EncodedQueryParam)
deriving Repr, Inhabited
namespace Query
/--
Extracts all unique query parameter names.
-/
@[expose]
def names (query : Query) : Array EncodedQueryParam :=
query.map (fun p => p.fst)
|> Array.toList
|> List.eraseDups
|> List.toArray
/--
Extracts all query parameter values.
-/
@[expose]
def values (query : Query) : Array (Option EncodedQueryParam) :=
query.map (fun p => p.snd)
/--
Returns the query as an array of (key, value) pairs. This is an identity function since Query is
already an array of pairs.
-/
@[expose]
def toArray (query : Query) : Array (EncodedQueryParam × Option EncodedQueryParam) :=
query
/--
Formats a query parameter as a string in the format "key" or "key=value". The key and value are
already percent-encoded as `EncodedQueryParam`.
-/
def formatQueryParam (key : EncodedQueryParam) (value : Option EncodedQueryParam) : String :=
match value with
| none => toString key
| some v => s!"{toString key}={toString v}"
/--
Finds the first value of a query parameter by key name. Returns `none` if the key is not found.
The value remains encoded as `EncodedQueryParam`.
-/
def find? (query : Query) (key : String) : Option (Option EncodedQueryParam) :=
-- The lookup key is encoded first, so comparison is on encoded bytes.
let encodedKey : EncodedQueryParam := EncodedQueryParam.encode key
let matchingKey := Array.find? (fun x => x.fst.toByteArray = encodedKey.toByteArray) query
matchingKey.map (fun x => x.snd)
/--
Finds all values of a query parameter by key name. Returns an empty array if the key is not found.
The values remain encoded as `EncodedQueryParam`.
-/
def findAll (query : Query) (key : String) : Array (Option EncodedQueryParam) :=
let encodedKey : EncodedQueryParam := EncodedQueryParam.encode key
query.filterMap (fun x =>
if x.fst.toByteArray = encodedKey.toByteArray then
some (x.snd)
else none)
/--
Adds a query parameter to the query string. Key and value are percent-encoded.
-/
def insert (query : Query) (key : String) (value : String) : Query :=
let encodedKey : EncodedQueryParam := EncodedQueryParam.encode key
let encodedValue : EncodedQueryParam := EncodedQueryParam.encode value
query.push (encodedKey, some encodedValue)
/--
Adds an already-encoded query parameter to the query string.
-/
def insertEncoded (query : Query) (key : EncodedQueryParam) (value : Option EncodedQueryParam) : Query :=
query.push (key, value)
/--
Creates an empty query string.
-/
def empty : Query := #[]
/--
Creates a query string from a list of key-value pairs.
-/
def ofList (pairs : List (EncodedQueryParam × Option EncodedQueryParam)) : Query :=
pairs.toArray
/--
Checks if a query parameter exists.
-/
def contains (query : Query) (key : String) : Bool :=
let encodedKey : EncodedQueryParam := EncodedQueryParam.encode key
query.any (fun x => x.fst.toByteArray = encodedKey.toByteArray)
/--
Removes all occurrences of a query parameter by key name. The key is encoded
before comparison, so matching is performed on the stored encoded bytes.
-/
def erase (query : Query) (key : String) : Query :=
  let encodedKey : EncodedQueryParam := EncodedQueryParam.encode key
  -- Keep only entries whose encoded key differs from the target key.
  query.filter (fun x => x.fst.toByteArray ≠ encodedKey.toByteArray)
/--
Gets the first value of a query parameter by key name, decoded as a string.
Returns `none` if the key is not found or if the value cannot be decoded as UTF-8.
-/
def get (query : Query) (key : String) : Option String :=
match query.find? key with
| none => none
| some none => some "" -- Key exists but has no value
| some (some encoded) => encoded.decode
/--
Gets the first value of a query parameter by key name, decoded as a string.
Returns the default value if the key is not found or if the value cannot be decoded.
-/
def getD (query : Query) (key : String) (default : String) : String :=
query.get key |>.getD default
/--
Sets a query parameter, replacing all existing values for that key.
Both key and value will be automatically percent-encoded.
-/
def set (query : Query) (key : String) (value : String) : Query :=
query.erase key |>.insert key value
/--
Converts the query to a properly encoded query string format.
Example: "key1=value1&key2=value2&flag"
-/
def toRawString (query : Query) : String :=
-- Note: no leading '?', unlike the `ToString` instance below.
let params := query.map (fun (k, v) => formatQueryParam k v)
String.intercalate "&" params.toList
/-- The empty collection is the empty query. -/
instance : EmptyCollection Query :=
  ⟨Query.empty⟩

/-- A singleton query from one key/value pair (both percent-encoded on insert). -/
instance : Singleton (String × String) Query :=
  ⟨fun ⟨k, v⟩ => Query.empty.insert k v⟩

/-- Insert a key/value pair into a query (both percent-encoded on insert). -/
instance : Insert (String × String) Query :=
  ⟨fun ⟨k, v⟩ q => q.insert k v⟩
-- Renders "" for an empty query, otherwise "?k=v&k2&..." with a leading '?'
-- (contrast with `toRawString`, which omits the '?').
instance : ToString Query where
toString q :=
if q.isEmpty then "" else
let encodedParams := q.toList.map fun (key, value) =>
Query.formatQueryParam key value
"?" ++ String.intercalate "&" encodedParams
end Query
end URI
/--
Complete URI structure following RFC 3986. All text components use encoded string types to ensure
proper percent-encoding.
Reference: https://www.rfc-editor.org/rfc/rfc3986.html
-/
structure URI where
/--
The URI scheme (e.g., "http", "https", "ftp").
-/
scheme : URI.Scheme
/--
Optional authority component (user info, host, and port).
-/
authority : Option URI.Authority
/--
The hierarchical path component.
-/
path : URI.Path
/--
Optional query string as key-value pairs.
-/
query : URI.Query
/--
Optional fragment identifier (the part after '#'), percent-encoded.
-/
fragment : Option String
deriving Repr, Inhabited
-- Assembles "scheme:[//authority]path[?query][#fragment]" per RFC 3986 §5.3.
instance : ToString URI where
toString uri :=
let schemePart := uri.scheme
let authorityPart := match uri.authority with
| none => ""
| some auth => s!"//{toString auth}"
let pathPart := toString uri.path
let queryPart := toString uri.query
let fragmentPart := uri.fragment.map (fun f => "#" ++ toString (URI.EncodedFragment.encode f)) |>.getD ""
s!"{schemePart}:{authorityPart}{pathPart}{queryPart}{fragmentPart}"
namespace URI
/--
Fluent builder for constructing URIs. Takes raw (unencoded) strings and handles encoding
automatically when building the final URI.
-/
structure Builder where
/--
The URI scheme (e.g., "http", "https").
-/
scheme : Option String := none
/--
User information (username and optional password).
-/
userInfo : Option UserInfo := none
/--
The host component.
-/
host : Option Host := none
/--
The port number.
-/
port : Option URI.Port := none
/--
Path segments (will be encoded when building).
-/
pathSegments : Array String := #[]
/--
Query parameters as (key, optional value) pairs (will be encoded when building).
-/
query : Array (String × Option String) := #[]
/--
Fragment identifier (will be encoded when building).
-/
fragment : Option String := none
deriving Inhabited
namespace Builder
/--
Creates an empty URI builder.
-/
def empty : Builder := {}
/--
Sets the URI scheme (e.g., "http", "https").
-/
def setScheme (b : Builder) (scheme : String) : Builder :=
{ b with scheme := some scheme }
/--
Sets the user information with username and optional password.
The strings will be automatically percent-encoded.
-/
-- Encoding happens in `build`/`ToString`, not here; raw strings are stored.
def setUserInfo (b : Builder) (username : String) (password : Option String := none) : Builder :=
{ b with userInfo := some {
username := username
password := password
}
}
/--
Sets the host as a domain name, returning `none` if the name contains invalid characters.
The domain name will be automatically lowercased.
Only ASCII alphanumeric characters, hyphens, and dots are allowed.
Internationalized domain names must be converted to punycode before use.
-/
def setHost? (b : Builder) (name : String) : Option Builder :=
  let lower := name.toLower
  -- Decidable validity check; the proof `h` is packaged into the host value
  -- together with the lowercase-ness proof.
  if h : IsValidDomainName lower then
    some { b with host := some (Host.name ⟨lower, IsLowerCase.isLowerCase_toLower, h⟩) }
  else
    none
/--
Sets the host as a domain name, panicking if the name contains invalid characters.
The domain name will be automatically lowercased.
Only ASCII alphanumeric characters, hyphens, and dots are allowed.
Internationalized domain names must be converted to punycode before use.
-/
def setHost! (b : Builder) (name : String) : Builder :=
match b.setHost? name with
| some b => b
| none => panic! s!"invalid domain name: {name.quote}"
/--
Sets the host as an IPv4 address.
-/
def setHostIPv4 (b : Builder) (addr : Net.IPv4Addr) : Builder :=
{ b with host := some (Host.ipv4 addr) }
/--
Sets the host as an IPv6 address.
-/
def setHostIPv6 (b : Builder) (addr : Net.IPv6Addr) : Builder :=
{ b with host := some (Host.ipv6 addr) }
/--
Sets the port number.
-/
def setPort (b : Builder) (port : Port) : Builder :=
{ b with port := some port }
/--
Replaces all path segments. Segments will be automatically percent-encoded when building.
-/
def setPath (b : Builder) (segments : Array String) : Builder :=
{ b with pathSegments := segments }
/--
Appends a single segment to the path. The segment will be automatically percent-encoded when building.
-/
def appendPathSegment (b : Builder) (segment : String) : Builder :=
{ b with pathSegments := b.pathSegments.push segment }
/--
Adds a query parameter with a value. Both key and value will be automatically percent-encoded when
building.
-/
def addQueryParam (b : Builder) (key : String) (value : String) : Builder :=
{ b with query := b.query.push (key, some value) }
/--
Adds a query parameter without a value (flag parameter). The key will be automatically
percent-encoded when building.
-/
def addQueryFlag (b : Builder) (key : String) : Builder :=
{ b with query := b.query.push (key, none) }
/--
Replaces all query parameters. Keys and values will be automatically percent-encoded when building.
-/
def setQuery (b : Builder) (query : Array (String × Option String)) : Builder :=
{ b with query := query }
/--
Sets the fragment identifier. The fragment will be automatically percent-encoded when building.
-/
def setFragment (b : Builder) (fragment : String) : Builder :=
{ b with fragment := some fragment }
/--
Builds a complete URI from the builder state, encoding all components. Defaults to "https" scheme if
none is specified.
-/
def build (b : Builder) : URI :=
  let scheme := b.scheme.getD "https"
  -- Authority is present only when a host was set.
  let authority :=
    if b.host.isSome then
      some {
        userInfo := b.userInfo
        host := b.host.getD default
        port := b.port
      }
    else none
  -- Builder paths are always rendered as absolute.
  let path : Path := {
    segments := b.pathSegments.map EncodedSegment.encode
    absolute := true
  }
  let query :=
    b.query.map fun (k, v) =>
      (EncodedQueryParam.encode k, v.map EncodedQueryParam.encode)
  let query := URI.Query.ofList query.toList
  {
    -- Scheme is stored lowercase, with the lowercase-ness proof attached.
    scheme := ⟨scheme.toLower, IsLowerCase.isLowerCase_toLower⟩
    authority := authority
    path
    query := query
    fragment := b.fragment
  }
end Builder
end URI
namespace URI
/--
Returns a new URI with the scheme replaced. Schemes are case-insensitive
(RFC 3986 §3.1), so the lowercase form is stored together with the proof
that it is lowercase.
-/
def withScheme (uri : URI) (scheme : String) : URI :=
  { uri with scheme := ⟨scheme.toLower, IsLowerCase.isLowerCase_toLower⟩ }
/--
Returns a new URI with the authority replaced.
-/
def withAuthority (uri : URI) (authority : Option URI.Authority) : URI :=
{ uri with authority }
/--
Returns a new URI with the path replaced.
-/
def withPath (uri : URI) (path : URI.Path) : URI :=
{ uri with path }
/--
Returns a new URI with the query replaced.
-/
def withQuery (uri : URI) (query : URI.Query) : URI :=
{ uri with query }
/--
Returns a new URI with the fragment replaced.
-/
def withFragment (uri : URI) (fragment : Option String) : URI :=
{ uri with fragment }
/--
Normalizes a URI according to RFC 3986 Section 6.
-/
-- NOTE(review): the `scheme` and `authority` updates below are no-ops;
-- only dot-segment removal on the path is performed. Case and port
-- normalization (RFC 3986 §6.2.2/§6.2.3) are not applied here — confirm
-- whether that is intentional.
def normalize (uri : URI) : URI :=
{ uri with
scheme := uri.scheme
authority := uri.authority
path := uri.path.normalize
}
end URI
end URI
/--
HTTP request target forms as defined in RFC 7230 Section 5.3.
Reference: https://www.rfc-editor.org/rfc/rfc7230.html#section-5.3
-/
inductive RequestTarget where
/--
Origin-form request target (most common for HTTP requests). Consists of a path, optional query string,
and optional fragment.
Example: `/path/to/resource?key=value#section`
-/
| originForm (path : URI.Path) (query : Option URI.Query)
/--
Absolute-form request target containing a complete URI. Used when making requests through a proxy.
Example: `http://example.com:8080/path?key=value`
-/
-- The `noFrag` proof forbids fragments, which must not be sent in requests.
| absoluteForm (uri : URI) (noFrag : uri.fragment.isNone)
/--
Authority-form request target (used for CONNECT requests).
Example: `example.com:443`
-/
| authorityForm (authority : URI.Authority)
/--
Asterisk-form request target (used with OPTIONS requests).
Example: `*`
-/
| asteriskForm
deriving Inhabited, Repr
namespace RequestTarget
/--
Extracts the path component from a request target, if available.
Returns an empty relative path for targets without a path.
-/
def path : RequestTarget → URI.Path
  | .originForm p _ => p
  | .absoluteForm u _ => u.path
  | _ => { segments := #[], absolute := false }
/--
Extracts the query component from a request target, if available.
Returns an empty array for targets without a query.
-/
def query : RequestTarget → URI.Query
  | .originForm _ q => q.getD URI.Query.empty
  | .absoluteForm u _ => u.query
  | _ => URI.Query.empty
/--
Extracts the authority component from a request target, if available.
-/
def authority? : RequestTarget → Option URI.Authority
  | .authorityForm a => some a
  | .absoluteForm u _ => u.authority
  | _ => none
/--
Extracts the fragment component from a request target, if available.
Origin-form targets never carry a fragment; absolute-form URIs are
constructed with a `fragment.isNone` proof, so this is `none` there too.
-/
def fragment? : RequestTarget → Option String
  | .originForm _ _ => none
  | .absoluteForm u _ => u.fragment
  | _ => none
/--
Extracts the full URI if the request target is in absolute form.
-/
def uri? : RequestTarget → Option URI
  | .absoluteForm u _ => some u
  | _ => none
-- Serializes each target form per RFC 7230 §5.3: origin, absolute,
-- authority, and asterisk forms.
instance : ToString RequestTarget where
toString
| .originForm path query =>
let pathStr := toString path
let queryStr := query.map toString |>.getD ""
s!"{pathStr}{queryStr}"
| .absoluteForm uri _ => toString uri
| .authorityForm auth => toString auth
| .asteriskForm => "*"
-- HTTP/1.1 wire encoding: the textual form is written directly.
instance : Encode .v11 RequestTarget where
encode buffer target := buffer.writeString (toString target)
end Std.Http.RequestTarget

View File

@@ -0,0 +1,754 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
import Init.Grind
import Init.While
import Init.Data.SInt.Lemmas
import Init.Data.UInt.Lemmas
import Init.Data.UInt.Bitwise
import Init.Data.Array.Lemmas
public import Init.Data.String
public section
namespace Std.Http.URI
set_option linter.all true
/-!
# URI Encoding
This module provides utilities for percent-encoding URI components according to RFC 3986. It includes
character validation, encoding/decoding functions, and types that maintain encoding invariants through
Lean's dependent type system.
-/
/--
Checks if a byte represents an ASCII character (value < 128).
-/
-- ASCII is exactly the 7-bit range 0x00–0x7F.
def isAscii (c : UInt8) : Bool :=
c < 128
/--
Checks if a byte is a hexadecimal digit (0-9, a-f, or A-F). Note: This accepts both lowercase and
uppercase hex digits.
-/
def isHexDigit (c : UInt8) : Bool :=
  (c ≥ '0'.toUInt8 && c ≤ '9'.toUInt8) ||
  (c ≥ 'a'.toUInt8 && c ≤ 'f'.toUInt8) ||
  (c ≥ 'A'.toUInt8 && c ≤ 'F'.toUInt8)
/--
Checks if a byte is an alphanumeric digit (0-9, a-z, or A-Z).
-/
def isAlphaNum (c : UInt8) : Bool :=
  (c ≥ '0'.toUInt8 && c ≤ '9'.toUInt8) ||
  (c ≥ 'a'.toUInt8 && c ≤ 'z'.toUInt8) ||
  (c ≥ 'A'.toUInt8 && c ≤ 'Z'.toUInt8)
/--
Checks if a byte is an unreserved character according to RFC 3986. Unreserved characters are:
alphanumeric, hyphen, period, underscore, and tilde.
-/
def isUnreserved (c : UInt8) : Bool :=
isAlphaNum c ||
(c = '-'.toUInt8 || c = '.'.toUInt8 || c = '_'.toUInt8 || c = '~'.toUInt8)
/--
Checks if a byte is a sub-delimiter character according to RFC 3986.
Sub-delimiters are: `!`, `$`, `&`, `'`, `(`, `)`, `*`, `+`, `,`, `;`, `=`.
-/
def isSubDelims (c : UInt8) : Bool :=
c = '!'.toUInt8 || c = '$'.toUInt8 || c = '&'.toUInt8 || c = '\''.toUInt8 ||
c = '('.toUInt8 || c = ')'.toUInt8 || c = '*'.toUInt8 || c = '+'.toUInt8 ||
c = ','.toUInt8 || c = ';'.toUInt8 || c = '='.toUInt8
/--
Checks if a byte is a valid path character (`pchar`) according to RFC 3986.
`pchar = unreserved / pct-encoded / sub-delims / ":" / "@"`
Note: The percent-encoding (`pct-encoded`) is handled separately by `isEncodedChar`,
so this predicate only covers the non-percent characters.
-/
def isPChar (c : UInt8) : Bool :=
isUnreserved c || isSubDelims c || c = ':'.toUInt8 || c = '@'.toUInt8
/--
Checks if a byte is a valid character in a URI query component according to RFC 3986.
`query = *( pchar / "/" / "?" )`
-/
def isQueryChar (c : UInt8) : Bool :=
isPChar c || c = '/'.toUInt8 || c = '?'.toUInt8
/--
Checks if a byte is a valid character in a URI fragment component according to RFC 3986.
`fragment = *( pchar / "/" / "?" )`
-/
-- Identical grammar to `isQueryChar`; kept separate to mirror the RFC rules.
def isFragmentChar (c : UInt8) : Bool :=
isPChar c || c = '/'.toUInt8 || c = '?'.toUInt8
/--
Checks if a byte is a valid character in a URI userinfo component according to RFC 3986.
`userinfo = *( unreserved / pct-encoded / sub-delims / ":" )`
-/
def isUserInfoChar (c : UInt8) : Bool :=
isUnreserved c || isSubDelims c || c = ':'.toUInt8
/--
Checks if a byte is a valid character in a percent-encoded URI component. Valid characters are
ASCII bytes that satisfy `rule`, are hex digits, or are the percent sign (for escape sequences).
-/
def isEncodedChar (rule : UInt8 → Bool) (c : UInt8) : Bool :=
  -- Decidable propositions coerce to `Bool` via `decide`.
  isAscii c ∧ (rule c ∨ isHexDigit c ∨ c = '%'.toUInt8)
/--
Checks if a byte is valid in a percent-encoded query string component. Extends `isEncodedChar` to also
allow '+' which represents space in application/x-www-form-urlencoded format.
-/
def isEncodedQueryChar (rule : UInt8 → Bool) (c : UInt8) : Bool :=
  isEncodedChar rule c ∨ c = '+'.toUInt8
/--
Checks if all characters in a `ByteArray` are allowed in an encoded URI component. This is a fast check
that only verifies the character set, not full encoding validity.
-/
@[inline]
abbrev isAllowedEncodedChars (rule : UInt8 → Bool) (s : ByteArray) : Prop :=
  s.data.all (isEncodedChar rule)

-- Decidability by delegating to the underlying `Array.all` boolean check.
instance : Decidable (isAllowedEncodedChars r s) :=
  inferInstanceAs (Decidable (s.data.all (isEncodedChar r) = true))
/--
Checks if all characters in a `ByteArray` are allowed in an encoded query parameter. Allows '+' as an
alternative encoding for space (application/x-www-form-urlencoded).
-/
@[inline]
abbrev isAllowedEncodedQueryChars (rule : UInt8 → Bool) (s : ByteArray) : Prop :=
  s.data.all (isEncodedQueryChar rule)

-- Decidability by delegating to the underlying `Array.all` boolean check.
instance : Decidable (isAllowedEncodedQueryChars r s) :=
  inferInstanceAs (Decidable (s.data.all (isEncodedQueryChar r) = true))
/--
Validates that all percent signs in a byte array are followed by exactly two hexadecimal digits.
This ensures proper percent-encoding according to RFC 3986.
For example:
- `%20` is valid (percent followed by two hex digits)
- `%` is invalid (percent with no following digits)
- `%2` is invalid (percent followed by only one digit)
- `%GG` is invalid (percent followed by non-hex characters)
-/
def isValidPercentEncoding (ba : ByteArray) : Bool :=
-- Linear scan; skips 3 bytes after a valid escape, 1 byte otherwise.
let rec loop (i : Nat) : Bool :=
if h : i < ba.size then
let c := ba[i]'h
if c = '%'.toUInt8 then
if h₂ : i + 2 < ba.size then
let d1 := ba[i + 1]'(by omega)
let d2 := ba[i + 2]'h₂
if isHexDigit d1 && isHexDigit d2 then
loop (i + 3)
else false
else false
else loop (i + 1)
else true
termination_by ba.size - i
loop 0
/--
Converts a nibble (4-bit value, 0-15) to its hexadecimal digit representation. Returns '0'-'9' for
values 0-9, and 'A'-'F' for values 10-15.
-/
-- Always emits uppercase letters, per RFC 3986 §2.1's preferred form.
def hexDigit (n : UInt8) : UInt8 :=
if n < 10 then (n + '0'.toUInt8)
else (n - 10 + 'A'.toUInt8)
/--
Converts a hexadecimal digit character to its numeric value (0-15).
Returns `none` if the character is not a valid hex digit.
-/
def hexDigitToUInt8? (c : UInt8) : Option UInt8 :=
  if c ≥ '0'.toUInt8 && c ≤ '9'.toUInt8 then
    some (c - '0'.toUInt8)
  else if c ≥ 'a'.toUInt8 && c ≤ 'f'.toUInt8 then
    some (c - 'a'.toUInt8 + 10)
  else if c ≥ 'A'.toUInt8 && c ≤ 'F'.toUInt8 then
    some (c - 'A'.toUInt8 + 10)
  else
    none
-- NOTE(review): the extraction that produced this text appears to have
-- stripped Unicode tokens (≤, ⟨…⟩, ∀, →, ¬, ∈) from several proof bodies
-- and statements below. Restore these against the upstream source before
-- attempting to compile; the proofs are left byte-identical here.
private theorem isAllowedEncodedChars.push {bs : ByteArray} (h : isAllowedEncodedChars r bs) (h₁ : isEncodedChar r c) :
isAllowedEncodedChars r (bs.push c) := by
simpa [isAllowedEncodedChars, ByteArray.push, Array.all_push, And.intro h h₁]
private theorem isAllowedEncodedQueryChars.push {bs : ByteArray} (h : isAllowedEncodedQueryChars r bs) (h₁ : isEncodedQueryChar r c) :
isAllowedEncodedQueryChars r (bs.push c) := by
simpa [isAllowedEncodedQueryChars, ByteArray.push, Array.all_push, And.intro h h₁]
-- Every valid encoded character is ASCII (first conjunct of `isEncodedChar`).
private theorem isEncodedChar_isAscii (c : UInt8) (h : isEncodedChar r c) : isAscii c := by
simp [isEncodedChar, isAscii] at *
exact h.left
private theorem isEncodedQueryChar_isAscii (c : UInt8) (h : isEncodedQueryChar r c) : isAscii c := by
unfold isEncodedQueryChar isAscii at *
simp at h
rcases h
next h => exact isEncodedChar_isAscii c h
next h => subst_vars; decide
-- `hexDigit` of a nibble (< 16) is a hex-digit character.
private theorem hexDigit_isHexDigit (h₀ : x < 16) : isHexDigit (hexDigit x) := by
unfold hexDigit isHexDigit
have h₁ : x.toNat < 16 := h₀
split <;> simp [Char.toUInt8]
next p =>
have h₂ : x.toNat < 10 := p
have h₂ : 48 x.toNat + 48 := by omega
have h₃ : x.toNat + 48 57 := by omega
have h₄ : x.toNat + 48 < 256 := by omega
refine Or.inl (Or.inl ?_, ?_)
· exact (UInt8.ofNat_le_iff_le (by decide) h₄ |>.mpr h₂)
· exact (UInt8.ofNat_le_iff_le h₄ (by decide) |>.mpr h₃)
next p =>
have h₂ : ¬(x.toNat < 10) := p
have h₃ : 65 x.toNat - 10 + 65 := by omega
have h₅ : x.toNat - 10 + 65 70 := by omega
have h₄ : x.toNat - 10 + 65 < 256 := by omega
refine Or.inr ?_, ?_
· simpa [UInt8.ofNat_sub (by omega : 10 x.toNat)] using
UInt8.ofNat_le_iff_le (by decide : 65 < 256) h₄ |>.mpr h₃
· simpa [UInt8.ofNat_add, UInt8.ofNat_sub (by omega : 10 x.toNat)] using
UInt8.ofNat_le_iff_le h₄ (by decide : 70 < 256) |>.mpr h₅
private theorem isHexDigit_isAscii {c : UInt8} (h : isHexDigit c) : isAscii c := by
simp [isHexDigit, isAscii, Char.toUInt8] at *
rcases h with h1, h2 | h1, h2
· exact UInt8.lt_of_le_of_lt h2 (by decide)
next h => exact UInt8.lt_of_le_of_lt h.right (by decide)
· exact UInt8.lt_of_le_of_lt h2 (by decide)
private theorem isHexDigit_isEncodedChar {c : UInt8} (h : isHexDigit c) : isEncodedChar r c := by
unfold isEncodedChar
simp at *
exact And.intro (isHexDigit_isAscii h) (Or.inr (Or.inl h))
private theorem isHexDigit_isEncodedQueryChar {c : UInt8} (h : isHexDigit c) : isEncodedQueryChar r c := by
unfold isEncodedQueryChar isEncodedChar
simp at *
exact Or.inl (And.intro (isHexDigit_isAscii h) (Or.inr (Or.inl h)))
-- Monotonicity of `Array.all` under pointwise implication of predicates.
theorem all_of_all_of_imp {b : ByteArray} (h : b.data.all p) (imp : c, p c q c) : b.data.all q := by
rw [Array.all_eq] at *
simp at *
intro i x
exact (imp b.data[i]) (h i x)
-- UTF-8 encoding of an all-ASCII byte list is the identity.
private theorem autf8EncodeChar_flatMap_ascii {a : List UInt8}
(is_ascii_list : (x : UInt8), x a x < 128) :
List.flatMap (fun a => String.utf8EncodeChar (Char.ofUInt8 a)) a = a := by
have h_encode {i : UInt8} (h : i < 128) : String.utf8EncodeChar (Char.ofUInt8 i) = [i] := by
simp [Char.ofUInt8, String.utf8EncodeChar, show ¬127 < i.toNat from Nat.not_lt_of_le (Nat.le_pred_of_lt h)]
induction a with
| nil => simp
| cons head tail ih =>
simp [List.flatMap_cons]
rw [h_encode]
· simp
rw [ih]
intro x hx
exact is_ascii_list x (by simp [hx])
· exact is_ascii_list head (by simp)
private theorem List.toByteArray_loop_eq (xs : List UInt8) (acc : ByteArray) :
(List.toByteArray.loop xs acc).data = acc.data ++ xs.toArray := by
induction xs generalizing acc with
| nil => simp [List.toByteArray.loop]
| cons x xs ih => simp [List.toByteArray.loop, ih, Array.push]
private theorem ByteArray.toList_toByteArray (ba : ByteArray) :
ba.data.toList.toByteArray = ba := by
cases ba with
| mk data =>
simp [List.toByteArray]
apply ByteArray.ext
simp [List.toByteArray_loop_eq, ByteArray.empty]
decide
-- Any all-ASCII byte array is valid UTF-8 (witnessed by mapping each byte
-- to the corresponding one-byte character).
theorem ascii_is_valid_utf8 (ba : ByteArray) (s : ba.data.all isAscii) : ByteArray.IsValidUTF8 ba := by
refine ba.data.toList.map Char.ofUInt8, ?_
rw [List.utf8Encode]
simp only [List.flatMap_map]
have is_ascii : (x : UInt8), x ba.data.toList x < 128 := by
let is_ascii := Array.all_eq_true_iff_forall_mem.mp s
simp [isAscii] at is_ascii
intro x hx
exact is_ascii x (by simp_all)
rw [autf8EncodeChar_flatMap_ascii is_ascii]
exact ByteArray.toList_toByteArray ba |>.symm
/--
A percent-encoded URI component with a compile-time proof that it contains only valid encoded characters.
This provides type-safe URI encoding without runtime validation.
The invariant guarantees that the string contains only characters accepted by the rule `r`, hex digits,
and percent signs (for escape sequences).
-/
structure EncodedString (r : UInt8 → Bool) where
  private mk ::
  /--
  The underlying byte array containing the percent-encoded data.
  -/
  toByteArray : ByteArray
  /--
  Proof that all characters in the byte array are valid encoded characters.
  -/
  valid : isAllowedEncodedChars r toByteArray
namespace EncodedString
/--
Creates an empty encoded string.
-/
-- NOTE(review): the anonymous-constructor brackets ⟨…⟩ appear to have been
-- stripped from this definition during extraction; restore against upstream.
def empty : EncodedString r :=
.empty, by simp []; exact fun i h => by contradiction
instance : Inhabited (EncodedString r) where
default := EncodedString.empty
/--
Appends a single encoded character to an encoded string. The proof `h` that
the character is a valid encoded character maintains the invariant.
-/
private def push (s : EncodedString r) (c : UInt8) (h : isEncodedChar r c) : EncodedString r :=
  ⟨s.toByteArray.push c, isAllowedEncodedChars.push s.valid h⟩
/--
Converts a byte to its percent-encoded hexadecimal representation (%XX) and appends it. For example,
a space character (0x20) becomes "%20".
-/
private def byteToHex (b : UInt8) (s : EncodedString r) : EncodedString r :=
  let ba := s.toByteArray.push '%'.toUInt8
    |>.push (hexDigit (b >>> 4))
    |>.push (hexDigit (b &&& 0xF))
  -- Each of the three pushed bytes is a valid encoded character:
  -- '%' by definition, and both nibbles become hex digits.
  let valid := by
    have h1 : isEncodedChar r '%'.toUInt8 :=
      by simp [isEncodedChar]; decide
    have h2 : isEncodedChar r (hexDigit (b >>> 4)) :=
      let h₀ := hexDigit_isHexDigit (BitVec.toNat_ushiftRight_lt b.toBitVec 4 (by decide))
      isHexDigit_isEncodedChar h₀
    have h3 : isEncodedChar r (hexDigit (b &&& 0xF)) :=
      let h₀ := hexDigit_isHexDigit (@UInt8.and_lt_add_one b 0xF (by decide))
      isHexDigit_isEncodedChar h₀
    exact isAllowedEncodedChars.push (isAllowedEncodedChars.push (isAllowedEncodedChars.push s.valid h1) h2) h3
  ⟨ba, valid⟩
/--
Encodes a raw string into an `EncodedString` with automatic proof construction. ASCII characters
accepted by the rule `r` are kept as-is, while all other bytes are percent-encoded.
-/
def encode (s : String) : EncodedString r :=
  -- Folds over the UTF-8 bytes; non-ASCII bytes always take the escape path.
  s.toUTF8.foldl (init := EncodedString.empty) fun acc c =>
    if h : isAscii c ∧ r c then
      acc.push c (by simp [isEncodedChar]; exact And.intro h.left (Or.inl h.right))
    else
      byteToHex c acc
/--
Attempts to create an `EncodedString` from a `ByteArray`. Returns `some` if the byte array contains only
valid encoded characters and all percent signs are followed by exactly two hex digits, `none` otherwise.
-/
def ofByteArray? (ba : ByteArray) : Option (EncodedString r) :=
  if h : isAllowedEncodedChars r ba then
    if isValidPercentEncoding ba then some ⟨ba, h⟩ else none
  else none
/--
Creates an `EncodedString` from a `ByteArray`, panicking if the byte array is invalid.
-/
def ofByteArray! (ba : ByteArray) : EncodedString r :=
match ofByteArray? ba with
| some es => es
| none => panic! "invalid encoded string"
/--
Creates an `EncodedString` from a `String` by checking if it's already a valid percent-encoded string.
Returns `some` if valid, `none` otherwise.
-/
-- Validation is on the string's UTF-8 bytes.
def ofString? (s : String) : Option (EncodedString r) :=
ofByteArray? s.toUTF8
/--
Creates an `EncodedString` from a `String`, panicking if the string is not a valid percent-encoded string.
-/
def ofString! (s : String) : EncodedString r :=
ofByteArray! s.toUTF8
/--
Creates an `EncodedString` from a `ByteArray` with compile-time proofs.
Use this when you have proofs that the byte array is valid.
-/
def new (ba : ByteArray) (valid : isAllowedEncodedChars r ba) (_validEncoding : isValidPercentEncoding ba) : EncodedString r :=
  ⟨ba, valid⟩
instance : ToString (EncodedString r) where
  -- Safe by construction: every valid encoded byte is ASCII, so the raw
  -- bytes are valid UTF-8 and can be wrapped into a `String` directly.
  toString es := ⟨es.toByteArray, ascii_is_valid_utf8 es.toByteArray (all_of_all_of_imp es.valid (fun c h => by simp [isEncodedChar] at h; exact h.left))⟩
/--
Decodes an `EncodedString` back to a regular `String`. Converts percent-encoded sequences (e.g., "%20")
back to their original characters. Returns `none` if the decoded bytes are not valid UTF-8.
-/
def decode (es : EncodedString r) : Option String := Id.run do
  let mut decoded : ByteArray := ByteArray.empty
  let rawBytes := es.toByteArray
  let len := rawBytes.size
  let mut i := 0
  let percent := '%'.toNat.toUInt8
  while h : i < len do
    let c := rawBytes[i]
    -- A '%' followed by two hex digits is decoded to one byte; malformed
    -- escapes are copied through unchanged.
    (decoded, i) := if h₁ : c == percent ∧ i + 1 < len then
      let h1 := rawBytes[i + 1]
      if let some hd1 := hexDigitToUInt8? h1 then
        if h₂ : i + 2 < len then
          let h2 := rawBytes[i + 2]
          if let some hd2 := hexDigitToUInt8? h2 then
            (decoded.push (hd1 * 16 + hd2), i + 3)
          else
            (((decoded.push c).push h1).push h2, i + 3)
        else
          ((decoded.push c).push h1, i + 2)
      else
        ((decoded.push c).push h1, i + 2)
    else
      (decoded.push c, i + 1)
  return String.fromUTF8? decoded
-- `Repr` shows the decoded textual form of the encoded string.
instance : Repr (EncodedString r) where
reprPrec es n := reprPrec (toString es) n
-- Equality and hashing compare the raw encoded bytes (the proof field is
-- irrelevant to identity).
instance : BEq (EncodedString r) where
beq x y := x.toByteArray = y.toByteArray
instance : Hashable (EncodedString r) where
hash x := Hashable.hash x.toByteArray
end EncodedString
/--
A percent-encoded query string component with a compile-time proof that it contains only valid encoded
query characters. Extends `EncodedString` to support the '+' character for spaces, following the
application/x-www-form-urlencoded format.
This type is specifically designed for encoding query parameters where spaces can be represented as '+'
instead of "%20".
-/
structure EncodedQueryString (r : UInt8 → Bool) where
  private mk ::
  /--
  The underlying byte array containing the percent-encoded query data.
  -/
  toByteArray : ByteArray
  /--
  Proof that all characters in the byte array are valid encoded query characters.
  -/
  valid : isAllowedEncodedQueryChars r toByteArray
namespace EncodedQueryString
/--
Creates an empty encoded query string.
-/
-- NOTE(review): the anonymous-constructor brackets ⟨…⟩ appear to have been
-- stripped from this definition during extraction; restore against upstream.
def empty : EncodedQueryString r :=
.empty, by simp; intro a h; contradiction
instance : Inhabited (EncodedQueryString r) where
default := EncodedQueryString.empty
/--
Appends a single encoded query character to an encoded query string. The proof `h`
that the character is a valid encoded query character maintains the invariant.
-/
private def push (s : EncodedQueryString r) (c : UInt8) (h : isEncodedQueryChar r c) : EncodedQueryString r :=
  ⟨s.toByteArray.push c, isAllowedEncodedQueryChars.push s.valid h⟩
/--
Attempts to create an `EncodedQueryString` from a `ByteArray`. Returns `some` if the byte array contains
only valid encoded query characters and all percent signs are followed by exactly two hex digits, `none` otherwise.
-/
def ofByteArray? (ba : ByteArray) : Option (EncodedQueryString r) :=
  if h : isAllowedEncodedQueryChars r ba then
    if isValidPercentEncoding ba then some ⟨ba, h⟩ else none
  else none

/--
Creates an `EncodedQueryString` from a `ByteArray`, panicking if the byte array is invalid.
-/
def ofByteArray! (ba : ByteArray) : EncodedQueryString r :=
  match ofByteArray? ba with
  | some es => es
  | none => panic! "invalid encoded query string"

/--
Creates an `EncodedQueryString` from a `String` by checking if it's already a valid percent-encoded string.
Returns `some` if valid, `none` otherwise.
-/
def ofString? (s : String) : Option (EncodedQueryString r) :=
  ofByteArray? s.toUTF8

/--
Creates an `EncodedQueryString` from a `String`, panicking if the string is not a valid percent-encoded string.
-/
def ofString! (s : String) : EncodedQueryString r :=
  ofByteArray! s.toUTF8

/--
Creates an `EncodedQueryString` from a `ByteArray` with compile-time proofs.
Use this when you have proofs that the byte array is valid.
-/
def new (ba : ByteArray) (valid : isAllowedEncodedQueryChars r ba) (_validEncoding : isValidPercentEncoding ba) : EncodedQueryString r :=
  ⟨ba, valid⟩
/--
Converts a byte to its percent-encoded hexadecimal representation (%XX). For example, a space character
(0x20) becomes "%20".
-/
private def byteToHex (b : UInt8) (s : EncodedQueryString r) : EncodedQueryString r :=
  let ba := s.toByteArray.push '%'.toUInt8
    |>.push (hexDigit (b >>> 4))
    |>.push (hexDigit (b &&& 0xF))
  let valid := by
    have h1 : isEncodedQueryChar r '%'.toUInt8 := by
      simp [isEncodedQueryChar, isEncodedChar]; decide
    have h2 : isEncodedQueryChar r (hexDigit (b >>> 4)) :=
      isHexDigit_isEncodedQueryChar (hexDigit_isHexDigit (BitVec.toNat_ushiftRight_lt b.toBitVec 4 (by decide)))
    have h3 : isEncodedQueryChar r (hexDigit (b &&& 0xF)) :=
      isHexDigit_isEncodedQueryChar (hexDigit_isHexDigit (@UInt8.and_lt_add_one b 0xF (by decide)))
    exact isAllowedEncodedQueryChars.push (isAllowedEncodedQueryChars.push (isAllowedEncodedQueryChars.push s.valid h1) h2) h3
  ⟨ba, valid⟩
/--
Encodes a raw string into an `EncodedQueryString` with automatic proof construction. Unreserved characters
are kept as-is, spaces are encoded as '+', and all other characters are percent-encoded.
-/
def encode {r} (s : String) : EncodedQueryString r :=
  s.toUTF8.foldl (init := EncodedQueryString.empty) fun acc c =>
    if h : isAscii c ∧ r c then
      acc.push c (by simp [isEncodedQueryChar, isEncodedChar]; exact Or.inl (And.intro h.left (Or.inl h.right)))
    else if _ : c = ' '.toUInt8 then
      acc.push '+'.toUInt8 (by simp [isEncodedQueryChar])
    else
      byteToHex c acc
/--
Converts an `EncodedQueryString` to a `String`, given a proof that all characters satisfying `r` are ASCII.
-/
def toString (es : EncodedQueryString r) : String :=
  -- Anonymous constructor: builds the `String` directly from its UTF-8 bytes plus a validity proof.
  ⟨es.toByteArray, ascii_is_valid_utf8 es.toByteArray (all_of_all_of_imp es.valid (fun c h => isEncodedQueryChar_isAscii c h))⟩
/--
Decodes an `EncodedQueryString` back to a regular `String`. Converts percent-encoded sequences and '+'
signs back to their original characters. Returns `none` if the decoded bytes are not valid UTF-8.
This is almost the same code from `System.Uri.UriEscape.decodeUri`, but with `Option` instead.
-/
def decode (es : EncodedQueryString r) : Option String := Id.run do
  let mut decoded : ByteArray := ByteArray.empty
  let rawBytes := es.toByteArray
  let len := rawBytes.size
  let mut i := 0
  let percent := '%'.toNat.toUInt8
  let plus := '+'.toNat.toUInt8
  while h : i < len do
    let c := rawBytes[i]
    (decoded, i) := if c == plus then
      -- '+' decodes to a space in the x-www-form-urlencoded format.
      (decoded.push ' '.toNat.toUInt8, i + 1)
    else if h₁ : c == percent ∧ i + 1 < len then
      let h1 := rawBytes[i + 1]
      if let some hd1 := hexDigitToUInt8? h1 then
        if h₂ : i + 2 < len then
          let h2 := rawBytes[i + 2]
          if let some hd2 := hexDigitToUInt8? h2 then
            (decoded.push (hd1 * 16 + hd2), i + 3)
          else
            (((decoded.push c).push h1).push h2, i + 3)
        else
          ((decoded.push c).push h1, i + 2)
      else
        ((decoded.push c).push h1, i + 2)
    else
      (decoded.push c, i + 1)
  return String.fromUTF8? decoded
end EncodedQueryString
instance : ToString (EncodedQueryString r) where
  toString := EncodedQueryString.toString

instance : Repr (EncodedQueryString r) where
  reprPrec es n := reprPrec (toString es) n

instance : BEq (EncodedQueryString r) where
  beq x y := x.toByteArray = y.toByteArray

instance : Hashable (EncodedQueryString r) where
  hash x := Hashable.hash x.toByteArray

instance : Hashable (Option (EncodedQueryString r)) where
  hash
    -- A tag byte distinguishes `some` from `none` so they can never collide structurally.
    | some x => Hashable.hash ((ByteArray.mk #[1] ++ x.toByteArray))
    | none => Hashable.hash (ByteArray.mk #[0])
/--
A percent-encoded URI path segment. Valid characters are `pchar` (unreserved, sub-delims, ':', '@').
-/
abbrev EncodedSegment := EncodedString isPChar

namespace EncodedSegment

/--
Encodes a raw string into an encoded path segment.
-/
def encode (s : String) : EncodedSegment :=
  EncodedString.encode (r := isPChar) s

/--
Attempts to create an encoded path segment from raw bytes.
-/
def ofByteArray? (ba : ByteArray) : Option EncodedSegment :=
  EncodedString.ofByteArray? (r := isPChar) ba

/--
Creates an encoded path segment from raw bytes, panicking on invalid encoding.
-/
def ofByteArray! (ba : ByteArray) : EncodedSegment :=
  EncodedString.ofByteArray! (r := isPChar) ba

/--
Decodes an encoded path segment back to a UTF-8 string.
-/
def decode (segment : EncodedSegment) : Option String :=
  EncodedString.decode segment

end EncodedSegment
/--
A percent-encoded URI fragment component. Valid characters are `pchar / "/" / "?"`.
-/
abbrev EncodedFragment := EncodedString isFragmentChar

namespace EncodedFragment

/--
Encodes a raw string into an encoded fragment component.
-/
def encode (s : String) : EncodedFragment :=
  EncodedString.encode (r := isFragmentChar) s

/--
Attempts to create an encoded fragment component from raw bytes.
-/
def ofByteArray? (ba : ByteArray) : Option EncodedFragment :=
  EncodedString.ofByteArray? (r := isFragmentChar) ba

/--
Creates an encoded fragment component from raw bytes, panicking on invalid encoding.
-/
def ofByteArray! (ba : ByteArray) : EncodedFragment :=
  EncodedString.ofByteArray! (r := isFragmentChar) ba

/--
Decodes an encoded fragment component back to a UTF-8 string.
-/
def decode (fragment : EncodedFragment) : Option String :=
  EncodedString.decode fragment

end EncodedFragment
/--
A percent-encoded URI userinfo component. Valid characters are `unreserved / sub-delims / ":"`.
-/
abbrev EncodedUserInfo := EncodedString isUserInfoChar

namespace EncodedUserInfo

/--
Encodes a raw string into an encoded userinfo component.
-/
def encode (s : String) : EncodedUserInfo :=
  EncodedString.encode (r := isUserInfoChar) s

/--
Attempts to create an encoded userinfo component from raw bytes.
-/
def ofByteArray? (ba : ByteArray) : Option EncodedUserInfo :=
  EncodedString.ofByteArray? (r := isUserInfoChar) ba

/--
Creates an encoded userinfo component from raw bytes, panicking on invalid encoding.
-/
def ofByteArray! (ba : ByteArray) : EncodedUserInfo :=
  EncodedString.ofByteArray! (r := isUserInfoChar) ba

/--
Decodes an encoded userinfo component back to a UTF-8 string.
-/
def decode (userInfo : EncodedUserInfo) : Option String :=
  EncodedString.decode userInfo

end EncodedUserInfo
/--
A percent-encoded URI query parameter. Valid characters are `pchar / "/" / "?"` with '+' for spaces.
-/
abbrev EncodedQueryParam := EncodedQueryString isQueryChar

namespace EncodedQueryParam

/--
Encodes a raw string into an encoded query parameter.
-/
def encode (s : String) : EncodedQueryParam :=
  EncodedQueryString.encode (r := isQueryChar) s

/--
Attempts to create an encoded query parameter from raw bytes.
-/
def ofByteArray? (ba : ByteArray) : Option EncodedQueryParam :=
  EncodedQueryString.ofByteArray? (r := isQueryChar) ba

/--
Creates an encoded query parameter from raw bytes, panicking on invalid encoding.
-/
def ofByteArray! (ba : ByteArray) : EncodedQueryParam :=
  EncodedQueryString.ofByteArray! (r := isQueryChar) ba

/--
Attempts to create an encoded query parameter from an encoded string.
-/
-- NOTE(review): named `fromString?` while siblings use `ofString?` — kept for caller compatibility.
def fromString? (s : String) : Option EncodedQueryParam :=
  EncodedQueryString.ofString? (r := isQueryChar) s

/--
Decodes an encoded query parameter back to a UTF-8 string.
-/
def decode (param : EncodedQueryParam) : Option String :=
  EncodedQueryString.decode param

end EncodedQueryParam
end Std.Http.URI

View File

@@ -0,0 +1,400 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
import Init.While
public import Init.Data.String
public import Std.Internal.Parsec
public import Std.Internal.Parsec.ByteArray
public import Std.Internal.Http.Data.URI.Basic
public section
/-!
# URI Parser
This module provides parsers for URIs and request targets according to RFC 3986.
It handles parsing of schemes, authorities, paths, queries, and fragments.
-/
namespace Std.Http.URI.Parser
set_option linter.all true
open Std Internal Parsec ByteArray
@[inline]
private def isDigit (c : UInt8) : Bool :=
  c >= '0'.toUInt8 ∧ c <= '9'.toUInt8

@[inline]
private def isHexDigit (c : UInt8) : Bool :=
  isDigit c ∨ (c >= 'A'.toUInt8 ∧ c <= 'F'.toUInt8) ∨ (c >= 'a'.toUInt8 ∧ c <= 'f'.toUInt8)

@[inline]
private def isAlpha (c : UInt8) : Bool :=
  (c >= 'A'.toUInt8 ∧ c <= 'Z'.toUInt8) ∨ (c >= 'a'.toUInt8 ∧ c <= 'z'.toUInt8)

@[inline]
private def isAlphaNum (c : UInt8) : Bool :=
  isAlpha c ∨ isDigit c

@[inline]
private def isUnreserved (c : UInt8) : Bool :=
  isAlphaNum c ∨ c = '-'.toUInt8 ∨ c = '.'.toUInt8 ∨ c = '_'.toUInt8 ∨ c = '~'.toUInt8

@[inline]
private def isSubDelims (c : UInt8) : Bool :=
  c = '!'.toUInt8 ∨ c = '$'.toUInt8 ∨ c = '&'.toUInt8 ∨ c = '\''.toUInt8 ∨
  c = '('.toUInt8 ∨ c = ')'.toUInt8 ∨ c = '*'.toUInt8 ∨ c = '+'.toUInt8 ∨
  c = ','.toUInt8 ∨ c = ';'.toUInt8 ∨ c = '='.toUInt8

@[inline]
private def isGenDelims (c : UInt8) : Bool :=
  c = ':'.toUInt8 ∨ c = '/'.toUInt8 ∨ c = '?'.toUInt8 ∨ c = '#'.toUInt8 ∨
  c = '['.toUInt8 ∨ c = ']'.toUInt8 ∨ c = '@'.toUInt8

@[inline]
private def isReserved (c : UInt8) : Bool :=
  isGenDelims c ∨ isSubDelims c

@[inline]
private def isPChar (c : UInt8) : Bool :=
  isUnreserved c ∨ isSubDelims c ∨ c = ':'.toUInt8 ∨ c = '@'.toUInt8 ∨ c = '%'.toUInt8

@[inline]
private def isRegNameChar (c : UInt8) : Bool :=
  isUnreserved c ∨ isSubDelims c ∨ c = '%'.toUInt8

@[inline]
private def isSchemeChar (c : UInt8) : Bool :=
  isAlphaNum c ∨ c = '+'.toUInt8 ∨ c = '-'.toUInt8 ∨ c = '.'.toUInt8

@[inline]
private def isQueryChar (c : UInt8) : Bool :=
  isPChar c ∨ c = '/'.toUInt8 ∨ c = '?'.toUInt8

@[inline]
private def isFragmentChar (c : UInt8) : Bool :=
  isPChar c ∨ c = '/'.toUInt8 ∨ c = '?'.toUInt8

@[inline]
private def isUserInfoChar (c : UInt8) : Bool :=
  isUnreserved c ∨ isSubDelims c ∨ c = '%'.toUInt8 ∨ c = ':'.toUInt8
@[inline]
private def tryOpt (p : Parser α) : Parser (Option α) :=
  optional (attempt p)

@[inline]
private def ofExcept (p : Except String α) : Parser α :=
  match p with
  | .ok res => pure res
  | .error err => fail err

@[inline]
private def peekIs (p : UInt8 → Bool) : Parser Bool := do
  return (← peekWhen? p).isSome

-- Converts a single ASCII hex digit to its numeric value; assumes `isHexDigit` already holds.
private def hexToByte (digit : UInt8) : UInt8 :=
  if digit <= '9'.toUInt8 then digit - '0'.toUInt8
  else if digit <= 'F'.toUInt8 then digit - 'A'.toUInt8 + 10
  else digit - 'a'.toUInt8 + 10
private def parsePctEncoded : Parser UInt8 := do
  skipByte '%'.toUInt8
  let hi ← hexToByte <$> satisfy isHexDigit
  let lo ← hexToByte <$> satisfy isHexDigit
  return (hi <<< 4) ||| lo

-- scheme = ALPHA *( ALPHA / DIGIT / "+" / "-" / "." )
private def parseScheme : Parser URI.Scheme := do
  let schemeBytes ← takeWhileUpTo1 isSchemeChar 63
  return ⟨String.fromUTF8! schemeBytes.toByteArray |>.toLower, .isLowerCase_toLower⟩

-- port = *DIGIT
private def parsePortNumber : Parser UInt16 := do
  let portBytes ← takeWhileUpTo isDigit 5
  if portBytes.size = 0 then fail "empty port number"
  let portStr := String.fromUTF8! portBytes.toByteArray
  let some portNum := String.toNat? portStr
    | fail s!"invalid port number:{portStr}"
  if portNum > 65535 then
    fail s!"port number too large: {portNum}"
  return portNum.toUInt16
-- userinfo = *( unreserved / pct-encoded / sub-delims / ":" )
private def parseUserInfo : Parser URI.UserInfo := do
  let userBytesName ← takeWhileUpTo (fun x => x ≠ ':'.toUInt8 ∧ isUserInfoChar x) 1024
  let some userName := URI.EncodedUserInfo.ofByteArray? userBytesName.toByteArray
    | fail "invalid percent encoding in user info"
  let userPass ← if ← peekIs (· == ':'.toUInt8) then
    skip
    let userBytesPass ← takeWhileUpTo isUserInfoChar 1024
    let some userStrPass := URI.EncodedUserInfo.ofByteArray? userBytesPass.toByteArray >>= URI.EncodedUserInfo.decode
      | fail "invalid percent encoding in user info"
    pure <| some userStrPass
  else
    pure none
  let some userName := userName.decode
    | fail "invalid username"
  return ⟨userName, userPass⟩
-- IP-literal = "[" ( IPv6address / IPvFuture ) "]"
private def parseIPv6 : Parser Net.IPv6Addr := do
  skipByte '['.toUInt8
  let result ← takeWhileUpTo1 (fun x => x = ':'.toUInt8 ∨ isHexDigit x) 256
  skipByte ']'.toUInt8
  let ipv6Str := String.fromUTF8! result.toByteArray
  let some ipv6Addr := Std.Net.IPv6Addr.ofString ipv6Str
    | fail s!"invalid IPv6 address: {ipv6Str}"
  return ipv6Addr

-- IPv4address = dec-octet "." dec-octet "." dec-octet "." dec-octet
private def parseIPv4 : Parser Net.IPv4Addr := do
  let result ← takeWhileUpTo1 (fun x => x = '.'.toUInt8 ∨ isDigit x) 256
  let ipv4Str := String.fromUTF8! result.toByteArray
  let some ipv4Str := Std.Net.IPv4Addr.ofString ipv4Str
    | fail s!"invalid IPv4 address: {ipv4Str}"
  return ipv4Str
-- host = IP-literal / IPv4address / reg-name
-- Note: RFC 1123 allows domain labels to start with digits, so we must try IPv4
-- first and fall back to reg-name parsing if it fails.
private def parseHost : Parser URI.Host := do
  if (← peekWhen? (· == '['.toUInt8)).isSome then
    return .ipv6 (← parseIPv6)
  else
    if (← peekWhen? isDigit).isSome then
      if let some ipv4 ← tryOpt parseIPv4 then
        return .ipv4 ipv4
    -- It needs to be a legal DNS label, so it differs from reg-name.
    let isHostName x := isAlphaNum x ∨ x = '-'.toUInt8 ∨ x = '.'.toUInt8
    let some str := String.fromUTF8? (← takeWhileUpTo1 isHostName 1024).toByteArray
      | fail s!"invalid host"
    let lower := str.toLower
    if h : URI.IsValidDomainName lower then
      return .name ⟨lower, .isLowerCase_toLower, h⟩
    else
      fail s!"invalid domain name: {str}"
-- authority = [ userinfo "@" ] host [ ":" port ]
private def parseAuthority : Parser URI.Authority := do
  let userinfo ← tryOpt do
    let ui ← parseUserInfo
    skipByte '@'.toUInt8
    return ui
  let host ← parseHost
  let port ← optional do
    skipByte ':'.toUInt8
    parsePortNumber
  return { userInfo := userinfo, host := host, port := port }

-- segment = *pchar
private def parseSegment : Parser ByteSlice := do
  takeWhileUpTo isPChar 256
/-
path = path-abempty ; begins with "/" or is empty
/ path-absolute ; begins with "/" but not "//"
/ path-noscheme ; begins with a non-colon segment
/ path-rootless ; begins with a segment
/ path-empty ; zero characters
path-abempty = *( "/" segment )
path-absolute = "/" [ segment-nz *( "/" segment ) ]
path-noscheme = segment-nz-nc *( "/" segment )
path-rootless = segment-nz *( "/" segment )
path-empty = 0<pchar>
-/
/--
Parses an URI with combined parsing and validation.
-/
def parsePath (forceAbsolute : Bool) (allowEmpty : Bool) : Parser URI.Path := do
  let mut isAbsolute := false
  let mut segments : Array _ := #[]
  let isSegmentOrSlash ← peekIs (fun c => isPChar c ∨ c = '/'.toUInt8)
  if ¬allowEmpty ∧ ((← isEof) ∨ ¬isSegmentOrSlash) then
    fail "need a path"
  -- Check if path is absolute
  if ← peekIs (· == '/'.toUInt8) then
    isAbsolute := true
    skip
    if ← peekIs (· == '/'.toUInt8) then
      fail "it's a scheme starter"
  else if forceAbsolute then
    if allowEmpty ∧ ((← isEof) ∨ ¬isSegmentOrSlash) then
      return { segments := segments, absolute := isAbsolute }
    else
      fail "require '/' in path"
  else
    pure ()
  -- Parse segments
  while (← peek?).isSome do
    let segmentBytes ← parseSegment
    let some segmentStr := URI.EncodedSegment.ofByteArray? segmentBytes.toByteArray
      | fail "invalid percent encoding in path segment"
    segments := segments.push segmentStr
    if (← peek?).any (· == '/'.toUInt8) then
      skip
      -- If path ends with '/', add empty segment
      if (← peek?).isNone then
        segments := segments.push (URI.EncodedString.empty)
    else
      break
  return { segments := segments, absolute := isAbsolute }
-- query = *( pchar / "/" / "?" )
private def parseQuery : Parser URI.Query := do
  let queryBytes ← takeWhileUpTo isQueryChar 1024
  let some queryStr := String.fromUTF8? queryBytes.toByteArray
    | fail "invalid query string"
  -- Fold over "&"-separated pairs inside the `Option` monad; any invalid pair aborts with `none`.
  let pairs : Option URI.Query := queryStr.splitOn "&" |>.foldlM (init := URI.Query.empty) fun acc pair => do
    match pair.splitOn "=" with
    | [key] =>
      let key ← URI.EncodedQueryParam.fromString? key
      pure (acc.insertEncoded key none)
    | key :: value =>
      let key ← URI.EncodedQueryParam.fromString? key
      let value ← URI.EncodedQueryParam.fromString? (String.intercalate "=" value)
      pure (acc.insertEncoded key (some value))
    | [] => pure acc
  if let some pairs := pairs then
    return pairs
  else
    fail "invalid query string"

-- fragment = *( pchar / "/" / "?" )
private def parseFragment : Parser URI.EncodedFragment := do
  let fragmentBytes ← takeWhileUpTo isFragmentChar 1024
  let some fragmentStr := URI.EncodedFragment.ofByteArray? fragmentBytes.toByteArray
    | fail "invalid percent encoding in fragment"
  return fragmentStr
private def parseHierPart : Parser (Option URI.Authority × URI.Path) := do
  -- Check for "//" authority path-abempty
  if (← tryOpt (skipString "//")).isSome then
    let authority ← parseAuthority
    let path ← parsePath true true -- path-abempty (must start with "/" or be empty)
    return (some authority, path)
  else
    -- path-absolute / path-rootless / path-empty
    let path ← parsePath false true
    return (none, path)

/--
Parses a URI (Uniform Resource Identifier).
URI = scheme ":" hier-part [ "?" query ] [ "#" fragment ]
hier-part = "//" authority path-abempty / path-absolute / path-rootless / path-empty
-/
public def parseURI : Parser URI := do
  let scheme ← parseScheme
  skipByte ':'.toUInt8
  let (authority, path) ← parseHierPart
  let query ← optional (skipByteChar '?' *> parseQuery)
  let query := query.getD .empty
  let fragment ← optional do
    let some result := (← (skipByteChar '#' *> parseFragment)) |>.decode
      | fail "invalid fragment parse encoding"
    return result
  return { scheme, authority, path, query, fragment }
/--
Parses a request target with combined parsing and validation.
-/
public def parseRequestTarget : Parser RequestTarget :=
  asterisk <|> origin <|> authority <|> absolute
where
  -- The asterisk form
  asterisk : Parser RequestTarget := do
    skipByte '*'.toUInt8
    return .asteriskForm
  -- origin-form = absolute-path [ "?" query ]
  -- absolute-path = 1*( "/" segment )
  origin : Parser RequestTarget := attempt do
    if ← peekIs (· == '/'.toUInt8) then
      let path ← parsePath true true
      let query ← optional (skipByte '?'.toUInt8 *> parseQuery)
      return .originForm path query
    else
      fail "not origin"
  -- absolute-URI = scheme ":" hier-part [ "?" query ]
  absolute : Parser RequestTarget := attempt do
    let scheme ← parseScheme
    skipByte ':'.toUInt8
    let (authority, path) ← parseHierPart
    let query ← optional (skipByteChar '?' *> parseQuery)
    let query := query.getD URI.Query.empty
    return .absoluteForm { path, scheme, authority, query, fragment := none } (by simp)
  -- authority-form = host ":" port
  authority : Parser RequestTarget := attempt do
    let host ← parseHost
    skipByteChar ':'
    let port ← parsePortNumber
    return .authorityForm { host, port := some port }
/--
Parses an HTTP `Host` header value.
-/
public def parseHostHeader : Parser (URI.Host × Option UInt16) := do
  let host ← parseHost
  let port ← optional do
    skipByte ':'.toUInt8
    parsePortNumber
  -- The header must contain nothing after `host[:port]`.
  if ¬(← isEof) then
    fail "invalid host header"
  return (host, port)
end Std.Http.URI.Parser

View File

@@ -0,0 +1,92 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
import Init.Data.ToString
public import Init.Data.String
public section
/-!
# Version
The `Version` structure represents an HTTP version with a major and minor number. It includes several
standard versions of the HTTP protocol, such as HTTP/1.1, HTTP/2.0, and HTTP/3.0.
* Reference: https://httpwg.org/specs/rfc9110.html#protocol.version
-/
namespace Std.Http
set_option linter.all true
/--
The `Version` structure represents an HTTP version with a major and minor number. It includes several
standard versions of the HTTP protocol, such as HTTP/1.1, HTTP/2.0, and HTTP/3.0.
* Reference: https://httpwg.org/specs/rfc9110.html#protocol.version
-/
inductive Version
  /--
  `HTTP/1.1`
  -/
  | v11
  /--
  `HTTP/2.0`
  -/
  | v20
  /--
  `HTTP/3.0`
  -/
  | v30
  deriving Repr, Inhabited, BEq, DecidableEq
namespace Version
/--
Converts a pair of `Nat` to the corresponding `Version`.
-/
def ofNumber? : Nat → Nat → Option Version
  | 1, 1 => some .v11
  | 2, 0 => some .v20
  | 3, 0 => some .v30
  | _, _ => none

/--
Converts `String` to the corresponding `Version`.
-/
def ofString? : String → Option Version
  | "HTTP/1.1" => some .v11
  | "HTTP/2.0" => some .v20
  | "HTTP/3.0" => some .v30
  | _ => none
/--
Converts `String` to the corresponding `Version`, panics if invalid.
-/
def ofString! (s : String) : Version :=
  match ofString? s with
  | some v => v
  | none => panic! s!"invalid HTTP version: {s.quote}"

/--
Converts a `Version` to its corresponding major and minor numbers as a pair.
-/
def toNumber : Version → (Nat × Nat)
  | .v11 => (1, 1)
  | .v20 => (2, 0)
  | .v30 => (3, 0)

instance : ToString Version where
  toString
    | .v11 => "HTTP/1.1"
    | .v20 => "HTTP/2.0"
    | .v30 => "HTTP/3.0"
end Std.Http.Version

View File

@@ -0,0 +1,21 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Internal.ChunkedBuffer
public import Std.Internal.Http.Internal.LowerCase
public import Std.Internal.Http.Internal.MultiMap
public import Std.Internal.Http.Internal.Encode
public section
/-!
# HTTP Internal Utilities
This module re-exports internal utilities used by the HTTP library including
data structures, encoding functions, and buffer management.
-/

View File

@@ -0,0 +1,143 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
import Init.Data.ToString
import Init.Data.Array.Lemmas
public import Init.Data.String
public import Init.Data.ByteArray
public import Init.Data.Queue
public section
/-!
# ChunkedBuffer
This module provides an efficient way to concatenate multiple `ByteArray`s by deferring the actual
concatenation until necessary. This is particularly useful in HTTP response building and streaming
scenarios where data is accumulated incrementally.
-/
namespace Std.Http.Internal
set_option linter.all true
/--
A structure that accumulates multiple `ByteArray`s efficiently by tracking them in an array and
maintaining the total size. This allows building large buffers without repeated allocations and copies.
-/
structure ChunkedBuffer where
  /--
  The accumulated byte arrays
  -/
  data : Queue ByteArray
  /--
  The total size in bytes of all accumulated arrays
  -/
  size : Nat
namespace ChunkedBuffer
/--
An empty `ChunkedBuffer`.
-/
@[inline]
def empty : ChunkedBuffer :=
  { data := .empty, size := 0 }

/--
Append a single `ByteArray` to the `ChunkedBuffer`.
-/
@[inline]
def push (c : ChunkedBuffer) (b : ByteArray) : ChunkedBuffer :=
  { data := c.data.enqueue b, size := c.size + b.size }

/--
Writes a `ByteArray` to the `ChunkedBuffer`.
-/
@[inline]
def write (buffer : ChunkedBuffer) (data : ByteArray) : ChunkedBuffer :=
  buffer.push data
/--
Writes a `ChunkedBuffer` to the `ChunkedBuffer`.
-/
@[inline]
def append (buffer : ChunkedBuffer) (data : ChunkedBuffer) : ChunkedBuffer :=
  -- Queue.enqueueAll prepends to eList, so reverse to maintain FIFO order
  { data := buffer.data.enqueueAll data.data.toArray.toList.reverse, size := buffer.size + data.size }

/--
Writes a `Char` to the `ChunkedBuffer`. Only the low byte is written (`Char.toUInt8`),
so this is only correct for ASCII characters.
-/
@[inline]
def writeChar (buffer : ChunkedBuffer) (data : Char) : ChunkedBuffer :=
  buffer.push (ByteArray.mk #[data.toUInt8])

/--
Writes a `String` to the `ChunkedBuffer`.
-/
@[inline]
def writeString (buffer : ChunkedBuffer) (data : String) : ChunkedBuffer :=
  buffer.push data.toUTF8
/--
Turn the combined structure into a single contiguous ByteArray.
-/
@[inline]
def toByteArray (cb : ChunkedBuffer) : ByteArray :=
  let arr := cb.data.toArray
  -- Fast path: a single chunk can be returned without copying.
  if h : 1 = arr.size then
    arr[0]'(Nat.le_of_eq h)
  else
    arr.foldl (· ++ ·) (.emptyWithCapacity cb.size)

/--
Build from a ByteArray directly.
-/
@[inline]
def ofByteArray (bs : ByteArray) : ChunkedBuffer :=
  { data := .empty |>.enqueue bs, size := bs.size }

/--
Build from an array of ByteArrays directly.
-/
@[inline]
def ofArray (bs : Array ByteArray) : ChunkedBuffer :=
  -- Reversed because `enqueueAll` prepends; this preserves FIFO order.
  { data := .empty |>.enqueueAll bs.reverse.toList , size := bs.foldl (· + ·.size) 0 }
/--
Dequeue the first `ByteArray` from the `ChunkedBuffer`, returning it along with the remaining buffer.
Returns `none` if the buffer is empty.
-/
@[inline]
def dequeue? (c : ChunkedBuffer) : Option (ByteArray × ChunkedBuffer) :=
  match c.data.dequeue? with
  | some (b, rest) => some (b, { data := rest, size := c.size - b.size })
  | none => none

/--
Check if it's an empty array.
-/
@[inline]
def isEmpty (bb : ChunkedBuffer) : Bool :=
  bb.size = 0
instance : Inhabited ChunkedBuffer := ⟨empty⟩

instance : EmptyCollection ChunkedBuffer where
  emptyCollection := empty

instance : Coe ByteArray ChunkedBuffer where
  coe := ofByteArray

instance : Coe (Array ByteArray) ChunkedBuffer where
  coe := ofArray
end Std.Http.Internal.ChunkedBuffer

View File

@@ -0,0 +1,38 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Internal.ChunkedBuffer
public import Std.Internal.Http.Data.Version
public section
/-!
# Encode
Serializes types to a `ChunkedBuffer` containing their canonical HTTP representation for a specific
protocol version.
-/
namespace Std.Http.Internal
set_option linter.all true
/--
Serializes a type `t` to a `ChunkedBuffer` containing its canonical HTTP representation for protocol
version `v`.
-/
class Encode (v : Version) (t : Type) where
  /--
  Encodes a type `t` to a `ChunkedBuffer`.
  -/
  encode : ChunkedBuffer → t → ChunkedBuffer

instance : Encode .v11 Version where
  encode buffer := buffer.writeString ∘ toString
end Std.Http.Internal

View File

@@ -0,0 +1,66 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
import Init.Grind
import Init.Data.Int.OfNat
import Init.Data.UInt.Lemmas
public import Init.Data.String
@[expose]
public section
/-!
# LowerCase
This module provides predicates and normalization functions to handle ASCII case-insensitivity. It
includes proofs of idempotency for lowercase transformations and utilities for validating lowercase
state `String`.
-/
namespace Std.Http.Internal
set_option linter.all true
/--
Predicate asserting that a string is already in lowercase normal form.
-/
@[expose] def IsLowerCase (s : String) : Prop :=
  s.toLower = s

private theorem Char.toLower_eq_self_iff {c : Char} : c.toLower = c ↔ c.isUpper = false := by
  simp only [Char.toLower, Char.isUpper]
  split <;> rename_i h <;> simpa [UInt32.le_iff_toNat_le, Char.ext_iff] using h

private theorem String.toLower_eq_self_iff {s : String} : s.toLower = s ↔ s.toList.any Char.isUpper = false := by
  simp only [String.toLower, ← String.toList_inj, String.toList_map]
  rw (occs := [2]) [← List.map_id s.toList]
  rw [List.map_eq_map_iff]
  simp [Char.toLower_eq_self_iff]

instance : Decidable (IsLowerCase s) :=
  decidable_of_decidable_of_iff (p := s.toList.any Char.isUpper = false)
    (by exact String.toLower_eq_self_iff.symm)
namespace IsLowerCase
private theorem Char.toLower_idempotent (c : Char) : c.toLower.toLower = c.toLower := by
  grind [Char.toLower]

/--
Proof that applying `toLower` to any string results in a string that satisfies the `IsLowerCase`
predicate.
-/
theorem isLowerCase_toLower {s : String} : IsLowerCase s.toLower := by
  unfold IsLowerCase String.toLower
  rw [String.map_map, Function.comp_def]
  simp [Char.toLower_idempotent]

/-- The empty string is trivially in lowercase normal form. -/
theorem isLowerCase_empty : IsLowerCase "" := by
  simp [IsLowerCase, String.toLower]
end Std.Http.Internal.IsLowerCase

View File

@@ -0,0 +1,213 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
import Init.Grind
import Init.Data.Int.OfNat
public import Std.Data.HashMap
public section
/-!
# MultiMap
This module defines a generic `MultiMap` type that maps keys to multiple values.
The implementation is optimized for fast lookups and insertions while ensuring
that each key always has at least one associated value.
-/
namespace Std
open Std Internal
set_option linter.all true
/--
A structure for managing key-value pairs where each key can have multiple values.
Invariant: each key must have at least one value.
-/
structure MultiMap (α : Type u) (β : Type v) [BEq α] [Hashable α] where
  /--
  The internal hashmap that stores all the data.
  Each key maps to a non-empty array of values.
  -/
  data : HashMap α { arr : Array β // arr.size > 0 }
  deriving Inhabited, Repr
namespace MultiMap

variable {α : Type u} {β : Type v} [BEq α] [Hashable α]

instance : Membership α (MultiMap α β) where
  mem map key := key ∈ map.data

instance (key : α) (map : MultiMap α β) : Decidable (key ∈ map) :=
  inferInstanceAs (Decidable (key ∈ map.data))

/--
Retrieves the first value for the given key.
-/
@[inline]
def get (map : MultiMap α β) (key : α) (h : key ∈ map) : β :=
  let arr := map.data.get key h
  -- Safe: the subtype invariant guarantees the array is non-empty.
  arr.val[0]'(arr.property)
/--
Retrieves all values for the given key.
-/
@[inline]
def getAll (map : MultiMap α β) (key : α) (h : key ∈ map) : Array β :=
  map.data.get key h

/--
Retrieves all values for the given key.
Returns `none` if the key is absent.
-/
@[inline]
def getAll? (map : MultiMap α β) (key : α) : Option (Array β) :=
  if h : key ∈ map then
    some (map.getAll key h)
  else
    none

/--
Retrieves the first value for the given key.
Returns `none` if the key is absent.
-/
@[inline]
def get? (map : MultiMap α β) (key : α) : Option β :=
  if h : key ∈ map then
    some (map.get key h)
  else
    none
/--
Checks if the key-value pair is present in the map.
-/
@[inline]
def hasEntry (map : MultiMap α β) [BEq β] (key : α) (value : β) : Bool :=
  map.data.get? key
    |>.bind (fun x => x.val.find? (· == value))
    |>.isSome

/--
Retrieves the last value for the given key.
Returns `none` if the key is absent.
-/
@[inline]
def getLast? (map : MultiMap α β) (key : α) : Option β :=
  map.data.get? key
    |>.bind (fun x => x.val[x.val.size - 1]?)

/--
Like `get?`, but returns a default value if absent.
-/
@[inline]
def getD (map : MultiMap α β) (key : α) (d : β) : β :=
  map.get? key |>.getD d

/--
Like `get?`, but panics if absent.
-/
@[inline]
def get! [Inhabited β] (map : MultiMap α β) (key : α) : β :=
  map.get? key |>.get!
/--
Inserts a new key-value pair into the map.
If the key already exists, appends the value to existing values.
-/
@[inline]
def insert (map : MultiMap α β) (key : α) (value : β) : MultiMap α β :=
  let data := map.data.alter key fun
    | some existingValues => some ⟨existingValues.val.push value, by simp⟩
    | none => some ⟨#[value], by simp⟩
  { data }

/--
Inserts a key with an array of values.
-/
@[inline]
def insertMany (map : MultiMap α β) (key : α) (values : Array β) (h : values.size > 0) : MultiMap α β :=
  let data := map.data.alter key fun
    | some existingValues => some ⟨existingValues.val ++ values, by simp; grind⟩
    | none => some ⟨values, h⟩
  { data }
/--
Creates an empty multimap.
-/
def empty : MultiMap α β :=
  { data := ∅ }
/--
Creates a multimap from a list of key-value pairs.
Later pairs with the same key are appended after earlier ones.
-/
def ofList (pairs : List (α × β)) : MultiMap α β :=
  pairs.foldl (init := empty) fun acc pair => acc.insert pair.1 pair.2
/--
Checks if a key exists in the map.
-/
@[inline]
def contains (map : MultiMap α β) (key : α) : Bool :=
  map.data.contains key

/--
Removes a key and all its values from the map.
-/
@[inline]
def erase (map : MultiMap α β) (key : α) : MultiMap α β :=
  { data := map.data.erase key }

/--
Gets the number of keys in the map.
-/
-- Note: this counts distinct keys, not the total number of stored values.
@[inline]
def size (map : MultiMap α β) : Nat :=
  map.data.size

/--
Checks if the map is empty.
-/
@[inline]
def isEmpty (map : MultiMap α β) : Bool :=
  map.data.isEmpty

/--
Merges two multimaps, with the values of the second appearing after the values of the first for duplicate keys.
-/
-- Folds `map2`'s buckets into `map1`; each bucket carries its non-emptiness proof.
def merge (map1 map2 : MultiMap α β) : MultiMap α β :=
  map2.data.fold (fun acc k v => acc.insertMany k v.val v.property) map1

/--
Converts the multimap to an array of key-value pairs (flattened).
-/
-- Each key is paired with every one of its values.
def toArray (map : MultiMap α β) : Array (α × β) :=
  map.data.toArray.flatMap (fun (k, vs) => vs.val.map (k, ·))

/--
Converts the multimap to a list of key-value pairs (flattened).
-/
def toList (map : MultiMap α β) : List (α × β) :=
  map.toArray.toList
-- Standard collection instances, delegating to the definitions above.
instance : EmptyCollection (MultiMap α β) :=
  ⟨MultiMap.empty⟩

instance : Singleton (α × β) (MultiMap α β) :=
  ⟨fun ⟨a, b⟩ => (∅ : MultiMap α β).insert a b⟩

instance : Insert (α × β) (MultiMap α β) :=
  ⟨fun ⟨a, b⟩ m => m.insert a b⟩

instance : Union (MultiMap α β) :=
  ⟨merge⟩
end MultiMap
end Std

View File

@@ -0,0 +1,683 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Time
public import Std.Internal.Http.Data
public import Std.Internal.Http.Internal
public import Std.Internal.Http.Protocol.H1.Parser
public import Std.Internal.Http.Protocol.H1.Config
public import Std.Internal.Http.Protocol.H1.Message
public import Std.Internal.Http.Protocol.H1.Reader
public import Std.Internal.Http.Protocol.H1.Writer
public import Std.Internal.Http.Protocol.H1.Event
public section
/-!
# HTTP/1.1 Protocol State Machine
This module implements the core HTTP/1.1 protocol state machine that handles
parsing requests/responses and generating output. The machine is direction-aware,
supporting both server mode (receiving requests) and client mode (receiving responses).
-/
namespace Std.Http.Protocol.H1
set_option linter.all true
open Std Internal Parsec ByteArray
open Internal
/--
Results from a single step of the state machine.
-/
structure StepResult (dir : Direction) where
  /--
  Events that occurred during this step (e.g., headers received, data available, errors).
  -/
  events : Array (Event dir) := #[]
  /--
  Output data ready to be sent to the socket.
  -/
  output : ChunkedBuffer := .empty
/--
The HTTP 1.1 protocol state machine.
-/
structure Machine (dir : Direction) where
  /--
  The state of the reader.
  -/
  reader : Reader dir := {}
  /--
  The state of the writer.
  -/
  writer : Writer dir := {}
  /--
  The configuration.
  -/
  config : Config
  /--
  Events that happened during reading and writing.
  -/
  events : Array (Event dir) := #[]
  /--
  Error thrown by the machine.
  -/
  error : Option Error := none
  /--
  The timestamp for the `Date` header.
  -/
  instant : Option (Std.Time.DateTime .UTC) := none
  /--
  If the connection will be kept alive after the message.
  -/
  -- Default comes from the earlier `config` field.
  keepAlive : Bool := config.enableKeepAlive
  /--
  Whether a forced flush has been requested by the user.
  -/
  forcedFlush : Bool := false
  /--
  Host header.
  -/
  host : Option Header.Value := none
namespace Machine
-- Apply a function to the writer component of the machine.
@[inline]
private def modifyWriter (machine : Machine dir) (fn : Writer dir → Writer dir) : Machine dir :=
  { machine with writer := fn machine.writer }

-- Apply a function to the reader component of the machine.
@[inline]
private def modifyReader (machine : Machine dir) (fn : Reader dir → Reader dir) : Machine dir :=
  { machine with reader := fn machine.reader }
-- Replace the reader's state field.
@[inline]
private def setReaderState (machine : Machine dir) (state : Reader.State dir) : Machine dir :=
  machine.modifyReader ({ · with state })

-- Replace the writer's state field.
@[inline]
private def setWriterState (machine : Machine dir) (state : Writer.State) : Machine dir :=
  machine.modifyWriter ({ · with state })

-- Append an event to the machine's pending event queue.
@[inline]
private def addEvent (machine : Machine dir) (event : Event dir) : Machine dir :=
  { machine with events := machine.events.push event }

-- Append an optional event; `none` leaves the machine unchanged.
@[inline]
private def setEvent (machine : Machine dir) (event : Option (Event dir)) : Machine dir :=
  match event with
  | some event => machine.addEvent event
  | none => machine

-- Record an error on the machine.
@[inline]
private def setError (machine : Machine dir) (error : Error) : Machine dir :=
  { machine with error := some error }

-- Force the connection to close after the current message.
@[inline]
private def disableKeepAlive (machine : Machine dir) : Machine dir :=
  { machine with keepAlive := false }

-- Emit a failure event, move the reader to the failed state, and record the error.
@[inline]
private def setFailure (machine : Machine dir) (error : H1.Error) : Machine dir :=
  machine
  |>.addEvent (.failed error)
  |>.setReaderState (.failed error)
  |>.setError error

-- Keep-alive can only ever be narrowed: once `false` it stays `false`.
@[inline]
private def updateKeepAlive (machine : Machine dir) (should : Bool) : Machine dir :=
  { machine with keepAlive := machine.keepAlive && should }
-- Validate the message head and compute the expected body length.
-- For received requests, exactly one `Host` header is required (RFC 9112 §3.2),
-- and HEAD/CONNECT requests carry no body.
private def checkMessageHead (message : Message.Head dir) : Option Body.Length := do
  match dir with
  | .receiving => do
    let headers ← message.headers.getAll? Header.Name.host
    guard (headers.size = 1)
  | .sending => pure ()
  if let .receiving := dir then
    if message.method == .head ∨ message.method == .connect then
      return .fixed 0
  message.getSize true

-- `true` when the message asks for a `100 Continue` interim response.
@[inline]
private def hasExpectContinue (message : Message.Head dir) : Bool :=
  message.headers.hasEntry (.mk "expect") (Header.Value.ofString! "100-continue")
-- State Checks

/--
Returns `true` if the reader is in a failed state.
-/
@[inline]
def failed (machine : Machine dir) : Bool :=
  match machine.reader.state with
  | .failed _ => true
  | _ => false

/--
Returns `true` if the reader has completed successfully.
-/
@[inline]
def isReaderComplete (machine : Machine dir) : Bool :=
  match machine.reader.state with
  | .complete => true
  | _ => false

/--
Returns `true` if the reader is closed.
-/
@[inline]
def isReaderClosed (machine : Machine dir) : Bool :=
  match machine.reader.state with
  | .closed => true
  | _ => false

/--
Returns `true` if the machine should flush buffered output.
-/
-- Any of the following justifies flushing: a failure, a closed reader,
-- an explicit flush request, or a known body size.
@[inline]
def shouldFlush (machine : Machine dir) : Bool :=
  machine.failed
  ∨ machine.reader.state == .closed
  ∨ machine.writer.isReadyToSend
  ∨ machine.writer.knownSize.isSome

/--
Returns `true` if the writer is waiting for headers of a new message.
-/
@[inline]
def isWaitingMessage (machine : Machine dir) : Bool :=
  machine.writer.state == .waitingHeaders
  ∧ ¬machine.writer.sentMessage

/--
Returns `true` if both reader and writer are closed and no output remains.
-/
@[inline]
def halted (machine : Machine dir) : Bool :=
  match machine.reader.state, machine.writer.state with
  | .closed, .closed => machine.writer.outputData.isEmpty
  | _, _ => false
-- Run `parser` on the machine's buffered input.
-- On success the consumed input is committed. On EOF we either fail
-- (connection closed, or the `limit` on consumed bytes was reached) or
-- request more data via `needMoreData`. Any other parse error is a bad message.
private def parseWith (machine : Machine dir) (parser : Parser α) (limit : Option Nat)
    (expect : Option Nat := none) : Machine dir × Option α :=
  let remaining := machine.reader.input.remainingBytes
  match parser machine.reader.input with
  | .success buffer result =>
    ({ machine with reader := machine.reader.setInput buffer }, some result)
  | .error it .eof =>
    let usedBytesUntilFailure := remaining - it.remainingBytes
    if machine.reader.noMoreInput then
      (machine.setFailure .connectionClosed, none)
    else if let some limit := limit then
      if usedBytesUntilFailure ≥ limit
      then (machine.setFailure .badMessage, none)
      else (machine.addEvent (.needMoreData expect), none)
    else
      (machine.addEvent (.needMoreData expect), none)
  | .error _ _ =>
    (machine.setFailure .badMessage, none)
-- Message Processing

-- Prepare the machine for the next message on a kept-alive connection:
-- the reader/writer are re-initialized (preserving unconsumed input and
-- pending output) and the message counter is bumped. If keep-alive is off,
-- both sides are closed instead and a `close` event is emitted.
private def resetForNextMessage (machine : Machine dir) : Machine dir :=
  if machine.keepAlive then
    { machine with
      reader := {
        state := match dir with | .receiving => .needStartLine | .sending => .pending,
        input := machine.reader.input,
        messageHead := {},
        messageCount := machine.reader.messageCount + 1
      },
      writer := {
        userData := .empty,
        outputData := machine.writer.outputData,
        state := match dir with | .receiving => .pending | .sending => .waitingHeaders,
        knownSize := none,
        messageHead := {},
        userClosedBody := false,
        sentMessage := false
      },
      events := machine.events.push .next,
      error := none
    }
  else
    machine.addEvent .close
    |>.setWriterState .closed
    |>.setReaderState .closed
/-
This function processes the message we are receiving
-/
-- Called once all headers have been parsed: validates the head, decides the
-- body-reading state (fixed vs chunked), handles `Expect: 100-continue`, and
-- (in server mode) asks the user for an answer.
private def processHeaders (machine : Machine dir) : Machine dir :=
  let machine := machine.updateKeepAlive (machine.reader.messageCount + 1 < machine.config.maxMessages)
  let shouldKeepAlive : Bool := machine.reader.messageHead.shouldKeepAlive
  let machine := updateKeepAlive machine shouldKeepAlive
  match checkMessageHead machine.reader.messageHead with
  | none => machine.setFailure .badMessage
  | some size =>
    let state : Reader.State dir := match size with
      | .fixed n => Reader.State.needFixedBody n
      | .chunked => Reader.State.needChunkedSize
    let machine := machine.addEvent (.endHeaders machine.reader.messageHead)
    let waitingContinue : Bool :=
      match dir with
      | .receiving => hasExpectContinue machine.reader.messageHead
      | .sending => false
    -- Wrap the body state in `continue` so reading pauses until the user allows it.
    let nextState : Reader.State dir := if waitingContinue then Reader.State.«continue» state else state
    let machine := if waitingContinue then machine.addEvent .continue else machine
    match dir, nextState, machine with
    | .receiving,nextState, machine => machine.setReaderState nextState |>.setWriterState .waitingHeaders |>.addEvent .needAnswer
    | .sending, nextState, machine => machine.setReaderState nextState
/--
This processes the message we are sending.

Normalizes the outgoing header set (Host, Server/User-Agent identity,
`Connection: close`, and Content-Length/Transfer-Encoding) and encodes the
message head into the writer's output buffer, moving the writer to
`writingBody`.
-/
def setHeaders (messageHead : Message.Head dir.swap) (machine : Machine dir) : Machine dir :=
  let machine := machine.updateKeepAlive (machine.reader.messageCount + 1 < machine.config.maxMessages)
  let shouldKeepAlive := messageHead.shouldKeepAlive
  let machine := machine.updateKeepAlive shouldKeepAlive
  let size := Writer.determineTransferMode machine.writer
  -- Add a Host header from the machine if the message doesn't carry one.
  let headers :=
    if messageHead.headers.contains Header.Name.host then
      messageHead.headers
    else if let some host := machine.host then
      messageHead.headers.insert Header.Name.host host
    else
      messageHead.headers
  -- Add identity header based on direction
  let headers :=
    let identityOpt := machine.config.identityHeader
    match dir, identityOpt with
    | .receiving, some server => headers.insert Header.Name.server server
    | .sending, some userAgent => headers.insert Header.Name.userAgent userAgent
    | _, none => headers
  -- Add Connection: close if needed
  let headers :=
    if !machine.keepAlive && !headers.hasEntry Header.Name.connection Header.Value.close then
      headers.insert Header.Name.connection Header.Value.close
    else
      headers
  -- Add Content-Length or Transfer-Encoding if needed
  let headers :=
    if !(headers.contains Header.Name.contentLength || headers.contains Header.Name.transferEncoding) then
      match size with
      | .fixed n => headers.insert Header.Name.contentLength (.ofString! <| toString n)
      | .chunked => headers.insert Header.Name.transferEncoding Header.Value.chunked
    else
      headers
  let state := Writer.State.writingBody size
  machine.modifyWriter (fun writer => {
    writer with
    outputData :=
      -- Matching on `dir` lets `Message.Head dir.swap` reduce to the concrete head type.
      match dir, messageHead with
      | .receiving, messageHead => Encode.encode (v := .v11) writer.outputData { messageHead with headers }
      | .sending, messageHead => Encode.encode (v := .v11) writer.outputData { messageHead with headers },
    state
  })
/--Put some data inside the input of the machine. -/
-- Input arriving after the reader is closed is dropped.
@[inline]
def feed (machine : Machine dir) (data : ByteArray) : Machine dir :=
  if machine.isReaderClosed then
    machine
  else
    { machine with reader := machine.reader.feed data }

/--Signal that reader is not going to receive any more messages. -/
@[inline]
def closeReader (machine : Machine dir) : Machine dir :=
  machine.modifyReader ({ · with noMoreInput := true })

/--Signal that the writer cannot send more messages because the socket closed. -/
@[inline]
def closeWriter (machine : Machine dir) : Machine dir :=
  machine.modifyWriter ({ · with state := .closed, userClosedBody := true })

/--Signal that the user is not sending data anymore. -/
@[inline]
def userClosedBody (machine : Machine dir) : Machine dir :=
  machine.modifyWriter ({ · with userClosedBody := true })

/--Signal that the socket is not sending data anymore. -/
-- NOTE(review): identical to `closeReader`; kept as a separate entry point.
@[inline]
def noMoreInput (machine : Machine dir) : Machine dir :=
  machine.modifyReader ({ · with noMoreInput := true })

/--Set a known size for the message body. -/
-- A previously known size wins; this only fills in a missing one.
@[inline]
def setKnownSize (machine : Machine dir) (size : Body.Length) : Machine dir :=
  machine.modifyWriter (fun w => { w with knownSize := w.knownSize.or (some size) })

-- `true` when the writer has been closed.
@[inline]
private def isWriterClosed (machine : Machine dir) : Bool :=
  match machine.writer.state with
  | .closed => true
  | _ => false
/--Send the head of a message to the machine. -/
-- Ignored unless the writer is actually waiting for a message head.
-- Derives the body size from the head when none was set explicitly.
@[inline]
def send (machine : Machine dir) (message : Message.Head dir.swap) : Machine dir :=
  if machine.isWaitingMessage then
    let machine := machine.modifyWriter ({ · with messageHead := message, sentMessage := true })
    let machine :=
      if machine.writer.knownSize.isNone then
        match message.getSize false with
        | some size => machine.setKnownSize size
        | none => machine
      else
        machine
    match dir, machine with
    | .sending, machine => machine.setWriterState .waitingForFlush |>.setReaderState .needStartLine
    | .receiving, machine => machine.setWriterState .waitingForFlush
  else
    machine

/--
Allow body processing to continue after receiving `Expect: 100-continue`.
-/
-- With a `continue` status, an interim `100 Continue` is written and body
-- reading resumes; any other status becomes the final response and the
-- connection is wound down.
def canContinue (machine : Machine dir) (status : Status) : Machine dir :=
  match dir, machine.reader.state with
  | .sending, _ => machine
  | .receiving, Reader.State.«continue» nextState =>
    if status == .«continue» then
      let machine := machine.modifyWriter (fun writer => {
        writer with outputData := Encode.encode (v := .v11) writer.outputData ({ status := .«continue» } : Response.Head)
      })
      machine.setReaderState nextState
    else
      machine.send { status }
      |>.userClosedBody
      |>.disableKeepAlive
      |>.closeReader
      |>.setReaderState .closed
  | .receiving, _ => machine

/--Send data to the socket. -/
@[inline]
def sendData (machine : Machine dir) (data : Array Chunk) : Machine dir :=
  if data.isEmpty then
    machine
  else
    machine.modifyWriter (fun writer => { writer with userData := writer.userData ++ data })

/--Get all the events of the machine. -/
@[inline]
def takeEvents (machine : Machine dir) : Machine dir × Array (Event dir) :=
  ({ machine with events := #[] }, machine.events)

/--Take all the accumulated output to send to the socket. -/
@[inline]
def takeOutput (machine : Machine dir) : Machine dir × ChunkedBuffer :=
  let output := machine.writer.outputData
  ({ machine with writer := { machine.writer with outputData := .empty } }, output)
/--Process the writer part of the machine. -/
-- Drives the writer state machine until no further progress can be made
-- without new user data or a state change from the reader.
partial def processWrite (machine : Machine dir) : Machine dir :=
  match machine.writer.state with
  | .pending =>
    if machine.reader.isClosed then
      machine.closeWriter
    else
      machine
  | .waitingHeaders =>
    machine.addEvent .needAnswer
  | .waitingForFlush =>
    if machine.shouldFlush then
      machine.setHeaders machine.writer.messageHead
      |> processWrite
    else
      machine
  | .writingHeaders =>
    machine.setWriterState (.writingBody (Writer.determineTransferMode machine.writer))
    |> processWrite
  | .writingBody (.fixed n) =>
    -- NOTE(review): the '∨' connectives in this branch and the chunked branch
    -- were restored from a garbled source — confirm against upstream.
    if machine.writer.userData.size > 0 ∨ machine.writer.isReadyToSend then
      let (writer, remaining) := Writer.writeFixedBody machine.writer n
      let machine := { machine with writer }
      if remaining = 0 then
        machine.setWriterState .complete |> processWrite
        |>.addEvent .closeBody
      else
        machine.setWriterState (.writingBody (.fixed remaining))
    else
      machine
  | .writingBody .chunked =>
    if machine.writer.userClosedBody then
      machine.modifyWriter Writer.writeFinalChunk
      |>.setWriterState .complete
      |> processWrite
    else if machine.writer.userData.size > 0 ∨ machine.writer.isReadyToSend then
      machine.modifyWriter Writer.writeChunkedBody
      |> processWrite
    else
      machine
  | .shuttingDown =>
    if machine.writer.outputData.isEmpty then
      machine.setWriterState .complete |> processWrite
    else
      machine
  | .complete =>
    -- The connection is recycled only when the reader also finished and
    -- keep-alive is still on; otherwise the writer closes.
    if machine.isReaderComplete then
      if machine.keepAlive then
        resetForNextMessage machine
      else
        machine.setWriterState .closed
        |>.addEvent .close
    else if machine.isReaderClosed then
      machine.setWriterState .closed
      |>.addEvent .close
    else
      if machine.keepAlive then
        machine
      else
        machine.setWriterState .closed
  | .closed =>
    machine
/--Handle the failed state for the reader. -/
-- In server mode, if nothing has been answered yet and the writer is still
-- open, a `400 Bad Request` is queued before closing the reader.
private def handleReaderFailed (machine : Machine dir) (error : H1.Error) : Machine dir :=
  let machine : Machine dir :=
    match dir with
    | .receiving =>
      if ¬machine.writer.sentMessage ∧ ¬machine.writer.isClosed then
        machine
        |>.setWriterState .waitingHeaders
        |>.disableKeepAlive
        |>.send { status := .badRequest } |>.userClosedBody
      else
        machine
    | .sending => machine
  machine
  |>.setReaderState .closed
  |>.addEvent (.failed error)
  |>.setError error
/--Process the reader part of the machine. -/
-- Drives the reader state machine: start line, headers, then a fixed-size or
-- chunked body, emitting `gotData`/`endHeaders`/`needMoreData` events as it goes.
partial def processRead (machine : Machine dir) : Machine dir :=
  match machine.reader.state with
  | .pending =>
    if machine.isWriterClosed then
      machine.setReaderState .closed
    else
      machine
  | .needStartLine =>
    if machine.reader.noMoreInput ∧ machine.reader.input.atEnd then
      machine.setReaderState .closed
    else if machine.reader.input.atEnd then
      machine.addEvent (.needMoreData none)
    else
      let (machine, result) : Machine dir × Option (Message.Head dir) :=
        match dir with
        | .receiving => parseWith machine (parseRequestLine machine.config) (limit := some 8192)
        | .sending => parseWith machine (parseStatusLine machine.config) (limit := some 8192)
      if let some head := result then
        if head.version != .v11 then
          machine.setFailure .unsupportedVersion
        else
          machine
          |>.modifyReader (.setMessageHead head)
          |>.setReaderState (.needHeader 0)
          |> processRead
      else
        machine
  | .needHeader headerCount =>
    let (machine, result) := parseWith machine
      (parseSingleHeader machine.config) (limit := none)
    if headerCount > machine.config.maxHeaders then
      machine |>.setFailure .badMessage
    else
      if let some result := result then
        -- `none` from the parser marks the blank line ending the header block.
        if let some (name, value) := result then
          if let some (name, headerValue) := Prod.mk <$> Header.Name.ofString? name <*> Header.Value.ofString? value then
            machine
            |>.modifyReader (.addHeader name headerValue)
            |>.setReaderState (.needHeader (headerCount + 1))
            |> processRead
          else
            machine.setFailure .badMessage
        else
          processHeaders machine
          |> processRead
      else
        machine
  | .needChunkedSize =>
    let (machine, result) := parseWith machine (parseChunkSize machine.config) (limit := some 128)
    match result with
    | some (size, ext) =>
      machine
      |>.setReaderState (.needChunkedBody ext size)
      |> processRead
    | none =>
      machine
  | .needChunkedBody ext 0 =>
    -- A zero-size chunk is the last one; consume its trailing CRLF.
    let (machine, result) := parseWith machine (parseLastChunkBody machine.config) (limit := some 2)
    match result with
    | some _ =>
      machine
      |>.setReaderState .complete
      |>.addEvent (.gotData true ext .empty)
      |> processRead
    | none =>
      machine
  | .needChunkedBody ext size =>
    let (machine, result) := parseWith machine
      (parseChunkedSizedData size) (limit := none) (some size)
    if let some body := result then
      match body with
      | .complete body =>
        machine
        |>.setReaderState .needChunkedSize
        |>.addEvent (.gotData false ext body)
        |> processRead
      | .incomplete body remaining =>
        machine
        |>.setReaderState (.needChunkedBody ext remaining)
        |>.addEvent (.gotData false ext body)
    else
      machine
  | .needFixedBody 0 =>
    machine
    |>.setReaderState .complete
    |>.addEvent (.gotData true #[] .empty)
    |> processRead
  | .needFixedBody size =>
    let (machine, result) := parseWith machine (parseFixedSizeData size) (limit := none) (some size)
    if let some body := result then
      match body with
      | .complete body =>
        machine
        |>.setReaderState .complete
        |>.addEvent (.gotData true #[] body)
        |> processRead
      | .incomplete body remaining =>
        machine
        |>.setReaderState (.needFixedBody remaining)
        |>.addEvent (.gotData false #[] body)
    else
      machine
  | Reader.State.«continue» _ =>
    -- Paused until the user calls `canContinue`.
    machine
  | .complete =>
    if (machine.reader.noMoreInput ∧ machine.reader.input.atEnd) ∨ ¬machine.keepAlive then
      machine.setReaderState .closed
    else
      machine
  | .closed =>
    machine
  | .failed error =>
    handleReaderFailed machine error
/--
Execute one step of the state machine.
-/
-- Runs the reader then the writer, then drains accumulated events and output.
def step (machine : Machine dir) : Machine dir × StepResult dir :=
  let machine := machine.processRead.processWrite
  let (machine, events) := machine.takeEvents
  let (machine, output) := machine.takeOutput
  (machine, { events, output })
end Std.Http.Protocol.H1.Machine

View File

@@ -0,0 +1,97 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Data
public import Std.Internal.Http.Internal
public section
/-!
# HTTP/1.1 Configuration
This module defines the configuration options for HTTP/1.1 protocol processing,
including connection limits, header constraints, and various size limits.
-/
namespace Std.Http.Protocol.H1
set_option linter.all true
open Std Internal Parsec ByteArray
open Internal
/--
Connection limits configuration with validation.
-/
-- All length limits below are in bytes.
structure Config where
  /--
  Maximum number of messages per connection.
  -/
  maxMessages : Nat := 100
  /--
  Maximum number of headers allowed per message.
  -/
  maxHeaders : Nat := 100
  /--
  Whether to enable keep-alive connections by default.
  -/
  enableKeepAlive : Bool := true
  /--
  The server name (for sending responses) or user agent (for sending requests)
  -/
  identityHeader : Option Header.Value := some (.mk "LeanServer")
  /--
  Maximum length of HTTP method token (default: 16 bytes)
  -/
  maxMethodLength : Nat := 16
  /--
  Maximum length of request URI (default: 8192 bytes)
  -/
  maxUriLength : Nat := 8192
  /--
  Maximum length of header field name (default: 256 bytes)
  -/
  maxHeaderNameLength : Nat := 256
  /--
  Maximum length of header field value (default: 8192 bytes)
  -/
  maxHeaderValueLength : Nat := 8192
  /--
  Maximum number of spaces in delimiter sequences (default: 256)
  -/
  maxSpaceSequence : Nat := 256
  /--
  Maximum length of chunk extension name (default: 256 bytes)
  -/
  maxChunkExtNameLength : Nat := 256
  /--
  Maximum length of chunk extension value (default: 256 bytes)
  -/
  maxChunkExtValueLength : Nat := 256
  /--
  Maximum length of reason phrase (default: 512 bytes)
  -/
  maxReasonPhraseLength : Nat := 512
  /--
  Maximum number of trailer headers (default: 100)
  -/
  maxTrailerHeaders : Nat := 100
end Std.Http.Protocol.H1

View File

@@ -0,0 +1,98 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Time
public import Std.Internal.Http.Data
public import Std.Internal.Http.Internal
public import Std.Internal.Http.Protocol.H1.Parser
public import Std.Internal.Http.Protocol.H1.Config
public import Std.Internal.Http.Protocol.H1.Message
public section
/-!
# HTTP/1.1 Errors
This module defines the error types for HTTP/1.1 protocol processing,
including parsing errors, timeout errors, and connection errors.
-/
namespace Std.Http.Protocol.H1
set_option linter.all true
/--
Specific HTTP processing errors with detailed information.
-/
inductive Error
  /--
  Malformed request line or status line.
  -/
  | invalidStatusLine
  /--
  Invalid or malformed header.
  -/
  | invalidHeader
  /--
  Request timeout occurred.
  -/
  | timeout
  /--
  Request entity too large.
  -/
  | entityTooLarge
  /--
  Unsupported HTTP method.
  -/
  | unsupportedMethod
  /--
  Unsupported HTTP version.
  -/
  | unsupportedVersion
  /--
  Invalid chunk encoding.
  -/
  | invalidChunk
  /--
  Connection Closed
  -/
  | connectionClosed
  /--
  Bad request/response
  -/
  | badMessage
  /--
  Generic error with message.
  -/
  | other (message : String)
  deriving Repr, BEq
/-- Human-readable rendering of each `Error` case. -/
instance : ToString Error where
  toString e :=
    match e with
    | .invalidStatusLine => "Invalid status line"
    | .invalidHeader => "Invalid header"
    | .timeout => "Timeout"
    | .entityTooLarge => "Entity too large"
    | .unsupportedMethod => "Unsupported method"
    | .unsupportedVersion => "Unsupported version"
    | .invalidChunk => "Invalid chunk"
    | .connectionClosed => "Connection closed"
    | .badMessage => "Bad message"
    | .other msg => s!"Other error: {msg}"
-- NOTE(review): orphan `Repr` instance for `ByteSlice`, rendered via the
-- underlying byte array; placed here so `deriving Repr` above can use it.
instance : Repr ByteSlice where
  reprPrec x := reprPrec x.toByteArray.data

View File

@@ -0,0 +1,83 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Time
public import Std.Internal.Http.Data
public import Std.Internal.Http.Internal
public import Std.Internal.Http.Protocol.H1.Parser
public import Std.Internal.Http.Protocol.H1.Config
public import Std.Internal.Http.Protocol.H1.Message
public import Std.Internal.Http.Protocol.H1.Error
public section
/-!
# HTTP/1.1 Events
This module defines the events that can occur during HTTP/1.1 message processing,
including header completion, data arrival, and error conditions.
-/
namespace Std.Http.Protocol.H1
set_option linter.all true
/--
Events emitted during HTTP message processing.
-/
inductive Event (dir : Direction)
  /--
  Indicates that all headers have been successfully parsed.
  -/
  | endHeaders (head : Message.Head dir)
  /--
  Carries a chunk of message body data.
  -/
  -- `ext` carries chunk extensions; empty for fixed-size bodies.
  | gotData (final : Bool) (ext : Array (ExtensionName × Option String)) (data : ByteSlice)
  /--
  Signals that additional input data is required to continue processing.
  -/
  | needMoreData (size : Option Nat)
  /--
  Indicates a failure during parsing or processing.
  -/
  | failed (err : Error)
  /--
  Requests that the connection be closed.
  -/
  | close
  /--
  The body should be closed.
  -/
  | closeBody
  /--
  Indicates that a response is required.
  -/
  | needAnswer
  /--
  Indicates that a message body is required.
  -/
  | needBody
  /--
  Indicates readiness to process the next message.
  -/
  | next
  /--
  Indicates that it needs a continue.
  -/
  | «continue»
  deriving Inhabited, Repr

View File

@@ -0,0 +1,115 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
import Init.Data.Array
public import Std.Internal.Http.Data
public section
/-!
# Message
This module provides types and operations for HTTP/1.1 messages, centered around the `Direction`
type which models the server's role in message exchange: `Direction.receiving` for parsing incoming
requests from clients, and `Direction.sending` for generating outgoing responses to clients.
The `Message.Head` type is parameterized by `Direction` and resolves to `Request.Head` or
`Response.Head` accordingly, enabling generic code that works uniformly across both phases
while exposing common operations such as headers, version, and `shouldKeepAlive`
-/
namespace Std.Http.Protocol.H1
set_option linter.all true
/--
Direction of message flow from the server's perspective.
-/
inductive Direction
  /--
  Receiving and parsing incoming requests from clients.
  -/
  | receiving
  /--
  Generating and sending outgoing responses to clients.
  -/
  | sending
  deriving BEq
/--
Inverts the direction of the requests.
-/
@[expose]
abbrev Direction.swap : Direction → Direction
  | .receiving => .sending
  | .sending => .receiving
/--
Gets the message head type based on direction.
-/
@[expose]
def Message.Head : Direction → Type
  | .receiving => Request.Head
  | .sending => Response.Head
/--
Gets the headers of a `Message`.
-/
-- Matching on `dir` reduces `Message.Head dir` to the concrete head type.
def Message.Head.headers (m : Message.Head dir) : Headers :=
  match dir with
  | .receiving => Request.Head.headers m
  | .sending => Response.Head.headers m

/--
Gets the version of a `Message`.
-/
def Message.Head.version (m : Message.Head dir) : Version :=
  match dir with
  | .receiving => Request.Head.version m
  | .sending => Response.Head.version m
-- `some b`: whether the message uses chunked transfer coding.
-- `none`: the Transfer-Encoding header was present but failed to parse.
private def isChunked (message : Message.Head dir) : Option Bool :=
  match message.headers.get? .transferEncoding with
  | none => some false
  | some v => Header.TransferEncoding.parse v |>.map (·.isChunked)
/--
Determines the message body size based on the `Content-Length` header and the `Transfer-Encoding` (chunked) flag.
-/
def Message.Head.getSize (message : Message.Head dir) (allowEOFBody : Bool) : Option Body.Length :=
  match (message.headers.getAll? .contentLength, isChunked message) with
  | (some #[cl], some false) => .fixed <$> cl.value.toNat?
  | (none, some false) => if allowEOFBody then some (.fixed 0) else none
  | (none, some true) => some .chunked
  | (some _, some _) => none -- To avoid request smuggling with multiple content-length headers.
  | (_, none) => none -- Error validating the chunked encoding
/--
Checks whether the message indicates that the connection should be kept alive.

Keep-alive holds when the message is HTTP/1.1 (persistent by default,
RFC 9112 §9.3) and no `Connection: close` header is present.
-/
@[inline]
def Message.Head.shouldKeepAlive (message : Message.Head dir) : Bool :=
  ¬message.headers.hasEntry .connection (.mk "close")
  ∧ message.version = .v11
-- The instances below dispatch on `dir` so `Message.Head dir` reduces to the
-- concrete request/response head type and reuses its instances.
instance : Repr (Message.Head dir) :=
  match dir with
  | .receiving => inferInstanceAs (Repr Request.Head)
  | .sending => inferInstanceAs (Repr Response.Head)

instance : Internal.Encode .v11 (Message.Head dir) :=
  match dir with
  | .receiving => inferInstanceAs (Internal.Encode .v11 Request.Head)
  | .sending => inferInstanceAs (Internal.Encode .v11 Response.Head)

instance : EmptyCollection (Message.Head dir) where
  emptyCollection :=
    match dir with
    | .receiving => {}
    | .sending => {}

View File

@@ -0,0 +1,328 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Parsec
public import Std.Internal.Http.Data
public import Std.Internal.Parsec.ByteArray
public import Std.Internal.Http.Protocol.H1.Config
/-!
This module defines a parser for HTTP/1.1 requests. The reference used is https://httpwg.org/specs/rfc9112.html.
-/
namespace Std.Http.Protocol.H1
open Std Internal Parsec ByteArray Internal
set_option linter.all true
/-- `true` for ASCII digits `0`-`9`. -/
@[inline]
def isDigit (c : UInt8) : Bool :=
  c ≥ '0'.toUInt8 ∧ c ≤ '9'.toUInt8

/-- `true` for ASCII letters `a`-`z` and `A`-`Z`. -/
@[inline]
def isAlpha (c : UInt8) : Bool :=
  (c ≥ 'a'.toUInt8 ∧ c ≤ 'z'.toUInt8) ∨ (c ≥ 'A'.toUInt8 ∧ c ≤ 'Z'.toUInt8)

/-- Visible (printing) ASCII characters: VCHAR = %x21-7E (RFC 5234). -/
@[inline]
def isVChar (c : UInt8) : Bool :=
  c ≥ 0x21 ∧ c ≤ 0x7E
/-- `tchar` from RFC 9110 §5.6.2: characters allowed in HTTP tokens. -/
def isTokenCharacter (c : UInt8) : Bool :=
  isDigit c ∨ isAlpha c ∨ c == '!'.toUInt8 ∨ c == '#'.toUInt8 ∨ c == '$'.toUInt8 ∨ c == '%'.toUInt8
  ∨ c == '&'.toUInt8 ∨ c == '\''.toUInt8 ∨ c == '*'.toUInt8 ∨ c == '+'.toUInt8 ∨ c == '-'.toUInt8
  ∨ c == '.'.toUInt8 ∨ c == '^'.toUInt8 ∨ c == '_'.toUInt8 ∨ c == '`'.toUInt8 ∨ c == '|'.toUInt8
  ∨ c == '~'.toUInt8

/-- `obs-text`: high-bit octets %x80-FF. -/
@[inline]
def isObsChar (c : UInt8) : Bool :=
  c ≥ 0x80 ∧ c ≤ 0xFF

/-- Characters permitted inside a header field value (VCHAR / obs-text / SP / HTAB). -/
@[inline]
def isFieldVChar (c : UInt8) : Bool :=
  isVChar c ∨ isObsChar c ∨ c = ' '.toUInt8 ∨ c = '\t'.toUInt8
-- HTAB / SP / %x21 / %x23-5B / %x5D-7E / obs-text
/-- `qdtext` from RFC 9110 §5.6.4: characters allowed inside a quoted string. -/
@[inline]
def isQdText (c : UInt8) : Bool :=
  c == '\t'.toUInt8
  ∨ c == ' '.toUInt8
  ∨ c == '!'.toUInt8
  ∨ (c ≥ '#'.toUInt8 ∧ c ≤ '['.toUInt8)
  ∨ (c ≥ ']'.toUInt8 ∧ c ≤ '~'.toUInt8)
  ∨ isObsChar c
-- Parser blocks

/--
Repeat `parser` until it yields `none`, collecting the `some` results.
Fails when more than `maxCount` items were produced.
-/
def manyItems {α : Type} (parser : Parser (Option α)) (maxCount : Nat) : Parser (Array α) := do
  let items ← many (attempt <| parser.bind (fun item => match item with
    | some x => return x
    | none => fail "end of items"))
  if items.size > maxCount then
    fail s!"Too many items: {items.size} > {maxCount}"
  return items

/-- Lift an `Option` into the parser, failing on `none`. -/
def opt (x : Option α) : Parser α :=
  if let some res := x then
    return res
  else
    fail "expected value but got none"
-- A run of 1..limit token characters.
@[inline]
def token (limit : Nat) : Parser ByteSlice :=
  takeWhileUpTo1 isTokenCharacter limit

-- Line terminator; the CR is optional, so a bare LF is also accepted.
@[inline]
def crlf : Parser Unit := do
  discard <| optional (skipByte '\r'.toUInt8)
  skipByte '\n'.toUInt8
/-- Required space run (at least one), bounded by `maxSpaceSequence`. -/
@[inline]
def rsp (limits : H1.Config) : Parser Unit := do
  discard <| takeWhileUpTo1 (· == ' '.toUInt8) limits.maxSpaceSequence
  -- A space still pending means the run exceeded the configured bound.
  if (← peekWhen? (· == ' '.toUInt8)) |>.isSome then
    fail "invalid space sequence"
  else
    pure ()

/-- Optional space run (possibly empty), bounded by `maxSpaceSequence`. -/
@[inline]
def osp (limits : H1.Config) : Parser Unit := do
  discard <| takeWhileUpTo (· == ' '.toUInt8) limits.maxSpaceSequence
  if (← peekWhen? (· == ' '.toUInt8)) |>.isSome then
    fail "invalid space sequence"
  else
    pure ()
-- A single ASCII digit, returned as its raw byte value (not the numeric value).
@[inline]
def uint8 : Parser UInt8 := do
  let d ← digit
  return d.toUInt8

/-- A single hex digit, returned as its numeric value 0-15. -/
def hexDigit : Parser UInt8 := do
  let b ← any
  if b ≥ '0'.toUInt8 && b ≤ '9'.toUInt8 then return b - '0'.toUInt8
  else if b ≥ 'A'.toUInt8 && b ≤ 'F'.toUInt8 then return b - 'A'.toUInt8 + 10
  else if b ≥ 'a'.toUInt8 && b ≤ 'f'.toUInt8 then return b - 'a'.toUInt8 + 10
  else fail s!"Invalid hex digit {Char.ofUInt8 b |>.quote}"

/-- One or more hex digits folded into a `Nat` (big-endian). -/
@[inline]
def hex : Parser Nat := do
  let hexDigits ← many1 (attempt hexDigit)
  return (hexDigits.foldl (fun acc cur => acc * 16 + cur.toNat) 0)
-- Actual parsers

-- HTTP-version = HTTP-name "/" DIGIT "." DIGIT
-- HTTP-name = %s"HTTP"
/-- Parses the `HTTP-version` production, e.g. `HTTP/1.1`. -/
def parseHttpVersion : Parser Version := do
  skipBytes "HTTP/".toUTF8
  let major ← uint8
  skipByte '.'.toUInt8
  let minor ← uint8
  -- `uint8` returns the raw ASCII byte, so subtract '0' (48) to get the digit.
  opt <| Version.ofNumber? (major - 48 |>.toNat) (minor - 48 |>.toNat)
-- method = token
/--
Parses a known HTTP method name.

NOTE(review): only `POST`/`PUT` are wrapped in `attempt` (they share the `P`
prefix); the other alternatives presumably rely on `skipBytes` failing without
consuming on a first-byte mismatch — verify against the parser primitives.
-/
def parseMethod : Parser Method :=
  (skipBytes "GET".toUTF8 <&> fun _ => Method.get)
  <|> (skipBytes "HEAD".toUTF8 <&> fun _ => Method.head)
  <|> (attempt <| skipBytes "POST".toUTF8 <&> fun _ => Method.post)
  <|> (attempt <| skipBytes "PUT".toUTF8 <&> fun _ => Method.put)
  <|> (skipBytes "DELETE".toUTF8 <&> fun _ => Method.delete)
  <|> (skipBytes "CONNECT".toUTF8 <&> fun _ => Method.connect)
  <|> (skipBytes "OPTIONS".toUTF8 <&> fun _ => Method.options)
  <|> (skipBytes "TRACE".toUTF8 <&> fun _ => Method.trace)
  <|> (skipBytes "PATCH".toUTF8 <&> fun _ => Method.patch)
/-- Parses the raw request-target bytes up to the next SP, bounded by `maxUriLength`. -/
def parseURI (limits : H1.Config) : Parser ByteArray := do
  let uri ← takeUntilUpTo (· == ' '.toUInt8) limits.maxUriLength
  return uri.toByteArray

/--
Parses a request line

request-line = method SP request-target SP HTTP-version
-/
public def parseRequestLine (limits : H1.Config) : Parser Request.Head := do
  let method ← parseMethod <* rsp limits
  let uri ← parseURI limits <* rsp limits
  -- Re-parse the raw target with the URI grammar; it must consume every byte.
  let uri ← match (Std.Http.URI.Parser.parseRequestTarget <* eof).run uri with
    | .ok res => pure res
    | .error res => fail res
  let version ← parseHttpVersion <* crlf
  return ⟨method, version, uri, .empty⟩
-- field-line = field-name ":" OWS field-value OWS
/-- Parses one header field line into its name/value strings (UTF-8 validated). -/
def parseFieldLine (limits : H1.Config) : Parser (String × String) := do
  let name ← token limits.maxHeaderNameLength
  let value ← skipByte ':'.toUInt8 *> osp limits *> optional (takeWhileUpTo isFieldVChar limits.maxHeaderValueLength) <* osp limits
  let name ← opt <| String.fromUTF8? name.toByteArray
  -- An absent field value parses as the empty string.
  let value ← opt <| String.fromUTF8? <| value.map (·.toByteArray) |>.getD .empty
  return (name, value)

/--
Parses a single header.

field-line CRLF / CRLF
-/
public def parseSingleHeader (limits : H1.Config) : Parser (Option (String × String)) := do
  let next ← peek?
  -- A bare line terminator marks the end of the header section.
  if next == some '\r'.toUInt8 ∨ next == some '\n'.toUInt8 then
    crlf
    pure none
  else
    some <$> (parseFieldLine limits <* crlf)
-- quoted-pair = "\" ( HTAB / SP / VCHAR / obs-text )
/-- Parses a backslash escape inside a quoted-string, returning the escaped byte. -/
def parseQuotedPair : Parser UInt8 := do
  skipByte '\\'.toUInt8
  let b ← any
  if b == '\t'.toUInt8 ∨ b == ' '.toUInt8 ∨ isVChar b ∨ isObsChar b then
    return b
  else
    fail s!"invalid quoted-pair byte: {Char.ofUInt8 b |>.quote}"

-- quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE
/-- Parses a quoted-string, unescaping quoted-pairs, and validates the result as UTF-8. -/
partial def parseQuotedString : Parser String := do
  skipByte '"'.toUInt8
  let rec loop (buf : ByteArray) : Parser ByteArray := do
    let b ← any
    if b == '"'.toUInt8 then
      return buf
    else if b == '\\'.toUInt8 then
      -- Inlined quoted-pair handling: the escaped byte is kept verbatim.
      let next ← any
      if next == '\t'.toUInt8 ∨ next == ' '.toUInt8 ∨ isVChar next ∨ isObsChar next
        then loop (buf.push next)
        else fail s!"invalid quoted-pair byte: {Char.ofUInt8 next |>.quote}"
    else if isQdText b then
      loop (buf.push b)
    else
      fail s!"invalid qdtext byte: {Char.ofUInt8 b |>.quote}"
  opt <| String.fromUTF8? (← loop .empty)
-- chunk-ext = *( BWS ";" BWS chunk-ext-name [ BWS "=" BWS chunk-ext-val] )
/-- Parses one chunk extension: `;name`, optionally followed by `=value`. -/
def parseChunkExt (limits : H1.Config) : Parser (ExtensionName × Option String) := do
  osp limits *> skipByte ';'.toUInt8 *> osp limits
  let name ← (opt =<< String.fromUTF8? <$> ByteSlice.toByteArray <$> token limits.maxChunkExtNameLength) <* osp limits
  let some name := ExtensionName.ofString? name
    | fail "invalid extension name"
  -- Only descend into the value branch when an '=' is pending.
  if (← peekWhen? (· == '='.toUInt8)) |>.isSome then
    osp limits *> skipByte '='.toUInt8 *> osp limits
    let value ← osp limits *> (parseQuotedString <|> opt =<< (String.fromUTF8? <$> ByteSlice.toByteArray <$> token limits.maxChunkExtValueLength))
    return (name, some value)
  return (name, none)

/--
This function parses the size and extension of a chunk
-/
public def parseChunkSize (limits : H1.Config) : Parser (Nat × Array (ExtensionName × Option String)) := do
  let size ← hex
  let ext ← many (parseChunkExt limits)
  crlf
  return (size, ext)
/--
Result of parsing partial or complete information.
-/
public inductive TakeResult
  /-- All requested bytes were available. -/
  | complete (data : ByteSlice)
  /-- Only `data` was available; `remaining` more bytes are still owed. -/
  | incomplete (data : ByteSlice) (remaining : Nat)
/--
This function parses a single chunk in chunked transfer encoding
-/
public def parseChunk (limits : H1.Config) : Parser (Option (Nat × Array (ExtensionName × Option String) × ByteSlice)) := do
  let (size, ext) ← parseChunkSize limits
  -- A zero-sized chunk is the terminator of a chunked body.
  if size == 0 then
    return none
  else
    let data ← take size
    return some ⟨size, ext, data⟩
/--
Parses a fixed size data that can be incomplete.
-/
public def parseFixedSizeData (size : Nat) : Parser TakeResult := fun it =>
  -- Runs directly on the iterator so the data is sliced, not copied.
  if it.remainingBytes = 0 then
    .error it .eof
  else if it.remainingBytes < size then
    -- Not enough input: hand back what is available and report the shortfall.
    .success (it.forward it.remainingBytes) (.incomplete it.array[it.idx...(it.idx+it.remainingBytes)] (size - it.remainingBytes))
  else
    .success (it.forward size) (.complete (it.array[it.idx...(it.idx+size)]))
/--
Parses a fixed size data that can be incomplete.
-/
public def parseChunkedSizedData (size : Nat) : Parser TakeResult := do
  -- `←` restored: match on the result of the monadic parse action.
  match ← parseFixedSizeData size with
  -- A complete chunk body must be followed by its CRLF terminator.
  | .complete data => crlf *> return .complete data
  | .incomplete data res => return .incomplete data res
/--
This function parses a trailer header (used after chunked body)
-/
def parseTrailerHeader (limits : H1.Config) : Parser (Option (String × String)) := parseSingleHeader limits

/--
This function parses trailer headers after chunked body
-/
public def parseTrailers (limits : H1.Config) : Parser (Array (String × String)) := do
  let trailers ← manyItems (parseTrailerHeader limits) limits.maxTrailerHeaders
  crlf
  return trailers
/--
Parses HTTP status code (3 digits)
-/
def parseStatusCode : Parser Status := do
  let d1 ← digit
  let d2 ← digit
  let d3 ← digit
  -- Digits arrive as raw ASCII bytes, hence the subtraction of 48 ('0').
  let code := (d1.toNat - 48) * 100 + (d2.toNat - 48) * 10 + (d3.toNat - 48)
  return Status.ofCode code.toUInt16

/--
Parses reason phrase (text after status code)
-/
def parseReasonPhrase (limits : H1.Config) : Parser String := do
  let bytes ← takeWhileUpTo (fun c => c != '\r'.toUInt8) limits.maxReasonPhraseLength
  opt <| String.fromUTF8? bytes.toByteArray
/--
Parses a status line

status-line = HTTP-version SP status-code SP [ reason-phrase ]
-/
public def parseStatusLine (limits : H1.Config) : Parser Response.Head := do
  let version ← parseHttpVersion <* rsp limits
  let status ← parseStatusCode <* rsp limits
  -- The reason phrase is parsed for validation but deliberately discarded.
  discard <| parseReasonPhrase limits <* crlf
  return ⟨status, version, .empty⟩

/--
This function parses the body of the last chunk.
-/
public def parseLastChunkBody (limits : H1.Config) : Parser Unit := do
  discard <| manyItems (parseTrailerHeader limits) limits.maxTrailerHeaders
  crlf
end Std.Http.Protocol.H1

View File

@@ -0,0 +1,280 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Time
public import Std.Internal.Http.Data
public import Std.Internal.Http.Internal
public import Std.Internal.Http.Protocol.H1.Parser
public import Std.Internal.Http.Protocol.H1.Config
public import Std.Internal.Http.Protocol.H1.Message
public import Std.Internal.Http.Protocol.H1.Error
public section
/-!
# HTTP/1.1 Reader
This module defines the reader state machine for parsing incoming HTTP/1.1 messages.
It tracks the parsing state including start line, headers, and body handling for
both fixed-length and chunked transfer encodings.
-/
namespace Std.Http.Protocol.H1
set_option linter.all true
/--
The state of the `Reader` state machine.
-/
inductive Reader.State (dir : Direction) : Type
  /--
  Initial state waiting for HTTP start line.
  -/
  | needStartLine : State dir
  /--
  State waiting for HTTP headers, tracking number of headers parsed.
  -/
  | needHeader : Nat → State dir
  /--
  State waiting for chunk size in chunked transfer encoding.
  -/
  | needChunkedSize : State dir
  /--
  State waiting for chunk body data of specified size.
  -/
  | needChunkedBody : Array (ExtensionName × Option String) → Nat → State dir
  /--
  State waiting for fixed-length body data of specified size.
  -/
  | needFixedBody : Nat → State dir
  /--
  Paused waiting for a `canContinue` decision, carrying the next state.
  -/
  | continue : State dir → State dir
  /--
  State waiting to be able to read new data.
  -/
  | pending : State dir
  /--
  State that it completed a single request or response and can go to the next one.
  -/
  | complete
  /--
  State that it has completed and cannot process more data.
  -/
  | closed
  /--
  The input is malformed.
  -/
  | failed (error : Error) : State dir
  deriving Inhabited, Repr, BEq
/--
Manages the reading state of the HTTP parsing and processing machine.
-/
structure Reader (dir : Direction) where
  /--
  The current state of the machine.
  -/
  -- Receivers start by parsing a start line; senders wait until there is data.
  state : Reader.State dir := match dir with | .receiving => .needStartLine | .sending => .pending
  /--
  The input byte array.
  -/
  input : ByteArray.Iterator := ByteArray.emptyWithCapacity 4096 |>.iter
  /--
  The incoming message head.
  -/
  messageHead : Message.Head dir := {}
  /--
  Count of messages that this connection already parsed.
  -/
  messageCount : Nat := 0
  /--
  Flag that says that it cannot receive more input (the socket disconnected).
  -/
  noMoreInput : Bool := false
namespace Reader

/--
Checks if the reader is in a closed state and cannot process more messages.
-/
@[inline]
def isClosed (reader : Reader dir) : Bool :=
  match reader.state with
  | .closed => true
  | _ => false

/--
Checks if the reader has completed parsing the current message.
-/
@[inline]
def isComplete (reader : Reader dir) : Bool :=
  match reader.state with
  | .complete => true
  | _ => false

/--
Checks if the reader has encountered an error.
-/
@[inline]
def hasFailed (reader : Reader dir) : Bool :=
  match reader.state with
  | .failed _ => true
  | _ => false

/--
Feeds new data into the reader's input buffer.

If the current input is exhausted, replaces it; otherwise compacts the buffer
by discarding already-parsed bytes before appending.
-/
@[inline]
def feed (data : ByteArray) (reader : Reader dir) : Reader dir :=
  { reader with input :=
      if reader.input.atEnd
        then data.iter
        -- Compaction: keep only the unparsed suffix before appending new data.
        else (reader.input.array.extract reader.input.pos reader.input.array.size ++ data).iter }

/--
Replaces the reader's input iterator with a new one.
-/
@[inline]
def setInput (input : ByteArray.Iterator) (reader : Reader dir) : Reader dir :=
  { reader with input }

/--
Updates the message head being constructed.
-/
@[inline]
def setMessageHead (messageHead : Message.Head dir) (reader : Reader dir) : Reader dir :=
  { reader with messageHead }

/--
Adds a header to the current message head.
-/
@[inline]
def addHeader (name : Header.Name) (value : Header.Value) (reader : Reader dir) : Reader dir :=
  -- Both arms are textually identical; presumably the match is needed so that
  -- `Message.Head dir` reduces per direction — confirm.
  match dir with
  | .sending => { reader with messageHead := { reader.messageHead with headers := reader.messageHead.headers.insert name value } }
  | .receiving => { reader with messageHead := { reader.messageHead with headers := reader.messageHead.headers.insert name value } }

/--
Closes the reader, transitioning to the closed state.
-/
@[inline]
def close (reader : Reader dir) : Reader dir :=
  { reader with state := .closed, noMoreInput := true }

/--
Marks the current message as complete and prepares for the next message.
-/
@[inline]
def markComplete (reader : Reader dir) : Reader dir :=
  { reader with
      state := .complete
      messageCount := reader.messageCount + 1 }

/--
Transitions the reader to a failed state with the given error.
-/
@[inline]
def fail (error : Error) (reader : Reader dir) : Reader dir :=
  { reader with state := .failed error }

/--
Resets the reader to parse a new message on the same connection.
-/
@[inline]
def reset (reader : Reader dir) : Reader dir :=
  -- NOTE(review): resets to `.needStartLine` for both directions, although the
  -- initial state for `.sending` is `.pending` — confirm this is intended.
  { reader with
      state := .needStartLine
      messageHead := {} }

/--
Checks if more input is needed to continue parsing.
-/
@[inline]
def needsMoreInput (reader : Reader dir) : Bool :=
  reader.input.atEnd && !reader.noMoreInput &&
    match reader.state with
    | .complete | .closed | .failed _ | .«continue» _ => false
    | _ => true

/--
Returns the current parse error if the reader has failed.
-/
@[inline]
def getError (reader : Reader dir) : Option Error :=
  match reader.state with
  | .failed err => some err
  | _ => none

/--
Gets the number of bytes remaining in the input buffer.
-/
@[inline]
def remainingBytes (reader : Reader dir) : Nat :=
  reader.input.array.size - reader.input.pos

/--
Advances the input iterator by n bytes.
-/
@[inline]
def advance (n : Nat) (reader : Reader dir) : Reader dir :=
  { reader with input := reader.input.forward n }

/--
Transitions to the state for reading headers.
-/
@[inline]
def startHeaders (reader : Reader dir) : Reader dir :=
  { reader with state := .needHeader 0 }

/--
Transitions to the state for reading a fixed-length body.
-/
@[inline]
def startFixedBody (size : Nat) (reader : Reader dir) : Reader dir :=
  { reader with state := .needFixedBody size }

/--
Transitions to the state for reading chunked transfer encoding.
-/
@[inline]
def startChunkedBody (reader : Reader dir) : Reader dir :=
  { reader with state := .needChunkedSize }

/--
Marks that no more input will be provided (connection closed).
-/
@[inline]
def markNoMoreInput (reader : Reader dir) : Reader dir :=
  { reader with noMoreInput := true }

/--
Checks if the connection should be kept alive for the next message.
-/
def shouldKeepAlive (reader : Reader dir) : Bool :=
  match reader.messageHead.headers.get? .connection with
  -- Only an exact (case-normalized) "keep-alive" keeps the connection open;
  -- any other value — including list values like "keep-alive, upgrade" — disables it.
  | some val => let s := val.value.toLower; s == "keep-alive"
  -- An absent header defaults to persistent connections (HTTP/1.1 behavior).
  | none => true

end Reader

View File

@@ -0,0 +1,265 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Time
public import Std.Internal.Http.Data
public import Std.Internal.Http.Internal
public import Std.Internal.Http.Protocol.H1.Parser
public import Std.Internal.Http.Protocol.H1.Config
public import Std.Internal.Http.Protocol.H1.Message
public import Std.Internal.Http.Protocol.H1.Error
public section
/-!
# HTTP/1.1 Writer
This module defines the writer state machine for generating outgoing HTTP/1.1 messages.
It handles encoding headers and body content for both fixed-length and chunked
transfer encodings.
-/
namespace Std.Http.Protocol.H1
set_option linter.all true
open Internal
/--
The state of the `Writer` state machine.
-/
inductive Writer.State
  /--
  The writer starts writing only when part of the request has been received.
  -/
  | pending
  /--
  Ready to write the message.
  -/
  | waitingHeaders
  /--
  Waiting for a forced flush; once it happens the writer starts actually
  writing to `outputData`.
  -/
  | waitingForFlush
  /--
  Writing the headers.
  -/
  | writingHeaders
  /--
  Writing the body using the given transfer mode (fixed size or chunked).
  -/
  | writingBody (mode : Body.Length)
  /--
  Flushes all the remaining data and then shuts the machine down.
  -/
  | shuttingDown
  /--
  Completed a single request; the writer can go on to the next one.
  -/
  | complete
  /--
  Fully done; the writer cannot process more data.
  -/
  | closed
  deriving Inhabited, Repr, BEq
/--
Manages the writing state of the HTTP generating and writing machine.
-/
structure Writer (dir : Direction) where
  /--
  This is all the data that the user is sending that is being accumulated.
  -/
  userData : Array Chunk := .empty
  /--
  All the data that is produced by the writer.
  -/
  outputData : ChunkedBuffer := .empty
  /--
  The state of the writer machine.
  -/
  -- A receiving machine (server) waits for a request before writing;
  -- a sending machine (client) can write headers immediately.
  state : Writer.State := match dir with | .receiving => .pending | .sending => .waitingHeaders
  /--
  When the user specifies the exact size upfront, we can use Content-Length
  instead of chunked transfer encoding for streaming.
  -/
  knownSize : Option Body.Length := none
  /--
  The outgoing message that will be written to the output.
  -/
  -- The outgoing head goes the opposite direction, hence `dir.swap`.
  messageHead : Message.Head dir.swap := {}
  /--
  The user sent the message.
  -/
  sentMessage : Bool := false
  /--
  This flags that the body stream is closed so if we start to write the body we know exactly the size.
  -/
  userClosedBody : Bool := false
namespace Writer

/--
Checks if the writer is ready to send data to the output.
-/
@[inline]
def isReadyToSend {dir} (writer : Writer dir) : Bool :=
  match writer.state with
  | .closed | .complete => true
  -- While still writing, we are only "ready" once the user closed the body.
  | _ => writer.userClosedBody

/--
Checks if the writer is closed (cannot process more data).
-/
@[inline]
def isClosed (writer : Writer dir) : Bool :=
  match writer.state with
  | .closed => true
  | _ => false

/--
Checks if the writer has completed processing a request.
-/
@[inline]
def isComplete (writer : Writer dir) : Bool :=
  match writer.state with
  | .complete => true
  | _ => false

/--
Checks if the writer can accept more data from the user.
-/
@[inline]
def canAcceptData (writer : Writer dir) : Bool :=
  match writer.state with
  | .waitingHeaders => true
  | .waitingForFlush => true
  | .writingBody _ => !writer.userClosedBody
  | _ => false

/--
Marks the body as closed, indicating no more user data will be added.
-/
@[inline]
def closeBody (writer : Writer dir) : Writer dir :=
  { writer with userClosedBody := true }

/--
Determines the transfer encoding mode based on explicit setting, body closure
state, or defaults to chunked.
-/
def determineTransferMode (writer : Writer dir) : Body.Length :=
  if let some mode := writer.knownSize then
    mode
  else if writer.userClosedBody then
    -- Body fully buffered: its exact size is known, so use Content-Length.
    let size := writer.userData.foldl (fun x y => x + y.data.size) 0
    .fixed size
  else
    .chunked

/--
Adds user data chunks to the writer's buffer if the writer can accept data.
-/
@[inline]
def addUserData (data : Array Chunk) (writer : Writer dir) : Writer dir :=
  if writer.canAcceptData then
    { writer with userData := writer.userData ++ data }
  else
    -- Data arriving in a non-accepting state is silently dropped.
    writer

/--
Writes accumulated user data to output using fixed-size encoding.

Returns the writer together with the remaining byte budget out of `limitSize`.
-/
def writeFixedBody (writer : Writer dir) (limitSize : Nat) : Writer dir × Nat :=
  if writer.userData.size = 0 then
    (writer, limitSize)
  else
    let data := writer.userData.map Chunk.data
    let (chunks, totalSize) := data.foldl (fun (acc, size) ba =>
      if size >= limitSize then
        (acc, size)
      else
        let remaining := limitSize - size
        let takeSize := min ba.size remaining
        let chunk := ba.extract 0 takeSize
        (acc.push chunk, size + takeSize)
    ) (#[], 0)
    let outputData := writer.outputData.append (ChunkedBuffer.ofArray chunks)
    let remaining := limitSize - totalSize
    -- NOTE(review): bytes beyond `limitSize` are discarded together with
    -- `userData` here rather than retained for a later call — confirm callers
    -- always pass a budget that covers all pending data.
    ({ writer with userData := #[], outputData }, remaining)

/--
Writes accumulated user data to output using chunked transfer encoding.
-/
def writeChunkedBody (writer : Writer dir) : Writer dir :=
  if writer.userData.size = 0 then
    writer
  else
    let data := writer.userData
    { writer with userData := #[], outputData := data.foldl (Encode.encode .v11) writer.outputData }

/--
Writes the final chunk terminator (0\r\n\r\n) and transitions to complete state.
-/
def writeFinalChunk (writer : Writer dir) : Writer dir :=
  -- Flush any buffered chunks first so the terminator comes last.
  let writer := writer.writeChunkedBody
  { writer with
      outputData := writer.outputData.write "0\r\n\r\n".toUTF8
      state := .complete
  }

/--
Extracts all accumulated output data and returns it with a cleared output buffer.
-/
@[inline]
def takeOutput (writer : Writer dir) : Option (Writer dir × ByteArray) :=
  let output := writer.outputData.toByteArray
  some ({ writer with outputData := ChunkedBuffer.empty }, output)

/--
Updates the writer's state machine to a new state.
-/
@[inline]
def setState (state : Writer.State) (writer : Writer dir) : Writer dir :=
  { writer with state }

/--
Writes the message headers to the output buffer.
-/
private def writeHeaders (messageHead : Message.Head dir.swap) (writer : Writer dir) : Writer dir :=
  { writer with outputData := Internal.Encode.encode (v := .v11) writer.outputData messageHead }

/--
Checks if the connection should be kept alive based on the Connection header.
-/
def shouldKeepAlive (writer : Writer dir) : Bool :=
  -- An absent header defaults to keep-alive; only an explicit "close" disables it.
  writer.messageHead.headers.get? .connection
    |>.map (fun v => v.value.toLower != "close")
    |>.getD true

/--
Closes the writer, transitioning to the closed state.
-/
@[inline]
def close (writer : Writer dir) : Writer dir :=
  { writer with state := .closed }

end Writer

View File

@@ -0,0 +1,201 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Async
public import Std.Internal.Async.TCP
public import Std.Internal.Async.TCP.SSL
public import Std.Sync.CancellationToken
public import Std.Internal.Http.Server.Config
public import Std.Internal.Http.Server.Handler
public import Std.Internal.Http.Server.Connection
public section
/-!
# HTTP Server
This module defines a simple, asynchronous HTTP/1.1 server implementation.
It provides the `Std.Http.Server` structure, which encapsulates all server state, and functions for
starting, managing, and gracefully shutting down the server.
The server runs entirely using `Async` and uses a shared `CancellationContext` to signal shutdowns.
Each active client connection is tracked, and the server automatically resolves its shutdown
promise once all connections have closed.
-/
namespace Std.Http
open Std.Internal.IO.Async TCP
set_option linter.all true
/--
The `Server` structure holds all state required to manage the lifecycle of an HTTP server, including
connection tracking and shutdown coordination.
-/
structure Server where
  /--
  The context used for shutting down all connections and the server.
  -/
  context : Std.CancellationContext
  /--
  Active HTTP connections.
  -/
  -- Counter of currently-open connections, guarded by a mutex.
  activeConnections : Std.Mutex UInt64
  /--
  Indicates when the server has successfully shutdown.
  -/
  shutdownPromise : Std.Channel Unit
  /--
  Configuration of the server.
  -/
  config : Std.Http.Config
namespace Server

/--
Create a new `Server` structure with an optional configuration.
-/
def new (config : Std.Http.Config := {}) : IO Server := do
  let context ← Std.CancellationContext.new
  let activeConnections ← Std.Mutex.new 0
  let shutdownPromise ← Std.Channel.new
  return { context, activeConnections, shutdownPromise, config }

/--
Triggers cancellation of all requests and the accept loop in the server. This function should be used
in conjunction with `waitShutdown` to properly coordinate the shutdown sequence.
-/
@[inline]
def shutdown (s : Server) : Async Unit :=
  s.context.cancel .shutdown

/--
Waits for the server to shut down. Blocks until another task or async operation calls the `shutdown` function.
-/
@[inline]
def waitShutdown (s : Server) : Async Unit := do
  Async.ofAsyncTask ((← s.shutdownPromise.recv).map Except.ok)

/--
Returns a `Selector` that waits for the server to shut down.
-/
@[inline]
def waitShutdownSelector (s : Server) : Selector Unit :=
  s.shutdownPromise.recvSelector

/--
Triggers cancellation of all requests and the accept loop, then waits for the server to fully shut down.
This is a convenience function combining `shutdown` and then `waitShutdown`.
-/
@[inline]
def shutdownAndWait (s : Server) : Async Unit := do
  s.context.cancel .shutdown
  s.waitShutdown

/--
Tracks `action` as an active connection: bumps the counter before running it and,
after it finishes, resolves the shutdown promise once the last connection ends
after cancellation was requested.
-/
@[inline]
private def frameCancellation (s : Server) (action : ContextAsync α) : ContextAsync α := do
  s.activeConnections.atomically (modify (· + 1))
  let result ← action
  s.activeConnections.atomically do
    modify (· - 1)
    if (← get) = 0 ∧ (← s.context.isCancelled) then
      discard <| s.shutdownPromise.send ()
  return result

/--
Start a new HTTP/1.1 server on the given socket address. This function uses `Async` to handle tasks
and TCP connections, and returns a `Server` structure that can be used to cancel the server.
-/
def serve {σ : Type} [Handler σ]
    (addr : Net.SocketAddress)
    (handler : σ)
    (config : Config := {}) (backlog : UInt32 := 128) : Async Server := do
  let httpServer ← Server.new config
  let server ← Socket.Server.mk
  server.bind addr
  server.listen backlog
  let runServer := do
    frameCancellation httpServer do
      while true do
        -- Race an incoming connection against server cancellation.
        let result ← Selectable.one #[
          .case (server.acceptSelector) (fun x => pure <| some x),
          .case (← ContextAsync.doneSelector) (fun _ => pure none)
        ]
        match result with
        | some client =>
          -- Attach the peer address as an extension when it can be resolved.
          let extensions : Extensions := match (← EIO.toBaseIO client.getPeerName) with
            | .ok addr => Extensions.empty.insert (Server.RemoteAddr.mk addr)
            | .error _ => Extensions.empty
          ContextAsync.background (frameCancellation httpServer (serveConnection client handler config extensions))
        | none => break
  background (runServer httpServer.context)
  return httpServer

/--
Start a new HTTPS (HTTP over TLS) server on the given socket address.
The server context is configured with the provided PEM certificate and key files.
-/
def serveSSL {σ : Type} [Handler σ]
    (addr : Net.SocketAddress)
    (handler : σ)
    (certFile keyFile : String)
    (config : Config := {}) (backlog : UInt32 := 128)
    (chunkSize : UInt64 := TCP.SSL.ioChunkSize) : Async Server := do
  TCP.SSL.Server.configureContext certFile keyFile
  let httpServer ← Server.new config
  let server ← TCP.SSL.Server.mk
  server.bind addr
  server.listen backlog
  let runServer := do
    frameCancellation httpServer do
      while true do
        let result ← Selectable.one #[
          .case (server.acceptSelector) (fun x => pure <| some x),
          .case (← ContextAsync.doneSelector) (fun _ => pure none)
        ]
        match result with
        | some client =>
          let extensions : Extensions := match (← EIO.toBaseIO client.getPeerName) with
            | .ok addr => Extensions.empty.insert (Server.RemoteAddr.mk addr)
            | .error _ => Extensions.empty
          ContextAsync.background (frameCancellation httpServer do
            try
              -- Complete the TLS handshake before speaking HTTP; drop the
              -- connection silently if the handshake fails.
              client.handshake chunkSize
              serveConnection client handler config extensions
            catch _ =>
              pure ())
        | none => break
  background (runServer httpServer.context)
  return httpServer

end Std.Http.Server

View File

@@ -0,0 +1,144 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Time
public import Std.Internal.Http.Protocol.H1
public section
/-!
# Config
This module exposes the `Config`, a structure that describes timeout, request and headers
configuration of an HTTP Server.
-/
namespace Std.Http
set_option linter.all true
/--
Connection limits configuration with validation.
-/
structure Config where
  /--
  Maximum number of requests per connection.
  -/
  maxRequests : Nat := 100
  /--
  Maximum number of headers allowed per request.
  -/
  maxHeaders : Nat := 50
  /--
  Maximum waiting time for more data.
  -/
  lingeringTimeout : Time.Millisecond.Offset := 10000
  /--
  Timeout for keep-alive connections.
  -/
  -- Subtype default restored: the anonymous constructor carries the positivity proof.
  keepAliveTimeout : { x : Time.Millisecond.Offset // 0 < x } := ⟨12000, by decide⟩
  /--
  Maximum time for requesting more data.
  -/
  requestTimeout : { x : Time.Millisecond.Offset // 0 < x } := ⟨13000, by decide⟩
  /--
  Whether to enable keep-alive connections by default.
  -/
  enableKeepAlive : Bool := true
  /--
  The maximum size that the connection can receive in a single recv call.
  -/
  maximumRecvSize : Nat := 8192
  /--
  Default buffer size for the connection.
  -/
  defaultPayloadBytes : Nat := 8192
  /--
  Whether to automatically generate the `Date` header in responses.
  -/
  generateDate : Bool := true
  /--
  The server name.
  -/
  serverName : Option Header.Value := some (.mk "LeanHTTP/1.1")
  /--
  Maximum length of HTTP method token (default: 16 bytes)
  -/
  maxMethodLength : Nat := 16
  /--
  Maximum length of request URI (default: 8192 bytes)
  -/
  maxUriLength : Nat := 8192
  /--
  Maximum length of header field name (default: 256 bytes)
  -/
  maxHeaderNameLength : Nat := 256
  /--
  Maximum length of header field value (default: 8192 bytes)
  -/
  maxHeaderValueLength : Nat := 8192
  /--
  Maximum number of spaces in delimiter sequences (default: 256)
  -/
  maxSpaceSequence : Nat := 256
  /--
  Maximum length of chunk extension name (default: 256 bytes)
  -/
  maxChunkExtNameLength : Nat := 256
  /--
  Maximum length of chunk extension value (default: 256 bytes)
  -/
  maxChunkExtValueLength : Nat := 256
  /--
  Maximum length of reason phrase (default: 512 bytes)
  -/
  maxReasonPhraseLength : Nat := 512
  /--
  Maximum number of trailer headers (default: 100)
  -/
  maxTrailerHeaders : Nat := 100
namespace Config

/--
Converts to HTTP 1.1 config.

Note: the timeout fields (`lingeringTimeout`, `keepAliveTimeout`,
`requestTimeout`) are handled at the connection layer and are not part of the
H1 protocol config.
-/
def toH1Config (config : Config) : Protocol.H1.Config where
  maxMessages := config.maxRequests
  maxHeaders := config.maxHeaders
  maxMethodLength := config.maxMethodLength
  maxUriLength := config.maxUriLength
  maxHeaderNameLength := config.maxHeaderNameLength
  maxHeaderValueLength := config.maxHeaderValueLength
  maxSpaceSequence := config.maxSpaceSequence
  maxChunkExtNameLength := config.maxChunkExtNameLength
  maxChunkExtValueLength := config.maxChunkExtValueLength
  maxReasonPhraseLength := config.maxReasonPhraseLength
  maxTrailerHeaders := config.maxTrailerHeaders
  enableKeepAlive := config.enableKeepAlive
  -- The configured server name doubles as the H1 identity header.
  identityHeader := config.serverName

end Std.Http.Config

View File

@@ -0,0 +1,356 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Async.TCP
public import Std.Internal.Async.ContextAsync
public import Std.Internal.Http.Transport
public import Std.Internal.Http.Protocol.H1
public import Std.Internal.Http.Server.Config
public import Std.Internal.Http.Server.Handler
public section
namespace Std
namespace Http
namespace Server
open Std Internal IO Async TCP Protocol
open Time
/-!
# Connection
This module defines `Server.Connection`, a structure used to handle a single HTTP connection with
possibly multiple requests.
-/
set_option linter.all true
/--
Represents the remote address of a client connection.
-/
public structure RemoteAddr where
  /--
  The socket address of the remote client.
  -/
  addr : Net.SocketAddress
  deriving TypeName

/-- Renders the remote address as `ip:port`. -/
instance : ToString RemoteAddr where
  toString addr := toString addr.addr.ipAddr ++ ":" ++ toString addr.addr.port
/--
A single HTTP connection.
-/
public structure Connection (α : Type) where
  /--
  The client connection.
  -/
  socket : α
  /--
  The processing machine for HTTP 1.1.
  -/
  machine : H1.Machine .receiving
  /--
  Extensions to attach to each request (e.g., remote address).
  -/
  extensions : Extensions := .empty
namespace Connection
-- Repr instances used only for debugging/tracing of connection events.
instance : Repr (Response t) where
  reprPrec t := reprPrec t.head

deriving instance Repr for Error
deriving instance Repr for ByteArray
deriving instance Repr for Chunk

/-- Internal event produced while serving a connection. -/
private inductive Recv
  /-- Bytes received from the socket (`none` presumably means EOF — confirm). -/
  | bytes (x : Option ByteArray)
  /-- A chunk received from the user body stream (`none` when the stream closed). -/
  | channel (x : Option Chunk)
  /-- The user produced a response (or an error). -/
  | response (x : (Except Error (Response Body.Stream)))
  /-- A timeout fired. -/
  | timeout
  /-- The server is shutting down. -/
  | shutdown
  /-- The connection must be closed. -/
  | close

private instance : Repr Recv where
  reprPrec
    | .bytes b => reprPrec ("bytes", b)
    | .channel b => reprPrec ("channel", b)
    | .response b => reprPrec ("response", b)
    | .timeout => reprPrec "timeout"
    | .shutdown => reprPrec "shutdown"
    | .close => reprPrec "close"
/--
Waits for the next event on this connection: cancellation, incoming socket
bytes, user body data, a user response, or a timeout — whichever fires first.
-/
private def receiveWithTimeout {α}
    [Transport α]
    (socket : Option α)
    (expect : UInt64)
    (ch : Option Body.Stream)
    (response : Option (Std.Channel (Except Error (Response Body.Stream))))
    (timeoutMs : Millisecond.Offset)
    (keepAliveTimeoutMs : Option Millisecond.Offset)
    (connectionContext : CancellationContext) : Async Recv := do
  let mut baseSelectables : Array (Selectable Recv) := #[
    .case connectionContext.doneSelector (fun _ => do
      -- Distinguish a deadline-driven cancellation from a real shutdown.
      let reason ← connectionContext.getCancellationReason
      match reason with
      | some .deadline => pure .timeout
      | _ => pure .shutdown)
  ]
  if let some socket := socket then
    baseSelectables := baseSelectables.push (.case (Transport.recvSelector socket expect) (Recv.bytes · |> pure))
  -- Timeouts are only applied if we are not expecting data from the user.
  if ch.isNone ∧ response.isNone then
    if let some keepAliveTimeout := keepAliveTimeoutMs then
      baseSelectables := baseSelectables.push (.case (← Selector.sleep keepAliveTimeout) (fun _ => pure .timeout))
    else
      baseSelectables := baseSelectables.push (.case (← Selector.sleep timeoutMs) (fun _ => pure .timeout))
  if let some ch := ch then
    baseSelectables := baseSelectables.push (.case ch.recvSelector (Recv.channel · |> pure))
  if let some response := response then
    baseSelectables := baseSelectables.push (.case response.recvSelector (Recv.response · |> pure))
  Selectable.one baseSelectables
/--
Computes how many bytes to request next (parser hint, clamped to the configured
maximum) and waits for the next connection event. Any failure while waiting is
mapped to `.close`, ending the connection loop.
-/
private def processNeedMoreData
    [Transport α]
    (config : Config)
    (socket : Option α)
    (expect : Option Nat)
    (response : Option (Std.Channel (Except Error (Response Body.Stream))))
    (channel : Option Body.Stream)
    (timeout : Millisecond.Offset)
    (keepAliveTimeout : Option Millisecond.Offset)
    (connectionContext : CancellationContext) : Async Recv := do
  try
    -- Fall back to the default payload size, never exceed `maximumRecvSize`.
    let expectedBytes := expect
      |>.getD config.defaultPayloadBytes
      |>.min config.maximumRecvSize
      |>.toUInt64
    receiveWithTimeout socket expectedBytes channel response timeout keepAliveTimeout connectionContext
  catch _ =>
    pure .close
/--
Moves the machine into a terminal error state. If a response is still expected,
sends a final response carrying `status` with `Connection: close` and shuts both
directions down; otherwise just closes the writer. Returns the updated machine
together with the new `waitingResponse` flag.
-/
private def handleError (machine : H1.Machine .receiving) (status : Status) (waitingResponse : Bool) : H1.Machine .receiving × Bool :=
  -- NOTE(review): `isWaitingMessage waitingResponse` reads oddly (a predicate
  -- applied to the flag); confirm the intended condition survived extraction.
  if machine.isWaitingMessage waitingResponse then
    let machine := machine.send { status, headers := .empty |>.insert .connection .close }
      |>.userClosedBody
      |>.closeReader
      |>.noMoreInput
    (machine, false)
  else
    (machine.closeWriter.noMoreInput, waitingResponse)
/--
Drives a single HTTP/1.x connection: steps the protocol machine, flushes its
output to the transport, dispatches machine events (headers, body data,
100-continue, connection reset), invokes the `Handler` once a full head is
available, and multiplexes between socket input, user body chunks, the user's
response, timeouts, and shutdown. The scrape stripped `←`, `&&`, `||` tokens and
all indentation; they are restored here — verify against the original commit.
-/
private def handle
    {σ : Type} [Transport α] [Handler σ]
    (connection : Connection α)
    (config : Config)
    (connectionContext : CancellationContext)
    (handler : σ) : Async Unit := do
  let mut machine := connection.machine
  let socket := connection.socket
  let mut requestStream ← Body.Stream.emptyWithCapacity
  let mut keepAliveTimeout := some config.keepAliveTimeout.val
  let mut currentTimeout := config.keepAliveTimeout.val
  let mut response ← Std.Channel.new
  let mut respStream : Option Body.Stream := none
  let mut requiresData := false
  let mut needBody := false
  let mut expectData := none
  let mut waitingResponse := false
  let mut pendingHead : Option Request.Head := none

  while ¬machine.halted do
    let (newMachine, step) := machine.step
    machine := newMachine

    -- Flush whatever the machine produced; a failed send ends the connection.
    if step.output.size > 0 then
      try Transport.sendAll socket #[step.output.toByteArray] catch _ => break

    for event in step.events do
      match event with
      | .needMoreData expect => do
        requiresData := true
        expectData := expect
      | .needBody => do
        needBody := true
      | .needAnswer =>
        pure ()
      | .endHeaders head =>
        -- Once headers are in, switch from keep-alive to lingering timeout.
        currentTimeout := config.lingeringTimeout
        keepAliveTimeout := none
        if let some length := head.getSize true then
          requestStream.setKnownSize (some length)
        pendingHead := some head
      | .«continue» =>
        -- `Expect: 100-continue`: ask the handler whether to accept the body.
        if let some head := pendingHead then
          let canContinue ← Handler.onContinue handler head
          let status := if canContinue then Status.«continue» else Status.unauthorized
          machine := machine.canContinue status
      | .gotData final ext data =>
        try
          requestStream.send { data := data.toByteArray, extensions := ext }
          if final then
            requestStream.close
        catch _ =>
          pure ()
      | .next => do
        -- Pipeline reset for the next request on the same connection.
        requestStream ← Body.Stream.emptyWithCapacity
        response ← Std.Channel.new
        if let some res := respStream then
          if ¬(← res.isClosed) then res.close
        respStream := none
        keepAliveTimeout := some config.keepAliveTimeout.val
        currentTimeout := config.keepAliveTimeout.val
        waitingResponse := false
      | .failed _ =>
        pendingHead := none
        waitingResponse := false
        requiresData := false
        break
      | .closeBody =>
        if let some stream := respStream then
          stream.close
      | .close =>
        pure ()

    -- A complete request head: hand the request to the user handler and route
    -- its eventual result into the `response` channel.
    if let some head := pendingHead then
      waitingResponse := true
      let newResponse := Handler.onRequest handler { head, body := requestStream, extensions := connection.extensions } connectionContext
      let task ← newResponse.asTask
      BaseIO.chainTask task fun x => discard <| response.send x
      pendingHead := none

    if requiresData || waitingResponse || respStream.isSome then
      let socket := some socket
      let answer := if waitingResponse then some response else none
      requiresData := false
      needBody := false

      let event ← processNeedMoreData config socket expectData answer respStream currentTimeout keepAliveTimeout connectionContext

      match event with
      | .bytes (some bs) =>
        machine := machine.feed bs
      | .bytes none =>
        machine := machine.noMoreInput
      | .channel (some chunk) =>
        machine := machine.sendData #[chunk]
      | .channel none =>
        machine := machine.userClosedBody
        if let some res := respStream then
          if ¬(← res.isClosed) then res.close
        respStream := none
      | .close =>
        break
      | .timeout =>
        machine := machine.closeReader
        let (newMachine, newWaitingResponse) := handleError machine .requestTimeout waitingResponse
        machine := newMachine
        waitingResponse := newWaitingResponse
      | .shutdown =>
        let (newMachine, newWaitingResponse) := handleError machine .serviceUnavailable waitingResponse
        machine := newMachine
        waitingResponse := newWaitingResponse
      | .response (.error err) =>
        Handler.onFailure handler err
        let (newMachine, newWaitingResponse) := handleError machine .internalServerError waitingResponse
        machine := newMachine
        waitingResponse := newWaitingResponse
      | .response (.ok res) =>
        -- Stamp a `Date` header unless the user already provided one.
        let head ← do
          if config.generateDate && ¬res.head.headers.contains Header.Name.date then
            let now ← Std.Time.DateTime.now (tz := .UTC)
            pure { res.head with headers := res.head.headers.insert Header.Name.date (Header.Value.ofString! now.toRFC822String) }
          else
            pure res.head
        machine := machine.send head
        waitingResponse := false
        let size ← res.body.getKnownSize
        machine := machine.setKnownSize (size.getD .chunked)
        respStream := some res.body

  -- Teardown: close any streams still open and release the transport.
  if ¬(← requestStream.isClosed) then
    requestStream.close
  if let some res := respStream then
    if ¬(← res.isClosed) then
      res.close
  Transport.close socket
end Connection
/--
This is the entry point of the library. It is used to receive and send requests using an `Async`
handler for a single connection. It can be used with a `TCP.Socket` or any other type that implements
`Transport` to create a simple HTTP server capable of handling multiple connections concurrently.

# Example
```lean
-- Create a TCP socket server instance
let server ← Socket.Server.mk
server.bind addr
server.listen backlog

-- Enter an infinite loop to handle incoming client connections
while true do
  let client ← server.accept
  background (serveConnection client handler config)
```
-/
def serveConnection
    {σ : Type} [Transport t] [Handler σ] (client : t) (handler : σ)
    (config : Config) (extensions : Extensions := .empty) : ContextAsync Unit := do
  -- Build a fresh connection state machine and run it under the current
  -- cancellation context.
  (Connection.mk client { config := config.toH1Config } extensions)
    |>.handle config (← ContextAsync.getContext) handler

end Std.Http.Server

View File

@@ -0,0 +1,46 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Async
public import Std.Internal.Http.Data
public import Std.Internal.Async.ContextAsync
public section
namespace Std.Http.Server
open Std.Internal.IO.Async
set_option linter.all true
/--
A type class for handling HTTP server events. Implement this class to define how the server
responds to incoming requests, failures, and `Expect: 100-continue` headers.
-/
class Handler (σ : Type) where
  /--
  Called for each incoming HTTP request. The default implementation returns a 404 Not Found response.
  -/
  onRequest (self : σ) (request : Request Body.Stream) : ContextAsync (Response Body.Stream) := do
    -- NOTE(review): `Body.Stream.emptyWithCapacity` is consumed monadically
    -- elsewhere in this change; confirm this default elaborates as intended.
    pure { head := { status := .notFound }, body := Body.Stream.emptyWithCapacity }
  /--
  Called when an error occurs while processing a request. The default implementation does nothing.
  -/
  onFailure (self : σ) (error : IO.Error) : Async Unit :=
    pure ()
  /--
  Called when a request includes an `Expect: 100-continue` header. Return `true` to send a
  `100 Continue` response and accept the body, or `false` to reject it. The default implementation
  always accepts.
  -/
  onContinue (self : σ) (request : Request.Head) : Async Bool :=
    pure true

end Std.Http.Server

View File

@@ -0,0 +1,253 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Std.Internal.Http.Protocol.H1
public import Std.Internal.Async.TCP.SSL
public section
/-!
# Transport
This module exposes a `Transport` type class that is used to represent different transport mechanisms
that can be used with a HTTP connection.
-/
namespace Std.Http
open Std Internal IO Async TCP
set_option linter.all true
/--
Generic HTTP interface that abstracts over different transport mechanisms.
-/
class Transport (α : Type) where
  /--
  Receive data from the client connection, up to the expected size.
  Returns `none` if the connection is closed or no data is available.
  -/
  recv : α → UInt64 → Async (Option ByteArray)
  /--
  Send all data through the client connection.
  -/
  sendAll : α → Array ByteArray → Async Unit
  /--
  Get a selector for receiving data asynchronously.
  -/
  recvSelector : α → UInt64 → Selector (Option ByteArray)
  /--
  Close the transport connection. This is a no-op for socket-based transports.
  -/
  close : α → IO Unit := fun _ => pure ()

-- Plain TCP sockets: `close` keeps its no-op default, libuv owns the lifetime.
instance : Transport Socket.Client where
  recv client expect := client.recv? expect
  sendAll client data := client.sendAll data
  recvSelector client expect := client.recvSelector expect

-- TLS sockets share the exact same surface as plain TCP.
instance : Transport TCP.SSL.Client where
  recv client expect := client.recv? expect
  sendAll client data := client.sendAll data
  recvSelector client expect := client.recvSelector expect
open Internal.IO.Async in
/--
Shared state for a bidirectional mock connection.
-/
private structure MockLink.SharedState where
  /--
  Client to server direction.
  -/
  clientToServer : Std.CloseableChannel ByteArray
  /--
  Server to client direction.
  -/
  serverToClient : Std.CloseableChannel ByteArray

/--
Mock client endpoint for testing.
-/
structure Mock.Client where
  private shared : MockLink.SharedState

/--
Mock server endpoint for testing.
-/
structure Mock.Server where
  private shared : MockLink.SharedState

namespace Mock

/--
Create a mock server and client that are connected to each other and share the
same underlying state, enabling bidirectional communication.
-/
def new : BaseIO (Mock.Client × Mock.Server) := do
  -- NOTE(review): constructor brackets were lost in extraction; this rebuilds
  -- the evident intent — both endpoints see the same two channels.
  let clientToServer ← Std.CloseableChannel.new
  let serverToClient ← Std.CloseableChannel.new
  let shared : MockLink.SharedState := { clientToServer, serverToClient }
  return ({ shared }, { shared })
/--
Receive data from a channel, joining all available data up to the expected size. First does a
blocking recv, then greedily consumes available data with tryRecv until `expect` bytes are reached.
-/
def recvJoined (recvChan : Std.CloseableChannel ByteArray) (expect : Option UInt64) : Async (Option ByteArray) := do
  match ← await (← recvChan.recv) with
  | none => return none
  | some first =>
    let mut result := first
    repeat
      if let some expect := expect then
        -- Stop as soon as we have gathered at least `expect` bytes.
        if result.size.toUInt64 ≥ expect then break
      match ← recvChan.tryRecv with
      | none => break
      | some chunk => result := result ++ chunk
    return some result
/--
Send a single ByteArray through a channel. Channel errors are surfaced as
`IO.userError`.
-/
def send (sendChan : Std.CloseableChannel ByteArray) (data : ByteArray) : Async Unit := do
  -- NOTE(review): composition operator restored from mangled extraction.
  Async.ofAsyncTask ((← sendChan.send data) |>.map (Except.mapError (IO.userError ∘ toString)))

/--
Send ByteArrays through a channel, one chunk at a time, in order.
-/
def sendAll (sendChan : Std.CloseableChannel ByteArray) (data : Array ByteArray) : Async Unit := do
  for chunk in data do
    send sendChan chunk

/--
Create a selector for receiving from a channel with joining behavior.
-/
def recvSelector (recvChan : Std.CloseableChannel ByteArray) : Selector (Option ByteArray) :=
  recvChan.recvSelector

end Mock
namespace Mock.Client

/--
Get the receive channel for a client (server to client direction).
-/
def getRecvChan (client : Mock.Client) : Std.CloseableChannel ByteArray :=
  client.shared.serverToClient

/--
Get the send channel for a client (client to server direction).
-/
def getSendChan (client : Mock.Client) : Std.CloseableChannel ByteArray :=
  client.shared.clientToServer

/--
Send a single ByteArray.
-/
def send (client : Mock.Client) (data : ByteArray) : Async Unit :=
  Mock.send (getSendChan client) data

/--
Receive data, joining all available chunks.
-/
def recv? (client : Mock.Client) (expect : Option UInt64 := none) : Async (Option ByteArray) :=
  Mock.recvJoined (getRecvChan client) expect

/--
Try to receive data without blocking, joining all immediately available chunks.
Returns `none` if no data is available.
-/
def tryRecv? (client : Mock.Client) (_expect : UInt64 := 0) : BaseIO (Option ByteArray) := do
  match ← (getRecvChan client).tryRecv with
  | none => return none
  | some first =>
    let mut result := first
    repeat
      match ← (getRecvChan client).tryRecv with
      | none => break
      | some chunk => result := result ++ chunk
    return some result

/--
Close the mock server and client.
-/
def close (client : Mock.Client) : IO Unit := do
  if !(← client.shared.clientToServer.isClosed) then client.shared.clientToServer.close
  if !(← client.shared.serverToClient.isClosed) then client.shared.serverToClient.close

end Mock.Client
namespace Mock.Server

/--
Get the receive channel for a server (client to server direction).
-/
def getRecvChan (server : Mock.Server) : Std.CloseableChannel ByteArray :=
  server.shared.clientToServer

/--
Get the send channel for a server (server to client direction).
-/
def getSendChan (server : Mock.Server) : Std.CloseableChannel ByteArray :=
  server.shared.serverToClient

/--
Send a single ByteArray.
-/
def send (server : Mock.Server) (data : ByteArray) : Async Unit :=
  Mock.send (getSendChan server) data

/--
Receive data, joining all available chunks.
-/
def recv? (server : Mock.Server) (expect : Option UInt64 := none) : Async (Option ByteArray) :=
  Mock.recvJoined (getRecvChan server) expect

/--
Try to receive data without blocking, joining all immediately available chunks. Returns `none` if no
data is available.
-/
def tryRecv? (server : Mock.Server) (_expect : UInt64 := 0) : BaseIO (Option ByteArray) := do
  match ← (getRecvChan server).tryRecv with
  | none => return none
  | some first =>
    let mut result := first
    repeat
      match ← (getRecvChan server).tryRecv with
      | none => break
      | some chunk => result := result ++ chunk
    return some result

/--
Close the mock server and client.
-/
def close (server : Mock.Server) : IO Unit := do
  if !(← server.shared.clientToServer.isClosed) then server.shared.clientToServer.close
  if !(← server.shared.serverToClient.isClosed) then server.shared.serverToClient.close

end Mock.Server
-- Mock endpoints satisfy `Transport`, so tests can drive the server without a
-- real socket. `close` tears down both directions of the shared link.
instance : Transport Mock.Client where
  recv client expect := Mock.recvJoined (Mock.Client.getRecvChan client) (some expect)
  sendAll client data := Mock.sendAll (Mock.Client.getSendChan client) data
  recvSelector client _ := Mock.recvSelector (Mock.Client.getRecvChan client)
  close client := client.close

instance : Transport Mock.Server where
  recv server expect := Mock.recvJoined (Mock.Server.getRecvChan server) (some expect)
  sendAll server data := Mock.sendAll (Mock.Server.getSendChan server) data
  recvSelector server _ := Mock.recvSelector (Mock.Server.getRecvChan server)
  close server := server.close

end Std.Http

View File

@@ -57,16 +57,29 @@ def skipByte (b : UInt8) : Parser Unit :=
/--
Skip a sequence of bytes equal to the given `ByteArray`.
-/
def skipBytes (arr : ByteArray) : Parser Unit := do
for b in arr do
skipByte b
/--
Skip a sequence of bytes equal to the given `ByteArray`. Fails with a
descriptive error on the first mismatching byte or on premature end of input;
on failure the returned iterator points at the failing position.
-/
def skipBytes (arr : ByteArray) : Parser Unit := fun it =>
  let rec go (idx : Nat) (it : ByteArray.Iterator) : ParseResult Unit ByteArray.Iterator :=
    if h : idx < arr.size then
      if hnext : it.hasNext then
        let got := it.curr' hnext
        let want := arr[idx]
        if got = want then
          go (idx + 1) (it.next' hnext)
        else
          .error it (.other s!"expected byte {want}, got {got}")
      else
        .error it (.other s!"unexpected end of input while matching {arr.size} bytes")
    else
      .success it ()
  go 0 it

/--
Parse a string by matching its UTF-8 bytes, returns the string on success.
-/
@[inline]
def pstring (s : String) : Parser String := do
  let utf8 := s.toUTF8
  skipBytes utf8
  return s
/--

117
src/Std/Internal/SSL.lean Normal file
View File

@@ -0,0 +1,117 @@
/-
Copyright (c) 2026 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
module
prelude
public import Init.System.Promise
public section
namespace Std
namespace Internal
namespace SSL
-- Opaque handle backed by the C runtime (extern symbols `lean_uv_ssl_*`).
private opaque SessionImpl : NonemptyType.{0}
/--
Represents an OpenSSL session backed by memory BIOs.
-/
def Session : Type := SessionImpl.type
instance : Nonempty Session := by exact SessionImpl.property
/--
Configures the shared server context with a certificate and key in PEM format.
-/
@[extern "lean_uv_ssl_configure_server_ctx"]
opaque configureServerContext (certFile : @& String) (keyFile : @& String) : IO Unit
/--
Configures the shared client context.
`caFile` may be empty to keep default trust configuration.
-/
@[extern "lean_uv_ssl_configure_client_ctx"]
opaque configureClientContext (caFile : @& String) (verifyPeer : Bool) : IO Unit
namespace Session
/--
Creates a new SSL session. Set `isServer := true` for server-side handshakes.
-/
@[extern "lean_uv_ssl_mk"]
opaque mk (isServer : Bool) : IO Session
/--
Creates a server-side SSL session.
-/
@[extern "lean_uv_ssl_mk_server"]
opaque mkServer : IO Session
/--
Creates a client-side SSL session.
-/
@[extern "lean_uv_ssl_mk_client"]
opaque mkClient : IO Session
/--
Sets SNI host name for client-side handshakes.
-/
@[extern "lean_uv_ssl_set_server_name"]
opaque setServerName (ssl : @& Session) (host : @& String) : IO Unit
/--
Gets the X.509 verify result code after handshake.
-/
@[extern "lean_uv_ssl_verify_result"]
opaque verifyResult (ssl : @& Session) : IO UInt64
/--
Runs one handshake step. Returns true when handshake is complete.
-/
@[extern "lean_uv_ssl_handshake"]
opaque handshake (ssl : @& Session) : IO Bool
/--
Attempts to write plaintext application data into SSL.
Returns true when accepted, false when OpenSSL needs more I/O first.
-/
@[extern "lean_uv_ssl_write"]
opaque write (ssl : @& Session) (data : @& ByteArray) : IO Bool
/--
Attempts to read decrypted plaintext data. Returns none when OpenSSL needs more I/O.
-/
@[extern "lean_uv_ssl_read"]
opaque read? (ssl : @& Session) (maxBytes : UInt64) : IO (Option ByteArray)
/--
Feeds encrypted TLS bytes into the SSL input BIO.
-/
@[extern "lean_uv_ssl_feed_encrypted"]
opaque feedEncrypted (ssl : @& Session) (data : @& ByteArray) : IO UInt64
/--
Drains encrypted TLS bytes from the SSL output BIO.
-/
@[extern "lean_uv_ssl_drain_encrypted"]
opaque drainEncrypted (ssl : @& Session) : IO ByteArray
/--
Returns the amount of encrypted TLS bytes currently pending in the output BIO.
-/
@[extern "lean_uv_ssl_pending_encrypted"]
opaque pendingEncrypted (ssl : @& Session) : IO UInt64
/--
Returns the amount of decrypted plaintext bytes currently buffered inside the SSL object.
-/
@[extern "lean_uv_ssl_pending_plaintext"]
opaque pendingPlaintext (ssl : @& Session) : IO UInt64
end Session
end SSL
end Internal
end Std

View File

@@ -33,6 +33,8 @@ set(
uv/dns.cpp
uv/system.cpp
uv/signal.cpp
openssl.cpp
openssl/session.cpp
)
if(USE_MIMALLOC)
list(APPEND RUNTIME_OBJS ${LEAN_BINARY_DIR}/../mimalloc/src/mimalloc/src/static.c)

View File

@@ -14,6 +14,8 @@ Author: Leonardo de Moura
#include "runtime/mutex.h"
#include "runtime/init_module.h"
#include "runtime/libuv.h"
#include "runtime/openssl.h"
#include "runtime/openssl/session.h"
namespace lean {
extern "C" LEAN_EXPORT void lean_initialize_runtime_module() {
@@ -25,6 +27,8 @@ extern "C" LEAN_EXPORT void lean_initialize_runtime_module() {
initialize_mutex();
initialize_process();
initialize_stack_overflow();
initialize_openssl();
initialize_openssl_session();
initialize_libuv();
}
void initialize_runtime_module() {
@@ -32,6 +36,7 @@ void initialize_runtime_module() {
}
void finalize_runtime_module() {
finalize_stack_overflow();
finalize_openssl();
finalize_process();
finalize_mutex();
finalize_thread();

96
src/runtime/openssl.cpp Normal file
View File

@@ -0,0 +1,96 @@
/*
Copyright (c) 2026 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Author: Sofia Rodrigues
*/
#include "runtime/openssl.h"
#include "runtime/io.h"
#include <mutex>
#ifndef LEAN_EMSCRIPTEN
#include <openssl/opensslv.h>
#include <openssl/err.h>
namespace lean {
static std::once_flag g_openssl_init_once;
static SSL_CTX * g_ssl_server_ctx = nullptr;
static SSL_CTX * g_ssl_client_ctx = nullptr;
// Options shared by the client and server contexts. Clearing
// SSL_OP_NO_RENEGOTIATION / setting SSL_OP_ALLOW_CLIENT_RENEGOTIATION
// re-enables renegotiation where the OpenSSL build supports those flags.
static void configure_ctx_common(SSL_CTX * ctx) {
SSL_CTX_set_mode(ctx, SSL_MODE_AUTO_RETRY);
#ifdef SSL_OP_NO_RENEGOTIATION
SSL_CTX_clear_options(ctx, SSL_OP_NO_RENEGOTIATION);
#endif
#ifdef SSL_OP_ALLOW_CLIENT_RENEGOTIATION
SSL_CTX_set_options(ctx, SSL_OP_ALLOW_CLIENT_RENEGOTIATION);
#endif
}
// One-time initialization of the process-wide SSL_CTX pair. Panics (rather
// than returning an error) because this runs during runtime startup.
static void initialize_openssl_once() {
if (OPENSSL_init_ssl(0, nullptr) != 1) {
lean_internal_panic("failed to initialize OpenSSL");
}
g_ssl_server_ctx = SSL_CTX_new(TLS_server_method());
g_ssl_client_ctx = SSL_CTX_new(TLS_client_method());
if (g_ssl_server_ctx == nullptr || g_ssl_client_ctx == nullptr) {
if (g_ssl_server_ctx != nullptr) SSL_CTX_free(g_ssl_server_ctx);
if (g_ssl_client_ctx != nullptr) SSL_CTX_free(g_ssl_client_ctx);
g_ssl_server_ctx = nullptr;
g_ssl_client_ctx = nullptr;
lean_internal_panic("failed to create OpenSSL SSL_CTX pair");
}
configure_ctx_common(g_ssl_server_ctx);
configure_ctx_common(g_ssl_client_ctx);
}
// Idempotent and thread-safe via std::call_once.
void initialize_openssl() {
std::call_once(g_openssl_init_once, initialize_openssl_once);
}
// NOTE(review): g_openssl_init_once is not reset here, so a
// finalize/initialize cycle would leave the contexts null — confirm the
// runtime never re-initializes after finalization.
void finalize_openssl() {
if (g_ssl_server_ctx != nullptr) {
SSL_CTX_free(g_ssl_server_ctx);
g_ssl_server_ctx = nullptr;
}
if (g_ssl_client_ctx != nullptr) {
SSL_CTX_free(g_ssl_client_ctx);
g_ssl_client_ctx = nullptr;
}
}
// Accessors lazily initialize so callers need not order against startup.
SSL_CTX * get_openssl_server_ctx() {
initialize_openssl();
return g_ssl_server_ctx;
}
SSL_CTX * get_openssl_client_ctx() {
initialize_openssl();
return g_ssl_client_ctx;
}
}
// Exposes OPENSSL_VERSION_NUMBER to Lean as a Nat.
extern "C" LEAN_EXPORT lean_obj_res lean_openssl_version(lean_obj_arg o) {
return lean_unsigned_to_nat(OPENSSL_VERSION_NUMBER);
}
#else
namespace lean {
void initialize_openssl() {}
void finalize_openssl() {}
}
extern "C" LEAN_EXPORT lean_obj_res lean_openssl_version(lean_obj_arg o) {
return lean_box(0);
}
#endif

25
src/runtime/openssl.h Normal file
View File

@@ -0,0 +1,25 @@
/*
Copyright (c) 2026 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Author: Sofia Rodrigues
*/
#pragma once
#include <lean/lean.h>
#ifndef LEAN_EMSCRIPTEN
#include <openssl/ssl.h>
#endif
namespace lean {
void initialize_openssl();
void finalize_openssl();
#ifndef LEAN_EMSCRIPTEN
SSL_CTX * get_openssl_server_ctx();
SSL_CTX * get_openssl_client_ctx();
#endif
}
extern "C" LEAN_EXPORT lean_obj_res lean_openssl_version(lean_obj_arg);

View File

@@ -0,0 +1,561 @@
/*
Copyright (c) 2026 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Author: Sofia Rodrigues
*/
#include "runtime/openssl/session.h"
#include <climits>
#include <cstdlib>
#include <cstring>
#include <string>
#ifndef LEAN_EMSCRIPTEN
#include <openssl/err.h>
#endif
namespace lean {
#ifndef LEAN_EMSCRIPTEN
// Builds an IO.userError from the current OpenSSL error queue; drains the
// queue so stale errors don't leak into later calls.
static inline lean_object * mk_ssl_error(char const * where, int ssl_err = 0) {
unsigned long err = ERR_get_error();
char err_buf[256];
err_buf[0] = '\0';
if (err != 0) {
ERR_error_string_n(err, err_buf, sizeof(err_buf));
}
// Drain remaining errors so they don't pollute future calls.
ERR_clear_error();
std::string msg(where);
if (ssl_err != 0) {
msg += " (ssl_error=" + std::to_string(ssl_err) + ")";
}
if (err_buf[0] != '\0') {
msg += ": ";
msg += err_buf;
}
return lean_mk_io_user_error(mk_string(msg.c_str()));
}
// Same as mk_ssl_error but already wrapped as an IO error result.
static inline lean_obj_res mk_ssl_io_error(char const * where, int ssl_err = 0) {
return lean_io_result_mk_error(mk_ssl_error(where, ssl_err));
}
// Fresh zero-length Lean ByteArray.
static inline lean_object * mk_empty_byte_array() {
lean_object * arr = lean_alloc_sarray(1, 0, 0);
lean_sarray_set_size(arr, 0);
return arr;
}
// Releases every queued plaintext write buffer and the queue itself.
static void free_pending_writes(lean_ssl_session_object * obj) {
if (obj->pending_writes != nullptr) {
for (size_t i = 0; i < obj->pending_writes_count; i++) {
free(obj->pending_writes[i].data);
}
free(obj->pending_writes);
obj->pending_writes = nullptr;
}
obj->pending_writes_count = 0;
}
// Copies `data` and appends it to the pending-write queue. Returns false on
// allocation failure, leaving the queue unchanged. Growth is one slot per
// call (realloc each time); acceptable while queues stay short.
static bool append_pending_write(lean_ssl_session_object * obj, char const * data, size_t size) {
char * copy = (char*)malloc(size);
if (copy == nullptr) return false;
std::memcpy(copy, data, size);
size_t new_count = obj->pending_writes_count + 1;
lean_ssl_pending_write * new_arr = (lean_ssl_pending_write*)realloc(
obj->pending_writes, sizeof(lean_ssl_pending_write) * new_count
);
if (new_arr == nullptr) {
free(copy);
return false;
}
obj->pending_writes = new_arr;
obj->pending_writes[obj->pending_writes_count].data = copy;
obj->pending_writes[obj->pending_writes_count].size = size;
obj->pending_writes_count = new_count;
return true;
}
/*
Return values:
1 -> write completed
0 -> write blocked (WANT_READ / WANT_WRITE / ZERO_RETURN)
-1 -> fatal error
*/
// Single SSL_write attempt; classifies the result via SSL_get_error.
static int ssl_write_step(lean_ssl_session_object * obj, char const * data, size_t size, int * out_err) {
// SSL_write takes an int length; refuse anything larger.
if (size > INT_MAX) {
*out_err = SSL_ERROR_SSL;
return -1;
}
int rc = SSL_write(obj->ssl, data, (int)size);
if (rc > 0) {
return 1;
}
int err = SSL_get_error(obj->ssl, rc);
*out_err = err;
if (err == SSL_ERROR_WANT_READ || err == SSL_ERROR_WANT_WRITE || err == SSL_ERROR_ZERO_RETURN) {
return 0;
}
return -1;
}
/*
Return values:
1 -> all pending writes flushed
0 -> still blocked by renegotiation
-1 -> fatal error, *out_err filled
*/
// Replays queued plaintext writes in FIFO order, compacting the queue by the
// number of fully completed entries. A blocked write stops the scan (goto done)
// so ordering is preserved.
static int try_flush_pending_writes(lean_ssl_session_object * obj, int * out_err) {
if (obj->pending_writes_count == 0) return 1;
size_t completed = 0;
for (size_t i = 0; i < obj->pending_writes_count; i++) {
lean_ssl_pending_write * pw = &obj->pending_writes[i];
while (pw->size > 0) {
int err = 0;
int step = ssl_write_step(obj, pw->data, pw->size, &err);
if (step == 1) {
// We do not enable partial writes, so a successful SSL_write consumes the full buffer.
pw->size = 0;
continue;
}
if (step == 0) {
goto done;
}
*out_err = err;
return -1;
}
free(pw->data);
pw->data = nullptr;
completed++;
}
done:
if (completed > 0) {
obj->pending_writes_count -= completed;
if (obj->pending_writes_count == 0) {
free(obj->pending_writes);
obj->pending_writes = nullptr;
} else {
std::memmove(
obj->pending_writes,
obj->pending_writes + completed,
sizeof(lean_ssl_pending_write) * obj->pending_writes_count
);
// Keep memory usage proportional to currently queued writes.
lean_ssl_pending_write * shrunk = (lean_ssl_pending_write*)realloc(
obj->pending_writes,
sizeof(lean_ssl_pending_write) * obj->pending_writes_count
);
if (shrunk != nullptr) {
obj->pending_writes = shrunk;
}
}
}
return obj->pending_writes_count == 0 ? 1 : 0;
}
// External-object finalizer: SSL_free also frees the BIOs owned via
// SSL_set_bio, so only the queue and the wrapper need explicit cleanup.
void lean_ssl_session_finalizer(void * ptr) {
lean_ssl_session_object * obj = (lean_ssl_session_object*)ptr;
if (obj->ssl != nullptr) {
SSL_free(obj->ssl);
}
free_pending_writes(obj);
free(obj);
}
// Registers the external class; sessions hold no Lean objects, so the
// foreach callback is a no-op.
void initialize_openssl_session() {
g_ssl_session_external_class = lean_register_external_class(lean_ssl_session_finalizer, [](void * obj, lean_object * f) {
(void)obj;
(void)f;
});
}
// Allocates an SSL session wired to two non-blocking memory BIOs and wraps it
// as a Lean external object (marked multi-threaded).
static lean_obj_res mk_ssl_session(uint8_t is_server) {
SSL_CTX * ctx = is_server ? get_openssl_server_ctx() : get_openssl_client_ctx();
if (ctx == nullptr) {
return mk_ssl_io_error("failed to initialize OpenSSL context");
}
SSL * ssl = SSL_new(ctx);
if (ssl == nullptr) {
return mk_ssl_io_error("SSL_new failed");
}
BIO * read_bio = BIO_new(BIO_s_mem());
BIO * write_bio = BIO_new(BIO_s_mem());
if (read_bio == nullptr || write_bio == nullptr) {
if (read_bio != nullptr) BIO_free(read_bio);
if (write_bio != nullptr) BIO_free(write_bio);
SSL_free(ssl);
return mk_ssl_io_error("BIO_new failed");
}
BIO_set_nbio(read_bio, 1);
BIO_set_nbio(write_bio, 1);
SSL_set_bio(ssl, read_bio, write_bio);
SSL_set_mode(ssl, SSL_MODE_AUTO_RETRY);
if (is_server) {
SSL_set_accept_state(ssl);
} else {
SSL_set_connect_state(ssl);
}
lean_ssl_session_object * ssl_obj = (lean_ssl_session_object*)malloc(sizeof(lean_ssl_session_object));
if (ssl_obj == nullptr) {
// SSL_free releases both BIOs as well (ownership transferred by SSL_set_bio).
SSL_free(ssl);
return mk_ssl_io_error("failed to allocate SSL session object");
}
ssl_obj->ssl = ssl;
ssl_obj->read_bio = read_bio;
ssl_obj->write_bio = write_bio;
ssl_obj->pending_writes_count = 0;
ssl_obj->pending_writes = nullptr;
lean_object * obj = lean_ssl_session_object_new(ssl_obj);
lean_mark_mt(obj);
return lean_io_result_mk_ok(obj);
}
/* Std.Internal.SSL.Session.mk (isServer : Bool) : IO Session */
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_mk(uint8_t is_server) {
return mk_ssl_session(is_server);
}
/* Std.Internal.SSL.Session.mkServer : IO Session */
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_mk_server() {
return mk_ssl_session(1);
}
/* Std.Internal.SSL.Session.mkClient : IO Session */
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_mk_client() {
return mk_ssl_session(0);
}
/* Std.Internal.SSL.configureServerContext (certFile keyFile : @& String) : IO Unit */
// Loads a PEM certificate/key pair into the shared server context and checks
// that they match. Mutates process-global state; last configuration wins.
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_configure_server_ctx(b_obj_arg cert_file, b_obj_arg key_file) {
SSL_CTX * ctx = get_openssl_server_ctx();
if (ctx == nullptr) {
return mk_ssl_io_error("failed to initialize OpenSSL context");
}
const char * cert = lean_string_cstr(cert_file);
const char * key = lean_string_cstr(key_file);
if (SSL_CTX_use_certificate_file(ctx, cert, SSL_FILETYPE_PEM) <= 0) {
return mk_ssl_io_error("SSL_CTX_use_certificate_file failed");
}
if (SSL_CTX_use_PrivateKey_file(ctx, key, SSL_FILETYPE_PEM) <= 0) {
return mk_ssl_io_error("SSL_CTX_use_PrivateKey_file failed");
}
if (SSL_CTX_check_private_key(ctx) != 1) {
return mk_ssl_io_error("SSL_CTX_check_private_key failed");
}
return lean_io_result_mk_ok(lean_box(0));
}
/* Std.Internal.SSL.configureClientContext (caFile : @& String) (verifyPeer : Bool) : IO Unit */
// Configures trust for the shared client context: a custom CA file when given,
// otherwise (if verification is requested) the platform's default trust store.
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_configure_client_ctx(b_obj_arg ca_file, uint8_t verify_peer) {
SSL_CTX * ctx = get_openssl_client_ctx();
if (ctx == nullptr) {
return mk_ssl_io_error("failed to initialize OpenSSL client context");
}
const char * ca = lean_string_cstr(ca_file);
if (ca != nullptr && ca[0] != '\0') {
if (SSL_CTX_load_verify_locations(ctx, ca, nullptr) != 1) {
return mk_ssl_io_error("SSL_CTX_load_verify_locations failed");
}
} else if (verify_peer) {
// Fall back to platform trust anchors when no custom CA file is provided.
if (SSL_CTX_set_default_verify_paths(ctx) != 1) {
return mk_ssl_io_error("SSL_CTX_set_default_verify_paths failed");
}
}
SSL_CTX_set_verify(ctx, verify_peer ? SSL_VERIFY_PEER : SSL_VERIFY_NONE, nullptr);
return lean_io_result_mk_ok(lean_box(0));
}
/* Std.Internal.SSL.Session.setServerName (ssl : @& Session) (host : @& String) : IO Unit */
// Sets the TLS SNI extension host name for a client-side session.
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_set_server_name(b_obj_arg ssl, b_obj_arg host) {
lean_ssl_session_object * ssl_obj = lean_to_ssl_session_object(ssl);
const char * server_name = lean_string_cstr(host);
if (SSL_set_tlsext_host_name(ssl_obj->ssl, server_name) != 1) {
return mk_ssl_io_error("SSL_set_tlsext_host_name failed");
}
return lean_io_result_mk_ok(lean_box(0));
}
/* Std.Internal.SSL.Session.verifyResult (ssl : @& Session) : IO UInt64 */
// Returns the raw X509_V_* code from SSL_get_verify_result (0 = X509_V_OK).
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_verify_result(b_obj_arg ssl) {
lean_ssl_session_object * ssl_obj = lean_to_ssl_session_object(ssl);
long result = SSL_get_verify_result(ssl_obj->ssl);
return lean_io_result_mk_ok(lean_box_uint64((uint64_t)result));
}
/* Std.Internal.SSL.Session.handshake (ssl : @& Session) : IO Bool */
// One handshake step against the memory BIOs: true = complete, false = needs
// more encrypted I/O (WANT_READ/WANT_WRITE/ZERO_RETURN), error otherwise.
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_handshake(b_obj_arg ssl) {
lean_ssl_session_object * ssl_obj = lean_to_ssl_session_object(ssl);
int rc = SSL_do_handshake(ssl_obj->ssl);
if (rc == 1) {
return lean_io_result_mk_ok(lean_box(1));
}
int err = SSL_get_error(ssl_obj->ssl, rc);
if (err == SSL_ERROR_WANT_READ || err == SSL_ERROR_WANT_WRITE || err == SSL_ERROR_ZERO_RETURN) {
return lean_io_result_mk_ok(lean_box(0));
}
return mk_ssl_io_error("SSL_do_handshake failed", err);
}
/* Std.Internal.SSL.Session.write (ssl : @& Session) (data : @& ByteArray) : IO Bool */
// Encrypts `data` into the session. Returns `true` when the payload was fully
// accepted, `false` when it was queued (renegotiation) or must be retried.
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_write(b_obj_arg ssl, b_obj_arg data) {
    lean_ssl_session_object * session = lean_to_ssl_session_object(ssl);
    size_t len = lean_sarray_size(data);
    // An empty payload is trivially "fully written".
    if (len == 0) {
        return lean_io_result_mk_ok(lean_box(1));
    }
    char const * bytes = (char const *) lean_sarray_cptr(data);
    int err = 0;
    int step = ssl_write_step(session, bytes, len, &err);
    if (step == 1) {
        return lean_io_result_mk_ok(lean_box(1));
    }
    if (step == 0) {
        if (err == SSL_ERROR_WANT_READ) {
            // Renegotiation blocks writes: queue the plaintext and retry it
            // after subsequent reads (see lean_uv_ssl_read).
            if (!append_pending_write(session, bytes, len)) {
                return mk_ssl_io_error("failed to append pending SSL write");
            }
            return lean_io_result_mk_ok(lean_box(0));
        }
        if (err == SSL_ERROR_WANT_WRITE || err == SSL_ERROR_ZERO_RETURN) {
            return lean_io_result_mk_ok(lean_box(0));
        }
    }
    return mk_ssl_io_error("SSL_write failed", err);
}
/* Std.Internal.SSL.Session.read? (ssl : @& Session) (maxBytes : UInt64) : IO (Option ByteArray) */
// Decrypts up to max_bytes of plaintext from the session.
// Returns `some bytes` when plaintext was available, `none` when the engine
// needs more ciphertext (or the peer closed cleanly), and an IO error for any
// other SSL failure. On every successful/retryable path, writes queued during
// renegotiation (see lean_uv_ssl_write) are re-attempted first.
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_read(b_obj_arg ssl, uint64_t max_bytes) {
lean_ssl_session_object * ssl_obj = lean_to_ssl_session_object(ssl);
if (max_bytes == 0) {
// Zero-length read: succeed with an empty array, touch nothing.
return lean_io_result_mk_ok(mk_option_some(mk_empty_byte_array()));
}
// SSL_read takes an int length; clamp instead of overflowing the cast below.
if (max_bytes > INT_MAX) {
max_bytes = INT_MAX;
}
lean_object * out = lean_alloc_sarray(1, 0, max_bytes);
int rc = SSL_read(ssl_obj->ssl, (void*)lean_sarray_cptr(out), (int)max_bytes);
if (rc > 0) {
// Progress was made, so previously blocked plaintext writes may now
// succeed — flush the queue before returning the data.
int flush_err = 0;
if (try_flush_pending_writes(ssl_obj, &flush_err) < 0) {
lean_dec(out);
return mk_ssl_io_error("pending SSL write flush failed", flush_err);
}
lean_sarray_set_size(out, (size_t)rc);
return lean_io_result_mk_ok(mk_option_some(out));
}
// No plaintext produced: release the scratch buffer before classifying.
lean_dec(out);
int err = SSL_get_error(ssl_obj->ssl, rc);
if (err == SSL_ERROR_WANT_READ || err == SSL_ERROR_WANT_WRITE || err == SSL_ERROR_ZERO_RETURN) {
// Retryable (or clean close_notify): still try to flush queued writes
// before reporting "no data right now".
int flush_err = 0;
if (try_flush_pending_writes(ssl_obj, &flush_err) < 0) {
return mk_ssl_io_error("pending SSL write flush failed", flush_err);
}
return lean_io_result_mk_ok(mk_option_none());
}
return mk_ssl_io_error("SSL_read failed", err);
}
/* Std.Internal.SSL.Session.feedEncrypted (ssl : @& Session) (data : @& ByteArray) : IO UInt64 */
// Feeds ciphertext received from the transport into the session's read BIO.
// Returns the number of bytes actually consumed (0 on a retryable stall).
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_feed_encrypted(b_obj_arg ssl, b_obj_arg data) {
    lean_ssl_session_object * session = lean_to_ssl_session_object(ssl);
    size_t len = lean_sarray_size(data);
    if (len == 0) {
        return lean_io_result_mk_ok(lean_box_uint64(0));
    }
    // BIO_write takes an int length; refuse inputs that would overflow it.
    if (len > INT_MAX) {
        return mk_ssl_io_error("BIO_write input too large");
    }
    int written = BIO_write(session->read_bio, lean_sarray_cptr(data), (int) len);
    if (written >= 0) {
        return lean_io_result_mk_ok(lean_box_uint64((uint64_t) written));
    }
    // A retryable failure means "nothing consumed", not a hard error.
    if (BIO_should_retry(session->read_bio)) {
        return lean_io_result_mk_ok(lean_box_uint64(0));
    }
    return mk_ssl_io_error("BIO_write failed");
}
/* Std.Internal.SSL.Session.drainEncrypted (ssl : @& Session) : IO ByteArray */
// Moves all buffered ciphertext out of the write BIO so the caller can hand
// it to the transport. Returns an empty array when nothing is buffered.
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_drain_encrypted(b_obj_arg ssl) {
    lean_ssl_session_object * session = lean_to_ssl_session_object(ssl);
    size_t buffered = BIO_ctrl_pending(session->write_bio);
    if (buffered == 0) {
        return lean_io_result_mk_ok(mk_empty_byte_array());
    }
    // BIO_read takes an int length; refuse sizes that would overflow it.
    if (buffered > INT_MAX) {
        return mk_ssl_io_error("BIO_pending output too large");
    }
    lean_object * out = lean_alloc_sarray(1, 0, buffered);
    int got = BIO_read(session->write_bio, (void *) lean_sarray_cptr(out), (int) buffered);
    if (got >= 0) {
        lean_sarray_set_size(out, (size_t) got);
        return lean_io_result_mk_ok(out);
    }
    lean_dec(out);
    // Retryable means "nothing available right now", not a hard failure.
    if (BIO_should_retry(session->write_bio)) {
        return lean_io_result_mk_ok(mk_empty_byte_array());
    }
    return mk_ssl_io_error("BIO_read failed");
}
/* Std.Internal.SSL.Session.pendingEncrypted (ssl : @& Session) : IO UInt64 */
// Number of ciphertext bytes currently buffered in the write BIO.
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_pending_encrypted(b_obj_arg ssl) {
    lean_ssl_session_object * session = lean_to_ssl_session_object(ssl);
    size_t buffered = BIO_ctrl_pending(session->write_bio);
    return lean_io_result_mk_ok(lean_box_uint64((uint64_t) buffered));
}
/* Std.Internal.SSL.Session.pendingPlaintext (ssl : @& Session) : IO UInt64 */
// Number of already-decrypted plaintext bytes readable without new input.
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_pending_plaintext(b_obj_arg ssl) {
    lean_ssl_session_object * session = lean_to_ssl_session_object(ssl);
    int buffered = SSL_pending(session->ssl);
    return lean_io_result_mk_ok(lean_box_uint64((uint64_t) buffered));
}
#else
// Fallback stubs compiled when OpenSSL support is unavailable (e.g. the
// LEAN_EMSCRIPTEN build): every SSL entry point keeps its exact signature but
// fails with an "is not supported" IO error instead of being left undefined,
// so Lean code can link and report a clean runtime error.
void initialize_openssl_session() {}
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_mk(uint8_t is_server) {
(void)is_server;
return io_result_mk_error("lean_uv_ssl_mk is not supported");
}
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_configure_server_ctx(b_obj_arg cert_file, b_obj_arg key_file) {
(void)cert_file;
(void)key_file;
return io_result_mk_error("lean_uv_ssl_configure_server_ctx is not supported");
}
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_configure_client_ctx(b_obj_arg ca_file, uint8_t verify_peer) {
(void)ca_file;
(void)verify_peer;
return io_result_mk_error("lean_uv_ssl_configure_client_ctx is not supported");
}
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_set_server_name(b_obj_arg ssl, b_obj_arg host) {
(void)ssl;
(void)host;
return io_result_mk_error("lean_uv_ssl_set_server_name is not supported");
}
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_verify_result(b_obj_arg ssl) {
(void)ssl;
return io_result_mk_error("lean_uv_ssl_verify_result is not supported");
}
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_mk_server() {
return io_result_mk_error("lean_uv_ssl_mk_server is not supported");
}
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_mk_client() {
return io_result_mk_error("lean_uv_ssl_mk_client is not supported");
}
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_handshake(b_obj_arg ssl) {
(void)ssl;
return io_result_mk_error("lean_uv_ssl_handshake is not supported");
}
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_write(b_obj_arg ssl, b_obj_arg data) {
(void)ssl;
(void)data;
return io_result_mk_error("lean_uv_ssl_write is not supported");
}
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_read(b_obj_arg ssl, uint64_t max_bytes) {
(void)ssl;
(void)max_bytes;
return io_result_mk_error("lean_uv_ssl_read is not supported");
}
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_feed_encrypted(b_obj_arg ssl, b_obj_arg data) {
(void)ssl;
(void)data;
return io_result_mk_error("lean_uv_ssl_feed_encrypted is not supported");
}
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_drain_encrypted(b_obj_arg ssl) {
(void)ssl;
return io_result_mk_error("lean_uv_ssl_drain_encrypted is not supported");
}
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_pending_encrypted(b_obj_arg ssl) {
(void)ssl;
return io_result_mk_error("lean_uv_ssl_pending_encrypted is not supported");
}
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_pending_plaintext(b_obj_arg ssl) {
(void)ssl;
return io_result_mk_error("lean_uv_ssl_pending_plaintext is not supported");
}
#endif
}

View File

@@ -0,0 +1,55 @@
/*
Copyright (c) 2026 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Author: Sofia Rodrigues
*/
#pragma once
#include <lean/lean.h>
#include "runtime/io.h"
#include "runtime/object.h"
#include "runtime/openssl.h"
#ifndef LEAN_EMSCRIPTEN
#include <openssl/ssl.h>
#include <openssl/bio.h>
#endif
namespace lean {
// NOTE(review): `static` at namespace scope in a header gives every
// translation unit its own copy of this pointer; the external class registered
// in one TU is invisible to the others. Confirm this header is included by a
// single TU, or move the definition into the .cpp and declare it `extern` here.
static lean_external_class * g_ssl_session_external_class = nullptr;
void initialize_openssl_session();
#ifndef LEAN_EMSCRIPTEN
// A plaintext write that could not complete (writer got SSL_ERROR_WANT_READ
// during renegotiation) and is retried after subsequent reads.
typedef struct {
    char * data;
    size_t size;
} lean_ssl_pending_write;
// State backing Std.Internal.SSL.Session: the SSL engine plus its memory BIO
// pair and the queue of plaintext writes deferred during renegotiation.
typedef struct {
    SSL * ssl;
    BIO * read_bio;   // ciphertext fed in from the transport
    BIO * write_bio;  // ciphertext to be drained out to the transport
    size_t pending_writes_count;
    lean_ssl_pending_write * pending_writes;
} lean_ssl_session_object;
static inline lean_object * lean_ssl_session_object_new(lean_ssl_session_object * s) { return lean_alloc_external(g_ssl_session_external_class, s); }
static inline lean_ssl_session_object * lean_to_ssl_session_object(lean_object * o) { return (lean_ssl_session_object*)(lean_get_external_data(o)); }
#endif
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_mk(uint8_t is_server);
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_mk_server();
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_mk_client();
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_configure_server_ctx(b_obj_arg cert_file, b_obj_arg key_file);
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_configure_client_ctx(b_obj_arg ca_file, uint8_t verify_peer);
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_set_server_name(b_obj_arg ssl, b_obj_arg host);
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_verify_result(b_obj_arg ssl);
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_handshake(b_obj_arg ssl);
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_write(b_obj_arg ssl, b_obj_arg data);
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_read(b_obj_arg ssl, uint64_t max_bytes);
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_feed_encrypted(b_obj_arg ssl, b_obj_arg data);
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_drain_encrypted(b_obj_arg ssl);
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_pending_encrypted(b_obj_arg ssl);
// Fix: this entry point is defined in the implementation (and stubbed in the
// no-OpenSSL build) but its declaration was missing from this header.
extern "C" LEAN_EXPORT lean_obj_res lean_uv_ssl_pending_plaintext(b_obj_arg ssl);
}

View File

@@ -0,0 +1,274 @@
import Std.Internal.Http.Data.Body
open Std.Internal.IO Async
open Std.Http
open Std.Http.Body
/-! ## Stream tests -/
-- Test send followed by recv returns the chunk
-- (Fix: the monadic bind arrows `←` were lost in transit; restored throughout.)
def streamSendRecv : Async Unit := do
  let stream ← Stream.empty
  let chunk := Chunk.ofByteArray "hello".toUTF8
  stream.send chunk
  let result ← stream.recv none
  assert! result.isSome
  assert! result.get!.data == "hello".toUTF8

#eval streamSendRecv.block

-- Test tryRecv on empty stream returns none
def streamTryRecvEmpty : Async Unit := do
  let stream ← Stream.empty
  let result ← stream.tryRecv
  assert! result.isNone

#eval streamTryRecvEmpty.block

-- Test tryRecv returns data when available
def streamTryRecvWithData : Async Unit := do
  let stream ← Stream.empty
  stream.send (Chunk.ofByteArray "data".toUTF8)
  let result ← stream.tryRecv
  assert! result.isSome
  assert! result.get!.data == "data".toUTF8

#eval streamTryRecvWithData.block

-- Test close sets the closed flag
def streamClose : Async Unit := do
  let stream ← Stream.empty
  assert! !(← stream.isClosed)
  stream.close
  assert! (← stream.isClosed)

#eval streamClose.block

-- Test recv on closed stream returns none
def streamRecvAfterClose : Async Unit := do
  let stream ← Stream.empty
  stream.close
  let result ← stream.recv none
  assert! result.isNone

#eval streamRecvAfterClose.block
-- Test FIFO ordering of multiple chunks
-- (Fix: restored the `←` bind arrows lost in transit.)
def streamMultipleFIFO : Async Unit := do
  let stream ← Stream.empty
  stream.send (Chunk.ofByteArray "one".toUTF8)
  stream.send (Chunk.ofByteArray "two".toUTF8)
  stream.send (Chunk.ofByteArray "three".toUTF8)
  let r1 ← stream.recv none
  let r2 ← stream.recv none
  let r3 ← stream.recv none
  assert! r1.get!.data == "one".toUTF8
  assert! r2.get!.data == "two".toUTF8
  assert! r3.get!.data == "three".toUTF8

#eval streamMultipleFIFO.block

-- Test for-in iteration collects all chunks until close
def streamForIn : Async Unit := do
  let stream ← Stream.empty
  stream.send (Chunk.ofByteArray "a".toUTF8)
  stream.send (Chunk.ofByteArray "b".toUTF8)
  stream.close
  let mut acc : ByteArray := .empty
  for chunk in stream do
    acc := acc ++ chunk.data
  assert! acc == "ab".toUTF8

#eval streamForIn.block

-- Test chunks preserve extensions
def streamExtensions : Async Unit := do
  let stream ← Stream.empty
  let chunk := { data := "hello".toUTF8, extensions := #[(.mk "key", some "value")] : Chunk }
  stream.send chunk
  let result ← stream.recv none
  assert! result.isSome
  assert! result.get!.extensions.size == 1
  assert! result.get!.extensions[0]! == (.mk "key", some "value")

#eval streamExtensions.block

-- Test set/get known size
def streamKnownSize : Async Unit := do
  let stream ← Stream.empty
  stream.setKnownSize (some (.fixed 100))
  let size ← stream.getKnownSize
  assert! size == some (.fixed 100)

#eval streamKnownSize.block
-- Test capacity: filling up to capacity succeeds via tryRecv check
-- (Fix: restored the `←` bind arrows lost in transit.)
def streamCapacityFull : Async Unit := do
  let stream ← Stream.emptyWithCapacity (capacity := 3)
  stream.send (Chunk.ofByteArray "a".toUTF8)
  stream.send (Chunk.ofByteArray "b".toUTF8)
  stream.send (Chunk.ofByteArray "c".toUTF8)
  -- All three should be buffered
  let r1 ← stream.tryRecv
  let r2 ← stream.tryRecv
  let r3 ← stream.tryRecv
  let r4 ← stream.tryRecv
  assert! r1.get!.data == "a".toUTF8
  assert! r2.get!.data == "b".toUTF8
  assert! r3.get!.data == "c".toUTF8
  assert! r4.isNone

#eval streamCapacityFull.block

-- Test capacity: send blocks when buffer is full and resumes after recv
def streamCapacityBackpressure : Async Unit := do
  let stream ← Stream.emptyWithCapacity (capacity := 2)
  stream.send (Chunk.ofByteArray "a".toUTF8)
  stream.send (Chunk.ofByteArray "b".toUTF8)
  -- Spawn a send that should block because capacity is 2
  let sendTask ← async (t := AsyncTask) <|
    stream.send (Chunk.ofByteArray "c".toUTF8)
  -- Consume one to free space
  let r1 ← stream.recv none
  assert! r1.get!.data == "a".toUTF8
  -- Wait for the blocked send to complete
  sendTask.block
  -- Now we should be able to recv the remaining two
  let r2 ← stream.recv none
  let r3 ← stream.recv none
  assert! r2.get!.data == "b".toUTF8
  assert! r3.get!.data == "c".toUTF8

#eval streamCapacityBackpressure.block

-- Test capacity 1: only one chunk at a time
def streamCapacityOne : Async Unit := do
  let stream ← Stream.emptyWithCapacity (capacity := 1)
  stream.send (Chunk.ofByteArray "first".toUTF8)
  let sendTask ← async (t := AsyncTask) <|
    stream.send (Chunk.ofByteArray "second".toUTF8)
  let r1 ← stream.recv none
  assert! r1.get!.data == "first".toUTF8
  sendTask.block
  let r2 ← stream.recv none
  assert! r2.get!.data == "second".toUTF8

#eval streamCapacityOne.block

-- Test close unblocks pending producers
def streamCloseUnblocksProducers : Async Unit := do
  let stream ← Stream.emptyWithCapacity (capacity := 1)
  stream.send (Chunk.ofByteArray "fill".toUTF8)
  -- This send should block because buffer is full
  let sendTask ← async (t := AsyncTask) <|
    try
      stream.send (Chunk.ofByteArray "blocked".toUTF8)
    catch _ =>
      pure ()
  -- Close should unblock the producer (send gets error internally)
  stream.close
  await sendTask

#eval streamCloseUnblocksProducers.block
/-! ## Request.Builder body tests -/
-- Test Request.Builder.text sets correct headers
-- (Fix: restored the `←` bind arrows lost in transit.)
def requestBuilderText : Async Unit := do
  let req ← Request.post (.originForm! "/api")
    |>.text "Hello, World!"
  assert! req.head.headers.get? Header.Name.contentType == some (Header.Value.ofString! "text/plain; charset=utf-8")
  assert! req.head.headers.get? Header.Name.contentLength == some (Header.Value.ofString! "13")
  let body ← req.body.tryRecv
  assert! body.isSome
  assert! body.get!.data == "Hello, World!".toUTF8

#eval requestBuilderText.block

-- Test Request.Builder.json sets correct headers
def requestBuilderJson : Async Unit := do
  let req ← Request.post (.originForm! "/api")
    |>.json "{\"key\": \"value\"}"
  assert! req.head.headers.get? Header.Name.contentType == some (Header.Value.ofString! "application/json")
  let body ← req.body.tryRecv
  assert! body.isSome
  assert! body.get!.data == "{\"key\": \"value\"}".toUTF8

#eval requestBuilderJson.block

-- Test Request.Builder.bytes sets correct headers
def requestBuilderBytes : Async Unit := do
  let data := ByteArray.mk #[0x01, 0x02, 0x03]
  let req ← Request.post (.originForm! "/api")
    |>.bytes data
  assert! req.head.headers.get? Header.Name.contentType == some (Header.Value.ofString! "application/octet-stream")
  assert! req.head.headers.get? Header.Name.contentLength == some (Header.Value.ofString! "3")
  let body ← req.body.tryRecv
  assert! body.isSome
  assert! body.get!.data == data

#eval requestBuilderBytes.block

-- Test Request.Builder.html sets correct headers
def requestBuilderHtml : Async Unit := do
  let req ← Request.post (.originForm! "/api")
    |>.html "<html></html>"
  assert! req.head.headers.get? Header.Name.contentType == some (Header.Value.ofString! "text/html; charset=utf-8")
  let body ← req.body.tryRecv
  assert! body.isSome

#eval requestBuilderHtml.block

-- Test Request.Builder.noBody creates empty body
def requestBuilderNoBody : Async Unit := do
  let req ← Request.get (.originForm! "/api")
    |>.noBody
  let body ← req.body.tryRecv
  assert! body.isNone

#eval requestBuilderNoBody.block
/-! ## Response.Builder body tests -/
-- Test Response.Builder.text sets correct headers
-- (Fix: restored the `←` bind arrows lost in transit.)
def responseBuilderText : Async Unit := do
  let res ← Response.ok
    |>.text "Hello, World!"
  assert! res.head.headers.get? Header.Name.contentType == some (Header.Value.ofString! "text/plain; charset=utf-8")
  assert! res.head.headers.get? Header.Name.contentLength == some (Header.Value.ofString! "13")
  let body ← res.body.tryRecv
  assert! body.isSome
  assert! body.get!.data == "Hello, World!".toUTF8

#eval responseBuilderText.block

-- Test Response.Builder.json sets correct headers
def responseBuilderJson : Async Unit := do
  let res ← Response.ok
    |>.json "{\"status\": \"ok\"}"
  assert! res.head.headers.get? Header.Name.contentType == some (Header.Value.ofString! "application/json")
  let body ← res.body.tryRecv
  assert! body.isSome

#eval responseBuilderJson.block

-- Test Response.Builder.noBody creates empty body
def responseBuilderNoBody : Async Unit := do
  let res ← Response.ok
    |>.noBody
  let body ← res.body.tryRecv
  assert! body.isNone

#eval responseBuilderNoBody.block

View File

@@ -0,0 +1,519 @@
import Std.Internal.Http
import Std.Internal.Async
import Std.Internal.Async.Timer
open Std.Internal.IO Async
open Std Http
-- A test handler is a plain function from request to response in the
-- `ContextAsync` monad; `Body.Stream` is used for both request and response.
abbrev TestHandler := Request Body.Stream ContextAsync (Response Body.Stream)
-- Lets a bare function be passed wherever the server expects a `Handler`.
instance : Std.Http.Server.Handler TestHandler where
onRequest handler request := handler request
/-!
# Body Edge Case Tests
Tests for HTTP/1.1 body handling edge cases: Content-Length mismatches, chunked encoding
edge cases, body reading/consuming, Transfer-Encoding conflicts, and trailer headers.
-/
/-- Send raw bytes to the server and return the response (empty on no reply).
Fix: restored the `←` bind on `client.recv?`, lost in transit. -/
def sendRaw (client : Mock.Client) (server : Mock.Server) (raw : ByteArray)
    (handler : Request Body.Stream ContextAsync (Response Body.Stream))
    (config : Config := { lingeringTimeout := 3000, generateDate := false }) : IO ByteArray := Async.block do
  client.send raw
  Std.Http.Server.serveConnection server handler config
    |>.run
  let res ← client.recv?
  pure <| res.getD .empty
/-- Assert response string contains a substring. -/
def assertContains (name : String) (response : ByteArray) (needle : String) : IO Unit := do
let responseStr := String.fromUTF8! response
unless responseStr.contains needle do
throw <| IO.userError s!"Test '{name}' failed:\nExpected to contain: {needle.quote}\nGot:\n{responseStr.quote}"
/-- Assert the full response matches exactly. -/
def assertExact (name : String) (response : ByteArray) (expected : String) : IO Unit := do
let responseStr := String.fromUTF8! response
if responseStr != expected then
throw <| IO.userError s!"Test '{name}' failed:\nExpected:\n{expected.quote}\nGot:\n{responseStr.quote}"
/-- Assert response starts with given prefix. -/
def assertStartsWith (name : String) (response : ByteArray) (prefix_ : String) : IO Unit := do
let responseStr := String.fromUTF8! response
unless responseStr.startsWith prefix_ do
throw <| IO.userError s!"Test '{name}' failed:\nExpected to start with: {prefix_.quote}\nGot:\n{responseStr.quote}"
/-- Canonical 400 response the server emits for malformed requests. -/
def bad400 : String :=
"HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
/-- Canonical 408 response the server emits on request timeout. -/
def timeout408 : String :=
"HTTP/1.1 408 Request Timeout\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
/-- Handler that reads and echoes the full request body.
Fix: restored the `←` bind on `ContextAsync.getContext`, lost in transit. -/
def echoBodyHandler : Request Body.Stream ContextAsync (Response Body.Stream) :=
  fun req => do
    let ctx ← ContextAsync.getContext
    -- Safety net: cancel the connection if the body never terminates.
    background do
      Async.sleep 3000
      ctx.cancel .deadline
    let mut body := ByteArray.empty
    for chunk in req.body do
      body := body ++ chunk.data
    Response.ok |>.text (String.fromUTF8! body)
-- =============================================================================
-- POST with body and handler reads it correctly
-- (Fix for this group: restored the `←` bind arrows lost in transit.)
-- =============================================================================

#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST /echo HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 13\x0d\nConnection: close\x0d\n\x0d\nHello, World!".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertContains "Echo body" response "Hello, World!"

-- =============================================================================
-- POST with Content-Length: 0 and no body bytes
-- =============================================================================

#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST /empty HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertStartsWith "Empty body CL=0" response "HTTP/1.1 200"

-- =============================================================================
-- Chunked request - handler reads body data
-- =============================================================================

#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST /chunked HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n6\x0d\n world\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertContains "Chunked body read" response "hello world"

-- =============================================================================
-- Zero-length chunked body (just the terminator)
-- =============================================================================

#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST /empty-chunked HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertStartsWith "Zero-length chunked" response "HTTP/1.1 200"

-- =============================================================================
-- Single-byte chunks
-- =============================================================================

#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n1\x0d\na\x0d\n1\x0d\nb\x0d\n1\x0d\nc\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertContains "Single-byte chunks" response "abc"
-- =============================================================================
-- Large chunk size (hex)
-- (Fix for this group: restored the `←` binds and the `||` in the
-- 400/408 status check, both lost in transit.)
-- =============================================================================

#eval show IO _ from do
  let (client, server) ← Mock.new
  let body := String.ofList (List.replicate 255 'X')
  let raw := s!"POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\nff\x0d\n{body}\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertContains "Large hex chunk (0xff=255)" response body

-- =============================================================================
-- Chunked with chunk extensions
-- =============================================================================

#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;ext=val\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertContains "Chunk with extensions" response "hello"

-- =============================================================================
-- Chunked with quoted chunk extension value
-- =============================================================================

#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;ext=\"quoted value\"\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertContains "Chunk with quoted extension" response "hello"

-- =============================================================================
-- Chunked with trailer headers
-- =============================================================================

#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\nX-Checksum: abc123\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertContains "Chunked with trailers" response "hello"

-- =============================================================================
-- Malformed chunk size (non-hex)
-- =============================================================================

#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\nZZ\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  let responseStr := String.fromUTF8! response
  unless responseStr.startsWith "HTTP/1.1 400" || responseStr.startsWith "HTTP/1.1 408" do
    throw <| IO.userError s!"Test 'Malformed chunk size' failed:\nExpected 400 or 408 status but got:\n{responseStr.quote}"
-- =============================================================================
-- Both Content-Length AND Transfer-Encoding (TE takes precedence per RFC 9112 §6.1)
-- (Fix for this group: restored the `←` bind arrows lost in transit.)
-- =============================================================================

#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 100\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertExact "Does not allow both headers" response bad400

-- =============================================================================
-- POST without Content-Length or Transfer-Encoding (ambiguous body)
-- =============================================================================

#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\nsome body data".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  -- Without CL or TE, server should treat body as zero-length
  assertStartsWith "POST no CL no TE" response "HTTP/1.1"

-- =============================================================================
-- GET with unexpected body (Content-Length present)
-- =============================================================================

#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "GET / HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 5\x0d\nConnection: close\x0d\n\x0d\nhello".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  -- GET with body is technically valid per RFC
  assertStartsWith "GET with body" response "HTTP/1.1 200"

-- =============================================================================
-- Multiple chunks with varying sizes
-- =============================================================================

#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\na\x0d\n0123456789\x0d\n1\x0d\nX\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertContains "Varying chunk sizes" response "abc0123456789X"
-- =============================================================================
-- Chunked with uppercase hex
-- (Fix for this group: restored the `←` bind arrows lost in transit.)
-- =============================================================================

#eval show IO _ from do
  let (client, server) ← Mock.new
  let body := String.ofList (List.replicate 10 'Y')
  let raw := s!"POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\nA\x0d\n{body}\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertContains "Uppercase hex chunk size" response body

-- =============================================================================
-- Chunked with mixed case hex
-- =============================================================================

#eval show IO _ from do
  let (client, server) ← Mock.new
  let body := String.ofList (List.replicate 15 'Z')
  let raw := s!"POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\nF\x0d\n{body}\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertContains "Mixed case hex" response body

-- =============================================================================
-- Large body with Content-Length
-- =============================================================================

#eval show IO _ from do
  let (client, server) ← Mock.new
  let body := String.ofList (List.replicate 10000 'A')
  let raw := s!"POST / HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 10000\x0d\nConnection: close\x0d\n\x0d\n{body}".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertContains "Large body 10KB" response (String.ofList (List.replicate 100 'A'))

-- =============================================================================
-- Multiple Content-Length headers with same value (MAY accept)
-- =============================================================================

#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 5\x0d\nContent-Length: 5\x0d\nConnection: close\x0d\n\x0d\nhello".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  -- Same value duplicated - server may accept or reject
  assertStartsWith "Duplicate CL same value" response "HTTP/1.1"

-- =============================================================================
-- Transfer-Encoding: identity (should be treated as no encoding)
-- =============================================================================

#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: identity\x0d\nContent-Length: 5\x0d\nConnection: close\x0d\n\x0d\nhello".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertStartsWith "TE identity" response "HTTP/1.1"
-- =============================================================================
-- Chunked with multiple extensions
-- (Fix for this group: restored the `←` bind arrows lost in transit.)
-- =============================================================================

#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5;a=1;b=2\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertContains "Multiple chunk extensions" response "hello"

-- =============================================================================
-- Body exactly at Content-Length boundary
-- =============================================================================

#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 5\x0d\nConnection: close\x0d\n\x0d\nhello".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  assertContains "Exact CL boundary" response "hello"

-- =============================================================================
-- Binary body data
-- =============================================================================

#eval show IO _ from do
  let (client, server) ← Mock.new
  let headers := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 4\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let body := ByteArray.mk #[0xFF, 0x00, 0xAB, 0xCD]
  let raw := headers ++ body
  let response ← sendRaw client server raw (fun req => do
    let mut body := ByteArray.empty
    for chunk in req.body do
      body := body ++ chunk.data
    if body.size == 4 then Response.ok |>.text "ok"
    else Response.badRequest |>.text s!"wrong size: {body.size}")
  assertContains "Binary body" response "ok"

-- =============================================================================
-- Chunked with trailing whitespace in chunk size
-- =============================================================================

#eval show IO _ from do
  let (client, server) ← Mock.new
  -- Chunk size with trailing space before CRLF
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5 \x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw echoBodyHandler
  -- Trailing space in chunk size line - may be accepted or rejected
  assertStartsWith "Chunk size trailing space" response "HTTP/1.1"
-- =============================================================================
-- Handler that ignores body - server should still drain on keep-alive
-- =============================================================================
#eval show IO _ from do
let (client, server) Mock.new
let req1 := "POST /ignore HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 5\x0d\n\x0d\nhello"
let req2 := "GET /after HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
let raw := (req1 ++ req2).toUTF8
let response sendRaw client server raw (fun req => do
-- Handler does NOT read the body
Response.ok |>.text (toString req.head.uri))
assertContains "Ignore body, drain: first" response "/ignore"
assertContains "Ignore body, drain: second" response "/after"
-- =============================================================================
-- Chunked body followed by another request on keep-alive (body must be drained)
-- =============================================================================
#eval show IO _ from do
let (client, server) Mock.new
let req1 := "POST /chunked HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n"
let req2 := "GET /next HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
let raw := (req1 ++ req2).toUTF8
let response sendRaw client server raw (fun req => do
Response.ok |>.text (toString req.head.uri))
assertContains "Chunked drain keep-alive: first" response "/chunked"
assertContains "Chunked drain keep-alive: second" response "/next"
-- =============================================================================
-- Content-Length with leading zeros
-- =============================================================================
#eval show IO _ from do
let (client, server) Mock.new
let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 005\x0d\nConnection: close\x0d\n\x0d\nhello".toUTF8
let response sendRaw client server raw echoBodyHandler
-- Leading zeros - some servers accept, some reject
assertStartsWith "CL leading zeros" response "HTTP/1.1"
-- =============================================================================
-- Very large Content-Length value (overflow attempt)
-- =============================================================================
#eval show IO _ from do
let (client, server) Mock.new
let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 99999999999999999999\x0d\nConnection: close\x0d\n\x0d\nhello".toUTF8
let response sendRaw client server raw echoBodyHandler
assertStartsWith "Huge CL value" response "HTTP/1.1"
-- =============================================================================
-- Chunked encoding: chunk size with leading zeros
-- =============================================================================
#eval show IO _ from do
let (client, server) Mock.new
let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n005\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
let response sendRaw client server raw echoBodyHandler
assertContains "Chunk size leading zeros" response "hello"
-- =============================================================================
-- Multiple trailer headers after chunked body
-- =============================================================================
#eval show IO _ from do
let (client, server) Mock.new
let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\nX-Checksum: abc\x0d\nX-Signature: def\x0d\n\x0d\n".toUTF8
let response sendRaw client server raw echoBodyHandler
assertContains "Multiple trailers" response "hello"
-- =============================================================================
-- Content-Length mismatch: body shorter than declared (should timeout/error)
-- =============================================================================
#eval show IO _ from do
let (client, server) Mock.new
-- Declare CL=10 but only send 5 bytes, then close
let headers := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 10\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
let body := "hello".toUTF8
let raw := headers ++ body
let result Async.block do
client.send raw
-- Close only the client→server direction to simulate client disconnect
client.getSendChan.close
Std.Http.Server.serveConnection server echoBodyHandler { lingeringTimeout := 500, generateDate := false }
|>.run
let res client.recv?
pure <| res.getD .empty
-- Server should respond with something (timeout, error, or partial) but not crash
assert! result.size > 0 result.size == 0
-- =============================================================================
-- Content-Length mismatch: body longer than declared (extra data is next request)
-- =============================================================================
#eval show IO _ from do
let (client, server) Mock.new
-- CL=5 so only "hello" is read; remainder is parsed as a new request
let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 5\x0d\n\x0d\nhelloGET /second HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
let response sendRaw client server raw (fun req => do
let mut body := ByteArray.empty
for chunk in req.body do
body := body ++ chunk.data
Response.ok |>.text (String.fromUTF8! body))
-- First response should have exactly "hello"
assertContains "CL mismatch longer: first body" response "hello"
-- The server processes the remainder as a second request on keep-alive.
-- We see two HTTP/1.1 200 responses in the output.
let responseStr := String.fromUTF8! response
let parts := responseStr.splitOn "HTTP/1.1 200 OK"
if parts.length < 3 then
throw <| IO.userError s!"CL mismatch longer: expected 2 responses, got {parts.length} parts"
-- =============================================================================
-- Duplicate Content-Length with different values (MUST reject per RFC 9110 §8.6)
-- =============================================================================
#eval show IO _ from do
let (client, server) Mock.new
let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 3\x0d\nContent-Length: 7\x0d\nConnection: close\x0d\n\x0d\nabc".toUTF8
let response sendRaw client server raw echoBodyHandler
assertExact "Duplicate CL different values (3 vs 7)" response bad400
-- =============================================================================
-- Chunk size overflow (extremely large hex number)
-- =============================================================================
#eval show IO _ from do
let (client, server) Mock.new
let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\nFFFFFFFFFFFFFFFF\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
let response sendRaw client server raw echoBodyHandler
-- Should either reject or handle gracefully
assertStartsWith "Chunk size overflow" response "HTTP/1.1"
-- =============================================================================
-- Incomplete chunked body (missing final 0\r\n\r\n, then connection closes)
-- =============================================================================
#eval show IO _ from do Async.block do
let (client, server) Mock.new
let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n".toUTF8
client.send raw
client.close
let result Async.block do
Std.Http.Server.serveConnection server echoBodyHandler { lingeringTimeout := 500, generateDate := false }
|>.run
let res client.recv?
pure <| res.getD .empty
-- Server should handle incomplete chunked body without crashing
assert! result.size >= 0
-- =============================================================================
-- Content-Length: 0 with POST and handler reading body
-- =============================================================================
#eval show IO _ from do
let (client, server) Mock.new
let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
let response sendRaw client server raw (fun req => do
let mut body := ByteArray.empty
for chunk in req.body do
body := body ++ chunk.data
if body.size == 0
then Response.ok |>.text "empty"
else Response.badRequest |>.text s!"unexpected: {body.size}")
assertContains "CL=0 body is empty" response "empty"
-- =============================================================================
-- Handler ignores chunked body on keep-alive, next request uses Content-Length
-- =============================================================================
#eval show IO _ from do
let (client, server) Mock.new
let req1 := "POST /first HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\n\x0d\na\x0d\n0123456789\x0d\n0\x0d\n\x0d\n"
let req2 := "POST /second HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 3\x0d\nConnection: close\x0d\n\x0d\nabc"
let raw := (req1 ++ req2).toUTF8
let response sendRaw client server raw (fun req => do
-- Intentionally don't read body of first request
Response.ok |>.text (toString req.head.uri))
assertContains "Chunked then CL: first" response "/first"
assertContains "Chunked then CL: second" response "/second"
-- =============================================================================
-- Extremely large number of chunks (100 single-byte chunks)
-- =============================================================================
#eval show IO _ from do
let (client, server) Mock.new
let mut chunked := ""
for _ in [0:100] do
chunked := chunked ++ "1\x0d\nX\x0d\n"
chunked := chunked ++ "0\x0d\n\x0d\n"
let raw := s!"POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n{chunked}".toUTF8
let response sendRaw client server raw echoBodyHandler
let expected := String.ofList (List.replicate 100 'X')
assertContains "100 single-byte chunks" response expected

View File

@@ -0,0 +1,93 @@
import Std.Internal.Http
import Std.Internal.Http.Client
import Std.Internal.Async

open Std.Internal.IO.Async
open Std Http

/-- Handler shape used by these tests: a streaming-body request mapped to a
streaming-body response in `ContextAsync`. (Restored the stripped `→`.) -/
abbrev TestHandler := Request Body.Stream → ContextAsync (Response Body.Stream)

instance : Std.Http.Server.Handler TestHandler where
  onRequest handler request := handler request
/--
Serve `handler` on a mock connection in the background, send `request` through a
persistent client connection, and return `"{head}{body}"` of the response,
quoted. (Restored the four stripped `←` binds.)
-/
private def runClientRequest (handler : TestHandler) (request : Async (Request Body.Stream)) : Async String := do
  let (client, server) ← Mock.new
  background (Std.Http.Server.serveConnection server handler
    (config := { lingeringTimeout := 3000, generateDate := false }) |>.run)
  let conn ← Std.Http.Client.createPersistentConnection client
  let response ← conn.send (← request)
  let body : String ← ContextAsync.run response.body.readAll
  conn.close
  return s!"{response.head}{body}".quote
-- NOTE(review): the expected `info:` strings are kept byte-identical to the
-- scrape; only the stripped `←` binds were restored in the code.

/--
info: "HTTP/1.1 200 OK\x0d\nContent-Length: 9\x0d\nServer: LeanHTTP/1.1\x0d\nmaracujá"
-/
#guard_msgs in
#eval IO.println =<< Async.block do
  runClientRequest
    (handler := fun req => pure { head := { status := .ok }, body := req.body })
    (request :=
      Request.post (.originForm! "/a/b")
        |>.header! "Host" "."
        |>.text "maracujá")

/--
info: "HTTP/1.1 200 OK\x0d\nContent-Length: 0\x0d\nServer: LeanHTTP/1.1\x0d\n"
-/
#guard_msgs in
#eval IO.println =<< Async.block do
  runClientRequest
    (handler := fun req => show ContextAsync _ from do
      let mut size := 0
      for chunk in req.body do
        size := size + chunk.data.size
        if size > 100 then
          return (← Response.withStatus .payloadTooLarge |>.blank)
      Response.ok |>.blank)
    (request :=
      Request.post (.originForm! "/a/b")
        |>.header! "Host" "."
        |>.text "maracujá")

/--
info: "HTTP/1.1 404 Not Found\x0d\nContent-Length: 9\x0d\nServer: LeanHTTP/1.1\x0d\nContent-Type: text/plain; charset=utf-8\x0d\nNot Found"
-/
#guard_msgs in
#eval IO.println =<< Async.block do
  runClientRequest
    (handler := fun req => show ContextAsync _ from do
      if toString req.head.uri == "/missing" then
        Response.notFound |>.text "Not Found"
      else
        Response.ok |>.text "Found")
    (request :=
      Request.get (.originForm! "/missing")
        |>.header! "Host" "localhost"
        |>.noBody)

/--
info: "HTTP/1.1 200 OK\x0d\nContent-Length: 13\x0d\nServer: LeanHTTP/1.1\x0d\nContent-Type: application/json\x0d\n{\"key\":\"val\"}"
-/
#guard_msgs in
#eval IO.println =<< Async.block do
  runClientRequest
    (handler := fun (_ : Request Body.Stream) => show ContextAsync _ from do
      let content := "{\"key\":\"val\"}".toUTF8
      Response.new
        |>.status .ok
        |>.header! "Content-Type" "application/json"
        |>.stream (fun stream => do
          stream.setKnownSize (some (.fixed content.size))
          stream.send (Chunk.ofByteArray content)
          stream.close))
    (request :=
      Request.get (.originForm! "/json")
        |>.header! "Host" "localhost"
        |>.noBody)

View File

@@ -0,0 +1,635 @@
import Std.Internal.Http
import Std.Internal.Async
import Std.Internal.Async.Timer

open Std.Internal.IO Async
open Std Http

/-- Handler shape used by these tests. (Restored the stripped `→`.) -/
abbrev TestHandler := Request Body.Stream → ContextAsync (Response Body.Stream)

instance : Std.Http.Server.Handler TestHandler where
  onRequest handler request := handler request

/-- A single request/response expectation driven by `runTestCase`. -/
structure TestCase where
  /-- Descriptive name for the test -/
  name : String
  /-- The HTTP request to send -/
  request : Request (Array Chunk)
  /-- Handler function to process the request -/
  handler : Request Body.Stream → ContextAsync (Response Body.Stream)
  /-- Expected response string -/
  expected : String
  /-- Whether to use chunked encoding -/
  chunked : Bool := false
  deriving Inhabited
/-- Serialize a request head plus its body chunks to raw bytes; when `chunked`
is set, append the empty terminating chunk of the chunked coding. -/
def toByteArray (req : Request (Array Chunk)) (chunked := false) : IO ByteArray := Async.block do
  -- `encodePart` renamed from a local `toByteArray` that shadowed this def.
  let encodePart (buf : Internal.ChunkedBuffer) (part : Chunk) := Internal.Encode.encode .v11 buf part
  let mut buf := Internal.Encode.encode (v := .v11) .empty req.head
  for part in req.body do
    buf := encodePart buf part
  if chunked then
    buf := encodePart buf (Chunk.mk .empty .empty)
  return buf.toByteArray
/-- Send multiple requests through a mock connection and return the response
data. (Restored the stripped `→` in the handler type and the `←` binds.) -/
def sendRequests (client : Mock.Client) (server : Mock.Server) (reqs : Array (Request (Array Chunk)))
    (onRequest : Request Body.Stream → ContextAsync (Response Body.Stream))
    (chunked : Bool := false) : IO ByteArray := Async.block do
  let mut data := .empty
  for req in reqs do data := data ++ (← toByteArray req chunked)
  client.send data
  Std.Http.Server.serveConnection server onRequest { lingeringTimeout := 3000, generateDate := false }
    |>.run
  let res ← client.recv?
  pure <| res.getD .empty
/-- Run a single test case, comparing actual response against expected response.
Throws an `IO.userError` with both strings (quoted) on mismatch. -/
def runTest (name : String) (client : Mock.Client) (server : Mock.Server) (req : Request (Array Chunk))
    (handler : Request Body.Stream → ContextAsync (Response Body.Stream)) (expected : String) (chunked : Bool := false) :
    IO Unit := do
  let response ← sendRequests client server #[req] handler chunked
  let responseData := String.fromUTF8! response
  if responseData != expected then
    -- The `s!` literal must be adjacent to its string; the scrape split them.
    throw <| IO.userError s!"Test '{name}' failed:\n\
      Expected:\n{expected.quote}\n\
      Got:\n{responseData.quote}"
/-- Allocate a fresh mock connection pair and run one `TestCase` through it. -/
def runTestCase (tc : TestCase) : IO Unit := do
  let (client, server) ← Mock.new
  Async.block <| runTest tc.name client server tc.request tc.handler tc.expected tc.chunked
-- Request Predicates

/-- Check if request is a basic GET request to the specified URI and host.
(Restored the stripped `∧` conjunctions.) -/
def isBasicGetRequest (req : Request Body.Stream) (uri : String) (host : String) : Bool :=
  req.head.method == .get ∧
  req.head.version == .v11 ∧
  toString req.head.uri = uri ∧
  req.head.headers.hasEntry (.mk "host") (.ofString! host)

/-- Check if request has a specific Content-Length header. -/
def hasContentLength (req : Request Body.Stream) (length : String) : Bool :=
  req.head.headers.hasEntry (.mk "content-length") (.ofString! length)

/-- Check if request uses chunked transfer encoding. -/
def isChunkedRequest (req : Request Body.Stream) : Bool :=
  if let some te := req.head.headers.get? (.mk "transfer-encoding") then
    match Header.TransferEncoding.parse te with
    | some te => te.isChunked
    | none => false
  else
    false

/-- Check if request has a specific header with a specific value. -/
def hasHeader (req : Request Body.Stream) (name : String) (value : String) : Bool :=
  if let some name := Header.Name.ofString? name then
    req.head.headers.hasEntry name (.ofString! value)
  else
    false

/-- Check if request method matches the expected method. -/
def hasMethod (req : Request Body.Stream) (method : Method) : Bool :=
  req.head.method == method

/-- Check if request URI matches the expected URI string. -/
def hasUri (req : Request Body.Stream) (uri : String) : Bool :=
  toString req.head.uri = uri
-- Tests
-- NOTE(review): restored the stripped `∧` in the handler conditions and the
-- lost indentation; all request/expected strings are byte-identical.

#eval runTestCase {
  name := "GET with Content-Length"
  request := Request.new
    |>.method .get
    |>.uri! "/"
    |>.header! "Host" "example.com"
    |>.header! "Connection" "close"
    |>.header! "Content-Length" "7"
    |>.body #[.mk "survive".toUTF8 #[]]
  handler := fun req => do
    if isBasicGetRequest req "/" "example.com" ∧ hasContentLength req "7"
      then Response.ok |>.text "ok"
      else Response.badRequest |>.text "closed"
  expected := "HTTP/1.1 200 OK\x0d\nContent-Length: 2\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\nContent-Type: text/plain; charset=utf-8\x0d\n\x0d\nok"
}

#eval runTestCase {
  name := "Simple GET request"
  request := Request.new
    |>.method .get
    |>.uri! "/api/users"
    |>.header! "Host" "api.example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun req => do
    if hasMethod req .get ∧ hasUri req "/api/users"
      then Response.ok |>.text "users list"
      else Response.notFound |>.text ""
  expected := "HTTP/1.1 200 OK\x0d\nContent-Length: 10\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\nContent-Type: text/plain; charset=utf-8\x0d\n\x0d\nusers list"
}

#eval runTestCase {
  name := "POST with body"
  request := Request.new
    |>.method .post
    |>.uri! "/api/users"
    |>.header! "Host" "api.example.com"
    |>.header! "Content-Type" "application/json"
    |>.header! "Content-Length" "16"
    |>.header! "Connection" "close"
    |>.body #[.mk "{\"name\":\"Alice\"}".toUTF8 #[]]
  handler := fun req => do
    if hasMethod req .post ∧ hasHeader req "Content-Type" "application/json"
      then Response.new |>.status .created |>.text "Created"
      else Response.badRequest |>.text ""
  expected := "HTTP/1.1 201 Created\x0d\nContent-Length: 7\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\nContent-Type: text/plain; charset=utf-8\x0d\n\x0d\nCreated"
}

#eval runTestCase {
  name := "DELETE request"
  request := Request.new
    |>.method .delete
    |>.uri! "/api/users/123"
    |>.header! "Host" "api.example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun req => do
    if hasMethod req .delete ∧ hasUri req "/api/users/123"
      then Response.new |>.status .noContent |>.text ""
      else Response.notFound |>.text ""
  expected := "HTTP/1.1 204 No Content\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\nContent-Type: text/plain; charset=utf-8\x0d\n\x0d\n"
}

#eval runTestCase {
  name := "HEAD request"
  request := Request.new
    |>.method .head
    |>.uri! "/api/users"
    |>.header! "Host" "api.example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun req => do
    if hasMethod req .head
      then Response.ok |>.text ""
      else Response.notFound |>.text ""
  expected := "HTTP/1.1 200 OK\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\nContent-Type: text/plain; charset=utf-8\x0d\n\x0d\n"
}

#eval runTestCase {
  name := "OPTIONS request"
  request := Request.new
    |>.method .options
    |>.uri! "*"
    |>.header! "Host" "api.example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun req => do
    if hasMethod req .options
      then Response.new
        |>.status .ok
        |>.header! "Allow" "GET, POST, PUT, DELETE, OPTIONS"
        |>.text ""
      else Response.badRequest |>.text ""
  expected := "HTTP/1.1 200 OK\x0d\nAllow: GET, POST, PUT, DELETE, OPTIONS\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\nContent-Type: text/plain; charset=utf-8\x0d\n\x0d\n"
}

#eval runTestCase {
  name := "Request with multiple headers"
  request := Request.new
    |>.method .get
    |>.uri! "/api/data"
    |>.header! "Host" "api.example.com"
    |>.header! "Accept" "application/json"
    |>.header! "User-Agent" "TestClient/1.0"
    |>.header! "Authorization" "Bearer token123"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun req => do
    if hasHeader req "Authorization" "Bearer token123" ∧ hasHeader req "Accept" "application/json"
      then Response.ok |>.text "authenticated"
      else Response.new |>.status .unauthorized |>.text "unauthorized"
  expected := "HTTP/1.1 200 OK\x0d\nContent-Length: 13\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\nContent-Type: text/plain; charset=utf-8\x0d\n\x0d\nauthenticated"
}

#eval runTestCase {
  name := "Request with query parameters"
  request := Request.new
    |>.method .get
    |>.uri! "/api/search?q=test&limit=10"
    |>.header! "Host" "api.example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun req => do
    if hasUri req "/api/search?q=test&limit=10"
      then Response.ok |>.text "search results"
      else Response.notFound |>.text ""
  expected := "HTTP/1.1 200 OK\x0d\nContent-Length: 14\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\nContent-Type: text/plain; charset=utf-8\x0d\n\x0d\nsearch results"
}

#eval runTestCase {
  name := "POST with empty body"
  request := Request.new
    |>.method .post
    |>.uri! "/api/trigger"
    |>.header! "Host" "api.example.com"
    |>.header! "Content-Length" "0"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun req => do
    if hasMethod req .post ∧ hasContentLength req "0"
      then Response.new |>.status .accepted |>.text "triggered"
      else Response.badRequest |>.text ""
  expected := "HTTP/1.1 202 Accepted\x0d\nContent-Length: 9\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\nContent-Type: text/plain; charset=utf-8\x0d\n\x0d\ntriggered"
}

#eval runTestCase {
  name := "Large response body"
  request := Request.new
    |>.method .get
    |>.uri! "/api/large"
    |>.header! "Host" "api.example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun _ => do
    let largeBody := String.ofList (List.replicate 1000 'X')
    Response.ok |>.text largeBody
  expected := s!"HTTP/1.1 200 OK\x0d\nContent-Length: 1000\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\nContent-Type: text/plain; charset=utf-8\x0d\n\x0d\n{String.ofList (List.replicate 1000 'X')}"
}

#eval runTestCase {
  name := "Custom status code"
  request := Request.new
    |>.method .get
    |>.uri! "/api/teapot"
    |>.header! "Host" "api.example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun _ => do
    Response.new
      |>.status .imATeapot
      |>.text "I'm a teapot"
  expected := "HTTP/1.1 418 I'm a teapot\x0d\nContent-Length: 12\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\nContent-Type: text/plain; charset=utf-8\x0d\n\x0d\nI'm a teapot"
}

#eval runTestCase {
  name := "Request with special characters in URI"
  request := Request.new
    |>.method .get
    |>.uri! "/api/users/%C3%A9"
    |>.header! "Host" "api.example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun req => do
    if hasUri req "/api/users/%C3%A9"
      then Response.ok |>.text "found"
      else Response.notFound |>.text ""
  expected := "HTTP/1.1 200 OK\x0d\nContent-Length: 5\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\nContent-Type: text/plain; charset=utf-8\x0d\n\x0d\nfound"
}

#eval runTestCase {
  name := "Response with custom headers"
  request := Request.new
    |>.method .get
    |>.uri! "/api/data"
    |>.header! "Host" "api.example.com"
    |>.header! "Connection" "close"
    |>.header! "Cache-Control" "no-cache"
    |>.body #[]
  handler := fun _ => do
    Response.new
      |>.status .ok
      |>.header! "Cache-Control" "no-cache"
      |>.header! "X-Custom-Header" "custom-value"
      |>.text "data"
  expected := "HTTP/1.1 200 OK\x0d\nX-Custom-Header: custom-value\x0d\nContent-Length: 4\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\nContent-Type: text/plain; charset=utf-8\x0d\nCache-Control: no-cache\x0d\n\x0d\ndata"
}

#eval runTestCase {
  name := "Request with Content-Type and body"
  request := Request.new
    |>.method .post
    |>.uri! "/api/xml"
    |>.header! "Host" "api.example.com"
    |>.header! "Content-Type" "application/xml"
    |>.header! "Content-Length" "17"
    |>.header! "Connection" "close"
    |>.body #[.mk "<data>test</data>".toUTF8 #[]]
  handler := fun req => do
    if hasHeader req "Content-Type" "application/xml"
      then Response.ok |>.text "processed xml"
      else Response.new |>.status .unsupportedMediaType |>.text "unsupported"
  expected := "HTTP/1.1 200 OK\x0d\nContent-Length: 13\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\nContent-Type: text/plain; charset=utf-8\x0d\n\x0d\nprocessed xml"
}
-- Limits
-- NOTE(review): these cases exercise the parser limits (header name/value
-- length, header count, total header size); each expects a 400 response.

#eval
  let bigString := String.fromUTF8! (ByteArray.mk (Array.ofFn (n := 257) (fun _ => 65)))
  runTestCase {
    name := "Huge String request"
    request := Request.new
      |>.method .head
      |>.uri! "/api/users"
      |>.header! "Host" "api.example.com"
      |>.header! bigString "a"
      |>.header! "Connection" "close"
      |>.body #[]
    handler := fun req => do
      if hasMethod req .head
        then Response.ok
          |>.header (.ofString! bigString) (.ofString! "ata")
          |>.text ""
        else Response.notFound |>.text ""
    expected := "HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
  }

#eval runTestCase {
  name := "Request line too long"
  request :=
    Request.new
      |>.method .get
      |>.uri (.originForm (.mk #[URI.EncodedString.encode <| String.ofList (List.replicate 2000 'a')] true) none)
      |>.header! "Host" "api.example.com"
      |>.header! "Connection" "close"
      |>.body #[]
  handler := fun req => do
    Response.ok |>.text (toString (toString req.head.uri).length)
  expected := "HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
}

#eval runTestCase {
  name := "Header long"
  request :=
    Request.new
      |>.method .get
      |>.uri (.originForm (.mk #[URI.EncodedString.encode <| String.ofList (List.replicate 200 'a')] true) none)
      |>.header! "Host" (String.ofList (List.replicate 8230 'a'))
      |>.header! "Connection" "close"
      |>.body #[]
  handler := fun req => do
    Response.ok |>.text (toString (toString req.head.uri).length)
  expected := "HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
}

#eval runTestCase {
  name := "Too many headers"
  request := Id.run do
    let mut req := Request.new
      |>.method .get
      |>.uri! "/api/data"
      |>.header! "Host" "api.example.com"
      |>.header! "Connection" "close"
    for i in [0:101] do
      req := req |>.header! s!"X-Header-{i}" s!"value{i}"
    return req |>.body #[]
  handler := fun _ => do
    Response.ok |>.text "success"
  expected := "HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
}

#eval runTestCase {
  name := "Header value too long"
  request := Request.new
    |>.method .get
    |>.uri! "/api/test"
    |>.header! "Host" "api.example.com"
    |>.header! "X-Long-Value" (String.ofList (List.replicate 9000 'x'))
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun _ => do
    Response.ok |>.text "ok"
  expected := "HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
}

#eval runTestCase {
  name := "Total headers size too large"
  request := Id.run do
    let mut req := Request.new
      |>.method .get
      |>.uri! "/api/data"
      |>.header! "Host" "api.example.com"
      |>.header! "Connection" "close"
    for i in [0:200] do
      req := req |>.header! s!"X-Header-{i}" (String.ofList (List.replicate 200 'a'))
    return req |>.body #[]
  handler := fun _ => do
    Response.ok |>.text "success"
  expected := "HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
}
-- Tests
-- NOTE(review): restored the stripped `←` binds (`Body.Stream.empty`,
-- `Sleep.mk`) and the lost indentation; strings are byte-identical.

#eval runTestCase {
  name := "Streaming response with fixed Content-Length"
  request := Request.new
    |>.method .get
    |>.uri! "/stream"
    |>.header! "Host" "example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun req => do
    let stream ← Body.Stream.empty
    background do
      for i in [0:3] do
        let sleep ← Sleep.mk 5
        sleep.wait
        stream.send <| Chunk.ofByteArray s!"chunk{i}\n".toUTF8
      stream.close
    return Response.ok
      |>.header (.mk "content-length") (.mk "21")
      |>.body stream
  expected := "HTTP/1.1 200 OK\x0d\nContent-Length: 21\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\nchunk0\nchunk1\nchunk2\n"
}

#eval runTestCase {
  name := "Streaming response with setKnownSize fixed"
  request := Request.new
    |>.method .get
    |>.uri! "/stream-sized"
    |>.header! "Host" "example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun _ => do
    let stream ← Body.Stream.empty
    stream.setKnownSize (some (.fixed 15))
    background do
      for i in [0:3] do
        stream.send <| Chunk.ofByteArray s!"data{i}".toUTF8
      stream.close
    return Response.ok
      |>.body stream
  expected := "HTTP/1.1 200 OK\x0d\nContent-Length: 15\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\ndata0data1data2"
}

#eval runTestCase {
  name := "Streaming response with chunked encoding"
  request := Request.new
    |>.method .get
    |>.uri! "/stream-chunked"
    |>.header! "Host" "example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  handler := fun _ => do
    let stream ← Body.Stream.empty
    background do
      stream.send <| Chunk.ofByteArray "hello".toUTF8
      stream.send <| Chunk.ofByteArray "world".toUTF8
      stream.close
    return Response.ok
      |>.body stream
  expected := "HTTP/1.1 200 OK\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n5\x0d\nhello\x0d\n5\x0d\nworld\x0d\n0\x0d\n\x0d\n"
}

#eval runTestCase {
  name := "Chunked request with streaming response"
  request := Request.new
    |>.method .post
    |>.uri! "/"
    |>.header! "Host" "example.com"
    |>.header! "Transfer-Encoding" "chunked"
    |>.header! "Connection" "close"
    |>.body #[
      .mk "data1".toUTF8 #[],
      .mk "data2".toUTF8 #[]
    ]
  handler := fun req => do
    let stream ← Body.Stream.empty
    if isChunkedRequest req
      then
        background do
          for i in [0:2] do
            stream.send <| Chunk.ofByteArray s!"response{i}".toUTF8
          stream.close
        return Response.ok
          |>.header (.mk "content-length") (.mk "18")
          |>.body stream
      else
        stream.send <| Chunk.ofByteArray "not chunked".toUTF8
        stream.close
        return Response.badRequest
          |>.body stream
  expected := "HTTP/1.1 200 OK\x0d\nContent-Length: 18\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\nresponse0response1"
  chunked := true
}

#eval runTestCase {
  name := "Chunked request with streaming response and other encodings"
  request := Request.new
    |>.method .post
    |>.uri! "/"
    |>.header! "Host" "example.com"
    |>.header! "Transfer-Encoding" "gzip, chunked"
    |>.header! "Connection" "close"
    |>.body #[
      .mk "data1".toUTF8 #[],
      .mk "data2".toUTF8 #[]
    ]
  handler := fun req => do
    let stream ← Body.Stream.empty
    if isChunkedRequest req
      then
        background do
          for i in [0:2] do
            stream.send <| Chunk.ofByteArray s!"response{i}".toUTF8
          stream.close
        return Response.ok
          |>.header (.mk "content-length") (.mk "18")
          |>.body stream
      else
        stream.send <| Chunk.ofByteArray "not chunked".toUTF8
        stream.close
        return Response.badRequest
          |>.body stream
  expected := "HTTP/1.1 200 OK\x0d\nContent-Length: 18\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\nresponse0response1"
  chunked := true
}

View File

@@ -0,0 +1,135 @@
import Std.Internal.Http
import Std.Internal.Async

open Std.Internal.IO Async
open Std Http

/-- Handler shape used by these tests. (Restored the stripped `→`.) -/
abbrev TestHandler := Request Body.Stream → ContextAsync (Response Body.Stream)

instance : Std.Http.Server.Handler TestHandler where
  onRequest handler request := handler request

/-- Send raw bytes to the server and return the response. -/
def sendRaw (client : Mock.Client) (server : Mock.Server) (raw : ByteArray)
    (handler : Request Body.Stream → ContextAsync (Response Body.Stream))
    (config : Config := { lingeringTimeout := 3000 }) : IO ByteArray := Async.block do
  client.send raw
  Std.Http.Server.serveConnection server handler config
    |>.run
  let res ← client.recv?
  pure <| res.getD .empty
/-- A test case that feeds an encoded request through the server and hands the
decoded response string to a custom `check` action. (Restored the stripped
`→` in the `handler` and `check` field types.) -/
structure TestCase where
  name : String
  request : Request (Array Chunk)
  handler : Request Body.Stream → ContextAsync (Response Body.Stream)
  config : Config := { lingeringTimeout := 3000 }
  check : String → IO Unit
  chunked : Bool := false

/-- Serialize a request head plus its body chunks; when `chunked` is set,
append the empty terminating chunk. -/
def toByteArray (req : Request (Array Chunk)) (chunked := false) : IO ByteArray := Async.block do
  let mut data := Internal.Encode.encode (v := .v11) .empty req.head
  let encodePart (buf : Internal.ChunkedBuffer) (part : Chunk) := Internal.Encode.encode .v11 buf part
  for part in req.body do data := encodePart data part
  if chunked then data := encodePart data (Chunk.mk .empty .empty)
  return data.toByteArray
/-- Run a single `TestCase` against a mock connection and apply its check. -/
def runTestCase (tc : TestCase) : IO Unit := do
  let (client, server) ← Mock.new
  let raw ← toByteArray tc.request tc.chunked
  let response ← sendRaw client server raw tc.handler tc.config
  let responseData := String.fromUTF8! response
  tc.check responseData
-- Test: Date header is automatically generated when generateDate is true
#eval runTestCase {
  name := "Date header auto-generated"
  request := Request.new
    |>.method .get
    |>.uri! "/"
    |>.header! "Host" "example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  config := { lingeringTimeout := 3000, generateDate := true }
  handler := fun _ => do
    Response.ok |>.text "hello"
  check := fun response => do
    unless response.contains "Date: " do
      throw <| IO.userError s!"Expected Date header in response but got:\n{response}"
    unless response.contains "200 OK" do
      throw <| IO.userError s!"Expected 200 OK in response but got:\n{response}"
}
-- Test: Date header is NOT generated when generateDate is false
#eval runTestCase {
  name := "Date header not generated when disabled"
  request := Request.new
    |>.method .get
    |>.uri! "/"
    |>.header! "Host" "example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  config := { lingeringTimeout := 3000, generateDate := false }
  handler := fun _ => do
    Response.ok |>.text "hello"
  check := fun response => do
    if response.contains "Date: " then
      throw <| IO.userError s!"Date header should NOT be present when generateDate is false:\n{response}"
}
-- Test: User-set Date header is not overwritten
#eval runTestCase {
  name := "User-set Date header preserved"
  request := Request.new
    |>.method .get
    |>.uri! "/"
    |>.header! "Host" "example.com"
    |>.header! "Connection" "close"
    |>.body #[]
  config := { lingeringTimeout := 3000, generateDate := true }
  handler := fun _ => do
    Response.ok
      |>.header! "Date" "Mon, 01 Jan 2024 00:00:00 GMT"
      |>.text "hello"
  check := fun response => do
    unless response.contains "Date: Mon, 01 Jan 2024 00:00:00 GMT" do
      throw <| IO.userError s!"User-set Date header should be preserved:\n{response}"
}
-- Test: Normal POST without Expect does NOT get 100 Continue
#eval runTestCase {
  name := "No 100-continue without Expect header"
  request := Request.new
    |>.method .post
    |>.uri! "/upload"
    |>.header! "Host" "example.com"
    |>.header! "Content-Length" "5"
    |>.header! "Connection" "close"
    |>.body #[.mk "hello".toUTF8 #[]]
  config := { lingeringTimeout := 3000, generateDate := false }
  handler := fun req => do
    -- `readAll` is a monadic action; the `←` binder was lost in transit.
    let body : String ← req.body.readAll
    Response.ok |>.text s!"got: {body}"
  check := fun response => do
    if response.contains "100 Continue" then
      throw <| IO.userError s!"Should NOT have 100 Continue without Expect header:\n{response}"
    unless response.contains "200 OK" do
      throw <| IO.userError s!"Expected 200 OK:\n{response}"
}

View File

@@ -0,0 +1,614 @@
import Std.Internal.Http.Data.Chunk
import Std.Internal.Http.Data.Request
import Std.Internal.Http.Data.Response
open Std.Http
open Std.Http.Internal
/-- Encode any `.v11`-encodable value and render the resulting bytes as a
string, so `#guard_msgs` can pin the exact wire format. -/
private def encodeStr [Encode .v11 t] (v : t) : String :=
  String.fromUTF8! (Encode.encode (v := .v11) ChunkedBuffer.empty v).toByteArray
/-! ## Version encoding -/
-- Each check pins the exact bytes the `.v11` encoder produces.
/--
info: "HTTP/1.1"
-/
#guard_msgs in
#eval encodeStr Version.v11
/--
info: "HTTP/2.0"
-/
#guard_msgs in
#eval encodeStr Version.v20
/--
info: "HTTP/3.0"
-/
#guard_msgs in
#eval encodeStr Version.v30
/-! ## Method encoding -/
/--
info: "GET"
-/
#guard_msgs in
#eval encodeStr Method.get
/--
info: "HEAD"
-/
#guard_msgs in
#eval encodeStr Method.head
/--
info: "POST"
-/
#guard_msgs in
#eval encodeStr Method.post
/--
info: "PUT"
-/
#guard_msgs in
#eval encodeStr Method.put
/--
info: "DELETE"
-/
#guard_msgs in
#eval encodeStr Method.delete
/--
info: "CONNECT"
-/
#guard_msgs in
#eval encodeStr Method.connect
/--
info: "OPTIONS"
-/
#guard_msgs in
#eval encodeStr Method.options
/--
info: "TRACE"
-/
#guard_msgs in
#eval encodeStr Method.trace
/--
info: "PATCH"
-/
#guard_msgs in
#eval encodeStr Method.patch
/-! ## Status encoding -/
-- A status encodes as "<code> <reason phrase>".
/--
info: "200 OK"
-/
#guard_msgs in
#eval encodeStr Status.ok
/--
info: "201 Created"
-/
#guard_msgs in
#eval encodeStr Status.created
/--
info: "404 Not Found"
-/
#guard_msgs in
#eval encodeStr Status.notFound
/--
info: "500 Internal Server Error"
-/
#guard_msgs in
#eval encodeStr Status.internalServerError
/--
info: "999 999"
-/
#guard_msgs in
#eval encodeStr (Status.other 999)
/-! ## Headers and Request.Head encoding -/
-- Header names are rendered in canonical (capitalized) form.
/--
info: ""
-/
#guard_msgs in
#eval encodeStr Headers.empty
/--
info: "Content-Type: text/html\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Headers.empty.insert! "content-type" "text/html")
/--
info: "X-Custom-Header: value\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Headers.empty.insert! "x-custom-header" "value")
/--
info: "GET /path HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr ({ method := .get, version := .v11, uri := .parse! "/path" } : Request.Head)
/--
info: "POST /submit HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr ({ method := .post, version := .v11, uri := .parse! "/submit" } : Request.Head)
-- Request head with a header line (multi-line record re-indented).
/--
info: "PUT /resource HTTP/2.0\x0d\nContent-Type: application/json\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr ({
    method := .put
    version := .v20
    uri := .parse! "/resource"
    headers := Headers.empty.insert! "content-type" "application/json"
  } : Request.Head)
/-! ## Response.Head encoding -/
-- Status line only: "HTTP/<version> <code> <reason>" followed by a blank line.
/--
info: "HTTP/1.1 200 OK\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr ({ status := .ok, version := .v11 } : Response.Head)
/--
info: "HTTP/1.1 404 Not Found\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr ({ status := .notFound, version := .v11 } : Response.Head)
-- Response head with a header line (multi-line record re-indented).
/--
info: "HTTP/2.0 500 Internal Server Error\x0d\nContent-Type: text/plain\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr ({
    status := .internalServerError
    version := .v20
    headers := Headers.empty.insert! "content-type" "text/plain"
  } : Response.Head)
/-! ## Chunk encoding -/
-- Chunks encode as "<hex size>[;extensions]\r\n<data>\r\n".
/--
info: "5\x0d\nhello\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Chunk.ofByteArray "hello".toUTF8)
/--
info: "0\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr Chunk.empty
/--
info: "3;lang=en\x0d\nabc\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Chunk.ofByteArray "abc".toUTF8 |>.withExtension (.mk "lang") "en")
/--
info: "3;lang=\"en \\\" u\";type=text\x0d\nabc\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Chunk.ofByteArray "abc".toUTF8 |>.withExtension (.mk "lang") "en \" u" |>.withExtension (.mk "type") "text")
/--
info: "a\x0d\n0123456789\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Chunk.ofByteArray "0123456789".toUTF8)
/-! ## Request builder -/
-- Each builder pins the request line produced for the given method.
/--
info: "GET /index.html HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Request.get (.parse! "/index.html") |>.body ()).head
/--
info: "POST /api/data HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Request.post (.parse! "/api/data") |>.body ()).head
/--
info: "PUT /resource HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Request.put (.parse! "/resource") |>.body ()).head
/--
info: "DELETE /item HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Request.delete (.parse! "/item") |>.body ()).head
/--
info: "PATCH /update HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Request.patch (.parse! "/update") |>.body ()).head
/--
info: "HEAD /check HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Request.head' (.parse! "/check") |>.body ()).head
/--
info: "OPTIONS * HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Request.options (.parse! "*") |>.body ()).head
/--
info: "CONNECT proxy:8080 HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Request.connect (.parse! "proxy:8080") |>.body ()).head
/--
info: "TRACE /debug HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Request.trace (.parse! "/debug") |>.body ()).head
/--
info: "POST /v2 HTTP/2.0\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Request.new |>.method .post |>.uri (.parse! "/v2") |>.version .v20 |>.body ()).head
/-! ## Response builder -/
/--
info: "HTTP/1.1 200 OK\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Response.ok |>.body ()).head
/--
info: "HTTP/1.1 404 Not Found\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Response.notFound |>.body ()).head
/--
info: "HTTP/1.1 500 Internal Server Error\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Response.internalServerError |>.body ()).head
/--
info: "HTTP/1.1 400 Bad Request\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Response.badRequest |>.body ()).head
/--
info: "HTTP/1.1 201 Created\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Response.created |>.body ()).head
/--
info: "HTTP/1.1 202 Accepted\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Response.accepted |>.body ()).head
/--
info: "HTTP/1.1 401 Unauthorized\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Response.unauthorized |>.body ()).head
/--
info: "HTTP/1.1 403 Forbidden\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Response.forbidden |>.body ()).head
/--
info: "HTTP/1.1 409 Conflict\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Response.conflict |>.body ()).head
/--
info: "HTTP/1.1 503 Service Unavailable\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Response.serviceUnavailable |>.body ()).head
/--
info: "HTTP/1.1 418 I'm a teapot\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Response.withStatus .imATeapot |>.body ()).head
/-! ## Edge cases: Status encoding -/
-- Status.other 0: minimum possible value
/--
info: "0 0"
-/
#guard_msgs in
#eval encodeStr (Status.other 0)
-- Status.other that overlaps with a named status (100 = Continue)
/--
info: "100 100"
-/
#guard_msgs in
#eval encodeStr (Status.other 100)
-- Status.other max UInt16
/--
info: "65535 65535"
-/
#guard_msgs in
#eval encodeStr (Status.other 65535)
-- Non-standard status code in the middle
/--
info: "299 299"
-/
#guard_msgs in
#eval encodeStr (Status.other 299)
/-! ## Edge cases: Chunk size hex encoding -/
-- Size 16 → hex "10" (first two-digit hex)
/--
info: "10\x0d\n0123456789abcdef\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Chunk.ofByteArray "0123456789abcdef".toUTF8)
-- Size 255 → hex "ff": verify prefix
/--
info: true
-/
#guard_msgs in
#eval do
  let data := ByteArray.mk (Array.replicate 255 (0x41 : UInt8))
  return encodeStr (Chunk.ofByteArray data) |>.startsWith "ff\r\n"
-- Size 256 → hex "100" (first three-digit hex): verify prefix
/--
info: true
-/
#guard_msgs in
#eval do
  let data := ByteArray.mk (Array.replicate 256 (0x41 : UInt8))
  return encodeStr (Chunk.ofByteArray data) |>.startsWith "100\r\n"
-- Size 15 → hex "f" (largest single hex digit)
/--
info: "f\x0d\n0123456789abcde\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Chunk.ofByteArray "0123456789abcde".toUTF8)
-- Chunk.ofByteArray with empty data (same as Chunk.empty)
/--
info: "0\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Chunk.ofByteArray ByteArray.empty)
/-! ## Edge cases: Chunk extensions -/
-- Extension with no value (None case) via direct struct construction
/--
info: "3;marker\x0d\nabc\x0d\n"
-/
#guard_msgs in
#eval encodeStr ({ data := "abc".toUTF8, extensions := #[(.mk "marker", none)] } : Chunk)
-- Extension with empty string value (not quoted since "".any returns false)
/--
info: "3;key=\x0d\nabc\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Chunk.ofByteArray "abc".toUTF8 |>.withExtension (.mk "key") "")
-- Extension value that is all token chars (no quoting needed)
/--
info: "3;key=abc123\x0d\nabc\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Chunk.ofByteArray "abc".toUTF8 |>.withExtension (.mk "key") "abc123")
-- Extension value with space (must be quoted)
/--
info: "3;key=\"hello world\"\x0d\nabc\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Chunk.ofByteArray "abc".toUTF8 |>.withExtension (.mk "key") "hello world")
-- Extension value with backslash (must be escaped)
/--
info: "3;key=\"a\\\\b\"\x0d\nabc\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Chunk.ofByteArray "abc".toUTF8 |>.withExtension (.mk "key") "a\\b")
-- Multiple extensions with no value and with value
/--
info: "3;a;b=1\x0d\nabc\x0d\n"
-/
#guard_msgs in
#eval encodeStr ({ data := "abc".toUTF8, extensions := #[(.mk "a", none), (.mk "b", some "1")] } : Chunk)
/-! ## Trailer encoding -/
-- Empty trailer: terminal chunk + CRLF
/--
info: "0\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr Trailer.empty
-- Trailer with a single header
/--
info: "0\x0d\nChecksum: abc123\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Trailer.empty.insert! "checksum" "abc123")
-- Trailer header whose value contains a space
/--
info: "0\x0d\nChecksum: abc 123\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Trailer.empty.insert! "checksum" "abc 123")
-- Trailer with multiple headers
/--
info: "0\x0d\nChecksum: abc123\x0d\nExpires: Thu, 01 Dec 2025 16:00:00 GMT\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr (Trailer.empty.insert! "checksum" "abc123" |>.insert! "expires" "Thu, 01 Dec 2025 16:00:00 GMT")
/-! ## Edge cases: Trailer validation -/
-- Empty header name is rejected
/--
info: true
-/
#guard_msgs in
#eval (Header.Name.ofString? "" |>.isNone : Bool)
-- Header name with spaces is rejected
/--
info: true
-/
#guard_msgs in
#eval (Header.Name.ofString? "bad name" |>.isNone : Bool)
-- Header name with colon is rejected
/--
info: true
-/
#guard_msgs in
#eval (Header.Name.ofString? "bad:name" |>.isNone : Bool)
-- Header name with newline is rejected
/--
info: true
-/
#guard_msgs in
#eval (Header.Name.ofString? "bad\nname" |>.isNone : Bool)
-- Header value with newline is rejected
/--
info: true
-/
#guard_msgs in
#eval (Header.Value.ofString? "bad\nvalue" |>.isNone : Bool)
-- Header value with null byte is rejected
/--
info: true
-/
#guard_msgs in
#eval (Header.Value.ofString? "bad\x00value" |>.isNone : Bool)
-- Header value with carriage return is rejected
/--
info: true
-/
#guard_msgs in
#eval (Header.Value.ofString? "bad\rvalue" |>.isNone : Bool)
-- Valid header name succeeds
/--
info: true
-/
#guard_msgs in
#eval (Header.Name.ofString? "content-type" |>.isSome : Bool)
-- Valid header value with tab succeeds (tab is allowed per RFC)
/--
info: true
-/
#guard_msgs in
#eval (Header.Value.ofString? "value\twith-tab" |>.isSome : Bool)
-- Empty header value is valid
/--
info: true
-/
#guard_msgs in
#eval (Header.Value.ofString? "" |>.isSome : Bool)
-- Header value with DEL character (0x7F) is rejected
/--
info: true
-/
#guard_msgs in
#eval (Header.Value.ofString? (String.ofList [Char.ofNat 0x7F]) |>.isNone : Bool)
/-! ## Edge cases: Request URI encoding -/
-- URI with query parameters
/--
info: "GET /search?q=hello&lang=en HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr ({ method := .get, version := .v11, uri := .parse! "/search?q=hello&lang=en" } : Request.Head)
-- URI with percent-encoded characters
/--
info: "GET /path%20with%20spaces HTTP/1.1\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr ({ method := .get, version := .v11, uri :=.parse! "/path%20with%20spaces" } : Request.Head)
/-! ## Edge cases: Response with unusual statuses -/
/--
info: "HTTP/1.1 100 Continue\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr ({ status := .«continue», version := .v11 } : Response.Head)
/--
info: "HTTP/1.1 204 No Content\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr ({ status := .noContent, version := .v11 } : Response.Head)
/--
info: "HTTP/1.1 301 Moved Permanently\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr ({ status := .movedPermanently, version := .v11 } : Response.Head)
/--
info: "HTTP/3.0 200 OK\x0d\n\x0d\n"
-/
#guard_msgs in
#eval encodeStr ({ status := .ok, version := .v30 } : Response.Head)

View File

@@ -0,0 +1,297 @@
import Std.Internal.Http.Data.Headers
open Std.Http
open Std.Http.Header
/-! ## Header.Name tests -/
-- Valid header names
#guard (Name.ofString? "content-type").isSome
#guard (Name.ofString? "host").isSome
#guard (Name.ofString? "x-custom-header").isSome
#guard (Name.ofString? "accept").isSome
-- Invalid header names (empty, spaces, control chars)
#guard (Name.ofString? "").isNone
#guard (Name.ofString? "invalid header").isNone
#guard (Name.ofString? "bad\nname").isNone
#guard (Name.ofString? "bad\x00name").isNone
#guard (Name.ofString? "bad(name").isNone
#guard (Name.ofString? "bad)name").isNone
#guard (Name.ofString? "bad,name").isNone
#guard (Name.ofString? "bad;name").isNone
#guard (Name.ofString? "bad[name").isNone
#guard (Name.ofString? "bad]name").isNone
#guard (Name.ofString? "bad{name").isNone
#guard (Name.ofString? "bad}name").isNone
#guard (Name.ofString? "bad\"name").isNone
-- Case normalization: `.value` stores the lowercase form.
/--
info: "content-type"
-/
#guard_msgs in
#eval (Name.ofString! "Content-Type").value
/--
info: "content-type"
-/
#guard_msgs in
#eval (Name.ofString! "CONTENT-TYPE").value
-- Canonical form: `toString` renders the capitalized wire form.
/--
info: "Content-Type"
-/
#guard_msgs in
#eval toString (Name.ofString! "content-type")
/--
info: "X-Custom-Header"
-/
#guard_msgs in
#eval toString (Name.ofString! "x-custom-header")
/--
info: "Host"
-/
#guard_msgs in
#eval toString (Name.ofString! "host")
-- Name.is case-insensitive comparison
#guard (Name.ofString! "content-type").is "Content-Type"
#guard (Name.ofString! "content-type").is "CONTENT-TYPE"
#guard (Name.ofString! "content-type").is "content-type"
#guard !(Name.ofString! "content-type").is "host"
-- Predefined names
#guard Name.contentType.value == "content-type"
#guard Name.contentLength.value == "content-length"
#guard Name.host.value == "host"
#guard Name.authorization.value == "authorization"
#guard Name.userAgent.value == "user-agent"
#guard Name.accept.value == "accept"
#guard Name.connection.value == "connection"
#guard Name.transferEncoding.value == "transfer-encoding"
#guard Name.server.value == "server"
-- Name equality
#guard Name.ofString! "content-type" == Name.ofString! "Content-Type"
#guard Name.ofString! "HOST" == Name.ofString! "host"
#guard !(Name.ofString! "content-type" == Name.ofString! "host")
/-! ## Header.Value tests -/
-- Valid header values (printable ASCII, tab, space)
#guard (Value.ofString? "text/html").isSome
#guard (Value.ofString? "application/json; charset=utf-8").isSome
#guard (Value.ofString? "").isSome
#guard (Value.ofString? "value with spaces").isSome
#guard (Value.ofString? "value\twith\ttabs").isSome
-- Invalid header values (control characters except tab)
#guard (Value.ofString? "bad\x00value").isNone
#guard (Value.ofString? "bad\nvalue").isNone
#guard (Value.ofString? "bad\rvalue").isNone
-- Value.is case-insensitive comparison
#guard (Value.ofString! "text/html").is "TEXT/HTML"
#guard (Value.ofString! "text/html").is "text/html"
#guard !(Value.ofString! "text/html").is "application/json"
-- Value toString
/--
info: "text/html"
-/
#guard_msgs in
#eval toString (Value.ofString! "text/html")
/-! ## Headers collection tests -/
-- Multi-line `#guard`/`#eval` bodies re-indented (indentation lost in transit).
-- Empty headers
#guard Headers.empty.isEmpty
#guard Headers.empty.size == 0
-- Add and retrieve
#guard (Headers.empty.insert! "content-type" "text/html").size == 1
#guard !(Headers.empty.insert! "content-type" "text/html").isEmpty
#guard (Headers.empty.insert! "content-type" "text/html").contains (Name.ofString! "content-type")
-- get? retrieves the value
/--
info: "text/html"
-/
#guard_msgs in
#eval do
  let h := Headers.empty.insert! "content-type" "text/html"
  return (h.get? (Name.ofString! "content-type")).get!.value
-- get? returns none for missing headers
#guard (Headers.empty.get? (Name.ofString! "content-type")).isNone
-- Multiple headers
#guard
  let h := Headers.empty
    |>.insert! "content-type" "text/html"
    |>.insert! "host" "example.com"
    |>.insert! "accept" "application/json"
  h.size == 3
#guard
  let h := Headers.empty.insert! "host" "example.com"
  h.contains (Name.ofString! "host") && !h.contains (Name.ofString! "accept")
#guard
  let h := Headers.empty
    |>.insert! "content-type" "text/html"
    |>.insert! "host" "example.com"
  let h' := h.erase (Name.ofString! "content-type")
  !h'.contains (Name.ofString! "content-type") && h'.contains (Name.ofString! "host")
#guard
  let h := Headers.empty
    |>.insert! "content-type" "text/html"
    |>.insert! "host" "example.com"
  (h.erase (Name.ofString! "content-type")).size == 1
-- hasEntry
#guard
  let h := Headers.empty.insert! "content-type" "text/html"
  h.hasEntry (Name.ofString! "content-type") (Value.ofString! "text/html")
#guard
  let h := Headers.empty.insert! "content-type" "text/html"
  !h.hasEntry (Name.ofString! "content-type") (Value.ofString! "application/json")
-- update existing
/--
info: "TEXT/HTML"
-/
#guard_msgs in
#eval do
  let h := Headers.empty.insert! "content-type" "text/html"
  let h' := h.update (Name.ofString! "content-type") (fun
    | some v => Value.ofString! v.value.toUpper
    | none => Value.ofString! "default")
  return (h'.get? (Name.ofString! "content-type")).get!.value
-- update missing (inserts)
/--
info: "default-value"
-/
#guard_msgs in
#eval do
  let h := Headers.empty
  let h' := h.update (Name.ofString! "x-new") (fun
    | some v => v
    | none => Value.ofString! "default-value")
  return (h'.get? (Name.ofString! "x-new")).get!.value
-- ofList
#guard
  let h := Headers.ofList [
    (Name.ofString! "host", Value.ofString! "example.com"),
    (Name.ofString! "accept", Value.ofString! "*/*")
  ]
  h.size == 2 && h.contains (Name.ofString! "host")
-- merge
#guard
  let h1 := Headers.empty.insert! "content-type" "text/html"
  let h2 := Headers.empty.insert! "host" "example.com"
  let merged := h1.merge h2
  merged.contains (Name.ofString! "content-type") && merged.contains (Name.ofString! "host")
-- filter
#guard
  let h := Headers.empty
    |>.insert! "content-type" "text/html"
    |>.insert! "host" "example.com"
    |>.insert! "accept" "application/json"
  let filtered := h.filter (fun name _ => name.is "host")
  filtered.size == 1 && filtered.contains (Name.ofString! "host")
-- fold
/--
info: 3
-/
#guard_msgs in
#eval do
  let h := Headers.empty
    |>.insert! "a" "1"
    |>.insert! "b" "2"
    |>.insert! "c" "3"
  return h.fold 0 (fun acc _ _ => acc + 1)
-- getD with default
/--
info: "fallback"
-/
#guard_msgs in
#eval do
  let h := Headers.empty
  return (h.getD (Name.ofString! "missing") (Value.ofString! "fallback")).value
-- mapValues
/--
info: "TEXT/HTML"
-/
#guard_msgs in
#eval do
  let h := Headers.empty.insert! "content-type" "text/html"
  let h' := h.mapValues (fun _ v => Value.ofString! v.value.toUpper)
  return (h'.get? (Name.ofString! "content-type")).get!.value
/-! ## Header typeclass tests -/
-- ContentLength parse
#guard
  match Header.ContentLength.parse (Value.ofString! "42") with
  | some cl => cl.length == 42
  | none => false
#guard
  match Header.ContentLength.parse (Value.ofString! "0") with
  | some cl => cl.length == 0
  | none => false
-- NOTE(review): `Value.ofString!` returns a bare `Value`, not an `Option`, so it
-- cannot be the right-hand side of `=<<`; `ofString?` (as in the sibling checks)
-- is what is meant here.
#guard (Header.ContentLength.parse =<< (Value.ofString? "abc")).isNone
#guard (Header.ContentLength.parse =<< (Value.ofString? "")).isNone
/--
info: ("content-length", "42")
-/
#guard_msgs in
#eval do
  let (name, value) := Header.ContentLength.serialize 42
  return (name.value, value.value)
#guard
  match Header.TransferEncoding.parse (Value.ofString! "chunked") with
  | some te => te.isChunked
  | none => false
#guard
  match Header.TransferEncoding.parse (Value.ofString! "gzip, chunked") with
  | some te => te.isChunked && te.codings.size == 2
  | none => false
#guard
  match Header.TransferEncoding.parse (Value.ofString! "gzip") with
  | some te => !te.isChunked
  | none => false
#guard (Header.TransferEncoding.parse =<< (Value.ofString? "chunked, gzip")).isNone
#guard (Header.TransferEncoding.parse =<< (Value.ofString? "chunked, chunked")).isNone
#guard (Header.TransferEncoding.parse =<< (Value.ofString? "")).isNone
#guard (Header.TransferEncoding.parse =<< (Value.ofString? ",")).isNone
#guard (Header.TransferEncoding.parse =<< (Value.ofString? " , , ")).isNone
/--
info: ("transfer-encoding", "gzip,chunked")
-/
#guard_msgs in
#eval do
  -- Anonymous-constructor brackets `⟨…⟩` restored (stripped in transit).
  let te : Header.TransferEncoding := ⟨#["gzip", "chunked"], by native_decide⟩
  let (name, value) := Header.TransferEncoding.serialize te
  return (name.value, value.value)

View File

@@ -0,0 +1,305 @@
import Std.Internal.Http
import Std.Internal.Async
import Std.Internal.Async.Timer
open Std.Internal.IO Async
open Std Http
/-- Handler shape used by these tests: an async function from request to response. -/
abbrev TestHandler := Request Body.Stream → ContextAsync (Response Body.Stream)

instance : Std.Http.Server.Handler TestHandler where
  onRequest handler request := handler request
/-!
# Keep-Alive and Connection Persistence Tests
Tests for HTTP/1.1 keep-alive behavior, connection reuse, multiple sequential requests
on a single connection, and connection limits.
-/
/-- Send raw bytes to the server and return the response. -/
def sendRaw (client : Mock.Client) (server : Mock.Server) (raw : ByteArray)
    (handler : Request Body.Stream → ContextAsync (Response Body.Stream))
    (config : Config := { lingeringTimeout := 3000, generateDate := false }) : IO ByteArray := Async.block do
  client.send raw
  Std.Http.Server.serveConnection server handler config |>.run
  -- `recv?` yields `none` when the peer closed without data; normalize to empty bytes.
  let res ← client.recv?
  pure <| res.getD .empty
/-- Assert the full response matches exactly. -/
def assertExact (name : String) (response : ByteArray) (expected : String) : IO Unit := do
  let responseStr := String.fromUTF8! response
  if responseStr != expected then
    throw <| IO.userError s!"Test '{name}' failed:\nExpected:\n{expected.quote}\nGot:\n{responseStr.quote}"
/-- Assert response string contains a substring. -/
def assertContains (name : String) (response : ByteArray) (needle : String) : IO Unit := do
  let responseStr := String.fromUTF8! response
  unless responseStr.contains needle do
    throw <| IO.userError s!"Test '{name}' failed:\nExpected to contain: {needle.quote}\nGot:\n{responseStr.quote}"
/-- Assert response starts with given prefix. -/
def assertStartsWith (name : String) (response : ByteArray) (prefix_ : String) : IO Unit := do
  let responseStr := String.fromUTF8! response
  unless responseStr.startsWith prefix_ do
    throw <| IO.userError s!"Test '{name}' failed:\nExpected to start with: {prefix_.quote}\nGot:\n{responseStr.quote}"
/-- Count non-overlapping occurrences of a substring in a string.
Returns 0 for an empty `needle`. -/
partial def countOccurrences (haystack : String) (needle : String) : Nat :=
  -- NOTE(review): the previous body referred to `haystack.Pos` and `Pos.find?`,
  -- neither of which exists; rewritten as a plain left-to-right scan with the
  -- same interface and the same skip-past-the-match (non-overlapping) counting.
  if needle.isEmpty then 0
  else go haystack 0
where
  go (s : String) (count : Nat) : Nat :=
    if s.length < needle.length then count
    else if s.startsWith needle then go (s.drop needle.length) (count + 1)
    else go (s.drop 1) count
/-- Minimal handler that always answers 200 with body "ok". -/
def okHandler : Request Body.Stream → ContextAsync (Response Body.Stream) :=
  fun _ => Response.ok |>.text "ok"
-- =============================================================================
-- Two sequential requests on the same keep-alive connection
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let req1 := "GET /first HTTP/1.1\x0d\nHost: example.com\x0d\n\x0d\n"
  let req2 := "GET /second HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
  let raw := (req1 ++ req2).toUTF8
  let response ← sendRaw client server raw (fun req => do
    let uri := toString req.head.uri
    if uri == "/first" then Response.ok |>.text "first"
    else if uri == "/second" then Response.ok |>.text "second"
    else Response.notFound |>.text "not found")
  assertContains "Keep-alive: two responses" response "HTTP/1.1 200 OK"
  assertContains "Keep-alive: first body" response "first"
  assertContains "Keep-alive: second body" response "second"
-- =============================================================================
-- Three sequential requests on keep-alive
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let req1 := "GET /a HTTP/1.1\x0d\nHost: example.com\x0d\n\x0d\n"
  let req2 := "GET /b HTTP/1.1\x0d\nHost: example.com\x0d\n\x0d\n"
  let req3 := "GET /c HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
  let raw := (req1 ++ req2 ++ req3).toUTF8
  let response ← sendRaw client server raw (fun req => do
    let uri := toString req.head.uri
    Response.ok |>.text uri)
  -- (an unused `responseStr` binding was dropped here)
  assertContains "Three keep-alive: response a" response "/a"
  assertContains "Three keep-alive: response b" response "/b"
  assertContains "Three keep-alive: response c" response "/c"
-- =============================================================================
-- Explicit Connection: keep-alive header
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let req1 := "GET /1 HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: keep-alive\x0d\n\x0d\n"
  let req2 := "GET /2 HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
  let raw := (req1 ++ req2).toUTF8
  let response ← sendRaw client server raw (fun req => do
    Response.ok |>.text (toString req.head.uri))
  assertContains "Explicit keep-alive: first" response "/1"
  assertContains "Explicit keep-alive: second" response "/2"
-- =============================================================================
-- Connection: close on first request - server should not process second
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let req1 := "GET /first HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
  let req2 := "GET /second HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
  let raw := (req1 ++ req2).toUTF8
  let response ← sendRaw client server raw (fun req => do
    Response.ok |>.text (toString req.head.uri))
  let responseStr := String.fromUTF8! response
  assertContains "Connection close: first served" response "/first"
  -- Second request should NOT be processed since first had Connection: close
  if responseStr.contains "/second" then
    throw <| IO.userError "Test 'Connection close stops pipeline' failed: second request was served"
-- =============================================================================
-- Default keep-alive (no Connection header = keep-alive in HTTP/1.1)
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  -- No Connection header at all - HTTP/1.1 defaults to keep-alive
  let req1 := "GET /default1 HTTP/1.1\x0d\nHost: example.com\x0d\n\x0d\n"
  let req2 := "GET /default2 HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
  let raw := (req1 ++ req2).toUTF8
  let response ← sendRaw client server raw (fun req => do
    Response.ok |>.text (toString req.head.uri))
  assertContains "Default keep-alive: first" response "/default1"
  assertContains "Default keep-alive: second" response "/default2"
-- =============================================================================
-- Keep-alive disabled via config
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let req1 := "GET /1 HTTP/1.1\x0d\nHost: example.com\x0d\n\x0d\n"
  let req2 := "GET /2 HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
  let raw := (req1 ++ req2).toUTF8
  let response ← sendRaw client server raw
    (fun req => Response.ok |>.text (toString req.head.uri))
    (config := { lingeringTimeout := 3000, enableKeepAlive := false, generateDate := false })
  let responseStr := String.fromUTF8! response
  assertContains "Keep-alive disabled: first served" response "/1"
  -- With keep-alive disabled, second request should not be processed
  if responseStr.contains "/2" then
    throw <| IO.userError "Test 'Keep-alive disabled' failed: second request was served"
-- =============================================================================
-- Max requests per connection limit
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let config : Config := { lingeringTimeout := 3000, maxRequests := 3, generateDate := false }
  -- Send 4 requests but only 3 should be processed
  let mut raw := ""
  for i in [0:3] do
    raw := raw ++ s!"GET /{i} HTTP/1.1\x0d\nHost: example.com\x0d\n\x0d\n"
  raw := raw ++ "GET /3 HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
  let response ← sendRaw client server raw.toUTF8
    (fun req => Response.ok |>.text (toString req.head.uri))
    (config := config)
  assertContains "Max requests: /0 served" response "/0"
  assertContains "Max requests: /1 served" response "/1"
  assertContains "Max requests: /2 served" response "/2"
-- =============================================================================
-- Keep-alive with POST bodies (body must be fully consumed before next request)
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let req1 := "POST /upload HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 5\x0d\n\x0d\nhello"
  let req2 := "GET /after HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
  let raw := (req1 ++ req2).toUTF8
  let response ← sendRaw client server raw (fun req => do
    let uri := toString req.head.uri
    if uri == "/upload" then Response.ok |>.text "uploaded"
    else if uri == "/after" then Response.ok |>.text "after"
    else Response.notFound |>.text "")
  assertContains "Keep-alive POST then GET: uploaded" response "uploaded"
  assertContains "Keep-alive POST then GET: after" response "after"
-- =============================================================================
-- Keep-alive response should include Connection header appropriately
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  -- First request without Connection: close - response should NOT have Connection: close
  let req1 := "GET /check HTTP/1.1\x0d\nHost: example.com\x0d\n\x0d\n"
  let req2 := "GET /end HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
  let raw := (req1 ++ req2).toUTF8
  let response ← sendRaw client server raw okHandler
  assertContains "Last response has Connection: close" response "Connection: close"
-- =============================================================================
-- Mixed methods on keep-alive
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let req1 := "GET /get HTTP/1.1\x0d\nHost: example.com\x0d\n\x0d\n"
  let req2 := "POST /post HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 4\x0d\n\x0d\ndata"
  let req3 := "DELETE /delete HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
  let raw := (req1 ++ req2 ++ req3).toUTF8
  let response ← sendRaw client server raw (fun req => do
    match req.head.method with
    | .get => Response.ok |>.text "got"
    | .post => Response.ok |>.text "posted"
    | .delete => Response.ok |>.text "deleted"
    | _ => Response.badRequest |>.text "unknown")
  assertContains "Mixed methods: got" response "got"
  assertContains "Mixed methods: posted" response "posted"
  assertContains "Mixed methods: deleted" response "deleted"
-- =============================================================================
-- Keep-alive with chunked request body then another request
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let req1 := "POST /chunked HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n"
  let req2 := "GET /after HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n"
  let raw := (req1 ++ req2).toUTF8
  let response ← sendRaw client server raw (fun req => do
    Response.ok |>.text (toString req.head.uri))
  assertContains "Chunked then GET: first" response "/chunked"
  assertContains "Chunked then GET: second" response "/after"
-- =============================================================================
-- Keep-alive with varying Content-Lengths
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let req1 := "POST /a HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 3\x0d\n\x0d\nabc"
  let req2 := "POST /b HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 10\x0d\n\x0d\n0123456789"
  let req3 := "POST /c HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\n\x0d\n"
  let raw := (req1 ++ req2 ++ req3).toUTF8
  let response ← sendRaw client server raw (fun req => do
    Response.ok |>.text (toString req.head.uri))
  assertContains "Varying CL: /a" response "/a"
  assertContains "Varying CL: /b" response "/b"
  assertContains "Varying CL: /c" response "/c"
-- =============================================================================
-- Handler reads body on keep-alive connection
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let req1 := "POST /read HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 5\x0d\n\x0d\nhello"
  let req2 := "POST /read HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 5\x0d\nConnection: close\x0d\n\x0d\nworld"
  let raw := (req1 ++ req2).toUTF8
  let response ← sendRaw client server raw (fun req => do
    -- Drain the body stream so the connection can proceed to the next request.
    let mut body := ByteArray.empty
    for chunk in req.body do
      body := body ++ chunk.data
    let bodyStr := String.fromUTF8! body
    Response.ok |>.text s!"body={bodyStr}")
  assertContains "Body read keep-alive: first" response "body=hello"
  assertContains "Body read keep-alive: second" response "body=world"

View File

@@ -0,0 +1,489 @@
import Std.Internal.Http
import Std.Internal.Async
import Std.Internal.Async.Timer
open Std.Internal.IO Async
open Std Http
/-- Shape of the request handlers used throughout these tests: a function from
a streaming-body request to a streaming-body response in the async context. -/
abbrev TestHandler := Request Body.Stream ContextAsync (Response Body.Stream)
-- Allow a plain function of this shape to be used wherever a server `Handler`
-- is expected; dispatch is simply application.
instance : Std.Http.Server.Handler TestHandler where
onRequest handler request := handler request
/-!
# Malformed HTTP Request Tests
Tests for HTTP/1.1 compliance when handling malformed, invalid, or edge-case requests.
Covers: missing Host, invalid methods, bad headers, CRLF issues, invalid characters.
-/
/-- Send raw bytes to the server and return the response.

`client`/`server` are the two ends of a mock connection. The raw request bytes
are pushed into the client side first, then the server connection loop is run
with `handler` and `config`; finally whatever the server wrote back is read.
Returns an empty `ByteArray` when the server produced no response. -/
def sendRaw (client : Mock.Client) (server : Mock.Server) (raw : ByteArray)
    (handler : Request Body.Stream ContextAsync (Response Body.Stream))
    (config : Config := { lingeringTimeout := 3000, generateDate := false }) : IO ByteArray := Async.block do
  client.send raw
  Std.Http.Server.serveConnection server handler config
    |>.run
  let res ← client.recv?
  pure <| res.getD .empty
/-- Assert that the response starts with the expected status line; throws an
`IO.userError` describing the mismatch otherwise. -/
def assertStatus (name : String) (response : ByteArray) (status : String) : IO Unit := do
  let got := String.fromUTF8! response
  if got.startsWith status then
    pure ()
  else
    throw <| IO.userError s!"Test '{name}' failed:\nExpected status: {status}\nGot: {got.quote}"
/-- Assert the full response matches exactly; throws an `IO.userError` showing
both the expected and the actual bytes (quoted) otherwise. -/
def assertExact (name : String) (response : ByteArray) (expected : String) : IO Unit := do
  let actual := String.fromUTF8! response
  unless actual == expected do
    throw <| IO.userError s!"Test '{name}' failed:\nExpected:\n{expected.quote}\nGot:\n{actual.quote}"
/-- Handler that ignores the request and replies `200 OK` with body "ok". -/
def okHandler : Request Body.Stream ContextAsync (Response Body.Stream) :=
fun _ => Response.ok |>.text "ok"
/-- Raw bytes of the server's canned `400 Bad Request` response, used by
`assertExact` checks below. -/
def bad400 : String :=
"HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
/-- Raw bytes of the `200 OK` response produced by `okHandler`. -/
def ok200 : String :=
"HTTP/1.1 200 OK\x0d\nContent-Length: 2\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\nContent-Type: text/plain; charset=utf-8\x0d\n\x0d\nok"
-- =============================================================================
-- Missing Host header (RFC 9112 §3.2 - Host is REQUIRED in HTTP/1.1)
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "GET / HTTP/1.1\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw okHandler
  assertExact "Missing Host header returns 400" response bad400
-- =============================================================================
-- Empty Host header value
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "GET / HTTP/1.1\x0d\nHost: \x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw okHandler
  assertStatus "Empty Host header" response "HTTP/1.1"
-- =============================================================================
-- Invalid HTTP version (this server only speaks HTTP/1.1)
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "GET / HTTP/2.0\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw okHandler
  assertExact "Invalid HTTP version HTTP/2.0" response bad400
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "GET / HTTP/0.9\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw okHandler
  assertExact "Invalid HTTP version HTTP/0.9" response bad400
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "GET / HTTP/1.0\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw okHandler
  assertExact "Invalid HTTP version HTTP/1.0" response bad400
-- =============================================================================
-- Malformed request line - missing version
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "GET /\x0d\nHost: example.com\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw okHandler
  assertExact "Missing HTTP version" response bad400
-- =============================================================================
-- Malformed request line - missing URI
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "GET HTTP/1.1\x0d\nHost: example.com\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw okHandler
  assertStatus "Missing URI" response "HTTP/1.1 400"
-- =============================================================================
-- Malformed request line - extra spaces
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  -- Doubled separators between the request-line tokens exercise the parser's
  -- whitespace handling.
  let raw := "GET  /  HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw okHandler
  -- Extra spaces in request line - should still parse or reject
  assertStatus "Extra spaces in request line" response "HTTP/1.1"
-- =============================================================================
-- Completely empty request line
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw okHandler
  assertExact "Empty request line" response bad400
-- =============================================================================
-- Invalid method name
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "FOOBAR / HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw okHandler
  assertStatus "Invalid method FOOBAR" response "HTTP/1.1 400"
-- =============================================================================
-- Method with lowercase (HTTP methods are case-sensitive)
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "get / HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw okHandler
  assertStatus "Lowercase method" response "HTTP/1.1 400"
-- =============================================================================
-- Header without colon separator
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "GET / HTTP/1.1\x0d\nHost: example.com\x0d\nBadHeader value\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw okHandler
  assertExact "Header without colon" response bad400
-- =============================================================================
-- Header with leading whitespace (obsolete line folding - should reject)
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "GET / HTTP/1.1\x0d\nHost: example.com\x0d\n X-Bad: folded\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw okHandler
  assertExact "Leading whitespace in header (obs-fold)" response bad400
-- =============================================================================
-- Null byte in header name
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  -- Splice a raw NUL into the header name; it cannot appear in a string literal.
  let before := "GET / HTTP/1.1\x0d\nHost: example.com\x0d\nX-Bad".toUTF8
  let null := ByteArray.mk #[0]
  let after := "Header: value\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let raw := before ++ null ++ after
  let response ← sendRaw client server raw okHandler
  assertExact "Null byte in header name" response bad400
-- =============================================================================
-- Null byte in header value
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let before := "GET / HTTP/1.1\x0d\nHost: example.com\x0d\nX-Header: bad".toUTF8
  let null := ByteArray.mk #[0]
  let after := "value\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let raw := before ++ null ++ after
  let response ← sendRaw client server raw okHandler
  assertExact "Null byte in header value" response bad400
-- =============================================================================
-- Bare LF without CR (accepted: this implementation is lenient about LF-only
-- line endings, so the request is served normally)
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "GET / HTTP/1.1\nHost: example.com\nConnection: close\n\n".toUTF8
  let response ← sendRaw client server raw okHandler
  assertExact "Bare LF without CR" response ok200
-- =============================================================================
-- CRLF injection attempt in header value
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "GET / HTTP/1.1\x0d\nHost: example.com\x0d\nX-Inject: value\x0d\nEvil: injected\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw okHandler
  -- This is actually two valid headers, not an injection. Server should process normally.
  assertStatus "CRLF in header (two valid headers)" response "HTTP/1.1 200"
-- =============================================================================
-- Non-ASCII in method
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "GÉT / HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw okHandler
  assertExact "Non-ASCII in method" response bad400
-- =============================================================================
-- Tab character in header value (allowed per RFC 9110)
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "GET / HTTP/1.1\x0d\nHost: example.com\x0d\nX-Tab: value\twith\ttabs\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw okHandler
  assertStatus "Tab in header value (allowed)" response "HTTP/1.1 200"
-- =============================================================================
-- Very long method name (exceeds maxMethodLength=16)
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let longMethod := String.ofList (List.replicate 20 'G')
  let raw := s!"{longMethod} / HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw okHandler
  assertExact "Method too long" response bad400
-- =============================================================================
-- Request with only whitespace
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := " \x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw okHandler
  assertExact "Whitespace-only request" response bad400
-- =============================================================================
-- Double CRLF before request (empty lines before request)
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "\x0d\nGET / HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw okHandler
  -- Leading CRLF before request line - per RFC, servers SHOULD ignore at least one empty line
  assertStatus "Leading CRLF before request" response "HTTP/1.1"
-- =============================================================================
-- Non-numeric Content-Length
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: abc\x0d\nConnection: close\x0d\n\x0d\nbody".toUTF8
  let response ← sendRaw client server raw okHandler
  assertExact "Non-numeric Content-Length" response bad400
-- =============================================================================
-- Negative Content-Length
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: -1\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw okHandler
  assertExact "Negative Content-Length" response bad400
-- =============================================================================
-- Duplicate Content-Length with different values (MUST reject per RFC 9110 §8.6)
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 5\x0d\nContent-Length: 10\x0d\nConnection: close\x0d\n\x0d\nhello".toUTF8
  let response ← sendRaw client server raw okHandler
  assertExact "Duplicate Content-Length different values" response bad400
-- =============================================================================
-- Space in header name (invalid token character)
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "GET / HTTP/1.1\x0d\nHost: example.com\x0d\nBad Header: value\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw okHandler
  assertExact "Space in header name" response bad400
-- =============================================================================
-- Colon in header name (invalid)
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "GET / HTTP/1.1\x0d\nHost: example.com\x0d\nBad:Name: value\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw okHandler
  -- "Bad" is the name, "Name: value" is the value - this is actually valid parsing
  assertStatus "Colon parsed as header name delimiter" response "HTTP/1.1"
-- =============================================================================
-- Request with absolute-form URI
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "GET http://example.com/path HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw okHandler
  assertStatus "Absolute-form URI" response "HTTP/1.1"
-- =============================================================================
-- PUT request with body
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "PUT /resource HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 11\x0d\nConnection: close\x0d\n\x0d\nhello world".toUTF8
  let response ← sendRaw client server raw (fun req => do
    if req.head.method == .put then Response.ok |>.text "updated"
    else Response.badRequest |>.text "wrong method")
  assertStatus "PUT request" response "HTTP/1.1 200"
-- =============================================================================
-- PATCH request with body
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "PATCH /resource HTTP/1.1\x0d\nHost: example.com\x0d\nContent-Length: 5\x0d\nConnection: close\x0d\n\x0d\npatch".toUTF8
  let response ← sendRaw client server raw (fun req => do
    if req.head.method == .patch then Response.ok |>.text "patched"
    else Response.badRequest |>.text "wrong method")
  assertStatus "PATCH request" response "HTTP/1.1 200"
-- =============================================================================
-- TRACE request
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "TRACE / HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw (fun req => do
    if req.head.method == .trace then Response.ok |>.text "traced"
    else Response.badRequest |>.text "wrong method")
  assertStatus "TRACE request" response "HTTP/1.1 200"
-- =============================================================================
-- Multiple Host headers (MUST return 400 per RFC 9112 §3.2)
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "GET / HTTP/1.1\x0d\nHost: example.com\x0d\nHost: other.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw okHandler
  assertExact "Multiple Host headers" response bad400
-- =============================================================================
-- Header value with leading/trailing OWS (should be trimmed per RFC 9110 §5.5)
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "GET / HTTP/1.1\x0d\nHost: example.com \x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw okHandler
  assertStatus "OWS around header value" response "HTTP/1.1 200"
-- =============================================================================
-- Mixed-case Transfer-Encoding (e.g., Chunked instead of chunked)
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: Chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw (fun req => do
    let mut body := ByteArray.empty
    for chunk in req.body do
      body := body ++ chunk.data
    Response.ok |>.text (String.fromUTF8! body))
  assertStatus "Mixed-case TE: Chunked" response "HTTP/1.1 200"
-- =============================================================================
-- Transfer-Encoding with trailing space (e.g., "chunked ")
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked \x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw (fun req => do
    let mut body := ByteArray.empty
    for chunk in req.body do
      body := body ++ chunk.data
    Response.ok |>.text (String.fromUTF8! body))
  assertStatus "TE with trailing space" response "HTTP/1.1"
-- =============================================================================
-- Transfer-Encoding: chunked, chunked (double chunked - should reject)
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked, chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw okHandler
  assertExact "Double chunked TE" response bad400
-- =============================================================================
-- Empty connection (client connects then immediately disconnects)
-- =============================================================================
#eval show IO _ from do Async.block do
  let (client, server) ← Mock.new
  let raw := ByteArray.empty
  client.send raw
  client.close
  let result ← Async.block do
    Std.Http.Server.serveConnection server okHandler { lingeringTimeout := 500, generateDate := false }
      |>.run
    let res ← client.recv?
    pure <| res.getD .empty
  -- Empty connection should result in no response or a timeout
  assert! result.size == 0 || (String.fromUTF8! result).startsWith "HTTP/1.1"
-- =============================================================================
-- Request with extremely long header name (boundary test at maxHeaderNameLength)
-- =============================================================================
#eval show IO _ from do Async.block do
  let (client, server) ← Mock.new
  -- One past the 256-character limit on header names.
  let longName := String.ofList (List.replicate 257 'X')
  let raw := s!"GET / HTTP/1.1\x0d\nHost: example.com\x0d\n{longName}: value\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw okHandler
  assertExact "Header name at 257 chars" response bad400
-- =============================================================================
-- Control character (0x01) in header value
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  -- Splice a raw control byte into the header value.
  let before := "GET / HTTP/1.1\x0d\nHost: example.com\x0d\nX-Header: bad".toUTF8
  let ctrl := ByteArray.mk #[0x01]
  let after := "value\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let raw := before ++ ctrl ++ after
  let response ← sendRaw client server raw okHandler
  assertExact "Control char in header value" response bad400
-- =============================================================================
-- Request line with no spaces at all
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "GETHTTP/1.1\x0d\nHost: example.com\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw okHandler
  assertExact "Request line no spaces" response bad400
-- =============================================================================
-- Very long URI (exceeds maxUriLength=8192)
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let longUri := "/" ++ String.ofList (List.replicate 9000 'a')
  let raw := s!"GET {longUri} HTTP/1.1\x0d\nHost: example.com\x0d\nConnection: close\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw okHandler
  assertExact "URI too long" response bad400

View File

@@ -0,0 +1,347 @@
import Std.Internal.Http
import Std.Internal.Async
import Std.Internal.Async.Timer
open Std.Internal.IO Async
open Std Http
/-- Shape of the request handlers used throughout these tests: a function from
a streaming-body request to a streaming-body response in the async context. -/
abbrev TestHandler := Request Body.Stream ContextAsync (Response Body.Stream)
-- Allow a plain function of this shape to be used wherever a server `Handler`
-- is expected; dispatch is simply application.
instance : Std.Http.Server.Handler TestHandler where
onRequest handler request := handler request
open Std.Http.Internal
/-!
# HTTP Trailer Tests
Tests for HTTP/1.1 chunked transfer encoding trailers (RFC 9112 §7.1.2).
Covers: basic trailer parsing, empty trailers, limit enforcement, malformed trailers,
and potential parser abuse scenarios.
-/
/-- Send raw bytes to the server and return the response.

`client`/`server` are the two ends of a mock connection. The raw request bytes
are pushed into the client side first, then the server connection loop is run
with `handler` and `config`; finally whatever the server wrote back is read.
Returns an empty `ByteArray` when the server produced no response. -/
def sendRaw (client : Mock.Client) (server : Mock.Server) (raw : ByteArray)
    (handler : Request Body.Stream ContextAsync (Response Body.Stream))
    (config : Config := { lingeringTimeout := 3000, generateDate := false }) : IO ByteArray := Async.block do
  client.send raw
  Std.Http.Server.serveConnection server handler config
    |>.run
  let res ← client.recv?
  pure <| res.getD .empty
/-- Assert that the response starts with the expected status line; throws an
`IO.userError` describing the mismatch otherwise. -/
def assertStatus (name : String) (response : ByteArray) (status : String) : IO Unit := do
  let got := String.fromUTF8! response
  if got.startsWith status then
    pure ()
  else
    throw <| IO.userError s!"Test '{name}' failed:\nExpected status: {status}\nGot: {got.quote}"
/-- Assert the full response matches exactly; throws an `IO.userError` showing
both the expected and the actual bytes (quoted) otherwise. -/
def assertExact (name : String) (response : ByteArray) (expected : String) : IO Unit := do
  let actual := String.fromUTF8! response
  unless actual == expected do
    throw <| IO.userError s!"Test '{name}' failed:\nExpected:\n{expected.quote}\nGot:\n{actual.quote}"
/-- Handler that drains the request body stream and echoes the collected bytes
back as the response text. -/
def bodyHandler : Request Body.Stream ContextAsync (Response Body.Stream) :=
fun req => do
let mut body := ByteArray.empty
for chunk in req.body do
body := body ++ chunk.data
Response.ok |>.text (String.fromUTF8! body)
/-- Raw bytes of the server's canned `400 Bad Request` response, used by
`assertExact` checks below. -/
def bad400 : String :=
"HTTP/1.1 400 Bad Request\x0d\nContent-Length: 0\x0d\nConnection: close\x0d\nServer: LeanHTTP/1.1\x0d\n\x0d\n"
-- =============================================================================
-- Chunked body with no trailers (just terminal chunk + empty line)
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw bodyHandler
  assertStatus "Chunked no trailers" response "HTTP/1.1 200"
-- =============================================================================
-- Chunked body with a single trailer header
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\nChecksum: abc123\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw bodyHandler
  assertStatus "Single trailer header" response "HTTP/1.1 200"
-- =============================================================================
-- Chunked body with multiple trailer headers
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0\x0d\nChecksum: abc123\x0d\nExpires: Thu, 01 Dec 1994 16:00:00 GMT\x0d\nX-Custom: value\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw bodyHandler
  assertStatus "Multiple trailer headers" response "HTTP/1.1 200"
-- =============================================================================
-- Chunked body with trailer having long value (within default limit of 8192)
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let longValue := String.ofList (List.replicate 8000 'v')
  let raw := s!"POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\nX-Long: {longValue}\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw bodyHandler
  assertStatus "Trailer with long value (8000 chars)" response "HTTP/1.1 200"
-- =============================================================================
-- Chunked body with trailer value exceeding maxHeaderValueLength (8192)
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let longValue := String.ofList (List.replicate 8193 'v')
  let raw := s!"POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\nX-Long: {longValue}\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw bodyHandler
  assertExact "Trailer value exceeds limit" response bad400
-- =============================================================================
-- Chunked body with trailer name exceeding maxHeaderNameLength (256)
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let longName := String.ofList (List.replicate 257 'X')
  let raw := s!"POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\n{longName}: value\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw bodyHandler
  assertExact "Trailer name exceeds limit" response bad400
-- =============================================================================
-- Exceed maxTrailerHeaders limit (default 100)
-- Use a reduced limit to keep the test fast.
-- =============================================================================
#eval show IO _ from do
let (client, server) Mock.new
-- Use a config with maxTrailerHeaders = 3 to keep test small
let config : Config := { lingeringTimeout := 3000, maxTrailerHeaders := 3, generateDate := false }
let trailers := "T1: v1\x0d\nT2: v2\x0d\nT3: v3\x0d\nT4: v4\x0d\n"
let raw := s!"POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\n{trailers}\x0d\n".toUTF8
let response sendRaw client server raw bodyHandler (config := config)
assertExact "Too many trailer headers (4 > 3)" response bad400
-- =============================================================================
-- Trailer count exactly at the limit should succeed
-- =============================================================================
#eval show IO _ from do
let (client, server) Mock.new
let config : Config := { lingeringTimeout := 3000, maxTrailerHeaders := 3, generateDate := false }
let trailers := "T1: v1\x0d\nT2: v2\x0d\nT3: v3\x0d\n"
let raw := s!"POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\n{trailers}\x0d\n".toUTF8
let response sendRaw client server raw bodyHandler (config := config)
assertStatus "Trailer count exactly at limit (3)" response "HTTP/1.1 200"
-- =============================================================================
-- Trailer with null byte in name
-- =============================================================================
#eval show IO _ from do
let (client, server) Mock.new
let before := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\nX-Bad".toUTF8
let null := ByteArray.mk #[0]
let after := "Name: value\x0d\n\x0d\n".toUTF8
let raw := before ++ null ++ after
let response sendRaw client server raw bodyHandler
assertExact "Null byte in trailer name" response bad400
-- =============================================================================
-- Trailer with null byte in value
-- =============================================================================
#eval show IO _ from do
let (client, server) Mock.new
let before := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\nX-Header: bad".toUTF8
let null := ByteArray.mk #[0]
let after := "value\x0d\n\x0d\n".toUTF8
let raw := before ++ null ++ after
let response sendRaw client server raw bodyHandler
assertExact "Null byte in trailer value" response bad400
-- =============================================================================
-- Trailer with control character (0x01) in value
-- =============================================================================
#eval show IO _ from do
let (client, server) Mock.new
let before := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\nX-Header: bad".toUTF8
let ctrl := ByteArray.mk #[0x01]
let after := "value\x0d\n\x0d\n".toUTF8
let raw := before ++ ctrl ++ after
let response sendRaw client server raw bodyHandler
assertExact "Control char in trailer value" response bad400
-- =============================================================================
-- Trailer without colon separator (malformed field line)
-- =============================================================================
#eval show IO _ from do
let (client, server) Mock.new
let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\nBadTrailer value\x0d\n\x0d\n".toUTF8
let response sendRaw client server raw bodyHandler
assertExact "Trailer without colon" response bad400
-- =============================================================================
-- Trailer with leading whitespace (obsolete line folding)
-- =============================================================================
#eval show IO _ from do
let (client, server) Mock.new
let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\n X-Bad: folded\x0d\n\x0d\n".toUTF8
let response sendRaw client server raw bodyHandler
assertExact "Leading whitespace in trailer (obs-fold)" response bad400
-- =============================================================================
-- Trailer with space in name (invalid token character)
-- =============================================================================
#eval show IO _ from do
let (client, server) Mock.new
let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\nBad Name: value\x0d\n\x0d\n".toUTF8
let response sendRaw client server raw bodyHandler
assertExact "Space in trailer name" response bad400
-- =============================================================================
-- Multiple chunks followed by trailers
-- =============================================================================
#eval show IO _ from do
let (client, server) Mock.new
let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n6\x0d\n world\x0d\n0\x0d\nChecksum: deadbeef\x0d\n\x0d\n".toUTF8
let response sendRaw client server raw bodyHandler
assertStatus "Multiple chunks then trailers" response "HTTP/1.1 200"
-- =============================================================================
-- Terminal chunk with extensions followed by trailers
-- =============================================================================
#eval show IO _ from do
let (client, server) Mock.new
let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n5\x0d\nhello\x0d\n0;ext=val\x0d\nX-Trailer: yes\x0d\n\x0d\n".toUTF8
let response sendRaw client server raw bodyHandler
assertStatus "Terminal chunk ext + trailers" response "HTTP/1.1 200"
-- =============================================================================
-- Missing final CRLF after trailers (incomplete message)
-- The server should either reject or timeout waiting for more data.
-- =============================================================================
#eval show IO _ from Async.block do
  let (client, server) ← Mock.new
  -- Trailer section present but the terminating blank line is never sent
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\nX-Trailer: value\x0d\n".toUTF8
  client.send raw
  client.close
  let result ← Async.block do
    Std.Http.Server.serveConnection server bodyHandler { lingeringTimeout := 500, generateDate := false }
      |>.run
    let res ← client.recv?
    pure <| res.getD .empty
  -- Incomplete trailer section: server should return 400 or empty response
  let responseStr := String.fromUTF8! result
  assert! result.size == 0 || responseStr.startsWith "HTTP/1.1 400"
-- =============================================================================
-- Trailer encoding round-trip test
-- =============================================================================
-- Insert lowercase trailer names and check the encoder emits them in canonical
-- form ("Checksum", "Expires") — presumably canonicalization happens on
-- encode; NOTE(review): confirm against the Trailer/Encode implementation.
#eval show IO _ from Async.block do
let trailer := Trailer.empty
|>.insert (.mk "checksum") (.mk "abc123")
|>.insert (.mk "expires") (.mk "Thu, 01 Dec 1994")
let encoded := (Encode.encode (v := .v11) ChunkedBuffer.empty trailer).toByteArray
let s := String.fromUTF8! encoded
-- Should contain terminal chunk "0\r\n", trailer fields, and final "\r\n"
assert! s.contains "0\x0d\n"
assert! s.contains "Checksum: abc123\x0d\n"
assert! s.contains "Expires: Thu, 01 Dec 1994\x0d\n"
-- =============================================================================
-- Empty trailer encoding
-- =============================================================================
-- With no trailer fields, the encoding collapses to the bare terminal chunk.
#eval show IO _ from Async.block do
let trailer := Trailer.empty
let encoded := (Encode.encode (v := .v11) ChunkedBuffer.empty trailer).toByteArray
let s := String.fromUTF8! encoded
-- Should be just "0\r\n\r\n"
assert! s == "0\x0d\n\x0d\n"
-- =============================================================================
-- maxTrailerHeaders = 0 means no trailers allowed
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let config : Config := { lingeringTimeout := 3000, maxTrailerHeaders := 0, generateDate := false }
  -- Even a single trailer should be rejected
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\nX-Trailer: rejected\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw bodyHandler (config := config)
  assertExact "maxTrailerHeaders=0 rejects any trailer" response bad400
-- =============================================================================
-- maxTrailerHeaders = 0 but no trailers present should succeed
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let config : Config := { lingeringTimeout := 3000, maxTrailerHeaders := 0, generateDate := false }
  let raw := "POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw bodyHandler (config := config)
  assertStatus "maxTrailerHeaders=0 with no trailers" response "HTTP/1.1 200"
-- =============================================================================
-- Trailer with very long name at exactly the limit (256 chars) should succeed
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let exactName := String.ofList (List.replicate 256 'X')
  let raw := s!"POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\n{exactName}: value\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw bodyHandler
  assertStatus "Trailer name at exactly 256 chars" response "HTTP/1.1 200"
-- =============================================================================
-- Trailer with value at exactly the limit (8192 chars) should succeed
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let exactValue := String.ofList (List.replicate 8192 'v')
  let raw := s!"POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\nX-Exact: {exactValue}\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw bodyHandler
  assertStatus "Trailer value at exactly 8192 chars" response "HTTP/1.1 200"
-- =============================================================================
-- Many trailers at a reduced limit with varied field sizes
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let config : Config := { lingeringTimeout := 3000, maxTrailerHeaders := 5, maxHeaderValueLength := 50, generateDate := false }
  -- 5 trailers with values near the limit
  let longVal := String.ofList (List.replicate 50 'z')
  let trailers := s!"T1: {longVal}\x0d\nT2: {longVal}\x0d\nT3: {longVal}\x0d\nT4: {longVal}\x0d\nT5: {longVal}\x0d\n"
  let raw := s!"POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\n{trailers}\x0d\n".toUTF8
  let response ← sendRaw client server raw bodyHandler (config := config)
  assertStatus "5 trailers at limit with long values" response "HTTP/1.1 200"
-- =============================================================================
-- Many trailers exceed reduced limit with large values
-- =============================================================================
#eval show IO _ from do
  let (client, server) ← Mock.new
  let config : Config := { lingeringTimeout := 3000, maxTrailerHeaders := 5, maxHeaderValueLength := 50, generateDate := false }
  -- One byte past the reduced value-length limit → rejected
  let longVal := String.ofList (List.replicate 51 'z')
  let raw := s!"POST / HTTP/1.1\x0d\nHost: example.com\x0d\nTransfer-Encoding: chunked\x0d\nConnection: close\x0d\n\x0d\n3\x0d\nabc\x0d\n0\x0d\nT1: {longVal}\x0d\n\x0d\n".toUTF8
  let response ← sendRaw client server raw bodyHandler (config := config)
  assertExact "Trailer value exceeds reduced limit (51 > 50)" response bad400

View File

@@ -0,0 +1,836 @@
/-
Copyright (c) 2025 Lean FRO, LLC. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Authors: Sofia Rodrigues
-/
import Std.Internal.Http.Data.URI
import Std.Internal.Http.Data.URI.Encoding
open Std.Http
open Std.Http.URI
open Std.Http.URI.Parser
/-!
# URI Tests
Comprehensive tests for URI parsing, encoding, normalization, and manipulation.
This file consolidates tests from multiple URI-related test files.
-/
-- ============================================================================
-- Helper Functions
-- ============================================================================
/-- Run `parser` over the UTF-8 bytes of `s`, requiring the whole input to be
consumed, and lift any parse error into an `IO` error. -/
def runParser (parser : Std.Internal.Parsec.ByteArray.Parser α) (s : String) : IO α := do
  let fullInputParser := parser <* Std.Internal.Parsec.eof
  IO.ofExcept (fullInputParser.run s.toUTF8)
/-- Parse `s` as a request target and assert it round-trips to `exact`
(defaults to `s` itself); throws an `IO` error on mismatch. -/
def parseCheck (s : String) (exact : String := s) : IO Unit := do
  let result ← runParser parseRequestTarget s
  if toString result = exact then
    pure ()
  else
    throw (.userError s!"expect {exact.quote} but got {(toString result).quote}")
/-- Assert that parsing `s` as a full request target fails; throws an `IO`
error if the parse unexpectedly succeeds. -/
def parseCheckFail (s : String) : IO Unit := do
  let outcome := (parseRequestTarget <* Std.Internal.Parsec.eof).run s.toUTF8
  if let .ok r := outcome then
    throw <| .userError
      s!"expected parse failure, but succeeded with {(repr r)}"
-- ============================================================================
-- Percent Encoding Tests (EncodedString)
-- ============================================================================
-- Valid percent encoding validation
-- `ofByteArray?` returns `some` only for well-formed segments: every "%"
-- must be followed by exactly two hex digits.
/--
info: some "abc"
-/
#guard_msgs in
#eval IO.println (repr (EncodedSegment.ofByteArray? "abc".toUTF8))
/--
info: some "%20"
-/
#guard_msgs in
#eval IO.println (repr (EncodedSegment.ofByteArray? "%20".toUTF8))
/--
info: some "hello%20world"
-/
#guard_msgs in
#eval IO.println (repr (EncodedSegment.ofByteArray? "hello%20world".toUTF8))
/--
info: some "%FF"
-/
#guard_msgs in
#eval IO.println (repr (EncodedSegment.ofByteArray? "%FF".toUTF8))
/--
info: some "%00"
-/
#guard_msgs in
#eval IO.println (repr (EncodedSegment.ofByteArray? "%00".toUTF8))
-- Invalid percent encoding: incomplete
-- A lone or truncated "%" escape (fewer than two hex digits) is rejected.
/--
info: none
-/
#guard_msgs in
#eval IO.println (repr (EncodedSegment.ofByteArray? "%".toUTF8))
/--
info: none
-/
#guard_msgs in
#eval IO.println (repr (EncodedSegment.ofByteArray? "hello%".toUTF8))
/--
info: none
-/
#guard_msgs in
#eval IO.println (repr (EncodedSegment.ofByteArray? "%2".toUTF8))
/--
info: none
-/
#guard_msgs in
#eval IO.println (repr (EncodedSegment.ofByteArray? "%A".toUTF8))
-- Invalid percent encoding: non-hex characters
/--
info: none
-/
#guard_msgs in
#eval IO.println (repr (EncodedSegment.ofByteArray? "%GG".toUTF8))
/--
info: none
-/
#guard_msgs in
#eval IO.println (repr (EncodedSegment.ofByteArray? "%2G".toUTF8))
/--
info: none
-/
#guard_msgs in
#eval IO.println (repr (EncodedSegment.ofByteArray? "%G2".toUTF8))
-- ============================================================================
-- Percent Encoding Decode Tests
-- ============================================================================
-- `decode` resolves %XX escapes. "%FF" validates but decodes to `none`
-- (presumably because the lone 0xFF byte is not valid UTF-8 — NOTE(review):
-- confirm), while "%00" decodes to the NUL character.
/--
info: some "abc"
-/
#guard_msgs in
#eval IO.println (repr <| EncodedSegment.decode =<< (EncodedSegment.ofByteArray? "abc".toUTF8))
/--
info: some " "
-/
#guard_msgs in
#eval IO.println (repr <| EncodedSegment.decode =<< (EncodedSegment.ofByteArray? "%20".toUTF8))
/--
info: some "hello world"
-/
#guard_msgs in
#eval IO.println (repr <| EncodedSegment.decode =<< (EncodedSegment.ofByteArray? "hello%20world".toUTF8))
/--
info: some " !"
-/
#guard_msgs in
#eval IO.println (repr <| EncodedSegment.decode =<< (EncodedSegment.ofByteArray? "%20%21".toUTF8))
/--
info: none
-/
#guard_msgs in
#eval IO.println (repr <| EncodedSegment.decode =<< (EncodedSegment.ofByteArray? "%FF".toUTF8))
/--
info: some "\x00"
-/
#guard_msgs in
#eval IO.println (repr <| EncodedSegment.decode =<< (EncodedSegment.ofByteArray? "%00".toUTF8))
-- ============================================================================
-- Query String Encoding Tests
-- ============================================================================
-- In query strings "+" is accepted raw by the validator and decoded as a
-- space.
/--
info: some "hello+world"
-/
#guard_msgs in
#eval IO.println (repr (EncodedQueryString.ofByteArray? (r := isQueryChar) "hello+world".toUTF8))
/--
info: none
-/
#guard_msgs in
#eval IO.println (repr (EncodedQueryString.ofByteArray? (r := isQueryChar) "%".toUTF8))
/--
info: some "hello world"
-/
#guard_msgs in
#eval IO.println (repr <| EncodedQueryString.decode (r := isQueryChar) =<< (EncodedQueryString.ofByteArray? (r := isQueryChar) "hello+world".toUTF8))
/--
info: some " "
-/
#guard_msgs in
#eval IO.println (repr <| EncodedQueryString.decode (r := isQueryChar) =<< (EncodedQueryString.ofByteArray? (r := isQueryChar) "%20".toUTF8))
-- ============================================================================
-- Request Target Parsing - Basic Tests
-- ============================================================================
-- `parseCheck s` asserts `s` parses and round-trips back to exactly `s`.
#eval parseCheck "/path/with/encoded%20space"
#eval parseCheck "/path/with/encoded%20space/"
#eval parseCheck "*"
#eval parseCheck "/api/search?q=hello%20world&category=tech%2Bgames"
#eval parseCheck "/"
#eval parseCheck "/api/v1/users/123/posts/456/comments/789"
#eval parseCheck "/files/../etc/passwd"
#eval parseCheck "example.com:8080"
#eval parseCheck "https://example.com:8080/ata"
#eval parseCheck "192.168.1.1:3000"
#eval parseCheck "[::1]:8080"
#eval parseCheck "http://example.com/path/to/resource?query=value"
#eval parseCheck "https://api.example.com:443/v1/users?limit=10"
#eval parseCheck "http://[2001:db8::1]:8080/path"
#eval parseCheck "https://xn--nxasmq6b.xn--o3cw4h/path"
#eval parseCheck "localhost:65535"
#eval parseCheck "https://user:pass@secure.example.com/private"
#eval parseCheck "/double//slash//path"
#eval parseCheck "http://user%40example:pass%3Aword@host.com"
-- Parse failure tests
-- Raw spaces, truncated escapes, unbalanced IPv6 brackets, bare fragments,
-- and control characters must all be rejected.
#eval parseCheckFail "/path with space"
#eval parseCheckFail "/path/%"
#eval parseCheckFail "/path/%2"
#eval parseCheckFail "/path/%ZZ"
#eval parseCheckFail ""
#eval parseCheckFail "[::1"
#eval parseCheckFail "[:::1]:80"
#eval parseCheckFail "#frag"
#eval parseCheckFail "/path/\n"
#eval parseCheckFail "/path/\u0000"
-- ============================================================================
-- Request Target Parsing - Detailed Output Tests
-- ============================================================================
/--
info: Std.Http.RequestTarget.originForm { segments := #["path", "with", "encoded%20space"], absolute := true } none
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/path/with/encoded%20space"
  IO.println (repr result)
/--
info: Std.Http.RequestTarget.asteriskForm
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "*"
  IO.println (repr result)
/--
info: none
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "https://ata/b?ata=be"
  IO.println (repr (result.fragment?))
/--
info: #[("q", some "hello%20world"), ("category", some "tech%2Bgames")]
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/api/search?q=hello%20world&category=tech%2Bgames"
  IO.println (repr result.query)
/--
info: Std.Http.RequestTarget.originForm { segments := #[], absolute := true } none
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/"
  IO.println (repr result)
/--
info: Std.Http.RequestTarget.authorityForm
{ userInfo := none, host := Std.Http.URI.Host.name "example.com", port := some 8080 }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "example.com:8080"
  IO.println (repr result)
/--
info: Std.Http.RequestTarget.authorityForm { userInfo := none, host := Std.Http.URI.Host.ipv4 192.168.1.1, port := some 3000 }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "192.168.1.1:3000"
  IO.println (repr result)
/--
info: Std.Http.RequestTarget.authorityForm { userInfo := none, host := Std.Http.URI.Host.ipv6 ::1, port := some 8080 }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "[::1]:8080"
  IO.println (repr result)
/--
info: Std.Http.RequestTarget.absoluteForm
{ scheme := "https",
authority := some { userInfo := none, host := Std.Http.URI.Host.name "example.com", port := some 8080 },
path := { segments := #["ata"], absolute := true },
query := #[],
fragment := none }
_
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "https://example.com:8080/ata"
  IO.println (repr result)
/--
info: Std.Http.RequestTarget.absoluteForm
{ scheme := "http",
authority := some { userInfo := none, host := Std.Http.URI.Host.ipv6 2001:db8::1, port := some 8080 },
path := { segments := #["path"], absolute := true },
query := #[],
fragment := none }
_
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "http://[2001:db8::1]:8080/path"
  IO.println (repr result)
/--
info: Std.Http.RequestTarget.absoluteForm
{ scheme := "https",
authority := some { userInfo := some { username := "user b", password := some "pass" },
host := Std.Http.URI.Host.name "secure.example.com",
port := none },
path := { segments := #["private"], absolute := true },
query := #[],
fragment := none }
_
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "https://user%20b:pass@secure.example.com/private"
  IO.println (repr result)
-- ============================================================================
-- IPv6 Host Tests
-- ============================================================================
/--
info: Std.Http.URI.Host.ipv6 ::1
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "[::1]:8080"
  match result.authority? with
  | some auth => IO.println (repr auth.host)
  | none => IO.println "no authority"
/--
info: Std.Http.URI.Host.ipv6 2001:db8::8a2e:370:7334
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "http://[2001:db8::8a2e:370:7334]:8080/api"
  match result.authority? with
  | some auth => IO.println (repr auth.host)
  | none => IO.println "no authority"
/--
info: Std.Http.URI.Host.ipv6 ::
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "http://[::]/path"
  match result.authority? with
  | some auth => IO.println (repr auth.host)
  | none => IO.println "no authority"
-- ============================================================================
-- UserInfo Tests
-- ============================================================================
/--
info: some { username := "user", password := some "pass" }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "https://user:pass@example.com/private"
  match result.authority? with
  | some auth => IO.println (repr auth.userInfo)
  | none => IO.println "no authority"
/--
info: some { username := "user only", password := none }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "https://user%20only@example.com/path"
  match result.authority? with
  | some auth => IO.println (repr auth.userInfo)
  | none => IO.println "no authority"
/--
info: some { username := "", password := some "pass" }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "https://:pass@example.com/path"
  match result.authority? with
  | some auth => IO.println (repr auth.userInfo)
  | none => IO.println "no authority"
/--
info: some { username := "user", password := some "p@ss:w0rd" }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "https://user:p%40ss%3Aw0rd@example.com/"
  match result.authority? with
  | some auth => IO.println (repr auth.userInfo)
  | none => IO.println "no authority"
-- ============================================================================
-- Path.normalize Tests (RFC 3986 Section 5.2.4)
-- ============================================================================
-- Dot-segment removal: "." is dropped, ".." pops the previous segment, and
-- ".." that would climb above the root is discarded.
/--
info: /a/b
-/
#guard_msgs in
#eval IO.println <| toString (URI.parse! "http://example.com/a/./b").path.normalize
/--
info: /a
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a/b/..").path.normalize
/--
info: /a/g
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a/b/c/./../../g").path.normalize
/--
info: /g
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/../../../g").path.normalize
/--
info: /a/c
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a/b/../c").path.normalize
/--
info: /a/
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a/b/c/../.././").path.normalize
/--
info: /
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a/b/../../..").path.normalize
/--
info: /
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/../../../").path.normalize
/--
info: /a/b/c
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/./a/./b/./c/.").path.normalize
/--
info: /c
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a/../b/../c").path.normalize
-- ============================================================================
-- Path.parent Tests
-- ============================================================================
-- `parent` drops the last segment; the parent of the root is the root itself.
/--
info: /a/b
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a/b/c").path.parent
/--
info: /a
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a/b").path.parent
/--
info: /
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a").path.parent
/--
info: /
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/").path.parent
-- ============================================================================
-- Path.join Tests
-- ============================================================================
-- Joining a relative path appends its segments; joining an absolute path
-- replaces the base path entirely.
/--
info: /a/b/c/d
-/
#guard_msgs in
#eval do
let p1 := (URI.parse! "http://example.com/a/b").path
let p2 : URI.Path := { segments := #[URI.EncodedString.encode "c", URI.EncodedString.encode "d"], absolute := false }
IO.println (p1.join p2)
/--
info: /x/y
-/
#guard_msgs in
#eval do
let p1 := (URI.parse! "http://example.com/a/b").path
let p2 : URI.Path := { segments := #[URI.EncodedString.encode "x", URI.EncodedString.encode "y"], absolute := true }
IO.println (p1.join p2)
-- ============================================================================
-- Path.isEmpty Tests
-- ============================================================================
-- A URI with no path at all is empty; "/" is non-empty only via `absolute`.
#guard (URI.parse! "http://example.com").path.isEmpty = true
#guard (URI.parse! "http://example.com/").path.absolute = true
#guard (URI.parse! "http://example.com/a").path.isEmpty = false
#guard (URI.parse! "http://example.com/a").path.absolute = true
-- ============================================================================
-- URI Modification Helpers
-- ============================================================================
-- `withScheme` lowercases its argument (scheme names are case-insensitive).
#guard ((URI.parse! "http://example.com").withScheme "htTps" |>.scheme) == "https"
#guard ((URI.parse! "http://example.com").withScheme "ftP" |>.scheme) == "ftp"
/--
info: http://example.com/#section1
-/
#guard_msgs in
#eval IO.println ((URI.parse! "http://example.com/").withFragment (some (toString (URI.EncodedString.encode "section1" : URI.EncodedFragment))))
/--
info: http://example.com/?key=value
-/
#guard_msgs in
#eval do
let uri := URI.parse! "http://example.com/"
let query := URI.Query.empty.insert "key" "value"
IO.println (uri.withQuery query)
/--
info: http://example.com/new/path
-/
#guard_msgs in
#eval do
let uri := URI.parse! "http://example.com/old/path"
let newPath : URI.Path := { segments := #[URI.EncodedString.encode "new", URI.EncodedString.encode "path"], absolute := true }
IO.println (uri.withPath newPath)
-- ============================================================================
-- URI.normalize Tests (RFC 3986 Section 6)
-- ============================================================================
-- Normalization lowercases scheme and host, and removes dot segments from
-- the path; path segment case is preserved.
#guard (URI.parse! "HTTP://example.com").normalize.scheme == "http"
#guard (URI.parse! "HtTpS://example.com").normalize.scheme == "https"
/--
info: http://example.com/
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://EXAMPLE.COM/").normalize
/--
info: http://example.com/
-/
#guard_msgs in
#eval IO.println (URI.parse! "HTTP://Example.COM/").normalize
/--
info: http://example.com/a/c
-/
#guard_msgs in
#eval IO.println (URI.parse! "http://example.com/a/b/../c").normalize
/--
info: http://example.com/a/g
-/
#guard_msgs in
#eval IO.println (URI.parse! "HTTP://EXAMPLE.COM/a/b/c/./../../g").normalize
/--
info: https://www.example.com/PATH
-/
#guard_msgs in
#eval IO.println (URI.parse! "HTTPS://WWW.EXAMPLE.COM/PATH").normalize
-- ============================================================================
-- Query Parameter Tests
-- ============================================================================
-- Query with duplicate keys: `findAll` collects every value bound to the key.
-- (Fix: restore the `←` bind — `runParser` runs in the IO-based `do` block;
-- `let result runParser …` was not syntactically valid.)
/--
info: 3
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/search?tag=a&tag=b&tag=c"
  let all := result.query.findAll "tag"
  IO.println all.size
-- Duplicate values come back in source order.
/--
info: #[some "a", some "b", some "c"]
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/search?tag=a&tag=b&tag=c"
  let all := result.query.findAll "tag"
  IO.println (repr all)
-- `find?` returns the first binding when a key occurs several times.
/--
info: some (some "a")
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/search?key=a&key=b&key=c"
  IO.println (repr (result.query.find? "key"))
-- Empty value vs no value: `key=` yields an empty-string value…
/--
info: some (some "")
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/api?key="
  IO.println (repr (result.query.find? "key"))
-- …while a bare `key` (no `=`) yields no value at all.
/--
info: some none
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/api?key"
  IO.println (repr (result.query.find? "key"))
-- Ordinary `key=value` pair.
/--
info: some (some "value")
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/api?key=value"
  IO.println (repr (result.query.find? "key"))
-- ============================================================================
-- Query Operations
-- ============================================================================
-- `insert` adds a binding; `contains` reports membership.
#guard ((URI.Query.empty.insert "a" "1").contains "a") = true
#guard (URI.Query.empty.contains "nonexistent") = false
-- `toRawString` renders bindings as `k=v` joined by `&`, in insertion order.
/--
info: a=1&b=2
-/
#guard_msgs in
#eval do
  let q := (URI.Query.empty.insert "a" "1").insert "b" "2"
  IO.println q.toRawString
-- `erase` drops the binding for the given key.
/--
info: b=2
-/
#guard_msgs in
#eval do
  let q := ((URI.Query.empty.insert "a" "1").insert "b" "2").erase "a"
  IO.println q.toRawString
-- `set` replaces the value already stored under the key.
/--
info: key=new
-/
#guard_msgs in
#eval do
  let q := (URI.Query.empty.insert "key" "old").set "key" "new"
  IO.println q.toRawString
-- A target with no `#` component parses with `fragment? = none`.
-- (Fix: restore the `←` bind that was missing after `let result`.)
/--
info: none
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/path"
  IO.println (repr result.fragment?)
-- ============================================================================
-- URI Builder Tests
-- ============================================================================
-- Full pipeline: scheme + host + two path segments + one query parameter.
/--
info: https://example.com/api/users?page=1
-/
#guard_msgs in
#eval do
  let built := URI.Builder.empty
    |>.setScheme "https"
    |>.setHost! "example.com"
    |>.appendPathSegment "api"
    |>.appendPathSegment "users"
    |>.addQueryParam "page" "1"
    |>.build
  IO.println built
-- An explicit port is rendered in the authority.
/--
info: http://localhost:8080/
-/
#guard_msgs in
#eval do
  let built := URI.Builder.empty
    |>.setScheme "http"
    |>.setHost! "localhost"
    |>.setPort 8080
    |>.build
  IO.println built
-- User info renders as `user:pass@` before the host.
/--
info: https://user:pass@secure.example.com/private
-/
#guard_msgs in
#eval do
  let built := URI.Builder.empty
    |>.setScheme "https"
    |>.setUserInfo "user" (some "pass")
    |>.setHost! "secure.example.com"
    |>.appendPathSegment "private"
    |>.build
  IO.println built
-- ============================================================================
-- Encoded Path Segment Tests
-- ============================================================================
-- Percent-encoded octets stay encoded in the stored segments: an encoded
-- slash (%2F) does not split the segment.
-- (Fix: restore the `←` bind missing after each `let result`.)
/--
info: Std.Http.RequestTarget.originForm { segments := #["path%2Fwith%2Fslashes"], absolute := true } none
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/path%2Fwith%2Fslashes"
  IO.println (repr result)
-- An encoded space (%20) stays encoded.
/--
info: Std.Http.RequestTarget.originForm { segments := #["file%20name.txt"], absolute := true } none
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/file%20name.txt"
  IO.println (repr result)
-- An encoded multi-byte UTF-8 sequence (%C3%A9) stays encoded.
/--
info: Std.Http.RequestTarget.originForm { segments := #["caf%C3%A9"], absolute := true } none
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "/caf%C3%A9"
  IO.println (repr result)
-- ============================================================================
-- Authority Form Tests
-- ============================================================================
-- A bare `host:port` target parses as authority-form with a named host.
-- (Fix: restore the `←` bind missing after each `let result`.)
/--
info: Std.Http.RequestTarget.authorityForm
{ userInfo := none, host := Std.Http.URI.Host.name "proxy.example.com", port := some 3128 }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "proxy.example.com:3128"
  IO.println (repr result)
-- A dotted-quad host is recognized as an IPv4 literal, not a name.
/--
info: Std.Http.RequestTarget.authorityForm { userInfo := none, host := Std.Http.URI.Host.ipv4 127.0.0.1, port := some 8080 }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "127.0.0.1:8080"
  IO.println (repr result)
-- A registered name may begin with a digit and is still a `Host.name`.
/--
info: Std.Http.RequestTarget.authorityForm
{ userInfo := none, host := Std.Http.URI.Host.name "1example.com", port := some 8080 }
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "1example.com:8080"
  IO.println (repr result)
-- The same digit-leading host inside a full absolute-form URI.
/--
info: Std.Http.RequestTarget.absoluteForm
{ scheme := "http",
authority := some { userInfo := none, host := Std.Http.URI.Host.name "1example.com", port := none },
path := { segments := #["path"], absolute := true },
query := #[],
fragment := none }
_
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "http://1example.com/path"
  IO.println (repr result)
-- A label that starts with digits followed by letters also parses as a name.
/--
info: Std.Http.RequestTarget.absoluteForm
{ scheme := "http",
authority := some { userInfo := none, host := Std.Http.URI.Host.name "123abc.example.com", port := none },
path := { segments := #["page"], absolute := true },
query := #[],
fragment := none }
_
-/
#guard_msgs in
#eval show IO _ from do
  let result ← runParser parseRequestTarget "http://123abc.example.com/page"
  IO.println (repr result)