aboutsummaryrefslogtreecommitdiff
path: root/v_windows/v/vlib/net/http
diff options
context:
space:
mode:
authorIndrajith K L2022-12-03 17:00:20 +0530
committerIndrajith K L2022-12-03 17:00:20 +0530
commitf5c4671bfbad96bf346bd7e9a21fc4317b4959df (patch)
tree2764fc62da58f2ba8da7ed341643fc359873142f /v_windows/v/vlib/net/http
downloadcli-tools-windows-master.tar.gz
cli-tools-windows-master.tar.bz2
cli-tools-windows-master.zip
Adds most of the toolsHEADmaster
Diffstat (limited to 'v_windows/v/vlib/net/http')
-rw-r--r--v_windows/v/vlib/net/http/backend_nix.c.v74
-rw-r--r--v_windows/v/vlib/net/http/backend_windows.c.v28
-rw-r--r--v_windows/v/vlib/net/http/chunked/dechunk.v72
-rw-r--r--v_windows/v/vlib/net/http/cookie.v413
-rw-r--r--v_windows/v/vlib/net/http/cookie_test.v468
-rw-r--r--v_windows/v/vlib/net/http/download.v18
-rw-r--r--v_windows/v/vlib/net/http/download_nix.c.v52
-rw-r--r--v_windows/v/vlib/net/http/download_windows.c.v29
-rw-r--r--v_windows/v/vlib/net/http/header.v698
-rw-r--r--v_windows/v/vlib/net/http/header_test.v387
-rw-r--r--v_windows/v/vlib/net/http/http.v186
-rw-r--r--v_windows/v/vlib/net/http/http_httpbin_test.v95
-rw-r--r--v_windows/v/vlib/net/http/http_test.v56
-rw-r--r--v_windows/v/vlib/net/http/method.v48
-rw-r--r--v_windows/v/vlib/net/http/request.v324
-rw-r--r--v_windows/v/vlib/net/http/request_test.v138
-rw-r--r--v_windows/v/vlib/net/http/response.v152
-rw-r--r--v_windows/v/vlib/net/http/response_test.v36
-rw-r--r--v_windows/v/vlib/net/http/server.v123
-rw-r--r--v_windows/v/vlib/net/http/server_test.v90
-rw-r--r--v_windows/v/vlib/net/http/status.v255
-rw-r--r--v_windows/v/vlib/net/http/status_test.v49
-rw-r--r--v_windows/v/vlib/net/http/version.v40
23 files changed, 3831 insertions, 0 deletions
diff --git a/v_windows/v/vlib/net/http/backend_nix.c.v b/v_windows/v/vlib/net/http/backend_nix.c.v
new file mode 100644
index 0000000..1243442
--- /dev/null
+++ b/v_windows/v/vlib/net/http/backend_nix.c.v
@@ -0,0 +1,74 @@
+// Copyright (c) 2019-2021 Alexander Medvednikov. All rights reserved.
+// Use of this source code is governed by an MIT license
+// that can be found in the LICENSE file.
+module http
+
+import strings
+import net.openssl
+
+// NOTE(review): presumably re-exported so that importing net.http keeps the
+// openssl module marked as used/linked — confirm against the module system.
+const (
+	is_used = openssl.is_used
+)
+
+// ssl_do performs a blocking HTTPS request using OpenSSL BIO and returns the
+// parsed Response. `port` and `host_name` identify the endpoint; `method` and
+// `path` are used by build_request_headers to form the request text.
+fn (req &Request) ssl_do(port int, method Method, host_name string, path string) ?Response {
+	// ssl_method := C.SSLv23_method()
+	ctx := C.SSL_CTX_new(C.TLS_method())
+	C.SSL_CTX_set_verify_depth(ctx, 4)
+	// Disable the legacy SSLv2/SSLv3 protocols and TLS compression.
+	flags := C.SSL_OP_NO_SSLv2 | C.SSL_OP_NO_SSLv3 | C.SSL_OP_NO_COMPRESSION
+	C.SSL_CTX_set_options(ctx, flags)
+	// NOTE(review): the result of SSL_CTX_load_verify_locations is never
+	// checked, and 'random-org-chain.pem' looks like a leftover fixture path —
+	// confirm whether certificate verification is actually intended here.
+	mut res := C.SSL_CTX_load_verify_locations(ctx, c'random-org-chain.pem', 0)
+	web := C.BIO_new_ssl_connect(ctx)
+	addr := host_name + ':' + port.str()
+	res = C.BIO_set_conn_hostname(web, addr.str)
+	ssl := &openssl.SSL(0)
+	C.BIO_get_ssl(web, &ssl)
+	preferred_ciphers := 'HIGH:!aNULL:!kRSA:!PSK:!SRP:!MD5:!RC4'
+	res = C.SSL_set_cipher_list(voidptr(ssl), &char(preferred_ciphers.str))
+	if res != 1 {
+		println('http: openssl: cipher failed')
+	}
+	// SNI host name, then TCP connect; only the connect result is checked.
+	res = C.SSL_set_tlsext_host_name(voidptr(ssl), host_name.str)
+	res = C.BIO_do_connect(web)
+	if res != 1 {
+		return error('cannot connect the endpoint')
+	}
+	// NOTE(review): handshake and verify results are assigned but never acted
+	// on — a failed handshake or an invalid peer certificate is not reported.
+	res = C.BIO_do_handshake(web)
+	C.SSL_get_peer_certificate(voidptr(ssl))
+	res = C.SSL_get_verify_result(voidptr(ssl))
+	// /////
+	req_headers := req.build_request_headers(method, host_name, path)
+	$if trace_http_request ? {
+		eprintln('> $req_headers')
+	}
+	// println(req_headers)
+	C.BIO_puts(web, &char(req_headers.str))
+	// Read the whole response in bufsize-byte chunks until BIO_read
+	// reports EOF or an error (len <= 0).
+	mut content := strings.new_builder(100)
+	mut buff := [bufsize]byte{}
+	bp := unsafe { &buff[0] }
+	mut readcounter := 0
+	for {
+		readcounter++
+		len := unsafe { C.BIO_read(web, bp, bufsize) }
+		if len <= 0 {
+			break
+		}
+		$if debug_http ? {
+			eprintln('ssl_do, read ${readcounter:4d} | len: $len')
+			eprintln('-'.repeat(20))
+			eprintln(unsafe { tos(bp, len) })
+			eprintln('-'.repeat(20))
+		}
+		unsafe { content.write_ptr(bp, len) }
+	}
+	// Release the BIO chain and the context before parsing.
+	if web != 0 {
+		C.BIO_free_all(web)
+	}
+	if ctx != 0 {
+		C.SSL_CTX_free(ctx)
+	}
+	response_text := content.str()
+	$if trace_http_response ? {
+		eprintln('< $response_text')
+	}
+	return parse_response(response_text)
+}
diff --git a/v_windows/v/vlib/net/http/backend_windows.c.v b/v_windows/v/vlib/net/http/backend_windows.c.v
new file mode 100644
index 0000000..9181166
--- /dev/null
+++ b/v_windows/v/vlib/net/http/backend_windows.c.v
@@ -0,0 +1,28 @@
+// Copyright (c) 2019-2021 Alexander Medvednikov. All rights reserved.
+// Use of this source code is governed by an MIT license
+// that can be found in the LICENSE file.
+module http
+
+#flag windows -I @VEXEROOT/thirdparty/vschannel
+#flag -l ws2_32 -l crypt32 -l secur32 -l user32
+#include "vschannel.c"
+
+fn C.new_tls_context() C.TlsContext
+
+fn (req &Request) ssl_do(port int, method Method, host_name string, path string) ?Response {
+ mut ctx := C.new_tls_context()
+ C.vschannel_init(&ctx)
+ mut buff := unsafe { malloc_noscan(C.vsc_init_resp_buff_size) }
+ addr := host_name
+ sdata := req.build_request_headers(method, host_name, path)
+ $if trace_http_request ? {
+ eprintln('> $sdata')
+ }
+ length := C.request(&ctx, port, addr.to_wide(), sdata.str, &buff)
+ C.vschannel_cleanup(&ctx)
+ response_text := unsafe { buff.vstring_with_len(length) }
+ $if trace_http_response ? {
+ eprintln('< $response_text')
+ }
+ return parse_response(response_text)
+}
diff --git a/v_windows/v/vlib/net/http/chunked/dechunk.v b/v_windows/v/vlib/net/http/chunked/dechunk.v
new file mode 100644
index 0000000..0e82586
--- /dev/null
+++ b/v_windows/v/vlib/net/http/chunked/dechunk.v
@@ -0,0 +1,72 @@
+module chunked
+
+import strings
+// See: https://en.wikipedia.org/wiki/Chunked_transfer_encoding
+// /////////////////////////////////////////////////////////////
+// The chunk size is transferred as a hexadecimal number
+// followed by \r\n as a line separator,
+// followed by a chunk of data of the given size.
+// The end is marked with a chunk with size 0.
+
+// ChunkScanner is a cursor over a chunked-transfer-encoded body.
+struct ChunkScanner {
+mut:
+	pos  int    // current read offset into text
+	text string // the full raw chunked body
+}
+
+// read_chunk_size parses the hexadecimal chunk-size token at the current
+// position, advancing pos past the hex digits. Returns 0 when the input is
+// exhausted or no hex digit is present at pos.
+fn (mut s ChunkScanner) read_chunk_size() int {
+	mut n := 0
+	for {
+		if s.pos >= s.text.len {
+			break
+		}
+		c := s.text[s.pos]
+		if !c.is_hex_digit() {
+			break
+		}
+		// Accumulate one hex digit: shift previous value by 4 bits.
+		n = n << 4
+		n += int(unhex(c))
+		s.pos++
+	}
+	return n
+}
+
+// unhex converts a single hexadecimal digit (either case) to its numeric
+// value. Non-hex bytes yield 0.
+fn unhex(c byte) byte {
+	if c >= `0` && c <= `9` {
+		return c - `0`
+	}
+	if c >= `a` && c <= `f` {
+		return c - `a` + 10
+	}
+	if c >= `A` && c <= `F` {
+		return c - `A` + 10
+	}
+	return 0
+}
+
+// skip_crlf advances the cursor past a \r\n line separator.
+// NOTE(review): blindly adds 2 without verifying the bytes are actually CRLF.
+fn (mut s ChunkScanner) skip_crlf() {
+	s.pos += 2
+}
+
+// read_chunk returns the next chunksize bytes and advances the cursor.
+// NOTE(review): assumes chunksize never runs past text.len — a malformed
+// (truncated) body would panic on the slice; confirm inputs are pre-validated.
+fn (mut s ChunkScanner) read_chunk(chunksize int) string {
+	startpos := s.pos
+	s.pos += chunksize
+	return s.text[startpos..s.pos]
+}
+
+// decode decodes a chunked-transfer-encoded body into the plain payload.
+// Decoding stops at the terminating zero-size chunk.
+// NOTE(review): trailer headers after the last chunk are not handled.
+pub fn decode(text string) string {
+	mut sb := strings.new_builder(100)
+	mut cscanner := ChunkScanner{
+		pos: 0
+		text: text
+	}
+	for {
+		csize := cscanner.read_chunk_size()
+		if 0 == csize {
+			break
+		}
+		// <hex size>\r\n<data>\r\n
+		cscanner.skip_crlf()
+		sb.write_string(cscanner.read_chunk(csize))
+		cscanner.skip_crlf()
+	}
+	// Skip the CRLF that terminates the final zero-size chunk.
+	cscanner.skip_crlf()
+	return sb.str()
+}
diff --git a/v_windows/v/vlib/net/http/cookie.v b/v_windows/v/vlib/net/http/cookie.v
new file mode 100644
index 0000000..d647b3d
--- /dev/null
+++ b/v_windows/v/vlib/net/http/cookie.v
@@ -0,0 +1,413 @@
+// Copyright (c) 2019 Alexander Medvednikov. All rights reserved.
+// Use of this source code is governed by an MIT license
+// that can be found in the LICENSE file.
+module http
+
+import time
+import strings
+
+// Cookie represents an HTTP cookie as used in the Set-Cookie response header
+// and the Cookie request header.
+pub struct Cookie {
+pub mut:
+	name        string
+	value       string
+	path        string    // optional
+	domain      string    // optional
+	expires     time.Time // optional
+	raw_expires string    // for reading cookies only. optional.
+	// max_age=0 means no 'Max-Age' attribute specified.
+	// max_age<0 means delete cookie now, equivalently 'Max-Age: 0'
+	// max_age>0 means Max-Age attribute present and given in seconds
+	max_age   int
+	secure    bool
+	http_only bool
+	same_site SameSite
+	raw       string   // the unparsed Set-Cookie line this cookie came from
+	unparsed  []string // Raw text of unparsed attribute-value pairs
+}
+
+// SameSite allows a server to define a cookie attribute making it impossible for
+// the browser to send this cookie along with cross-site requests. The main
+// goal is to mitigate the risk of cross-origin information leakage, and provide
+// some protection against cross-site request forgery attacks.
+//
+// See https://tools.ietf.org/html/draft-ietf-httpbis-cookie-same-site-00 for details.
+pub enum SameSite {
+	same_site_default_mode = 1
+	same_site_lax_mode
+	same_site_strict_mode
+	same_site_none_mode
+}
+
+// Parses all "Set-Cookie" values from the header `h` and
+// returns the successfully parsed Cookies.
+// read_set_cookies parses every "Set-Cookie" value in `h` and returns the
+// cookies that parsed successfully; malformed lines are skipped silently.
+pub fn read_set_cookies(h map[string][]string) []&Cookie {
+	cookies_s := h['Set-Cookie']
+	cookie_count := cookies_s.len
+	if cookie_count == 0 {
+		return []
+	}
+	mut cookies := []&Cookie{}
+	for _, line in cookies_s {
+		c := parse_cookie(line) or { continue }
+		// NOTE(review): taking the address of the loop-local `c` — relies on
+		// V heap-promoting escaping locals; confirm each element is distinct.
+		cookies << &c
+	}
+	return cookies
+}
+
+// Parses all "Cookie" values from the header `h` and
+// returns the successfully parsed Cookies.
+//
+// if `filter` isn't empty, only cookies of that name are returned
+// read_cookies parses all "Cookie" header values in `h` and returns the
+// successfully parsed cookies. If `filter` is non-empty, only cookies with
+// that exact name are returned.
+//
+// Fixes over the previous version: splitting on ';' kept only the second
+// piece (dropping the 3rd+ pairs of a multi-cookie line), a leading ';' was
+// mishandled (`index_any(...) > 0`), and '=' inside a value was truncated.
+pub fn read_cookies(h map[string][]string, filter string) []&Cookie {
+	lines := h['Cookie']
+	if lines.len == 0 {
+		return []
+	}
+	mut cookies := []&Cookie{}
+	for _, line_ in lines {
+		mut line := line_.trim_space()
+		for line.len > 0 {
+			// Take everything up to the next ';' as one name=value pair and
+			// keep the remainder (not just the next piece) for later rounds.
+			mut part := ''
+			semi := line.index_any(';')
+			if semi >= 0 {
+				part = line[..semi]
+				line = line[semi + 1..]
+			} else {
+				part = line
+				line = ''
+			}
+			part = part.trim_space()
+			if part.len == 0 {
+				continue
+			}
+			// Split on the first '=' only, so values may themselves contain '='.
+			mut name := part
+			mut val := ''
+			eq := part.index_any('=')
+			if eq >= 0 {
+				name = part[..eq]
+				val = part[eq + 1..]
+			}
+			if !is_cookie_name_valid(name) {
+				continue
+			}
+			if filter != '' && filter != name {
+				continue
+			}
+			val = parse_cookie_value(val, true) or { continue }
+			cookies << &Cookie{
+				name: name
+				value: val
+			}
+		}
+	}
+	return cookies
+}
+
+// Returns the serialization of the cookie for use in a Cookie header
+// (if only Name and Value are set) or a Set-Cookie response
+// header (if other fields are set).
+//
+// If c.name is invalid, the empty string is returned.
+// str serializes the cookie for a Cookie header (when only name and value are
+// set) or a Set-Cookie response header (when other fields are set).
+// Returns the empty string when c.name is invalid.
+pub fn (c &Cookie) str() string {
+	if !is_cookie_name_valid(c.name) {
+		return ''
+	}
+	// extra_cookie_length derived from typical length of cookie attributes
+	// see RFC 6265 Sec 4.1.
+	extra_cookie_length := 110
+	mut b := strings.new_builder(c.name.len + c.value.len + c.domain.len + c.path.len +
+		extra_cookie_length)
+	b.write_string(c.name)
+	b.write_string('=')
+	b.write_string(sanitize_cookie_value(c.value))
+	if c.path.len > 0 {
+		b.write_string('; path=')
+		b.write_string(sanitize_cookie_path(c.path))
+	}
+	if c.domain.len > 0 {
+		if valid_cookie_domain(c.domain) {
+			// A `domain` containing illegal characters is not
+			// sanitized but simply dropped which turns the cookie
+			// into a host-only cookie. A leading dot is okay
+			// but won't be sent.
+			mut d := c.domain
+			if d[0] == `.` {
+				d = d.substr(1, d.len)
+			}
+			b.write_string('; domain=')
+			b.write_string(d)
+		} else {
+			// TODO: Log invalid cookie domain warning
+		}
+	}
+	// Per RFC 6265 Sec 5.1.1.5 the year must be 1601 or later; anything at or
+	// before 1600 is treated as "no expiry attribute".
+	if c.expires.year > 1600 {
+		e := c.expires
+		time_str := '$e.weekday_str(), $e.day.str() $e.smonth() $e.year $e.hhmmss() GMT'
+		b.write_string('; expires=')
+		b.write_string(time_str)
+	}
+	// TODO: Fix this. Techically a max age of 0 or less should be 0
+	// We need a way to not have a max age.
+	if c.max_age > 0 {
+		b.write_string('; Max-Age=')
+		b.write_string(c.max_age.str())
+	} else if c.max_age < 0 {
+		b.write_string('; Max-Age=0')
+	}
+	if c.http_only {
+		b.write_string('; HttpOnly')
+	}
+	if c.secure {
+		b.write_string('; Secure')
+	}
+	match c.same_site {
+		.same_site_default_mode {
+			b.write_string('; SameSite')
+		}
+		.same_site_none_mode {
+			b.write_string('; SameSite=None')
+		}
+		.same_site_lax_mode {
+			b.write_string('; SameSite=Lax')
+		}
+		.same_site_strict_mode {
+			b.write_string('; SameSite=Strict')
+		}
+	}
+	return b.str()
+}
+
+// sanitize returns v with every byte rejected by the `valid` predicate
+// removed. When all bytes are valid, a clone of v is returned unchanged.
+fn sanitize(valid fn (byte) bool, v string) string {
+	mut ok := true
+	for i in 0 .. v.len {
+		if valid(v[i]) {
+			continue
+		}
+		// TODO: Warn that we're dropping the invalid byte?
+		ok = false
+		break
+	}
+	if ok {
+		return v.clone()
+	}
+	// Slow path: rebuild the string keeping only valid bytes.
+	return v.bytes().filter(valid(it)).bytestr()
+}
+
+// sanitize_cookie_name replaces the CR and LF bytes in a cookie name with
+// '-' so the name cannot inject new header lines.
+fn sanitize_cookie_name(name string) string {
+	return name.replace('\n', '-').replace('\r', '-')
+}
+
+// https://tools.ietf.org/html/rfc6265#section-4.1.1
+// cookie-value = *cookie-octet / ( DQUOTE *cookie-octet DQUOTE )
+// cookie-octet = %x21 / %x23-2B / %x2D-3A / %x3C-5B / %x5D-7E
+// ; US-ASCII characters excluding CTLs,
+// ; whitespace DQUOTE, comma, semicolon,
+// ; and backslash
+// We loosen this as spaces and commas are common in cookie values
+// but we produce a quoted cookie-value in when value starts or ends
+// with a comma or space.
+// sanitize_cookie_value drops bytes that are invalid in a cookie value and
+// wraps the result in double quotes when it starts or ends with a space or
+// comma (see the loosened RFC 6265 rules above).
+//
+// Fix: the previous version computed the sanitized `val` but then returned
+// the raw input `v` on every path, making the sanitization dead code.
+pub fn sanitize_cookie_value(v string) string {
+	val := sanitize(valid_cookie_value_byte, v)
+	if val.len == 0 {
+		return val
+	}
+	// Check for the existence of a leading/trailing space or comma
+	if val.starts_with(' ') || val.ends_with(' ') || val.starts_with(',') || val.ends_with(',') {
+		return '"$val"'
+	}
+	return val
+}
+
+// sanitize_cookie_path drops bytes that are invalid in a cookie path.
+fn sanitize_cookie_path(v string) string {
+	return sanitize(valid_cookie_path_byte, v)
+}
+
+// A valid cookie-value byte per RFC 6265 4.1.1 (loosened): printable ASCII
+// excluding DQUOTE, ';' and backslash.
+fn valid_cookie_value_byte(b byte) bool {
+	return 0x20 <= b && b < 0x7f && b != `"` && b != `;` && b != `\\`
+}
+
+// A valid cookie-path byte: printable ASCII excluding the ';' attribute
+// delimiter (mirrors Go's net/http validCookiePathByte).
+// Fix: the previous version excluded '!' instead of ';', which let a ';'
+// through and would corrupt the serialized Set-Cookie header.
+fn valid_cookie_path_byte(b byte) bool {
+	return 0x20 <= b && b < 0x7f && b != `;`
+}
+
+// valid_cookie_domain reports whether v is a usable cookie domain.
+// Currently only host names are accepted; IP-literal support is a TODO below.
+fn valid_cookie_domain(v string) bool {
+	if is_cookie_domain_name(v) {
+		return true
+	}
+	// TODO
+	// valid_ip := net.parse_ip(v) or {
+	// 	false
+	// }
+	// if valid_ip {
+	// 	return true
+	// }
+	return false
+}
+
+// is_cookie_domain_name reports whether s is a valid domain name (or a
+// leading-dot variant) for a cookie domain attribute: max 255 bytes total,
+// labels of 1-63 chars, letters/digits/dashes only, no dash adjacent to a
+// dot, and at least one letter overall.
+pub fn is_cookie_domain_name(_s string) bool {
+	mut s := _s
+	if s.len == 0 {
+		return false
+	}
+	if s.len > 255 {
+		return false
+	}
+	// A cookie domain attribute may start with a leading dot.
+	if s[0] == `.` {
+		s = s.substr(1, s.len)
+	}
+	mut last := `.`
+	mut ok := false // ok once we've seen a letter
+	mut part_len := 0
+	for i, _ in s {
+		c := s[i]
+		if (`a` <= c && c <= `z`) || (`A` <= c && c <= `Z`) {
+			// No '_' allowed here (in contrast to package net).
+			ok = true
+			part_len++
+		} else if `0` <= c && c <= `9` {
+			// fine
+			part_len++
+		} else if c == `-` {
+			// Byte before dash cannot be dot.
+			if last == `.` {
+				return false
+			}
+			part_len++
+		} else if c == `.` {
+			// Byte before dot cannot be dot, dash.
+			if last == `.` || last == `-` {
+				return false
+			}
+			// Each label must be 1-63 bytes.
+			if part_len > 63 || part_len == 0 {
+				return false
+			}
+			part_len = 0
+		} else {
+			return false
+		}
+		last = c
+	}
+	// The final label may not end in a dash or exceed 63 bytes.
+	if last == `-` || part_len > 63 {
+		return false
+	}
+	return ok
+}
+
+// parse_cookie_value validates a raw cookie value, optionally stripping one
+// pair of surrounding double quotes first, and errors on any invalid byte.
+fn parse_cookie_value(_raw string, allow_double_quote bool) ?string {
+	mut raw := _raw
+	// Strip the quotes, if present
+	if allow_double_quote && raw.len > 1 && raw[0] == `"` && raw[raw.len - 1] == `"` {
+		raw = raw.substr(1, raw.len - 1)
+	}
+	for i in 0 .. raw.len {
+		if !valid_cookie_value_byte(raw[i]) {
+			return error('http.cookie: invalid cookie value')
+		}
+	}
+	return raw
+}
+
+// is_cookie_name_valid reports whether name is a usable cookie name:
+// non-empty and made only of printable US-ASCII without space or DEL
+// (byte values 33..126).
+fn is_cookie_name_valid(name string) bool {
+	if name.len == 0 {
+		return false
+	}
+	for c in name {
+		if c < 33 || c > 126 {
+			return false
+		}
+	}
+	return true
+}
+
+// parse_cookie parses one Set-Cookie line into a Cookie, collecting
+// unrecognized attributes into c.unparsed. Returns an error for a malformed
+// line (empty, no name=value pair, invalid name or value).
+//
+// Fixes over the previous version: the attribute loop started at parts[0],
+// so the name=value pair itself was re-processed and appended to unparsed;
+// and the Max-Age guard was inverted (`val[0] != 0`), which made every
+// normal nonzero Max-Age break out of the loop without being stored — Go's
+// parser breaks only on a leading '0' (e.g. "01").
+fn parse_cookie(line string) ?Cookie {
+	mut parts := line.trim_space().split(';')
+	if parts.len == 1 && parts[0] == '' {
+		return error('malformed cookie')
+	}
+	parts[0] = parts[0].trim_space()
+	// NOTE(review): a value containing '=' is rejected here (keyval.len != 2);
+	// Go splits on the first '=' only — confirm whether that looseness is wanted.
+	keyval := parts[0].split('=')
+	if keyval.len != 2 {
+		return error('malformed cookie')
+	}
+	name := keyval[0]
+	raw_value := keyval[1]
+	if !is_cookie_name_valid(name) {
+		return error('malformed cookie')
+	}
+	value := parse_cookie_value(raw_value, true) or { return error('malformed cookie') }
+	mut c := Cookie{
+		name: name
+		value: value
+		raw: line
+	}
+	// Attributes start after the name=value pair in parts[0].
+	for i in 1 .. parts.len {
+		parts[i] = parts[i].trim_space()
+		if parts[i].len == 0 {
+			continue
+		}
+		mut attr := parts[i]
+		mut raw_val := ''
+		if attr.contains('=') {
+			pieces := attr.split('=')
+			attr = pieces[0]
+			raw_val = pieces[1]
+		}
+		lower_attr := attr.to_lower()
+		val := parse_cookie_value(raw_val, false) or {
+			c.unparsed << parts[i]
+			continue
+		}
+		match lower_attr {
+			'samesite' {
+				lower_val := val.to_lower()
+				match lower_val {
+					'lax' { c.same_site = .same_site_lax_mode }
+					'strict' { c.same_site = .same_site_strict_mode }
+					'none' { c.same_site = .same_site_none_mode }
+					else { c.same_site = .same_site_default_mode }
+				}
+			}
+			'secure' {
+				c.secure = true
+				continue
+			}
+			'httponly' {
+				c.http_only = true
+				continue
+			}
+			'domain' {
+				c.domain = val
+				continue
+			}
+			'max-age' {
+				mut secs := val.int()
+				// Reject a nonzero value written with a leading zero
+				// ("01"); `break` exits the attribute for-loop.
+				if secs != 0 && val[0] == `0` {
+					break
+				}
+				if secs <= 0 {
+					secs = -1
+				}
+				c.max_age = secs
+				continue
+			}
+			// TODO: Fix this once time works better
+			// 'expires' {
+			// 	c.raw_expires = val
+			// 	mut exptime := time.parse_iso(val)
+			// 	if exptime.year == 0 {
+			// 		exptime = time.parse_iso('Mon, 02-Jan-2006 15:04:05 MST')
+			// 	}
+			// 	c.expires = exptime
+			// 	continue
+			// }
+			'path' {
+				c.path = val
+				continue
+			}
+			else {
+				c.unparsed << parts[i]
+			}
+		}
+	}
+	return c
+}
diff --git a/v_windows/v/vlib/net/http/cookie_test.v b/v_windows/v/vlib/net/http/cookie_test.v
new file mode 100644
index 0000000..6a0c0cd
--- /dev/null
+++ b/v_windows/v/vlib/net/http/cookie_test.v
@@ -0,0 +1,468 @@
+import net.http
+
+// One Cookie.str() serialization case: cookie in, expected raw header out.
+struct SetCookieTestCase {
+	cookie &http.Cookie
+	raw    string
+}
+
+// One read_set_cookies case: Set-Cookie header in, expected cookies out.
+struct ReadSetCookiesTestCase {
+	header  map[string][]string
+	cookies []&http.Cookie
+}
+
+// One multi-cookie serialization case: cookies in, expected Cookie header out.
+struct AddCookieTestCase {
+	cookie []&http.Cookie
+	raw    string
+}
+
+// Test tables ported from Go's net/http cookie tests.
+// Fix: the last read_set_cookies_tests case declared the cookie name
+// 'special-7' but its raw field said 'special-8=","' — a copy-paste slip.
+const (
+	write_set_cookie_tests = [
+		SetCookieTestCase{
+			cookie: &http.Cookie{
+				name: 'cookie-1'
+				value: 'v1'
+			}
+			raw: 'cookie-1=v1'
+		},
+		SetCookieTestCase{
+			cookie: &http.Cookie{
+				name: 'cookie-2'
+				value: 'two'
+				max_age: 3600
+			}
+			raw: 'cookie-2=two; Max-Age=3600'
+		},
+		SetCookieTestCase{
+			cookie: &http.Cookie{
+				name: 'cookie-3'
+				value: 'three'
+				domain: '.example.com'
+			}
+			raw: 'cookie-3=three; domain=example.com'
+		},
+		SetCookieTestCase{
+			cookie: &http.Cookie{
+				name: 'cookie-4'
+				value: 'four'
+				path: '/restricted/'
+			}
+			raw: 'cookie-4=four; path=/restricted/'
+		},
+		SetCookieTestCase{
+			cookie: &http.Cookie{
+				name: 'cookie-5'
+				value: 'five'
+				domain: 'wrong;bad.abc'
+			}
+			raw: 'cookie-5=five'
+		},
+		SetCookieTestCase{
+			cookie: &http.Cookie{
+				name: 'cookie-6'
+				value: 'six'
+				domain: 'bad-.abc'
+			}
+			raw: 'cookie-6=six'
+		},
+		// SetCookieTestCase{
+		// 	cookie: &http.Cookie{name: 'cookie-7', value: 'seven', domain: '127.0.0.1'},
+		// 	raw: 'cookie-7=seven; domain=127.0.0.1'
+		// },
+		SetCookieTestCase{
+			cookie: &http.Cookie{
+				name: 'cookie-8'
+				value: 'eight'
+				domain: '::1'
+			}
+			raw: 'cookie-8=eight'
+		},
+		// {
+		// 	cookie: &http.Cookie{name: 'cookie-9', value: 'expiring', expires: time.unix(1257894000, 0)},
+		// 	'cookie-9=expiring; Expires=Tue, 10 Nov 2009 23:00:00 GMT',
+		// },
+		// According to IETF 6265 Section 5.1.1.5, the year cannot be less than 1601
+		// SetCookieTestCase{
+		// 	cookie: &http.Cookie{name: 'cookie-10', value: 'expiring-1601', expires: time.parse('Mon, 01 Jan 1601 01:01:01 GMT')},
+		// 	raw: 'cookie-10=expiring-1601; Expires=Mon, 01 Jan 1601 01:01:01 GMT'
+		// },
+		// SetCookieTestCase{
+		// 	cookie: &http.Cookie{name: 'cookie-11', value: 'invalid-expiry', expires: time.parse('Mon, 01 Jan 1600 01:01:01 GMT')},
+		// 	raw: 'cookie-11=invalid-expiry'
+		// },
+		SetCookieTestCase{
+			cookie: &http.Cookie{
+				name: 'cookie-12'
+				value: 'samesite-default'
+				same_site: .same_site_default_mode
+			}
+			raw: 'cookie-12=samesite-default; SameSite'
+		},
+		SetCookieTestCase{
+			cookie: &http.Cookie{
+				name: 'cookie-13'
+				value: 'samesite-lax'
+				same_site: .same_site_lax_mode
+			}
+			raw: 'cookie-13=samesite-lax; SameSite=Lax'
+		},
+		SetCookieTestCase{
+			cookie: &http.Cookie{
+				name: 'cookie-14'
+				value: 'samesite-strict'
+				same_site: .same_site_strict_mode
+			}
+			raw: 'cookie-14=samesite-strict; SameSite=Strict'
+		},
+		SetCookieTestCase{
+			cookie: &http.Cookie{
+				name: 'cookie-15'
+				value: 'samesite-none'
+				same_site: .same_site_none_mode
+			}
+			raw: 'cookie-15=samesite-none; SameSite=None'
+		},
+		// The 'special' cookies have values containing commas or spaces which
+		// are disallowed by RFC 6265 but are common in the wild.
+		SetCookieTestCase{
+			cookie: &http.Cookie{
+				name: 'special-1'
+				value: 'a z'
+			}
+			raw: 'special-1=a z'
+		},
+		SetCookieTestCase{
+			cookie: &http.Cookie{
+				name: 'special-2'
+				value: ' z'
+			}
+			raw: 'special-2=" z"'
+		},
+		SetCookieTestCase{
+			cookie: &http.Cookie{
+				name: 'special-3'
+				value: 'a '
+			}
+			raw: 'special-3="a "'
+		},
+		SetCookieTestCase{
+			cookie: &http.Cookie{
+				name: 'special-4'
+				value: ' '
+			}
+			raw: 'special-4=" "'
+		},
+		SetCookieTestCase{
+			cookie: &http.Cookie{
+				name: 'special-5'
+				value: 'a,z'
+			}
+			raw: 'special-5=a,z'
+		},
+		SetCookieTestCase{
+			cookie: &http.Cookie{
+				name: 'special-6'
+				value: ',z'
+			}
+			raw: 'special-6=",z"'
+		},
+		SetCookieTestCase{
+			cookie: &http.Cookie{
+				name: 'special-7'
+				value: 'a,'
+			}
+			raw: 'special-7="a,"'
+		},
+		SetCookieTestCase{
+			cookie: &http.Cookie{
+				name: 'special-8'
+				value: ','
+			}
+			raw: 'special-8=","'
+		},
+		SetCookieTestCase{
+			cookie: &http.Cookie{
+				name: 'empty-value'
+				value: ''
+			}
+			raw: 'empty-value='
+		},
+		SetCookieTestCase{
+			cookie: &http.Cookie{
+				name: ''
+			}
+			raw: ''
+		},
+		SetCookieTestCase{
+			cookie: &http.Cookie{
+				name: '\t'
+			}
+			raw: ''
+		},
+		SetCookieTestCase{
+			cookie: &http.Cookie{
+				name: '\r'
+			}
+			raw: ''
+		},
+		SetCookieTestCase{
+			cookie: &http.Cookie{
+				name: 'a\nb'
+				value: 'v'
+			}
+			raw: ''
+		},
+		SetCookieTestCase{
+			cookie: &http.Cookie{
+				name: 'a\nb'
+				value: 'v'
+			}
+			raw: ''
+		},
+		SetCookieTestCase{
+			cookie: &http.Cookie{
+				name: 'a\rb'
+				value: 'v'
+			}
+			raw: ''
+		},
+	]
+	add_cookies_tests = [
+		AddCookieTestCase{
+			cookie: []
+			raw: ''
+		},
+		AddCookieTestCase{
+			cookie: [&http.Cookie{
+				name: 'cookie-1'
+				value: 'v1'
+			}]
+			raw: 'cookie-1=v1'
+		},
+		AddCookieTestCase{
+			cookie: [&http.Cookie{
+				name: 'cookie-1'
+				value: 'v1'
+			},
+				&http.Cookie{
+				name: 'cookie-2'
+				value: 'v2'
+			},
+				&http.Cookie{
+				name: 'cookie-3'
+				value: 'v3'
+			},
+			]
+			raw: 'cookie-1=v1; cookie-2=v2; cookie-3=v3'
+		},
+	]
+	read_set_cookies_tests = [
+		ReadSetCookiesTestCase{
+			header: {
+				'Set-Cookie': ['Cookie-1=v1']
+			}
+			cookies: [&http.Cookie{
+				name: 'Cookie-1'
+				value: 'v1'
+				raw: 'Cookie-1=v1'
+			}]
+		},
+		// ReadSetCookiesTestCase{
+		// 	header: {"Set-Cookie": ["NID=99=YsDT5i3E-CXax-; expires=Wed, 23-Nov-2011 01:05:03 GMT; path=/; domain=.google.ch; HttpOnly"]},
+		// 	cookies: [&http.Cookie{
+		// 		name: "NID",
+		// 		value: "99=YsDT5i3E-CXax-",
+		// 		path: "/",
+		// 		domain: ".google.ch",
+		// 		http_only: true,
+		// 		expires: time.parse_iso('Wed, 23-Nov-2011 01:05:03 GMT'),
+		// 		raw_expires: "Wed, 23-Nov-2011 01:05:03 GMT",
+		// 		raw: "NID=99=YsDT5i3E-CXax-; expires=Wed, 23-Nov-2011 01:05:03 GMT; path=/; domain=.google.ch; HttpOnly"
+		// 	}]
+		// },
+		// ReadSetCookiesTestCase{
+		// 	header: {"Set-Cookie": [".ASPXAUTH=7E3AA; expires=Wed, 07-Mar-2012 14:25:06 GMT; path=/; HttpOnly"]},
+		// 	cookies: [&http.Cookie{
+		// 		name: ".ASPXAUTH",
+		// 		value: "7E3AA",
+		// 		path: "/",
+		// 		expires: time.parse_iso('Wed, 07-Mar-2012 14:25:06 GMT'),
+		// 		raw_expires: "Wed, 07-Mar-2012 14:25:06 GMT",
+		// 		http_only: true,
+		// 		raw: ".ASPXAUTH=7E3AA; expires=Wed, 07-Mar-2012 14:25:06 GMT; path=/; HttpOnly"
+		// 	}]
+		// },
+		ReadSetCookiesTestCase{
+			header: {
+				'Set-Cookie': ['ASP.NET_SessionId=foo; path=/; HttpOnly']
+			}
+			cookies: [
+				&http.Cookie{
+					name: 'ASP.NET_SessionId'
+					value: 'foo'
+					path: '/'
+					http_only: true
+					raw: 'ASP.NET_SessionId=foo; path=/; HttpOnly'
+				},
+			]
+		},
+		ReadSetCookiesTestCase{
+			header: {
+				'Set-Cookie': ['samesitedefault=foo; SameSite']
+			}
+			cookies: [
+				&http.Cookie{
+					name: 'samesitedefault'
+					value: 'foo'
+					same_site: .same_site_default_mode
+					raw: 'samesitedefault=foo; SameSite'
+				},
+			]
+		},
+		ReadSetCookiesTestCase{
+			header: {
+				'Set-Cookie': ['samesitelax=foo; SameSite=Lax']
+			}
+			cookies: [
+				&http.Cookie{
+					name: 'samesitelax'
+					value: 'foo'
+					same_site: .same_site_lax_mode
+					raw: 'samesitelax=foo; SameSite=Lax'
+				},
+			]
+		},
+		ReadSetCookiesTestCase{
+			header: {
+				'Set-Cookie': ['samesitestrict=foo; SameSite=Strict']
+			}
+			cookies: [
+				&http.Cookie{
+					name: 'samesitestrict'
+					value: 'foo'
+					same_site: .same_site_strict_mode
+					raw: 'samesitestrict=foo; SameSite=Strict'
+				},
+			]
+		},
+		ReadSetCookiesTestCase{
+			header: {
+				'Set-Cookie': ['samesitenone=foo; SameSite=None']
+			}
+			cookies: [
+				&http.Cookie{
+					name: 'samesitenone'
+					value: 'foo'
+					same_site: .same_site_none_mode
+					raw: 'samesitenone=foo; SameSite=None'
+				},
+			]
+		},
+		// Make sure we can properly read back the Set-Cookie headers we create
+		// for values containing spaces or commas:
+		ReadSetCookiesTestCase{
+			header: {
+				'Set-Cookie': ['special-1=a z']
+			}
+			cookies: [
+				&http.Cookie{
+					name: 'special-1'
+					value: 'a z'
+					raw: 'special-1=a z'
+				},
+			]
+		},
+		ReadSetCookiesTestCase{
+			header: {
+				'Set-Cookie': ['special-2=" z"']
+			}
+			cookies: [
+				&http.Cookie{
+					name: 'special-2'
+					value: ' z'
+					raw: 'special-2=" z"'
+				},
+			]
+		},
+		ReadSetCookiesTestCase{
+			header: {
+				'Set-Cookie': ['special-3="a "']
+			}
+			cookies: [
+				&http.Cookie{
+					name: 'special-3'
+					value: 'a '
+					raw: 'special-3="a "'
+				},
+			]
+		},
+		ReadSetCookiesTestCase{
+			header: {
+				'Set-Cookie': ['special-4=" "']
+			}
+			cookies: [
+				&http.Cookie{
+					name: 'special-4'
+					value: ' '
+					raw: 'special-4=" "'
+				},
+			]
+		},
+		ReadSetCookiesTestCase{
+			header: {
+				'Set-Cookie': ['special-5=a,z']
+			}
+			cookies: [
+				&http.Cookie{
+					name: 'special-5'
+					value: 'a,z'
+					raw: 'special-5=a,z'
+				},
+			]
+		},
+		ReadSetCookiesTestCase{
+			header: {
+				'Set-Cookie': ['special-6=",z"']
+			}
+			cookies: [
+				&http.Cookie{
+					name: 'special-6'
+					value: ',z'
+					raw: 'special-6=",z"'
+				},
+			]
+		},
+		ReadSetCookiesTestCase{
+			header: {
+				'Set-Cookie': ['special-7=","']
+			}
+			cookies: [
+				&http.Cookie{
+					name: 'special-7'
+					value: ','
+					raw: 'special-7=","'
+				},
+			]
+		}
+		// TODO(bradfitz): users have reported seeing this in the
+		// wild, but do browsers handle it? RFC 6265 just says "don't
+		// do that" (section 3) and then never mentions header folding
+		// again.
+		// Header{"Set-Cookie": ["ASP.NET_SessionId=foo; path=/; HttpOnly, .ASPXAUTH=7E3AA; expires=Wed, 07-Mar-2012 14:25:06 GMT; path=/; HttpOnly"]},
+	]
+)
+
+// Verifies Cookie.str() serialization against the expected raw strings.
+fn test_write_set_cookies() {
+	for _, tt in write_set_cookie_tests {
+		assert tt.cookie.str() == tt.raw
+	}
+}
+
+// Round-trips each Set-Cookie header value through read_set_cookies and
+// re-serializes the first parsed cookie, expecting the original header text.
+fn test_read_set_cookies() {
+	for _, tt in read_set_cookies_tests {
+		h := tt.header['Set-Cookie'][0]
+		c := http.read_set_cookies(tt.header)
+		println(h)
+		println(c[0].str())
+		assert c[0].str() == h
+	}
+}
diff --git a/v_windows/v/vlib/net/http/download.v b/v_windows/v/vlib/net/http/download.v
new file mode 100644
index 0000000..455c1e0
--- /dev/null
+++ b/v_windows/v/vlib/net/http/download.v
@@ -0,0 +1,18 @@
+// Copyright (c) 2019-2021 Alexander Medvednikov. All rights reserved.
+// Use of this source code is governed by an MIT license
+// that can be found in the LICENSE file.
+module http
+
+import os
+
+pub fn download_file(url string, out string) ? {
+ $if debug_http ? {
+ println('download file url=$url out=$out')
+ }
+ s := get(url) or { return err }
+ if s.status() != .ok {
+ return error('received http code $s.status_code')
+ }
+ os.write_file(out, s.text) ?
+ // download_file_with_progress(url, out, empty, empty)
+}
diff --git a/v_windows/v/vlib/net/http/download_nix.c.v b/v_windows/v/vlib/net/http/download_nix.c.v
new file mode 100644
index 0000000..724a256
--- /dev/null
+++ b/v_windows/v/vlib/net/http/download_nix.c.v
@@ -0,0 +1,52 @@
+// Copyright (c) 2019-2021 Alexander Medvednikov. All rights reserved.
+// Use of this source code is governed by an MIT license
+// that can be found in the LICENSE file.
+module http
+
+// DownloadFn is the progress-callback type, invoked with the total number of
+// bytes written so far (see the commented curl code below).
+type DownloadFn = fn (written int)
+
+/*
+struct DownloadStruct {
+mut:
+	stream  voidptr
+	written int
+	cb      DownloadFn
+}
+*/
+// download_cb is a placeholder write callback for the (currently disabled)
+// libcurl-based download path; its whole body is commented out.
+fn download_cb(ptr voidptr, size size_t, nmemb size_t, userp voidptr) {
+	/*
+	mut data := &DownloadStruct(userp)
+	written := C.fwrite(ptr, size, nmemb, data.stream)
+	data.written += written
+	data.cb(data.written)
+	//#data->cb(data->written); // TODO
+	return written
+	*/
+}
+
+// download_file_with_progress is currently a no-op stub: the libcurl
+// implementation it is meant to wrap is commented out below.
+pub fn download_file_with_progress(url string, out string, cb DownloadFn, cb_finished fn ()) {
+	/*
+	curl := C.curl_easy_init()
+	if isnil(curl) {
+		return
+	}
+	cout := out.str
+	fp := C.fopen(cout, 'wb')
+	C.curl_easy_setopt(curl, CURLOPT_URL, url.str)
+	C.curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, download_cb)
+	data := &DownloadStruct {
+		stream:fp
+		cb: cb
+	}
+	C.curl_easy_setopt(curl, CURLOPT_WRITEDATA, data)
+	mut d := 0.0
+	C.curl_easy_getinfo(curl, CURLINFO_CONTENT_LENGTH_DOWNLOAD, &d)
+	C.curl_easy_perform(curl)
+	C.curl_easy_cleanup(curl)
+	C.fclose(fp)
+	cb_finished()
+	*/
+}
+
+// empty is a no-op callback placeholder (see the commented call in download.v).
+fn empty() {
+}
diff --git a/v_windows/v/vlib/net/http/download_windows.c.v b/v_windows/v/vlib/net/http/download_windows.c.v
new file mode 100644
index 0000000..422b6da
--- /dev/null
+++ b/v_windows/v/vlib/net/http/download_windows.c.v
@@ -0,0 +1,29 @@
+// Copyright (c) 2019-2021 Alexander Medvednikov. All rights reserved.
+// Use of this source code is governed by an MIT license
+// that can be found in the LICENSE file.
+
+module http
+
+#flag -l urlmon
+
+#include <urlmon.h>
+
+// download_file_with_progress is a no-op stub on Windows; the urlmon-based
+// implementation is commented out below.
+fn download_file_with_progress(url string, out string, cb voidptr, cb_finished voidptr) {
+}
+
+/*
+pub fn download_file(url, out string) {
+ C.URLDownloadToFile(0, url.to_wide(), out.to_wide(), 0, 0)
+ /*
+ if (res == S_OK) {
+ println('Download Ok')
+ # } else if(res == E_OUTOFMEMORY) {
+ println('Buffer length invalid, or insufficient memory')
+ # } else if(res == INET_E_DOWNLOAD_FAILURE) {
+ println('URL is invalid')
+ # } else {
+ # printf("Download error: %d\n", res);
+ # }
+ */
+}
+*/
diff --git a/v_windows/v/vlib/net/http/header.v b/v_windows/v/vlib/net/http/header.v
new file mode 100644
index 0000000..c05bdbc
--- /dev/null
+++ b/v_windows/v/vlib/net/http/header.v
@@ -0,0 +1,698 @@
+// Copyright (c) 2019-2021 Alexander Medvednikov. All rights reserved.
+// Use of this source code is governed by an MIT license
+// that can be found in the LICENSE file.
+module http
+
+import strings
+
+// CommonHeader is an enum of the most common HTTP headers
+pub enum CommonHeader {
+ accept
+ accept_ch
+ accept_charset
+ accept_ch_lifetime
+ accept_encoding
+ accept_language
+ accept_patch
+ accept_post
+ accept_ranges
+ access_control_allow_credentials
+ access_control_allow_headers
+ access_control_allow_methods
+ access_control_allow_origin
+ access_control_expose_headers
+ access_control_max_age
+ access_control_request_headers
+ access_control_request_method
+ age
+ allow
+ alt_svc
+ authorization
+ cache_control
+ clear_site_data
+ connection
+ content_disposition
+ content_encoding
+ content_language
+ content_length
+ content_location
+ content_range
+ content_security_policy
+ content_security_policy_report_only
+ content_type
+ cookie
+ cross_origin_embedder_policy
+ cross_origin_opener_policy
+ cross_origin_resource_policy
+ date
+ device_memory
+ digest
+ dnt
+ early_data
+ etag
+ expect
+ expect_ct
+ expires
+ feature_policy
+ forwarded
+ from
+ host
+ if_match
+ if_modified_since
+ if_none_match
+ if_range
+ if_unmodified_since
+ index
+ keep_alive
+ large_allocation
+ last_modified
+ link
+ location
+ nel
+ origin
+ pragma
+ proxy_authenticate
+ proxy_authorization
+ range
+ referer
+ referrer_policy
+ retry_after
+ save_data
+ sec_fetch_dest
+ sec_fetch_mode
+ sec_fetch_site
+ sec_fetch_user
+ sec_websocket_accept
+ server
+ server_timing
+ set_cookie
+ sourcemap
+ strict_transport_security
+ te
+ timing_allow_origin
+ tk
+ trailer
+ transfer_encoding
+ upgrade
+ upgrade_insecure_requests
+ user_agent
+ vary
+ via
+ want_digest
+ warning
+ www_authenticate
+ x_content_type_options
+ x_dns_prefetch_control
+ x_forwarded_for
+ x_forwarded_host
+ x_forwarded_proto
+ x_frame_options
+ x_xss_protection
+}
+
+pub fn (h CommonHeader) str() string {
+ return match h {
+ .accept { 'Accept' }
+ .accept_ch { 'Accept-CH' }
+ .accept_charset { 'Accept-Charset' }
+ .accept_ch_lifetime { 'Accept-CH-Lifetime' }
+ .accept_encoding { 'Accept-Encoding' }
+ .accept_language { 'Accept-Language' }
+ .accept_patch { 'Accept-Patch' }
+ .accept_post { 'Accept-Post' }
+ .accept_ranges { 'Accept-Ranges' }
+ .access_control_allow_credentials { 'Access-Control-Allow-Credentials' }
+ .access_control_allow_headers { 'Access-Control-Allow-Headers' }
+ .access_control_allow_methods { 'Access-Control-Allow-Methods' }
+ .access_control_allow_origin { 'Access-Control-Allow-Origin' }
+ .access_control_expose_headers { 'Access-Control-Expose-Headers' }
+ .access_control_max_age { 'Access-Control-Max-Age' }
+ .access_control_request_headers { 'Access-Control-Request-Headers' }
+ .access_control_request_method { 'Access-Control-Request-Method' }
+ .age { 'Age' }
+ .allow { 'Allow' }
+ .alt_svc { 'Alt-Svc' }
+ .authorization { 'Authorization' }
+ .cache_control { 'Cache-Control' }
+ .clear_site_data { 'Clear-Site-Data' }
+ .connection { 'Connection' }
+ .content_disposition { 'Content-Disposition' }
+ .content_encoding { 'Content-Encoding' }
+ .content_language { 'Content-Language' }
+ .content_length { 'Content-Length' }
+ .content_location { 'Content-Location' }
+ .content_range { 'Content-Range' }
+ .content_security_policy { 'Content-Security-Policy' }
+ .content_security_policy_report_only { 'Content-Security-Policy-Report-Only' }
+ .content_type { 'Content-Type' }
+ .cookie { 'Cookie' }
+ .cross_origin_embedder_policy { 'Cross-Origin-Embedder-Policy' }
+ .cross_origin_opener_policy { 'Cross-Origin-Opener-Policy' }
+ .cross_origin_resource_policy { 'Cross-Origin-Resource-Policy' }
+ .date { 'Date' }
+ .device_memory { 'Device-Memory' }
+ .digest { 'Digest' }
+ .dnt { 'DNT' }
+ .early_data { 'Early-Data' }
+ .etag { 'ETag' }
+ .expect { 'Expect' }
+ .expect_ct { 'Expect-CT' }
+ .expires { 'Expires' }
+ .feature_policy { 'Feature-Policy' }
+ .forwarded { 'Forwarded' }
+ .from { 'From' }
+ .host { 'Host' }
+ .if_match { 'If-Match' }
+ .if_modified_since { 'If-Modified-Since' }
+ .if_none_match { 'If-None-Match' }
+ .if_range { 'If-Range' }
+ .if_unmodified_since { 'If-Unmodified-Since' }
+ .index { 'Index' }
+ .keep_alive { 'Keep-Alive' }
+ .large_allocation { 'Large-Allocation' }
+ .last_modified { 'Last-Modified' }
+ .link { 'Link' }
+ .location { 'Location' }
+ .nel { 'NEL' }
+ .origin { 'Origin' }
+ .pragma { 'Pragma' }
+ .proxy_authenticate { 'Proxy-Authenticate' }
+ .proxy_authorization { 'Proxy-Authorization' }
+ .range { 'Range' }
+ .referer { 'Referer' }
+ .referrer_policy { 'Referrer-Policy' }
+ .retry_after { 'Retry-After' }
+ .save_data { 'Save-Data' }
+ .sec_fetch_dest { 'Sec-Fetch-Dest' }
+ .sec_fetch_mode { 'Sec-Fetch-Mode' }
+ .sec_fetch_site { 'Sec-Fetch-Site' }
+ .sec_fetch_user { 'Sec-Fetch-User' }
+ .sec_websocket_accept { 'Sec-WebSocket-Accept' }
+ .server { 'Server' }
+ .server_timing { 'Server-Timing' }
+ .set_cookie { 'Set-Cookie' }
+ .sourcemap { 'SourceMap' }
+ .strict_transport_security { 'Strict-Transport-Security' }
+ .te { 'TE' }
+ .timing_allow_origin { 'Timing-Allow-Origin' }
+ .tk { 'Tk' }
+ .trailer { 'Trailer' }
+ .transfer_encoding { 'Transfer-Encoding' }
+ .upgrade { 'Upgrade' }
+ .upgrade_insecure_requests { 'Upgrade-Insecure-Requests' }
+ .user_agent { 'User-Agent' }
+ .vary { 'Vary' }
+ .via { 'Via' }
+ .want_digest { 'Want-Digest' }
+ .warning { 'Warning' }
+ .www_authenticate { 'WWW-Authenticate' }
+ .x_content_type_options { 'X-Content-Type-Options' }
+ .x_dns_prefetch_control { 'X-DNS-Prefetch-Control' }
+ .x_forwarded_for { 'X-Forwarded-For' }
+ .x_forwarded_host { 'X-Forwarded-Host' }
+ .x_forwarded_proto { 'X-Forwarded-Proto' }
+ .x_frame_options { 'X-Frame-Options' }
+ .x_xss_protection { 'X-XSS-Protection' }
+ }
+}
+
+const common_header_map = {
+ 'accept': CommonHeader.accept
+ 'accept-ch': .accept_ch
+ 'accept-charset': .accept_charset
+ 'accept-ch-lifetime': .accept_ch_lifetime
+ 'accept-encoding': .accept_encoding
+ 'accept-language': .accept_language
+ 'accept-patch': .accept_patch
+ 'accept-post': .accept_post
+ 'accept-ranges': .accept_ranges
+ 'access-control-allow-credentials': .access_control_allow_credentials
+ 'access-control-allow-headers': .access_control_allow_headers
+ 'access-control-allow-methods': .access_control_allow_methods
+ 'access-control-allow-origin': .access_control_allow_origin
+ 'access-control-expose-headers': .access_control_expose_headers
+ 'access-control-max-age': .access_control_max_age
+ 'access-control-request-headers': .access_control_request_headers
+ 'access-control-request-method': .access_control_request_method
+ 'age': .age
+ 'allow': .allow
+ 'alt-svc': .alt_svc
+ 'authorization': .authorization
+ 'cache-control': .cache_control
+ 'clear-site-data': .clear_site_data
+ 'connection': .connection
+ 'content-disposition': .content_disposition
+ 'content-encoding': .content_encoding
+ 'content-language': .content_language
+ 'content-length': .content_length
+ 'content-location': .content_location
+ 'content-range': .content_range
+ 'content-security-policy': .content_security_policy
+ 'content-security-policy-report-only': .content_security_policy_report_only
+ 'content-type': .content_type
+ 'cookie': .cookie
+ 'cross-origin-embedder-policy': .cross_origin_embedder_policy
+ 'cross-origin-opener-policy': .cross_origin_opener_policy
+ 'cross-origin-resource-policy': .cross_origin_resource_policy
+ 'date': .date
+ 'device-memory': .device_memory
+ 'digest': .digest
+ 'dnt': .dnt
+ 'early-data': .early_data
+ 'etag': .etag
+ 'expect': .expect
+ 'expect-ct': .expect_ct
+ 'expires': .expires
+ 'feature-policy': .feature_policy
+ 'forwarded': .forwarded
+ 'from': .from
+ 'host': .host
+ 'if-match': .if_match
+ 'if-modified-since': .if_modified_since
+ 'if-none-match': .if_none_match
+ 'if-range': .if_range
+ 'if-unmodified-since': .if_unmodified_since
+ 'index': .index
+ 'keep-alive': .keep_alive
+ 'large-allocation': .large_allocation
+ 'last-modified': .last_modified
+ 'link': .link
+ 'location': .location
+ 'nel': .nel
+ 'origin': .origin
+ 'pragma': .pragma
+ 'proxy-authenticate': .proxy_authenticate
+ 'proxy-authorization': .proxy_authorization
+ 'range': .range
+ 'referer': .referer
+ 'referrer-policy': .referrer_policy
+ 'retry-after': .retry_after
+ 'save-data': .save_data
+ 'sec-fetch-dest': .sec_fetch_dest
+ 'sec-fetch-mode': .sec_fetch_mode
+ 'sec-fetch-site': .sec_fetch_site
+ 'sec-fetch-user': .sec_fetch_user
+ 'sec-websocket-accept': .sec_websocket_accept
+ 'server': .server
+ 'server-timing': .server_timing
+ 'set-cookie': .set_cookie
+ 'sourcemap': .sourcemap
+ 'strict-transport-security': .strict_transport_security
+ 'te': .te
+ 'timing-allow-origin': .timing_allow_origin
+ 'tk': .tk
+ 'trailer': .trailer
+ 'transfer-encoding': .transfer_encoding
+ 'upgrade': .upgrade
+ 'upgrade-insecure-requests': .upgrade_insecure_requests
+ 'user-agent': .user_agent
+ 'vary': .vary
+ 'via': .via
+ 'want-digest': .want_digest
+ 'warning': .warning
+ 'www-authenticate': .www_authenticate
+ 'x-content-type-options': .x_content_type_options
+ 'x-dns-prefetch-control': .x_dns_prefetch_control
+ 'x-forwarded-for': .x_forwarded_for
+ 'x-forwarded-host': .x_forwarded_host
+ 'x-forwarded-proto': .x_forwarded_proto
+ 'x-frame-options': .x_frame_options
+ 'x-xss-protection': .x_xss_protection
+}
+
+// Header represents the key-value pairs in an HTTP header
+[noinit]
+pub struct Header {
+mut:
+	// values stored per original-cased key, e.g. 'Accept' -> ['a', 'b']
+	data map[string][]string
+	// map of lowercase header keys to their original keys
+	// in order of appearance
+	keys map[string][]string
+}
+
+// free releases the memory held by the Header's internal maps.
+// The Header must not be used after this is called.
+pub fn (mut h Header) free() {
+	unsafe {
+		h.data.free()
+		h.keys.free()
+	}
+}
+
+// HeaderConfig is a single common-header/value pair, as accepted by
+// new_header.
+pub struct HeaderConfig {
+	key CommonHeader
+	value string
+}
+
+// new_header creates a Header from the given key/value pairs.
+// Duplicate keys are appended (via add), not overwritten.
+pub fn new_header(kvs ...HeaderConfig) Header {
+	mut h := Header{
+		data: map[string][]string{}
+	}
+	for kv in kvs {
+		h.add(kv.key, kv.value)
+	}
+	return h
+}
+
+// new_header_from_map creates a Header containing one entry per pair
+// in kvs (equivalent to new_header() followed by add_map).
+pub fn new_header_from_map(kvs map[CommonHeader]string) Header {
+	mut header := new_header()
+	for key, value in kvs {
+		header.add(key, value)
+	}
+	return header
+}
+
+// new_custom_header_from_map creates a Header from string key value pairs.
+// Returns an error if any key is not a valid header token (see is_valid).
+pub fn new_custom_header_from_map(kvs map[string]string) ?Header {
+	mut h := new_header()
+	h.add_custom_map(kvs) ?
+	return h
+}
+
+// add appends a value to the header key.
+// Existing values for the key are kept; use set to replace them.
+pub fn (mut h Header) add(key CommonHeader, value string) {
+	k := key.str()
+	h.data[k] << value
+	h.add_key(k)
+}
+
+// add_custom appends a value to a custom header key. This function will
+// return an error if the key contains invalid header characters.
+// The key's original casing is preserved in data and tracked by add_key.
+pub fn (mut h Header) add_custom(key string, value string) ? {
+	is_valid(key) ?
+	h.data[key] << value
+	h.add_key(key)
+}
+
+// add_map appends the value of every pair in kvs to this Header,
+// keeping any values that were already stored.
+pub fn (mut h Header) add_map(kvs map[CommonHeader]string) {
+	for header_key, header_value in kvs {
+		h.add(header_key, header_value)
+	}
+}
+
+// add_custom_map appends the value of every pair in kvs to this Header.
+// Stops and returns an error at the first key that is not a valid
+// header token.
+pub fn (mut h Header) add_custom_map(kvs map[string]string) ? {
+	for header_key, header_value in kvs {
+		h.add_custom(header_key, header_value) ?
+	}
+}
+
+// set sets the key-value pair. This function will clear any other values
+// that exist for the CommonHeader.
+// Note: only the canonical key (key.str()) is replaced; values already
+// stored under a different casing of the same header remain in data.
+pub fn (mut h Header) set(key CommonHeader, value string) {
+	k := key.str()
+	h.data[k] = [value]
+	h.add_key(k)
+}
+
+// set_custom sets the key-value pair for a custom header key. This
+// function will clear any other values that exist for the header. This
+// function will return an error if the key contains invalid header
+// characters.
+// Note: only the exact key given is replaced; other casings of the same
+// header that are already stored remain in data.
+pub fn (mut h Header) set_custom(key string, value string) ? {
+	is_valid(key) ?
+	h.data[key] = [value]
+	h.add_key(key)
+}
+
+// delete deletes all values for a key.
+// Only the canonical casing (key.str()) is removed from the data map.
+pub fn (mut h Header) delete(key CommonHeader) {
+	h.delete_custom(key.str())
+}
+
+// delete_custom deletes all values stored under the exact custom key,
+// and removes that casing from the lowercase-key metadata (other
+// casings of the same header are left intact).
+pub fn (mut h Header) delete_custom(key string) {
+	h.data.delete(key)
+	lower := key.to_lower()
+	if lower in h.keys {
+		h.keys[lower] = h.keys[lower].filter(it != key)
+	}
+}
+
+// HeaderCoerceConfig controls coerce: when canonicalize is true the
+// surviving key is the canonical form instead of the first casing seen.
+pub struct HeaderCoerceConfig {
+	canonicalize bool
+}
+
+// coerce coerces data in the Header by joining keys that match
+// case-insensitively into one entry.
+// Values are merged in the order the casings first appeared; the
+// surviving key is either the first-seen casing or, with
+// `canonicalize: true`, the canonical form of the header name.
+pub fn (mut h Header) coerce(flags ...HeaderCoerceConfig) {
+	canon := flags.any(it.canonicalize)
+
+	for kl, data_keys in h.keys {
+		master_key := if canon { canonicalize(kl) } else { data_keys[0] }
+
+		// save master data
+		// (removed first so re-appending below keeps appearance order)
+		master_data := h.data[master_key]
+		h.data.delete(master_key)
+
+		for key in data_keys {
+			if key == master_key {
+				h.data[master_key] << master_data
+				continue
+			}
+			h.data[master_key] << h.data[key]
+			h.data.delete(key)
+		}
+		// only the merged key remains registered for this lowercase form
+		h.keys[kl] = [master_key]
+	}
+}
+
+// contains returns whether the header key exists in the map.
+// The check is case-insensitive (delegates to contains_custom).
+pub fn (h Header) contains(key CommonHeader) bool {
+	return h.contains_custom(key.str())
+}
+
+// HeaderQueryConfig controls lookups: `exact: true` matches the stored
+// key case-sensitively instead of the default case-insensitive lookup.
+pub struct HeaderQueryConfig {
+	exact bool
+}
+
+// contains_custom reports whether the custom header key is present.
+// With `exact: true` the stored keys are matched case-sensitively;
+// otherwise the lowercase key metadata is consulted.
+pub fn (h Header) contains_custom(key string, flags ...HeaderQueryConfig) bool {
+	return if flags.any(it.exact) { key in h.data } else { key.to_lower() in h.keys }
+}
+
+// get gets the first value for the CommonHeader, or none if the key
+// does not exist. Delegates to get_custom with a case-insensitive
+// lookup of the canonical key.
+pub fn (h Header) get(key CommonHeader) ?string {
+	return h.get_custom(key.str())
+}
+
+// get_custom returns the first value stored for the custom header key,
+// or none when there is no value. By default the lookup is
+// case-insensitive; pass `exact: true` to match the stored key exactly.
+pub fn (h Header) get_custom(key string, flags ...HeaderQueryConfig) ?string {
+	mut lookup_key := key
+	if !flags.any(it.exact) {
+		// resolve through the lowercase metadata to the first
+		// original-cased key that was registered
+		candidates := h.keys[key.to_lower()]
+		if candidates.len == 0 {
+			return none
+		}
+		lookup_key = candidates[0]
+	}
+	values := h.data[lookup_key]
+	if values.len == 0 {
+		return none
+	}
+	return values[0]
+}
+
+// starting_with returns the first stored header key that begins with
+// the given (case-sensitive) prefix, or none when no key matches.
+pub fn (h Header) starting_with(key string) ?string {
+	for stored_key in h.data.keys() {
+		if stored_key.starts_with(key) {
+			return stored_key
+		}
+	}
+	return none
+}
+
+// values gets all values for the CommonHeader, matching the key
+// case-insensitively (delegates to custom_values).
+pub fn (h Header) values(key CommonHeader) []string {
+	return h.custom_values(key.str())
+}
+
+// custom_values returns every value stored for the custom header key.
+// By default keys are matched case-insensitively; `exact: true`
+// restricts the lookup to the exact stored key.
+pub fn (h Header) custom_values(key string, flags ...HeaderQueryConfig) []string {
+	if flags.any(it.exact) {
+		return h.data[key]
+	}
+	// gather the values of every casing registered for this key
+	mut collected := []string{cap: 10}
+	for stored_key in h.keys[key.to_lower()] {
+		collected << h.data[stored_key]
+	}
+	return collected
+}
+
+// keys gets all header keys as strings, in their original casing.
+pub fn (h Header) keys() []string {
+	return h.data.keys()
+}
+
+// HeaderRenderConfig controls render: the HTTP version (v2_0 forces
+// lowercase keys), whether to coerce case-insensitive duplicates into
+// one key, and whether to canonicalize key casing.
+pub struct HeaderRenderConfig {
+	version Version
+	coerce bool
+	canonicalize bool
+}
+
+// render renders the Header into a string for use in sending HTTP
+// requests. All header lines will end in `\r\n`
+// With `coerce: true` all casings of a header are emitted under one
+// key; HTTP/2 always lowercases keys, and `canonicalize: true`
+// canonicalizes them (for HTTP/1.x).
+[manualfree]
+pub fn (h Header) render(flags HeaderRenderConfig) string {
+	// estimate ~48 bytes per header
+	mut sb := strings.new_builder(h.data.len * 48)
+	if flags.coerce {
+		// one output key per lowercase header; values from every casing
+		for kl, data_keys in h.keys {
+			key := if flags.version == .v2_0 {
+				kl
+			} else if flags.canonicalize {
+				canonicalize(kl)
+			} else {
+				data_keys[0]
+			}
+			for k in data_keys {
+				for v in h.data[k] {
+					sb.write_string(key)
+					sb.write_string(': ')
+					sb.write_string(v)
+					sb.write_string('\r\n')
+				}
+			}
+		}
+	} else {
+		// emit each stored casing as-is (possibly re-cased per version)
+		for k, vs in h.data {
+			key := if flags.version == .v2_0 {
+				k.to_lower()
+			} else if flags.canonicalize {
+				canonicalize(k.to_lower())
+			} else {
+				k
+			}
+			for v in vs {
+				sb.write_string(key)
+				sb.write_string(': ')
+				sb.write_string(v)
+				sb.write_string('\r\n')
+			}
+		}
+	}
+	res := sb.str()
+	unsafe { sb.free() }
+	return res
+}
+
+// join combines two Header structs into a new Header struct.
+// Neither input is modified; values from `other` are appended after
+// the values cloned from h.
+pub fn (h Header) join(other Header) Header {
+	mut combined := Header{
+		data: h.data.clone()
+		keys: h.keys.clone()
+	}
+	for k in other.keys() {
+		for v in other.custom_values(k, exact: true) {
+			combined.add_custom(k, v) or {
+				// panic because this should never fail:
+				// keys in `other` were validated when they were added
+				panic('unexpected error: $err')
+			}
+		}
+	}
+	return combined
+}
+
+// canonicalize canonicalizes an HTTP header key.
+// Known headers take their spelling from common_header_map; any other
+// key gets each '-'-separated part capitalized.
+// NOTE: Assumes sl is lowercase, since the caller usually already has
+// the lowercase key.
+fn canonicalize(sl string) string {
+	return if sl in http.common_header_map {
+		http.common_header_map[sl].str()
+	} else {
+		sl.split('-').map(it.capitalize()).join('-')
+	}
+}
+
+// add_key records the original casing of a header key under its
+// lowercase form, preserving first-appearance order and skipping
+// casings that are already registered.
+fn (mut h Header) add_key(key string) {
+	lower := key.to_lower()
+	if key !in h.keys[lower] {
+		h.keys[lower] << key
+	}
+}
+
+// HeaderKeyError is the error returned by is_valid for an invalid
+// header name: code 1 = invalid byte (stored in invalid_char),
+// code 2 = empty name.
+struct HeaderKeyError {
+	msg string
+	code int
+	header string
+	invalid_char byte
+}
+
+// is_valid checks if the header token contains all valid bytes.
+// Returns a HeaderKeyError with code 1 for a byte that is >= 128 or not
+// a token character, and code 2 for an empty header name.
+fn is_valid(header string) ? {
+	for _, c in header {
+		if int(c) >= 128 || !is_token(c) {
+			return IError(HeaderKeyError{
+				msg: "Invalid header key: '$header'"
+				code: 1
+				header: header
+				invalid_char: c
+			})
+		}
+	}
+	// checked after the loop: an empty name trivially passes the loop
+	if header.len == 0 {
+		return IError(HeaderKeyError{
+			msg: "Invalid header key: '$header'"
+			code: 2
+			header: header
+			invalid_char: 0
+		})
+	}
+}
+
+// is_token reports whether the byte is a valid HTTP header token
+// character (the tchar set: !#$%&'*+-.^_`|~, digits and letters).
+fn is_token(b byte) bool {
+	match b {
+		33, 35...39, 42, 43, 45, 46, 48...57, 65...90, 94...122, 124, 126 { return true }
+		else { return false }
+	}
+}
+
+// str returns the headers string as seen in HTTP/1.1 requests.
+// Key order is not guaranteed. Each line is terminated by `\r\n`.
+pub fn (h Header) str() string {
+	return h.render(version: .v1_1)
+}
+
+// parse_headers parses a newline delimited string into a Header struct.
+// Folded continuation lines (starting with a space or tab) are joined
+// onto the previous header's value with a single space.
+// Note: empty input, or input whose first line is a fold, reaches the
+// trailing add_custom with an empty key and therefore errors.
+fn parse_headers(s string) ?Header {
+	mut h := new_header()
+	mut last_key := ''
+	mut last_value := ''
+	for line in s.split_into_lines() {
+		if line.len == 0 {
+			// blank line ends the header section
+			break
+		}
+		// handle header fold
+		if line[0] == ` ` || line[0] == `\t` {
+			last_value += ' ${line.trim(' \t')}'
+			continue
+		} else if last_key != '' {
+			// a new key starts; commit the previous (possibly folded) one
+			h.add_custom(last_key, last_value) ?
+		}
+		last_key, last_value = parse_header(line) ?
+	}
+	// commit the final pending header
+	h.add_custom(last_key, last_value) ?
+	return h
+}
+
+// parse_header splits one "Key: value" line at the first colon,
+// trimming spaces and tabs from the start of the value.
+fn parse_header(s string) ?(string, string) {
+	idx := s.index(':') or { return error('missing colon in header') }
+	// TODO: parse quoted text according to the RFC
+	return s[..idx], s[idx + 1..].trim(' \t')
+}
diff --git a/v_windows/v/vlib/net/http/header_test.v b/v_windows/v/vlib/net/http/header_test.v
new file mode 100644
index 0000000..4f5f2ce
--- /dev/null
+++ b/v_windows/v/vlib/net/http/header_test.v
@@ -0,0 +1,387 @@
+module http
+
+fn test_header_new() {
+ h := new_header(HeaderConfig{ key: .accept, value: 'nothing' },
+ key: .expires
+ value: 'yesterday'
+ )
+ assert h.contains(.accept)
+ assert h.contains(.expires)
+ accept := h.get(.accept) or { '' }
+ expires := h.get(.expires) or { '' }
+ assert accept == 'nothing'
+ assert expires == 'yesterday'
+}
+
+fn test_header_invalid_key() {
+ mut h := new_header()
+ h.add_custom('space is invalid', ':(') or { return }
+ panic('should have returned')
+}
+
+fn test_header_adds_multiple() {
+ mut h := new_header()
+ h.add(.accept, 'one')
+ h.add(.accept, 'two')
+
+ assert h.values(.accept) == ['one', 'two']
+}
+
+fn test_header_get() ? {
+ mut h := new_header(key: .dnt, value: 'one')
+ h.add_custom('dnt', 'two') ?
+ dnt := h.get_custom('dnt') or { '' }
+ exact := h.get_custom('dnt', exact: true) or { '' }
+ assert dnt == 'one'
+ assert exact == 'two'
+}
+
+fn test_header_set() ? {
+ mut h := new_header(HeaderConfig{ key: .dnt, value: 'one' },
+ key: .dnt
+ value: 'two'
+ )
+ assert h.values(.dnt) == ['one', 'two']
+ h.set_custom('DNT', 'three') ?
+ assert h.values(.dnt) == ['three']
+}
+
+fn test_header_delete() {
+ mut h := new_header(HeaderConfig{ key: .dnt, value: 'one' },
+ key: .dnt
+ value: 'two'
+ )
+ assert h.values(.dnt) == ['one', 'two']
+ h.delete(.dnt)
+ assert h.values(.dnt) == []
+}
+
+fn test_header_delete_not_existing() {
+ mut h := new_header()
+ assert h.data.len == 0
+ assert h.keys.len == 0
+ h.delete(.dnt)
+ assert h.data.len == 0
+ assert h.keys.len == 0
+}
+
+fn test_custom_header() ? {
+ mut h := new_header()
+ h.add_custom('AbC', 'dEf') ?
+ h.add_custom('aBc', 'GhI') ?
+ assert h.custom_values('AbC', exact: true) == ['dEf']
+ assert h.custom_values('aBc', exact: true) == ['GhI']
+ assert h.custom_values('ABC') == ['dEf', 'GhI']
+ assert h.custom_values('abc') == ['dEf', 'GhI']
+ assert h.keys() == ['AbC', 'aBc']
+ h.delete_custom('AbC')
+ h.delete_custom('aBc')
+
+ h.add_custom('abc', 'def') ?
+ assert h.custom_values('abc') == ['def']
+ assert h.custom_values('ABC') == ['def']
+ assert h.keys() == ['abc']
+ h.delete_custom('abc')
+
+ h.add_custom('accEPT', '*/*') ?
+ assert h.custom_values('ACCept') == ['*/*']
+ assert h.values(.accept) == ['*/*']
+ assert h.keys() == ['accEPT']
+}
+
+fn test_contains_custom() ? {
+ mut h := new_header()
+ h.add_custom('Hello', 'world') ?
+ assert h.contains_custom('hello')
+ assert h.contains_custom('HELLO')
+ assert h.contains_custom('Hello', exact: true)
+ assert h.contains_custom('hello', exact: true) == false
+ assert h.contains_custom('HELLO', exact: true) == false
+}
+
+fn test_get_custom() ? {
+ mut h := new_header()
+ h.add_custom('Hello', 'world') ?
+ assert h.get_custom('hello') ? == 'world'
+ assert h.get_custom('HELLO') ? == 'world'
+ assert h.get_custom('Hello', exact: true) ? == 'world'
+ if _ := h.get_custom('hello', exact: true) {
+ // should be none
+ assert false
+ }
+ if _ := h.get_custom('HELLO', exact: true) {
+ // should be none
+ assert false
+ }
+}
+
+fn test_starting_with() ? {
+ mut h := new_header()
+ h.add_custom('Hello-1', 'world') ?
+ h.add_custom('Hello-21', 'world') ?
+ assert h.starting_with('Hello-') ? == 'Hello-1'
+ assert h.starting_with('Hello-2') ? == 'Hello-21'
+}
+
+fn test_custom_values() ? {
+ mut h := new_header()
+ h.add_custom('Hello', 'world') ?
+ assert h.custom_values('hello') == ['world']
+ assert h.custom_values('HELLO') == ['world']
+ assert h.custom_values('Hello', exact: true) == ['world']
+ assert h.custom_values('hello', exact: true) == []
+ assert h.custom_values('HELLO', exact: true) == []
+}
+
+fn test_coerce() ? {
+ mut h := new_header()
+ h.add_custom('accept', 'foo') ?
+ h.add(.accept, 'bar')
+ assert h.values(.accept) == ['foo', 'bar']
+ assert h.keys().len == 2
+
+ h.coerce()
+ assert h.values(.accept) == ['foo', 'bar']
+ assert h.keys() == ['accept'] // takes the first occurrence
+}
+
+fn test_coerce_canonicalize() ? {
+ mut h := new_header()
+ h.add_custom('accept', 'foo') ?
+ h.add(.accept, 'bar')
+ assert h.values(.accept) == ['foo', 'bar']
+ assert h.keys().len == 2
+
+ h.coerce(canonicalize: true)
+ assert h.values(.accept) == ['foo', 'bar']
+ assert h.keys() == ['Accept'] // canonicalize header
+}
+
+fn test_coerce_custom() ? {
+ mut h := new_header()
+ h.add_custom('Hello', 'foo') ?
+ h.add_custom('hello', 'bar') ?
+ h.add_custom('HELLO', 'baz') ?
+ assert h.custom_values('hello') == ['foo', 'bar', 'baz']
+ assert h.keys().len == 3
+
+ h.coerce()
+ assert h.custom_values('hello') == ['foo', 'bar', 'baz']
+ assert h.keys() == ['Hello'] // takes the first occurrence
+}
+
+fn test_coerce_canonicalize_custom() ? {
+ mut h := new_header()
+ h.add_custom('foo-BAR', 'foo') ?
+ h.add_custom('FOO-bar', 'bar') ?
+ assert h.custom_values('foo-bar') == ['foo', 'bar']
+ assert h.keys().len == 2
+
+ h.coerce(canonicalize: true)
+ assert h.custom_values('foo-bar') == ['foo', 'bar']
+ assert h.keys() == ['Foo-Bar'] // capitalizes the header
+}
+
+fn test_render_version() ? {
+ mut h := new_header()
+ h.add_custom('accept', 'foo') ?
+ h.add_custom('Accept', 'bar') ?
+ h.add(.accept, 'baz')
+
+ s1_0 := h.render(version: .v1_0)
+ assert s1_0.contains('accept: foo\r\n')
+ assert s1_0.contains('Accept: bar\r\n')
+ assert s1_0.contains('Accept: baz\r\n')
+
+ s1_1 := h.render(version: .v1_1)
+ assert s1_1.contains('accept: foo\r\n')
+ assert s1_1.contains('Accept: bar\r\n')
+ assert s1_1.contains('Accept: baz\r\n')
+
+ s2_0 := h.render(version: .v2_0)
+ assert s2_0.contains('accept: foo\r\n')
+ assert s2_0.contains('accept: bar\r\n')
+ assert s2_0.contains('accept: baz\r\n')
+}
+
+fn test_render_coerce() ? {
+ mut h := new_header()
+ h.add_custom('accept', 'foo') ?
+ h.add_custom('Accept', 'bar') ?
+ h.add(.accept, 'baz')
+ h.add(.host, 'host')
+
+ s1_0 := h.render(version: .v1_1, coerce: true)
+ assert s1_0.contains('accept: foo\r\n')
+ assert s1_0.contains('accept: bar\r\n')
+ assert s1_0.contains('accept: baz\r\n')
+ assert s1_0.contains('Host: host\r\n')
+
+ s1_1 := h.render(version: .v1_1, coerce: true)
+ assert s1_1.contains('accept: foo\r\n')
+ assert s1_1.contains('accept: bar\r\n')
+ assert s1_1.contains('accept: baz\r\n')
+ assert s1_1.contains('Host: host\r\n')
+
+ s2_0 := h.render(version: .v2_0, coerce: true)
+ assert s2_0.contains('accept: foo\r\n')
+ assert s2_0.contains('accept: bar\r\n')
+ assert s2_0.contains('accept: baz\r\n')
+ assert s2_0.contains('host: host\r\n')
+}
+
+fn test_render_canonicalize() ? {
+ mut h := new_header()
+ h.add_custom('accept', 'foo') ?
+ h.add_custom('Accept', 'bar') ?
+ h.add(.accept, 'baz')
+ h.add(.host, 'host')
+
+ s1_0 := h.render(version: .v1_1, canonicalize: true)
+ assert s1_0.contains('Accept: foo\r\n')
+ assert s1_0.contains('Accept: bar\r\n')
+ assert s1_0.contains('Accept: baz\r\n')
+ assert s1_0.contains('Host: host\r\n')
+
+ s1_1 := h.render(version: .v1_1, canonicalize: true)
+ assert s1_1.contains('Accept: foo\r\n')
+ assert s1_1.contains('Accept: bar\r\n')
+ assert s1_1.contains('Accept: baz\r\n')
+ assert s1_1.contains('Host: host\r\n')
+
+ s2_0 := h.render(version: .v2_0, canonicalize: true)
+ assert s2_0.contains('accept: foo\r\n')
+ assert s2_0.contains('accept: bar\r\n')
+ assert s2_0.contains('accept: baz\r\n')
+ assert s2_0.contains('host: host\r\n')
+}
+
+fn test_render_coerce_canonicalize() ? {
+ mut h := new_header()
+ h.add_custom('accept', 'foo') ?
+ h.add_custom('Accept', 'bar') ?
+ h.add(.accept, 'baz')
+ h.add(.host, 'host')
+
+ s1_0 := h.render(version: .v1_1, coerce: true, canonicalize: true)
+ assert s1_0.contains('Accept: foo\r\n')
+ assert s1_0.contains('Accept: bar\r\n')
+ assert s1_0.contains('Accept: baz\r\n')
+ assert s1_0.contains('Host: host\r\n')
+
+ s1_1 := h.render(version: .v1_1, coerce: true, canonicalize: true)
+ assert s1_1.contains('Accept: foo\r\n')
+ assert s1_1.contains('Accept: bar\r\n')
+ assert s1_1.contains('Accept: baz\r\n')
+ assert s1_1.contains('Host: host\r\n')
+
+ s2_0 := h.render(version: .v2_0, coerce: true, canonicalize: true)
+ assert s2_0.contains('accept: foo\r\n')
+ assert s2_0.contains('accept: bar\r\n')
+ assert s2_0.contains('accept: baz\r\n')
+ assert s2_0.contains('host: host\r\n')
+}
+
+fn test_str() ? {
+	mut h := new_header()
+	h.add(.accept, 'text/html')
+	h.add_custom('Accept', 'image/jpeg') ?
+	h.add_custom('X-custom', 'Hello') ?
+
+	// key order is not guaranteed, so accept either ordering.
+	// FIX: the second alternative previously read 'Accept:text/html'
+	// (no space after the colon); render() always writes ': ', so that
+	// branch could never match and the test failed spuriously whenever
+	// map iteration produced the X-custom-first order.
+	assert h.str() == 'Accept: text/html\r\nAccept: image/jpeg\r\nX-custom: Hello\r\n'
+		|| h.str() == 'X-custom: Hello\r\nAccept: text/html\r\nAccept: image/jpeg\r\n'
+}
+
+fn test_header_from_map() ? {
+ h := new_header_from_map({
+ CommonHeader.accept: 'nothing'
+ CommonHeader.expires: 'yesterday'
+ })
+ assert h.contains(.accept)
+ assert h.contains(.expires)
+ assert h.get(.accept) or { '' } == 'nothing'
+ assert h.get(.expires) or { '' } == 'yesterday'
+}
+
+fn test_custom_header_from_map() ? {
+ h := new_custom_header_from_map({
+ 'Server': 'VWeb'
+ 'foo': 'bar'
+ }) ?
+ assert h.contains_custom('server')
+ assert h.contains_custom('foo')
+ assert h.get_custom('server') or { '' } == 'VWeb'
+ assert h.get_custom('foo') or { '' } == 'bar'
+}
+
+fn test_header_join() ? {
+ h1 := new_header_from_map({
+ CommonHeader.accept: 'nothing'
+ CommonHeader.expires: 'yesterday'
+ })
+ h2 := new_custom_header_from_map({
+ 'Server': 'VWeb'
+ 'foo': 'bar'
+ }) ?
+ h3 := h1.join(h2)
+ // h1 is unchanged
+ assert h1.contains(.accept)
+ assert h1.contains(.expires)
+ assert !h1.contains_custom('Server')
+ assert !h1.contains_custom('foo')
+ // h2 is unchanged
+ assert !h2.contains(.accept)
+ assert !h2.contains(.expires)
+ assert h2.contains_custom('Server')
+ assert h2.contains_custom('foo')
+ // h3 has all four headers
+ assert h3.contains(.accept)
+ assert h3.contains(.expires)
+ assert h3.contains_custom('Server')
+ assert h3.contains_custom('foo')
+}
+
+fn parse_headers_test(s string, expected map[string]string) ? {
+ assert parse_headers(s) ? == new_custom_header_from_map(expected) ?
+}
+
+fn test_parse_headers() ? {
+ parse_headers_test('foo: bar', {
+ 'foo': 'bar'
+ }) ?
+ parse_headers_test('foo: \t bar', {
+ 'foo': 'bar'
+ }) ?
+ parse_headers_test('foo: bar\r\n\tbaz', {
+ 'foo': 'bar baz'
+ }) ?
+ parse_headers_test('foo: bar \r\n\tbaz\r\n buzz', {
+ 'foo': 'bar baz buzz'
+ }) ?
+ parse_headers_test('foo: bar\r\nbar:baz', {
+ 'foo': 'bar'
+ 'bar': 'baz'
+ }) ?
+ parse_headers_test('foo: bar\r\nbar:baz\r\n', {
+ 'foo': 'bar'
+ 'bar': 'baz'
+ }) ?
+ parse_headers_test('foo: bar\r\nbar:baz\r\n\r\n', {
+ 'foo': 'bar'
+ 'bar': 'baz'
+ }) ?
+ assert parse_headers('foo: bar\r\nfoo:baz') ?.custom_values('foo') == ['bar', 'baz']
+
+ if x := parse_headers(' oops: oh no') {
+ return error('should have errored, but got $x')
+ }
+}
+
+fn test_set_cookie() {
+ // multiple Set-Cookie headers should be sent when rendered
+ mut h := new_header()
+ h.add(.set_cookie, 'foo')
+ h.add(.set_cookie, 'bar')
+ assert h.render() == 'Set-Cookie: foo\r\nSet-Cookie: bar\r\n'
+}
diff --git a/v_windows/v/vlib/net/http/http.v b/v_windows/v/vlib/net/http/http.v
new file mode 100644
index 0000000..7bdc5e2
--- /dev/null
+++ b/v_windows/v/vlib/net/http/http.v
@@ -0,0 +1,186 @@
+// Copyright (c) 2019-2021 Alexander Medvednikov. All rights reserved.
+// Use of this source code is governed by an MIT license
+// that can be found in the LICENSE file.
+module http
+
+import net.urllib
+
+const (
+ // maximum number of redirects Request.do will follow before erroring
+ max_redirects = 4
+ // Content-Type used by the plain-text helpers (post, put, patch)
+ content_type_default = 'text/plain'
+ // NOTE(review): bufsize is not referenced in this file; presumably a
+ // read-buffer size used by a platform backend — confirm before removing
+ bufsize = 1536
+)
+
+// FetchConfig holds configurations of fetch
+pub struct FetchConfig {
+pub mut:
+ url string // target URL; must be non-empty and parseable
+ method Method // HTTP method; defaults to .get (enum zero value)
+ header Header // extra request headers
+ data string // request body
+ params map[string]string // query parameters appended to the URL
+ cookies map[string]string // cookies sent via the Cookie header
+ user_agent string = 'v.http'
+ verbose bool
+}
+
+// new_request creates a Request with the given method, URL and body.
+// For GET requests `data` is appended to the URL as a query string.
+// NOTE(review): the GET branch always appends '?' + data without escaping
+// and without checking whether url_ already contains a query — callers
+// must pass pre-encoded data; confirm before relying on this for raw input.
+pub fn new_request(method Method, url_ string, data string) ?Request {
+ url := if method == .get { url_ + '?' + data } else { url_ }
+ // println('new req() method=$method url="$url" dta="$data"')
+ return Request{
+ method: method
+ url: url
+ data: data
+ /*
+ headers: {
+ 'Accept-Encoding': 'compress'
+ }
+ */
+ }
+}
+
+// get sends a GET HTTP request to the URL.
+// It returns an error if the URL is empty/unparseable or the request fails.
+pub fn get(url string) ?Response {
+ return fetch(method: .get, url: url)
+}
+
+// post sends a POST HTTP request to the URL with a string data body,
+// using the default 'text/plain' Content-Type.
+pub fn post(url string, data string) ?Response {
+ return fetch(
+ method: .post
+ url: url
+ data: data
+ header: new_header(key: .content_type, value: http.content_type_default)
+ )
+}
+
+// post_json sends a POST HTTP request to the URL with a JSON data body
+// (Content-Type: application/json). `data` is sent as-is; it is not
+// validated or serialized here.
+pub fn post_json(url string, data string) ?Response {
+ return fetch(
+ method: .post
+ url: url
+ data: data
+ header: new_header(key: .content_type, value: 'application/json')
+ )
+}
+
+// post_form sends a POST HTTP request to the URL with x-www-form-urlencoded
+// data; keys and values are percent-encoded by url_encode_form_data.
+pub fn post_form(url string, data map[string]string) ?Response {
+ return fetch(
+ method: .post
+ url: url
+ header: new_header(key: .content_type, value: 'application/x-www-form-urlencoded')
+ data: url_encode_form_data(data)
+ )
+}
+
+// put sends a PUT HTTP request to the URL with a string data body,
+// using the default 'text/plain' Content-Type.
+pub fn put(url string, data string) ?Response {
+ return fetch(
+ method: .put
+ url: url
+ data: data
+ header: new_header(key: .content_type, value: http.content_type_default)
+ )
+}
+
+// patch sends a PATCH HTTP request to the URL with a string data body,
+// using the default 'text/plain' Content-Type.
+pub fn patch(url string, data string) ?Response {
+ return fetch(
+ method: .patch
+ url: url
+ data: data
+ header: new_header(key: .content_type, value: http.content_type_default)
+ )
+}
+
+// head sends a HEAD HTTP request to the URL (no body is sent or expected).
+pub fn head(url string) ?Response {
+ return fetch(method: .head, url: url)
+}
+
+// delete sends a DELETE HTTP request to the URL (without a body).
+pub fn delete(url string) ?Response {
+ return fetch(method: .delete, url: url)
+}
+
+// fetch sends an HTTP request to the URL with the given method and configurations.
+// It validates the URL, merges config.params into its query string, builds a
+// Request and performs it via Request.do (which follows redirects).
+// Returns an error for an empty/invalid URL or a failed request.
+pub fn fetch(config FetchConfig) ?Response {
+ if config.url == '' {
+ return error('http.fetch: empty url')
+ }
+ url := build_url_from_fetch(config) or { return error('http.fetch: invalid url $config.url') }
+ req := Request{
+ method: config.method
+ url: url
+ data: config.data
+ header: config.header
+ cookies: config.cookies
+ user_agent: config.user_agent
+ user_ptr: 0
+ verbose: config.verbose
+ }
+ res := req.do() ?
+ return res
+}
+
+// get_text sends a GET HTTP request to the URL and returns the text content
+// of the response. Any failure (invalid URL, network error, etc.) is
+// swallowed and an empty string is returned — callers that need to
+// distinguish errors should use get()/fetch() instead.
+pub fn get_text(url string) string {
+ resp := fetch(url: url, method: .get) or { return '' }
+ return resp.text
+}
+
+// url_encode_form_data converts mapped data to an URL encoded string.
+// Each key and value is percent-encoded and the pairs are joined with '&'.
+pub fn url_encode_form_data(data map[string]string) string {
+ mut encoded_pairs := []string{cap: data.len}
+ for raw_key, raw_value in data {
+ encoded_pairs << urllib.query_escape(raw_key) + '=' + urllib.query_escape(raw_value)
+ }
+ return encoded_pairs.join('&')
+}
+
+// fetch_with_method is a deprecated shim: it overrides _config.method with
+// `method` and delegates to fetch().
+[deprecated: 'use fetch()']
+fn fetch_with_method(method Method, _config FetchConfig) ?Response {
+ mut config := _config
+ config.method = method
+ return fetch(config)
+}
+
+// build_url_from_fetch appends the key/value pairs from config.params to the
+// query string of config.url and returns the resulting URL string.
+// A pre-existing query string in config.url is preserved; the configured
+// params are appended after it. Returns an error if config.url is unparseable.
+fn build_url_from_fetch(config FetchConfig) ?string {
+ mut url := urllib.parse(config.url) ?
+ if config.params.len == 0 {
+ return url.str()
+ }
+ mut pieces := []string{cap: config.params.len}
+ for key, val in config.params {
+ // NOTE(review): keys and values are not query-escaped here; callers
+ // must pass pre-encoded params — confirm before passing raw input
+ pieces << '$key=$val'
+ }
+ mut query := pieces.join('&')
+ // bugfix: was `> 1`, which silently dropped a pre-existing
+ // single-character query string; any non-empty raw query must be kept
+ if url.raw_query.len > 0 {
+ query = url.raw_query + '&' + query
+ }
+ url.raw_query = query
+ return url.str()
+}
+
+// unescape_url is deprecated, use urllib.query_unescape() instead.
+// Calling it always panics with a migration hint.
+pub fn unescape_url(s string) string {
+ panic('http.unescape_url() was replaced with urllib.query_unescape()')
+}
+
+// escape_url is deprecated, use urllib.query_escape() instead.
+// Calling it always panics with a migration hint.
+pub fn escape_url(s string) string {
+ panic('http.escape_url() was replaced with urllib.query_escape()')
+}
+
+// unescape is deprecated, use urllib.query_unescape() instead.
+// (The original comment pointed at query_escape(), which is the inverse
+// operation.) Calling it always panics with a migration hint.
+pub fn unescape(s string) string {
+ panic('http.unescape() was replaced with http.unescape_url()')
+}
+
+// escape is deprecated, use urllib.query_escape() instead.
+// (The original comment pointed at query_unescape(), which is the inverse
+// operation.) Calling it always panics with a migration hint.
+pub fn escape(s string) string {
+ panic('http.escape() was replaced with http.escape_url()')
+}
diff --git a/v_windows/v/vlib/net/http/http_httpbin_test.v b/v_windows/v/vlib/net/http/http_httpbin_test.v
new file mode 100644
index 0000000..a3ddccc
--- /dev/null
+++ b/v_windows/v/vlib/net/http/http_httpbin_test.v
@@ -0,0 +1,95 @@
+module http
+
+// internal tests have access to *everything in the module*
+import json
+
+// HttpbinResponseBody mirrors the JSON body returned by httpbin.org
+// endpoints (args, form, headers, etc.) for json.decode in these tests.
+struct HttpbinResponseBody {
+ args map[string]string
+ data string
+ files map[string]string
+ form map[string]string
+ headers map[string]string
+ json map[string]string
+ origin string
+ url string
+}
+
+// http_fetch_mock performs one fetch per HTTP method against the matching
+// httpbin.org echo endpoint and collects the responses.
+// NOTE(review): despite the name this is not a mock — it does real network
+// I/O against httpbin.org; the callers gate on `$if !network ?`.
+fn http_fetch_mock(_methods []string, _config FetchConfig) ?[]Response {
+ url := 'https://httpbin.org/'
+ methods := if _methods.len == 0 { ['GET', 'POST', 'PATCH', 'PUT', 'DELETE'] } else { _methods }
+ mut config := _config
+ mut result := []Response{}
+ // Note: httpbin doesn't support head
+ for method in methods {
+ lmethod := method.to_lower()
+ config.method = method_from_str(method)
+ res := fetch(FetchConfig{ ...config, url: url + lmethod }) ?
+ // TODO
+ // body := json.decode(HttpbinResponseBody,res.text)?
+ result << res
+ }
+ return result
+}
+
+// test_http_fetch_bare checks that a default FetchConfig yields 200 OK for
+// every default method (network-gated).
+fn test_http_fetch_bare() {
+ $if !network ? {
+ return
+ }
+ responses := http_fetch_mock([], FetchConfig{}) or { panic(err) }
+ for response in responses {
+ assert response.status() == .ok
+ }
+}
+
+// test_http_fetch_with_data checks that the request body round-trips through
+// httpbin's echo endpoints for body-carrying methods (network-gated).
+fn test_http_fetch_with_data() {
+ $if !network ? {
+ return
+ }
+ responses := http_fetch_mock(['POST', 'PUT', 'PATCH', 'DELETE'],
+ data: 'hello world'
+ ) or { panic(err) }
+ for response in responses {
+ payload := json.decode(HttpbinResponseBody, response.text) or { panic(err) }
+ assert payload.data == 'hello world'
+ }
+}
+
+// test_http_fetch_with_params checks that requests carrying query params
+// still succeed; echo verification of the params is left as TODO
+// (network-gated).
+fn test_http_fetch_with_params() {
+ $if !network ? {
+ return
+ }
+ responses := http_fetch_mock([],
+ params: {
+ 'a': 'b'
+ 'c': 'd'
+ }
+ ) or { panic(err) }
+ for response in responses {
+ // payload := json.decode(HttpbinResponseBody,response.text) or {
+ // panic(err)
+ // }
+ assert response.status() == .ok
+ // TODO
+ // assert payload.args['a'] == 'b'
+ // assert payload.args['c'] == 'd'
+ }
+}
+
+// test_http_fetch_with_headers checks that requests carrying a custom header
+// still succeed; echo verification of the header is left as TODO
+// (network-gated).
+fn test_http_fetch_with_headers() ? {
+ $if !network ? {
+ return
+ }
+ mut header := new_header()
+ header.add_custom('Test-Header', 'hello world') ?
+ responses := http_fetch_mock([],
+ header: header
+ ) or { panic(err) }
+ for response in responses {
+ // payload := json.decode(HttpbinResponseBody,response.text) or {
+ // panic(err)
+ // }
+ assert response.status() == .ok
+ // TODO
+ // assert payload.headers['Test-Header'] == 'hello world'
+ }
+}
diff --git a/v_windows/v/vlib/net/http/http_test.v b/v_windows/v/vlib/net/http/http_test.v
new file mode 100644
index 0000000..8b68073
--- /dev/null
+++ b/v_windows/v/vlib/net/http/http_test.v
@@ -0,0 +1,56 @@
+import net.http
+
+// test_http_get checks get_text against a live endpoint (network-gated).
+// NOTE(review): asserts a hard-coded version string served by vlang.io —
+// brittle if the server content changes.
+fn test_http_get() {
+ $if !network ? {
+ return
+ }
+ assert http.get_text('https://vlang.io/version') == '0.1.5'
+ println('http ok')
+}
+
+// test_http_get_from_vlang_utc_now checks http.get over both plain HTTP and
+// HTTPS against a live timestamp endpoint (network-gated).
+fn test_http_get_from_vlang_utc_now() {
+ $if !network ? {
+ return
+ }
+ urls := ['http://vlang.io/utc_now', 'https://vlang.io/utc_now']
+ for url in urls {
+ println('Test getting current time from $url by http.get')
+ res := http.get(url) or { panic(err) }
+ assert res.status() == .ok
+ assert res.text.len > 0
+ // sanity: returned unix time must be after 2019-08-21
+ assert res.text.int() > 1566403696
+ println('Current time is: $res.text.int()')
+ }
+}
+
+// test_public_servers smoke-tests http.get against well-known public hosts,
+// covering both HTTP and HTTPS and therefore redirects and TLS
+// (network-gated).
+fn test_public_servers() {
+ $if !network ? {
+ return
+ }
+ urls := [
+ 'http://github.com/robots.txt',
+ 'http://google.com/robots.txt',
+ 'https://github.com/robots.txt',
+ 'https://google.com/robots.txt',
+ // 'http://yahoo.com/robots.txt',
+ // 'https://yahoo.com/robots.txt',
+ ]
+ for url in urls {
+ println('Testing http.get on public url: $url ')
+ res := http.get(url) or { panic(err) }
+ assert res.status() == .ok
+ assert res.text.len > 0
+ }
+}
+
+// test_relative_redirects would verify that relative Location redirects are
+// followed; it is currently disabled unconditionally (both branches return)
+// because httpbin's relative redirects were broken at the time.
+fn test_relative_redirects() {
+ $if !network ? {
+ return
+ } $else {
+ return
+ } // tempfix periodic: httpbin relative redirects are broken
+ res := http.get('https://httpbin.org/relative-redirect/3?abc=xyz') or { panic(err) }
+ assert res.status() == .ok
+ assert res.text.len > 0
+ assert res.text.contains('"abc": "xyz"')
+}
diff --git a/v_windows/v/vlib/net/http/method.v b/v_windows/v/vlib/net/http/method.v
new file mode 100644
index 0000000..91c93e1
--- /dev/null
+++ b/v_windows/v/vlib/net/http/method.v
@@ -0,0 +1,48 @@
+// Copyright (c) 2019-2021 Alexander Medvednikov. All rights reserved.
+// Use of this source code is governed by an MIT license
+// that can be found in the LICENSE file.
+module http
+
+// The methods listed here are some of the most used ones, ordered by
+// commonality. A comprehensive list is available at:
+// https://www.iana.org/assignments/http-methods/http-methods.xhtml
+// Each variant maps 1:1 to its uppercase token via str()/method_from_str().
+// The enum zero value (.get) is the default method everywhere in this module.
+pub enum Method {
+ get
+ post
+ put
+ head
+ delete
+ options
+ trace
+ connect
+ patch
+}
+
+// str returns the uppercase HTTP token for the method, e.g. 'GET'.
+pub fn (m Method) str() string {
+ return match m {
+ .get { 'GET' }
+ .post { 'POST' }
+ .put { 'PUT' }
+ .head { 'HEAD' }
+ .delete { 'DELETE' }
+ .options { 'OPTIONS' }
+ .trace { 'TRACE' }
+ .connect { 'CONNECT' }
+ .patch { 'PATCH' }
+ }
+}
+
+// method_from_str parses an uppercase HTTP method token into a Method.
+// Unrecognized (including lowercase) tokens silently fall back to .get;
+// parse_request relies on this never erroring, so changing the fallback
+// would be a breaking change.
+pub fn method_from_str(m string) Method {
+ return match m {
+ 'GET' { Method.get }
+ 'POST' { Method.post }
+ 'PUT' { Method.put }
+ 'HEAD' { Method.head }
+ 'DELETE' { Method.delete }
+ 'OPTIONS' { Method.options }
+ 'TRACE' { Method.trace }
+ 'CONNECT' { Method.connect }
+ 'PATCH' { Method.patch }
+ else { Method.get } // should we default to GET?
+ }
+}
diff --git a/v_windows/v/vlib/net/http/request.v b/v_windows/v/vlib/net/http/request.v
new file mode 100644
index 0000000..4664659
--- /dev/null
+++ b/v_windows/v/vlib/net/http/request.v
@@ -0,0 +1,324 @@
+// Copyright (c) 2019-2021 Alexander Medvednikov. All rights reserved.
+// Use of this source code is governed by an MIT license
+// that can be found in the LICENSE file.
+module http
+
+import io
+import net
+import net.urllib
+import strings
+import time
+
+// Request holds information about an HTTP request (either received by
+// a server or to be sent by a client)
+pub struct Request {
+pub mut:
+ version Version = .v1_1
+ method Method
+ header Header
+ cookies map[string]string
+ data string // request body
+ url string
+ user_agent string = 'v.http'
+ verbose bool
+ user_ptr voidptr // opaque pointer for callers; not used by this module
+ // NOT implemented for ssl connections
+ // time = -1 for no timeout
+ // NOTE(review): initialized from time.second, so presumably nanoseconds —
+ // confirm against net's set_read_timeout contract
+ read_timeout i64 = 30 * time.second
+ write_timeout i64 = 30 * time.second
+}
+
+// free releases the memory owned by the request's header.
+fn (mut req Request) free() {
+ unsafe { req.header.free() }
+}
+
+// add_header adds the key and value of an HTTP request header
+// To add a custom header, use add_custom_header
+pub fn (mut req Request) add_header(key CommonHeader, val string) {
+ req.header.add(key, val)
+}
+
+// add_custom_header adds the key and value of an HTTP request header
+// This method may fail if the key contains characters that are not permitted
+pub fn (mut req Request) add_custom_header(key string, val string) ? {
+ return req.header.add_custom(key, val)
+}
+
+// do will send the HTTP request and returns `http.Response` as soon as the response is recevied.
+// It follows 3xx redirects (301/302/303/307/308) up to max_redirects times,
+// resolving relative Location headers against the original URL.
+// NOTE(review): redirects are re-issued with the original method and body —
+// 303 semantics (switch to GET) are not implemented; confirm if needed.
+pub fn (req &Request) do() ?Response {
+ mut url := urllib.parse(req.url) or { return error('http.Request.do: invalid url $req.url') }
+ mut rurl := url
+ mut resp := Response{}
+ mut no_redirects := 0
+ for {
+ if no_redirects == max_redirects {
+ return error('http.request.do: maximum number of redirects reached ($max_redirects)')
+ }
+ qresp := req.method_and_url_to_response(req.method, rurl) ?
+ resp = qresp
+ if resp.status() !in [.moved_permanently, .found, .see_other, .temporary_redirect,
+ .permanent_redirect,
+ ] {
+ break
+ }
+ // follow any redirects
+ mut redirect_url := resp.header.get(.location) or { '' }
+ // a Location beginning with '/' is relative to the original host
+ if redirect_url.len > 0 && redirect_url[0] == `/` {
+ url.set_path(redirect_url) or {
+ return error('http.request.do: invalid path in redirect: "$redirect_url"')
+ }
+ redirect_url = url.str()
+ }
+ qrurl := urllib.parse(redirect_url) or {
+ return error('http.request.do: invalid URL in redirect "$redirect_url"')
+ }
+ rurl = qrurl
+ no_redirects++
+ }
+ return resp
+}
+
+// method_and_url_to_response dispatches one request to the scheme-specific
+// backend: ssl_do for https (default port 443) or http_do for http
+// (default port 80). Any other scheme is an error.
+fn (req &Request) method_and_url_to_response(method Method, url urllib.URL) ?Response {
+ host_name := url.hostname()
+ scheme := url.scheme
+ p := url.escaped_path().trim_left('/')
+ path := if url.query().len > 0 { '/$p?$url.query().encode()' } else { '/$p' }
+ mut nport := url.port().int()
+ if nport == 0 {
+ if scheme == 'http' {
+ nport = 80
+ }
+ if scheme == 'https' {
+ nport = 443
+ }
+ }
+ // println('fetch $method, $scheme, $host_name, $nport, $path ')
+ if scheme == 'https' {
+ // println('ssl_do( $nport, $method, $host_name, $path )')
+ res := req.ssl_do(nport, method, host_name, path) ?
+ return res
+ } else if scheme == 'http' {
+ // println('http_do( $nport, $method, $host_name, $path )')
+ res := req.http_do('$host_name:$nport', method, path) ?
+ return res
+ }
+ return error('http.request.method_and_url_to_response: unsupported scheme: "$scheme"')
+}
+
+// build_request_headers renders the full wire-format request: request line,
+// headers and body. Host, User-Agent and Content-Length are auto-filled
+// only when not already present; Cookie headers are merged separately via
+// build_request_cookies_header; Connection: close is always appended.
+fn (req &Request) build_request_headers(method Method, host_name string, path string) string {
+ ua := req.user_agent
+ mut uheaders := []string{}
+ if !req.header.contains(.host) {
+ uheaders << 'Host: $host_name\r\n'
+ }
+ if !req.header.contains(.user_agent) {
+ uheaders << 'User-Agent: $ua\r\n'
+ }
+ if req.data.len > 0 && !req.header.contains(.content_length) {
+ uheaders << 'Content-Length: $req.data.len\r\n'
+ }
+ for key in req.header.keys() {
+ // Cookie is emitted once, merged with req.cookies, below
+ if key == CommonHeader.cookie.str() {
+ continue
+ }
+ // multiple values for one key are joined into a single header line
+ val := req.header.custom_values(key).join('; ')
+ uheaders << '$key: $val\r\n'
+ }
+ uheaders << req.build_request_cookies_header()
+ // never emit the literal 'unknown' version on the wire
+ version := if req.version == .unknown { Version.v1_1 } else { req.version }
+ return '$method $path $version\r\n' + uheaders.join('') + 'Connection: close\r\n\r\n' + req.data
+}
+
+// build_request_cookies_header renders a single 'Cookie:' line from
+// req.cookies plus any Cookie values stored in the header map.
+// Returns '' when req.cookies is empty (header-stored Cookie values are
+// then dropped by build_request_headers — NOTE(review): confirm intended).
+fn (req &Request) build_request_cookies_header() string {
+ if req.cookies.keys().len < 1 {
+ return ''
+ }
+ mut cookie := []string{}
+ for key, val in req.cookies {
+ cookie << '$key=$val'
+ }
+ cookie << req.header.values(.cookie)
+ return 'Cookie: ' + cookie.join('; ') + '\r\n'
+}
+
+// http_do performs one plain-HTTP exchange: dial TCP, write the rendered
+// request, read the whole response (connection is 'close', so until EOF),
+// and parse it. `host` is 'hostname:port'.
+fn (req &Request) http_do(host string, method Method, path string) ?Response {
+ host_name, _ := net.split_address(host) ?
+ s := req.build_request_headers(method, host_name, path)
+ mut client := net.dial_tcp(host) ?
+ client.set_read_timeout(req.read_timeout)
+ client.set_write_timeout(req.write_timeout)
+ // TODO this really needs to be exposed somehow
+ client.write(s.bytes()) ?
+ $if trace_http_request ? {
+ eprintln('> $s')
+ }
+ mut bytes := io.read_all(reader: client) ?
+ client.close() ?
+ response_text := bytes.bytestr()
+ $if trace_http_response ? {
+ eprintln('< $response_text')
+ }
+ return parse_response(response_text)
+}
+
+// referer returns the 'Referer' header value of the given request,
+// or '' when the header is absent.
+pub fn (req &Request) referer() string {
+ return req.header.get(.referer) or { '' }
+}
+
+// parse_request parses a raw HTTP request from `reader` into a Request
+// object: request line, headers (canonicalized), then exactly
+// Content-Length bytes of body (no body is read without that header).
+// Errors on a malformed request line, malformed header, or unsupported
+// HTTP version.
+pub fn parse_request(mut reader io.BufferedReader) ?Request {
+ // request line
+ mut line := reader.read_line() ?
+ method, target, version := parse_request_line(line) ?
+
+ // headers
+ mut header := new_header()
+ line = reader.read_line() ?
+ for line != '' {
+ key, value := parse_header(line) ?
+ header.add_custom(key, value) ?
+ line = reader.read_line() ?
+ }
+ header.coerce(canonicalize: true)
+
+ // body
+ mut body := []byte{}
+ if length := header.get(.content_length) {
+ n := length.int()
+ if n > 0 {
+ body = []byte{len: n}
+ mut count := 0
+ // loop because one read() may return fewer bytes than requested;
+ // a short read (error/EOF) leaves the rest of the body zeroed
+ for count < body.len {
+ count += reader.read(mut body[count..]) or { break }
+ }
+ }
+ }
+
+ return Request{
+ method: method
+ url: target.str()
+ header: header
+ data: body.bytestr()
+ version: version
+ }
+}
+
+// parse_request_line splits 'METHOD target HTTP/x.y' into its three parts.
+// Errors when there are not exactly 3 space-separated tokens, the target is
+// not a parseable URL, or the version string is unknown.
+fn parse_request_line(s string) ?(Method, urllib.URL, Version) {
+ words := s.split(' ')
+ if words.len != 3 {
+ return error('malformed request line')
+ }
+ method := method_from_str(words[0])
+ target := urllib.parse(words[1]) ?
+ version := version_from_str(words[2])
+ if version == .unknown {
+ return error('unsupported version')
+ }
+
+ return method, target, version
+}
+
+// Parse URL encoded key=value&key=value forms.
+// Pairs without '=' or with an unescapable key/value are silently skipped;
+// keys and values are percent-decoded. Whitespace is preserved as-is.
+fn parse_form(body string) map[string]string {
+ words := body.split('&')
+ mut form := map[string]string{}
+ for word in words {
+ kv := word.split_nth('=', 2)
+ if kv.len != 2 {
+ continue
+ }
+ key := urllib.query_unescape(kv[0]) or { continue }
+ val := urllib.query_unescape(kv[1]) or { continue }
+ form[key] = val
+ }
+ return form
+ // }
+ // todo: parse form-data and application/json
+ // ...
+}
+
+// FileData holds one uploaded file extracted from a multipart form:
+// its client-side filename, declared Content-Type, and raw contents.
+struct FileData {
+pub:
+ filename string
+ content_type string
+ data string
+}
+
+// UnexpectedExtraAttributeError is an error payload type.
+// NOTE(review): not constructed anywhere in this file — confirm it is used
+// by other modules before removing.
+struct UnexpectedExtraAttributeError {
+ msg string
+ code int
+}
+
+// MultiplePathAttributesError is an error payload type with a default msg.
+// NOTE(review): not constructed anywhere in this file — confirm it is used
+// by other modules before removing.
+struct MultiplePathAttributesError {
+ msg string = 'Expected at most one path attribute'
+ code int
+}
+
+// parse_multipart_form splits a multipart/form-data body on `boundary` and
+// returns (plain form fields, uploaded files keyed by field name).
+// Fields carrying a 'filename' disposition attribute must also declare a
+// Content-Type on the next line or they are skipped.
+// NOTE(review): assumes a fixed line layout per part (disposition line,
+// optional Content-Type, blank line, data) — confirm against RFC 7578 for
+// parts with extra headers.
+fn parse_multipart_form(body string, boundary string) (map[string]string, map[string][]FileData) {
+ sections := body.split(boundary)
+ // drop the preamble before the first boundary and the '--' epilogue
+ fields := sections[1..sections.len - 1]
+ mut form := map[string]string{}
+ mut files := map[string][]FileData{}
+
+ for field in fields {
+ // TODO: do not split into lines; do same parsing for HTTP body
+ lines := field.split_into_lines()[1..]
+ disposition := parse_disposition(lines[0])
+ // Grab everything between the double quotes
+ name := disposition['name'] or { continue }
+ // Parse files
+ // TODO: filename*
+ if 'filename' in disposition {
+ filename := disposition['filename']
+ // Parse Content-Type header
+ if lines.len == 1 || !lines[1].to_lower().starts_with('content-type:') {
+ continue
+ }
+ mut ct := lines[1].split_nth(':', 2)[1]
+ ct = ct.trim_left(' \t')
+ // file data starts after disposition, Content-Type and blank line
+ data := lines_to_string(field.len, lines, 3, lines.len - 1)
+ files[name] << FileData{
+ filename: filename
+ content_type: ct
+ data: data
+ }
+ continue
+ }
+ // plain field: data starts after disposition and blank line
+ data := lines_to_string(field.len, lines, 2, lines.len - 1)
+ form[name] = data
+ }
+ return form, files
+}
+
+// Parse the Content-Disposition header of a multipart form
+// Returns a map of the key="value" pairs
+// Example: parse_disposition('Content-Disposition: form-data; name="a"; filename="b"') == {'name': 'a', 'filename': 'b'}
+// Keys are lowercased; surrounding double quotes are stripped from values;
+// tokens without '=' (like 'form-data') are skipped.
+fn parse_disposition(line string) map[string]string {
+ mut data := map[string]string{}
+ for word in line.split(';') {
+ kv := word.split_nth('=', 2)
+ if kv.len != 2 {
+ continue
+ }
+ key, value := kv[0].to_lower().trim_left(' \t'), kv[1]
+ if value.starts_with('"') && value.ends_with('"') {
+ data[key] = value[1..value.len - 1]
+ } else {
+ data[key] = value
+ }
+ }
+ return data
+}
+
+// lines_to_string joins lines[start..end] with '\n' (no trailing newline).
+// `len` is only a capacity hint for the builder.
+// NOTE(review): cut_last(1) on an empty range (start >= end) operates on an
+// empty builder — confirm strings.Builder tolerates that.
+[manualfree]
+fn lines_to_string(len int, lines []string, start int, end int) string {
+ mut sb := strings.new_builder(len)
+ for i in start .. end {
+ sb.writeln(lines[i])
+ }
+ sb.cut_last(1) // last newline
+ res := sb.str()
+ unsafe { sb.free() }
+ return res
+}
diff --git a/v_windows/v/vlib/net/http/request_test.v b/v_windows/v/vlib/net/http/request_test.v
new file mode 100644
index 0000000..3950ad8
--- /dev/null
+++ b/v_windows/v/vlib/net/http/request_test.v
@@ -0,0 +1,138 @@
+module http
+
+import io
+
+// StringReader is a test double implementing io.Reader over a fixed string,
+// tracking the current read position in `place`.
+struct StringReader {
+ text string
+mut:
+ place int
+}
+
+// read copies at most 100 bytes per call into buf (deliberately small to
+// exercise multi-read code paths) and returns `none` at end of input.
+fn (mut s StringReader) read(mut buf []byte) ?int {
+ if s.place >= s.text.len {
+ return none
+ }
+ max_bytes := 100
+ end := if s.place + max_bytes >= s.text.len { s.text.len } else { s.place + max_bytes }
+ n := copy(buf, s.text[s.place..end].bytes())
+ s.place += n
+ return n
+}
+
+// reader wraps a string in a BufferedReader suitable for parse_request.
+fn reader(s string) &io.BufferedReader {
+ return io.new_buffered_reader(
+ reader: &StringReader{
+ text: s
+ }
+ )
+}
+
+// test_parse_request_not_http checks that non-HTTP input is rejected.
+fn test_parse_request_not_http() {
+ mut reader__ := reader('hello')
+ parse_request(mut reader__) or { return }
+ panic('should not have parsed')
+}
+
+// test_parse_request_no_headers checks parsing of a minimal request line
+// with an empty header section.
+fn test_parse_request_no_headers() {
+ mut reader_ := reader('GET / HTTP/1.1\r\n\r\n')
+ req := parse_request(mut reader_) or { panic('did not parse: $err') }
+ assert req.method == .get
+ assert req.url == '/'
+ assert req.version == .v1_1
+}
+
+// test_parse_request_two_headers checks that distinct headers are parsed
+// into distinct keys.
+fn test_parse_request_two_headers() {
+ mut reader_ := reader('GET / HTTP/1.1\r\nTest1: a\r\nTest2: B\r\n\r\n')
+ req := parse_request(mut reader_) or { panic('did not parse: $err') }
+ assert req.header.custom_values('Test1') == ['a']
+ assert req.header.custom_values('Test2') == ['B']
+}
+
+// test_parse_request_two_header_values checks that a repeated header key
+// accumulates values, while '; ' inside one value is not split.
+fn test_parse_request_two_header_values() {
+ mut reader_ := reader('GET / HTTP/1.1\r\nTest1: a; b\r\nTest2: c\r\nTest2: d\r\n\r\n')
+ req := parse_request(mut reader_) or { panic('did not parse: $err') }
+ assert req.header.custom_values('Test1') == ['a; b']
+ assert req.header.custom_values('Test2') == ['c', 'd']
+}
+
+// test_parse_request_body checks that exactly Content-Length bytes are read
+// as the body ('abc' past the declared 4 bytes is ignored).
+fn test_parse_request_body() {
+ mut reader_ := reader('GET / HTTP/1.1\r\nTest1: a\r\nTest2: b\r\nContent-Length: 4\r\n\r\nbodyabc')
+ req := parse_request(mut reader_) or { panic('did not parse: $err') }
+ assert req.data == 'body'
+}
+
+// test_parse_request_line checks splitting of the request line into
+// method, target and version.
+fn test_parse_request_line() {
+ method, target, version := parse_request_line('GET /target HTTP/1.1') or {
+ panic('did not parse: $err')
+ }
+ assert method == .get
+ assert target.str() == '/target'
+ assert version == .v1_1
+}
+
+// test_parse_form covers parse_form: plain pairs, literal '=' inside values,
+// percent-decoding, and whitespace preservation in keys and values.
+fn test_parse_form() {
+ assert parse_form('foo=bar&bar=baz') == {
+ 'foo': 'bar'
+ 'bar': 'baz'
+ }
+ assert parse_form('foo=bar=&bar=baz') == {
+ 'foo': 'bar='
+ 'bar': 'baz'
+ }
+ assert parse_form('foo=bar%3D&bar=baz') == {
+ 'foo': 'bar='
+ 'bar': 'baz'
+ }
+ assert parse_form('foo=b%26ar&bar=baz') == {
+ 'foo': 'b&ar'
+ 'bar': 'baz'
+ }
+ assert parse_form('a=b& c=d') == {
+ 'a': 'b'
+ ' c': 'd'
+ }
+ assert parse_form('a=b&c= d ') == {
+ 'a': 'b'
+ 'c': ' d '
+ }
+}
+
+// test_parse_multipart_form checks that a two-part body (one file field,
+// one plain field) is split into the files and form maps respectively.
+fn test_parse_multipart_form() {
+ boundary := '6844a625b1f0b299'
+ names := ['foo', 'fooz']
+ file := 'bar.v'
+ ct := 'application/octet-stream'
+ contents := ['baz', 'buzz']
+ data := "--------------------------$boundary
+Content-Disposition: form-data; name=\"${names[0]}\"; filename=\"$file\"
+Content-Type: $ct
+
+${contents[0]}
+--------------------------$boundary
+Content-Disposition: form-data; name=\"${names[1]}\"
+
+${contents[1]}
+--------------------------$boundary--
+"
+ form, files := parse_multipart_form(data, boundary)
+ assert files == {
+ names[0]: [FileData{
+ filename: file
+ content_type: ct
+ data: contents[0]
+ }]
+ }
+
+ assert form == {
+ names[1]: contents[1]
+ }
+}
+
+// test_parse_large_body checks that a body larger than StringReader's
+// 100-byte per-read limit is assembled across multiple reads.
+fn test_parse_large_body() ? {
+ body := 'A'.repeat(101) // greater than max_bytes
+ req := 'GET / HTTP/1.1\r\nContent-Length: $body.len\r\n\r\n$body'
+ mut reader_ := reader(req)
+ result := parse_request(mut reader_) ?
+ assert result.data.len == body.len
+ assert result.data == body
+}
diff --git a/v_windows/v/vlib/net/http/response.v b/v_windows/v/vlib/net/http/response.v
new file mode 100644
index 0000000..caa8228
--- /dev/null
+++ b/v_windows/v/vlib/net/http/response.v
@@ -0,0 +1,152 @@
+// Copyright (c) 2019-2021 Alexander Medvednikov. All rights reserved.
+// Use of this source code is governed by an MIT license
+// that can be found in the LICENSE file.
+module http
+
+import net.http.chunked
+import strconv
+
+// Response represents the result of the request
+pub struct Response {
+pub mut:
+ text string // response body (chunked transfer already decoded)
+ header Header
+ status_code int
+ status_msg string // reason phrase, e.g. 'OK'
+ http_version string // e.g. '1.1' (without the 'HTTP/' prefix)
+}
+
+// free releases the memory owned by the response's header.
+fn (mut resp Response) free() {
+ unsafe { resp.header.free() }
+}
+
+// Formats resp to bytes suitable for HTTP response transmission
+pub fn (resp Response) bytes() []byte {
+ // TODO: build []byte directly; this uses two allocations
+ return resp.bytestr().bytes()
+}
+
+// Formats resp to a string suitable for HTTP response transmission:
+// status line, rendered headers, blank line, then the body.
+pub fn (resp Response) bytestr() string {
+ return ('HTTP/$resp.http_version $resp.status_code $resp.status_msg\r\n' + '${resp.header.render(
+ version: resp.version()
+ )}\r\n' + '$resp.text')
+}
+
+// Parse a raw HTTP response into a Response object.
+// The status line and headers are parsed; everything after the blank line
+// is the body, which is de-chunked when Transfer-Encoding is 'chunked'.
+pub fn parse_response(resp string) ?Response {
+ version, status_code, status_msg := parse_status_line(resp.all_before('\n')) ?
+ // Build resp header map and separate the body
+ start_idx, end_idx := find_headers_range(resp) ?
+ header := parse_headers(resp.substr(start_idx, end_idx)) ?
+ mut text := resp.substr(end_idx, resp.len)
+ if header.get(.transfer_encoding) or { '' } == 'chunked' {
+ text = chunked.decode(text)
+ }
+ return Response{
+ http_version: version
+ status_code: status_code
+ status_msg: status_msg
+ header: header
+ text: text
+ }
+}
+
+// parse_status_line parses the first HTTP response line into the HTTP
+// version, status code, and reason phrase.
+// Errors when the line does not start with 'HTTP/', has fewer than 3
+// space-separated tokens, or the version is not DIGITS '.' DIGITS.
+fn parse_status_line(line string) ?(string, int, string) {
+ if line.len < 5 || line[..5].to_lower() != 'http/' {
+ return error('response does not start with HTTP/')
+ }
+ // split_nth keeps any spaces inside the reason phrase in data[2]
+ data := line.split_nth(' ', 3)
+ if data.len != 3 {
+ return error('expected at least 3 tokens')
+ }
+ version := data[0].substr(5, data[0].len)
+ // validate version is 1*DIGIT "." 1*DIGIT
+ digits := version.split_nth('.', 3)
+ if digits.len != 2 {
+ return error('HTTP version malformed')
+ }
+ for digit in digits {
+ strconv.atoi(digit) or { return error('HTTP version must contain only integers') }
+ }
+ return version, strconv.atoi(data[1]) ?, data[2]
+}
+
+// cookies parses the Set-Cookie headers into Cookie objects.
+// Unparseable Set-Cookie values are silently skipped.
+pub fn (r Response) cookies() []Cookie {
+ mut cookies := []Cookie{}
+ for cookie in r.header.values(.set_cookie) {
+ cookies << parse_cookie(cookie) or { continue }
+ }
+ return cookies
+}
+
+// status converts the numeric status_code into a Status enum value.
+pub fn (r Response) status() Status {
+ return status_from_int(r.status_code)
+}
+
+// set_status sets both status_code and status_msg of the response from s.
+pub fn (mut r Response) set_status(s Status) {
+ r.status_code = s.int()
+ r.status_msg = s.str()
+}
+
+// version converts the http_version string (e.g. '1.1') into a Version enum.
+pub fn (r Response) version() Version {
+ return version_from_str('HTTP/$r.http_version')
+}
+
+// set_version sets the http_version string of the response from v;
+// .unknown clears it to ''.
+pub fn (mut r Response) set_version(v Version) {
+ if v == .unknown {
+ r.http_version = ''
+ return
+ }
+ maj, min := v.protos()
+ r.http_version = '${maj}.$min'
+}
+
+// ResponseConfig carries the optional parameters for new_response.
+pub struct ResponseConfig {
+ version Version = .v1_1
+ status Status = .ok
+ header Header
+ text string
+}
+
+// new_response creates a Response object from the configuration. This
+// function will add a Content-Length header if text is not empty
+// (and no Content-Length was supplied in conf.header).
+pub fn new_response(conf ResponseConfig) Response {
+ mut resp := Response{
+ text: conf.text
+ header: conf.header
+ }
+ if conf.text.len > 0 && !resp.header.contains(.content_length) {
+ resp.header.add(.content_length, conf.text.len.str())
+ }
+ resp.set_status(conf.status)
+ resp.set_version(conf.version)
+ return resp
+}
+
+// find_headers_range returns the start (inclusive) and end (exclusive)
+// index of the headers in the string, including the trailing newlines. This
+// helper function expects the first line in `data` to be the HTTP status line
+// (HTTP/1.1 200 OK).
+fn find_headers_range(data string) ?(int, int) {
+ start_idx := data.index('\n') or { return error('no start index found') } + 1
+ // scan for the first blank line; `count` tracks consecutive '\n's,
+ // with '\r' ignored so both CRLF and bare LF terminators work
+ mut count := 0
+ for i := start_idx; i < data.len; i++ {
+ if data[i] == `\n` {
+ count++
+ } else if data[i] != `\r` {
+ count = 0
+ }
+ if count == 2 {
+ return start_idx, i + 1
+ }
+ }
+ return error('no end index found')
+}
diff --git a/v_windows/v/vlib/net/http/response_test.v b/v_windows/v/vlib/net/http/response_test.v
new file mode 100644
index 0000000..bf2fba3
--- /dev/null
+++ b/v_windows/v/vlib/net/http/response_test.v
@@ -0,0 +1,36 @@
+module http
+
+// test_response_bytestr checks the wire rendering of new_response, including
+// the auto-added Content-Length and order-insensitive header comparison.
+fn test_response_bytestr() ? {
+ {
+ resp := new_response(
+ status: .ok
+ text: 'Foo'
+ )
+ assert resp.bytestr() == 'HTTP/1.1 200 OK\r\n' + 'Content-Length: 3\r\n' + '\r\n' + 'Foo'
+ }
+ {
+ resp := new_response(
+ status: .found
+ text: 'Foo'
+ header: new_header(key: .location, value: '/')
+ )
+ lines := resp.bytestr().split_into_lines()
+ assert lines[0] == 'HTTP/1.1 302 Found'
+ // header order is not guaranteed
+ check_headers(['Location: /', 'Content-Length: 3'], lines[1..3]) ?
+ assert lines[3] == ''
+ assert lines[4] == 'Foo'
+ }
+}
+
+// check_headers is a helper function for asserting all expected headers
+// are found because rendered header order is not guaranteed. The check
+// is O(n^2) which is fine for small lists.
+fn check_headers(expected []string, found []string) ? {
+ assert expected.len == found.len
+ for header in expected {
+ if header !in found {
+ return error('expected header "$header" not in $found')
+ }
+ }
+}
diff --git a/v_windows/v/vlib/net/http/server.v b/v_windows/v/vlib/net/http/server.v
new file mode 100644
index 0000000..7a9660d
--- /dev/null
+++ b/v_windows/v/vlib/net/http/server.v
@@ -0,0 +1,123 @@
+// Copyright (c) 2019-2021 Alexander Medvednikov. All rights reserved.
+// Use of this source code is governed by an MIT license
+// that can be found in the LICENSE file.
+module http
+
+import io
+import net
+import time
+
+// ServerStatus is the current status of the server.
+// .running means that the server is active and serving.
+// .stopped means that the server is not active but still listening.
+// .closed means that the server is completely inactive.
+pub enum ServerStatus {
+ running
+ stopped
+ closed
+}
+
+// Handler turns one parsed Request into the Response the server will send.
+interface Handler {
+ handle(Request) Response
+}
+
+// Server is a minimal single-threaded HTTP server; configure port/handler,
+// then call listen_and_serve.
+pub struct Server {
+mut:
+ state ServerStatus = .closed
+ listener net.TcpListener
+pub mut:
+ port int = 8080
+ handler Handler = DebugHandler{}
+ read_timeout time.Duration = 30 * time.second
+ write_timeout time.Duration = 30 * time.second
+ accept_timeout time.Duration = 30 * time.second
+}
+
+// listen_and_serve binds the configured port (IPv6 listener) and serves
+// connections sequentially until stop() or close() flips the state.
+// accept_timeout bounds how long a stop() call can go unnoticed.
+pub fn (mut s Server) listen_and_serve() ? {
+ if s.handler is DebugHandler {
+ eprintln('Server handler not set, using debug handler')
+ }
+ s.listener = net.listen_tcp(.ip6, ':$s.port') ?
+ s.listener.set_accept_timeout(s.accept_timeout)
+ eprintln('Listening on :$s.port')
+ s.state = .running
+ for {
+ // break if we have a stop signal
+ if s.state != .running {
+ break
+ }
+ mut conn := s.listener.accept() or {
+ // accept timeouts are expected (they let us poll s.state)
+ if err.msg != 'net: op timed out' {
+ eprintln('accept() failed: $err; skipping')
+ }
+ continue
+ }
+ conn.set_read_timeout(s.read_timeout)
+ conn.set_write_timeout(s.write_timeout)
+ // TODO: make concurrent
+ s.parse_and_respond(mut conn)
+ }
+ // stop() leaves the listener open; close it on the way out
+ if s.state == .stopped {
+ s.close()
+ }
+}
+
+// stop signals the server that it should not respond anymore; the serve
+// loop notices on its next iteration (bounded by accept_timeout).
+[inline]
+pub fn (mut s Server) stop() {
+ s.state = .stopped
+}
+
+// close immediately closes the port and signals the server that it has been
+// closed. A close() error is deliberately swallowed (deallocation must not
+// fail); the state is already .closed at that point.
+[inline]
+pub fn (mut s Server) close() {
+ s.state = .closed
+ s.listener.close() or { return }
+}
+
+// status returns the server's current lifecycle state.
+[inline]
+pub fn (s &Server) status() ServerStatus {
+ return s.state
+}
+
+// parse_and_respond handles one connection: parse the request, invoke the
+// handler, echo the request's HTTP version when the handler didn't set one,
+// write the response, and always close the connection.
+fn (s &Server) parse_and_respond(mut conn net.TcpConn) {
+ defer {
+ conn.close() or { eprintln('close() failed: $err') }
+ }
+
+ mut reader := io.new_buffered_reader(reader: conn)
+ defer {
+ reader.free()
+ }
+ req := parse_request(mut reader) or {
+ $if debug {
+ // only show in debug mode to prevent abuse
+ eprintln('error parsing request: $err')
+ }
+ // malformed request: drop the connection without a response
+ return
+ }
+ mut resp := s.handler.handle(req)
+ if resp.version() == .unknown {
+ resp.set_version(req.version)
+ }
+ conn.write(resp.bytes()) or { eprintln('error sending response: $err') }
+}
+
+// DebugHandler implements the Handler interface by echoing the request
+// in the response; it is the default handler when none is configured.
+struct DebugHandler {}
+
+// handle echoes the request body and headers back with status 200,
+// logging the request to stderr (verbosely in debug builds).
+fn (d DebugHandler) handle(req Request) Response {
+ $if debug {
+ eprintln('[$time.now()] $req.method $req.url\n\r$req.header\n\r$req.data - 200 OK')
+ } $else {
+ eprintln('[$time.now()] $req.method $req.url - 200')
+ }
+ mut r := Response{
+ text: req.data
+ header: req.header
+ }
+ r.set_status(.ok)
+ r.set_version(req.version)
+ return r
+}
diff --git a/v_windows/v/vlib/net/http/server_test.v b/v_windows/v/vlib/net/http/server_test.v
new file mode 100644
index 0000000..790da30
--- /dev/null
+++ b/v_windows/v/vlib/net/http/server_test.v
@@ -0,0 +1,90 @@
+import net.http
+import time
+
+// test_server_stop checks that stop() takes effect immediately and that
+// the listen loop terminates within its one-second accept timeout.
+fn test_server_stop() ? {
+	// short accept timeout so the listen loop can notice the signal quickly
+	mut server := &http.Server{
+		accept_timeout: 1 * time.second
+	}
+	server_thread := go server.listen_and_serve()
+	time.sleep(250 * time.millisecond)
+	mut sw := time.new_stopwatch()
+	server.stop()
+	assert server.status() == .stopped
+	assert sw.elapsed() < 100 * time.millisecond
+	server_thread.wait() ?
+	assert sw.elapsed() < 999 * time.millisecond
+}
+
+// test_server_close checks that close() takes effect immediately and that
+// the listen loop terminates within its one-second accept timeout.
+fn test_server_close() ? {
+	mut server := &http.Server{
+		accept_timeout: 1 * time.second
+		handler: MyHttpHandler{}
+	}
+	server_thread := go server.listen_and_serve()
+	time.sleep(250 * time.millisecond)
+	mut sw := time.new_stopwatch()
+	server.close()
+	assert server.status() == .closed
+	assert sw.elapsed() < 100 * time.millisecond
+	server_thread.wait() ?
+	assert sw.elapsed() < 999 * time.millisecond
+}
+
+// MyHttpHandler is a test http.Handler that counts how many requests it
+// served, split by the status code it answered with.
+struct MyHttpHandler {
+mut:
+	counter int // total number of requests handled
+	oks int // requests answered with 200 OK
+	not_founds int // requests answered with 404 Not Found
+}
+
+// handle echoes '<request data>, <request url>' as the response body,
+// returning 200 for the two known endpoints and 404 for everything else,
+// while updating the handler's counters.
+fn (mut handler MyHttpHandler) handle(req http.Request) http.Response {
+	handler.counter++
+	mut r := http.Response{
+		text: req.data + ', $req.url'
+		header: req.header
+	}
+	// route on the path only, ignoring any query string
+	path := req.url.all_before('?')
+	if path == '/endpoint' || path == '/another/endpoint' {
+		r.set_status(.ok)
+		handler.oks++
+	} else {
+		r.set_status(.not_found)
+		handler.not_founds++
+	}
+	r.set_version(req.version)
+	return r
+}
+
+// cport is the fixed localhost port used by test_server_custom_handler
+const cport = 8198
+
+// test_server_custom_handler runs a real server on localhost:$cport with
+// MyHttpHandler and verifies echo bodies, status codes, HTTP version and
+// the handler's request counters end-to-end via http.fetch.
+fn test_server_custom_handler() ? {
+	mut handler := MyHttpHandler{}
+	mut server := &http.Server{
+		accept_timeout: 1 * time.second
+		handler: handler
+		port: cport
+	}
+	t := go server.listen_and_serve()
+	// busy-wait (10ms granularity) until the listen loop is actually up
+	for server.status() != .running {
+		time.sleep(10 * time.millisecond)
+	}
+	x := http.fetch(url: 'http://localhost:$cport/endpoint?abc=xyz', data: 'my data') ?
+	assert x.text == 'my data, /endpoint?abc=xyz'
+	assert x.status_code == 200
+	assert x.http_version == '1.1'
+	y := http.fetch(url: 'http://localhost:$cport/another/endpoint', data: 'abcde') ?
+	assert y.text == 'abcde, /another/endpoint'
+	assert y.status_code == 200
+	assert y.status() == .ok
+	assert y.http_version == '1.1'
+	// unknown path: counted by the handler as a not_found
+	http.fetch(url: 'http://localhost:$cport/something/else') ?
+	server.stop()
+	t.wait() ?
+	// NOTE(review): counters survive because the interface appears to hold a
+	// reference to the local `handler`, not a copy - confirm V interface semantics
+	assert handler.counter == 3
+	assert handler.oks == 2
+	assert handler.not_founds == 1
+}
diff --git a/v_windows/v/vlib/net/http/status.v b/v_windows/v/vlib/net/http/status.v
new file mode 100644
index 0000000..f4bc9ee
--- /dev/null
+++ b/v_windows/v/vlib/net/http/status.v
@@ -0,0 +1,255 @@
+// Copyright (c) 2020 Justin E. Jones. All rights reserved.
+// Use of this source code is governed by an MIT license
+// that can be found in the LICENSE file.
+module http
+
+// The status codes listed here are based on the comprehensive list,
+// available at:
+// https://www.iana.org/assignments/http-status-codes/http-status-codes.xhtml
+pub enum Status {
+	// sentinel values for codes that are not in the registry at all (.unknown)
+	// or reserved but not assigned a name (.unassigned)
+	unknown = -1
+	unassigned = 0
+	// 1xx: informational
+	cont = 100
+	switching_protocols = 101
+	processing = 102
+	checkpoint_draft = 103
+	// 2xx: success
+	ok = 200
+	created = 201
+	accepted = 202
+	non_authoritative_information = 203
+	no_content = 204
+	reset_content = 205
+	partial_content = 206
+	multi_status = 207
+	already_reported = 208
+	im_used = 226
+	// 3xx: redirection
+	multiple_choices = 300
+	moved_permanently = 301
+	found = 302
+	see_other = 303
+	not_modified = 304
+	use_proxy = 305
+	switch_proxy = 306
+	temporary_redirect = 307
+	permanent_redirect = 308
+	// 4xx: client errors
+	bad_request = 400
+	unauthorized = 401
+	payment_required = 402
+	forbidden = 403
+	not_found = 404
+	method_not_allowed = 405
+	not_acceptable = 406
+	proxy_authentication_required = 407
+	request_timeout = 408
+	conflict = 409
+	gone = 410
+	length_required = 411
+	precondition_failed = 412
+	request_entity_too_large = 413
+	request_uri_too_long = 414
+	unsupported_media_type = 415
+	requested_range_not_satisfiable = 416
+	expectation_failed = 417
+	im_a_teapot = 418
+	misdirected_request = 421
+	unprocessable_entity = 422
+	locked = 423
+	failed_dependency = 424
+	unordered_collection = 425
+	upgrade_required = 426
+	precondition_required = 428
+	too_many_requests = 429
+	request_header_fields_too_large = 431
+	unavailable_for_legal_reasons = 451
+	// non-standard nginx code
+	client_closed_request = 499
+	// 5xx: server errors
+	internal_server_error = 500
+	not_implemented = 501
+	bad_gateway = 502
+	service_unavailable = 503
+	gateway_timeout = 504
+	http_version_not_supported = 505
+	variant_also_negotiates = 506
+	insufficient_storage = 507
+	loop_detected = 508
+	bandwidth_limit_exceeded = 509
+	not_extended = 510
+	network_authentication_required = 511
+}
+
+// status_from_int converts an integer HTTP status code into its named
+// Status enum value. Codes in 100..599 that have no named variant map
+// to .unassigned; anything outside that range maps to .unknown.
+pub fn status_from_int(code int) Status {
+	return match code {
+		100 { Status.cont }
+		101 { Status.switching_protocols }
+		102 { Status.processing }
+		103 { Status.checkpoint_draft }
+		104...199 { Status.unassigned }
+		200 { Status.ok }
+		201 { Status.created }
+		202 { Status.accepted }
+		203 { Status.non_authoritative_information }
+		204 { Status.no_content }
+		205 { Status.reset_content }
+		206 { Status.partial_content }
+		207 { Status.multi_status }
+		208 { Status.already_reported }
+		209...225 { Status.unassigned }
+		226 { Status.im_used }
+		227...299 { Status.unassigned }
+		300 { Status.multiple_choices }
+		301 { Status.moved_permanently }
+		302 { Status.found }
+		303 { Status.see_other }
+		304 { Status.not_modified }
+		305 { Status.use_proxy }
+		306 { Status.switch_proxy }
+		307 { Status.temporary_redirect }
+		308 { Status.permanent_redirect }
+		309...399 { Status.unassigned }
+		400 { Status.bad_request }
+		401 { Status.unauthorized }
+		402 { Status.payment_required }
+		403 { Status.forbidden }
+		404 { Status.not_found }
+		405 { Status.method_not_allowed }
+		406 { Status.not_acceptable }
+		407 { Status.proxy_authentication_required }
+		408 { Status.request_timeout }
+		409 { Status.conflict }
+		410 { Status.gone }
+		411 { Status.length_required }
+		412 { Status.precondition_failed }
+		413 { Status.request_entity_too_large }
+		414 { Status.request_uri_too_long }
+		415 { Status.unsupported_media_type }
+		416 { Status.requested_range_not_satisfiable }
+		417 { Status.expectation_failed }
+		418 { Status.im_a_teapot }
+		419...420 { Status.unassigned }
+		421 { Status.misdirected_request }
+		422 { Status.unprocessable_entity }
+		423 { Status.locked }
+		424 { Status.failed_dependency }
+		425 { Status.unordered_collection }
+		426 { Status.upgrade_required }
+		// fix: 427 and 430 previously fell through to `else` and became
+		// .unknown, unlike every other unnamed in-range code
+		427 { Status.unassigned }
+		428 { Status.precondition_required }
+		429 { Status.too_many_requests }
+		430 { Status.unassigned }
+		431 { Status.request_header_fields_too_large }
+		432...450 { Status.unassigned }
+		451 { Status.unavailable_for_legal_reasons }
+		// fix: 499 has a named variant (client_closed_request) but was
+		// previously swallowed by the 452...499 unassigned range
+		452...498 { Status.unassigned }
+		499 { Status.client_closed_request }
+		500 { Status.internal_server_error }
+		501 { Status.not_implemented }
+		502 { Status.bad_gateway }
+		503 { Status.service_unavailable }
+		504 { Status.gateway_timeout }
+		505 { Status.http_version_not_supported }
+		506 { Status.variant_also_negotiates }
+		507 { Status.insufficient_storage }
+		508 { Status.loop_detected }
+		509 { Status.bandwidth_limit_exceeded }
+		510 { Status.not_extended }
+		511 { Status.network_authentication_required }
+		512...599 { Status.unassigned }
+		else { Status.unknown }
+	}
+}
+
+// str returns the standard reason phrase for the status code,
+// e.g. 'Not Found' for .not_found; unrecognized codes yield 'Unknown'.
+pub fn (code Status) str() string {
+	return match code {
+		.cont { 'Continue' }
+		.switching_protocols { 'Switching Protocols' }
+		.processing { 'Processing' }
+		.checkpoint_draft { 'Checkpoint Draft' }
+		.ok { 'OK' }
+		.created { 'Created' }
+		.accepted { 'Accepted' }
+		.non_authoritative_information { 'Non Authoritative Information' }
+		.no_content { 'No Content' }
+		.reset_content { 'Reset Content' }
+		.partial_content { 'Partial Content' }
+		.multi_status { 'Multi Status' }
+		.already_reported { 'Already Reported' }
+		.im_used { 'IM Used' }
+		.multiple_choices { 'Multiple Choices' }
+		.moved_permanently { 'Moved Permanently' }
+		.found { 'Found' }
+		.see_other { 'See Other' }
+		.not_modified { 'Not Modified' }
+		.use_proxy { 'Use Proxy' }
+		.switch_proxy { 'Switch Proxy' }
+		.temporary_redirect { 'Temporary Redirect' }
+		.permanent_redirect { 'Permanent Redirect' }
+		.bad_request { 'Bad Request' }
+		.unauthorized { 'Unauthorized' }
+		.payment_required { 'Payment Required' }
+		.forbidden { 'Forbidden' }
+		.not_found { 'Not Found' }
+		.method_not_allowed { 'Method Not Allowed' }
+		.not_acceptable { 'Not Acceptable' }
+		.proxy_authentication_required { 'Proxy Authentication Required' }
+		.request_timeout { 'Request Timeout' }
+		.conflict { 'Conflict' }
+		.gone { 'Gone' }
+		.length_required { 'Length Required' }
+		.precondition_failed { 'Precondition Failed' }
+		.request_entity_too_large { 'Request Entity Too Large' }
+		.request_uri_too_long { 'Request URI Too Long' }
+		.unsupported_media_type { 'Unsupported Media Type' }
+		.requested_range_not_satisfiable { 'Requested Range Not Satisfiable' }
+		.expectation_failed { 'Expectation Failed' }
+		.im_a_teapot { 'Im a teapot' }
+		.misdirected_request { 'Misdirected Request' }
+		.unprocessable_entity { 'Unprocessable Entity' }
+		.locked { 'Locked' }
+		.failed_dependency { 'Failed Dependency' }
+		.unordered_collection { 'Unordered Collection' }
+		.upgrade_required { 'Upgrade Required' }
+		.precondition_required { 'Precondition Required' }
+		.too_many_requests { 'Too Many Requests' }
+		.request_header_fields_too_large { 'Request Header Fields Too Large' }
+		.unavailable_for_legal_reasons { 'Unavailable For Legal Reasons' }
+		// fix: this named variant was missing and fell through to 'Unknown'
+		.client_closed_request { 'Client Closed Request' }
+		.internal_server_error { 'Internal Server Error' }
+		.not_implemented { 'Not Implemented' }
+		.bad_gateway { 'Bad Gateway' }
+		.service_unavailable { 'Service Unavailable' }
+		.gateway_timeout { 'Gateway Timeout' }
+		.http_version_not_supported { 'HTTP Version Not Supported' }
+		.variant_also_negotiates { 'Variant Also Negotiates' }
+		.insufficient_storage { 'Insufficient Storage' }
+		.loop_detected { 'Loop Detected' }
+		.bandwidth_limit_exceeded { 'Bandwidth Limit Exceeded' }
+		.not_extended { 'Not Extended' }
+		.network_authentication_required { 'Network Authentication Required' }
+		.unassigned { 'Unassigned' }
+		else { 'Unknown' }
+	}
+}
+
+// int converts an assigned and known Status to its integral equivalent.
+// if a Status is unknown or unassigned, this method will return zero
+pub fn (code Status) int() int {
+	return match code {
+		.unknown, .unassigned { 0 }
+		else { int(code) }
+	}
+}
+
+// is_valid returns true if the status code is assigned and known
+pub fn (code Status) is_valid() bool {
+	n := code.int()
+	return !(n < 100 || n >= 600)
+}
+
+// is_error will return true if the status code represents either a client or
+// a server error; otherwise will return false
+pub fn (code Status) is_error() bool {
+	n := code.int()
+	return !(n < 400 || n >= 600)
+}
+
+// is_success will return true if the status code represents either an
+// informational, success, or redirection response; otherwise will return false
+pub fn (code Status) is_success() bool {
+	n := code.int()
+	return !(n < 100 || n >= 400)
+}
diff --git a/v_windows/v/vlib/net/http/status_test.v b/v_windows/v/vlib/net/http/status_test.v
new file mode 100644
index 0000000..154aec3
--- /dev/null
+++ b/v_windows/v/vlib/net/http/status_test.v
@@ -0,0 +1,49 @@
+module http
+
+// a known status renders its standard reason phrase
+fn test_str() {
+	assert Status.bad_gateway.str() == 'Bad Gateway'
+}
+
+// a known status converts back to its numeric code
+fn test_int() {
+	assert Status.see_other.int() == 303
+}
+
+// an assigned status is considered valid
+fn test_is_valid() {
+	assert Status.gateway_timeout.is_valid()
+}
+
+// the .unassigned sentinel is not a valid status
+fn test_is_valid_negative() {
+	assert !Status.unassigned.is_valid()
+}
+
+// a 4xx status is classified as an error
+fn test_is_error() {
+	assert Status.too_many_requests.is_error()
+}
+
+// a 1xx status is not classified as an error
+fn test_is_error_negative() {
+	assert !Status.cont.is_error()
+}
+
+// a 2xx status is classified as success
+fn test_is_success() {
+	assert Status.accepted.is_success()
+}
+
+// a 4xx status is not classified as success
+fn test_is_success_negative() {
+	assert !Status.forbidden.is_success()
+}
diff --git a/v_windows/v/vlib/net/http/version.v b/v_windows/v/vlib/net/http/version.v
new file mode 100644
index 0000000..f4388a3
--- /dev/null
+++ b/v_windows/v/vlib/net/http/version.v
@@ -0,0 +1,40 @@
+// Copyright (c) 2019-2021 Alexander Medvednikov. All rights reserved.
+// Use of this source code is governed by an MIT license
+// that can be found in the LICENSE file.
+module http
+
+// The versions listed here are the most common ones.
+pub enum Version {
+	// first variant, so a zero-initialized Version presumably defaults
+	// to .unknown — confirm against V enum default semantics
+	unknown
+	v1_1
+	v2_0
+	v1_0
+}
+
+// str returns the on-the-wire name of the HTTP version, e.g. 'HTTP/1.1',
+// or the literal 'unknown' for .unknown.
+pub fn (v Version) str() string {
+	return match v {
+		.unknown { 'unknown' }
+		.v1_0 { 'HTTP/1.0' }
+		.v1_1 { 'HTTP/1.1' }
+		.v2_0 { 'HTTP/2.0' }
+	}
+}
+
+// version_from_str parses a version string such as 'HTTP/1.1'
+// (case-insensitively) into a Version; unrecognized input yields .unknown.
+pub fn version_from_str(v string) Version {
+	normalized := v.to_lower()
+	return match normalized {
+		'http/1.0' { Version.v1_0 }
+		'http/1.1' { Version.v1_1 }
+		'http/2.0' { Version.v2_0 }
+		else { Version.unknown }
+	}
+}
+
+// protos returns the version major and minor numbers
+// (0, 0 for .unknown)
+pub fn (v Version) protos() (int, int) {
+	if v == .v1_1 {
+		return 1, 1
+	} else if v == .v2_0 {
+		return 2, 0
+	} else if v == .v1_0 {
+		return 1, 0
+	}
+	// .unknown
+	return 0, 0
+}