diff --git a/CHANGELOG.md b/CHANGELOG.md index bea15a552..ed1900978 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,8 @@ and this project adheres to [Semantic Versioning](http://semver.org/). - Fixed CVE-2023-44487 (HTTP/2 Rapid Reset) [PR #1417](https://github.com/3scale/apicast/pull/1417) [THREESCALE-10224](https://issues.redhat.com/browse/THREESCALE-10224) +- Fixed issue where the proxy policy could not handle requests with "Transfer-Encoding: chunked" header [PR #1403](https://github.com/3scale/APIcast/pull/1403) [THREESCALE-9542](https://issues.redhat.com/browse/THREESCALE-9542) + ### Added - Detect number of CPU shares when running on Cgroups V2 [PR #1410](https://github.com/3scale/apicast/pull/1410) [THREESCALE-10167](https://issues.redhat.com/browse/THREESCALE-10167) diff --git a/gateway/src/apicast/http_proxy.lua b/gateway/src/apicast/http_proxy.lua index 624aa0502..a14135f27 100644 --- a/gateway/src/apicast/http_proxy.lua +++ b/gateway/src/apicast/http_proxy.lua @@ -1,14 +1,30 @@ local format = string.format +local tostring = tostring +local ngx_flush = ngx.flush +local ngx_get_method = ngx.req.get_method +local ngx_http_version = ngx.req.http_version +local ngx_send_headers = ngx.send_headers local resty_url = require "resty.url" local resty_resolver = require 'resty.resolver' local round_robin = require 'resty.balancer.round_robin' local http_proxy = require 'resty.http.proxy' local file_reader = require("resty.file").file_reader +local file_size = require("resty.file").file_size +local client_body_reader = require("resty.http.request_reader").get_client_body_reader +local send_response = require("resty.http.response_writer").send_response local concat = table.concat local _M = { } +local http_methods_with_body = { + POST = true, + PUT = true, + PATCH = true +} + +local DEFAULT_CHUNKSIZE = 32 * 1024 + function _M.reset() _M.balancer = round_robin.new() _M.resolver = resty_resolver @@ -82,15 +98,50 @@ local function absolute_url(uri) ) end 
-local function forward_https_request(proxy_uri, proxy_auth, uri, skip_https_connect) - -- This is needed to call ngx.req.get_body_data() below. - ngx.req.read_body() +local function forward_https_request(proxy_uri, uri, proxy_opts) + local body, err + local sock + local opts = proxy_opts or {} + local req_method = ngx_get_method() + local encoding = ngx.req.get_headers()["Transfer-Encoding"] + local is_chunked = encoding and encoding:lower() == "chunked" + local content_type = ngx.req.get_headers()["Content-Type"] + local content_type_is_urlencoded = content_type and content_type:lower() == "application/x-www-form-urlencoded" + local raw = false + + if http_methods_with_body[req_method] then + + -- When the content type is "application/x-www-form-urlencoded" the body is always pre-read. + -- See: gateway/src/apicast/configuration/service.lua:214 + -- + -- Due to this, ngx.req.socket() will fail with "request body already exists" error or return + -- socket but hang on read in case of raw socket. Therefore, we only retrieve body from the + -- socket if the content type is not "application/x-www-form-urlencoded" + if opts.request_unbuffered and ngx_http_version() == 1.1 and not content_type_is_urlencoded then + if is_chunked then + -- The default ngx reader does not support chunked request + -- so we will need to get the raw request socket and manually + -- decode the chunked request + sock, err = ngx.req.socket(true) + raw = true + else + sock, err = ngx.req.socket() + end - local request = { - uri = uri, - method = ngx.req.get_method(), - headers = ngx.req.get_headers(0, true), - path = format('%s%s%s', ngx.var.uri, ngx.var.is_args, ngx.var.query_string or ''), + if not sock then + ngx.log(ngx.ERR, "unable to obtain request socket: ", err) + return ngx.exit(ngx.HTTP_INTERNAL_SERVER_ERROR) + end + + body = client_body_reader(sock, DEFAULT_CHUNKSIZE, is_chunked) + else + -- TODO: Due to ngx.req.read_body(). 
The current implementation will not work with grpc service + -- See: https://github.com/3scale/APIcast/pull/1419 + -- Should we get the body from socket by default and only read buffered body if + -- "Content-Type: application/x-www-form-urlencoded"? + -- + -- This is needed to call ngx.req.get_body_data() below. + ngx.req.read_body() -- We cannot use resty.http's .get_client_body_reader(). -- In POST requests with HTTPS, the result of that call is nil, and it @@ -101,26 +152,55 @@ local function forward_https_request(proxy_uri, proxy_auth, uri, skip_https_conn -- read and need to be cached in a local file. This request will return -- nil, so after this we need to read the temp file. -- https://github.com/openresty/lua-nginx-module#ngxreqget_body_data - body = ngx.req.get_body_data(), - proxy_uri = proxy_uri, - proxy_auth = proxy_auth - } + body = ngx.req.get_body_data() + + if not body then + local temp_file_path = ngx.req.get_body_file() + ngx.log(ngx.INFO, "HTTPS Proxy: Request body is bigger than client_body_buffer_size, read the content from path='", temp_file_path, "'") + + if temp_file_path then + body, err = file_reader(temp_file_path) + if err then + ngx.log(ngx.ERR, "HTTPS proxy: Failed to read temp body file, err: ", err) + ngx.exit(ngx.HTTP_INTERNAL_SERVER_ERROR) + end + + if is_chunked then + -- If the body is smaller than "client_boby_buffer_size" the Content-Length header is + -- set by openresty based on the size of the buffer. 
However, when the body is rendered + -- to a file, we will need to calculate and manually set the Content-Length header based + -- on the file size + local contentLength, err = file_size(temp_file_path)() + if err then + ngx.log(ngx.ERR, "HTTPS proxy: Failed to set content length, err: ", err) + ngx.exit(ngx.HTTP_INTERNAL_SERVER_ERROR) + end + + ngx.req.set_header("Content-Length", tostring(contentLength)) + end + end + end - if not request.body then - local temp_file_path = ngx.req.get_body_file() - ngx.log(ngx.INFO, "HTTPS Proxy: Request body is bigger than client_body_buffer_size, read the content from path='", temp_file_path, "'") - - if temp_file_path then - local body, err = file_reader(temp_file_path) - if err then - ngx.log(ngx.ERR, "HTTPS proxy: Failed to read temp body file, err: ", err) - ngx.exit(ngx.HTTP_INTERNAL_SERVER_ERROR) - end - request.body = body + -- The whole request is buffered with chunked encoding removed, so remove the Transfer-Encoding: chunked + -- header, otherwise the upstream won't be able to read the body as it expected chunk encoded + -- body + if is_chunked then + ngx.req.set_header("Transfer-Encoding", nil) end + end end - local httpc, err = http_proxy.new(request, skip_https_connect) + local request = { + uri = uri, + method = ngx.req.get_method(), + headers = ngx.req.get_headers(0, true), + path = format('%s%s%s', ngx.var.uri, ngx.var.is_args, ngx.var.query_string or ''), + body = body, + proxy_uri = proxy_uri, + proxy_auth = opts.proxy_auth + } + + local httpc, err = http_proxy.new(request, opts.skip_https_connect) if not httpc then ngx.log(ngx.ERR, 'could not connect to proxy: ', proxy_uri, ' err: ', err) @@ -132,8 +212,16 @@ local function forward_https_request(proxy_uri, proxy_auth, uri, skip_https_conn res, err = httpc:request(request) if res then - httpc:proxy_response(res) - httpc:set_keepalive() + if opts.request_unbuffered and raw then + local bytes, err = send_response(sock, res, DEFAULT_CHUNKSIZE) + if not bytes then 
+ ngx.log(ngx.ERR, "failed to send response: ", err) + return sock:send("HTTP/1.1 502 Bad Gateway") + end + else + httpc:proxy_response(res) + httpc:set_keepalive() + end else ngx.log(ngx.ERR, 'failed to proxy request to: ', proxy_uri, ' err : ', err) return ngx.exit(ngx.HTTP_BAD_GATEWAY) @@ -186,7 +274,13 @@ local function forward_https_request(proxy_uri, proxy_auth, uri, skip_https_conn return elseif uri.scheme == 'https' then upstream:rewrite_request() - forward_https_request(proxy_uri, proxy_auth, uri, upstream.skip_https_connect) + local proxy_opts = { + proxy_auth = proxy_auth, + skip_https_connect = upstream.skip_https_connect, + request_unbuffered = upstream.request_unbuffered + } + + forward_https_request(proxy_uri, uri, proxy_opts) return ngx.exit(ngx.OK) -- terminate phase else ngx.log(ngx.ERR, 'could not connect to proxy: ', proxy_uri, ' err: ', 'invalid request scheme') diff --git a/gateway/src/apicast/policy/request_unbuffered/README.md b/gateway/src/apicast/policy/request_unbuffered/README.md index ebc7b3508..e1d27051d 100644 --- a/gateway/src/apicast/policy/request_unbuffered/README.md +++ b/gateway/src/apicast/policy/request_unbuffered/README.md @@ -1,14 +1,97 @@ # APICast Request Unbuffered -This policy allows to disable request buffering +## Description + +When enabled, this policy dynamically sets the [`proxy_request_buffering: off`](https://nginx.org/en/docs/http/ngx_http_proxy_module.html#proxy_request_buffering +) directive per service. + +## Technical details + +By default, NGINX reads the entire request body into memory or buffers large requests to disk before forwarding them to the upstream server. Reading bodies can become expensive, especially when sending requests containing large payloads. + +For example, when the client sends 10GB, NGINX will buffer the entire 10GB to disk before sending anything to the upstream server.
+ +When the `request_unbuffered` is in the chain, request buffering is disabled, sending the request body to the proxied server immediately upon receiving it. This can help minimize time spent sending data to a service and disk I/O for requests with big bodies. However, there are caveats and corner cases; see [**Caveats**](#caveats) + +The policy also provides a consistent behavior across multiple scenarios like: + +``` +- APIcast <> upstream HTTP 1.1 plain +- APIcast <> upstream TLS +- APIcast <> HTTP Proxy (env var) <> upstream HTTP 1.1 plain +- APIcast <> HTTP Proxy (policy) <> upstream HTTP 1.1 plain +- APIcast <> HTTP Proxy (camel proxy) <> upstream HTTP 1.1 plain +- APIcast <> HTTP Proxy (env var) <> upstream TLS +- APIcast <> HTTP Proxy (policy) <> upstream TLS +- APIcast <> HTTP Proxy (camel proxy) <> upstream TLS +``` + +## Why don't we also support disabling response buffering? + +The response buffering is enabled by default in NGINX (the [`proxy_buffering: on`]() directive). It does this to shield the backend against slow clients ([slowloris attack](https://en.wikipedia.org/wiki/Slowloris_(computer_security))). + +If `proxy_buffering` is disabled, the upstream server keeps the connection open until all data is received by the client. NGINX [advises](https://www.nginx.com/blog/avoiding-top-10-nginx-configuration-mistakes/#proxy_buffering-off) against disabling `proxy_buffering` as it will potentially waste upstream server resources. + +## Why does upstream receive a "Content-Length" header when the original request is sent with "Transfer-Encoding: chunked" + +For a request with a "small" body that fits into [`client_body_buffer_size`](https://nginx.org/en/docs/http/ngx_http_core_module.html#client_body_buffer_size) and with the header "Transfer-Encoding: chunked", NGINX will always read and know the length of the body. Then it will send the request to upstream with the "Content-Length" header.
+ +If a client uses chunked transfer encoding with HTTP/1.0, NGINX will always buffer the request body. ## Example configuration ``` -{ - "name": "request_unbuffered", - "version": "builtin", - "configuration": {} -} +"policy_chain": [ + { + "name": "request_unbuffered", + "version": "builtin" + }, + { + "name": "apicast.policy.apicast" + } +] +``` + +Use with Proxy policy + +``` +"policy_chain": [ + { + "name": "request_unbuffered", + "version": "builtin" + }, + { + "name": "apicast.policy.http_proxy", + "configuration": { + "all_proxy": "http://foo:bar@192.168.15.103:8888/", + "https_proxy": "http://192.168.15.103:8888/", + "http_proxy": "http://192.168.15.103:8888/" + } + } +] +``` + +Use with Camel Proxy policy + +``` +"policy_chain": [ + { + "name": "request_unbuffered", + "version": "builtin" + }, + { + "name": "apicast.policy.camel", + "configuration": { + "http_proxy": "http://192.168.15.103:8080/", + "https_proxy": "http://192.168.15.103:8443/", + "all_proxy": "http://192.168.15.103:8080/" + } + } +] ``` +## Caveats + +- APIcast allows defining mapping rules based on request content. For example, `POST /some_path?a_param={a_value}` will match a request like `POST "http://apicast_host:8080/some_path"` with a form URL-encoded body like: `a_param=abc`. Requests with `Content-type: application/x-www-form-urlencoded` will always be buffered regardless of whether the + `request_unbuffered` policy is enabled or not. +- Disabling request buffering could potentially expose the backend to a [slowloris attack](https://en.wikipedia.org/wiki/Slowloris_(computer_security)). Therefore, we recommend only using this policy when needed.
diff --git a/gateway/src/apicast/upstream.lua b/gateway/src/apicast/upstream.lua index 0aff47359..43c395c68 100644 --- a/gateway/src/apicast/upstream.lua +++ b/gateway/src/apicast/upstream.lua @@ -241,6 +241,7 @@ function _M:call(context) self:set_skip_https_connect_on_proxy(); end + self.request_unbuffered = context.request_unbuffered http_proxy.request(self, proxy_uri) else local err = self:rewrite_request() diff --git a/gateway/src/resty/file.lua b/gateway/src/resty/file.lua index 40b3a7f0c..4ea5ff5c2 100644 --- a/gateway/src/resty/file.lua +++ b/gateway/src/resty/file.lua @@ -1,4 +1,5 @@ local co_yield = coroutine._yield +local co_wrap = coroutine._wrap local open = io.open local co_wrap_iter = require("resty.coroutines").co_wrap_iter @@ -28,4 +29,22 @@ function _M.file_reader(filename) end) end +function _M.file_size(filename) + return co_wrap(function() + local handle, err = open(filename) + + if err then + return nil, err + end + + local current = handle:seek() + local size = handle:seek("end") + + handle:seek("set", current) + handle:close() + + return size + end) +end + return _M diff --git a/gateway/src/resty/http/request_reader.lua b/gateway/src/resty/http/request_reader.lua new file mode 100644 index 000000000..07188a705 --- /dev/null +++ b/gateway/src/resty/http/request_reader.lua @@ -0,0 +1,73 @@ +local httpc = require "resty.resolver.http" +local ngx_req = ngx.req + +local _M = { +} + +local cr_lf = "\r\n" + +local function test_expect(sock) + local expect = ngx_req.get_headers()["Expect"] + + if expect == "" or ngx_req.http_version == 1.0 then + return true + end + + if expect and expect:lower() == "100-continue" then + local _, err = sock:send("HTTP/1.1 100 Continue\r\n\r\n") + if err then + ngx.log(ngx.ERR, "failed to handle expect header, err: ", err) + return false, err + end + end + return true +end + +-- chunked_reader return a body reader that translates the data read from +-- lua-resty-http client_body_reader to HTTP "chunked" format before 
returning it +-- +-- The chunked reader return nil when the final 0-length chunk is read +local function chunked_reader(sock, chunksize) + chunksize = chunksize or 65536 + local eof = false + local reader = httpc:get_client_body_reader(chunksize, sock) + if not reader then + return nil + end + + -- If Expect: 100-continue is sent upstream, lua-resty-http will only call + -- _send_body after receiving "100 Continue". So it's safe to process the + -- Expect header and send "100 Continue" downstream here. + local ok, err = test_expect(sock) + if not ok then + return nil, err + end + + return function() + if eof then + return nil + end + + local buffer, err = reader() + if err then + return nil, err + end + if buffer then + local chunk = string.format("%x\r\n", #buffer) .. buffer .. cr_lf + return chunk + else + eof = true + return "0\r\n\r\n" + end + end +end + +function _M.get_client_body_reader(sock, chunksize, is_chunked) + if is_chunked then + return chunked_reader(sock, chunksize) + else + return httpc:get_client_body_reader(chunksize, sock) + end +end + +return _M diff --git a/gateway/src/resty/http/response_writer.lua b/gateway/src/resty/http/response_writer.lua new file mode 100644 index 000000000..fc320512d --- /dev/null +++ b/gateway/src/resty/http/response_writer.lua @@ -0,0 +1,93 @@ +local fmt = string.format +local str_lower = string.lower + +local _M = { +} + +local cr_lf = "\r\n" + +-- http://www.w3.org/Protocols/rfc2616/rfc2616-sec13.html#sec13.5.1 +local HOP_BY_HOP_HEADERS = { + ["connection"] = true, + ["keep-alive"] = true, + ["proxy-authenticate"] = true, + ["proxy-authorization"] = true, + ["te"] = true, + ["trailers"] = true, + ["transfer-encoding"] = true, + ["upgrade"] = true, + ["content-length"] = true, -- Not strictly hop-by-hop, but Nginx will deal + -- with this (may send chunked for example). 
+} + +local function send(socket, data) + if not data or data == '' then + ngx.log(ngx.DEBUG, 'skipping sending nil') + return + end + + return socket:send(data) +end + +-- write_response writes response body reader to sock in the HTTP/1.x server response format, +-- The connection is closed if send() fails or when returning a non-zero +function _M.send_response(sock, response, chunksize) + local bytes, err + chunksize = chunksize or 65536 + + if not response then + ngx.log(ngx.ERR, "no response provided") + return + end + + if not sock then + return nil, "socket not initialized yet" + end + + -- Status line + -- TODO: get HTTP version from request + local status = fmt("HTTP/%d.%d %03d %s\r\n", 1, 1, response.status, response.reason) + bytes, err = send(sock, status) + if not bytes then + return nil, "failed to send status line, err: " .. (err or "unknown") + end + + -- Filter out hop-by-hop headeres + for k, v in pairs(response.headers) do + if not HOP_BY_HOP_HEADERS[str_lower(k)] then + local header = fmt("%s: %s\r\n", k, v) + bytes, err = sock:send(header) + if not bytes then + return nil, "failed to send status line, err: " .. (err or "unknown") + end + end + end + + -- End-of-header + bytes, err = send(sock, cr_lf) + if not bytes then + return nil, "failed to send status line, err: " .. (err or "unknown") + end + + -- Write body + local reader = response.body_reader + repeat + local chunk, read_err + + chunk, read_err = reader(chunksize) + if read_err then + return nil, "failed to read response body, err: " .. (err or "unknown") + end + + if chunk then + bytes, err = send(sock, chunk) + if not bytes then + return nil, "failed to send response body, err: " .. 
(err or "unknown") + end + end + until not chunk + + return true, nil +end + +return _M diff --git a/t/apicast-policy-camel.t b/t/apicast-policy-camel.t index ddaac389c..6577e6391 100644 --- a/t/apicast-policy-camel.t +++ b/t/apicast-policy-camel.t @@ -3,6 +3,16 @@ use Test::APIcast::Blackbox 'no_plan'; require("http_proxy.pl"); +sub large_body { + my $res = ""; + for (my $i=0; $i <= 1024; $i++) { + $res = $res . "1111111 1111111 1111111 1111111\n"; + } + return $res; +} + +$ENV{'LARGE_BODY'} = large_body(); + repeat_each(1); run_tests(); @@ -506,3 +516,709 @@ using proxy: http://foo:bar\@127.0.0.1:$Test::Nginx::Util::PROXY_SSL_PORT, EOF --- no_error_log eval [qr/\[error\]/, qr/\got header line: Proxy-Authorization: Basic Zm9vOmJhcg==/] + + + +=== TEST 8: API backend connection uses http proxy with chunked request +--- configuration +{ + "services": [ + { + "id": 42, + "backend_version": 1, + "backend_authentication_type": "service_token", + "backend_authentication_value": "token-value", + "proxy": { + "api_backend": "http://test-upstream.lvh.me:$TEST_NGINX_SERVER_PORT/", + "proxy_rules": [ + { "pattern": "/", "http_method": "POST", "metric_system_name": "hits", "delta": 2 } + ], + "policy_chain": [ + { + "name": "apicast.policy.apicast" + }, + { + "name": "apicast.policy.camel", + "configuration": { + "http_proxy": "$TEST_NGINX_HTTP_PROXY" + } + } + ] + } + } + ] +} +--- backend + location /transactions/authrep.xml { + content_by_lua_block { + ngx.exit(ngx.OK) + } + } +--- upstream + server_name test-upstream.lvh.me; + location / { + access_by_lua_block { + assert = require('luassert') + local content_length = ngx.req.get_headers()["Content-Length"] + local encoding = ngx.req.get_headers()["Transfer-Encoding"] + assert.equal('12', content_length) + assert.falsy(encoding) + } + echo_read_request_body; + echo $request_body; + } +--- more_headers +Transfer-Encoding: chunked +--- request eval +"POST /?user_key=value +7\r +hello, \r +5\r +world\r +0\r +\r +" +--- 
response_body +hello, world +--- error_code: 200 +--- error_log env +using proxy: $TEST_NGINX_HTTP_PROXY +--- no_error_log +[error] + + + +=== TEST 9: API backend using all_proxy with chunked request +--- configuration +{ + "services": [ + { + "id": 42, + "backend_version": 1, + "backend_authentication_type": "service_token", + "backend_authentication_value": "token-value", + "proxy": { + "api_backend": "http://test-upstream.lvh.me:$TEST_NGINX_SERVER_PORT/", + "proxy_rules": [ + { "pattern": "/", "http_method": "POST", "metric_system_name": "hits", "delta": 2 } + ], + "policy_chain": [ + { + "name": "apicast.policy.apicast" + }, + { + "name": "apicast.policy.http_proxy", + "configuration": { + "all_proxy": "$TEST_NGINX_HTTP_PROXY" + } + } + ] + } + } + ] +} +--- backend + location /transactions/authrep.xml { + content_by_lua_block { + local expected = "service_token=token-value&service_id=42&usage%5Bhits%5D=2&user_key=value" + require('luassert').same(ngx.decode_args(expected), ngx.req.get_uri_args(0)) + } + } +--- upstream + server_name test-upstream.lvh.me; + location / { + access_by_lua_block { + assert = require('luassert') + local content_length = ngx.req.get_headers()["Content-Length"] + local encoding = ngx.req.get_headers()["Transfer-Encoding"] + assert.equal('12', content_length) + assert.falsy(encoding) + } + echo_read_request_body; + echo $request_body; + } +--- more_headers +Transfer-Encoding: chunked +--- request eval +"POST /?user_key=value +7\r +hello, \r +5\r +world\r +0\r +\r +" +--- response_body +hello, world +--- error_code: 200 +--- error_log env +using proxy: $TEST_NGINX_HTTP_PROXY +--- no_error_log +[error] + + + +=== TEST 10: using HTTPS proxy for backend with chunked request +--- init eval +$Test::Nginx::Util::PROXY_SSL_PORT = Test::APIcast::get_random_port(); +$Test::Nginx::Util::ENDPOINT_SSL_PORT = Test::APIcast::get_random_port(); +--- configuration random_port env eval +</tmp/out.txt' or die $!; +print $out $s; +close $out; +$s +--- 
response_body eval +$ENV{"LARGE_BODY"} +--- error_code: 200 +--- error_log env +using proxy: $TEST_NGINX_HTTP_PROXY +--- no_error_log +[error] +--- grep_error_log eval +qr/a client request body is buffered to a temporary file/ +--- grep_error_log_out +a client request body is buffered to a temporary file + + + +=== TEST 12: all_proxy with request_unbuffered policy +--- configuration +{ + "services": [ + { + "backend_version": 1, + "backend_authentication_type": "service_token", + "backend_authentication_value": "token-value", + "proxy": { + "api_backend": "http://test-upstream.lvh.me:$TEST_NGINX_SERVER_PORT/", + "proxy_rules": [ + { "pattern": "/", "http_method": "POST", "metric_system_name": "hits", "delta": 2 } + ], + "policy_chain": [ + { + "name": "request_unbuffered" + }, + { + "name": "apicast.policy.apicast" + }, + { + "name": "apicast.policy.http_proxy", + "configuration": { + "all_proxy": "$TEST_NGINX_HTTP_PROXY" + } + } + ] + } + } + ] +} +--- backend +server_name test_backend.lvh.me; + location /transactions/authrep.xml { + content_by_lua_block { + ngx.exit(ngx.OK) + } + } +--- upstream +server_name test-upstream.lvh.me; + location /test { + echo_read_request_body; + echo_request_body; + } +--- request eval +"POST /test?user_key= \n" . 
$ENV{LARGE_BODY} +--- response_body eval chomp +$ENV{LARGE_BODY} +--- error_code: 200 +--- error_log env +using proxy: $TEST_NGINX_HTTP_PROXY +--- no_error_log +[error] +--- grep_error_log eval +qr/a client request body is buffered to a temporary file/ +--- grep_error_log_out +a client request body is buffered to a temporary file + + + +=== TEST 13: all_proxy with request_unbuffered policy + chunked request +--- configuration +{ + "services": [ + { + "id": 42, + "backend_version": 1, + "backend_authentication_type": "service_token", + "backend_authentication_value": "token-value", + "proxy": { + "api_backend": "http://test-upstream.lvh.me:$TEST_NGINX_SERVER_PORT/", + "proxy_rules": [ + { "pattern": "/", "http_method": "POST", "metric_system_name": "hits", "delta": 2 } + ], + "policy_chain": [ + { + "name": "request_unbuffered" + }, + { + "name": "apicast.policy.apicast" + }, + { + "name": "apicast.policy.http_proxy", + "configuration": { + "all_proxy": "$TEST_NGINX_HTTP_PROXY" + } + } + ] + } + } + ] +} +--- backend + location /transactions/authrep.xml { + content_by_lua_block { + ngx.exit(ngx.OK) + } + } +--- upstream +server_name test-upstream.lvh.me; + location / { + access_by_lua_block { + assert = require('luassert') + local content_length = ngx.req.get_headers()["Content-Length"] + local encoding = ngx.req.get_headers()["Transfer-Encoding"] + assert.equal('chunked', encoding) + assert.falsy(content_length) + } + echo_read_request_body; + echo_request_body; + } +--- more_headers +Transfer-Encoding: chunked +--- request eval +my $s = "POST /test?user_key=value +". +sprintf("%x\r\n", length $ENV{"LARGE_BODY"}). 
+$ENV{LARGE_BODY} +."\r +0\r +\r +"; +open my $out, '>/tmp/out.txt' or die $!; +print $out $s; +close $out; +$s +--- response_body eval +$ENV{"LARGE_BODY"} +--- error_code: 200 +--- error_log env +using proxy: $TEST_NGINX_HTTP_PROXY +--- no_error_log +[error] +--- grep_error_log eval +qr/a client request body is buffered to a temporary file/ +--- grep_error_log_out +a client request body is buffered to a temporary file + + + +=== TEST 14: https_proxy with request_unbuffered policy +--- configuration random_port env +{ + "services": [ + { + "backend_version": 1, + "proxy": { + "api_backend": "https://test-upstream.lvh.me:$TEST_NGINX_RANDOM_PORT", + "proxy_rules": [ + { "pattern": "/", "http_method": "POST", "metric_system_name": "hits", "delta": 2 } + ], + "policy_chain": [ + { + "name": "request_unbuffered" + }, + { + "name": "apicast.policy.apicast" + }, + { + "name": "apicast.policy.http_proxy", + "configuration": { + "https_proxy": "$TEST_NGINX_HTTPS_PROXY" + } + } + ] + } + } + ] +} +--- backend env + server_name test-backend.lvh.me; + listen $TEST_NGINX_RANDOM_PORT ssl; + ssl_certificate $TEST_NGINX_SERVER_ROOT/html/server.crt; + ssl_certificate_key $TEST_NGINX_SERVER_ROOT/html/server.key; + location /transactions/authrep.xml { + content_by_lua_block { + ngx.exit(ngx.OK) + } + } +--- upstream env +server_name test-upstream.lvh.me; +listen $TEST_NGINX_RANDOM_PORT ssl; +ssl_certificate $TEST_NGINX_SERVER_ROOT/html/server.crt; +ssl_certificate_key $TEST_NGINX_SERVER_ROOT/html/server.key; +location /test { + echo_read_request_body; + echo_request_body; +} +--- request eval +"POST /test?user_key= \n" . 
$ENV{LARGE_BODY} +--- response_body eval chomp +$ENV{LARGE_BODY} +--- error_code: 200 +--- error_log env +using proxy: $TEST_NGINX_HTTPS_PROXY +proxy request: CONNECT test-upstream.lvh.me:$TEST_NGINX_RANDOM_PORT HTTP/1.1 +--- no_error_log +[error] +--- grep_error_log eval +qr/a client request body is buffered to a temporary file/ +--- grep_error_log_out +a client request body is buffered to a temporary file +--- user_files fixture=tls.pl eval + + + +=== TEST 15: https_proxy with request_unbuffered policy +--- configuration random_port env +{ + "services": [ + { + "backend_version": 1, + "proxy": { + "api_backend": "https://test-upstream.lvh.me:$TEST_NGINX_RANDOM_PORT", + "proxy_rules": [ + { "pattern": "/", "http_method": "POST", "metric_system_name": "hits", "delta": 2 } + ], + "policy_chain": [ + { + "name": "request_unbuffered" + }, + { + "name": "apicast.policy.apicast" + }, + { + "name": "apicast.policy.http_proxy", + "configuration": { + "https_proxy": "$TEST_NGINX_HTTPS_PROXY" + } + } + ] + } + } + ] +} +--- backend env + server_name test-backend.lvh.me; + listen $TEST_NGINX_RANDOM_PORT ssl; + ssl_certificate $TEST_NGINX_SERVER_ROOT/html/server.crt; + ssl_certificate_key $TEST_NGINX_SERVER_ROOT/html/server.key; + location /transactions/authrep.xml { + content_by_lua_block { + ngx.exit(ngx.OK) + } + } +--- upstream env +server_name test-upstream.lvh.me; +listen $TEST_NGINX_RANDOM_PORT ssl; +ssl_certificate $TEST_NGINX_SERVER_ROOT/html/server.crt; +ssl_certificate_key $TEST_NGINX_SERVER_ROOT/html/server.key; +location /test { + access_by_lua_block { + assert = require('luassert') + local content_length = ngx.req.get_headers()["Content-Length"] + local encoding = ngx.req.get_headers()["Transfer-Encoding"] + assert.equal('chunked', encoding) + assert.falsy(content_length) + } + echo_read_request_body; + echo_request_body; +} +--- more_headers +Transfer-Encoding: chunked +--- request eval +"POST /test?user_key=value +". 
+sprintf("%x\r\n", length $ENV{"LARGE_BODY"}). +$ENV{LARGE_BODY} +."\r +0\r +\r +" +--- response_body eval +$ENV{LARGE_BODY} +--- error_code: 200 +--- error_log env +using proxy: $TEST_NGINX_HTTPS_PROXY +proxy request: CONNECT test-upstream.lvh.me:$TEST_NGINX_RANDOM_PORT HTTP/1.1 +--- no_error_log +[error] +--- grep_error_log eval +qr/a client request body is buffered to a temporary file/ +--- grep_error_log_out +a client request body is buffered to a temporary file +--- user_files fixture=tls.pl eval diff --git a/t/http-proxy.t b/t/http-proxy.t index c19e0b9bc..de623f152 100644 --- a/t/http-proxy.t +++ b/t/http-proxy.t @@ -1320,3 +1320,766 @@ got header line: Proxy-Authorization: Basic Zm9vOmJhcg== --- no_error_log [error] --- user_files fixture=tls.pl eval + + + +=== TEST 25: Upstream API with HTTP POST chunked request, HTTP_PROXY and HTTP api_backend +--- env eval +( + "http_proxy" => $ENV{TEST_NGINX_HTTP_PROXY}, + 'BACKEND_ENDPOINT_OVERRIDE' => "http://test_backend.lvh.me:$ENV{TEST_NGINX_SERVER_PORT}" +) +--- configuration +{ + "services": [ + { + "backend_version": 1, + "proxy": { + "api_backend": "http://test-upstream.lvh.me:$TEST_NGINX_SERVER_PORT", + "proxy_rules": [ + { "pattern": "/test", "http_method": "POST", "metric_system_name": "hits", "delta": 2 } + ] + } + } + ] +} +--- backend +server_name test_backend.lvh.me; + location /transactions/authrep.xml { + content_by_lua_block { + ngx.exit(ngx.OK) + } + } +--- upstream +server_name test-upstream.lvh.me; + location /test { + access_by_lua_block { + assert = require('luassert') + local content_length = ngx.req.get_headers()["Content-Length"] + local encoding = ngx.req.get_headers()["Transfer-Encoding"] + assert.equal('12', content_length) + assert.falsy(encoding) + } + echo_read_request_body; + echo $request_body; + } +--- more_headers +Transfer-Encoding: chunked +--- request eval +"POST /test?user_key=value +7\r +hello, \r +5\r +world\r +0\r +\r +" +--- response_body +hello, world +--- error_code: 200 
+--- error_log env +proxy request: POST http://test-upstream.lvh.me:$TEST_NGINX_SERVER_PORT/test?user_key=value HTTP/1.1 +--- no_error_log +[error] + + + +=== TEST 26: Upstream API with HTTPS POST chunked request, HTTPS_PROXY and HTTPS api_backend +--- env random_port eval +( + 'https_proxy' => $ENV{TEST_NGINX_HTTPS_PROXY}, + 'BACKEND_ENDPOINT_OVERRIDE' => "https://test-backend.lvh.me:$ENV{TEST_NGINX_RANDOM_PORT}" +) +--- configuration random_port env +{ + "services": [ + { + "backend_version": 1, + "proxy": { + "api_backend": "https://test-upstream.lvh.me:$TEST_NGINX_RANDOM_PORT", + "proxy_rules": [ + { "pattern": "/test", "http_method": "POST", "metric_system_name": "hits", "delta": 2 } + ] + } + } + ] +} +--- backend env + server_name test-backend.lvh.me; + listen $TEST_NGINX_RANDOM_PORT ssl; + ssl_certificate $TEST_NGINX_SERVER_ROOT/html/server.crt; + ssl_certificate_key $TEST_NGINX_SERVER_ROOT/html/server.key; + location /transactions/authrep.xml { + content_by_lua_block { + ngx.exit(ngx.OK) + } + } +--- upstream env +server_name test-upstream.lvh.me; +listen $TEST_NGINX_RANDOM_PORT ssl; +ssl_certificate $TEST_NGINX_SERVER_ROOT/html/server.crt; +ssl_certificate_key $TEST_NGINX_SERVER_ROOT/html/server.key; +location /test { + access_by_lua_block { + assert = require('luassert') + local content_length = ngx.req.get_headers()["Content-Length"] + local encoding = ngx.req.get_headers()["Transfer-Encoding"] + assert.equal('12', content_length) + assert.falsy(encoding) + } + echo_read_request_body; + echo $request_body; +} +--- more_headers +Transfer-Encoding: chunked +--- request eval +"POST /test?user_key=value +7\r +hello, \r +5\r +world\r +0\r +\r +" +--- response_body +hello, world +--- error_code: 200 +--- error_log env +proxy request: CONNECT test-upstream.lvh.me:$TEST_NGINX_RANDOM_PORT HTTP/1.1 +--- no_error_log +[error] +--- user_files fixture=tls.pl eval + + + +=== TEST 27: Upstream Policy connection uses proxy with POST chunked request +--- env random_port 
eval +("http_proxy" => $ENV{TEST_NGINX_HTTP_PROXY}) +--- configuration +{ + "services": [ + { + "proxy": { + "policy_chain": [ + { "name": "apicast.policy.upstream", + "configuration": + { + "rules": [ { "regex": "/test", "url": "http://test-upstream.lvh.me:$TEST_NGINX_SERVER_PORT" } ] + } + } + ] + } + } + ] +} +--- upstream +server_name test-upstream.lvh.me; + location /test { + access_by_lua_block { + assert = require('luassert') + local content_length = ngx.req.get_headers()["Content-Length"] + local encoding = ngx.req.get_headers()["Transfer-Encoding"] + assert.equal('12', content_length) + assert.falsy(encoding) + } + echo_read_request_body; + echo $request_body; + } +--- more_headers +Transfer-Encoding: chunked +--- request eval +"POST /test?user_key=value +7\r +hello, \r +5\r +world\r +0\r +\r +" +--- response_body +hello, world +--- error_code: 200 +--- error_log env +proxy request: POST http://test-upstream.lvh.me:$TEST_NGINX_SERVER_PORT/test?user_key=value HTTP/1.1 +--- no_error_log +[error] + + + +=== TEST 28: Upstream Policy connection uses proxy for https with POST chunked request +--- env eval +("https_proxy" => $ENV{TEST_NGINX_HTTPS_PROXY}) +--- configuration random_port env +{ + "services": [ + { + "proxy": { + "policy_chain": [ + { "name": "apicast.policy.upstream", + "configuration": + { + "rules": [ { "regex": "/test", "url": "https://test-upstream.lvh.me:$TEST_NGINX_RANDOM_PORT" } ] + } + } + ] + } + } + ] +} +--- upstream env +server_name test-upstream.lvh.me; +listen $TEST_NGINX_RANDOM_PORT ssl; +ssl_certificate $TEST_NGINX_SERVER_ROOT/html/server.crt; +ssl_certificate_key $TEST_NGINX_SERVER_ROOT/html/server.key; +location /test { + access_by_lua_block { + assert = require('luassert') + local content_length = ngx.req.get_headers()["Content-Length"] + local encoding = ngx.req.get_headers()["Transfer-Encoding"] + assert.equal('12', content_length) + assert.falsy(encoding) + } + echo_read_request_body; + echo $request_body; +} +--- more_headers 
+Transfer-Encoding: chunked +--- request eval +"POST /test?user_key=value +7\r +hello, \r +5\r +world\r +0\r +\r +" +--- response_body +hello, world +--- error_code: 200 +--- error_log env +proxy request: CONNECT test-upstream.lvh.me:$TEST_NGINX_RANDOM_PORT HTTP/1.1 +--- no_error_log +[error] +--- user_files fixture=tls.pl eval + + + +=== TEST 29: Upstream policy with HTTPS POST chunked request, HTTPS_PROXY and HTTPS backend +--- env random_port eval +( + 'https_proxy' => $ENV{TEST_NGINX_HTTPS_PROXY}, + 'BACKEND_ENDPOINT_OVERRIDE' => "https://test-backend.lvh.me:$ENV{TEST_NGINX_RANDOM_PORT}" +) +--- configuration random_port env +{ + "services": [ + { + "backend_version": 1, + "proxy": { + "api_backend": "https://test-upstream.lvh.me:$TEST_NGINX_RANDOM_PORT", + "proxy_rules": [ + { "pattern": "/test", "http_method": "POST", "metric_system_name": "hits", "delta": 2 } + ], + "policy_chain": [ + { "name": "apicast.policy.upstream", + "configuration": + { + "rules": [ { "regex": "/test", "url": "https://test-upstream.lvh.me:$TEST_NGINX_RANDOM_PORT" } ] + } + }, + { + "name": "apicast.policy.apicast" + } + ] + } + } + ] +} +--- backend env + server_name test-backend.lvh.me; + listen $TEST_NGINX_RANDOM_PORT ssl; + ssl_certificate $TEST_NGINX_SERVER_ROOT/html/server.crt; + ssl_certificate_key $TEST_NGINX_SERVER_ROOT/html/server.key; + location /transactions/authrep.xml { + content_by_lua_block { + ngx.exit(ngx.OK) + } + } +--- upstream env +server_name test-upstream.lvh.me; +listen $TEST_NGINX_RANDOM_PORT ssl; +ssl_certificate $TEST_NGINX_SERVER_ROOT/html/server.crt; +ssl_certificate_key $TEST_NGINX_SERVER_ROOT/html/server.key; +location /test { + access_by_lua_block { + assert = require('luassert') + local content_length = ngx.req.get_headers()["Content-Length"] + local encoding = ngx.req.get_headers()["Transfer-Encoding"] + assert.equal('12', content_length) + assert.falsy(encoding) + } + echo_read_request_body; + echo $request_body; +} +--- more_headers 
+Transfer-Encoding: chunked +--- request eval +"POST /test?user_key=value +7\r +hello, \r +5\r +world\r +0\r +\r +" +--- response_body +hello, world +--- error_code: 200 +--- error_log env +proxy request: CONNECT test-upstream.lvh.me:$TEST_NGINX_RANDOM_PORT HTTP/1.1 +--- no_error_log +[error] +--- user_files fixture=tls.pl eval + + + +=== TEST 30: Upstream policy with HTTPS POST chunked request, HTTPS_PROXY and HTTPS backend and lower +case header +--- env random_port eval +( + 'https_proxy' => $ENV{TEST_NGINX_HTTPS_PROXY}, + 'BACKEND_ENDPOINT_OVERRIDE' => "https://test-backend.lvh.me:$ENV{TEST_NGINX_RANDOM_PORT}" +) +--- configuration random_port env +{ + "services": [ + { + "backend_version": 1, + "proxy": { + "api_backend": "https://test-upstream.lvh.me:$TEST_NGINX_RANDOM_PORT", + "proxy_rules": [ + { "pattern": "/test", "http_method": "POST", "metric_system_name": "hits", "delta": 2 } + ], + "policy_chain": [ + { "name": "apicast.policy.upstream", + "configuration": + { + "rules": [ { "regex": "/test", "url": "https://test-upstream.lvh.me:$TEST_NGINX_RANDOM_PORT" } ] + } + }, + { + "name": "apicast.policy.apicast" + } + ] + } + } + ] +} +--- backend env + server_name test-backend.lvh.me; + listen $TEST_NGINX_RANDOM_PORT ssl; + ssl_certificate $TEST_NGINX_SERVER_ROOT/html/server.crt; + ssl_certificate_key $TEST_NGINX_SERVER_ROOT/html/server.key; + location /transactions/authrep.xml { + content_by_lua_block { + ngx.exit(ngx.OK) + } + } +--- upstream env +server_name test-upstream.lvh.me; +listen $TEST_NGINX_RANDOM_PORT ssl; +ssl_certificate $TEST_NGINX_SERVER_ROOT/html/server.crt; +ssl_certificate_key $TEST_NGINX_SERVER_ROOT/html/server.key; +location /test { + access_by_lua_block { + assert = require('luassert') + local content_length = ngx.req.get_headers()["Content-Length"] + local encoding = ngx.req.get_headers()["Transfer-Encoding"] + assert.equal('12', content_length) + assert.falsy(encoding) + } + echo_read_request_body; + echo $request_body; +} +--- 
more_headers +transfer-encoding: chunked +--- request eval +"POST /test?user_key=value +7\r +hello, \r +5\r +world\r +0\r +\r +" +--- response_body +hello, world +--- error_code: 200 +--- error_log env +proxy request: CONNECT test-upstream.lvh.me:$TEST_NGINX_RANDOM_PORT HTTP/1.1 +--- no_error_log +[error] +--- user_files fixture=tls.pl eval + + + +=== TEST 31: POST chunked request with random chunk size, HTTPS_PROXY and HTTPS api_backend +this will also test the case when the request body is big and saved to a temp file +--- env random_port eval +( + 'https_proxy' => $ENV{TEST_NGINX_HTTPS_PROXY}, + 'BACKEND_ENDPOINT_OVERRIDE' => "https://test-backend.lvh.me:$ENV{TEST_NGINX_RANDOM_PORT}" +) +--- configuration random_port env +{ + "services": [ + { + "backend_version": 1, + "proxy": { + "api_backend": "https://test-upstream.lvh.me:$TEST_NGINX_RANDOM_PORT", + "proxy_rules": [ + { "pattern": "/", "http_method": "POST", "metric_system_name": "hits", "delta": 2 } + ] + } + } + ] +} +--- backend env + server_name test-backend.lvh.me; + listen $TEST_NGINX_RANDOM_PORT ssl; + ssl_certificate $TEST_NGINX_SERVER_ROOT/html/server.crt; + ssl_certificate_key $TEST_NGINX_SERVER_ROOT/html/server.key; + + location /transactions/authrep.xml { + content_by_lua_block { + ngx.exit(ngx.OK) + } + } +--- upstream env +server_name test-upstream.lvh.me; +listen $TEST_NGINX_RANDOM_PORT ssl; + +ssl_certificate $TEST_NGINX_SERVER_ROOT/html/server.crt; +ssl_certificate_key $TEST_NGINX_SERVER_ROOT/html/server.key; + +location / { + echo_read_request_body; + echo_request_body; +} + +--- more_headers +Transfer-Encoding: chunked +--- request eval +$::data = ''; +my $count = (int rand 32766) + 1; +for (my $i = 0; $i < $count; $i++) { + my $c = chr int rand 128; + $::data .= $c; +} +my $s = "POST /test?user_key=value +". +sprintf("%x\r\n", length $::data). 
+$::data +."\r +0\r +\r +"; +open my $out, '>/tmp/out.txt' or die $!; +print $out $s; +close $out; +$s +--- response_body eval +$::data +--- error_log env +proxy request: CONNECT test-upstream.lvh.me:$TEST_NGINX_RANDOM_PORT HTTP/1.1 +--- no_error_log +[error] +--- user_files fixture=tls.pl eval + + + +=== TEST 32: HTTP_PROXY with request_unbuffered policy, only upstream server will buffer the +request +--- env eval +( + "http_proxy" => $ENV{TEST_NGINX_HTTP_PROXY}, + 'BACKEND_ENDPOINT_OVERRIDE' => "http://test_backend.lvh.me:$ENV{TEST_NGINX_SERVER_PORT}" +) +--- configuration +{ + "services": [ + { + "backend_version": 1, + "proxy": { + "api_backend": "http://test-upstream.lvh.me:$TEST_NGINX_SERVER_PORT", + "proxy_rules": [ + { "pattern": "/test", "http_method": "POST", "metric_system_name": "hits", "delta": 2 } + ], + "policy_chain": [ + { + "name": "request_unbuffered" + }, + { + "name": "apicast", + "version": "builtin", + "configuration": {} + } + ] + } + } + ] +} +--- backend +server_name test_backend.lvh.me; + location /transactions/authrep.xml { + content_by_lua_block { + ngx.exit(ngx.OK) + } + } +--- upstream +server_name test-upstream.lvh.me; + location /test { + echo_read_request_body; + echo_request_body; + } +--- request eval +"POST /test?user_key= \n" . 
$ENV{LARGE_BODY} +--- response_body eval chomp +$ENV{LARGE_BODY} +--- error_code: 200 +--- error_log env +using proxy: $TEST_NGINX_HTTP_PROXY +--- no_error_log +[error] +--- grep_error_log eval +qr/a client request body is buffered to a temporary file/ +--- grep_error_log_out +a client request body is buffered to a temporary file + + + +=== TEST 33: HTTP_PROXY with request_unbuffered policy and chunked body, only upstream server will buffer the +request +--- env eval +( + "http_proxy" => $ENV{TEST_NGINX_HTTP_PROXY}, + 'BACKEND_ENDPOINT_OVERRIDE' => "http://test_backend.lvh.me:$ENV{TEST_NGINX_SERVER_PORT}" +) +--- configuration +{ + "services": [ + { + "backend_version": 1, + "proxy": { + "api_backend": "http://test-upstream.lvh.me:$TEST_NGINX_SERVER_PORT", + "proxy_rules": [ + { "pattern": "/test", "http_method": "POST", "metric_system_name": "hits", "delta": 2 } + ], + "policy_chain": [ + { + "name": "request_unbuffered" + }, + { + "name": "apicast", + "version": "builtin", + "configuration": {} + } + ] + } + } + ] +} +--- backend +server_name test_backend.lvh.me; + location /transactions/authrep.xml { + content_by_lua_block { + ngx.exit(ngx.OK) + } + } +--- upstream +server_name test-upstream.lvh.me; + location /test { + access_by_lua_block { + assert = require('luassert') + local content_length = ngx.req.get_headers()["Content-Length"] + local encoding = ngx.req.get_headers()["Transfer-Encoding"] + assert.equal('chunked', encoding) + assert.falsy(content_length) + } + echo_read_request_body; + echo_request_body; + } +--- more_headers +Transfer-Encoding: chunked +--- request eval +"POST /test?user_key=value +". +sprintf("%x\r\n", length $ENV{"LARGE_BODY"}). 
+$ENV{LARGE_BODY} +."\r +0\r +\r +" +--- response_body eval +$ENV{LARGE_BODY} +--- error_code: 200 +--- error_log env +using proxy: $TEST_NGINX_HTTP_PROXY +--- no_error_log +[error] +--- grep_error_log eval +qr/a client request body is buffered to a temporary file/ +--- grep_error_log_out +a client request body is buffered to a temporary file + + + +=== TEST 34: HTTPS_PROXY with request_unbuffered policy, only the upstream server will buffer +the request +--- env random_port eval +( + 'https_proxy' => $ENV{TEST_NGINX_HTTPS_PROXY}, + 'BACKEND_ENDPOINT_OVERRIDE' => "https://test-backend.lvh.me:$ENV{TEST_NGINX_RANDOM_PORT}" +) +--- configuration random_port env +{ + "services": [ + { + "backend_version": 1, + "proxy": { + "api_backend": "https://test-upstream.lvh.me:$TEST_NGINX_RANDOM_PORT", + "proxy_rules": [ + { "pattern": "/test", "http_method": "POST", "metric_system_name": "hits", "delta": 2 } + ], + "policy_chain": [ + { + "name": "request_unbuffered" + }, + { + "name": "apicast", + "version": "builtin", + "configuration": {} + } + ] + } + } + ] +} +--- backend env + server_name test-backend.lvh.me; + listen $TEST_NGINX_RANDOM_PORT ssl; + ssl_certificate $TEST_NGINX_SERVER_ROOT/html/server.crt; + ssl_certificate_key $TEST_NGINX_SERVER_ROOT/html/server.key; + location /transactions/authrep.xml { + content_by_lua_block { + ngx.exit(ngx.OK) + } + } +--- upstream env +server_name test-upstream.lvh.me; +listen $TEST_NGINX_RANDOM_PORT ssl; +ssl_certificate $TEST_NGINX_SERVER_ROOT/html/server.crt; +ssl_certificate_key $TEST_NGINX_SERVER_ROOT/html/server.key; +location /test { + echo_read_request_body; + echo_request_body; +} +--- request eval +"POST /test?user_key= \n" . 
$ENV{LARGE_BODY}
+--- response_body eval chomp
+$ENV{LARGE_BODY}
+--- error_code: 200
+--- error_log env
+using proxy: $TEST_NGINX_HTTPS_PROXY
+proxy request: CONNECT test-upstream.lvh.me:$TEST_NGINX_RANDOM_PORT HTTP/1.1
+--- no_error_log
+[error]
+--- grep_error_log eval
+qr/a client request body is buffered to a temporary file/
+--- grep_error_log_out
+a client request body is buffered to a temporary file
+--- user_files fixture=tls.pl eval
+
+
+
+=== TEST 35: HTTPS_PROXY with request_unbuffered policy and chunked body, only the upstream server will buffer
+the request
+--- env random_port eval
+(
+  'https_proxy' => $ENV{TEST_NGINX_HTTPS_PROXY},
+  'BACKEND_ENDPOINT_OVERRIDE' => "https://test-backend.lvh.me:$ENV{TEST_NGINX_RANDOM_PORT}"
+)
+--- configuration random_port env
+{
+  "services": [
+    {
+      "backend_version": 1,
+      "proxy": {
+        "api_backend": "https://test-upstream.lvh.me:$TEST_NGINX_RANDOM_PORT",
+        "proxy_rules": [
+          { "pattern": "/test", "http_method": "POST", "metric_system_name": "hits", "delta": 2 }
+        ],
+        "policy_chain": [
+          {
+            "name": "request_unbuffered"
+          },
+          {
+            "name": "apicast",
+            "version": "builtin",
+            "configuration": {}
+          }
+        ]
+      }
+    }
+  ]
+}
+--- backend env
+  server_name test-backend.lvh.me;
+  listen $TEST_NGINX_RANDOM_PORT ssl;
+  ssl_certificate $TEST_NGINX_SERVER_ROOT/html/server.crt;
+  ssl_certificate_key $TEST_NGINX_SERVER_ROOT/html/server.key;
+  location /transactions/authrep.xml {
+    content_by_lua_block {
+      ngx.exit(ngx.OK)
+    }
+  }
+--- upstream env
+server_name test-upstream.lvh.me;
+listen $TEST_NGINX_RANDOM_PORT ssl;
+ssl_certificate $TEST_NGINX_SERVER_ROOT/html/server.crt;
+ssl_certificate_key $TEST_NGINX_SERVER_ROOT/html/server.key;
+location /test {
+  access_by_lua_block {
+    assert = require('luassert')
+    local content_length = ngx.req.get_headers()["Content-Length"]
+    local encoding = ngx.req.get_headers()["Transfer-Encoding"]
+    assert.equal('chunked', encoding)
+    assert.falsy(content_length)
+  }
+  echo_read_request_body;
+  
echo_request_body; +} +--- more_headers +Transfer-Encoding: chunked +--- request eval +"POST /test?user_key=value +". +sprintf("%x\r\n", length $ENV{"LARGE_BODY"}). +$ENV{LARGE_BODY} +."\r +0\r +\r +" +--- response_body eval +$ENV{LARGE_BODY} +--- error_code: 200 +--- error_log env +using proxy: $TEST_NGINX_HTTPS_PROXY +proxy request: CONNECT test-upstream.lvh.me:$TEST_NGINX_RANDOM_PORT HTTP/1.1 +--- no_error_log +[error] +--- grep_error_log eval +qr/a client request body is buffered to a temporary file/ +--- grep_error_log_out +a client request body is buffered to a temporary file +--- user_files fixture=tls.pl eval