OK, this should reproduce. I had to find another file on that server that was still returning 200 for me. This is one of the smaller payloads, too, at about 669 KB.
iex> url = "https://www2.census.gov/geo/tiger/TIGER2021/SLDL/tl_2021_50_sldl.zip"
iex> Req.get!(url, raw: true)
%Req.Response{
body: <<80, 75, 3, 4, 10, 0, 0, 0, 0, 0, 178, 181, 47, 83, 80, 60, 129, 14, 5,
0, 0, 0, 5, 0, 0, 0, 19, 0, 28, 0, 116, 108, 95, 50, 48, 50, 49, 95, 53, 48,
95, 115, 108, 100, 108, 46, 99, 112, 103, ...>>,
headers: [
{"x-frame-options", "SAMEORIGIN"},
{"x-content-type-options", "nosniff"},
{"last-modified", "Wed, 22 Sep 2021 19:58:21 GMT"},
{"accept-ranges", "bytes"},
{"content-length", "684760"},
{"vary", "Accept-Encoding"},
{"x-xss-protection", "1; mode=block"},
{"content-security-policy", "frame-ancestors 'self';"},
{"content-type", "application/zip"},
{"strict-transport-security", "max-age=31536000"},
{"cache-control", "private, max-age=172761"},
{"date", "Mon, 04 Jul 2022 14:13:45 GMT"},
{"connection", "keep-alive"},
{"set-cookie",
"TS01d1a586=01283c52a476381da9e86d878350dcabc68f6ca87923d686b7be476d407bb8afb678fe959fb5577f02211bd35ce00f4187f734ef9c; Path=/; Domain=.www2.census.gov"},
{"set-cookie",
"TS01d1a586=01283c52a496d659b2f32b5f9a08a459ae22df2a5b8309f9b9c5c439f8c7cf2649bd5eeb2c0523b73215578f15965efeaef5da6b01; Path=/; Domain=.www2.census.gov"},
{"set-cookie",
"TS01d1a586=01283c52a4dd1a41674ce8f8d9ed1cec027fe5712ad7460a60a0a01725a3de02d85e1b5c5a6d69fd94f46e78fd1e8b6d70bd807d0d; Path=/; Domain=.www2.census.gov"},
{"set-cookie",
"TS01d1a586=01283c52a4163f7ab13932973e1423b85fc08b84e66eaecb3c2867742726ef10b3e43bdf7d3c01899cef1c35c1fd32d0d2bd805fdf; Path=/; Domain=.www2.census.gov"},
{"set-cookie",
"TS01d1a586=01283c52a4cca094407ccd48149f13e2f002146b6d7757be8165acb9fa09c565748091e9f4e7f9e508d30fb999c24467a1d45b404f; Path=/; Domain=.www2.census.gov"},
{"set-cookie",
"TS01d1a586=01283c52a40b87d78adacbc04dcc5b74e13b4a3c6215f12d91334e4a87e63c5eb91be5219a8885a13d90e8469918de621ab68b8794; Path=/; Domain=.www2.census.gov"},
{"set-cookie",
"TS01d1a586=01283c52a4e77b55c32a8473da38addffafd2d9ebc73db6794227db5c8f12082a3b8075be9fe0badf7332575d54be33b9418518d64; Path=/; Domain=.www2.census.gov"},
{"set-cookie",
"TS01d1a586=01283c52a40d65ef2a6cae4d5768f4b9b8da5b19242e22d038f65a10d7fc3c8edb34e3118e0d0736ac325d52c293e57e34789c41b3; Path=/; Domain=.www2.census.gov"}
],
private: %{},
status: 200
}
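As a quick sanity check (just a sketch on my side, reusing the url bound above and the tuple-style header list shown in the output), the raw body size should line up with the content-length header, i.e. the ~669 KB mentioned at the top:

resp = Req.get!(url, raw: true)
# content-length is a plain {name, value} tuple in the header list shown above
{"content-length", len} = List.keyfind(resp.headers, "content-length", 0)
# expected to be true as long as the server still returns the full 200 body
byte_size(resp.body) == String.to_integer(len)

After that, repeating the request in a loop: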
# 50 is random, I have no idea what the limit might be (see the status-logging sketch below)
iex> Enum.each(1..50, fn _ ->
Req.get!(url, raw: true)
Process.sleep(2000)
end)
:ok
iex> Req.get!(url, raw: true)
%Req.Response{
body: "",
headers: [
{"content-type", "application/zip"},
{"last-modified", "Wed, 22 Sep 2021 19:58:21 GMT"},
{"cache-control", "private, max-age=172333"},
{"date", "Mon, 04 Jul 2022 14:20:53 GMT"},
{"connection", "keep-alive"}
],
private: %{},
status: 304
}
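Since 50 is arbitrary, a variant like the sketch below (same url, not something I actually ran) would log the status of every request, which should show roughly how many requests it takes before the server starts answering with 304 and an empty body:

Enum.each(1..50, fn i ->
  %Req.Response{status: status, body: body} = Req.get!(url, raw: true)
  # with raw: true the body is a plain binary, so byte_size/1 shows when it goes empty
  IO.puts("request #{i}: status=#{status}, body=#{byte_size(body)} bytes")
  Process.sleep(2000)
end)

For comparison, here is the same URL fetched with HTTPoison: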
iex(14)> HTTPoison.get!(url)
%HTTPoison.Response{
body: <<80, 75, 3, 4, 10, 0, 0, 0, 0, 0, 178, 181, 47, 83, 80, 60, 129, 14, 5,
0, 0, 0, 5, 0, 0, 0, 19, 0, 28, 0, 116, 108, 95, 50, 48, 50, 49, 95, 53, 48,
95, 115, 108, 100, 108, 46, 99, 112, 103, ...>>,
headers: [
{"X-Frame-Options", "SAMEORIGIN"},
{"X-Content-Type-Options", "nosniff"},
{"Cache-Control", "private"},
{"Last-Modified", "Wed, 22 Sep 2021 19:58:21 GMT"},
{"X-XSS-Protection", "1; mode=block"},
{"Content-Security-Policy", "frame-ancestors 'self';"},
{"Content-Type", "application/zip"},
{"Strict-Transport-Security", "max-age=31536000"},
{"Date", "Mon, 04 Jul 2022 14:21:31 GMT"},
{"Transfer-Encoding", "chunked"},
{"Connection", "keep-alive"},
{"Connection", "Transfer-Encoding"}
],
request: %HTTPoison.Request{
body: "",
headers: [],
method: :get,
options: [],
params: %{},
url: "https://www2.census.gov/geo/tiger/TIGER2021/SLDL/tl_2021_50_sldl.zip"
},
request_url: "https://www2.census.gov/geo/tiger/TIGER2021/SLDL/tl_2021_50_sldl.zip",
status_code: 200
}
I am also noticing a difference in the response headers. With Req (for both the 200 and the 304), "cache-control" comes back as:
{"cache-control", "private, max-age=172761"}
while with HTTPoison it is:
{"Cache-Control", "private"}
and the max-age value in Req's responses counts down in seconds on each request.
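To make that countdown easier to see, something like this (again just a sketch, assuming the tuple-style header list shown above) would print the cache-control value across a few consecutive Req requests:

Enum.each(1..3, fn _ ->
  {"cache-control", value} =
    Req.get!(url, raw: true).headers
    |> List.keyfind("cache-control", 0)

  # based on the responses above, max-age should drop by roughly the number of
  # seconds elapsed between requests
  IO.puts(value)
  Process.sleep(1000)
end)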