Skip to content
Open
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
46 changes: 44 additions & 2 deletions lib/tesla/adapter/hackney.ex
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,10 @@ if Code.ensure_loaded?(:hackney) do
end

# Normalizes a request body into a form hackney accepts.
#
# iodata lists are flattened to a single binary; binaries, hackney
# stream references, and body-producing functions (chunked/streamed
# uploads) are passed through unchanged.
#
# NOTE(review): the span previously carried two overlapping clauses for
# the binary/reference case; the earlier one shadowed the newer clause
# that also admits functions. Resolved to the single, most general form.
defp format_body(data) when is_list(data), do: IO.iodata_to_binary(data)

defp format_body(data)
     when is_binary(data) or is_reference(data) or is_function(data),
     do: data

defp request(env, opts) do
request(
Expand All @@ -68,7 +71,12 @@ if Code.ensure_loaded?(:hackney) do
end

# Performs the actual hackney call.
#
# When the caller supplies `stream_to_pid: pid`, ownership of the hackney
# response is transferred to `pid` and the body is exposed lazily via
# `handle_stream/2`; otherwise the response is read eagerly via `handle/1`.
#
# `:stream_to_pid` is a Tesla-level option, so it is popped off before the
# remaining options are forwarded to hackney — previously it was passed
# through untouched, leaking an option hackney does not know about.
defp request(method, url, headers, body, opts) do
  {stream_to_pid, hackney_opts} = Keyword.pop(opts, :stream_to_pid)

  response = :hackney.request(method, url, headers, body || "", hackney_opts)

  if is_pid(stream_to_pid) do
    handle_stream(response, stream_to_pid)
  else
    handle(response)
  end
end

defp request_stream(method, url, headers, body, opts) do
Expand Down Expand Up @@ -106,6 +114,40 @@ if Code.ensure_loaded?(:hackney) do

# Fully-read response: already in Tesla's expected shape, pass through.
defp handle({:ok, status, headers, body}), do: {:ok, status, headers, body}

defp handle_stream({:ok, status, headers, ref}, pid) when is_reference(ref) do
:hackney.controlling_process(ref, pid)

body =
Stream.resource(
fn -> nil end,
fn _ ->
case :hackney.stream_body(ref) do
:done ->
{:halt, nil}

{:ok, data} ->
{[data], nil}

{:error, reason} ->
raise inspect(reason)
end
end,
fn _ -> :hackney.close(ref) end
)

{:ok, status, headers, body}
end

# Fallback: normalize any other raw hackney result via `handle/1`. If that
# yields a response whose body is still a hackney reference (e.g. an async
# response that resolved to one), re-dispatch to the streaming clause;
# every other result is returned untouched.
defp handle_stream(raw_response, pid) do
  handled = handle(raw_response)

  with {:ok, _status, _headers, ref} when is_reference(ref) <- handled do
    handle_stream(handled, pid)
  end
end

defp handle_async_response({ref, %{headers: headers, status: status}})
when not (is_nil(headers) or is_nil(status)) do
{:ok, status, headers, ref}
Expand Down
13 changes: 13 additions & 0 deletions test/tesla/adapter/hackney_test.exs
Original file line number Diff line number Diff line change
Expand Up @@ -62,4 +62,17 @@ defmodule Tesla.Adapter.HackneyTest do

assert {:error, :fake_error} = call(request)
end

test "get with `stream_to_pid: pid` option" do
request = %Env{
method: :get,
url: "#{@http}/ip"
}

assert {:ok, %Env{} = response} = call(request, stream_to_pid: self())
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

When passing pid explicitly, I'd expect to handle incoming message manually.
Is there anything stopping us from using stream_response: true instead?

Does the implementation handle multiple concurrent requests originating from the same process?

Copy link
Copy Markdown
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Hackney requires you to transfer ownership of the response to the desired PID, otherwise it will get GCed.

A workaround would be to capture self() during the request phase, and assume that will be the PID that consumes the stream.

Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

And what happens when the stream is consumed from a different process?

Copy link
Copy Markdown
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

From what I recall, Hackney shuts down the request after a short amount of time.

So for long running responses you get a partial reply.


assert response.status == 200
assert is_function(response.body)
assert is_bitstring(Enum.join(response.body))
end
end