I’m back on my PC so I will make a better question.
This is my openAI module :
defmodule VetupAI.OpenAI do
@moduledoc """
Handles OpenAI API request and response.
"""
@endpoint "api.openai.com"
@doc"""
Returns full response from OpenAI stream given the input and options.
Takes the following options:
- model
- max_tokens
- temperature
- consume_func, function that processes streamed response
"""
@spec generate_completion(String.t(), list()) :: {:ok, list()} | {:error, atom()}
def generate_completion(_input, _pid, opts \\ [])
def generate_completion("", _pid, _opts), do: {:error, :missing_input}
def generate_completion(input, pid, _opts) do
body = %{
model: "gpt-3.5-turbo",
messages: [%{role: "user", content: input }],
max_tokens: 3000,
stream: true,
temperature: 0.2
} |> Jason.encode_to_iodata!()
url = url("/v1/chat/completions")
acc = {nil, [], []}
consume_func = fn
{:status, value}, {_, headers, body} -> {value, headers, body}
{:headers, value}, {status, headers, body} -> {status, headers ++ value, body}
{:data, value}, {status, headers, body} ->
if value == "data: [DONE]\n\n" do
{status, headers, body}
else
value =
value
|> String.replace_prefix("data: ", "")
|> String.replace_suffix("\n\n", "")
|> String.split("\n\ndata: ")
|> Enum.reduce("", fn data, acc ->
if is_nil(data) || data == "[DONE]" do
acc
else
value = Jason.decode!(data)
value = List.first(value["choices"], %{}) |> Map.get("delta", "")|> Map.get("content", "")
new_response = acc <> value
IO.inspect(new_response)
send(pid, {:new_response, new_response})
new_response
end
end)
{status, headers, [value | body]}
end
end
Finch.build(:post, url, headers(), body)
|> Finch.stream(VetupAI.Finch, acc, consume_func)
#|> Finch.stream(VetupAI.Finch, acc, consume_func, [{:receive_timeout, :infinity}])
|> handle_stream(:generate_completion)
end
And in my liveview :
def mount(_params, _session, socket) do
{:ok, socket
|> assign(:response, "")
|> assign(:loading, false)
|> assign(:prompt, "")
}
end
@impl true
def handle_params(%{"id" => id}, _, socket) do
{:noreply,
socket
|> assign(:page_title, page_title(socket.assigns.live_action))
|> assign(:case, Cases.get_case!(id))}
end
@impl true
@spec handle_event(<<_::56>>, map, map) :: {:noreply, map}
def handle_event("analyse", params, socket) do
part_number = case Map.has_key?(params, "part_number") do
true -> params["part_number"]
false -> ""
end
Process.send(self(), {:generate_completion, part_number}, [])
{:noreply,
socket
|> assign(:loading, true)
}
end
@impl true
@spec handle_info({:new_response, binary}, %{
:assigns => atom | %{:response => binary, optional(any) => any},
optional(any) => any
}) :: {:noreply, map}
def handle_info({:generate_completion, part_number}, socket) do
pid = self()
{case_prompt, prompt} = VetupAI.Prompt.generatePrompt(socket, part_number)
response = VetupAI.OpenAI.generate_completion("Count from 1 to 3", pid)
IO.puts("final answer")
IO.inspect(response)
{:noreply,
socket
|> assign(:loading, false)
}
end
@impl true
def handle_info({:new_response, value}, socket) do
IO.puts("new response in liveview")
IO.inspect(value)
{:noreply,
socket
|> assign(:response, socket.assigns.response <> value)
}
end
And I get :
""
"1"
","
" "
"2"
","
" "
"3"
"."
""
final answer
{:ok, ["1", ",", " ", "2", ",", " ", "3", ".", ""]}
new response in liveview
""
new response in liveview
"1"
new response in liveview
","
new response in liveview
" "
new response in liveview
"2"
new response in liveview
","
new response in liveview
" "
new response in liveview
"3"
new response in liveview
"."
new response in liveview
""
So I guess when I say :
Process.send(self(), {:generate_completion, part_number}, [])
then all the processes sent to this liveview are “stored” until this process is done (which is when it receives the final response and send the {noreply} response).
Is that what is happening? How could I modify the code to work?