I have the following scenario:
- a live view which displays a large dataset
- an API client that receives notifications to update the dataset (controller)
I am trying the following approach:
# live view
@impl true
def mount(_params, _session, socket) do
  # Controller.start/1 returns a bare %Controller{} handle (not an
  # {:ok, con} tuple), so bind it directly — the original
  # `{:ok, con} = Controller.start(...)` would always raise MatchError.
  #
  # NOTE(review): mount/3 runs twice (disconnected render + connected
  # mount); consider guarding with `connected?(socket)` so the worker
  # GenServer is not started twice — TODO confirm desired behavior.
  con = Controller.start("...sesssion_data...")

  {:ok, assign(socket, controller: con)}
end
@impl true
def handle_event(
      "send_data",
      %{"data" => data},
      %{assigns: %{controller: con}} = socket
    ) do
  # Fire-and-forget: push_data/2 casts to the worker and returns the
  # handle flagged as `processing: true`; the result arrives later via
  # handle_info/2 as a {:controller, msg} message.
  con = Controller.push_data(con, data)

  # Original had an unbalanced `)` here (syntax error) — removed.
  {:noreply, assign(socket, controller: con)}
end
@impl true
def handle_info(
      {:controller, msg},
      %{assigns: %{controller: con}} = socket
    ) do
  # Fold the worker's message into the handle kept in assigns; the
  # large dataset itself lives (and is prepended to) in this LiveView
  # process, so no big term is copied between processes.
  con = Controller.handle_message(con, msg)

  # Original had a stray no-op `socket` expression and an unbalanced
  # `)` here (syntax error) — both removed.
  {:noreply, assign(socket, controller: con)}
end
defmodule Controller do
  @moduledoc """
  Handle + worker pair for processing data chunks off the caller's process.

  A background GenServer does the (slow) processing, while the accumulated
  `super_large_dataset` stays inside the *caller's* state as part of the
  `%Controller{}` handle — so the large term is never copied between
  processes; only small chunks and notification messages cross the boundary.
  """
  use GenServer

  # pid:                 the worker GenServer (linked to the caller)
  # processing:          true while a chunk is in flight
  # super_large_dataset: chunks accumulated in the parent process (newest first)
  defstruct pid: nil, processing: false, super_large_dataset: []

  ## API used by the calling (parent) process

  @doc """
  Starts a linked worker for `id` and returns a `%Controller{}` handle.

  The worker sends `{:controller, msg}` messages back to the process that
  called `start/1`; feed those to `handle_message/2`.
  """
  def start(id) do
    {:ok, pid} = GenServer.start_link(__MODULE__, {self(), id})
    %__MODULE__{pid: pid}
  end

  @doc """
  Asynchronously hands `data` to the worker and flags the handle as busy.
  """
  def push_data(%__MODULE__{pid: pid} = con, data) do
    GenServer.cast(pid, {:push_data, data})
    %{con | processing: true}
  end

  @doc """
  Folds a worker message into the handle.

  Processed chunks are prepended (O(1)) to `super_large_dataset`;
  any other message leaves the handle unchanged.
  """
  def handle_message(con, {:data_processed, data}) do
    %{con | processing: false, super_large_dataset: [data | con.super_large_dataset]}
  end

  # `_msg` silences the unused-variable warning the original `msg` produced.
  def handle_message(con, _msg), do: con

  ## GenServer callbacks (worker process)

  @impl true
  def init({parent_pid, id}) do
    {:ok, %{parent_pid: parent_pid, id: id}}
  end

  @impl true
  def handle_cast({:push_data, data}, state) do
    # Emulate slow processing; real work would replace the sleep.
    Process.sleep(500)
    send(state.parent_pid, {:controller, {:data_processed, data}})
    {:noreply, state}
  end
end
The idea here is to keep the super_large_dataset
in the live view process to avoid copying it when data chunks are processed.
Is this pattern the way to go?