# lib/chai_web/live/chat_live.ex
defmodule ChaiWeb.ChatLive do
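  @moduledoc """
  Chat interface where replies, audio transcriptions, image captions, reactions
  and entity highlights are produced by asynchronous `Chai.AI` tasks.
  """
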
  use ChaiWeb, :live_view

  @impl true
  def mount(_params, _session, socket) do
    {:ok,
     socket
     |> assign(
       messages: [],
       message: "",
       history: nil,
       reply_task: nil,
       transcribe_task: nil,
       caption_task: nil
     )
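     # Both uploads are pushed automatically and consumed in handle_progress/3
     # as soon as they finish, so the hidden upload forms are never submitted.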
     |> allow_upload(:audio, accept: :any, progress: &handle_progress/3, auto_upload: true)
     |> allow_upload(:image,
       accept: ~w(.jpg .jpeg .png),
       progress: &handle_progress/3,
       auto_upload: true
     )}
  end

  @impl true
  def render(assigns) do
    ~H"""
    <div class="mt-20 h-[512px] flex flex-col justify-end border border-zinc-100 rounded-lg">
      <div id="messages" phx-hook="Messages" class="flex flex-col gap-2 p-3 overflow-y-auto">
        <div class="flex flex-col-reverse gap-2">
          <div
            :for={message <- @messages}
            class={["relative max-w-[80%]", if(message.user?, do: "self-end", else: "self-start")]}
          >
            <.message_content message={message} />
            <div
              :if={message.transcribed?}
              class="flex absolute top-1/2 -left-5 transform -translate-y-1/2 text-zinc-500"
            >
              <.icon name="hero-microphone-solid" class="w-4 h-4" />
            </div>
            <div
              :if={message.reaction}
              class="flex absolute bottom-0 right-0 transform translate-y-1/2 bg-white shadow-md rounded-full p-0.5 leading-none"
            >
              <%= message.reaction %>
            </div>
          </div>
        </div>
        <div :if={@reply_task} class="self-start px-4 py-1.5 rounded-3xl text-zinc-900 bg-zinc-100">
          <.typing />
        </div>
      </div>
      <form phx-submit="send_message" class="px-3 py-2 flex gap-2 items-center">
        <button
          type="button"
          id="microphone"
          phx-hook="Microphone"
          data-endianness={System.endianness()}
          class="flex p-2.5 rounded-full text-white bg-zinc-900 hover:bg-zinc-700 active:bg-red-400 group"
        >
          <.icon name="hero-microphone-solid" class="w-5 h-5 group-active:animate-pulse" />
        </button>
        <button
          type="button"
          phx-click={JS.dispatch("click", to: "##{@uploads.image.ref}")}
          class="flex p-2.5 rounded-full text-white bg-zinc-900 hover:bg-zinc-700"
        >
          <.icon name="hero-photo-solid" class="w-5 h-5" />
        </button>
        <input
          type="text"
          id="message"
          name="message"
          value={@message}
          autofocus
          autocomplete="off"
          class="w-full rounded-xl bg-zinc-100 border-none focus:ring-0"
        />
        <button type="submit" class="flex text-zinc-900 hover:text-zinc-700 active:text-zinc-500">
          <.icon name="hero-paper-airplane-solid" class="w-6 h-6" />
        </button>
      </form>
      <form phx-change="noop" phx-submit="noop" class="hidden">
        <.live_file_input upload={@uploads.audio} />
      </form>
      <form phx-change="noop" phx-submit="noop" class="hidden">
        <.live_file_input upload={@uploads.image} />
      </form>
    </div>
    """
  end
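
  # Animated three-dot indicator shown while a reply task is in flight.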
  defp typing(assigns) do
    ~H"""
    <div class="relative h-6 flex gap-1 items-center">
      <div class="h-2 w-2 bg-current rounded-full opacity-0 animate-[loading-fade_1s_infinite] " />
      <div class="h-2 w-2 bg-current rounded-full opacity-0 animate-[loading-fade_1s_infinite_0.2s] " />
      <div class="h-2 w-2 bg-current rounded-full opacity-0 animate-[loading-fade_1s_infinite_0.4s] " />
    </div>
    """
  end
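
  # Image messages render the uploaded file; text messages render the text with
  # recognized entities highlighted.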
  defp message_content(assigns) when assigns.message.image != nil do
    ~H"""
    <img src={~p"/uploads/#{@message.image}"} class="max-w-[300px] max-h-[200px] rounded-xl" />
    """
  end

  defp message_content(assigns) do
    ~H"""
    <div class={[
      "px-4 py-1.5 rounded-3xl",
      if(@message.user?, do: "text-white bg-blue-500", else: "text-zinc-900 bg-zinc-100")
    ]}>
      <span :for={{text, label} <- Chai.Utils.labelled_chunks(@message.text, @message.entities)}>
        <%= if label do %>
          <span class="inline-flex items-center gap-0.5">
            <span class="font-bold"><%= text %></span>
            <.icon :if={icon = label_to_icon(label)} name={icon} class="w-4 h-4" />
          </span>
        <% else %>
          <%= text %>
        <% end %>
      </span>
    </div>
    """
  end
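
  # Heroicon shown next to a recognized entity. MISC entities are highlighted
  # without an icon.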
defp label_to_icon("LOC"), do: "hero-map-pin-solid"
defp label_to_icon("PER"), do: "hero-user-solid"
defp label_to_icon("ORG"), do: "hero-building-office-solid"
defp label_to_icon("MISC"), do: nil
  defp handle_progress(:audio, entry, socket) when entry.done? do
    binary =
      consume_uploaded_entry(socket, entry, fn %{path: path} ->
        {:ok, File.read!(path)}
      end)

    # We always pre-process audio on the client into a single channel
    audio = Nx.from_binary(binary, :f32)

    {:noreply, request_transcription(socket, audio)}
  end
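
  # Called by the :image upload once the entry has fully arrived. The file is
  # copied into the uploads directory so it can be rendered back in the chat.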
  defp handle_progress(:image, entry, socket) when entry.done? do
    filename =
      consume_uploaded_entry(socket, entry, fn %{path: path} ->
        filename = Path.basename(path)
        File.cp!(path, Chai.upload_path(filename))
        {:ok, filename}
      end)

    {:noreply, request_caption(socket, filename)}
  end

  defp handle_progress(_name, _entry, socket), do: {:noreply, socket}

  @impl true
  def handle_event("send_message", %{"message" => text}, socket) do
    {:noreply,
     socket
     |> insert_message(text, user?: true)
     |> request_reply(text)
     |> assign(message: "")}
  end

  def handle_event("noop", %{}, socket) do
    # We need phx-change and phx-submit on the form for live uploads,
    # but we process the upload immediately in handle_progress/3
    {:noreply, socket}
  end
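
  # Results from Task.async arrive as {ref, result} messages. The guards below
  # match the ref stored in assigns, so anything stale falls through to the
  # catch-all clause at the bottom.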
  @impl true
  def handle_info({ref, {:reply, {text, history}}}, socket)
      when socket.assigns.reply_task.ref == ref do
    {:noreply,
     socket
     |> insert_message(text)
     |> assign(history: history, reply_task: nil)}
  end

  def handle_info({ref, {:transcription, text}}, socket)
      when socket.assigns.transcribe_task.ref == ref do
    {:noreply,
     socket
     |> insert_message(text, user?: true, transcribed?: true)
     |> request_reply(text)
     |> assign(transcribe_task: nil)}
  end

  def handle_info({ref, {:caption, filename, text}}, socket)
      when socket.assigns.caption_task.ref == ref do
    text = "look, an image of " <> text

    {:noreply,
     socket
     |> insert_message(text, user?: true, image: filename)
     |> request_reply(text)
     |> assign(caption_task: nil)}
  end
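
  # Reaction and entity tasks are fire-and-forget, so their results are matched
  # by message id rather than by task ref.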
  def handle_info({_ref, {:reaction, message_id, reaction}}, socket) when reaction != nil do
    {:noreply, update_message(socket, message_id, &%{&1 | reaction: reaction})}
  end

  def handle_info({_ref, {:entities, message_id, entities}}, socket) when entities != [] do
    {:noreply, update_message(socket, message_id, &%{&1 | entities: entities})}
  end
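
  # Ignores anything else: task :DOWN messages, stale task results, nil
  # reactions and empty entity lists.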
  def handle_info(_message, socket), do: {:noreply, socket}
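
  # Prepends a message to the list and kicks off entity recognition (for text
  # messages) and reaction lookup (for user messages).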
  defp insert_message(socket, text, opts \\ []) do
    message = %{
      id: System.unique_integer(),
      text: text,
      user?: Keyword.get(opts, :user?, false),
      transcribed?: Keyword.get(opts, :transcribed?, false),
      image: Keyword.get(opts, :image),
      reaction: nil,
      entities: []
    }

    socket = update(socket, :messages, &[message | &1])

    socket =
      if message.image do
        socket
      else
        request_entities(socket, message.text, message.id)
      end

    if message.user? do
      request_reaction(socket, message.text, message.id)
    else
      socket
    end
  end

  defp update_message(socket, message_id, fun) do
    update(socket, :messages, fn messages ->
      Enum.map(messages, fn
        %{id: ^message_id} = message -> fun.(message)
        message -> message
      end)
    end)
  end
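
  # Each task below is stored in assigns so handle_info/2 can match on its ref
  # (and, for the reply task, so render/1 can show the typing indicator).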
  defp request_reply(socket, text) do
    history = socket.assigns.history
    task = Task.async(fn -> {:reply, Chai.AI.generate_reply(text, history)} end)
    assign(socket, reply_task: task)
  end

  defp request_transcription(socket, audio) do
    task = Task.async(fn -> {:transcription, Chai.AI.transcribe(audio)} end)
    assign(socket, transcribe_task: task)
  end

  defp request_caption(socket, filename) do
    task =
      Task.async(fn ->
        image = filename |> Chai.upload_path() |> StbImage.read_file!()
        {:caption, filename, Chai.AI.describe_image(image)}
      end)

    assign(socket, caption_task: task)
  end
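
  # Fire-and-forget tasks: no refs are stored, the results carry the message id
  # and are picked up by the handle_info/2 clauses above.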
  defp request_reaction(socket, text, message_id) do
    Task.async(fn -> {:reaction, message_id, Chai.AI.get_reaction(text)} end)
    socket
  end

  defp request_entities(socket, text, message_id) do
    Task.async(fn -> {:entities, message_id, Chai.AI.get_entities(text)} end)
    socket
  end
end