diff --git a/susconecta/app/(main-routes)/pacientes/layout.tsx b/susconecta/app/(main-routes)/pacientes/layout.tsx new file mode 100644 index 0000000..4e9bd5c --- /dev/null +++ b/susconecta/app/(main-routes)/pacientes/layout.tsx @@ -0,0 +1,11 @@ +import type { ReactNode } from "react"; +import { ChatWidget } from "@/components/features/pacientes/chat-widget"; + +export default function PacientesLayout({ children }: { children: ReactNode }) { + return ( + <> + {children} + + + ); +} diff --git a/susconecta/app/paciente/layout.tsx b/susconecta/app/paciente/layout.tsx new file mode 100644 index 0000000..1ff2978 --- /dev/null +++ b/susconecta/app/paciente/layout.tsx @@ -0,0 +1,11 @@ +import type { ReactNode } from "react"; +import { ChatWidget } from "@/components/features/pacientes/chat-widget"; + +export default function PacienteLayout({ children }: { children: ReactNode }) { + return ( + <> + {children} + + + ); +} diff --git a/susconecta/components/ZoeIA/ai-assistant-interface.tsx b/susconecta/components/ZoeIA/ai-assistant-interface.tsx new file mode 100644 index 0000000..9a064ed --- /dev/null +++ b/susconecta/components/ZoeIA/ai-assistant-interface.tsx @@ -0,0 +1,685 @@ +"use client"; + +import { useCallback, useEffect, useMemo, useRef, useState } from "react"; +import { motion } from "framer-motion"; +import { Button } from "@/components/ui/button"; +import { Input } from "@/components/ui/input"; +import { SimpleThemeToggle } from "@/components/ui/simple-theme-toggle"; +import { Clock, Info, Lock, MessageCircle, Plus, Upload } from "lucide-react"; + +const API_ENDPOINT = "https://n8n.jonasbomfim.store/webhook/cd7d10e6-bcfc-4f3a-b649-351d12b714f1"; +const FALLBACK_RESPONSE = "Tive um problema para responder agora. Tente novamente em alguns instantes."; + +export interface ChatMessage { + id: string; + sender: "user" | "assistant"; + content: string; + createdAt: string; +} + +export interface ChatSession { + id: string; + startedAt: string; + updatedAt: string; + topic: string; + messages: ChatMessage[]; +} + +interface AIAssistantInterfaceProps { + onOpenDocuments?: () => void; + onOpenChat?: () => void; + history?: ChatSession[]; + onAddHistory?: (session: ChatSession) => void; + onClearHistory?: () => void; +} + +export function AIAssistantInterface({ + onOpenDocuments, + onOpenChat, + history: externalHistory, + onAddHistory, + onClearHistory, +}: AIAssistantInterfaceProps) { + const [question, setQuestion] = useState(""); + const [internalHistory, setInternalHistory] = useState(externalHistory ?? []); + const [activeSessionId, setActiveSessionId] = useState(null); + const [manualSelection, setManualSelection] = useState(false); + const [historyPanelOpen, setHistoryPanelOpen] = useState(false); + const messageListRef = useRef(null); + const history = internalHistory; + const historyRef = useRef(history); + const baseGreeting = "Olá, eu sou Zoe. 
Como posso ajudar hoje?"; + const greetingWords = useMemo(() => baseGreeting.split(" "), [baseGreeting]); + const [typedGreeting, setTypedGreeting] = useState(""); + const [typedIndex, setTypedIndex] = useState(0); + const [isTypingGreeting, setIsTypingGreeting] = useState(true); + + const [gradientGreeting, plainGreeting] = useMemo(() => { + if (!typedGreeting) return ["", ""] as const; + const separatorIndex = typedGreeting.indexOf("Como"); + if (separatorIndex === -1) { + return [typedGreeting, ""] as const; + } + const gradientPart = typedGreeting.slice(0, separatorIndex).trimEnd(); + const plainPart = typedGreeting.slice(separatorIndex).trimStart(); + return [gradientPart, plainPart] as const; + }, [typedGreeting]); + + useEffect(() => { + if (externalHistory) { + setInternalHistory(externalHistory); + } + }, [externalHistory]); + + useEffect(() => { + historyRef.current = history; + }, [history]); + + const activeSession = useMemo( + () => history.find((session) => session.id === activeSessionId) ?? null, + [history, activeSessionId] + ); + + const activeMessages = activeSession?.messages ?? []; + + const formatDateTime = useCallback( + (value: string) => + new Date(value).toLocaleString("pt-BR", { + day: "2-digit", + month: "2-digit", + hour: "2-digit", + minute: "2-digit", + }), + [] + ); + + const formatTime = useCallback( + (value: string) => + new Date(value).toLocaleTimeString("pt-BR", { + hour: "2-digit", + minute: "2-digit", + }), + [] + ); + + useEffect(() => { + if (history.length === 0) { + setActiveSessionId(null); + setManualSelection(false); + return; + } + + if (!activeSessionId && !manualSelection) { + setActiveSessionId(history[history.length - 1].id); + return; + } + + const exists = history.some((session) => session.id === activeSessionId); + if (!exists && !manualSelection) { + setActiveSessionId(history[history.length - 1].id); + } + }, [history, activeSessionId, manualSelection]); + + useEffect(() => { + if (!messageListRef.current) return; + messageListRef.current.scrollTo({ + top: messageListRef.current.scrollHeight, + behavior: "smooth", + }); + }, [activeMessages.length]); + + useEffect(() => { + setTypedGreeting(""); + setTypedIndex(0); + setIsTypingGreeting(true); + }, []); + + useEffect(() => { + if (!isTypingGreeting) return; + if (typedIndex >= greetingWords.length) { + setIsTypingGreeting(false); + return; + } + + const timeout = window.setTimeout(() => { + setTypedGreeting((previous) => + previous + ? `${previous} ${greetingWords[typedIndex]}` + : greetingWords[typedIndex] + ); + setTypedIndex((previous) => previous + 1); + }, 260); + + return () => window.clearTimeout(timeout); + }, [greetingWords, isTypingGreeting, typedIndex]); + + const handleDocuments = () => { + if (onOpenDocuments) { + onOpenDocuments(); + return; + } + console.log("[ZoeIA] Abrir fluxo de documentos"); + }; + + const handleOpenRealtimeChat = () => { + if (onOpenChat) { + onOpenChat(); + return; + } + console.log("[ZoeIA] Abrir chat em tempo real"); + }; + + const buildSessionTopic = useCallback((content: string) => { + const normalized = content.trim(); + if (!normalized) return "Atendimento"; + return normalized.length > 60 ? 
`${normalized.slice(0, 57)}…` : normalized; + }, []); + + const upsertSession = useCallback( + (session: ChatSession) => { + if (onAddHistory) { + onAddHistory(session); + } else { + setInternalHistory((previous) => { + const index = previous.findIndex((item) => item.id === session.id); + if (index >= 0) { + const updated = [...previous]; + updated[index] = session; + return updated; + } + return [...previous, session]; + }); + } + setActiveSessionId(session.id); + setManualSelection(false); + }, + [onAddHistory] + ); + + const sendMessageToAssistant = useCallback( + async (prompt: string, baseSession: ChatSession) => { + const sessionId = baseSession.id; + + const appendAssistantMessage = (content: string) => { + const createdAt = new Date().toISOString(); + const latestSession = + historyRef.current.find((session) => session.id === sessionId) ?? baseSession; + const assistantMessage: ChatMessage = { + id: `msg-assistant-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`, + sender: "assistant", + content, + createdAt, + }; + + const updatedSession: ChatSession = { + ...latestSession, + updatedAt: assistantMessage.createdAt, + messages: [...latestSession.messages, assistantMessage], + }; + + upsertSession(updatedSession); + }; + + try { + const response = await fetch(API_ENDPOINT, { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify({ message: prompt }), + }); + + if (!response.ok) { + throw new Error(`HTTP ${response.status}`); + } + + const rawPayload = await response.text(); + let replyText = ""; + + if (rawPayload.trim().length > 0) { + try { + const parsed = JSON.parse(rawPayload) as { reply?: unknown }; + replyText = typeof parsed.reply === "string" ? parsed.reply.trim() : ""; + } catch (parseError) { + console.error("[ZoeIA] Resposta JSON inválida", parseError, rawPayload); + } + } + + appendAssistantMessage(replyText || FALLBACK_RESPONSE); + } catch (error) { + console.error("[ZoeIA] Falha ao obter resposta da API", error); + appendAssistantMessage(FALLBACK_RESPONSE); + } + }, + [upsertSession] + ); + + const handleSendMessage = () => { + const trimmed = question.trim(); + if (!trimmed) return; + + const now = new Date(); + const userMessage: ChatMessage = { + id: `msg-user-${now.getTime()}`, + sender: "user", + content: trimmed, + createdAt: now.toISOString(), + }; + + const existingSession = history.find((session) => session.id === activeSessionId) ?? null; + + const sessionToPersist: ChatSession = existingSession + ? { + ...existingSession, + updatedAt: userMessage.createdAt, + topic: + existingSession.messages.length === 0 + ? 
buildSessionTopic(trimmed) + : existingSession.topic, + messages: [...existingSession.messages, userMessage], + } + : { + id: `session-${now.getTime()}`, + startedAt: now.toISOString(), + updatedAt: userMessage.createdAt, + topic: buildSessionTopic(trimmed), + messages: [userMessage], + }; + + upsertSession(sessionToPersist); + console.log("[ZoeIA] Mensagem registrada na Zoe", trimmed); + setQuestion(""); + setHistoryPanelOpen(false); + + void sendMessageToAssistant(trimmed, sessionToPersist); + }; + + const RealtimeTriggerButton = () => ( + + ); + + const handleClearHistory = () => { + if (onClearHistory) { + onClearHistory(); + } else { + setInternalHistory([]); + } + setActiveSessionId(null); + setManualSelection(false); + setQuestion(""); + setHistoryPanelOpen(false); + }; + + const handleSelectSession = useCallback((sessionId: string) => { + setManualSelection(true); + setActiveSessionId(sessionId); + setHistoryPanelOpen(false); + }, []); + + const startNewConversation = useCallback(() => { + setManualSelection(true); + setActiveSessionId(null); + setQuestion(""); + setHistoryPanelOpen(false); + }, []); + + return ( +
+
+ +
+
+
+ + Zoe + +
+
+ Assistente Clínica Zoe +
+ + {gradientGreeting && ( + + {gradientGreeting} + {plainGreeting ? " " : ""} + + )} + {plainGreeting && {plainGreeting}} + + +
+
+
+ {history.length > 0 && ( + + )} + {history.length > 0 && ( + + )} + + +
+
+ + Organizamos exames, orientações e tarefas assistenciais em um painel único para acelerar decisões clínicas. Utilize a Zoe para revisar resultados, registrar percepções e alinhar próximos passos com a equipe de saúde. + +
+
+ + + + Suas informações permanecem criptografadas e seguras com a equipe Zoe. + + + + +
+ + Informativo importante +
+
+ A Zoe acompanha toda a jornada clínica, consolida exames e registra orientações para que você tenha clareza em cada etapa do cuidado. + As respostas são informativas e complementam a avaliação de um profissional de saúde qualificado. +
+
+ Em situações de urgência, entre em contato com a equipe médica presencial ou acione os serviços de emergência da sua região. +
+
+ +
+ + +
+ +
+
+ Estamos reunindo o histórico da sua jornada. Enquanto isso, você pode anexar exames, enviar dúvidas ou solicitar contato com a equipe Zoe. +
+
+
+ + +
+
+
+ {activeSession ? "Atendimento em andamento" : "Inicie uma conversa"} +
+
+ {activeSession?.topic ?? "O primeiro contato orienta nossas recomendações clínicas"} +
+
+ {activeSession && ( + + Atualizado às {formatTime(activeSession.updatedAt)} + + )} +
+ +
+ {activeMessages.length > 0 ? ( + activeMessages.map((message) => ( +
+
+
+ {message.content}
+ + {formatTime(message.createdAt)} + +
+
+ )) + ) : ( +
+
+ Envie sua primeira mensagem
+
+ Compartilhe uma dúvida, exame ou orientação que deseja revisar. A Zoe registra o pedido e te retorna com um resumo organizado para a equipe de saúde. +
+
+ )} +
+
+ +
+
+ +
+ setQuestion(event.target.value)} + onKeyDown={(event) => { + if (event.key === "Enter") { + event.preventDefault(); + handleSendMessage(); + } + }} + placeholder="Pergunte qualquer coisa para a Zoe" + className="w-full flex-1 border-none bg-transparent text-sm shadow-none focus-visible:ring-0" + /> +
+ + +
+
+ +
+ + {historyPanelOpen && ( + + )} +
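// Hedged sketch, not part of the original diff: sendMessageToAssistant above spreads the
// webhook contract across several branches. Distilled, the exchange it relies on is a POST
// of { message } to API_ENDPOINT that answers with JSON { reply: string }; a non-2xx status,
// an empty body, invalid JSON or a missing reply all degrade to FALLBACK_RESPONSE.
async function askZoeExample(prompt: string): Promise<string> {
  const response = await fetch(API_ENDPOINT, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ message: prompt }),
  });
  if (!response.ok) return FALLBACK_RESPONSE; // the component logs the failure and falls back
  const raw = await response.text();
  if (!raw.trim()) return FALLBACK_RESPONSE; // empty payload
  try {
    const parsed = JSON.parse(raw) as { reply?: unknown };
    return typeof parsed.reply === "string" && parsed.reply.trim()
      ? parsed.reply.trim()
      : FALLBACK_RESPONSE;
  } catch {
    return FALLBACK_RESPONSE; // non-JSON payload
  }
}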
+ ); +} diff --git a/susconecta/components/ZoeIA/demo-voice-orb.tsx b/susconecta/components/ZoeIA/demo-voice-orb.tsx new file mode 100644 index 0000000..66087c0 --- /dev/null +++ b/susconecta/components/ZoeIA/demo-voice-orb.tsx @@ -0,0 +1,107 @@ +"use client"; + +import { useEffect, useState } from "react"; +import { VoicePoweredOrb } from "@/components/ZoeIA/voice-powered-orb"; +import { AIAssistantInterface } from "@/components/ZoeIA/ai-assistant-interface"; +import { Button } from "@/components/ui/button"; +import { ArrowLeft, Mic, MicOff } from "lucide-react"; + +export default function VoicePoweredOrbPage() { + const [isRecording, setIsRecording] = useState(false); + const [voiceDetected, setVoiceDetected] = useState(false); + const [assistantOpen, setAssistantOpen] = useState(false); + + const toggleRecording = () => { + setIsRecording(!isRecording); + }; + + useEffect(() => { + if (!assistantOpen) return; + + const original = document.body.style.overflow; + document.body.style.overflow = "hidden"; + + return () => { + document.body.style.overflow = original; + }; + }, [assistantOpen]); + + const openAssistant = () => setAssistantOpen(true); + const closeAssistant = () => setAssistantOpen(false); + + return ( +
+
+ {assistantOpen && ( +
+
+ +
+
+ +
+
+ )} + + {/* Orb */} +
{ + if (event.key === "Enter" || event.key === " ") { + event.preventDefault(); + openAssistant(); + } + }} + > + + {voiceDetected && ( + + Ouvindo… + + )} +
+ + {/* Control Button */} + + + {/* Simple Instructions */} +
+ Click the button to enable voice control. Speak to see the orb respond to your voice with subtle movements. +
+
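// Hedged sketch, not part of the original diff: the <VoicePoweredOrb /> element itself was
// stripped from the JSX above. Based on the VoicePoweredOrbProps interface declared later in
// this diff and the isRecording / setVoiceDetected state of this page, the orb is presumably
// mounted roughly like this (wrapper markup and the numeric values are assumptions):
function OrbSlotSketch({
  isRecording,
  onVoiceDetected,
}: {
  isRecording: boolean;
  onVoiceDetected: (detected: boolean) => void;
}) {
  return (
    <div className="h-64 w-64">
      <VoicePoweredOrb
        enableVoiceControl={isRecording}
        voiceSensitivity={1.5}
        maxRotationSpeed={1.2}
        maxHoverIntensity={0.8}
        onVoiceDetected={onVoiceDetected}
      />
    </div>
  );
}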
+ ); +} diff --git a/susconecta/components/ZoeIA/demo.tsx b/susconecta/components/ZoeIA/demo.tsx new file mode 100644 index 0000000..be4c889 --- /dev/null +++ b/susconecta/components/ZoeIA/demo.tsx @@ -0,0 +1,10 @@ +import * as React from "react" +import { AIAssistantInterface } from "@/components/ZoeIA/ai-assistant-interface" + +export function Demo() { + return ( +
+ +
+ ) +} diff --git a/susconecta/components/ZoeIA/voice-powered-orb.tsx b/susconecta/components/ZoeIA/voice-powered-orb.tsx new file mode 100644 index 0000000..ca28076 --- /dev/null +++ b/susconecta/components/ZoeIA/voice-powered-orb.tsx @@ -0,0 +1,493 @@ +"use client"; + +import React, { useEffect, useRef, FC } from "react"; +import { Renderer, Program, Mesh, Triangle, Vec3 } from "ogl"; +import { cn } from "@/lib/utils"; + +interface VoicePoweredOrbProps { + className?: string; + hue?: number; + enableVoiceControl?: boolean; + voiceSensitivity?: number; + maxRotationSpeed?: number; + maxHoverIntensity?: number; + onVoiceDetected?: (detected: boolean) => void; +} + +export const VoicePoweredOrb: FC = ({ + className, + hue = 0, + enableVoiceControl = true, + voiceSensitivity = 1.5, + maxRotationSpeed = 1.2, + maxHoverIntensity = 0.8, + onVoiceDetected, +}) => { + const ctnDom = useRef(null); + const audioContextRef = useRef(null); + const analyserRef = useRef(null); + const microphoneRef = useRef(null); + const dataArrayRef = useRef(null); + const animationFrameRef = useRef(); + const mediaStreamRef = useRef(null); + + const vert = /* glsl */ ` + precision highp float; + attribute vec2 position; + attribute vec2 uv; + varying vec2 vUv; + void main() { + vUv = uv; + gl_Position = vec4(position, 0.0, 1.0); + } + `; + + const frag = /* glsl */ ` + precision highp float; + + uniform float iTime; + uniform vec3 iResolution; + uniform float hue; + uniform float hover; + uniform float rot; + uniform float hoverIntensity; + varying vec2 vUv; + + vec3 rgb2yiq(vec3 c) { + float y = dot(c, vec3(0.299, 0.587, 0.114)); + float i = dot(c, vec3(0.596, -0.274, -0.322)); + float q = dot(c, vec3(0.211, -0.523, 0.312)); + return vec3(y, i, q); + } + + vec3 yiq2rgb(vec3 c) { + float r = c.x + 0.956 * c.y + 0.621 * c.z; + float g = c.x - 0.272 * c.y - 0.647 * c.z; + float b = c.x - 1.106 * c.y + 1.703 * c.z; + return vec3(r, g, b); + } + + vec3 adjustHue(vec3 color, float hueDeg) { + float hueRad = hueDeg * 3.14159265 / 180.0; + vec3 yiq = rgb2yiq(color); + float cosA = cos(hueRad); + float sinA = sin(hueRad); + float i = yiq.y * cosA - yiq.z * sinA; + float q = yiq.y * sinA + yiq.z * cosA; + yiq.y = i; + yiq.z = q; + return yiq2rgb(yiq); + } + + vec3 hash33(vec3 p3) { + p3 = fract(p3 * vec3(0.1031, 0.11369, 0.13787)); + p3 += dot(p3, p3.yxz + 19.19); + return -1.0 + 2.0 * fract(vec3( + p3.x + p3.y, + p3.x + p3.z, + p3.y + p3.z + ) * p3.zyx); + } + + float snoise3(vec3 p) { + const float K1 = 0.333333333; + const float K2 = 0.166666667; + vec3 i = floor(p + (p.x + p.y + p.z) * K1); + vec3 d0 = p - (i - (i.x + i.y + i.z) * K2); + vec3 e = step(vec3(0.0), d0 - d0.yzx); + vec3 i1 = e * (1.0 - e.zxy); + vec3 i2 = 1.0 - e.zxy * (1.0 - e); + vec3 d1 = d0 - (i1 - K2); + vec3 d2 = d0 - (i2 - K1); + vec3 d3 = d0 - 0.5; + vec4 h = max(0.6 - vec4( + dot(d0, d0), + dot(d1, d1), + dot(d2, d2), + dot(d3, d3) + ), 0.0); + vec4 n = h * h * h * h * vec4( + dot(d0, hash33(i)), + dot(d1, hash33(i + i1)), + dot(d2, hash33(i + i2)), + dot(d3, hash33(i + 1.0)) + ); + return dot(vec4(31.316), n); + } + + vec4 extractAlpha(vec3 colorIn) { + float a = max(max(colorIn.r, colorIn.g), colorIn.b); + return vec4(colorIn.rgb / (a + 1e-5), a); + } + + const vec3 baseColor1 = vec3(0.611765, 0.262745, 0.996078); + const vec3 baseColor2 = vec3(0.298039, 0.760784, 0.913725); + const vec3 baseColor3 = vec3(0.062745, 0.078431, 0.600000); + const float innerRadius = 0.6; + const float noiseScale = 0.65; + + float light1(float intensity, float 
attenuation, float dist) { + return intensity / (1.0 + dist * attenuation); + } + + float light2(float intensity, float attenuation, float dist) { + return intensity / (1.0 + dist * dist * attenuation); + } + + vec4 draw(vec2 uv) { + vec3 color1 = adjustHue(baseColor1, hue); + vec3 color2 = adjustHue(baseColor2, hue); + vec3 color3 = adjustHue(baseColor3, hue); + + float ang = atan(uv.y, uv.x); + float len = length(uv); + float invLen = len > 0.0 ? 1.0 / len : 0.0; + + float n0 = snoise3(vec3(uv * noiseScale, iTime * 0.5)) * 0.5 + 0.5; + float r0 = mix(mix(innerRadius, 1.0, 0.4), mix(innerRadius, 1.0, 0.6), n0); + float d0 = distance(uv, (r0 * invLen) * uv); + float v0 = light1(1.0, 10.0, d0); + v0 *= smoothstep(r0 * 1.05, r0, len); + float cl = cos(ang + iTime * 2.0) * 0.5 + 0.5; + + float a = iTime * -1.0; + vec2 pos = vec2(cos(a), sin(a)) * r0; + float d = distance(uv, pos); + float v1 = light2(1.5, 5.0, d); + v1 *= light1(1.0, 50.0, d0); + + float v2 = smoothstep(1.0, mix(innerRadius, 1.0, n0 * 0.5), len); + float v3 = smoothstep(innerRadius, mix(innerRadius, 1.0, 0.5), len); + + vec3 col = mix(color1, color2, cl); + col = mix(color3, col, v0); + col = (col + v1) * v2 * v3; + col = clamp(col, 0.0, 1.0); + + return extractAlpha(col); + } + + vec4 mainImage(vec2 fragCoord) { + vec2 center = iResolution.xy * 0.5; + float size = min(iResolution.x, iResolution.y); + vec2 uv = (fragCoord - center) / size * 2.0; + + float angle = rot; + float s = sin(angle); + float c = cos(angle); + uv = vec2(c * uv.x - s * uv.y, s * uv.x + c * uv.y); + + uv.x += hover * hoverIntensity * 0.1 * sin(uv.y * 10.0 + iTime); + uv.y += hover * hoverIntensity * 0.1 * sin(uv.x * 10.0 + iTime); + + return draw(uv); + } + + void main() { + vec2 fragCoord = vUv * iResolution.xy; + vec4 col = mainImage(fragCoord); + gl_FragColor = vec4(col.rgb * col.a, col.a); + } + `; + + // Voice analysis function + const analyzeAudio = () => { + if (!analyserRef.current || !dataArrayRef.current) return 0; + + // To avoid type incompatibilities between different ArrayBuffer-like types + // (Uint8Array vs Uint8Array), create a + // standard Uint8Array copy with an ArrayBuffer backing it. This satisfies + // the Web Audio API typing and is safe (small cost to copy). 
+ const src = dataArrayRef.current as Uint8Array; + const buffer = Uint8Array.from(src); + analyserRef.current.getByteFrequencyData(buffer); + + // Calculate RMS (Root Mean Square) for better voice detection + let sum = 0; + for (let i = 0; i < buffer.length; i++) { + const value = buffer[i] / 255; + sum += value * value; + } + const rms = Math.sqrt(sum / buffer.length); + + // Apply sensitivity and boost the signal + const level = Math.min(rms * voiceSensitivity * 3.0, 1); + + return level; + }; + + // Stop microphone and cleanup + const stopMicrophone = () => { + try { + // Stop all tracks in the media stream + if (mediaStreamRef.current) { + mediaStreamRef.current.getTracks().forEach(track => { + track.stop(); + }); + mediaStreamRef.current = null; + } + + // Disconnect and cleanup audio nodes + if (microphoneRef.current) { + microphoneRef.current.disconnect(); + microphoneRef.current = null; + } + + if (analyserRef.current) { + analyserRef.current.disconnect(); + analyserRef.current = null; + } + + // Close audio context + if (audioContextRef.current && audioContextRef.current.state !== 'closed') { + audioContextRef.current.close(); + audioContextRef.current = null; + } + + dataArrayRef.current = null; + console.log('Microphone stopped and cleaned up'); + } catch (error) { + console.warn('Error stopping microphone:', error); + } + }; + + // Initialize microphone access + const initMicrophone = async () => { + try { + // Clean up any existing microphone first + stopMicrophone(); + + const stream = await navigator.mediaDevices.getUserMedia({ + audio: { + echoCancellation: false, + noiseSuppression: false, + autoGainControl: false, + sampleRate: 44100, + }, + }); + + mediaStreamRef.current = stream; + + audioContextRef.current = new (window.AudioContext || (window as any).webkitAudioContext)(); + + if (audioContextRef.current.state === 'suspended') { + await audioContextRef.current.resume(); + } + + analyserRef.current = audioContextRef.current.createAnalyser(); + microphoneRef.current = audioContextRef.current.createMediaStreamSource(stream); + + analyserRef.current.fftSize = 512; + analyserRef.current.smoothingTimeConstant = 0.3; + analyserRef.current.minDecibels = -90; + analyserRef.current.maxDecibels = -10; + + microphoneRef.current.connect(analyserRef.current); + dataArrayRef.current = new Uint8Array(analyserRef.current.frequencyBinCount); + + console.log('Microphone initialized successfully'); + return true; + } catch (error) { + console.warn("Microphone access denied or not available:", error); + return false; + } + }; + + useEffect(() => { + const container = ctnDom.current; + if (!container) return; + + let rendererInstance: any = null; + let glContext: WebGLRenderingContext | WebGL2RenderingContext | null = null; + let rafId: number; + let program: any = null; + + try { + rendererInstance = new Renderer({ + alpha: true, + premultipliedAlpha: false, + antialias: true, + dpr: window.devicePixelRatio || 1 + }); + glContext = rendererInstance.gl as WebGLRenderingContext; + glContext.clearColor(0, 0, 0, 0); + glContext.enable((glContext as any).BLEND); + glContext.blendFunc((glContext as any).SRC_ALPHA, (glContext as any).ONE_MINUS_SRC_ALPHA); + + while (container.firstChild) { + container.removeChild(container.firstChild); + } + container.appendChild((glContext as any).canvas); + + const geometry = new Triangle(glContext as any); + program = new Program(glContext as any, { + vertex: vert, + fragment: frag, + uniforms: { + iTime: { value: 0 }, + iResolution: { + value: new Vec3( + 
(glContext as any).canvas.width, + (glContext as any).canvas.height, + (glContext as any).canvas.width / (glContext as any).canvas.height + ), + }, + hue: { value: hue }, + hover: { value: 0 }, + rot: { value: 0 }, + hoverIntensity: { value: 0 }, + }, + }); + + const mesh = new Mesh(glContext as any, { geometry, program }); + + const resize = () => { + if (!container || !rendererInstance || !glContext) return; + const dpr = window.devicePixelRatio || 1; + const width = container.clientWidth; + const height = container.clientHeight; + + if (width === 0 || height === 0) return; + + rendererInstance.setSize(width * dpr, height * dpr); + (glContext as any).canvas.style.width = width + "px"; + (glContext as any).canvas.style.height = height + "px"; + + if (program) { + program.uniforms.iResolution.value.set( + (glContext as any).canvas.width, + (glContext as any).canvas.height, + (glContext as any).canvas.width / (glContext as any).canvas.height + ); + } + }; + window.addEventListener("resize", resize); + resize(); + + let lastTime = 0; + let currentRot = 0; + let voiceLevel = 0; + const baseRotationSpeed = 0.3; + let isMicrophoneInitialized = false; + + if (enableVoiceControl) { + initMicrophone().then((success) => { + isMicrophoneInitialized = success; + }); + } else { + stopMicrophone(); + isMicrophoneInitialized = false; + } + + const update = (t: number) => { + rafId = requestAnimationFrame(update); + if (!program) return; + + const dt = (t - lastTime) * 0.001; + lastTime = t; + program.uniforms.iTime.value = t * 0.001; + program.uniforms.hue.value = hue; + + if (enableVoiceControl && isMicrophoneInitialized) { + voiceLevel = analyzeAudio(); + + if (onVoiceDetected) { + onVoiceDetected(voiceLevel > 0.1); + } + + const voiceRotationSpeed = baseRotationSpeed + (voiceLevel * maxRotationSpeed * 2.0); + + if (voiceLevel > 0.05) { + currentRot += dt * voiceRotationSpeed; + } + + program.uniforms.hover.value = Math.min(voiceLevel * 2.0, 1.0); + program.uniforms.hoverIntensity.value = Math.min(voiceLevel * maxHoverIntensity * 0.8, maxHoverIntensity); + } else { + program.uniforms.hover.value = 0; + program.uniforms.hoverIntensity.value = 0; + if (onVoiceDetected) { + onVoiceDetected(false); + } + } + + program.uniforms.rot.value = currentRot; + + if (rendererInstance && glContext) { + glContext.clear((glContext as any).COLOR_BUFFER_BIT | (glContext as any).DEPTH_BUFFER_BIT); + rendererInstance.render({ scene: mesh }); + } + }; + + rafId = requestAnimationFrame(update); + + return () => { + cancelAnimationFrame(rafId); + window.removeEventListener("resize", resize); + + try { + if (container && glContext && (glContext as any).canvas) { + if (container.contains((glContext as any).canvas)) { + container.removeChild((glContext as any).canvas); + } + } + } catch (error) { + console.warn("Canvas cleanup error:", error); + } + + stopMicrophone(); + + if (glContext) { + (glContext as any).getExtension("WEBGL_lose_context")?.loseContext(); + } + }; + + } catch (error) { + console.error("Error initializing Voice Powered Orb:", error); + if (container && container.firstChild) { + container.removeChild(container.firstChild); + } + return () => { + window.removeEventListener("resize", () => {}); + }; + } + }, [ + hue, + enableVoiceControl, + voiceSensitivity, + maxRotationSpeed, + maxHoverIntensity, + vert, + frag, + ]); + + useEffect(() => { + let isMounted = true; + + const handleMicrophoneState = async () => { + if (enableVoiceControl) { + const success = await initMicrophone(); + if (!isMounted) return; + 
} else { + stopMicrophone(); + } + }; + + handleMicrophoneState(); + + return () => { + isMounted = false; + }; + }, [enableVoiceControl]); + + return ( +
+
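// Hedged sketch, not part of the original diff: the element returned here was stripped.
// Given the ctnDom ref and the cn() import above, the component presumably renders a single
// container that the WebGL canvas is appended into, along the lines of
//   <div ref={ctnDom} className={cn("h-full w-full", className)} />
// where the sizing classes are an assumption.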
+ ); +}; diff --git a/susconecta/components/features/pacientes/chat-widget.tsx b/susconecta/components/features/pacientes/chat-widget.tsx new file mode 100644 index 0000000..f038ea2 --- /dev/null +++ b/susconecta/components/features/pacientes/chat-widget.tsx @@ -0,0 +1,197 @@ + + +"use client"; + +import { useEffect, useMemo, useState } from "react"; +import { ArrowLeft, Mic, MicOff, Sparkles } from "lucide-react"; +import { Button } from "@/components/ui/button"; +import { + AIAssistantInterface, + ChatSession, +} from "@/components/ZoeIA/ai-assistant-interface"; +import { VoicePoweredOrb } from "@/components/ZoeIA/voice-powered-orb"; + +export function ChatWidget() { + const [assistantOpen, setAssistantOpen] = useState(false); + const [realtimeOpen, setRealtimeOpen] = useState(false); + const [isRecording, setIsRecording] = useState(false); + const [voiceDetected, setVoiceDetected] = useState(false); + const [history, setHistory] = useState([]); + + useEffect(() => { + if (!assistantOpen && !realtimeOpen) return; + + const original = document.body.style.overflow; + document.body.style.overflow = "hidden"; + + return () => { + document.body.style.overflow = original; + }; + }, [assistantOpen, realtimeOpen]); + + const gradientRing = useMemo( + () => ( + + ), + [] + ); + + const openAssistant = () => setAssistantOpen(true); + const closeAssistant = () => setAssistantOpen(false); + + const openRealtime = () => setRealtimeOpen(true); + const closeRealtime = () => { + setRealtimeOpen(false); + setAssistantOpen(true); + setIsRecording(false); + setVoiceDetected(false); + }; + + const toggleRecording = () => { + setIsRecording((prev) => { + const next = !prev; + if (!next) { + setVoiceDetected(false); + } + return next; + }); + }; + + const handleOpenDocuments = () => { + console.log("[ChatWidget] Abrindo fluxo de documentos"); + closeAssistant(); + }; + + const handleOpenChat = () => { + console.log("[ChatWidget] Encaminhando para chat em tempo real"); + setAssistantOpen(false); + openRealtime(); + }; + + const handleUpsertHistory = (session: ChatSession) => { + setHistory((previous) => { + const index = previous.findIndex((item) => item.id === session.id); + if (index >= 0) { + const updated = [...previous]; + updated[index] = session; + return updated; + } + return [...previous, session]; + }); + }; + + const handleClearHistory = () => { + setHistory([]); + }; + + return ( + <> + {assistantOpen && ( +
+
+ +
+
+ +
+
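// Hedged sketch, not part of the original diff: the assistantOpen overlay markup was stripped.
// Based on the state and handlers defined in ChatWidget above, the assistant is presumably
// wired roughly as follows (wrapper markup is an assumption; the props match the handlers
// declared in this component):
//   <AIAssistantInterface
//     history={history}
//     onAddHistory={handleUpsertHistory}
//     onClearHistory={handleClearHistory}
//     onOpenDocuments={handleOpenDocuments}
//     onOpenChat={handleOpenChat}
//   />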
+ )} + + {realtimeOpen && ( +
+
+ +
+ +
+
+
+ + {voiceDetected && ( + + Ouvindo… + + )} +
+ +
+ +
+ Ative a captura para falar com a equipe em tempo real. Assim que sua voz for detectada, a Zoe sinaliza visualmente e encaminha o atendimento. +
+
+
+
+
+ )} + +
+ +
+ + ); +} diff --git a/susconecta/package.json b/susconecta/package.json index 65a7442..7a06058 100644 --- a/susconecta/package.json +++ b/susconecta/package.json @@ -58,6 +58,7 @@ "jspdf": "^3.0.3", "lucide-react": "^0.454.0", "next-themes": "latest", + "ogl": "^1.0.11", "react": "^18", "react-day-picker": "latest", "react-dom": "^18", diff --git a/susconecta/pnpm-lock.yaml b/susconecta/pnpm-lock.yaml index 996088a..da9e9cb 100644 --- a/susconecta/pnpm-lock.yaml +++ b/susconecta/pnpm-lock.yaml @@ -152,6 +152,9 @@ importers: next-themes: specifier: latest version: 0.4.6(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + ogl: + specifier: ^1.0.11 + version: 1.0.11 react: specifier: ^18 version: 18.3.1 @@ -2809,6 +2812,9 @@ packages: resolution: {integrity: sha512-gXah6aZrcUxjWg2zR2MwouP2eHlCBzdV4pygudehaKXSGW4v2AsRQUK+lwwXhii6KFZcunEnmSUoYp5CXibxtA==} engines: {node: '>= 0.4'} + ogl@1.0.11: + resolution: {integrity: sha512-kUpC154AFfxi16pmZUK4jk3J+8zxwTWGPo03EoYA8QPbzikHoaC82n6pNTbd+oEaJonaE8aPWBlX7ad9zrqLsA==} + optionator@0.9.4: resolution: {integrity: sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==} engines: {node: '>= 0.8.0'} @@ -6089,6 +6095,8 @@ snapshots: define-properties: 1.2.1 es-object-atoms: 1.1.1 + ogl@1.0.11: {} + optionator@0.9.4: dependencies: deep-is: 0.1.4 diff --git a/susconecta/types/ogl.d.ts b/susconecta/types/ogl.d.ts new file mode 100644 index 0000000..e897bbb --- /dev/null +++ b/susconecta/types/ogl.d.ts @@ -0,0 +1 @@ +declare module 'ogl';
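// Hedged sketch, not part of the original diff: `declare module 'ogl'` above types every
// import from the package as `any`. If stricter typing is wanted later, a narrower ambient
// declaration could be derived from the calls made in voice-powered-orb.tsx — the shapes
// below are inferred from that usage only, not from ogl's published typings:
declare module "ogl" {
  export class Renderer {
    constructor(options?: { alpha?: boolean; premultipliedAlpha?: boolean; antialias?: boolean; dpr?: number });
    gl: WebGLRenderingContext & { canvas: HTMLCanvasElement };
    setSize(width: number, height: number): void;
    render(options: { scene: unknown }): void;
  }
  export class Vec3 {
    constructor(x?: number, y?: number, z?: number);
    set(x: number, y: number, z: number): this;
  }
  export class Triangle {
    constructor(gl: WebGLRenderingContext);
  }
  export class Program {
    constructor(gl: WebGLRenderingContext, options: { vertex: string; fragment: string; uniforms?: Record<string, { value: unknown }> });
    uniforms: Record<string, { value: any }>;
  }
  export class Mesh {
    constructor(gl: WebGLRenderingContext, options: { geometry: unknown; program: Program });
  }
}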