diff --git a/backend/server/src/conversation/views.py b/backend/server/src/conversation/views.py
index d95a2ec..769bece 100644
--- a/backend/server/src/conversation/views.py
+++ b/backend/server/src/conversation/views.py
@@ -28,7 +28,6 @@ def start():
return jsonify({"reply": greeting}), 200
-
@conversation_blueprint.route("/send_message", methods=["POST"])
def send_message():
practitioner = request.args.get("practitioner")
@@ -38,33 +37,22 @@ def send_message():
)
if "conversation_id" not in session:
return jsonify({"reply": "Please start a conversation first"}), 400
-
conversation_id = session["conversation_id"]
conversation = Conversation(
user_doc_ref=user_doc_ref, conversaton_id=conversation_id
)
- # Store audio in a temp file
- message = request.json.get("message")
-
- # Generate a reply using the Conversation object
- reply = conversation.generate_reply(message)
- return jsonify({"reply": reply}), 200
-
-@conversation_blueprint.route("/transcribe", methods=["POST"])
-def transcribe():
-
# Store audio in a temp file
audio = request.files["audioFile"]
temp_audio_path = os.path.join(tempfile.gettempdir(), "received_audio.wav")
audio.save(temp_audio_path)
-
# Transcribe the audio
message = Conversation.transcribe(str(temp_audio_path))
os.remove(temp_audio_path)
# Generate a reply using the Conversation object
- return jsonify({"user_msg": message}), 200
+ reply = conversation.generate_reply(message)
+ return jsonify({"reply": reply}), 200
@conversation_blueprint.route('/end', methods=['POST'])
def end():
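
Note on the endpoint merge above: `/transcribe` is gone and `/send_message` now accepts the multipart audio upload directly, so the client makes one round trip instead of two. A minimal sketch of the new call shape — the `sendAudioMessage` helper name is hypothetical; the `audioFile` field, demo query params, and axios usage mirror VoiceAI.jsx below:

```js
import axios from 'axios';

// Hypothetical helper: one multipart POST now covers transcription
// and reply generation. `audioBlob` comes from a MediaRecorder.
async function sendAudioMessage(audioBlob) {
  const queryParams = new URLSearchParams({ patient: 'demo', practitioner: 'demo' });
  const formData = new FormData();
  formData.append('audioFile', audioBlob, 'recorded_audio.wav');
  const response = await axios.post(
    `http://localhost:8080/conversation/send_message?${queryParams.toString()}`,
    formData
  );
  return response.data.reply; // reply is already generated server-side
}
```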
diff --git a/frontend/package-lock.json b/frontend/package-lock.json
index 546a0b3..102e91d 100644
--- a/frontend/package-lock.json
+++ b/frontend/package-lock.json
@@ -9,9 +9,10 @@
"version": "0.0.0",
"dependencies": {
"@splinetool/react-spline": "^2.2.6",
- "@splinetool/runtime": "^1.0.18",
+ "@splinetool/runtime": "^1.0.19",
"axios": "^1.6.5",
"firebase": "^10.7.1",
+ "gsap": "^3.12.4",
"lucide-react": "^0.307.0",
"react": "^18.2.0",
"react-dom": "^18.2.0",
@@ -1793,9 +1794,9 @@
}
},
"node_modules/@splinetool/runtime": {
- "version": "1.0.18",
- "resolved": "https://registry.npmjs.org/@splinetool/runtime/-/runtime-1.0.18.tgz",
- "integrity": "sha512-ywtyXRTm96C8czB/eezmb4dLNZ5fncOdBAW5PLfaE/EtmkIZX2ofFGU8u8cnYcKHfdCaFO0+WfhXQtcpPMSFBA==",
+ "version": "1.0.19",
+ "resolved": "https://registry.npmjs.org/@splinetool/runtime/-/runtime-1.0.19.tgz",
+ "integrity": "sha512-AMN1AhhaE9FPvEkxrNFmohiYQfQIRcrSxh9Yjg1Jk1Rx1UCvbqxbLEGnxEFH022o+xBT5emhb8p8M0aMDQiOHA==",
"dependencies": {
"on-change": "^4.0.0",
"semver-compare": "^1.0.0"
@@ -3515,6 +3516,11 @@
"integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==",
"dev": true
},
+ "node_modules/gsap": {
+ "version": "3.12.4",
+ "resolved": "https://registry.npmjs.org/gsap/-/gsap-3.12.4.tgz",
+ "integrity": "sha512-1ByAq8dD0W4aBZ/JArgaQvc0gyUfkGkP8mgAQa0qZGdpOKlSOhOf+WNXjoLimKaKG3Z4Iu6DKZtnyszqQeyqWQ=="
+ },
"node_modules/has-bigints": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz",
diff --git a/frontend/package.json b/frontend/package.json
index e6ad609..0cd496e 100644
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -11,11 +11,12 @@
"preview": "vite preview"
},
"dependencies": {
+ "@splinetool/react-spline": "^2.2.6",
+ "@splinetool/runtime": "^1.0.19",
"axios": "^1.6.5",
"firebase": "^10.7.1",
+ "gsap": "^3.12.4",
"lucide-react": "^0.307.0",
- "@splinetool/react-spline": "^2.2.6",
- "@splinetool/runtime": "^1.0.18",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"react-router-dom": "^6.21.1",
diff --git a/frontend/src/views/patient/PatientHome.jsx b/frontend/src/views/patient/PatientHome.jsx
index 5af51b2..6118f00 100644
--- a/frontend/src/views/patient/PatientHome.jsx
+++ b/frontend/src/views/patient/PatientHome.jsx
@@ -1,15 +1,15 @@
-import Navbar from "./components/Navbar";
-import RecordButton from "./components/RecordButton";
-// import Ai3D from './components/Ai3D';
-import Conversation from './components/Conversation';
+import Navbar from './components/Navbar';
import Exercises from './components/Exercises';
-import "./styles.css";
+import './styles.css';
import { useState, useEffect, useCallback } from 'react';
import VoiceAI from './components/VoiceAI';
import axios from 'axios';
import Skeleton from './components/Skeleton';
+import { LogOut } from 'lucide-react';
+import { useNavigate } from 'react-router-dom';
const PatientHome = () => {
+ const navigate = useNavigate();
const [convo, setConvo] = useState({
user: null,
gpt: null,
@@ -33,7 +33,12 @@ const PatientHome = () => {
const response = await axios.get(
`http://localhost:8080/conversation/start?${queryParams.toString()}`
);
- setConvo((prevConvo) => ({ ...prevConvo, gpt: response.data.reply }));
+ setConvo((prevConvo) => {
+ if (prevConvo.gpt === null) {
+ return { ...prevConvo, gpt: response.data.reply };
+ }
+ return prevConvo;
+ });
} catch (error) {
console.error('Error fetching conversation start:', error);
}
@@ -41,47 +46,69 @@ const PatientHome = () => {
startConversation();
}, []);
+ const handleEndSession = async () => {
+ try {
+ await axios.post('http://localhost:8080/conversation/end', {}, {
+ // TODO: what are thooooose
+ params: new URLSearchParams({
+ patient: 'demo',
+ practitioner: 'demo',
+ })
+ });
+ navigate('/')
+ } catch (error) {
+ console.error('Error ending conversation:', error);
+ }
+ };
+
return (
-    <>
-      <Navbar />
-      {/* <Ai3D /> */}
-      <p>{convo.user}</p>
-      <p>
-        {convo.gpt !== null
-          ? convo.gpt
-          : <Skeleton />}
-      </p>
-      <VoiceAI
-        updateUserMessage={updateUserMessage}
-        updateGptResponse={updateGptResponse}
-      />
-      <RecordButton />
-      <Exercises />
-      {/* TODO: finish button that calls conversation/end */}
-    </>
+    <div className="outer-frame">
+      <Navbar />
+      <button onClick={handleEndSession}>
+        <LogOut />
+      </button>
+      <div className="inner-frame">
+        {/* <Ai3D /> */}
+        <p>{convo.user}</p>
+        <p>{convo.gpt !== null ? convo.gpt : <Skeleton />}</p>
+        <VoiceAI
+          updateUserMessage={updateUserMessage}
+          updateGptResponse={updateGptResponse}
+        />
+      </div>
+      <Exercises />
+      {/* TODO: finish button that calls conversation/end */}
+    </div>
);
};
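
On the `// TODO: what are thooooose` in handleEndSession: axios serializes the `params` option into the query string, and the empty `{}` second argument is the POST body. A plain object works as well as `URLSearchParams`; a sketch of the equivalent call:

```js
// Equivalent request: axios appends ?patient=demo&practitioner=demo
// from `params`; the {} second argument is the (empty) JSON body.
await axios.post(
  'http://localhost:8080/conversation/end',
  {},
  { params: { patient: 'demo', practitioner: 'demo' } }
);
```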
diff --git a/frontend/src/views/patient/components/Exercises.jsx b/frontend/src/views/patient/components/Exercises.jsx
index da73c51..cf859a6 100644
--- a/frontend/src/views/patient/components/Exercises.jsx
+++ b/frontend/src/views/patient/components/Exercises.jsx
@@ -49,15 +49,15 @@ const Exercises = () => {
instructions: ["Step 1 for Card 5", "Step 2 for Card 5", "Step 3 for Card 5"]
},
{
- id: 5,
- title: "Card 5",
+ id: 6,
+ title: "Card 6",
description: "Description for Card 5",
imageUrl: glutesImage, // Placeholder image
instructions: ["Step 1 for Card 5", "Step 2 for Card 5", "Step 3 for Card 5"]
},
{
- id: 5,
- title: "Card 5",
+ id: 7,
+ title: "Card 7",
description: "Description for Card 5",
imageUrl: glutesImage, // Placeholder image
instructions: ["Step 1 for Card 5", "Step 2 for Card 5", "Step 3 for Card 5"]
diff --git a/frontend/src/views/patient/components/Skeleton.jsx b/frontend/src/views/patient/components/Skeleton.jsx
index f196edb..5c3ff41 100644
--- a/frontend/src/views/patient/components/Skeleton.jsx
+++ b/frontend/src/views/patient/components/Skeleton.jsx
@@ -1,9 +1,9 @@
const Skeleton = () => {
return (
-
-
+
);
};
diff --git a/frontend/src/views/patient/components/VoiceAI.jsx b/frontend/src/views/patient/components/VoiceAI.jsx
index e53ef72..b277e62 100644
--- a/frontend/src/views/patient/components/VoiceAI.jsx
+++ b/frontend/src/views/patient/components/VoiceAI.jsx
@@ -1,43 +1,46 @@
import { useState, useEffect, useRef } from 'react';
import axios from 'axios';
+import gsap from 'gsap';
+import React, { Suspense } from 'react';
+
+const Spline = React.lazy(() => import('@splinetool/react-spline'));
const VoiceAI = ({ updateUserMessage, updateGptResponse }) => {
const sphere = useRef();
const [isRecording, setIsRecording] = useState(false);
const [mediaStream, setMediaStream] = useState(null);
const [mediaRecorder, setMediaRecorder] = useState(null);
- const [isConvoStarted, setIsConvoStarted] = useState(false);
const [speechRecognition, setSpeechRecognition] = useState(null);
useEffect(() => {
- const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
+ const SpeechRecognition =
+ window.SpeechRecognition || window.webkitSpeechRecognition;
if (SpeechRecognition) {
const recognition = new SpeechRecognition();
recognition.continuous = true;
recognition.interimResults = true;
+ let accumulatedTranscript = '';
+
recognition.onresult = (event) => {
- // Only use this for real-time display, not for sending to the server
- const latestResult = event.results[event.resultIndex];
- const latestTranscript = latestResult[0].transcript.trim();
- updateUserMessage(latestTranscript);
+ accumulatedTranscript = '';
+ for (let i = 0; i < event.results.length; i++) {
+ accumulatedTranscript += event.results[i][0].transcript.trim() + ' ';
+ }
+ updateUserMessage(accumulatedTranscript);
};
setSpeechRecognition(recognition);
} else {
- console.warn("Speech recognition not supported in this browser.");
+ console.warn('Speech recognition not supported in this browser.');
}
}, [updateUserMessage]);
const startRecording = async () => {
- const queryParams = new URLSearchParams({ patient: 'demo', practitioner: 'demo' });
- if (!isConvoStarted) {
- // Start a new conversation
- const gptResponse = await axios.get(`http://localhost:8080/conversation/start?${queryParams.toString()}`);
- setIsConvoStarted(true);
- console.log(gptResponse.data.reply); // TODO: speak/display the AI response here
- updateGptResponse(gptResponse.data.reply);
- }
+ const queryParams = new URLSearchParams({
+ patient: 'demo',
+ practitioner: 'demo',
+ });
// Start recording audio
const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
@@ -51,21 +54,17 @@ const VoiceAI = ({ updateUserMessage, updateGptResponse }) => {
};
recorder.onstop = async () => {
+ updateGptResponse(null);
// Process and send the audio data to the server for transcription
const audioBlob = new Blob(chunks, { type: 'audio/wav' });
const formData = new FormData();
formData.append('audioFile', audioBlob, 'recorded_audio.wav');
- const userMessage = await axios.post(`http://localhost:8080/conversation/transcribe`, formData);
- updateUserMessage(userMessage.data.user_msg); // Update with the final, reliable transcription
- console.log(userMessage);
-
- // Fetch GPT response
- const gptResponse = await axios.post(
+ const response = await axios.post(
`http://localhost:8080/conversation/send_message?${queryParams.toString()}`,
- { "message": userMessage.data.user_msg }
+ formData
);
- updateGptResponse(gptResponse.data.reply);
+ updateGptResponse(response.data.reply);
};
recorder.start();
@@ -85,38 +84,58 @@ const VoiceAI = ({ updateUserMessage, updateGptResponse }) => {
};
function onLoad(spline) {
- spline.setZoom(1);
- const obj = spline.findObjectById('ec9f2de1-4a48-4948-a32f-653838ab50ec');
- sphere.current = obj
+ spline.setZoom(0.1);
+ const obj = spline.findObjectById('f5f3b334-53b6-4337-8497-c6815ba02c98');
+ sphere.current = obj;
}
const triggerStart = () => {
startRecording();
- // sphere.current.emitEvent('start', 'Sphere');
- }
+ console.log(sphere.current.scale);
+ gsap.to(sphere.current.scale, {
+ duration: 3,
+ x: 1.5,
+ y: 1.5,
+ z: 1.5,
+ ease: 'power3.out',
+ });
+ };
const triggerEnd = () => {
stopRecording();
- // sphere.current.emitEvent('mouseHover', 'Sphere');
- }
+ gsap.to(sphere.current.scale, {
+ duration: 2,
+ x: 1,
+ y: 1,
+ z: 1,
+ ease: 'power3.out',
+ });
+ };
return (
-    <div>
-      {isRecording ? (
-        <p>Recording...</p>
-      ) : (
-        <p>Click Start Recording to begin recording.</p>
-      )}
-    </div>
+    <Suspense fallback={null}>
+      <Spline
+        onLoad={onLoad}
+        onMouseDown={triggerStart}
+        onMouseUp={triggerEnd}
+        scene="https://prod.spline.design/Omn4EqepHAUv5XKP/scene.splinecode"
+      />
+    </Suspense>
);
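
The scale tweens above work because `gsap.to` animates numeric properties on any plain object, and the Spline object found in `onLoad` exposes a mutable `{x, y, z}` scale that the runtime reads on render. A sketch factoring the grow/settle handlers — the `pulse` helper is hypothetical:

```js
import gsap from 'gsap';

// Hypothetical helper: tween an object's scale uniformly. gsap mutates
// scale.x/y/z in place each tick; the Spline runtime picks the values up.
const pulse = (obj, factor, seconds) =>
  gsap.to(obj.scale, {
    duration: seconds,
    x: factor,
    y: factor,
    z: factor,
    ease: 'power3.out', // decelerating ease for a soft breathing feel
  });

pulse(sphere.current, 1.5, 3); // grow when recording starts
pulse(sphere.current, 1, 2);   // settle back when it stops
```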
diff --git a/frontend/src/views/patient/styles.css b/frontend/src/views/patient/styles.css
index 20e4ec6..2ebe36d 100644
--- a/frontend/src/views/patient/styles.css
+++ b/frontend/src/views/patient/styles.css
@@ -104,3 +104,21 @@
max-width: 100%;
}
}
+
+
+.outer-frame {
+ border: 20px solid white; /* Adjust the thickness here */
+ position: relative;
+ height: 100vh;
+ width: 100%;
+ border-radius: 20px; /* Rounded corners for outer border */
+ overflow: hidden; /* Ensures inner content respects border radius */
+}
+
+.inner-frame {
+ height: 80%;
+ width: 100%;
+ border: 2px solid #E1E5F2; /* Inner border */
+ border-radius: 15px; /* Adjust for inner rounded corners */
+ box-sizing: border-box; /* Ensures the border is included in the width/height */
+}
\ No newline at end of file
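
For reference, the intended nesting of the new frame classes: `overflow: hidden` on `.outer-frame` is what clips children to its 20px rounded border. A sketch of the assumed markup:

```jsx
{/* Assumed usage: inner-frame nests inside outer-frame, which clips it
    to the rounded corners via overflow: hidden. */}
<div className="outer-frame">
  <div className="inner-frame">{/* page content */}</div>
</div>
```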