diff --git a/.gitignore b/.gitignore index aacd295..91ba992 100644 --- a/.gitignore +++ b/.gitignore @@ -41,3 +41,8 @@ coverage/ # Misc *.tsbuildinfo next-env.d.ts + +backend/.env +backend/transcripts.db-journal +backend/__pycache__/ +backend/audio_transcripts/dear_man_role_play_analysis.json diff --git a/backend/.python-version b/backend/.python-version new file mode 100644 index 0000000..e4fba21 --- /dev/null +++ b/backend/.python-version @@ -0,0 +1 @@ +3.12 diff --git a/backend/README.md b/backend/README.md new file mode 100644 index 0000000..42c4e06 --- /dev/null +++ b/backend/README.md @@ -0,0 +1,51 @@ +# Mediator Backend + +FastAPI backend for the Mediator app: sentiment analysis, transcript reports API, and SQLite-backed metrics. + +## Running the server + +From the `backend` directory, start the API server with: + +```bash +uv run main.py +``` + +This uses [uv](https://docs.astral.sh/uv/) to run the project’s Python environment and starts the app with **uvicorn** on **http://0.0.0.0:8000**. + +- **Root:** http://localhost:8000/ +- **API docs (Swagger):** http://localhost:8000/docs +- **ReDoc:** http://localhost:8000/redoc + +## Prerequisites + +- **Python 3.12+** +- **uv** (install: `curl -LsSf https://astral.sh/uv/install.sh | sh`) + +Install dependencies (if needed): + +```bash +uv sync +``` + +## Environment + +Optional `.env` in `backend/` for: + +- `ASSEMBLYAI_API_KEY` – used by the audio transcription pipeline +- `OPENROUTER_API_KEY` – used by the analysis pipeline + +The reports API reads from a local SQLite DB (`transcripts.db`). Populate it by running the audio processing script (see repo root or `scripts/process_audio.py`). 
+ +## Main endpoints + +| Endpoint | Description | +|----------|-------------| +| `GET /` | Health check | +| `POST /sentiment` | VADER sentiment for a single text | +| `GET /api/reports/speakers` | List speakers | +| `GET /api/reports/transcripts` | List meeting transcripts | +| `GET /api/reports/categories` | List categories (sentiment, dear_man, fast) | +| `GET /api/reports/metrics` | Aggregated metrics (optional filters) | +| `GET /api/reports/pie-chart-data` | Data for pie charts | + +CORS is set for `http://localhost:3000` and `http://127.0.0.1:3000` so the Next.js frontend can call the API. diff --git a/backend/__init__.py b/backend/__init__.py new file mode 100644 index 0000000..7f83169 --- /dev/null +++ b/backend/__init__.py @@ -0,0 +1 @@ +# Backend package diff --git a/backend/analysis_utilities.py b/backend/analysis_utilities.py new file mode 100644 index 0000000..e11df57 --- /dev/null +++ b/backend/analysis_utilities.py @@ -0,0 +1,129 @@ +""" +Analysis utilities for Mediator session analytics. +Provides standalone functions for sentiment analysis and assertiveness scoring. +""" + +import re +from typing import Tuple, Dict +import nltk +from nltk.sentiment.vader import SentimentIntensityAnalyzer + +# Download VADER lexicon (only needed once) +nltk.download('vader_lexicon', quiet=True) + +# Initialize VADER analyzer +sia = SentimentIntensityAnalyzer() + + +def analyze_sentiment(text: str) -> Tuple[str, float]: + """ + Analyze the sentiment of the given text using VADER. 
+ + Args: + text: The text to analyze + + Returns: + A tuple of (sentiment_label, sentiment_score) where: + - sentiment_label is one of "positive", "neutral", "negative" + - sentiment_score is the compound score from -1 to 1 + """ + # Get VADER scores + scores = sia.polarity_scores(text) + + # Find which sentiment (neg, neu, pos) has the highest score + sentiment_scores = { + "negative": scores["neg"], + "neutral": scores["neu"], + "positive": scores["pos"], + } + + # Get the label with the highest score + sentiment_label = max(sentiment_scores, key=sentiment_scores.get) + + # Use compound score as the sentiment_score (-1 to 1) + sentiment_score = round(scores["compound"], 2) + + return sentiment_label, sentiment_score + + +def calculate_assertiveness(text: str) -> Dict: + """ + Calculate an assertiveness score for DEAR MAN + FAST skill practice. + + Measures how directly the user communicated by: + - Counting first-person pronouns ("I", "my", "me") + - Counting declarative statements (sentences without question marks) + - Weighting by total word count + + Formula: assertiveness = (first_person_count / word_count * 0.5) + (declarative_ratio * 0.5) + + Args: + text: The text to analyze + + Returns: + A dict containing: + - assertiveness_score: float 0-1 + - first_person_count: int + - declarative_count: int + - total_sentences: int + """ + if not text or not text.strip(): + return { + "assertiveness_score": 0.0, + "first_person_count": 0, + "declarative_count": 0, + "total_sentences": 0, + } + + # Count first-person pronouns (case-insensitive) + # Match "I", "my", "me" as whole words + first_person_pattern = r'\b(I|my|me)\b' + first_person_matches = re.findall(first_person_pattern, text, re.IGNORECASE) + first_person_count = len(first_person_matches) + + # Count words + words = text.split() + word_count = len(words) + + # Split into sentences using basic punctuation + # This handles ., !, and ? 
as sentence terminators + sentences = re.split(r'[.!?]+', text) + # Filter out empty sentences + sentences = [s.strip() for s in sentences if s.strip()] + total_sentences = len(sentences) + + # Count declarative statements (sentences that don't end with ?) + # We need to check the original text for question marks + # Find all sentence-ending punctuation and check if they're questions + sentence_endings = re.findall(r'[.!?]+', text) + + declarative_count = 0 + for ending in sentence_endings: + if '?' not in ending: + declarative_count += 1 + + # Handle case where text doesn't end with punctuation + if total_sentences > len(sentence_endings): + # The last sentence has no punctuation, assume declarative + declarative_count += (total_sentences - len(sentence_endings)) + + # Calculate declarative ratio + declarative_ratio = declarative_count / total_sentences if total_sentences > 0 else 0 + + # Calculate first-person ratio (normalized) + # Cap at 1.0 to prevent scores > 1 when there are many first-person pronouns + first_person_ratio = min(first_person_count / word_count, 1.0) if word_count > 0 else 0 + + # Calculate assertiveness score using the formula from the brief + # assertiveness = (first_person_count / word_count * 0.5) + (declarative_ratio * 0.5) + assertiveness_score = (first_person_ratio * 0.5) + (declarative_ratio * 0.5) + + # Ensure score is between 0 and 1 + assertiveness_score = max(0.0, min(1.0, assertiveness_score)) + + return { + "assertiveness_score": round(assertiveness_score, 2), + "first_person_count": first_person_count, + "declarative_count": declarative_count, + "total_sentences": total_sentences, + } diff --git a/backend/audio_files/dear_man_role_play.mp3 b/backend/audio_files/dear_man_role_play.mp3 new file mode 100644 index 0000000..bae4305 Binary files /dev/null and b/backend/audio_files/dear_man_role_play.mp3 differ diff --git a/backend/audio_transcripts/dear_man_role_play.json b/backend/audio_transcripts/dear_man_role_play.json new file 
mode 100644 index 0000000..40c5f12 --- /dev/null +++ b/backend/audio_transcripts/dear_man_role_play.json @@ -0,0 +1,55 @@ +{"transcript": [ + { + "speaker": "Speaker A", + "message": "Christina. Oh, I had the worst day. Like, you won't believe what my boss did. I can't wait to tell you about this. You will not believe it. It's just horrendous. Like, she just doesn't respect me. She doesn't, like, listen to me. She pushes me too hard. Like, stop." + }, + { + "speaker": "Speaker B", + "message": "Stop before you keep going. I see that this is really important to you, and you really want to talk about this right now. And I've had the most overwhelming day today at work, and as much as I wish I had the bandwidth for this right now, I don't. And I really want to support you. I know that this is really important for you to share. I just cannot do this. And I feel that if you can respect me in this decision, if you can respect where I'm coming from, I can be more attentive at a later time." + }, + { + "speaker": "Speaker A", + "message": "I mean. I mean, I do get having a hard day. Obviously, I have a hard day, but I had one, too. But, like, I really want to talk. I really feel like I need to talk about it. Like, and you give such good advice, and, like, I feel like you just, like, help me see things clearly. So I just, like, I just really want to talk, you know?" + }, + { + "speaker": "Speaker B", + "message": "I know you want to talk. I hear it. I hear how important it is for you, and I just can't do it. You know how it is when we start having these conversations late at night, I then can't go to sleep, and then it just messes up my whole schedule. We can't have the conversation tonight." + }, + { + "speaker": "Speaker A", + "message": "So, like, you're shutting me down again." + }, + { + "speaker": "Speaker B", + "message": "Kristen, I'm just saying, tonight I can. Is there something else that we can do? Like, is there another way that we can, like, make this work? 
Like, is there something that you can think of? You know, if I can't do it tonight, can we maybe do it the next day? I mean, what are your thoughts?" + }, + { + "speaker": "Speaker A", + "message": "I mean, I guess tomorrow morning will be okay. I guess we could talk about it in the morning." + }, + { + "speaker": "Speaker B", + "message": "That would be great." + }, + { + "speaker": "Speaker A", + "message": "You have time?" + }, + { + "speaker": "Speaker B", + "message": "Yeah. Yeah, I have time. Yeah. We can do it over breakfast." + }, + { + "speaker": "Speaker A", + "message": "Okay. Okay. I can do that. I can do that. I mean, I get. I get that you had a hard day. Yeah. That would mean a lot if we could talk about it at breakfast." + }, + { + "speaker": "Speaker B", + "message": "Perfect. Trust me, it's going to be better. I'll be better able to listen to you. I'll be more attentive. You'll get your. Your needs met if we do it tomorrow." + }, + { + "speaker": "Speaker A", + "message": "Okay. Okay. I can respect that. I can do that. Okay. Thank you. Go back to your book. Now we're good." + } + ] +} \ No newline at end of file diff --git a/backend/audio_transcripts/dear_man_role_play.txt b/backend/audio_transcripts/dear_man_role_play.txt new file mode 100644 index 0000000..1a0c752 --- /dev/null +++ b/backend/audio_transcripts/dear_man_role_play.txt @@ -0,0 +1,13 @@ +Speaker A: Christina. Oh, I had the worst day. Like, you won't believe what my boss did. I can't wait to tell you about this. You will not believe it. It's just horrendous. Like, she just doesn't respect me. She doesn't, like, listen to me. She pushes me too hard. Like, stop. +Speaker B: Stop before you keep going. I see that this is really important to you, and you really want to talk about this right now. And I've had the most overwhelming day today at work, and as much as I wish I had the bandwidth for this right now, I don't. And I really want to support you. 
I know that this is really important for you to share. I just cannot do this. And I feel that if you can respect me in this decision, if you can respect where I'm coming from, I can be more attentive at a later time. +Speaker A: I mean. I mean, I do get having a hard day. Obviously, I have a hard day, but I had one, too. But, like, I really want to talk. I really feel like I need to talk about it. Like, and you give such good advice, and, like, I feel like you just, like, help me see things clearly. So I just, like, I just really want to talk, you know? +Speaker B: I know you want to talk. I hear it. I hear how important it is for you, and I just can't do it. You know how it is when we start having these conversations late at night, I then can't go to sleep, and then it just messes up my whole schedule. We can't have the conversation tonight. +Speaker A: So, like, you're shutting me down again. +Speaker B: Kristen, I'm just saying, tonight I can. Is there something else that we can do? Like, is there another way that we can, like, make this work? Like, is there something that you can think of? You know, if I can't do it tonight, can we maybe do it the next day? I mean, what are your thoughts? +Speaker A: I mean, I guess tomorrow morning will be okay. I guess we could talk about it in the morning. +Speaker B: That would be great. +Speaker A: You have time? +Speaker B: Yeah. Yeah, I have time. Yeah. We can do it over breakfast. +Speaker A: Okay. Okay. I can do that. I can do that. I mean, I get. I get that you had a hard day. Yeah. That would mean a lot if we could talk about it at breakfast. +Speaker B: Perfect. Trust me, it's going to be better. I'll be better able to listen to you. I'll be more attentive. You'll get your. Your needs met if we do it tomorrow. +Speaker A: Okay. Okay. I can respect that. I can do that. Okay. Thank you. Go back to your book. Now we're good. 
\ No newline at end of file diff --git a/backend/audio_transcripts/dear_man_role_play_analysis.json b/backend/audio_transcripts/dear_man_role_play_analysis.json new file mode 100644 index 0000000..2d0998a --- /dev/null +++ b/backend/audio_transcripts/dear_man_role_play_analysis.json @@ -0,0 +1,1388 @@ +{ + "transcript": "Speaker A: Christina. Oh, I had the worst day. Like, you won't believe what my boss did. I can't wait to tell you about this. You will not believe it. It's just horrendous. Like, she just doesn't respect me. She doesn't, like, listen to me. She pushes me too hard. Like, stop.\nSpeaker B: Stop before you keep going. I see that this is really important to you, and you really want to talk about this right now. And I've had the most overwhelming day today at work, and as much as I wish I had the bandwidth for this right now, I don't. And I really want to support you. I know that this is really important for you to share. I just cannot do this. And I feel that if you can respect me in this decision, if you can respect where I'm coming from, I can be more attentive at a later time.\nSpeaker A: I mean. I mean, I do get having a hard day. Obviously, I have a hard day, but I had one, too. But, like, I really want to talk. I really feel like I need to talk about it. Like, and you give such good advice, and, like, I feel like you just, like, help me see things clearly. So I just, like, I just really want to talk, you know?\nSpeaker B: I know you want to talk. I hear it. I hear how important it is for you, and I just can't do it. You know how it is when we start having these conversations late at night, I then can't go to sleep, and then it just messes up my whole schedule. We can't have the conversation tonight.\nSpeaker A: So, like, you're shutting me down again.\nSpeaker B: Kristen, I'm just saying, tonight I can. Is there something else that we can do? Like, is there another way that we can, like, make this work? 
Like, is there something that you can think of? You know, if I can't do it tonight, can we maybe do it the next day? I mean, what are your thoughts?\nSpeaker A: I mean, I guess tomorrow morning will be okay. I guess we could talk about it in the morning.\nSpeaker B: That would be great.\nSpeaker A: You have time?\nSpeaker B: Yeah. Yeah, I have time. Yeah. We can do it over breakfast.\nSpeaker A: Okay. Okay. I can do that. I can do that. I mean, I get. I get that you had a hard day. Yeah. That would mean a lot if we could talk about it at breakfast.\nSpeaker B: Perfect. Trust me, it's going to be better. I'll be better able to listen to you. I'll be more attentive. You'll get your. Your needs met if we do it tomorrow.\nSpeaker A: Okay. Okay. I can respect that. I can do that. Okay. Thank you. Go back to your book. Now we're good.", + "transcript_with_speakers": "Speaker A: Christina. Oh, I had the worst day. Like, you won't believe what my boss did. I can't wait to tell you about this. You will not believe it. It's just horrendous. Like, she just doesn't respect me. She doesn't, like, listen to me. She pushes me too hard. Like, stop.\n\nSpeaker B: Stop before you keep going. I see that this is really important to you, and you really want to talk about this right now. And I've had the most overwhelming day today at work, and as much as I wish I had the bandwidth for this right now, I don't. And I really want to support you. I know that this is really important for you to share. I just cannot do this. And I feel that if you can respect me in this decision, if you can respect where I'm coming from, I can be more attentive at a later time.\n\nSpeaker A: I mean. I mean, I do get having a hard day. Obviously, I have a hard day, but I had one, too. But, like, I really want to talk. I really feel like I need to talk about it. Like, and you give such good advice, and, like, I feel like you just, like, help me see things clearly. 
So I just, like, I just really want to talk, you know?\n\nSpeaker B: I know you want to talk. I hear it. I hear how important it is for you, and I just can't do it. You know how it is when we start having these conversations late at night, I then can't go to sleep, and then it just messes up my whole schedule. We can't have the conversation tonight.\n\nSpeaker A: So, like, you're shutting me down again.\n\nSpeaker B: Kristen, I'm just saying, tonight I can. Is there something else that we can do? Like, is there another way that we can, like, make this work? Like, is there something that you can think of? You know, if I can't do it tonight, can we maybe do it the next day? I mean, what are your thoughts?\n\nSpeaker A: I mean, I guess tomorrow morning will be okay. I guess we could talk about it in the morning.\n\nSpeaker B: That would be great.\n\nSpeaker A: You have time?\n\nSpeaker B: Yeah. Yeah, I have time. Yeah. We can do it over breakfast.\n\nSpeaker A: Okay. Okay. I can do that. I can do that. I mean, I get. I get that you had a hard day. Yeah. That would mean a lot if we could talk about it at breakfast.\n\nSpeaker B: Perfect. Trust me, it's going to be better. I'll be better able to listen to you. I'll be more attentive. You'll get your. Your needs met if we do it tomorrow.\n\nSpeaker A: Okay. Okay. I can respect that. I can do that. Okay. Thank you. Go back to your book. Now we're good.", + "raw_category_results": { + "sentiment": { + "messages": [ + { + "speaker": "Speaker A", + "text": "Christina. Oh, I had the worst day. Like, you won't believe what my boss did. I can't wait to tell you about this. You will not believe it. It's just horrendous. Like, she just doesn't respect me. She doesn't, like, listen to me. She pushes me too hard. 
Like, stop.", + "sentiment": { + "label": "negative", + "explanation": "The message contains strong expressions of frustration, anger, and dissatisfaction ('worst day,' 'horrendous,' 'doesn't respect me,' 'doesn't listen to me'). The speaker expresses a desire to vent about a negative experience." + } + }, + { + "speaker": "Speaker B", + "text": "Stop before you keep going. I see that this is really important to you, and you really want to talk about this right now. And I've had the most overwhelming day today at work, and as much as I wish I had the bandwidth for this right now, I don't. And I really want to support you. I know that this is really important for you to share. I just cannot do this. And I feel that if you can respect me in this decision, if you can respect where I'm coming from, I can be more attentive at a later time.", + "sentiment": { + "label": "neutral", + "explanation": "The message contains a mix of empathy ('I see that this is really important to you,' 'I really want to support you') and a firm boundary ('I just cannot do this'). The tone is measured and respectful, but the overall sentiment leans neutral due to the balance of supportive and limiting language." + } + }, + { + "speaker": "Speaker A", + "text": "I mean. I mean, I do get having a hard day. Obviously, I have a hard day, but I had one, too. But, like, I really want to talk. I really feel like I need to talk about it. Like, and you give such good advice, and, like, I feel like you just, like, help me see things clearly. So I just, like, I just really want to talk, you know?", + "sentiment": { + "label": "positive", + "explanation": "The message expresses a desire for connection and appreciation ('you give such good advice,' 'help me see things clearly'). While the speaker acknowledges their own struggles, the overall sentiment is positive due to the emphasis on wanting to talk and feeling supported." + } + }, + { + "speaker": "Speaker B", + "text": "I know you want to talk. 
I hear it. I hear how important it is for you, and I just can't do it. You know how it is when we start having these conversations late at night, I then can't go to sleep, and then it just messes up my whole schedule. We can't have the conversation tonight.", + "sentiment": { + "label": "neutral", + "explanation": "The message conveys empathy ('I know you want to talk,' 'I hear it') but also a firm boundary ('I just can't do it'). The sentiment is neutral due to the balanced tone of understanding and limitation." + } + }, + { + "speaker": "Speaker A", + "text": "So, like, you're shutting me down again.", + "sentiment": { + "label": "negative", + "explanation": "The message is accusatory and conveys frustration ('shutting me down again'), indicating a negative sentiment." + } + }, + { + "speaker": "Speaker B", + "text": "Kristen, I'm just saying, tonight I can. Is there something else that we can do? Like, is there another way that we can, like, make this work? Like, is there something that you can think of? You know, if I can't do it tonight, can we maybe do it the next day? I mean, what are your thoughts?", + "sentiment": { + "label": "neutral", + "explanation": "The message is solution-oriented and shows a willingness to accommodate ('tonight I can,' 'can we do it the next day'). The tone is neutral, as it focuses on finding a compromise rather than expressing strong emotions." + } + }, + { + "speaker": "Speaker A", + "text": "I mean, I guess tomorrow morning will be okay. I guess we could talk about it in the morning.", + "sentiment": { + "label": "neutral", + "explanation": "The message is resigned and pragmatic ('I guess,' 'will be okay'), indicating a neutral sentiment with a hint of acceptance." + } + }, + { + "speaker": "Speaker B", + "text": "Yeah. Yeah, I have time. Yeah. 
We can do it over breakfast.", + "sentiment": { + "label": "positive", + "explanation": "The message is concise and affirmative ('Yeah, I have time,' 'We can do it over breakfast'), with a positive tone of agreement and willingness to accommodate." + } + }, + { + "speaker": "Speaker A", + "text": "Okay. Okay. I can do that. I mean, I get. I get that you had a hard day. Yeah. That would mean a lot if we could talk about it at breakfast.", + "sentiment": { + "label": "positive", + "explanation": "The message shows understanding ('I get that you had a hard day') and appreciation ('that would mean a lot'). The overall sentiment is positive, reflecting acceptance and gratitude." + } + }, + { + "speaker": "Speaker B", + "text": "Perfect. Trust me, it's going to be better. I'll be better able to listen to you. I'll be more attentive. You'll get your needs met if we do it tomorrow.", + "sentiment": { + "label": "positive", + "explanation": "The message is reassuring and optimistic ('Perfect,' 'it's going to be better,' 'I'll be more attentive'). The tone is positive, indicating confidence in the upcoming conversation." + } + }, + { + "speaker": "Speaker A", + "text": "Okay. Okay. I can respect that. I can do that. Okay. Thank you. Go back to your book. Now we're good.", + "sentiment": { + "label": "positive", + "explanation": "The message conveys acceptance ('I can respect that'), gratitude ('Thank you'), and a sense of resolution ('Now we're good'). The overall sentiment is positive." + } + } + ] + }, + "dear_man": { + "messages": [ + { + "speaker": "Speaker A", + "text": "Christina. Oh, I had the worst day. Like, you won't believe what my boss did. I can't wait to tell you about this. You will not believe it. It's just horrendous. Like, she just doesn't respect me. She doesn't, like, listen to me. She pushes me too hard. 
Like, stop.", + "dear_man": { + "score": 0, + "breakdown": { + "describe": { + "adhered": false, + "explanation": "The speaker does not provide factual statements. Instead, they use emotional language ('worst day', 'horrendous') and vague descriptions without specifics." + }, + "express": { + "adhered": true, + "explanation": "The speaker expresses feelings implicitly ('worst day', 'she doesn't respect me', 'she pushes me too hard'). However, they do not explicitly use 'I feel' or 'I felt'." + }, + "assert": { + "adhered": false, + "explanation": "The speaker does not assert their needs using 'I want', 'I need', or similar phrases." + }, + "reinforce": { + "adhered": false, + "explanation": "No reinforcement of a request with 'because', 'this would', or similar phrases." + }, + "mindful": { + "adhered": false, + "explanation": "The speaker goes off-topic with tangents ('Like, stop') and excessive emotional venting." + }, + "appear_confident": { + "adhered": false, + "explanation": "The speaker uses hedging ('like', 'you won't believe') and lacks directness." + }, + "negotiate": { + "adhered": false, + "explanation": "No negotiation attempt is made." + } + } + } + }, + { + "speaker": "Speaker B", + "text": "Stop before you keep going. I see that this is really important to you, and you really want to talk about this right now. And I've had the most overwhelming day today at work, and as much as I wish I had the bandwidth for this right now, I don't. And I really want to support you. I know that this is really important for you to share. I just cannot do this. And I feel that if you can respect me in this decision, if you can respect where I'm coming from, I can be more attentive at a later time.", + "dear_man": { + "score": 1, + "breakdown": { + "describe": { + "adhered": false, + "explanation": "No factual description of the situation or needs." 
+ }, + "express": { + "adhered": true, + "explanation": "The speaker expresses feelings ('I've had the most overwhelming day', 'I really want to support you')." + }, + "assert": { + "adhered": false, + "explanation": "No clear assertion of needs using 'I want', 'I need', etc." + }, + "reinforce": { + "adhered": false, + "explanation": "No reinforcement of a request with 'because' or similar phrases." + }, + "mindful": { + "adhered": false, + "explanation": "The speaker does not stay strictly on topic; they shift focus to their own day and bandwidth." + }, + "appear_confident": { + "adhered": false, + "explanation": "The speaker uses hedging ('I just cannot do this') and is not assertive in their refusal." + }, + "negotiate": { + "adhered": false, + "explanation": "No explicit negotiation attempt, though there is an implicit suggestion of a later time." + } + } + } + }, + { + "speaker": "Speaker A", + "text": "I mean. I mean, I do get having a hard day. Obviously, I have a hard day, but I had one, too. But, like, I really want to talk. I really feel like I need to talk about it. Like, and you give such good advice, and, like, I feel like you just, like, help me see things clearly. So I just, like, I just really want to talk, you know?", + "dear_man": { + "score": 1, + "breakdown": { + "describe": { + "adhered": false, + "explanation": "No factual description of the situation or needs." + }, + "express": { + "adhered": true, + "explanation": "The speaker expresses feelings ('I really feel like I need to talk', 'I feel like you help me see things clearly')." + }, + "assert": { + "adhered": false, + "explanation": "The speaker does not assert needs using 'I want', 'I need' explicitly, though they imply it." + }, + "reinforce": { + "adhered": false, + "explanation": "No reinforcement of a request with 'because' or similar phrases." 
+ }, + "mindful": { + "adhered": false, + "explanation": "The speaker goes off-topic with compliments ('you give such good advice') and tangents ('like, I just really want to talk')." + }, + "appear_confident": { + "adhered": false, + "explanation": "The speaker uses hedging ('like', 'I mean') and lacks directness." + }, + "negotiate": { + "adhered": false, + "explanation": "No negotiation attempt is made." + } + } + } + }, + { + "speaker": "Speaker B", + "text": "I know you want to talk. I hear it. I hear how important it is for you, and I just can't do it. You know how it is when we start having these conversations late at night, I then can't go to sleep, and then it just messes up my whole schedule. We can't have the conversation tonight.", + "dear_man": { + "score": 1, + "breakdown": { + "describe": { + "adhered": false, + "explanation": "No factual description of the situation or needs." + }, + "express": { + "adhered": true, + "explanation": "The speaker expresses feelings ('I just can't do it') and acknowledges the importance of the conversation to the other speaker." + }, + "assert": { + "adhered": false, + "explanation": "No clear assertion of needs using 'I want', 'I need', etc." + }, + "reinforce": { + "adhered": false, + "explanation": "No reinforcement of a request with 'because' or similar phrases." + }, + "mindful": { + "adhered": false, + "explanation": "The speaker shifts focus to their own schedule and sleep, which is tangential." + }, + "appear_confident": { + "adhered": false, + "explanation": "The speaker uses a passive tone ('I just can't do it') and does not assert their boundary confidently." + }, + "negotiate": { + "adhered": false, + "explanation": "No explicit negotiation attempt, though the refusal is stated." 
+ } + } + } + }, + { + "speaker": "Speaker A", + "text": "So, like, you're shutting me down again.", + "dear_man": { + "score": 0, + "breakdown": { + "describe": { + "adhered": false, + "explanation": "No factual description; it is an emotional accusation." + }, + "express": { + "adhered": false, + "explanation": "No explicit expression of feelings using 'I feel' or similar phrases." + }, + "assert": { + "adhered": false, + "explanation": "No assertion of needs." + }, + "reinforce": { + "adhered": false, + "explanation": "No reinforcement of a request." + }, + "mindful": { + "adhered": false, + "explanation": "The message is confrontational and off-topic." + }, + "appear_confident": { + "adhered": false, + "explanation": "The speaker uses a hedging word ('like') and an accusatory tone." + }, + "negotiate": { + "adhered": false, + "explanation": "No negotiation attempt." + } + } + } + }, + { + "speaker": "Speaker B", + "text": "Kristen, I'm just saying, tonight I can. Is there something else that we can do? Like, is there another way that we can, like, make this work? Like, is there something that you can think of? You know, if I can't do it tonight, can we maybe do it the next day? I mean, what are your thoughts?", + "dear_man": { + "score": 3, + "breakdown": { + "describe": { + "adhered": false, + "explanation": "No factual description of the situation or needs." + }, + "express": { + "adhered": false, + "explanation": "No explicit expression of feelings." + }, + "assert": { + "adhered": false, + "explanation": "No clear assertion of needs using 'I want', 'I need', etc." + }, + "reinforce": { + "adhered": false, + "explanation": "No reinforcement of a request with 'because' or similar phrases." + }, + "mindful": { + "adhered": true, + "explanation": "The speaker stays on topic and focuses on finding a solution." + }, + "appear_confident": { + "adhered": false, + "explanation": "The speaker uses hedging ('like', 'maybe') and lacks directness." 
+ }, + "negotiate": { + "adhered": true, + "explanation": "The speaker attempts negotiation with 'what if', 'can we', and 'what are your thoughts?'" + } + } + } + }, + { + "speaker": "Speaker A", + "text": "I mean, I guess tomorrow morning will be okay. I guess we could talk about it in the morning.", + "dear_man": { + "score": 1, + "breakdown": { + "describe": { + "adhered": false, + "explanation": "No factual description of the situation or needs." + }, + "express": { + "adhered": false, + "explanation": "No explicit expression of feelings." + }, + "assert": { + "adhered": false, + "explanation": "No clear assertion of needs." + }, + "reinforce": { + "adhered": false, + "explanation": "No reinforcement of a request." + }, + "mindful": { + "adhered": true, + "explanation": "The speaker stays on topic and agrees to a solution." + }, + "appear_confident": { + "adhered": false, + "explanation": "The speaker uses hedging ('I guess')." + }, + "negotiate": { + "adhered": false, + "explanation": "No explicit negotiation attempt." + } + } + } + }, + { + "speaker": "Speaker B", + "text": "Yeah. Yeah, I have time. Yeah. We can do it over breakfast.", + "dear_man": { + "score": 0, + "breakdown": { + "describe": { + "adhered": false, + "explanation": "No factual description." + }, + "express": { + "adhered": false, + "explanation": "No expression of feelings." + }, + "assert": { + "adhered": false, + "explanation": "No assertion of needs." + }, + "reinforce": { + "adhered": false, + "explanation": "No reinforcement of a request." + }, + "mindful": { + "adhered": true, + "explanation": "The speaker stays on topic." + }, + "appear_confident": { + "adhered": false, + "explanation": "The speaker is vague and lacks directness." + }, + "negotiate": { + "adhered": false, + "explanation": "No negotiation attempt." + } + } + } + }, + { + "speaker": "Speaker A", + "text": "Okay. Okay. I can do that. I mean, I get. I get that you had a hard day. Yeah. 
That would mean a lot if we could talk about it at breakfast.", + "dear_man": { + "score": 1, + "breakdown": { + "describe": { + "adhered": false, + "explanation": "No factual description." + }, + "express": { + "adhered": false, + "explanation": "No explicit expression of feelings." + }, + "assert": { + "adhered": false, + "explanation": "No assertion of needs." + }, + "reinforce": { + "adhered": false, + "explanation": "No reinforcement of a request." + }, + "mindful": { + "adhered": true, + "explanation": "The speaker stays on topic." + }, + "appear_confident": { + "adhered": false, + "explanation": "The speaker uses hedging ('I mean') and is not assertive." + }, + "negotiate": { + "adhered": false, + "explanation": "No negotiation attempt." + } + } + } + }, + { + "speaker": "Speaker B", + "text": "Perfect. Trust me, it's going to be better. I'll be better able to listen to you. I'll be more attentive. You'll get your needs met if we do it tomorrow.", + "dear_man": { + "score": 2, + "breakdown": { + "describe": { + "adhered": false, + "explanation": "No factual description." + }, + "express": { + "adhered": false, + "explanation": "No explicit expression of feelings." + }, + "assert": { + "adhered": false, + "explanation": "No explicit assertion of needs using 'I want', 'I need', etc." + }, + "reinforce": { + "adhered": true, + "explanation": "The speaker reinforces the request with 'because' implied ('it's going to be better' and 'you'll get your needs met')." + }, + "mindful": { + "adhered": true, + "explanation": "The speaker stays on topic." + }, + "appear_confident": { + "adhered": true, + "explanation": "The speaker appears confident and direct." + }, + "negotiate": { + "adhered": false, + "explanation": "No negotiation attempt." + } + } + } + }, + { + "speaker": "Speaker A", + "text": "Okay. Okay. I can respect that. I can do that. Okay. Thank you. Go back to your book. 
Now we're good.", + "dear_man": { + "score": 0, + "breakdown": { + "describe": { + "adhered": false, + "explanation": "No factual description." + }, + "express": { + "adhered": false, + "explanation": "No explicit expression of feelings." + }, + "assert": { + "adhered": false, + "explanation": "No assertion of needs." + }, + "reinforce": { + "adhered": false, + "explanation": "No reinforcement of a request." + }, + "mindful": { + "adhered": true, + "explanation": "The speaker stays on topic." + }, + "appear_confident": { + "adhered": false, + "explanation": "The speaker is vague and lacks assertiveness." + }, + "negotiate": { + "adhered": false, + "explanation": "No negotiation attempt." + } + } + } + } + ] + }, + "fast": { + "messages": [ + { + "speaker": "Speaker A", + "text": "Christina. Oh, I had the worst day. Like, you won't believe what my boss did. I can't wait to tell you about this. You will not believe it. It's just horrendous. Like, she just doesn't respect me. She doesn't, like, listen to me. She pushes me too hard. Like, stop.", + "fast": { + "score": 1, + "breakdown": { + "fair": { + "adhered": false, + "explanation": "The speaker is venting about their boss but does not acknowledge any potential fairness in their boss's actions or their own role in the situation. The tone is overly dramatic and lacks consideration for the boss's perspective." + }, + "apologies": { + "adhered": true, + "explanation": "No apologies are given, and none are needed in this context, so this adheres to the FAST guideline of avoiding unnecessary apologies." + }, + "stick_to_values": { + "adhered": false, + "explanation": "The speaker does not demonstrate adherence to values like empathy, patience, or professionalism. They focus solely on their own frustration without considering the situation from another perspective." 
+ }, + "truthful": { + "adhered": true, + "explanation": "The speaker is truthful in expressing their feelings and experiences, though the tone and framing may lack nuance." + } + } + } + }, + { + "speaker": "Speaker B", + "text": "Stop before you keep going. I see that this is really important to you, and you really want to talk about this right now. And I've had the most overwhelming day today at work, and as much as I wish I had the bandwidth for this right now, I don't. And I really want to support you. I just cannot do this. And I feel that if you can respect me in this decision, if you can respect where I'm coming from, I can be more attentive at a later time.", + "fast": { + "score": 4, + "breakdown": { + "fair": { + "adhered": true, + "explanation": "Speaker B acknowledges the importance of the issue to Speaker A while also recognizing their own limitations. They do not blame Speaker A for their own constraints." + }, + "apologies": { + "adhered": true, + "explanation": "Speaker B does not over-apologize or apologize for things outside their control. They clearly communicate their boundaries without unnecessary guilt." + }, + "stick_to_values": { + "adhered": true, + "explanation": "Speaker B demonstrates respect (a key value) by honoring their own boundaries and encouraging Speaker A to do the same. They also show empathy and consideration." + }, + "truthful": { + "adhered": true, + "explanation": "Speaker B is honest about their own limitations and the reasons behind their inability to engage fully at that moment." + } + } + } + }, + { + "speaker": "Speaker A", + "text": "I mean. I mean, I do get having a hard day. Obviously, I have a hard day, but I had one, too. But, like, I really want to talk. I really feel like I need to talk about it. Like, and you give such good advice, and, like, I feel like you just, like, help me see things clearly. 
So I just, like, I just really want to talk, you know?", + "fast": { + "score": 2, + "breakdown": { + "fair": { + "adhered": false, + "explanation": "Speaker A acknowledges Speaker B's hard day but quickly shifts back to their own needs without fully validating Speaker B's boundaries or perspective." + }, + "apologies": { + "adhered": true, + "explanation": "No unnecessary apologies are given, and Speaker A is not apologizing for their own needs." + }, + "stick_to_values": { + "adhered": false, + "explanation": "Speaker A does not fully respect Speaker B's boundaries or values by continuing to push for immediate attention despite Speaker B's clear limits." + }, + "truthful": { + "adhered": true, + "explanation": "Speaker A is truthful about their feelings and their need to talk, though their tone and phrasing lack nuance." + } + } + } + }, + { + "speaker": "Speaker B", + "text": "I know you want to talk. I hear it. I hear how important it is for you, and I just can't do it. You know how it is when we start having these conversations late at night, I then can't go to sleep, and then it just messes up my whole schedule. We can't have the conversation tonight.", + "fast": { + "score": 4, + "breakdown": { + "fair": { + "adhered": true, + "explanation": "Speaker B reaffirms their inability to engage while still acknowledging Speaker A's feelings, maintaining fairness by not dismissing Speaker A's needs entirely." + }, + "apologies": { + "adhered": true, + "explanation": "No over-apologizing or unnecessary guilt is expressed; Speaker B clearly communicates their limits without over-apologizing." + }, + "stick_to_values": { + "adhered": true, + "explanation": "Speaker B upholds their values by respecting their own boundaries and the importance of their schedule, while still showing empathy for Speaker A." 
+ }, + "truthful": { + "adhered": true, + "explanation": "Speaker B is honest about the consequences of their engagement and why they cannot accommodate Speaker A at that moment." + } + } + } + }, + { + "speaker": "Speaker A", + "text": "So, like, you're shutting me down again.", + "fast": { + "score": 1, + "breakdown": { + "fair": { + "adhered": false, + "explanation": "The phrasing implies that Speaker B is intentionally rejecting Speaker A, which is unfair and dismissive of Speaker B's valid reasons for setting boundaries." + }, + "apologies": { + "adhered": true, + "explanation": "No apologies are given, and none are needed here, so this adheres to the guideline." + }, + "stick_to_values": { + "adhered": false, + "explanation": "Speaker A does not demonstrate respect or consideration for Speaker B's boundaries, undermining values like mutual respect." + }, + "truthful": { + "adhered": false, + "explanation": "While the statement is factually true, the tone and phrasing are unconstructive and unfair, which undermines authenticity in communication." + } + } + } + }, + { + "speaker": "Speaker B", + "text": "Kristen, I'm just saying, tonight I can. Is there something else that we can do? Like, is there another way that we can, like, make this work? Like, is there something that you can think of? You know, if I can't do it tonight, can we maybe do it the next day? I mean, what are your thoughts?", + "fast": { + "score": 4, + "breakdown": { + "fair": { + "adhered": true, + "explanation": "Speaker B offers an alternative solution and invites collaboration, showing fairness by considering Speaker A's needs while maintaining their own boundaries." + }, + "apologies": { + "adhered": true, + "explanation": "No unnecessary apologies are given, and Speaker B remains firm in their boundaries while still being accommodating." 
+ }, + "stick_to_values": { + "adhered": true, + "explanation": "Speaker B demonstrates respect and problem-solving, adhering to values of collaboration and mutual support." + }, + "truthful": { + "adhered": true, + "explanation": "Speaker B is honest about their availability and open about seeking a mutually agreeable solution." + } + } + } + }, + { + "speaker": "Speaker A", + "text": "I mean, I guess tomorrow morning will be okay. I guess we could talk about it in the morning.", + "fast": { + "score": 3, + "breakdown": { + "fair": { + "adhered": true, + "explanation": "Speaker A agrees to a compromise, showing some consideration for Speaker B's boundaries." + }, + "apologies": { + "adhered": true, + "explanation": "No unnecessary apologies are given, and Speaker A does not apologize for their own needs." + }, + "stick_to_values": { + "adhered": false, + "explanation": "While Speaker A compromises, the phrasing ('I guess') still lacks enthusiasm or full engagement with the values of mutual respect and collaboration." + }, + "truthful": { + "adhered": true, + "explanation": "Speaker A is truthful about their willingness to accommodate Speaker B's suggestion." + } + } + } + }, + { + "speaker": "Speaker B", + "text": "Perfect. Trust me, it's going to be better. I'll be better able to listen to you. I'll be more attentive. You'll get your needs met if we do it tomorrow.", + "fast": { + "score": 4, + "breakdown": { + "fair": { + "adhered": true, + "explanation": "Speaker B reassures Speaker A while maintaining their own commitment to being attentive, showing fairness and consideration." + }, + "apologies": { + "adhered": true, + "explanation": "No unnecessary apologies are given, and Speaker B remains confident in their ability to meet Speaker A's needs." + }, + "stick_to_values": { + "adhered": true, + "explanation": "Speaker B upholds values of reliability, attentiveness, and mutual support." 
+ }, + "truthful": { + "adhered": true, + "explanation": "Speaker B is honest about their commitment to being more attentive and meeting Speaker A's needs." + } + } + } + }, + { + "speaker": "Speaker A", + "text": "Okay. Okay. I can respect that. I can do that. Okay. Thank you. Go back to your book. Now we're good.", + "fast": { + "score": 3, + "breakdown": { + "fair": { + "adhered": true, + "explanation": "Speaker A acknowledges Speaker B's boundaries and agrees to the compromise, showing fairness and respect." + }, + "apologies": { + "adhered": true, + "explanation": "No unnecessary apologies are given." + }, + "stick_to_values": { + "adhered": false, + "explanation": "While Speaker A agrees to the plan, the phrase 'Go back to your book' could be perceived as dismissive or sarcastic, undermining values of mutual respect." + }, + "truthful": { + "adhered": true, + "explanation": "Speaker A is truthful in expressing their agreement and respect for Speaker B's decision." + } + } + } + } + ] + } + }, + "messages": [ + { + "speaker": "Speaker A", + "text": "Christina. Oh, I had the worst day. Like, you won't believe what my boss did. I can't wait to tell you about this. You will not believe it. It's just horrendous. Like, she just doesn't respect me. She doesn't, like, listen to me. She pushes me too hard. Like, stop.", + "sentiment": { + "label": "negative", + "explanation": "The message contains strong expressions of frustration, anger, and dissatisfaction ('worst day,' 'horrendous,' 'doesn't respect me,' 'doesn't listen to me'). The speaker expresses a desire to vent about a negative experience." + }, + "dear_man": { + "score": 0, + "breakdown": { + "describe": { + "adhered": false, + "explanation": "The speaker does not provide factual statements. Instead, they use emotional language ('worst day', 'horrendous') and vague descriptions without specifics." 
+ }, + "express": { + "adhered": true, + "explanation": "The speaker expresses feelings implicitly ('worst day', 'she doesn't respect me', 'she pushes me too hard'). However, they do not explicitly use 'I feel' or 'I felt'." + }, + "assert": { + "adhered": false, + "explanation": "The speaker does not assert their needs using 'I want', 'I need', or similar phrases." + }, + "reinforce": { + "adhered": false, + "explanation": "No reinforcement of a request with 'because', 'this would', or similar phrases." + }, + "mindful": { + "adhered": false, + "explanation": "The speaker goes off-topic with tangents ('Like, stop') and excessive emotional venting." + }, + "appear_confident": { + "adhered": false, + "explanation": "The speaker uses hedging ('like', 'you won't believe') and lacks directness." + }, + "negotiate": { + "adhered": false, + "explanation": "No negotiation attempt is made." + } + } + }, + "fast": { + "score": 1, + "breakdown": { + "fair": { + "adhered": false, + "explanation": "The speaker is venting about their boss but does not acknowledge any potential fairness in their boss's actions or their own role in the situation. The tone is overly dramatic and lacks consideration for the boss's perspective." + }, + "apologies": { + "adhered": true, + "explanation": "No apologies are given, and none are needed in this context, so this adheres to the FAST guideline of avoiding unnecessary apologies." + }, + "stick_to_values": { + "adhered": false, + "explanation": "The speaker does not demonstrate adherence to values like empathy, patience, or professionalism. They focus solely on their own frustration without considering the situation from another perspective." + }, + "truthful": { + "adhered": true, + "explanation": "The speaker is truthful in expressing their feelings and experiences, though the tone and framing may lack nuance." + } + } + } + }, + { + "speaker": "Speaker B", + "text": "Stop before you keep going. 
I see that this is really important to you, and you really want to talk about this right now. And I've had the most overwhelming day today at work, and as much as I wish I had the bandwidth for this right now, I don't. And I really want to support you. I know that this is really important for you to share. I just cannot do this. And I feel that if you can respect me in this decision, if you can respect where I'm coming from, I can be more attentive at a later time.", + "sentiment": { + "label": "neutral", + "explanation": "The message contains a mix of empathy ('I see that this is really important to you,' 'I really want to support you') and a firm boundary ('I just cannot do this'). The tone is measured and respectful, but the overall sentiment leans neutral due to the balance of supportive and limiting language." + }, + "dear_man": { + "score": 1, + "breakdown": { + "describe": { + "adhered": false, + "explanation": "No factual description of the situation or needs." + }, + "express": { + "adhered": true, + "explanation": "The speaker expresses feelings ('I've had the most overwhelming day', 'I really want to support you')." + }, + "assert": { + "adhered": false, + "explanation": "No clear assertion of needs using 'I want', 'I need', etc." + }, + "reinforce": { + "adhered": false, + "explanation": "No reinforcement of a request with 'because' or similar phrases." + }, + "mindful": { + "adhered": false, + "explanation": "The speaker does not stay strictly on topic; they shift focus to their own day and bandwidth." + }, + "appear_confident": { + "adhered": false, + "explanation": "The speaker uses hedging ('I just cannot do this') and is not assertive in their refusal." + }, + "negotiate": { + "adhered": false, + "explanation": "No explicit negotiation attempt, though there is an implicit suggestion of a later time." 
+ } + } + }, + "fast": { + "score": 4, + "breakdown": { + "fair": { + "adhered": true, + "explanation": "Speaker B acknowledges the importance of the issue to Speaker A while also recognizing their own limitations. They do not blame Speaker A for their own constraints." + }, + "apologies": { + "adhered": true, + "explanation": "Speaker B does not over-apologize or apologize for things outside their control. They clearly communicate their boundaries without unnecessary guilt." + }, + "stick_to_values": { + "adhered": true, + "explanation": "Speaker B demonstrates respect (a key value) by honoring their own boundaries and encouraging Speaker A to do the same. They also show empathy and consideration." + }, + "truthful": { + "adhered": true, + "explanation": "Speaker B is honest about their own limitations and the reasons behind their inability to engage fully at that moment." + } + } + } + }, + { + "speaker": "Speaker A", + "text": "I mean. I mean, I do get having a hard day. Obviously, I have a hard day, but I had one, too. But, like, I really want to talk. I really feel like I need to talk about it. Like, and you give such good advice, and, like, I feel like you just, like, help me see things clearly. So I just, like, I just really want to talk, you know?", + "sentiment": { + "label": "positive", + "explanation": "The message expresses a desire for connection and appreciation ('you give such good advice,' 'help me see things clearly'). While the speaker acknowledges their own struggles, the overall sentiment is positive due to the emphasis on wanting to talk and feeling supported." + }, + "dear_man": { + "score": 1, + "breakdown": { + "describe": { + "adhered": false, + "explanation": "No factual description of the situation or needs." + }, + "express": { + "adhered": true, + "explanation": "The speaker expresses feelings ('I really feel like I need to talk', 'I feel like you help me see things clearly')." 
+ }, + "assert": { + "adhered": false, + "explanation": "The speaker does not assert needs using 'I want', 'I need' explicitly, though they imply it." + }, + "reinforce": { + "adhered": false, + "explanation": "No reinforcement of a request with 'because' or similar phrases." + }, + "mindful": { + "adhered": false, + "explanation": "The speaker goes off-topic with compliments ('you give such good advice') and tangents ('like, I just really want to talk')." + }, + "appear_confident": { + "adhered": false, + "explanation": "The speaker uses hedging ('like', 'I mean') and lacks directness." + }, + "negotiate": { + "adhered": false, + "explanation": "No negotiation attempt is made." + } + } + }, + "fast": { + "score": 2, + "breakdown": { + "fair": { + "adhered": false, + "explanation": "Speaker A acknowledges Speaker B's hard day but quickly shifts back to their own needs without fully validating Speaker B's boundaries or perspective." + }, + "apologies": { + "adhered": true, + "explanation": "No unnecessary apologies are given, and Speaker A is not apologizing for their own needs." + }, + "stick_to_values": { + "adhered": false, + "explanation": "Speaker A does not fully respect Speaker B's boundaries or values by continuing to push for immediate attention despite Speaker B's clear limits." + }, + "truthful": { + "adhered": true, + "explanation": "Speaker A is truthful about their feelings and their need to talk, though their tone and phrasing lack nuance." + } + } + } + }, + { + "speaker": "Speaker B", + "text": "I know you want to talk. I hear it. I hear how important it is for you, and I just can't do it. You know how it is when we start having these conversations late at night, I then can't go to sleep, and then it just messes up my whole schedule. We can't have the conversation tonight.", + "sentiment": { + "label": "neutral", + "explanation": "The message conveys empathy ('I know you want to talk,' 'I hear it') but also a firm boundary ('I just can't do it'). 
The sentiment is neutral due to the balanced tone of understanding and limitation." + }, + "dear_man": { + "score": 1, + "breakdown": { + "describe": { + "adhered": false, + "explanation": "No factual description of the situation or needs." + }, + "express": { + "adhered": true, + "explanation": "The speaker expresses feelings ('I just can't do it') and acknowledges the importance of the conversation to the other speaker." + }, + "assert": { + "adhered": false, + "explanation": "No clear assertion of needs using 'I want', 'I need', etc." + }, + "reinforce": { + "adhered": false, + "explanation": "No reinforcement of a request with 'because' or similar phrases." + }, + "mindful": { + "adhered": false, + "explanation": "The speaker shifts focus to their own schedule and sleep, which is tangential." + }, + "appear_confident": { + "adhered": false, + "explanation": "The speaker uses a passive tone ('I just can't do it') and does not assert their boundary confidently." + }, + "negotiate": { + "adhered": false, + "explanation": "No explicit negotiation attempt, though the refusal is stated." + } + } + }, + "fast": { + "score": 4, + "breakdown": { + "fair": { + "adhered": true, + "explanation": "Speaker B reaffirms their inability to engage while still acknowledging Speaker A's feelings, maintaining fairness by not dismissing Speaker A's needs entirely." + }, + "apologies": { + "adhered": true, + "explanation": "No over-apologizing or unnecessary guilt is expressed; Speaker B clearly communicates their limits without over-apologizing." + }, + "stick_to_values": { + "adhered": true, + "explanation": "Speaker B upholds their values by respecting their own boundaries and the importance of their schedule, while still showing empathy for Speaker A." + }, + "truthful": { + "adhered": true, + "explanation": "Speaker B is honest about the consequences of their engagement and why they cannot accommodate Speaker A at that moment." 
+ } + } + } + }, + { + "speaker": "Speaker A", + "text": "So, like, you're shutting me down again.", + "sentiment": { + "label": "negative", + "explanation": "The message is accusatory and conveys frustration ('shutting me down again'), indicating a negative sentiment." + }, + "dear_man": { + "score": 0, + "breakdown": { + "describe": { + "adhered": false, + "explanation": "No factual description; it is an emotional accusation." + }, + "express": { + "adhered": false, + "explanation": "No explicit expression of feelings using 'I feel' or similar phrases." + }, + "assert": { + "adhered": false, + "explanation": "No assertion of needs." + }, + "reinforce": { + "adhered": false, + "explanation": "No reinforcement of a request." + }, + "mindful": { + "adhered": false, + "explanation": "The message is confrontational and off-topic." + }, + "appear_confident": { + "adhered": false, + "explanation": "The speaker uses a hedging word ('like') and an accusatory tone." + }, + "negotiate": { + "adhered": false, + "explanation": "No negotiation attempt." + } + } + }, + "fast": { + "score": 1, + "breakdown": { + "fair": { + "adhered": false, + "explanation": "The phrasing implies that Speaker B is intentionally rejecting Speaker A, which is unfair and dismissive of Speaker B's valid reasons for setting boundaries." + }, + "apologies": { + "adhered": true, + "explanation": "No apologies are given, and none are needed here, so this adheres to the guideline." + }, + "stick_to_values": { + "adhered": false, + "explanation": "Speaker A does not demonstrate respect or consideration for Speaker B's boundaries, undermining values like mutual respect." + }, + "truthful": { + "adhered": false, + "explanation": "While the statement is factually true, the tone and phrasing are unconstructive and unfair, which undermines authenticity in communication." + } + } + } + }, + { + "speaker": "Speaker B", + "text": "Kristen, I'm just saying, tonight I can. Is there something else that we can do? 
Like, is there another way that we can, like, make this work? Like, is there something that you can think of? You know, if I can't do it tonight, can we maybe do it the next day? I mean, what are your thoughts?", + "sentiment": { + "label": "neutral", + "explanation": "The message is solution-oriented and shows a willingness to accommodate ('tonight I can,' 'can we do it the next day'). The tone is neutral, as it focuses on finding a compromise rather than expressing strong emotions." + }, + "dear_man": { + "score": 3, + "breakdown": { + "describe": { + "adhered": false, + "explanation": "No factual description of the situation or needs." + }, + "express": { + "adhered": false, + "explanation": "No explicit expression of feelings." + }, + "assert": { + "adhered": false, + "explanation": "No clear assertion of needs using 'I want', 'I need', etc." + }, + "reinforce": { + "adhered": false, + "explanation": "No reinforcement of a request with 'because' or similar phrases." + }, + "mindful": { + "adhered": true, + "explanation": "The speaker stays on topic and focuses on finding a solution." + }, + "appear_confident": { + "adhered": false, + "explanation": "The speaker uses hedging ('like', 'maybe') and lacks directness." + }, + "negotiate": { + "adhered": true, + "explanation": "The speaker attempts negotiation with 'what if', 'can we', and 'what are your thoughts?'" + } + } + }, + "fast": { + "score": 4, + "breakdown": { + "fair": { + "adhered": true, + "explanation": "Speaker B offers an alternative solution and invites collaboration, showing fairness by considering Speaker A's needs while maintaining their own boundaries." + }, + "apologies": { + "adhered": true, + "explanation": "No unnecessary apologies are given, and Speaker B remains firm in their boundaries while still being accommodating." 
+ }, + "stick_to_values": { + "adhered": true, + "explanation": "Speaker B demonstrates respect and problem-solving, adhering to values of collaboration and mutual support." + }, + "truthful": { + "adhered": true, + "explanation": "Speaker B is honest about their availability and open about seeking a mutually agreeable solution." + } + } + } + }, + { + "speaker": "Speaker A", + "text": "I mean, I guess tomorrow morning will be okay. I guess we could talk about it in the morning.", + "sentiment": { + "label": "neutral", + "explanation": "The message is resigned and pragmatic ('I guess,' 'will be okay'), indicating a neutral sentiment with a hint of acceptance." + }, + "dear_man": { + "score": 1, + "breakdown": { + "describe": { + "adhered": false, + "explanation": "No factual description of the situation or needs." + }, + "express": { + "adhered": false, + "explanation": "No explicit expression of feelings." + }, + "assert": { + "adhered": false, + "explanation": "No clear assertion of needs." + }, + "reinforce": { + "adhered": false, + "explanation": "No reinforcement of a request." + }, + "mindful": { + "adhered": true, + "explanation": "The speaker stays on topic and agrees to a solution." + }, + "appear_confident": { + "adhered": false, + "explanation": "The speaker uses hedging ('I guess')." + }, + "negotiate": { + "adhered": false, + "explanation": "No explicit negotiation attempt." + } + } + }, + "fast": { + "score": 3, + "breakdown": { + "fair": { + "adhered": true, + "explanation": "Speaker A agrees to a compromise, showing some consideration for Speaker B's boundaries." + }, + "apologies": { + "adhered": true, + "explanation": "No unnecessary apologies are given, and Speaker A does not apologize for their own needs." + }, + "stick_to_values": { + "adhered": false, + "explanation": "While Speaker A compromises, the phrasing ('I guess') still lacks enthusiasm or full engagement with the values of mutual respect and collaboration." 
+ }, + "truthful": { + "adhered": true, + "explanation": "Speaker A is truthful about their willingness to accommodate Speaker B's suggestion." + } + } + } + }, + { + "speaker": "Speaker B", + "text": "That would be great.", + "sentiment": { + "label": "positive", + "explanation": "The message is concise and affirmative ('Yeah, I have time,' 'We can do it over breakfast'), with a positive tone of agreement and willingness to accommodate." + }, + "dear_man": { + "score": 0, + "breakdown": { + "describe": { + "adhered": false, + "explanation": "No factual description." + }, + "express": { + "adhered": false, + "explanation": "No expression of feelings." + }, + "assert": { + "adhered": false, + "explanation": "No assertion of needs." + }, + "reinforce": { + "adhered": false, + "explanation": "No reinforcement of a request." + }, + "mindful": { + "adhered": true, + "explanation": "The speaker stays on topic." + }, + "appear_confident": { + "adhered": false, + "explanation": "The speaker is vague and lacks directness." + }, + "negotiate": { + "adhered": false, + "explanation": "No negotiation attempt." + } + } + }, + "fast": { + "score": 4, + "breakdown": { + "fair": { + "adhered": true, + "explanation": "Speaker B reassures Speaker A while maintaining their own commitment to being attentive, showing fairness and consideration." + }, + "apologies": { + "adhered": true, + "explanation": "No unnecessary apologies are given, and Speaker B remains confident in their ability to meet Speaker A's needs." + }, + "stick_to_values": { + "adhered": true, + "explanation": "Speaker B upholds values of reliability, attentiveness, and mutual support." + }, + "truthful": { + "adhered": true, + "explanation": "Speaker B is honest about their commitment to being more attentive and meeting Speaker A's needs." 
+ } + } + } + }, + { + "speaker": "Speaker A", + "text": "You have time?", + "sentiment": { + "label": "positive", + "explanation": "The message shows understanding ('I get that you had a hard day') and appreciation ('that would mean a lot'). The overall sentiment is positive, reflecting acceptance and gratitude." + }, + "dear_man": { + "score": 1, + "breakdown": { + "describe": { + "adhered": false, + "explanation": "No factual description." + }, + "express": { + "adhered": false, + "explanation": "No explicit expression of feelings." + }, + "assert": { + "adhered": false, + "explanation": "No assertion of needs." + }, + "reinforce": { + "adhered": false, + "explanation": "No reinforcement of a request." + }, + "mindful": { + "adhered": true, + "explanation": "The speaker stays on topic." + }, + "appear_confident": { + "adhered": false, + "explanation": "The speaker uses hedging ('I mean') and is not assertive." + }, + "negotiate": { + "adhered": false, + "explanation": "No negotiation attempt." + } + } + }, + "fast": { + "score": 3, + "breakdown": { + "fair": { + "adhered": true, + "explanation": "Speaker A acknowledges Speaker B's boundaries and agrees to the compromise, showing fairness and respect." + }, + "apologies": { + "adhered": true, + "explanation": "No unnecessary apologies are given." + }, + "stick_to_values": { + "adhered": false, + "explanation": "While Speaker A agrees to the plan, the phrase 'Go back to your book' could be perceived as dismissive or sarcastic, undermining values of mutual respect." + }, + "truthful": { + "adhered": true, + "explanation": "Speaker A is truthful in expressing their agreement and respect for Speaker B's decision." + } + } + } + }, + { + "speaker": "Speaker B", + "text": "Yeah. Yeah, I have time. Yeah. We can do it over breakfast.", + "sentiment": { + "label": "positive", + "explanation": "The message is reassuring and optimistic ('Perfect,' 'it's going to be better,' 'I'll be more attentive'). 
The tone is positive, indicating confidence in the upcoming conversation." + }, + "dear_man": { + "score": 2, + "breakdown": { + "describe": { + "adhered": false, + "explanation": "No factual description." + }, + "express": { + "adhered": false, + "explanation": "No explicit expression of feelings." + }, + "assert": { + "adhered": false, + "explanation": "No explicit assertion of needs using 'I want', 'I need', etc." + }, + "reinforce": { + "adhered": true, + "explanation": "The speaker reinforces the request with 'because' implied ('it's going to be better' and 'you'll get your needs met')." + }, + "mindful": { + "adhered": true, + "explanation": "The speaker stays on topic." + }, + "appear_confident": { + "adhered": true, + "explanation": "The speaker appears confident and direct." + }, + "negotiate": { + "adhered": false, + "explanation": "No negotiation attempt." + } + } + } + }, + { + "speaker": "Speaker A", + "text": "Okay. Okay. I can do that. I can do that. I mean, I get. I get that you had a hard day. Yeah. That would mean a lot if we could talk about it at breakfast.", + "sentiment": { + "label": "positive", + "explanation": "The message conveys acceptance ('I can respect that'), gratitude ('Thank you'), and a sense of resolution ('Now we're good'). The overall sentiment is positive." + }, + "dear_man": { + "score": 0, + "breakdown": { + "describe": { + "adhered": false, + "explanation": "No factual description." + }, + "express": { + "adhered": false, + "explanation": "No explicit expression of feelings." + }, + "assert": { + "adhered": false, + "explanation": "No assertion of needs." + }, + "reinforce": { + "adhered": false, + "explanation": "No reinforcement of a request." + }, + "mindful": { + "adhered": true, + "explanation": "The speaker stays on topic." + }, + "appear_confident": { + "adhered": false, + "explanation": "The speaker is vague and lacks assertiveness." + }, + "negotiate": { + "adhered": false, + "explanation": "No negotiation attempt." 
+ } + } + } + }, + { + "speaker": "Speaker B", + "text": "Perfect. Trust me, it's going to be better. I'll be better able to listen to you. I'll be more attentive. You'll get your. Your needs met if we do it tomorrow." + }, + { + "speaker": "Speaker A", + "text": "Okay. Okay. I can respect that. I can do that. Okay. Thank you. Go back to your book. Now we're good." + } + ] +} \ No newline at end of file diff --git a/backend/clients.py b/backend/clients.py new file mode 100644 index 0000000..11a4693 --- /dev/null +++ b/backend/clients.py @@ -0,0 +1,88 @@ +import os +import json +import requests +import assemblyai as aai +from pathlib import Path +from typing import List, Dict, Any, Union + + +def get_assemblyai_client(): + """ + Initialize and return an AssemblyAI client for speech-to-text transcription. + """ + api_key = os.getenv('ASSEMBLYAI_API_KEY') + if not api_key: + raise ValueError("ASSEMBLYAI_API_KEY environment variable is not set") + + aai.settings.api_key = api_key + return aai.Transcriber() + + +def transcribe_audio(audio_file_path: str) -> Union[List[Dict[str, str]], Any]: + """ + Transcribe an audio file using AssemblyAI, or load from cache if available. 
+ + Args: + audio_file_path: Path to the audio file to transcribe + + Returns: + If cached: List of dictionaries with 'speaker' and 'message' keys + If not cached: AssemblyAI transcript object + """ + # Extract just the filename (without path and extension) + audio_filename = Path(audio_file_path).stem + + # Check for cached transcript in audio_transcripts directory + backend_dir = Path(__file__).parent + transcript_dir = backend_dir / 'audio_transcripts' + cached_file = transcript_dir / f"{audio_filename}.json" + + if cached_file.exists(): + print(f"Loading cached transcript from: {cached_file}") + with open(cached_file, 'r', encoding='utf-8') as f: + cached_data = json.load(f).get("transcript") + # Return as list of dicts if it's already in the right format + if isinstance(cached_data, list) and all( + isinstance(item, dict) and 'speaker' in item and 'message' in item + for item in cached_data + ): + return cached_data + + # If no cache, transcribe using AssemblyAI + print(f"Transcribing audio file with AssemblyAI: {audio_file_path}") + transcriber = get_assemblyai_client() + transcript = transcriber.transcribe(audio_file_path) + + if transcript.error: + raise Exception(f"Transcription error: {transcript.error}") + + return transcript + + +def openrouter_request(model_name="qwen/qwen3-235b-a22b-2507", messages=[]): + """ + Make a request to OpenRouter API for LLM completions. 
+
+    Args:
+        model_name: The model to use (default: qwen/qwen3-235b-a22b-2507)
+        messages: List of message dictionaries with 'role' and 'content' keys
+
+    Returns:
+        Response object from requests
+    """
+    openrouter_key = os.getenv('OPENROUTER_API_KEY')
+    if not openrouter_key:
+        raise ValueError("OPENROUTER_API_KEY environment variable is not set")
+
+    response = requests.post(
+        url="https://openrouter.ai/api/v1/chat/completions",
+        headers={
+            "Authorization": f"Bearer {openrouter_key}",
+            "Content-Type": "application/json",
+        },
+        data=json.dumps({
+            "model": model_name,
+            "messages": messages,
+        })
+    )
+    return response
diff --git a/backend/data_pipelines/__init__.py b/backend/data_pipelines/__init__.py
new file mode 100644
index 0000000..ff0273d
--- /dev/null
+++ b/backend/data_pipelines/__init__.py
@@ -0,0 +1,2 @@
+# Data pipelines package
+import backend
\ No newline at end of file
diff --git a/backend/data_pipelines/ingest_audio_and_do_analysis.py b/backend/data_pipelines/ingest_audio_and_do_analysis.py
new file mode 100644
index 0000000..8ae0722
--- /dev/null
+++ b/backend/data_pipelines/ingest_audio_and_do_analysis.py
@@ -0,0 +1,313 @@
+import json
+import os
+from pathlib import Path
+from typing import Dict, Any, Optional, List, Union
+
+from backend.clients import transcribe_audio, openrouter_request
+from backend.prompts import (
+    SENTIMENT_EVALUATION_PROMPT,
+    DEAR_MAN_EVALUATION_PROMPT,
+    FAST_EVALUATION_PROMPT,
+    SENTIMENT_TRANSCRIPT_PROMPT,
+    DEAR_MAN_TRANSCRIPT_PROMPT,
+    FAST_TRANSCRIPT_PROMPT,
+)
+from dotenv import load_dotenv
+import re
+
+load_dotenv()
+
+
+def transform_to_utterance_list(transcript: Union[List[Dict[str, str]], Any]) -> List[Dict[str, str]]:
+    """
+    Transform transcript into a list of dictionaries with 'speaker' and 'message' keys.
+ + Args: + transcript: Either a list of dicts (cached) or an AssemblyAI transcript object + + Returns: + List of dictionaries with 'speaker' and 'message' keys + """ + # If it's already a list of dicts (cached), return it + if isinstance(transcript, list) and all( + isinstance(item, dict) and 'speaker' in item and 'message' in item + for item in transcript + ): + return transcript + + # Otherwise, it's an AssemblyAI transcript object - extract utterances + utterance_list = [] + # Check if speaker diarization is available + if hasattr(transcript, 'utterances') and transcript.utterances: + # Use utterances with speaker labels + for utterance in transcript.utterances: + speaker = utterance.speaker if hasattr(utterance, 'speaker') else "Unknown" + text = utterance.text if hasattr(utterance, 'text') else "" + utterance_list.append({ + "speaker": f"Speaker {speaker}" if not speaker.startswith("Speaker") else speaker, + "message": text + }) + else: + # Fallback: use the full text without speaker labels + text = transcript.text if hasattr(transcript, 'text') else str(transcript) + utterance_list.append({ + "speaker": "Speaker", + "message": text + }) + + return utterance_list + + +def format_transcript_for_analysis(utterance_list: List[Dict[str, str]]) -> str: + """ + Format a list of utterance dictionaries into a string for analysis. + + Args: + utterance_list: List of dictionaries with 'speaker' and 'message' keys + + Returns: + Formatted transcript string with speaker labels + """ + formatted_lines = [] + + for utterance in utterance_list: + speaker = utterance.get('speaker', 'Unknown') + message = utterance.get('message', '') + formatted_lines.append(f"{speaker}: {message}") + return "\n\n".join(formatted_lines) + + +def analyze_transcript( + transcript: str, + category: str, + model_name: str = "openai/gpt-4o-mini" +) -> Dict[str, Any]: + """ + Analyze an entire transcript for a single category using transcript-level prompts. 
+ Returns the API response containing a "messages" array with per-message results for that category. + """ + if category == 'sentiment': + prompt_template = SENTIMENT_TRANSCRIPT_PROMPT + elif category == 'dear_man': + prompt_template = DEAR_MAN_TRANSCRIPT_PROMPT + elif category == 'fast': + prompt_template = FAST_TRANSCRIPT_PROMPT + else: + raise ValueError(f"Unknown category: {category}. Must be 'sentiment', 'dear_man', or 'fast'") + + prompt = prompt_template.format(transcript=transcript) + messages = [ + {"role": "system", "content": "You are a helpful assistant that categorizes transcript messages based on guidelines. Always respond with valid JSON."}, + {"role": "user", "content": prompt} + ] + response = openrouter_request(model_name=model_name, messages=messages) + + if response.status_code != 200: + raise Exception(f"OpenRouter API error: {response.status_code} - {response.text}") + + response_data = response.json() + analysis_text = response_data.get("choices", [{}])[0].get("message", {}).get("content", "") + + try: + json_match = re.search(r'\{[\s\S]*\}', analysis_text) + if json_match: + analysis = json.loads(json_match.group(0)) + return analysis + except json.JSONDecodeError as e: + print(f"Warning: Could not parse JSON response for {category}: {e}") + return {"raw_response": analysis_text, "error": str(e)} + + return {"raw_response": analysis_text} + + +def merge_category_results_into_messages( + utterance_list: List[Dict[str, str]], + sentiment_result: Dict[str, Any], + dear_man_result: Dict[str, Any], + fast_result: Dict[str, Any], +) -> List[Dict[str, Any]]: + """ + Merge per-category LLM results so each message has sentiment, dear_man, and fast fields. + Matches by index (same order as transcript); fills from utterance_list when category missing. 
+ """ + categories = ["sentiment", "dear_man", "fast"] + results = [sentiment_result, dear_man_result, fast_result] + + def get_messages(data: Dict[str, Any]) -> List[Dict[str, Any]]: + if not data or "messages" not in data: + return [] + return data.get("messages", []) + + # Start with one row per utterance + merged: List[Dict[str, Any]] = [] + for u in utterance_list: + merged.append({ + "speaker": u.get("speaker", "Unknown"), + "text": u.get("message", ""), + }) + + for cat, data in zip(categories, results): + messages = get_messages(data) + if not messages and ("error" in data or "raw_response" in data): + for row in merged: + row[cat] = data + continue + for i, msg in enumerate(messages): + if i >= len(merged): + break + if cat in msg: + merged[i][cat] = msg[cat] + elif "error" in msg or "raw_response" in msg: + merged[i][cat] = msg + else: + merged[i][cat] = msg + + return merged + +def analyze_single_message( + speaker: str, + message: str, + category: str, + model_name: str = "openai/gpt-4o-mini" +) -> Dict[str, Any]: + """ + Analyze a single message for a specific category (sentiment, dear_man, or fast). + + Args: + speaker: The speaker identifier + message: The message text to analyze + category: The category to analyze ('sentiment', 'dear_man', or 'fast') + model_name: OpenRouter model name to use + + Returns: + Dictionary containing the analysis result for the category + """ + # Select the appropriate prompt based on category + if category == 'sentiment': + prompt_template = SENTIMENT_EVALUATION_PROMPT + elif category == 'dear_man': + prompt_template = DEAR_MAN_EVALUATION_PROMPT + elif category == 'fast': + prompt_template = FAST_EVALUATION_PROMPT + else: + raise ValueError(f"Unknown category: {category}. 
Must be 'sentiment', 'dear_man', or 'fast'") + + # Format the prompt with the message + prompt = prompt_template.format(speaker=speaker, message=message) + + # Call OpenRouter API + messages = [ + {"role": "system", "content": "You are a helpful assistant that categorizes transcript messages based on guidelines. Always respond with valid JSON."}, + { + "role": "user", + "content": prompt + } + ] + + response = openrouter_request(model_name=model_name, messages=messages) + + # Check for errors + if response.status_code != 200: + raise Exception(f"OpenRouter API error: {response.status_code} - {response.text}") + + response_data = response.json() + + # Extract the analysis from the response + analysis_text = response_data.get("choices", [{}])[0].get("message", {}).get("content", "") + + # Parse JSON response + try: + json_match = re.search(r'\{[\s\S]*\}', analysis_text) + if json_match: + analysis = json.loads(json_match.group(0)) + return analysis + except json.JSONDecodeError as e: + # If JSON parsing fails, return the raw text + print(f"Warning: Could not parse JSON response for {category}: {e}") + analysis = {"raw_response": analysis_text, "error": str(e)} + + return analysis + + +def analyze_audio_conversation(audio_file_path: str, model_name: str = "mistralai/ministral-8b-2512") -> Dict[str, Any]: + """ + Analyze an audio file by transcribing it and evaluating sentiment and DEAR MAN + FAST adherence. + Analyzes each message separately for each category. 
+
+    Args:
+        audio_file_path: Path to the audio file to analyze
+        model_name: OpenRouter model name to use for analysis (default: mistralai/ministral-8b-2512)
+
+    Returns:
+        Dictionary containing:
+        - transcript: The full transcript text
+        - analysis: The LLM analysis with messages array, each containing sentiment, dear_man, and fast
+        - raw_transcript: The raw transcript info (id, status, confidence) or None if cached
+    """
+    # Step 1: Transcribe audio (may return cached list or AssemblyAI transcript)
+    print(f"Processing audio file: {audio_file_path}")
+    transcript_result = transcribe_audio(audio_file_path)
+
+    # Step 2: Transform to utterance list format
+    utterance_list = transform_to_utterance_list(transcript_result)
+
+    # Step 3: Format transcript for display
+    transcript_text = format_transcript_for_analysis(utterance_list)
+
+    # Get full transcript text (for backward compatibility)
+    full_transcript_text = "\n".join([f"{u['speaker']}: {u['message']}" for u in utterance_list])
+
+    # Step 4: Analyze entire transcript per category, then merge so each message has sentiment, dear_man, fast
+    print(f"Analyzing transcript across 3 categories (sentiment, dear_man, fast)...")
+    categories = ["sentiment", "dear_man", "fast"]
+    raw_category_results: Dict[str, Dict[str, Any]] = {}
+
+    for category in categories:
+        try:
+            print(f"  Running {category}...")
+            category_result = analyze_transcript(transcript_text, category, model_name)
+            raw_category_results[category] = category_result
+        except Exception as e:
+            print(f"Error analyzing {category} for transcript: {e}")
+            raw_category_results[category] = {"error": str(e)}
+
+    analyzed_messages = merge_category_results_into_messages(
+        utterance_list,
+        raw_category_results.get("sentiment", {}),
+        raw_category_results.get("dear_man", {}),
+        raw_category_results.get("fast", {}),
+    )
+
+    # Step 5: Write LLM outputs to JSON for later use
+    backend_dir = Path(__file__).resolve().parent.parent
+    transcript_dir = backend_dir / 
"audio_transcripts" + transcript_dir.mkdir(parents=True, exist_ok=True) + stem = Path(audio_file_path).stem + analysis_path = transcript_dir / f"{stem}_analysis.json" + output_payload = { + "transcript": full_transcript_text, + "transcript_with_speakers": transcript_text, + "raw_category_results": raw_category_results, + "messages": analyzed_messages, + } + with open(analysis_path, "w", encoding="utf-8") as f: + json.dump(output_payload, f, indent=2, ensure_ascii=False) + print(f"Wrote analysis to {analysis_path}") + + # Determine raw_transcript info + raw_transcript_info = None + if not isinstance(transcript_result, list): + # It was an AssemblyAI transcript object + raw_transcript_info = { + "id": transcript_result.id if hasattr(transcript_result, 'id') else None, + "status": transcript_result.status if hasattr(transcript_result, 'status') else None, + "confidence": transcript_result.confidence if hasattr(transcript_result, 'confidence') else None, + } + return { + "transcript": full_transcript_text, + "transcript_with_speakers": transcript_text, + "analysis": { + "messages": analyzed_messages + }, + "raw_transcript": raw_transcript_info, + } diff --git a/backend/database.py b/backend/database.py new file mode 100644 index 0000000..95dc9af --- /dev/null +++ b/backend/database.py @@ -0,0 +1,337 @@ +import sqlite3 +import os +from typing import Optional, List, Dict, Any, Tuple +from datetime import datetime + + +DB_PATH = os.path.join(os.path.dirname(__file__), 'transcripts.db') + + +def get_db_connection(): + """Get a connection to the SQLite database.""" + conn = sqlite3.connect(DB_PATH) + conn.row_factory = sqlite3.Row + return conn + + +def init_database(): + """Initialize the database and create tables if they don't exist.""" + conn = get_db_connection() + cursor = conn.cursor() + + # Create speakers table + cursor.execute(''' + CREATE TABLE IF NOT EXISTS speakers ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + name TEXT NOT NULL UNIQUE + ) + ''') + + # Create 
meeting_transcripts table + cursor.execute(''' + CREATE TABLE IF NOT EXISTS meeting_transcripts ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + name TEXT NOT NULL, + date TEXT NOT NULL, + created_at TEXT DEFAULT CURRENT_TIMESTAMP + ) + ''') + + # Create transcript_messages table + cursor.execute(''' + CREATE TABLE IF NOT EXISTS transcript_messages ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + meeting_transcript_id INTEGER NOT NULL, + speaker_id INTEGER NOT NULL, + text TEXT NOT NULL, + created_at TEXT DEFAULT CURRENT_TIMESTAMP, + FOREIGN KEY (meeting_transcript_id) REFERENCES meeting_transcripts(id), + FOREIGN KEY (speaker_id) REFERENCES speakers(id) + ) + ''') + + # Create transcript_message_tags table + cursor.execute(''' + CREATE TABLE IF NOT EXISTS transcript_message_tags ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + transcript_message_id INTEGER NOT NULL, + category TEXT NOT NULL, + sub_category TEXT, + label TEXT, + score REAL, + FOREIGN KEY (transcript_message_id) REFERENCES transcript_messages(id) + ) + ''') + + # Create indexes for better query performance + cursor.execute(''' + CREATE INDEX IF NOT EXISTS idx_tags_category ON transcript_message_tags(category) + ''') + cursor.execute(''' + CREATE INDEX IF NOT EXISTS idx_tags_sub_category ON transcript_message_tags(sub_category) + ''') + cursor.execute(''' + CREATE INDEX IF NOT EXISTS idx_messages_transcript ON transcript_messages(meeting_transcript_id) + ''') + cursor.execute(''' + CREATE INDEX IF NOT EXISTS idx_messages_speaker ON transcript_messages(speaker_id) + ''') + + conn.commit() + conn.close() + + +def get_or_create_speaker(name: str) -> int: + """Get or create a speaker by name. 
Returns speaker ID.""" + conn = get_db_connection() + cursor = conn.cursor() + + # Try to get existing speaker + cursor.execute('SELECT id FROM speakers WHERE name = ?', (name,)) + row = cursor.fetchone() + + if row: + speaker_id = row['id'] + else: + # Create new speaker + cursor.execute('INSERT INTO speakers (name) VALUES (?)', (name,)) + speaker_id = cursor.lastrowid + conn.commit() + + conn.close() + return speaker_id + + +def create_meeting_transcript(name: str, date: Optional[str] = None) -> int: + """Create a new meeting transcript. Returns transcript ID.""" + if date is None: + date = datetime.now().isoformat() + + conn = get_db_connection() + cursor = conn.cursor() + + cursor.execute( + 'INSERT INTO meeting_transcripts (name, date) VALUES (?, ?)', + (name, date) + ) + transcript_id = cursor.lastrowid + conn.commit() + conn.close() + + return transcript_id + + +def create_transcript_message( + meeting_transcript_id: int, + speaker_id: int, + text: str +) -> int: + """Create a new transcript message. Returns message ID.""" + conn = get_db_connection() + cursor = conn.cursor() + + cursor.execute( + '''INSERT INTO transcript_messages + (meeting_transcript_id, speaker_id, text) + VALUES (?, ?, ?)''', + (meeting_transcript_id, speaker_id, text) + ) + message_id = cursor.lastrowid + conn.commit() + conn.close() + + return message_id + + +def create_transcript_message_tag( + transcript_message_id: int, + category: str, + sub_category: Optional[str] = None, + label: Optional[str] = None, + score: Optional[float] = None +) -> int: + """Create a tag for a transcript message. 
Returns tag ID.""" + conn = get_db_connection() + cursor = conn.cursor() + + cursor.execute( + '''INSERT INTO transcript_message_tags + (transcript_message_id, category, sub_category, label, score) + VALUES (?, ?, ?, ?, ?)''', + (transcript_message_id, category, sub_category, label, score) + ) + tag_id = cursor.lastrowid + conn.commit() + conn.close() + + return tag_id + + +def get_all_speakers() -> List[Dict[str, Any]]: + """Get all speakers.""" + conn = get_db_connection() + cursor = conn.cursor() + + cursor.execute('SELECT id, name FROM speakers ORDER BY name') + rows = cursor.fetchall() + + conn.close() + return [{'id': row['id'], 'name': row['name']} for row in rows] + + +def get_all_transcripts() -> List[Dict[str, Any]]: + """Get all meeting transcripts.""" + conn = get_db_connection() + cursor = conn.cursor() + + cursor.execute('SELECT id, name, date FROM meeting_transcripts ORDER BY date DESC') + rows = cursor.fetchall() + + conn.close() + return [ + {'id': row['id'], 'name': row['name'], 'date': row['date']} + for row in rows + ] + + +def get_average_scores( + category: str, + speaker_id: Optional[int] = None, + meeting_transcript_id: Optional[int] = None +) -> Dict[str, float]: + """Get average scores for a category, optionally filtered by speaker or transcript.""" + conn = get_db_connection() + cursor = conn.cursor() + + query = ''' + SELECT + sub_category, + AVG(score) as avg_score + FROM transcript_message_tags tmt + INNER JOIN transcript_messages tm ON tmt.transcript_message_id = tm.id + WHERE tmt.category = ? + ''' + params = [category] + + if speaker_id is not None: + query += ' AND tm.speaker_id = ?' + params.append(speaker_id) + + if meeting_transcript_id is not None: + query += ' AND tm.meeting_transcript_id = ?' 
+ params.append(meeting_transcript_id) + + query += ' AND tmt.score IS NOT NULL GROUP BY sub_category' + + cursor.execute(query, params) + rows = cursor.fetchall() + + conn.close() + return {row['sub_category'] or 'overall': round(row['avg_score'], 2) for row in rows} + + +def get_label_counts( + category: str, + sub_category: Optional[str] = None, + speaker_id: Optional[int] = None, + meeting_transcript_id: Optional[int] = None +) -> Dict[str, int]: + """Get label frequency counts for a category/sub_category.""" + conn = get_db_connection() + cursor = conn.cursor() + + query = ''' + SELECT + tmt.label, + COUNT(*) as count + FROM transcript_message_tags tmt + INNER JOIN transcript_messages tm ON tmt.transcript_message_id = tm.id + WHERE tmt.category = ? AND tmt.label IS NOT NULL + ''' + params = [category] + + if sub_category is not None: + query += ' AND tmt.sub_category = ?' + params.append(sub_category) + + if speaker_id is not None: + query += ' AND tm.speaker_id = ?' + params.append(speaker_id) + + if meeting_transcript_id is not None: + query += ' AND tm.meeting_transcript_id = ?' 
+ params.append(meeting_transcript_id) + + query += ' GROUP BY tmt.label' + + cursor.execute(query, params) + rows = cursor.fetchall() + + conn.close() + return {row['label']: row['count'] for row in rows} + + +def get_pie_chart_data( + category: str, + sub_category: Optional[str] = None, + speaker_id: Optional[int] = None, + meeting_transcript_id: Optional[int] = None +) -> List[Dict[str, Any]]: + """Get data formatted for pie charts.""" + label_counts = get_label_counts( + category, sub_category, speaker_id, meeting_transcript_id + ) + + total = sum(label_counts.values()) + if total == 0: + return [] + + return [ + { + 'label': label, + 'count': count, + 'percentage': round((count / total) * 100, 1) + } + for label, count in label_counts.items() + ] + + +def get_subcategory_adherence_counts( + category: str, + sub_category: str, + speaker_id: Optional[int] = None, + meeting_transcript_id: Optional[int] = None +) -> Dict[str, int]: + """Get adherence counts for a specific sub_category (adhered, did_not_adhere, not_applicable).""" + conn = get_db_connection() + cursor = conn.cursor() + + query = ''' + SELECT + tmt.label, + COUNT(*) as count + FROM transcript_message_tags tmt + INNER JOIN transcript_messages tm ON tmt.transcript_message_id = tm.id + WHERE tmt.category = ? AND tmt.sub_category = ? AND tmt.label IS NOT NULL + ''' + params = [category, sub_category] + + if speaker_id is not None: + query += ' AND tm.speaker_id = ?' + params.append(speaker_id) + + if meeting_transcript_id is not None: + query += ' AND tm.meeting_transcript_id = ?' 
+ params.append(meeting_transcript_id) + + query += ' GROUP BY tmt.label' + + cursor.execute(query, params) + rows = cursor.fetchall() + + conn.close() + return {row['label']: row['count'] for row in rows} + + +# Initialize database on import +init_database() diff --git a/backend/main.py b/backend/main.py new file mode 100644 index 0000000..e96ede8 --- /dev/null +++ b/backend/main.py @@ -0,0 +1,142 @@ +from fastapi import FastAPI, Query +from fastapi.middleware.cors import CORSMiddleware +from pydantic import BaseModel +from typing import Optional, List, Dict, Any +import uvicorn +import nltk +from nltk.sentiment.vader import SentimentIntensityAnalyzer +from database import ( + get_all_speakers, + get_all_transcripts, + get_average_scores, + get_pie_chart_data, + get_subcategory_adherence_counts, +) + +# Download VADER lexicon (only needed once) +nltk.download('vader_lexicon', quiet=True) + +app = FastAPI() + +# Add CORS middleware +app.add_middleware( + CORSMiddleware, + allow_origins=["http://localhost:3000", "http://127.0.0.1:3000"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + +# Initialize VADER analyzer +sia = SentimentIntensityAnalyzer() + + +class SentimentRequest(BaseModel): + round: int + text: str + + +class SentimentResponse(BaseModel): + round: int + sentiment_score: float + sentiment_label: str + + +@app.get("/") +def hello_world(): + return {"message": "Hello World"} + + +@app.post("/sentiment", response_model=SentimentResponse) +def analyze_sentiment(request: SentimentRequest): + # Get VADER scores + scores = sia.polarity_scores(request.text) + + # Find which sentiment (neg, neu, pos) has the highest score + sentiment_scores = { + "negative": scores["neg"], + "neutral": scores["neu"], + "positive": scores["pos"], + } + + # Get the label with the highest score + sentiment_label = max(sentiment_scores, key=sentiment_scores.get) + + # Use compound score as the sentiment_score (-1 to 1) + sentiment_score = 
scores["compound"] + + return SentimentResponse( + round=request.round, + sentiment_score=round(sentiment_score, 2), + sentiment_label=sentiment_label, + ) + + +# Reports API endpoints +@app.get("/api/reports/speakers") +def get_speakers(): + """Get all speakers.""" + return get_all_speakers() + + +@app.get("/api/reports/transcripts") +def get_transcripts(): + """Get all meeting transcripts.""" + return get_all_transcripts() + + +@app.get("/api/reports/categories") +def get_categories(): + """Get available categories.""" + return ["sentiment", "dear_man", "fast"] + + +@app.get("/api/reports/metrics") +def get_metrics( + category: str = Query(default="dear_man", description="Category to get metrics for"), + speaker_id: Optional[int] = Query(default=None, description="Filter by speaker ID"), + meeting_transcript_id: Optional[int] = Query(default=None, description="Filter by transcript ID"), +): + """Get aggregated metrics for a category.""" + averages = get_average_scores(category, speaker_id, meeting_transcript_id) + + # Get label counts for subcategories + label_counts = {} + + if category == "sentiment": + # For sentiment, get label counts (positive, negative, neutral) + counts = get_pie_chart_data(category, None, speaker_id, meeting_transcript_id) + label_counts["sentiment"] = {item["label"]: item["count"] for item in counts} + elif category == "dear_man": + # For DEAR MAN, get adherence counts for each subcategory + subcategories = ["describe", "express", "assert", "reinforce", "mindful", "appear_confident", "negotiate"] + for subcat in subcategories: + counts = get_subcategory_adherence_counts(category, subcat, speaker_id, meeting_transcript_id) + label_counts[subcat] = counts + elif category == "fast": + # For FAST, get adherence counts for each subcategory + subcategories = ["fair", "apologies", "stick_to_values", "truthful"] + for subcat in subcategories: + counts = get_subcategory_adherence_counts(category, subcat, speaker_id, meeting_transcript_id) + 
label_counts[subcat] = counts + + return { + "averages": {category: averages}, + "label_counts": {category: label_counts} + } + + +@app.get("/api/reports/pie-chart-data") +def get_pie_chart_data_endpoint( + category: str = Query(description="Category to get data for"), + sub_category: Optional[str] = Query(default=None, description="Sub-category filter"), + speaker_id: Optional[int] = Query(default=None, description="Filter by speaker ID"), + meeting_transcript_id: Optional[int] = Query(default=None, description="Filter by transcript ID"), +): + """Get data formatted for pie charts.""" + data = get_pie_chart_data(category, sub_category, speaker_id, meeting_transcript_id) + return {"data": data} + + +if __name__ == "__main__": + uvicorn.run(app, host="0.0.0.0", port=8000) \ No newline at end of file diff --git a/backend/prompts.py b/backend/prompts.py new file mode 100644 index 0000000..9c6c8d4 --- /dev/null +++ b/backend/prompts.py @@ -0,0 +1,179 @@ +SENTIMENT_EVALUATION_PROMPT = """Analyze the sentiment of the message. + +Message to analyze: +Speaker: {speaker} +Text: {message} + +Classify the sentiment of this message as positive, negative, or neutral, and provide a brief explanation. + +Format your response as JSON with the following structure: +{{ + "label": "positive|negative|neutral", + "explanation": "brief explanation of why this sentiment classification was chosen" +}} + +Provide your analysis in valid JSON format only, no additional text before or after.""" + + +DEAR_MAN_EVALUATION_PROMPT = """You are an expert behavioral therapist evaluating a single message from a conversation. Evaluate how well the message adheres to the DEAR MAN skills. + +DEAR MAN Skills: +D - Describe: Did the speaker use factual statements without "I feel"? +E - Express: Did the speaker express feelings using "I feel", "I felt", or emotional words? +A - Assert: Did the speaker assert needs using "I want", "I need", "I'd like"? 
+R - Reinforce: Did the speaker reinforce their request with "because", "this would", "it helps"? +M - Mindful: Did the speaker stay on topic without tangents? +A - Appear confident: Did the speaker avoid hedging words like "maybe", "just", "sorry"? +N - Negotiate: Did the speaker negotiate using "what if", "would you", "can we"? + +Message to analyze: +Speaker: {speaker} +Text: {message} + +For each skill, determine if the speaker adhered to it (true/false) and provide a brief explanation. Calculate the total score (0-7, one point per skill). + +Format your response as JSON with the following structure: +{{ + "score": 0-7, + "breakdown": {{ + "describe": {{"adhered": true/false, "explanation": "..."}}, + "express": {{"adhered": true/false, "explanation": "..."}}, + "assert": {{"adhered": true/false, "explanation": "..."}}, + "reinforce": {{"adhered": true/false, "explanation": "..."}}, + "mindful": {{"adhered": true/false, "explanation": "..."}}, + "appear_confident": {{"adhered": true/false, "explanation": "..."}}, + "negotiate": {{"adhered": true/false, "explanation": "..."}} + }} +}} + +Provide your analysis in valid JSON format only, no additional text before or after.""" + + +FAST_EVALUATION_PROMPT = """You are an expert behavioral therapist evaluating a single message from a conversation. Evaluate how well the message adheres to the FAST skills. + +FAST Skills: +F - Fair: Was the speaker fair to themselves and others? +A - Apologies: Did the speaker avoid over-apologizing or apologizing for things that aren't their fault? +S - Stick to values: Did the speaker stick to their values and principles? +T - Truthful: Was the speaker truthful and authentic? + +Message to analyze: +Speaker: {speaker} +Text: {message} + +For each skill, determine if the speaker adhered to it (true/false) and provide a brief explanation. Calculate the total score (0-4, one point per skill). 
+ +Format your response as JSON with the following structure: +{{ + "score": 0-4, + "breakdown": {{ + "fair": {{"adhered": true/false, "explanation": "..."}}, + "apologies": {{"adhered": true/false, "explanation": "..."}}, + "stick_to_values": {{"adhered": true/false, "explanation": "..."}}, + "truthful": {{"adhered": true/false, "explanation": "..."}} + }} +}} + +Provide your analysis in valid JSON format only, no additional text before or after.""" + + +# ---- Transcript-level prompts (entire transcript instead of single message) ---- + +SENTIMENT_TRANSCRIPT_PROMPT = """Analyze the sentiment of each message in the conversation transcript. + +Transcript to analyze: +{transcript} + +For each message in the transcript, classify the sentiment as positive, negative, or neutral, and provide a brief explanation. + +Format your response as JSON with the following structure: +{{ + "messages": [ + {{ + "speaker": "Speaker A", + "text": "message text", + "sentiment": {{ + "label": "positive|negative|neutral", + "explanation": "brief explanation" + }} + }} + ] +}} + +Provide your analysis in valid JSON format only, no additional text before or after.""" + + +DEAR_MAN_TRANSCRIPT_PROMPT = """You are an expert behavioral therapist evaluating a conversation transcript. Evaluate how well each message adheres to the DEAR MAN skills. + +DEAR MAN Skills: +D - Describe: Did the speaker use factual statements without "I feel"? +E - Express: Did the speaker express feelings using "I feel", "I felt", or emotional words? +A - Assert: Did the speaker assert needs using "I want", "I need", "I'd like"? +R - Reinforce: Did the speaker reinforce their request with "because", "this would", "it helps"? +M - Mindful: Did the speaker stay on topic without tangents? +A - Appear confident: Did the speaker avoid hedging words like "maybe", "just", "sorry"? +N - Negotiate: Did the speaker negotiate using "what if", "would you", "can we"? 
+ +Transcript to analyze: +{transcript} + +For each message in the transcript, determine adherence for each skill (true/false) with a brief explanation, and calculate the total DEAR MAN score (0-7, one point per skill). + +Format your response as JSON with the following structure: +{{ + "messages": [ + {{ + "speaker": "Speaker A", + "text": "message text", + "dear_man": {{ + "score": 0-7, + "breakdown": {{ + "describe": {{"adhered": true/false, "explanation": "..."}}, + "express": {{"adhered": true/false, "explanation": "..."}}, + "assert": {{"adhered": true/false, "explanation": "..."}}, + "reinforce": {{"adhered": true/false, "explanation": "..."}}, + "mindful": {{"adhered": true/false, "explanation": "..."}}, + "appear_confident": {{"adhered": true/false, "explanation": "..."}}, + "negotiate": {{"adhered": true/false, "explanation": "..."}} + }} + }} + }} + ] +}} + +Provide your analysis in valid JSON format only, no additional text before or after.""" + + +FAST_TRANSCRIPT_PROMPT = """You are an expert behavioral therapist evaluating a conversation transcript. Evaluate how well each message adheres to the FAST skills. + +FAST Skills: +F - Fair: Was the speaker fair to themselves and others? +A - Apologies: Did the speaker avoid over-apologizing or apologizing for things that aren't their fault? +S - Stick to values: Did the speaker stick to their values and principles? +T - Truthful: Was the speaker truthful and authentic? + +Transcript to analyze: +{transcript} + +For each message in the transcript, determine adherence for each skill (true/false) with a brief explanation, and calculate the total FAST score (0-4, one point per skill). 
+ +Format your response as JSON with the following structure: +{{ + "messages": [ + {{ + "speaker": "Speaker A", + "text": "message text", + "fast": {{ + "score": 0-4, + "breakdown": {{ + "fair": {{"adhered": true/false, "explanation": "..."}}, + "apologies": {{"adhered": true/false, "explanation": "..."}}, + "stick_to_values": {{"adhered": true/false, "explanation": "..."}}, + "truthful": {{"adhered": true/false, "explanation": "..."}} + }} + }} + }} + ] +}} + +Provide your analysis in valid JSON format only, no additional text before or after.""" diff --git a/backend/pyproject.toml b/backend/pyproject.toml new file mode 100644 index 0000000..839fb1d --- /dev/null +++ b/backend/pyproject.toml @@ -0,0 +1,16 @@ +[project] +name = "backend" +version = "0.1.0" +description = "Add your description here" +readme = "README.md" +requires-python = ">=3.12" +dependencies = [ + "fastapi>=0.128.0", + "textblob>=0.19.0", + "uvicorn>=0.40.0", + "vadersentiment>=3.3.2", + "requests>=2.31.0", + "assemblyai>=0.28.0", + "youtube-to-audio>=0.1.7", + "python-dotenv>=1.2.1", +] diff --git a/backend/scripts/__init__.py b/backend/scripts/__init__.py new file mode 100644 index 0000000..7994ee4 --- /dev/null +++ b/backend/scripts/__init__.py @@ -0,0 +1 @@ +# Scripts package diff --git a/backend/scripts/process_audio.py b/backend/scripts/process_audio.py new file mode 100755 index 0000000..ba647fe --- /dev/null +++ b/backend/scripts/process_audio.py @@ -0,0 +1,201 @@ +#!/usr/bin/env python3 +""" +Script to process audio files and store analysis results in the database. 
+ +Usage: + python -m backend.scripts.process_audio [--meeting-name NAME] [--meeting-date DATE] +""" + +import sys +import os +import argparse +from datetime import datetime +from pathlib import Path + +# Add parent directory to path to import backend modules +backend_dir = Path(__file__).parent.parent +sys.path.insert(0, str(backend_dir.parent)) + +from backend.data_pipelines.ingest_audio_and_do_analysis import analyze_audio_conversation +from backend.database import ( + get_or_create_speaker, + create_meeting_transcript, + create_transcript_message, + create_transcript_message_tag, + init_database +) + + +def process_audio_file( + audio_file_path: str, + meeting_name: str = None, + meeting_date: str = None +): + """ + Process an audio file: transcribe, analyze, and store results in database. + + Args: + audio_file_path: Path to the audio file + meeting_name: Optional name for the meeting transcript (defaults to filename) + meeting_date: Optional date for the meeting (defaults to current date) + """ + # Validate audio file exists + if not os.path.exists(audio_file_path): + raise FileNotFoundError(f"Audio file not found: {audio_file_path}") + + # Analyze audio + print(f"Processing audio file: {audio_file_path}") + result = analyze_audio_conversation(audio_file_path) + + analysis = result.get('analysis', {}) + messages = analysis.get('messages', []) + + if not messages: + print("Warning: No messages found in analysis. 
Analysis structure:") + print(analysis) + return + + # Create meeting transcript + if meeting_name is None: + meeting_name = os.path.basename(audio_file_path) + + transcript_id = create_meeting_transcript(meeting_name, meeting_date) + print(f"Created meeting transcript: {meeting_name} (ID: {transcript_id})") + + # Process each message + for msg_idx, message in enumerate(messages): + speaker_name = message.get('speaker', 'Unknown') + text = message.get('text', '') + + # Get or create speaker + speaker_id = get_or_create_speaker(speaker_name) + + # Create transcript message + message_id = create_transcript_message(transcript_id, speaker_id, text) + + # Process sentiment + sentiment = message.get('sentiment', {}) + if sentiment: + sentiment_label = sentiment.get('label') + if sentiment_label: + create_transcript_message_tag( + message_id, + category='sentiment', + sub_category=None, + label=sentiment_label, + score=None + ) + + # Process DEAR MAN + dear_man = message.get('dear_man', {}) + if dear_man: + # Store overall score + dear_man_score = dear_man.get('score') + if dear_man_score is not None: + create_transcript_message_tag( + message_id, + category='dear_man', + sub_category='overall', + label=None, + score=float(dear_man_score) + ) + + # Store breakdown for each skill + breakdown = dear_man.get('breakdown', {}) + dear_man_skills = [ + 'describe', 'express', 'assert', 'reinforce', + 'mindful', 'appear_confident', 'negotiate' + ] + + for skill in dear_man_skills: + skill_data = breakdown.get(skill, {}) + if skill_data: + adhered = skill_data.get('adhered', False) + # Store as label: 'adhered' or 'did_not_adhere' + label = 'adhered' if adhered else 'did_not_adhere' + score = 1.0 if adhered else 0.0 + + create_transcript_message_tag( + message_id, + category='dear_man', + sub_category=skill, + label=label, + score=score + ) + + # Process FAST + fast = message.get('fast', {}) + if fast: + # Store overall score + fast_score = fast.get('score') + if fast_score is not 
None: + create_transcript_message_tag( + message_id, + category='fast', + sub_category='overall', + label=None, + score=float(fast_score) + ) + + # Store breakdown for each skill + breakdown = fast.get('breakdown', {}) + fast_skills = ['fair', 'apologies', 'stick_to_values', 'truthful'] + + for skill in fast_skills: + skill_data = breakdown.get(skill, {}) + if skill_data: + adhered = skill_data.get('adhered', False) + # Store as label: 'adhered' or 'did_not_adhere' + label = 'adhered' if adhered else 'did_not_adhere' + score = 1.0 if adhered else 0.0 + + create_transcript_message_tag( + message_id, + category='fast', + sub_category=skill, + label=label, + score=score + ) + + print(f"Processed message {msg_idx + 1}/{len(messages)} from {speaker_name}") + + print(f"Successfully processed {len(messages)} messages and stored in database.") + + +def main(): + parser = argparse.ArgumentParser( + description='Process audio file and store analysis in database' + ) + parser.add_argument( + '--audio_file', + type=str, + help='Path to the audio file to process' + ) + parser.add_argument( + '--meeting-name', + type=str, + default=None, + help='Name for the meeting transcript (defaults to filename)' + ) + parser.add_argument( + '--meeting-date', + type=str, + default=None, + help='Date for the meeting in ISO format (defaults to current date)' + ) + + args = parser.parse_args() + + try: + process_audio_file( + args.audio_file, + meeting_name=args.meeting_name, + meeting_date=args.meeting_date + ) + except Exception as e: + print(f"Error processing audio file: {e}", file=sys.stderr) + sys.exit(1) + + +if __name__ == '__main__': + main() diff --git a/backend/test_sentiment.py b/backend/test_sentiment.py new file mode 100644 index 0000000..9445834 --- /dev/null +++ b/backend/test_sentiment.py @@ -0,0 +1,74 @@ +""" +Test script for the sentiment analysis API at localhost:8000 + +Run the server first: python main.py +Then run this test: python test_sentiment.py +""" + +import 
requests + +BASE_URL = "http://localhost:8000" + + +def test_sentiment(round_num: int, text: str): + """Call the /sentiment endpoint and print results.""" + response = requests.post( + f"{BASE_URL}/sentiment", + json={"round": round_num, "text": text}, + ) + + if response.status_code == 200: + result = response.json() + print(f"Round {result['round']}:") + print(f" Text: \"{text[:50]}{'...' if len(text) > 50 else ''}\"") + print(f" Score: {result['sentiment_score']}") + print(f" Label: {result['sentiment_label']}") + print() + return result + else: + print(f"Error: {response.status_code}") + print(response.text) + return None + + +def main(): + print("=" * 60) + print("Sentiment Analysis API Test") + print("=" * 60) + print() + + # Test cases with different sentiments + test_cases = [ + (1, "I love this! It's absolutely amazing and wonderful."), + (2, "This is terrible. I'm so disappointed and frustrated."), + (3, "The meeting is scheduled for 3pm tomorrow."), + (4, "I appreciate your help, but I'm a bit concerned about the timeline."), + (5, "Thank you so much for your kindness and support!"), + (6, "I hate waiting. 
This is the worst experience ever."), + (7, "The weather today is cloudy with a chance of rain."), + ] + + print("Testing various sentiment samples:\n") + + results = [] + for round_num, text in test_cases: + result = test_sentiment(round_num, text) + if result: + results.append(result) + + # Summary + print("=" * 60) + print("Summary") + print("=" * 60) + positive = sum(1 for r in results if r["sentiment_label"] == "positive") + negative = sum(1 for r in results if r["sentiment_label"] == "negative") + neutral = sum(1 for r in results if r["sentiment_label"] == "neutral") + + print(f"Total tests: {len(results)}") + print(f" Positive: {positive}") + print(f" Negative: {negative}") + print(f" Neutral: {neutral}") + + +if __name__ == "__main__": + main() diff --git a/backend/tests/body.json b/backend/tests/body.json new file mode 100644 index 0000000..4ac5b4a --- /dev/null +++ b/backend/tests/body.json @@ -0,0 +1,4 @@ +{ + "round": 1, + "text": "I would like you to text me if you’re going to be late." +} \ No newline at end of file diff --git a/backend/tests/test_sentiment.py b/backend/tests/test_sentiment.py new file mode 100644 index 0000000..9445834 --- /dev/null +++ b/backend/tests/test_sentiment.py @@ -0,0 +1,74 @@ +""" +Test script for the sentiment analysis API at localhost:8000 + +Run the server first: python main.py +Then run this test: python test_sentiment.py +""" + +import requests + +BASE_URL = "http://localhost:8000" + + +def test_sentiment(round_num: int, text: str): + """Call the /sentiment endpoint and print results.""" + response = requests.post( + f"{BASE_URL}/sentiment", + json={"round": round_num, "text": text}, + ) + + if response.status_code == 200: + result = response.json() + print(f"Round {result['round']}:") + print(f" Text: \"{text[:50]}{'...' 
if len(text) > 50 else ''}\"") + print(f" Score: {result['sentiment_score']}") + print(f" Label: {result['sentiment_label']}") + print() + return result + else: + print(f"Error: {response.status_code}") + print(response.text) + return None + + +def main(): + print("=" * 60) + print("Sentiment Analysis API Test") + print("=" * 60) + print() + + # Test cases with different sentiments + test_cases = [ + (1, "I love this! It's absolutely amazing and wonderful."), + (2, "This is terrible. I'm so disappointed and frustrated."), + (3, "The meeting is scheduled for 3pm tomorrow."), + (4, "I appreciate your help, but I'm a bit concerned about the timeline."), + (5, "Thank you so much for your kindness and support!"), + (6, "I hate waiting. This is the worst experience ever."), + (7, "The weather today is cloudy with a chance of rain."), + ] + + print("Testing various sentiment samples:\n") + + results = [] + for round_num, text in test_cases: + result = test_sentiment(round_num, text) + if result: + results.append(result) + + # Summary + print("=" * 60) + print("Summary") + print("=" * 60) + positive = sum(1 for r in results if r["sentiment_label"] == "positive") + negative = sum(1 for r in results if r["sentiment_label"] == "negative") + neutral = sum(1 for r in results if r["sentiment_label"] == "neutral") + + print(f"Total tests: {len(results)}") + print(f" Positive: {positive}") + print(f" Negative: {negative}") + print(f" Neutral: {neutral}") + + +if __name__ == "__main__": + main() diff --git a/backend/transcripts.db b/backend/transcripts.db new file mode 100644 index 0000000..04f00b9 Binary files /dev/null and b/backend/transcripts.db differ diff --git a/backend/understanding_app.md b/backend/understanding_app.md new file mode 100644 index 0000000..fc1d9ae --- /dev/null +++ b/backend/understanding_app.md @@ -0,0 +1,27 @@ +┌─────────────────────────────────────────────────────────────┐ +│ LIVE SESSION │ +│ User speaks/types → Round tracked → Volume monitored │ +│ ↓ │ 
+│ useAnalyticsStore (Zustand) │ +│ - startRound(), endRound() │ +│ - flagVolumeAlert() │ +└─────────────────────────────────────────────────────────────┘ + ↓ +┌─────────────────────────────────────────────────────────────┐ +│ SESSION ENDS │ +│ endSession() → SessionAnalytics │ +│ ↓ │ +│ getSessionSummary() → SessionSummaryExport │ +└─────────────────────────────────────────────────────────────┘ + ↓ +┌─────────────────────────────────────────────────────────────┐ +│ DISPLAY │ +│ SessionAnalyticsSummary ← receives SessionSummaryExport │ +│ - Shows stats grid (rounds, time, input type, alerts) │ +│ - Expandable round-by-round breakdown │ +│ - Export to JSON/CSV/Email │ +└─────────────────────────────────────────────────────────────┘ + + +Example Curls: +curl -H "Content-Type: application/json" --data @backend/tests/body.json http://localhost:8000/sentiment \ No newline at end of file diff --git a/backend/uv.lock b/backend/uv.lock new file mode 100644 index 0000000..f69c980 --- /dev/null +++ b/backend/uv.lock @@ -0,0 +1,624 @@ +version = 1 +revision = 2 +requires-python = ">=3.12" +resolution-markers = [ + "python_full_version >= '3.14'", + "python_full_version < '3.14'", +] + +[[package]] +name = "annotated-doc" +version = "0.0.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/57/ba/046ceea27344560984e26a590f90bc7f4a75b06701f653222458922b558c/annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4", size = 7288, upload-time = "2025-11-10T22:07:42.062Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320", size = 5303, upload-time = "2025-11-10T22:07:40.673Z" }, +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, +] + +[[package]] +name = "anyio" +version = "4.12.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/96/f0/5eb65b2bb0d09ac6776f2eb54adee6abe8228ea05b20a5ad0e4945de8aac/anyio-4.12.1.tar.gz", hash = "sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703", size = 228685, upload-time = "2026-01-06T11:45:21.246Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size = 113592, upload-time = "2026-01-06T11:45:19.497Z" }, +] + +[[package]] +name = "assemblyai" +version = "0.48.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "httpx" }, + { name = "pydantic" }, + { name = "pydantic-settings", marker = "python_full_version >= '3.14'" }, + { name = "typing-extensions" }, + { name = "websockets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/82/19/57ee0680d8afaa1d354833a35b8ff4ab4cd946734fa3876bedae8cc6ab08/assemblyai-0.48.4.tar.gz", hash = "sha256:a7ec007b6d3079d38a4a5c39ea7c733067785e1a30151a6f01aef7728c1bb659", size = 
56367, upload-time = "2025-12-17T22:37:42.135Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ae/e3/a21e7fe81e056930ca0b6fd9ddfd1713322560762f8662605e493619da49/assemblyai-0.48.4-py3-none-any.whl", hash = "sha256:cf56b71f205e341b650cb2668e382181c01082b95873bea4dc7450fe1647083d", size = 52376, upload-time = "2025-12-17T22:37:40.97Z" }, +] + +[[package]] +name = "backend" +version = "0.1.0" +source = { virtual = "." } +dependencies = [ + { name = "assemblyai" }, + { name = "fastapi" }, + { name = "python-dotenv" }, + { name = "requests" }, + { name = "textblob" }, + { name = "uvicorn" }, + { name = "vadersentiment" }, + { name = "youtube-to-audio" }, +] + +[package.metadata] +requires-dist = [ + { name = "assemblyai", specifier = ">=0.28.0" }, + { name = "fastapi", specifier = ">=0.128.0" }, + { name = "python-dotenv", specifier = ">=1.2.1" }, + { name = "requests", specifier = ">=2.31.0" }, + { name = "textblob", specifier = ">=0.19.0" }, + { name = "uvicorn", specifier = ">=0.40.0" }, + { name = "vadersentiment", specifier = ">=3.3.2" }, + { name = "youtube-to-audio", specifier = ">=0.1.7" }, +] + +[[package]] +name = "certifi" +version = "2026.1.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e0/2d/a891ca51311197f6ad14a7ef42e2399f36cf2f9bd44752b3dc4eab60fdc5/certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120", size = 154268, upload-time = "2026-01-04T02:42:41.825Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e6/ad/3cc14f097111b4de0040c83a525973216457bbeeb63739ef1ed275c1c021/certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c", size = 152900, upload-time = "2026-01-04T02:42:40.15Z" }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425, upload-time = "2025-10-14T04:40:53.353Z" }, + { url = "https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162, upload-time = "2025-10-14T04:40:54.558Z" }, + { url = "https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558, upload-time = "2025-10-14T04:40:55.677Z" }, + { url = "https://files.pythonhosted.org/packages/86/bb/b32194a4bf15b88403537c2e120b817c61cd4ecffa9b6876e941c3ee38fe/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d", size = 161497, upload-time = "2025-10-14T04:40:57.217Z" }, + { url = "https://files.pythonhosted.org/packages/19/89/a54c82b253d5b9b111dc74aca196ba5ccfcca8242d0fb64146d4d3183ff1/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8", size = 159240, upload-time = "2025-10-14T04:40:58.358Z" }, + { url = "https://files.pythonhosted.org/packages/c0/10/d20b513afe03acc89ec33948320a5544d31f21b05368436d580dec4e234d/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86", size = 153471, upload-time = "2025-10-14T04:40:59.468Z" }, + { url = "https://files.pythonhosted.org/packages/61/fa/fbf177b55bdd727010f9c0a3c49eefa1d10f960e5f09d1d887bf93c2e698/charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a", size = 150864, upload-time = "2025-10-14T04:41:00.623Z" }, + { url = "https://files.pythonhosted.org/packages/05/12/9fbc6a4d39c0198adeebbde20b619790e9236557ca59fc40e0e3cebe6f40/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f", size = 150647, upload-time = "2025-10-14T04:41:01.754Z" }, + { url = "https://files.pythonhosted.org/packages/ad/1f/6a9a593d52e3e8c5d2b167daf8c6b968808efb57ef4c210acb907c365bc4/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc", size = 145110, upload-time = "2025-10-14T04:41:03.231Z" }, + { url = "https://files.pythonhosted.org/packages/30/42/9a52c609e72471b0fc54386dc63c3781a387bb4fe61c20231a4ebcd58bdd/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf", size = 162839, upload-time = "2025-10-14T04:41:04.715Z" }, + { url = 
"https://files.pythonhosted.org/packages/c4/5b/c0682bbf9f11597073052628ddd38344a3d673fda35a36773f7d19344b23/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15", size = 150667, upload-time = "2025-10-14T04:41:05.827Z" }, + { url = "https://files.pythonhosted.org/packages/e4/24/a41afeab6f990cf2daf6cb8c67419b63b48cf518e4f56022230840c9bfb2/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9", size = 160535, upload-time = "2025-10-14T04:41:06.938Z" }, + { url = "https://files.pythonhosted.org/packages/2a/e5/6a4ce77ed243c4a50a1fecca6aaaab419628c818a49434be428fe24c9957/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0", size = 154816, upload-time = "2025-10-14T04:41:08.101Z" }, + { url = "https://files.pythonhosted.org/packages/a8/ef/89297262b8092b312d29cdb2517cb1237e51db8ecef2e9af5edbe7b683b1/charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26", size = 99694, upload-time = "2025-10-14T04:41:09.23Z" }, + { url = "https://files.pythonhosted.org/packages/3d/2d/1e5ed9dd3b3803994c155cd9aacb60c82c331bad84daf75bcb9c91b3295e/charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525", size = 107131, upload-time = "2025-10-14T04:41:10.467Z" }, + { url = "https://files.pythonhosted.org/packages/d0/d9/0ed4c7098a861482a7b6a95603edce4c0d9db2311af23da1fb2b75ec26fc/charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3", size = 100390, upload-time = "2025-10-14T04:41:11.915Z" }, + { url = 
"https://files.pythonhosted.org/packages/97/45/4b3a1239bbacd321068ea6e7ac28875b03ab8bc0aa0966452db17cd36714/charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794", size = 208091, upload-time = "2025-10-14T04:41:13.346Z" }, + { url = "https://files.pythonhosted.org/packages/7d/62/73a6d7450829655a35bb88a88fca7d736f9882a27eacdca2c6d505b57e2e/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed", size = 147936, upload-time = "2025-10-14T04:41:14.461Z" }, + { url = "https://files.pythonhosted.org/packages/89/c5/adb8c8b3d6625bef6d88b251bbb0d95f8205831b987631ab0c8bb5d937c2/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72", size = 144180, upload-time = "2025-10-14T04:41:15.588Z" }, + { url = "https://files.pythonhosted.org/packages/91/ed/9706e4070682d1cc219050b6048bfd293ccf67b3d4f5a4f39207453d4b99/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328", size = 161346, upload-time = "2025-10-14T04:41:16.738Z" }, + { url = "https://files.pythonhosted.org/packages/d5/0d/031f0d95e4972901a2f6f09ef055751805ff541511dc1252ba3ca1f80cf5/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede", size = 158874, upload-time = "2025-10-14T04:41:17.923Z" }, + { url = 
"https://files.pythonhosted.org/packages/f5/83/6ab5883f57c9c801ce5e5677242328aa45592be8a00644310a008d04f922/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894", size = 153076, upload-time = "2025-10-14T04:41:19.106Z" }, + { url = "https://files.pythonhosted.org/packages/75/1e/5ff781ddf5260e387d6419959ee89ef13878229732732ee73cdae01800f2/charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1", size = 150601, upload-time = "2025-10-14T04:41:20.245Z" }, + { url = "https://files.pythonhosted.org/packages/d7/57/71be810965493d3510a6ca79b90c19e48696fb1ff964da319334b12677f0/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490", size = 150376, upload-time = "2025-10-14T04:41:21.398Z" }, + { url = "https://files.pythonhosted.org/packages/e5/d5/c3d057a78c181d007014feb7e9f2e65905a6c4ef182c0ddf0de2924edd65/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44", size = 144825, upload-time = "2025-10-14T04:41:22.583Z" }, + { url = "https://files.pythonhosted.org/packages/e6/8c/d0406294828d4976f275ffbe66f00266c4b3136b7506941d87c00cab5272/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133", size = 162583, upload-time = "2025-10-14T04:41:23.754Z" }, + { url = "https://files.pythonhosted.org/packages/d7/24/e2aa1f18c8f15c4c0e932d9287b8609dd30ad56dbe41d926bd846e22fb8d/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3", size = 150366, upload-time = "2025-10-14T04:41:25.27Z" }, 
+ { url = "https://files.pythonhosted.org/packages/e4/5b/1e6160c7739aad1e2df054300cc618b06bf784a7a164b0f238360721ab86/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e", size = 160300, upload-time = "2025-10-14T04:41:26.725Z" }, + { url = "https://files.pythonhosted.org/packages/7a/10/f882167cd207fbdd743e55534d5d9620e095089d176d55cb22d5322f2afd/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc", size = 154465, upload-time = "2025-10-14T04:41:28.322Z" }, + { url = "https://files.pythonhosted.org/packages/89/66/c7a9e1b7429be72123441bfdbaf2bc13faab3f90b933f664db506dea5915/charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac", size = 99404, upload-time = "2025-10-14T04:41:29.95Z" }, + { url = "https://files.pythonhosted.org/packages/c4/26/b9924fa27db384bdcd97ab83b4f0a8058d96ad9626ead570674d5e737d90/charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14", size = 107092, upload-time = "2025-10-14T04:41:31.188Z" }, + { url = "https://files.pythonhosted.org/packages/af/8f/3ed4bfa0c0c72a7ca17f0380cd9e4dd842b09f664e780c13cff1dcf2ef1b/charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2", size = 100408, upload-time = "2025-10-14T04:41:32.624Z" }, + { url = "https://files.pythonhosted.org/packages/2a/35/7051599bd493e62411d6ede36fd5af83a38f37c4767b92884df7301db25d/charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd", size = 207746, upload-time = "2025-10-14T04:41:33.773Z" }, + { url = 
"https://files.pythonhosted.org/packages/10/9a/97c8d48ef10d6cd4fcead2415523221624bf58bcf68a802721a6bc807c8f/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb", size = 147889, upload-time = "2025-10-14T04:41:34.897Z" }, + { url = "https://files.pythonhosted.org/packages/10/bf/979224a919a1b606c82bd2c5fa49b5c6d5727aa47b4312bb27b1734f53cd/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e", size = 143641, upload-time = "2025-10-14T04:41:36.116Z" }, + { url = "https://files.pythonhosted.org/packages/ba/33/0ad65587441fc730dc7bd90e9716b30b4702dc7b617e6ba4997dc8651495/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14", size = 160779, upload-time = "2025-10-14T04:41:37.229Z" }, + { url = "https://files.pythonhosted.org/packages/67/ed/331d6b249259ee71ddea93f6f2f0a56cfebd46938bde6fcc6f7b9a3d0e09/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191", size = 159035, upload-time = "2025-10-14T04:41:38.368Z" }, + { url = "https://files.pythonhosted.org/packages/67/ff/f6b948ca32e4f2a4576aa129d8bed61f2e0543bf9f5f2b7fc3758ed005c9/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838", size = 152542, upload-time = "2025-10-14T04:41:39.862Z" }, + { url = 
"https://files.pythonhosted.org/packages/16/85/276033dcbcc369eb176594de22728541a925b2632f9716428c851b149e83/charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6", size = 149524, upload-time = "2025-10-14T04:41:41.319Z" }, + { url = "https://files.pythonhosted.org/packages/9e/f2/6a2a1f722b6aba37050e626530a46a68f74e63683947a8acff92569f979a/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e", size = 150395, upload-time = "2025-10-14T04:41:42.539Z" }, + { url = "https://files.pythonhosted.org/packages/60/bb/2186cb2f2bbaea6338cad15ce23a67f9b0672929744381e28b0592676824/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c", size = 143680, upload-time = "2025-10-14T04:41:43.661Z" }, + { url = "https://files.pythonhosted.org/packages/7d/a5/bf6f13b772fbb2a90360eb620d52ed8f796f3c5caee8398c3b2eb7b1c60d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090", size = 162045, upload-time = "2025-10-14T04:41:44.821Z" }, + { url = "https://files.pythonhosted.org/packages/df/c5/d1be898bf0dc3ef9030c3825e5d3b83f2c528d207d246cbabe245966808d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152", size = 149687, upload-time = "2025-10-14T04:41:46.442Z" }, + { url = "https://files.pythonhosted.org/packages/a5/42/90c1f7b9341eef50c8a1cb3f098ac43b0508413f33affd762855f67a410e/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828", size = 160014, upload-time = "2025-10-14T04:41:47.631Z" }, + { url = 
"https://files.pythonhosted.org/packages/76/be/4d3ee471e8145d12795ab655ece37baed0929462a86e72372fd25859047c/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec", size = 154044, upload-time = "2025-10-14T04:41:48.81Z" }, + { url = "https://files.pythonhosted.org/packages/b0/6f/8f7af07237c34a1defe7defc565a9bc1807762f672c0fde711a4b22bf9c0/charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9", size = 99940, upload-time = "2025-10-14T04:41:49.946Z" }, + { url = "https://files.pythonhosted.org/packages/4b/51/8ade005e5ca5b0d80fb4aff72a3775b325bdc3d27408c8113811a7cbe640/charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c", size = 107104, upload-time = "2025-10-14T04:41:51.051Z" }, + { url = "https://files.pythonhosted.org/packages/da/5f/6b8f83a55bb8278772c5ae54a577f3099025f9ade59d0136ac24a0df4bde/charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2", size = 100743, upload-time = "2025-10-14T04:41:52.122Z" }, + { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" }, +] + +[[package]] +name = "click" +version = "8.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = 
"2025-11-15T20:45:42.706Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "fastapi" +version = "0.128.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-doc" }, + { name = "pydantic" }, + { name = "starlette" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/52/08/8c8508db6c7b9aae8f7175046af41baad690771c9bcde676419965e338c7/fastapi-0.128.0.tar.gz", hash = "sha256:1cc179e1cef10a6be60ffe429f79b829dce99d8de32d7acb7e6c8dfdf7f2645a", size = 365682, upload-time = "2025-12-27T15:21:13.714Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5c/05/5cbb59154b093548acd0f4c7c474a118eda06da25aa75c616b72d8fcd92a/fastapi-0.128.0-py3-none-any.whl", hash = "sha256:aebd93f9716ee3b4f4fcfe13ffb7cf308d99c9f3ab5622d8877441072561582d", size = 103094, upload-time = "2025-12-27T15:21:12.154Z" }, +] + +[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", 
hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, +] + +[[package]] +name = "idna" +version = "3.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, +] + +[[package]] +name = "joblib" +version = "1.5.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/41/f2/d34e8b3a08a9cc79a50b2208a93dce981fe615b64d5a4d4abee421d898df/joblib-1.5.3.tar.gz", hash = "sha256:8561a3269e6801106863fd0d6d84bb737be9e7631e33aaed3fb9ce5953688da3", size = 331603, upload-time = "2025-12-15T08:41:46.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7b/91/984aca2ec129e2757d1e4e3c81c3fcda9d0f85b74670a094cc443d9ee949/joblib-1.5.3-py3-none-any.whl", hash = "sha256:5fc3c5039fc5ca8c0276333a188bbd59d6b7ab37fe6632daa76bc7f9ec18e713", size = 309071, upload-time = "2025-12-15T08:41:44.973Z" }, +] + +[[package]] +name = "nltk" +version = "3.9.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "joblib" }, + { name = "regex" }, + { name = "tqdm" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f9/76/3a5e4312c19a028770f86fd7c058cf9f4ec4321c6cf7526bab998a5b683c/nltk-3.9.2.tar.gz", hash = "sha256:0f409e9b069ca4177c1903c3e843eef90c7e92992fa4931ae607da6de49e1419", size = 2887629, upload-time = 
"2025-10-01T07:19:23.764Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/60/90/81ac364ef94209c100e12579629dc92bf7a709a84af32f8c551b02c07e94/nltk-3.9.2-py3-none-any.whl", hash = "sha256:1e209d2b3009110635ed9709a67a1a3e33a10f799490fa71cf4bec218c11c88a", size = 1513404, upload-time = "2025-10-01T07:19:21.648Z" }, +] + +[[package]] +name = "pydantic" +version = "2.12.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" }, +] + +[[package]] +name = "pydantic-core" +version = "2.41.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" }, + { url = 
"https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" }, + { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" }, + { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" }, + { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" }, + { url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" }, + { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = 
"2025-11-04T13:40:09.804Z" }, + { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" }, + { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" }, + { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" }, + { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" }, + { url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" }, + { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" }, + { url = 
"https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" }, + { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" }, + { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" }, + { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" }, + { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" }, + { url = 
"https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" }, + { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" }, + { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" }, + { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" }, + { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" }, + { url = 
"https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" }, + { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" }, + { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" }, + { url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" }, + { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" }, + { url = 
"https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" }, + { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" }, + { url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" }, + { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" }, + { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" }, + { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = 
"2025-11-04T13:41:14.627Z" }, + { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" }, + { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" }, + { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" }, + { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" }, + { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" }, + { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" }, + { url = 
"https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" }, + { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" }, + { url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" }, + { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" }, + { url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" }, + { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = 
"2025-11-04T13:41:42.323Z" }, + { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" }, + { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" }, + { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" }, + { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" }, + { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" }, + { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" }, + { url = 
"https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" }, + { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" }, + { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, + { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, + { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" }, +] + +[[package]] +name = "pydantic-settings" +version = "2.12.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic", marker = "python_full_version >= '3.14'" }, + { name = "python-dotenv", marker = "python_full_version >= '3.14'" }, + { name = 
"typing-inspection", marker = "python_full_version >= '3.14'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/43/4b/ac7e0aae12027748076d72a8764ff1c9d82ca75a7a52622e67ed3f765c54/pydantic_settings-2.12.0.tar.gz", hash = "sha256:005538ef951e3c2a68e1c08b292b5f2e71490def8589d4221b95dab00dafcfd0", size = 194184, upload-time = "2025-11-10T14:25:47.013Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/60/5d4751ba3f4a40a6891f24eec885f51afd78d208498268c734e256fb13c4/pydantic_settings-2.12.0-py3-none-any.whl", hash = "sha256:fddb9fd99a5b18da837b29710391e945b1e30c135477f484084ee513adb93809", size = 51880, upload-time = "2025-11-10T14:25:45.546Z" }, +] + +[[package]] +name = "python-dotenv" +version = "1.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f0/26/19cadc79a718c5edbec86fd4919a6b6d3f681039a2f6d66d14be94e75fb9/python_dotenv-1.2.1.tar.gz", hash = "sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6", size = 44221, upload-time = "2025-10-26T15:12:10.434Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61", size = 21230, upload-time = "2025-10-26T15:12:09.109Z" }, +] + +[[package]] +name = "regex" +version = "2026.1.15" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0b/86/07d5056945f9ec4590b518171c4254a5925832eb727b56d3c38a7476f316/regex-2026.1.15.tar.gz", hash = "sha256:164759aa25575cbc0651bef59a0b18353e54300d79ace8084c818ad8ac72b7d5", size = 414811, upload-time = "2026-01-14T23:18:02.775Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/92/81/10d8cf43c807d0326efe874c1b79f22bfb0fb226027b0b19ebc26d301408/regex-2026.1.15-cp312-cp312-macosx_10_13_universal2.whl", hash = 
"sha256:4c8fcc5793dde01641a35905d6731ee1548f02b956815f8f1cab89e515a5bdf1", size = 489398, upload-time = "2026-01-14T23:14:43.741Z" }, + { url = "https://files.pythonhosted.org/packages/90/b0/7c2a74e74ef2a7c32de724658a69a862880e3e4155cba992ba04d1c70400/regex-2026.1.15-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bfd876041a956e6a90ad7cdb3f6a630c07d491280bfeed4544053cd434901681", size = 291339, upload-time = "2026-01-14T23:14:45.183Z" }, + { url = "https://files.pythonhosted.org/packages/19/4d/16d0773d0c818417f4cc20aa0da90064b966d22cd62a8c46765b5bd2d643/regex-2026.1.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9250d087bc92b7d4899ccd5539a1b2334e44eee85d848c4c1aef8e221d3f8c8f", size = 289003, upload-time = "2026-01-14T23:14:47.25Z" }, + { url = "https://files.pythonhosted.org/packages/c6/e4/1fc4599450c9f0863d9406e944592d968b8d6dfd0d552a7d569e43bceada/regex-2026.1.15-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c8a154cf6537ebbc110e24dabe53095e714245c272da9c1be05734bdad4a61aa", size = 798656, upload-time = "2026-01-14T23:14:48.77Z" }, + { url = "https://files.pythonhosted.org/packages/b2/e6/59650d73a73fa8a60b3a590545bfcf1172b4384a7df2e7fe7b9aab4e2da9/regex-2026.1.15-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8050ba2e3ea1d8731a549e83c18d2f0999fbc99a5f6bd06b4c91449f55291804", size = 864252, upload-time = "2026-01-14T23:14:50.528Z" }, + { url = "https://files.pythonhosted.org/packages/6e/ab/1d0f4d50a1638849a97d731364c9a80fa304fec46325e48330c170ee8e80/regex-2026.1.15-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0bf065240704cb8951cc04972cf107063917022511273e0969bdb34fc173456c", size = 912268, upload-time = "2026-01-14T23:14:52.952Z" }, + { url = 
"https://files.pythonhosted.org/packages/dd/df/0d722c030c82faa1d331d1921ee268a4e8fb55ca8b9042c9341c352f17fa/regex-2026.1.15-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c32bef3e7aeee75746748643667668ef941d28b003bfc89994ecf09a10f7a1b5", size = 803589, upload-time = "2026-01-14T23:14:55.182Z" }, + { url = "https://files.pythonhosted.org/packages/66/23/33289beba7ccb8b805c6610a8913d0131f834928afc555b241caabd422a9/regex-2026.1.15-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d5eaa4a4c5b1906bd0d2508d68927f15b81821f85092e06f1a34a4254b0e1af3", size = 775700, upload-time = "2026-01-14T23:14:56.707Z" }, + { url = "https://files.pythonhosted.org/packages/e7/65/bf3a42fa6897a0d3afa81acb25c42f4b71c274f698ceabd75523259f6688/regex-2026.1.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:86c1077a3cc60d453d4084d5b9649065f3bf1184e22992bd322e1f081d3117fb", size = 787928, upload-time = "2026-01-14T23:14:58.312Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f5/13bf65864fc314f68cdd6d8ca94adcab064d4d39dbd0b10fef29a9da48fc/regex-2026.1.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:2b091aefc05c78d286657cd4db95f2e6313375ff65dcf085e42e4c04d9c8d410", size = 858607, upload-time = "2026-01-14T23:15:00.657Z" }, + { url = "https://files.pythonhosted.org/packages/a3/31/040e589834d7a439ee43fb0e1e902bc81bd58a5ba81acffe586bb3321d35/regex-2026.1.15-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:57e7d17f59f9ebfa9667e6e5a1c0127b96b87cb9cede8335482451ed00788ba4", size = 763729, upload-time = "2026-01-14T23:15:02.248Z" }, + { url = "https://files.pythonhosted.org/packages/9b/84/6921e8129687a427edf25a34a5594b588b6d88f491320b9de5b6339a4fcb/regex-2026.1.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:c6c4dcdfff2c08509faa15d36ba7e5ef5fcfab25f1e8f85a0c8f45bc3a30725d", size = 850697, upload-time = "2026-01-14T23:15:03.878Z" }, + { url = 
"https://files.pythonhosted.org/packages/8a/87/3d06143d4b128f4229158f2de5de6c8f2485170c7221e61bf381313314b2/regex-2026.1.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:cf8ff04c642716a7f2048713ddc6278c5fd41faa3b9cab12607c7abecd012c22", size = 789849, upload-time = "2026-01-14T23:15:06.102Z" }, + { url = "https://files.pythonhosted.org/packages/77/69/c50a63842b6bd48850ebc7ab22d46e7a2a32d824ad6c605b218441814639/regex-2026.1.15-cp312-cp312-win32.whl", hash = "sha256:82345326b1d8d56afbe41d881fdf62f1926d7264b2fc1537f99ae5da9aad7913", size = 266279, upload-time = "2026-01-14T23:15:07.678Z" }, + { url = "https://files.pythonhosted.org/packages/f2/36/39d0b29d087e2b11fd8191e15e81cce1b635fcc845297c67f11d0d19274d/regex-2026.1.15-cp312-cp312-win_amd64.whl", hash = "sha256:4def140aa6156bc64ee9912383d4038f3fdd18fee03a6f222abd4de6357ce42a", size = 277166, upload-time = "2026-01-14T23:15:09.257Z" }, + { url = "https://files.pythonhosted.org/packages/28/32/5b8e476a12262748851fa8ab1b0be540360692325975b094e594dfebbb52/regex-2026.1.15-cp312-cp312-win_arm64.whl", hash = "sha256:c6c565d9a6e1a8d783c1948937ffc377dd5771e83bd56de8317c450a954d2056", size = 270415, upload-time = "2026-01-14T23:15:10.743Z" }, + { url = "https://files.pythonhosted.org/packages/f8/2e/6870bb16e982669b674cce3ee9ff2d1d46ab80528ee6bcc20fb2292efb60/regex-2026.1.15-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e69d0deeb977ffe7ed3d2e4439360089f9c3f217ada608f0f88ebd67afb6385e", size = 489164, upload-time = "2026-01-14T23:15:13.962Z" }, + { url = "https://files.pythonhosted.org/packages/dc/67/9774542e203849b0286badf67199970a44ebdb0cc5fb739f06e47ada72f8/regex-2026.1.15-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3601ffb5375de85a16f407854d11cca8fe3f5febbe3ac78fb2866bb220c74d10", size = 291218, upload-time = "2026-01-14T23:15:15.647Z" }, + { url = 
"https://files.pythonhosted.org/packages/b2/87/b0cda79f22b8dee05f774922a214da109f9a4c0eca5da2c9d72d77ea062c/regex-2026.1.15-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4c5ef43b5c2d4114eb8ea424bb8c9cec01d5d17f242af88b2448f5ee81caadbc", size = 288895, upload-time = "2026-01-14T23:15:17.788Z" }, + { url = "https://files.pythonhosted.org/packages/3b/6a/0041f0a2170d32be01ab981d6346c83a8934277d82c780d60b127331f264/regex-2026.1.15-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:968c14d4f03e10b2fd960f1d5168c1f0ac969381d3c1fcc973bc45fb06346599", size = 798680, upload-time = "2026-01-14T23:15:19.342Z" }, + { url = "https://files.pythonhosted.org/packages/58/de/30e1cfcdbe3e891324aa7568b7c968771f82190df5524fabc1138cb2d45a/regex-2026.1.15-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:56a5595d0f892f214609c9f76b41b7428bed439d98dc961efafdd1354d42baae", size = 864210, upload-time = "2026-01-14T23:15:22.005Z" }, + { url = "https://files.pythonhosted.org/packages/64/44/4db2f5c5ca0ccd40ff052ae7b1e9731352fcdad946c2b812285a7505ca75/regex-2026.1.15-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0bf650f26087363434c4e560011f8e4e738f6f3e029b85d4904c50135b86cfa5", size = 912358, upload-time = "2026-01-14T23:15:24.569Z" }, + { url = "https://files.pythonhosted.org/packages/79/b6/e6a5665d43a7c42467138c8a2549be432bad22cbd206f5ec87162de74bd7/regex-2026.1.15-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18388a62989c72ac24de75f1449d0fb0b04dfccd0a1a7c1c43af5eb503d890f6", size = 803583, upload-time = "2026-01-14T23:15:26.526Z" }, + { url = "https://files.pythonhosted.org/packages/e7/53/7cd478222169d85d74d7437e74750005e993f52f335f7c04ff7adfda3310/regex-2026.1.15-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:6d220a2517f5893f55daac983bfa9fe998a7dbcaee4f5d27a88500f8b7873788", size = 775782, upload-time = "2026-01-14T23:15:29.352Z" }, + { url = "https://files.pythonhosted.org/packages/ca/b5/75f9a9ee4b03a7c009fe60500fe550b45df94f0955ca29af16333ef557c5/regex-2026.1.15-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c9c08c2fbc6120e70abff5d7f28ffb4d969e14294fb2143b4b5c7d20e46d1714", size = 787978, upload-time = "2026-01-14T23:15:31.295Z" }, + { url = "https://files.pythonhosted.org/packages/72/b3/79821c826245bbe9ccbb54f6eadb7879c722fd3e0248c17bfc90bf54e123/regex-2026.1.15-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7ef7d5d4bd49ec7364315167a4134a015f61e8266c6d446fc116a9ac4456e10d", size = 858550, upload-time = "2026-01-14T23:15:33.558Z" }, + { url = "https://files.pythonhosted.org/packages/4a/85/2ab5f77a1c465745bfbfcb3ad63178a58337ae8d5274315e2cc623a822fa/regex-2026.1.15-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:6e42844ad64194fa08d5ccb75fe6a459b9b08e6d7296bd704460168d58a388f3", size = 763747, upload-time = "2026-01-14T23:15:35.206Z" }, + { url = "https://files.pythonhosted.org/packages/6d/84/c27df502d4bfe2873a3e3a7cf1bdb2b9cc10284d1a44797cf38bed790470/regex-2026.1.15-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:cfecdaa4b19f9ca534746eb3b55a5195d5c95b88cac32a205e981ec0a22b7d31", size = 850615, upload-time = "2026-01-14T23:15:37.523Z" }, + { url = "https://files.pythonhosted.org/packages/7d/b7/658a9782fb253680aa8ecb5ccbb51f69e088ed48142c46d9f0c99b46c575/regex-2026.1.15-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:08df9722d9b87834a3d701f3fca570b2be115654dbfd30179f30ab2f39d606d3", size = 789951, upload-time = "2026-01-14T23:15:39.582Z" }, + { url = "https://files.pythonhosted.org/packages/fc/2a/5928af114441e059f15b2f63e188bd00c6529b3051c974ade7444b85fcda/regex-2026.1.15-cp313-cp313-win32.whl", hash = "sha256:d426616dae0967ca225ab12c22274eb816558f2f99ccb4a1d52ca92e8baf180f", size = 266275, upload-time = 
"2026-01-14T23:15:42.108Z" }, + { url = "https://files.pythonhosted.org/packages/4f/16/5bfbb89e435897bff28cf0352a992ca719d9e55ebf8b629203c96b6ce4f7/regex-2026.1.15-cp313-cp313-win_amd64.whl", hash = "sha256:febd38857b09867d3ed3f4f1af7d241c5c50362e25ef43034995b77a50df494e", size = 277145, upload-time = "2026-01-14T23:15:44.244Z" }, + { url = "https://files.pythonhosted.org/packages/56/c1/a09ff7392ef4233296e821aec5f78c51be5e91ffde0d163059e50fd75835/regex-2026.1.15-cp313-cp313-win_arm64.whl", hash = "sha256:8e32f7896f83774f91499d239e24cebfadbc07639c1494bb7213983842348337", size = 270411, upload-time = "2026-01-14T23:15:45.858Z" }, + { url = "https://files.pythonhosted.org/packages/3c/38/0cfd5a78e5c6db00e6782fdae70458f89850ce95baa5e8694ab91d89744f/regex-2026.1.15-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:ec94c04149b6a7b8120f9f44565722c7ae31b7a6d2275569d2eefa76b83da3be", size = 492068, upload-time = "2026-01-14T23:15:47.616Z" }, + { url = "https://files.pythonhosted.org/packages/50/72/6c86acff16cb7c959c4355826bbf06aad670682d07c8f3998d9ef4fee7cd/regex-2026.1.15-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:40c86d8046915bb9aeb15d3f3f15b6fd500b8ea4485b30e1bbc799dab3fe29f8", size = 292756, upload-time = "2026-01-14T23:15:49.307Z" }, + { url = "https://files.pythonhosted.org/packages/4e/58/df7fb69eadfe76526ddfce28abdc0af09ffe65f20c2c90932e89d705153f/regex-2026.1.15-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:726ea4e727aba21643205edad8f2187ec682d3305d790f73b7a51c7587b64bdd", size = 291114, upload-time = "2026-01-14T23:15:51.484Z" }, + { url = "https://files.pythonhosted.org/packages/ed/6c/a4011cd1cf96b90d2cdc7e156f91efbd26531e822a7fbb82a43c1016678e/regex-2026.1.15-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1cb740d044aff31898804e7bf1181cc72c03d11dfd19932b9911ffc19a79070a", size = 807524, upload-time = "2026-01-14T23:15:53.102Z" }, + { url = 
"https://files.pythonhosted.org/packages/1d/25/a53ffb73183f69c3e9f4355c4922b76d2840aee160af6af5fac229b6201d/regex-2026.1.15-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:05d75a668e9ea16f832390d22131fe1e8acc8389a694c8febc3e340b0f810b93", size = 873455, upload-time = "2026-01-14T23:15:54.956Z" }, + { url = "https://files.pythonhosted.org/packages/66/0b/8b47fc2e8f97d9b4a851736f3890a5f786443aa8901061c55f24c955f45b/regex-2026.1.15-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d991483606f3dbec93287b9f35596f41aa2e92b7c2ebbb935b63f409e243c9af", size = 915007, upload-time = "2026-01-14T23:15:57.041Z" }, + { url = "https://files.pythonhosted.org/packages/c2/fa/97de0d681e6d26fabe71968dbee06dd52819e9a22fdce5dac7256c31ed84/regex-2026.1.15-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:194312a14819d3e44628a44ed6fea6898fdbecb0550089d84c403475138d0a09", size = 812794, upload-time = "2026-01-14T23:15:58.916Z" }, + { url = "https://files.pythonhosted.org/packages/22/38/e752f94e860d429654aa2b1c51880bff8dfe8f084268258adf9151cf1f53/regex-2026.1.15-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fe2fda4110a3d0bc163c2e0664be44657431440722c5c5315c65155cab92f9e5", size = 781159, upload-time = "2026-01-14T23:16:00.817Z" }, + { url = "https://files.pythonhosted.org/packages/e9/a7/d739ffaef33c378fc888302a018d7f81080393d96c476b058b8c64fd2b0d/regex-2026.1.15-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:124dc36c85d34ef2d9164da41a53c1c8c122cfb1f6e1ec377a1f27ee81deb794", size = 795558, upload-time = "2026-01-14T23:16:03.267Z" }, + { url = "https://files.pythonhosted.org/packages/3e/c4/542876f9a0ac576100fc73e9c75b779f5c31e3527576cfc9cb3009dcc58a/regex-2026.1.15-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:a1774cd1981cd212506a23a14dba7fdeaee259f5deba2df6229966d9911e767a", size = 868427, 
upload-time = "2026-01-14T23:16:05.646Z" }, + { url = "https://files.pythonhosted.org/packages/fc/0f/d5655bea5b22069e32ae85a947aa564912f23758e112cdb74212848a1a1b/regex-2026.1.15-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:b5f7d8d2867152cdb625e72a530d2ccb48a3d199159144cbdd63870882fb6f80", size = 769939, upload-time = "2026-01-14T23:16:07.542Z" }, + { url = "https://files.pythonhosted.org/packages/20/06/7e18a4fa9d326daeda46d471a44ef94201c46eaa26dbbb780b5d92cbfdda/regex-2026.1.15-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:492534a0ab925d1db998defc3c302dae3616a2fc3fe2e08db1472348f096ddf2", size = 854753, upload-time = "2026-01-14T23:16:10.395Z" }, + { url = "https://files.pythonhosted.org/packages/3b/67/dc8946ef3965e166f558ef3b47f492bc364e96a265eb4a2bb3ca765c8e46/regex-2026.1.15-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c661fc820cfb33e166bf2450d3dadbda47c8d8981898adb9b6fe24e5e582ba60", size = 799559, upload-time = "2026-01-14T23:16:12.347Z" }, + { url = "https://files.pythonhosted.org/packages/a5/61/1bba81ff6d50c86c65d9fd84ce9699dd106438ee4cdb105bf60374ee8412/regex-2026.1.15-cp313-cp313t-win32.whl", hash = "sha256:99ad739c3686085e614bf77a508e26954ff1b8f14da0e3765ff7abbf7799f952", size = 268879, upload-time = "2026-01-14T23:16:14.049Z" }, + { url = "https://files.pythonhosted.org/packages/e9/5e/cef7d4c5fb0ea3ac5c775fd37db5747f7378b29526cc83f572198924ff47/regex-2026.1.15-cp313-cp313t-win_amd64.whl", hash = "sha256:32655d17905e7ff8ba5c764c43cb124e34a9245e45b83c22e81041e1071aee10", size = 280317, upload-time = "2026-01-14T23:16:15.718Z" }, + { url = "https://files.pythonhosted.org/packages/b4/52/4317f7a5988544e34ab57b4bde0f04944c4786128c933fb09825924d3e82/regex-2026.1.15-cp313-cp313t-win_arm64.whl", hash = "sha256:b2a13dd6a95e95a489ca242319d18fc02e07ceb28fa9ad146385194d95b3c829", size = 271551, upload-time = "2026-01-14T23:16:17.533Z" }, + { url = 
"https://files.pythonhosted.org/packages/52/0a/47fa888ec7cbbc7d62c5f2a6a888878e76169170ead271a35239edd8f0e8/regex-2026.1.15-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:d920392a6b1f353f4aa54328c867fec3320fa50657e25f64abf17af054fc97ac", size = 489170, upload-time = "2026-01-14T23:16:19.835Z" }, + { url = "https://files.pythonhosted.org/packages/ac/c4/d000e9b7296c15737c9301708e9e7fbdea009f8e93541b6b43bdb8219646/regex-2026.1.15-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b5a28980a926fa810dbbed059547b02783952e2efd9c636412345232ddb87ff6", size = 291146, upload-time = "2026-01-14T23:16:21.541Z" }, + { url = "https://files.pythonhosted.org/packages/f9/b6/921cc61982e538682bdf3bdf5b2c6ab6b34368da1f8e98a6c1ddc503c9cf/regex-2026.1.15-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:621f73a07595d83f28952d7bd1e91e9d1ed7625fb7af0064d3516674ec93a2a2", size = 288986, upload-time = "2026-01-14T23:16:23.381Z" }, + { url = "https://files.pythonhosted.org/packages/ca/33/eb7383dde0bbc93f4fb9d03453aab97e18ad4024ac7e26cef8d1f0a2cff0/regex-2026.1.15-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3d7d92495f47567a9b1669c51fc8d6d809821849063d168121ef801bbc213846", size = 799098, upload-time = "2026-01-14T23:16:25.088Z" }, + { url = "https://files.pythonhosted.org/packages/27/56/b664dccae898fc8d8b4c23accd853f723bde0f026c747b6f6262b688029c/regex-2026.1.15-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8dd16fba2758db7a3780a051f245539c4451ca20910f5a5e6ea1c08d06d4a76b", size = 864980, upload-time = "2026-01-14T23:16:27.297Z" }, + { url = "https://files.pythonhosted.org/packages/16/40/0999e064a170eddd237bae9ccfcd8f28b3aa98a38bf727a086425542a4fc/regex-2026.1.15-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:1e1808471fbe44c1a63e5f577a1d5f02fe5d66031dcbdf12f093ffc1305a858e", size = 911607, upload-time = 
"2026-01-14T23:16:29.235Z" }, + { url = "https://files.pythonhosted.org/packages/07/78/c77f644b68ab054e5a674fb4da40ff7bffb2c88df58afa82dbf86573092d/regex-2026.1.15-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0751a26ad39d4f2ade8fe16c59b2bf5cb19eb3d2cd543e709e583d559bd9efde", size = 803358, upload-time = "2026-01-14T23:16:31.369Z" }, + { url = "https://files.pythonhosted.org/packages/27/31/d4292ea8566eaa551fafc07797961c5963cf5235c797cc2ae19b85dfd04d/regex-2026.1.15-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0f0c7684c7f9ca241344ff95a1de964f257a5251968484270e91c25a755532c5", size = 775833, upload-time = "2026-01-14T23:16:33.141Z" }, + { url = "https://files.pythonhosted.org/packages/ce/b2/cff3bf2fea4133aa6fb0d1e370b37544d18c8350a2fa118c7e11d1db0e14/regex-2026.1.15-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:74f45d170a21df41508cb67165456538425185baaf686281fa210d7e729abc34", size = 788045, upload-time = "2026-01-14T23:16:35.005Z" }, + { url = "https://files.pythonhosted.org/packages/8d/99/2cb9b69045372ec877b6f5124bda4eb4253bc58b8fe5848c973f752bc52c/regex-2026.1.15-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:f1862739a1ffb50615c0fde6bae6569b5efbe08d98e59ce009f68a336f64da75", size = 859374, upload-time = "2026-01-14T23:16:36.919Z" }, + { url = "https://files.pythonhosted.org/packages/09/16/710b0a5abe8e077b1729a562d2f297224ad079f3a66dce46844c193416c8/regex-2026.1.15-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:453078802f1b9e2b7303fb79222c054cb18e76f7bdc220f7530fdc85d319f99e", size = 763940, upload-time = "2026-01-14T23:16:38.685Z" }, + { url = "https://files.pythonhosted.org/packages/dd/d1/7585c8e744e40eb3d32f119191969b91de04c073fca98ec14299041f6e7e/regex-2026.1.15-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:a30a68e89e5a218b8b23a52292924c1f4b245cb0c68d1cce9aec9bbda6e2c160", size = 850112, upload-time = "2026-01-14T23:16:40.646Z" }, + { url = 
"https://files.pythonhosted.org/packages/af/d6/43e1dd85df86c49a347aa57c1f69d12c652c7b60e37ec162e3096194a278/regex-2026.1.15-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:9479cae874c81bf610d72b85bb681a94c95722c127b55445285fb0e2c82db8e1", size = 789586, upload-time = "2026-01-14T23:16:42.799Z" }, + { url = "https://files.pythonhosted.org/packages/93/38/77142422f631e013f316aaae83234c629555729a9fbc952b8a63ac91462a/regex-2026.1.15-cp314-cp314-win32.whl", hash = "sha256:d639a750223132afbfb8f429c60d9d318aeba03281a5f1ab49f877456448dcf1", size = 271691, upload-time = "2026-01-14T23:16:44.671Z" }, + { url = "https://files.pythonhosted.org/packages/4a/a9/ab16b4649524ca9e05213c1cdbb7faa85cc2aa90a0230d2f796cbaf22736/regex-2026.1.15-cp314-cp314-win_amd64.whl", hash = "sha256:4161d87f85fa831e31469bfd82c186923070fc970b9de75339b68f0c75b51903", size = 280422, upload-time = "2026-01-14T23:16:46.607Z" }, + { url = "https://files.pythonhosted.org/packages/be/2a/20fd057bf3521cb4791f69f869635f73e0aaf2b9ad2d260f728144f9047c/regex-2026.1.15-cp314-cp314-win_arm64.whl", hash = "sha256:91c5036ebb62663a6b3999bdd2e559fd8456d17e2b485bf509784cd31a8b1705", size = 273467, upload-time = "2026-01-14T23:16:48.967Z" }, + { url = "https://files.pythonhosted.org/packages/ad/77/0b1e81857060b92b9cad239104c46507dd481b3ff1fa79f8e7f865aae38a/regex-2026.1.15-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:ee6854c9000a10938c79238de2379bea30c82e4925a371711af45387df35cab8", size = 492073, upload-time = "2026-01-14T23:16:51.154Z" }, + { url = "https://files.pythonhosted.org/packages/70/f3/f8302b0c208b22c1e4f423147e1913fd475ddd6230565b299925353de644/regex-2026.1.15-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2c2b80399a422348ce5de4fe40c418d6299a0fa2803dd61dc0b1a2f28e280fcf", size = 292757, upload-time = "2026-01-14T23:16:53.08Z" }, + { url = 
"https://files.pythonhosted.org/packages/bf/f0/ef55de2460f3b4a6da9d9e7daacd0cb79d4ef75c64a2af316e68447f0df0/regex-2026.1.15-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:dca3582bca82596609959ac39e12b7dad98385b4fefccb1151b937383cec547d", size = 291122, upload-time = "2026-01-14T23:16:55.383Z" }, + { url = "https://files.pythonhosted.org/packages/cf/55/bb8ccbacabbc3a11d863ee62a9f18b160a83084ea95cdfc5d207bfc3dd75/regex-2026.1.15-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef71d476caa6692eea743ae5ea23cde3260677f70122c4d258ca952e5c2d4e84", size = 807761, upload-time = "2026-01-14T23:16:57.251Z" }, + { url = "https://files.pythonhosted.org/packages/8f/84/f75d937f17f81e55679a0509e86176e29caa7298c38bd1db7ce9c0bf6075/regex-2026.1.15-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c243da3436354f4af6c3058a3f81a97d47ea52c9bd874b52fd30274853a1d5df", size = 873538, upload-time = "2026-01-14T23:16:59.349Z" }, + { url = "https://files.pythonhosted.org/packages/b8/d9/0da86327df70349aa8d86390da91171bd3ca4f0e7c1d1d453a9c10344da3/regex-2026.1.15-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8355ad842a7c7e9e5e55653eade3b7d1885ba86f124dd8ab1f722f9be6627434", size = 915066, upload-time = "2026-01-14T23:17:01.607Z" }, + { url = "https://files.pythonhosted.org/packages/2a/5e/f660fb23fc77baa2a61aa1f1fe3a4eea2bbb8a286ddec148030672e18834/regex-2026.1.15-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f192a831d9575271a22d804ff1a5355355723f94f31d9eef25f0d45a152fdc1a", size = 812938, upload-time = "2026-01-14T23:17:04.366Z" }, + { url = "https://files.pythonhosted.org/packages/69/33/a47a29bfecebbbfd1e5cd3f26b28020a97e4820f1c5148e66e3b7d4b4992/regex-2026.1.15-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:166551807ec20d47ceaeec380081f843e88c8949780cd42c40f18d16168bed10", size = 781314, upload-time = "2026-01-14T23:17:06.378Z" }, + { url = "https://files.pythonhosted.org/packages/65/ec/7ec2bbfd4c3f4e494a24dec4c6943a668e2030426b1b8b949a6462d2c17b/regex-2026.1.15-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f9ca1cbdc0fbfe5e6e6f8221ef2309988db5bcede52443aeaee9a4ad555e0dac", size = 795652, upload-time = "2026-01-14T23:17:08.521Z" }, + { url = "https://files.pythonhosted.org/packages/46/79/a5d8651ae131fe27d7c521ad300aa7f1c7be1dbeee4d446498af5411b8a9/regex-2026.1.15-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b30bcbd1e1221783c721483953d9e4f3ab9c5d165aa709693d3f3946747b1aea", size = 868550, upload-time = "2026-01-14T23:17:10.573Z" }, + { url = "https://files.pythonhosted.org/packages/06/b7/25635d2809664b79f183070786a5552dd4e627e5aedb0065f4e3cf8ee37d/regex-2026.1.15-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:2a8d7b50c34578d0d3bf7ad58cde9652b7d683691876f83aedc002862a35dc5e", size = 769981, upload-time = "2026-01-14T23:17:12.871Z" }, + { url = "https://files.pythonhosted.org/packages/16/8b/fc3fcbb2393dcfa4a6c5ffad92dc498e842df4581ea9d14309fcd3c55fb9/regex-2026.1.15-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:9d787e3310c6a6425eb346be4ff2ccf6eece63017916fd77fe8328c57be83521", size = 854780, upload-time = "2026-01-14T23:17:14.837Z" }, + { url = "https://files.pythonhosted.org/packages/d0/38/dde117c76c624713c8a2842530be9c93ca8b606c0f6102d86e8cd1ce8bea/regex-2026.1.15-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:619843841e220adca114118533a574a9cd183ed8a28b85627d2844c500a2b0db", size = 799778, upload-time = "2026-01-14T23:17:17.369Z" }, + { url = "https://files.pythonhosted.org/packages/e3/0d/3a6cfa9ae99606afb612d8fb7a66b245a9d5ff0f29bb347c8a30b6ad561b/regex-2026.1.15-cp314-cp314t-win32.whl", hash = "sha256:e90b8db97f6f2c97eb045b51a6b2c5ed69cedd8392459e0642d4199b94fabd7e", size = 274667, upload-time = 
"2026-01-14T23:17:19.301Z" }, + { url = "https://files.pythonhosted.org/packages/5b/b2/297293bb0742fd06b8d8e2572db41a855cdf1cae0bf009b1cb74fe07e196/regex-2026.1.15-cp314-cp314t-win_amd64.whl", hash = "sha256:5ef19071f4ac9f0834793af85bd04a920b4407715624e40cb7a0631a11137cdf", size = 284386, upload-time = "2026-01-14T23:17:21.231Z" }, + { url = "https://files.pythonhosted.org/packages/95/e4/a3b9480c78cf8ee86626cb06f8d931d74d775897d44201ccb813097ae697/regex-2026.1.15-cp314-cp314t-win_arm64.whl", hash = "sha256:ca89c5e596fc05b015f27561b3793dc2fa0917ea0d7507eebb448efd35274a70", size = 274837, upload-time = "2026-01-14T23:17:23.146Z" }, +] + +[[package]] +name = "requests" +version = "2.32.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, +] + +[[package]] +name = "starlette" +version = "0.50.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ba/b8/73a0e6a6e079a9d9cfa64113d771e421640b6f679a52eeb9b32f72d871a1/starlette-0.50.0.tar.gz", hash = "sha256:a2a17b22203254bcbc2e1f926d2d55f3f9497f769416b3190768befe598fa3ca", size = 2646985, upload-time = "2025-11-01T15:25:27.516Z" } +wheels = [ + 
{ url = "https://files.pythonhosted.org/packages/d9/52/1064f510b141bd54025f9b55105e26d1fa970b9be67ad766380a3c9b74b0/starlette-0.50.0-py3-none-any.whl", hash = "sha256:9e5391843ec9b6e472eed1365a78c8098cfceb7a74bfd4d6b1c0c0095efb3bca", size = 74033, upload-time = "2025-11-01T15:25:25.461Z" }, +] + +[[package]] +name = "textblob" +version = "0.19.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nltk" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/63/a1/31fc6a5e9e46f2d84f72f12048588feac5464486e526dbfcc4719569cd3e/textblob-0.19.0.tar.gz", hash = "sha256:0a3d06a47cf7759441da3418c4843aed3797a998beba2108c6245a2020f83b01", size = 637872, upload-time = "2025-01-13T23:03:07.352Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/d6/40aa5aead775582ea0cf35870e5a3f16fab4b967f1ad2debe675f673f923/textblob-0.19.0-py3-none-any.whl", hash = "sha256:af6b8827886f1ee839a625f4865e5abb1584eae8db2259627b33a6a0b02ef19d", size = 624280, upload-time = "2025-01-13T23:03:01.034Z" }, +] + +[[package]] +name = "tqdm" +version = "4.67.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737, upload-time = "2024-11-24T20:12:22.481Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540, upload-time = "2024-11-24T20:12:19.698Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "typing-inspection" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, +] + +[[package]] +name = "urllib3" +version = "2.6.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = 
"sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" }, +] + +[[package]] +name = "uvicorn" +version = "0.40.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c3/d1/8f3c683c9561a4e6689dd3b1d345c815f10f86acd044ee1fb9a4dcd0b8c5/uvicorn-0.40.0.tar.gz", hash = "sha256:839676675e87e73694518b5574fd0f24c9d97b46bea16df7b8c05ea1a51071ea", size = 81761, upload-time = "2025-12-21T14:16:22.45Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3d/d8/2083a1daa7439a66f3a48589a57d576aa117726762618f6bb09fe3798796/uvicorn-0.40.0-py3-none-any.whl", hash = "sha256:c6c8f55bc8bf13eb6fa9ff87ad62308bbbc33d0b67f84293151efe87e0d5f2ee", size = 68502, upload-time = "2025-12-21T14:16:21.041Z" }, +] + +[[package]] +name = "vadersentiment" +version = "3.3.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/77/8c/4a48c10a50f750ae565e341e697d74a38075a3e43ff0df6f1ab72e186902/vaderSentiment-3.3.2.tar.gz", hash = "sha256:5d7c06e027fc8b99238edb0d53d970cf97066ef97654009890b83703849632f9", size = 2466783, upload-time = "2020-05-22T15:06:32.81Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/fc/310e16254683c1ed35eeb97386986d6c00bc29df17ce280aed64d55537e9/vaderSentiment-3.3.2-py2.py3-none-any.whl", hash = "sha256:3bf1d243b98b1afad575b9f22bc2cb1e212b94ff89ca74f8a23a588d024ea311", size = 125950, upload-time = "2020-05-22T15:07:00.052Z" }, +] + +[[package]] +name = "websockets" +version = "16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/04/24/4b2031d72e840ce4c1ccb255f693b15c334757fc50023e4db9537080b8c4/websockets-16.0.tar.gz", hash = 
"sha256:5f6261a5e56e8d5c42a4497b364ea24d94d9563e8fbd44e78ac40879c60179b5", size = 179346, upload-time = "2026-01-10T09:23:47.181Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/84/7b/bac442e6b96c9d25092695578dda82403c77936104b5682307bd4deb1ad4/websockets-16.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:71c989cbf3254fbd5e84d3bff31e4da39c43f884e64f2551d14bb3c186230f00", size = 177365, upload-time = "2026-01-10T09:22:46.787Z" }, + { url = "https://files.pythonhosted.org/packages/b0/fe/136ccece61bd690d9c1f715baaeefd953bb2360134de73519d5df19d29ca/websockets-16.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8b6e209ffee39ff1b6d0fa7bfef6de950c60dfb91b8fcead17da4ee539121a79", size = 175038, upload-time = "2026-01-10T09:22:47.999Z" }, + { url = "https://files.pythonhosted.org/packages/40/1e/9771421ac2286eaab95b8575b0cb701ae3663abf8b5e1f64f1fd90d0a673/websockets-16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:86890e837d61574c92a97496d590968b23c2ef0aeb8a9bc9421d174cd378ae39", size = 175328, upload-time = "2026-01-10T09:22:49.809Z" }, + { url = "https://files.pythonhosted.org/packages/18/29/71729b4671f21e1eaa5d6573031ab810ad2936c8175f03f97f3ff164c802/websockets-16.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9b5aca38b67492ef518a8ab76851862488a478602229112c4b0d58d63a7a4d5c", size = 184915, upload-time = "2026-01-10T09:22:51.071Z" }, + { url = "https://files.pythonhosted.org/packages/97/bb/21c36b7dbbafc85d2d480cd65df02a1dc93bf76d97147605a8e27ff9409d/websockets-16.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e0334872c0a37b606418ac52f6ab9cfd17317ac26365f7f65e203e2d0d0d359f", size = 186152, upload-time = "2026-01-10T09:22:52.224Z" }, + { url = "https://files.pythonhosted.org/packages/4a/34/9bf8df0c0cf88fa7bfe36678dc7b02970c9a7d5e065a3099292db87b1be2/websockets-16.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:a0b31e0b424cc6b5a04b8838bbaec1688834b2383256688cf47eb97412531da1", size = 185583, upload-time = "2026-01-10T09:22:53.443Z" }, + { url = "https://files.pythonhosted.org/packages/47/88/4dd516068e1a3d6ab3c7c183288404cd424a9a02d585efbac226cb61ff2d/websockets-16.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:485c49116d0af10ac698623c513c1cc01c9446c058a4e61e3bf6c19dff7335a2", size = 184880, upload-time = "2026-01-10T09:22:55.033Z" }, + { url = "https://files.pythonhosted.org/packages/91/d6/7d4553ad4bf1c0421e1ebd4b18de5d9098383b5caa1d937b63df8d04b565/websockets-16.0-cp312-cp312-win32.whl", hash = "sha256:eaded469f5e5b7294e2bdca0ab06becb6756ea86894a47806456089298813c89", size = 178261, upload-time = "2026-01-10T09:22:56.251Z" }, + { url = "https://files.pythonhosted.org/packages/c3/f0/f3a17365441ed1c27f850a80b2bc680a0fa9505d733fe152fdf5e98c1c0b/websockets-16.0-cp312-cp312-win_amd64.whl", hash = "sha256:5569417dc80977fc8c2d43a86f78e0a5a22fee17565d78621b6bb264a115d4ea", size = 178693, upload-time = "2026-01-10T09:22:57.478Z" }, + { url = "https://files.pythonhosted.org/packages/cc/9c/baa8456050d1c1b08dd0ec7346026668cbc6f145ab4e314d707bb845bf0d/websockets-16.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:878b336ac47938b474c8f982ac2f7266a540adc3fa4ad74ae96fea9823a02cc9", size = 177364, upload-time = "2026-01-10T09:22:59.333Z" }, + { url = "https://files.pythonhosted.org/packages/7e/0c/8811fc53e9bcff68fe7de2bcbe75116a8d959ac699a3200f4847a8925210/websockets-16.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:52a0fec0e6c8d9a784c2c78276a48a2bdf099e4ccc2a4cad53b27718dbfd0230", size = 175039, upload-time = "2026-01-10T09:23:01.171Z" }, + { url = "https://files.pythonhosted.org/packages/aa/82/39a5f910cb99ec0b59e482971238c845af9220d3ab9fa76dd9162cda9d62/websockets-16.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e6578ed5b6981005df1860a56e3617f14a6c307e6a71b4fff8c48fdc50f3ed2c", size = 175323, upload-time = "2026-01-10T09:23:02.341Z" }, + { url 
= "https://files.pythonhosted.org/packages/bd/28/0a25ee5342eb5d5f297d992a77e56892ecb65e7854c7898fb7d35e9b33bd/websockets-16.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:95724e638f0f9c350bb1c2b0a7ad0e83d9cc0c9259f3ea94e40d7b02a2179ae5", size = 184975, upload-time = "2026-01-10T09:23:03.756Z" }, + { url = "https://files.pythonhosted.org/packages/f9/66/27ea52741752f5107c2e41fda05e8395a682a1e11c4e592a809a90c6a506/websockets-16.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c0204dc62a89dc9d50d682412c10b3542d748260d743500a85c13cd1ee4bde82", size = 186203, upload-time = "2026-01-10T09:23:05.01Z" }, + { url = "https://files.pythonhosted.org/packages/37/e5/8e32857371406a757816a2b471939d51c463509be73fa538216ea52b792a/websockets-16.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:52ac480f44d32970d66763115edea932f1c5b1312de36df06d6b219f6741eed8", size = 185653, upload-time = "2026-01-10T09:23:06.301Z" }, + { url = "https://files.pythonhosted.org/packages/9b/67/f926bac29882894669368dc73f4da900fcdf47955d0a0185d60103df5737/websockets-16.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6e5a82b677f8f6f59e8dfc34ec06ca6b5b48bc4fcda346acd093694cc2c24d8f", size = 184920, upload-time = "2026-01-10T09:23:07.492Z" }, + { url = "https://files.pythonhosted.org/packages/3c/a1/3d6ccdcd125b0a42a311bcd15a7f705d688f73b2a22d8cf1c0875d35d34a/websockets-16.0-cp313-cp313-win32.whl", hash = "sha256:abf050a199613f64c886ea10f38b47770a65154dc37181bfaff70c160f45315a", size = 178255, upload-time = "2026-01-10T09:23:09.245Z" }, + { url = "https://files.pythonhosted.org/packages/6b/ae/90366304d7c2ce80f9b826096a9e9048b4bb760e44d3b873bb272cba696b/websockets-16.0-cp313-cp313-win_amd64.whl", hash = "sha256:3425ac5cf448801335d6fdc7ae1eb22072055417a96cc6b31b3861f455fbc156", size = 178689, upload-time = "2026-01-10T09:23:10.483Z" }, + { url = 
"https://files.pythonhosted.org/packages/f3/1d/e88022630271f5bd349ed82417136281931e558d628dd52c4d8621b4a0b2/websockets-16.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:8cc451a50f2aee53042ac52d2d053d08bf89bcb31ae799cb4487587661c038a0", size = 177406, upload-time = "2026-01-10T09:23:12.178Z" }, + { url = "https://files.pythonhosted.org/packages/f2/78/e63be1bf0724eeb4616efb1ae1c9044f7c3953b7957799abb5915bffd38e/websockets-16.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:daa3b6ff70a9241cf6c7fc9e949d41232d9d7d26fd3522b1ad2b4d62487e9904", size = 175085, upload-time = "2026-01-10T09:23:13.511Z" }, + { url = "https://files.pythonhosted.org/packages/bb/f4/d3c9220d818ee955ae390cf319a7c7a467beceb24f05ee7aaaa2414345ba/websockets-16.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:fd3cb4adb94a2a6e2b7c0d8d05cb94e6f1c81a0cf9dc2694fb65c7e8d94c42e4", size = 175328, upload-time = "2026-01-10T09:23:14.727Z" }, + { url = "https://files.pythonhosted.org/packages/63/bc/d3e208028de777087e6fb2b122051a6ff7bbcca0d6df9d9c2bf1dd869ae9/websockets-16.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:781caf5e8eee67f663126490c2f96f40906594cb86b408a703630f95550a8c3e", size = 185044, upload-time = "2026-01-10T09:23:15.939Z" }, + { url = "https://files.pythonhosted.org/packages/ad/6e/9a0927ac24bd33a0a9af834d89e0abc7cfd8e13bed17a86407a66773cc0e/websockets-16.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:caab51a72c51973ca21fa8a18bd8165e1a0183f1ac7066a182ff27107b71e1a4", size = 186279, upload-time = "2026-01-10T09:23:17.148Z" }, + { url = "https://files.pythonhosted.org/packages/b9/ca/bf1c68440d7a868180e11be653c85959502efd3a709323230314fda6e0b3/websockets-16.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:19c4dc84098e523fd63711e563077d39e90ec6702aff4b5d9e344a60cb3c0cb1", size = 185711, upload-time = "2026-01-10T09:23:18.372Z" }, + { url = 
"https://files.pythonhosted.org/packages/c4/f8/fdc34643a989561f217bb477cbc47a3a07212cbda91c0e4389c43c296ebf/websockets-16.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:a5e18a238a2b2249c9a9235466b90e96ae4795672598a58772dd806edc7ac6d3", size = 184982, upload-time = "2026-01-10T09:23:19.652Z" }, + { url = "https://files.pythonhosted.org/packages/dd/d1/574fa27e233764dbac9c52730d63fcf2823b16f0856b3329fc6268d6ae4f/websockets-16.0-cp314-cp314-win32.whl", hash = "sha256:a069d734c4a043182729edd3e9f247c3b2a4035415a9172fd0f1b71658a320a8", size = 177915, upload-time = "2026-01-10T09:23:21.458Z" }, + { url = "https://files.pythonhosted.org/packages/8a/f1/ae6b937bf3126b5134ce1f482365fde31a357c784ac51852978768b5eff4/websockets-16.0-cp314-cp314-win_amd64.whl", hash = "sha256:c0ee0e63f23914732c6d7e0cce24915c48f3f1512ec1d079ed01fc629dab269d", size = 178381, upload-time = "2026-01-10T09:23:22.715Z" }, + { url = "https://files.pythonhosted.org/packages/06/9b/f791d1db48403e1f0a27577a6beb37afae94254a8c6f08be4a23e4930bc0/websockets-16.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:a35539cacc3febb22b8f4d4a99cc79b104226a756aa7400adc722e83b0d03244", size = 177737, upload-time = "2026-01-10T09:23:24.523Z" }, + { url = "https://files.pythonhosted.org/packages/bd/40/53ad02341fa33b3ce489023f635367a4ac98b73570102ad2cdd770dacc9a/websockets-16.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:b784ca5de850f4ce93ec85d3269d24d4c82f22b7212023c974c401d4980ebc5e", size = 175268, upload-time = "2026-01-10T09:23:25.781Z" }, + { url = "https://files.pythonhosted.org/packages/74/9b/6158d4e459b984f949dcbbb0c5d270154c7618e11c01029b9bbd1bb4c4f9/websockets-16.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:569d01a4e7fba956c5ae4fc988f0d4e187900f5497ce46339c996dbf24f17641", size = 175486, upload-time = "2026-01-10T09:23:27.033Z" }, + { url = 
"https://files.pythonhosted.org/packages/e5/2d/7583b30208b639c8090206f95073646c2c9ffd66f44df967981a64f849ad/websockets-16.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:50f23cdd8343b984957e4077839841146f67a3d31ab0d00e6b824e74c5b2f6e8", size = 185331, upload-time = "2026-01-10T09:23:28.259Z" }, + { url = "https://files.pythonhosted.org/packages/45/b0/cce3784eb519b7b5ad680d14b9673a31ab8dcb7aad8b64d81709d2430aa8/websockets-16.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:152284a83a00c59b759697b7f9e9cddf4e3c7861dd0d964b472b70f78f89e80e", size = 186501, upload-time = "2026-01-10T09:23:29.449Z" }, + { url = "https://files.pythonhosted.org/packages/19/60/b8ebe4c7e89fb5f6cdf080623c9d92789a53636950f7abacfc33fe2b3135/websockets-16.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:bc59589ab64b0022385f429b94697348a6a234e8ce22544e3681b2e9331b5944", size = 186062, upload-time = "2026-01-10T09:23:31.368Z" }, + { url = "https://files.pythonhosted.org/packages/88/a8/a080593f89b0138b6cba1b28f8df5673b5506f72879322288b031337c0b8/websockets-16.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:32da954ffa2814258030e5a57bc73a3635463238e797c7375dc8091327434206", size = 185356, upload-time = "2026-01-10T09:23:32.627Z" }, + { url = "https://files.pythonhosted.org/packages/c2/b6/b9afed2afadddaf5ebb2afa801abf4b0868f42f8539bfe4b071b5266c9fe/websockets-16.0-cp314-cp314t-win32.whl", hash = "sha256:5a4b4cc550cb665dd8a47f868c8d04c8230f857363ad3c9caf7a0c3bf8c61ca6", size = 178085, upload-time = "2026-01-10T09:23:33.816Z" }, + { url = "https://files.pythonhosted.org/packages/9f/3e/28135a24e384493fa804216b79a6a6759a38cc4ff59118787b9fb693df93/websockets-16.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b14dc141ed6d2dde437cddb216004bcac6a1df0935d79656387bd41632ba0bbd", size = 178531, upload-time = "2026-01-10T09:23:35.016Z" }, + { url = 
"https://files.pythonhosted.org/packages/6f/28/258ebab549c2bf3e64d2b0217b973467394a9cea8c42f70418ca2c5d0d2e/websockets-16.0-py3-none-any.whl", hash = "sha256:1637db62fad1dc833276dded54215f2c7fa46912301a24bd94d45d46a011ceec", size = 171598, upload-time = "2026-01-10T09:23:45.395Z" }, +] + +[[package]] +name = "youtube-to-audio" +version = "0.1.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "yt-dlp" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7c/1e/e70d4cb1de73bcad3262a6990c8175744e69eefa71958c23d7b86314aa79/youtube_to_audio-0.1.7.tar.gz", hash = "sha256:d9dcbeaea22111f49937273fe75fd55f49c7045822c3d17b9284b5317d08306c", size = 4664, upload-time = "2025-03-05T13:08:14.88Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/d2/62b0628e5295230f33f2b14dfe98653910ab444f3d704269b399e3d92e41/youtube_to_audio-0.1.7-py3-none-any.whl", hash = "sha256:fde797af7822cd2c22a026fcab705ce4ba4ef9a65cd076b49c17eda062417e01", size = 5336, upload-time = "2025-03-05T13:08:13.912Z" }, +] + +[[package]] +name = "yt-dlp" +version = "2025.12.8" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/14/77/db924ebbd99d0b2b571c184cb08ed232cf4906c6f9b76eed763cd2c84170/yt_dlp-2025.12.8.tar.gz", hash = "sha256:b773c81bb6b71cb2c111cfb859f453c7a71cf2ef44eff234ff155877184c3e4f", size = 3088947, upload-time = "2025-12-08T00:16:01.649Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6e/2f/98c3596ad923f8efd32c90dca62e241e8ad9efcebf20831173c357042ba0/yt_dlp-2025.12.8-py3-none-any.whl", hash = "sha256:36e2584342e409cfbfa0b5e61448a1c5189e345cf4564294456ee509e7d3e065", size = 3291464, upload-time = "2025-12-08T00:15:58.556Z" }, +] diff --git a/package-lock.json b/package-lock.json index ed5ed1b..1f8f116 100644 --- a/package-lock.json +++ b/package-lock.json @@ -8,9 +8,11 @@ "name": "mediator-app", "version": "0.1.0", "dependencies": { + 
"@anthropic-ai/sdk": "^0.71.0", "@deepgram/sdk": "^4.11.3", "@ricky0123/vad-web": "^0.0.30", "@supabase/supabase-js": "^2.47.0", + "chart.js": "^4.5.1", "express-rate-limit": "^8.2.1", "framer-motion": "^12.23.25", "ioredis": "^5.8.2", @@ -18,7 +20,9 @@ "next": "16.0.7", "onnxruntime-web": "^1.23.2", "react": "19.2.0", + "react-chartjs-2": "^5.3.1", "react-dom": "19.2.0", + "recharts": "^2.12.7", "resend": "^6.8.0", "socket.io": "^4.8.1", "socket.io-client": "^4.8.1", @@ -52,6 +56,26 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/@anthropic-ai/sdk": { + "version": "0.71.2", + "resolved": "https://registry.npmjs.org/@anthropic-ai/sdk/-/sdk-0.71.2.tgz", + "integrity": "sha512-TGNDEUuEstk/DKu0/TflXAEt+p+p/WhTlFzEnoosvbaDU2LTjm42igSdlL0VijrKpWejtOKxX0b8A7uc+XiSAQ==", + "license": "MIT", + "dependencies": { + "json-schema-to-ts": "^3.1.1" + }, + "bin": { + "anthropic-ai-sdk": "bin/cli" + }, + "peerDependencies": { + "zod": "^3.25.0 || ^4.0.0" + }, + "peerDependenciesMeta": { + "zod": { + "optional": true + } + } + }, "node_modules/@babel/code-frame": { "version": "7.27.1", "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz", @@ -1096,6 +1120,12 @@ "@jridgewell/sourcemap-codec": "^1.4.14" } }, + "node_modules/@kurkle/color": { + "version": "0.3.4", + "resolved": "https://registry.npmjs.org/@kurkle/color/-/color-0.3.4.tgz", + "integrity": "sha512-M5UknZPHRu3DEDWoipU6sE8PdkZ6Z/S+v4dD+Ke8IaNlpdSQah50lz1KtcFBa2vsdOnwbbnxJwVM4wty6udA5w==", + "license": "MIT" + }, "node_modules/@napi-rs/wasm-runtime": { "version": "0.2.12", "resolved": "https://registry.npmjs.org/@napi-rs/wasm-runtime/-/wasm-runtime-0.2.12.tgz", @@ -1794,6 +1824,69 @@ "@types/node": "*" } }, + "node_modules/@types/d3-array": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/@types/d3-array/-/d3-array-3.2.2.tgz", + "integrity": "sha512-hOLWVbm7uRza0BYXpIIW5pxfrKe0W+D5lrFiAEYR+pb6w3N2SwSMaJbXdUfSEv+dT4MfHBLtn5js0LAWaO6otw==", + "license": 
"MIT" + }, + "node_modules/@types/d3-color": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/@types/d3-color/-/d3-color-3.1.3.tgz", + "integrity": "sha512-iO90scth9WAbmgv7ogoq57O9YpKmFBbmoEoCHDB2xMBY0+/KVrqAaCDyCE16dUspeOvIxFFRI+0sEtqDqy2b4A==", + "license": "MIT" + }, + "node_modules/@types/d3-ease": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/d3-ease/-/d3-ease-3.0.2.tgz", + "integrity": "sha512-NcV1JjO5oDzoK26oMzbILE6HW7uVXOHLQvHshBUW4UMdZGfiY6v5BeQwh9a9tCzv+CeefZQHJt5SRgK154RtiA==", + "license": "MIT" + }, + "node_modules/@types/d3-interpolate": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-interpolate/-/d3-interpolate-3.0.4.tgz", + "integrity": "sha512-mgLPETlrpVV1YRJIglr4Ez47g7Yxjl1lj7YKsiMCb27VJH9W8NVM6Bb9d8kkpG/uAQS5AmbA48q2IAolKKo1MA==", + "license": "MIT", + "dependencies": { + "@types/d3-color": "*" + } + }, + "node_modules/@types/d3-path": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/@types/d3-path/-/d3-path-3.1.1.tgz", + "integrity": "sha512-VMZBYyQvbGmWyWVea0EHs/BwLgxc+MKi1zLDCONksozI4YJMcTt8ZEuIR4Sb1MMTE8MMW49v0IwI5+b7RmfWlg==", + "license": "MIT" + }, + "node_modules/@types/d3-scale": { + "version": "4.0.9", + "resolved": "https://registry.npmjs.org/@types/d3-scale/-/d3-scale-4.0.9.tgz", + "integrity": "sha512-dLmtwB8zkAeO/juAMfnV+sItKjlsw2lKdZVVy6LRr0cBmegxSABiLEpGVmSJJ8O08i4+sGR6qQtb6WtuwJdvVw==", + "license": "MIT", + "dependencies": { + "@types/d3-time": "*" + } + }, + "node_modules/@types/d3-shape": { + "version": "3.1.8", + "resolved": "https://registry.npmjs.org/@types/d3-shape/-/d3-shape-3.1.8.tgz", + "integrity": "sha512-lae0iWfcDeR7qt7rA88BNiqdvPS5pFVPpo5OfjElwNaT2yyekbM0C9vK+yqBqEmHr6lDkRnYNoTBYlAgJa7a4w==", + "license": "MIT", + "dependencies": { + "@types/d3-path": "*" + } + }, + "node_modules/@types/d3-time": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-time/-/d3-time-3.0.4.tgz", + "integrity": 
"sha512-yuzZug1nkAAaBlBBikKZTgzCeA+k1uy4ZFwWANOfKw5z5LRhV0gNA7gNkKm7HoK+HRN0wX3EkxGk0fpbWhmB7g==", + "license": "MIT" + }, + "node_modules/@types/d3-timer": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/d3-timer/-/d3-timer-3.0.2.tgz", + "integrity": "sha512-Ps3T8E8dZDam6fUyNiMkekK3XUsaUEik+idO9/YjPtfj2qruF8tFBXS7XhtE4iIXBLxhmLjP3SXpLhVf21I9Lw==", + "license": "MIT" + }, "node_modules/@types/estree": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", @@ -2978,6 +3071,18 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, + "node_modules/chart.js": { + "version": "4.5.1", + "resolved": "https://registry.npmjs.org/chart.js/-/chart.js-4.5.1.tgz", + "integrity": "sha512-GIjfiT9dbmHRiYi6Nl2yFCq7kkwdkp1W/lp2J99rX0yo9tgJGn3lKQATztIjb5tVtevcBtIdICNWqlq5+E8/Pw==", + "license": "MIT", + "dependencies": { + "@kurkle/color": "^0.3.0" + }, + "engines": { + "pnpm": ">=8" + } + }, "node_modules/client-only": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/client-only/-/client-only-0.0.1.tgz", @@ -2999,6 +3104,15 @@ "node": ">=12" } }, + "node_modules/clsx": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz", + "integrity": "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/cluster-key-slot": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.2.tgz", @@ -3189,9 +3303,129 @@ "version": "3.2.3", "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.2.3.tgz", "integrity": "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==", - "devOptional": true, "license": "MIT" }, + "node_modules/d3-array": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.2.4.tgz", + "integrity": 
"sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==", + "license": "ISC", + "dependencies": { + "internmap": "1 - 2" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-color": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-color/-/d3-color-3.1.0.tgz", + "integrity": "sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-ease": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-ease/-/d3-ease-3.0.1.tgz", + "integrity": "sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==", + "license": "BSD-3-Clause", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-format": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/d3-format/-/d3-format-3.1.2.tgz", + "integrity": "sha512-AJDdYOdnyRDV5b6ArilzCPPwc1ejkHcoyFarqlPqT7zRYjhavcT3uSrqcMvsgh2CgoPbK3RCwyHaVyxYcP2Arg==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-interpolate": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-interpolate/-/d3-interpolate-3.0.1.tgz", + "integrity": "sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==", + "license": "ISC", + "dependencies": { + "d3-color": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-path": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-3.1.0.tgz", + "integrity": "sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-scale": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/d3-scale/-/d3-scale-4.0.2.tgz", + "integrity": 
"sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==", + "license": "ISC", + "dependencies": { + "d3-array": "2.10.0 - 3", + "d3-format": "1 - 3", + "d3-interpolate": "1.2.0 - 3", + "d3-time": "2.1.1 - 3", + "d3-time-format": "2 - 4" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-shape": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-3.2.0.tgz", + "integrity": "sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==", + "license": "ISC", + "dependencies": { + "d3-path": "^3.1.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-time": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-time/-/d3-time-3.1.0.tgz", + "integrity": "sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==", + "license": "ISC", + "dependencies": { + "d3-array": "2 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-time-format": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/d3-time-format/-/d3-time-format-4.1.0.tgz", + "integrity": "sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==", + "license": "ISC", + "dependencies": { + "d3-time": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-timer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-timer/-/d3-timer-3.0.1.tgz", + "integrity": "sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, "node_modules/damerau-levenshtein": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/damerau-levenshtein/-/damerau-levenshtein-1.0.8.tgz", @@ -3276,6 +3510,12 @@ } } }, + "node_modules/decimal.js-light": { + "version": "2.5.1", + "resolved": 
"https://registry.npmjs.org/decimal.js-light/-/decimal.js-light-2.5.1.tgz", + "integrity": "sha512-qIMFpTMZmny+MMIitAB6D7iVPEorVw6YQRWkvarTkT4tBeSLLiHzcwj6q0MmYSFCiVpiqPJTJEYIrpcPzVEIvg==", + "license": "MIT" + }, "node_modules/deep-is": { "version": "0.1.4", "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", @@ -3370,6 +3610,16 @@ "node": ">=0.10.0" } }, + "node_modules/dom-helpers": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/dom-helpers/-/dom-helpers-5.2.1.tgz", + "integrity": "sha512-nRCa7CK3VTrM2NmGkIy4cbK7IZlgBE/PYMn55rrXefr5xXDP0LdtfPnblFDoVdcAfslJ7or6iqAUnx0CCGIWQA==", + "license": "MIT", + "dependencies": { + "@babel/runtime": "^7.8.7", + "csstype": "^3.0.2" + } + }, "node_modules/dompurify": { "version": "3.3.1", "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.3.1.tgz", @@ -4153,6 +4403,12 @@ "node": ">= 0.6" } }, + "node_modules/eventemitter3": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz", + "integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==", + "license": "MIT" + }, "node_modules/events": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", @@ -4282,6 +4538,15 @@ "dev": true, "license": "MIT" }, + "node_modules/fast-equals": { + "version": "5.4.0", + "resolved": "https://registry.npmjs.org/fast-equals/-/fast-equals-5.4.0.tgz", + "integrity": "sha512-jt2DW/aNFNwke7AUd+Z+e6pz39KO5rzdbbFCg2sGafS4mk13MI7Z8O5z9cADNn5lhGODIgLwug6TZO2ctf7kcw==", + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, "node_modules/fast-glob": { "version": "3.3.1", "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.1.tgz", @@ -4956,6 +5221,15 @@ "node": ">= 0.4" } }, + "node_modules/internmap": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/internmap/-/internmap-2.0.3.tgz", + "integrity": 
"sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, "node_modules/iobuffer": { "version": "5.4.0", "resolved": "https://registry.npmjs.org/iobuffer/-/iobuffer-5.4.0.tgz", @@ -5483,7 +5757,6 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", - "dev": true, "license": "MIT" }, "node_modules/js-yaml": { @@ -5519,6 +5792,19 @@ "dev": true, "license": "MIT" }, + "node_modules/json-schema-to-ts": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/json-schema-to-ts/-/json-schema-to-ts-3.1.1.tgz", + "integrity": "sha512-+DWg8jCJG2TEnpy7kOm/7/AxaYoaRbjVB4LFZLySZlWn8exGs3A4OLJR966cVvU26N7X9TWxl+Jsw7dzAqKT6g==", + "license": "MIT", + "dependencies": { + "@babel/runtime": "^7.18.3", + "ts-algebra": "^2.0.0" + }, + "engines": { + "node": ">=16" + } + }, "node_modules/json-schema-traverse": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", @@ -5900,6 +6186,12 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/lodash": { + "version": "4.17.23", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.23.tgz", + "integrity": "sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==", + "license": "MIT" + }, "node_modules/lodash.defaults": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz", @@ -5929,7 +6221,6 @@ "version": "1.4.0", "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", - "dev": true, "license": "MIT", "dependencies": { "js-tokens": "^3.0.0 || ^4.0.0" @@ -6620,7 
+6911,6 @@ "version": "15.8.1", "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz", "integrity": "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==", - "dev": true, "license": "MIT", "dependencies": { "loose-envify": "^1.4.0", @@ -6758,6 +7048,16 @@ "node": ">=0.10.0" } }, + "node_modules/react-chartjs-2": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/react-chartjs-2/-/react-chartjs-2-5.3.1.tgz", + "integrity": "sha512-h5IPXKg9EXpjoBzUfyWJvllMjG2mQ4EiuHQFhms/AjUm0XSZHhyRy2xVmLXHKrtcdrPO4mnGqRtYoD0vp95A0A==", + "license": "MIT", + "peerDependencies": { + "chart.js": "^4.1.1", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" + } + }, "node_modules/react-dom": { "version": "19.2.0", "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-19.2.0.tgz", @@ -6774,7 +7074,75 @@ "version": "16.13.1", "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==", - "dev": true, + "license": "MIT" + }, + "node_modules/react-smooth": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/react-smooth/-/react-smooth-4.0.4.tgz", + "integrity": "sha512-gnGKTpYwqL0Iii09gHobNolvX4Kiq4PKx6eWBCYYix+8cdw+cGo3do906l1NBPKkSWx1DghC1dlWG9L2uGd61Q==", + "license": "MIT", + "dependencies": { + "fast-equals": "^5.0.1", + "prop-types": "^15.8.1", + "react-transition-group": "^4.4.5" + }, + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", + "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" + } + }, + "node_modules/react-transition-group": { + "version": "4.4.5", + "resolved": "https://registry.npmjs.org/react-transition-group/-/react-transition-group-4.4.5.tgz", + "integrity": "sha512-pZcd1MCJoiKiBR2NRxeCRg13uCXbydPnmB4EOeRrY7480qNWO8IIgQG6zlDkm6uRMsURXPuKq0GWtiM59a5Q6g==", + "license": "BSD-3-Clause", + "dependencies": { + 
"@babel/runtime": "^7.5.5", + "dom-helpers": "^5.0.1", + "loose-envify": "^1.4.0", + "prop-types": "^15.6.2" + }, + "peerDependencies": { + "react": ">=16.6.0", + "react-dom": ">=16.6.0" + } + }, + "node_modules/recharts": { + "version": "2.15.4", + "resolved": "https://registry.npmjs.org/recharts/-/recharts-2.15.4.tgz", + "integrity": "sha512-UT/q6fwS3c1dHbXv2uFgYJ9BMFHu3fwnd7AYZaEQhXuYQ4hgsxLvsUXzGdKeZrW5xopzDCvuA2N41WJ88I7zIw==", + "license": "MIT", + "dependencies": { + "clsx": "^2.0.0", + "eventemitter3": "^4.0.1", + "lodash": "^4.17.21", + "react-is": "^18.3.1", + "react-smooth": "^4.0.4", + "recharts-scale": "^0.4.4", + "tiny-invariant": "^1.3.1", + "victory-vendor": "^36.6.8" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "react": "^16.0.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", + "react-dom": "^16.0.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" + } + }, + "node_modules/recharts-scale": { + "version": "0.4.5", + "resolved": "https://registry.npmjs.org/recharts-scale/-/recharts-scale-0.4.5.tgz", + "integrity": "sha512-kivNFO+0OcUNu7jQquLXAxz1FIwZj8nrj+YkOKc5694NbjCvcT6aSZiIzNzd2Kul4o4rTto8QVR9lMNtxD4G1w==", + "license": "MIT", + "dependencies": { + "decimal.js-light": "^2.4.1" + } + }, + "node_modules/recharts/node_modules/react-is": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.3.1.tgz", + "integrity": "sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==", "license": "MIT" }, "node_modules/redis-errors": { @@ -7840,6 +8208,12 @@ "utrie": "^1.0.2" } }, + "node_modules/tiny-invariant": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.3.3.tgz", + "integrity": "sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg==", + "license": "MIT" + }, "node_modules/tinyglobby": { "version": "0.2.15", "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", @@ -7927,6 
+8301,12 @@ "tree-kill": "cli.js" } }, + "node_modules/ts-algebra": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ts-algebra/-/ts-algebra-2.0.0.tgz", + "integrity": "sha512-FPAhNPFMrkwz76P7cdjdmiShwMynZYN6SgOujD1urY4oNm80Ou9oMdmbR45LotcKOXoy7wSmHkRFE6Mxbrhefw==", + "license": "MIT" + }, "node_modules/ts-api-utils": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.1.0.tgz", @@ -8286,6 +8666,28 @@ "node": ">= 0.8" } }, + "node_modules/victory-vendor": { + "version": "36.9.2", + "resolved": "https://registry.npmjs.org/victory-vendor/-/victory-vendor-36.9.2.tgz", + "integrity": "sha512-PnpQQMuxlwYdocC8fIJqVXvkeViHYzotI+NJrCuav0ZYFoq912ZHBk3mCeuj+5/VpodOjPe1z0Fk2ihgzlXqjQ==", + "license": "MIT AND ISC", + "dependencies": { + "@types/d3-array": "^3.0.3", + "@types/d3-ease": "^3.0.0", + "@types/d3-interpolate": "^3.0.1", + "@types/d3-scale": "^4.0.2", + "@types/d3-shape": "^3.1.0", + "@types/d3-time": "^3.0.0", + "@types/d3-timer": "^3.0.0", + "d3-array": "^3.1.6", + "d3-ease": "^3.0.1", + "d3-interpolate": "^3.0.1", + "d3-scale": "^4.0.2", + "d3-shape": "^3.1.0", + "d3-time": "^3.0.0", + "d3-timer": "^3.0.1" + } + }, "node_modules/webidl-conversions": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", diff --git a/package.json b/package.json index 658c4d7..9a59b79 100644 --- a/package.json +++ b/package.json @@ -12,9 +12,11 @@ "lint": "eslint" }, "dependencies": { + "@anthropic-ai/sdk": "^0.71.0", "@deepgram/sdk": "^4.11.3", "@ricky0123/vad-web": "^0.0.30", "@supabase/supabase-js": "^2.47.0", + "chart.js": "^4.5.1", "express-rate-limit": "^8.2.1", "framer-motion": "^12.23.25", "ioredis": "^5.8.2", @@ -22,7 +24,9 @@ "next": "16.0.7", "onnxruntime-web": "^1.23.2", "react": "19.2.0", + "react-chartjs-2": "^5.3.1", "react-dom": "19.2.0", + "recharts": "^2.12.7", "resend": "^6.8.0", "socket.io": "^4.8.1", "socket.io-client": "^4.8.1", diff --git 
a/server.js b/server.js index 35fb1c3..ade3d34 100644 --- a/server.js +++ b/server.js @@ -6,6 +6,20 @@ const { Server } = require('socket.io'); const crypto = require('crypto'); const { z } = require('zod'); const Redis = require('ioredis'); +const fs = require('fs'); +const path = require('path'); + +// ============================================================================ +// AUDIO RECORDING: Directory for storing audio files +// ============================================================================ + +const AUDIO_DIR = path.join(process.cwd(), 'audio_files'); + +// Ensure audio directory exists +if (!fs.existsSync(AUDIO_DIR)) { + fs.mkdirSync(AUDIO_DIR, { recursive: true }); + console.log('Created audio_files directory'); +} // ============================================================================ // AUDIT LOGGING: Track all significant actions for B2B compliance @@ -1133,6 +1147,78 @@ io.on('connection', (socket) => { console.log('Conversation ended:', currentSessionCode); }); + // ============================================================================ + // AUDIO STREAMING: Receive and append audio chunks to file + // ============================================================================ + + // Receive audio chunk and append to session's audio file + socket.on('audio:chunk', async (data) => { + if (!currentSessionCode || !currentParticipantId) { + console.warn('audio:chunk received but no session/participant'); + return; + } + + try { + const { audioData, filename, mimeType } = data; + + if (!audioData) { + console.warn('audio:chunk received with no audioData'); + return; + } + + // Create session-specific directory + const sessionDir = path.join(AUDIO_DIR, currentSessionCode); + if (!fs.existsSync(sessionDir)) { + fs.mkdirSync(sessionDir, { recursive: true }); + } + + // Use provided filename or default to session code + const audioFilename = filename || `${currentSessionCode}.webm`; + const audioPath = path.join(sessionDir, 
audioFilename); + + // Convert base64 to buffer and append to file + const buffer = Buffer.from(audioData, 'base64'); + fs.appendFileSync(audioPath, buffer); + + console.log(`Audio chunk appended: ${audioPath} (+${buffer.length} bytes)`); + + // Emit confirmation back to client + socket.emit('audio:chunk:received', { + filename: audioFilename, + bytesWritten: buffer.length, + totalSize: fs.statSync(audioPath).size, + }); + } catch (err) { + console.error('Error saving audio chunk:', err); + socket.emit('audio:chunk:error', { error: err.message }); + } + }); + + // Finalize audio recording (optional - can be used to process the final file) + socket.on('audio:finalize', async (data) => { + if (!currentSessionCode) return; + + try { + const { filename } = data || {}; + const audioFilename = filename || `${currentSessionCode}.webm`; + const sessionDir = path.join(AUDIO_DIR, currentSessionCode); + const audioPath = path.join(sessionDir, audioFilename); + + if (fs.existsSync(audioPath)) { + const stats = fs.statSync(audioPath); + console.log(`Audio recording finalized: ${audioPath} (${stats.size} bytes)`); + + socket.emit('audio:finalized', { + filename: audioFilename, + path: path.join('audio_files', currentSessionCode, audioFilename), + size: stats.size, + }); + } + } catch (err) { + console.error('Error finalizing audio:', err); + } + }); + // Handle disconnection socket.on('disconnect', async () => { console.log('Client disconnected:', socket.id); diff --git a/src/components/admin/AdminDashboard.tsx b/src/components/admin/AdminDashboard.tsx index 9dfc996..7cbd5cd 100644 --- a/src/components/admin/AdminDashboard.tsx +++ b/src/components/admin/AdminDashboard.tsx @@ -3,6 +3,7 @@ import { useState } from 'react'; import { motion } from 'framer-motion'; import { SessionsTable } from './SessionsTable'; +import { ReportsPage } from '@/components/reports'; import { dbtSkillInfo, skillBasedTemplates } from '@/lib/dbtSkills'; import type { UserProfile, DBTSkill, 
SkillBasedTemplate } from '@/types'; @@ -124,7 +125,7 @@ const mockTeamMembers: TeamMember[] = [ }, ]; -type TabId = 'overview' | 'sessions' | 'conversations' | 'team' | 'settings' | 'practice' | 'learn'; +type TabId = 'overview' | 'sessions' | 'conversations' | 'team' | 'settings' | 'practice' | 'learn' | 'reports'; export function AdminDashboard({ userProfile, onStartConversation, onLogout }: AdminDashboardProps) { const [activeTab, setActiveTab] = useState('overview'); @@ -191,6 +192,15 @@ export function AdminDashboard({ userProfile, onStartConversation, onLogout }: A ), }, + { + id: 'reports', + label: 'Reports', + icon: ( + + + + ), + }, { id: 'settings', label: 'Settings', @@ -910,6 +920,7 @@ export function AdminDashboard({ userProfile, onStartConversation, onLogout }: A {activeTab === 'sessions' && isTeamAccount && } {activeTab === 'conversations' && isTeamAccount && renderConversations()} {activeTab === 'team' && isTeamAccount && renderTeam()} + {activeTab === 'reports' && } {activeTab === 'settings' && renderSettings()} diff --git a/src/components/conversation/ActiveConversation.tsx b/src/components/conversation/ActiveConversation.tsx index 4ff33b8..75b8696 100644 --- a/src/components/conversation/ActiveConversation.tsx +++ b/src/components/conversation/ActiveConversation.tsx @@ -1,11 +1,12 @@ 'use client'; -import { useState, useEffect, useCallback } from 'react'; +import { useState, useEffect, useCallback, useRef } from 'react'; import { motion } from 'framer-motion'; import { useSessionStore } from '@/store/session'; import { useVolumeMonitor } from '@/hooks/useVolumeMonitor'; import { useSessionAnalytics } from '@/hooks/useSessionAnalytics'; import { useAI } from '@/hooks/useAI'; +import { getSocket } from '@/lib/socket'; import { Timer } from '@/components/ui/Timer'; import { VolumeIndicator } from '@/components/ui/VolumeIndicator'; import { ParticipantCard } from '@/components/ui/ParticipantCard'; @@ -38,6 +39,7 @@ export function 
ActiveConversation({ onEndConversation, }: ActiveConversationProps) { const { + sessionCode, participants, currentSpeakerId, turnTimeSeconds, @@ -77,8 +79,241 @@ export function ActiveConversation({ const { detectTriggers, generateReflectionPrompt } = useAI(); const [lastProcessedTranscriptLength, setLastProcessedTranscriptLength] = useState(0); - // Initialize volume monitoring - const { volumeLevel, isListening, startListening, error: micError } = useVolumeMonitor({ + // Track recorded audio chunks count and bytes sent + const [chunksRecorded, setChunksRecorded] = useState(0); + const [bytesSent, setBytesSent] = useState(0); + const [streamingStatus, setStreamingStatus] = useState<'idle' | 'streaming' | 'error'>('idle'); + + // Track if we've set up socket listeners + const socketListenersSetup = useRef(false); + + // ============================================================================ + // BROADCAST CHANNEL: Turn-based recording leadership + // The current speaker's window becomes the recording leader + // ============================================================================ + + // Unique ID for this window/tab - includes the user ID for turn-based coordination + const windowIdRef = useRef(`window_${currentUserId}_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`); + const broadcastChannelRef = useRef(null); + const [isRecordingLeader, setIsRecordingLeader] = useState(false); + + // Track if it's this user's turn to speak + const isMyTurn = currentSpeakerId === currentUserId; + + // Track the previous speaker to detect turn changes + const prevSpeakerIdRef = useRef(null); + + // Initialize broadcast channel for cross-tab coordination + useEffect(() => { + if (typeof window === 'undefined' || !sessionCode) return; + + const channelName = `mediator_audio_${sessionCode}`; + const channel = new BroadcastChannel(channelName); + broadcastChannelRef.current = channel; + + const windowId = windowIdRef.current; + + // Handle messages from other 
tabs + channel.onmessage = (event) => { + const { type, speakerId, senderId } = event.data; + + switch (type) { + case 'TURN_STARTED': + // Another user's turn started + if (speakerId !== currentUserId) { + // Not our turn, release leadership if we had it + if (isRecordingLeader) { + setIsRecordingLeader(false); + console.log(`Audio recording: Turn changed to ${speakerId}, releasing leadership`); + } + } + break; + + case 'LEADER_CLAIMED': + // Another tab for the same user claimed leadership + if (speakerId === currentUserId && senderId !== windowId) { + // Another tab for our user claimed, we defer + setIsRecordingLeader(false); + console.log(`Audio recording: Another tab for user ${currentUserId} is recording`); + } + break; + + case 'LEADER_RELEASED': + // A tab released leadership - only relevant if it's our turn + if (isMyTurn && !isRecordingLeader) { + // Claim leadership since it's our turn + claimLeadership(); + } + break; + } + }; + + // Function to claim leadership + const claimLeadership = () => { + channel.postMessage({ + type: 'LEADER_CLAIMED', + speakerId: currentUserId, + senderId: windowId, + timestamp: Date.now(), + }); + setIsRecordingLeader(true); + console.log(`Audio recording: This tab is now recording for user ${currentUserId}`); + }; + + // Clean up on unmount + return () => { + if (isRecordingLeader) { + channel.postMessage({ + type: 'LEADER_RELEASED', + speakerId: currentUserId, + senderId: windowId, + timestamp: Date.now(), + }); + } + channel.close(); + broadcastChannelRef.current = null; + }; + }, [sessionCode, currentUserId, isRecordingLeader, isMyTurn]); + + // Handle turn changes - claim or release leadership based on whose turn it is + useEffect(() => { + const channel = broadcastChannelRef.current; + if (!channel || phase !== 'active') return; + + const turnChanged = prevSpeakerIdRef.current !== currentSpeakerId; + prevSpeakerIdRef.current = currentSpeakerId; + + if (isMyTurn) { + // It's our turn - claim leadership if we don't 
have it + if (!isRecordingLeader) { + channel.postMessage({ + type: 'TURN_STARTED', + speakerId: currentUserId, + senderId: windowIdRef.current, + timestamp: Date.now(), + }); + + // Small delay to let other tabs release, then claim + setTimeout(() => { + channel.postMessage({ + type: 'LEADER_CLAIMED', + speakerId: currentUserId, + senderId: windowIdRef.current, + timestamp: Date.now(), + }); + setIsRecordingLeader(true); + console.log(`Audio recording: Turn started - this tab is now recording for ${currentUserId}`); + }, 50); + } + } else if (turnChanged && isRecordingLeader) { + // Turn changed and it's no longer our turn - release leadership + channel.postMessage({ + type: 'LEADER_RELEASED', + speakerId: currentUserId, + senderId: windowIdRef.current, + timestamp: Date.now(), + }); + setIsRecordingLeader(false); + console.log(`Audio recording: Turn ended - releasing leadership`); + } + }, [currentSpeakerId, currentUserId, isMyTurn, isRecordingLeader, phase]); + + // Release leadership when conversation ends + useEffect(() => { + if ((phase === 'ended' || phase === 'summary') && isRecordingLeader) { + const channel = broadcastChannelRef.current; + if (channel) { + channel.postMessage({ + type: 'LEADER_RELEASED', + speakerId: currentUserId, + senderId: windowIdRef.current, + timestamp: Date.now(), + }); + } + setIsRecordingLeader(false); + } + }, [phase, isRecordingLeader, currentUserId]); + + // Set up socket listeners for audio responses + useEffect(() => { + const socket = getSocket(); + if (!socket || socketListenersSetup.current) return; + + socket.on('audio:chunk:received', (data: { filename: string; bytesWritten: number; totalSize: number }) => { + setBytesSent(data.totalSize); + console.log(`Audio chunk confirmed: ${data.filename} (total: ${data.totalSize} bytes)`); + }); + + socket.on('audio:chunk:error', (data: { error: string }) => { + console.error('Audio streaming error:', data.error); + setStreamingStatus('error'); + }); + + 
socket.on('audio:finalized', (data: { filename: string; path: string; size: number }) => { + console.log(`Audio recording finalized: ${data.path} (${data.size} bytes)`); + }); + + socketListenersSetup.current = true; + + return () => { + socket.off('audio:chunk:received'); + socket.off('audio:chunk:error'); + socket.off('audio:finalized'); + socketListenersSetup.current = false; + }; + }, []); + + // Handle audio chunk - send to server via socket (only if this tab is the leader) + const handleAudioChunk = useCallback(async (chunk: Blob) => { + // Always count chunks for UI display + setChunksRecorded((prev) => prev + 1); + + // Only send to server if this tab is the recording leader + if (!isRecordingLeader) { + console.log('Audio chunk recorded but not sent (another tab is the leader)'); + return; + } + + const socket = getSocket(); + if (!socket?.connected) { + console.warn('Socket not connected, cannot send audio chunk'); + return; + } + + try { + // Convert blob to base64 for transmission (browser-compatible) + const arrayBuffer = await chunk.arrayBuffer(); + const uint8Array = new Uint8Array(arrayBuffer); + let binaryString = ''; + for (let i = 0; i < uint8Array.length; i++) { + binaryString += String.fromCharCode(uint8Array[i]); + } + const base64Data = btoa(binaryString); + + // Send audio chunk to server + socket.emit('audio:chunk', { + audioData: base64Data, + filename: `${sessionCode || 'recording'}.webm`, + mimeType: chunk.type, + }); + + setStreamingStatus('streaming'); + console.log(`Audio chunk sent: ${chunk.size} bytes`); + } catch (err) { + console.error('Error sending audio chunk:', err); + setStreamingStatus('error'); + } + }, [sessionCode, isRecordingLeader]); + + // Initialize volume monitoring with audio recording + const { + volumeLevel, + isListening, + isRecording, + startListening, + stopListening, + error: micError, + } = useVolumeMonitor({ onHighVolume: (level) => { // Auto-pause if volume stays high and flag the current round 
console.log('High volume detected:', level); @@ -89,6 +324,8 @@ export function ActiveConversation({ // Update store with current volume syncState({ volumeLevel: level }); }, + onAudioChunk: handleAudioChunk, + recordingIntervalMs: 1000, // Record in 1 second chunks }); // Start round tracking when round changes @@ -100,13 +337,34 @@ export function ActiveConversation({ } }, [roundNumber, phase, analyticsCurrentRound, currentRoundPrompt, startRound]); - // Start listening when conversation becomes active + // Start listening and recording when conversation becomes active useEffect(() => { if (phase === 'active' && !isListening) { startListening(); + setStreamingStatus('idle'); + setChunksRecorded(0); + setBytesSent(0); } }, [phase, isListening, startListening]); + // Finalize audio recording when conversation ends + useEffect(() => { + if (phase === 'ended' || phase === 'summary') { + if (isListening) { + stopListening(); + + // Tell server to finalize the recording + const socket = getSocket(); + if (socket?.connected && sessionCode) { + socket.emit('audio:finalize', { filename: `${sessionCode}.webm` }); + console.log(`Audio recording finalized for session: ${sessionCode}`); + } + + setStreamingStatus('idle'); + } + } + }, [phase, isListening, stopListening, sessionCode]); + const [showExtendOption, setShowExtendOption] = useState(false); const currentParticipant = participants.find((p) => p.id === currentUserId); const otherParticipant = participants.find((p) => p.id !== currentUserId); @@ -214,6 +472,36 @@ export function ActiveConversation({ currentUserId={currentUserId} />
+ {/* Recording & Streaming indicator - turn-based */} + {isRecording && ( +
+ + + {isMyTurn + ? (isRecordingLeader + ? (streamingStatus === 'streaming' ? 'Your turn · Streaming' : 'Your turn · Recording') + : 'Your turn') + : 'Listening'} + +
+ )} + {/* Chunks count and bytes - only show for leader */} + {chunksRecorded > 0 && isRecordingLeader && ( + + {chunksRecorded}s {bytesSent > 0 && `(${Math.round(bytesSent / 1024)}KB)`} + + )} {micError && ( Mic unavailable @@ -321,13 +609,27 @@ export function ActiveConversation({ {/* Footer - always visible exit option */}
- +
+ {/* Streaming status - show when it's your turn and streaming */} + {isRecordingLeader && chunksRecorded > 0 && bytesSent > 0 && ( +

+ Your audio streaming to server ({Math.round(bytesSent / 1024)}KB saved) +

+ )} + {/* Show message when listening (not your turn) */} + {isRecording && !isMyTurn && ( +

+ Listening while other participant speaks +

+ )} + +
{/* Skill Reference Card (floating) */} diff --git a/src/components/reports/AverageScoreCard.tsx b/src/components/reports/AverageScoreCard.tsx new file mode 100644 index 0000000..05784ca --- /dev/null +++ b/src/components/reports/AverageScoreCard.tsx @@ -0,0 +1,109 @@ +'use client'; + +import { motion } from 'framer-motion'; + +interface AverageScoreCardProps { + title: string; + score: number; + maxScore: number; + subScores?: { label: string; score: number }[]; +} + +export function AverageScoreCard({ + title, + score, + maxScore, + subScores, +}: AverageScoreCardProps) { + const percentage = (score / maxScore) * 100; + + const getScoreColor = (percent: number) => { + if (percent >= 75) return 'var(--color-safe-green)'; + if (percent >= 50) return 'var(--color-safe-amber)'; + return 'var(--color-safe-rose)'; + }; + + return ( + +

+ {title} +

+ +
+
+ + + + +
+
+ + {score.toFixed(1)} + + + / {maxScore} + +
+
+
+ +
+
+ Average Score +
+
+ {percentage.toFixed(0)}% +
+
+
+ + {subScores && subScores.length > 0 && ( +
+ {subScores.map((sub) => ( +
+ + {sub.label.replace(/_/g, ' ')} + + + {sub.score.toFixed(1)} + +
+ ))} +
+ )} +
+ ); +} diff --git a/src/components/reports/CategoryPieChart.tsx b/src/components/reports/CategoryPieChart.tsx new file mode 100644 index 0000000..e2b4d7a --- /dev/null +++ b/src/components/reports/CategoryPieChart.tsx @@ -0,0 +1,140 @@ +'use client'; + +import { PieChart, Pie, Cell, ResponsiveContainer, Legend, Tooltip } from 'recharts'; +import { motion } from 'framer-motion'; +import { PieChartDataPoint } from '@/lib/reportsApi'; + +interface CategoryPieChartProps { + title: string; + data: PieChartDataPoint[]; + colors?: string[]; +} + +const DEFAULT_COLORS = [ + 'var(--color-calm-500)', + 'var(--color-calm-400)', + 'var(--color-calm-600)', + 'var(--color-safe-green)', + 'var(--color-safe-amber)', + 'var(--color-safe-rose)', +]; + +export function CategoryPieChart({ + title, + data, + colors = DEFAULT_COLORS, +}: CategoryPieChartProps) { + if (data.length === 0) { + return ( + +

+ {title} +

+
+

No data available

+
+
+ ); + } + + const chartData = data.map((item) => ({ + name: item.label.charAt(0).toUpperCase() + item.label.slice(1), + value: item.count, + percentage: item.percentage, + })); + + const CustomTooltip = ({ active, payload }: any) => { + if (active && payload && payload.length) { + const data = payload[0].payload; + return ( +
+

+ {data.name} +

+

+ Count: {data.value} +

+

+ Percentage: {data.percentage}% +

+
+ ); + } + return null; + }; + + const CustomLabel = ({ cx, cy, midAngle, innerRadius, outerRadius, percent }: any) => { + const RADIAN = Math.PI / 180; + const radius = innerRadius + (outerRadius - innerRadius) * 0.5; + const x = cx + radius * Math.cos(-midAngle * RADIAN); + const y = cy + radius * Math.sin(-midAngle * RADIAN); + + return ( + cx ? 'start' : 'end'} + dominantBaseline="central" + className="text-sm font-medium" + > + {`${(percent * 100).toFixed(0)}%`} + + ); + }; + + return ( + +

+ {title} +

+ +
+ + + + {chartData.map((entry, index) => ( + + ))} + + } /> + ( + {value} + )} + /> + + +
+
+ ); +} diff --git a/src/components/reports/ReportsPage.tsx b/src/components/reports/ReportsPage.tsx new file mode 100644 index 0000000..e6a71d5 --- /dev/null +++ b/src/components/reports/ReportsPage.tsx @@ -0,0 +1,296 @@ +'use client'; + +import { useState, useEffect, useMemo } from 'react'; +import { motion } from 'framer-motion'; +import { + fetchSpeakers, + fetchTranscripts, + fetchCategories, + fetchMetrics, + fetchPieChartData, + Speaker, + Transcript, + MetricsResponse, + PieChartDataPoint, +} from '@/lib/reportsApi'; +import { ReportsSidebar } from './ReportsSidebar'; +import { AverageScoreCard } from './AverageScoreCard'; +import { CategoryPieChart } from './CategoryPieChart'; + +export function ReportsPage() { + const [speakers, setSpeakers] = useState([]); + const [transcripts, setTranscripts] = useState([]); + const [categories, setCategories] = useState([]); + const [selectedCategory, setSelectedCategory] = useState('dear_man'); + const [selectedSpeakerId, setSelectedSpeakerId] = useState(); + const [selectedTranscriptId, setSelectedTranscriptId] = useState(); + const [documentNameFilter, setDocumentNameFilter] = useState(''); + + const [metrics, setMetrics] = useState(null); + const [pieChartData, setPieChartData] = useState([]); + + const [loading, setLoading] = useState(true); + const [error, setError] = useState(null); + + // Filter transcripts by document name + const filteredTranscripts = useMemo(() => { + if (!documentNameFilter) return transcripts; + return transcripts.filter((t) => + t.name.toLowerCase().includes(documentNameFilter.toLowerCase()) + ); + }, [transcripts, documentNameFilter]); + + // Update selected transcript ID if filtered out + useEffect(() => { + if (selectedTranscriptId) { + const transcriptExists = filteredTranscripts.some( + (t) => t.id === selectedTranscriptId + ); + if (!transcriptExists) { + setSelectedTranscriptId(undefined); + } + } + }, [filteredTranscripts, selectedTranscriptId]); + + // Load initial data + 
useEffect(() => { + const loadInitialData = async () => { + try { + setLoading(true); + setError(null); + + const [speakersData, transcriptsData, categoriesData] = await Promise.all([ + fetchSpeakers(), + fetchTranscripts(), + fetchCategories(), + ]); + + setSpeakers(speakersData); + setTranscripts(transcriptsData); + setCategories(categoriesData); + } catch (err) { + setError(err instanceof Error ? err.message : 'Failed to load data'); + console.error('Error loading initial data:', err); + } finally { + setLoading(false); + } + }; + + loadInitialData(); + }, []); + + // Load metrics and pie chart data when filters change + useEffect(() => { + const loadData = async () => { + try { + setLoading(true); + setError(null); + + // Fetch metrics for all categories to show all score cards + const [sentimentMetrics, dearManMetrics, fastMetrics, pieData] = await Promise.all([ + fetchMetrics('sentiment', selectedSpeakerId, selectedTranscriptId), + fetchMetrics('dear_man', selectedSpeakerId, selectedTranscriptId), + fetchMetrics('fast', selectedSpeakerId, selectedTranscriptId), + fetchPieChartData(selectedCategory, undefined, selectedSpeakerId, selectedTranscriptId), + ]); + + // Combine all metrics + setMetrics({ + averages: { + sentiment: sentimentMetrics.averages.sentiment || {}, + dear_man: dearManMetrics.averages.dear_man || {}, + fast: fastMetrics.averages.fast || {}, + }, + label_counts: { + sentiment: sentimentMetrics.label_counts.sentiment || {}, + dear_man: dearManMetrics.label_counts.dear_man || {}, + fast: fastMetrics.label_counts.fast || {}, + }, + }); + setPieChartData(pieData); + } catch (err) { + setError(err instanceof Error ? 
err.message : 'Failed to load metrics'); + console.error('Error loading metrics:', err); + } finally { + setLoading(false); + } + }; + + loadData(); + }, [selectedCategory, selectedSpeakerId, selectedTranscriptId]); + + // Get subcategory breakdown for pie charts + const getSubcategoryCharts = () => { + if (!metrics || selectedCategory === 'sentiment') { + return []; + } + + const categoryData = metrics.label_counts[selectedCategory] || {}; + const subcategories = + selectedCategory === 'dear_man' + ? ['describe', 'express', 'assert', 'reinforce', 'mindful', 'appear_confident', 'negotiate'] + : ['fair', 'apologies', 'stick_to_values', 'truthful']; + + return subcategories.map((subcat) => { + const counts = categoryData[subcat] || {}; + const total = Object.values(counts).reduce((sum: number, val: any) => sum + val, 0); + + const data: PieChartDataPoint[] = Object.entries(counts).map(([label, count]) => ({ + label, + count: count as number, + percentage: total > 0 ? ((count as number) / total) * 100 : 0, + })); + + return { + subcategory: subcat, + title: subcat.replace(/_/g, ' ').replace(/\b\w/g, (l) => l.toUpperCase()), + data, + }; + }); + }; + + if (loading && !metrics) { + return ( +
+
+
+
+
+

Loading reports...

+
+
+ ); + } + + if (error && !metrics) { + return ( +
+
+
+ + + +
+

+ Error Loading Reports +

+

+ {error} +

+ +
+
+ ); + } + + const categoryAverages = metrics?.averages[selectedCategory] || {}; + const overallScore = categoryAverages.overall || 0; + const maxScore = selectedCategory === 'dear_man' ? 7 : selectedCategory === 'fast' ? 4 : 1; + + const subScores = Object.entries(categoryAverages) + .filter(([key]) => key !== 'overall') + .map(([label, score]) => ({ label, score: score as number })); + + const subcategoryCharts = getSubcategoryCharts(); + + return ( +
+ + +
+
+

+ Reports Dashboard +

+ + {error && ( +
+ {error} +
+ )} + + {/* Average Score Cards */} +
+ + + +
+ + {/* Main Category Pie Chart */} + {selectedCategory === 'sentiment' && ( +
+ +
+ )} + + {/* Subcategory Pie Charts */} + {subcategoryCharts.length > 0 && ( +
+

+ {selectedCategory === 'dear_man' ? 'DEAR MAN' : 'FAST'} Breakdown +

+
+ {subcategoryCharts.map((chart) => ( +
+ +
+ ))} +
+
+ )} +
+
+
+ ); +} diff --git a/src/components/reports/ReportsSidebar.tsx b/src/components/reports/ReportsSidebar.tsx new file mode 100644 index 0000000..16604aa --- /dev/null +++ b/src/components/reports/ReportsSidebar.tsx @@ -0,0 +1,143 @@ +'use client'; + +import { Speaker, Transcript } from '@/lib/reportsApi'; + +interface ReportsSidebarProps { + speakers: Speaker[]; + transcripts: Transcript[]; + selectedSpeakerId?: number; + selectedTranscriptId?: number; + selectedCategory: string; + documentNameFilter: string; + onSpeakerChange: (speakerId?: number) => void; + onTranscriptChange: (transcriptId?: number) => void; + onCategoryChange: (category: string) => void; + onDocumentNameChange: (name: string) => void; +} + +export function ReportsSidebar({ + speakers, + transcripts, + selectedSpeakerId, + selectedTranscriptId, + selectedCategory, + documentNameFilter, + onSpeakerChange, + onTranscriptChange, + onCategoryChange, + onDocumentNameChange, +}: ReportsSidebarProps) { + return ( +
+

+ Filters +

+ + {/* Category Filter */} +
+ + +
+ + {/* Speaker Filter */} +
+ + +
+ + {/* Transcript Filter */} +
+ + +
+ + {/* Document Name Filter */} +
+ + onDocumentNameChange(e.target.value)} + placeholder="Filter by name..." + className="w-full p-2 rounded-lg border" + style={{ + backgroundColor: 'var(--background)', + borderColor: 'var(--border-soft)', + color: 'var(--foreground)', + }} + /> +
+
+ ); +} diff --git a/src/components/reports/index.ts b/src/components/reports/index.ts new file mode 100644 index 0000000..3e7dba8 --- /dev/null +++ b/src/components/reports/index.ts @@ -0,0 +1,4 @@ +export { ReportsPage } from './ReportsPage'; +export { ReportsSidebar } from './ReportsSidebar'; +export { AverageScoreCard } from './AverageScoreCard'; +export { CategoryPieChart } from './CategoryPieChart'; diff --git a/src/hooks/useVolumeMonitor.ts b/src/hooks/useVolumeMonitor.ts index 95ad73b..33fd122 100644 --- a/src/hooks/useVolumeMonitor.ts +++ b/src/hooks/useVolumeMonitor.ts @@ -5,26 +5,34 @@ import { useEffect, useRef, useState, useCallback } from 'react'; interface UseVolumeMonitorOptions { onHighVolume?: (level: number) => void; onVolumeChange?: (level: number) => void; + onAudioChunk?: (chunk: Blob) => void; highVolumeThreshold?: number; sustainedHighVolumeDuration?: number; // ms + recordingIntervalMs?: number; // interval for recording chunks (default 1000ms) } interface UseVolumeMonitorReturn { volumeLevel: number; isListening: boolean; + isRecording: boolean; startListening: () => Promise; stopListening: () => void; + getRecordedAudio: () => Blob | null; + downloadRecording: (filename?: string) => void; error: string | null; } export function useVolumeMonitor({ onHighVolume, onVolumeChange, + onAudioChunk, highVolumeThreshold = 75, sustainedHighVolumeDuration = 10000, + recordingIntervalMs = 1000, }: UseVolumeMonitorOptions = {}): UseVolumeMonitorReturn { const [volumeLevel, setVolumeLevel] = useState(0); const [isListening, setIsListening] = useState(false); + const [isRecording, setIsRecording] = useState(false); const [error, setError] = useState(null); const audioContextRef = useRef(null); @@ -34,6 +42,10 @@ export function useVolumeMonitor({ const highVolumeStartRef = useRef(null); const hasTriggeredHighVolumeRef = useRef(false); + // MediaRecorder refs for audio recording + const mediaRecorderRef = useRef(null); + const recordedChunksRef = 
useRef([]); + const processAudio = useCallback(() => { if (!analyserRef.current) return; @@ -94,18 +106,70 @@ export function useVolumeMonitor({ audioContextRef.current = audioContext; analyserRef.current = analyser; + // Initialize MediaRecorder for audio recording + // Determine supported MIME type for WebM + const mimeType = MediaRecorder.isTypeSupported('audio/webm;codecs=opus') + ? 'audio/webm;codecs=opus' + : MediaRecorder.isTypeSupported('audio/webm') + ? 'audio/webm' + : 'audio/ogg'; + + const recorder = new MediaRecorder(stream, { mimeType }); + + // Clear any previous recorded chunks + recordedChunksRef.current = []; + + recorder.ondataavailable = (event) => { + if (event.data.size > 0) { + // Append chunk to our accumulated recording + recordedChunksRef.current.push(event.data); + console.log(`Audio chunk recorded: ${event.data.size} bytes, total chunks: ${recordedChunksRef.current.length}`); + + // Call the onAudioChunk callback if provided + onAudioChunk?.(event.data); + } + }; + + recorder.onstart = () => { + console.log('MediaRecorder started - recording audio'); + setIsRecording(true); + }; + + recorder.onstop = () => { + console.log('MediaRecorder stopped'); + setIsRecording(false); + }; + + recorder.onerror = (event) => { + console.error('MediaRecorder error:', event); + setError('Recording error occurred'); + }; + + mediaRecorderRef.current = recorder; + + // Start recording with 1 second intervals (timeslice in ms) + recorder.start(recordingIntervalMs); + setIsListening(true); - // Start processing + // Start processing volume processAudio(); } catch (err) { const message = err instanceof Error ? 
err.message : 'Failed to access microphone'; setError(message); console.error('Volume monitor error:', err); } - }, [processAudio]); + }, [processAudio, recordingIntervalMs, onAudioChunk]); const stopListening = useCallback(() => { + console.log('Stopping audio monitoring and recording'); + + // Stop MediaRecorder first + if (mediaRecorderRef.current && mediaRecorderRef.current.state !== 'inactive') { + mediaRecorderRef.current.stop(); + mediaRecorderRef.current = null; + } + if (animationFrameRef.current) { cancelAnimationFrame(animationFrameRef.current); animationFrameRef.current = null; @@ -123,11 +187,44 @@ export function useVolumeMonitor({ analyserRef.current = null; setIsListening(false); + setIsRecording(false); setVolumeLevel(0); highVolumeStartRef.current = null; hasTriggeredHighVolumeRef.current = false; }, []); + + // Get the complete recorded audio as a single Blob + const getRecordedAudio = useCallback((): Blob | null => { + if (recordedChunksRef.current.length === 0) { + return null; + } + + // Combine all chunks into a single WebM blob + const mimeType = recordedChunksRef.current[0]?.type || 'audio/webm'; + return new Blob(recordedChunksRef.current, { type: mimeType }); + }, []); + + // Download the recorded audio as a WebM file + const downloadRecording = useCallback((filename: string = 'demo') => { + const audioBlob = getRecordedAudio(); + if (!audioBlob) { + console.warn('No audio recorded yet'); + return; + } + + // Create download link + const url = URL.createObjectURL(audioBlob); + const a = document.createElement('a'); + a.href = url; + a.download = `${filename}.webm`; + document.body.appendChild(a); + a.click(); + document.body.removeChild(a); + URL.revokeObjectURL(url); + + console.log(`Downloaded recording: ${filename}.webm (${audioBlob.size} bytes)`); + }, [getRecordedAudio]); + // Cleanup on unmount useEffect(() => { return () => { @@ -138,8 +235,11 @@ export function useVolumeMonitor({ return { volumeLevel, isListening, + isRecording, 
startListening, stopListening, + getRecordedAudio, + downloadRecording, error, }; } diff --git a/src/lib/reportsApi.ts b/src/lib/reportsApi.ts new file mode 100644 index 0000000..e6d304f --- /dev/null +++ b/src/lib/reportsApi.ts @@ -0,0 +1,102 @@ +const API_BASE_URL = 'http://localhost:8000/api/reports'; + +export interface Speaker { + id: number; + name: string; +} + +export interface Transcript { + id: number; + name: string; + date: string; +} + +export interface PieChartDataPoint { + label: string; + count: number; + percentage: number; +} + +export interface MetricsResponse { + averages: { + [category: string]: { + [subCategory: string]: number; + }; + }; + label_counts: { + [category: string]: { + [subCategory: string]: { + [label: string]: number; + }; + }; + }; +} + +export async function fetchSpeakers(): Promise<Speaker[]> { + const response = await fetch(`${API_BASE_URL}/speakers`); + if (!response.ok) { + throw new Error('Failed to fetch speakers'); + } + return response.json(); +} + +export async function fetchTranscripts(): Promise<Transcript[]> { + const response = await fetch(`${API_BASE_URL}/transcripts`); + if (!response.ok) { + throw new Error('Failed to fetch transcripts'); + } + return response.json(); +} + +export async function fetchCategories(): Promise<string[]> { + const response = await fetch(`${API_BASE_URL}/categories`); + if (!response.ok) { + throw new Error('Failed to fetch categories'); + } + return response.json(); +} + +export async function fetchMetrics( + category: string, + speakerId?: number, + transcriptId?: number +): Promise<MetricsResponse> { + const params = new URLSearchParams({ category }); + if (speakerId !== undefined) { + params.append('speaker_id', speakerId.toString()); + } + if (transcriptId !== undefined) { + params.append('meeting_transcript_id', transcriptId.toString()); + } + + const response = await fetch(`${API_BASE_URL}/metrics?${params.toString()}`); + if (!response.ok) { + throw new Error('Failed to fetch metrics'); + } + return response.json(); +} + 
+export async function fetchPieChartData( + category: string, + subCategory?: string, + speakerId?: number, + transcriptId?: number +): Promise<PieChartDataPoint[]> { + const params = new URLSearchParams({ category }); + if (subCategory) { + params.append('sub_category', subCategory); + } + if (speakerId !== undefined) { + params.append('speaker_id', speakerId.toString()); + } + if (transcriptId !== undefined) { + params.append('meeting_transcript_id', transcriptId.toString()); + } + + const response = await fetch(`${API_BASE_URL}/pie-chart-data?${params.toString()}`); + if (!response.ok) { + throw new Error('Failed to fetch pie chart data'); + } + const data = await response.json(); + return data.data || []; +} diff --git a/src/lib/socket.ts b/src/lib/socket.ts index a0b0378..fbca118 100644 --- a/src/lib/socket.ts +++ b/src/lib/socket.ts @@ -22,6 +22,9 @@ export interface SocketEvents { 'breathing:complete': () => void; 'reflection:dismiss': () => void; 'conversation:end': () => void; + // Audio streaming + 'audio:chunk': (data: { audioData: string; filename?: string; mimeType?: string }) => void; + 'audio:finalize': (data?: { filename?: string }) => void; // Server -> Client 'session:created': (data: { sessionId: string; sessionCode: string; participantId: string }) => void; @@ -34,6 +37,10 @@ 'participant:connected': (data: { participantId: string; name: string }) => void; 'participant:disconnected': (participantId: string) => void; 'participant:reconnected': (participantId: string) => void; + // Audio streaming responses + 'audio:chunk:received': (data: { filename: string; bytesWritten: number; totalSize: number }) => void; + 'audio:chunk:error': (data: { error: string }) => void; + 'audio:finalized': (data: { filename: string; path: string; size: number }) => void; } export function getSocket(): Socket | null {