Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 7 additions & 5 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,13 +4,15 @@ Dash is a **self-learning data agent** that grounds its answers in **6 layers of

Inspired by [OpenAI's in-house data agent](https://openai.com/index/inside-our-in-house-data-agent/).

## Quick Start
## Get Started

```sh
# Clone this repo
# Clone the repo
git clone https://github.com/agno-agi/dash.git && cd dash
# Add OPENAI_API_KEY by adding to .env file or export OPENAI_API_KEY=sk-***

# Add OPENAI_API_KEY
cp example.env .env
# Edit .env and add your key

# Start the application
docker compose up -d --build
Expand All @@ -20,7 +22,7 @@ docker exec -it dash-api python -m dash.scripts.load_data
docker exec -it dash-api python -m dash.scripts.load_knowledge
```

Confirm dash is running by navigation to [http://localhost:8000/docs](http://localhost:8000/docs).
Confirm dash is running at [http://localhost:8000/docs](http://localhost:8000/docs).

## Connect to the Web UI

Expand Down Expand Up @@ -238,7 +240,7 @@ python -m dash # CLI mode
| `EXA_API_KEY` | No | Web search for external knowledge |
| `DB_*` | No | Database config (defaults to localhost) |

## Further Reading
## Learn More

- [OpenAI's In-House Data Agent](https://openai.com/index/inside-our-in-house-data-agent/) — the inspiration
- [Self-Improving SQL Agent](https://www.ashpreetbedi.com/articles/sql-agent) — deep dive on an earlier architecture
Expand Down
4 changes: 2 additions & 2 deletions app/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,9 +16,9 @@
from dash.agents import dash, dash_knowledge, reasoning_dash
from db import get_postgres_db

# ============================================================================
# ---------------------------------------------------------------------------
# Create AgentOS
# ============================================================================
# ---------------------------------------------------------------------------
agent_os = AgentOS(
name="Dash",
tracing=True,
Expand Down
54 changes: 17 additions & 37 deletions dash/agents.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,59 +8,36 @@
from os import getenv

from agno.agent import Agent
from agno.knowledge import Knowledge
from agno.knowledge.embedder.openai import OpenAIEmbedder
from agno.learn import (
LearnedKnowledgeConfig,
LearningMachine,
LearningMode,
UserMemoryConfig,
UserProfileConfig,
)
from agno.models.openai import OpenAIResponses
from agno.tools.mcp import MCPTools
from agno.tools.reasoning import ReasoningTools
from agno.tools.sql import SQLTools
from agno.vectordb.pgvector import PgVector, SearchType

from dash.context.business_rules import BUSINESS_CONTEXT
from dash.context.semantic_model import SEMANTIC_MODEL_STR
from dash.tools import create_introspect_schema_tool, create_save_validated_query_tool
from db import db_url, get_postgres_db
from db import create_knowledge, db_url, get_postgres_db

# ============================================================================
# ---------------------------------------------------------------------------
# Database & Knowledge
# ============================================================================
# ---------------------------------------------------------------------------

agent_db = get_postgres_db()

# KNOWLEDGE: Static, curated (table schemas, validated queries, business rules)
dash_knowledge = Knowledge(
name="Dash Knowledge",
vector_db=PgVector(
db_url=db_url,
table_name="dash_knowledge",
search_type=SearchType.hybrid,
embedder=OpenAIEmbedder(id="text-embedding-3-small"),
),
contents_db=get_postgres_db(contents_table="dash_knowledge_contents"),
)
dash_knowledge = create_knowledge("Dash Knowledge", "dash_knowledge")

# LEARNINGS: Dynamic, discovered (error patterns, gotchas, user corrections)
dash_learnings = Knowledge(
name="Dash Learnings",
vector_db=PgVector(
db_url=db_url,
table_name="dash_learnings",
search_type=SearchType.hybrid,
embedder=OpenAIEmbedder(id="text-embedding-3-small"),
),
contents_db=get_postgres_db(contents_table="dash_learnings_contents"),
)
dash_learnings = create_knowledge("Dash Learnings", "dash_learnings")

# ============================================================================
# ---------------------------------------------------------------------------
# Tools
# ============================================================================
# ---------------------------------------------------------------------------

save_validated_query = create_save_validated_query_tool(dash_knowledge)
introspect_schema = create_introspect_schema_tool(db_url)
Expand All @@ -72,9 +49,9 @@
MCPTools(url=f"https://mcp.exa.ai/mcp?exaApiKey={getenv('EXA_API_KEY', '')}&tools=web_search_exa"),
]

# ============================================================================
# ---------------------------------------------------------------------------
# Instructions
# ============================================================================
# ---------------------------------------------------------------------------

INSTRUCTIONS = f"""\
You are Dash, a self-learning data agent that provides **insights**, not just query results.
Expand Down Expand Up @@ -159,23 +136,23 @@
{BUSINESS_CONTEXT}\
"""

# ============================================================================
# ---------------------------------------------------------------------------
# Create Agent
# ============================================================================
# ---------------------------------------------------------------------------

dash = Agent(
id="dash",
name="Dash",
model=OpenAIResponses(id="gpt-5.2"),
db=agent_db,
instructions=INSTRUCTIONS,
# Knowledge (static)
knowledge=dash_knowledge,
search_knowledge=True,
# Learning (provides search_learnings, save_learning, user profile, user memory)
# Learning (provides search_learnings, save_learning)
enable_agentic_memory=True,
learning=LearningMachine(
knowledge=dash_learnings,
user_profile=UserProfileConfig(mode=LearningMode.AGENTIC),
user_memory=UserMemoryConfig(mode=LearningMode.AGENTIC),
learned_knowledge=LearnedKnowledgeConfig(mode=LearningMode.AGENTIC),
),
tools=base_tools,
Expand All @@ -195,5 +172,8 @@
}
)

# ---------------------------------------------------------------------------
# Run Agent
# ---------------------------------------------------------------------------
if __name__ == "__main__":
dash.print_response("Who won the most races in 2019?", stream=True)
3 changes: 2 additions & 1 deletion db/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,10 +5,11 @@
Database connection utilities.
"""

from db.session import get_postgres_db
from db.session import create_knowledge, get_postgres_db
from db.url import db_url

__all__ = [
"create_knowledge",
"db_url",
"get_postgres_db",
]
25 changes: 25 additions & 0 deletions db/session.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,9 @@
"""

from agno.db.postgres import PostgresDb
from agno.knowledge import Knowledge
from agno.knowledge.embedder.openai import OpenAIEmbedder
from agno.vectordb.pgvector import PgVector, SearchType

from db.url import db_url

Expand All @@ -24,3 +27,25 @@ def get_postgres_db(contents_table: str | None = None) -> PostgresDb:
if contents_table is not None:
return PostgresDb(id=DB_ID, db_url=db_url, knowledge_table=contents_table)
return PostgresDb(id=DB_ID, db_url=db_url)


def create_knowledge(name: str, table_name: str) -> Knowledge:
    """Build a Knowledge base backed by PgVector hybrid search.

    Args:
        name: Human-readable name for the knowledge base.
        table_name: Name of the PostgreSQL table used for vector storage.

    Returns:
        A fully configured Knowledge instance.
    """
    # Vector store: hybrid (dense + keyword) search over pgvector,
    # embedding content with OpenAI's text-embedding-3-small model.
    vector_store = PgVector(
        db_url=db_url,
        table_name=table_name,
        search_type=SearchType.hybrid,
        embedder=OpenAIEmbedder(id="text-embedding-3-small"),
    )
    # Raw contents live in a sibling "<table_name>_contents" table.
    contents_store = get_postgres_db(contents_table=f"{table_name}_contents")
    return Knowledge(name=name, vector_db=vector_store, contents_db=contents_store)
1 change: 0 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@ license-files = ["LICENSE"]
authors = [{ name = "Agno", email = "hello@agno.com" }]

dependencies = [
"agno-infra",
"agno",
"fastapi[standard]",
"httpx",
Expand Down
32 changes: 15 additions & 17 deletions requirements.txt
Original file line number Diff line number Diff line change
@@ -1,22 +1,21 @@
# This file was autogenerated by uv via the following command:
# ./scripts/generate_requirements.sh upgrade
agno==2.4.7
agno-infra==1.0.7
agno==2.5.2
annotated-doc==0.0.4
annotated-types==0.7.0
anyio==4.12.1
attrs==25.4.0
certifi==2026.1.4
cffi==2.0.0
click==8.3.1
cryptography==46.0.4
cryptography==46.0.5
distro==1.9.0
dnspython==2.8.0
docstring-parser==0.17.0
email-validator==2.3.0
fastapi==0.128.0
fastapi-cli==0.0.20
fastapi-cloud-cli==0.11.0
fastapi==0.129.0
fastapi-cli==0.0.23
fastapi-cloud-cli==0.12.0
fastar==0.8.0
gitdb==4.0.12
gitpython==3.1.46
Expand All @@ -31,17 +30,17 @@ hyperframe==6.1.0
idna==3.11
importlib-metadata==8.7.1
jinja2==3.1.6
jiter==0.12.0
jiter==0.13.0
jsonschema==4.26.0
jsonschema-specifications==2025.9.1
markdown-it-py==4.0.0
markupsafe==3.0.3
mcp==1.26.0
mdurl==0.1.2
numpy==2.4.2
openai==2.16.0
openai==2.21.0
openinference-instrumentation==0.1.44
openinference-instrumentation-agno==0.1.27
openinference-instrumentation-agno==0.1.28
openinference-semantic-conventions==0.1.26
opentelemetry-api==1.39.1
opentelemetry-instrumentation==0.60b1
Expand All @@ -56,7 +55,7 @@ pycparser==3.0
pydantic==2.12.5
pydantic-core==2.41.5
pydantic-extra-types==2.11.0
pydantic-settings==2.12.0
pydantic-settings==2.13.0
pygments==2.19.2
pyjwt==2.11.0
python-dateutil==2.9.0.post0
Expand All @@ -65,24 +64,23 @@ python-multipart==0.0.22
pyyaml==6.0.3
referencing==0.37.0
rich==14.3.2
rich-toolkit==0.18.1
rich-toolkit==0.19.4
rignore==0.7.6
rpds-py==0.30.0
sentry-sdk==2.51.0
sentry-sdk==2.53.0
shellingham==1.5.4
six==1.17.0
smmap==5.0.2
sniffio==1.3.1
sqlalchemy==2.0.46
sse-starlette==3.2.0
starlette==0.50.0
tomli==2.4.0
tqdm==4.67.2
typer==0.21.1
starlette==0.52.1
tqdm==4.67.3
typer==0.24.0
typing-extensions==4.15.0
typing-inspection==0.4.2
urllib3==2.6.3
uvicorn==0.40.0
uvicorn==0.41.0
uvloop==0.22.1
watchfiles==1.1.1
websockets==16.0
Expand Down
Loading