diff --git a/src/infrastructure/docker-compose.yml b/src/infrastructure/docker-compose.yml
index deec251..23f413e 100644
--- a/src/infrastructure/docker-compose.yml
+++ b/src/infrastructure/docker-compose.yml
@@ -16,7 +16,7 @@ services:
       - server
 
   server:
-    build: 
+    build:
      context: ../server
      dockerfile: ../infrastructure/Dockerfile.server
    ports:
diff --git a/src/server/controllers/example_controller.py b/src/server/controllers/example_controller.py
index fb880ca..4c687ad 100644
--- a/src/server/controllers/example_controller.py
+++ b/src/server/controllers/example_controller.py
@@ -1,6 +1,5 @@
-import os
 from schemas.schema import SampleRequest
-from services.service import sample_service,AppError
+from services.example_service import sample_service,AppError
 from fastapi import HTTPException
 
 
@@ -18,57 +17,3 @@ def sample_controller(request:SampleRequest)->type:
     )
 
 
-import hashlib
-from langchain_community.document_loaders import DirectoryLoader, JSONLoader
-import json
-from langchain_community.vectorstores.upstash import UpstashVectorStore
-import getpass
-from langchain_openai import OpenAIEmbeddings
-
-#input should be a folder with json or a singular json file
-def update_Database(path:str)->str: #return number of added docs
-
-    os.environ["UPSTASH_VECTOR_REST_URL"] = "https://loving-kingfish-56853-us1-vector.upstash.io"
-    if not os.environ.get("UPSTASH_VECTOR_REST_TOKEN"):
-        os.environ["UPSTASH_VECTOR_REST_TOKEN"] = getpass.getpass("Enter API key for Upstash: ")
-
-    if not os.environ.get("OPENAI_API_KEY"):
-        os.environ["OPENAI_API_KEY"] = getpass.getpass("Enter API key for OpenAI: ")
-    loader_kwargs = {
-        "jq_schema": ".[]", #iterate over question objects.
-        "text_content": False
-    }
-    loader = DirectoryLoader(
-        path=path,
-        glob="**/*.json", #ensures json
-        loader_cls=JSONLoader,
-        loader_kwargs=loader_kwargs
-    )
-    docs = loader.load()
-    sanitized_docs = []
-    doc_ids = [] # Standardized name
-
-    for doc in docs:
-        raw_data = json.loads(doc.page_content)
-        question_text = raw_data.get("question")
-        unique_id = hashlib.md5(question_text.encode('utf-8')).hexdigest()
-
-        doc.metadata = {
-            "choices": raw_data.get("choices"),
-            "category": raw_data.get("category"),
-            "answer": raw_data.get("answer"),
-            "difficulty": raw_data.get("difficulty"),
-        }
-        doc.page_content = question_text
-        doc_ids.append(unique_id)
-        sanitized_docs.append(doc)
-
-    embeddings = OpenAIEmbeddings(model="text-embedding-3-small")
-    store = UpstashVectorStore(
-        embedding=embeddings
-    )
-    store.add_documents(documents=sanitized_docs, ids=doc_ids)
-    return f"Successfully processed {len(sanitized_docs)} documents."
-
-
-
diff --git a/src/server/routes/example_route.py b/src/server/routes/example_router.py
similarity index 100%
rename from src/server/routes/example_route.py
rename to src/server/routes/example_router.py
diff --git a/src/server/server.py b/src/server/server.py
index c12f128..8ba6ce5 100644
--- a/src/server/server.py
+++ b/src/server/server.py
@@ -1,6 +1,6 @@
 from fastapi import FastAPI
-from jwt_auth.services.auth_services import router as auth_router
-from routes.example_route import router as business_router
+from jwt_auth.auth_routes import router as auth_router
+from routes.example_router import router as business_router
 from fastapi.middleware.cors import CORSMiddleware
 
 app = FastAPI()
diff --git a/src/server/services/service.py b/src/server/services/example_service.py
similarity index 100%
rename from src/server/services/service.py
rename to src/server/services/example_service.py
diff --git a/src/server/services/services.py b/src/server/services/services.py
new file mode 100644
index 0000000..5392d69
--- /dev/null
+++ b/src/server/services/services.py
@@ -0,0 +1,63 @@
+'''IMPLEMENT SERVICES HERE'''
+
+
+
+class AppError(Exception):
+    def __init__(self, message: str, status_code: int = 400):
+        self.message = message
+        self.status_code = status_code
+        super().__init__(message)
+
+import hashlib,os,json,getpass
+from langchain_community.document_loaders import DirectoryLoader, JSONLoader
+from langchain_community.vectorstores.upstash import UpstashVectorStore
+from langchain_openai import OpenAIEmbeddings
+
+#input should be a folder with json or a singular json file
+def update_Database(path:str)->str: #return number of added docs
+
+    os.environ["UPSTASH_VECTOR_REST_URL"] = "https://loving-kingfish-56853-us1-vector.upstash.io"
+    if not os.environ.get("UPSTASH_VECTOR_REST_TOKEN"):
+        os.environ["UPSTASH_VECTOR_REST_TOKEN"] = getpass.getpass("Enter API key for Upstash: ")
+
+    if not os.environ.get("OPENAI_API_KEY"):
+        os.environ["OPENAI_API_KEY"] = getpass.getpass("Enter API key for OpenAI: ")
+    loader_kwargs = {
+        "jq_schema": ".[]", #iterate over question objects.
+        "text_content": False
+    }
+    loader = DirectoryLoader(
+        path=path,
+        glob="**/*.json", #ensures json
+        loader_cls=JSONLoader,
+        loader_kwargs=loader_kwargs
+    )
+    docs = loader.load()
+    sanitized_docs = []
+    doc_ids = [] # Standardized name
+
+    for doc in docs:
+        raw_data = json.loads(doc.page_content)
+        question_text = raw_data.get("question")
+        unique_id = hashlib.md5(question_text.encode('utf-8')).hexdigest()
+
+        doc.metadata = {
+            "choices": raw_data.get("choices"),
+            "category": raw_data.get("category"),
+            "answer": raw_data.get("answer"),
+            "difficulty": raw_data.get("difficulty"),
+        }
+        doc.page_content = question_text
+        doc_ids.append(unique_id)
+        sanitized_docs.append(doc)
+
+    embeddings = OpenAIEmbeddings(model="text-embedding-3-small")
+    store = UpstashVectorStore(
+        embedding=embeddings
+    )
+    store.add_documents(documents=sanitized_docs, ids=doc_ids)
+    return f"Successfully processed {len(sanitized_docs)} documents."
+
+
+
+
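Not part of the patch above: a minimal sketch of how the new `update_Database` helper in `src/server/services/services.py` might be exposed through a FastAPI route. The router, route path, and default folder shown here are illustrative assumptions, not code from this diff.

```python
# Illustrative wiring only — route name, path, and default folder are assumptions,
# not part of the diff above.
from fastapi import APIRouter, HTTPException

from services.services import AppError, update_Database  # module added in this PR

router = APIRouter()


@router.post("/admin/questions/reindex")  # assumed path
def reindex_questions(path: str = "data/questions"):
    """Load question JSON files from `path` and upsert them into the vector store."""
    try:
        # update_Database returns a summary string such as
        # "Successfully processed N documents."
        return {"detail": update_Database(path)}
    except AppError as exc:
        raise HTTPException(status_code=exc.status_code, detail=exc.message)
```

For a deployment like the docker-compose setup changed above, `UPSTASH_VECTOR_REST_TOKEN` and `OPENAI_API_KEY` would need to be present in the server's environment beforehand, since the `getpass.getpass` fallbacks inside `update_Database` prompt interactively and would block a containerized process.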