Commit 4f88ad3

type annotations to uppercase
hananell committed Jan 12, 2024
1 parent 8adc914 commit 4f88ad3
Showing 17 changed files with 64 additions and 61 deletions.
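The change itself is mechanical: every `list[...]`, `dict[...]`, and `tuple[...]` annotation becomes `List[...]`, `Dict[...]`, or `Tuple[...]` imported from `typing`. Subscripting the built-in types in annotations only works at runtime on Python 3.9+ (PEP 585), so the `typing` aliases keep these modules importable on the older interpreters the package presumably still supports. A minimal sketch of the pattern (the `embed` function is illustrative, not taken from the diff):

```python
from typing import List

# On Python 3.8, evaluating the annotation below raises
# "TypeError: 'type' object is not subscriptable":
#     def embed(docs: list[str]) -> list[list[float]]: ...

# The typing aliases are subscriptable on 3.7+ as well:
def embed(docs: List[str]) -> List[List[float]]:
    # Toy embedding: one number per document.
    return [[float(len(doc))] for doc in docs]
```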
3 changes: 2 additions & 1 deletion semantic_router/encoders/base.py
@@ -1,3 +1,4 @@
+from typing import List
from pydantic import BaseModel, Field


@@ -9,5 +10,5 @@ class BaseEncoder(BaseModel):
    class Config:
        arbitrary_types_allowed = True

-    def __call__(self, docs: list[str]) -> list[list[float]]:
+    def __call__(self, docs: List[str]) -> List[List[float]]:
        raise NotImplementedError("Subclasses must implement this method")
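Every concrete encoder below implements this `__call__` interface. As a rough sketch of a custom encoder against the updated signature (the class, its field defaults, and the assumption that `BaseEncoder` declares `name` and `type` fields are illustrative, not part of this commit):

```python
from typing import List

from semantic_router.encoders import BaseEncoder


class WordCountEncoder(BaseEncoder):
    # `name` and `type` are assumed to be fields on BaseEncoder; only the
    # __call__ signature is confirmed by the hunk above.
    name: str = "word-count"
    type: str = "sparse"

    def __call__(self, docs: List[str]) -> List[List[float]]:
        # Toy one-dimensional "embedding": the word count of each document.
        return [[float(len(doc.split()))] for doc in docs]
```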
8 changes: 4 additions & 4 deletions semantic_router/encoders/bm25.py
@@ -1,12 +1,12 @@
-from typing import Any, Optional
+from typing import Any, Optional, List, Dict

from semantic_router.encoders import BaseEncoder
from semantic_router.utils.logger import logger


class BM25Encoder(BaseEncoder):
    model: Optional[Any] = None
-    idx_mapping: Optional[dict[int, int]] = None
+    idx_mapping: Optional[Dict[int, int]] = None
    type: str = "sparse"

    def __init__(
@@ -40,7 +40,7 @@ def _set_idx_mapping(self):
        else:
            raise TypeError("Expected a dictionary for 'doc_freq'")

-    def __call__(self, docs: list[str]) -> list[list[float]]:
+    def __call__(self, docs: List[str]) -> List[List[float]]:
        if self.model is None or self.idx_mapping is None:
            raise ValueError("Model or index mapping is not initialized.")
        if len(docs) == 1:
@@ -60,7 +60,7 @@ def __call__(self, docs: list[str]) -> list[list[float]]:
                    embeds[i][position] = val
        return embeds

-    def fit(self, docs: list[str]):
+    def fit(self, docs: List[str]):
        if self.model is None:
            raise ValueError("Model is not initialized.")
        self.model.fit(docs)
4 changes: 2 additions & 2 deletions semantic_router/encoders/cohere.py
@@ -1,5 +1,5 @@
import os
-from typing import Optional
+from typing import Optional, List

import cohere

@@ -27,7 +27,7 @@ def __init__(
        except Exception as e:
            raise ValueError(f"Cohere API client failed to initialize. Error: {e}")

-    def __call__(self, docs: list[str]) -> list[list[float]]:
+    def __call__(self, docs: List[str]) -> List[List[float]]:
        if self.client is None:
            raise ValueError("Cohere client is not initialized.")
        try:
8 changes: 4 additions & 4 deletions semantic_router/encoders/fastembed.py
@@ -1,4 +1,4 @@
-from typing import Any, Optional
+from typing import Any, Optional, List

import numpy as np
from pydantic import PrivateAttr
@@ -42,10 +42,10 @@ def _initialize_client(self):
        embedding = Embedding(**embedding_args)
        return embedding

-    def __call__(self, docs: list[str]) -> list[list[float]]:
+    def __call__(self, docs: List[str]) -> List[List[float]]:
        try:
-            embeds: list[np.ndarray] = list(self._client.embed(docs))
-            embeddings: list[list[float]] = [e.tolist() for e in embeds]
+            embeds: List[np.ndarray] = list(self._client.embed(docs))
+            embeddings: List[List[float]] = [e.tolist() for e in embeds]
            return embeddings
        except Exception as e:
            raise ValueError(f"FastEmbed embed failed. Error: {e}")
6 changes: 3 additions & 3 deletions semantic_router/encoders/huggingface.py
@@ -1,4 +1,4 @@
-from typing import Any, Optional
+from typing import Any, Optional, List

from pydantic import PrivateAttr

@@ -60,11 +60,11 @@ def _initialize_hf_model(self):

    def __call__(
        self,
-        docs: list[str],
+        docs: List[str],
        batch_size: int = 32,
        normalize_embeddings: bool = True,
        pooling_strategy: str = "mean",
-    ) -> list[list[float]]:
+    ) -> List[List[float]]:
        all_embeddings = []
        for i in range(0, len(docs), batch_size):
            batch_docs = docs[i : i + batch_size]
4 changes: 2 additions & 2 deletions semantic_router/encoders/openai.py
@@ -1,6 +1,6 @@
import os
from time import sleep
-from typing import Optional
+from typing import Optional, List

import openai
from openai import OpenAIError
@@ -31,7 +31,7 @@ def __init__(
        except Exception as e:
            raise ValueError(f"OpenAI API client failed to initialize. Error: {e}")

-    def __call__(self, docs: list[str]) -> list[list[float]]:
+    def __call__(self, docs: List[str]) -> List[List[float]]:
        if self.client is None:
            raise ValueError("OpenAI client is not initialized.")
        embeds = None
12 changes: 6 additions & 6 deletions semantic_router/hybrid_layer.py
@@ -1,4 +1,4 @@
-from typing import Optional
+from typing import Optional, List, Dict, Tuple

import numpy as np
from numpy.linalg import norm
@@ -21,7 +21,7 @@ def __init__(
        self,
        encoder: BaseEncoder,
        sparse_encoder: Optional[BM25Encoder] = None,
-        routes: list[Route] = [],
+        routes: List[Route] = [],
        alpha: float = 0.3,
    ):
        self.encoder = encoder
@@ -81,7 +81,7 @@ def _add_route(self, route: Route):
        else:
            self.sparse_index = np.concatenate([self.sparse_index, sparse_embeds])

-    def _add_routes(self, routes: list[Route]):
+    def _add_routes(self, routes: List[Route]):
        # create embeddings for all routes
        logger.info("Creating embeddings for all routes...")
        all_utterances = [
@@ -153,8 +153,8 @@ def _convex_scaling(self, dense: np.ndarray, sparse: np.ndarray):
        sparse = np.array(sparse) * (1 - self.alpha)
        return dense, sparse

-    def _semantic_classify(self, query_results: list[dict]) -> tuple[str, list[float]]:
-        scores_by_class: dict[str, list[float]] = {}
+    def _semantic_classify(self, query_results: List[Dict]) -> Tuple[str, List[float]]:
+        scores_by_class: Dict[str, List[float]] = {}
        for result in query_results:
            score = result["score"]
            route = result["route"]
@@ -174,7 +174,7 @@ def _semantic_classify(self, query_results: list[dict]) -> tuple[str, list[float]]:
            logger.warning("No classification found for semantic classifier.")
            return "", []

-    def _pass_threshold(self, scores: list[float], threshold: float) -> bool:
+    def _pass_threshold(self, scores: List[float], threshold: float) -> bool:
        if scores:
            return max(scores) > threshold
        else:
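The classification hunks above show only fragments of `_semantic_classify`. As a simplified, self-contained sketch of what the visible lines suggest (grouping scores by route name is taken from the hunk; picking the class with the highest total score is an assumption, since that part is not in the diff):

```python
from typing import Dict, List, Tuple


def semantic_classify(query_results: List[Dict]) -> Tuple[str, List[float]]:
    # Group scores by route, mirroring the loop visible in the hunk.
    scores_by_class: Dict[str, List[float]] = {}
    for result in query_results:
        scores_by_class.setdefault(result["route"], []).append(result["score"])

    if not scores_by_class:
        # The diff shows this fallback: warn and return an empty classification.
        return "", []

    # Assumed tie-breaking rule for this sketch: highest total score wins.
    top = max(scores_by_class, key=lambda route: sum(scores_by_class[route]))
    return top, scores_by_class[top]


print(semantic_classify([{"route": "chitchat", "score": 0.82},
                         {"route": "politics", "score": 0.35}]))
```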
18 changes: 9 additions & 9 deletions semantic_router/layer.py
@@ -1,6 +1,6 @@
import json
import os
-from typing import Optional, Any
+from typing import Optional, Any, List, Dict, Tuple

import numpy as np
import yaml
@@ -48,11 +48,11 @@ class LayerConfig:
    RouteLayer.
    """

-    routes: list[Route] = []
+    routes: List[Route] = []

    def __init__(
        self,
-        routes: list[Route] = [],
+        routes: List[Route] = [],
        encoder_type: str = "openai",
        encoder_name: Optional[str] = None,
    ):
@@ -99,7 +99,7 @@ def from_file(cls, path: str) -> "LayerConfig":
        else:
            raise Exception("Invalid config JSON or YAML")

-    def to_dict(self) -> dict[str, Any]:
+    def to_dict(self) -> Dict[str, Any]:
        return {
            "encoder_type": self.encoder_type,
            "encoder_name": self.encoder_name,
@@ -158,7 +158,7 @@ def __init__(
        self,
        encoder: Optional[BaseEncoder] = None,
        llm: Optional[BaseLLM] = None,
-        routes: Optional[list[Route]] = None,
+        routes: Optional[List[Route]] = None,
        top_k_routes: int = 3,
    ):
        logger.info("Initializing RouteLayer")
@@ -247,7 +247,7 @@ def add(self, route: Route):
        # add route to routes list
        self.routes.append(route)

-    def _add_routes(self, routes: list[Route]):
+    def _add_routes(self, routes: List[Route]):
        # create embeddings for all routes
        all_utterances = [
            utterance for route in routes for utterance in route.utterances
@@ -290,8 +290,8 @@ def _query(self, text: str, top_k: int = 5):
            logger.warning("No index found for route layer.")
            return []

-    def _semantic_classify(self, query_results: list[dict]) -> tuple[str, list[float]]:
-        scores_by_class: dict[str, list[float]] = {}
+    def _semantic_classify(self, query_results: List[dict]) -> Tuple[str, List[float]]:
+        scores_by_class: Dict[str, List[float]] = {}
        for result in query_results:
            score = result["score"]
            route = result["route"]
@@ -311,7 +311,7 @@ def _semantic_classify(self, query_results: list[dict]) -> tuple[str, list[float]]:
            logger.warning("No classification found for semantic classifier.")
            return "", []

-    def _pass_threshold(self, scores: list[float], threshold: float) -> bool:
+    def _pass_threshold(self, scores: List[float], threshold: float) -> bool:
        if scores:
            return max(scores) > threshold
        else:
4 changes: 3 additions & 1 deletion semantic_router/linear.py
@@ -1,3 +1,5 @@
+from typing import Tuple
+
import numpy as np
from numpy.linalg import norm

@@ -19,7 +21,7 @@ def similarity_matrix(xq: np.ndarray, index: np.ndarray) -> np.ndarray:
    return sim


-def top_scores(sim: np.ndarray, top_k: int = 5) -> tuple[np.ndarray, np.ndarray]:
+def top_scores(sim: np.ndarray, top_k: int = 5) -> Tuple[np.ndarray, np.ndarray]:
    # get indices of top_k records
    top_k = min(top_k, sim.shape[0])
    idx = np.argpartition(sim, -top_k)[-top_k:]
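A short usage sketch of the two helpers in `linear.py`, assuming `similarity_matrix` returns one similarity score per stored vector and that `top_scores` returns the pair as (scores, indices); neither detail is fully visible in the hunks above:

```python
import numpy as np

from semantic_router.linear import similarity_matrix, top_scores

xq = np.random.rand(256)          # query embedding
index = np.random.rand(100, 256)  # 100 stored utterance embeddings

sim = similarity_matrix(xq, index)      # assumed shape: (100,)
scores, idx = top_scores(sim, top_k=5)  # assumed order: (scores, indices)
print(idx, scores)
```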
4 changes: 2 additions & 2 deletions semantic_router/llms/base.py
@@ -1,4 +1,4 @@
-from typing import Optional
+from typing import Optional, List

from pydantic import BaseModel

@@ -11,5 +11,5 @@ class BaseLLM(BaseModel):
    class Config:
        arbitrary_types_allowed = True

-    def __call__(self, messages: list[Message]) -> Optional[str]:
+    def __call__(self, messages: List[Message]) -> Optional[str]:
        raise NotImplementedError("Subclasses must implement this method")
4 changes: 2 additions & 2 deletions semantic_router/llms/cohere.py
@@ -1,5 +1,5 @@
import os
-from typing import Optional
+from typing import Optional, List

import cohere

@@ -26,7 +26,7 @@ def __init__(
        except Exception as e:
            raise ValueError(f"Cohere API client failed to initialize. Error: {e}")

-    def __call__(self, messages: list[Message]) -> str:
+    def __call__(self, messages: List[Message]) -> str:
        if self.client is None:
            raise ValueError("Cohere client is not initialized.")
        try:
4 changes: 2 additions & 2 deletions semantic_router/llms/openai.py
@@ -1,5 +1,5 @@
import os
-from typing import Optional
+from typing import Optional, List

import openai

@@ -33,7 +33,7 @@ def __init__(
        self.temperature = temperature
        self.max_tokens = max_tokens

-    def __call__(self, messages: list[Message]) -> str:
+    def __call__(self, messages: List[Message]) -> str:
        if self.client is None:
            raise ValueError("OpenAI client is not initialized.")
        try:
4 changes: 2 additions & 2 deletions semantic_router/llms/openrouter.py
@@ -1,5 +1,5 @@
import os
-from typing import Optional
+from typing import Optional, List

import openai

@@ -38,7 +38,7 @@ def __init__(
        self.temperature = temperature
        self.max_tokens = max_tokens

-    def __call__(self, messages: list[Message]) -> str:
+    def __call__(self, messages: List[Message]) -> str:
        if self.client is None:
            raise ValueError("OpenRouter client is not initialized.")
        try:
12 changes: 6 additions & 6 deletions semantic_router/route.py
@@ -1,6 +1,6 @@
import json
import re
-from typing import Any, Callable, Optional, Union
+from typing import Any, Callable, Optional, Union, List, Dict

from pydantic import BaseModel

@@ -40,9 +40,9 @@ def is_valid(route_config: str) -> bool:

class Route(BaseModel):
    name: str
-    utterances: list[str]
+    utterances: List[str]
    description: Optional[str] = None
-    function_schema: Optional[dict[str, Any]] = None
+    function_schema: Optional[Dict[str, Any]] = None
    llm: Optional[BaseLLM] = None

    def __call__(self, query: str) -> RouteChoice:
@@ -62,11 +62,11 @@ def __call__(self, query: str) -> RouteChoice:
            func_call = None
        return RouteChoice(name=self.name, function_call=func_call)

-    def to_dict(self) -> dict[str, Any]:
+    def to_dict(self) -> Dict[str, Any]:
        return self.dict()

    @classmethod
-    def from_dict(cls, data: dict[str, Any]):
+    def from_dict(cls, data: Dict[str, Any]):
        return cls(**data)

    @classmethod
@@ -92,7 +92,7 @@ def _parse_route_config(cls, config: str) -> str:
            raise ValueError("No <config></config> tags found in the output.")

    @classmethod
-    def _generate_dynamic_route(cls, llm: BaseLLM, function_schema: dict[str, Any]):
+    def _generate_dynamic_route(cls, llm: BaseLLM, function_schema: Dict[str, Any]):
        logger.info("Generating dynamic route...")

        prompt = f"""
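For reference, a static `Route` built from the fields visible above (the utterances and description are made up for the example; `function_schema` and `llm` stay at their `None` defaults):

```python
from semantic_router.route import Route

chitchat = Route(
    name="chitchat",
    utterances=["how are you?", "what's up?", "lovely weather today"],
    description="Casual small talk",
)

# to_dict() / from_dict() round-trip, as declared in the hunk above.
data = chitchat.to_dict()
assert Route.from_dict(data).name == "chitchat"
```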
8 changes: 4 additions & 4 deletions semantic_router/schema.py
@@ -1,5 +1,5 @@
from enum import Enum
-from typing import Optional, Literal
+from typing import Optional, Literal, List, Dict

from pydantic import BaseModel
from pydantic.dataclasses import dataclass
@@ -47,7 +47,7 @@ def __init__(self, type: str, name: Optional[str]):
        else:
            raise ValueError

-    def __call__(self, texts: list[str]) -> list[list[float]]:
+    def __call__(self, texts: List[str]) -> List[List[float]]:
        return self.model(texts)


@@ -65,7 +65,7 @@ def to_cohere(self):


class Conversation(BaseModel):
-    messages: list[Message]
+    messages: List[Message]

    def split_by_topic(
        self,
@@ -74,7 +74,7 @@ def split_by_topic(
        split_method: Literal[
            "consecutive_similarity_drop", "cumulative_similarity_drop"
        ] = "consecutive_similarity_drop",
-    ) -> dict[str, list[str]]:
+    ) -> Dict[str, List[str]]:
        docs = [f"{m.role}: {m.content}" for m in self.messages]
        return semantic_splitter(
            encoder=encoder, docs=docs, threshold=threshold, split_method=split_method
(The remaining changed files were not rendered on this page.)