Skip to content

Commit

Permalink
Removing previous function that detected the AI service based on the system message
Browse files Browse the repository at this point in the history
  • Loading branch information
juanroesel committed May 13, 2024
1 parent b99dbf8 commit d39c7d9
Showing 1 changed file with 1 addition and 23 deletions.
24 changes: 1 addition & 23 deletions llama_cpp/_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
import psutil
import subprocess

from typing import Any, Dict, List, Tuple
from typing import Any, Dict, List, Tuple, Union

# Avoid "LookupError: unknown encoding: ascii" when open() called in a destructor
outnull_file = open(os.devnull, "w")
Expand Down Expand Up @@ -123,25 +123,3 @@ def get_gpu_general_info() -> Tuple[float, float, float]:
except (subprocess.CalledProcessError, FileNotFoundError):
pass
return 0.0, 0.0, 0.0

def infer_service_from_prompt(prompt: str | List[str]):
"""
Infer the service for which a completion request is sent based on the prompt.
"""
LABEL_SUGGESTIONS_TASK = "Your task is to select the most relevant labels for a GitHub issue title from a list of labels provided."
ACCEPTANCE_CRITERIA_TASK = "Your task is to write the acceptance criteria for a GitHub issue."
SPRINT_REVIEW_TASK = "You are helping me prepare a sprint review."

if isinstance(prompt, list):
prompt = " ".join(prompt)

if LABEL_SUGGESTIONS_TASK in prompt:
return "label-suggestions"

elif ACCEPTANCE_CRITERIA_TASK in prompt:
return "acceptance-criteria"

elif SPRINT_REVIEW_TASK in prompt:
return "sprint-review"

return "not-specified"

0 comments on commit d39c7d9

Please sign in to comment.