Fixed unittest issues and default values
kirgrim committed Oct 28, 2024
1 parent 46e59c4 commit 8cf57ed
Showing 3 changed files with 13 additions and 6 deletions.
8 changes: 3 additions & 5 deletions neon_diana_utils/configuration.py
@@ -204,15 +204,13 @@ def make_keys_config(write_config: bool,
         vllm_hf_token = click.prompt("Hugging Face Auth Token", type=str)
         vllm_role = click.prompt("VLLM Role",
                                  type=str,
-                                 default="You are NeonLLM."
-                                         "You are trying to give a short "
-                                         "answer in less than 40 words.")
+                                 default="You are NeonLLM.")
         vllm_context_depth = click.prompt("VLLM Context depth",
                                           type=int,
                                           default=4)
         vllm_max_tokens = click.prompt("Maximum tokens in responses",
-                                       type=int,
-                                       default=100)
+                                       type=int,
+                                       default=512)
         vllm_num_parallel_processes = click.prompt("Number of parallel processes",
                                                    type=int,
                                                    default=2)
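For reference, here is a minimal standalone sketch of the prompting pattern this hunk adjusts, using the same click.prompt defaults now in the diff. The configure_vllm command name is hypothetical; in the repository these prompts live inside make_keys_config(), whose surrounding logic is omitted.

```python
# Minimal sketch of the new defaults, assuming only the click package.
import click

@click.command()
def configure_vllm():
    # Defaults mirror the values introduced by this commit.
    vllm_role = click.prompt("VLLM Role", type=str,
                             default="You are NeonLLM.")
    vllm_context_depth = click.prompt("VLLM Context depth", type=int,
                                      default=4)
    vllm_max_tokens = click.prompt("Maximum tokens in responses",
                                   type=int, default=512)
    click.echo(f"role={vllm_role!r} depth={vllm_context_depth} "
               f"max_tokens={vllm_max_tokens}")

if __name__ == "__main__":
    configure_vllm()
```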
4 changes: 3 additions & 1 deletion tests/test_diana_utils.py
@@ -289,6 +289,7 @@ def test_get_unconfigured_backend_services(self):
         all_configured = {'keys': {'api_services': {'configured': True},
                                    'emails': {'configured': True},
                                    'track_my_brands': True},
+                          'LLM_VLLM': {'config': False},
                           'LLM_CHAT_GPT': {'config': False},
                           'LLM_CLAUDE': {'': ''},
                           'LLM_PALM2': 'enabled',
@@ -304,7 +305,8 @@
         self.assertEqual(disabled, {'neon-api-proxy', 'neon-brands-service',
                                     'neon-email-proxy', 'neon-llm-chatgpt',
                                     'neon-llm-fastchat', 'neon-llm-claude',
-                                    'neon-llm-palm', 'neon-llm-gemini'})
+                                    'neon-llm-palm', 'neon-llm-gemini',
+                                    'neon-llm-vllm'})
 
     def test_get_optional_http_backend(self):
         from neon_diana_utils.configuration import _get_optional_http_backend
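The assertion above implies a mapping from configuration keys to container names (e.g. LLM_VLLM → neon-llm-vllm, LLM_PALM2 → neon-llm-palm). Below is a hedged sketch of that lookup, inferred only from the names in this test, not from the actual neon_diana_utils implementation.

```python
# Hypothetical lookup inferred from the test fixtures above; the real
# mapping in neon_diana_utils.configuration may be derived differently.
LLM_SERVICE_CONTAINERS = {
    "LLM_VLLM": "neon-llm-vllm",
    "LLM_CHAT_GPT": "neon-llm-chatgpt",
    "LLM_CLAUDE": "neon-llm-claude",
    "LLM_PALM2": "neon-llm-palm",
}

def unconfigured_llm_services(config: dict) -> set:
    """Treat an LLM section as unconfigured unless it is a dict with a
    truthy 'config' value, so {'config': False}, {'': ''}, and the bare
    string 'enabled' all count as unconfigured, matching the test."""
    return {container for key, container in LLM_SERVICE_CONTAINERS.items()
            if not (isinstance(config.get(key), dict)
                    and config[key].get("config"))}
```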
7 changes: 7 additions & 0 deletions tests/test_rabbitmq.json
@@ -131,6 +131,13 @@
"write": ".*",
"read": "chat_gpt_input"
},
{
"user": "neon_llm_vllm",
"vhost": "/llm",
"configure": ".*",
"write": ".*",
"read": "vllm_input"
},
{
"user": "neon_llm_fastchat",
"vhost": "/llm",
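Each entry pairs a user with per-vhost configure/write/read permission regexes, in the layout RabbitMQ uses for definitions exports. Below is a small hedged sketch that turns entries like the new neon_llm_vllm one into equivalent rabbitmqctl set_permissions commands; the top-level "permissions" key is assumed from the standard export format, and this test fixture may nest entries differently.

```python
# Sketch: print a rabbitmqctl command for each permission entry.
# Assumes the file has a top-level "permissions" list, as in a
# standard RabbitMQ definitions export.
import json

with open("tests/test_rabbitmq.json") as f:
    definitions = json.load(f)

for perm in definitions.get("permissions", []):
    print("rabbitmqctl set_permissions -p {vhost} {user} "
          "'{configure}' '{write}' '{read}'".format(**perm))
```

For the entry added in this commit, this would print: rabbitmqctl set_permissions -p /llm neon_llm_vllm '.*' '.*' 'vllm_input'.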
