Skip to content

Commit

Permalink
Merge pull request #34 from baloise/main
Browse files Browse the repository at this point in the history
Add a way to set custom values for LLM Backend
  • Loading branch information
robbizbal authored Nov 13, 2024
2 parents eb923a0 + b0e5f19 commit 12c1f50
Show file tree
Hide file tree
Showing 5 changed files with 64 additions and 8 deletions.
19 changes: 18 additions & 1 deletion src/api/mask.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
from src.utils.ano_llm import find_entities as llm_find_entities
from src.utils.ano_spacy import Anon_Spacy
from src.utils.ano_regex import find_entities as reg_find_entities
from src.utils.env import *

router = fastapi.APIRouter()

Expand All @@ -17,15 +18,31 @@ class BackendType(Enum):
class MaskRequest(BaseModel):
    """Request payload for the /mask endpoint.

    ``llmURL`` and ``llmModel`` are optional overrides for the LLM backend.
    They default to empty strings so that callers which omit them (e.g. NER or
    REG requests, or older API clients) do not fail Pydantic validation; the
    endpoint falls back to the OLLAMA_* environment defaults when they are
    empty.
    """
    text: str                # text to anonymize
    backendType: BackendType # which anonymization backend to use (LLM/NER/REG)
    llmURL: str = ""         # optional; empty -> OLLAMA_BASE_URL from env
    llmModel: str = ""       # optional; empty -> OLLAMA_MODEL from env

ano = Anon_Spacy()

def check_parameter(param):
    """Report whether *param* carries a usable (non-empty) value.

    Returns 1 when the value is truthy and 0 when it is empty
    (``None``, ``''``, ``[]``, ``0``, ...).  The int return values are kept
    (rather than a bool) because callers compare against ``0``/``1``
    explicitly; note ``bool`` would also satisfy those comparisons.
    """
    if not param:  # falsy covers None, '', [], 0, etc.
        # Plain string: the original used an f-string with no placeholder.
        print("The parameter is empty. Setting to default value from ENV if existing.")
        return 0
    print(f"The parameter is not empty: {param}")
    return 1

@router.post("/mask", response_class=JSONResponse, include_in_schema=True)
async def mask(request: MaskRequest):

match request.backendType:
case BackendType.LLM:
llm_entities = llm_find_entities(request.text)
if check_parameter(request.llmURL) == 0:
request.llmURL = OLLAMA_BASE_URL

if check_parameter(request.llmModel) == 0:
request.llmModel = OLLAMA_MODEL

llm_entities = llm_find_entities(text=request.text, base_url=request.llmURL, model=request.llmModel)
return {"original_text": request.text, "entities": llm_entities['replace_dict'], "anonymized_text": llm_entities['text']}
case BackendType.NER:
spacy_entities = ano.find_entities(request.text)
Expand Down
24 changes: 22 additions & 2 deletions src/static/scripts/mask.js
Original file line number Diff line number Diff line change
@@ -1,9 +1,29 @@
document.getElementById('backendType').addEventListener('change', function() {
    // Toggle the custom-LLM controls depending on the selected backend.
    var llmInputCheckbox = document.getElementById('LLMcustomLabel');
    var llmInputDiv = document.getElementById('LLMInput');
    if (this.value === 'LLM') {
        llmInputCheckbox.style.display = 'flex';
    } else {
        // Hide BOTH the checkbox label and the URL/model inputs. The original
        // only hid the label, so switching away from LLM while "use custom
        // settings" was checked left the input fields visible on screen.
        llmInputCheckbox.style.display = 'none';
        llmInputDiv.style.display = 'none';
    }
});

document.getElementById('LLMcustom').addEventListener('change', function() {
    // Reveal the LLM URL/model inputs only while "use custom settings" is ticked.
    const llmFields = document.getElementById('LLMInput');
    llmFields.style.display = this.checked ? 'flex' : 'none';
});

document.getElementById('inputForm').addEventListener('submit', function(event) {
event.preventDefault(); // Prevent the default form submission

const inputData = document.getElementById('inputData').value;
const backendType = document.getElementById('backendType').value;

const inputLLMurl = document.getElementById('inputLLMurl').value;
const inputLLMmodel = document.getElementById('inputLLMmodel').value;

// Use a relative URL for the API endpoint
const apiEndpoint = '/api/mask'; // Relative URL

Expand All @@ -13,7 +33,7 @@ document.getElementById('inputForm').addEventListener('submit', function(event)
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({ text: inputData, "backendType": backendType }), // Send the input text as JSON
body: JSON.stringify({ text: inputData, "backendType": backendType, llmURL: inputLLMurl, llmModel: inputLLMmodel}), // Send the input text as JSON
})
.then(response => {
if (!response.ok) {
Expand Down
12 changes: 12 additions & 0 deletions src/static/styles/style.css
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,14 @@ footer {
white-space: pre-wrap; /* Preserve whitespace */
}

/* Custom-LLM controls start hidden; mask.js reveals them when the LLM
   backend and the "use custom settings" checkbox are selected. */
#LLMInput,
#LLMcustomLabel {
    display: none;
}

.flex-container {
display: flex; /* Use flexbox to align items */
justify-content: center; /* Center the buttons horizontally */
Expand Down Expand Up @@ -95,3 +103,7 @@ footer {
flex-direction: column; /* Stack items vertically */
}
}

/* Smaller caption text for the "use custom settings" checkbox label */
.checkbox-label {
    font-size: 14px; /* Font size */
}
13 changes: 11 additions & 2 deletions src/templates/html/mask.html
Original file line number Diff line number Diff line change
Expand Up @@ -28,18 +28,27 @@ <h3>Input</h3>
<div class="flex-item">
<form id="inputForm">
    <textarea class="default-textarea" type="text" id="inputData" placeholder="Enter text to anonymize or load text from a file" required></textarea>
    <div class="flex-container-row">
        <div class="flex-container">
            <div class="flex-item">
                <select id="backendType" name="backendType" required>
                    <option value="REG">REG</option>
                    <option value="LLM">LLM</option>
                    <option value="NER">NER</option>
                </select>
            </div>
            <label class="flex-item checkbox-label" id="LLMcustomLabel"><input type="checkbox" id="LLMcustom" name="LLMcustom">use custom settings</label>
            <div id="LLMInput" class="flex-container">
                <div class="flex-item">
                    <!-- <input> is a void element: no closing </input> tag (was invalid HTML) -->
                    <input type="text" name="LLMURL" id="inputLLMurl" placeholder="e.g.: http://localhost:11434">
                </div>
                <div class="flex-item">
                    <input type="text" name="LLMMODEL" id="inputLLMmodel" placeholder="e.g.: llama3.2:latest">
                </div>
            </div>
            <div class="flex-item">
                <input type="submit" value="Submit">
            </div>
        </div>
    </div>
</form>
</div>

Expand Down
4 changes: 1 addition & 3 deletions src/utils/ano_llm.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,6 @@
Text to anonymize: {text}
"""


def find_entities(text, model=OLLAMA_MODEL, temperature=0.1, template=TEMPLATE,
base_url=OLLAMA_BASE_URL, unprettify=True, raw=False):
"""
Expand All @@ -89,8 +88,7 @@ def find_entities(text, model=OLLAMA_MODEL, temperature=0.1, template=TEMPLATE,
:param raw:
:return:
"""



prompt = ChatPromptTemplate.from_template(template)
model = OllamaLLM(model=model, temperature=temperature, base_url=base_url, client_kwargs={"verify": os.getenv("HTTPX_CLIENT_VERIFY", True)})
chain = prompt | model
Expand Down

0 comments on commit 12c1f50

Please sign in to comment.