
Commit 0157035

support dual LLMs: one for processing and one for type checking and type resolution
kugesan1105 committed Nov 18, 2024
1 parent 09d6dee commit 0157035
Showing 2 changed files with 7 additions and 4 deletions.
jac-mtllm/examples/inherit_basellm.jac (5 changes: 4 additions & 1 deletion)
@@ -1,4 +1,5 @@
 import from mtllm.llms.base { BaseLLM }
+import:py from mtllm.llms { OpenAI }
 import:py from PIL { Image }
 import torch;
 import from transformers { AutoModelForCausalLM, AutoProcessor }
@@ -85,6 +86,7 @@ obj Florence :BaseLLM: {
 }

 glob llm = Florence('microsoft/Florence-2-base');
+glob llm2 = OpenAI(verbose=True, model_name="gpt-4o-mini");

 enum DamageType {
     NoDamage,
@@ -94,7 +96,8 @@ enum DamageType {
 }

 can ""
-predict_vehicle_damage(img: Image) -> DamageType by llm(is_custom=True,raw_output=True);
+# predict_vehicle_damage(img: Image) -> DamageType by llm(is_custom=True,raw_output=True);
+predict_vehicle_damage(img: Image) -> DamageType by llm(is_custom=True,resolve_with =llm2);

 with entry {
     img = 'car_scratch.jpg';
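In the updated example, the custom Florence model still handles the image-processing step, but instead of returning its raw output via raw_output=True, the ability now passes resolve_with=llm2, so the raw Florence output is handed to the OpenAI gpt-4o-mini model, which resolves it into the DamageType enum.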
jac-mtllm/mtllm/plugin.py (6 changes: 3 additions & 3 deletions)
@@ -121,12 +121,12 @@ def with_llm(
             _globals,
             _locals,
         )
+        resolver_model = model_params.pop("resolve_with") if "resolve_with" in model_params else model
         _output = (
-            model.resolve_output(
+            meaning_out if raw_output else
+            resolver_model.resolve_output(
                 meaning_out, output_hint, output_type_explanations, _globals, _locals
             )
-            if not raw_output
-            else meaning_out
         )
         return _output

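For readers skimming the diff, here is a minimal standalone sketch of the routing the plugin change introduces: the processing model produces the raw meaning, and an optional resolve_with model (falling back to the processing model when absent) turns that raw text into the typed result. The SimpleLLM class and pick_resolver helper below are hypothetical illustrations, not part of the mtllm API.

# Hypothetical stand-ins to illustrate the resolve_with routing; not the mtllm API.

class SimpleLLM:
    def __init__(self, name: str) -> None:
        self.name = name

    def resolve_output(self, meaning_out: str) -> str:
        # A real BaseLLM would prompt the model to map raw text onto the
        # declared output type (e.g. the DamageType enum).
        return f"[{self.name}] resolved: {meaning_out}"

def pick_resolver(model: SimpleLLM, model_params: dict) -> SimpleLLM:
    # Mirrors the new plugin line: prefer the model passed via resolve_with,
    # falling back to the processing model when the key is absent.
    return model_params.pop("resolve_with") if "resolve_with" in model_params else model

florence = SimpleLLM("Florence-2-base")
gpt4o_mini = SimpleLLM("gpt-4o-mini")

params = {"is_custom": True, "resolve_with": gpt4o_mini}
print(pick_resolver(florence, params).resolve_output("scratch on rear left door"))

params = {"is_custom": True}
print(pick_resolver(florence, params).name)  # falls back to the processing model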
