Commit b3f8668

lilacheden committed

more model fixes to test_inference_engine

Signed-off-by: lilacheden <[email protected]>

1 parent: ea09bab

File tree

1 file changed (+2 lines, -2 lines)


tests/inference/test_inference_engine.py (2 additions, 2 deletions)
@@ -383,7 +383,7 @@ def test_lite_llm_inference_engine(self):
 
     def test_lite_llm_inference_engine_without_task_data_not_failing(self):
         LiteLLMInferenceEngine(
-            model="watsonx/meta-llama/llama-3-2-11b-instruct",
+            model="watsonx/meta-llama/llama-3-2-11b-vision-instruct",
             max_tokens=2,
             temperature=0,
             top_p=1,
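
For readers reproducing this outside the test suite, here is a minimal sketch of the engine construction this hunk updates. The import path unitxt.inference is an assumption, and only the arguments visible in the diff are included (the call is truncated after top_p in the hunk):

    # Sketch based on the hunk above; the argument list beyond top_p is not
    # shown in the diff, so only the visible parameters appear here.
    from unitxt.inference import LiteLLMInferenceEngine  # import path assumed

    engine = LiteLLMInferenceEngine(
        model="watsonx/meta-llama/llama-3-2-11b-vision-instruct",
        max_tokens=2,
        temperature=0,
        top_p=1,
    )
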
@@ -464,7 +464,7 @@ def test_ollama_inference_engine(self):
             {"source": "Answer in one word only. What is the capital of Canada"},
         ]
 
-        engine = OllamaInferenceEngine(model="llama3.2:1b", temperature=0.0)
+        engine = OllamaInferenceEngine(model="llama3:8b", temperature=0.0)
         predictions = engine.infer(dataset)
 
         self.assertTrue("Ottawa" in predictions[0], predictions[0])
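
Likewise, a self-contained sketch of the updated Ollama flow from this hunk, assuming the unitxt.inference import path, a locally running Ollama server with the llama3:8b model pulled, and a plain assert in place of the TestCase assertion:

    # Sketch of the updated test body; assumes a reachable Ollama server and
    # that infer() accepts a list of {"source": ...} instances, as the
    # surrounding test lines suggest.
    from unitxt.inference import OllamaInferenceEngine  # import path assumed

    dataset = [
        {"source": "Answer in one word only. What is the capital of Canada"},
    ]

    engine = OllamaInferenceEngine(model="llama3:8b", temperature=0.0)
    predictions = engine.infer(dataset)

    assert "Ottawa" in predictions[0], predictions[0]
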
