Update app.py
app.py
CHANGED
@@ -4,6 +4,7 @@ from outlines import generate, samplers
 from pydantic import BaseModel, Field
 from typing import Optional
 import json
+import llama_cpp
 
 # Define the output schema
 class Medication(BaseModel):
@@ -18,7 +19,10 @@ class Medication(BaseModel):
     total_daily_dose_mg: Optional[float] = Field(default=None, description="The total daily dose in milligrams.")
 
 # Load your model locally via llama-cpp
-model = llamacpp(
+model = llamacpp(
+    "cmcmaster/drug_parsing_Llama-3.2-1B-Instruct-Q5_K_S-GGUF",
+    "drug_parsing_llama-3.2-1b-instruct-q5_k_s.gguf",
+    tokenizer=llama_cpp.llama_tokenizer.LlamaHFTokenizer.from_pretrained("cmcmaster/drug_parsing_Llama-3.2-1B-Instruct"))
 
 sampler = samplers.greedy()
 
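
For context, the loaded model, the Medication schema, and the greedy sampler are typically wired together with outlines' generate.json. The sketch below is a minimal usage example, assuming the rest of app.py follows the standard outlines pattern; the variable names and the prompt text are illustrative and not part of this commit.

# Usage sketch (illustrative, not from this commit): combine the model,
# schema, and sampler into a structured-JSON generator.
generator = generate.json(model, Medication, sampler=sampler)

# Hypothetical prompt; the real app builds its prompt elsewhere.
prescription = "Amoxicillin 500 mg orally three times a day for 7 days"
medication = generator(f"Extract the medication details as JSON: {prescription}")

# generate.json with a pydantic class returns a validated Medication instance.
print(medication.model_dump_json())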