# Hugging Face Space: structured drug-order parsing demo
# (page-scrape residue "Spaces: Sleeping" removed; source reconstructed below)
| import gradio as gr | |
| from outlines.models import llamacpp | |
| from outlines import generate, samplers | |
| from pydantic import BaseModel, Field | |
| from typing import Optional | |
| import json | |
| import llama_cpp | |
| # Define the output schema | |
| class Medication(BaseModel): | |
| drug_name: str = Field(description="The name of the drug.") | |
| is_generic: bool = Field(description="Indicates if the drug name is a generic drug name.") | |
| strength: Optional[str] = Field(default=None, description="The strength of the drug.") | |
| unit: Optional[str] = Field(default=None, description="The unit of measurement for the drug strength.") | |
| dosage_form: Optional[str] = Field(default=None, description="The form of the drug (e.g., patch, tablet).") | |
| frequency: Optional[str] = Field(default=None, description="The frequency of drug administration.") | |
| route: Optional[str] = Field(default=None, description="The route of administration (e.g., oral, topical).") | |
| is_prn: Optional[bool] = Field(default=None, description="Whether the medication is taken 'as needed'.") | |
| total_daily_dose_mg: Optional[float] = Field(default=None, description="The total daily dose in milligrams.") | |
| # Load your model locally via llama-cpp | |
| model = llamacpp( | |
| "cmcmaster/drug_parsing_Llama-3.2-1B-Instruct-Q5_K_S-GGUF", | |
| "drug_parsing_llama-3.2-1b-instruct-q5_k_s.gguf", | |
| tokenizer=llama_cpp.llama_tokenizer.LlamaHFTokenizer.from_pretrained("cmcmaster/drug_parsing_Llama-3.2-1B-Instruct") | |
| ) | |
| sampler = samplers.greedy() | |
| # Prepare structured generator | |
| structured_generator = generate.json(model, Medication, sampler = sampler) | |
def respond(
    message,
    history: list[tuple[str, str]],
):
    """Chat handler: parse *message* into a Medication and yield it as JSON.

    ``history`` is supplied by gr.ChatInterface but is not used — each
    message is parsed independently of the conversation. Any failure is
    reported to the UI as an ``Error: ...`` string rather than raised.
    """
    try:
        parsed = structured_generator(message)
    except Exception as exc:  # boundary handler: surface the error in chat
        yield f"Error: {exc}"
    else:
        yield json.dumps(parsed.model_dump(), indent=2)
| # Gradio interface | |
| demo = gr.ChatInterface( | |
| respond | |
| ) | |
| if __name__ == "__main__": | |
| demo.launch() |