Spaces:
Runtime error
Force download llama model
Browse files
app.py
CHANGED
|
@@ -73,7 +73,7 @@ model_name_or_path = "TheBloke/Llama-2-13B-chat-GGML"
 model_basename = "llama-2-13b-chat.ggmlv3.q5_1.bin"  # The model is in bin format

 # Download the model file
-model_path_llama = hf_hub_download(repo_id=model_name_or_path, filename=model_basename)
+model_path_llama = hf_hub_download(repo_id=model_name_or_path, filename=model_basename, force_download=True)

 # Initialize the Llama model with appropriate settings for GPU
 lcpp_llm = Llama(