Update llm_utils.py
llm_utils.py  CHANGED  (+2 -2)
@@ -5,7 +5,7 @@ import openai
 
 
 # Sentence Generator (Decoder) for GPT-3 ...
-def decoder_for_gpt3(input, max_length, temperature=0, engine="text-davinci-003"):
+def decoder_for_gpt3(input, max_length, temperature=0, engine="gpt-3.5-turbo-instruct"):
     # GPT-3 API allows each users execute the API within 60 times in a minute ...
     if engine == "gpt-3.5-turbo":
         time.sleep(1)
@@ -33,7 +33,7 @@ def decoder_for_gpt3(input, max_length, temperature=0, engine="text-davinci-003"):
     response = response["choices"][0]["text"]
     return response
 
-def decoder_for_gpt3_consistency(input, max_length, temp=0.7, n=5, engine="text-davinci-003"):
+def decoder_for_gpt3_consistency(input, max_length, temp=0.7, n=5, engine="gpt-3.5-turbo-instruct"):
     # GPT-3 API allows each users execute the API within 60 times in a minute ...
     if engine == "gpt-3.5-turbo":
         time.sleep(1)
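
For reference, below is a minimal sketch of what the two decoders might look like with the new default engine, assuming they call the legacy openai-python (pre-1.0) Completion endpoint implied by the visible response["choices"][0]["text"] line. Only the signatures, the rate-limit sleep, and that indexing line appear in this commit; the prompt handling and the return shape of the self-consistency variant are assumptions, not part of the diff.

import time

import openai  # legacy openai-python (< 1.0) interface, implied by response["choices"][0]["text"]


def decoder_for_gpt3(input, max_length, temperature=0, engine="gpt-3.5-turbo-instruct"):
    # GPT-3 API allows each users execute the API within 60 times in a minute ...
    if engine == "gpt-3.5-turbo":
        time.sleep(1)
    # Assumed body: a single greedy completion (temperature=0 by default).
    response = openai.Completion.create(
        engine=engine,
        prompt=input,
        max_tokens=max_length,
        temperature=temperature,
    )
    # These two lines appear verbatim in the diff above.
    response = response["choices"][0]["text"]
    return response


def decoder_for_gpt3_consistency(input, max_length, temp=0.7, n=5, engine="gpt-3.5-turbo-instruct"):
    # GPT-3 API allows each users execute the API within 60 times in a minute ...
    if engine == "gpt-3.5-turbo":
        time.sleep(1)
    # Assumed body: sample n completions at temperature temp and return all of
    # them, so the caller can take a majority vote (self-consistency).
    response = openai.Completion.create(
        engine=engine,
        prompt=input,
        max_tokens=max_length,
        temperature=temp,
        n=n,
    )
    return [choice["text"] for choice in response["choices"]]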