-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathgpt_setting.py
More file actions
33 lines (23 loc) · 856 Bytes
/
gpt_setting.py
File metadata and controls
33 lines (23 loc) · 856 Bytes
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
import os
import re
from utils import *
from llm_settings.openai_models import *
from llm_settings.gemini_models import *
from llm_settings.deepinfra_models import *
def print_prompt(inputs, response):
    """Append a prompt/response pair to the run log at records/records.txt.

    Each entry is written as the raw inputs, a ``----`` separator line,
    the raw response, then a ``====`` separator line, so successive calls
    accumulate a readable transcript.

    Args:
        inputs: The prompt (or message payload) sent to the model.
        response: The text returned by the model.

    Returns:
        None. The function's only effect is appending to the log file.
    """
    os.makedirs("records", exist_ok=True)
    # Explicit UTF-8: prompts/responses may contain non-ASCII text, and the
    # platform default encoding is not guaranteed to handle it.
    with open("records/records.txt", 'a', encoding="utf-8") as f:
        f.write(f"{inputs}\n----\n")
        f.write(f"{response}\n====\n")
def llm_request(model, inputs):
    """Dispatch a chat request to the backend that serves *model*.

    The provider is chosen by model-name prefix: ``gpt`` -> OpenAI,
    ``gemini`` -> Google, ``meta-llama`` -> DeepInfra. The raw reply is
    whitespace-stripped, logged via ``print_prompt``, and returned.

    Args:
        model: Model identifier whose prefix selects the provider.
        inputs: The prompt/messages payload forwarded to the provider.

    Returns:
        The stripped response text from the selected backend.

    Raises:
        ValueError: If no known prefix matches *model*.
    """
    routes = (
        ("gpt", gpt_chat),
        ("gemini", gemini_chat),
        ("meta-llama", deepinfra_chat),
    )
    for prefix, chat_fn in routes:
        if model.startswith(prefix):
            response = chat_fn(model, inputs).strip()
            break
    else:
        # No route matched the model name.
        raise ValueError("The model is not supported or does not exist.")
    print_prompt(inputs, response)
    return response