Skip to content

Commit 8bbdb2c

Browse files
committed
user_prompt at end
1 parent 679fc0b commit 8bbdb2c

6 files changed

Lines changed: 65 additions & 54 deletions

File tree

README.md

Lines changed: 4 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -58,32 +58,29 @@ from pydantic import BaseModel
5858
```
5959

6060
``` python
61-
model="azure/gpt-4o-2024-08-06"
61+
model="azure/gpt-4o-2024-08-06" # e.g. openai/gpt-4o-2024-08-06 would use the standard OpenAI
6262
system_prompt = "Extract the event information."
63-
```
6463

65-
``` python
66-
# This defines the output response format that we want
6764
class CalendarEvent(BaseModel):
6865
name: str
6966
date: str
7067
participants: list[str]
7168
```
7269

7370
``` python
74-
user_prompt = "Alice and Bob are going to Carmen's Birtday party on 22nd March 2025"
71+
user_prompt = "Alice and Bob are going to Carmen's birthday party on 22nd March 2025"
7572
```
7673

7774
``` python
7875
r = structured_output(model=model,
7976
system_prompt=system_prompt,
80-
user_prompt=user_prompt,
8177
response_format=CalendarEvent, #Note this is the class name (without the `()`)
78+
user_prompt=user_prompt,
8279
)
8380

8481
r.model_dump()
8582
```
8683

8784
{'name': "Carmen's Birthday Party",
88-
'date': '22nd March 2025',
85+
'date': '2025-03-22',
8986
'participants': ['Alice', 'Bob']}

litestruct/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,2 @@
1-
__version__ = "0.0.2"
1+
__version__ = "0.0.3"
22
from .core import *

litestruct/core.py

Lines changed: 12 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -10,20 +10,22 @@
1010
from litellm import completion
1111
from pydantic import BaseModel
1212

13-
# %% ../nbs/00_core.ipynb 4
14-
def structured_output(model:str, # Model name, see examples here or LiteLLM docs for complete list
15-
system_prompt:str, # Instructions for LLM to process the input string
16-
user_prompt:str, # Input string that will be processed
17-
response_format:BaseModel # User-defined Pydantic model to define output
18-
) -> BaseModel :
13+
# %% ../nbs/00_core.ipynb 5
14+
def structured_output(
15+
model:str, # Model name, see examples here or LiteLLM docs for complete list
16+
system_prompt:str, # Instructions for LLM to process the input string
17+
response_format:BaseModel, # User-defined Pydantic model to define output
18+
user_prompt:str, # Input string that will be processed
19+
) -> BaseModel:
20+
21+
"""Get structured output from `model` by combining system and user prompts and making the right API call.
22+
See, [here](https://docs.litellm.ai/docs/completion/json_mode#pass-in-json_schema) for full list of APIs available."""
23+
1924
response = completion(
2025
model=model,
2126
messages=[
2227
{"role": "system", "content": system_prompt},
23-
{
24-
"role": "user",
25-
"content": user_prompt
26-
}
28+
{"role": "user", "content": user_prompt}
2729
],
2830
response_format=response_format
2931
)

nbs/00_core.ipynb

Lines changed: 34 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -24,8 +24,9 @@
2424
"metadata": {},
2525
"outputs": [],
2626
"source": [
27-
"#| hide\n",
28-
"from nbdev.showdoc import *"
27+
"# | hide\n",
28+
"%load_ext autoreload\n",
29+
"%autoreload 2"
2930
]
3031
},
3132
{
@@ -35,45 +36,56 @@
3536
"outputs": [],
3637
"source": [
3738
"#| export\n",
39+
"\n",
3840
"import os\n",
3941
"from litellm import completion\n",
4042
"from pydantic import BaseModel"
4143
]
4244
},
45+
{
46+
"cell_type": "code",
47+
"execution_count": null,
48+
"metadata": {},
49+
"outputs": [],
50+
"source": [
51+
"#| hide\n",
52+
"from nbdev.showdoc import *"
53+
]
54+
},
4355
{
4456
"cell_type": "code",
4557
"execution_count": null,
4658
"metadata": {},
4759
"outputs": [],
4860
"source": [
4961
"#| export\n",
50-
"def structured_output(model:str, # Model name, see examples here or LiteLLM docs for complete list\n",
51-
" system_prompt:str, # Instructions for LLM to process the input string\n",
52-
" user_prompt:str, # Input string that will be processed\n",
53-
" response_format:BaseModel # User-defined Pydantic model to define output \n",
54-
" ) -> BaseModel :\n",
62+
"\n",
63+
"def structured_output(\n",
64+
" model:str, # Model name, see examples here or LiteLLM docs for complete list\n",
65+
" system_prompt:str, # Instructions for LLM to process the input string\n",
66+
" response_format:BaseModel, # User-defined Pydantic model to define output \n",
67+
" user_prompt:str, # Input string that will be processed\n",
68+
") -> BaseModel:\n",
69+
" \n",
70+
" \"\"\"Get structured output from `model` by combining system and user prompts and making the right API call.\n",
71+
" See, [here](https://docs.litellm.ai/docs/completion/json_mode#pass-in-json_schema) for full list of APIs available.\"\"\"\n",
72+
" \n",
5573
" response = completion(\n",
5674
" model=model,\n",
5775
" messages=[\n",
5876
" {\"role\": \"system\", \"content\": system_prompt},\n",
59-
" {\n",
60-
" \"role\": \"user\",\n",
61-
" \"content\": user_prompt\n",
62-
" }\n",
77+
" {\"role\": \"user\", \"content\": user_prompt}\n",
6378
" ],\n",
6479
" response_format=response_format\n",
6580
" )\n",
6681
" return response_format.model_validate_json(response.choices[0].message.content)"
6782
]
6883
},
6984
{
70-
"cell_type": "code",
71-
"execution_count": null,
85+
"cell_type": "markdown",
7286
"metadata": {},
73-
"outputs": [],
7487
"source": [
75-
"model=\"azure/gpt-4o-2024-08-06\"\n",
76-
"system_prompt = \"Extract the event information.\""
88+
"You can control the model that is used for the call by simply adjusting the string in the `model` variable."
7789
]
7890
},
7991
{
@@ -82,7 +94,9 @@
8294
"metadata": {},
8395
"outputs": [],
8496
"source": [
85-
"# This defines the output response format that we want\n",
97+
"model=\"azure/gpt-4o-2024-08-06\" # e.g. openai/gpt-4o-2024-08-06 would use the standard OpenAI\n",
98+
"system_prompt = \"Extract the event information.\"\n",
99+
"\n",
86100
"class CalendarEvent(BaseModel):\n",
87101
" name: str\n",
88102
" date: str\n",
@@ -95,8 +109,7 @@
95109
"metadata": {},
96110
"outputs": [],
97111
"source": [
98-
"user_prompt = \"Alice and Bob are going to Carmen's Birtday party on 22nd March 2025\"\n",
99-
"# Note that the word \"Birthday\" is spelt incorrectly on purpose for this example"
112+
"user_prompt = \"Alice and Bob are going to Carmen's birthday party on 22nd March 2025\""
100113
]
101114
},
102115
{
@@ -108,7 +121,7 @@
108121
"data": {
109122
"text/plain": [
110123
"{'name': \"Carmen's Birthday Party\",\n",
111-
" 'date': '22nd March 2025',\n",
124+
" 'date': '2025-03-22',\n",
112125
" 'participants': ['Alice', 'Bob']}"
113126
]
114127
},
@@ -121,8 +134,8 @@
121134
"#| eval: false\n",
122135
"r = structured_output(model=model,\n",
123136
" system_prompt=system_prompt,\n",
124-
" user_prompt=user_prompt,\n",
125137
" response_format=CalendarEvent, #Note this is the class name (without the `()`)\n",
138+
" user_prompt=user_prompt,\n",
126139
" )\n",
127140
"\n",
128141
"r.model_dump()"

nbs/index.ipynb

Lines changed: 13 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -142,17 +142,9 @@
142142
"metadata": {},
143143
"outputs": [],
144144
"source": [
145-
"model=\"azure/gpt-4o-2024-08-06\"\n",
146-
"system_prompt = \"Extract the event information.\""
147-
]
148-
},
149-
{
150-
"cell_type": "code",
151-
"execution_count": null,
152-
"metadata": {},
153-
"outputs": [],
154-
"source": [
155-
"# This defines the output response format that we want\n",
145+
"model=\"azure/gpt-4o-2024-08-06\" # e.g. openai/gpt-4o-2024-08-06 would use the standard OpenAI\n",
146+
"system_prompt = \"Extract the event information.\"\n",
147+
"\n",
156148
"class CalendarEvent(BaseModel):\n",
157149
" name: str\n",
158150
" date: str\n",
@@ -165,7 +157,7 @@
165157
"metadata": {},
166158
"outputs": [],
167159
"source": [
168-
"user_prompt = \"Alice and Bob are going to Carmen's Birtday party on 22nd March 2025\""
160+
"user_prompt = \"Alice and Bob are going to Carmen's birthday party on 22nd March 2025\""
169161
]
170162
},
171163
{
@@ -177,7 +169,7 @@
177169
"data": {
178170
"text/plain": [
179171
"{'name': \"Carmen's Birthday Party\",\n",
180-
" 'date': '22nd March 2025',\n",
172+
" 'date': '2025-03-22',\n",
181173
" 'participants': ['Alice', 'Bob']}"
182174
]
183175
},
@@ -190,12 +182,19 @@
190182
"#| eval: false\n",
191183
"r = structured_output(model=model,\n",
192184
" system_prompt=system_prompt,\n",
193-
" user_prompt=user_prompt,\n",
194185
" response_format=CalendarEvent, #Note this is the class name (without the `()`)\n",
186+
" user_prompt=user_prompt,\n",
195187
" )\n",
196188
"\n",
197189
"r.model_dump()"
198190
]
191+
},
192+
{
193+
"cell_type": "code",
194+
"execution_count": null,
195+
"metadata": {},
196+
"outputs": [],
197+
"source": []
199198
}
200199
],
201200
"metadata": {

settings.ini

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
[DEFAULT]
22
repo = litestruct
33
lib_name = litestruct
4-
version = 0.0.2
4+
version = 0.0.3
55
min_python = 3.9
66
license = apache2
77
black_formatting = False

0 commit comments

Comments
 (0)