|
24 | 24 | "metadata": {}, |
25 | 25 | "outputs": [], |
26 | 26 | "source": [ |
27 | | - "#| hide\n", |
28 | | - "from nbdev.showdoc import *" |
| 27 | + "# | hide\n", |
| 28 | + "%load_ext autoreload\n", |
| 29 | + "%autoreload 2" |
29 | 30 | ] |
30 | 31 | }, |
31 | 32 | { |
|
35 | 36 | "outputs": [], |
36 | 37 | "source": [ |
37 | 38 | "#| export\n", |
| 39 | + "\n", |
38 | 40 | "import os\n", |
39 | 41 | "from litellm import completion\n", |
40 | 42 | "from pydantic import BaseModel" |
41 | 43 | ] |
42 | 44 | }, |
| 45 | + { |
| 46 | + "cell_type": "code", |
| 47 | + "execution_count": null, |
| 48 | + "metadata": {}, |
| 49 | + "outputs": [], |
| 50 | + "source": [ |
| 51 | + "#| hide\n", |
| 52 | + "from nbdev.showdoc import *" |
| 53 | + ] |
| 54 | + }, |
43 | 55 | { |
44 | 56 | "cell_type": "code", |
45 | 57 | "execution_count": null, |
46 | 58 | "metadata": {}, |
47 | 59 | "outputs": [], |
48 | 60 | "source": [ |
49 | 61 | "#| export\n", |
50 | | - "def structured_output(model:str, # Model name, see examples here or LiteLLM docs for complete list\n", |
51 | | - " system_prompt:str, # Instructions for LLM to process the input string\n", |
52 | | - " user_prompt:str, # Input string that will be processed\n", |
53 | | - " response_format:BaseModel # User-defined Pydantic model to define output \n", |
54 | | - " ) -> BaseModel :\n", |
| 62 | + "\n", |
| 63 | + "def structured_output(\n", |
| 64 | + " model:str, # Model name, see examples here or LiteLLM docs for complete list\n", |
| 65 | + " system_prompt:str, # Instructions for LLM to process the input string\n", |
| 66 | + " response_format:BaseModel, # User-defined Pydantic model to define output \n", |
| 67 | + " user_prompt:str, # Input string that will be processed\n", |
| 68 | + ") -> BaseModel:\n", |
| 69 | + " \n", |
| 70 | + " \"\"\"Get structured output from `model` by combining system and user prompts and making the right API call.\n", |
| 71 | + "See [here](https://docs.litellm.ai/docs/completion/json_mode#pass-in-json_schema) for the full list of APIs available.\"\"\"\n", |
| 72 | + " \n", |
55 | 73 | " response = completion(\n", |
56 | 74 | " model=model,\n", |
57 | 75 | " messages=[\n", |
58 | 76 | " {\"role\": \"system\", \"content\": system_prompt},\n", |
59 | | - " {\n", |
60 | | - " \"role\": \"user\",\n", |
61 | | - " \"content\": user_prompt\n", |
62 | | - " }\n", |
| 77 | + " {\"role\": \"user\", \"content\": user_prompt}\n", |
63 | 78 | " ],\n", |
64 | 79 | " response_format=response_format\n", |
65 | 80 | " )\n", |
66 | 81 | " return response_format.model_validate_json(response.choices[0].message.content)" |
67 | 82 | ] |
68 | 83 | }, |
69 | 84 | { |
70 | | - "cell_type": "code", |
71 | | - "execution_count": null, |
| 85 | + "cell_type": "markdown", |
72 | 86 | "metadata": {}, |
73 | | - "outputs": [], |
74 | 87 | "source": [ |
75 | | - "model=\"azure/gpt-4o-2024-08-06\"\n", |
76 | | - "system_prompt = \"Extract the event information.\"" |
| 88 | + "You can control the model that is used for the call by simply adjusting the string in the `model` variable." |
77 | 89 | ] |
78 | 90 | }, |
79 | 91 | { |
|
82 | 94 | "metadata": {}, |
83 | 95 | "outputs": [], |
84 | 96 | "source": [ |
85 | | - "# This defines the output response format that we want\n", |
| 97 | + "model=\"azure/gpt-4o-2024-08-06\" # e.g. openai/gpt-4o-2024-08-06 would use the standard OpenAI API\n", |
| 98 | + "system_prompt = \"Extract the event information.\"\n", |
| 99 | + "\n", |
86 | 100 | "class CalendarEvent(BaseModel):\n", |
87 | 101 | " name: str\n", |
88 | 102 | " date: str\n", |
|
95 | 109 | "metadata": {}, |
96 | 110 | "outputs": [], |
97 | 111 | "source": [ |
98 | | - "user_prompt = \"Alice and Bob are going to Carmen's Birtday party on 22nd March 2025\"\n", |
99 | | - "# Note that the word \"Birthday\" is spelt incorrectly on purpose for this example" |
| 112 | + "user_prompt = \"Alice and Bob are going to Carmen's birthday party on 22nd March 2025\"" |
100 | 113 | ] |
101 | 114 | }, |
102 | 115 | { |
|
108 | 121 | "data": { |
109 | 122 | "text/plain": [ |
110 | 123 | "{'name': \"Carmen's Birthday Party\",\n", |
111 | | - " 'date': '22nd March 2025',\n", |
| 124 | + " 'date': '2025-03-22',\n", |
112 | 125 | " 'participants': ['Alice', 'Bob']}" |
113 | 126 | ] |
114 | 127 | }, |
|
121 | 134 | "#| eval: false\n", |
122 | 135 | "r = structured_output(model=model,\n", |
123 | 136 | " system_prompt=system_prompt,\n", |
124 | | - " user_prompt=user_prompt,\n", |
125 | 137 | " response_format=CalendarEvent, #Note this is the class name (without the `()`)\n", |
| 138 | + " user_prompt=user_prompt,\n", |
126 | 139 | " )\n", |
127 | 140 | "\n", |
128 | 141 | "r.model_dump()" |
|
0 commit comments