{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"id": "158928f5",
"metadata": {},
"outputs": [],
"source": [
"# Setup (Colab): install MiniChain and copy the example assets into the cwd.\n",
"# %pip (not !pip) ensures the install targets the running kernel's environment.\n",
"%pip install -q git+https://github.com/srush/MiniChain\n",
"!git clone https://github.com/srush/MiniChain; cp -fr MiniChain/examples/* . "
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "b7be4150",
"metadata": {
"tags": [
"hide_inp"
]
},
"outputs": [],
"source": [
"# Markdown description rendered at the top of the demo page (includes Colab badge).\n",
"desc = \"\"\"\n",
"### Backtrack on Failure\n",
"\n",
"Chain that backtracks on failure. [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/srush/MiniChain/blob/master/examples/backtrack.ipynb)\n",
"\n",
"\"\"\""
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "d9a87050",
"metadata": {
"lines_to_next_cell": 1
},
"outputs": [],
"source": [
"# MiniChain pieces used below: the @prompt decorator, the Mock backend,\n",
"# the Gradio `show` helper, and the OpenAI backend.\n",
"from minichain import prompt, Mock, show, OpenAI\n",
"import minichain"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "c11692a3",
"metadata": {
"lines_to_next_cell": 1
},
"outputs": [],
"source": [
"# Generation step: the Mock backend serves candidates from a fixed list.\n",
"@prompt(Mock([\"dog\", \"blue\", \"cat\"]))\n",
"def prompt_generation(model):\n",
"    \"\"\"Return the next candidate string from the mock backend.\"\"\"\n",
"    candidate = model(\"\")\n",
"    return candidate"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "2035f083",
"metadata": {
"lines_to_next_cell": 1
},
"outputs": [],
"source": [
"# Validation step. NOTE: original template read \"Answer 'yes' is {{query}} is\n",
"# a color\" -- the first \"is\" was a typo for \"if\", fixed below.\n",
"@prompt(OpenAI(), template=\"Answer 'yes' if {{query}} is a color. Answer:\")\n",
"def prompt_validation(model, x):\n",
"    \"\"\"Return x if the model judges it to be a color; otherwise backtrack.\"\"\"\n",
"    out = model(dict(query=x))\n",
"    if out.strip().lower().startswith(\"yes\"):\n",
"        return x\n",
"    # presumably fail(1) rewinds one step in the chain -- confirm in MiniChain docs\n",
"    return model.fail(1)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "2b9caa2b",
"metadata": {
"lines_to_next_cell": 1
},
"outputs": [],
"source": [
"def run():\n",
"    \"\"\"Compose the chain: generate a candidate, then validate it.\"\"\"\n",
"    candidate = prompt_generation()\n",
"    return prompt_validation(candidate)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "d825edb7",
"metadata": {},
"outputs": [],
"source": [
"# Build the Gradio demo: `show` wires `run` into a UI and renders each\n",
"# sub-prompt's intermediate result; final output is displayed as markdown.\n",
"gradio = show(run,\n",
"              examples = [],\n",
"              subprompts=[prompt_generation, prompt_validation],\n",
"              out_type=\"markdown\"\n",
"              )"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "1eac94d8",
"metadata": {},
"outputs": [],
"source": [
"# Launch the Gradio app when executed as a script (no-op on import).\n",
"if __name__ == \"__main__\":\n",
"    gradio.launch()"
]
}
],
"metadata": {
"jupytext": {
"cell_metadata_filter": "tags,-all",
"main_language": "python",
"notebook_metadata_filter": "-all"
}
},
"nbformat": 4,
"nbformat_minor": 5
}