# + tags=["hide_inp"]
# NOTE(review): the "+"/"-" lines look like jupytext cell markers, and the
# "hide_inp" tag presumably hides this cell's input in the rendered notebook
# -- confirm against the project's notebook build.
# Markdown description rendered at the top of the demo page.
desc = """
### Backtrack on Failure
Chain that backtracks on failure. [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/srush/MiniChain/blob/master/examples/backtrack.ipynb)
"""
# -
# $
from minichain import prompt, Mock, show, OpenAI
import minichain
@prompt(Mock(["dog", "blue", "cat"]), stream=True)
def prompt_generation(model):
    """Stream a candidate answer, yielding the text accumulated so far.

    The Mock backend is scripted with the responses "dog", "blue", "cat",
    so each invocation produces the next canned candidate.
    """
    text_so_far = ""
    for piece in model.stream(""):
        text_so_far = text_so_far + piece
        yield text_so_far
    # Emit the complete text one final time once streaming has finished.
    yield text_so_far
@prompt(Mock(["No", "Yes"]), template="Answer 'yes' if {{query}} is a color. Answer:", stream=False)
def prompt_validation(model, x):
    """Check whether `x` is a color; return it on success, else backtrack.

    The Mock backend is scripted to answer "No" first and "Yes" second, so
    the first candidate is rejected and the chain backtracks for a new one.

    Args:
        model: the prompt-wrapped model supplied by the decorator.
        x: the candidate string produced by `prompt_generation`.
    """
    out = model(dict(query=x))
    if out.strip().lower().startswith("yes"):
        return x
    # Backtrack one prompt on failure so a fresh candidate is generated.
    return model.fail(1)
def run():
    """Generate a candidate answer, then validate it (backtracking on failure)."""
    candidate = prompt_generation()
    return prompt_validation(candidate)
# $

# Build the Gradio demo. The example code displayed in the UI is this very
# file's text between the two "dollar" cell markers above; read it with a
# context manager so the file handle is closed (the original leaked it).
with open("backtrack.py", "r") as _source:
    _code = _source.read().split("$")[1].strip().strip("#").strip()

gradio = show(
    run,
    examples=[],
    subprompts=[prompt_generation, prompt_validation],
    code=_code,
    out_type="markdown",
)

if __name__ == "__main__":
    gradio.queue().launch()