远兮 committed on
Commit 92ee09b · 1 Parent(s): 9983729

add sequential chain

Files changed (2)
  1. README.md +1 -0
  2. llms_sequential_chain.ipynb +104 -0
README.md CHANGED
@@ -46,5 +46,6 @@ TODO:
  1. Take a look at ONNXruntime
  2. Look into gptcache in detail
  3. See which open-source models are available on the Hugging Face Hub. Do these large models run in Hugging Face's own environment? If we want to use one, can we call it directly, or do we need to set up our own server environment?
+ 4. Can a PromptTemplate have multiple inputs/outputs, and how is that used? See llms_sequential_chain.ipynb

  Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
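A note on TODO item 4: a PromptTemplate can declare several input_variables, all of which are supplied when the chain is called. The sketch below is illustrative only and assumes the same classic langchain API used in the notebook; the `grade` variable and the English prompt wording are made up for the example.

```python
from langchain.llms import OpenAI
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain

# A template with two input variables; both must be provided at run time.
prompt = PromptTemplate(
    input_variables=["arithmetic", "grade"],
    template="Please write one {grade} arithmetic problem using {arithmetic}; give only the question, no answer.",
)
chain = LLMChain(llm=OpenAI(temperature=0.5), prompt=prompt)

# Multiple inputs are passed as keyword arguments (or as a single dict),
# rather than the single positional string used in the notebook.
print(chain.run(arithmetic="subtraction", grade="fourth-grade"))
```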
llms_sequential_chain.ipynb ADDED
@@ -0,0 +1,104 @@
+ {
+  "cells": [
+   {
+    "cell_type": "code",
+    "execution_count": 26,
+    "metadata": {},
+    "outputs": [],
+    "source": [
+     "from langchain.chains import SimpleSequentialChain\n",
+     "from langchain.llms import OpenAI\n",
+     "from langchain import LLMChain\n",
+     "from langchain import PromptTemplate"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "execution_count": 27,
+    "metadata": {},
+    "outputs": [],
+    "source": [
+     "llm1 = OpenAI(temperature=0.5)\n",
+     "prompt1 = PromptTemplate(\n",
+     "    input_variables=[\"arithmetic\"],\n",
+     "    template=\"Please write one elementary-school arithmetic problem using {arithmetic}; give only the question, no answer.\",\n",
+     ")\n",
+     "chain1 = LLMChain(llm=llm1, prompt=prompt1)"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "execution_count": 28,
+    "metadata": {},
+    "outputs": [],
+    "source": [
+     "llm2 = OpenAI(temperature=0.5)\n",
+     "prompt2 = PromptTemplate(\n",
+     "    input_variables=[\"question\"],\n",
+     "    template=\"{question}, what is the answer?\",\n",
+     ")\n",
+     "chain2 = LLMChain(llm=llm2, prompt=prompt2)"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "execution_count": 29,
+    "metadata": {},
+    "outputs": [
+     {
+      "name": "stdout",
+      "output_type": "stream",
+      "text": [
+       "\n",
+       "\n",
+       "\u001b[1m> Entering new SimpleSequentialChain chain...\u001b[0m\n",
+       "\u001b[36;1m\u001b[1;3m\n",
+       "\n",
+       "6 - 3 = ?\u001b[0m\n",
+       "\u001b[33;1m\u001b[1;3m\n",
+       "\n",
+       "3\u001b[0m\n",
+       "\n",
+       "\u001b[1m> Finished chain.\u001b[0m\n"
+      ]
+     },
+     {
+      "data": {
+       "text/plain": [
+        "'3'"
+       ]
+      },
+      "execution_count": 29,
+      "metadata": {},
+      "output_type": "execute_result"
+     }
+    ],
+    "source": [
+     "overall_chain = SimpleSequentialChain(chains=[chain1, chain2], verbose=True)\n",
+     "overall_chain.run(\"subtraction\").strip()"
+    ]
+   }
+  ],
+  "metadata": {
+   "kernelspec": {
+    "display_name": "base",
+    "language": "python",
+    "name": "python3"
+   },
+   "language_info": {
+    "codemirror_mode": {
+     "name": "ipython",
+     "version": 3
+    },
+    "file_extension": ".py",
+    "mimetype": "text/x-python",
+    "name": "python",
+    "nbconvert_exporter": "python",
+    "pygments_lexer": "ipython3",
+    "version": "3.10.10"
+   },
+   "orig_nbformat": 4
+  },
+  "nbformat": 4,
+  "nbformat_minor": 2
+ }
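For the output side of TODO item 4: SimpleSequentialChain, as used above, passes a single string from chain1 to chain2 and returns only the final string. To get both the generated question and the answer back, the same two chains can be wired with SequentialChain and named output keys. This is a rough sketch under the same assumptions as the notebook (classic langchain API, an OpenAI key in the environment); the key names "question" and "answer" are arbitrary.

```python
from langchain.llms import OpenAI
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain, SequentialChain

llm = OpenAI(temperature=0.5)

# Step 1: generate a problem; the result is stored under the key "question".
question_chain = LLMChain(
    llm=llm,
    prompt=PromptTemplate(
        input_variables=["arithmetic"],
        template="Please write one elementary-school arithmetic problem using {arithmetic}; give only the question, no answer.",
    ),
    output_key="question",
)

# Step 2: answer the generated problem; the result is stored under "answer".
answer_chain = LLMChain(
    llm=llm,
    prompt=PromptTemplate(
        input_variables=["question"],
        template="{question}, what is the answer?",
    ),
    output_key="answer",
)

overall = SequentialChain(
    chains=[question_chain, answer_chain],
    input_variables=["arithmetic"],
    output_variables=["question", "answer"],  # expose the intermediate and the final output
    verbose=True,
)

result = overall({"arithmetic": "subtraction"})
print(result["question"].strip())
print(result["answer"].strip())
```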