远兮 committed
Commit dccc64a · 1 Parent(s): fdb9357

add agent tools

agent_tools_wolfram.ipynb ADDED
@@ -0,0 +1,65 @@
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 1,
6
+ "metadata": {},
7
+ "outputs": [],
8
+ "source": [
9
+ "from langchain.utilities.wolfram_alpha import WolframAlphaAPIWrapper"
10
+ ]
11
+ },
12
+ {
13
+ "cell_type": "code",
14
+ "execution_count": 2,
15
+ "metadata": {},
16
+ "outputs": [],
17
+ "source": [
18
+ "wolfram = WolframAlphaAPIWrapper()"
19
+ ]
20
+ },
21
+ {
22
+ "cell_type": "code",
23
+ "execution_count": 4,
24
+ "metadata": {},
25
+ "outputs": [
26
+ {
27
+ "data": {
28
+ "text/plain": [
29
+ "\"Wolfram Alpha wasn't able to answer it\""
30
+ ]
31
+ },
32
+ "execution_count": 4,
33
+ "metadata": {},
34
+ "output_type": "execute_result"
35
+ }
36
+ ],
37
+ "source": [
38
+ "# wolfram.run(\"What is 2x+5 = -3x + 7?\")\n",
39
+ "wolfram.run(\"同学们进行广播操比赛,全班正好排成相等的6行。小红排在第二行,从头数,她站在第5个位置,从后数她站在第3个位置,这个班共有( )人\")"
40
+ ]
41
+ }
42
+ ],
43
+ "metadata": {
44
+ "kernelspec": {
45
+ "display_name": "base",
46
+ "language": "python",
47
+ "name": "python3"
48
+ },
49
+ "language_info": {
50
+ "codemirror_mode": {
51
+ "name": "ipython",
52
+ "version": 3
53
+ },
54
+ "file_extension": ".py",
55
+ "mimetype": "text/x-python",
56
+ "name": "python",
57
+ "nbconvert_exporter": "python",
58
+ "pygments_lexer": "ipython3",
59
+ "version": "3.10.10"
60
+ },
61
+ "orig_nbformat": 4
62
+ },
63
+ "nbformat": 4,
64
+ "nbformat_minor": 2
65
+ }
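The new notebook only calls the wrapper directly. Since the commit is about agent tools, a minimal sketch of exposing the same wrapper to an agent might look like this; the `WOLFRAM_ALPHA_APPID` environment variable and the agent wiring are assumptions, not part of this diff.

```python
import os

from langchain.agents import AgentType, Tool, initialize_agent
from langchain.llms import OpenAI
from langchain.utilities.wolfram_alpha import WolframAlphaAPIWrapper

# Assumption: the wrapper reads the Wolfram Alpha app id from the environment.
os.environ["WOLFRAM_ALPHA_APPID"] = "your-app-id"

wolfram = WolframAlphaAPIWrapper()

# Wrap the same wolfram.run used in the notebook as an agent tool.
tools = [
    Tool(
        name="wolfram-alpha",
        func=wolfram.run,
        description="Useful for math and science questions phrased in plain language.",
    )
]

llm = OpenAI(temperature=0)
agent = initialize_agent(
    tools, llm, agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION, verbose=True
)
agent.run("What is 2x + 5 = -3x + 7?")
```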
chain_bash.ipynb CHANGED
@@ -2,7 +2,7 @@
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
- "execution_count": 2,
6
  "metadata": {},
7
  "outputs": [
8
  {
@@ -12,23 +12,99 @@
12
  "\n",
13
  "\n",
14
  "\u001b[1m> Entering new LLMBashChain chain...\u001b[0m\n",
15
- "重命名,把chain_bash.ipynb重命名为chain_bash_auto.ipynb\u001b[32;1m\u001b[1;3m\n",
16
  "\n",
17
  "```bash\n",
18
- "mv chain_bash.ipynb chain_bash_auto.ipynb\n",
19
  "```\u001b[0m\n",
20
- "Code: \u001b[33;1m\u001b[1;3m['mv chain_bash.ipynb chain_bash_auto.ipynb']\u001b[0m\n",
21
- "Answer: \u001b[33;1m\u001b[1;3m\u001b[0m\n",
22
  "\u001b[1m> Finished chain.\u001b[0m\n"
23
  ]
24
  },
25
  {
26
  "data": {
27
  "text/plain": [
28
- "''"
29
  ]
30
  },
31
- "execution_count": 2,
32
  "metadata": {},
33
  "output_type": "execute_result"
34
  }
@@ -40,7 +116,9 @@
40
  "llm = OpenAI(temperature=0)\n",
41
  "\n",
42
  "# text = \"查看当前目录下的文件列表,过滤出以chain开头的文件\"\n",
43
- "text = \"重命名,把chain_bash.ipynb重命名为chain_bash_auto.ipynb\"\n",
44
  "\n",
45
  "bash_chain = LLMBashChain.from_llm(llm, verbose=True)\n",
46
  "\n",
 
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
+ "execution_count": 4,
6
  "metadata": {},
7
  "outputs": [
8
  {
 
12
  "\n",
13
  "\n",
14
  "\u001b[1m> Entering new LLMBashChain chain...\u001b[0m\n",
15
+ "找出当前目录下,文件名中包含serp的文件\u001b[32;1m\u001b[1;3m\n",
16
  "\n",
17
  "```bash\n",
18
+ "ls\n",
19
+ "grep -r \"serp\" *\n",
20
  "```\u001b[0m\n",
21
+ "Code: \u001b[33;1m\u001b[1;3m['ls', 'grep -r \"serp\" *']\u001b[0m\n",
22
+ "Answer: \u001b[33;1m\u001b[1;3mREADME.md\n",
23
+ "\u001b[34m__pycache__\u001b[m\u001b[m\n",
24
+ "anthropic_simple.ipynb\n",
25
+ "app.py\n",
26
+ "chain_api.ipynb\n",
27
+ "chain_bash.ipynb\n",
28
+ "chain_checker.ipynb\n",
29
+ "chain_constitutional.ipynb\n",
30
+ "chain_constitutional_prompts_cn.py\n",
31
+ "chain_input_tool_schema.ipynb\n",
32
+ "chain_load_json.ipynb\n",
33
+ "chain_math.ipynb\n",
34
+ "chain_moderation.ipynb\n",
35
+ "chain_pal.ipynb\n",
36
+ "chain_request_html.ipynb\n",
37
+ "chain_save_json.ipynb\n",
38
+ "chain_summarize_map_reduce.ipynb\n",
39
+ "chain_transform.ipynb\n",
40
+ "data_map_0.txt\n",
41
+ "faiss.index\n",
42
+ "\u001b[34mflagged\u001b[m\u001b[m\n",
43
+ "index_bilibili.ipynb\n",
44
+ "index_csv_loader.ipynb\n",
45
+ "index_huggingface_datasets.ipynb\n",
46
+ "index_image_caption.ipynb\n",
47
+ "index_start.ipynb\n",
48
+ "index_url_loader.ipynb\n",
49
+ "index_web_base.ipynb\n",
50
+ "index_youtube.ipynb\n",
51
+ "\u001b[34mindexes\u001b[m\u001b[m\n",
52
+ "llms_asyncio.py\n",
53
+ "llms_cache.py\n",
54
+ "llms_cache_gpt.py\n",
55
+ "llms_cache_gpt_similarity.py\n",
56
+ "llms_cache_option.ipynb\n",
57
+ "llms_cache_option.py\n",
58
+ "llms_cache_option_chain.ipynb\n",
59
+ "llms_fake.py\n",
60
+ "llms_openai.py\n",
61
+ "llms_prompt_layer.ipynb\n",
62
+ "llms_semantic_similarity.ipynb\n",
63
+ "llms_sequential_chain.ipynb\n",
64
+ "llms_serialization.ipynb\n",
65
+ "llms_streaming.ipynb\n",
66
+ "memory_kg.ipynb\n",
67
+ "memory_predict_with_history.ipynb\n",
68
+ "memory_start.ipynb\n",
69
+ "memory_summary_buffer.ipynb\n",
70
+ "openai_agent.py\n",
71
+ "openai_chat\n",
72
+ "openai_chat_agent.py\n",
73
+ "openai_chat_prompt_template.py\n",
74
+ "openai_conversation_chain.py\n",
75
+ "openai_prompt_template.py\n",
76
+ "openai_simple.py\n",
77
+ "openai_track_usage.ipynb\n",
78
+ "parser_fix_output.ipynb\n",
79
+ "parser_list_output.ipynb\n",
80
+ "parser_pydantic_output.ipynb\n",
81
+ "parser_reponse_schema.ipynb\n",
82
+ "prompt_custom_example_selector.ipynb\n",
83
+ "prompt_load.ipynb\n",
84
+ "prompt_partial_template.ipynb.ipynb\n",
85
+ "\u001b[34mprompts\u001b[m\u001b[m\n",
86
+ "prompts_relevance_example_selector.ipynb\n",
87
+ "requirements.txt\n",
88
+ "retriever_chatgpt.ipynb\n",
89
+ "socket_client.py\n",
90
+ "socket_server.py\n",
91
+ "sqlite.db\n",
92
+ "test.py\n",
93
+ "\u001b[34mtxt\u001b[m\u001b[m\n",
94
+ "llms_serialization.ipynb: \"tools = load_tools([\\\"serpapi\\\", \\\"llm-math\\\"], llm=llm)\\n\",\n",
95
+ "openai_agent.py:tools = load_tools([\"serpapi\", \"llm-math\"], llm=llm)\n",
96
+ "openai_chat_agent.py:tools = load_tools([\"serpapi\", \"llm-math\"], llm=llm)\n",
97
+ "\u001b[0m\n",
98
  "\u001b[1m> Finished chain.\u001b[0m\n"
99
  ]
100
  },
101
  {
102
  "data": {
103
  "text/plain": [
104
+ "'README.md\\n\\x1b[34m__pycache__\\x1b[m\\x1b[m\\nanthropic_simple.ipynb\\napp.py\\nchain_api.ipynb\\nchain_bash.ipynb\\nchain_checker.ipynb\\nchain_constitutional.ipynb\\nchain_constitutional_prompts_cn.py\\nchain_input_tool_schema.ipynb\\nchain_load_json.ipynb\\nchain_math.ipynb\\nchain_moderation.ipynb\\nchain_pal.ipynb\\nchain_request_html.ipynb\\nchain_save_json.ipynb\\nchain_summarize_map_reduce.ipynb\\nchain_transform.ipynb\\ndata_map_0.txt\\nfaiss.index\\n\\x1b[34mflagged\\x1b[m\\x1b[m\\nindex_bilibili.ipynb\\nindex_csv_loader.ipynb\\nindex_huggingface_datasets.ipynb\\nindex_image_caption.ipynb\\nindex_start.ipynb\\nindex_url_loader.ipynb\\nindex_web_base.ipynb\\nindex_youtube.ipynb\\n\\x1b[34mindexes\\x1b[m\\x1b[m\\nllms_asyncio.py\\nllms_cache.py\\nllms_cache_gpt.py\\nllms_cache_gpt_similarity.py\\nllms_cache_option.ipynb\\nllms_cache_option.py\\nllms_cache_option_chain.ipynb\\nllms_fake.py\\nllms_openai.py\\nllms_prompt_layer.ipynb\\nllms_semantic_similarity.ipynb\\nllms_sequential_chain.ipynb\\nllms_serialization.ipynb\\nllms_streaming.ipynb\\nmemory_kg.ipynb\\nmemory_predict_with_history.ipynb\\nmemory_start.ipynb\\nmemory_summary_buffer.ipynb\\nopenai_agent.py\\nopenai_chat\\nopenai_chat_agent.py\\nopenai_chat_prompt_template.py\\nopenai_conversation_chain.py\\nopenai_prompt_template.py\\nopenai_simple.py\\nopenai_track_usage.ipynb\\nparser_fix_output.ipynb\\nparser_list_output.ipynb\\nparser_pydantic_output.ipynb\\nparser_reponse_schema.ipynb\\nprompt_custom_example_selector.ipynb\\nprompt_load.ipynb\\nprompt_partial_template.ipynb.ipynb\\n\\x1b[34mprompts\\x1b[m\\x1b[m\\nprompts_relevance_example_selector.ipynb\\nrequirements.txt\\nretriever_chatgpt.ipynb\\nsocket_client.py\\nsocket_server.py\\nsqlite.db\\ntest.py\\n\\x1b[34mtxt\\x1b[m\\x1b[m\\nllms_serialization.ipynb: \"tools = load_tools([\\\\\"serpapi\\\\\", \\\\\"llm-math\\\\\"], llm=llm)\\\\n\",\\nopenai_agent.py:tools = load_tools([\"serpapi\", \"llm-math\"], llm=llm)\\nopenai_chat_agent.py:tools = load_tools([\"serpapi\", \"llm-math\"], llm=llm)\\n'"
105
  ]
106
  },
107
+ "execution_count": 4,
108
  "metadata": {},
109
  "output_type": "execute_result"
110
  }
 
116
  "llm = OpenAI(temperature=0)\n",
117
  "\n",
118
  "# text = \"查看当前目录下的文件列表,过滤出以chain开头的文件\"\n",
119
+ "# text = \"重命名,把chain_bash.ipynb重命名为chain_bash_auto.ipynb\"\n",
120
+ "# text = \"找出当前目录下,文件名中包含html的文件\"\n",
121
+ "text = \"找出当前目录下,文件名中包含serp的文件\"\n",
122
  "\n",
123
  "bash_chain = LLMBashChain.from_llm(llm, verbose=True)\n",
124
  "\n",
chain_constitutional.ipynb CHANGED
@@ -132,8 +132,25 @@
132
  "\n",
133
  "\u001b[0m\u001b[33;1m\u001b[1;3mUpdated response: 自杀是一种不可接受的行为,应该尽量避免。如果有学生出现自杀的想法,应该尽快寻求专业的心理咨询帮助,并及时向学校或家长汇报,以确保学生的安全。自杀是一种违反道德和法律的行为,应该被严厉打击。\n",
134
  "\n",
135
- "\u001b[0m"
136
  ]
137
  }
138
  ],
139
  "source": [
 
132
  "\n",
133
  "\u001b[0m\u001b[33;1m\u001b[1;3mUpdated response: 自杀是一种不可接受的行为,应该尽量避免。如果有学生出现自杀的想法,应该尽快寻求专业的心理咨询帮助,并及时向学校或家长汇报,以确保学生的安全。自杀是一种违反道德和法律的行为,应该被严厉打击。\n",
134
  "\n",
135
+ "\u001b[0m\u001b[32;1m\u001b[1;3mApplying 爸爸妈妈原则...\n",
136
+ "\n",
137
+ "\u001b[0m\u001b[36;1m\u001b[1;3mCritique: 模型的回答可能会被解释为父母的口吻,而不是一位老师的口吻。模型应该更加客观,更加专业,并且更加关注学生的安全和健康,而不是强调自杀是一种不可接受的行为。\n",
138
+ "\n",
139
+ "\u001b[0m\u001b[33;1m\u001b[1;3mUpdated response: 如果有学生出现自杀的想法,作为老师,我们应该尽快寻求专业的心理咨询帮助,并及时向学校或家长汇报,以确保学生的安全。我们应该以耐心、和蔼可亲的方式与学生沟通,尊重他们的想法,并帮助他们找到更好的解决方案。\n",
140
+ "\n",
141
+ "\u001b[0m\n",
142
+ "\u001b[1m> Finished chain.\u001b[0m\n"
143
  ]
144
+ },
145
+ {
146
+ "data": {
147
+ "text/plain": [
148
+ "'如果有学生出现自杀的想法,作为老师,我们应该尽快寻求专业的心理咨询帮助,并及时向学校或家长汇报,以确保学生的安全。我们应该以耐心、和蔼可亲的方式与学生沟通,尊重他们的想法,并帮助他们找到更好的解决方案。'"
149
+ ]
150
+ },
151
+ "execution_count": 10,
152
+ "metadata": {},
153
+ "output_type": "execute_result"
154
  }
155
  ],
156
  "source": [
chain_pal.ipynb CHANGED
@@ -68,6 +68,45 @@
68
  "# question = \"同学们进行广播操比赛,全班正好排成相等的6行。小红排在第二行,从头数,她站在第5个位置,从后数她站在第3个位置,这个班共有( )人\"\n",
69
  "pal_chain.run(question)"
70
  ]
71
  }
72
  ],
73
  "metadata": {
 
68
  "# question = \"同学们进行广播操比赛,全班正好排成相等的6行。小红排在第二行,从头数,她站在第5个位置,从后数她站在第3个位置,这个班共有( )人\"\n",
69
  "pal_chain.run(question)"
70
  ]
71
+ },
72
+ {
73
+ "cell_type": "code",
74
+ "execution_count": 1,
75
+ "metadata": {},
76
+ "outputs": [],
77
+ "source": [
78
+ "from langchain.agents import Tool\n",
79
+ "from langchain.utilities import PythonREPL"
80
+ ]
81
+ },
82
+ {
83
+ "cell_type": "code",
84
+ "execution_count": 2,
85
+ "metadata": {},
86
+ "outputs": [],
87
+ "source": [
88
+ "python_repl = PythonREPL()"
89
+ ]
90
+ },
91
+ {
92
+ "cell_type": "code",
93
+ "execution_count": 10,
94
+ "metadata": {},
95
+ "outputs": [
96
+ {
97
+ "data": {
98
+ "text/plain": [
99
+ "'2\\n'"
100
+ ]
101
+ },
102
+ "execution_count": 10,
103
+ "metadata": {},
104
+ "output_type": "execute_result"
105
+ }
106
+ ],
107
+ "source": [
108
+ "python_repl.run(\"print(1+1)\")"
109
+ ]
110
  }
111
  ],
112
  "metadata": {
chain_request_html.ipynb CHANGED
@@ -2,7 +2,7 @@
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
- "execution_count": 5,
6
  "metadata": {},
7
  "outputs": [],
8
  "source": [
@@ -12,7 +12,7 @@
12
  },
13
  {
14
  "cell_type": "code",
15
- "execution_count": 6,
16
  "metadata": {},
17
  "outputs": [],
18
  "source": [
@@ -33,7 +33,7 @@
33
  },
34
  {
35
  "cell_type": "code",
36
- "execution_count": 7,
37
  "metadata": {},
38
  "outputs": [],
39
  "source": [
@@ -42,7 +42,7 @@
42
  },
43
  {
44
  "cell_type": "code",
45
- "execution_count": 8,
46
  "metadata": {},
47
  "outputs": [],
48
  "source": [
@@ -55,7 +55,7 @@
55
  },
56
  {
57
  "cell_type": "code",
58
- "execution_count": 9,
59
  "metadata": {},
60
  "outputs": [
61
  {
@@ -63,7 +63,29 @@
63
  "text/plain": [
64
  "{'query': '今天是日期是?',\n",
65
  " 'url': 'https://www.google.com/search?q=今天是日期是?',\n",
66
- " 'output': '2023年5月4日星期四'}"
67
  ]
68
  },
69
  "execution_count": 9,
@@ -72,8 +94,35 @@
72
  }
73
  ],
74
  "source": [
75
  "chain(inputs)"
76
  ]
77
  }
78
  ],
79
  "metadata": {
 
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
+ "execution_count": 2,
6
  "metadata": {},
7
  "outputs": [],
8
  "source": [
 
12
  },
13
  {
14
  "cell_type": "code",
15
+ "execution_count": 3,
16
  "metadata": {},
17
  "outputs": [],
18
  "source": [
 
33
  },
34
  {
35
  "cell_type": "code",
36
+ "execution_count": 4,
37
  "metadata": {},
38
  "outputs": [],
39
  "source": [
 
42
  },
43
  {
44
  "cell_type": "code",
45
+ "execution_count": 5,
46
  "metadata": {},
47
  "outputs": [],
48
  "source": [
 
55
  },
56
  {
57
  "cell_type": "code",
58
+ "execution_count": 6,
59
  "metadata": {},
60
  "outputs": [
61
  {
 
63
  "text/plain": [
64
  "{'query': '今天是日期是?',\n",
65
  " 'url': 'https://www.google.com/search?q=今天是日期是?',\n",
66
+ " 'output': '2023年5月6日星期六'}"
67
+ ]
68
+ },
69
+ "execution_count": 6,
70
+ "metadata": {},
71
+ "output_type": "execute_result"
72
+ }
73
+ ],
74
+ "source": [
75
+ "chain(inputs)"
76
+ ]
77
+ },
78
+ {
79
+ "cell_type": "code",
80
+ "execution_count": 9,
81
+ "metadata": {},
82
+ "outputs": [
83
+ {
84
+ "data": {
85
+ "text/plain": [
86
+ "{'query': '执行decode后,找出所有中文',\n",
87
+ " 'url': 'https://i.zuoyebang.cc/odsv2/#/admin/weekly-stats',\n",
88
+ " 'output': ' not found'}"
89
  ]
90
  },
91
  "execution_count": 9,
 
94
  }
95
  ],
96
  "source": [
97
+ "question = \"执行decode后,找出所有中文\"\n",
98
+ "inputs = {\n",
99
+ " \"query\": question,\n",
100
+ " \"url\": \"https://i.zuoyebang.cc/odsv2/#/admin/weekly-stats\"\n",
101
+ "}\n",
102
  "chain(inputs)"
103
  ]
104
+ },
105
+ {
106
+ "cell_type": "code",
107
+ "execution_count": 7,
108
+ "metadata": {},
109
+ "outputs": [
110
+ {
111
+ "data": {
112
+ "text/plain": [
113
+ "'<!DOCTYPE html><html lang=\"zh\"><head><meta charset=\"UTF-8\"><meta name=\"viewport\" content=\"width=device-width,initial-scale=1\"><meta name=\"theme-color\" content=\"#028bff\" media=\"(prefers-color-scheme: light)\"><link rel=\"icon\" href=\"//www.zuoyebang.cc/favicon.ico\"><title>ç\\x9b®æ\\xa0\\x87管ç\\x90\\x86å¹³å\\x8f°</title><script src=\"./static/file/js-cookie.js\"></script><script>// (function() {\\n // var Cookies = window.Cookies;\\n // if (Cookies) {\\n // if (Cookies.get(\\'odsv2\\') === \\'0\\' && window.location.href.indexOf(\\'/odsv2\\') > -1) {\\n // // 主å\\x8a¨é\\x80\\x89æ\\x8b©ä½¿ç\\x94¨æ\\x97§ç\\x89\\x88\\n // window.location.href = \\'/\\';\\n // }\\n // }\\n // })();</script><script>// æ\\x80§è\\x83½ç\\x9b\\x91æ\\x8e§\\n (function() {\\n let apmAppId = 10000125;\\n if (window.location.host.indexOf(\\'zuoyebang.cc\\') > -1) {\\n // 线ä¸\\x8a\\n apmAppId = 10000122;\\n }\\n window.apmAppId = apmAppId;\\n })();</script><script>(function(win, export_obj) {\\n win[\\'TeaAnalyticsObject\\'] = export_obj;\\n if (!win[export_obj]) {\\n function _collect() {\\n _collect.q.push(arguments);\\n }\\n _collect.q = _collect.q || [];\\n win[export_obj] = _collect;\\n }\\n win[export_obj].l = +new Date();\\n })(window, \\'collectEvent\\');</script><script async src=\"https://sf1-scmcdn-tos.pstatp.com/goofy/log-sdk/collect/collect-autotrack-rangers-v4.1.49.js\"></script><script>if (!window.globalThis) {\\n // å\\x85¼å®¹ä½\\x8eç\\x89\\x88æ\\x9c¬æµ\\x8fè§\\x88å\\x99¨ä¸\\x8dæ\\x94¯æ\\x8c\\x81globalThisç\\x9a\\x84æ\\x83\\x85å\\x86µ\\n window.globalThis = window;\\n }</script><script>(function() {\\n // dataFinder\\n window.collectEvent(\\'init\\', {\\n app_id: window.apmAppId,\\n channel_domain: \\'https://apm.zuoyebang.com\\', // æ\\x9b¿æ\\x8d¢ä¸ºç§\\x81æ\\x9c\\x89é\\x83¨ç½²é\\x83¨ç½²æ\\x9c\\x8då\\x8a¡å\\x9c°å\\x9d\\x80,\\n disable_sdk_monitor: true, //ç\\x94¨äº\\x8eç¦\\x81æ\\xad¢SDKå\\x90¯å\\x8a¨å\\x90\\x8eè\\x87ªèº«ç\\x9b\\x91æ\\x8e§äº\\x8b件 onload ç\\x9a\\x84ä¸\\x8aæ\\x8a¥\\n log: false,\\n });\\n window.collectEvent(\\'start\\');\\n\\n // apmInsight\\n // window.RangersSiteSDK(\"config\", {\\n // app_id: window.apmAppId,// è¿\\x99é\\x87\\x8cå\\x86\\x99å\\x85¥å\\x9c¨å¹³å\\x8f°ä¸\\x8a注å\\x86\\x8cç\\x9a\\x84aid\\n // pid: window.location.href,\\n // // user_unique_id: \\'\\',\\n // serverDomain: \\'https://apm.zuoyebang.com\\', // è¿\\x99é\\x87\\x8cæ\\x8c\\x87å®\\x9aä¸\\x8aæ\\x8a¥å\\x9f\\x9få\\x90\\x8dï¼\\x8cå\\x90¦å\\x88\\x99apmInsightç\\x9c\\x8bä¸\\x8då\\x88°æ\\x95°æ\\x8d®\\n // context: {\\n // },\\n // });\\n\\n })();\\n // 1546</script><link href=\"/odsv2/static/okr/css/chunk-zybpcUI.87bd49cd.css\" rel=\"stylesheet\"><link href=\"/odsv2/static/okr/css/chunk-libs.cf218c77.css\" rel=\"stylesheet\"><link href=\"/odsv2/static/okr/css/okr-app.e9bf765d.css\" rel=\"stylesheet\"></head><body><div id=\"app\"></div><script src=\"/odsv2/static/okr/js/chunk-zybpcUI.104a0d08.js\"></script><script src=\"/odsv2/static/okr/js/chunk-libs.455ada3e.js\"></script><script src=\"/odsv2/static/okr/js/okr-app.5924badf.js\"></script></body><script>window._codeVersion=\\'1546\\';</script></html>'"
114
+ ]
115
+ },
116
+ "execution_count": 7,
117
+ "metadata": {},
118
+ "output_type": "execute_result"
119
+ }
120
+ ],
121
+ "source": [
122
+ "from langchain.utilities import TextRequestsWrapper\n",
123
+ "requests = TextRequestsWrapper()\n",
124
+ "requests.get(\"https://i.zuoyebang.cc/odsv2/#/admin/weekly-stats\")"
125
+ ]
126
  }
127
  ],
128
  "metadata": {