{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "\n",
      "\u001b[1m> Entering new LLMBashChain chain...\u001b[0m\n",
      "找出当前目录下,文件名中包含serp的文件\u001b[32;1m\u001b[1;3m\n",
      "\n",
      "```bash\n",
      "ls\n",
      "grep -r \"serp\" *\n",
      "```\u001b[0m\n",
      "Code: \u001b[33;1m\u001b[1;3m['ls', 'grep -r \"serp\" *']\u001b[0m\n",
      "Answer: \u001b[33;1m\u001b[1;3mREADME.md\n",
      "\u001b[34m__pycache__\u001b[m\u001b[m\n",
      "anthropic_simple.ipynb\n",
      "app.py\n",
      "chain_api.ipynb\n",
      "chain_bash.ipynb\n",
      "chain_checker.ipynb\n",
      "chain_constitutional.ipynb\n",
      "chain_constitutional_prompts_cn.py\n",
      "chain_input_tool_schema.ipynb\n",
      "chain_load_json.ipynb\n",
      "chain_math.ipynb\n",
      "chain_moderation.ipynb\n",
      "chain_pal.ipynb\n",
      "chain_request_html.ipynb\n",
      "chain_save_json.ipynb\n",
      "chain_summarize_map_reduce.ipynb\n",
      "chain_transform.ipynb\n",
      "data_map_0.txt\n",
      "faiss.index\n",
      "\u001b[34mflagged\u001b[m\u001b[m\n",
      "index_bilibili.ipynb\n",
      "index_csv_loader.ipynb\n",
      "index_huggingface_datasets.ipynb\n",
      "index_image_caption.ipynb\n",
      "index_start.ipynb\n",
      "index_url_loader.ipynb\n",
      "index_web_base.ipynb\n",
      "index_youtube.ipynb\n",
      "\u001b[34mindexes\u001b[m\u001b[m\n",
      "llms_asyncio.py\n",
      "llms_cache.py\n",
      "llms_cache_gpt.py\n",
      "llms_cache_gpt_similarity.py\n",
      "llms_cache_option.ipynb\n",
      "llms_cache_option.py\n",
      "llms_cache_option_chain.ipynb\n",
      "llms_fake.py\n",
      "llms_openai.py\n",
      "llms_prompt_layer.ipynb\n",
      "llms_semantic_similarity.ipynb\n",
      "llms_sequential_chain.ipynb\n",
      "llms_serialization.ipynb\n",
      "llms_streaming.ipynb\n",
      "memory_kg.ipynb\n",
      "memory_predict_with_history.ipynb\n",
      "memory_start.ipynb\n",
      "memory_summary_buffer.ipynb\n",
      "openai_agent.py\n",
      "openai_chat\n",
      "openai_chat_agent.py\n",
      "openai_chat_prompt_template.py\n",
      "openai_conversation_chain.py\n",
      "openai_prompt_template.py\n",
      "openai_simple.py\n",
      "openai_track_usage.ipynb\n",
      "parser_fix_output.ipynb\n",
      "parser_list_output.ipynb\n",
      "parser_pydantic_output.ipynb\n",
      "parser_reponse_schema.ipynb\n",
      "prompt_custom_example_selector.ipynb\n",
      "prompt_load.ipynb\n",
      "prompt_partial_template.ipynb.ipynb\n",
      "\u001b[34mprompts\u001b[m\u001b[m\n",
      "prompts_relevance_example_selector.ipynb\n",
      "requirements.txt\n",
      "retriever_chatgpt.ipynb\n",
      "socket_client.py\n",
      "socket_server.py\n",
      "sqlite.db\n",
      "test.py\n",
      "\u001b[34mtxt\u001b[m\u001b[m\n",
      "llms_serialization.ipynb:    \"tools = load_tools([\\\"serpapi\\\", \\\"llm-math\\\"], llm=llm)\\n\",\n",
      "openai_agent.py:tools = load_tools([\"serpapi\", \"llm-math\"], llm=llm)\n",
      "openai_chat_agent.py:tools = load_tools([\"serpapi\", \"llm-math\"], llm=llm)\n",
      "\u001b[0m\n",
      "\u001b[1m> Finished chain.\u001b[0m\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "'README.md\\n\\x1b[34m__pycache__\\x1b[m\\x1b[m\\nanthropic_simple.ipynb\\napp.py\\nchain_api.ipynb\\nchain_bash.ipynb\\nchain_checker.ipynb\\nchain_constitutional.ipynb\\nchain_constitutional_prompts_cn.py\\nchain_input_tool_schema.ipynb\\nchain_load_json.ipynb\\nchain_math.ipynb\\nchain_moderation.ipynb\\nchain_pal.ipynb\\nchain_request_html.ipynb\\nchain_save_json.ipynb\\nchain_summarize_map_reduce.ipynb\\nchain_transform.ipynb\\ndata_map_0.txt\\nfaiss.index\\n\\x1b[34mflagged\\x1b[m\\x1b[m\\nindex_bilibili.ipynb\\nindex_csv_loader.ipynb\\nindex_huggingface_datasets.ipynb\\nindex_image_caption.ipynb\\nindex_start.ipynb\\nindex_url_loader.ipynb\\nindex_web_base.ipynb\\nindex_youtube.ipynb\\n\\x1b[34mindexes\\x1b[m\\x1b[m\\nllms_asyncio.py\\nllms_cache.py\\nllms_cache_gpt.py\\nllms_cache_gpt_similarity.py\\nllms_cache_option.ipynb\\nllms_cache_option.py\\nllms_cache_option_chain.ipynb\\nllms_fake.py\\nllms_openai.py\\nllms_prompt_layer.ipynb\\nllms_semantic_similarity.ipynb\\nllms_sequential_chain.ipynb\\nllms_serialization.ipynb\\nllms_streaming.ipynb\\nmemory_kg.ipynb\\nmemory_predict_with_history.ipynb\\nmemory_start.ipynb\\nmemory_summary_buffer.ipynb\\nopenai_agent.py\\nopenai_chat\\nopenai_chat_agent.py\\nopenai_chat_prompt_template.py\\nopenai_conversation_chain.py\\nopenai_prompt_template.py\\nopenai_simple.py\\nopenai_track_usage.ipynb\\nparser_fix_output.ipynb\\nparser_list_output.ipynb\\nparser_pydantic_output.ipynb\\nparser_reponse_schema.ipynb\\nprompt_custom_example_selector.ipynb\\nprompt_load.ipynb\\nprompt_partial_template.ipynb.ipynb\\n\\x1b[34mprompts\\x1b[m\\x1b[m\\nprompts_relevance_example_selector.ipynb\\nrequirements.txt\\nretriever_chatgpt.ipynb\\nsocket_client.py\\nsocket_server.py\\nsqlite.db\\ntest.py\\n\\x1b[34mtxt\\x1b[m\\x1b[m\\nllms_serialization.ipynb:    \"tools = load_tools([\\\\\"serpapi\\\\\", \\\\\"llm-math\\\\\"], llm=llm)\\\\n\",\\nopenai_agent.py:tools = load_tools([\"serpapi\", \"llm-math\"], llm=llm)\\nopenai_chat_agent.py:tools = load_tools([\"serpapi\", \"llm-math\"], llm=llm)\\n'"
      ]
     },
     "execution_count": 4,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from langchain.chains import LLMBashChain\n",
    "from langchain.llms import OpenAI\n",
    "\n",
    "llm = OpenAI(temperature=0)\n",
    "\n",
    "# text = \"查看当前目录下的文件列表,过滤出以chain开头的文件\"\n",
    "# text = \"重命名,把chain_bash.ipynb重命名为chain_bash_auto.ipynb\"\n",
    "# text = \"找出当前目录下,文件名中包含html的文件\"\n",
    "text = \"找出当前目录下,文件名中包含serp的文件\"\n",
    "\n",
    "bash_chain = LLMBashChain.from_llm(llm, verbose=True)\n",
    "\n",
    "bash_chain.run(text)"
   ]
  },
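  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The cell above demonstrates LangChain's `LLMBashChain`: given a natural-language instruction, it prompts the LLM to answer with a fenced bash code block, parses the commands out of that block, runs them in a shell, and returns the captured output. Here the prompt asks (in Chinese) to find the files in the current directory whose names contain `serp`, and the model answers with `ls` followed by `grep -r \"serp\" *`.\n",
    "\n",
    "As a rough illustration of the execution step only, the next cell runs the same two generated commands directly with the standard-library `subprocess` module. This is a simplified sketch, not how the chain itself executes commands."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Simplified sketch of what happens after the LLM responds: join the parsed\n",
    "# commands, run them in a shell, and capture the combined stdout.\n",
    "# Illustration only; not how the library itself executes commands.\n",
    "import subprocess\n",
    "\n",
    "\n",
    "def run_bash_commands(commands):\n",
    "    \"\"\"Run a list of shell commands and return their combined stdout.\"\"\"\n",
    "    joined = \"; \".join(commands)\n",
    "    completed = subprocess.run(joined, shell=True, capture_output=True, text=True)\n",
    "    return completed.stdout\n",
    "\n",
    "\n",
    "# The same commands the chain generated above:\n",
    "print(run_bash_commands([\"ls\", \"grep -r \\\"serp\\\" *\"]))"
   ]
  }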
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "base",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.10"
  },
  "orig_nbformat": 4
 },
 "nbformat": 4,
 "nbformat_minor": 2
}