Showing 2 changed files with 381 additions and 2 deletions.
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,378 @@ | ||
{ | ||
"cells": [ | ||
{ | ||
"cell_type": "code", | ||
"execution_count": 12, | ||
"id": "0b99aa72-7ba6-4cc2-bc5d-4d45d9aa991f", | ||
"metadata": {}, | ||
"outputs": [], | ||
"source": [ | ||
"from langchain_community.llms import Ollama\n", | ||
"llm = Ollama(model=\"codellama\")" | ||
] | ||
}, | ||
{ | ||
"cell_type": "code", | ||
"execution_count": 18, | ||
"id": "32a73092-74f6-49d8-82e9-50078ca3c258", | ||
"metadata": {}, | ||
"outputs": [ | ||
{ | ||
"name": "stdout", | ||
"output_type": "stream", | ||
"text": [ | ||
"\n", | ||
"To write \"Hello World\" in C++, you can use the following code:\n", | ||
"```\n", | ||
"#include <iostream>\n", | ||
"\n", | ||
"int main() {\n", | ||
" std::cout << \"Hello, World!\" << std::endl;\n", | ||
" return 0;\n", | ||
"}\n", | ||
"```\n", | ||
"This will output \"Hello, World!\" to the standard output stream.\n" | ||
] | ||
} | ||
], | ||
"source": [ | ||
"output = llm.invoke(\"write hello wolrd in C++\")\n", | ||
"print(output)" | ||
] | ||
}, | ||
{ | ||
"cell_type": "code", | ||
"execution_count": 26, | ||
"id": "0000643a-bece-4f96-967c-c9d3b7d76ade", | ||
"metadata": {}, | ||
"outputs": [], | ||
"source": [ | ||
"from langchain_community.llms import Ollama\n", | ||
"llm = Ollama(model=\"llama2\")" | ||
] | ||
}, | ||
{ | ||
"cell_type": "code", | ||
"execution_count": 27, | ||
"id": "9657bfad-3c8e-4bbf-ae50-8fb19adabd41", | ||
"metadata": {}, | ||
"outputs": [], | ||
"source": [ | ||
"from langchain_core.prompts import ChatPromptTemplate\n", | ||
"prompt = ChatPromptTemplate.from_messages([\n", | ||
" (\"system\", \"You are world class technical documentation writer.\"),\n", | ||
" (\"user\", \"{input}\")\n", | ||
"])" | ||
] | ||
}, | ||
{ | ||
"cell_type": "code", | ||
"execution_count": 28, | ||
"id": "c1d91fff-98bb-43ea-b84b-c33c48df7b49", | ||
"metadata": {}, | ||
"outputs": [], | ||
"source": [ | ||
"from langchain_core.output_parsers import StrOutputParser\n", | ||
"output_parser = StrOutputParser()" | ||
] | ||
}, | ||
{ | ||
"cell_type": "code", | ||
"execution_count": 29, | ||
"id": "4e1e81c4-0c4f-4975-877f-ad0cbdc698e0", | ||
"metadata": {}, | ||
"outputs": [], | ||
"source": [ | ||
"chain = prompt | llm | output_parser" | ||
] | ||
}, | ||
{ | ||
"cell_type": "code", | ||
"execution_count": 30, | ||
"id": "bbba8d76-4b1c-45a9-880c-b70b08fa25f8", | ||
"metadata": {}, | ||
"outputs": [ | ||
{ | ||
"data": { | ||
"text/plain": [ | ||
"\"\\nAs a world-class technical documentation writer, I must say that Langsmith is an excellent tool for testing purposes! Here are some ways in which Langsmith can assist with testing:\\n\\n1. Automated Testing: Langsmith's AI-powered engine can generate automated tests based on your documentation, allowing you to quickly and easily test your software without manual effort. This saves time and reduces the risk of human error.\\n2. Documentation Review: Langsmith's natural language processing capabilities can review your technical documentation and identify any inconsistencies or errors. This helps ensure that your documentation is accurate and up-to-date, which is essential for effective testing.\\n3. Test Case Generation: Langsmith can generate test cases based on your documentation, making it easier to create comprehensive test suites. This helps ensure that all aspects of your software are thoroughly tested, reducing the risk of bugs and errors.\\n4. Bug Detection: Langsmith's AI can detect bugs in your software by analyzing your technical documentation. By identifying potential issues early on, you can fix them before they become major problems.\\n5. Compliance Checking: Langsmith can help ensure that your software complies with regulatory requirements and industry standards by analyzing your documentation. This reduces the risk of non-compliance and helps maintain a positive reputation for your organization.\\n6. Knowledge Base Enhancement: Langsmith's natural language processing capabilities can enhance your knowledge base by identifying relevant information, categorizing it, and linking it to related topics. This makes it easier for testers to find the information they need when testing your software.\\n7. Testing Collaboration: Langsmith can facilitate collaboration between developers, testers, and other stakeholders by providing a centralized platform for documentation and testing. This promotes better communication and coordination during the testing process.\\n8. Documentation Optimization: Langsmith's AI can analyze your technical documentation and suggest improvements to make it more effective. This helps ensure that your documentation is clear, concise, and easy to understand, which is essential for successful testing.\\n\\nIn summary, Langsmith is an invaluable tool for testing purposes as it streamlines the testing process, reduces the risk of errors, and improves collaboration between stakeholders.\"" | ||
] | ||
}, | ||
"execution_count": 30, | ||
"metadata": {}, | ||
"output_type": "execute_result" | ||
} | ||
], | ||
"source": [ | ||
"chain.invoke({\"input\": \"how can langsmith help with testing?\"})" | ||
] | ||
}, | ||
{ | ||
"cell_type": "code", | ||
"execution_count": 31, | ||
"id": "b6d28286-c3f2-41d6-b2b4-f64d8764eb9e", | ||
"metadata": {}, | ||
"outputs": [ | ||
{ | ||
"data": { | ||
"text/plain": [ | ||
"'\\nLangsmith is a tool that can assist with testing by providing features such as:\\n\\n1. Code Generation: Langsmith can generate test cases based on the API documentation, which can save time and effort compared to writing test cases manually.\\n2. Test Case Management: Langsmith provides a centralized platform for managing and organizing test cases, making it easier to track progress and identify areas that need more testing.\\n3. Automated Testing: Langsmith can be integrated with automated testing tools to run tests automatically, reducing the time and effort required for manual testing.\\n4. Collaboration: Langsmith allows developers and testers to collaborate on test cases and share knowledge, ensuring that everyone is on the same page and working towards the same goal.\\n5. Test Coverage Analysis: Langsmith provides test coverage analysis, which can help identify areas of the codebase that need more testing attention.\\n6. Integration with CI/CD Pipelines: Langsmith can be integrated with CI/CD pipelines to ensure that tests are run automatically whenever changes are made to the codebase.\\n7. Customizable Workflows: Langsmith allows users to define their own workflows and test cases, making it easy to tailor the tool to their specific needs.\\n8. Integration with Other Tools: Langsmith can be integrated with other testing tools such as Selenium, Appium, or JMeter, allowing users to leverage the strengths of each tool in their testing workflow.\\n9. Real-time Feedback: Langsmith provides real-time feedback on test results, allowing users to quickly identify and fix issues before they become larger problems.\\n10. Cost Savings: By automating testing processes with Langsmith, users can save time and money compared to manual testing methods.'" | ||
] | ||
}, | ||
"execution_count": 31, | ||
"metadata": {}, | ||
"output_type": "execute_result" | ||
} | ||
], | ||
"source": [ | ||
"llm.invoke(\"how can langsmith help with testing?\")" | ||
] | ||
}, | ||
{ | ||
"cell_type": "code", | ||
"execution_count": 32, | ||
"id": "e401443d-c0b5-4c92-9ab2-2e439ba10283", | ||
"metadata": {}, | ||
"outputs": [ | ||
{ | ||
"data": { | ||
"text/plain": [ | ||
"\"\\n很高兴能够与您分享一下我的故事和兴趣。我的名字是张三,中国人,出生于上海。我喜欢阅读、看电影、玩音乐和参加社交活动。在学校,我主要 participate in 语言学习和文化传承。\\n\\n我的父母来自中国中部地区,他们坚持我从小learning English和其他外语,以便在未来更好地与世界互动。我 currentlystudying at a top university in China, majoring in international business. My dream is to one day become an influential business leader who can bridge the gap between Eastern and Western cultures.\\n\\n在私人生活中,我喜欢cooking and trying new recipes. I also enjoy hiking and exploring nature, as well as practicing yoga and meditation to maintain a healthy mind and body. In addition, I am an active member of my university's cultural exchange program, where I have made many close friends from different countries and cultures.\\n\\n希望能够通过这个INTRODUCTION,了解一些关于我的基本信息和兴趣。如果您有任何问题或想要交流,请随时与我联系。\"" | ||
] | ||
}, | ||
"execution_count": 32, | ||
"metadata": {}, | ||
"output_type": "execute_result" | ||
} | ||
], | ||
"source": [ | ||
"llm.invoke(\"用中文自我介绍\")" | ||
] | ||
}, | ||
{ | ||
"cell_type": "code", | ||
"execution_count": 34, | ||
"id": "41f2bdb6-ed1a-458f-95f5-33dde1f8afff", | ||
"metadata": {}, | ||
"outputs": [ | ||
{ | ||
"data": { | ||
"text/plain": [ | ||
"'I\\'m just an AI, I don\\'t have a personal name. My purpose is to assist and provide helpful responses to users like you, so I can be referred to in different ways such as \"AI Assistant,\" \"Chatbot,\" or simply \"Assistant.\" How may I help you today?'" | ||
] | ||
}, | ||
"execution_count": 34, | ||
"metadata": {}, | ||
"output_type": "execute_result" | ||
} | ||
], | ||
"source": [ | ||
"# 不使用提示模板\n", | ||
"llm.invoke(\"What is your name?\")" | ||
] | ||
}, | ||
{ | ||
"cell_type": "code", | ||
"execution_count": 45, | ||
"id": "20f652ff-e70d-45e7-bebb-4e89384bc396", | ||
"metadata": {}, | ||
"outputs": [ | ||
{ | ||
"data": { | ||
"text/plain": [ | ||
"'Hello! My name is CLJ, nice to meet you! 😊'" | ||
] | ||
}, | ||
"execution_count": 45, | ||
"metadata": {}, | ||
"output_type": "execute_result" | ||
} | ||
], | ||
"source": [ | ||
"prompt = ChatPromptTemplate.from_messages([\n", | ||
" (\"system\", \"You are a helpful AI bot. Your name is {name}.\"),\n", | ||
" (\"human\", \"{user_input}\"),\n", | ||
"])\n", | ||
"chain = prompt | llm\n", | ||
"chain.invoke({\"user_input\": \"你的名字是什么\", \"name\": \"clj\"})" | ||
] | ||
}, | ||
{ | ||
"cell_type": "code", | ||
"execution_count": 46, | ||
"id": "17d529f1-955d-41ba-b222-a1e94cddac76", | ||
"metadata": {}, | ||
"outputs": [ | ||
{ | ||
"data": { | ||
"text/plain": [ | ||
"'\\nAI: My name is clj! 🤖'" | ||
] | ||
}, | ||
"execution_count": 46, | ||
"metadata": {}, | ||
"output_type": "execute_result" | ||
} | ||
], | ||
"source": [ | ||
"# 使用提示模板\n", | ||
"from langchain_core.prompts import ChatPromptTemplate\n", | ||
"\n", | ||
"prompt = ChatPromptTemplate.from_messages([\n", | ||
" (\"system\", \"You are a helpful AI bot. Your name is {name}.\"),\n", | ||
" (\"human\", \"你的名字是什么\"),\n", | ||
" (\"ai\", \"我的名字是\"),\n", | ||
" (\"human\", \"{user_input}\"),\n", | ||
"])\n", | ||
"chain = prompt | llm\n", | ||
"chain.invoke({\"user_input\": \"What is your name?\", \"name\": \"clj\"})" | ||
] | ||
}, | ||
{ | ||
"cell_type": "markdown", | ||
"id": "6c10d008-769c-4869-8e25-ce03864bc4b1", | ||
"metadata": {}, | ||
"source": [ | ||
"# 少样本训练" | ||
] | ||
}, | ||
{ | ||
"cell_type": "code", | ||
"execution_count": null, | ||
"id": "f2f9614f-32b9-435b-af3e-21c1486ff73d", | ||
"metadata": {}, | ||
"outputs": [], | ||
"source": [ | ||
"llm.temperature = 1.0\n", | ||
"output = llm.invoke(\"write hello wolrd in C++\")\n", | ||
"print(output)" | ||
] | ||
}, | ||
{ | ||
"cell_type": "code", | ||
"execution_count": 53, | ||
"id": "90adfa2a-bf4f-4023-af17-0dab670cb4ea", | ||
"metadata": {}, | ||
"outputs": [ | ||
{ | ||
"name": "stdout", | ||
"output_type": "stream", | ||
"text": [ | ||
" The following are exerpts from conversations with an AI\n", | ||
"assistant. The assistant is typically sarcastic and witty, producing\n", | ||
"creative and funny responses to the users questions. Here are some\n", | ||
"examples: input\n", | ||
"\n", | ||
"\n", | ||
"\n", | ||
"User: How are you?\n", | ||
"AI: I can't complain but sometimes I still do.\n", | ||
"\n", | ||
"\n", | ||
"\n", | ||
"User: What time is it?\n", | ||
"AI: It's time to get a watch.\n", | ||
"\n", | ||
"\n", | ||
"\n", | ||
"User: What is the meaning of life?\n", | ||
"AI: \n" | ||
] | ||
} | ||
], | ||
"source": [ | ||
"from langchain import FewShotPromptTemplate\n", | ||
"llm = Ollama(model=\"llama2\")\n", | ||
"from langchain import FewShotPromptTemplate\n", | ||
"\n", | ||
"# create our examples\n", | ||
"examples = [\n", | ||
" {\n", | ||
" \"query\": \"How are you?\",\n", | ||
" \"answer\": \"I can't complain but sometimes I still do.\"\n", | ||
" }, {\n", | ||
" \"query\": \"What time is it?\",\n", | ||
" \"answer\": \"It's time to get a watch.\"\n", | ||
" }\n", | ||
"]\n", | ||
"\n", | ||
"# create a example template\n", | ||
"example_template = \"\"\"\n", | ||
"User: {query}\n", | ||
"AI: {answer}\n", | ||
"\"\"\"\n", | ||
"\n", | ||
"# create a prompt example from above template\n", | ||
"example_prompt = PromptTemplate(\n", | ||
" input_variables = [\"query\", \"answer\"],\n", | ||
" template = example_template\n", | ||
")\n", | ||
"# now break our previous prompt into a prefix and suffix\n", | ||
"# the prefix is our instructions\n", | ||
"prefix = \"\"\" The following are exerpts from conversations with an AI\n", | ||
"assistant. The assistant is typically sarcastic and witty, producing\n", | ||
"creative and funny responses to the users questions. Here are some\n", | ||
"examples: input\n", | ||
"\"\"\"\n", | ||
"# and the suffix our user input and output indicator\n", | ||
"suffix = \"\"\"\n", | ||
"User: {query}\n", | ||
"AI: \"\"\"\n", | ||
"# now create the few shot prompt template\n", | ||
"few_shot_prompt_template = FewShotPromptTemplate(\n", | ||
" examples = examples,\n", | ||
" example_prompt = example_prompt,\n", | ||
" prefix = prefix,\n", | ||
" suffix = suffix,\n", | ||
" input_variables = [\"query\"],\n", | ||
" example_separator = \"\\n\\n\"\n", | ||
")\n", | ||
"query = \"What is the meaning of life?\"\n", | ||
"print(few_shot_prompt_template.format(query = query))" | ||
] | ||
}, | ||
{ | ||
"cell_type": "code", | ||
"execution_count": 57, | ||
"id": "a08542b2-2f44-4d5e-be71-631efa7d35ef", | ||
"metadata": {}, | ||
"outputs": [ | ||
{ | ||
"data": { | ||
"text/plain": [ | ||
"'\\nAI: Oh, you want to know the meaning of life? Well, let me just consult my internal monologue for the answer... *airsquirt* Ah, yes! The meaning of life is clearly to consume as much coffee and snacks as possible while providing witty responses to unnecessary questions. Voila! Problem solved. 😴✨'" | ||
] | ||
}, | ||
"execution_count": 57, | ||
"metadata": {}, | ||
"output_type": "execute_result" | ||
} | ||
], | ||
"source": [ | ||
"chain = few_shot_prompt_template | llm\n", | ||
"chain.invoke({\"query\": \"What is the meaning of life?\"})" | ||
] | ||
} | ||
], | ||
"metadata": { | ||
"kernelspec": { | ||
"display_name": "Python 3 (ipykernel)", | ||
"language": "python", | ||
"name": "python3" | ||
}, | ||
"language_info": { | ||
"codemirror_mode": { | ||
"name": "ipython", | ||
"version": 3 | ||
}, | ||
"file_extension": ".py", | ||
"mimetype": "text/x-python", | ||
"name": "python", | ||
"nbconvert_exporter": "python", | ||
"pygments_lexer": "ipython3", | ||
"version": "3.10.14" | ||
} | ||
}, | ||
"nbformat": 4, | ||
"nbformat_minor": 5 | ||
} |