From 790d07f1493a46fccbf42898719c985fc43e8632 Mon Sep 17 00:00:00 2001
From: Masih Moafi <132553157+MasihMoafi@users.noreply.github.com>
Date: Wed, 12 Feb 2025 21:56:53 +0330
Subject: [PATCH] Delete Chat with data.ipynb

---
 Chat with data.ipynb | 256 ------------------------------------------
 1 file changed, 256 deletions(-)
 delete mode 100644 Chat with data.ipynb

diff --git a/Chat with data.ipynb b/Chat with data.ipynb
deleted file mode 100644
index 9859020..0000000
--- a/Chat with data.ipynb
+++ /dev/null
@@ -1,256 +0,0 @@

The deleted notebook (kernel: Python [conda env:base], Python 3.12.3) contained two code cells plus one empty trailing cell.

Cell 1 installed the LangChain / Hugging Face integration package:

!pip install -U langchain-huggingface

Its captured output shows langchain_huggingface-0.1.2 being downloaded (21 kB) and installed, with every other dependency (huggingface-hub, langchain-core, sentence-transformers, tokenizers, transformers, torch, and their transitive requirements) already satisfied under ./anaconda3/lib/python3.12/site-packages. The output also includes a tokenizers warning that parallelism was disabled because the process had forked.

Cell 2 loaded a set of local text files, embedded them, built a Chroma index, and ran an interactive chat loop against a local llama3.2 model served by Ollama:

from langchain.document_loaders import TextLoader
from langchain.indexes import VectorstoreIndexCreator
from langchain.vectorstores import Chroma
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.llms import Ollama
from langchain.chains import LLMChain
from langchain.prompts import PromptTemplate
import ollama

DOCUMENT_PATHS = [
    r'/home/masih/rag_data/Hamrah.txt',
    r'/home/masih/rag_data/vape.txt',
    r'/home/masih/rag_data/Shah.txt',
    r'/home/masih/rag_data/Khalife.txt',
    r'/home/masih/rag_data/carbon.txt',
    r'/home/masih/rag_data/takapoo.txt'
]

# Load all documents
def load_documents(file_paths):
    texts = []
    for file_path in file_paths:
        loader = TextLoader(file_path)
        documents = loader.load()
        texts.extend(documents)
    return texts

texts = load_documents(DOCUMENT_PATHS)

# Init embeddings
embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-mpnet-base-v2")

# Create a vector store for semantic search
index = VectorstoreIndexCreator(
    vectorstore_cls=Chroma,
    embedding=embeddings
).from_documents(texts)

# Init Llama3.2 locally
llm = Ollama(model="llama3.2")

# Create a chat interface
template = PromptTemplate.from_template(
    "You are a helpful assistant with access to the following documents:\n\n{documents}\n\nPlease answer the question: {question}\n"
)

def chat_with_data():
    # Create an LLM chain for responses
    chain = LLMChain(llm=llm, prompt=template)

    # Simple chat loop
    while True:
        question = input("You: ")
        response = chain.run(question=question, documents=texts)
        print("Assistant:", response)

# Start the chat
chat_with_data()

The captured output of cell 2 records three LangChain deprecation warnings (the Ollama class was deprecated in 0.3.1 in favour of OllamaLLM from the langchain-ollama package, LLMChain in 0.1.17 in favour of the prompt | llm runnable composition, and Chain.run in 0.1.0 in favour of invoke), one interactive turn ("You: what's the gist of my data"), and a KeyboardInterrupt raised while the answer was still streaming from the local Ollama /api/generate endpoint (the interrupt landed in the requests/urllib3 chunked read blocking on the socket).
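For reference, the replacements named in those deprecation warnings can be combined roughly as follows. This is a minimal sketch, not the notebook author's code: it assumes langchain-ollama is installed (pip install -U langchain-ollama) and that a local Ollama server has the llama3.2 model pulled.

# Sketch: modernizes the deleted cell per the captured deprecation warnings.
# OllamaLLM replaces the deprecated Ollama class, the prompt | llm runnable
# composition replaces LLMChain, and .invoke() replaces .run().
from langchain_core.prompts import PromptTemplate
from langchain_ollama import OllamaLLM

llm = OllamaLLM(model="llama3.2")

template = PromptTemplate.from_template(
    "You are a helpful assistant with access to the following documents:\n\n"
    "{documents}\n\nPlease answer the question: {question}\n"
)

# Composing the prompt with the model yields a runnable chain
chain = template | llm

def chat_with_data(texts):
    # Same interactive loop as the deleted cell, using invoke() instead of run()
    while True:
        question = input("You: ")
        response = chain.invoke({"question": question, "documents": texts})
        print("Assistant:", response)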
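The deleted cell also built a Chroma index with VectorstoreIndexCreator but then passed every loaded document into each prompt rather than querying that index. A retrieval-based variant, again only a sketch under the assumption that the langchain-chroma and langchain-huggingface packages (the latter is what cell 1 installed) are available, could reuse the texts list and the chain above:

# Sketch: query the vector store for the most relevant passages per question
# instead of stuffing every document into the prompt. Reuses `texts` and `chain`
# from above; the class names come from the langchain-huggingface and
# langchain-chroma integration packages.
from langchain_chroma import Chroma
from langchain_huggingface import HuggingFaceEmbeddings

embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-mpnet-base-v2")
vectorstore = Chroma.from_documents(texts, embedding=embeddings)
retriever = vectorstore.as_retriever(search_kwargs={"k": 4})

def answer(question: str) -> str:
    # Retrieve only the passages most similar to the question
    docs = retriever.invoke(question)
    context = "\n\n".join(doc.page_content for doc in docs)
    return chain.invoke({"question": question, "documents": context})

# Example: answer("what's the gist of my data")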