first commit
- .gitignore +3 -0
- Dockerfile +22 -0
- app.py +38 -0
- notebooks/Gemini_Chat_Docs-1.ipynb +811 -0
- notebooks/Gemini_Chat_Docs-2.ipynb +251 -0
- notebooks/Gemini_Chat_Docs.ipynb +932 -0
- notebooks/gemini-docs.ipynb +883 -0
- notebooks/gemini-langchain.ipynb +123 -0
- notebooks/gemini-llama-index.ipynb +605 -0
- notebooks/gemini-pro.ipynb +280 -0
- requirements.txt +167 -0
- src/agent.py +50 -0
- src/utils.py +100 -0
.gitignore
ADDED
@@ -0,0 +1,3 @@
+docs
+.env
+.venv
Dockerfile
ADDED
@@ -0,0 +1,22 @@
+FROM python:3.11-slim-buster
+
+WORKDIR /app
+
+COPY . /app
+
+# ENV HNSWLIB_NO_NATIVE=1
+
+RUN pip install --no-cache-dir --upgrade -r requirements.txt
+
+RUN useradd -m -u 1000 user
+
+USER user
+
+ENV HOME=/home/user \
+    PATH=/home/user/.local/bin:$PATH
+
+WORKDIR $HOME/app
+
+COPY --chown=user . $HOME/app
+
+CMD ["python", "app.py"]
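Note the two-stage layout: dependencies are installed system-wide as root, then the build switches to a non-root `user` (UID 1000) and re-copies the source into `$HOME/app` with `--chown=user`, matching the non-root user setup Hugging Face recommends for Docker Spaces. The commented-out `HNSWLIB_NO_NATIVE=1` is a common workaround when `chroma-hnswlib` fails to compile with native CPU optimizations on the build host. Locally, the image would build and run with something like `docker build -t exploretext .` followed by `docker run -p 7860:7860 --env-file .env exploretext` (the image tag, port mapping, and env-file flag are illustrative; Gradio listens on port 7860 by default).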
app.py
ADDED
@@ -0,0 +1,38 @@
+import os
+from dotenv import load_dotenv
+import gradio as gr
+
+from src.utils import (
+    process_files, answer_query
+)
+
+load_dotenv()
+
+
+
+gr.close_all()
+
+title = ""
+description = "Chat with PDF/TXT/DOC"
+
+chatbot = gr.Chatbot(label="ExploreText")
+
+with gr.Blocks(
+    title="ExploreText",
+) as textbot:
+
+    gr.Markdown("# <center> Welcome to ExploreDoc Web App</center>")
+
+    with gr.Accordion("Upload a file here", open=False):
+        file_output = gr.File()
+        upload_button = gr.UploadButton("Click to Upload a File", file_types=["txt", "doc", "pdf"])
+        upload_button.upload(process_files, upload_button, file_output)
+
+    # with gr.Row("Chat with Text"):
+    gr.ChatInterface(fn=answer_query, chatbot=chatbot, submit_btn="Ask", undo_btn=None, retry_btn=None, clear_btn=None)
+    gr.Markdown("<center> Developed by <a href='https://92-vasim.github.io' target='_blank'>Mohammed Vasim</a> | AI Engineer & Computer Vision Engineer @ ZestIoT. </center>")
+
+
+if __name__ == "__main__":
+    textbot.queue().launch(server_name="0.0.0.0")
+
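`process_files` and `answer_query` come from src/utils.py (+100 lines), whose contents are not shown in this view. A minimal sketch of what those two handlers plausibly do, pieced together from the FAISS/Gemini imports in the notebooks below; every function body here is an assumption, not the committed code:

# Hypothetical sketch of src/utils.py (assumed, not the committed file).
from langchain.document_loaders import PyPDFLoader, TextLoader, Docx2txtLoader
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain_google_genai import GoogleGenerativeAIEmbeddings, ChatGoogleGenerativeAI
from langchain.vectorstores import FAISS
from langchain.chains.question_answering import load_qa_chain

vectorstore = None  # module-level index shared by the upload and chat handlers


def process_files(file):
    """Load the uploaded PDF/TXT/DOC, chunk it, and index it in FAISS (assumed flow)."""
    global vectorstore
    path = file.name  # gr.UploadButton passes the handler a tempfile-like object
    if path.endswith(".pdf"):
        docs = PyPDFLoader(path).load()
    elif path.endswith(".txt"):
        docs = TextLoader(path).load()
    else:
        docs = Docx2txtLoader(path).load()
    splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
    chunks = splitter.split_documents(docs)
    embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")
    vectorstore = FAISS.from_documents(chunks, embeddings)
    return path  # echoed back into the gr.File component


def answer_query(message, history):
    """Answer a chat message against the indexed document (assumed flow)."""
    if vectorstore is None:
        return "Please upload a document first."
    relevant = vectorstore.similarity_search(message)
    llm = ChatGoogleGenerativeAI(model="gemini-pro")
    chain = load_qa_chain(llm, chain_type="stuff")
    return chain.run(input_documents=relevant, question=message)

The `answer_query(message, history)` signature is what `gr.ChatInterface` expects of its `fn`; the chunk sizes and model names are illustrative defaults.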
notebooks/Gemini_Chat_Docs-1.ipynb
ADDED
@@ -0,0 +1,811 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {
+    "id": "2N8psBL6-wfJ"
+   },
+   "outputs": [],
+   "source": [
+    "# !cp -rf /content/drive/MyDrive/ML\\ Projects/Gemini-Pro/docs /content/"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {
+    "colab": {
+     "base_uri": "https://localhost:8080/"
+    },
+    "id": "9AkjB4x3ybTb",
+    "outputId": "7b4fa13a-cb14-4e59-f5b7-c426a3fbea35"
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
      [~430 lines of pip "Collecting"/"Downloading"/"Requirement already satisfied" progress output with terminal escape codes elided; the install summaries below are kept verbatim]
+      "Successfully installed PyYAML-6.0.1 SQLAlchemy-2.0.25 aiohttp-3.9.1 aiosignal-1.3.1 annotated-types-0.6.0 anyio-4.2.0 async-timeout-4.0.3 attrs-23.2.0 certifi-2023.11.17 charset-normalizer-3.3.2 dataclasses-json-0.6.3 frozenlist-1.4.1 greenlet-3.0.3 idna-3.6 jsonpatch-1.33 jsonpointer-2.4 langchain-0.1.0 langchain-community-0.0.12 langchain-core-0.1.10 langsmith-0.0.80 marshmallow-3.20.2 multidict-6.0.4 mypy-extensions-1.0.0 numpy-1.26.3 pydantic-2.5.3 pydantic-core-2.14.6 requests-2.31.0 sniffio-1.3.0 tenacity-8.2.3 typing-extensions-4.9.0 typing-inspect-0.9.0 urllib3-2.1.0 yarl-1.9.4\n",
+      "Successfully installed pypdf-3.17.4\n",
+      "Successfully installed cachetools-5.3.2 google-ai-generativelanguage-0.4.0 google-api-core-2.15.0 google-auth-2.26.2 google-generativeai-0.3.2 googleapis-common-protos-1.62.0 grpcio-1.60.0 grpcio-status-1.60.0 langchain_google_genai-0.0.6 proto-plus-1.23.0 protobuf-4.25.2 pyasn1-0.5.1 pyasn1-modules-0.3.0 rsa-4.9 tqdm-4.66.1\n",
+      "Successfully installed asgiref-3.7.2 backoff-2.2.1 bcrypt-4.1.2 build-1.0.3 chroma-hnswlib-0.7.3 chromadb-0.4.22 click-8.1.7 coloredlogs-15.0.1 deprecated-1.2.14 fastapi-0.109.0 filelock-3.13.1 flatbuffers-23.5.26 fsspec-2023.12.2 h11-0.14.0 httptools-0.6.1 huggingface_hub-0.20.2 humanfriendly-10.0 importlib-metadata-6.11.0 importlib-resources-6.1.1 kubernetes-29.0.0 mmh3-4.1.0 monotonic-1.6 mpmath-1.3.0 oauthlib-3.2.2 onnxruntime-1.16.3 opentelemetry-api-1.22.0 opentelemetry-exporter-otlp-proto-common-1.22.0 opentelemetry-exporter-otlp-proto-grpc-1.22.0 opentelemetry-instrumentation-0.43b0 opentelemetry-instrumentation-asgi-0.43b0 opentelemetry-instrumentation-fastapi-0.43b0 opentelemetry-proto-1.22.0 opentelemetry-sdk-1.22.0 opentelemetry-semantic-conventions-0.43b0 opentelemetry-util-http-0.43b0 overrides-7.4.0 posthog-3.3.1 pulsar-client-3.4.0 pypika-0.48.9 pyproject_hooks-1.0.0 python-dotenv-1.0.0 requests-oauthlib-1.3.1 starlette-0.35.1 sympy-1.12 tokenizers-0.15.0 tomli-2.0.1 typer-0.9.0 uvicorn-0.25.0 uvloop-0.19.0 watchfiles-0.21.0 websocket-client-1.7.0 websockets-12.0 wrapt-1.16.0 zipp-3.17.0\n",
+      "Successfully installed aiofiles-23.2.1 altair-5.2.0 colorama-0.4.6 contourpy-1.2.0 cycler-0.12.1 ffmpy-0.3.1 fonttools-4.47.2 gradio-4.14.0 gradio-client-0.8.0 httpcore-1.0.2 httpx-0.26.0 jinja2-3.1.3 jsonschema-4.20.0 jsonschema-specifications-2023.12.1 kiwisolver-1.4.5 markdown-it-py-3.0.0 markupsafe-2.1.3 matplotlib-3.8.2 mdurl-0.1.2 orjson-3.9.10 pandas-2.1.4 pillow-10.2.0 pydub-0.25.1 pyparsing-3.1.1 python-multipart-0.0.6 pytz-2023.3.post1 referencing-0.32.1 rich-13.7.0 rpds-py-0.17.1 semantic-version-2.10.0 shellingham-1.5.4 tomlkit-0.12.0 toolz-0.12.0 tzdata-2023.4 websockets-11.0.3\n",
+      "Successfully installed faiss-cpu-1.7.4\n"
+     ]
+    }
+   ],
+   "source": [
+    "# !pip install langchain\n",
+    "# !pip install pypdf\n",
+    "# !pip install langchain_google_genai\n",
+    "# !pip install google-generativeai\n",
+    "# !pip install chromadb\n",
+    "# !pip install gradio\n",
+    "# !pip install faiss-cpu"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "metadata": {
+    "id": "ylbT549oymIl"
+   },
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "True"
+      ]
+     },
+     "execution_count": 3,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "import os\n",
+    "from langchain.document_loaders import (\n",
+    "    PyPDFLoader,\n",
+    "    TextLoader,\n",
+    "    Docx2txtLoader\n",
+    ")\n",
+    "\n",
+    "from langchain.text_splitter import CharacterTextSplitter\n",
+    "# from PyPDF2 import PdfReader\n",
+    "from langchain.text_splitter import RecursiveCharacterTextSplitter\n",
+    "from langchain_google_genai import GoogleGenerativeAIEmbeddings\n",
+    "import google.generativeai as genai\n",
+    "from langchain.vectorstores import FAISS\n",
+    "from langchain_google_genai import ChatGoogleGenerativeAI\n",
+    "from langchain.chains.question_answering import load_qa_chain\n",
+    "from langchain.prompts import PromptTemplate\n",
+    "from langchain.memory import ConversationBufferMemory\n",
+    "from dotenv import load_dotenv\n",
+    "load_dotenv()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 6,
+   "metadata": {
+    "id": "65o268jqzN7O"
+   },
+   "outputs": [],
+   "source": [
+
"def extract_text(docs):\n",
|
519 |
+
" documents = []\n",
|
520 |
+
" files = os.listdir(docs)\n",
|
521 |
+
" \n",
|
522 |
+
" if len(files) == 0:\n",
|
523 |
+
"        raise FileNotFoundError(f\"No documents found in {docs}\")\n",
|
524 |
+
"\n",
|
525 |
+
"    base_dir = docs\n",
|
526 |
+
"    # load every supported file found directly inside base_dir\n",
|
527 |
+
"\n",
|
528 |
+
" for file in files:\n",
|
529 |
+
" if file.endswith(\".pdf\"):\n",
|
530 |
+
" pdf_path=os.path.join(base_dir, file)\n",
|
531 |
+
" loader=PyPDFLoader(pdf_path)\n",
|
532 |
+
" documents.extend(loader.load())\n",
|
533 |
+
" elif file.endswith('.docx') or file.endswith('.doc'):\n",
|
534 |
+
" doc_path=os.path.join(base_dir, file)\n",
|
535 |
+
" loader=Docx2txtLoader(doc_path)\n",
|
536 |
+
" documents.extend(loader.load())\n",
|
537 |
+
" elif file.endswith('.txt'):\n",
|
538 |
+
" text_path=os.path.join(base_dir, file)\n",
|
539 |
+
" loader=TextLoader(text_path)\n",
|
540 |
+
" documents.extend(loader.load())\n",
|
541 |
+
" return documents"
|
542 |
+
]
|
543 |
+
},
|
544 |
+
{
|
545 |
+
"cell_type": "code",
|
546 |
+
"execution_count": 7,
|
547 |
+
"metadata": {
|
548 |
+
"id": "0gT5m9cD_cM7"
|
549 |
+
},
|
550 |
+
"outputs": [],
|
551 |
+
"source": [
|
552 |
+
"doc_dir = \"../docs\""
|
553 |
+
]
|
554 |
+
},
|
555 |
+
{
|
556 |
+
"cell_type": "code",
|
557 |
+
"execution_count": 8,
|
558 |
+
"metadata": {
|
559 |
+
"id": "Svi3z1Rdzjbm"
|
560 |
+
},
|
561 |
+
"outputs": [
|
562 |
+
{
|
563 |
+
"name": "stdout",
|
564 |
+
"output_type": "stream",
|
565 |
+
"text": [
|
566 |
+
"['profile.txt', 'llm-research.pdf']\n",
|
567 |
+
"../docs\n"
|
568 |
+
]
|
569 |
+
}
|
570 |
+
],
|
571 |
+
"source": [
|
572 |
+
"documents = extract_text(doc_dir)"
|
573 |
+
]
|
574 |
+
},
|
575 |
+
{
|
576 |
+
"cell_type": "code",
|
577 |
+
"execution_count": 9,
|
578 |
+
"metadata": {
|
579 |
+
"id": "CRe5WNKC0D88"
|
580 |
+
},
|
581 |
+
"outputs": [],
|
582 |
+
"source": [
|
583 |
+
"def get_text_chunks(text):\n",
|
584 |
+
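"    # split into ~10,000-character chunks with 1,000 characters of overlap between neighbours\n",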
" text_splitter = RecursiveCharacterTextSplitter(chunk_size=10000, chunk_overlap=1000)\n",
|
585 |
+
" chunks = text_splitter.split_documents(text)\n",
|
586 |
+
" return chunks\n",
|
587 |
+
"\n",
|
588 |
+
"def save_in_faiss(text_chunks, save=True):\n",
|
589 |
+
" embeddings = GoogleGenerativeAIEmbeddings(model = \"models/embedding-001\")\n",
|
590 |
+
" vector_store = FAISS.from_documents(text_chunks, embedding=embeddings)\n",
|
591 |
+
"    if save:\n",
|
592 |
+
" vector_store.save_local(\"faiss_index\")\n",
|
593 |
+
" return vector_store"
|
594 |
+
]
|
595 |
+
},
|
596 |
+
{
|
597 |
+
"cell_type": "code",
|
598 |
+
"execution_count": 10,
|
599 |
+
"metadata": {},
|
600 |
+
"outputs": [],
|
601 |
+
"source": [
|
602 |
+
"def process_files(docs):\n",
|
603 |
+
" documents = extract_text(docs)\n",
|
604 |
+
" text_chunks = get_text_chunks(documents)\n",
|
605 |
+
" vector_store = save_in_faiss(text_chunks)\n",
|
606 |
+
" return vector_store"
|
607 |
+
]
|
608 |
+
},
|
609 |
+
{
|
610 |
+
"cell_type": "code",
|
611 |
+
"execution_count": 17,
|
612 |
+
"metadata": {},
|
613 |
+
"outputs": [],
|
614 |
+
"source": [
|
615 |
+
"llm = ChatGoogleGenerativeAI(model=\"gemini-pro\", temperature=0.7)\n",
|
616 |
+
"\n",
|
617 |
+
"template = \"\"\"You are a chatbot having a conversation with a human.\n",
|
618 |
+
"\n",
|
619 |
+
"Given the following extracted parts of a long document and a question, create a final answer.\n",
|
620 |
+
"\n",
|
621 |
+
"{context}\n",
|
622 |
+
"\n",
|
623 |
+
"{chat_history}\n",
|
624 |
+
"Human: {human_input}\n",
|
625 |
+
"Chatbot:\"\"\"\n",
|
626 |
+
"\n",
|
627 |
+
"prompt = PromptTemplate(\n",
|
628 |
+
" input_variables=[\"chat_history\", \"human_input\", \"context\"], template=template\n",
|
629 |
+
")\n",
|
630 |
+
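"# memory_key and input_key must match the {chat_history} and {human_input} prompt variables\n",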
"memory = ConversationBufferMemory(memory_key=\"chat_history\", input_key=\"human_input\")\n",
|
631 |
+
"chain = load_qa_chain(\n",
|
632 |
+
" llm=llm, chain_type=\"stuff\", memory=memory, prompt=prompt\n",
|
633 |
+
")"
|
634 |
+
]
|
635 |
+
},
|
636 |
+
{
|
637 |
+
"cell_type": "code",
|
638 |
+
"execution_count": null,
|
639 |
+
"metadata": {},
|
640 |
+
"outputs": [],
|
641 |
+
"source": [
|
642 |
+
"def build_qa_chain(llm=llm, prompt=prompt, memory=memory):\n",
|
643 |
+
" chain = load_qa_chain(\n",
|
644 |
+
" llm=llm, chain_type=\"stuff\", memory=memory, prompt=prompt\n",
|
645 |
+
"    )\n",
|
646 |
+
" return chain\n",
|
647 |
+
"\n",
|
648 |
+
"def build_agent(query, db):\n",
|
649 |
+
"    \"\"\"Answer the given query from chunks retrieved out of the FAISS index db.\"\"\"\n",
|
650 |
+
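"    # similarity_search returns the top-k most similar chunks (k defaults to 4)\n",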
" docs = db.similarity_search(query)\n",
|
651 |
+
" response = chain({\"input_documents\": docs, \"human_input\": query}, return_only_outputs=True)\n",
|
652 |
+
" return response['output_text']"
|
653 |
+
]
|
654 |
+
},
|
655 |
+
{
|
656 |
+
"cell_type": "code",
|
657 |
+
"execution_count": 12,
|
658 |
+
"metadata": {},
|
659 |
+
"outputs": [
|
660 |
+
{
|
661 |
+
"name": "stdout",
|
662 |
+
"output_type": "stream",
|
663 |
+
"text": [
|
664 |
+
"['profile.txt', 'llm-research.pdf']\n",
|
665 |
+
"../docs\n"
|
666 |
+
]
|
667 |
+
}
|
668 |
+
],
|
669 |
+
"source": [
|
670 |
+
"db = process_files(doc_dir)"
|
671 |
+
]
|
672 |
+
},
|
673 |
+
{
|
674 |
+
"cell_type": "code",
|
675 |
+
"execution_count": 16,
|
676 |
+
"metadata": {},
|
677 |
+
"outputs": [
|
678 |
+
{
|
679 |
+
"data": {
|
680 |
+
"text/plain": [
|
681 |
+
"{'output_text': 'I do not have any information about Vasim being a plumber.'}"
|
682 |
+
]
|
683 |
+
},
|
684 |
+
"execution_count": 16,
|
685 |
+
"metadata": {},
|
686 |
+
"output_type": "execute_result"
|
687 |
+
}
|
688 |
+
],
|
689 |
+
"source": [
|
690 |
+
"query = \"is vasim plumber\"\n",
|
691 |
+
"docs = db.similarity_search(query)\n",
|
692 |
+
"chain({\"input_documents\": docs, \"human_input\": query}, return_only_outputs=True)"
|
693 |
+
]
|
694 |
+
},
|
695 |
+
{
|
696 |
+
"cell_type": "code",
|
697 |
+
"execution_count": 18,
|
698 |
+
"metadata": {},
|
699 |
+
"outputs": [],
|
700 |
+
"source": [
|
701 |
+
"def answer_query(message, history):\n",
|
702 |
+
" docs = db.similarity_search(message)\n",
|
703 |
+
" response = chain({\"input_documents\": docs, \"human_input\": message}, return_only_outputs=True)\n",
|
704 |
+
" return response['output_text']"
|
705 |
+
]
|
706 |
+
},
|
707 |
+
{
|
708 |
+
"cell_type": "code",
|
709 |
+
"execution_count": 34,
|
710 |
+
"metadata": {
|
711 |
+
"colab": {
|
712 |
+
"background_save": true
|
713 |
+
},
|
714 |
+
"id": "a8tNUutJB9EA"
|
715 |
+
},
|
716 |
+
"outputs": [
|
717 |
+
{
|
718 |
+
"name": "stdout",
|
719 |
+
"output_type": "stream",
|
720 |
+
"text": [
|
721 |
+
"Chat interface is cool.\n",
|
722 |
+
"Running on local URL: http://127.0.0.1:7875\n",
|
723 |
+
"\n",
|
724 |
+
"To create a public link, set `share=True` in `launch()`.\n"
|
725 |
+
]
|
726 |
+
},
|
727 |
+
{
|
728 |
+
"data": {
|
729 |
+
"text/html": [
|
730 |
+
"<div><iframe src=\"http://127.0.0.1:7875/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
|
731 |
+
],
|
732 |
+
"text/plain": [
|
733 |
+
"<IPython.core.display.HTML object>"
|
734 |
+
]
|
735 |
+
},
|
736 |
+
"metadata": {},
|
737 |
+
"output_type": "display_data"
|
738 |
+
}
|
739 |
+
],
|
740 |
+
"source": [
|
741 |
+
"# Gradio App\n",
|
742 |
+
"import gradio as gr\n",
|
743 |
+
"\n",
|
744 |
+
"title = \"\"\n",
|
745 |
+
"description = \"Chat with any docs\"\n",
|
746 |
+
"\n",
|
747 |
+
"# def answer_query(message, history):\n",
|
748 |
+
"# docs = db.similarity_search(message)\n",
|
749 |
+
"# message = agent(\n",
|
750 |
+
"# {\"input_documents\":docs, \"question\": message}\n",
|
751 |
+
"# ,return_only_outputs=True)\n",
|
752 |
+
"# return message['output_text']\n",
|
753 |
+
"\n",
|
754 |
+
"\n",
|
755 |
+
"chatbot = gr.Chatbot(label=\"ExploreText\")\n",
|
756 |
+
"\n",
|
757 |
+
"with gr.Blocks(\n",
|
758 |
+
" title=\"ExploreText\",\n",
|
759 |
+
" ) as textbot:\n",
|
760 |
+
"\n",
|
761 |
+
" gr.Markdown(\"# <center> Welcome to ExploreDoc Web App</center>\")\n",
|
762 |
+
" \n",
|
763 |
+
" with gr.Accordion(\"Upload a file here\", open=False):\n",
|
764 |
+
" file_output = gr.File(scale=1)\n",
|
765 |
+
" upload_button = gr.UploadButton(\"Click to Upload a File\", file_types=[\"txt\",\"doc\",\"pdf\"])\n",
|
766 |
+
" upload_button.upload(process_files, upload_button, file_output)\n",
|
767 |
+
" # gr.Info(\"Click on Chat with PDF tab\")\n",
|
768 |
+
"\n",
|
769 |
+
" # with gr.Row(\"Chat with Text\"):\n",
|
770 |
+
" gr.ChatInterface(fn=answer_query, chatbot=chatbot, submit_btn=\"Ask\", undo_btn=None, retry_btn=None, clear_btn=None)\n",
|
771 |
+
" print(\"Chat interface is cool.\")\n",
|
772 |
+
"    gr.Markdown(\"<center> Developed by <a href='https://92-vasim.github.io' target='_blank'>Mohammed Vasim</a> | AI Engineer & Computer Vision Engineer @ ZestIoT. </center>\")\n",
|
773 |
+
" \n",
|
774 |
+
"\n",
|
775 |
+
"if __name__ == \"__main__\":\n",
|
776 |
+
" textbot.queue().launch()\n",
|
777 |
+
"\n"
|
778 |
+
]
|
779 |
+
},
|
780 |
+
{
|
781 |
+
"cell_type": "code",
|
782 |
+
"execution_count": null,
|
783 |
+
"metadata": {},
|
784 |
+
"outputs": [],
|
785 |
+
"source": []
|
786 |
+
}
|
787 |
+
],
|
788 |
+
"metadata": {
|
789 |
+
"colab": {
|
790 |
+
"provenance": []
|
791 |
+
},
|
792 |
+
"kernelspec": {
|
793 |
+
"display_name": "Python 3",
|
794 |
+
"name": "python3"
|
795 |
+
},
|
796 |
+
"language_info": {
|
797 |
+
"codemirror_mode": {
|
798 |
+
"name": "ipython",
|
799 |
+
"version": 3
|
800 |
+
},
|
801 |
+
"file_extension": ".py",
|
802 |
+
"mimetype": "text/x-python",
|
803 |
+
"name": "python",
|
804 |
+
"nbconvert_exporter": "python",
|
805 |
+
"pygments_lexer": "ipython3",
|
806 |
+
"version": "3.10.12"
|
807 |
+
}
|
808 |
+
},
|
809 |
+
"nbformat": 4,
|
810 |
+
"nbformat_minor": 0
|
811 |
+
}
|
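
The notebook above assembles the whole pipeline inline: load PDF/DOCX/TXT files, split them into overlapping chunks, embed the chunks with models/embedding-001, index them in FAISS, and answer questions with a "stuff" QA chain that carries conversation memory. As a companion, here is a minimal Python sketch that replays the same flow against the persisted index instead of re-embedding on every run. It is a sketch, not the project's code: it assumes the faiss_index folder written by save_in_faiss() exists and that GOOGLE_API_KEY is set in .env, and the question string is purely hypothetical.

    from dotenv import load_dotenv
    from langchain.chains.question_answering import load_qa_chain
    from langchain.memory import ConversationBufferMemory
    from langchain.prompts import PromptTemplate
    from langchain.vectorstores import FAISS
    from langchain_google_genai import ChatGoogleGenerativeAI, GoogleGenerativeAIEmbeddings

    load_dotenv()  # GOOGLE_API_KEY is read from .env

    # Reload the index that save_in_faiss() persisted instead of re-embedding the docs.
    embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")
    db = FAISS.load_local("faiss_index", embeddings)

    # Same prompt shape as the notebook: retrieved context, running history, new turn.
    template = (
        "You are a chatbot having a conversation with a human.\n\n"
        "Given the following extracted parts of a long document and a question, "
        "create a final answer.\n\n"
        "{context}\n\n{chat_history}\nHuman: {human_input}\nChatbot:"
    )
    prompt = PromptTemplate(
        input_variables=["chat_history", "human_input", "context"], template=template
    )
    memory = ConversationBufferMemory(memory_key="chat_history", input_key="human_input")
    chain = load_qa_chain(
        llm=ChatGoogleGenerativeAI(model="gemini-pro", temperature=0.7),
        chain_type="stuff", memory=memory, prompt=prompt,
    )

    question = "what does the profile say about vasim?"  # hypothetical question
    docs = db.similarity_search(question)  # top-4 most similar chunks by default
    result = chain({"input_documents": docs, "human_input": question}, return_only_outputs=True)
    print(result["output_text"])
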
notebooks/Gemini_Chat_Docs-2.ipynb
ADDED
@@ -0,0 +1,251 @@
1 |
+
{
|
2 |
+
"cells": [
|
3 |
+
{
|
4 |
+
"cell_type": "code",
|
5 |
+
"execution_count": 1,
|
6 |
+
"metadata": {
|
7 |
+
"id": "ylbT549oymIl"
|
8 |
+
},
|
9 |
+
"outputs": [
|
10 |
+
{
|
11 |
+
"name": "stderr",
|
12 |
+
"output_type": "stream",
|
13 |
+
"text": [
|
14 |
+
"/home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
|
15 |
+
" from .autonotebook import tqdm as notebook_tqdm\n"
|
16 |
+
]
|
17 |
+
},
|
18 |
+
{
|
19 |
+
"data": {
|
20 |
+
"text/plain": [
|
21 |
+
"True"
|
22 |
+
]
|
23 |
+
},
|
24 |
+
"execution_count": 1,
|
25 |
+
"metadata": {},
|
26 |
+
"output_type": "execute_result"
|
27 |
+
}
|
28 |
+
],
|
29 |
+
"source": [
|
30 |
+
"import os\n",
|
31 |
+
"from langchain.document_loaders import (\n",
|
32 |
+
" PyPDFLoader,\n",
|
33 |
+
" TextLoader,\n",
|
34 |
+
" Docx2txtLoader\n",
|
35 |
+
")\n",
|
36 |
+
"\n",
|
37 |
+
"from langchain.text_splitter import CharacterTextSplitter\n",
|
38 |
+
"# from PyPDF2 import PdfReader\n",
|
39 |
+
"from langchain.text_splitter import RecursiveCharacterTextSplitter\n",
|
40 |
+
"from langchain_google_genai import GoogleGenerativeAIEmbeddings\n",
|
41 |
+
"import google.generativeai as genai\n",
|
42 |
+
"from langchain.vectorstores import FAISS\n",
|
43 |
+
"from langchain_google_genai import ChatGoogleGenerativeAI\n",
|
44 |
+
"from langchain.chains.question_answering import load_qa_chain\n",
|
45 |
+
"from langchain.prompts import PromptTemplate\n",
|
46 |
+
"from langchain.memory import ConversationBufferMemory\n",
|
47 |
+
"from dotenv import load_dotenv\n",
|
48 |
+
"load_dotenv()"
|
49 |
+
]
|
50 |
+
},
|
51 |
+
{
|
52 |
+
"cell_type": "code",
|
53 |
+
"execution_count": 2,
|
54 |
+
"metadata": {},
|
55 |
+
"outputs": [],
|
56 |
+
"source": [
|
57 |
+
"os.chdir(\"../\")"
|
58 |
+
]
|
59 |
+
},
|
60 |
+
{
|
61 |
+
"cell_type": "code",
|
62 |
+
"execution_count": 3,
|
63 |
+
"metadata": {},
|
64 |
+
"outputs": [],
|
65 |
+
"source": [
|
66 |
+
"from src.utils import (\n",
|
67 |
+
" process_files, answer_query, extract_text_from_file\n",
|
68 |
+
")"
|
69 |
+
]
|
70 |
+
},
|
71 |
+
{
|
72 |
+
"cell_type": "code",
|
73 |
+
"execution_count": 4,
|
74 |
+
"metadata": {},
|
75 |
+
"outputs": [],
|
76 |
+
"source": [
|
77 |
+
"# extract_text_from_file(\"docs/llm-research.pdf\")"
|
78 |
+
]
|
79 |
+
},
|
80 |
+
{
|
81 |
+
"cell_type": "code",
|
82 |
+
"execution_count": 5,
|
83 |
+
"metadata": {},
|
84 |
+
"outputs": [],
|
85 |
+
"source": [
|
86 |
+
"from pathlib import Path \n",
|
87 |
+
"path = Path(\"docs/llm-research.pdf\")"
|
88 |
+
]
|
89 |
+
},
|
90 |
+
{
|
91 |
+
"cell_type": "code",
|
92 |
+
"execution_count": 6,
|
93 |
+
"metadata": {},
|
94 |
+
"outputs": [
|
95 |
+
{
|
96 |
+
"data": {
|
97 |
+
"text/plain": [
|
98 |
+
"'docs/llm-research.pdf'"
|
99 |
+
]
|
100 |
+
},
|
101 |
+
"execution_count": 6,
|
102 |
+
"metadata": {},
|
103 |
+
"output_type": "execute_result"
|
104 |
+
}
|
105 |
+
],
|
106 |
+
"source": [
|
107 |
+
"str(path)"
|
108 |
+
]
|
109 |
+
},
|
110 |
+
{
|
111 |
+
"cell_type": "code",
|
112 |
+
"execution_count": 7,
|
113 |
+
"metadata": {},
|
114 |
+
"outputs": [],
|
115 |
+
"source": [
|
116 |
+
"# process_files(\"docs/llm-research.pdf\")"
|
117 |
+
]
|
118 |
+
},
|
119 |
+
{
|
120 |
+
"cell_type": "code",
|
121 |
+
"execution_count": 8,
|
122 |
+
"metadata": {
|
123 |
+
"colab": {
|
124 |
+
"background_save": true
|
125 |
+
},
|
126 |
+
"id": "a8tNUutJB9EA"
|
127 |
+
},
|
128 |
+
"outputs": [
|
129 |
+
{
|
130 |
+
"name": "stdout",
|
131 |
+
"output_type": "stream",
|
132 |
+
"text": [
|
133 |
+
"Running on local URL: http://127.0.0.1:7862\n",
|
134 |
+
"\n",
|
135 |
+
"To create a public link, set `share=True` in `launch()`.\n"
|
136 |
+
]
|
137 |
+
},
|
138 |
+
{
|
139 |
+
"data": {
|
140 |
+
"text/html": [
|
141 |
+
"<div><iframe src=\"http://127.0.0.1:7862/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
|
142 |
+
],
|
143 |
+
"text/plain": [
|
144 |
+
"<IPython.core.display.HTML object>"
|
145 |
+
]
|
146 |
+
},
|
147 |
+
"metadata": {},
|
148 |
+
"output_type": "display_data"
|
149 |
+
},
|
150 |
+
{
|
151 |
+
"name": "stdout",
|
152 |
+
"output_type": "stream",
|
153 |
+
"text": [
|
154 |
+
"Text extracted\n",
|
155 |
+
"Chunks splitted\n",
|
156 |
+
"Document search created\n"
|
157 |
+
]
|
158 |
+
},
|
159 |
+
{
|
160 |
+
"name": "stderr",
|
161 |
+
"output_type": "stream",
|
162 |
+
"text": [
|
163 |
+
"/home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages/langchain_core/_api/deprecation.py:117: LangChainDeprecationWarning: The function `__call__` was deprecated in LangChain 0.1.0 and will be removed in 0.2.0. Use invoke instead.\n",
|
164 |
+
" warn_deprecated(\n"
|
165 |
+
]
|
166 |
+
}
|
167 |
+
],
|
168 |
+
"source": [
|
169 |
+
"# Gradio App\n",
|
170 |
+
"import gradio as gr\n",
|
171 |
+
"\n",
|
172 |
+
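"# close any Gradio servers still running from earlier runs of this cell\n",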
"gr.close_all()\n",
|
173 |
+
"\n",
|
174 |
+
"title = \"\"\n",
|
175 |
+
"description = \"Chat with any docs\"\n",
|
176 |
+
"\n",
|
177 |
+
"# def answer_query(message, history):\n",
|
178 |
+
"# docs = db.similarity_search(message)\n",
|
179 |
+
"# message = agent(\n",
|
180 |
+
"# {\"input_documents\":docs, \"question\": message}\n",
|
181 |
+
"# ,return_only_outputs=True)\n",
|
182 |
+
"# return message['output_text']\n",
|
183 |
+
"\n",
|
184 |
+
"\n",
|
185 |
+
"chatbot = gr.Chatbot(label=\"ExploreText\")\n",
|
186 |
+
"\n",
|
187 |
+
"with gr.Blocks(\n",
|
188 |
+
" title=\"ExploreText\",\n",
|
189 |
+
" ) as textbot:\n",
|
190 |
+
"\n",
|
191 |
+
" gr.Markdown(\"# <center> Welcome to ExploreDoc Web App</center>\")\n",
|
192 |
+
" \n",
|
193 |
+
" with gr.Accordion(\"Upload a file here\", open=False):\n",
|
194 |
+
" file_output = gr.File()\n",
|
195 |
+
" upload_button = gr.UploadButton(\"Click to Upload a File\", file_types=[\"txt\",\"doc\",\"pdf\"])\n",
|
196 |
+
" upload_button.upload(process_files, upload_button, file_output)\n",
|
197 |
+
"\n",
|
198 |
+
" # with gr.Row(\"Chat with Text\"):\n",
|
199 |
+
" gr.ChatInterface(fn=answer_query, chatbot=chatbot, submit_btn=\"Ask\", undo_btn=None, retry_btn=None, clear_btn=None)\n",
|
200 |
+
"    gr.Markdown(\"<center> Developed by <a href='https://92-vasim.github.io' target='_blank'>Mohammed Vasim</a> | AI Engineer & Computer Vision Engineer @ ZestIoT. </center>\")\n",
|
201 |
+
" \n",
|
202 |
+
"\n",
|
203 |
+
"if __name__ == \"__main__\":\n",
|
204 |
+
" textbot.queue().launch()\n",
|
205 |
+
"\n"
|
206 |
+
]
|
207 |
+
},
|
208 |
+
{
|
209 |
+
"cell_type": "markdown",
|
210 |
+
"metadata": {},
|
211 |
+
"source": []
|
212 |
+
},
|
213 |
+
{
|
214 |
+
"cell_type": "code",
|
215 |
+
"execution_count": null,
|
216 |
+
"metadata": {},
|
217 |
+
"outputs": [],
|
218 |
+
"source": []
|
219 |
+
},
|
220 |
+
{
|
221 |
+
"cell_type": "code",
|
222 |
+
"execution_count": null,
|
223 |
+
"metadata": {},
|
224 |
+
"outputs": [],
|
225 |
+
"source": []
|
226 |
+
}
|
227 |
+
],
|
228 |
+
"metadata": {
|
229 |
+
"colab": {
|
230 |
+
"provenance": []
|
231 |
+
},
|
232 |
+
"kernelspec": {
|
233 |
+
"display_name": "Python 3",
|
234 |
+
"name": "python3"
|
235 |
+
},
|
236 |
+
"language_info": {
|
237 |
+
"codemirror_mode": {
|
238 |
+
"name": "ipython",
|
239 |
+
"version": 3
|
240 |
+
},
|
241 |
+
"file_extension": ".py",
|
242 |
+
"mimetype": "text/x-python",
|
243 |
+
"name": "python",
|
244 |
+
"nbconvert_exporter": "python",
|
245 |
+
"pygments_lexer": "ipython3",
|
246 |
+
"version": "3.10.12"
|
247 |
+
}
|
248 |
+
},
|
249 |
+
"nbformat": 4,
|
250 |
+
"nbformat_minor": 0
|
251 |
+
}
|
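
This second notebook drives the same Gradio layout from the packaged src.utils helpers instead of inline functions. For reference, here is a minimal self-contained sketch of that wiring under the gradio 4.14.0 installed earlier, with stub handlers standing in for src.utils.process_files and answer_query; the stub bodies are illustrative only, not the real implementations.

    import gradio as gr

    def process_files(file):
        # gr.UploadButton passes the uploaded temp file; returning it populates gr.File
        return file

    def answer_query(message, history):
        # gr.ChatInterface calls fn(message, history); history holds the prior chat turns
        return f"You asked: {message}"

    with gr.Blocks(title="ExploreText") as demo:
        with gr.Accordion("Upload a file here", open=False):
            file_output = gr.File()
            upload_button = gr.UploadButton("Click to Upload a File", file_types=["txt", "doc", "pdf"])
            upload_button.upload(process_files, upload_button, file_output)
        gr.ChatInterface(fn=answer_query, submit_btn="Ask", undo_btn=None, retry_btn=None, clear_btn=None)

    if __name__ == "__main__":
        demo.queue().launch()
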
notebooks/Gemini_Chat_Docs.ipynb
ADDED
@@ -0,0 +1,932 @@
1 |
+
{
|
2 |
+
"cells": [
|
3 |
+
{
|
4 |
+
"cell_type": "code",
|
5 |
+
"execution_count": 1,
|
6 |
+
"metadata": {
|
7 |
+
"id": "2N8psBL6-wfJ"
|
8 |
+
},
|
9 |
+
"outputs": [],
|
10 |
+
"source": [
|
11 |
+
"# !cp -rf /content/drive/MyDrive/ML\\ Projects/Gemini-Pro/docs /content/"
|
12 |
+
]
|
13 |
+
},
|
14 |
+
{
|
15 |
+
"cell_type": "code",
|
16 |
+
"execution_count": 1,
|
17 |
+
"metadata": {
|
18 |
+
"colab": {
|
19 |
+
"base_uri": "https://localhost:8080/"
|
20 |
+
},
|
21 |
+
"id": "9AkjB4x3ybTb",
|
22 |
+
"outputId": "7b4fa13a-cb14-4e59-f5b7-c426a3fbea35"
|
23 |
+
},
|
24 |
+
"outputs": [
|
25 |
+
{
|
26 |
+
"name": "stdout",
|
27 |
+
"output_type": "stream",
|
28 |
+
"text": [
|
29 |
+
"Collecting langchain\n",
|
30 |
+
" Downloading langchain-0.1.0-py3-none-any.whl (797 kB)\n",
|
31 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m798.0/798.0 KB\u001b[0m \u001b[31m604.5 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
|
32 |
+
"\u001b[?25hCollecting tenacity<9.0.0,>=8.1.0\n",
|
33 |
+
" Using cached tenacity-8.2.3-py3-none-any.whl (24 kB)\n",
|
34 |
+
"Collecting dataclasses-json<0.7,>=0.5.7\n",
|
35 |
+
" Downloading dataclasses_json-0.6.3-py3-none-any.whl (28 kB)\n",
|
36 |
+
"Collecting SQLAlchemy<3,>=1.4\n",
|
37 |
+
" Downloading SQLAlchemy-2.0.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (3.1 MB)\n",
|
38 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m3.1/3.1 MB\u001b[0m \u001b[31m1.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
|
39 |
+
"\u001b[?25hCollecting requests<3,>=2\n",
|
40 |
+
" Using cached requests-2.31.0-py3-none-any.whl (62 kB)\n",
|
41 |
+
"Collecting numpy<2,>=1\n",
|
42 |
+
" Downloading numpy-1.26.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (18.2 MB)\n",
|
43 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m18.2/18.2 MB\u001b[0m \u001b[31m2.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
|
44 |
+
"\u001b[?25hCollecting langchain-core<0.2,>=0.1.7\n",
|
45 |
+
" Using cached langchain_core-0.1.10-py3-none-any.whl (216 kB)\n",
|
46 |
+
"Collecting langchain-community<0.1,>=0.0.9\n",
|
47 |
+
" Downloading langchain_community-0.0.12-py3-none-any.whl (1.6 MB)\n",
|
48 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.6/1.6 MB\u001b[0m \u001b[31m1.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0meta \u001b[36m0:00:01\u001b[0m\n",
|
49 |
+
"\u001b[?25hCollecting langsmith<0.1.0,>=0.0.77\n",
|
50 |
+
" Downloading langsmith-0.0.80-py3-none-any.whl (48 kB)\n",
|
51 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m48.3/48.3 KB\u001b[0m \u001b[31m594.8 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m \u001b[36m0:00:01\u001b[0m\n",
|
52 |
+
"\u001b[?25hCollecting PyYAML>=5.3\n",
|
53 |
+
" Using cached PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (705 kB)\n",
|
54 |
+
"Collecting aiohttp<4.0.0,>=3.8.3\n",
|
55 |
+
" Downloading aiohttp-3.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.2 MB)\n",
|
56 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.2/1.2 MB\u001b[0m \u001b[31m524.5 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:04\u001b[0m\n",
|
57 |
+
"\u001b[?25hCollecting async-timeout<5.0.0,>=4.0.0\n",
|
58 |
+
" Using cached async_timeout-4.0.3-py3-none-any.whl (5.7 kB)\n",
|
59 |
+
"Collecting pydantic<3,>=1\n",
|
60 |
+
" Using cached pydantic-2.5.3-py3-none-any.whl (381 kB)\n",
|
61 |
+
"Collecting jsonpatch<2.0,>=1.33\n",
|
62 |
+
" Using cached jsonpatch-1.33-py2.py3-none-any.whl (12 kB)\n",
|
63 |
+
"Collecting aiosignal>=1.1.2\n",
|
64 |
+
" Using cached aiosignal-1.3.1-py3-none-any.whl (7.6 kB)\n",
|
65 |
+
"Collecting multidict<7.0,>=4.5\n",
|
66 |
+
" Using cached multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (114 kB)\n",
|
67 |
+
"Collecting yarl<2.0,>=1.0\n",
|
68 |
+
" Downloading yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (301 kB)\n",
|
69 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m301.6/301.6 KB\u001b[0m \u001b[31m867.8 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
|
70 |
+
"\u001b[?25hCollecting frozenlist>=1.1.1\n",
|
71 |
+
" Downloading frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (239 kB)\n",
|
72 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m239.5/239.5 KB\u001b[0m \u001b[31m1.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
|
73 |
+
"\u001b[?25hCollecting attrs>=17.3.0\n",
|
74 |
+
" Downloading attrs-23.2.0-py3-none-any.whl (60 kB)\n",
|
75 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m60.8/60.8 KB\u001b[0m \u001b[31m7.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
|
76 |
+
"\u001b[?25hCollecting marshmallow<4.0.0,>=3.18.0\n",
|
77 |
+
" Downloading marshmallow-3.20.2-py3-none-any.whl (49 kB)\n",
|
78 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m49.4/49.4 KB\u001b[0m \u001b[31m17.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
|
79 |
+
"\u001b[?25hCollecting typing-inspect<1,>=0.4.0\n",
|
80 |
+
" Using cached typing_inspect-0.9.0-py3-none-any.whl (8.8 kB)\n",
|
81 |
+
"Collecting jsonpointer>=1.9\n",
|
82 |
+
" Using cached jsonpointer-2.4-py2.py3-none-any.whl (7.8 kB)\n",
|
83 |
+
"Requirement already satisfied: packaging<24.0,>=23.2 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from langchain-core<0.2,>=0.1.7->langchain) (23.2)\n",
|
84 |
+
"Collecting anyio<5,>=3\n",
|
85 |
+
" Using cached anyio-4.2.0-py3-none-any.whl (85 kB)\n",
|
86 |
+
"Collecting annotated-types>=0.4.0\n",
|
87 |
+
" Using cached annotated_types-0.6.0-py3-none-any.whl (12 kB)\n",
|
88 |
+
"Collecting typing-extensions>=4.6.1\n",
|
89 |
+
" Using cached typing_extensions-4.9.0-py3-none-any.whl (32 kB)\n",
|
90 |
+
"\u001b[33mWARNING: Retrying (Retry(total=4, connect=None, read=None, redirect=None, status=None)) after connection broken by 'ProtocolError('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))': /simple/pydantic-core/\u001b[0m\u001b[33m\n",
|
91 |
+
"\u001b[0mCollecting pydantic-core==2.14.6\n",
|
92 |
+
" Using cached pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (2.1 MB)\n",
|
93 |
+
"Collecting certifi>=2017.4.17\n",
|
94 |
+
" Using cached certifi-2023.11.17-py3-none-any.whl (162 kB)\n",
|
95 |
+
"Collecting urllib3<3,>=1.21.1\n",
|
96 |
+
" Using cached urllib3-2.1.0-py3-none-any.whl (104 kB)\n",
|
97 |
+
"Collecting idna<4,>=2.5\n",
|
98 |
+
" Using cached idna-3.6-py3-none-any.whl (61 kB)\n",
|
99 |
+
"Collecting charset-normalizer<4,>=2\n",
|
100 |
+
" Using cached charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (142 kB)\n",
|
101 |
+
"Collecting greenlet!=0.4.17\n",
|
102 |
+
" Downloading greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl (616 kB)\n",
|
103 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m616.0/616.0 KB\u001b[0m \u001b[31m1.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
|
104 |
+
"\u001b[?25hCollecting sniffio>=1.1\n",
|
105 |
+
" Using cached sniffio-1.3.0-py3-none-any.whl (10 kB)\n",
|
106 |
+
"Requirement already satisfied: exceptiongroup>=1.0.2 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from anyio<5,>=3->langchain-core<0.2,>=0.1.7->langchain) (1.2.0)\n",
|
107 |
+
"Collecting mypy-extensions>=0.3.0\n",
|
108 |
+
" Using cached mypy_extensions-1.0.0-py3-none-any.whl (4.7 kB)\n",
|
109 |
+
"Installing collected packages: urllib3, typing-extensions, tenacity, sniffio, PyYAML, numpy, mypy-extensions, multidict, marshmallow, jsonpointer, idna, greenlet, frozenlist, charset-normalizer, certifi, attrs, async-timeout, annotated-types, yarl, typing-inspect, SQLAlchemy, requests, pydantic-core, jsonpatch, anyio, aiosignal, pydantic, dataclasses-json, aiohttp, langsmith, langchain-core, langchain-community, langchain\n",
|
110 |
+
"Successfully installed PyYAML-6.0.1 SQLAlchemy-2.0.25 aiohttp-3.9.1 aiosignal-1.3.1 annotated-types-0.6.0 anyio-4.2.0 async-timeout-4.0.3 attrs-23.2.0 certifi-2023.11.17 charset-normalizer-3.3.2 dataclasses-json-0.6.3 frozenlist-1.4.1 greenlet-3.0.3 idna-3.6 jsonpatch-1.33 jsonpointer-2.4 langchain-0.1.0 langchain-community-0.0.12 langchain-core-0.1.10 langsmith-0.0.80 marshmallow-3.20.2 multidict-6.0.4 mypy-extensions-1.0.0 numpy-1.26.3 pydantic-2.5.3 pydantic-core-2.14.6 requests-2.31.0 sniffio-1.3.0 tenacity-8.2.3 typing-extensions-4.9.0 typing-inspect-0.9.0 urllib3-2.1.0 yarl-1.9.4\n",
|
111 |
+
"Collecting pypdf\n",
|
112 |
+
" Downloading pypdf-3.17.4-py3-none-any.whl (278 kB)\n",
|
113 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m278.2/278.2 KB\u001b[0m \u001b[31m898.7 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
|
114 |
+
"\u001b[?25hInstalling collected packages: pypdf\n",
|
115 |
+
"Successfully installed pypdf-3.17.4\n",
|
116 |
+
"Collecting langchain_google_genai\n",
|
117 |
+
" Using cached langchain_google_genai-0.0.6-py3-none-any.whl (15 kB)\n",
|
118 |
+
"Collecting google-generativeai<0.4.0,>=0.3.1\n",
|
119 |
+
" Using cached google_generativeai-0.3.2-py3-none-any.whl (146 kB)\n",
|
120 |
+
"Requirement already satisfied: langchain-core<0.2,>=0.1 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from langchain_google_genai) (0.1.10)\n",
|
121 |
+
"Collecting google-api-core\n",
|
122 |
+
" Using cached google_api_core-2.15.0-py3-none-any.whl (121 kB)\n",
|
123 |
+
"Collecting google-ai-generativelanguage==0.4.0\n",
|
124 |
+
" Using cached google_ai_generativelanguage-0.4.0-py3-none-any.whl (598 kB)\n",
|
125 |
+
"Collecting protobuf\n",
|
126 |
+
" Downloading protobuf-4.25.2-cp37-abi3-manylinux2014_x86_64.whl (294 kB)\n",
|
127 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m294.6/294.6 KB\u001b[0m \u001b[31m891.6 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
|
128 |
+
"\u001b[?25hCollecting google-auth\n",
|
129 |
+
" Downloading google_auth-2.26.2-py2.py3-none-any.whl (186 kB)\n",
|
130 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m186.5/186.5 KB\u001b[0m \u001b[31m1.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
|
131 |
+
"\u001b[?25hRequirement already satisfied: typing-extensions in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from google-generativeai<0.4.0,>=0.3.1->langchain_google_genai) (4.9.0)\n",
|
132 |
+
"Collecting tqdm\n",
|
133 |
+
" Using cached tqdm-4.66.1-py3-none-any.whl (78 kB)\n",
|
134 |
+
"Collecting proto-plus<2.0.0dev,>=1.22.3\n",
|
135 |
+
" Using cached proto_plus-1.23.0-py3-none-any.whl (48 kB)\n",
|
136 |
+
"Requirement already satisfied: requests<3,>=2 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from langchain-core<0.2,>=0.1->langchain_google_genai) (2.31.0)\n",
|
137 |
+
"Requirement already satisfied: langsmith<0.1.0,>=0.0.63 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from langchain-core<0.2,>=0.1->langchain_google_genai) (0.0.80)\n",
|
138 |
+
"Requirement already satisfied: pydantic<3,>=1 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from langchain-core<0.2,>=0.1->langchain_google_genai) (2.5.3)\n",
|
139 |
+
"Requirement already satisfied: PyYAML>=5.3 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from langchain-core<0.2,>=0.1->langchain_google_genai) (6.0.1)\n",
|
140 |
+
"Requirement already satisfied: anyio<5,>=3 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from langchain-core<0.2,>=0.1->langchain_google_genai) (4.2.0)\n",
|
141 |
+
"Requirement already satisfied: packaging<24.0,>=23.2 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from langchain-core<0.2,>=0.1->langchain_google_genai) (23.2)\n",
|
142 |
+
"Requirement already satisfied: tenacity<9.0.0,>=8.1.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from langchain-core<0.2,>=0.1->langchain_google_genai) (8.2.3)\n",
|
143 |
+
"Requirement already satisfied: jsonpatch<2.0,>=1.33 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from langchain-core<0.2,>=0.1->langchain_google_genai) (1.33)\n",
|
144 |
+
"Requirement already satisfied: sniffio>=1.1 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from anyio<5,>=3->langchain-core<0.2,>=0.1->langchain_google_genai) (1.3.0)\n",
|
145 |
+
"Requirement already satisfied: idna>=2.8 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from anyio<5,>=3->langchain-core<0.2,>=0.1->langchain_google_genai) (3.6)\n",
|
146 |
+
"Requirement already satisfied: exceptiongroup>=1.0.2 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from anyio<5,>=3->langchain-core<0.2,>=0.1->langchain_google_genai) (1.2.0)\n",
|
147 |
+
"Requirement already satisfied: jsonpointer>=1.9 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from jsonpatch<2.0,>=1.33->langchain-core<0.2,>=0.1->langchain_google_genai) (2.4)\n",
|
148 |
+
"Requirement already satisfied: pydantic-core==2.14.6 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from pydantic<3,>=1->langchain-core<0.2,>=0.1->langchain_google_genai) (2.14.6)\n",
|
149 |
+
"Requirement already satisfied: annotated-types>=0.4.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from pydantic<3,>=1->langchain-core<0.2,>=0.1->langchain_google_genai) (0.6.0)\n",
|
150 |
+
"Requirement already satisfied: charset-normalizer<4,>=2 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from requests<3,>=2->langchain-core<0.2,>=0.1->langchain_google_genai) (3.3.2)\n",
|
151 |
+
"Requirement already satisfied: urllib3<3,>=1.21.1 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from requests<3,>=2->langchain-core<0.2,>=0.1->langchain_google_genai) (2.1.0)\n",
|
152 |
+
"Requirement already satisfied: certifi>=2017.4.17 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from requests<3,>=2->langchain-core<0.2,>=0.1->langchain_google_genai) (2023.11.17)\n",
|
153 |
+
"Collecting googleapis-common-protos<2.0.dev0,>=1.56.2\n",
|
154 |
+
" Downloading googleapis_common_protos-1.62.0-py2.py3-none-any.whl (228 kB)\n",
|
155 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m228.7/228.7 KB\u001b[0m \u001b[31m1.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
|
156 |
+
"\u001b[?25hCollecting cachetools<6.0,>=2.0.0\n",
|
157 |
+
" Using cached cachetools-5.3.2-py3-none-any.whl (9.3 kB)\n",
|
158 |
+
"Collecting pyasn1-modules>=0.2.1\n",
|
159 |
+
" Using cached pyasn1_modules-0.3.0-py2.py3-none-any.whl (181 kB)\n",
|
160 |
+
"Collecting rsa<5,>=3.1.4\n",
|
161 |
+
" Using cached rsa-4.9-py3-none-any.whl (34 kB)\n",
|
162 |
+
"Collecting grpcio<2.0dev,>=1.33.2\n",
|
163 |
+
" Using cached grpcio-1.60.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (5.4 MB)\n",
|
164 |
+
"Collecting grpcio-status<2.0.dev0,>=1.33.2\n",
|
165 |
+
" Using cached grpcio_status-1.60.0-py3-none-any.whl (14 kB)\n",
|
166 |
+
"Collecting pyasn1<0.6.0,>=0.4.6\n",
|
167 |
+
" Downloading pyasn1-0.5.1-py2.py3-none-any.whl (84 kB)\n",
|
168 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m84.9/84.9 KB\u001b[0m \u001b[31m974.7 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
|
169 |
+
"\u001b[?25hInstalling collected packages: tqdm, pyasn1, protobuf, grpcio, cachetools, rsa, pyasn1-modules, proto-plus, googleapis-common-protos, grpcio-status, google-auth, google-api-core, google-ai-generativelanguage, google-generativeai, langchain_google_genai\n",
|
170 |
+
"Successfully installed cachetools-5.3.2 google-ai-generativelanguage-0.4.0 google-api-core-2.15.0 google-auth-2.26.2 google-generativeai-0.3.2 googleapis-common-protos-1.62.0 grpcio-1.60.0 grpcio-status-1.60.0 langchain_google_genai-0.0.6 proto-plus-1.23.0 protobuf-4.25.2 pyasn1-0.5.1 pyasn1-modules-0.3.0 rsa-4.9 tqdm-4.66.1\n",
|
171 |
+
"Requirement already satisfied: google-generativeai in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (0.3.2)\n",
|
172 |
+
"Requirement already satisfied: google-auth in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from google-generativeai) (2.26.2)\n",
|
173 |
+
"Requirement already satisfied: google-api-core in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from google-generativeai) (2.15.0)\n",
|
174 |
+
"Requirement already satisfied: protobuf in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from google-generativeai) (4.25.2)\n",
|
175 |
+
"Requirement already satisfied: google-ai-generativelanguage==0.4.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from google-generativeai) (0.4.0)\n",
|
176 |
+
"Requirement already satisfied: tqdm in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from google-generativeai) (4.66.1)\n",
|
177 |
+
"Requirement already satisfied: typing-extensions in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from google-generativeai) (4.9.0)\n",
|
178 |
+
"Requirement already satisfied: proto-plus<2.0.0dev,>=1.22.3 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from google-ai-generativelanguage==0.4.0->google-generativeai) (1.23.0)\n",
|
179 |
+
"Requirement already satisfied: requests<3.0.0.dev0,>=2.18.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from google-api-core->google-generativeai) (2.31.0)\n",
|
180 |
+
"Requirement already satisfied: googleapis-common-protos<2.0.dev0,>=1.56.2 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from google-api-core->google-generativeai) (1.62.0)\n",
|
181 |
+
"Requirement already satisfied: cachetools<6.0,>=2.0.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from google-auth->google-generativeai) (5.3.2)\n",
|
182 |
+
"Requirement already satisfied: pyasn1-modules>=0.2.1 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from google-auth->google-generativeai) (0.3.0)\n",
|
183 |
+
"Requirement already satisfied: rsa<5,>=3.1.4 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from google-auth->google-generativeai) (4.9)\n",
|
184 |
+
"Requirement already satisfied: grpcio<2.0dev,>=1.33.2 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from google-api-core->google-generativeai) (1.60.0)\n",
|
185 |
+
"Requirement already satisfied: grpcio-status<2.0.dev0,>=1.33.2 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from google-api-core->google-generativeai) (1.60.0)\n",
|
186 |
+
"Requirement already satisfied: pyasn1<0.6.0,>=0.4.6 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from pyasn1-modules>=0.2.1->google-auth->google-generativeai) (0.5.1)\n",
|
187 |
+
"Requirement already satisfied: certifi>=2017.4.17 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from requests<3.0.0.dev0,>=2.18.0->google-api-core->google-generativeai) (2023.11.17)\n",
|
188 |
+
"Requirement already satisfied: charset-normalizer<4,>=2 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from requests<3.0.0.dev0,>=2.18.0->google-api-core->google-generativeai) (3.3.2)\n",
|
189 |
+
"Requirement already satisfied: urllib3<3,>=1.21.1 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from requests<3.0.0.dev0,>=2.18.0->google-api-core->google-generativeai) (2.1.0)\n",
|
190 |
+
"Requirement already satisfied: idna<4,>=2.5 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from requests<3.0.0.dev0,>=2.18.0->google-api-core->google-generativeai) (3.6)\n",
|
191 |
+
"Collecting chromadb\n",
|
192 |
+
" Downloading chromadb-0.4.22-py3-none-any.whl (509 kB)\n",
|
193 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m509.0/509.0 KB\u001b[0m \u001b[31m945.6 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
|
194 |
+
"\u001b[?25hCollecting overrides>=7.3.1\n",
|
195 |
+
" Using cached overrides-7.4.0-py3-none-any.whl (17 kB)\n",
|
196 |
+
"Collecting opentelemetry-api>=1.2.0\n",
|
197 |
+
" Downloading opentelemetry_api-1.22.0-py3-none-any.whl (57 kB)\n",
|
198 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m57.9/57.9 KB\u001b[0m \u001b[31m861.5 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m:--:--\u001b[0m\n",
|
199 |
+
"\u001b[?25hRequirement already satisfied: typing-extensions>=4.5.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from chromadb) (4.9.0)\n",
|
200 |
+
"Collecting pulsar-client>=3.1.0\n",
|
201 |
+
" Downloading pulsar_client-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (5.4 MB)\n",
|
202 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m5.4/5.4 MB\u001b[0m \u001b[31m1.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
|
203 |
+
"\u001b[?25hCollecting mmh3>=4.0.1\n",
|
204 |
+
" Downloading mmh3-4.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (67 kB)\n",
|
205 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m67.6/67.6 KB\u001b[0m \u001b[31m6.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
|
206 |
+
"\u001b[?25hCollecting onnxruntime>=1.14.1\n",
|
207 |
+
" Downloading onnxruntime-1.16.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (6.4 MB)\n",
|
208 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m6.4/6.4 MB\u001b[0m \u001b[31m2.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
|
209 |
+
"\u001b[?25hCollecting typer>=0.9.0\n",
|
210 |
+
" Using cached typer-0.9.0-py3-none-any.whl (45 kB)\n",
|
211 |
+
"Requirement already satisfied: grpcio>=1.58.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from chromadb) (1.60.0)\n",
|
212 |
+
"Requirement already satisfied: numpy>=1.22.5 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from chromadb) (1.26.3)\n",
|
213 |
+
"Collecting opentelemetry-instrumentation-fastapi>=0.41b0\n",
|
214 |
+
" Downloading opentelemetry_instrumentation_fastapi-0.43b0-py3-none-any.whl (11 kB)\n",
|
215 |
+
"Requirement already satisfied: requests>=2.28 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from chromadb) (2.31.0)\n",
|
216 |
+
"Collecting pypika>=0.48.9\n",
|
217 |
+
" Using cached PyPika-0.48.9-py2.py3-none-any.whl\n",
|
218 |
+
"Requirement already satisfied: pydantic>=1.9 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from chromadb) (2.5.3)\n",
|
219 |
+
"Collecting opentelemetry-exporter-otlp-proto-grpc>=1.2.0\n",
|
220 |
+
" Downloading opentelemetry_exporter_otlp_proto_grpc-1.22.0-py3-none-any.whl (18 kB)\n",
|
221 |
+
"Collecting opentelemetry-sdk>=1.2.0\n",
|
222 |
+
" Downloading opentelemetry_sdk-1.22.0-py3-none-any.whl (105 kB)\n",
|
223 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m105.6/105.6 KB\u001b[0m \u001b[31m6.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
|
224 |
+
"\u001b[?25hCollecting kubernetes>=28.1.0\n",
|
225 |
+
" Downloading kubernetes-29.0.0-py2.py3-none-any.whl (1.6 MB)\n",
|
226 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.6/1.6 MB\u001b[0m \u001b[31m1.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
|
227 |
+
"\u001b[?25hCollecting uvicorn[standard]>=0.18.3\n",
|
228 |
+
" Using cached uvicorn-0.25.0-py3-none-any.whl (60 kB)\n",
|
229 |
+
"Collecting posthog>=2.4.0\n",
|
230 |
+
" Downloading posthog-3.3.1-py2.py3-none-any.whl (40 kB)\n",
|
231 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m40.6/40.6 KB\u001b[0m \u001b[31m455.0 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m \u001b[36m0:00:01\u001b[0m\n",
|
232 |
+
"\u001b[?25hRequirement already satisfied: tenacity>=8.2.3 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from chromadb) (8.2.3)\n",
|
233 |
+
"Requirement already satisfied: PyYAML>=6.0.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from chromadb) (6.0.1)\n",
|
234 |
+
"Requirement already satisfied: tqdm>=4.65.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from chromadb) (4.66.1)\n",
|
235 |
+
"Collecting tokenizers>=0.13.2\n",
|
236 |
+
" Using cached tokenizers-0.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (3.8 MB)\n",
|
237 |
+
"Collecting bcrypt>=4.0.1\n",
|
238 |
+
" Downloading bcrypt-4.1.2-cp39-abi3-manylinux_2_28_x86_64.whl (698 kB)\n",
|
239 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m698.9/698.9 KB\u001b[0m \u001b[31m1.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
|
240 |
+
"\u001b[?25hCollecting fastapi>=0.95.2\n",
|
241 |
+
" Downloading fastapi-0.109.0-py3-none-any.whl (92 kB)\n",
|
242 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m92.0/92.0 KB\u001b[0m \u001b[31m9.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
|
243 |
+
"\u001b[?25hCollecting build>=1.0.3\n",
|
244 |
+
" Downloading build-1.0.3-py3-none-any.whl (18 kB)\n",
|
245 |
+
"Collecting importlib-resources\n",
|
246 |
+
" Using cached importlib_resources-6.1.1-py3-none-any.whl (33 kB)\n",
|
247 |
+
"Collecting chroma-hnswlib==0.7.3\n",
|
248 |
+
" Using cached chroma_hnswlib-0.7.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (2.4 MB)\n",
|
249 |
+
"Collecting pyproject_hooks\n",
|
250 |
+
" Downloading pyproject_hooks-1.0.0-py3-none-any.whl (9.3 kB)\n",
|
251 |
+
"Collecting tomli>=1.1.0\n",
|
252 |
+
" Using cached tomli-2.0.1-py3-none-any.whl (12 kB)\n",
|
253 |
+
"Requirement already satisfied: packaging>=19.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from build>=1.0.3->chromadb) (23.2)\n",
|
254 |
+
"Collecting starlette<0.36.0,>=0.35.0\n",
|
255 |
+
" Downloading starlette-0.35.1-py3-none-any.whl (71 kB)\n",
|
256 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m71.1/71.1 KB\u001b[0m \u001b[31m2.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
|
257 |
+
"\u001b[?25hCollecting oauthlib>=3.2.2\n",
|
258 |
+
" Using cached oauthlib-3.2.2-py3-none-any.whl (151 kB)\n",
|
259 |
+
"Requirement already satisfied: python-dateutil>=2.5.3 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from kubernetes>=28.1.0->chromadb) (2.8.2)\n",
|
260 |
+
"Requirement already satisfied: urllib3>=1.24.2 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from kubernetes>=28.1.0->chromadb) (2.1.0)\n",
|
261 |
+
"Requirement already satisfied: google-auth>=1.0.1 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from kubernetes>=28.1.0->chromadb) (2.26.2)\n",
|
262 |
+
"Requirement already satisfied: six>=1.9.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from kubernetes>=28.1.0->chromadb) (1.16.0)\n",
|
263 |
+
"Collecting requests-oauthlib\n",
|
264 |
+
" Using cached requests_oauthlib-1.3.1-py2.py3-none-any.whl (23 kB)\n",
|
265 |
+
"Collecting websocket-client!=0.40.0,!=0.41.*,!=0.42.*,>=0.32.0\n",
|
266 |
+
" Downloading websocket_client-1.7.0-py3-none-any.whl (58 kB)\n",
|
267 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m58.5/58.5 KB\u001b[0m \u001b[31m3.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
|
268 |
+
"\u001b[?25hRequirement already satisfied: certifi>=14.05.14 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from kubernetes>=28.1.0->chromadb) (2023.11.17)\n",
|
269 |
+
"Collecting flatbuffers\n",
|
270 |
+
" Using cached flatbuffers-23.5.26-py2.py3-none-any.whl (26 kB)\n",
|
271 |
+
"Collecting coloredlogs\n",
|
272 |
+
" Using cached coloredlogs-15.0.1-py2.py3-none-any.whl (46 kB)\n",
|
273 |
+
"Collecting sympy\n",
|
274 |
+
" Using cached sympy-1.12-py3-none-any.whl (5.7 MB)\n",
|
275 |
+
"Requirement already satisfied: protobuf in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from onnxruntime>=1.14.1->chromadb) (4.25.2)\n",
|
276 |
+
"Collecting deprecated>=1.2.6\n",
|
277 |
+
" Using cached Deprecated-1.2.14-py2.py3-none-any.whl (9.6 kB)\n",
|
278 |
+
"Collecting importlib-metadata<7.0,>=6.0\n",
|
279 |
+
" Downloading importlib_metadata-6.11.0-py3-none-any.whl (23 kB)\n",
|
280 |
+
"Collecting backoff<3.0.0,>=1.10.0\n",
|
281 |
+
" Using cached backoff-2.2.1-py3-none-any.whl (15 kB)\n",
|
282 |
+
"Collecting opentelemetry-proto==1.22.0\n",
|
283 |
+
" Downloading opentelemetry_proto-1.22.0-py3-none-any.whl (50 kB)\n",
|
284 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m50.8/50.8 KB\u001b[0m \u001b[31m1.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
|
285 |
+
"\u001b[?25hCollecting opentelemetry-exporter-otlp-proto-common==1.22.0\n",
|
286 |
+
" Downloading opentelemetry_exporter_otlp_proto_common-1.22.0-py3-none-any.whl (17 kB)\n",
|
287 |
+
"Requirement already satisfied: googleapis-common-protos~=1.52 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from opentelemetry-exporter-otlp-proto-grpc>=1.2.0->chromadb) (1.62.0)\n",
|
288 |
+
"Collecting opentelemetry-instrumentation-asgi==0.43b0\n",
|
289 |
+
" Downloading opentelemetry_instrumentation_asgi-0.43b0-py3-none-any.whl (14 kB)\n",
|
290 |
+
"Collecting opentelemetry-semantic-conventions==0.43b0\n",
|
291 |
+
" Downloading opentelemetry_semantic_conventions-0.43b0-py3-none-any.whl (36 kB)\n",
|
292 |
+
"Collecting opentelemetry-util-http==0.43b0\n",
|
293 |
+
" Downloading opentelemetry_util_http-0.43b0-py3-none-any.whl (6.9 kB)\n",
|
294 |
+
"Collecting opentelemetry-instrumentation==0.43b0\n",
|
295 |
+
" Downloading opentelemetry_instrumentation-0.43b0-py3-none-any.whl (28 kB)\n",
|
296 |
+
"Collecting wrapt<2.0.0,>=1.0.0\n",
|
297 |
+
" Using cached wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (80 kB)\n",
|
298 |
+
"Requirement already satisfied: setuptools>=16.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from opentelemetry-instrumentation==0.43b0->opentelemetry-instrumentation-fastapi>=0.41b0->chromadb) (59.6.0)\n",
|
299 |
+
"Collecting asgiref~=3.0\n",
|
300 |
+
" Downloading asgiref-3.7.2-py3-none-any.whl (24 kB)\n",
|
301 |
+
"Collecting monotonic>=1.5\n",
|
302 |
+
" Downloading monotonic-1.6-py2.py3-none-any.whl (8.2 kB)\n",
|
303 |
+
"Requirement already satisfied: annotated-types>=0.4.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from pydantic>=1.9->chromadb) (0.6.0)\n",
|
304 |
+
"Requirement already satisfied: pydantic-core==2.14.6 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from pydantic>=1.9->chromadb) (2.14.6)\n",
|
305 |
+
"Requirement already satisfied: idna<4,>=2.5 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from requests>=2.28->chromadb) (3.6)\n",
|
306 |
+
"Requirement already satisfied: charset-normalizer<4,>=2 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from requests>=2.28->chromadb) (3.3.2)\n",
|
307 |
+
"Collecting huggingface_hub<1.0,>=0.16.4\n",
|
308 |
+
" Using cached huggingface_hub-0.20.2-py3-none-any.whl (330 kB)\n",
|
309 |
+
"Collecting click<9.0.0,>=7.1.1\n",
|
310 |
+
" Using cached click-8.1.7-py3-none-any.whl (97 kB)\n",
|
311 |
+
"Collecting h11>=0.8\n",
|
312 |
+
" Using cached h11-0.14.0-py3-none-any.whl (58 kB)\n",
|
313 |
+
"Collecting websockets>=10.4\n",
|
314 |
+
" Downloading websockets-12.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (130 kB)\n",
|
315 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m130.2/130.2 KB\u001b[0m \u001b[31m1.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
|
316 |
+
"\u001b[?25hCollecting python-dotenv>=0.13\n",
|
317 |
+
" Using cached python_dotenv-1.0.0-py3-none-any.whl (19 kB)\n",
|
318 |
+
"Collecting httptools>=0.5.0\n",
|
319 |
+
" Using cached httptools-0.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (341 kB)\n",
|
320 |
+
"Collecting watchfiles>=0.13\n",
|
321 |
+
" Using cached watchfiles-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.3 MB)\n",
|
322 |
+
"Collecting uvloop!=0.15.0,!=0.15.1,>=0.14.0\n",
|
323 |
+
" Using cached uvloop-0.19.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (3.4 MB)\n",
|
324 |
+
"Requirement already satisfied: rsa<5,>=3.1.4 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from google-auth>=1.0.1->kubernetes>=28.1.0->chromadb) (4.9)\n",
|
325 |
+
"Requirement already satisfied: cachetools<6.0,>=2.0.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from google-auth>=1.0.1->kubernetes>=28.1.0->chromadb) (5.3.2)\n",
|
326 |
+
"Requirement already satisfied: pyasn1-modules>=0.2.1 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from google-auth>=1.0.1->kubernetes>=28.1.0->chromadb) (0.3.0)\n",
|
327 |
+
"Collecting filelock\n",
|
328 |
+
" Using cached filelock-3.13.1-py3-none-any.whl (11 kB)\n",
|
329 |
+
"Collecting fsspec>=2023.5.0\n",
|
330 |
+
" Using cached fsspec-2023.12.2-py3-none-any.whl (168 kB)\n",
|
331 |
+
"Collecting zipp>=0.5\n",
|
332 |
+
" Downloading zipp-3.17.0-py3-none-any.whl (7.4 kB)\n",
|
333 |
+
"Requirement already satisfied: anyio<5,>=3.4.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from starlette<0.36.0,>=0.35.0->fastapi>=0.95.2->chromadb) (4.2.0)\n",
|
334 |
+
"Collecting humanfriendly>=9.1\n",
|
335 |
+
" Using cached humanfriendly-10.0-py2.py3-none-any.whl (86 kB)\n",
|
336 |
+
"Collecting mpmath>=0.19\n",
|
337 |
+
" Using cached mpmath-1.3.0-py3-none-any.whl (536 kB)\n",
|
338 |
+
"Requirement already satisfied: exceptiongroup>=1.0.2 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from anyio<5,>=3.4.0->starlette<0.36.0,>=0.35.0->fastapi>=0.95.2->chromadb) (1.2.0)\n",
|
339 |
+
"Requirement already satisfied: sniffio>=1.1 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from anyio<5,>=3.4.0->starlette<0.36.0,>=0.35.0->fastapi>=0.95.2->chromadb) (1.3.0)\n",
|
340 |
+
"Requirement already satisfied: pyasn1<0.6.0,>=0.4.6 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from pyasn1-modules>=0.2.1->google-auth>=1.0.1->kubernetes>=28.1.0->chromadb) (0.5.1)\n",
|
341 |
+
"Installing collected packages: pypika, mpmath, monotonic, mmh3, flatbuffers, zipp, wrapt, websockets, websocket-client, uvloop, tomli, sympy, python-dotenv, pulsar-client, overrides, opentelemetry-util-http, opentelemetry-semantic-conventions, opentelemetry-proto, oauthlib, importlib-resources, humanfriendly, httptools, h11, fsspec, filelock, click, chroma-hnswlib, bcrypt, backoff, asgiref, watchfiles, uvicorn, typer, starlette, requests-oauthlib, pyproject_hooks, posthog, opentelemetry-exporter-otlp-proto-common, importlib-metadata, huggingface_hub, deprecated, coloredlogs, tokenizers, opentelemetry-api, onnxruntime, kubernetes, fastapi, build, opentelemetry-sdk, opentelemetry-instrumentation, opentelemetry-instrumentation-asgi, opentelemetry-exporter-otlp-proto-grpc, opentelemetry-instrumentation-fastapi, chromadb\n",
|
342 |
+
"Successfully installed asgiref-3.7.2 backoff-2.2.1 bcrypt-4.1.2 build-1.0.3 chroma-hnswlib-0.7.3 chromadb-0.4.22 click-8.1.7 coloredlogs-15.0.1 deprecated-1.2.14 fastapi-0.109.0 filelock-3.13.1 flatbuffers-23.5.26 fsspec-2023.12.2 h11-0.14.0 httptools-0.6.1 huggingface_hub-0.20.2 humanfriendly-10.0 importlib-metadata-6.11.0 importlib-resources-6.1.1 kubernetes-29.0.0 mmh3-4.1.0 monotonic-1.6 mpmath-1.3.0 oauthlib-3.2.2 onnxruntime-1.16.3 opentelemetry-api-1.22.0 opentelemetry-exporter-otlp-proto-common-1.22.0 opentelemetry-exporter-otlp-proto-grpc-1.22.0 opentelemetry-instrumentation-0.43b0 opentelemetry-instrumentation-asgi-0.43b0 opentelemetry-instrumentation-fastapi-0.43b0 opentelemetry-proto-1.22.0 opentelemetry-sdk-1.22.0 opentelemetry-semantic-conventions-0.43b0 opentelemetry-util-http-0.43b0 overrides-7.4.0 posthog-3.3.1 pulsar-client-3.4.0 pypika-0.48.9 pyproject_hooks-1.0.0 python-dotenv-1.0.0 requests-oauthlib-1.3.1 starlette-0.35.1 sympy-1.12 tokenizers-0.15.0 tomli-2.0.1 typer-0.9.0 uvicorn-0.25.0 uvloop-0.19.0 watchfiles-0.21.0 websocket-client-1.7.0 websockets-12.0 wrapt-1.16.0 zipp-3.17.0\n",
|
343 |
+
"Collecting gradio\n",
|
344 |
+
" Using cached gradio-4.14.0-py3-none-any.whl (16.6 MB)\n",
|
345 |
+
"Requirement already satisfied: packaging in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from gradio) (23.2)\n",
|
346 |
+
"Requirement already satisfied: pyyaml<7.0,>=5.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from gradio) (6.0.1)\n",
|
347 |
+
"Requirement already satisfied: pydantic>=2.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from gradio) (2.5.3)\n",
|
348 |
+
"Requirement already satisfied: typing-extensions~=4.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from gradio) (4.9.0)\n",
|
349 |
+
"Collecting tomlkit==0.12.0\n",
|
350 |
+
" Using cached tomlkit-0.12.0-py3-none-any.whl (37 kB)\n",
|
351 |
+
"Collecting pydub\n",
|
352 |
+
" Using cached pydub-0.25.1-py2.py3-none-any.whl (32 kB)\n",
|
353 |
+
"Collecting httpx\n",
|
354 |
+
" Using cached httpx-0.26.0-py3-none-any.whl (75 kB)\n",
|
355 |
+
"Collecting altair<6.0,>=4.2.0\n",
|
356 |
+
" Using cached altair-5.2.0-py3-none-any.whl (996 kB)\n",
|
357 |
+
"Requirement already satisfied: typer[all]<1.0,>=0.9 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from gradio) (0.9.0)\n",
|
358 |
+
"Requirement already satisfied: importlib-resources<7.0,>=1.3 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from gradio) (6.1.1)\n",
|
359 |
+
"Requirement already satisfied: fastapi in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from gradio) (0.109.0)\n",
|
360 |
+
"Collecting python-multipart\n",
|
361 |
+
" Using cached python_multipart-0.0.6-py3-none-any.whl (45 kB)\n",
|
362 |
+
"Requirement already satisfied: huggingface-hub>=0.19.3 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from gradio) (0.20.2)\n",
|
363 |
+
"Collecting gradio-client==0.8.0\n",
|
364 |
+
" Using cached gradio_client-0.8.0-py3-none-any.whl (305 kB)\n",
|
365 |
+
"Collecting jinja2<4.0\n",
|
366 |
+
" Downloading Jinja2-3.1.3-py3-none-any.whl (133 kB)\n",
|
367 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m133.2/133.2 KB\u001b[0m \u001b[31m917.4 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
|
368 |
+
"\u001b[?25hCollecting ffmpy\n",
|
369 |
+
" Using cached ffmpy-0.3.1-py3-none-any.whl\n",
|
370 |
+
"Collecting orjson~=3.0\n",
|
371 |
+
" Using cached orjson-3.9.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (138 kB)\n",
|
372 |
+
"Requirement already satisfied: uvicorn>=0.14.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from gradio) (0.25.0)\n",
|
373 |
+
"Requirement already satisfied: numpy~=1.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from gradio) (1.26.3)\n",
|
374 |
+
"Collecting matplotlib~=3.0\n",
|
375 |
+
" Using cached matplotlib-3.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (11.6 MB)\n",
|
376 |
+
"Collecting semantic-version~=2.0\n",
|
377 |
+
" Using cached semantic_version-2.10.0-py2.py3-none-any.whl (15 kB)\n",
|
378 |
+
"Collecting pillow<11.0,>=8.0\n",
|
379 |
+
" Downloading pillow-10.2.0-cp310-cp310-manylinux_2_28_x86_64.whl (4.5 MB)\n",
|
380 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m4.5/4.5 MB\u001b[0m \u001b[31m1.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
|
381 |
+
"\u001b[?25hCollecting aiofiles<24.0,>=22.0\n",
|
382 |
+
" Using cached aiofiles-23.2.1-py3-none-any.whl (15 kB)\n",
|
383 |
+
"Collecting pandas<3.0,>=1.0\n",
|
384 |
+
" Using cached pandas-2.1.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (12.3 MB)\n",
|
385 |
+
"Collecting markupsafe~=2.0\n",
|
386 |
+
" Using cached MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (25 kB)\n",
|
387 |
+
"Requirement already satisfied: fsspec in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from gradio-client==0.8.0->gradio) (2023.12.2)\n",
|
388 |
+
"Collecting websockets<12.0,>=10.0\n",
|
389 |
+
" Using cached websockets-11.0.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (129 kB)\n",
|
390 |
+
"Collecting toolz\n",
|
391 |
+
" Using cached toolz-0.12.0-py3-none-any.whl (55 kB)\n",
|
392 |
+
"Collecting jsonschema>=3.0\n",
|
393 |
+
" Using cached jsonschema-4.20.0-py3-none-any.whl (84 kB)\n",
|
394 |
+
"Requirement already satisfied: tqdm>=4.42.1 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from huggingface-hub>=0.19.3->gradio) (4.66.1)\n",
|
395 |
+
"Requirement already satisfied: filelock in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from huggingface-hub>=0.19.3->gradio) (3.13.1)\n",
|
396 |
+
"Requirement already satisfied: requests in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from huggingface-hub>=0.19.3->gradio) (2.31.0)\n",
|
397 |
+
"Collecting kiwisolver>=1.3.1\n",
|
398 |
+
" Using cached kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl (1.6 MB)\n",
|
399 |
+
"Collecting pyparsing>=2.3.1\n",
|
400 |
+
" Using cached pyparsing-3.1.1-py3-none-any.whl (103 kB)\n",
|
401 |
+
"Collecting contourpy>=1.0.1\n",
|
402 |
+
" Using cached contourpy-1.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (310 kB)\n",
|
403 |
+
"Collecting cycler>=0.10\n",
|
404 |
+
" Using cached cycler-0.12.1-py3-none-any.whl (8.3 kB)\n",
|
405 |
+
"Requirement already satisfied: python-dateutil>=2.7 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from matplotlib~=3.0->gradio) (2.8.2)\n",
|
406 |
+
"Collecting fonttools>=4.22.0\n",
|
407 |
+
" Downloading fonttools-4.47.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (4.6 MB)\n",
|
408 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m4.6/4.6 MB\u001b[0m \u001b[31m2.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
|
409 |
+
"\u001b[?25hCollecting pytz>=2020.1\n",
|
410 |
+
" Using cached pytz-2023.3.post1-py2.py3-none-any.whl (502 kB)\n",
|
411 |
+
"Collecting tzdata>=2022.1\n",
|
412 |
+
" Using cached tzdata-2023.4-py2.py3-none-any.whl (346 kB)\n",
|
413 |
+
"Requirement already satisfied: pydantic-core==2.14.6 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from pydantic>=2.0->gradio) (2.14.6)\n",
|
414 |
+
"Requirement already satisfied: annotated-types>=0.4.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from pydantic>=2.0->gradio) (0.6.0)\n",
|
415 |
+
"Requirement already satisfied: click<9.0.0,>=7.1.1 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from typer[all]<1.0,>=0.9->gradio) (8.1.7)\n",
|
416 |
+
"Collecting shellingham<2.0.0,>=1.3.0\n",
|
417 |
+
" Using cached shellingham-1.5.4-py2.py3-none-any.whl (9.8 kB)\n",
|
418 |
+
"Collecting colorama<0.5.0,>=0.4.3\n",
|
419 |
+
" Using cached colorama-0.4.6-py2.py3-none-any.whl (25 kB)\n",
|
420 |
+
"Collecting rich<14.0.0,>=10.11.0\n",
|
421 |
+
" Using cached rich-13.7.0-py3-none-any.whl (240 kB)\n",
|
422 |
+
"Requirement already satisfied: h11>=0.8 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from uvicorn>=0.14.0->gradio) (0.14.0)\n",
|
423 |
+
"Requirement already satisfied: starlette<0.36.0,>=0.35.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from fastapi->gradio) (0.35.1)\n",
|
424 |
+
"Requirement already satisfied: sniffio in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from httpx->gradio) (1.3.0)\n",
|
425 |
+
"Requirement already satisfied: idna in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from httpx->gradio) (3.6)\n",
|
426 |
+
"Requirement already satisfied: certifi in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from httpx->gradio) (2023.11.17)\n",
|
427 |
+
"Collecting httpcore==1.*\n",
|
428 |
+
" Using cached httpcore-1.0.2-py3-none-any.whl (76 kB)\n",
|
429 |
+
"Requirement already satisfied: anyio in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from httpx->gradio) (4.2.0)\n",
|
430 |
+
"Collecting referencing>=0.28.4\n",
|
431 |
+
" Downloading referencing-0.32.1-py3-none-any.whl (26 kB)\n",
|
432 |
+
"Collecting jsonschema-specifications>=2023.03.6\n",
|
433 |
+
" Using cached jsonschema_specifications-2023.12.1-py3-none-any.whl (18 kB)\n",
|
434 |
+
"Requirement already satisfied: attrs>=22.2.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from jsonschema>=3.0->altair<6.0,>=4.2.0->gradio) (23.2.0)\n",
|
435 |
+
"Collecting rpds-py>=0.7.1\n",
|
436 |
+
" Downloading rpds_py-0.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.2 MB)\n",
|
437 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.2/1.2 MB\u001b[0m \u001b[31m2.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0mm\n",
|
438 |
+
"\u001b[?25hRequirement already satisfied: six>=1.5 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from python-dateutil>=2.7->matplotlib~=3.0->gradio) (1.16.0)\n",
|
439 |
+
"Collecting markdown-it-py>=2.2.0\n",
|
440 |
+
" Using cached markdown_it_py-3.0.0-py3-none-any.whl (87 kB)\n",
|
441 |
+
"Requirement already satisfied: pygments<3.0.0,>=2.13.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from rich<14.0.0,>=10.11.0->typer[all]<1.0,>=0.9->gradio) (2.17.2)\n",
|
442 |
+
"Requirement already satisfied: exceptiongroup>=1.0.2 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from anyio->httpx->gradio) (1.2.0)\n",
|
443 |
+
"Requirement already satisfied: urllib3<3,>=1.21.1 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from requests->huggingface-hub>=0.19.3->gradio) (2.1.0)\n",
|
444 |
+
"Requirement already satisfied: charset-normalizer<4,>=2 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from requests->huggingface-hub>=0.19.3->gradio) (3.3.2)\n",
|
445 |
+
"Collecting mdurl~=0.1\n",
|
446 |
+
" Using cached mdurl-0.1.2-py3-none-any.whl (10.0 kB)\n",
|
447 |
+
"Installing collected packages: pytz, pydub, ffmpy, websockets, tzdata, toolz, tomlkit, shellingham, semantic-version, rpds-py, python-multipart, pyparsing, pillow, orjson, mdurl, markupsafe, kiwisolver, httpcore, fonttools, cycler, contourpy, colorama, aiofiles, referencing, pandas, matplotlib, markdown-it-py, jinja2, httpx, rich, jsonschema-specifications, gradio-client, jsonschema, altair, gradio\n",
|
448 |
+
" Attempting uninstall: websockets\n",
|
449 |
+
" Found existing installation: websockets 12.0\n",
|
450 |
+
" Uninstalling websockets-12.0:\n",
|
451 |
+
" Successfully uninstalled websockets-12.0\n",
|
452 |
+
"Successfully installed aiofiles-23.2.1 altair-5.2.0 colorama-0.4.6 contourpy-1.2.0 cycler-0.12.1 ffmpy-0.3.1 fonttools-4.47.2 gradio-4.14.0 gradio-client-0.8.0 httpcore-1.0.2 httpx-0.26.0 jinja2-3.1.3 jsonschema-4.20.0 jsonschema-specifications-2023.12.1 kiwisolver-1.4.5 markdown-it-py-3.0.0 markupsafe-2.1.3 matplotlib-3.8.2 mdurl-0.1.2 orjson-3.9.10 pandas-2.1.4 pillow-10.2.0 pydub-0.25.1 pyparsing-3.1.1 python-multipart-0.0.6 pytz-2023.3.post1 referencing-0.32.1 rich-13.7.0 rpds-py-0.17.1 semantic-version-2.10.0 shellingham-1.5.4 tomlkit-0.12.0 toolz-0.12.0 tzdata-2023.4 websockets-11.0.3\n",
|
453 |
+
"Collecting faiss-cpu\n",
|
454 |
+
" Using cached faiss_cpu-1.7.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (17.6 MB)\n",
|
455 |
+
"Installing collected packages: faiss-cpu\n",
|
456 |
+
"Successfully installed faiss-cpu-1.7.4\n"
|
457 |
+
]
}
],
"source": [
"# !pip install langchain\n",
"# !pip install pypdf\n",
"# !pip install langchain_google_genai\n",
"# !pip install google-generativeai\n",
"# !pip install chromadb\n",
"# !pip install gradio\n",
"# !pip install faiss-cpu"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {
"id": "ylbT549oymIl"
},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"/home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
"  from .autonotebook import tqdm as notebook_tqdm\n"
]
}
],
"source": [
"import os\n",
"from langchain.document_loaders import (\n",
"    PyPDFLoader,\n",
"    TextLoader,\n",
"    Docx2txtLoader\n",
")\n",
"\n",
"from langchain.text_splitter import CharacterTextSplitter\n",
"# from PyPDF2 import PdfReader\n",
"from langchain.text_splitter import RecursiveCharacterTextSplitter\n",
"from langchain_google_genai import GoogleGenerativeAIEmbeddings\n",
"import google.generativeai as genai\n",
"from langchain.vectorstores import FAISS\n",
"from langchain_google_genai import ChatGoogleGenerativeAI\n",
"from langchain.chains.question_answering import load_qa_chain\n",
"from langchain.prompts import PromptTemplate\n",
"from dotenv import load_dotenv"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {
"id": "PVfL-AcM8jnE"
},
"outputs": [
{
"data": {
"text/plain": [
"True"
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"\n",
"load_dotenv()\n",
"\n",
"# genai.configure(api_key=userdata.get(\"GOOGLE_API_KEY\"))"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {
"id": "L4kAmlMpzA6X"
},
"outputs": [],
"source": [
"# document=[]\n",
"# for file in os.listdir(\"docs\"):\n",
"#     if file.endswith(\".pdf\"):\n",
"#         pdf_path=\"./docs/\"+file\n",
"#         loader=PyPDFLoader(pdf_path)\n",
"#         document.extend(loader.load())\n",
"#     elif file.endswith('.docx') or file.endswith('.doc'):\n",
"#         doc_path=\"./docs/\"+file\n",
"#         loader=Docx2txtLoader(doc_path)\n",
"#         document.extend(loader.load())\n",
"#     elif file.endswith('.txt'):\n",
"#         text_path=\"./docs/\"+file\n",
"#         loader=TextLoader(text_path)\n",
"#         document.extend(loader.load())"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {
"id": "65o268jqzN7O"
},
"outputs": [],
"source": [
"def extract_text(docs):\n",
"    documents = []\n",
"    files = os.listdir(docs)\n",
"\n",
"    if len(files) == 0:\n",
"        return \"Directory is empty\"\n",
"\n",
"    base_dir = docs\n",
"\n",
"    print(files)\n",
"    print(base_dir)\n",
"\n",
"    for file in files:\n",
"        if file.endswith(\".pdf\"):\n",
"            pdf_path = os.path.join(base_dir, file)\n",
"            loader = PyPDFLoader(pdf_path)\n",
"            documents.extend(loader.load())\n",
"        elif file.endswith('.docx') or file.endswith('.doc'):\n",
"            doc_path = os.path.join(base_dir, file)\n",
"            loader = Docx2txtLoader(doc_path)\n",
"            documents.extend(loader.load())\n",
"        elif file.endswith('.txt'):\n",
"            text_path = os.path.join(base_dir, file)\n",
"            loader = TextLoader(text_path)\n",
"            documents.extend(loader.load())\n",
"    return documents"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {
"id": "0gT5m9cD_cM7"
},
"outputs": [],
"source": [
"doc_dir = \"../docs\""
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {
"id": "Svi3z1Rdzjbm"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"['profile.txt', 'llm-research.pdf']\n",
"../docs\n"
]
}
],
"source": [
"documents = extract_text(doc_dir)"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "ryjhUUWN7TnP",
"outputId": "d862304d-16d7-4960-89e8-12e73842c897"
},
"outputs": [
{
"data": {
"text/plain": [
"Document(page_content='PREPRINT 1\\nA Comprehensive Overview of\\nLarge Language Models\\nHumza Naveed1, Asad Ullah Khan1,∗, Shi Qiu2,∗, Muhammad Saqib3,4,∗,\\nSaeed Anwar5,6, Muhammad Usman5,6, Naveed Akhtar7, Nick Barnes8, Ajmal Mian9\\n1University of Engineering and Technology (UET), Lahore, Pakistan\\n2The Chinese University of Hong Kong (CUHK), HKSAR, China\\n3University of Technology Sydney (UTS), Sydney, Australia\\n4Commonwealth Scientific and Industrial Research Organisation (CSIRO), Sydney, Australia\\n5King Fahd University of Petroleum and Minerals (KFUPM), Dhahran, Saudi Arabia\\n6SDAIA-KFUPM Joint Research Center for Artificial Intelligence (JRCAI), Dhahran, Saudi Arabia\\n7The University of Melbourne (UoM), Melbourne, Australia\\n8Australian National University (ANU), Canberra, Australia\\n9The University of Western Australia (UWA), Perth, Australia\\nAbstract —\\nLarge Language Models (LLMs) have recently demonstrated\\nremarkable capabilities in natural language processing tasks and\\nbeyond. This success of LLMs has led to a large influx of research\\ncontributions in this direction. These works encompass diverse\\ntopics such as architectural innovations, better training strategies,\\ncontext length improvements, fine-tuning, multi-modal LLMs,\\nrobotics, datasets, benchmarking, efficiency, and more. With the\\nrapid development of techniques and regular breakthroughs in\\nLLM research, it has become considerably challenging to perceive\\nthe bigger picture of the advances in this direction. Considering\\nthe rapidly emerging plethora of literature on LLMs, it is\\nimperative that the research community is able to benefit from a\\nconcise yet comprehensive overview of the recent developments\\nin this field. This article provides an overview of the existing\\nliterature on a broad range of LLM-related concepts. Our self-\\ncontained comprehensive overview of LLMs discusses relevant\\nbackground concepts along with covering the advanced topics\\nat the frontier of research in LLMs. This review article is\\nintended to not only provide a systematic survey but also a quick\\ncomprehensive reference for the researchers and practitioners\\nto draw insights from extensive informative summaries of the\\nexisting works to advance the LLM research.\\nIndex Terms —\\nLarge Language Models, LLMs, chatGPT, Augmented LLMs,\\nMultimodal LLMs, LLM training, LLM Benchmarking\\nI. I NTRODUCTION\\nLanguage plays a fundamental role in facilitating commu-\\nnication and self-expression for humans, and their interaction\\nwith machines. The need for generalized models stems from\\n* is for equal contribution\\nContact e-mail: humza_naveed@yahoo.com\\nEmail: humza_naveed@yahoo.com, aukhanee@gmail.com,\\nshiqiu@cse.cuhk.edu.hk, muhammad.saqib@data61.csiro.au,\\nsaeed.anwar@kfupm.edu.sa, muhammad.usman@kfupm.edu.sa,\\nnaveed.akhtar1@unimelb.edu.au, nick.barnes@anu.edu.au,\\najmal.mian@uwa.edu.au\\nRepo: https://github.com/humza909/LLM_Survey.git\\nFig. 1: The trends in the number of LLM models introduced\\nover the years.\\nthe growing demand for machines to handle complex lan-\\nguage tasks, including translation, summarization, information\\nretrieval, conversational interactions, etc. Recently, signifi-\\ncant breakthroughs have been witnessed in language models,\\nprimarily attributed to transformers [1], increased computa-\\ntional capabilities, and the availability of large-scale training\\ndata. 
These developments have brought about a revolutionary\\ntransformation by enabling the creation of LLMs that can\\napproximate human-level performance on various tasks [2],\\n[3]. Large Language Models (LLMs) have emerged as cutting-\\nedge artificial intelligence systems that can process and gen-\\nerate text with coherent communication [4], and generalize to\\nmultiple tasks [5], [6].\\nThe historical progress in natural language processing (NLP)\\nevolved from statistical to neural language modeling and then\\nfrom pre-trained language models (PLMs) to LLMs. While\\nconventional language modeling (LM) trains task-specificarXiv:2307.06435v7 [cs.CL] 27 Dec 2023', metadata={'source': '../docs/llm-research.pdf', 'page': 0})"
]
},
"execution_count": 8,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"documents[1]"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"47"
]
},
"execution_count": 9,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"len(documents)"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {
"id": "hMr64jFWts5V"
},
"outputs": [],
"source": [
"# embeddings = GoogleGenerativeAIEmbeddings(model = \"models/embedding-001\")\n",
"# embeddings.embed_query(\"I am good\")"
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {
"id": "CRe5WNKC0D88"
},
"outputs": [],
"source": [
"def get_text_chunks(text):\n",
"    text_splitter = RecursiveCharacterTextSplitter(chunk_size=10000, chunk_overlap=1000)\n",
"    chunks = text_splitter.split_documents(text)\n",
"    return chunks\n",
"\n",
"def get_vector_store(text_chunks, save=True):\n",
"    embeddings = GoogleGenerativeAIEmbeddings(model=\"models/embedding-001\")\n",
"    vector_store = FAISS.from_documents(text_chunks, embedding=embeddings)\n",
"    if save:\n",
"        vector_store.save_local(\"faiss_index\")\n",
"    return vector_store"
]
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {
"id": "2bfL_ejE63zE"
},
"outputs": [],
"source": [
"documents = get_text_chunks(documents)"
]
},
{
"cell_type": "code",
"execution_count": 13,
"metadata": {
"colab": {
"background_save": true
},
"id": "9GgV19-K8T1h"
},
"outputs": [],
"source": [
"db = get_vector_store(documents)"
]
},
{
"cell_type": "code",
"execution_count": 14,
"metadata": {
"id": "oVFkWCOw2UQt"
},
"outputs": [],
"source": [
"def get_conversational_chain():\n",
"\n",
"    prompt_template = \"\"\"\n",
"    Answer the question in as much detail as possible from the provided context. Make sure to provide all the details. If the answer is not in\n",
"    the provided context, just say, \"answer is not available in the context\"; don't provide a wrong answer.\\n\\n\n",
"    Context:\\n {context}\\n\n",
"    Question: \\n{question}\\n\n",
"\n",
"    Answer:\n",
"    \"\"\"\n",
"\n",
"    model = ChatGoogleGenerativeAI(model=\"gemini-pro\", temperature=0.3)\n",
"\n",
"    prompt = PromptTemplate(template=prompt_template, input_variables=[\"context\", \"question\"])\n",
"    chain = load_qa_chain(llm=model, chain_type=\"stuff\", prompt=prompt)\n",
"\n",
"    return chain"
]
},
{
"cell_type": "code",
"execution_count": 15,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"AIMessage(content='Hello! How can I assist you today?')"
]
},
"execution_count": 15,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"llm = ChatGoogleGenerativeAI(model=\"gemini-pro\", temperature=0.3)\n",
"llm.invoke(\"HII\")"
]
},
{
"cell_type": "code",
"execution_count": 32,
"metadata": {},
"outputs": [],
"source": [
"agent = get_conversational_chain()\n",
"def answer(question, agent=agent, db=db):\n",
"    docs = db.similarity_search(question)\n",
"    print(docs)\n",
"    response = agent(\n",
"        {\"input_documents\": docs, \"question\": question},\n",
"        return_only_outputs=True,\n",
"    )\n",
"    return response"
]
},
{
"cell_type": "code",
"execution_count": 33,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"Document(page_content='PREPRINT 5\\n2. Relative: To pass the information on the relative depen-\\ndencies of different tokens appearing at different locations in\\nthe sequence, a relative positional encoding is calculated by\\nsome kind of learning. Two famous types of relative encodings\\nare:\\nAlibi: [65] In this approach, a scalar bias is subtracted from\\nthe attention score calculated using two tokens which increases\\nwith the distance between the positions of the tokens. This\\nlearned approach effectively favors using recent tokens for\\nattention.\\nRoPE: Keys, queries, and values are all vectors in the LLMs.\\nRoPE [66] involves the rotation of the query and key represen-\\ntations at an angle proportional to their absolute positions of\\nthe tokens in the input sequence. This step results in a relative\\npositional encoding scheme which decays with the distance\\nbetween the tokens.\\nE. Activation Functions\\nThe activation functions serve a crucial role in the curve-\\nfitting abilities of the neural networks, as proved in [68]. The\\nmodern activation functions used in LLMs are different from\\nthe earlier squashing functions but are critical to the success\\nof LLMs. We discuss these activation functions in this section.\\n1. ReLU [69]: Rectified linear unit (ReLU) is defined as\\nReLU (x) =max(0, x) (1)\\n2. GeLU [70]: Gaussian Error Linear Unit (GeLU) is the\\ncombination of ReLU, dropout [71] and zoneout [72]. It is the\\nmost widely used activation function in contemporary LLM\\nliterature.\\n3. GLU variants [73]: Gated Linear Unit [74] is a neural\\nnetwork layer that is an element-wise product ( ⊗) of a linear\\ntransformation and a sigmoid transformed ( σ) linear projection\\nof the input given as\\nGLU (x, W, V, b, c ) = (xW+b)⊗σ(xV+c), (2)\\nwhere Xis the input of layer and l,W, b, V andcare learned\\nparameters.\\nGLU was modified in [73] to evaluate the effect of different\\nvariations in the training and testing of transformers, resulting\\nin better empirical results. Here are the different GLU varia-\\ntions introduced in [73] and used in LLMs.\\nReGLU (x, W, V, b, c ) =max(0, xW +b)⊗,\\nGEGLU (x, W, V, b, c ) =GELU (xW+b)⊗(xV+c),\\nSwiGLU (x, W, V, b, c, β ) =Swishβ (xW+b)⊗(xV+c).\\nF . Layer Normalization\\nLayer normalization leads to faster convergence and is a\\nwidely used component in transformers. In this section, we\\nprovide different normalization techniques widely used in\\nLLM literature.1. LayerNorm: Layer norm computes statistics over all the\\nhidden units in a layer (l)as follows:\\nul=1\\nnnX\\nial\\ni σl=vuut1\\nnnX\\ni(al\\ni−ul)2, (3)\\nwhere nis the number of neurons in the layer landal\\niis the\\nsummed input of the ineuron in layer l. LayerNorm provides\\ninvariance to rescaling of the weights and re-centering of the\\ndistribution.\\n2. RMSNorm: [75] proposed that the invariance properties\\nof LayerNorm are spurious, and we can achieve the same\\nperformance benefits as we get from LayerNorm by using a\\ncomputationally efficient normalization technique that trades\\noff re-centering invariance with speed. LayerNorm gives the\\nnormalized summed input to layer las follows\\nal\\ni=al\\ni−ul\\nσgl\\ni (4)\\nwhere gl\\niis the gain parameter. RMSNorm [75] modifies al\\ni\\nas\\nal\\ni=al\\ni\\nRMS(al)gl\\ni,where RMS (al) =vuut1\\nnnX\\ni(al\\ni)2.(5)\\n3. Pre-Norm and Post-Norm: LLMs use transformer [62]\\narchitecture with some variations. 
The original implementa-\\ntion [62] used layer normalization after the residual con-\\nnection, commonly called post-LN, concerning the order of\\nMultihead attention – Residual – LN . There is another order\\nof the normalization, referred to as pre-LN [76] due to the\\nposition of the normalization step before the self-attention\\nlayer as in LN – Multihead attention – Residual . Pre-LN is\\nknown to provide more stability in the training [77].\\n4. DeepNorm: While pre-LN has certain benefits over post-\\nLN training, pre-LN training has an unwanted effect on the\\ngradients [77]. The earlier layers have larger gradients than\\nthose at the bottom. DeepNorm [78] mitigates these adverse\\neffects on the gradients. It is given as\\nxlf=LN(αxlp+Glp(xlp, θlp), (6)\\nwhere αis a constant and θlprepresents the parameters of\\nlayer lp. These parameters are scaled by another constant β.\\nBoth of these constants depend only on the architecture.\\nG. Distributed LLM Training\\nThis section describes distributed LLM training approaches\\nbriefly. More details are available in [13], [37], [79], [80].\\n1. Data Parallelism: Data parallelism replicates the model\\non multiple devices where data in a batch gets divided across\\ndevices. At the end of each training iteration weights are\\nsynchronized across all devices.\\n2. Tensor Parallelism: Tensor parallelism shards a tensor\\ncomputation across devices. It is also known as horizontal\\nparallelism or intra-layer model parallelism.', metadata={'source': '../docs/llm-research.pdf', 'page': 4})"
]
},
"execution_count": 33,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"documents[5]"
]
},
{
"cell_type": "code",
"execution_count": 34,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[Document(page_content='HI, this is vasim an AI Engineer', metadata={'source': '../docs/profile.txt'}), Document(page_content='PREPRINT 23\\nTABLE V: Architecture details of LLMs. Here, “PE” is the positional embedding, “nL” is the number of layers, “nH” is the\\nnumber of attention heads, “HS” is the size of hidden states.\\nModels TypeTraining\\nObjectiveAttention Vocab Tokenizer Norm PE Activation Bias nL nH HS\\nT5 (11B) Enc-Dec Span Corruption Standard 32k SentencePiece Pre-RMS Relative ReLU × 24 128 1024\\nGPT3 (175B) Causal-Dec Next Token Dense+Sparse - - Layer Learned GeLU ✓ 96 96 12288\\nmT5 (13B) Enc-Dec Span Corruption Standard 250k SentencePiece Pre-RMS Relative ReLU - - - -\\nPanGu- α(200B) Causal-Dec Next Token Standard 40k BPE Layer - - - 64 128 16384\\nCPM-2 (198B) Enc-Dec Span Corruption Standard 250k SentencePiece Pre-RMS Relative ReLU - 24 64 -\\nCodex (12B) Causal-Dec Next Token Standard - BPE+ Pre-Layer Learned GeLU - 96 96 12288\\nERNIE 3.0 (10B) Causal-Dec Next Token Standard - WordPiece Post-Layer Relative GeLU - 48 64 4096\\nJurassic-1 (178B) Causal-Dec Next Token Standard 256k SentencePiece∗Pre-Layer Learned GeLU ✓ 76 96 13824\\nHyperCLOV A (82B) Causal-Dec Next Token Dense+Sparse - BPE* Pre-Layer Learned GeLU - 64 80 10240\\nYuan 1.0 (245B) Causal-Dec Next Token Standard - - - - - - 76 -16384\\nGopher (280B) Causal-Dec Next Token Standard 32k SentencePiece Pre-RMS Relative GeLU ✓ 80 128 16384\\nERNIE 3.0 Titan (260B) Causal-Dec Next Token Standard - WordPiece Post-Layer Relative GeLU - 48 192 12288\\nGPT-NeoX-20B Causal-Dec Next Token Parallel 50k BPE Layer Rotary GeLU ✓ 44 64 -\\nOPT (175B) Causal-Dec Next Token Standard - BPE - - ReLU ✓ 96 96 -\\nBLOOM (176B) Causal-Dec Next Token Standard 250k BPE Layer ALiBi GeLU ✓ 70 112 14336\\nGalactica (120B) Causal-Dec Next Token Standard 50k BPE+custom Layer Learned GeLU × 96 80 10240\\nGLaM (1.2T) MoE-Dec Next Token Standard 256k SentencePiece Layer Relative GeLU ✓ 64 128 32768\\nLaMDA (137B) Causal-Dec Next Token Standard 32k BPE Layer Relative GeGLU - 64 128 8192\\nMT-NLG (530B) Causal-Dec Next Token Standard 50k BPE Pre-Layer Learned GeLU ✓ 105 128 20480\\nAlphaCode (41B) Enc-Dec Next Token Multi-query 8k SentencePiece - - - - 64 128 6144\\nChinchilla (70B) Causal-Dec Next Token Standard 32k SentencePiece-NFKC Pre-RMS Relative GeLU ✓ 80 64 8192\\nPaLM (540B) Causal-Dec Next Token Parallel+Multi-query 256k SentencePiece Layer RoPE SwiGLU × 118 48 18432\\nAlexaTM (20B) Enc-Dec Denoising Standard 150k SentencePiece Pre-Layer Learned GeLU ✓ 78 32 4096\\nSparrow (70B) Causal-Dec Pref.&Rule RM - 32k SentencePiece-NFKC Pre-RMS Relative GeLU ✓ 16∗64 8192\\nU-PaLM (540B) Non-Causal-Dec MoD Parallel+Multi-query 256k SentencePiece Layer RoPE SwiGLU × 118 48 18432\\nUL2 (20B) Enc-Dec MoD Standard 32k SentencePiece - - - - 64 16 4096\\nGLM (130B) Non-Causal-Dec AR Blank Infilling Standard 130k SentencePiece Deep RoPE GeGLU ✓ 70 96 12288\\nCodeGen (16B) Causal-Dec Next Token Parallel - BPE Layer RoPE - - 34 24 -\\nLLaMA (65B) Causal-Dec Next Token Standard 32k BPE Pre-RMS RoPE SwiGLU - 80 64 8192\\nPanGu- Σ(1085B) Causal-Dec Next Token Standard - BPE Fused Layer - FastGeLU - 40 40 5120\\nBloombergGPT (50B) Causal-Dec Next Token Standard 131k Unigram Layer ALiBi GeLU ✓ 70 40 7680\\nXuan Yuan 2.0 (176B) Causal-Dec Next Token Self 250k BPE Layer ALiBi GeLU ✓ 70 112 14336\\nCodeT5+ (16B) Enc-Dec SC+NT+Cont.+Match Standard - Code-Specific - - - - - - -\\nStarCoder (15.5B) Causal-Dec FIM Multi-query 49k BPE - Learned - - 40 48 
6144\\nLLaMA (70B) Causal-Dec Next Token Grouped-query 32k BPE Pre-RMS RoPE SwiGLUE - - - -\\nPaLM-2 - MoD Parallel - - - - - - - - -\\nTABLE VI: Summary of optimization settings used for pre-trained LLMs. The values for weight decay, gradient clipping, and\\ndropout are 0.1, 1.0, and 0.1, respectively, for most of the LLMs.\\nSequence LR Optimizers Precision Weight Grad\\nModels Batch Size Length LR Warmup Decay AdaFactor Adam AdamW FP16 BF16 Mixed Decay Clip Dropout\\nT5 (11B) 211512 0.01 × inverse square root ✓ - - - - - ✓\\nGPT3 (175B) 32K - 6e-5 ✓ cosine ✓ ✓ ✓ ✓ -\\nmT5 (13B) 1024 1024 0.01 - inverse square root ✓ - - - - - ✓\\nPanGu- α(200B) - 1024 2e-5 - - - - - - ✓ - - - -\\nCPM-2 (198B) 1024 1024 0.001 - - ✓ - - - - - ✓\\nCodex (12B) - - 6e-5 ✓ cosine ✓ ✓ ✓ - -\\nERNIE 3.0 (12B) 6144 512 1e-4 ✓ linear ✓ - - - ✓ - -\\nJurassic-1 (178B) 3.2M 2048 6e-5 ✓ cosine ✓ ✓ ✓ ✓ -\\nHyperCLOV A (82B) 1024 - 6e-5 - cosine ✓ - - - ✓ - -\\nYuan 1.0 (245B) <10M 2048 1.6e-4 ✓ cosine decay to 10% ✓ - - - ✓ - -\\nGopher (280B) 3M 2048 4e-5 ✓ cosine decay to 10% ✓ ✓ - ✓ -\\nERNIE 3.0 Titan (260B) - 512 1e-4 ✓ linear ✓ ✓ ✓ ✓ -\\nGPT-NeoX-20B 1538 2048 0.97e-5 ✓ cosine ✓ ✓ ✓ ✓ ×\\nOPT (175B) 2M 2048 1.2e-4 - linear ✓ ✓ ✓ ✓ ✓\\nBLOOM (176B) 2048 2048 6e-5 ✓ cosine ✓ ✓ ✓ ✓ ×\\nGalactica (120B) 2M 2048 7e-6 ✓ linear decay to 10% ✓ - - - ✓ ✓ ✓\\nGLaM (1.2T) 1M 1024 0.01 - inverse square root ✓ FP32 + ✓ - ✓ ×\\nLaMDA (137B) 256K - - - - - - - - - - - - -\\nMT-NLG (530B) 1920 2048 5e-5 ✓ cosine decay to 10% ✓ ✓ ✓ ✓ -\\nAlphaCode (41B) 2048 1536+768 1e-4 ✓ cosine decay to 10% ✓ ✓ ✓ ✓ -\\nChinchilla (70B) 1.5M 2048 1e-4 ✓ cosine decay to 10% ✓ ✓ - - -\\nPaLM (540B) 2048 2048 0.01 - inverse square root ✓ - - - ✓ ✓ ×\\nAlexaTM (20B) 2M 1024 1e-4 - linear decay to 5% ✓ ✓ ✓ - ✓\\nU-PaLM (540B) 32 2048 1e-4 - cosine ✓ - - - - - -\\nUL2 (20B) 1024 1024 - - inverse square root - - - - - - × - -\\nGLM (130B) 4224 2048 8e-5 ✓ cosine ✓ ✓ ✓ ✓ ✓\\nCodeGen (16B) 2M 2048 5e-5 ✓ cosine ✓ - - - ✓ ✓ -\\nLLaMA (65B) 4M Tokens 2048 1.5e-4 ✓ cosine decay to 10% ✓ - - - ✓ ✓ -\\nPanGu- Σ(1.085T) 512 1024 2e-5 ✓ - ✓ ✓ - - -\\nBloombergGPT (50B) 2048 2048 6e-5 ✓ cosine ✓ ✓ ✓ ✓ ×\\nXuan Yuan 2.0 (176B) 2048 2048 6e-5 ✓ cosine ✓ ✓ ✓ ✓ -\\nCodeT5+ (16B) 2048 1024 2e-4 - linear ✓ ✓ ✓ - -\\nStarCoder (15.5B) 512 8k 3e-4 ✓ cosine ✓ ✓ ✓ - -\\nLLaMA-2 (70B) 4M Tokens 4k 1.5e-4 ✓ cosine ✓ ✓ ✓ ✓ -', metadata={'source': '../docs/llm-research.pdf', 'page': 22}), Document(page_content='PREPRINT 3\\nFig. 3: A broader overview of LLMs, dividing LLMs into five branches: 1. Training 2. Inference 3. Evaluation 4. Applications\\n5. Challenges\\nLLMs literature in surveys [46], [47], [48], [49], and topic-\\nspecific surveys in [50], [51], [52], [53], [54]. In contrast\\nto these surveys, our contribution focuses on providing a\\ncomprehensive yet concise overview of the general direction\\nof LLM research. This article summarizes architectural and\\ntraining details of pre-trained LLMs and delves deeper into\\nthe details of concepts like fine-tuning, multi-modal LLMs,\\nrobotics, augmented LLMs, datasets, evaluation, and others\\nto provide a self-contained comprehensive overview. 
Our key\\ncontributions are summarized as follows.\\n•We present a survey on the developments in LLM re-\\nsearch with the specific aim of providing a concise yet\\ncomprehensive overview of the direction.\\n•We present extensive summaries of pre-trained models\\nthat include fine-grained details of architecture and train-ing details.\\n•Besides paying special attention to the chronological\\norder of LLMs throughout the article, we also summarize\\nmajor findings of the popular contributions and provide\\ndetailed discussion on the key design and development\\naspects of LLMs to help practitioners to effectively\\nleverage this technology.\\n•In this self-contained article, we cover a range of concepts\\nto comprehend the general direction of LLMs comprehen-\\nsively, including background, pre-training, fine-tuning,\\nrobotics, multi-modal LLMs, augmented LLMs, datasets,\\nevaluation, etc.\\nWe loosely follow the existing terminologies to ensure pro-\\nviding a more standardized outlook of this research direction.\\nFor instance, following [46], our survey discusses pre-trained', metadata={'source': '../docs/llm-research.pdf', 'page': 2}), Document(page_content='PREPRINT 4\\nLLMs with 10B parameters or more. We refer the readers\\ninterested in smaller pre-trained models to [47], [48], [49].\\nThe organization of this paper is as follows. Section II dis-\\ncusses the background of LLMs. Section III focuses on LLMs\\noverview, architectures, training pipelines and strategies, and\\nutilization in different aspects. Section IV presents the key\\nfindings derived from each LLM. Section V highlights the\\nconfiguration and parameters that play a crucial role in the\\nfunctioning of these models. Summary and discussions are\\npresented in section VIII. The LLM training and evaluation,\\ndatasets and benchmarks are discussed in section VI, followed\\nby challenges and future directions and conclusion in sec-\\ntions IX and X, respectively.\\nII. B ACKGROUND\\nWe provide the relevant background to understand the\\nfundamentals related to LLMs in this section. Aligned with\\nour objective of providing a comprehensive overview of this\\ndirection, this section offers a comprehensive yet concise\\noutline of the basic concepts. We focus more on the intuitive\\naspects and refer the readers interested in details to the original\\nworks.\\nA. Tokenization\\nLLMs are trained on text to predict text, and similar to\\nother natural language processing systems, they use tokeniza-\\ntion [55] as the essential preprocessing step. It aims to parse\\nthe text into non-decomposing units called tokens. Tokens\\ncan be characters, subwords [56], symbols [57], or words,\\ndepending on the size and type of the model. Some of the\\ncommonly used tokenization schemes in LLMs are briefed\\nhere. Readers are encouraged to refer to [58] for a detailed\\nsurvey.\\n1. WordPiece [59]: It was introduced in [59] as a novel text\\nsegmentation technique for Japanese and Korean languages to\\nimprove the language model for voice search systems. Word-\\nPiece selects tokens that increase the likelihood of an n-gram-\\nbased language model trained on the vocabulary composed of\\ntokens.\\n2. BPE [57]: Byte Pair Encoding (BPE) has its origin in\\ncompression algorithms. It is an iterative process of generating\\ntokens where pairs of adjacent symbols are replaced by a new\\nsymbol, and the occurrences of the most occurring symbols in\\nthe input text are merged.\\n3. 
UnigramLM [56]: In this tokenization, a simple unigram\\nlanguage model (LM) is trained using an initial vocabulary\\nofsubword units. The vocabulary is pruned iteratively by\\nremoving the lowest probability items from the list, which\\nare the worst performing on the unigram LM.\\nB. Attention\\nAttention, particularly selective attention , has been widely\\nstudied under perception, psychophysics, and psychology. Se-\\nlective attention can be conceived as “the programming by\\nthe O of which stimuli will be processed or encoded and in\\nwhat order this will occur” [60]. While this definition has itsroots in visual perception, it has uncanny similarities with the\\nrecently formulated attention [61], [62] (which stimuli will\\nbe processed) and positional encoding (in what order this\\nwill occur) [62] in LLMs. We discuss both in sections II-C\\nand II-D, respectively.\\nC. Attention in LLMs\\nThe attention mechanism computes a representation of the\\ninput sequences by relating different positions ( tokens ) of these\\nsequences. There are various approaches to calculating and\\nimplementing attention, out of which some famous types are\\ngiven below.\\n1. Self-Attention [62]: The self-attention is also known as\\nintra-attention since all the queries, keys, and values come\\nfrom the same block (encoder or decoder). The self-attention\\nlayer connects all the sequence positions with O(1)space\\ncomplexity which is highly desirable for learning long-range\\ndependencies in the input.\\n2. Cross Attention: In encoder-decoder architectures, the\\noutputs of the encoder blocks act as the queries to the\\nintermediate representation of the decoder, which provides the\\nkeys and values to calculate a representation of the decoder\\nconditioned on the encoder. This attention is called cross-\\nattention.\\n3. Full Attention: The naive implementation of calculating\\nself-attention is known as full attention.\\n4. Sparse Attention [63]: The self-attention has a time\\ncomplexity of O(n2), which becomes prohibitive when scaling\\nthe LLMs to large context windows. An approximation to the\\nself-attention was proposed in [63], which greatly enhanced\\nthe capacity of GPT series LLMs to process a greater number\\nof input tokens in a reasonable time.\\n5. Flash Attention [64]: The bottleneck for calculating the\\nattention using GPUs lies in the memory access rather than the\\ncomputational speed. Flash Attention uses the classical input\\ntiling approach to process the blocks of the input in GPU on-\\nchip SRAM rather than doing IO for every token from the High\\nBandwith Memory (HBM). An extension of this approach to\\nsparse attention follows the speed gains of the full attention\\nimplementation. This trick allows even greater context-length\\nwindows in the LLMs as compared to those LLMs with sparse\\nattention.\\nD. Encoding Positions\\nTheattention modules do not consider the order of process-\\ning by design. Transformer [62] introduced “positional encod-\\nings” to feed information about the position of the tokens in\\ninput sequences. Several variants of positional encoding have\\nbeen proposed [65], [66]. Interestingly, a recent study [67]\\nsuggests that adding this information may not matter for the\\nstate-of-the-art decoder-only Transformers.\\n1. 
Absolute: This is the most straightforward approach to\\nadding the sequence order information by assigning a unique\\nidentifier to each position of the sequence before passing it to\\nthe attention module.', metadata={'source': '../docs/llm-research.pdf', 'page': 3})]\n"
]
|
822 |
+
},
|
823 |
+
{
|
824 |
+
"data": {
|
825 |
+
"text/plain": [
|
826 |
+
"{'output_text': ' answer is not available in the context'}"
|
827 |
+
]
|
828 |
+
},
|
829 |
+
"execution_count": 34,
|
830 |
+
"metadata": {},
|
831 |
+
"output_type": "execute_result"
|
832 |
+
}
|
833 |
+
],
|
834 |
+
"source": [
|
835 |
+
"answer(\"what are the tokens\")"
|
836 |
+
]
|
837 |
+
},
|
838 |
+
{
|
839 |
+
"cell_type": "code",
|
840 |
+
"execution_count": 20,
|
841 |
+
"metadata": {
|
842 |
+
"colab": {
|
843 |
+
"background_save": true
|
844 |
+
},
|
845 |
+
"id": "a8tNUutJB9EA"
|
846 |
+
},
|
847 |
+
"outputs": [
|
848 |
+
{
|
849 |
+
"name": "stdout",
|
850 |
+
"output_type": "stream",
|
851 |
+
"text": [
|
852 |
+
"Running on local URL: http://127.0.0.1:7861\n",
|
853 |
+
"\n",
|
854 |
+
"To create a public link, set `share=True` in `launch()`.\n"
|
855 |
+
]
|
856 |
+
},
|
857 |
+
{
|
858 |
+
"data": {
|
859 |
+
"text/html": [
|
860 |
+
"<div><iframe src=\"http://127.0.0.1:7861/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
|
861 |
+
],
|
862 |
+
"text/plain": [
|
863 |
+
"<IPython.core.display.HTML object>"
|
864 |
+
]
|
865 |
+
},
|
866 |
+
"metadata": {},
|
867 |
+
"output_type": "display_data"
|
868 |
+
}
|
869 |
+
],
|
870 |
+
"source": [
|
871 |
+
"# Gradio App\n",
|
872 |
+
"import gradio as gr\n",
|
873 |
+
"\n",
|
874 |
+
"title = \"\"\n",
|
875 |
+
"description = f\"Chat with any docs\"\n",
|
876 |
+
"\n",
|
877 |
+
"def answer_query(message, history):\n",
|
878 |
+
" docs = db.similarity_search(message)\n",
|
879 |
+
" message = agent(\n",
|
880 |
+
" {\"input_documents\":docs, \"question\": message}\n",
|
881 |
+
" ,return_only_outputs=True)\n",
|
882 |
+
" return message['output_text']\n",
|
883 |
+
"\n",
|
884 |
+
"\n",
|
885 |
+
"demo = gr.ChatInterface(\n",
|
886 |
+
" answer_query,\n",
|
887 |
+
" title= title,\n",
|
888 |
+
" description=description,\n",
|
889 |
+
" examples=[\n",
|
890 |
+
" [\"What is a Large Language Model?\"],\n",
|
891 |
+
" [\"What's 9+2-1?\"],\n",
|
892 |
+
" [\"Write Python code to print the Fibonacci sequence\"]\n",
|
893 |
+
" ]\n",
|
894 |
+
")\n",
|
895 |
+
"\n",
|
896 |
+
"if __name__ == \"__main__\":\n",
|
897 |
+
" demo.queue().launch()\n",
|
898 |
+
"\n"
|
899 |
+
]
|
900 |
+
},
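The Gradio cell above relies on a FAISS vector store `db` and a QA chain `agent` defined earlier in the notebook and not visible in this diff. A minimal sketch of that setup, using the imports this repository's notebooks use elsewhere; the `texts` stand-in and the omission of the notebook's custom prompt are assumptions:

```python
from langchain.vectorstores import FAISS
from langchain.chains.question_answering import load_qa_chain
from langchain_google_genai import GoogleGenerativeAIEmbeddings, ChatGoogleGenerativeAI

# Stand-in for the chunks produced by the notebook's text splitter (assumption).
texts = [
    "Self-attention relates different positions of the same sequence.",
    "FAISS stores embeddings for fast similarity search.",
]

embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")
db = FAISS.from_texts(texts, embedding=embeddings)  # in-memory vector index

llm = ChatGoogleGenerativeAI(model="gemini-pro")
agent = load_qa_chain(llm, chain_type="stuff")      # "stuff" packs retrieved docs into one prompt

docs = db.similarity_search("what is self-attention?")
result = agent({"input_documents": docs, "question": "what is self-attention?"},
               return_only_outputs=True)
print(result["output_text"])
```

This mirrors the call pattern in `answer_query` above: retrieve the most similar chunks, then let the chain answer from only those chunks.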
|
901 |
+
{
|
902 |
+
"cell_type": "code",
|
903 |
+
"execution_count": null,
|
904 |
+
"metadata": {},
|
905 |
+
"outputs": [],
|
906 |
+
"source": []
|
907 |
+
}
|
908 |
+
],
|
909 |
+
"metadata": {
|
910 |
+
"colab": {
|
911 |
+
"provenance": []
|
912 |
+
},
|
913 |
+
"kernelspec": {
|
914 |
+
"display_name": "Python 3",
|
915 |
+
"name": "python3"
|
916 |
+
},
|
917 |
+
"language_info": {
|
918 |
+
"codemirror_mode": {
|
919 |
+
"name": "ipython",
|
920 |
+
"version": 3
|
921 |
+
},
|
922 |
+
"file_extension": ".py",
|
923 |
+
"mimetype": "text/x-python",
|
924 |
+
"name": "python",
|
925 |
+
"nbconvert_exporter": "python",
|
926 |
+
"pygments_lexer": "ipython3",
|
927 |
+
"version": "3.10.12"
|
928 |
+
}
|
929 |
+
},
|
930 |
+
"nbformat": 4,
|
931 |
+
"nbformat_minor": 0
|
932 |
+
}
|
notebooks/gemini-docs.ipynb
ADDED
@@ -0,0 +1,883 @@
1 |
+
{
|
2 |
+
"cells": [
|
3 |
+
{
|
4 |
+
"cell_type": "code",
|
5 |
+
"execution_count": 1,
|
6 |
+
"metadata": {},
|
7 |
+
"outputs": [],
|
8 |
+
"source": [
|
9 |
+
"! pip install -q --upgrade google-generativeai langchain-google-genai gradio"
|
10 |
+
]
|
11 |
+
},
|
12 |
+
{
|
13 |
+
"cell_type": "code",
|
14 |
+
"execution_count": 2,
|
15 |
+
"metadata": {},
|
16 |
+
"outputs": [
|
17 |
+
{
|
18 |
+
"name": "stderr",
|
19 |
+
"output_type": "stream",
|
20 |
+
"text": [
|
21 |
+
"/home/vasim/.local/lib/python3.10/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
|
22 |
+
" from .autonotebook import tqdm as notebook_tqdm\n"
|
23 |
+
]
|
24 |
+
}
|
25 |
+
],
|
26 |
+
"source": [
|
27 |
+
"import os\n",
|
28 |
+
"from langchain.document_loaders import (\n",
|
29 |
+
" PyPDFLoader,\n",
|
30 |
+
" TextLoader,\n",
|
31 |
+
" Docx2txtLoader\n",
|
32 |
+
")\n",
|
33 |
+
"\n",
|
34 |
+
"from langchain.text_splitter import CharacterTextSplitter\n",
|
35 |
+
"# from PyPDF2 import PdfReader\n",
|
36 |
+
"from langchain.text_splitter import RecursiveCharacterTextSplitter\n",
|
37 |
+
"from langchain_google_genai import GoogleGenerativeAIEmbeddings\n",
|
38 |
+
"import google.generativeai as genai\n",
|
39 |
+
"from langchain.vectorstores import FAISS\n",
|
40 |
+
"from langchain_google_genai import ChatGoogleGenerativeAI\n",
|
41 |
+
"from langchain.chains.question_answering import load_qa_chain\n",
|
42 |
+
"from langchain.prompts import PromptTemplate\n",
|
43 |
+
"from dotenv import load_dotenv"
|
44 |
+
]
|
45 |
+
},
|
46 |
+
{
|
47 |
+
"cell_type": "code",
|
48 |
+
"execution_count": 3,
|
49 |
+
"metadata": {},
|
50 |
+
"outputs": [
|
51 |
+
{
|
52 |
+
"data": {
|
53 |
+
"text/plain": [
|
54 |
+
"True"
|
55 |
+
]
|
56 |
+
},
|
57 |
+
"execution_count": 3,
|
58 |
+
"metadata": {},
|
59 |
+
"output_type": "execute_result"
|
60 |
+
}
|
61 |
+
],
|
62 |
+
"source": [
|
63 |
+
"load_dotenv()"
|
64 |
+
]
|
65 |
+
},
|
66 |
+
{
|
67 |
+
"cell_type": "code",
|
68 |
+
"execution_count": 4,
|
69 |
+
"metadata": {},
|
70 |
+
"outputs": [
|
71 |
+
{
|
72 |
+
"data": {
|
73 |
+
"text/plain": [
|
74 |
+
"[0.0026024636,\n",
|
75 |
+
" -0.00046470974,\n",
|
76 |
+
" -0.06524466,\n",
|
77 |
+
" 0.0037671768,\n",
|
78 |
+
" 0.050909057,\n",
|
79 |
+
" 0.03566457,\n",
|
80 |
+
" 0.004729628,\n",
|
81 |
+
" -0.02265792,\n",
|
82 |
+
" -0.019636517,\n",
|
83 |
+
" 0.041511342,\n",
|
84 |
+
" -0.0032873638,\n",
|
85 |
+
" 0.009839665,\n",
|
86 |
+
" 0.0022297094,\n",
|
87 |
+
" 0.011045753,\n",
|
88 |
+
" -0.0029112806,\n",
|
89 |
+
" 0.00036380324,\n",
|
90 |
+
" 0.013318236,\n",
|
91 |
+
" 0.008650382,\n",
|
92 |
+
" 0.040087987,\n",
|
93 |
+
" 0.003234754,\n",
|
94 |
+
" 0.02216253,\n",
|
95 |
+
" 0.027707517,\n",
|
96 |
+
" 0.0015880043,\n",
|
97 |
+
" -0.011375762,\n",
|
98 |
+
" 0.027686268,\n",
|
99 |
+
" -0.03158842,\n",
|
100 |
+
" 0.013077853,\n",
|
101 |
+
" -0.02646762,\n",
|
102 |
+
" -0.03462742,\n",
|
103 |
+
" -0.010597595,\n",
|
104 |
+
" -0.053662993,\n",
|
105 |
+
" 0.033564012,\n",
|
106 |
+
" -0.025191225,\n",
|
107 |
+
" 0.008060145,\n",
|
108 |
+
" 0.006509568,\n",
|
109 |
+
" -0.065666765,\n",
|
110 |
+
" -0.003321127,\n",
|
111 |
+
" 0.015929354,\n",
|
112 |
+
" 0.009666688,\n",
|
113 |
+
" 0.017954187,\n",
|
114 |
+
" 0.003692957,\n",
|
115 |
+
" -0.025450539,\n",
|
116 |
+
" -0.019842725,\n",
|
117 |
+
" 0.0074209156,\n",
|
118 |
+
" -0.0038040788,\n",
|
119 |
+
" -0.013353295,\n",
|
120 |
+
" -0.06744818,\n",
|
121 |
+
" 0.02687267,\n",
|
122 |
+
" 0.00036143005,\n",
|
123 |
+
" -0.04220317,\n",
|
124 |
+
" 0.03936105,\n",
|
125 |
+
" 0.038491815,\n",
|
126 |
+
" 0.044381935,\n",
|
127 |
+
" -0.04167377,\n",
|
128 |
+
" -0.017151065,\n",
|
129 |
+
" -0.027833307,\n",
|
130 |
+
" 0.0400076,\n",
|
131 |
+
" -0.0050078775,\n",
|
132 |
+
" 0.0017342605,\n",
|
133 |
+
" 0.033973645,\n",
|
134 |
+
" -0.0035528678,\n",
|
135 |
+
" -0.008506744,\n",
|
136 |
+
" 0.050232697,\n",
|
137 |
+
" 0.0047968724,\n",
|
138 |
+
" -0.022322817,\n",
|
139 |
+
" -0.075537644,\n",
|
140 |
+
" -0.014450601,\n",
|
141 |
+
" 0.0054708333,\n",
|
142 |
+
" 0.034601416,\n",
|
143 |
+
" -0.01907222,\n",
|
144 |
+
" -0.013576153,\n",
|
145 |
+
" -0.07363731,\n",
|
146 |
+
" 0.058990918,\n",
|
147 |
+
" 0.0051348056,\n",
|
148 |
+
" -0.009838986,\n",
|
149 |
+
" -0.155085,\n",
|
150 |
+
" -0.0028395671,\n",
|
151 |
+
" 0.04909831,\n",
|
152 |
+
" 0.0138770975,\n",
|
153 |
+
" 0.011540807,\n",
|
154 |
+
" -0.0018861439,\n",
|
155 |
+
" -0.01755376,\n",
|
156 |
+
" -0.08361732,\n",
|
157 |
+
" -0.039391823,\n",
|
158 |
+
" -0.077364445,\n",
|
159 |
+
" 0.0204261,\n",
|
160 |
+
" -0.05680118,\n",
|
161 |
+
" -0.009028887,\n",
|
162 |
+
" -0.015683021,\n",
|
163 |
+
" 0.061899763,\n",
|
164 |
+
" -0.026810465,\n",
|
165 |
+
" -0.021239934,\n",
|
166 |
+
" 0.03995343,\n",
|
167 |
+
" -0.06467655,\n",
|
168 |
+
" 0.011940644,\n",
|
169 |
+
" 0.063081965,\n",
|
170 |
+
" -0.03472896,\n",
|
171 |
+
" -0.020098649,\n",
|
172 |
+
" 0.03046696,\n",
|
173 |
+
" 0.0026317302,\n",
|
174 |
+
" -0.0033308691,\n",
|
175 |
+
" 0.0076804003,\n",
|
176 |
+
" -0.07220898,\n",
|
177 |
+
" 0.03097491,\n",
|
178 |
+
" 0.02099598,\n",
|
179 |
+
" 0.015009643,\n",
|
180 |
+
" 0.0027554797,\n",
|
181 |
+
" 0.056863576,\n",
|
182 |
+
" 0.016525302,\n",
|
183 |
+
" 0.044575516,\n",
|
184 |
+
" -0.07853067,\n",
|
185 |
+
" 0.01992914,\n",
|
186 |
+
" 0.02582012,\n",
|
187 |
+
" 0.024094418,\n",
|
188 |
+
" 0.03533265,\n",
|
189 |
+
" -0.021520808,\n",
|
190 |
+
" -0.033198,\n",
|
191 |
+
" 0.07007376,\n",
|
192 |
+
" 0.020927606,\n",
|
193 |
+
" 0.03658496,\n",
|
194 |
+
" 0.021251578,\n",
|
195 |
+
" -0.025971107,\n",
|
196 |
+
" 0.09621473,\n",
|
197 |
+
" -0.023366872,\n",
|
198 |
+
" 0.003256949,\n",
|
199 |
+
" -0.022166384,\n",
|
200 |
+
" -0.038614567,\n",
|
201 |
+
" 0.02979776,\n",
|
202 |
+
" 0.039533786,\n",
|
203 |
+
" 0.012638491,\n",
|
204 |
+
" -0.0055726334,\n",
|
205 |
+
" -0.05542629,\n",
|
206 |
+
" -0.024121152,\n",
|
207 |
+
" 0.03178174,\n",
|
208 |
+
" 0.052707106,\n",
|
209 |
+
" 0.104821995,\n",
|
210 |
+
" 0.019577984,\n",
|
211 |
+
" -0.013310535,\n",
|
212 |
+
" 0.044787172,\n",
|
213 |
+
" -0.024393918,\n",
|
214 |
+
" 0.009802365,\n",
|
215 |
+
" 0.02994997,\n",
|
216 |
+
" -0.0008475685,\n",
|
217 |
+
" 0.04228906,\n",
|
218 |
+
" -0.006219175,\n",
|
219 |
+
" 0.04402614,\n",
|
220 |
+
" -0.040632106,\n",
|
221 |
+
" 0.009029098,\n",
|
222 |
+
" 0.08250455,\n",
|
223 |
+
" -0.008027798,\n",
|
224 |
+
" -0.012396638,\n",
|
225 |
+
" -0.005741472,\n",
|
226 |
+
" -0.06530075,\n",
|
227 |
+
" -0.0076336395,\n",
|
228 |
+
" 0.056545228,\n",
|
229 |
+
" 0.008156076,\n",
|
230 |
+
" -0.031731352,\n",
|
231 |
+
" 0.025516897,\n",
|
232 |
+
" 0.013671465,\n",
|
233 |
+
" 0.007437407,\n",
|
234 |
+
" 0.022746652,\n",
|
235 |
+
" -0.0021193696,\n",
|
236 |
+
" 0.01611938,\n",
|
237 |
+
" 0.018748892,\n",
|
238 |
+
" 0.0031200994,\n",
|
239 |
+
" 0.0031527125,\n",
|
240 |
+
" 0.0005562957,\n",
|
241 |
+
" 0.017112905,\n",
|
242 |
+
" -0.03922318,\n",
|
243 |
+
" 0.002402639,\n",
|
244 |
+
" -0.041061036,\n",
|
245 |
+
" -0.005380344,\n",
|
246 |
+
" -0.036544673,\n",
|
247 |
+
" -0.03292972,\n",
|
248 |
+
" -0.010599688,\n",
|
249 |
+
" -0.033042144,\n",
|
250 |
+
" 0.021467226,\n",
|
251 |
+
" -0.011730441,\n",
|
252 |
+
" -0.040189914,\n",
|
253 |
+
" -0.0038518396,\n",
|
254 |
+
" 0.006088003,\n",
|
255 |
+
" -0.05208495,\n",
|
256 |
+
" 0.026408045,\n",
|
257 |
+
" 0.04386243,\n",
|
258 |
+
" -0.01699642,\n",
|
259 |
+
" -0.05508133,\n",
|
260 |
+
" 0.060988806,\n",
|
261 |
+
" -0.021120366,\n",
|
262 |
+
" -0.048851084,\n",
|
263 |
+
" 0.011562087,\n",
|
264 |
+
" 0.0046992013,\n",
|
265 |
+
" 0.0029417663,\n",
|
266 |
+
" -0.030239712,\n",
|
267 |
+
" -0.044726174,\n",
|
268 |
+
" -0.036426347,\n",
|
269 |
+
" 0.058376063,\n",
|
270 |
+
" 0.007909313,\n",
|
271 |
+
" 0.0082651945,\n",
|
272 |
+
" 0.005973673,\n",
|
273 |
+
" -0.08589435,\n",
|
274 |
+
" -0.04365921,\n",
|
275 |
+
" 0.079266064,\n",
|
276 |
+
" 0.010272922,\n",
|
277 |
+
" -0.00077904004,\n",
|
278 |
+
" 0.027783131,\n",
|
279 |
+
" 0.009019264,\n",
|
280 |
+
" 0.05524787,\n",
|
281 |
+
" -0.048576605,\n",
|
282 |
+
" -0.04641576,\n",
|
283 |
+
" 0.04849984,\n",
|
284 |
+
" -0.031302325,\n",
|
285 |
+
" 0.04504644,\n",
|
286 |
+
" -0.014899033,\n",
|
287 |
+
" 0.02830163,\n",
|
288 |
+
" 0.012142052,\n",
|
289 |
+
" 0.0027500011,\n",
|
290 |
+
" 0.0037108215,\n",
|
291 |
+
" 0.024816774,\n",
|
292 |
+
" 0.034425203,\n",
|
293 |
+
" -0.025865829,\n",
|
294 |
+
" -0.0023688357,\n",
|
295 |
+
" -0.004570346,\n",
|
296 |
+
" -0.036537673,\n",
|
297 |
+
" 0.00785959,\n",
|
298 |
+
" -0.006369414,\n",
|
299 |
+
" 0.05036734,\n",
|
300 |
+
" -0.0013942671,\n",
|
301 |
+
" -0.0066736704,\n",
|
302 |
+
" -0.007213406,\n",
|
303 |
+
" -0.02938604,\n",
|
304 |
+
" -0.005098042,\n",
|
305 |
+
" 0.069885835,\n",
|
306 |
+
" 0.039996464,\n",
|
307 |
+
" -0.0014599679,\n",
|
308 |
+
" 0.052899357,\n",
|
309 |
+
" 0.012151934,\n",
|
310 |
+
" 0.0045691393,\n",
|
311 |
+
" -0.0169914,\n",
|
312 |
+
" 0.023379533,\n",
|
313 |
+
" 0.054388337,\n",
|
314 |
+
" -0.046915285,\n",
|
315 |
+
" 0.012740782,\n",
|
316 |
+
" 0.008226091,\n",
|
317 |
+
" 0.0650389,\n",
|
318 |
+
" -0.053086124,\n",
|
319 |
+
" -0.035921216,\n",
|
320 |
+
" -0.047861494,\n",
|
321 |
+
" 0.03646698,\n",
|
322 |
+
" 0.02124863,\n",
|
323 |
+
" 0.07875475,\n",
|
324 |
+
" 0.041897457,\n",
|
325 |
+
" -0.059685115,\n",
|
326 |
+
" -0.005645064,\n",
|
327 |
+
" -0.038765974,\n",
|
328 |
+
" -0.082096376,\n",
|
329 |
+
" 0.019661527,\n",
|
330 |
+
" -0.048235465,\n",
|
331 |
+
" 0.032961316,\n",
|
332 |
+
" -0.015816065,\n",
|
333 |
+
" 0.01349209,\n",
|
334 |
+
" 0.030743608,\n",
|
335 |
+
" 0.015003568,\n",
|
336 |
+
" -0.028993933,\n",
|
337 |
+
" -0.01690658,\n",
|
338 |
+
" -0.035253666,\n",
|
339 |
+
" -0.010791926,\n",
|
340 |
+
" 0.0042820405,\n",
|
341 |
+
" -0.06907554,\n",
|
342 |
+
" -0.0014160916,\n",
|
343 |
+
" -0.012969035,\n",
|
344 |
+
" 0.043451488,\n",
|
345 |
+
" -0.01587785,\n",
|
346 |
+
" 0.038347695,\n",
|
347 |
+
" 0.012478342,\n",
|
348 |
+
" -0.012291408,\n",
|
349 |
+
" 0.018989006,\n",
|
350 |
+
" -0.012111724,\n",
|
351 |
+
" 0.043280758,\n",
|
352 |
+
" 0.00065698626,\n",
|
353 |
+
" -0.03504554,\n",
|
354 |
+
" 0.06592344,\n",
|
355 |
+
" 0.03193109,\n",
|
356 |
+
" 0.026153369,\n",
|
357 |
+
" -0.050498523,\n",
|
358 |
+
" 0.013383252,\n",
|
359 |
+
" -0.008368444,\n",
|
360 |
+
" -0.041243635,\n",
|
361 |
+
" -0.04379342,\n",
|
362 |
+
" -0.007160138,\n",
|
363 |
+
" -0.041415907,\n",
|
364 |
+
" -0.009043778,\n",
|
365 |
+
" -0.06227678,\n",
|
366 |
+
" 0.04495307,\n",
|
367 |
+
" -0.012437426,\n",
|
368 |
+
" -0.005763928,\n",
|
369 |
+
" -0.01721329,\n",
|
370 |
+
" 0.0021767728,\n",
|
371 |
+
" 0.036614556,\n",
|
372 |
+
" 0.014989671,\n",
|
373 |
+
" -0.02297016,\n",
|
374 |
+
" 0.017203707,\n",
|
375 |
+
" -0.068807654,\n",
|
376 |
+
" 0.028532282,\n",
|
377 |
+
" -0.077031314,\n",
|
378 |
+
" -0.003550955,\n",
|
379 |
+
" -0.016694745,\n",
|
380 |
+
" -0.024186041,\n",
|
381 |
+
" -0.051172636,\n",
|
382 |
+
" 0.023041002,\n",
|
383 |
+
" 0.03607617,\n",
|
384 |
+
" -0.006545797,\n",
|
385 |
+
" 0.026791858,\n",
|
386 |
+
" -0.050831117,\n",
|
387 |
+
" -0.0060527464,\n",
|
388 |
+
" 0.040906418,\n",
|
389 |
+
" 0.030260349,\n",
|
390 |
+
" -0.03622405,\n",
|
391 |
+
" 0.03427033,\n",
|
392 |
+
" -0.0076176235,\n",
|
393 |
+
" 0.04239094,\n",
|
394 |
+
" -0.007758525,\n",
|
395 |
+
" 0.08181105,\n",
|
396 |
+
" -0.002424102,\n",
|
397 |
+
" -0.008245623,\n",
|
398 |
+
" -0.024848122,\n",
|
399 |
+
" 0.052394852,\n",
|
400 |
+
" 0.019161373,\n",
|
401 |
+
" 0.00611725,\n",
|
402 |
+
" -0.020502884,\n",
|
403 |
+
" 0.026944518,\n",
|
404 |
+
" -0.010385731,\n",
|
405 |
+
" 0.041430656,\n",
|
406 |
+
" -0.072016835,\n",
|
407 |
+
" -0.0066381455,\n",
|
408 |
+
" -0.0046330006,\n",
|
409 |
+
" 0.05215405,\n",
|
410 |
+
" -0.08320881,\n",
|
411 |
+
" 0.019687334,\n",
|
412 |
+
" -0.047417916,\n",
|
413 |
+
" -0.03933844,\n",
|
414 |
+
" 0.023311257,\n",
|
415 |
+
" -0.0010838164,\n",
|
416 |
+
" -0.016675808,\n",
|
417 |
+
" -0.053388983,\n",
|
418 |
+
" 0.002350658,\n",
|
419 |
+
" -0.018954204,\n",
|
420 |
+
" -0.021903217,\n",
|
421 |
+
" 0.028829457,\n",
|
422 |
+
" 0.068412215,\n",
|
423 |
+
" 0.04974716,\n",
|
424 |
+
" 0.015097642,\n",
|
425 |
+
" 0.084202014,\n",
|
426 |
+
" 0.02282603,\n",
|
427 |
+
" 0.06366213,\n",
|
428 |
+
" -0.026193192,\n",
|
429 |
+
" -0.01595,\n",
|
430 |
+
" 0.044413194,\n",
|
431 |
+
" -0.053779442,\n",
|
432 |
+
" 0.008965999,\n",
|
433 |
+
" -0.03272141,\n",
|
434 |
+
" 0.004915969,\n",
|
435 |
+
" 0.007829177,\n",
|
436 |
+
" 0.002712625,\n",
|
437 |
+
" -0.036152992,\n",
|
438 |
+
" -0.04032615,\n",
|
439 |
+
" -0.010541384,\n",
|
440 |
+
" -0.034215618,\n",
|
441 |
+
" -0.025328316,\n",
|
442 |
+
" -0.014646692,\n",
|
443 |
+
" 0.029056,\n",
|
444 |
+
" 0.02925028,\n",
|
445 |
+
" 0.008913267,\n",
|
446 |
+
" 0.0019940082,\n",
|
447 |
+
" -0.06695999,\n",
|
448 |
+
" -0.0010260289,\n",
|
449 |
+
" 0.011302115,\n",
|
450 |
+
" -0.03292385,\n",
|
451 |
+
" 0.00042418708,\n",
|
452 |
+
" 0.017628789,\n",
|
453 |
+
" -0.00475448,\n",
|
454 |
+
" 0.043151293,\n",
|
455 |
+
" 0.006198079,\n",
|
456 |
+
" 0.08446194,\n",
|
457 |
+
" 0.0038840906,\n",
|
458 |
+
" 0.029208006,\n",
|
459 |
+
" -0.03733759,\n",
|
460 |
+
" 0.035057925,\n",
|
461 |
+
" -0.025195966,\n",
|
462 |
+
" -0.022711853,\n",
|
463 |
+
" 0.069107376,\n",
|
464 |
+
" -0.056606133,\n",
|
465 |
+
" 0.07434411,\n",
|
466 |
+
" 0.101236925,\n",
|
467 |
+
" -0.0047446825,\n",
|
468 |
+
" 0.0071048927,\n",
|
469 |
+
" -0.011161651,\n",
|
470 |
+
" 0.012175554,\n",
|
471 |
+
" -0.037840243,\n",
|
472 |
+
" 0.036562636,\n",
|
473 |
+
" 0.057127528,\n",
|
474 |
+
" -0.014619162,\n",
|
475 |
+
" -0.045825053,\n",
|
476 |
+
" -0.027700488,\n",
|
477 |
+
" -0.0059357225,\n",
|
478 |
+
" -0.050302368,\n",
|
479 |
+
" -0.00026914122,\n",
|
480 |
+
" 0.019348465,\n",
|
481 |
+
" -0.013578195,\n",
|
482 |
+
" -0.042193584,\n",
|
483 |
+
" -0.009891802,\n",
|
484 |
+
" 0.017196594,\n",
|
485 |
+
" -0.025658505,\n",
|
486 |
+
" 0.006126944,\n",
|
487 |
+
" -0.0873973,\n",
|
488 |
+
" -0.043111254,\n",
|
489 |
+
" -0.03861637,\n",
|
490 |
+
" 0.040047407,\n",
|
491 |
+
" 0.004773531,\n",
|
492 |
+
" -0.0055007017,\n",
|
493 |
+
" 0.020156063,\n",
|
494 |
+
" -0.04213769,\n",
|
495 |
+
" -0.0020724083,\n",
|
496 |
+
" 0.012001859,\n",
|
497 |
+
" -0.05014639,\n",
|
498 |
+
" -0.08432724,\n",
|
499 |
+
" -0.04858163,\n",
|
500 |
+
" 0.03317245,\n",
|
501 |
+
" 0.009808364,\n",
|
502 |
+
" -0.011328112,\n",
|
503 |
+
" 0.034310147,\n",
|
504 |
+
" 0.033897907,\n",
|
505 |
+
" 0.0014663753,\n",
|
506 |
+
" -0.006445559,\n",
|
507 |
+
" -0.0244833,\n",
|
508 |
+
" -0.01675865,\n",
|
509 |
+
" -0.019391444,\n",
|
510 |
+
" -0.0033584565,\n",
|
511 |
+
" 0.02279385,\n",
|
512 |
+
" -0.008156112,\n",
|
513 |
+
" 0.0020341233,\n",
|
514 |
+
" 0.064902686,\n",
|
515 |
+
" -0.029238507,\n",
|
516 |
+
" 0.014163741,\n",
|
517 |
+
" -0.0075179366,\n",
|
518 |
+
" -0.049113605,\n",
|
519 |
+
" -0.0025780846,\n",
|
520 |
+
" -0.014767654,\n",
|
521 |
+
" -0.034390695,\n",
|
522 |
+
" 0.009971777,\n",
|
523 |
+
" -0.06279843,\n",
|
524 |
+
" 0.04299513,\n",
|
525 |
+
" -0.08961137,\n",
|
526 |
+
" -0.02808285,\n",
|
527 |
+
" -0.041206528,\n",
|
528 |
+
" -0.04893974,\n",
|
529 |
+
" -0.039606616,\n",
|
530 |
+
" -0.0077889636,\n",
|
531 |
+
" 0.08335686,\n",
|
532 |
+
" -0.010121534,\n",
|
533 |
+
" -0.0057770414,\n",
|
534 |
+
" -0.029257368,\n",
|
535 |
+
" -0.020732552,\n",
|
536 |
+
" -0.0035506056,\n",
|
537 |
+
" -0.103099026,\n",
|
538 |
+
" 0.021422477,\n",
|
539 |
+
" -0.018300131,\n",
|
540 |
+
" -0.0013129099,\n",
|
541 |
+
" -0.020269357,\n",
|
542 |
+
" 0.007500347,\n",
|
543 |
+
" 0.054774307,\n",
|
544 |
+
" 0.0024568313,\n",
|
545 |
+
" -0.0438022,\n",
|
546 |
+
" -0.015716508,\n",
|
547 |
+
" 0.0008172836,\n",
|
548 |
+
" -0.046375576,\n",
|
549 |
+
" -0.03338143,\n",
|
550 |
+
" -0.091609284,\n",
|
551 |
+
" 0.062183857,\n",
|
552 |
+
" -0.020281738,\n",
|
553 |
+
" -0.025984671,\n",
|
554 |
+
" 0.055528525,\n",
|
555 |
+
" 0.016999535,\n",
|
556 |
+
" -0.028604554,\n",
|
557 |
+
" 0.026752898,\n",
|
558 |
+
" -0.009096473,\n",
|
559 |
+
" 0.022438003,\n",
|
560 |
+
" -0.017217133,\n",
|
561 |
+
" -0.025918938,\n",
|
562 |
+
" -0.021384709,\n",
|
563 |
+
" 0.04131251,\n",
|
564 |
+
" 0.011413672,\n",
|
565 |
+
" -0.03978722,\n",
|
566 |
+
" 0.003499603,\n",
|
567 |
+
" -0.03932664,\n",
|
568 |
+
" -0.03266107,\n",
|
569 |
+
" -0.0037615069,\n",
|
570 |
+
" 0.01098813,\n",
|
571 |
+
" 0.055706598,\n",
|
572 |
+
" 0.04374345,\n",
|
573 |
+
" 0.018282231,\n",
|
574 |
+
" -0.011001723,\n",
|
575 |
+
" -0.026331998,\n",
|
576 |
+
" 0.008415885,\n",
|
577 |
+
" -0.0017321053,\n",
|
578 |
+
" 0.07693357,\n",
|
579 |
+
" -0.08920055,\n",
|
580 |
+
" -0.022223707,\n",
|
581 |
+
" 0.012738715,\n",
|
582 |
+
" 0.01656477,\n",
|
583 |
+
" -0.023257954,\n",
|
584 |
+
" -0.0056636413,\n",
|
585 |
+
" 0.009431777,\n",
|
586 |
+
" -0.0063169277,\n",
|
587 |
+
" 0.04679692,\n",
|
588 |
+
" 0.054250635,\n",
|
589 |
+
" -0.0041032853,\n",
|
590 |
+
" 0.0056171734,\n",
|
591 |
+
" 0.018150132,\n",
|
592 |
+
" -0.00058200443,\n",
|
593 |
+
" 0.019457081,\n",
|
594 |
+
" 0.0018458135,\n",
|
595 |
+
" 0.0003532362,\n",
|
596 |
+
" -0.08229051,\n",
|
597 |
+
" -8.8678644e-05,\n",
|
598 |
+
" 0.010146113,\n",
|
599 |
+
" -0.050217737,\n",
|
600 |
+
" -0.018734543,\n",
|
601 |
+
" -0.0039246683,\n",
|
602 |
+
" -0.048076928,\n",
|
603 |
+
" 0.03604184,\n",
|
604 |
+
" -0.027944451,\n",
|
605 |
+
" 0.07513914,\n",
|
606 |
+
" -0.05666047,\n",
|
607 |
+
" -0.03619383,\n",
|
608 |
+
" 0.049743947,\n",
|
609 |
+
" 0.029366102,\n",
|
610 |
+
" 0.017087461,\n",
|
611 |
+
" 0.01768675,\n",
|
612 |
+
" -0.015412814,\n",
|
613 |
+
" -0.023675736,\n",
|
614 |
+
" 0.014236046,\n",
|
615 |
+
" -0.0353288,\n",
|
616 |
+
" 0.034391064,\n",
|
617 |
+
" 0.009070227,\n",
|
618 |
+
" -0.07759078,\n",
|
619 |
+
" 0.017592117,\n",
|
620 |
+
" -0.00289023,\n",
|
621 |
+
" -0.06718024,\n",
|
622 |
+
" 0.0136539275,\n",
|
623 |
+
" -0.0040624915,\n",
|
624 |
+
" -0.023015171,\n",
|
625 |
+
" 0.033067793,\n",
|
626 |
+
" 0.038277265,\n",
|
627 |
+
" -0.0037099298,\n",
|
628 |
+
" 0.004821567,\n",
|
629 |
+
" -0.0043556388,\n",
|
630 |
+
" 0.010389023,\n",
|
631 |
+
" -0.011538616,\n",
|
632 |
+
" 0.017592786,\n",
|
633 |
+
" -0.0029306181,\n",
|
634 |
+
" -0.008503525,\n",
|
635 |
+
" -0.012787438,\n",
|
636 |
+
" 0.013798478,\n",
|
637 |
+
" 0.034812994,\n",
|
638 |
+
" 0.06886472,\n",
|
639 |
+
" -0.0009630421,\n",
|
640 |
+
" -0.013648257,\n",
|
641 |
+
" 0.008538377,\n",
|
642 |
+
" 0.04826577,\n",
|
643 |
+
" -0.0047196406,\n",
|
644 |
+
" 0.013498656,\n",
|
645 |
+
" -0.0005008069,\n",
|
646 |
+
" 0.036062807,\n",
|
647 |
+
" 0.01667458,\n",
|
648 |
+
" 0.04128509,\n",
|
649 |
+
" 0.00669686,\n",
|
650 |
+
" -0.03196692,\n",
|
651 |
+
" 0.00046054774,\n",
|
652 |
+
" -0.009966674,\n",
|
653 |
+
" -0.019439934,\n",
|
654 |
+
" 0.04555503,\n",
|
655 |
+
" -0.0172578,\n",
|
656 |
+
" 0.0064148423,\n",
|
657 |
+
" 0.04498466,\n",
|
658 |
+
" -0.029169412,\n",
|
659 |
+
" 0.050785944,\n",
|
660 |
+
" 0.06723971,\n",
|
661 |
+
" 0.047408525,\n",
|
662 |
+
" 0.021920597,\n",
|
663 |
+
" 0.014741846,\n",
|
664 |
+
" -0.044936806,\n",
|
665 |
+
" 0.0051182187,\n",
|
666 |
+
" -0.04394078,\n",
|
667 |
+
" -0.019925192,\n",
|
668 |
+
" 0.015369175,\n",
|
669 |
+
" 0.0011553997,\n",
|
670 |
+
" -0.00030811407,\n",
|
671 |
+
" -0.040543456,\n",
|
672 |
+
" -0.027933566,\n",
|
673 |
+
" -0.033180367,\n",
|
674 |
+
" 0.05718386,\n",
|
675 |
+
" -0.027550489,\n",
|
676 |
+
" 0.023995027,\n",
|
677 |
+
" -0.05262063,\n",
|
678 |
+
" 0.03024302,\n",
|
679 |
+
" 0.04645257,\n",
|
680 |
+
" -0.04900795,\n",
|
681 |
+
" 0.0077397116,\n",
|
682 |
+
" 0.027761744,\n",
|
683 |
+
" 0.05983705,\n",
|
684 |
+
" -0.020184644,\n",
|
685 |
+
" -0.0199132,\n",
|
686 |
+
" 0.01580453,\n",
|
687 |
+
" -0.04299223,\n",
|
688 |
+
" -0.050547145,\n",
|
689 |
+
" -0.0519957,\n",
|
690 |
+
" 0.08249654,\n",
|
691 |
+
" 0.0058217077,\n",
|
692 |
+
" -0.021394847,\n",
|
693 |
+
" -0.049588665,\n",
|
694 |
+
" -0.04656881,\n",
|
695 |
+
" -0.010261212,\n",
|
696 |
+
" -0.0025667087,\n",
|
697 |
+
" -0.03484151,\n",
|
698 |
+
" -0.02142792,\n",
|
699 |
+
" 0.0061094393,\n",
|
700 |
+
" -0.009446063,\n",
|
701 |
+
" -0.0042138724,\n",
|
702 |
+
" 0.055291895,\n",
|
703 |
+
" 0.03617863,\n",
|
704 |
+
" 0.06034423,\n",
|
705 |
+
" 0.06481362,\n",
|
706 |
+
" -0.03290425,\n",
|
707 |
+
" 0.029245012,\n",
|
708 |
+
" -0.048802678,\n",
|
709 |
+
" -0.0037805268,\n",
|
710 |
+
" -0.045257535,\n",
|
711 |
+
" 0.036172822,\n",
|
712 |
+
" 0.038555525,\n",
|
713 |
+
" -0.02758126,\n",
|
714 |
+
" -0.06875568,\n",
|
715 |
+
" 0.0017989068,\n",
|
716 |
+
" -0.008342789,\n",
|
717 |
+
" -0.01680573,\n",
|
718 |
+
" -0.0112027405,\n",
|
719 |
+
" 0.062149644,\n",
|
720 |
+
" 0.027881822,\n",
|
721 |
+
" -0.106920265,\n",
|
722 |
+
" -0.064146906,\n",
|
723 |
+
" -0.02093536,\n",
|
724 |
+
" -0.033114824,\n",
|
725 |
+
" 0.015528122,\n",
|
726 |
+
" 0.00026822102,\n",
|
727 |
+
" -0.008216998,\n",
|
728 |
+
" 0.012704465,\n",
|
729 |
+
" 0.030706486,\n",
|
730 |
+
" -0.05130113,\n",
|
731 |
+
" -0.050231606,\n",
|
732 |
+
" 0.034715306,\n",
|
733 |
+
" 0.005949386,\n",
|
734 |
+
" -0.05149062,\n",
|
735 |
+
" -0.03999013,\n",
|
736 |
+
" 0.019164404,\n",
|
737 |
+
" 0.013223384,\n",
|
738 |
+
" 0.00029772808,\n",
|
739 |
+
" -0.023228012,\n",
|
740 |
+
" -0.03363084,\n",
|
741 |
+
" -0.06697605,\n",
|
742 |
+
" -0.00289148,\n",
|
743 |
+
" 0.065958,\n",
|
744 |
+
" -0.082492866,\n",
|
745 |
+
" 0.018354924,\n",
|
746 |
+
" 0.03967476,\n",
|
747 |
+
" -0.029593993,\n",
|
748 |
+
" 0.049283735,\n",
|
749 |
+
" -0.010077968,\n",
|
750 |
+
" -0.007118481,\n",
|
751 |
+
" 0.061920356,\n",
|
752 |
+
" 0.02018739,\n",
|
753 |
+
" -0.0039915326,\n",
|
754 |
+
" 0.03894082,\n",
|
755 |
+
" 0.012183999,\n",
|
756 |
+
" -0.030113503,\n",
|
757 |
+
" 0.014517147,\n",
|
758 |
+
" 0.007991418,\n",
|
759 |
+
" 0.024015212,\n",
|
760 |
+
" 0.009208566,\n",
|
761 |
+
" -0.020164741,\n",
|
762 |
+
" -0.020583002,\n",
|
763 |
+
" -0.039123856,\n",
|
764 |
+
" 0.00087153394,\n",
|
765 |
+
" -0.03082177,\n",
|
766 |
+
" 0.0020127937,\n",
|
767 |
+
" 0.06228645,\n",
|
768 |
+
" 0.026857367,\n",
|
769 |
+
" 0.0368351,\n",
|
770 |
+
" -0.027293308,\n",
|
771 |
+
" -0.026962344,\n",
|
772 |
+
" 0.008827768,\n",
|
773 |
+
" 0.014945436,\n",
|
774 |
+
" -0.029614218,\n",
|
775 |
+
" -0.031657815,\n",
|
776 |
+
" -0.007631079,\n",
|
777 |
+
" -0.0076884576,\n",
|
778 |
+
" 0.04339069,\n",
|
779 |
+
" -0.0026196744,\n",
|
780 |
+
" 0.013788046,\n",
|
781 |
+
" 0.011783524,\n",
|
782 |
+
" 0.026193634,\n",
|
783 |
+
" 0.07127412,\n",
|
784 |
+
" 0.034598272,\n",
|
785 |
+
" -0.035599194,\n",
|
786 |
+
" -0.025256252,\n",
|
787 |
+
" 0.09411122,\n",
|
788 |
+
" 0.010242209,\n",
|
789 |
+
" -0.017384086,\n",
|
790 |
+
" 0.019750888,\n",
|
791 |
+
" 0.043792516,\n",
|
792 |
+
" -0.020661648,\n",
|
793 |
+
" 0.070129156,\n",
|
794 |
+
" 0.049977932,\n",
|
795 |
+
" 0.023388589,\n",
|
796 |
+
" 0.034094717,\n",
|
797 |
+
" -0.048965782,\n",
|
798 |
+
" -0.03520176,\n",
|
799 |
+
" 0.060515266,\n",
|
800 |
+
" -0.032321338,\n",
|
801 |
+
" 0.019958243,\n",
|
802 |
+
" -0.02175526,\n",
|
803 |
+
" -0.04276059,\n",
|
804 |
+
" 0.09902725,\n",
|
805 |
+
" -0.012637526,\n",
|
806 |
+
" 0.01423612,\n",
|
807 |
+
" 0.033305667,\n",
|
808 |
+
" -0.017869117,\n",
|
809 |
+
" 0.07474199,\n",
|
810 |
+
" 0.03466648,\n",
|
811 |
+
" 0.050615326,\n",
|
812 |
+
" 0.010753818,\n",
|
813 |
+
" -0.08773687,\n",
|
814 |
+
" -0.033456173,\n",
|
815 |
+
" -0.018937487,\n",
|
816 |
+
" -0.0038408504,\n",
|
817 |
+
" 0.03892539,\n",
|
818 |
+
" 0.01966001,\n",
|
819 |
+
" 0.00027581758,\n",
|
820 |
+
" 0.0057093804,\n",
|
821 |
+
" -0.03970555,\n",
|
822 |
+
" -0.0064289654,\n",
|
823 |
+
" -0.014644212,\n",
|
824 |
+
" -0.0068154233,\n",
|
825 |
+
" 0.007386573,\n",
|
826 |
+
" -0.03209175,\n",
|
827 |
+
" 0.043366488,\n",
|
828 |
+
" 0.04398344,\n",
|
829 |
+
" 0.0070169214,\n",
|
830 |
+
" -0.015283345,\n",
|
831 |
+
" -0.005414628,\n",
|
832 |
+
" -0.026633335,\n",
|
833 |
+
" 0.022585975,\n",
|
834 |
+
" -0.025980968,\n",
|
835 |
+
" -0.0075632525,\n",
|
836 |
+
" -0.024178095,\n",
|
837 |
+
" -0.0019861956,\n",
|
838 |
+
" 0.033296216,\n",
|
839 |
+
" 0.014306449,\n",
|
840 |
+
" 0.011111949,\n",
|
841 |
+
" 0.013801798]"
|
842 |
+
]
|
843 |
+
},
|
844 |
+
"execution_count": 4,
|
845 |
+
"metadata": {},
|
846 |
+
"output_type": "execute_result"
|
847 |
+
}
|
848 |
+
],
|
849 |
+
"source": [
|
850 |
+
"embeddings = GoogleGenerativeAIEmbeddings(model = \"models/embedding-001\")\n",
|
851 |
+
"embeddings.embed_query(\"I am good\")"
|
852 |
+
]
|
853 |
+
},
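The cell above returns the full 768-value embedding for the query string. A small sketch of how two such embeddings can be compared with cosine similarity; the second sentence is a made-up example, not taken from the notebook:

```python
import numpy as np
from langchain_google_genai import GoogleGenerativeAIEmbeddings

embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")

def cosine_similarity(a, b):
    a, b = np.asarray(a), np.asarray(b)
    return float(a @ b / (np.linalg.norm(a) * np.linalg.norm(b)))

v1 = embeddings.embed_query("I am good")
v2 = embeddings.embed_query("I am doing well")   # assumed second sentence for comparison
print(cosine_similarity(v1, v2))                 # near 1.0 for semantically similar text
```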
|
854 |
+
{
|
855 |
+
"cell_type": "code",
|
856 |
+
"execution_count": null,
|
857 |
+
"metadata": {},
|
858 |
+
"outputs": [],
|
859 |
+
"source": []
|
860 |
+
}
|
861 |
+
],
|
862 |
+
"metadata": {
|
863 |
+
"kernelspec": {
|
864 |
+
"display_name": "Python 3",
|
865 |
+
"language": "python",
|
866 |
+
"name": "python3"
|
867 |
+
},
|
868 |
+
"language_info": {
|
869 |
+
"codemirror_mode": {
|
870 |
+
"name": "ipython",
|
871 |
+
"version": 3
|
872 |
+
},
|
873 |
+
"file_extension": ".py",
|
874 |
+
"mimetype": "text/x-python",
|
875 |
+
"name": "python",
|
876 |
+
"nbconvert_exporter": "python",
|
877 |
+
"pygments_lexer": "ipython3",
|
878 |
+
"version": "3.10.12"
|
879 |
+
}
|
880 |
+
},
|
881 |
+
"nbformat": 4,
|
882 |
+
"nbformat_minor": 2
|
883 |
+
}
|
notebooks/gemini-langchain.ipynb
ADDED
@@ -0,0 +1,123 @@
1 |
+
{
|
2 |
+
"cells": [
|
3 |
+
{
|
4 |
+
"cell_type": "code",
|
5 |
+
"execution_count": 1,
|
6 |
+
"metadata": {},
|
7 |
+
"outputs": [],
|
8 |
+
"source": [
|
9 |
+
"! pip install -q --upgrade google-generativeai langchain-google-genai"
|
10 |
+
]
|
11 |
+
},
|
12 |
+
{
|
13 |
+
"cell_type": "code",
|
14 |
+
"execution_count": 2,
|
15 |
+
"metadata": {},
|
16 |
+
"outputs": [
|
17 |
+
{
|
18 |
+
"data": {
|
19 |
+
"text/plain": [
|
20 |
+
"True"
|
21 |
+
]
|
22 |
+
},
|
23 |
+
"execution_count": 2,
|
24 |
+
"metadata": {},
|
25 |
+
"output_type": "execute_result"
|
26 |
+
}
|
27 |
+
],
|
28 |
+
"source": [
|
29 |
+
"from dotenv import load_dotenv\n",
|
30 |
+
"load_dotenv()"
|
31 |
+
]
|
32 |
+
},
|
33 |
+
{
|
34 |
+
"cell_type": "code",
|
35 |
+
"execution_count": 3,
|
36 |
+
"metadata": {},
|
37 |
+
"outputs": [
|
38 |
+
{
|
39 |
+
"name": "stderr",
|
40 |
+
"output_type": "stream",
|
41 |
+
"text": [
|
42 |
+
"/home/vasim/.local/lib/python3.10/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
|
43 |
+
" from .autonotebook import tqdm as notebook_tqdm\n"
|
44 |
+
]
|
45 |
+
}
|
46 |
+
],
|
47 |
+
"source": [
|
48 |
+
"from langchain_google_genai import ChatGoogleGenerativeAI"
|
49 |
+
]
|
50 |
+
},
|
51 |
+
{
|
52 |
+
"cell_type": "code",
|
53 |
+
"execution_count": 5,
|
54 |
+
"metadata": {},
|
55 |
+
"outputs": [],
|
56 |
+
"source": [
|
57 |
+
"from IPython.display import display\n",
|
58 |
+
"from IPython.display import Markdown\n",
|
59 |
+
"import textwrap\n",
|
60 |
+
"\n",
|
61 |
+
"\n",
|
62 |
+
"def to_markdown(text):\n",
|
63 |
+
" text = text.replace('•', ' *')\n",
|
64 |
+
" return Markdown(textwrap.indent(text, '> ', predicate=lambda _: True))"
|
65 |
+
]
|
66 |
+
},
|
67 |
+
{
|
68 |
+
"cell_type": "code",
|
69 |
+
"execution_count": 6,
|
70 |
+
"metadata": {},
|
71 |
+
"outputs": [
|
72 |
+
{
|
73 |
+
"data": {
|
74 |
+
"text/markdown": [
|
75 |
+
"> 1. **Exercise Regularly:** Aim for at least 150 minutes of moderate-intensity aerobic activity (e.g., brisk walking, cycling, swimming) or 75 minutes of vigorous-intensity aerobic activity (e.g., running, circuit training) each week.\n",
|
76 |
+
"> 2. **Strength Training:** Incorporate strength training exercises that work all major muscle groups at least twice a week. This helps build and maintain muscle mass, which improves strength, mobility, and bone density.\n",
|
77 |
+
"> 3. **Mindful Eating:** Pay attention to hunger and fullness cues. Eat a balanced diet rich in fruits, vegetables, whole grains, lean protein, and healthy fats. Avoid processed foods, excessive sugar, and large portion sizes.\n",
|
78 |
+
"> 4. **Good Sleep:** Aim for 7-8 hours of quality sleep each night. Sleep helps the body recover from exercise, regulates hormones, and supports overall well-being.\n",
|
79 |
+
"> 5. **Manage Stress:** Engage in stress management practices such as meditation, yoga, deep breathing, or spending time in nature. Chronic stress can negatively impact physical and mental health.\n",
|
80 |
+
"> 6. **Avoid Smoking and Excessive Alcohol:** Smoking is detrimental to overall health. Excessive alcohol consumption can interfere with sleep, lead to weight gain, and increase the risk of liver damage.\n",
|
81 |
+
"> 7. **Stay Hydrated:** Drinking plenty of water is essential for various bodily functions. Dehydration can lead to fatigue and impaired performance.\n",
|
82 |
+
"> 8. **Routine Medical Check-ups:** Schedule regular check-ups with your healthcare provider to monitor your overall health, including blood pressure, cholesterol, and weight.\n",
|
83 |
+
"> 9. **Body Positivity:** Focus on feeling healthy and strong rather than obsessing over appearance. Appreciate your body for its abilities and prioritize health over unattainable beauty standards.\n",
|
84 |
+
"> 10. **Find Enjoyable Activities:** Choose physical activities that you genuinely enjoy. This makes it more likely that you'll stick with your fitness routine over time."
|
85 |
+
],
|
86 |
+
"text/plain": [
|
87 |
+
"<IPython.core.display.Markdown object>"
|
88 |
+
]
|
89 |
+
},
|
90 |
+
"execution_count": 6,
|
91 |
+
"metadata": {},
|
92 |
+
"output_type": "execute_result"
|
93 |
+
}
|
94 |
+
],
|
95 |
+
"source": [
|
96 |
+
"llm = ChatGoogleGenerativeAI(model=\"gemini-pro\")\n",
|
97 |
+
"result = llm.invoke(\"What is the best practice to keep fit?\")\n",
|
98 |
+
"to_markdown(result.content)"
|
99 |
+
]
|
100 |
+
}
|
101 |
+
],
|
102 |
+
"metadata": {
|
103 |
+
"kernelspec": {
|
104 |
+
"display_name": "Python 3",
|
105 |
+
"language": "python",
|
106 |
+
"name": "python3"
|
107 |
+
},
|
108 |
+
"language_info": {
|
109 |
+
"codemirror_mode": {
|
110 |
+
"name": "ipython",
|
111 |
+
"version": 3
|
112 |
+
},
|
113 |
+
"file_extension": ".py",
|
114 |
+
"mimetype": "text/x-python",
|
115 |
+
"name": "python",
|
116 |
+
"nbconvert_exporter": "python",
|
117 |
+
"pygments_lexer": "ipython3",
|
118 |
+
"version": "3.10.12"
|
119 |
+
}
|
120 |
+
},
|
121 |
+
"nbformat": 4,
|
122 |
+
"nbformat_minor": 2
|
123 |
+
}
|
notebooks/gemini-llama-index.ipynb
ADDED
@@ -0,0 +1,605 @@
1 |
+
{
|
2 |
+
"cells": [
|
3 |
+
{
|
4 |
+
"cell_type": "markdown",
|
5 |
+
"metadata": {
|
6 |
+
"id": "aEFLqqRlwFqB"
|
7 |
+
},
|
8 |
+
"source": [
|
9 |
+
"<a href=\"https://colab.research.google.com/github/run-llama/llama_index/blob/main/docs/examples/llm/gemini.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>"
|
10 |
+
]
|
11 |
+
},
|
12 |
+
{
|
13 |
+
"cell_type": "markdown",
|
14 |
+
"metadata": {
|
15 |
+
"id": "WOuuFLPcwFqH"
|
16 |
+
},
|
17 |
+
"source": [
|
18 |
+
"# Gemini"
|
19 |
+
]
|
20 |
+
},
|
21 |
+
{
|
22 |
+
"cell_type": "markdown",
|
23 |
+
"metadata": {
|
24 |
+
"id": "Q0g1JrupwFqI"
|
25 |
+
},
|
26 |
+
"source": [
|
27 |
+
"In this notebook, we show how to use the Gemini text models from Google in LlamaIndex. Check out the [Gemini site](https://ai.google.dev/) or the [announcement](https://deepmind.google/technologies/gemini/).\n",
|
28 |
+
"\n",
|
29 |
+
"If you're opening this Notebook on colab, you will need to install LlamaIndex 🦙 and the Gemini Python SDK."
|
30 |
+
]
|
31 |
+
},
|
32 |
+
{
|
33 |
+
"cell_type": "code",
|
34 |
+
"execution_count": null,
|
35 |
+
"metadata": {
|
36 |
+
"id": "ogUU-nZJwFqK",
|
37 |
+
"outputId": "046a4de5-1058-4054-ab9d-edad146d5238"
|
38 |
+
},
|
39 |
+
"outputs": [
|
40 |
+
{
|
41 |
+
"name": "stdout",
|
42 |
+
"output_type": "stream",
|
43 |
+
"text": [
|
44 |
+
"\n",
|
45 |
+
"\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m A new release of pip is available: \u001b[0m\u001b[31;49m23.3.1\u001b[0m\u001b[39;49m -> \u001b[0m\u001b[32;49m23.3.2\u001b[0m\n",
|
46 |
+
"\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m To update, run: \u001b[0m\u001b[32;49mpip install --upgrade pip\u001b[0m\n"
|
47 |
+
]
|
48 |
+
}
|
49 |
+
],
|
50 |
+
"source": [
|
51 |
+
"!pip install -q llama-index google-generativeai"
|
52 |
+
]
|
53 |
+
},
|
54 |
+
{
|
55 |
+
"cell_type": "markdown",
|
56 |
+
"metadata": {
|
57 |
+
"id": "Gw__DinXwFqN"
|
58 |
+
},
|
59 |
+
"source": [
|
60 |
+
"## Basic Usage\n",
|
61 |
+
"\n",
|
62 |
+
"You will need to get an API key from [Google AI Studio](https://makersuite.google.com/app/apikey). Once you have one, you can either pass it explicity to the model, or use the `GOOGLE_API_KEY` environment variable."
|
63 |
+
]
|
64 |
+
},
|
65 |
+
{
|
66 |
+
"cell_type": "code",
|
67 |
+
"execution_count": null,
|
68 |
+
"metadata": {
|
69 |
+
"id": "UV4GaCEMwFqN",
|
70 |
+
"outputId": "4cb844b8-94d1-41da-8c99-a6bf24074c0d"
|
71 |
+
},
|
72 |
+
"outputs": [
|
73 |
+
{
|
74 |
+
"name": "stdout",
|
75 |
+
"output_type": "stream",
|
76 |
+
"text": [
|
77 |
+
"env: GOOGLE_API_KEY=...\n"
|
78 |
+
]
|
79 |
+
}
|
80 |
+
],
|
81 |
+
"source": [
|
82 |
+
"%env GOOGLE_API_KEY=..."
|
83 |
+
]
|
84 |
+
},
|
85 |
+
{
|
86 |
+
"cell_type": "code",
|
87 |
+
"execution_count": null,
|
88 |
+
"metadata": {
|
89 |
+
"id": "4mmANfa8wFqO"
|
90 |
+
},
|
91 |
+
"outputs": [],
|
92 |
+
"source": [
|
93 |
+
"import os\n",
|
94 |
+
"\n",
|
95 |
+
"GOOGLE_API_KEY = \"\" # add your GOOGLE API key here\n",
|
96 |
+
"os.environ[\"GOOGLE_API_KEY\"] = GOOGLE_API_KEY"
|
97 |
+
]
|
98 |
+
},
|
99 |
+
{
|
100 |
+
"cell_type": "markdown",
|
101 |
+
"metadata": {
|
102 |
+
"id": "DzkDtLuYwFqO"
|
103 |
+
},
|
104 |
+
"source": [
|
105 |
+
"#### Call `complete` with a prompt"
|
106 |
+
]
|
107 |
+
},
|
108 |
+
{
|
109 |
+
"cell_type": "code",
|
110 |
+
"execution_count": null,
|
111 |
+
"metadata": {
|
112 |
+
"id": "rEfnd7eRwFqO",
|
113 |
+
"outputId": "809da18f-643b-4819-d0fb-67ea5ee1cf96"
|
114 |
+
},
|
115 |
+
"outputs": [
|
116 |
+
{
|
117 |
+
"name": "stdout",
|
118 |
+
"output_type": "stream",
|
119 |
+
"text": [
|
120 |
+
"In a world of wonder, where dreams take flight,\n",
|
121 |
+
"There exists a backpack, a magical sight.\n",
|
122 |
+
"Its fabric woven with stardust and grace,\n",
|
123 |
+
"Embroidered with spells, an enchanting embrace.\n",
|
124 |
+
"\n",
|
125 |
+
"With a whisper and a wish, it opens wide,\n",
|
126 |
+
"Revealing treasures that shimmer inside.\n",
|
127 |
+
"Books that whisper secrets, maps that unfold,\n",
|
128 |
+
"A compass that guides, stories yet untold.\n",
|
129 |
+
"\n",
|
130 |
+
"A pencil that writes poems, a paintbrush that sings,\n",
|
131 |
+
"A telescope that captures the stars' gleaming wings.\n",
|
132 |
+
"A magnifying glass, revealing nature's art,\n",
|
133 |
+
"A kaleidoscope, painting rainbows in your heart.\n",
|
134 |
+
"\n",
|
135 |
+
"It holds a mirror that reflects your true worth,\n",
|
136 |
+
"A locket that keeps memories close to your birth.\n",
|
137 |
+
"A journal that captures your hopes and your fears,\n",
|
138 |
+
"A flashlight that banishes shadows and clears.\n",
|
139 |
+
"\n",
|
140 |
+
"With each step you take, the backpack transforms,\n",
|
141 |
+
"Adjusting its weight, adapting to storms.\n",
|
142 |
+
"It grows or shrinks, as your needs may arise,\n",
|
143 |
+
"A faithful companion, beneath sunny skies.\n",
|
144 |
+
"\n",
|
145 |
+
"When you're lost and alone, it whispers your name,\n",
|
146 |
+
"Guiding you back to the path you reclaim.\n",
|
147 |
+
"It carries your burdens, lightens your load,\n",
|
148 |
+
"A magical backpack, a gift bestowed.\n",
|
149 |
+
"\n",
|
150 |
+
"So embrace its magic, let your spirit soar,\n",
|
151 |
+
"With this wondrous backpack, forever explore.\n",
|
152 |
+
"For within its depths, a universe lies,\n",
|
153 |
+
"A treasure trove of dreams, beneath vast skies.\n"
|
154 |
+
]
|
155 |
+
}
|
156 |
+
],
|
157 |
+
"source": [
|
158 |
+
"from llama_index.llms import Gemini\n",
|
159 |
+
"\n",
|
160 |
+
"resp = Gemini().complete(\"Write a poem about a magic backpack\")\n",
|
161 |
+
"print(resp)"
|
162 |
+
]
|
163 |
+
},
|
164 |
+
{
|
165 |
+
"cell_type": "markdown",
|
166 |
+
"metadata": {
|
167 |
+
"id": "FHK_wQLUwFqP"
|
168 |
+
},
|
169 |
+
"source": [
|
170 |
+
"#### Call `chat` with a list of messages"
|
171 |
+
]
|
172 |
+
},
|
173 |
+
{
|
174 |
+
"cell_type": "code",
|
175 |
+
"execution_count": null,
|
176 |
+
"metadata": {
|
177 |
+
"id": "nTPnlI6pwFqP",
|
178 |
+
"outputId": "037824df-a4f9-4db6-fb56-a65e7308a505"
|
179 |
+
},
|
180 |
+
"outputs": [
|
181 |
+
{
|
182 |
+
"name": "stdout",
|
183 |
+
"output_type": "stream",
|
184 |
+
"text": [
|
185 |
+
"[parts {\n",
|
186 |
+
" text: \"Hello friend!\"\n",
|
187 |
+
"}\n",
|
188 |
+
"role: \"user\"\n",
|
189 |
+
", parts {\n",
|
190 |
+
" text: \"Yarr what is shakin\\' matey?\"\n",
|
191 |
+
"}\n",
|
192 |
+
"role: \"model\"\n",
|
193 |
+
", parts {\n",
|
194 |
+
" text: \"Help me decide what to have for dinner.\"\n",
|
195 |
+
"}\n",
|
196 |
+
"role: \"user\"\n",
|
197 |
+
"]\n",
|
198 |
+
"assistant: Ahoy there, matey! Let's set sail on a culinary adventure and find the perfect dinner for ye. Here be some options to consider:\n",
|
199 |
+
"\n",
|
200 |
+
"1. **Fish and Chips:** Embark on a classic voyage with a hearty portion of golden-fried fish, accompanied by crispy chips. Dip 'em in tartar sauce for a taste that'll make ye shiver me timbers!\n",
|
201 |
+
"\n",
|
202 |
+
"2. **Lobster Thermidor:** Indulge in a luxurious feast fit for a pirate king. Tender lobster meat, bathed in a creamy, cheesy sauce, will have ye feeling like royalty.\n",
|
203 |
+
"\n",
|
204 |
+
"3. **Paella:** Set course for the shores of Spain with a vibrant paella. This colorful dish combines rice, seafood, and vegetables in a saffron-infused broth. Ahoy, it's a feast for the eyes and the belly!\n",
|
205 |
+
"\n",
|
206 |
+
"4. **Surf and Turf:** Experience the best of both worlds with a combination of succulent steak and tender lobster. This hearty meal is sure to satisfy even the hungriest of scallywags.\n",
|
207 |
+
"\n",
|
208 |
+
"5. **Crab Cakes:** Dive into a platter of golden-brown crab cakes, bursting with fresh crab meat and flavorful seasonings. Served with a tangy remoulade sauce, these treasures will have ye craving more.\n",
|
209 |
+
"\n",
|
210 |
+
"6. **Oysters Rockefeller:** Embark on a culinary journey to New Orleans with these decadent oysters. Baked with a rich spinach, breadcrumb, and Pernod sauce, they're a taste of the Big Easy that'll leave ye wanting more.\n",
|
211 |
+
"\n",
|
212 |
+
"7. **Clam Chowder:** Warm yer bones with a hearty bowl of clam chowder. This New England classic, made with fresh clams, potatoes, and a creamy broth, is the perfect antidote to a chilly night.\n",
|
213 |
+
"\n",
|
214 |
+
"8. **Lobster Rolls:** Set sail for the coast of Maine and indulge in a classic lobster roll. Fresh lobster meat, dressed in a light mayonnaise-based sauce, is nestled in a toasted bun. It's a taste of the sea that'll have ye hooked!\n",
|
215 |
+
"\n",
|
216 |
+
"9. **Scallops:** Dive into a plate of seared scallops, cooked to perfection and served with a variety of sauces. Whether ye prefer them with a simple lemon butter sauce or a more adventurous mango salsa, these succulent morsels are sure to please.\n",
|
217 |
+
"\n",
|
218 |
+
"10. **Shrimp Scampi:** Embark on a culinary adventure to Italy with this classic dish. Plump shrimp, sautéed in a garlicky white wine sauce, served over pasta. It's a taste of the Mediterranean that'll transport ye to sunnier shores.\n",
|
219 |
+
"\n",
|
220 |
+
"No matter what ye choose, matey, make sure it's a feast worthy of a true pirate. Bon appétit!\n"
|
221 |
+
]
|
222 |
+
}
|
223 |
+
],
|
224 |
+
"source": [
|
225 |
+
"from llama_index.llms import ChatMessage, Gemini\n",
|
226 |
+
"\n",
|
227 |
+
"messages = [\n",
|
228 |
+
" ChatMessage(role=\"user\", content=\"Hello friend!\"),\n",
|
229 |
+
" ChatMessage(role=\"assistant\", content=\"Yarr what is shakin' matey?\"),\n",
|
230 |
+
" ChatMessage(\n",
|
231 |
+
" role=\"user\", content=\"Help me decide what to have for dinner.\"\n",
|
232 |
+
" ),\n",
|
233 |
+
"]\n",
|
234 |
+
"resp = Gemini().chat(messages)\n",
|
235 |
+
"print(resp)"
|
236 |
+
]
|
237 |
+
},
|
238 |
+
{
|
239 |
+
"cell_type": "markdown",
|
240 |
+
"metadata": {
|
241 |
+
"id": "Kwi2CW9AwFqQ"
|
242 |
+
},
|
243 |
+
"source": [
|
244 |
+
"## Streaming"
|
245 |
+
]
|
246 |
+
},
|
247 |
+
{
|
248 |
+
"cell_type": "markdown",
|
249 |
+
"metadata": {
|
250 |
+
"id": "a_VEZxGbwFqQ"
|
251 |
+
},
|
252 |
+
"source": [
|
253 |
+
"Using `stream_complete` endpoint"
|
254 |
+
]
|
255 |
+
},
|
256 |
+
{
|
257 |
+
"cell_type": "code",
|
258 |
+
"execution_count": null,
|
259 |
+
"metadata": {
|
260 |
+
"id": "4XvkfXcUwFqR"
|
261 |
+
},
|
262 |
+
"outputs": [],
|
263 |
+
"source": [
|
264 |
+
"from llama_index.llms import Gemini\n",
|
265 |
+
"\n",
|
266 |
+
"llm = Gemini()\n",
|
267 |
+
"resp = llm.stream_complete(\n",
|
268 |
+
" \"The story of Sourcrust, the bread creature, is really interesting. It all started when...\"\n",
|
269 |
+
")"
|
270 |
+
]
|
271 |
+
},
|
272 |
+
{
|
273 |
+
"cell_type": "code",
|
274 |
+
"execution_count": null,
|
275 |
+
"metadata": {
|
276 |
+
"id": "bqAF5FQ2wFqR",
|
277 |
+
"outputId": "3457d853-5a3e-4c4d-886d-cf7eb0f4141b"
|
278 |
+
},
|
279 |
+
"outputs": [
|
280 |
+
{
|
281 |
+
"name": "stdout",
|
282 |
+
"output_type": "stream",
|
283 |
+
"text": [
|
284 |
+
"In the heart of a bustling bakery, where the aroma of freshly baked bread filled the air, there lived a peculiar creature named Sourcrust. Sourcrust wasn't like any ordinary loaf of bread; he possessed a unique consciousness and a mischievous personality.\n",
|
285 |
+
"\n",
|
286 |
+
"It all began when a young baker named Eliza was experimenting with a new sourdough recipe. As she mixed the flour, water, and yeast, she accidentally added a dash of enchanted baking powder. Little did she know that this seemingly insignificant mistake would give birth to a sentient bread creature.\n",
|
287 |
+
"\n",
|
288 |
+
"As the dough rose and fermented, Sourcrust came to life. He stretched and yawned, his crusty exterior crackling with energy. Eliza was astounded to see her creation moving and speaking. Sourcrust introduced himself with a warm smile and a hearty laugh, his voice resembling the gentle rustling of bread crumbs.\n",
|
289 |
+
"\n",
|
290 |
+
"Eliza and Sourcrust quickly formed a bond. She taught him how to read and write, and he shared his knowledge of bread-making techniques. Together, they created delicious pastries and loaves that delighted the customers of the bakery.\n",
|
291 |
+
"\n",
|
292 |
+
"However, Sourcrust's existence was not without its challenges. As a bread creature, he was vulnerable to the elements. He couldn't stay out in the rain or direct sunlight for too long, and he had to be careful not to get burned or squished.\n",
|
293 |
+
"\n",
|
294 |
+
"Despite these limitations, Sourcrust embraced his unique nature. He found joy in the simple things, like basking in the warmth of the oven or playing hide-and-seek among the flour sacks. He also developed a taste for adventure, often sneaking out of the bakery at night to explore the town.\n",
|
295 |
+
"\n",
|
296 |
+
"One day, Sourcrust's curiosity led him to the local library, where he discovered a book about magical creatures. He was fascinated by the stories of fairies, elves, and dragons, and he longed to meet one himself.\n",
|
297 |
+
"\n",
|
298 |
+
"As fate would have it, Sourcrust's wish came true when he encountered a mischievous brownie named Crumbly in the forest. Crumbly was initially wary of Sourcrust, but after learning about his kind nature, he agreed to be his friend.\n",
|
299 |
+
"\n",
|
300 |
+
"Together, Sourcrust and Crumbly embarked on many thrilling adventures. They battled evil witches, rescued lost children, and even had a tea party with a talking teapot. Their escapades brought joy and laughter to all who crossed their path.\n",
|
301 |
+
"\n",
|
302 |
+
"As the years passed, Sourcrust became a beloved figure in the town. People would often visit the bakery just to catch a glimpse of the talking bread creature. Eliza was proud of her creation, and she knew that Sourcrust's magic would continue to inspire and entertain generations to come."
|
303 |
+
]
|
304 |
+
}
|
305 |
+
],
|
306 |
+
"source": [
|
307 |
+
"for r in resp:\n",
|
308 |
+
" print(r.text, end=\"\")"
|
309 |
+
]
|
310 |
+
},
|
311 |
+
{
|
312 |
+
"cell_type": "markdown",
|
313 |
+
"metadata": {
|
314 |
+
"id": "Kqb-da61wFqR"
|
315 |
+
},
|
316 |
+
"source": [
|
317 |
+
"Using `stream_chat` endpoint"
|
318 |
+
]
|
319 |
+
},
|
320 |
+
{
|
321 |
+
"cell_type": "code",
|
322 |
+
"execution_count": null,
|
323 |
+
"metadata": {
|
324 |
+
"id": "fIZoA_fAwFqR",
|
325 |
+
"outputId": "013044c4-2aa0-48fb-b0f1-a3d5e75758b5"
|
326 |
+
},
|
327 |
+
"outputs": [
|
328 |
+
{
|
329 |
+
"name": "stdout",
|
330 |
+
"output_type": "stream",
|
331 |
+
"text": [
|
332 |
+
"[parts {\n",
|
333 |
+
" text: \"Hello friend!\"\n",
|
334 |
+
"}\n",
|
335 |
+
"role: \"user\"\n",
|
336 |
+
", parts {\n",
|
337 |
+
" text: \"Yarr what is shakin\\' matey?\"\n",
|
338 |
+
"}\n",
|
339 |
+
"role: \"model\"\n",
|
340 |
+
", parts {\n",
|
341 |
+
" text: \"Help me decide what to have for dinner.\"\n",
|
342 |
+
"}\n",
|
343 |
+
"role: \"user\"\n",
|
344 |
+
"]\n"
|
345 |
+
]
|
346 |
+
}
|
347 |
+
],
|
348 |
+
"source": [
|
349 |
+
"from llama_index.llms import Gemini, ChatMessage\n",
|
350 |
+
"\n",
|
351 |
+
"llm = Gemini()\n",
|
352 |
+
"messages = [\n",
|
353 |
+
" ChatMessage(role=\"user\", content=\"Hello friend!\"),\n",
|
354 |
+
" ChatMessage(role=\"assistant\", content=\"Yarr what is shakin' matey?\"),\n",
|
355 |
+
" ChatMessage(\n",
|
356 |
+
" role=\"user\", content=\"Help me decide what to have for dinner.\"\n",
|
357 |
+
" ),\n",
|
358 |
+
"]\n",
|
359 |
+
"resp = llm.stream_chat(messages)"
|
360 |
+
]
|
361 |
+
},
|
362 |
+
{
|
363 |
+
"cell_type": "code",
|
364 |
+
"execution_count": null,
|
365 |
+
"metadata": {
|
366 |
+
"id": "HvogjPaVwFqS",
|
367 |
+
"outputId": "4788032b-86c2-4bec-eb94-9750d542f063"
|
368 |
+
},
|
369 |
+
"outputs": [
|
370 |
+
{
|
371 |
+
"name": "stdout",
|
372 |
+
"output_type": "stream",
|
373 |
+
"text": [
|
374 |
+
"Ahoy there, matey! Let's set sail on a culinary adventure and find the perfect dinner for ye. Here be some options to consider:\n",
|
375 |
+
"\n",
|
376 |
+
"1. **Fish and Chips:** Embark on a classic journey with a hearty portion of golden-fried fish, accompanied by crispy chips. Dip 'em in tartar sauce and let the flavors dance on yer tongue.\n",
|
377 |
+
"\n",
|
378 |
+
"2. **Seafood Paella:** Dive into a vibrant Spanish feast with paella, a delightful mix of rice, seafood treasures like shrimp, mussels, and calamari, all simmering in a flavorful broth.\n",
|
379 |
+
"\n",
|
380 |
+
"3. **Lobster Roll:** Indulge in a New England delicacy - a succulent lobster roll, where tender lobster meat is nestled in a toasted bun, dressed with butter and a hint of lemon.\n",
|
381 |
+
"\n",
|
382 |
+
"4. **Grilled Swordfish:** Set your course for a healthy and delicious meal with grilled swordfish. This firm-fleshed fish, seasoned to perfection, will tantalize yer taste buds with its smoky, savory goodness.\n",
|
383 |
+
"\n",
|
384 |
+
"5. **Crab Cakes:** Embark on a Maryland adventure with crab cakes, a delectable blend of fresh crab meat, breadcrumbs, and seasonings, pan-fried until golden brown. Serve 'em with a tangy remoulade sauce for an extra kick.\n",
|
385 |
+
"\n",
|
386 |
+
"6. **Shrimp Scampi:** Set sail for Italy with shrimp scampi, a delightful dish featuring succulent shrimp sautéed in a luscious garlic-butter sauce, served over pasta or crusty bread.\n",
|
387 |
+
"\n",
|
388 |
+
"7. **Clam Chowder:** Dive into a comforting bowl of clam chowder, a New England classic. This creamy soup, brimming with clams, potatoes, and vegetables, will warm yer soul on a chilly night.\n",
|
389 |
+
"\n",
|
390 |
+
"8. **Oysters Rockefeller:** Indulge in a luxurious treat with oysters Rockefeller, where fresh oysters are baked with a rich, creamy spinach and herb filling, topped with a golden breadcrumb crust.\n",
|
391 |
+
"\n",
|
392 |
+
"9. **Lobster Thermidor:** Embark on a culinary voyage to France with lobster thermidor, a decadent dish where succulent lobster is bathed in a creamy, flavorful sauce, then baked to perfection.\n",
|
393 |
+
"\n",
|
394 |
+
"10. **Scallops with Risotto:** Set your course for a sophisticated meal with scallops and risotto. Tender scallops, seared to perfection, are paired with a creamy, flavorful risotto, creating a harmonious balance of flavors.\n",
|
395 |
+
"\n",
|
396 |
+
"No matter what ye choose, matey, make sure it be a feast fit for a pirate king!"
|
397 |
+
]
|
398 |
+
}
|
399 |
+
],
|
400 |
+
"source": [
|
401 |
+
"for r in resp:\n",
|
402 |
+
" print(r.delta, end=\"\")"
|
403 |
+
]
|
404 |
+
},
|
405 |
+
{
|
406 |
+
"cell_type": "markdown",
|
407 |
+
"metadata": {
|
408 |
+
"id": "wjpgfAAmwFqS"
|
409 |
+
},
|
410 |
+
"source": [
|
411 |
+
"## Using other models\n",
|
412 |
+
"\n",
|
413 |
+
"The [Gemini model site](https://ai.google.dev/models) lists the models that are currently available, along with their capabilities. You can also use the API to find suitable models."
|
414 |
+
]
|
415 |
+
},
|
416 |
+
{
|
417 |
+
"cell_type": "code",
|
418 |
+
"execution_count": null,
|
419 |
+
"metadata": {
|
420 |
+
"id": "bdehTUiuwFqT",
|
421 |
+
"outputId": "f4cdfc01-448d-4f78-9fd4-6efc99d4a78d"
|
422 |
+
},
|
423 |
+
"outputs": [
|
424 |
+
{
|
425 |
+
"name": "stdout",
|
426 |
+
"output_type": "stream",
|
427 |
+
"text": [
|
428 |
+
"models/gemini-pro\n",
|
429 |
+
"models/gemini-pro-vision\n",
|
430 |
+
"models/gemini-ultra\n"
|
431 |
+
]
|
432 |
+
}
|
433 |
+
],
|
434 |
+
"source": [
|
435 |
+
"import google.generativeai as genai\n",
|
436 |
+
"\n",
|
437 |
+
"for m in genai.list_models():\n",
|
438 |
+
" if \"generateContent\" in m.supported_generation_methods:\n",
|
439 |
+
" print(m.name)"
|
440 |
+
]
|
441 |
+
},
|
442 |
+
{
|
443 |
+
"cell_type": "code",
|
444 |
+
"execution_count": null,
|
445 |
+
"metadata": {
|
446 |
+
"id": "zW1CBIJ0wFqT"
|
447 |
+
},
|
448 |
+
"outputs": [],
|
449 |
+
"source": [
|
450 |
+
"from llama_index.llms import Gemini\n",
|
451 |
+
"\n",
|
452 |
+
"llm = Gemini(model=\"models/gemini-pro\")"
|
453 |
+
]
|
454 |
+
},
|
455 |
+
{
|
456 |
+
"cell_type": "code",
|
457 |
+
"execution_count": null,
|
458 |
+
"metadata": {
|
459 |
+
"id": "qsDsO_wIwFqT",
|
460 |
+
"outputId": "523cf0da-aa09-4eed-e874-dee8e2d62ae6"
|
461 |
+
},
|
462 |
+
"outputs": [
|
463 |
+
{
|
464 |
+
"name": "stdout",
|
465 |
+
"output_type": "stream",
|
466 |
+
"text": [
|
467 |
+
"In the realm of knowledge, where wisdom resides,\n",
|
468 |
+
"A beacon of brilliance, LlamaIndex abides.\n",
|
469 |
+
"With a click and a search, a world unfolds,\n",
|
470 |
+
"A tapestry of information, stories untold.\n",
|
471 |
+
"\n",
|
472 |
+
"From the depths of the web, it gathers and gleans,\n",
|
473 |
+
"A treasure trove of facts, a vast, vibrant scene.\n",
|
474 |
+
"Like a llama in the Andes, graceful and grand,\n",
|
475 |
+
"LlamaIndex roams the digital land.\n",
|
476 |
+
"\n",
|
477 |
+
"Its interface, a symphony of simplicity and grace,\n",
|
478 |
+
"Invites the curious to explore this boundless space.\n",
|
479 |
+
"With lightning-fast speed, it delivers the truth,\n",
|
480 |
+
"A testament to its power, its unwavering ruth.\n",
|
481 |
+
"\n",
|
482 |
+
"So let us rejoice, in this digital age,\n",
|
483 |
+
"For LlamaIndex stands, a beacon, a sage.\n",
|
484 |
+
"May its wisdom forever guide our way,\n",
|
485 |
+
"As we navigate the vastness of the digital fray.\n"
|
486 |
+
]
|
487 |
+
}
|
488 |
+
],
|
489 |
+
"source": [
|
490 |
+
"resp = llm.complete(\"Write a short, but joyous, ode to LlamaIndex\")\n",
|
491 |
+
"print(resp)"
|
492 |
+
]
|
493 |
+
},
|
494 |
+
{
|
495 |
+
"cell_type": "markdown",
|
496 |
+
"metadata": {
|
497 |
+
"id": "tsxUYiaewFqU"
|
498 |
+
},
|
499 |
+
"source": [
|
500 |
+
"## Asynchronous API"
|
501 |
+
]
|
502 |
+
},
|
503 |
+
{
|
504 |
+
"cell_type": "code",
|
505 |
+
"execution_count": null,
|
506 |
+
"metadata": {
|
507 |
+
"id": "5JEoiigkwFqU"
|
508 |
+
},
|
509 |
+
"outputs": [],
|
510 |
+
"source": [
|
511 |
+
"from llama_index.llms import Gemini\n",
|
512 |
+
"\n",
|
513 |
+
"llm = Gemini()"
|
514 |
+
]
|
515 |
+
},
|
516 |
+
{
|
517 |
+
"cell_type": "code",
|
518 |
+
"execution_count": null,
|
519 |
+
"metadata": {
|
520 |
+
"id": "8PznUhp5wFqV",
|
521 |
+
"outputId": "369c20f9-28f5-4fdf-cb98-c3daae298b45"
|
522 |
+
},
|
523 |
+
"outputs": [
|
524 |
+
{
|
525 |
+
"name": "stdout",
|
526 |
+
"output_type": "stream",
|
527 |
+
"text": [
|
528 |
+
"1. **Wool**: Llamas are known for their soft, luxurious wool, which is highly prized for its warmth, durability, and water-resistant properties. Llama wool is hypoallergenic, making it suitable for individuals with sensitive skin.\n",
|
529 |
+
"\n",
|
530 |
+
"2. **Pack Animals**: Llamas have been traditionally used as pack animals in the Andes Mountains of South America. They are well-suited for carrying heavy loads over long distances due to their strength, endurance, and ability to navigate challenging terrain.\n",
|
531 |
+
"\n",
|
532 |
+
"3. **Adaptability**: Llamas are highly adaptable animals that can thrive in various environments, from the high altitudes of the Andes to the deserts of North America. They are known for their ability to withstand extreme temperatures and harsh conditions.\n",
|
533 |
+
"\n",
|
534 |
+
"4. **Intelligence**: Llamas are intelligent animals that are easy to train and handle. They are known for their calm and gentle nature, making them suitable for various purposes, including trekking, therapy, and companionship.\n",
|
535 |
+
"\n",
|
536 |
+
"5. **Social Animals**: Llamas are social animals that live in herds. They have a strong sense of community and rely on each other for protection and companionship. Llamas communicate through a variety of vocalizations and body language.\n",
|
537 |
+
"\n",
|
538 |
+
"6. **Longevity**: Llamas have a relatively long lifespan, with an average life expectancy of 15-20 years. They are known for their hardiness and resilience, making them suitable for long-term companionship and working relationships.\n",
|
539 |
+
"\n",
|
540 |
+
"7. **Unique Appearance**: Llamas are known for their distinctive appearance, characterized by their long necks, large eyes, and fluffy ears. Their appearance has made them popular in zoos, farms, and as exotic pets.\n",
|
541 |
+
"\n",
|
542 |
+
"8. **Cultural Significance**: Llamas hold cultural significance in the Andean region, where they have been revered for centuries. They are often associated with strength, endurance, and good fortune. Llamas are featured in traditional Andean art, folklore, and religious ceremonies.\n"
|
543 |
+
]
|
544 |
+
}
|
545 |
+
],
|
546 |
+
"source": [
|
547 |
+
"resp = await llm.acomplete(\"Llamas are famous for \")\n",
|
548 |
+
"print(resp)"
|
549 |
+
]
|
550 |
+
},
|
551 |
+
{
|
552 |
+
"cell_type": "code",
|
553 |
+
"execution_count": null,
|
554 |
+
"metadata": {
|
555 |
+
"id": "Lt-wDTtZwFqV",
|
556 |
+
"outputId": "b5aec06c-5321-42ba-d862-eab21a1ea065"
|
557 |
+
},
|
558 |
+
"outputs": [
|
559 |
+
{
|
560 |
+
"name": "stdout",
|
561 |
+
"output_type": "stream",
|
562 |
+
"text": [
|
563 |
+
"1. **Wool Production:** Llamas are renowned for their luxurious and soft wool, which is highly prized for its warmth, durability, and hypoallergenic properties. Their wool comes in a variety of natural colors, including white, brown, black, and gray, making it a versatile material for textiles and clothing.\n",
|
564 |
+
"\n",
|
565 |
+
"2. **Pack Animals:** Llamas have been traditionally used as pack animals in the Andes Mountains of South America for centuries. They are well-suited for this role due to their strength, endurance, and ability to navigate difficult terrain. Llamas can carry up to 25% of their body weight, making them valuable for transporting goods and supplies in mountainous regions.\n",
|
566 |
+
"\n",
|
567 |
+
"3. **Meat and Milk:** Llama meat is a lean and nutritious source of protein, with a flavor similar to venison. It is consumed in many Andean countries and is becoming increasingly popular in other parts of the world. Llamas also produce milk, which is rich in protein and fat and can be used to make cheese, yogurt, and other dairy products.\n",
|
568 |
+
"\n",
|
569 |
+
"4. **Companionship:** Llamas are intelligent and social animals that can form strong bonds with humans. They are often kept as companion animals due to their gentle nature, curious personalities, and ability to learn tricks. Llamas can provide companionship and entertainment, and they can also be trained to perform various tasks, such as pulling carts or carrying packs.\n",
|
570 |
+
"\n",
|
571 |
+
"5. **Cultural Significance:** Llamas hold a special place in the cultures of the Andean region. They are considered sacred animals in many indigenous communities and are often featured in traditional ceremonies and festivals. Llamas are also depicted in art, textiles, and other cultural expressions, symbolizing strength, endurance, and connection to the land."
|
572 |
+
]
|
573 |
+
}
|
574 |
+
],
|
575 |
+
"source": [
|
576 |
+
"resp = await llm.astream_complete(\"Llamas are famous for \")\n",
|
577 |
+
"async for chunk in resp:\n",
|
578 |
+
" print(chunk.text, end=\"\")"
|
579 |
+
]
|
580 |
+
}
|
581 |
+
],
|
582 |
+
"metadata": {
|
583 |
+
"colab": {
|
584 |
+
"name": "gemini.ipynb",
|
585 |
+
"provenance": []
|
586 |
+
},
|
587 |
+
"kernelspec": {
|
588 |
+
"display_name": "Python 3",
|
589 |
+
"name": "python3"
|
590 |
+
},
|
591 |
+
"language_info": {
|
592 |
+
"codemirror_mode": {
|
593 |
+
"name": "ipython",
|
594 |
+
"version": 3
|
595 |
+
},
|
596 |
+
"file_extension": ".py",
|
597 |
+
"mimetype": "text/x-python",
|
598 |
+
"name": "python",
|
599 |
+
"nbconvert_exporter": "python",
|
600 |
+
"pygments_lexer": "ipython3"
|
601 |
+
}
|
602 |
+
},
|
603 |
+
"nbformat": 4,
|
604 |
+
"nbformat_minor": 0
|
605 |
+
}
|
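The async cells above cover `acomplete` and `astream_complete`; the chat endpoints have async counterparts as well. A minimal sketch, assuming the same `llama_index.llms.Gemini` wrapper and a configured GOOGLE_API_KEY (inside a notebook the `await`s can run directly; a script needs the asyncio wrapper shown here):

import asyncio

from llama_index.llms import ChatMessage, Gemini

async def main():
    llm = Gemini()
    messages = [ChatMessage(role="user", content="Suggest one dinner idea, matey.")]

    # achat awaits the complete reply in one piece
    resp = await llm.achat(messages)
    print(resp.message.content)

    # astream_chat yields incremental deltas, mirroring stream_chat above
    stream = await llm.astream_chat(messages)
    async for chunk in stream:
        print(chunk.delta, end="")

asyncio.run(main())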
notebooks/gemini-pro.ipynb
ADDED
@@ -0,0 +1,280 @@
1 |
+
{
|
2 |
+
"cells": [
|
3 |
+
{
|
4 |
+
"cell_type": "code",
|
5 |
+
"execution_count": 1,
|
6 |
+
"metadata": {},
|
7 |
+
"outputs": [],
|
8 |
+
"source": [
|
9 |
+
"!pip install -q -U google-generativeai"
|
10 |
+
]
|
11 |
+
},
|
12 |
+
{
|
13 |
+
"cell_type": "code",
|
14 |
+
"execution_count": 2,
|
15 |
+
"metadata": {},
|
16 |
+
"outputs": [
|
17 |
+
{
|
18 |
+
"data": {
|
19 |
+
"text/plain": [
|
20 |
+
"True"
|
21 |
+
]
|
22 |
+
},
|
23 |
+
"execution_count": 2,
|
24 |
+
"metadata": {},
|
25 |
+
"output_type": "execute_result"
|
26 |
+
}
|
27 |
+
],
|
28 |
+
"source": [
|
29 |
+
"from dotenv import load_dotenv\n",
|
30 |
+
"import os\n",
|
31 |
+
"load_dotenv()"
|
32 |
+
]
|
33 |
+
},
|
34 |
+
{
|
35 |
+
"cell_type": "code",
|
36 |
+
"execution_count": 4,
|
37 |
+
"metadata": {},
|
38 |
+
"outputs": [
|
39 |
+
{
|
40 |
+
"name": "stderr",
|
41 |
+
"output_type": "stream",
|
42 |
+
"text": [
|
43 |
+
"/home/vasim/.local/lib/python3.10/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
|
44 |
+
" from .autonotebook import tqdm as notebook_tqdm\n"
|
45 |
+
]
|
46 |
+
}
|
47 |
+
],
|
48 |
+
"source": [
|
49 |
+
"import pathlib\n",
|
50 |
+
"import textwrap\n",
|
51 |
+
"\n",
|
52 |
+
"import google.generativeai as genai\n",
|
53 |
+
"\n",
|
54 |
+
"from IPython.display import display\n",
|
55 |
+
"from IPython.display import Markdown\n",
|
56 |
+
"\n",
|
57 |
+
"\n",
|
58 |
+
"def to_markdown(text):\n",
|
59 |
+
" text = text.replace('•', ' *')\n",
|
60 |
+
" return Markdown(textwrap.indent(text, '> ', predicate=lambda _: True))"
|
61 |
+
]
|
62 |
+
},
|
63 |
+
{
|
64 |
+
"cell_type": "code",
|
65 |
+
"execution_count": 5,
|
66 |
+
"metadata": {},
|
67 |
+
"outputs": [],
|
68 |
+
"source": [
|
69 |
+
"genai.configure(api_key=os.getenv(\"GOOGLE_API_KEY\"))"
|
70 |
+
]
|
71 |
+
},
|
72 |
+
{
|
73 |
+
"cell_type": "code",
|
74 |
+
"execution_count": 6,
|
75 |
+
"metadata": {},
|
76 |
+
"outputs": [
|
77 |
+
{
|
78 |
+
"name": "stdout",
|
79 |
+
"output_type": "stream",
|
80 |
+
"text": [
|
81 |
+
"models/gemini-pro\n",
|
82 |
+
"models/gemini-pro-vision\n"
|
83 |
+
]
|
84 |
+
}
|
85 |
+
],
|
86 |
+
"source": [
|
87 |
+
"for m in genai.list_models():\n",
|
88 |
+
" if 'generateContent' in m.supported_generation_methods:\n",
|
89 |
+
" print(m.name)"
|
90 |
+
]
|
91 |
+
},
|
92 |
+
{
|
93 |
+
"cell_type": "code",
|
94 |
+
"execution_count": 7,
|
95 |
+
"metadata": {},
|
96 |
+
"outputs": [],
|
97 |
+
"source": [
|
98 |
+
"model = genai.GenerativeModel('gemini-pro')"
|
99 |
+
]
|
100 |
+
},
|
101 |
+
{
|
102 |
+
"cell_type": "code",
|
103 |
+
"execution_count": 8,
|
104 |
+
"metadata": {},
|
105 |
+
"outputs": [
|
106 |
+
{
|
107 |
+
"name": "stdout",
|
108 |
+
"output_type": "stream",
|
109 |
+
"text": [
|
110 |
+
"CPU times: user 44.3 ms, sys: 615 µs, total: 44.9 ms\n",
|
111 |
+
"Wall time: 9.75 s\n"
|
112 |
+
]
|
113 |
+
}
|
114 |
+
],
|
115 |
+
"source": [
|
116 |
+
"%%time\n",
|
117 |
+
"response = model.generate_content(\"What is the meaning of life?\")"
|
118 |
+
]
|
119 |
+
},
|
120 |
+
{
|
121 |
+
"cell_type": "code",
|
122 |
+
"execution_count": 9,
|
123 |
+
"metadata": {},
|
124 |
+
"outputs": [
|
125 |
+
{
|
126 |
+
"data": {
|
127 |
+
"text/markdown": [
|
128 |
+
"> The meaning of life is a philosophical question that has been pondered by people for thousands of years. There is no one answer that is universally agreed upon, as the meaning of life is subjective and varies from person to person. However, some common themes that emerge when people discuss the meaning of life include:\n",
|
129 |
+
"> \n",
|
130 |
+
"> * **Happiness and fulfillment:** Many people believe that the meaning of life is to be happy and fulfilled. This can involve pursuing activities that bring you joy, such as spending time with loved ones, pursuing your hobbies, or making a difference in the world.\n",
|
131 |
+
"> * **Purpose and meaning:** Some people believe that the meaning of life is to find a purpose or calling that gives their life meaning. This could involve pursuing a career that you are passionate about, volunteering your time to help others, or raising a family.\n",
|
132 |
+
"> * **Connection and belonging:** Many people believe that the meaning of life is to connect with others and feel a sense of belonging. This can involve spending time with friends and family, participating in community activities, or contributing to society in a meaningful way.\n",
|
133 |
+
"> * **Growth and learning:** Some people believe that the meaning of life is to grow and learn as a person. This can involve challenging yourself to try new things, stepping outside of your comfort zone, and expanding your knowledge and understanding of the world.\n",
|
134 |
+
"> \n",
|
135 |
+
"> Ultimately, the meaning of life is something that each person must decide for themselves. There is no right or wrong answer, and the meaning of life can change over time as you grow and learn. The important thing is to find a meaning that resonates with you and that gives your life purpose and direction.\n",
|
136 |
+
"> \n",
|
137 |
+
"> Here are some additional thoughts on the meaning of life from a variety of sources:\n",
|
138 |
+
"> \n",
|
139 |
+
"> * \"The meaning of life is to give life meaning.\" - Viktor Frankl\n",
|
140 |
+
"> * \"The purpose of life is not to be happy. It is to be useful, to be honorable, to be compassionate, to have it make some difference that you have lived and lived well.\" - Ralph Waldo Emerson\n",
|
141 |
+
"> * \"When you discover your purpose, you will feel driven to pursue it. Your whole life will change as you make that pursuit your top priority.\" - John Wooden\n",
|
142 |
+
"> * \"The meaning of life is whatever you assign to it, and the value of life is whatever you choose to make it.\" - John C. Maxwell\n",
|
143 |
+
"> * \"Life is not a problem to be solved, but a gift to be enjoyed.\" - Dalai Lama"
|
144 |
+
],
|
145 |
+
"text/plain": [
|
146 |
+
"<IPython.core.display.Markdown object>"
|
147 |
+
]
|
148 |
+
},
|
149 |
+
"execution_count": 9,
|
150 |
+
"metadata": {},
|
151 |
+
"output_type": "execute_result"
|
152 |
+
}
|
153 |
+
],
|
154 |
+
"source": [
|
155 |
+
"to_markdown(response.text)"
|
156 |
+
]
|
157 |
+
},
|
158 |
+
{
|
159 |
+
"cell_type": "code",
|
160 |
+
"execution_count": 10,
|
161 |
+
"metadata": {},
|
162 |
+
"outputs": [
|
163 |
+
{
|
164 |
+
"data": {
|
165 |
+
"text/plain": [
|
166 |
+
"safety_ratings {\n",
|
167 |
+
" category: HARM_CATEGORY_SEXUALLY_EXPLICIT\n",
|
168 |
+
" probability: NEGLIGIBLE\n",
|
169 |
+
"}\n",
|
170 |
+
"safety_ratings {\n",
|
171 |
+
" category: HARM_CATEGORY_HATE_SPEECH\n",
|
172 |
+
" probability: NEGLIGIBLE\n",
|
173 |
+
"}\n",
|
174 |
+
"safety_ratings {\n",
|
175 |
+
" category: HARM_CATEGORY_HARASSMENT\n",
|
176 |
+
" probability: NEGLIGIBLE\n",
|
177 |
+
"}\n",
|
178 |
+
"safety_ratings {\n",
|
179 |
+
" category: HARM_CATEGORY_DANGEROUS_CONTENT\n",
|
180 |
+
" probability: NEGLIGIBLE\n",
|
181 |
+
"}"
|
182 |
+
]
|
183 |
+
},
|
184 |
+
"execution_count": 10,
|
185 |
+
"metadata": {},
|
186 |
+
"output_type": "execute_result"
|
187 |
+
}
|
188 |
+
],
|
189 |
+
"source": [
|
190 |
+
"response.prompt_feedback"
|
191 |
+
]
|
192 |
+
},
|
193 |
+
{
|
194 |
+
"cell_type": "code",
|
195 |
+
"execution_count": 11,
|
196 |
+
"metadata": {},
|
197 |
+
"outputs": [
|
198 |
+
{
|
199 |
+
"data": {
|
200 |
+
"text/plain": [
|
201 |
+
"[index: 0\n",
|
202 |
+
"content {\n",
|
203 |
+
" parts {\n",
|
204 |
+
" text: \"The meaning of life is a philosophical question that has been pondered by people for thousands of years. There is no one answer that is universally agreed upon, as the meaning of life is subjective and varies from person to person. However, some common themes that emerge when people discuss the meaning of life include:\\n\\n* **Happiness and fulfillment:** Many people believe that the meaning of life is to be happy and fulfilled. This can involve pursuing activities that bring you joy, such as spending time with loved ones, pursuing your hobbies, or making a difference in the world.\\n* **Purpose and meaning:** Some people believe that the meaning of life is to find a purpose or calling that gives their life meaning. This could involve pursuing a career that you are passionate about, volunteering your time to help others, or raising a family.\\n* **Connection and belonging:** Many people believe that the meaning of life is to connect with others and feel a sense of belonging. This can involve spending time with friends and family, participating in community activities, or contributing to society in a meaningful way.\\n* **Growth and learning:** Some people believe that the meaning of life is to grow and learn as a person. This can involve challenging yourself to try new things, stepping outside of your comfort zone, and expanding your knowledge and understanding of the world.\\n\\nUltimately, the meaning of life is something that each person must decide for themselves. There is no right or wrong answer, and the meaning of life can change over time as you grow and learn. The important thing is to find a meaning that resonates with you and that gives your life purpose and direction.\\n\\nHere are some additional thoughts on the meaning of life from a variety of sources:\\n\\n* \\\"The meaning of life is to give life meaning.\\\" - Viktor Frankl\\n* \\\"The purpose of life is not to be happy. It is to be useful, to be honorable, to be compassionate, to have it make some difference that you have lived and lived well.\\\" - Ralph Waldo Emerson\\n* \\\"When you discover your purpose, you will feel driven to pursue it. Your whole life will change as you make that pursuit your top priority.\\\" - John Wooden\\n* \\\"The meaning of life is whatever you assign to it, and the value of life is whatever you choose to make it.\\\" - John C. Maxwell\\n* \\\"Life is not a problem to be solved, but a gift to be enjoyed.\\\" - Dalai Lama\"\n",
|
205 |
+
" }\n",
|
206 |
+
" role: \"model\"\n",
|
207 |
+
"}\n",
|
208 |
+
"finish_reason: STOP\n",
|
209 |
+
"safety_ratings {\n",
|
210 |
+
" category: HARM_CATEGORY_SEXUALLY_EXPLICIT\n",
|
211 |
+
" probability: NEGLIGIBLE\n",
|
212 |
+
"}\n",
|
213 |
+
"safety_ratings {\n",
|
214 |
+
" category: HARM_CATEGORY_HATE_SPEECH\n",
|
215 |
+
" probability: NEGLIGIBLE\n",
|
216 |
+
"}\n",
|
217 |
+
"safety_ratings {\n",
|
218 |
+
" category: HARM_CATEGORY_HARASSMENT\n",
|
219 |
+
" probability: NEGLIGIBLE\n",
|
220 |
+
"}\n",
|
221 |
+
"safety_ratings {\n",
|
222 |
+
" category: HARM_CATEGORY_DANGEROUS_CONTENT\n",
|
223 |
+
" probability: NEGLIGIBLE\n",
|
224 |
+
"}\n",
|
225 |
+
"citation_metadata {\n",
|
226 |
+
" citation_sources {\n",
|
227 |
+
" start_index: 1850\n",
|
228 |
+
" end_index: 2013\n",
|
229 |
+
" uri: \"https://labor.alaska.gov/dvr/newsletter/dvr-newsletter-2020-03-director.htm\"\n",
|
230 |
+
" license_: \"\"\n",
|
231 |
+
" }\n",
|
232 |
+
" citation_sources {\n",
|
233 |
+
" start_index: 1916\n",
|
234 |
+
" end_index: 2045\n",
|
235 |
+
" uri: \"http://www.marksoffroad.net/Quotes.html\"\n",
|
236 |
+
" license_: \"\"\n",
|
237 |
+
" }\n",
|
238 |
+
"}\n",
|
239 |
+
"]"
|
240 |
+
]
|
241 |
+
},
|
242 |
+
"execution_count": 11,
|
243 |
+
"metadata": {},
|
244 |
+
"output_type": "execute_result"
|
245 |
+
}
|
246 |
+
],
|
247 |
+
"source": [
|
248 |
+
"response.candidates"
|
249 |
+
]
|
250 |
+
},
|
251 |
+
{
|
252 |
+
"cell_type": "code",
|
253 |
+
"execution_count": null,
|
254 |
+
"metadata": {},
|
255 |
+
"outputs": [],
|
256 |
+
"source": []
|
257 |
+
}
|
258 |
+
],
|
259 |
+
"metadata": {
|
260 |
+
"kernelspec": {
|
261 |
+
"display_name": "Python 3",
|
262 |
+
"language": "python",
|
263 |
+
"name": "python3"
|
264 |
+
},
|
265 |
+
"language_info": {
|
266 |
+
"codemirror_mode": {
|
267 |
+
"name": "ipython",
|
268 |
+
"version": 3
|
269 |
+
},
|
270 |
+
"file_extension": ".py",
|
271 |
+
"mimetype": "text/x-python",
|
272 |
+
"name": "python",
|
273 |
+
"nbconvert_exporter": "python",
|
274 |
+
"pygments_lexer": "ipython3",
|
275 |
+
"version": "3.10.12"
|
276 |
+
}
|
277 |
+
},
|
278 |
+
"nbformat": 4,
|
279 |
+
"nbformat_minor": 2
|
280 |
+
}
|
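One thing the cells above do not exercise is streaming: in this same google-generativeai 0.3.x API, `generate_content` also accepts `stream=True`. A minimal sketch, reusing the configuration from the notebook:

import os

import google.generativeai as genai
from dotenv import load_dotenv

load_dotenv()
genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))

model = genai.GenerativeModel("gemini-pro")

# With stream=True the call returns an iterable of partial chunks
# instead of a single blocking reply.
response = model.generate_content("What is the meaning of life?", stream=True)
for chunk in response:
    print(chunk.text, end="")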
requirements.txt
ADDED
@@ -0,0 +1,167 @@
1 |
+
aiofiles==23.2.1
|
2 |
+
aiohttp==3.9.1
|
3 |
+
aiosignal==1.3.1
|
4 |
+
altair==5.2.0
|
5 |
+
annotated-types==0.6.0
|
6 |
+
anyio==4.2.0
|
7 |
+
asgiref==3.7.2
|
8 |
+
asttokens==2.4.1
|
9 |
+
async-timeout==4.0.3
|
10 |
+
attrs==23.2.0
|
11 |
+
backoff==2.2.1
|
12 |
+
bcrypt==4.1.2
|
13 |
+
build==1.0.3
|
14 |
+
cachetools==5.3.2
|
15 |
+
certifi==2023.11.17
|
16 |
+
charset-normalizer==3.3.2
|
17 |
+
chroma-hnswlib==0.7.3
|
18 |
+
chromadb==0.4.22
|
19 |
+
click==8.1.7
|
20 |
+
colorama==0.4.6
|
21 |
+
coloredlogs==15.0.1
|
22 |
+
comm==0.2.1
|
23 |
+
contourpy==1.2.0
|
24 |
+
cycler==0.12.1
|
25 |
+
dataclasses-json==0.6.3
|
26 |
+
debugpy==1.8.0
|
27 |
+
decorator==5.1.1
|
28 |
+
Deprecated==1.2.14
|
29 |
+
exceptiongroup==1.2.0
|
30 |
+
executing==2.0.1
|
31 |
+
faiss-cpu==1.7.4
|
32 |
+
fastapi==0.109.0
|
33 |
+
ffmpy==0.3.1
|
34 |
+
filelock==3.13.1
|
35 |
+
flatbuffers==23.5.26
|
36 |
+
fonttools==4.47.2
|
37 |
+
frozenlist==1.4.1
|
38 |
+
fsspec==2023.12.2
|
39 |
+
google-ai-generativelanguage==0.4.0
|
40 |
+
google-api-core==2.15.0
|
41 |
+
google-auth==2.26.2
|
42 |
+
google-generativeai==0.3.2
|
43 |
+
googleapis-common-protos==1.62.0
|
44 |
+
gradio==4.14.0
|
45 |
+
gradio_client==0.8.0
|
46 |
+
greenlet==3.0.3
|
47 |
+
grpcio==1.60.0
|
48 |
+
grpcio-status==1.60.0
|
49 |
+
h11==0.14.0
|
50 |
+
httpcore==1.0.2
|
51 |
+
httptools==0.6.1
|
52 |
+
httpx==0.26.0
|
53 |
+
huggingface-hub==0.20.2
|
54 |
+
humanfriendly==10.0
|
55 |
+
idna==3.6
|
56 |
+
importlib-metadata==6.11.0
|
57 |
+
importlib-resources==6.1.1
|
58 |
+
ipykernel==6.28.0
|
59 |
+
ipython==8.20.0
|
60 |
+
jedi==0.19.1
|
61 |
+
Jinja2==3.1.3
|
62 |
+
jsonpatch==1.33
|
63 |
+
jsonpointer==2.4
|
64 |
+
jsonschema==4.20.0
|
65 |
+
jsonschema-specifications==2023.12.1
|
66 |
+
jupyter_client==8.6.0
|
67 |
+
jupyter_core==5.7.1
|
68 |
+
kiwisolver==1.4.5
|
69 |
+
kubernetes==29.0.0
|
70 |
+
langchain==0.1.0
|
71 |
+
langchain-community==0.0.12
|
72 |
+
langchain-core==0.1.10
|
73 |
+
langchain-google-genai==0.0.6
|
74 |
+
langsmith==0.0.80
|
75 |
+
markdown-it-py==3.0.0
|
76 |
+
MarkupSafe==2.1.3
|
77 |
+
marshmallow==3.20.2
|
78 |
+
matplotlib==3.8.2
|
79 |
+
matplotlib-inline==0.1.6
|
80 |
+
mdurl==0.1.2
|
81 |
+
mmh3==4.1.0
|
82 |
+
monotonic==1.6
|
83 |
+
mpmath==1.3.0
|
84 |
+
multidict==6.0.4
|
85 |
+
mypy-extensions==1.0.0
|
86 |
+
nest-asyncio==1.5.8
|
87 |
+
numpy==1.26.3
|
88 |
+
oauthlib==3.2.2
|
89 |
+
onnxruntime==1.16.3
|
90 |
+
opentelemetry-api==1.22.0
|
91 |
+
opentelemetry-exporter-otlp-proto-common==1.22.0
|
92 |
+
opentelemetry-exporter-otlp-proto-grpc==1.22.0
|
93 |
+
opentelemetry-instrumentation==0.43b0
|
94 |
+
opentelemetry-instrumentation-asgi==0.43b0
|
95 |
+
opentelemetry-instrumentation-fastapi==0.43b0
|
96 |
+
opentelemetry-proto==1.22.0
|
97 |
+
opentelemetry-sdk==1.22.0
|
98 |
+
opentelemetry-semantic-conventions==0.43b0
|
99 |
+
opentelemetry-util-http==0.43b0
|
100 |
+
orjson==3.9.10
|
101 |
+
overrides==7.4.0
|
102 |
+
packaging==23.2
|
103 |
+
pandas==2.1.4
|
104 |
+
parso==0.8.3
|
105 |
+
pexpect==4.9.0
|
106 |
+
pillow==10.2.0
|
107 |
+
platformdirs==4.1.0
|
108 |
+
posthog==3.3.1
|
109 |
+
prompt-toolkit==3.0.43
|
110 |
+
proto-plus==1.23.0
|
111 |
+
protobuf==4.25.2
|
112 |
+
psutil==5.9.7
|
113 |
+
ptyprocess==0.7.0
|
114 |
+
pulsar-client==3.4.0
|
115 |
+
pure-eval==0.2.2
|
116 |
+
pyasn1==0.5.1
|
117 |
+
pyasn1-modules==0.3.0
|
118 |
+
pydantic==2.5.3
|
119 |
+
pydantic_core==2.14.6
|
120 |
+
pydub==0.25.1
|
121 |
+
Pygments==2.17.2
|
122 |
+
pyparsing==3.1.1
|
123 |
+
pypdf==3.17.4
|
124 |
+
PyPika==0.48.9
|
125 |
+
pyproject_hooks==1.0.0
|
126 |
+
python-dateutil==2.8.2
|
127 |
+
python-dotenv==1.0.0
|
128 |
+
python-multipart==0.0.6
|
129 |
+
pytz==2023.3.post1
|
130 |
+
PyYAML==6.0.1
|
131 |
+
pyzmq==25.1.2
|
132 |
+
referencing==0.32.1
|
133 |
+
requests==2.31.0
|
134 |
+
requests-oauthlib==1.3.1
|
135 |
+
rich==13.7.0
|
136 |
+
rpds-py==0.17.1
|
137 |
+
rsa==4.9
|
138 |
+
semantic-version==2.10.0
|
139 |
+
shellingham==1.5.4
|
140 |
+
six==1.16.0
|
141 |
+
sniffio==1.3.0
|
142 |
+
SQLAlchemy==2.0.25
|
143 |
+
stack-data==0.6.3
|
144 |
+
starlette==0.35.1
|
145 |
+
sympy==1.12
|
146 |
+
tenacity==8.2.3
|
147 |
+
tokenizers==0.15.0
|
148 |
+
tomli==2.0.1
|
149 |
+
tomlkit==0.12.0
|
150 |
+
toolz==0.12.0
|
151 |
+
tornado==6.4
|
152 |
+
tqdm==4.66.1
|
153 |
+
traitlets==5.14.1
|
154 |
+
typer==0.9.0
|
155 |
+
typing-inspect==0.9.0
|
156 |
+
typing_extensions==4.9.0
|
157 |
+
tzdata==2023.4
|
158 |
+
urllib3==2.1.0
|
159 |
+
uvicorn==0.25.0
|
160 |
+
uvloop==0.19.0
|
161 |
+
watchfiles==0.21.0
|
162 |
+
wcwidth==0.2.13
|
163 |
+
websocket-client==1.7.0
|
164 |
+
websockets==11.0.3
|
165 |
+
wrapt==1.16.0
|
166 |
+
yarl==1.9.4
|
167 |
+
zipp==3.17.0
|
src/agent.py
ADDED
@@ -0,0 +1,50 @@
1 |
+
import os
|
2 |
+
from langchain.document_loaders import (
|
3 |
+
PyPDFLoader,
|
4 |
+
TextLoader,
|
5 |
+
Docx2txtLoader
|
6 |
+
)
|
7 |
+
|
8 |
+
from langchain.text_splitter import CharacterTextSplitter
|
9 |
+
# from PyPDF2 import PdfReader
|
10 |
+
from langchain.text_splitter import RecursiveCharacterTextSplitter
|
11 |
+
from langchain_google_genai import GoogleGenerativeAIEmbeddings
|
12 |
+
import google.generativeai as genai
|
13 |
+
from langchain.vectorstores import FAISS
|
14 |
+
from langchain_google_genai import ChatGoogleGenerativeAI
|
15 |
+
from langchain.chains.question_answering import load_qa_chain
|
16 |
+
from langchain.prompts import PromptTemplate
|
17 |
+
from langchain.memory import ConversationBufferMemory
|
18 |
+
from dotenv import load_dotenv
|
19 |
+
|
20 |
+
load_dotenv()
|
21 |
+
|
22 |
+
genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
|
23 |
+
|
24 |
+
|
25 |
+
llm = ChatGoogleGenerativeAI(model="gemini-pro", temperature=0.7)
|
26 |
+
|
27 |
+
template = """You are a chatbot having a conversation with a human.
|
28 |
+
|
29 |
+
Given the following extracted parts of a long document and a question, create a final answer.
|
30 |
+
|
31 |
+
{context}
|
32 |
+
|
33 |
+
{chat_history}
|
34 |
+
Human: {human_input}
|
35 |
+
Chatbot:"""
|
36 |
+
|
37 |
+
prompt = PromptTemplate(
|
38 |
+
input_variables=["chat_history", "human_input", "context"], template=template
|
39 |
+
)
|
40 |
+
memory = ConversationBufferMemory(memory_key="chat_history", input_key="human_input")
|
41 |
+
|
42 |
+
# chain = load_qa_chain(
|
43 |
+
# llm=llm, chain_type="stuff", memory=memory, prompt=prompt
|
44 |
+
# )
|
45 |
+
|
46 |
+
def build_qa_chain(llm=llm, prompt=prompt, memory=memory):
|
47 |
+
chain = load_qa_chain(
|
48 |
+
llm=llm, chain_type="stuff", memory=memory, prompt=prompt
|
49 |
+
)
|
50 |
+
return chain
|
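For reference, a minimal sketch of how this chain is invoked; it mirrors the call made in src/utils.py below, and the question string is only an illustration:

from src.agent import build_qa_chain

chain = build_qa_chain()

# With chain_type="stuff", `input_documents` is stuffed into the {context}
# slot of the prompt, `human_input` fills the slot of the same name, and
# the ConversationBufferMemory keeps {chat_history} up to date between calls.
result = chain(
    {"input_documents": [], "human_input": "What is this document about?"},
    return_only_outputs=True,
)
print(result["output_text"])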
src/utils.py
ADDED
@@ -0,0 +1,100 @@
1 |
+
import os
|
2 |
+
from langchain.document_loaders import (
|
3 |
+
PyPDFLoader,
|
4 |
+
TextLoader,
|
5 |
+
Docx2txtLoader
|
6 |
+
)
|
7 |
+
|
8 |
+
from langchain.text_splitter import CharacterTextSplitter
|
9 |
+
# from PyPDF2 import PdfReader
|
10 |
+
from langchain.text_splitter import RecursiveCharacterTextSplitter
|
11 |
+
from langchain_google_genai import GoogleGenerativeAIEmbeddings
|
12 |
+
import google.generativeai as genai
|
13 |
+
from langchain.vectorstores import FAISS
|
14 |
+
from langchain_google_genai import ChatGoogleGenerativeAI
|
15 |
+
from langchain.chains.question_answering import load_qa_chain
|
16 |
+
from langchain.prompts import PromptTemplate
|
17 |
+
from langchain.memory import ConversationBufferMemory
|
18 |
+
from dotenv import load_dotenv
|
19 |
+
from src.agent import build_qa_chain
|
20 |
+
import gradio as gr
|
21 |
+
load_dotenv()
|
22 |
+
|
23 |
+
genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
|
24 |
+
|
25 |
+
class AgentChain:
|
26 |
+
def __init__(self):
|
27 |
+
self.agent = None
|
28 |
+
self.db = None
|
29 |
+
|
30 |
+
agent_chain = AgentChain()
|
31 |
+
agent_chain.agent = build_qa_chain()
|
32 |
+
|
33 |
+
def extract_text_from_files(docs):
|
34 |
+
documents = []
|
35 |
+
files = os.listdir(docs)
|
36 |
+
|
37 |
+
if len(files) == 0:
|
38 |
+
return "Directory is empty"
|
39 |
+
|
40 |
+
base_dir = docs
|
42 |
+
|
43 |
+
for file in files:
|
44 |
+
if file.endswith(".pdf"):
|
45 |
+
pdf_path=os.path.join(base_dir, file)
|
46 |
+
loader=PyPDFLoader(pdf_path)
|
47 |
+
documents.extend(loader.load())
|
48 |
+
elif file.endswith('.docx') or file.endswith('.doc'):
|
49 |
+
doc_path=os.path.join(base_dir, file)
|
50 |
+
loader=Docx2txtLoader(doc_path)
|
51 |
+
documents.extend(loader.load())
|
52 |
+
elif file.endswith('.txt'):
|
53 |
+
text_path=os.path.join(base_dir, file)
|
54 |
+
loader=TextLoader(text_path)
|
55 |
+
documents.extend(loader.load())
|
56 |
+
return documents
|
57 |
+
|
58 |
+
def extract_text_from_file(file):
|
59 |
+
documents = []
|
60 |
+
filename = str(file)
|
61 |
+
if filename.endswith(".pdf"):
|
62 |
+
loader=PyPDFLoader(file)
|
63 |
+
documents.extend(loader.load())
|
64 |
+
elif filename.endswith('.docx') or filename.endswith('.doc'):
|
65 |
+
loader=Docx2txtLoader(file)
|
66 |
+
documents.extend(loader.load())
|
67 |
+
elif filename.endswith('.txt'):
|
68 |
+
loader=TextLoader(file)
|
69 |
+
documents.extend(loader.load())
|
70 |
+
print("Text extracted")
|
71 |
+
return documents
|
72 |
+
|
73 |
+
def get_text_chunks(text):
|
74 |
+
text_splitter = RecursiveCharacterTextSplitter(chunk_size=10000, chunk_overlap=1000)
|
75 |
+
chunks = text_splitter.split_documents(text)
|
76 |
+
print("Chunks splitted")
|
77 |
+
return chunks
|
78 |
+
|
79 |
+
def save_in_faiss(text_chunks, save=False):
|
80 |
+
embeddings = GoogleGenerativeAIEmbeddings(model = "models/embedding-001")
|
81 |
+
vector_store = FAISS.from_documents(text_chunks, embedding=embeddings)
|
82 |
+
if save:
|
83 |
+
vector_store.save_local("faiss_index")
|
84 |
+
print("Document search created")
|
85 |
+
return vector_store
|
86 |
+
|
87 |
+
def process_files(file):
|
88 |
+
documents = extract_text_from_file(file)
|
89 |
+
text_chunks = get_text_chunks(documents)
|
90 |
+
vector_store = save_in_faiss(text_chunks)
|
91 |
+
agent_chain.db = vector_store
|
92 |
+
gr.Info("Processing completed")
|
93 |
+
return file
|
94 |
+
|
95 |
+
def answer_query(message, history):
|
96 |
+
if agent_chain.db is not None:
|
97 |
+
docs = agent_chain.db.similarity_search(message)
|
98 |
+
else:
    docs = []
|
99 |
+
response = agent_chain.agent({"input_documents": docs, "human_input": message}, return_only_outputs=True)
|
100 |
+
return response['output_text']
|
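Putting the helpers together, an end-to-end sketch of the pipeline this module implements (the file path is hypothetical, and a GOOGLE_API_KEY is assumed in .env as above):

from src.utils import answer_query, process_files

# extract text -> split into chunks -> embed -> index in FAISS
process_files("docs/sample.pdf")

# once the index is populated, queries are answered over retrieved chunks
print(answer_query("Summarize the document.", history=[]))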