{
  "cells": [
    {
      "cell_type": "code",
      "execution_count": 1,
      "metadata": {
        "id": "2N8psBL6-wfJ"
      },
      "outputs": [],
      "source": [
        "# !cp -rf /content/drive/MyDrive/ML\\ Projects/Gemini-Pro/docs /content/"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 1,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "9AkjB4x3ybTb",
        "outputId": "7b4fa13a-cb14-4e59-f5b7-c426a3fbea35"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Collecting langchain\n",
            "  Downloading langchain-0.1.0-py3-none-any.whl (797 kB)\n",
            "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m798.0/798.0 KB\u001b[0m \u001b[31m604.5 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
            "\u001b[?25hCollecting tenacity<9.0.0,>=8.1.0\n",
            "  Using cached tenacity-8.2.3-py3-none-any.whl (24 kB)\n",
            "Collecting dataclasses-json<0.7,>=0.5.7\n",
            "  Downloading dataclasses_json-0.6.3-py3-none-any.whl (28 kB)\n",
            "Collecting SQLAlchemy<3,>=1.4\n",
            "  Downloading SQLAlchemy-2.0.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (3.1 MB)\n",
            "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m3.1/3.1 MB\u001b[0m \u001b[31m1.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
            "\u001b[?25hCollecting requests<3,>=2\n",
            "  Using cached requests-2.31.0-py3-none-any.whl (62 kB)\n",
            "Collecting numpy<2,>=1\n",
            "  Downloading numpy-1.26.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (18.2 MB)\n",
            "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m18.2/18.2 MB\u001b[0m \u001b[31m2.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
            "\u001b[?25hCollecting langchain-core<0.2,>=0.1.7\n",
            "  Using cached langchain_core-0.1.10-py3-none-any.whl (216 kB)\n",
            "Collecting langchain-community<0.1,>=0.0.9\n",
            "  Downloading langchain_community-0.0.12-py3-none-any.whl (1.6 MB)\n",
            "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.6/1.6 MB\u001b[0m \u001b[31m1.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0meta \u001b[36m0:00:01\u001b[0m\n",
            "\u001b[?25hCollecting langsmith<0.1.0,>=0.0.77\n",
            "  Downloading langsmith-0.0.80-py3-none-any.whl (48 kB)\n",
            "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m48.3/48.3 KB\u001b[0m \u001b[31m594.8 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m \u001b[36m0:00:01\u001b[0m\n",
            "\u001b[?25hCollecting PyYAML>=5.3\n",
            "  Using cached PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (705 kB)\n",
            "Collecting aiohttp<4.0.0,>=3.8.3\n",
            "  Downloading aiohttp-3.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.2 MB)\n",
            "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.2/1.2 MB\u001b[0m \u001b[31m524.5 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:04\u001b[0m\n",
            "\u001b[?25hCollecting async-timeout<5.0.0,>=4.0.0\n",
            "  Using cached async_timeout-4.0.3-py3-none-any.whl (5.7 kB)\n",
            "Collecting pydantic<3,>=1\n",
            "  Using cached pydantic-2.5.3-py3-none-any.whl (381 kB)\n",
            "Collecting jsonpatch<2.0,>=1.33\n",
            "  Using cached jsonpatch-1.33-py2.py3-none-any.whl (12 kB)\n",
            "Collecting aiosignal>=1.1.2\n",
            "  Using cached aiosignal-1.3.1-py3-none-any.whl (7.6 kB)\n",
            "Collecting multidict<7.0,>=4.5\n",
            "  Using cached multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (114 kB)\n",
            "Collecting yarl<2.0,>=1.0\n",
            "  Downloading yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (301 kB)\n",
            "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m301.6/301.6 KB\u001b[0m \u001b[31m867.8 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
            "\u001b[?25hCollecting frozenlist>=1.1.1\n",
            "  Downloading frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (239 kB)\n",
            "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m239.5/239.5 KB\u001b[0m \u001b[31m1.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
            "\u001b[?25hCollecting attrs>=17.3.0\n",
            "  Downloading attrs-23.2.0-py3-none-any.whl (60 kB)\n",
            "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m60.8/60.8 KB\u001b[0m \u001b[31m7.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hCollecting marshmallow<4.0.0,>=3.18.0\n",
            "  Downloading marshmallow-3.20.2-py3-none-any.whl (49 kB)\n",
            "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m49.4/49.4 KB\u001b[0m \u001b[31m17.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hCollecting typing-inspect<1,>=0.4.0\n",
            "  Using cached typing_inspect-0.9.0-py3-none-any.whl (8.8 kB)\n",
            "Collecting jsonpointer>=1.9\n",
            "  Using cached jsonpointer-2.4-py2.py3-none-any.whl (7.8 kB)\n",
            "Requirement already satisfied: packaging<24.0,>=23.2 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from langchain-core<0.2,>=0.1.7->langchain) (23.2)\n",
            "Collecting anyio<5,>=3\n",
            "  Using cached anyio-4.2.0-py3-none-any.whl (85 kB)\n",
            "Collecting annotated-types>=0.4.0\n",
            "  Using cached annotated_types-0.6.0-py3-none-any.whl (12 kB)\n",
            "Collecting typing-extensions>=4.6.1\n",
            "  Using cached typing_extensions-4.9.0-py3-none-any.whl (32 kB)\n",
            "\u001b[33mWARNING: Retrying (Retry(total=4, connect=None, read=None, redirect=None, status=None)) after connection broken by 'ProtocolError('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))': /simple/pydantic-core/\u001b[0m\u001b[33m\n",
            "\u001b[0mCollecting pydantic-core==2.14.6\n",
            "  Using cached pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (2.1 MB)\n",
            "Collecting certifi>=2017.4.17\n",
            "  Using cached certifi-2023.11.17-py3-none-any.whl (162 kB)\n",
            "Collecting urllib3<3,>=1.21.1\n",
            "  Using cached urllib3-2.1.0-py3-none-any.whl (104 kB)\n",
            "Collecting idna<4,>=2.5\n",
            "  Using cached idna-3.6-py3-none-any.whl (61 kB)\n",
            "Collecting charset-normalizer<4,>=2\n",
            "  Using cached charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (142 kB)\n",
            "Collecting greenlet!=0.4.17\n",
            "  Downloading greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl (616 kB)\n",
            "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m616.0/616.0 KB\u001b[0m \u001b[31m1.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
            "\u001b[?25hCollecting sniffio>=1.1\n",
            "  Using cached sniffio-1.3.0-py3-none-any.whl (10 kB)\n",
            "Requirement already satisfied: exceptiongroup>=1.0.2 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from anyio<5,>=3->langchain-core<0.2,>=0.1.7->langchain) (1.2.0)\n",
            "Collecting mypy-extensions>=0.3.0\n",
            "  Using cached mypy_extensions-1.0.0-py3-none-any.whl (4.7 kB)\n",
            "Installing collected packages: urllib3, typing-extensions, tenacity, sniffio, PyYAML, numpy, mypy-extensions, multidict, marshmallow, jsonpointer, idna, greenlet, frozenlist, charset-normalizer, certifi, attrs, async-timeout, annotated-types, yarl, typing-inspect, SQLAlchemy, requests, pydantic-core, jsonpatch, anyio, aiosignal, pydantic, dataclasses-json, aiohttp, langsmith, langchain-core, langchain-community, langchain\n",
            "Successfully installed PyYAML-6.0.1 SQLAlchemy-2.0.25 aiohttp-3.9.1 aiosignal-1.3.1 annotated-types-0.6.0 anyio-4.2.0 async-timeout-4.0.3 attrs-23.2.0 certifi-2023.11.17 charset-normalizer-3.3.2 dataclasses-json-0.6.3 frozenlist-1.4.1 greenlet-3.0.3 idna-3.6 jsonpatch-1.33 jsonpointer-2.4 langchain-0.1.0 langchain-community-0.0.12 langchain-core-0.1.10 langsmith-0.0.80 marshmallow-3.20.2 multidict-6.0.4 mypy-extensions-1.0.0 numpy-1.26.3 pydantic-2.5.3 pydantic-core-2.14.6 requests-2.31.0 sniffio-1.3.0 tenacity-8.2.3 typing-extensions-4.9.0 typing-inspect-0.9.0 urllib3-2.1.0 yarl-1.9.4\n",
            "Collecting pypdf\n",
            "  Downloading pypdf-3.17.4-py3-none-any.whl (278 kB)\n",
            "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m278.2/278.2 KB\u001b[0m \u001b[31m898.7 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
            "\u001b[?25hInstalling collected packages: pypdf\n",
            "Successfully installed pypdf-3.17.4\n",
            "Collecting langchain_google_genai\n",
            "  Using cached langchain_google_genai-0.0.6-py3-none-any.whl (15 kB)\n",
            "Collecting google-generativeai<0.4.0,>=0.3.1\n",
            "  Using cached google_generativeai-0.3.2-py3-none-any.whl (146 kB)\n",
            "Requirement already satisfied: langchain-core<0.2,>=0.1 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from langchain_google_genai) (0.1.10)\n",
            "Collecting google-api-core\n",
            "  Using cached google_api_core-2.15.0-py3-none-any.whl (121 kB)\n",
            "Collecting google-ai-generativelanguage==0.4.0\n",
            "  Using cached google_ai_generativelanguage-0.4.0-py3-none-any.whl (598 kB)\n",
            "Collecting protobuf\n",
            "  Downloading protobuf-4.25.2-cp37-abi3-manylinux2014_x86_64.whl (294 kB)\n",
            "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m294.6/294.6 KB\u001b[0m \u001b[31m891.6 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
            "\u001b[?25hCollecting google-auth\n",
            "  Downloading google_auth-2.26.2-py2.py3-none-any.whl (186 kB)\n",
            "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m186.5/186.5 KB\u001b[0m \u001b[31m1.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
            "\u001b[?25hRequirement already satisfied: typing-extensions in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from google-generativeai<0.4.0,>=0.3.1->langchain_google_genai) (4.9.0)\n",
            "Collecting tqdm\n",
            "  Using cached tqdm-4.66.1-py3-none-any.whl (78 kB)\n",
            "Collecting proto-plus<2.0.0dev,>=1.22.3\n",
            "  Using cached proto_plus-1.23.0-py3-none-any.whl (48 kB)\n",
            "Requirement already satisfied: requests<3,>=2 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from langchain-core<0.2,>=0.1->langchain_google_genai) (2.31.0)\n",
            "Requirement already satisfied: langsmith<0.1.0,>=0.0.63 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from langchain-core<0.2,>=0.1->langchain_google_genai) (0.0.80)\n",
            "Requirement already satisfied: pydantic<3,>=1 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from langchain-core<0.2,>=0.1->langchain_google_genai) (2.5.3)\n",
            "Requirement already satisfied: PyYAML>=5.3 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from langchain-core<0.2,>=0.1->langchain_google_genai) (6.0.1)\n",
            "Requirement already satisfied: anyio<5,>=3 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from langchain-core<0.2,>=0.1->langchain_google_genai) (4.2.0)\n",
            "Requirement already satisfied: packaging<24.0,>=23.2 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from langchain-core<0.2,>=0.1->langchain_google_genai) (23.2)\n",
            "Requirement already satisfied: tenacity<9.0.0,>=8.1.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from langchain-core<0.2,>=0.1->langchain_google_genai) (8.2.3)\n",
            "Requirement already satisfied: jsonpatch<2.0,>=1.33 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from langchain-core<0.2,>=0.1->langchain_google_genai) (1.33)\n",
            "Requirement already satisfied: sniffio>=1.1 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from anyio<5,>=3->langchain-core<0.2,>=0.1->langchain_google_genai) (1.3.0)\n",
            "Requirement already satisfied: idna>=2.8 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from anyio<5,>=3->langchain-core<0.2,>=0.1->langchain_google_genai) (3.6)\n",
            "Requirement already satisfied: exceptiongroup>=1.0.2 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from anyio<5,>=3->langchain-core<0.2,>=0.1->langchain_google_genai) (1.2.0)\n",
            "Requirement already satisfied: jsonpointer>=1.9 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from jsonpatch<2.0,>=1.33->langchain-core<0.2,>=0.1->langchain_google_genai) (2.4)\n",
            "Requirement already satisfied: pydantic-core==2.14.6 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from pydantic<3,>=1->langchain-core<0.2,>=0.1->langchain_google_genai) (2.14.6)\n",
            "Requirement already satisfied: annotated-types>=0.4.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from pydantic<3,>=1->langchain-core<0.2,>=0.1->langchain_google_genai) (0.6.0)\n",
            "Requirement already satisfied: charset-normalizer<4,>=2 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from requests<3,>=2->langchain-core<0.2,>=0.1->langchain_google_genai) (3.3.2)\n",
            "Requirement already satisfied: urllib3<3,>=1.21.1 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from requests<3,>=2->langchain-core<0.2,>=0.1->langchain_google_genai) (2.1.0)\n",
            "Requirement already satisfied: certifi>=2017.4.17 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from requests<3,>=2->langchain-core<0.2,>=0.1->langchain_google_genai) (2023.11.17)\n",
            "Collecting googleapis-common-protos<2.0.dev0,>=1.56.2\n",
            "  Downloading googleapis_common_protos-1.62.0-py2.py3-none-any.whl (228 kB)\n",
            "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m228.7/228.7 KB\u001b[0m \u001b[31m1.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
            "\u001b[?25hCollecting cachetools<6.0,>=2.0.0\n",
            "  Using cached cachetools-5.3.2-py3-none-any.whl (9.3 kB)\n",
            "Collecting pyasn1-modules>=0.2.1\n",
            "  Using cached pyasn1_modules-0.3.0-py2.py3-none-any.whl (181 kB)\n",
            "Collecting rsa<5,>=3.1.4\n",
            "  Using cached rsa-4.9-py3-none-any.whl (34 kB)\n",
            "Collecting grpcio<2.0dev,>=1.33.2\n",
            "  Using cached grpcio-1.60.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (5.4 MB)\n",
            "Collecting grpcio-status<2.0.dev0,>=1.33.2\n",
            "  Using cached grpcio_status-1.60.0-py3-none-any.whl (14 kB)\n",
            "Collecting pyasn1<0.6.0,>=0.4.6\n",
            "  Downloading pyasn1-0.5.1-py2.py3-none-any.whl (84 kB)\n",
            "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m84.9/84.9 KB\u001b[0m \u001b[31m974.7 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
            "\u001b[?25hInstalling collected packages: tqdm, pyasn1, protobuf, grpcio, cachetools, rsa, pyasn1-modules, proto-plus, googleapis-common-protos, grpcio-status, google-auth, google-api-core, google-ai-generativelanguage, google-generativeai, langchain_google_genai\n",
            "Successfully installed cachetools-5.3.2 google-ai-generativelanguage-0.4.0 google-api-core-2.15.0 google-auth-2.26.2 google-generativeai-0.3.2 googleapis-common-protos-1.62.0 grpcio-1.60.0 grpcio-status-1.60.0 langchain_google_genai-0.0.6 proto-plus-1.23.0 protobuf-4.25.2 pyasn1-0.5.1 pyasn1-modules-0.3.0 rsa-4.9 tqdm-4.66.1\n",
            "Requirement already satisfied: google-generativeai in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (0.3.2)\n",
            "Requirement already satisfied: google-auth in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from google-generativeai) (2.26.2)\n",
            "Requirement already satisfied: google-api-core in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from google-generativeai) (2.15.0)\n",
            "Requirement already satisfied: protobuf in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from google-generativeai) (4.25.2)\n",
            "Requirement already satisfied: google-ai-generativelanguage==0.4.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from google-generativeai) (0.4.0)\n",
            "Requirement already satisfied: tqdm in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from google-generativeai) (4.66.1)\n",
            "Requirement already satisfied: typing-extensions in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from google-generativeai) (4.9.0)\n",
            "Requirement already satisfied: proto-plus<2.0.0dev,>=1.22.3 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from google-ai-generativelanguage==0.4.0->google-generativeai) (1.23.0)\n",
            "Requirement already satisfied: requests<3.0.0.dev0,>=2.18.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from google-api-core->google-generativeai) (2.31.0)\n",
            "Requirement already satisfied: googleapis-common-protos<2.0.dev0,>=1.56.2 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from google-api-core->google-generativeai) (1.62.0)\n",
            "Requirement already satisfied: cachetools<6.0,>=2.0.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from google-auth->google-generativeai) (5.3.2)\n",
            "Requirement already satisfied: pyasn1-modules>=0.2.1 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from google-auth->google-generativeai) (0.3.0)\n",
            "Requirement already satisfied: rsa<5,>=3.1.4 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from google-auth->google-generativeai) (4.9)\n",
            "Requirement already satisfied: grpcio<2.0dev,>=1.33.2 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from google-api-core->google-generativeai) (1.60.0)\n",
            "Requirement already satisfied: grpcio-status<2.0.dev0,>=1.33.2 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from google-api-core->google-generativeai) (1.60.0)\n",
            "Requirement already satisfied: pyasn1<0.6.0,>=0.4.6 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from pyasn1-modules>=0.2.1->google-auth->google-generativeai) (0.5.1)\n",
            "Requirement already satisfied: certifi>=2017.4.17 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from requests<3.0.0.dev0,>=2.18.0->google-api-core->google-generativeai) (2023.11.17)\n",
            "Requirement already satisfied: charset-normalizer<4,>=2 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from requests<3.0.0.dev0,>=2.18.0->google-api-core->google-generativeai) (3.3.2)\n",
            "Requirement already satisfied: urllib3<3,>=1.21.1 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from requests<3.0.0.dev0,>=2.18.0->google-api-core->google-generativeai) (2.1.0)\n",
            "Requirement already satisfied: idna<4,>=2.5 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from requests<3.0.0.dev0,>=2.18.0->google-api-core->google-generativeai) (3.6)\n",
            "Collecting chromadb\n",
            "  Downloading chromadb-0.4.22-py3-none-any.whl (509 kB)\n",
            "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m509.0/509.0 KB\u001b[0m \u001b[31m945.6 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
            "\u001b[?25hCollecting overrides>=7.3.1\n",
            "  Using cached overrides-7.4.0-py3-none-any.whl (17 kB)\n",
            "Collecting opentelemetry-api>=1.2.0\n",
            "  Downloading opentelemetry_api-1.22.0-py3-none-any.whl (57 kB)\n",
            "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m57.9/57.9 KB\u001b[0m \u001b[31m861.5 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m:--:--\u001b[0m\n",
            "\u001b[?25hRequirement already satisfied: typing-extensions>=4.5.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from chromadb) (4.9.0)\n",
            "Collecting pulsar-client>=3.1.0\n",
            "  Downloading pulsar_client-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (5.4 MB)\n",
            "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m5.4/5.4 MB\u001b[0m \u001b[31m1.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
            "\u001b[?25hCollecting mmh3>=4.0.1\n",
            "  Downloading mmh3-4.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (67 kB)\n",
            "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m67.6/67.6 KB\u001b[0m \u001b[31m6.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hCollecting onnxruntime>=1.14.1\n",
            "  Downloading onnxruntime-1.16.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (6.4 MB)\n",
            "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m6.4/6.4 MB\u001b[0m \u001b[31m2.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
            "\u001b[?25hCollecting typer>=0.9.0\n",
            "  Using cached typer-0.9.0-py3-none-any.whl (45 kB)\n",
            "Requirement already satisfied: grpcio>=1.58.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from chromadb) (1.60.0)\n",
            "Requirement already satisfied: numpy>=1.22.5 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from chromadb) (1.26.3)\n",
            "Collecting opentelemetry-instrumentation-fastapi>=0.41b0\n",
            "  Downloading opentelemetry_instrumentation_fastapi-0.43b0-py3-none-any.whl (11 kB)\n",
            "Requirement already satisfied: requests>=2.28 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from chromadb) (2.31.0)\n",
            "Collecting pypika>=0.48.9\n",
            "  Using cached PyPika-0.48.9-py2.py3-none-any.whl\n",
            "Requirement already satisfied: pydantic>=1.9 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from chromadb) (2.5.3)\n",
            "Collecting opentelemetry-exporter-otlp-proto-grpc>=1.2.0\n",
            "  Downloading opentelemetry_exporter_otlp_proto_grpc-1.22.0-py3-none-any.whl (18 kB)\n",
            "Collecting opentelemetry-sdk>=1.2.0\n",
            "  Downloading opentelemetry_sdk-1.22.0-py3-none-any.whl (105 kB)\n",
            "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m105.6/105.6 KB\u001b[0m \u001b[31m6.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hCollecting kubernetes>=28.1.0\n",
            "  Downloading kubernetes-29.0.0-py2.py3-none-any.whl (1.6 MB)\n",
            "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.6/1.6 MB\u001b[0m \u001b[31m1.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
            "\u001b[?25hCollecting uvicorn[standard]>=0.18.3\n",
            "  Using cached uvicorn-0.25.0-py3-none-any.whl (60 kB)\n",
            "Collecting posthog>=2.4.0\n",
            "  Downloading posthog-3.3.1-py2.py3-none-any.whl (40 kB)\n",
            "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m40.6/40.6 KB\u001b[0m \u001b[31m455.0 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m \u001b[36m0:00:01\u001b[0m\n",
            "\u001b[?25hRequirement already satisfied: tenacity>=8.2.3 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from chromadb) (8.2.3)\n",
            "Requirement already satisfied: PyYAML>=6.0.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from chromadb) (6.0.1)\n",
            "Requirement already satisfied: tqdm>=4.65.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from chromadb) (4.66.1)\n",
            "Collecting tokenizers>=0.13.2\n",
            "  Using cached tokenizers-0.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (3.8 MB)\n",
            "Collecting bcrypt>=4.0.1\n",
            "  Downloading bcrypt-4.1.2-cp39-abi3-manylinux_2_28_x86_64.whl (698 kB)\n",
            "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m698.9/698.9 KB\u001b[0m \u001b[31m1.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
            "\u001b[?25hCollecting fastapi>=0.95.2\n",
            "  Downloading fastapi-0.109.0-py3-none-any.whl (92 kB)\n",
            "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m92.0/92.0 KB\u001b[0m \u001b[31m9.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hCollecting build>=1.0.3\n",
            "  Downloading build-1.0.3-py3-none-any.whl (18 kB)\n",
            "Collecting importlib-resources\n",
            "  Using cached importlib_resources-6.1.1-py3-none-any.whl (33 kB)\n",
            "Collecting chroma-hnswlib==0.7.3\n",
            "  Using cached chroma_hnswlib-0.7.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (2.4 MB)\n",
            "Collecting pyproject_hooks\n",
            "  Downloading pyproject_hooks-1.0.0-py3-none-any.whl (9.3 kB)\n",
            "Collecting tomli>=1.1.0\n",
            "  Using cached tomli-2.0.1-py3-none-any.whl (12 kB)\n",
            "Requirement already satisfied: packaging>=19.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from build>=1.0.3->chromadb) (23.2)\n",
            "Collecting starlette<0.36.0,>=0.35.0\n",
            "  Downloading starlette-0.35.1-py3-none-any.whl (71 kB)\n",
            "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m71.1/71.1 KB\u001b[0m \u001b[31m2.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hCollecting oauthlib>=3.2.2\n",
            "  Using cached oauthlib-3.2.2-py3-none-any.whl (151 kB)\n",
            "Requirement already satisfied: python-dateutil>=2.5.3 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from kubernetes>=28.1.0->chromadb) (2.8.2)\n",
            "Requirement already satisfied: urllib3>=1.24.2 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from kubernetes>=28.1.0->chromadb) (2.1.0)\n",
            "Requirement already satisfied: google-auth>=1.0.1 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from kubernetes>=28.1.0->chromadb) (2.26.2)\n",
            "Requirement already satisfied: six>=1.9.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from kubernetes>=28.1.0->chromadb) (1.16.0)\n",
            "Collecting requests-oauthlib\n",
            "  Using cached requests_oauthlib-1.3.1-py2.py3-none-any.whl (23 kB)\n",
            "Collecting websocket-client!=0.40.0,!=0.41.*,!=0.42.*,>=0.32.0\n",
            "  Downloading websocket_client-1.7.0-py3-none-any.whl (58 kB)\n",
            "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m58.5/58.5 KB\u001b[0m \u001b[31m3.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hRequirement already satisfied: certifi>=14.05.14 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from kubernetes>=28.1.0->chromadb) (2023.11.17)\n",
            "Collecting flatbuffers\n",
            "  Using cached flatbuffers-23.5.26-py2.py3-none-any.whl (26 kB)\n",
            "Collecting coloredlogs\n",
            "  Using cached coloredlogs-15.0.1-py2.py3-none-any.whl (46 kB)\n",
            "Collecting sympy\n",
            "  Using cached sympy-1.12-py3-none-any.whl (5.7 MB)\n",
            "Requirement already satisfied: protobuf in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from onnxruntime>=1.14.1->chromadb) (4.25.2)\n",
            "Collecting deprecated>=1.2.6\n",
            "  Using cached Deprecated-1.2.14-py2.py3-none-any.whl (9.6 kB)\n",
            "Collecting importlib-metadata<7.0,>=6.0\n",
            "  Downloading importlib_metadata-6.11.0-py3-none-any.whl (23 kB)\n",
            "Collecting backoff<3.0.0,>=1.10.0\n",
            "  Using cached backoff-2.2.1-py3-none-any.whl (15 kB)\n",
            "Collecting opentelemetry-proto==1.22.0\n",
            "  Downloading opentelemetry_proto-1.22.0-py3-none-any.whl (50 kB)\n",
            "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m50.8/50.8 KB\u001b[0m \u001b[31m1.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hCollecting opentelemetry-exporter-otlp-proto-common==1.22.0\n",
            "  Downloading opentelemetry_exporter_otlp_proto_common-1.22.0-py3-none-any.whl (17 kB)\n",
            "Requirement already satisfied: googleapis-common-protos~=1.52 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from opentelemetry-exporter-otlp-proto-grpc>=1.2.0->chromadb) (1.62.0)\n",
            "Collecting opentelemetry-instrumentation-asgi==0.43b0\n",
            "  Downloading opentelemetry_instrumentation_asgi-0.43b0-py3-none-any.whl (14 kB)\n",
            "Collecting opentelemetry-semantic-conventions==0.43b0\n",
            "  Downloading opentelemetry_semantic_conventions-0.43b0-py3-none-any.whl (36 kB)\n",
            "Collecting opentelemetry-util-http==0.43b0\n",
            "  Downloading opentelemetry_util_http-0.43b0-py3-none-any.whl (6.9 kB)\n",
            "Collecting opentelemetry-instrumentation==0.43b0\n",
            "  Downloading opentelemetry_instrumentation-0.43b0-py3-none-any.whl (28 kB)\n",
            "Collecting wrapt<2.0.0,>=1.0.0\n",
            "  Using cached wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (80 kB)\n",
            "Requirement already satisfied: setuptools>=16.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from opentelemetry-instrumentation==0.43b0->opentelemetry-instrumentation-fastapi>=0.41b0->chromadb) (59.6.0)\n",
            "Collecting asgiref~=3.0\n",
            "  Downloading asgiref-3.7.2-py3-none-any.whl (24 kB)\n",
            "Collecting monotonic>=1.5\n",
            "  Downloading monotonic-1.6-py2.py3-none-any.whl (8.2 kB)\n",
            "Requirement already satisfied: annotated-types>=0.4.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from pydantic>=1.9->chromadb) (0.6.0)\n",
            "Requirement already satisfied: pydantic-core==2.14.6 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from pydantic>=1.9->chromadb) (2.14.6)\n",
            "Requirement already satisfied: idna<4,>=2.5 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from requests>=2.28->chromadb) (3.6)\n",
            "Requirement already satisfied: charset-normalizer<4,>=2 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from requests>=2.28->chromadb) (3.3.2)\n",
            "Collecting huggingface_hub<1.0,>=0.16.4\n",
            "  Using cached huggingface_hub-0.20.2-py3-none-any.whl (330 kB)\n",
            "Collecting click<9.0.0,>=7.1.1\n",
            "  Using cached click-8.1.7-py3-none-any.whl (97 kB)\n",
            "Collecting h11>=0.8\n",
            "  Using cached h11-0.14.0-py3-none-any.whl (58 kB)\n",
            "Collecting websockets>=10.4\n",
            "  Downloading websockets-12.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (130 kB)\n",
            "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m130.2/130.2 KB\u001b[0m \u001b[31m1.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
            "\u001b[?25hCollecting python-dotenv>=0.13\n",
            "  Using cached python_dotenv-1.0.0-py3-none-any.whl (19 kB)\n",
            "Collecting httptools>=0.5.0\n",
            "  Using cached httptools-0.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (341 kB)\n",
            "Collecting watchfiles>=0.13\n",
            "  Using cached watchfiles-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.3 MB)\n",
            "Collecting uvloop!=0.15.0,!=0.15.1,>=0.14.0\n",
            "  Using cached uvloop-0.19.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (3.4 MB)\n",
            "Requirement already satisfied: rsa<5,>=3.1.4 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from google-auth>=1.0.1->kubernetes>=28.1.0->chromadb) (4.9)\n",
            "Requirement already satisfied: cachetools<6.0,>=2.0.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from google-auth>=1.0.1->kubernetes>=28.1.0->chromadb) (5.3.2)\n",
            "Requirement already satisfied: pyasn1-modules>=0.2.1 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from google-auth>=1.0.1->kubernetes>=28.1.0->chromadb) (0.3.0)\n",
            "Collecting filelock\n",
            "  Using cached filelock-3.13.1-py3-none-any.whl (11 kB)\n",
            "Collecting fsspec>=2023.5.0\n",
            "  Using cached fsspec-2023.12.2-py3-none-any.whl (168 kB)\n",
            "Collecting zipp>=0.5\n",
            "  Downloading zipp-3.17.0-py3-none-any.whl (7.4 kB)\n",
            "Requirement already satisfied: anyio<5,>=3.4.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from starlette<0.36.0,>=0.35.0->fastapi>=0.95.2->chromadb) (4.2.0)\n",
            "Collecting humanfriendly>=9.1\n",
            "  Using cached humanfriendly-10.0-py2.py3-none-any.whl (86 kB)\n",
            "Collecting mpmath>=0.19\n",
            "  Using cached mpmath-1.3.0-py3-none-any.whl (536 kB)\n",
            "Requirement already satisfied: exceptiongroup>=1.0.2 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from anyio<5,>=3.4.0->starlette<0.36.0,>=0.35.0->fastapi>=0.95.2->chromadb) (1.2.0)\n",
            "Requirement already satisfied: sniffio>=1.1 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from anyio<5,>=3.4.0->starlette<0.36.0,>=0.35.0->fastapi>=0.95.2->chromadb) (1.3.0)\n",
            "Requirement already satisfied: pyasn1<0.6.0,>=0.4.6 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from pyasn1-modules>=0.2.1->google-auth>=1.0.1->kubernetes>=28.1.0->chromadb) (0.5.1)\n",
            "Installing collected packages: pypika, mpmath, monotonic, mmh3, flatbuffers, zipp, wrapt, websockets, websocket-client, uvloop, tomli, sympy, python-dotenv, pulsar-client, overrides, opentelemetry-util-http, opentelemetry-semantic-conventions, opentelemetry-proto, oauthlib, importlib-resources, humanfriendly, httptools, h11, fsspec, filelock, click, chroma-hnswlib, bcrypt, backoff, asgiref, watchfiles, uvicorn, typer, starlette, requests-oauthlib, pyproject_hooks, posthog, opentelemetry-exporter-otlp-proto-common, importlib-metadata, huggingface_hub, deprecated, coloredlogs, tokenizers, opentelemetry-api, onnxruntime, kubernetes, fastapi, build, opentelemetry-sdk, opentelemetry-instrumentation, opentelemetry-instrumentation-asgi, opentelemetry-exporter-otlp-proto-grpc, opentelemetry-instrumentation-fastapi, chromadb\n",
            "Successfully installed asgiref-3.7.2 backoff-2.2.1 bcrypt-4.1.2 build-1.0.3 chroma-hnswlib-0.7.3 chromadb-0.4.22 click-8.1.7 coloredlogs-15.0.1 deprecated-1.2.14 fastapi-0.109.0 filelock-3.13.1 flatbuffers-23.5.26 fsspec-2023.12.2 h11-0.14.0 httptools-0.6.1 huggingface_hub-0.20.2 humanfriendly-10.0 importlib-metadata-6.11.0 importlib-resources-6.1.1 kubernetes-29.0.0 mmh3-4.1.0 monotonic-1.6 mpmath-1.3.0 oauthlib-3.2.2 onnxruntime-1.16.3 opentelemetry-api-1.22.0 opentelemetry-exporter-otlp-proto-common-1.22.0 opentelemetry-exporter-otlp-proto-grpc-1.22.0 opentelemetry-instrumentation-0.43b0 opentelemetry-instrumentation-asgi-0.43b0 opentelemetry-instrumentation-fastapi-0.43b0 opentelemetry-proto-1.22.0 opentelemetry-sdk-1.22.0 opentelemetry-semantic-conventions-0.43b0 opentelemetry-util-http-0.43b0 overrides-7.4.0 posthog-3.3.1 pulsar-client-3.4.0 pypika-0.48.9 pyproject_hooks-1.0.0 python-dotenv-1.0.0 requests-oauthlib-1.3.1 starlette-0.35.1 sympy-1.12 tokenizers-0.15.0 tomli-2.0.1 typer-0.9.0 uvicorn-0.25.0 uvloop-0.19.0 watchfiles-0.21.0 websocket-client-1.7.0 websockets-12.0 wrapt-1.16.0 zipp-3.17.0\n",
            "Collecting gradio\n",
            "  Using cached gradio-4.14.0-py3-none-any.whl (16.6 MB)\n",
            "Requirement already satisfied: packaging in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from gradio) (23.2)\n",
            "Requirement already satisfied: pyyaml<7.0,>=5.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from gradio) (6.0.1)\n",
            "Requirement already satisfied: pydantic>=2.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from gradio) (2.5.3)\n",
            "Requirement already satisfied: typing-extensions~=4.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from gradio) (4.9.0)\n",
            "Collecting tomlkit==0.12.0\n",
            "  Using cached tomlkit-0.12.0-py3-none-any.whl (37 kB)\n",
            "Collecting pydub\n",
            "  Using cached pydub-0.25.1-py2.py3-none-any.whl (32 kB)\n",
            "Collecting httpx\n",
            "  Using cached httpx-0.26.0-py3-none-any.whl (75 kB)\n",
            "Collecting altair<6.0,>=4.2.0\n",
            "  Using cached altair-5.2.0-py3-none-any.whl (996 kB)\n",
            "Requirement already satisfied: typer[all]<1.0,>=0.9 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from gradio) (0.9.0)\n",
            "Requirement already satisfied: importlib-resources<7.0,>=1.3 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from gradio) (6.1.1)\n",
            "Requirement already satisfied: fastapi in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from gradio) (0.109.0)\n",
            "Collecting python-multipart\n",
            "  Using cached python_multipart-0.0.6-py3-none-any.whl (45 kB)\n",
            "Requirement already satisfied: huggingface-hub>=0.19.3 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from gradio) (0.20.2)\n",
            "Collecting gradio-client==0.8.0\n",
            "  Using cached gradio_client-0.8.0-py3-none-any.whl (305 kB)\n",
            "Collecting jinja2<4.0\n",
            "  Downloading Jinja2-3.1.3-py3-none-any.whl (133 kB)\n",
            "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m133.2/133.2 KB\u001b[0m \u001b[31m917.4 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
            "\u001b[?25hCollecting ffmpy\n",
            "  Using cached ffmpy-0.3.1-py3-none-any.whl\n",
            "Collecting orjson~=3.0\n",
            "  Using cached orjson-3.9.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (138 kB)\n",
            "Requirement already satisfied: uvicorn>=0.14.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from gradio) (0.25.0)\n",
            "Requirement already satisfied: numpy~=1.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from gradio) (1.26.3)\n",
            "Collecting matplotlib~=3.0\n",
            "  Using cached matplotlib-3.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (11.6 MB)\n",
            "Collecting semantic-version~=2.0\n",
            "  Using cached semantic_version-2.10.0-py2.py3-none-any.whl (15 kB)\n",
            "Collecting pillow<11.0,>=8.0\n",
            "  Downloading pillow-10.2.0-cp310-cp310-manylinux_2_28_x86_64.whl (4.5 MB)\n",
            "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m4.5/4.5 MB\u001b[0m \u001b[31m1.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
            "\u001b[?25hCollecting aiofiles<24.0,>=22.0\n",
            "  Using cached aiofiles-23.2.1-py3-none-any.whl (15 kB)\n",
            "Collecting pandas<3.0,>=1.0\n",
            "  Using cached pandas-2.1.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (12.3 MB)\n",
            "Collecting markupsafe~=2.0\n",
            "  Using cached MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (25 kB)\n",
            "Requirement already satisfied: fsspec in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from gradio-client==0.8.0->gradio) (2023.12.2)\n",
            "Collecting websockets<12.0,>=10.0\n",
            "  Using cached websockets-11.0.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (129 kB)\n",
            "Collecting toolz\n",
            "  Using cached toolz-0.12.0-py3-none-any.whl (55 kB)\n",
            "Collecting jsonschema>=3.0\n",
            "  Using cached jsonschema-4.20.0-py3-none-any.whl (84 kB)\n",
            "Requirement already satisfied: tqdm>=4.42.1 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from huggingface-hub>=0.19.3->gradio) (4.66.1)\n",
            "Requirement already satisfied: filelock in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from huggingface-hub>=0.19.3->gradio) (3.13.1)\n",
            "Requirement already satisfied: requests in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from huggingface-hub>=0.19.3->gradio) (2.31.0)\n",
            "Collecting kiwisolver>=1.3.1\n",
            "  Using cached kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl (1.6 MB)\n",
            "Collecting pyparsing>=2.3.1\n",
            "  Using cached pyparsing-3.1.1-py3-none-any.whl (103 kB)\n",
            "Collecting contourpy>=1.0.1\n",
            "  Using cached contourpy-1.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (310 kB)\n",
            "Collecting cycler>=0.10\n",
            "  Using cached cycler-0.12.1-py3-none-any.whl (8.3 kB)\n",
            "Requirement already satisfied: python-dateutil>=2.7 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from matplotlib~=3.0->gradio) (2.8.2)\n",
            "Collecting fonttools>=4.22.0\n",
            "  Downloading fonttools-4.47.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (4.6 MB)\n",
            "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m4.6/4.6 MB\u001b[0m \u001b[31m2.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
            "\u001b[?25hCollecting pytz>=2020.1\n",
            "  Using cached pytz-2023.3.post1-py2.py3-none-any.whl (502 kB)\n",
            "Collecting tzdata>=2022.1\n",
            "  Using cached tzdata-2023.4-py2.py3-none-any.whl (346 kB)\n",
            "Requirement already satisfied: pydantic-core==2.14.6 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from pydantic>=2.0->gradio) (2.14.6)\n",
            "Requirement already satisfied: annotated-types>=0.4.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from pydantic>=2.0->gradio) (0.6.0)\n",
            "Requirement already satisfied: click<9.0.0,>=7.1.1 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from typer[all]<1.0,>=0.9->gradio) (8.1.7)\n",
            "Collecting shellingham<2.0.0,>=1.3.0\n",
            "  Using cached shellingham-1.5.4-py2.py3-none-any.whl (9.8 kB)\n",
            "Collecting colorama<0.5.0,>=0.4.3\n",
            "  Using cached colorama-0.4.6-py2.py3-none-any.whl (25 kB)\n",
            "Collecting rich<14.0.0,>=10.11.0\n",
            "  Using cached rich-13.7.0-py3-none-any.whl (240 kB)\n",
            "Requirement already satisfied: h11>=0.8 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from uvicorn>=0.14.0->gradio) (0.14.0)\n",
            "Requirement already satisfied: starlette<0.36.0,>=0.35.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from fastapi->gradio) (0.35.1)\n",
            "Requirement already satisfied: sniffio in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from httpx->gradio) (1.3.0)\n",
            "Requirement already satisfied: idna in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from httpx->gradio) (3.6)\n",
            "Requirement already satisfied: certifi in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from httpx->gradio) (2023.11.17)\n",
            "Collecting httpcore==1.*\n",
            "  Using cached httpcore-1.0.2-py3-none-any.whl (76 kB)\n",
            "Requirement already satisfied: anyio in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from httpx->gradio) (4.2.0)\n",
            "Collecting referencing>=0.28.4\n",
            "  Downloading referencing-0.32.1-py3-none-any.whl (26 kB)\n",
            "Collecting jsonschema-specifications>=2023.03.6\n",
            "  Using cached jsonschema_specifications-2023.12.1-py3-none-any.whl (18 kB)\n",
            "Requirement already satisfied: attrs>=22.2.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from jsonschema>=3.0->altair<6.0,>=4.2.0->gradio) (23.2.0)\n",
            "Collecting rpds-py>=0.7.1\n",
            "  Downloading rpds_py-0.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.2 MB)\n",
            "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.2/1.2 MB\u001b[0m \u001b[31m2.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0mm\n",
            "\u001b[?25hRequirement already satisfied: six>=1.5 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from python-dateutil>=2.7->matplotlib~=3.0->gradio) (1.16.0)\n",
            "Collecting markdown-it-py>=2.2.0\n",
            "  Using cached markdown_it_py-3.0.0-py3-none-any.whl (87 kB)\n",
            "Requirement already satisfied: pygments<3.0.0,>=2.13.0 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from rich<14.0.0,>=10.11.0->typer[all]<1.0,>=0.9->gradio) (2.17.2)\n",
            "Requirement already satisfied: exceptiongroup>=1.0.2 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from anyio->httpx->gradio) (1.2.0)\n",
            "Requirement already satisfied: urllib3<3,>=1.21.1 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from requests->huggingface-hub>=0.19.3->gradio) (2.1.0)\n",
            "Requirement already satisfied: charset-normalizer<4,>=2 in /home/vasim/Khatir/Programming/ML Projects/gemini-pro with docs/.venv/lib/python3.10/site-packages (from requests->huggingface-hub>=0.19.3->gradio) (3.3.2)\n",
            "Collecting mdurl~=0.1\n",
            "  Using cached mdurl-0.1.2-py3-none-any.whl (10.0 kB)\n",
            "Installing collected packages: pytz, pydub, ffmpy, websockets, tzdata, toolz, tomlkit, shellingham, semantic-version, rpds-py, python-multipart, pyparsing, pillow, orjson, mdurl, markupsafe, kiwisolver, httpcore, fonttools, cycler, contourpy, colorama, aiofiles, referencing, pandas, matplotlib, markdown-it-py, jinja2, httpx, rich, jsonschema-specifications, gradio-client, jsonschema, altair, gradio\n",
            "  Attempting uninstall: websockets\n",
            "    Found existing installation: websockets 12.0\n",
            "    Uninstalling websockets-12.0:\n",
            "      Successfully uninstalled websockets-12.0\n",
            "Successfully installed aiofiles-23.2.1 altair-5.2.0 colorama-0.4.6 contourpy-1.2.0 cycler-0.12.1 ffmpy-0.3.1 fonttools-4.47.2 gradio-4.14.0 gradio-client-0.8.0 httpcore-1.0.2 httpx-0.26.0 jinja2-3.1.3 jsonschema-4.20.0 jsonschema-specifications-2023.12.1 kiwisolver-1.4.5 markdown-it-py-3.0.0 markupsafe-2.1.3 matplotlib-3.8.2 mdurl-0.1.2 orjson-3.9.10 pandas-2.1.4 pillow-10.2.0 pydub-0.25.1 pyparsing-3.1.1 python-multipart-0.0.6 pytz-2023.3.post1 referencing-0.32.1 rich-13.7.0 rpds-py-0.17.1 semantic-version-2.10.0 shellingham-1.5.4 tomlkit-0.12.0 toolz-0.12.0 tzdata-2023.4 websockets-11.0.3\n",
            "Collecting faiss-cpu\n",
            "  Using cached faiss_cpu-1.7.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (17.6 MB)\n",
            "Installing collected packages: faiss-cpu\n",
            "Successfully installed faiss-cpu-1.7.4\n"
          ]
        }
      ],
      "source": [
        "# !pip install langchain\n",
        "# !pip install pypdf\n",
        "# !pip install langchain_google_genai\n",
        "# !pip install google-generativeai\n",
        "# !pip install chromadb\n",
        "# !pip install gradio\n",
        "# !pip install faiss-cpu"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 3,
      "metadata": {
        "id": "ylbT549oymIl"
      },
      "outputs": [
        {
          "data": {
            "text/plain": [
              "True"
            ]
          },
          "execution_count": 3,
          "metadata": {},
          "output_type": "execute_result"
        }
      ],
      "source": [
        "import os\n",
        "from langchain.document_loaders import (\n",
        "    PyPDFLoader,\n",
        "    TextLoader,\n",
        "    Docx2txtLoader\n",
        ")\n",
        "\n",
        "from langchain.text_splitter import CharacterTextSplitter\n",
        "# from PyPDF2 import PdfReader\n",
        "from langchain.text_splitter import RecursiveCharacterTextSplitter\n",
        "from langchain_google_genai import GoogleGenerativeAIEmbeddings\n",
        "import google.generativeai as genai\n",
        "from langchain.vectorstores import FAISS\n",
        "from langchain_google_genai import ChatGoogleGenerativeAI\n",
        "from langchain.chains.question_answering import load_qa_chain\n",
        "from langchain.prompts import PromptTemplate\n",
        "from langchain.memory import ConversationBufferMemory\n",
        "from dotenv import load_dotenv\n",
        "load_dotenv()"
      ]
    },
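    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "`load_dotenv()` above only reads the local `.env` file; the Gemini SDK still needs the key wired in. Below is a minimal sketch, assuming the key is stored under the name `GOOGLE_API_KEY` in that `.env` file (the LangChain Google GenAI wrappers also pick this variable up automatically)."
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {},
      "outputs": [],
      "source": [
        "# Sketch: configure the Gemini SDK explicitly.\n",
        "# Assumes GOOGLE_API_KEY is defined in the .env file loaded above.\n",
        "api_key = os.getenv(\"GOOGLE_API_KEY\")\n",
        "if api_key is None:\n",
        "    raise EnvironmentError(\"GOOGLE_API_KEY is not set; add it to the .env file.\")\n",
        "genai.configure(api_key=api_key)"
      ]
    },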
    {
      "cell_type": "code",
      "execution_count": 6,
      "metadata": {
        "id": "65o268jqzN7O"
      },
      "outputs": [],
      "source": [
        "def extract_text(docs):\n",
        "    \"\"\"Load every PDF, Word and text file in the `docs` directory into LangChain documents.\"\"\"\n",
        "    documents = []\n",
        "    files = os.listdir(docs)\n",
        "\n",
        "    if len(files) == 0:\n",
        "        raise ValueError(f\"No files found in directory: {docs}\")\n",
        "\n",
        "    for file in files:\n",
        "        file_path = os.path.join(docs, file)\n",
        "        if file.endswith(\".pdf\"):\n",
        "            loader = PyPDFLoader(file_path)\n",
        "        elif file.endswith((\".docx\", \".doc\")):\n",
        "            loader = Docx2txtLoader(file_path)\n",
        "        elif file.endswith(\".txt\"):\n",
        "            loader = TextLoader(file_path)\n",
        "        else:\n",
        "            continue  # skip unsupported file types\n",
        "        documents.extend(loader.load())\n",
        "    return documents"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 7,
      "metadata": {
        "id": "0gT5m9cD_cM7"
      },
      "outputs": [],
      "source": [
        "doc_dir = \"../docs\""
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 8,
      "metadata": {
        "id": "Svi3z1Rdzjbm"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "['profile.txt', 'llm-research.pdf']\n",
            "../docs\n"
          ]
        }
      ],
      "source": [
        "documents = extract_text(doc_dir)"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 9,
      "metadata": {
        "id": "CRe5WNKC0D88"
      },
      "outputs": [],
      "source": [
        "def get_text_chunks(text):\n",
        "    # Split documents into ~10k-character chunks with 1k overlap for retrieval.\n",
        "    text_splitter = RecursiveCharacterTextSplitter(chunk_size=10000, chunk_overlap=1000)\n",
        "    chunks = text_splitter.split_documents(text)\n",
        "    return chunks\n",
        "\n",
        "def save_in_faiss(text_chunks, save=True):\n",
        "    # Embed the chunks with Gemini embeddings, index them in FAISS,\n",
        "    # and persist the index to disk when save=True.\n",
        "    embeddings = GoogleGenerativeAIEmbeddings(model=\"models/embedding-001\")\n",
        "    vector_store = FAISS.from_documents(text_chunks, embedding=embeddings)\n",
        "    if save:\n",
        "        vector_store.save_local(\"faiss_index\")\n",
        "    return vector_store"
      ]
    },
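    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "Since `save_in_faiss` persists the index to the `faiss_index` folder, it can be reloaded later without re-embedding the documents. A minimal sketch (note: depending on the installed LangChain version, `FAISS.load_local` may also require `allow_dangerous_deserialization=True`):"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {},
      "outputs": [],
      "source": [
        "# Sketch: reload the FAISS index persisted by save_in_faiss.\n",
        "# Re-uses the same embedding model so query vectors match the stored ones.\n",
        "embeddings = GoogleGenerativeAIEmbeddings(model=\"models/embedding-001\")\n",
        "reloaded_store = FAISS.load_local(\"faiss_index\", embeddings)\n",
        "# reloaded_store.similarity_search(\"some query\") now works as before."
      ]
    },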
    {
      "cell_type": "code",
      "execution_count": 10,
      "metadata": {},
      "outputs": [],
      "source": [
        "def process_files(docs):\n",
        "    documents = extract_text(docs)\n",
        "    text_chunks = get_text_chunks(documents)\n",
        "    vector_store = save_in_faiss(text_chunks)\n",
        "    return vector_store"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 17,
      "metadata": {},
      "outputs": [],
      "source": [
        "# Gemini chat model plus a memory-backed \"stuff\" QA chain over retrieved chunks.\n",
        "llm = ChatGoogleGenerativeAI(model=\"gemini-pro\", temperature=0.7)\n",
        "\n",
        "template = \"\"\"You are a chatbot having a conversation with a human.\n",
        "\n",
        "Given the following extracted parts of a long document and a question, create a final answer.\n",
        "\n",
        "{context}\n",
        "\n",
        "{chat_history}\n",
        "Human: {human_input}\n",
        "Chatbot:\"\"\"\n",
        "\n",
        "prompt = PromptTemplate(\n",
        "    input_variables=[\"chat_history\", \"human_input\", \"context\"], template=template\n",
        ")\n",
        "memory = ConversationBufferMemory(memory_key=\"chat_history\", input_key=\"human_input\")\n",
        "chain = load_qa_chain(\n",
        "    llm=llm, chain_type=\"stuff\", memory=memory, prompt=prompt\n",
        ")"
      ]
    },
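    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "To see exactly what the \"stuff\" chain sends to Gemini, the prompt template can be rendered by hand. A small sketch with placeholder values (not taken from the indexed documents):"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {},
      "outputs": [],
      "source": [
        "# Sketch: preview the rendered prompt; all values below are placeholders.\n",
        "preview = prompt.format(\n",
        "    context=\"<retrieved document chunks go here>\",\n",
        "    chat_history=\"\",\n",
        "    human_input=\"What does the document say about LLM research?\",\n",
        ")\n",
        "print(preview)"
      ]
    },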
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {},
      "outputs": [],
      "source": [
        "def build_qa_chain(llm=llm, prompt=prompt, memory=memory):\n",
        "    chain = load_qa_chain(\n",
        "        llm=llm, chain_type=\"stuff\", memory=memory, prompt=prompt\n",
        "    )\n",
        "    return chain\n",
        "\n",
        "def build_agent(query, db):\n",
        "    # Retrieve the chunks most similar to the query and answer with the QA chain.\n",
        "    docs = db.similarity_search(query)\n",
        "    response = chain({\"input_documents\": docs, \"human_input\": query}, return_only_outputs=True)\n",
        "    return response['output_text']"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 12,
      "metadata": {},
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "['profile.txt', 'llm-research.pdf']\n",
            "../docs\n"
          ]
        }
      ],
      "source": [
        "db = process_files(doc_dir)"
      ]
    },
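    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "A quick sanity check on the freshly built store: the underlying FAISS index exposes `ntotal`, the number of embedded chunks, and `similarity_search` returns the closest chunks for a query. (A sketch; the output depends on the documents in `../docs`.)"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {},
      "outputs": [],
      "source": [
        "# Sketch: inspect the vector store built from ../docs.\n",
        "print(\"Indexed chunks:\", db.index.ntotal)\n",
        "for doc in db.similarity_search(\"Who is Vasim?\", k=2):\n",
        "    print(doc.metadata.get(\"source\"), \"->\", doc.page_content[:80])"
      ]
    },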
    {
      "cell_type": "code",
      "execution_count": 16,
      "metadata": {},
      "outputs": [
        {
          "data": {
            "text/plain": [
              "{'output_text': 'I do not have any information about Vasim being a plumber.'}"
            ]
          },
          "execution_count": 16,
          "metadata": {},
          "output_type": "execute_result"
        }
      ],
      "source": [
        "query = \"is vasim plumber\"\n",
        "docs = db.similarity_search(query)\n",
        "chain({\"input_documents\": docs, \"human_input\": query}, return_only_outputs=True)"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 18,
      "metadata": {},
      "outputs": [],
      "source": [
        "def answer_query(message, history):\n",
        "    # Gradio chat callback: retrieve relevant chunks and answer with the memory-backed QA chain.\n",
        "    docs = db.similarity_search(message)\n",
        "    response = chain({\"input_documents\": docs, \"human_input\": message}, return_only_outputs=True)\n",
        "    return response['output_text']"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 34,
      "metadata": {
        "colab": {
          "background_save": true
        },
        "id": "a8tNUutJB9EA"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Chat interface is cool.\n",
            "Running on local URL:  http://127.0.0.1:7875\n",
            "\n",
            "To create a public link, set `share=True` in `launch()`.\n"
          ]
        },
        {
          "data": {
            "text/html": [
              "<div><iframe src=\"http://127.0.0.1:7875/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
            ],
            "text/plain": [
              "<IPython.core.display.HTML object>"
            ]
          },
          "metadata": {},
          "output_type": "display_data"
        }
      ],
      "source": [
        "# Gradio App\n",
        "import gradio as gr\n",
        "\n",
        "title = \"\"  # placeholder for the UI header\n",
        "description = \"Chat with any docs\"\n",
        "\n",
        "# def answer_query(message, history):\n",
        "#     docs = db.similarity_search(message)\n",
        "#     message = agent(\n",
        "#         {\"input_documents\":docs, \"question\": message}\n",
        "#         ,return_only_outputs=True)\n",
        "#     return message['output_text']\n",
        "\n",
        "\n",
        "chatbot = gr.Chatbot(label=\"ExploreText\")\n",
        "\n",
        "with gr.Blocks(\n",
        "    title=\"ExploreText\",\n",
        "    ) as textbot:\n",
        "\n",
        "    gr.Markdown(\"# <center> Welcome to ExploreDoc Web App</center>\")\n",
        "    \n",
        "    with gr.Accordion(\"Upload a file here\", open=False):\n",
        "        file_output = gr.File(scale=1)\n",
        "        upload_button = gr.UploadButton(\"Click to Upload a File\", file_types=[\"txt\",\"doc\",\"pdf\"])\n",
        "        upload_button.upload(process_files, upload_button, file_output)\n",
        "        # gr.Info(\"Click on Chat with PDF tab\")\n",
        "\n",
        "    # with gr.Row(\"Chat with Text\"):\n",
        "    gr.ChatInterface(fn=answer_query, chatbot=chatbot, submit_btn=\"Ask\", undo_btn=None, retry_btn=None, clear_btn=None)\n",
        "    print(\"Chat interface is cool.\")\n",
        "    gr.Markdown(\"<center>  Developed by <a href='https://92-vasim.github.io' target='_blank'>Mohammed Vasim<a/> | AI Engineer & Computer Vision Engineer @ ZestIoT.  </center>\")\n",
        "        \n",
        "\n",
        "if __name__ == \"__main__\":\n",
        "    textbot.queue().launch()\n",
        "\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {},
      "outputs": [],
      "source": []
    }
  ],
  "metadata": {
    "colab": {
      "provenance": []
    },
    "kernelspec": {
      "display_name": "Python 3",
      "name": "python3"
    },
    "language_info": {
      "codemirror_mode": {
        "name": "ipython",
        "version": 3
      },
      "file_extension": ".py",
      "mimetype": "text/x-python",
      "name": "python",
      "nbconvert_exporter": "python",
      "pygments_lexer": "ipython3",
      "version": "3.10.12"
    }
  },
  "nbformat": 4,
  "nbformat_minor": 0
}