{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "97"
      ]
     },
     "execution_count": 1,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "import utils\n",
    "\n",
    "# Load API keys (e.g. OPENAI_API_KEY) from the environment / .env file.\n",
    "utils.load_env()\n",
    "\n",
    "from langchain.llms import OpenAI\n",
    "\n",
    "# Create an LLM wrapper.\n",
    "# NOTE: text-davinci-003 is deprecated by OpenAI; swap in a current\n",
    "# completions model if this call starts failing.\n",
    "openai = OpenAI(model_name=\"text-davinci-003\")\n",
    "\n",
    "# Total context window (prompt tokens + completion tokens) for this model.\n",
    "max_context_size = openai.modelname_to_contextsize(openai.model_name)\n",
    "\n",
    "# Option 1: set max_tokens manually. The completion budget must leave room\n",
    "# for the prompt: max_tokens + prompt_token_count <= max_context_size, so\n",
    "# max_context_size - 1 is only safe for a (near-)empty prompt.\n",
    "openai.max_tokens = max_context_size - 1\n",
    "\n",
    "# Option 2 (preferred): let max_tokens_for_prompt compute the remaining\n",
    "# budget for a concrete prompt. This overrides the manual value above.\n",
    "prompt = \"Tell me a joke.\" * 800\n",
    "openai.max_tokens = openai.max_tokens_for_prompt(prompt)\n",
    "openai.max_tokens"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.11.9"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}