TheBloke commited on
Commit
965a939
1 Parent(s): 767cc69

GPTQ model commit

Browse files
Files changed (1) hide show
  1. non_langchain_example.py +142 -0
non_langchain_example.py ADDED
@@ -0,0 +1,142 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Literal
2
+
3
+ import math
4
+
5
+ import inspect
6
+
7
+ from transformers import pipeline
8
+
9
+
10
+ ##########################################################
11
+ # Step 1: Define the functions you want to articulate. ###
12
+ ##########################################################
13
+
14
+
15
+ def calculator(
16
+ input_a: float,
17
+ input_b: float,
18
+ operation: Literal["add", "subtract", "multiply", "divide"],
19
+ ):
20
+ """
21
+ Computes a calculation.
22
+
23
+ Args:
24
+ input_a (float) : Required. The first input.
25
+ input_b (float) : Required. The second input.
26
+ operation (string): The operation. Choices include: add to add two numbers, subtract to subtract two numbers, multiply to multiply two numbers, and divide to divide them.
27
+ """
28
+ match operation:
29
+ case "add":
30
+ return input_a + input_b
31
+ case "subtract":
32
+ return input_a - input_b
33
+ case "multiply":
34
+ return input_a * input_b
35
+ case "divide":
36
+ return input_a / input_b
37
+
38
+
39
def cylinder_volume(radius, height):
    """
    Calculate the volume of a cylinder.

    Parameters:
    - radius (float): The radius of the base of the cylinder.
    - height (float): The height of the cylinder.

    Returns:
    - float: The volume of the cylinder.

    Raises:
    - ValueError: If either dimension is negative.
    """
    # Both dimensions must be non-negative for a physical cylinder.
    if min(radius, height) < 0:
        raise ValueError("Radius and height must be non-negative.")

    base_area = math.pi * (radius**2)
    return base_area * height
55
+
56
+
57
+ #############################################################
58
+ # Step 2: Let's define some utils for building the prompt ###
59
+ #############################################################
60
+
61
+
62
def format_functions_for_prompt(*functions):
    """
    Render each function as an OPTION entry (its source code bracketed by
    <func_start>/<func_end>, its docstring bracketed by
    <docstring_start>/<docstring_end>) and join the entries with newlines.
    """

    def render(func):
        # inspect pulls the literal source text and the cleaned docstring.
        source_code = inspect.getsource(func)
        docstring = inspect.getdoc(func)
        return (
            f"OPTION:\n<func_start>{source_code}<func_end>"
            f"\n<docstring_start>\n{docstring}\n<docstring_end>"
        )

    return "\n".join(render(func) for func in functions)
71
+
72
+
73
+ ##############################
74
+ # Step 3: Construct Prompt ###
75
+ ##############################
76
+
77
+
78
def construct_prompt(user_query: str):
    """
    Build the full <human> prompt: every candidate function rendered as an
    OPTION, followed by the user query and the instruction to pick one.
    """
    options = format_functions_for_prompt(calculator, cylinder_volume)
    body = options + f"\n\nUser Query: Question: {user_query}\n"

    instruction = (
        "Please pick a function from the above options that best answers "
        "the user query and fill in the appropriate arguments.<human_end>"
    )
    return "<human>:\n" + body + instruction
88
+
89
+
90
+ #######################################
91
+ # Step 4: Execute the function call ###
92
+ #######################################
93
+
94
+
95
def execute_function_call(model_output):
    """
    Extract the function call from the model's generated text and evaluate it.

    Args:
        model_output: A transformers text-generation pipeline result — a list
            whose first element is a dict with a "generated_text" key.

    Returns:
        The value produced by the evaluated call, or the error message as a
        string if extraction or evaluation fails.
    """
    try:
        # Take the second line ("Initial Answer: <call>") and strip the label.
        # Everything after "Reflection" is ignored since it is not essential.
        function_call = (
            model_output[0]["generated_text"]
            .strip()
            .split("\n")[1]
            .replace("Initial Answer:", "")
            .strip()
        )
        # SECURITY: eval() executes arbitrary model-generated code. Acceptable
        # only for a trusted local demo; never run this on untrusted output.
        return eval(function_call)
    except Exception as e:
        # Previously only eval() was guarded, so malformed output with fewer
        # than two lines raised an unhandled IndexError; now any failure is
        # reported as a string, matching the existing best-effort contract.
        return str(e)
109
+
110
+
111
if __name__ == "__main__":
    # Build the model.
    # NOTE: loads the full 13B model onto a CUDA device; requires a GPU.
    text_gen = pipeline(
        "text-generation",
        model="Nexusflow/NexusRaven-13B",
        device="cuda",
    )

    # Compute a simple equation (should select `calculator`).
    prompt = construct_prompt("What is 1+10?")
    # NOTE(review): unlike the second generation below, this call passes no
    # stop=["\nReflection:"] sequence — presumably an oversight; confirm.
    model_output = text_gen(
        prompt, do_sample=False, max_new_tokens=400, return_full_text=False
    )
    result = execute_function_call(model_output)

    print("Model Output:", model_output)
    print("Execution Result:", result)

    # A word problem that should map onto `cylinder_volume`.
    prompt = construct_prompt(
        "I have a cake that is about 3 centimenters high and 200 centimeters in diameter. How much cake do I have?"
    )
    model_output = text_gen(
        prompt,
        do_sample=False,
        max_new_tokens=400,
        return_full_text=False,
        # Stop generating once the model starts its "Reflection" section,
        # which execute_function_call discards anyway.
        stop=["\nReflection:"],
    )
    result = execute_function_call(model_output)

    print("Model Output:", model_output)
    print("Execution Result:", result)