coeuslearning committed
Commit e3382f4
1 Parent(s): 2f4bac0

Update app.py

Files changed (1)
  1. app.py +5 -5
app.py CHANGED
@@ -10,8 +10,8 @@ from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer
 
 HF_TOKEN = "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" ##Llama Write Token
 
-MAX_MAX_NEW_TOKENS = 2048
-DEFAULT_MAX_NEW_TOKENS = 1024
+MAX_MAX_NEW_TOKENS = 8192
+DEFAULT_MAX_NEW_TOKENS = 4096
 MAX_INPUT_TOKEN_LENGTH = int(os.getenv("MAX_INPUT_TOKEN_LENGTH", "4096"))
 
 DESCRIPTION = """\
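This hunk quadruples both the hard ceiling and the default for generated tokens. In the stock Llama-2 chat Space template these constants usually feed a Gradio slider that sets the per-request generation budget; the sketch below shows that wiring under that assumption (everything except the three constants is illustrative, not taken from this diff).

```python
# Sketch of the usual wiring for these constants in a Gradio chat Space.
# Assumption: the slider variable name and layout are illustrative,
# not from this app.py.
import os

import gradio as gr

MAX_MAX_NEW_TOKENS = 8192        # hard upper bound the UI will allow
DEFAULT_MAX_NEW_TOKENS = 4096    # value the slider starts at
MAX_INPUT_TOKEN_LENGTH = int(os.getenv("MAX_INPUT_TOKEN_LENGTH", "4096"))

max_new_tokens_slider = gr.Slider(
    label="Max new tokens",
    minimum=1,
    maximum=MAX_MAX_NEW_TOKENS,
    step=1,
    value=DEFAULT_MAX_NEW_TOKENS,
)
```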
@@ -33,7 +33,7 @@ def generate(
     message: str,
     chat_history: list[tuple[str, str]],
     system_prompt: str,
-    max_new_tokens: int = 1024,
+    max_new_tokens: int = 8192,
    temperature: float = 0.6,
     top_p: float = 0.9,
     top_k: int = 50,
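The default in generate()'s signature moves from 1024 to 8192 to match, so the function honors the new budget even when no slider value is passed in. Below is a minimal sketch of how this parameter typically reaches model.generate() in this family of Spaces, assuming the usual TextIteratorStreamer-based body; the app's actual implementation may differ. Note that 8192 new tokens exceeds Llama-2's 4096-token context window, so in practice generation stops earlier at the context limit.

```python
# Minimal sketch, assuming the common streaming-generation body used by
# Llama-2 chat Spaces; model_id and the prompt-trimming policy are assumptions.
from threading import Thread

from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer

MAX_INPUT_TOKEN_LENGTH = 4096
model_id = "meta-llama/Llama-2-7b-chat-hf"  # assumed model for this template
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto")

def generate(message: str, max_new_tokens: int = 8192,
             temperature: float = 0.6, top_p: float = 0.9, top_k: int = 50):
    input_ids = tokenizer(message, return_tensors="pt").input_ids
    # Keep only the most recent tokens so the prompt fits the context window.
    input_ids = input_ids[:, -MAX_INPUT_TOKEN_LENGTH:].to(model.device)

    streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
    generate_kwargs = dict(
        input_ids=input_ids,
        streamer=streamer,
        max_new_tokens=max_new_tokens,  # raised from 1024 to 8192 by this commit
        do_sample=True,
        temperature=temperature,
        top_p=top_p,
        top_k=top_k,
    )
    # Run generation in a background thread and stream partial text out.
    Thread(target=model.generate, kwargs=generate_kwargs).start()
    outputs = []
    for text in streamer:
        outputs.append(text)
        yield "".join(outputs)
```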
@@ -95,8 +95,8 @@ def mask_with_protecto(text_for_prompt):
         # Parse the masked result from the API response and format it for display
         masked_result = response.json()
         final_result = json.dumps(masked_result, indent=4)
-        # return(str(masked_result["data"][0]["token_value"]))
-        return(str(masked_result))
+        return_value = str(masked_result["data"][0]["token_value"])
+        return(return_value)
     else:
         # Return an error message if the API request was not successful.
         return(str(response.status_code))
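The final hunk flips which return path is live: the previously commented-out extraction of the masked value now executes, so the function returns only data[0].token_value from the Protecto response rather than the whole JSON document (final_result is still computed but goes unused in this branch). Below is a hedged sketch of the surrounding function, reconstructed from the parsing shown here; the endpoint URL, request payload, and auth header are assumptions, not confirmed by the diff.

```python
# Hedged sketch of mask_with_protecto(); only the response parsing is taken
# from the diff. PROTECTO_API_URL, the payload shape, and the auth header
# are assumptions for illustration.
import requests

PROTECTO_API_URL = "https://example.protecto.ai/api/vault/mask"  # assumed endpoint
AUTH_KEY = "<your-protecto-auth-key>"  # assumed bearer token

def mask_with_protecto(text_for_prompt: str) -> str:
    payload = {"mask": [{"value": text_for_prompt}]}  # assumed request shape
    headers = {
        "Authorization": f"Bearer {AUTH_KEY}",
        "Content-Type": "application/json",
    }
    response = requests.put(PROTECTO_API_URL, json=payload, headers=headers)
    if response.status_code == 200:
        # As of this commit: return only the masked value, not the full JSON.
        masked_result = response.json()
        return str(masked_result["data"][0]["token_value"])
    else:
        # Return an error message if the API request was not successful.
        return str(response.status_code)
```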
 