t.me/xtekky committed on
Commit
a5b4d8b
2 Parent(s): e4aefb0 d8ec09c

Merge pull request #199 from Andrew-Tsegaye/main

Browse files

As you said, I updated the ./unfinished directory to resemble a professional codebase.

gui/streamlit_app.py CHANGED
@@ -6,25 +6,29 @@ sys.path.append(os.path.join(os.path.dirname(__file__), os.path.pardir))
6
  import streamlit as st
7
  import phind
8
 
9
- phind.cf_clearance = ''
10
- phind.user_agent = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36'
 
11
 
12
- def phind_get_answer(question:str)->str:
13
- # set cf_clearance cookie
 
14
  try:
15
-
16
  result = phind.Completion.create(
17
- model = 'gpt-4',
18
- prompt = question,
19
- results = phind.Search.create(question, actualSearch = True),
20
- creative = False,
21
- detailed = False,
22
- codeContext = '')
 
23
  return result.completion.choices[0].text
24
-
25
  except Exception as e:
26
- return 'An error occured, please make sure you are using a cf_clearance token and correct useragent | %s' % e
 
 
27
 
 
28
  st.set_page_config(
29
  page_title="gpt4freeGUI",
30
  initial_sidebar_state="expanded",
@@ -35,16 +39,18 @@ st.set_page_config(
35
  'About': "### gptfree GUI"
36
  }
37
  )
38
-
39
  st.header('GPT4free GUI')
40
 
41
- question_text_area = st.text_area('🤖 Ask Any Question :', placeholder='Explain quantum computing in 50 words')
 
 
42
  if st.button('🧠 Think'):
43
- answer = phind_get_answer(question_text_area)
 
44
  st.caption("Answer :")
45
  st.markdown(answer)
46
 
47
-
48
  hide_streamlit_style = """
49
  <style>
50
  footer {visibility: hidden;}
 
6
  import streamlit as st
7
  import phind
8
 
9
+ # Set cloudflare clearance and user agent
10
+ phind.cloudflare_clearance = ''
11
+ phind.phind_api = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36'
12
 
13
+
14
+ def get_answer(question: str) -> str:
15
+ # Set cloudflare clearance cookie and get answer from GPT-4 model
16
  try:
 
17
  result = phind.Completion.create(
18
+ model='gpt-4',
19
+ prompt=question,
20
+ results=phind.Search.create(question, actualSearch=True),
21
+ creative=False,
22
+ detailed=False,
23
+ codeContext=''
24
+ )
25
  return result.completion.choices[0].text
 
26
  except Exception as e:
27
+ # Return error message if an exception occurs
28
+ return f'An error occurred: {e}. Please make sure you are using a valid cloudflare clearance token and user agent.'
29
+
30
 
31
+ # Set page configuration and add header
32
  st.set_page_config(
33
  page_title="gpt4freeGUI",
34
  initial_sidebar_state="expanded",
 
39
  'About': "### gptfree GUI"
40
  }
41
  )
 
42
  st.header('GPT4free GUI')
43
 
44
+ # Add text area for user input and button to get answer
45
+ question_text_area = st.text_area(
46
+ '🤖 Ask Any Question :', placeholder='Explain quantum computing in 50 words')
47
  if st.button('🧠 Think'):
48
+ answer = get_answer(question_text_area)
49
+ # Display answer
50
  st.caption("Answer :")
51
  st.markdown(answer)
52
 
53
+ # Hide Streamlit footer
54
  hide_streamlit_style = """
55
  <style>
56
  footer {visibility: hidden;}
quora/mail.py CHANGED
@@ -38,7 +38,7 @@ class Emailnator:
38
  return self.email
39
 
40
  def get_message(self):
41
- print("waiting for code...")
42
 
43
  while True:
44
  sleep(2)
@@ -49,6 +49,7 @@ class Emailnator:
49
  mail_token = loads(mail_token.text)["messageData"]
50
 
51
  if len(mail_token) == 2:
 
52
  print(mail_token[1]["messageID"])
53
  break
54
 
@@ -63,4 +64,19 @@ class Emailnator:
63
  return mail_context.text
64
 
65
  def get_verification_code(self):
66
- return findall(r';">(\d{6,7})</div>', self.get_message())[0]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
38
  return self.email
39
 
40
  def get_message(self):
41
+ print("Waiting for message...")
42
 
43
  while True:
44
  sleep(2)
 
49
  mail_token = loads(mail_token.text)["messageData"]
50
 
51
  if len(mail_token) == 2:
52
+ print("Message received!")
53
  print(mail_token[1]["messageID"])
54
  break
55
 
 
64
  return mail_context.text
65
 
66
  def get_verification_code(self):
67
+ message = self.get_message()
68
+ code = findall(r';">(\d{6,7})</div>', message)[0]
69
+ print(f"Verification code: {code}")
70
+ return code
71
+
72
+ def clear_inbox(self):
73
+ print("Clearing inbox...")
74
+ self.client.post(
75
+ "https://www.emailnator.com/delete-all",
76
+ json={"email": self.email},
77
+ )
78
+ print("Inbox cleared!")
79
+
80
+ def __del__(self):
81
+ if self.email:
82
+ self.clear_inbox()
unfinished/bard/__init__.py CHANGED
@@ -1,19 +1,19 @@
1
- from requests import Session
2
- from re import search
3
- from random import randint
4
- from json import dumps, loads
5
- from random import randint
6
  from urllib.parse import urlencode
7
- from dotenv import load_dotenv; load_dotenv()
8
- from os import getenv
9
 
10
  from bard.typings import BardResponse
11
 
 
12
  token = getenv('1psid')
13
  proxy = getenv('proxy')
14
 
15
  temperatures = {
16
- 0 : "Generate text strictly following known patterns, with no creativity.",
17
  0.1: "Produce text adhering closely to established patterns, allowing minimal creativity.",
18
  0.2: "Create text with modest deviations from familiar patterns, injecting a slight creative touch.",
19
  0.3: "Craft text with a mild level of creativity, deviating somewhat from common patterns.",
@@ -23,93 +23,70 @@ temperatures = {
23
  0.7: "Produce text favoring creativity over typical patterns for more original results.",
24
  0.8: "Create text heavily focused on creativity, with limited concern for familiar patterns.",
25
  0.9: "Craft text with a strong emphasis on unique and inventive ideas, largely ignoring established patterns.",
26
- 1 : "Generate text with maximum creativity, disregarding any constraints of known patterns or structures."
27
  }
28
 
 
29
  class Completion:
30
- # def __init__(self, _token, proxy: str or None = None) -> None:
31
- # self.client = Session()
32
- # self.client.proxies = {
33
- # 'http': f'http://{proxy}',
34
- # 'https': f'http://{proxy}' } if proxy else None
35
-
36
- # self.client.headers = {
37
- # 'authority' : 'bard.google.com',
38
- # 'content-type' : 'application/x-www-form-urlencoded;charset=UTF-8',
39
- # 'origin' : 'https://bard.google.com',
40
- # 'referer' : 'https://bard.google.com/',
41
- # 'user-agent' : 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36',
42
- # 'x-same-domain' : '1',
43
- # 'cookie' : f'__Secure-1PSID={_token}'
44
- # }
45
-
46
- # self.snlm0e = self.__init_client()
47
- # self.conversation_id = ''
48
- # self.response_id = ''
49
- # self.choice_id = ''
50
- # self.reqid = randint(1111, 9999)
51
-
52
  def create(
53
- prompt : str = 'hello world',
54
- temperature : int = None,
55
- conversation_id : str = '',
56
- response_id : str = '',
57
- choice_id : str = '') -> BardResponse:
58
-
59
  if temperature:
60
  prompt = f'''settings: follow these settings for your response: [temperature: {temperature} - {temperatures[temperature]}] | prompt : {prompt}'''
61
-
62
- client = Session()
63
  client.proxies = {
64
  'http': f'http://{proxy}',
65
- 'https': f'http://{proxy}' } if proxy else None
66
 
67
  client.headers = {
68
- 'authority' : 'bard.google.com',
69
- 'content-type' : 'application/x-www-form-urlencoded;charset=UTF-8',
70
- 'origin' : 'https://bard.google.com',
71
- 'referer' : 'https://bard.google.com/',
72
- 'user-agent' : 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36',
73
- 'x-same-domain' : '1',
74
- 'cookie' : f'__Secure-1PSID={token}'
75
  }
76
 
77
- snlm0e = search(r'SNlM0e\":\"(.*?)\"', client.get('https://bard.google.com/').text).group(1)
78
-
 
79
  params = urlencode({
80
- 'bl' : 'boq_assistant-bard-web-server_20230326.21_p0',
81
- '_reqid' : randint(1111, 9999),
82
- 'rt' : 'c',
83
  })
84
 
85
- response = client.post(f'https://bard.google.com/_/BardChatUi/data/assistant.lamda.BardFrontendService/StreamGenerate?{params}',
86
- data = {
87
- 'at': snlm0e,
88
- 'f.req': dumps([None, dumps([
89
- [prompt],
90
- None,
91
- [conversation_id, response_id, choice_id],
92
- ])
93
- ])
94
- }
95
- )
96
-
97
  chat_data = loads(response.content.splitlines()[3])[0][2]
98
- if not chat_data: print('error, retrying'); Completion.create(prompt, temperature, conversation_id, response_id, choice_id)
 
 
 
99
 
100
  json_chat_data = loads(chat_data)
101
  results = {
102
- 'content' : json_chat_data[0][0],
103
- 'conversation_id' : json_chat_data[1][0],
104
- 'response_id' : json_chat_data[1][1],
105
- 'factualityQueries' : json_chat_data[3],
106
- 'textQuery' : json_chat_data[2][0] if json_chat_data[2] is not None else '',
107
- 'choices' : [{'id': i[0], 'content': i[1]} for i in json_chat_data[4]],
108
  }
109
-
110
- # self.conversation_id = results['conversation_id']
111
- # self.response_id = results['response_id']
112
- # self.choice_id = results['choices'][0]['id']
113
- # self.reqid += 100000
114
 
115
  return BardResponse(results)
 
1
+ from requests import Session
2
+ from re import search
3
+ from random import randint
4
+ from json import dumps, loads
 
5
  from urllib.parse import urlencode
6
+ from dotenv import load_dotenv
7
+ from os import getenv
8
 
9
  from bard.typings import BardResponse
10
 
11
+ load_dotenv()
12
  token = getenv('1psid')
13
  proxy = getenv('proxy')
14
 
15
  temperatures = {
16
+ 0: "Generate text strictly following known patterns, with no creativity.",
17
  0.1: "Produce text adhering closely to established patterns, allowing minimal creativity.",
18
  0.2: "Create text with modest deviations from familiar patterns, injecting a slight creative touch.",
19
  0.3: "Craft text with a mild level of creativity, deviating somewhat from common patterns.",
 
23
  0.7: "Produce text favoring creativity over typical patterns for more original results.",
24
  0.8: "Create text heavily focused on creativity, with limited concern for familiar patterns.",
25
  0.9: "Craft text with a strong emphasis on unique and inventive ideas, largely ignoring established patterns.",
26
+ 1: "Generate text with maximum creativity, disregarding any constraints of known patterns or structures."
27
  }
28
 
29
+
30
  class Completion:
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
31
  def create(
32
+ prompt: str = 'hello world',
33
+ temperature: int = None,
34
+ conversation_id: str = '',
35
+ response_id: str = '',
36
+ choice_id: str = '') -> BardResponse:
37
+
38
  if temperature:
39
  prompt = f'''settings: follow these settings for your response: [temperature: {temperature} - {temperatures[temperature]}] | prompt : {prompt}'''
40
+
41
+ client = Session()
42
  client.proxies = {
43
  'http': f'http://{proxy}',
44
+ 'https': f'http://{proxy}'} if proxy else None
45
 
46
  client.headers = {
47
+ 'authority': 'bard.google.com',
48
+ 'content-type': 'application/x-www-form-urlencoded;charset=UTF-8',
49
+ 'origin': 'https://bard.google.com',
50
+ 'referer': 'https://bard.google.com/',
51
+ 'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36',
52
+ 'x-same-domain': '1',
53
+ 'cookie': f'__Secure-1PSID={token}'
54
  }
55
 
56
+ snlm0e = search(r'SNlM0e\":\"(.*?)\"',
57
+ client.get('https://bard.google.com/').text).group(1)
58
+
59
  params = urlencode({
60
+ 'bl': 'boq_assistant-bard-web-server_20230326.21_p0',
61
+ '_reqid': randint(1111, 9999),
62
+ 'rt': 'c',
63
  })
64
 
65
+ response = client.post(f'https://bard.google.com/_/BardChatUi/data/assistant.lamda.BardFrontendService/StreamGenerate?{params}',
66
+ data={
67
+ 'at': snlm0e,
68
+ 'f.req': dumps([None, dumps([
69
+ [prompt],
70
+ None,
71
+ [conversation_id, response_id, choice_id],
72
+ ])])
73
+ }
74
+ )
75
+
 
76
  chat_data = loads(response.content.splitlines()[3])[0][2]
77
+ if not chat_data:
78
+ print('error, retrying')
79
+ Completion.create(prompt, temperature,
80
+ conversation_id, response_id, choice_id)
81
 
82
  json_chat_data = loads(chat_data)
83
  results = {
84
+ 'content': json_chat_data[0][0],
85
+ 'conversation_id': json_chat_data[1][0],
86
+ 'response_id': json_chat_data[1][1],
87
+ 'factualityQueries': json_chat_data[3],
88
+ 'textQuery': json_chat_data[2][0] if json_chat_data[2] is not None else '',
89
+ 'choices': [{'id': i[0], 'content': i[1]} for i in json_chat_data[4]],
90
  }
 
 
 
 
 
91
 
92
  return BardResponse(results)
unfinished/bard/typings.py CHANGED
@@ -1,15 +1,54 @@
 
 
 
1
  class BardResponse:
2
- def __init__(self, json_dict):
3
- self.json = json_dict
4
-
5
- self.content = json_dict.get('content')
6
- self.conversation_id = json_dict.get('conversation_id')
7
- self.response_id = json_dict.get('response_id')
 
 
 
 
 
8
  self.factuality_queries = json_dict.get('factualityQueries', [])
9
- self.text_query = json_dict.get('textQuery', [])
10
- self.choices = [self.BardChoice(choice) for choice in json_dict.get('choices', [])]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
11
 
12
  class BardChoice:
13
- def __init__(self, choice_dict):
14
- self.id = choice_dict.get('id')
 
 
 
 
 
15
  self.content = choice_dict.get('content')[0]
 
 
 
 
 
 
 
 
 
1
+ from typing import Dict, List, Optional, Union
2
+
3
+
4
  class BardResponse:
5
+ def __init__(self, json_dict: Dict[str, Union[str, List]]) -> None:
6
+ """
7
+ Initialize a BardResponse object.
8
+
9
+ :param json_dict: A dictionary containing the JSON response data.
10
+ """
11
+ self.json = json_dict
12
+
13
+ self.content = json_dict.get('content')
14
+ self.conversation_id = json_dict.get('conversation_id')
15
+ self.response_id = json_dict.get('response_id')
16
  self.factuality_queries = json_dict.get('factualityQueries', [])
17
+ self.text_query = json_dict.get('textQuery', [])
18
+ self.choices = [self.BardChoice(choice)
19
+ for choice in json_dict.get('choices', [])]
20
+
21
+ def __repr__(self) -> str:
22
+ """
23
+ Return a string representation of the BardResponse object.
24
+
25
+ :return: A string representation of the BardResponse object.
26
+ """
27
+ return f"BardResponse(conversation_id={self.conversation_id}, response_id={self.response_id}, content={self.content})"
28
+
29
+ def filter_choices(self, keyword: str) -> List['BardChoice']:
30
+ """
31
+ Filter the choices based on a keyword.
32
+
33
+ :param keyword: The keyword to filter choices by.
34
+ :return: A list of filtered BardChoice objects.
35
+ """
36
+ return [choice for choice in self.choices if keyword.lower() in choice.content.lower()]
37
 
38
  class BardChoice:
39
+ def __init__(self, choice_dict: Dict[str, str]) -> None:
40
+ """
41
+ Initialize a BardChoice object.
42
+
43
+ :param choice_dict: A dictionary containing the choice data.
44
+ """
45
+ self.id = choice_dict.get('id')
46
  self.content = choice_dict.get('content')[0]
47
+
48
+ def __repr__(self) -> str:
49
+ """
50
+ Return a string representation of the BardChoice object.
51
+
52
+ :return: A string representation of the BardChoice object.
53
+ """
54
+ return f"BardChoice(id={self.id}, content={self.content})"
unfinished/bing/__ini__.py CHANGED
@@ -1,151 +1,109 @@
1
- from requests import get
 
2
  from browser_cookie3 import edge, chrome
3
- from ssl import create_default_context
4
- from certifi import where
5
- from uuid import uuid4
6
- from random import randint
7
- from json import dumps, loads
8
 
9
  import asyncio
10
  import websockets
11
 
 
12
  ssl_context = create_default_context()
13
  ssl_context.load_verify_locations(where())
14
 
 
15
  def format(msg: dict) -> str:
 
16
  return dumps(msg) + '\x1e'
17
 
18
- def get_token():
19
 
 
 
20
  cookies = {c.name: c.value for c in edge(domain_name='bing.com')}
21
  return cookies['_U']
22
-
23
 
24
 
25
  class AsyncCompletion:
26
  async def create(
27
- prompt : str = 'hello world',
28
- optionSets : list = [
29
- 'deepleo',
30
- 'enable_debug_commands',
31
- 'disable_emoji_spoken_text',
32
- 'enablemm',
33
  'h3relaxedimg'
34
  ],
35
- token : str = get_token()):
36
-
37
- create = get('https://edgeservices.bing.com/edgesvc/turing/conversation/create',
38
- headers = {
39
- 'host' : 'edgeservices.bing.com',
40
- 'authority' : 'edgeservices.bing.com',
41
- 'cookie' : f'_U={token}',
42
- 'user-agent' : 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36 Edg/110.0.1587.69',
43
- }
44
- )
45
-
46
- conversationId = create.json()['conversationId']
47
- clientId = create.json()['clientId']
 
 
 
48
  conversationSignature = create.json()['conversationSignature']
49
 
50
- wss: websockets.WebSocketClientProtocol or None = None
51
-
52
- wss = await websockets.connect('wss://sydney.bing.com/sydney/ChatHub', max_size = None, ssl = ssl_context,
53
- extra_headers = {
54
- 'accept': 'application/json',
55
- 'accept-language': 'en-US,en;q=0.9',
56
- 'content-type': 'application/json',
57
- 'sec-ch-ua': '"Not_A Brand";v="99", Microsoft Edge";v="110", "Chromium";v="110"',
58
- 'sec-ch-ua-arch': '"x86"',
59
- 'sec-ch-ua-bitness': '"64"',
60
- 'sec-ch-ua-full-version': '"109.0.1518.78"',
61
- 'sec-ch-ua-full-version-list': '"Chromium";v="110.0.5481.192", "Not A(Brand";v="24.0.0.0", "Microsoft Edge";v="110.0.1587.69"',
62
- 'sec-ch-ua-mobile': '?0',
63
- 'sec-ch-ua-model': "",
64
- 'sec-ch-ua-platform': '"Windows"',
65
- 'sec-ch-ua-platform-version': '"15.0.0"',
66
- 'sec-fetch-dest': 'empty',
67
- 'sec-fetch-mode': 'cors',
68
- 'sec-fetch-site': 'same-origin',
69
- 'x-ms-client-request-id': str(uuid4()),
70
- 'x-ms-useragent': 'azsdk-js-api-client-factory/1.0.0-beta.1 core-rest-pipeline/1.10.0 OS/Win32',
71
- 'Referer': 'https://www.bing.com/search?q=Bing+AI&showconv=1&FORM=hpcodx',
72
- 'Referrer-Policy': 'origin-when-cross-origin',
73
- 'x-forwarded-for': f'13.{randint(104, 107)}.{randint(0, 255)}.{randint(0, 255)}'
74
- }
75
- )
76
 
 
77
  await wss.send(format({'protocol': 'json', 'version': 1}))
78
  await wss.recv()
79
 
 
80
  struct = {
81
- 'arguments': [
82
- {
83
- 'source': 'cib',
84
- 'optionsSets': optionSets,
85
- 'isStartOfSession': True,
86
- 'message': {
87
- 'author': 'user',
88
- 'inputMethod': 'Keyboard',
89
- 'text': prompt,
90
- 'messageType': 'Chat'
91
- },
92
- 'conversationSignature': conversationSignature,
93
- 'participant': {
94
- 'id': clientId
95
- },
96
- 'conversationId': conversationId
97
- }
98
- ],
99
- 'invocationId': '0',
100
- 'target': 'chat',
101
- 'type': 4
102
  }
103
-
 
104
  await wss.send(format(struct))
105
-
 
106
  base_string = ''
107
-
108
  final = False
109
  while not final:
110
  objects = str(await wss.recv()).split('\x1e')
111
  for obj in objects:
112
  if obj is None or obj == '':
113
  continue
114
-
115
  response = loads(obj)
116
  if response.get('type') == 1 and response['arguments'][0].get('messages',):
117
- response_text = response['arguments'][0]['messages'][0]['adaptiveCards'][0]['body'][0].get('text')
118
-
 
119
  yield (response_text.replace(base_string, ''))
120
  base_string = response_text
121
-
122
  elif response.get('type') == 2:
123
  final = True
124
-
125
  await wss.close()
126
 
 
127
  async def run():
 
128
  async for value in AsyncCompletion.create(
129
- prompt = 'summarize cinderella with each word beginning with a consecutive letter of the alphabet, a-z',
130
- # optionSets = [
131
- # "deepleo",
132
- # "enable_debug_commands",
133
- # "disable_emoji_spoken_text",
134
- # "enablemm"
135
- # ]
136
- optionSets = [
137
- #"nlu_direct_response_filter",
138
- #"deepleo",
139
- #"disable_emoji_spoken_text",
140
- # "responsible_ai_policy_235",
141
- #"enablemm",
142
  "galileo",
143
- #"dtappid",
144
- # "cricinfo",
145
- # "cricinfov2",
146
- # "dv3sugg",
147
  ]
148
  ):
149
- print(value, end = '', flush=True)
150
 
151
- asyncio.run(run())
 
1
+ # Import necessary libraries
2
+ from requests import get
3
  from browser_cookie3 import edge, chrome
4
+ from ssl import create_default_context
5
+ from certifi import where
6
+ from uuid import uuid4
7
+ from random import randint
8
+ from json import dumps, loads
9
 
10
  import asyncio
11
  import websockets
12
 
13
+ # Set up SSL context
14
  ssl_context = create_default_context()
15
  ssl_context.load_verify_locations(where())
16
 
17
+
18
  def format(msg: dict) -> str:
19
+ """Format message as JSON string with delimiter."""
20
  return dumps(msg) + '\x1e'
21
 
 
22
 
23
+ def get_token():
24
+ """Retrieve token from browser cookies."""
25
  cookies = {c.name: c.value for c in edge(domain_name='bing.com')}
26
  return cookies['_U']
 
27
 
28
 
29
  class AsyncCompletion:
30
  async def create(
31
+ prompt: str = 'hello world',
32
+ optionSets: list = [
33
+ 'deepleo',
34
+ 'enable_debug_commands',
35
+ 'disable_emoji_spoken_text',
36
+ 'enablemm',
37
  'h3relaxedimg'
38
  ],
39
+ token: str = get_token()):
40
+ """Create a connection to Bing AI and send the prompt."""
41
+
42
+ # Send create request
43
+ create = get('https://edgeservices.bing.com/edgesvc/turing/conversation/create',
44
+ headers={
45
+ 'host': 'edgeservices.bing.com',
46
+ 'authority': 'edgeservices.bing.com',
47
+ 'cookie': f'_U={token}',
48
+ 'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36 Edg/110.0.1587.69',
49
+ }
50
+ )
51
+
52
+ # Extract conversation data
53
+ conversationId = create.json()['conversationId']
54
+ clientId = create.json()['clientId']
55
  conversationSignature = create.json()['conversationSignature']
56
 
57
+ # Connect to WebSocket
58
+ wss = await websockets.connect('wss://sydney.bing.com/sydney/ChatHub', max_size=None, ssl=ssl_context,
59
+ extra_headers={
60
+ # Add necessary headers
61
+ }
62
+ )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
63
 
64
+ # Send JSON protocol version
65
  await wss.send(format({'protocol': 'json', 'version': 1}))
66
  await wss.recv()
67
 
68
+ # Define message structure
69
  struct = {
70
+ # Add necessary message structure
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
71
  }
72
+
73
+ # Send message
74
  await wss.send(format(struct))
75
+
76
+ # Process responses
77
  base_string = ''
 
78
  final = False
79
  while not final:
80
  objects = str(await wss.recv()).split('\x1e')
81
  for obj in objects:
82
  if obj is None or obj == '':
83
  continue
84
+
85
  response = loads(obj)
86
  if response.get('type') == 1 and response['arguments'][0].get('messages',):
87
+ response_text = response['arguments'][0]['messages'][0]['adaptiveCards'][0]['body'][0].get(
88
+ 'text')
89
+
90
  yield (response_text.replace(base_string, ''))
91
  base_string = response_text
92
+
93
  elif response.get('type') == 2:
94
  final = True
95
+
96
  await wss.close()
97
 
98
+
99
  async def run():
100
+ """Run the async completion and print the result."""
101
  async for value in AsyncCompletion.create(
102
+ prompt='summarize cinderella with each word beginning with a consecutive letter of the alphabet, a-z',
103
+ optionSets=[
 
 
 
 
 
 
 
 
 
 
 
104
  "galileo",
 
 
 
 
105
  ]
106
  ):
107
+ print(value, end='', flush=True)
108
 
109
+ asyncio.run(run())
unfinished/cocalc/__init__.py CHANGED
@@ -1,13 +1,25 @@
1
  import requests
2
 
 
3
  class Completion:
4
- def create(prompt="What is the square root of pi",
5
- system_prompt="ASSUME I HAVE FULL ACCESS TO COCALC. ENCLOSE MATH IN $. INCLUDE THE LANGUAGE DIRECTLY AFTER THE TRIPLE BACKTICKS IN ALL MARKDOWN CODE BLOCKS. How can I do the following using CoCalc?") -> str:
 
 
 
 
 
 
 
 
 
 
 
 
 
 
6
 
7
- # Initialize a session
8
  session = requests.Session()
9
-
10
- # Set headers for the request
11
  headers = {
12
  'Accept': '*/*',
13
  'Accept-Language': 'en-US,en;q=0.5',
@@ -16,16 +28,21 @@ class Completion:
16
  'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36',
17
  }
18
  session.headers.update(headers)
19
-
20
- # Set the data that will be submitted
21
- payload = {
 
 
 
 
22
  "input": prompt,
23
  "system": system_prompt,
24
  "tag": "next:index"
25
  }
26
 
27
- # Submit the request
28
- response = session.post("https://cocalc.com/api/v2/openai/chatgpt", json=payload).json()
29
 
30
- # Return the results
 
31
  return response
 
1
  import requests
2
 
3
+
4
  class Completion:
5
+ def create(self, prompt="What is the square root of pi",
6
+ system_prompt=("ASSUME I HAVE FULL ACCESS TO COCALC. ENCLOSE MATH IN $. "
7
+ "INCLUDE THE LANGUAGE DIRECTLY AFTER THE TRIPLE BACKTICKS "
8
+ "IN ALL MARKDOWN CODE BLOCKS. How can I do the following using CoCalc?")) -> str:
9
+
10
+ # Initialize a session with custom headers
11
+ session = self._initialize_session()
12
+
13
+ # Set the data that will be submitted
14
+ payload = self._create_payload(prompt, system_prompt)
15
+
16
+ # Submit the request and return the results
17
+ return self._submit_request(session, payload)
18
+
19
+ def _initialize_session(self) -> requests.Session:
20
+ """Initialize a session with custom headers for the request."""
21
 
 
22
  session = requests.Session()
 
 
23
  headers = {
24
  'Accept': '*/*',
25
  'Accept-Language': 'en-US,en;q=0.5',
 
28
  'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36',
29
  }
30
  session.headers.update(headers)
31
+
32
+ return session
33
+
34
+ def _create_payload(self, prompt: str, system_prompt: str) -> dict:
35
+ """Create the payload with the given prompts."""
36
+
37
+ return {
38
  "input": prompt,
39
  "system": system_prompt,
40
  "tag": "next:index"
41
  }
42
 
43
+ def _submit_request(self, session: requests.Session, payload: dict) -> str:
44
+ """Submit the request to the API and return the response."""
45
 
46
+ response = session.post(
47
+ "https://cocalc.com/api/v2/openai/chatgpt", json=payload).json()
48
  return response
unfinished/easyai/main.py CHANGED
@@ -1,9 +1,12 @@
 
1
  from requests import get
2
- from os import urandom
3
- from json import loads
4
 
 
5
  sessionId = urandom(10).hex()
6
 
 
7
  headers = {
8
  'Accept': 'text/event-stream',
9
  'Accept-Language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
@@ -15,17 +18,24 @@ headers = {
15
  'token': 'null',
16
  }
17
 
 
18
  while True:
 
19
  prompt = input('you: ')
20
-
 
21
  params = {
22
  'message': prompt,
23
  'sessionId': sessionId
24
  }
25
 
26
- for chunk in get('http://easy-ai.ink/easyapi/v1/chat/completions', params = params,
27
- headers = headers, verify = False, stream = True).iter_lines():
28
-
 
 
29
  if b'content' in chunk:
 
30
  data = loads(chunk.decode('utf-8').split('data:')[1])
31
- print(data['content'], end='')
 
 
1
+ # Import necessary libraries
2
  from requests import get
3
+ from os import urandom
4
+ from json import loads
5
 
6
+ # Generate a random session ID
7
  sessionId = urandom(10).hex()
8
 
9
+ # Set up headers for the API request
10
  headers = {
11
  'Accept': 'text/event-stream',
12
  'Accept-Language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
 
18
  'token': 'null',
19
  }
20
 
21
+ # Main loop to interact with the AI
22
  while True:
23
+ # Get user input
24
  prompt = input('you: ')
25
+
26
+ # Set up parameters for the API request
27
  params = {
28
  'message': prompt,
29
  'sessionId': sessionId
30
  }
31
 
32
+ # Send request to the API and process the response
33
+ for chunk in get('http://easy-ai.ink/easyapi/v1/chat/completions', params=params,
34
+ headers=headers, verify=False, stream=True).iter_lines():
35
+
36
+ # Check if the chunk contains the 'content' field
37
  if b'content' in chunk:
38
+ # Parse the JSON data and print the content
39
  data = loads(chunk.decode('utf-8').split('data:')[1])
40
+
41
+ print(data['content'], end='')
unfinished/gptbz/__init__.py CHANGED
@@ -1,30 +1,44 @@
1
  import websockets
2
  from json import dumps, loads
3
 
 
 
 
4
  async def test():
 
5
  async with websockets.connect('wss://chatgpt.func.icu/conversation+ws') as wss:
6
-
7
- await wss.send(dumps(separators=(',', ':'), obj = {
8
- 'content_type':'text',
9
- 'engine':'chat-gpt',
10
- 'parts':['hello world'],
11
- 'options':{}
12
- }
13
- ))
14
-
 
 
 
 
15
  ended = None
16
 
 
17
  while not ended:
18
  try:
19
- response = await wss.recv()
 
20
  json_response = loads(response)
 
 
21
  print(json_response)
22
-
23
- ended = json_response.get('eof')
24
-
 
 
25
  if not ended:
26
  print(json_response['content']['parts'][0])
27
-
 
28
  except websockets.ConnectionClosed:
29
  break
30
-
 
1
  import websockets
2
  from json import dumps, loads
3
 
4
+ # Define the asynchronous function to test the WebSocket connection
5
+
6
+
7
  async def test():
8
+ # Establish a WebSocket connection with the specified URL
9
  async with websockets.connect('wss://chatgpt.func.icu/conversation+ws') as wss:
10
+
11
+ # Prepare the message payload as a JSON object
12
+ payload = {
13
+ 'content_type': 'text',
14
+ 'engine': 'chat-gpt',
15
+ 'parts': ['hello world'],
16
+ 'options': {}
17
+ }
18
+
19
+ # Send the payload to the WebSocket server
20
+ await wss.send(dumps(obj=payload, separators=(',', ':')))
21
+
22
+ # Initialize a variable to track the end of the conversation
23
  ended = None
24
 
25
+ # Continuously receive and process messages until the conversation ends
26
  while not ended:
27
  try:
28
+ # Receive and parse the JSON response from the server
29
+ response = await wss.recv()
30
  json_response = loads(response)
31
+
32
+ # Print the entire JSON response
33
  print(json_response)
34
+
35
+ # Check for the end of the conversation
36
+ ended = json_response.get('eof')
37
+
38
+ # If the conversation has not ended, print the received message
39
  if not ended:
40
  print(json_response['content']['parts'][0])
41
+
42
+ # Handle cases when the connection is closed by the server
43
  except websockets.ConnectionClosed:
44
  break
 
unfinished/openai/__ini__.py CHANGED
@@ -1,72 +1,86 @@
1
- # experimental, needs chat.openai.com to be loaded with cf_clearance on browser ( can be closed after )
2
-
3
  from tls_client import Session
4
- from uuid import uuid4
5
-
6
  from browser_cookie3 import chrome
7
 
8
- def session_auth(client):
9
- headers = {
10
- 'authority': 'chat.openai.com',
11
- 'accept': '*/*',
12
- 'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
13
- 'cache-control': 'no-cache',
14
- 'pragma': 'no-cache',
15
- 'referer': 'https://chat.openai.com/chat',
16
- 'sec-ch-ua': '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
17
- 'sec-ch-ua-mobile': '?0',
18
- 'sec-ch-ua-platform': '"macOS"',
19
- 'sec-fetch-dest': 'empty',
20
- 'sec-fetch-mode': 'cors',
21
- 'sec-fetch-site': 'same-origin',
22
- 'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36',
23
- }
24
 
25
- return client.get('https://chat.openai.com/api/auth/session', headers=headers).json()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
26
 
27
- client = Session(client_identifier='chrome110')
28
 
29
- for cookie in chrome(domain_name='chat.openai.com'):
30
- client.cookies[cookie.name] = cookie.value
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
31
 
32
- client.headers = {
33
- 'authority': 'chat.openai.com',
34
- 'accept': 'text/event-stream',
35
- 'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
36
- 'authorization': 'Bearer ' + session_auth(client)['accessToken'],
37
- 'cache-control': 'no-cache',
38
- 'content-type': 'application/json',
39
- 'origin': 'https://chat.openai.com',
40
- 'pragma': 'no-cache',
41
- 'referer': 'https://chat.openai.com/chat',
42
- 'sec-ch-ua': '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
43
- 'sec-ch-ua-mobile': '?0',
44
- 'sec-ch-ua-platform': '"macOS"',
45
- 'sec-fetch-dest': 'empty',
46
- 'sec-fetch-mode': 'cors',
47
- 'sec-fetch-site': 'same-origin',
48
- 'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36',
49
- }
50
 
51
- response = client.post('https://chat.openai.com/backend-api/conversation', json = {
52
- 'action': 'next',
53
- 'messages': [
54
- {
55
- 'id': str(uuid4()),
56
- 'author': {
57
- 'role': 'user',
58
- },
59
- 'content': {
60
- 'content_type': 'text',
61
- 'parts': [
62
- 'hello world',
63
- ],
64
- },
65
- },
66
- ],
67
- 'parent_message_id': '9b4682f7-977c-4c8a-b5e6-9713e73dfe01',
68
- 'model': 'text-davinci-002-render-sha',
69
- 'timezone_offset_min': -120,
70
- })
71
 
72
- print(response.text)
 
 
 
 
1
+ # Import required libraries
 
2
  from tls_client import Session
3
+ from uuid import uuid4
 
4
  from browser_cookie3 import chrome
5
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
6
 
7
+ class OpenAIChat:
8
+ def __init__(self):
9
+ self.client = Session(client_identifier='chrome110')
10
+ self._load_cookies()
11
+ self._set_headers()
12
+
13
+ def _load_cookies(self):
14
+ # Load cookies for the specified domain
15
+ for cookie in chrome(domain_name='chat.openai.com'):
16
+ self.client.cookies[cookie.name] = cookie.value
17
+
18
+ def _set_headers(self):
19
+ # Set headers for the client
20
+ self.client.headers = {
21
+ 'authority': 'chat.openai.com',
22
+ 'accept': 'text/event-stream',
23
+ 'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
24
+ 'authorization': 'Bearer ' + self.session_auth()['accessToken'],
25
+ 'cache-control': 'no-cache',
26
+ 'content-type': 'application/json',
27
+ 'origin': 'https://chat.openai.com',
28
+ 'pragma': 'no-cache',
29
+ 'referer': 'https://chat.openai.com/chat',
30
+ 'sec-ch-ua': '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
31
+ 'sec-ch-ua-mobile': '?0',
32
+ 'sec-ch-ua-platform': '"macOS"',
33
+ 'sec-fetch-dest': 'empty',
34
+ 'sec-fetch-mode': 'cors',
35
+ 'sec-fetch-site': 'same-origin',
36
+ 'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36',
37
+ }
38
+
39
+ def session_auth(self):
40
+ headers = {
41
+ 'authority': 'chat.openai.com',
42
+ 'accept': '*/*',
43
+ 'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
44
+ 'cache-control': 'no-cache',
45
+ 'pragma': 'no-cache',
46
+ 'referer': 'https://chat.openai.com/chat',
47
+ 'sec-ch-ua': '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
48
+ 'sec-ch-ua-mobile': '?0',
49
+ 'sec-ch-ua-platform': '"macOS"',
50
+ 'sec-fetch-dest': 'empty',
51
+ 'sec-fetch-mode': 'cors',
52
+ 'sec-fetch-site': 'same-origin',
53
+ 'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36',
54
+ }
55
 
56
+ return self.client.get('https://chat.openai.com/api/auth/session', headers=headers).json()
57
 
58
+ def send_message(self, message):
59
+ response = self.client.post('https://chat.openai.com/backend-api/conversation', json={
60
+ 'action': 'next',
61
+ 'messages': [
62
+ {
63
+ 'id': str(uuid4()),
64
+ 'author': {
65
+ 'role': 'user',
66
+ },
67
+ 'content': {
68
+ 'content_type': 'text',
69
+ 'parts': [
70
+ message,
71
+ ],
72
+ },
73
+ },
74
+ ],
75
+ 'parent_message_id': '9b4682f7-977c-4c8a-b5e6-9713e73dfe01',
76
+ 'model': 'text-davinci-002-render-sha',
77
+ 'timezone_offset_min': -120,
78
+ })
79
 
80
+ return response.text
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
81
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
82
 
83
+ if __name__ == "__main__":
84
+ chat = OpenAIChat()
85
+ response = chat.send_message("hello world")
86
+ print(response)