randydev committed on
Commit
4c642b9
1 Parent(s): 9efce41

Create main.py

Files changed (1)
  1. main.py +260 -0
main.py ADDED
@@ -0,0 +1,260 @@
+ #!/usr/bin/env python
+ # -*- coding: utf-8 -*-
+ # Copyright 2020-2024 (c) Randy W @xtdevs, @xtsea
+ #
+ # from : https://github.com/TeamKillerX
+ # Channel : @RendyProjects
+ # This program is free software: you can redistribute it and/or modify
+ # it under the terms of the GNU Affero General Public License as published by
+ # the Free Software Foundation, either version 3 of the License, or
+ # (at your option) any later version.
+ #
+ # This program is distributed in the hope that it will be useful,
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ # GNU Affero General Public License for more details.
+ #
+ # You should have received a copy of the GNU Affero General Public License
+ # along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+ import requests
+ import time
+ import json
+ import asyncio
+ import io
+ import os
+ import re
+ from PIL import Image
+
+ from pyrogram import *
+ from pyrogram import Client, filters, idle
+ from pyrogram.types import *
+ from pyrogram.errors import *
+ from RyuzakiLib import FaceAI, FullStackDev, GeminiLatest, RendyDevChat
+ from config import *
+
+ import google.generativeai as genai
+ from google.api_core.exceptions import InvalidArgument
+
+ # Telegram API credentials (placeholders; fill in your own values).
+ API_ID = 0
+ API_HASH = ""
+ BOT_TOKEN = ""
+
+ client = Client(
+     "gemini-bot",  # session name (required by Pyrogram; any name works)
+     api_id=API_ID,
+     api_hash=API_HASH,
+     bot_token=BOT_TOKEN
+ )
+
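+ # Helper: delete an uploaded file from the Gemini Files API by its resource
+ # name (e.g. "files/abc123"); returns the response body, or None on failure.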
+ async def geni_files_delete(name: str):
+     url = f"https://generativelanguage.googleapis.com/v1beta/{name}"
+     params = {"key": GOOGLE_API_KEY}
+     response = requests.delete(url, params=params)
+     if response.status_code != 200:
+         return None
+     return response.text
+
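+ # Catch-all private-chat handler: routes incoming photos, audio/voice notes,
+ # videos and plain text to Gemini and replies with the generated answer.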
+ @client.on_message(
+     filters.incoming
+     & (
+         filters.text
+         | filters.photo
+         | filters.video
+         | filters.audio
+         | filters.voice
+         | filters.regex(r"\b(Randy|Rendi)\b(.*)", flags=re.IGNORECASE)
+     )
+     & filters.private
+     & ~filters.bot
+     & ~filters.via_bot
+     & ~filters.forwarded,
+     group=2,
+ )
+ async def chatbot_talk(client: Client, message: Message):
+     chat_user = await db.get_chatbot(message.chat.id)
+     genai.configure(api_key=GOOGLE_API_KEY)
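+     # Photo: download the image and answer the caption with GeminiLatest.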
+     if message.photo:
+         file_path = await message.download()
+         caption = message.caption or "What's this?"
+         x = GeminiLatest(api_keys=GOOGLE_API_KEY)
+         if client.me.is_premium:
+             ai_reply = await message.reply_text(f"{custom_loading}Processing...")
+         else:
+             ai_reply = await message.reply_text("Processing...")
+         try:
+             backup_chat = await db._get_chatbot_chat_from_db(message.from_user.id)
+             backup_chat.append({"role": "user", "parts": [{"text": caption}]})
+             response_reads = x.get_response_image(caption, file_path)
+             if len(response_reads) > 4096:
+                 with open("chat.txt", "w+", encoding="utf8") as out_file:
+                     out_file.write(response_reads)
+                 await message.reply_document(
+                     document="chat.txt",
+                     disable_notification=True
+                 )
+                 await ai_reply.delete()
+                 os.remove("chat.txt")
+             else:
+                 await ai_reply.edit_text(response_reads)
+             backup_chat.append({"role": "model", "parts": [{"text": response_reads}]})
+             await db._update_chatbot_chat_in_db(message.from_user.id, backup_chat)
+             os.remove(file_path)
+             return
+         except InvalidArgument as e:
+             return await ai_reply.edit_text(f"Error: {e}")
+         except Exception as e:
+             return await ai_reply.edit_text(f"Error: {e}")
+
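+     # Audio / voice: upload the media to the Gemini Files API, wait for
+     # processing, then ask gemini-1.5-flash about it.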
+     if message.audio or message.voice:
+         if client.me.is_premium:
+             ai_reply = await message.reply_text(f"{custom_loading}Processing...")
+         else:
+             ai_reply = await message.reply_text("Processing...")
+         if message.audio:
+             audio_file_name = await message.download()
+         if message.voice:
+             audio_file_name = await message.download()
+         caption = message.caption or "What's this?"
+         model = genai.GenerativeModel(
+             model_name="gemini-1.5-flash",
+             safety_settings={
+                 genai.types.HarmCategory.HARM_CATEGORY_HATE_SPEECH: genai.types.HarmBlockThreshold.BLOCK_NONE,
+                 genai.types.HarmCategory.HARM_CATEGORY_HARASSMENT: genai.types.HarmBlockThreshold.BLOCK_NONE,
+                 genai.types.HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT: genai.types.HarmBlockThreshold.BLOCK_NONE,
+                 genai.types.HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: genai.types.HarmBlockThreshold.BLOCK_NONE,
+             }
+         )
+         backup_chat = await db._get_chatbot_chat_from_db(message.from_user.id)
+         backup_chat.append({"role": "user", "parts": [{"text": caption}]})
+         if client.me.is_premium:
+             await ai_reply.edit_text(f"{custom_loading}Uploading file..")
+         else:
+             await ai_reply.edit_text("Uploading file..")
+         audio_file = genai.upload_file(path=audio_file_name)
+         while audio_file.state.name == "PROCESSING":
+             await asyncio.sleep(10)
+             audio_file = genai.get_file(audio_file.name)
+         if audio_file.state.name == "FAILED":
+             return await ai_reply.edit_text(f"Error: {audio_file.state.name}")
+         try:
+             response = model.generate_content(
+                 [audio_file, caption],
+                 request_options={"timeout": 600}
+             )
+             if len(response.text) > 4096:
+                 with open("chat.txt", "w+", encoding="utf8") as out_file:
+                     out_file.write(response.text)
+                 await message.reply_document(
+                     document="chat.txt",
+                     disable_notification=True
+                 )
+                 await ai_reply.delete()
+                 os.remove("chat.txt")
+             else:
+                 await ai_reply.edit_text(response.text)
+             backup_chat.append({"role": "model", "parts": [{"text": response.text}]})
+             await db._update_chatbot_chat_in_db(message.from_user.id, backup_chat)
+             audio_file.delete()
+             os.remove(audio_file_name)
+             return
+         except InvalidArgument as e:
+             return await ai_reply.edit_text(f"Error: {e}")
+         except Exception as e:
+             return await ai_reply.edit_text(f"Error: {e}")
+
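+     # Video: same flow as audio, but using the gemini-1.5-pro model.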
+     if message.video:
+         if client.me.is_premium:
+             ai_reply = await message.reply_text(f"{custom_loading}Processing...")
+         else:
+             ai_reply = await message.reply_text("Processing...")
+         video_file_name = await message.download(file_name="newvideo.mp4")
+         caption = message.caption or "What's this?"
+         model = genai.GenerativeModel(
+             model_name="gemini-1.5-pro",
+             safety_settings={
+                 genai.types.HarmCategory.HARM_CATEGORY_HATE_SPEECH: genai.types.HarmBlockThreshold.BLOCK_NONE,
+                 genai.types.HarmCategory.HARM_CATEGORY_HARASSMENT: genai.types.HarmBlockThreshold.BLOCK_NONE,
+                 genai.types.HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT: genai.types.HarmBlockThreshold.BLOCK_NONE,
+                 genai.types.HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: genai.types.HarmBlockThreshold.BLOCK_NONE,
+             }
+         )
+         backup_chat = await db._get_chatbot_chat_from_db(message.from_user.id)
+         backup_chat.append({"role": "user", "parts": [{"text": caption}]})
+         if client.me.is_premium:
+             await ai_reply.edit_text(f"{custom_loading}Uploading file..")
+         else:
+             await ai_reply.edit_text("Uploading file..")
+         video_file = genai.upload_file(path=video_file_name)
+         while video_file.state.name == "PROCESSING":
+             await asyncio.sleep(10)
+             video_file = genai.get_file(video_file.name)
+         if video_file.state.name == "FAILED":
+             return await ai_reply.edit_text(f"Error: {video_file.state.name}")
+         try:
+             response = model.generate_content(
+                 [video_file, caption],
+                 request_options={"timeout": 600}
+             )
+             if len(response.text) > 4096:
+                 with open("chat.txt", "w+", encoding="utf8") as out_file:
+                     out_file.write(response.text)
+                 await message.reply_document(
+                     document="chat.txt",
+                     disable_notification=True
+                 )
+                 await ai_reply.delete()
+                 os.remove("chat.txt")
+             else:
+                 await ai_reply.edit_text(response.text)
+             backup_chat.append({"role": "model", "parts": [{"text": response.text}]})
+             await db._update_chatbot_chat_in_db(message.from_user.id, backup_chat)
+             video_file.delete()
+             os.remove(video_file_name)
+             return
+         except InvalidArgument as e:
+             return await ai_reply.edit_text(f"Error: {e}")
+         except Exception as e:
+             return await ai_reply.edit_text(f"Error: {e}")
+
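+     # Plain text: strip the "Randy"/"Rendi" trigger word and continue the chat
+     # with the stored history.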
+     if message.text:
+         query = message.text.strip()
+         match = re.search(r"\b(Randy|Rendi)\b(.*)", query, flags=re.IGNORECASE)
+         if match:
+             rest_of_sentence = match.group(2).strip()
+             query_base = rest_of_sentence if rest_of_sentence else query
+         else:
+             query_base = query
+         parts = query.split(maxsplit=1)
+         command = parts[0].lower()
+         pic_query = parts[1].strip() if len(parts) > 1 else ""
+         try:
+             model_flash = genai.GenerativeModel(
+                 model_name="gemini-1.5-flash"
+             )
+             backup_chat = await db._get_chatbot_chat_from_db(message.from_user.id)
+             backup_chat.append({"role": "user", "parts": [{"text": query_base}]})
+             chat_session = model_flash.start_chat(history=backup_chat)
+             response_data = chat_session.send_message(query_base)
+             output = response_data.text
+             if len(output) > 4096:
+                 with open("chat.txt", "w+", encoding="utf8") as out_file:
+                     out_file.write(output)
+                 await message.reply_document(
+                     document="chat.txt",
+                     disable_notification=True
+                 )
+                 os.remove("chat.txt")
+             else:
+                 await message.reply_text(output)
+             backup_chat.append({"role": "model", "parts": [{"text": output}]})
+             await db._update_chatbot_chat_in_db(message.from_user.id, backup_chat)
+         except Exception as e:
+             return await message.reply_text(str(e))
+
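+ # Entry point: start the Pyrogram client and keep it alive until interrupted.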
+ async def main():
+     await client.start()
+     me = await client.get_me()
+     print(f"started bot: {me.first_name}")
+     await idle()  # block here so the message handlers keep running
+     await client.stop()
+
+ if __name__ == "__main__":
+     asyncio.run(main())