Upload 8 files
- database/admin_group.py +31 -0
- database/connections_mdb.py +133 -0
- database/filters_mdb.py +107 -0
- database/ia_filterdb.py +141 -0
- database/notification.py +47 -0
- database/quickdb.py +100 -0
- database/tvseriesfilters.py +54 -0
- database/users_chats_db.py +122 -0
database/admin_group.py
ADDED
@@ -0,0 +1,31 @@
import pymongo
from info import DATABASE_URI, DATABASE_NAME
import logging
logger = logging.getLogger(__name__)
logger.setLevel(logging.ERROR)

myclient = pymongo.MongoClient(DATABASE_URI)
mydb = myclient[DATABASE_NAME]


async def add_admingroup(group_id, template):
    """Store the template string configured for a group."""
    mycol = mydb["templatedb"]
    mydict = {"group_id": str(group_id), "template": str(template)}

    try:
        mycol.insert_one(mydict)
    except Exception:
        logger.exception('Some error occurred!')


async def remove_admingroup(group_id):
    mycol = mydb["templatedb"]
    myquery = {"group_id": str(group_id)}
    mycol.delete_one(myquery)


async def get_admingroup(group_id):
    mycol = mydb["templatedb"]
    myquery = {"group_id": str(group_id)}
    # find_one returns None instead of raising IndexError when no template is stored
    return mycol.find_one(myquery)
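These helpers are thin async wrappers around synchronous PyMongo calls on the `templatedb` collection; the same pattern repeats in the other small modules below. A minimal usage sketch, not part of this commit, assuming the repo's `info.py` points at a reachable MongoDB; the group id and template string are made-up example values:

import asyncio

from database.admin_group import add_admingroup, get_admingroup, remove_admingroup


async def demo():
    # Store a caption template for a (hypothetical) group id.
    await add_admingroup(-1001234567890, "{title} ({year})")

    # Read it back; the module stores both fields as strings.
    doc = await get_admingroup(-1001234567890)
    print(doc["template"])

    # Clean up the example entry.
    await remove_admingroup(-1001234567890)


asyncio.run(demo())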
database/connections_mdb.py
ADDED
@@ -0,0 +1,133 @@
import pymongo

from info import DATABASE_URI, DATABASE_NAME

import logging
logger = logging.getLogger(__name__)
logger.setLevel(logging.ERROR)

myclient = pymongo.MongoClient(DATABASE_URI)
mydb = myclient[DATABASE_NAME]
mycol = mydb['CONNECTION']


async def add_connection(group_id, user_id):
    query = mycol.find_one(
        {"_id": user_id},
        {"_id": 0, "active_group": 0}
    )
    if query is not None:
        group_ids = [x["group_id"] for x in query["group_details"]]
        if group_id in group_ids:
            return False

    group_details = {
        "group_id": group_id
    }

    data = {
        '_id': user_id,
        'group_details': [group_details],
        'active_group': group_id,
    }

    if mycol.count_documents({"_id": user_id}) == 0:
        try:
            mycol.insert_one(data)
            return True
        except Exception:
            logger.exception('Some error occurred!')
    else:
        try:
            mycol.update_one(
                {'_id': user_id},
                {
                    "$push": {"group_details": group_details},
                    "$set": {"active_group": group_id}
                }
            )
            return True
        except Exception:
            logger.exception('Some error occurred!')


async def active_connection(user_id):
    query = mycol.find_one(
        {"_id": user_id},
        {"_id": 0, "group_details": 0}
    )
    if not query:
        return None

    group_id = query['active_group']
    return int(group_id) if group_id is not None else None


async def all_connections(user_id):
    query = mycol.find_one(
        {"_id": user_id},
        {"_id": 0, "active_group": 0}
    )
    if query is not None:
        return [x["group_id"] for x in query["group_details"]]
    else:
        return None


async def if_active(user_id, group_id):
    query = mycol.find_one(
        {"_id": user_id},
        {"_id": 0, "group_details": 0}
    )
    return query is not None and query['active_group'] == group_id


async def make_active(user_id, group_id):
    update = mycol.update_one(
        {'_id': user_id},
        {"$set": {"active_group": group_id}}
    )
    return update.modified_count != 0


async def make_inactive(user_id):
    update = mycol.update_one(
        {'_id': user_id},
        {"$set": {"active_group": None}}
    )
    return update.modified_count != 0


async def delete_connection(user_id, group_id):
    try:
        update = mycol.update_one(
            {"_id": user_id},
            {"$pull": {"group_details": {"group_id": group_id}}}
        )
        if update.modified_count == 0:
            return False
        query = mycol.find_one(
            {"_id": user_id},
            {"_id": 0}
        )
        if len(query["group_details"]) >= 1:
            if query['active_group'] == group_id:
                # fall back to the most recently added remaining group
                prvs_group_id = query["group_details"][-1]["group_id"]
                mycol.update_one(
                    {'_id': user_id},
                    {"$set": {"active_group": prvs_group_id}}
                )
        else:
            mycol.update_one(
                {'_id': user_id},
                {"$set": {"active_group": None}}
            )
        return True
    except Exception:
        logger.exception('Some error occurred!')
        return False
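A short sketch of the connection lifecycle, not part of this commit, assuming `info.py` is configured; the ids are placeholders. `add_connection` inserts the first document for a user and pushes later groups, `active_connection` reports the currently active group, and `delete_connection` falls back to the most recently added remaining group (or `None`):

import asyncio

from database.connections_mdb import (
    add_connection, active_connection, all_connections, delete_connection)


async def demo():
    user_id = 123456789
    grp_a, grp_b = "-1001111111111", "-1002222222222"

    await add_connection(grp_a, user_id)      # creates the user document
    await add_connection(grp_b, user_id)      # pushed; also becomes active

    print(await all_connections(user_id))     # ['-1001111111111', '-1002222222222']
    print(await active_connection(user_id))   # -1002222222222 (as int)

    await delete_connection(user_id, grp_b)   # active falls back to grp_a
    print(await active_connection(user_id))   # -1001111111111


asyncio.run(demo())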
database/filters_mdb.py
ADDED
@@ -0,0 +1,107 @@
import pymongo
from info import DATABASE_URI, DATABASE_NAME
import logging
logger = logging.getLogger(__name__)
logger.setLevel(logging.ERROR)

myclient = pymongo.MongoClient(DATABASE_URI)
mydb = myclient[DATABASE_NAME]


async def add_filter(grp_id, text, reply_text, btn, file, alert):
    # each chat gets its own collection, named after the group id
    mycol = mydb[str(grp_id)]
    # mycol.create_index([('text', 'text')])

    data = {
        'text': str(text),
        'reply': str(reply_text),
        'btn': str(btn),
        'file': str(file),
        'alert': str(alert)
    }

    try:
        mycol.update_one({'text': str(text)}, {"$set": data}, upsert=True)
    except Exception:
        logger.exception('Some error occurred!')


async def find_filter(group_id, name):
    mycol = mydb[str(group_id)]
    query = mycol.find({"text": name})
    try:
        for file in query:
            reply_text = file['reply']
            btn = file['btn']
            fileid = file['file']
            try:
                alert = file['alert']
            except Exception:
                alert = None
            return reply_text, btn, alert, fileid
    except Exception:
        return None, None, None, None


async def get_filters(group_id):
    mycol = mydb[str(group_id)]
    texts = []
    query = mycol.find()
    try:
        texts.extend(file['text'] for file in query)
    except Exception:
        pass
    return texts


async def delete_filter(message, text, group_id):
    mycol = mydb[str(group_id)]

    myquery = {'text': text}
    query = mycol.count_documents(myquery)
    if query == 1:
        mycol.delete_one(myquery)
        await message.reply_text(
            f"'`{text}`' deleted. I'll not respond to that filter anymore.",
            quote=True,
            parse_mode="md"
        )
    else:
        await message.reply_text("Couldn't find that filter!", quote=True)


async def del_all(message, group_id, title):
    if str(group_id) not in mydb.list_collection_names():
        await message.edit_text(f"Nothing to remove in {title}!")
        return
    mycol = mydb[str(group_id)]
    try:
        mycol.drop()
        await message.edit_text(f"All filters from {title} have been removed")
    except Exception:
        await message.edit_text("Couldn't remove all filters from group!")
        return


async def count_filters(group_id):
    mycol = mydb[str(group_id)]

    # count_documents replaces Collection.count(), which newer PyMongo removed
    count = mycol.count_documents({})
    return False if count == 0 else count


async def filter_stats():
    collections = mydb.list_collection_names()

    if "CONNECTION" in collections:
        collections.remove("CONNECTION")

    totalcount = 0
    for collection in collections:
        mycol = mydb[collection]
        totalcount += mycol.count_documents({})

    totalcollections = len(collections)

    return totalcollections, totalcount
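Filters live in one collection per chat (the collection name is the group id), so `filter_stats` simply walks every collection except `CONNECTION`. A minimal round trip, not part of this commit, assuming a configured `info.py`; the id and filter payload are example values, and `delete_filter`/`del_all` are omitted because they need a Pyrogram message to reply to:

import asyncio

from database.filters_mdb import add_filter, find_filter, get_filters, count_filters


async def demo():
    grp_id = "-1001234567890"   # collection name == group id

    await add_filter(grp_id, "hello", "Hi there!", "[]", None, None)

    reply_text, btn, alert, fileid = await find_filter(grp_id, "hello")
    print(reply_text)                  # "Hi there!"

    print(await get_filters(grp_id))   # ['hello']
    print(await count_filters(grp_id))


asyncio.run(demo())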
database/ia_filterdb.py
ADDED
@@ -0,0 +1,141 @@
import logging
from struct import pack
import re
import base64
from pyrogram.file_id import FileId
from pymongo.errors import DuplicateKeyError
from umongo import Instance, Document, fields
from motor.motor_asyncio import AsyncIOMotorClient
from marshmallow.exceptions import ValidationError
from info import DATABASE_URI, DATABASE_NAME, COLLECTION_NAME, USE_CAPTION_FILTER

logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)


client = AsyncIOMotorClient(DATABASE_URI)
db = client[DATABASE_NAME]
instance = Instance.from_db(db)


@instance.register
class Media(Document):
    file_id = fields.StrField(attribute='_id')
    file_ref = fields.StrField(allow_none=True)
    file_name = fields.StrField(required=True)
    file_size = fields.IntField(required=True)
    file_type = fields.StrField(allow_none=True)
    mime_type = fields.StrField(allow_none=True)
    caption = fields.StrField(allow_none=True)

    class Meta:
        indexes = ('$file_name', )
        collection_name = COLLECTION_NAME


async def save_file(media):
    """Save file in database"""

    # TODO: Find a better way to get the same file_id for the same media to avoid duplicates
    file_id, file_ref = unpack_new_file_id(media.file_id)
    file_name = re.sub(r"(_|\-|\.|\+)", " ", str(media.file_name))
    try:
        file = Media(
            file_id=file_id,
            file_ref=file_ref,
            file_name=file_name,
            file_size=media.file_size,
            file_type=media.file_type,
            mime_type=media.mime_type,
            caption=media.caption.html if media.caption else None,
        )
    except ValidationError:
        logger.exception('Error occurred while saving file in database')
        return False, 2
    else:
        try:
            await file.commit()
        except DuplicateKeyError:
            logger.warning(
                f'{getattr(media, "file_name", "NO_FILE")} is already saved in database'
            )
            return False, 0
        else:
            logger.info(
                f'{getattr(media, "file_name", "NO_FILE")} is saved to database')
            return True, 1


async def get_search_results(query, file_type=None, max_results=10, offset=0, filter=False):
    # sourcery skip: avoid-builtin-shadow
    """For a given query return (results, next_offset, total_results)"""
    query = query.strip()
    if not query:
        raw_pattern = '.'
    elif ' ' not in query:
        # raw strings so \b stays a word boundary instead of a backspace character
        raw_pattern = r'(\b|[\.\+\-_])' + query + r'(\b|[\.\+\-_])'
    else:
        raw_pattern = query.replace(' ', r'.*[\s\.\+\-_]')
    try:
        regex = re.compile(raw_pattern, flags=re.IGNORECASE)
    except Exception:
        # keep the (files, next_offset, total) shape even on a bad pattern
        return [], '', 0

    if USE_CAPTION_FILTER:
        filter = {'$or': [{'file_name': regex}, {'caption': regex}]}
    else:
        filter = {'file_name': regex}

    if file_type:
        filter['file_type'] = file_type

    total_results = await Media.count_documents(filter)
    next_offset = offset + max_results

    if next_offset > total_results:
        next_offset = ''

    cursor = Media.find(filter)
    # newest entries first
    cursor.sort('$natural', -1)
    cursor.skip(offset).limit(max_results)
    files = await cursor.to_list(length=max_results)

    return files, next_offset, total_results


async def get_file_details(query):  # sourcery skip: avoid-builtin-shadow
    filter = {'file_id': query}
    cursor = Media.find(filter)
    return await cursor.to_list(length=1)


def encode_file_id(s: bytes) -> str:
    r = b""
    n = 0

    for i in s + bytes([22]) + bytes([4]):
        if i == 0:
            n += 1
        else:
            if n:
                r += b"\x00" + bytes([n])
                n = 0

            r += bytes([i])

    return base64.urlsafe_b64encode(r).decode().rstrip("=")


def encode_file_ref(file_ref: bytes) -> str:
    return base64.urlsafe_b64encode(file_ref).decode().rstrip("=")


def unpack_new_file_id(new_file_id):
    """Return file_id, file_ref"""
    decoded = FileId.decode(new_file_id)
    file_id = encode_file_id(
        pack(
            "<iiqq",
            int(decoded.file_type),
            decoded.dc_id,
            decoded.media_id,
            decoded.access_hash
        )
    )
    file_ref = encode_file_ref(decoded.file_reference)
    return file_id, file_ref
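`save_file` is fed a Pyrogram media object by the indexing handlers and returns a `(saved, status)` pair (1 saved, 0 duplicate, 2 validation error), while the search side is driven by the inline-query handlers: `next_offset` is either the next skip value or an empty string once the result set is exhausted. A sketch of the search side, not part of this commit, assuming a configured `info.py` and an already indexed collection; the query string is a placeholder:

import asyncio

from database.ia_filterdb import get_search_results, get_file_details


async def demo():
    # Paged search, the way the inline handlers drive it.
    files, next_offset, total = await get_search_results("avengers", offset=0, max_results=10)
    print(total, "matches")
    for media in files:
        print(media.file_name, media.file_size)

    # next_offset is '' when there is nothing left, otherwise the next skip value.
    if next_offset != '':
        files, next_offset, total = await get_search_results("avengers", offset=next_offset)

    # A single document can be fetched back by its packed file_id.
    if files:
        print(await get_file_details(files[0].file_id))


asyncio.run(demo())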
database/notification.py
ADDED
@@ -0,0 +1,47 @@
import pymongo
from info import DATABASE_URI, DATABASE_NAME
import logging
logger = logging.getLogger(__name__)
logger.setLevel(logging.ERROR)

myclient = pymongo.MongoClient(DATABASE_URI)
mydb = myclient[DATABASE_NAME]


async def add_notification(userId, stats):
    mycol = mydb["notification"]
    mydict = {"userId": str(userId), "stats": str(stats)}

    try:
        mycol.insert_one(mydict)
    except Exception:
        logger.exception('Some error occurred!')


async def update_notification(userId, stats):
    mycol = mydb["notification"]
    filter = {'userId': str(userId)}
    newvalues = {"$set": {"stats": str(stats)}}
    try:
        mycol.update_one(filter, newvalues)
    except Exception:
        logger.exception('Some error occurred!')


async def remove_notification(userId):
    mycol = mydb["notification"]
    myquery = {'userId': str(userId)}
    mycol.delete_one(myquery)


async def find_allusers():
    mycol = mydb["notification"]
    return [x["userId"] for x in mycol.find()]


async def find_notification(userId):
    mycol = mydb["notification"]
    myquery = {"userId": str(userId)}
    # find_one returns the matching document, or None if the user has no record
    return mycol.find_one(myquery)
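A brief sketch of how a notification toggle could drive these helpers, not part of this commit, assuming a configured `info.py`; the user id and the "on"/"off" status strings are illustrative:

import asyncio

from database.notification import (
    add_notification, find_notification, update_notification, find_allusers)


async def demo():
    user_id = 123456789

    if await find_notification(user_id) is None:
        await add_notification(user_id, "on")      # first-time opt-in
    else:
        await update_notification(user_id, "off")  # flip an existing record

    print(await find_allusers())                   # all userIds, stored as strings


asyncio.run(demo())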
database/quickdb.py
ADDED
@@ -0,0 +1,100 @@
import pymongo
from info import DATABASE_URI, DATABASE_NAME
import logging
logger = logging.getLogger(__name__)
logger.setLevel(logging.ERROR)

myclient = pymongo.MongoClient(DATABASE_URI)
mydb = myclient[DATABASE_NAME]


async def add_inst_filter(fileid, links):
    mycol = mydb["quickdb"]
    mydict = {"fileid": str(fileid), "links": str(links)}

    try:
        mycol.insert_one(mydict)
    except Exception:
        logger.exception('Some error occurred!')


async def remove_inst(fileid):
    mycol = mydb["quickdb"]
    myquery = {"fileid": str(fileid)}
    mycol.delete_one(myquery)


async def get_ids(fileid):
    mycol = mydb["quickdb"]
    myquery = {"fileid": str(fileid)}
    return mycol.find_one(myquery)


async def get(id):
    # returns the first stored quickdb document (the id argument is unused)
    mycol = mydb["quickdb"]
    for x in mycol.find():
        return x


async def add_sent_files(userid, fileid):
    mycol = mydb["sentfiledb"]
    mydict = {"fileid": str(fileid), "userid": str(userid)}

    try:
        mycol.insert_one(mydict)
    except Exception:
        logger.exception('Some error occurred!')


async def count_sent_files():
    mycol = mydb["sentfiledb"]
    return mycol.count_documents({})


async def add_verification(user_id, stats, file_id, updat_time):
    mycol = mydb["verification"]
    mydict = {"userId": str(user_id), "stats": str(stats),
              "file": str(file_id), "updat_time": str(updat_time)}

    try:
        mycol.insert_one(mydict)
    except Exception:
        logger.exception('Some error occurred!')


async def remove_verification(user_id):
    mycol = mydb["verification"]
    myquery = {"userId": str(user_id)}
    mycol.delete_one(myquery)


async def get_verification(user_id):
    mycol = mydb["verification"]
    myquery = {"userId": str(user_id)}
    return mycol.find_one(myquery)


async def add_update_msg(total_users, files):
    mycol = mydb["updatemsg"]
    mydict = {"totalUsers": str(total_users), "files": str(files)}

    try:
        mycol.insert_one(mydict)
    except Exception:
        logger.exception('Some error occurred!')


async def remove_update_msg():
    mycol = mydb["updatemsg"]
    mycol.delete_many({})


async def get_update_msg():
    mycol = mydb["updatemsg"]
    for x in mycol.find():
        return x
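The `verification` helpers store per-user state as plain string fields, so callers compare against the stored strings. A small sketch of an add/check/clear cycle, not part of this commit, assuming a configured `info.py`; the user id, status string, file id, and timestamp are example values:

import asyncio
import time

from database.quickdb import add_verification, get_verification, remove_verification


async def demo():
    user_id = 123456789

    # Record a (hypothetical) pending verification for a file id.
    await add_verification(user_id, "pending", "BQADBAAD...", int(time.time()))

    rec = await get_verification(user_id)
    if rec and rec["stats"] == "pending":
        print("awaiting verification for", rec["file"])

    await remove_verification(user_id)   # clear the record afterwards


asyncio.run(demo())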
database/tvseriesfilters.py
ADDED
@@ -0,0 +1,54 @@
import pymongo
from info import DATABASE_URI, DATABASE_NAME
import logging
logger = logging.getLogger(__name__)
logger.setLevel(logging.ERROR)

myclient = pymongo.MongoClient(DATABASE_URI)
mydb = myclient[DATABASE_NAME]


async def add_tvseries_filter(name, language, quality, seasonlink):
    mycol = mydb["tvseries"]
    mydict = {"name": str(name), "language": str(language),
              "quality": str(quality), "seasonlink": str(seasonlink)}

    try:
        mycol.insert_one(mydict)
    except Exception:
        logger.exception('Some error occurred!')


async def update_tvseries_filter(name, language, quality, seasonlink):
    mycol = mydb["tvseries"]
    filter = {'name': str(name)}
    newvalues = {"$set": {"language": str(language), "quality": str(quality),
                          "seasonlink": str(seasonlink)}}

    try:
        mycol.update_one(filter, newvalues)
    except Exception:
        logger.exception('Some error occurred!')


async def remove_tvseries(name):
    mycol = mydb["tvseries"]
    myquery = {'name': str(name)}
    mycol.delete_one(myquery)


async def getlinks():
    mycol = mydb["tvseries"]
    return list(mycol.find())


async def find_tvseries_filter(name):
    mycol = mydb["tvseries"]
    # anchored regex -> prefix match on the series name
    return list(mycol.find({'name': {'$regex': f'^{name}'}}))


async def find_tvseries_by_first(letter):
    mycol = mydb["tvseries"]
    return list(mycol.find({'name': {'$regex': f'^{letter}'}}))

# https://www.w3schools.com/python/python_mongodb_query.asp
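Both lookup helpers use an anchored `$regex`, so they match by prefix and, since no `$options: 'i'` is passed, case-sensitively. A short sketch, not part of this commit, assuming a configured `info.py`; the series data is invented for illustration:

import asyncio

from database.tvseriesfilters import (
    add_tvseries_filter, find_tvseries_filter, find_tvseries_by_first, remove_tvseries)


async def demo():
    await add_tvseries_filter("Breaking Bad", "English", "720p", "https://example.com/s01")

    print(await find_tvseries_filter("Breaking"))   # prefix match on the full name
    print(await find_tvseries_by_first("B"))        # everything starting with "B"

    await remove_tvseries("Breaking Bad")


asyncio.run(demo())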
database/users_chats_db.py
ADDED
@@ -0,0 +1,122 @@
# https://github.com/odysseusmax/animated-lamp/blob/master/bot/database/database.py
import motor.motor_asyncio
from info import DATABASE_NAME, DATABASE_URI, IMDB, IMDB_TEMPLATE, MELCOW_NEW_USERS, P_TTI_SHOW_OFF, SINGLE_BUTTON, SPELL_CHECK_REPLY, PROTECT_CONTENT


class Database:

    def __init__(self, uri, database_name):
        self._client = motor.motor_asyncio.AsyncIOMotorClient(uri)
        self.db = self._client[database_name]
        self.col = self.db.users
        self.grp = self.db.groups

    def new_user(self, id, name):
        return dict(
            id=id,
            name=name,
            ban_status=dict(
                is_banned=False,
                ban_reason="",
            ),
        )

    def new_group(self, id, title):
        return dict(
            id=id,
            title=title,
            chat_status=dict(
                is_disabled=False,
                reason="",
            ),
        )

    async def add_user(self, id, name):
        user = self.new_user(id, name)
        await self.col.insert_one(user)

    async def is_user_exist(self, id):
        user = await self.col.find_one({'id': int(id)})
        return bool(user)

    async def total_users_count(self):
        return await self.col.count_documents({})

    async def remove_ban(self, id):
        ban_status = dict(
            is_banned=False,
            ban_reason=''
        )
        await self.col.update_one({'id': id}, {'$set': {'ban_status': ban_status}})

    async def ban_user(self, user_id, ban_reason="No Reason"):
        ban_status = dict(
            is_banned=True,
            ban_reason=ban_reason
        )
        await self.col.update_one({'id': user_id}, {'$set': {'ban_status': ban_status}})

    async def get_ban_status(self, id):
        default = dict(
            is_banned=False,
            ban_reason=''
        )
        user = await self.col.find_one({'id': int(id)})
        return user.get('ban_status', default) if user else default

    async def get_all_users(self):
        return self.col.find({})

    async def delete_user(self, user_id):
        await self.col.delete_many({'id': int(user_id)})

    async def get_banned(self):
        users = self.col.find({'ban_status.is_banned': True})
        chats = self.grp.find({'chat_status.is_disabled': True})
        b_chats = [chat['id'] async for chat in chats]
        b_users = [user['id'] async for user in users]
        return b_users, b_chats

    async def add_chat(self, chat, title):
        chat = self.new_group(chat, title)
        await self.grp.insert_one(chat)

    async def get_chat(self, chat):
        chat = await self.grp.find_one({'id': int(chat)})
        return chat.get('chat_status') if chat else False

    async def re_enable_chat(self, id):
        chat_status = dict(
            is_disabled=False,
            reason="",
        )
        await self.grp.update_one({'id': int(id)}, {'$set': {'chat_status': chat_status}})

    async def update_settings(self, id, settings):
        await self.grp.update_one({'id': int(id)}, {'$set': {'settings': settings}})

    async def get_settings(self, id):
        default = {
            'button': SINGLE_BUTTON,
            'botpm': P_TTI_SHOW_OFF,
            'file_secure': PROTECT_CONTENT,
            'imdb': IMDB,
            'spell_check': SPELL_CHECK_REPLY,
            'welcome': MELCOW_NEW_USERS,
            'template': IMDB_TEMPLATE,
        }
        chat = await self.grp.find_one({'id': int(id)})
        return chat.get('settings', default) if chat else default

    async def disable_chat(self, chat, reason="No Reason"):
        chat_status = dict(
            is_disabled=True,
            reason=reason,
        )
        await self.grp.update_one({'id': int(chat)}, {'$set': {'chat_status': chat_status}})

    async def total_chat_count(self):
        return await self.grp.count_documents({})

    async def get_all_chats(self):
        return self.grp.find({})

    async def get_db_size(self):
        return (await self.db.command("dbstats"))['dataSize']


db = Database(DATABASE_URI, DATABASE_NAME)
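`db` is instantiated once at import time, so the rest of the bot just imports it. A minimal sketch of typical calls, not part of this commit, assuming a configured `info.py`; the ids and names are placeholders:

import asyncio

from database.users_chats_db import db


async def demo():
    user_id, chat_id = 123456789, -1001234567890

    if not await db.is_user_exist(user_id):
        await db.add_user(user_id, "Example User")

    await db.add_chat(chat_id, "Example Group")

    settings = await db.get_settings(chat_id)   # falls back to the info.py defaults
    settings['spell_check'] = False
    await db.update_settings(chat_id, settings)

    print(await db.total_users_count(), await db.total_chat_count())
    print(await db.get_db_size(), "bytes used")


asyncio.run(demo())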