# urlExpanderBot/groupwatcher.py
"""
This add group support for the bot
"""
from pyrogram import Client, filters
from datetime import date as dt1
from datetime import datetime
from string import digits
from pathlib import Path
import re
import urlexpander
import json
# Pyrogram client backed by the 'urlExpander' session file.
app = Client('urlExpander')

# NOTE(review): these timestamps are captured once at import time, so any
# code that reuses them will log the start-up time, not the event time.
# Kept for backward compatibility with existing references.
hour = datetime.now()
today = dt1.today()

# Build the list of supported shortener services: every file under
# ./services contains one service domain per line; entries are lower-cased,
# stripped of whitespace, and de-duplicated (first occurrence wins).
directory = Path().resolve()
servfiles_location = directory / 'services'
services = []
file_list = []
for entry in servfiles_location.iterdir():
    file_list.append(entry.name)
for item in file_list:
    item_location = servfiles_location / item
    with open(item_location, 'r') as fl:
        for line in fl:
            line = line.lower().replace('\n', '').replace(' ', '')
            # Skip duplicates; redundant fl.close() removed — the
            # with-statement already closes the file on exit.
            if line not in services:
                services.append(line)
print(f'Supported services: {len(services)}')
def watch(client, message):
    """Scan a group message for URLs of supported shortener services and
    reply with the expansion chain.

    For each supported service whose domain matches the message text, the
    URL is expanded via urlexpander.unshort and the bot replies with the
    original URL, the final destination, and the numbered redirect list.
    Each expansion is appended to logs.txt as one JSON object per line.
    """
    for service in services:
        # Original behavior preserved: re.match anchors at the start of the
        # text, so only messages that BEGIN with 'http...' are expanded.
        match = re.match(f'http.*{service}/.*', message.text)
        if not match:
            continue
        url = match.group(0)
        request = urlexpander.unshort(url)
        hops = list(request['hops'].values())
        # BUG FIX: links/res/etc. were shared across loop iterations and then
        # del'd, which raised NameError when a second service matched; build
        # everything fresh for each match instead.
        links = [f'{i}. {hop}' for i, hop in enumerate(hops, start=1)]
        # Final destination = last hop with its '1. ' numbering prefix
        # stripped (original used res[3:], which assumes a one-digit index);
        # guard against an empty hop list instead of crashing on links[-1].
        res = links[-1][3:] if links else ''
        links_text = '\n'.join(links)
        # BUG FIX: use the actual event time rather than the module-level
        # timestamps captured once at import.
        now = datetime.now()
        data = f'{now.strftime("%d %B, %Y")} - {now.strftime("%H:%M:%S")}'
        log = {
            'time': data,
            'api_response': request,
        }
        # JSON Lines log: one object per line, appended.
        with open('logs.txt', 'a') as f:
            json.dump(log, f)
            f.write('\n')
        # BUG FIX: the reply template was a plain string, so the literal
        # text '{url}' / '{res}' / '{links}' was sent to the chat; it must
        # be an f-string.
        message.reply(
            f'URL Original:\n{url}\n\nURL Final:\n{res}\n\nRedirecionamentos:\n{links_text}',
            disable_web_page_preview=True,
        )
# on /expander group call
@app.on_message(filters.command('expander'))
def expander(client, message):
    """Handle the /expander command: expand the URL passed as argument.

    The URL is taken from the message text after stripping the command and
    spaces, expanded via urlexpander.unshort, logged to logs.txt as one
    JSON line, and the expansion chain is sent back as a reply.
    """
    url = message.text.replace('/expander', '').replace(' ', '')
    # BUG FIX: unshort was called twice (the first result discarded);
    # one API call is enough.
    request = urlexpander.unshort(url)
    hops = list(request['hops'].values())
    # BUG FIX: links was never initialized here, so links.append raised
    # NameError on every invocation; build the list with a comprehension.
    links = [f'{i}. {hop}' for i, hop in enumerate(hops, start=1)]
    # Final destination = last hop minus its '1. ' numbering prefix
    # (original res[3:]); guard against an empty hop list.
    res = links[-1][3:] if links else ''
    links_text = '\n'.join(links)
    # Use the actual event time rather than the import-time timestamps.
    now = datetime.now()
    data = f'{now.strftime("%d %B, %Y")} - {now.strftime("%H:%M:%S")}'
    # NOTE(review): this handler logs the timestamp under 'date' while
    # watch() uses 'time' — key kept as-is to preserve the log format.
    log = {
        'date': data,
        'api_response': request,
    }
    # JSON Lines log: one object per line, appended.
    with open('logs.txt', 'a') as f:
        json.dump(log, f)
        f.write('\n')
    # group: output message
    # BUG FIX: the reply template was missing the f-string prefix, so the
    # placeholders were sent to the chat literally.
    message.reply(
        f'URL Original:\n{url}\n\nURL Final:\n{res}\n\nRedirecionamentos:\n{links_text}',
        disable_web_page_preview=True,
    )