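# Telegram content crawler: fetches tracked web pages, resources and translations,
# downloads official client builds (Android, macOS, iOS) and the Wallet mini app
# sources, and dumps MTProto server configs, normalizing dynamic parts so the
# resulting file tree only changes when Telegram changes something.
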
import asyncio
import hashlib
import json
import logging
import mimetypes
import os
import platform
import random
import re
import shutil
import zipfile
from asyncio.exceptions import TimeoutError
from string import punctuation, whitespace
from time import time
from typing import List

import aiofiles
import aiohttp
from aiohttp import ClientConnectorError, ServerDisconnectedError

import ccl_bplist

PROTOCOL = 'https://'
ILLEGAL_PATH_CHARS = punctuation.replace('.', '') + whitespace

DYNAMIC_PART_MOCK = 'telegram-crawler'

INPUT_FILENAME = os.environ.get('INPUT_FILENAME', 'tracked_links.txt')
INPUT_RES_FILENAME = os.environ.get('INPUT_FILENAME', 'tracked_res_links.txt')
INPUT_TR_FILENAME = os.environ.get('INPUT_FILENAME', 'tracked_tr_links.txt')
OUTPUT_FOLDER = os.environ.get('OUTPUT_FOLDER', 'data/')
OUTPUT_MTPROTO_FOLDER = os.path.join(OUTPUT_FOLDER, os.environ.get('OUTPUT_MTPROTO_FOLDER', 'server/'))
OUTPUT_SITES_FOLDER = os.path.join(OUTPUT_FOLDER, os.environ.get('OUTPUT_SITES_FOLDER', 'web/'))
OUTPUT_CLIENTS_FOLDER = os.path.join(OUTPUT_FOLDER, os.environ.get('OUTPUT_CLIENTS_FOLDER', 'client/'))
OUTPUT_RESOURCES_FOLDER = os.path.join(OUTPUT_FOLDER, os.environ.get('OUTPUT_RESOURCES_FOLDER', 'web_res/'))
OUTPUT_TRANSLATIONS_FOLDER = os.path.join(OUTPUT_FOLDER, os.environ.get('OUTPUT_RESOURCES_FOLDER', 'web_tr/'))
OUTPUT_MINI_APPS_FOLDER = os.path.join(OUTPUT_FOLDER, os.environ.get('OUTPUT_MINI_APPS_FOLDER', 'mini_app/'))

TRANSLATIONS_EN_CATEGORY_URL_REGEX = r'/en/[a-z_]+/[a-z_]+/$'

PAGE_GENERATION_TIME_REGEX = r'<!-- page generated in .+ -->'
PAGE_API_HASH_REGEX = r'\?hash=[a-z0-9]+'
PAGE_API_HASH_TEMPLATE = f'?hash={DYNAMIC_PART_MOCK}'
TON_RATE_REGEX = r'"tonRate":"[.0-9]+"'
TON_RATE_TEMPLATE = f'"tonRate":"{DYNAMIC_PART_MOCK}"'
PASSPORT_SSID_REGEX = r'passport_ssid=[a-z0-9]+_[a-z0-9]+_[a-z0-9]+'
PASSPORT_SSID_TEMPLATE = f'passport_ssid={DYNAMIC_PART_MOCK}'
NONCE_REGEX = r'"nonce":"[a-z0-9]+_[a-z0-9]+_[a-z0-9]+'
NONCE_TEMPLATE = f'"nonce":"{DYNAMIC_PART_MOCK}'
PROXY_CONFIG_SUB_NET_REGEX = r'\d+\.\d+:8888;'
PROXY_CONFIG_SUB_NET_TEMPLATE = 'X.X:8888;'
TRANSLATE_SUGGESTION_REGEX = r'<div class="tr-value-suggestion">(.?)+</div>'
SPARKLE_SIG_REGEX = r';sig=(.*?);'
SPARKLE_SE_REGEX = r';se=(.*?);'
SPARKLE_SIG_TEMPLATE = f';sig={DYNAMIC_PART_MOCK};'
SPARKLE_SE_TEMPLATE = f';se={DYNAMIC_PART_MOCK};'
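
# The *_REGEX / *_TEMPLATE pairs above are applied in _crawl() to replace volatile
# page fragments (API hashes, passport ssids, nonces, Sparkle signatures, proxy
# subnets, TON rates) with the static DYNAMIC_PART_MOCK marker, so re-crawled pages
# only diff when real content changes.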

STEL_DEV_LAYER = 190

# insecure but so simple
CONNECTOR = aiohttp.TCPConnector(ssl=False, force_close=True, limit=300)
TIMEOUT = aiohttp.ClientTimeout(total=10)
HEADERS = {
    'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:99.0) Gecko/20100101 Firefox/99.0',
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8',
    'Accept-Language': 'en-US,en;q=0.5',
    'Accept-Encoding': 'gzip, deflate, br',
    'DNT': '1',
    'Connection': 'keep-alive',
    'Cookie': f'stel_ln=en; stel_dev_layer={STEL_DEV_LAYER}',
    'Upgrade-Insecure-Requests': '1',
    'Sec-Fetch-Dest': 'document',
    'Sec-Fetch-Mode': 'navigate',
    'Sec-Fetch-Site': 'none',
    'Sec-Fetch-User': '?1',
    'Cache-Control': 'max-age=0',
    'TE': 'trailers',
}

logging.basicConfig(format='%(message)s', level=logging.INFO)
logger = logging.getLogger(__name__)


def get_hash(data: bytes) -> str:
    return hashlib.sha256(data).hexdigest()
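
# Example (illustrative): get_hash(b'abc') -> 'ba7816bf...'; the digest is what gets
# stored in place of binary resources further down in this module.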


async def download_file(url: str, path: str, session: aiohttp.ClientSession):
    async with session.get(url) as response:
        if response.status != 200:
            return

        content = await response.read()

        async with aiofiles.open(path, mode='wb') as f:
            await f.write(content)
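
# Note: download_file returns silently on a non-200 status, so a failed download
# only surfaces later (e.g. when the saved archive is opened by the caller).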


async def get_download_link_of_latest_appcenter_release(parameterized_url: str, session: aiohttp.ClientSession):
    api_base = 'https://install.appcenter.ms/api/v0.1'
    base_url = f'{api_base}/{parameterized_url}'
    headers = {
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:109.0) Gecko/20100101 Firefox/118.0',
    }

    async def make_req(url):
        async with session.get(url, headers=headers) as response:
            if response.status != 200:
                return

            return await response.json(encoding='UTF-8')

    res_json = await make_req(f'{base_url}/public_releases')
    if res_json and res_json[0]:
        latest_id = res_json[0]['id']
        version = res_json[0]['version']
    else:
        raise RuntimeError('AppCenter is down as always')

    logger.info(f'The latest appcenter release is {version} ({parameterized_url})')

    res_json = await make_req(f'{base_url}/releases/{latest_id}')
    if res_json:
        return res_json['download_url']

    raise RuntimeError('AppCenter is down as always')
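
# AppCenter flow: GET {base_url}/public_releases and treat the first entry as the
# latest release, then GET {base_url}/releases/{id} to obtain its 'download_url'.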


async def track_additional_files(
    files_to_track: List[str], input_dir_name: str, output_dir_name: str, encoding='utf-8', save_hash_only=False
):
    kwargs = {'mode': 'r', 'encoding': encoding}
    if save_hash_only:
        kwargs['mode'] = 'rb'
        del kwargs['encoding']

    for file in files_to_track:
        async with aiofiles.open(os.path.join(input_dir_name, file), **kwargs) as r_file:
            content = await r_file.read()

        if save_hash_only:
            content = get_hash(content)
        else:
            content = re.sub(r'id=".*"', 'id="tgcrawl"', content)

        filename = os.path.join(output_dir_name, file)
        os.makedirs(os.path.dirname(filename), exist_ok=True)
        async with aiofiles.open(filename, 'w', encoding='utf-8') as w_file:
            await w_file.write(content)
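
# track_additional_files has two modes: with save_hash_only=True the file content
# is replaced by its sha256 hex digest; otherwise the text is copied as-is, except
# that volatile id="..." attributes are rewritten to id="tgcrawl" to keep diffs stable.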


async def download_telegram_macos_beta_and_extract_resources(session: aiohttp.ClientSession):
    parameterized_url = 'apps/keepcoder/Telergam-Beta-Updated/distribution_groups/public'
    download_url = await get_download_link_of_latest_appcenter_release(parameterized_url, session)

    if not download_url:
        return

    crawled_data_folder = os.path.join(OUTPUT_CLIENTS_FOLDER, 'macos-beta')
    client_folder_name = 'macos'
    client_archive_name = 'macos.zip'

    assets_output_dir = 'macos_assets'
    assets_filename = 'Assets.car'
    assets_extractor = 'acextract'

    tool_download_url = 'https://github.com/MarshalX/acextract/releases/download/3.0/acextract'

    if 'darwin' not in platform.system().lower():
        await download_file(download_url, client_archive_name, session)
    else:
        await asyncio.gather(
            download_file(download_url, client_archive_name, session),
            download_file(tool_download_url, assets_extractor, session),
        )

    # synced
    with zipfile.ZipFile(client_archive_name, 'r') as f:
        f.extractall(client_folder_name)

    resources_path = 'Telegram.app/Contents/Resources'
    files_to_track = [
        f'{resources_path}/en.lproj/Localizable.strings',
    ]
    await track_additional_files(files_to_track, client_folder_name, crawled_data_folder, 'utf-16')

    _, _, hash_of_files_to_track = next(os.walk(f'{client_folder_name}/{resources_path}'))
    hash_of_files_to_track = [f'{resources_path}/{i}' for i in hash_of_files_to_track if i != assets_filename]
    await track_additional_files(hash_of_files_to_track, client_folder_name, crawled_data_folder, save_hash_only=True)

    def cleanup1():
        os.path.isdir(client_folder_name) and shutil.rmtree(client_folder_name)
        os.remove(client_archive_name)

    # .car crawler works only in macOS
    if 'darwin' not in platform.system().lower():
        cleanup1()
        return

    path_to_car = os.path.join(client_folder_name, resources_path, assets_filename)
    await (await asyncio.create_subprocess_exec('chmod', '+x', assets_extractor)).communicate()
    process = await asyncio.create_subprocess_exec(f'./{assets_extractor}', '-i', path_to_car, '-o', assets_output_dir)
    await process.communicate()

    def cleanup2():
        cleanup1()
        os.path.isdir(assets_output_dir) and shutil.rmtree(assets_output_dir)
        os.remove(assets_extractor)

    if process.returncode != 0:
        cleanup2()
        return

    _, _, hash_of_files_to_track = next(os.walk(assets_output_dir))
    await track_additional_files(
        hash_of_files_to_track,
        assets_output_dir,
        os.path.join(crawled_data_folder, assets_filename),
        save_hash_only=True
    )

    cleanup2()

    return  # the code below returns a random result depending on the system?

    executable_path = os.path.join(client_folder_name, 'Telegram.app/Contents/MacOS/Telegram')
    process = await asyncio.create_subprocess_exec(
        'strings', '-n', '7', '-arch', 'x86_64', '--', executable_path, stdout=asyncio.subprocess.PIPE
    )

    stdout = b''
    while process.returncode is None:
        stdout_part = await process.stdout.read(1024)
        if not stdout_part:
            break

        stdout += stdout_part

    if process.returncode != 0:
        cleanup2()
        return

    import string
    binary_strings = stdout.decode('utf-8').split('\n')
    special_chars = list(string.punctuation)
    valid_strings = []
    for binary_string in binary_strings:
        if sum([1 for char in binary_string if char in special_chars]) > 5:
            continue

        valid_strings.append(binary_string.strip())

    valid_strings = sorted(list(set(valid_strings)))
    with open(os.path.join(crawled_data_folder, 'strings.txt'), 'w', encoding='utf-8') as f:
        f.write('\n'.join(valid_strings))

    cleanup2()


async def download_telegram_ios_beta_and_extract_resources(session: aiohttp.ClientSession):
    # TODO fetch version automatically
    # ref: https://docs.github.com/en/rest/releases/releases#get-the-latest-release
    version = '9.0.24102'

    download_url = f'https://github.com/MarshalX/decrypted-telegram-ios/releases/download/{version}/Telegram-{version}.ipa'
    tool_download_url = 'https://github.com/MarshalX/acextract/releases/download/3.0/acextract'

    ipa_filename = f'Telegram-{version}.ipa'
    assets_extractor = 'acextract_ios'
    assets_filename = 'Assets.car'
    assets_output_dir = 'ios_assets'
    client_folder_name = 'ios'
    crawled_data_folder = os.path.join(OUTPUT_CLIENTS_FOLDER, 'ios-beta')

    if 'darwin' not in platform.system().lower():
        await download_file(download_url, ipa_filename, session)
    else:
        await asyncio.gather(
            download_file(download_url, ipa_filename, session),
            download_file(tool_download_url, assets_extractor, session),
        )

    # synced
    with zipfile.ZipFile(ipa_filename, 'r') as f:
        f.extractall(client_folder_name)

    resources_path = 'Payload/Telegram.app'

    files_to_convert = [
        f'{resources_path}/en.lproj/Localizable.strings',
        f'{resources_path}/en.lproj/InfoPlist.strings',
        f'{resources_path}/en.lproj/AppIntentVocabulary.plist',
    ]
    for filename in files_to_convert:
        path = os.path.join(client_folder_name, filename)

        # synced cuz ccl_bplist works with file objects and doesn't support asyncio
        with open(path, 'rb') as r_file:
            plist = ccl_bplist.load(r_file)

        async with aiofiles.open(path, 'w', encoding='utf-8') as w_file:
            await w_file.write(json.dumps(plist, indent=4))

    files_to_track = files_to_convert + [
        f'{resources_path}/_CodeSignature/CodeResources',
        f'{resources_path}/SC_Info/Manifest.plist',
    ]
    await track_additional_files(files_to_track, client_folder_name, crawled_data_folder)

    resources_folder = os.path.join(client_folder_name, resources_path)
    crawled_resources_folder = os.path.join(crawled_data_folder, resources_path)
    _, _, hash_of_files_to_track = next(os.walk(resources_folder))
    await track_additional_files(
        hash_of_files_to_track, resources_folder, crawled_resources_folder, save_hash_only=True
    )

    def cleanup1():
        os.path.isdir(client_folder_name) and shutil.rmtree(client_folder_name)
        os.remove(ipa_filename)

    # sry for copy-paste from macos def ;d

    # .car crawler works only in macOS
    if 'darwin' not in platform.system().lower():
        cleanup1()
        return

    path_to_car = os.path.join(resources_folder, assets_filename)
    await (await asyncio.create_subprocess_exec('chmod', '+x', assets_extractor)).communicate()
    process = await asyncio.create_subprocess_exec(f'./{assets_extractor}', '-i', path_to_car, '-o', assets_output_dir)
    await process.communicate()

    def cleanup2():
        cleanup1()
        os.path.isdir(assets_output_dir) and shutil.rmtree(assets_output_dir)
        os.remove(assets_extractor)

    if process.returncode != 0:
        cleanup2()
        return

    for dir_path, _, hash_of_files_to_track in os.walk(assets_output_dir):
        await track_additional_files(
            # sry for this shit ;d
            [os.path.join(dir_path, file).replace(f'{assets_output_dir}/', '') for file in hash_of_files_to_track],
            assets_output_dir,
            os.path.join(crawled_data_folder, assets_filename),
            save_hash_only=True
        )

    cleanup2()


async def download_telegram_android_and_extract_resources(session: aiohttp.ClientSession) -> None:
    await download_telegram_android_stable_dl_and_extract_resources(session)
    await download_telegram_android_beta_and_extract_resources(session)


async def download_telegram_android_stable_dl_and_extract_resources(session: aiohttp.ClientSession):
    download_url = 'https://telegram.org/dl/android/apk'

    await _download_telegram_android_and_extract_resources(session, download_url, 'android-stable-dl')


async def download_telegram_android_beta_and_extract_resources(session: aiohttp.ClientSession):
    parameterized_url = 'apps/drklo-2kb-ghpo/telegram-beta-2/distribution_groups/all-users-of-telegram-beta-2'
    download_url = await get_download_link_of_latest_appcenter_release(parameterized_url, session)

    await _download_telegram_android_and_extract_resources(session, download_url, 'android-beta')


async def _download_telegram_android_and_extract_resources(
    session: aiohttp.ClientSession, download_url: str, folder_name: str
):
    crawled_data_folder = os.path.join(OUTPUT_CLIENTS_FOLDER, folder_name)

    if not download_url:
        return

    await asyncio.gather(
        download_file('https://bitbucket.org/iBotPeaches/apktool/downloads/apktool_2.9.0.jar', 'tool.apk', session),
        download_file(download_url, 'android.apk', session),
    )

    def cleanup():
        os.path.isdir('android') and shutil.rmtree('android')
        os.remove('tool.apk')
        os.remove('android.apk')

    process = await asyncio.create_subprocess_exec(
        'java', '-jar', 'tool.apk', 'd', '-s', '-f', 'android.apk',
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.STDOUT
    )
    await process.communicate()

    if process.returncode != 0:
        cleanup()
        return

    files_to_track = [
        'res/values/strings.xml',
        'res/values/public.xml'
    ]
    await track_additional_files(files_to_track, 'android', crawled_data_folder)

    cleanup()


def parse_string_with_possible_json(input_string) -> dict:
    # chat gpt powered code:
    try:
        # Attempt to parse the entire input string as JSON
        json_object = json.loads(input_string)
    except json.JSONDecodeError:
        # Regular expression to find JSON objects within the string
        json_regex = r'{[^{}]*}'
        matches = re.findall(json_regex, input_string)

        if matches:
            # Use the first match as the extracted JSON
            json_object = json.loads(matches[0])
        else:
            raise ValueError('No JSON found within the input string.')

    return json_object
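
# Example (illustrative): parse_string_with_possible_json('chunk map: {"1":"abc"}')
# returns {'1': 'abc'}; note the fallback regex only matches flat (non-nested) objects.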


async def crawl_mini_app_wallet():
    crawled_data_folder = os.path.join(OUTPUT_MINI_APPS_FOLDER, 'wallet')

    def cleanup():
        os.path.isdir('wallet') and shutil.rmtree('wallet')

    async def _run_unwebpack_sourcemap(url: str):
        process = await asyncio.create_subprocess_exec(
            'python', 'unwebpack_sourcemap.py', '--make-directory', '--detect', url, 'wallet',
        )
        await process.communicate()

        if process.returncode != 0:
            cleanup()
            raise RuntimeError('unwebpack_sourcemap failed')

    crawled_unpacked_folder = os.path.join('wallet', 'webpack', 'wallet-react-form')

    await _run_unwebpack_sourcemap('https://walletbot.me/')

    webpack_chunks_db_path = os.path.join(crawled_unpacked_folder, 'webpack', 'runtime', 'get javascript chunk filename')
    webpack_chunks_db = parse_string_with_possible_json(open(webpack_chunks_db_path, 'r').read())
    for chunk_id, chunk_name in webpack_chunks_db.items():
        await _run_unwebpack_sourcemap(f'https://walletbot.me/static/js/{chunk_id}.{chunk_name}.js')

    files_to_track = []

    crawled_empty_0_folder = os.path.join(crawled_unpacked_folder, 'empty_0')
    crawled_src_folder = os.path.join(crawled_empty_0_folder, 'src')
    for root, folders, files in os.walk(crawled_src_folder):
        for file in files:
            files_to_track.append(os.path.join(root, file).replace(f'{crawled_empty_0_folder}/', ''))

    await track_additional_files(files_to_track, crawled_empty_0_folder, crawled_data_folder)

    cleanup()


async def collect_translations_paginated_content(url: str, session: aiohttp.ClientSession) -> str:
    import cssutils
    from bs4 import BeautifulSoup

    css_parser = cssutils.CSSParser(loglevel=logging.FATAL, raiseExceptions=False)

    headers = {'X-Requested-With': 'XMLHttpRequest'}
    content = dict()

    async def _get_page(offset: int):
        logger.info(f'Url: {url}, offset: {offset}')
        data = {'offset': offset, 'more': 1}

        try:
            new_offset = None
            async with session.post(
                f'{PROTOCOL}{url}', data=data, headers=headers, allow_redirects=False, timeout=TIMEOUT
            ) as response:
                if (499 < response.status < 600) or (response.status != 200):
                    logger.debug(f'Resend cuz {response.status}')
                    new_offset = offset
                else:
                    res_json = await response.json(encoding='UTF-8')
                    if 'more_html' in res_json and res_json['more_html']:
                        res_json['more_html'] = re.sub(TRANSLATE_SUGGESTION_REGEX, '', res_json['more_html'])

                        soup = BeautifulSoup(res_json['more_html'], 'html.parser')
                        tr_items = soup.find_all('div', {'class': 'tr-key-row-wrap'})
                        for tr_item in tr_items:
                            tr_key = tr_item.find('div', {'class': 'tr-value-key'}).text

                            tr_url = tr_item.find('div', {'class': 'tr-key-row'})['data-href']
                            tr_url = f'https://translations.telegram.org{tr_url}'

                            tr_photo = tr_item.find('a', {'class': 'tr-value-photo'})
                            if tr_photo:
                                tr_photo = css_parser.parseStyle(tr_photo['style']).backgroundImage[5:-2]

                            tr_has_binding = tr_item.find('span', {'class': 'has-binding binding'})
                            tr_has_binding = tr_has_binding is not None

                            tr_values = tr_item.find_all('span', {'class': 'value'})
                            tr_value_singular, *tr_value_plural = [tr_value.decode_contents() for tr_value in tr_values]
                            tr_values = {'singular': tr_value_singular}
                            if tr_value_plural:
                                tr_values['plural'] = tr_value_plural[0]

                            content[tr_key] = {
                                'url': tr_url,
                                'photo_url': tr_photo,
                                'has_binding': tr_has_binding,
                                'values': tr_values,
                            }

                        new_offset = offset + 200

            new_offset and await _get_page(new_offset)
        except (ServerDisconnectedError, TimeoutError, ClientConnectorError):
            logger.warning(f'Client or timeout error. Retrying {url}; offset {offset}')
            await _get_page(offset)

    await _get_page(0)

    content = dict(sorted(content.items()))
    return json.dumps(content, indent=4, ensure_ascii=False)
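
# The translations listing is paginated: each POST with {'offset': N, 'more': 1}
# returns a chunk of 'more_html', and _get_page recurses with offset + 200 until an
# empty page comes back; suggestion markup is stripped first so only stable values
# end up in the snapshot.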


async def track_mtproto_methods():
    #####################
    # PATCH BROKEN PYROGRAM
    import pkgutil
    from pathlib import Path
    pyrogram_path = Path(pkgutil.get_loader('pyrogram').path).parent
    broken_class_path = os.path.join(pyrogram_path, 'raw', 'types', 'story_fwd_header.py')
    with open(broken_class_path, 'w', encoding='UTF-8') as f:
        # I rly don't want to fix bug in pyrogram about using reserved words as argument names
        f.write('class StoryFwdHeader: ...')
    #####################

    from pyrogram import Client

    kw = {
        'api_id': int(os.environ['TELEGRAM_API_ID']),
        'api_hash': os.environ['TELEGRAM_API_HASH'],
        'app_version': '@tgcrawl',
        'in_memory': True
    }

    test_dc = 2
    test_phone_prefix = '99966'
    test_phone_suffix = os.environ.get('TELEGRAM_TEST_PHONE_SUFFIX', random.randint(1000, 9999))
    test_phone_number = f'{test_phone_prefix}{test_dc}{test_phone_suffix}'
    test_phone_code = str(test_dc) * 5

    app_test = Client('crawler_test', phone_number=test_phone_number, phone_code=test_phone_code, test_mode=True, **kw)
    app = Client('crawler', session_string=os.environ['TELEGRAM_SESSION'], **kw)

    await asyncio.gather(app_test.start(), app.start())
    await asyncio.gather(_fetch_and_track_mtproto(app, ''), _fetch_and_track_mtproto(app_test, 'test'))


async def _fetch_and_track_mtproto(app, output_dir):
    from pyrogram.raw import functions
    from pyrogram.raw.types import InputStickerSetShortName

    configs = {
        'GetConfig': await app.invoke(functions.help.GetConfig()),
        'GetCdnConfig': await app.invoke(functions.help.GetCdnConfig()),
        # 'GetInviteText': await app.invoke(functions.help.GetInviteText()),
        # 'GetSupport': await app.invoke(functions.help.GetSupport()),
        # 'GetSupportName': await app.invoke(functions.help.GetSupportName()),
        # 'GetPassportConfig': await app.invoke(functions.help.GetPassportConfig(hash=0)),
        'GetCountriesList': await app.invoke(functions.help.GetCountriesList(lang_code='en', hash=0)),
        'GetAppConfig': await app.invoke(functions.help.GetAppConfig(hash=0)),
        # 'GetAppUpdate': await app.invoke(functions.help.GetAppUpdate(source='')),
        # 'AnimatedEmoji': await app.invoke(
        #     functions.messages.GetStickerSet(stickerset=InputStickerSetAnimatedEmoji(), hash=0)
        # ),
        'GetAvailableReactions': await app.invoke(functions.messages.GetAvailableReactions(hash=0)),
        'GetPremiumPromo': await app.invoke(functions.help.GetPremiumPromo()),
    }

    sticker_set_short_names = {
        'EmojiAnimations',
        'EmojiAroundAnimations',
        'EmojiShortAnimations',
        'EmojiAppearAnimations',
        'EmojiCenterAnimations',
        'AnimatedEmojies',
        'EmojiGenericAnimations',
    }

    if app.test_mode:
        sticker_set_short_names.add('PremiumGifts')
        sticker_set_short_names.add('StatusEmojiWhite')
    else:
        sticker_set_short_names.add('UtyaDuckFull')
        sticker_set_short_names.add('GiftsPremium')
        sticker_set_short_names.add('StatusPack')
        sticker_set_short_names.add('RestrictedEmoji')

    for short_name in sticker_set_short_names:
        sticker_set = await app.invoke(functions.messages.GetStickerSet(
            stickerset=InputStickerSetShortName(short_name=short_name), hash=0
        ))
        configs[f'sticker_set/{short_name}'] = sticker_set

    bots_usernames_to_track = {'BotFather', 'DurgerKingBot', 'asmico_attach_bot'}
    if app.test_mode:
        bots_usernames_to_track.add('izpremiumbot')
    else:
        bots_usernames_to_track.add('PremiumBot')

    bots_usernames_to_track.clear()
    for bot_username in bots_usernames_to_track:
        bot_peer = await app.resolve_peer(bot_username)
        bot_full = (await app.invoke(functions.users.GetFullUser(id=bot_peer)))
        configs[f'bot/{bot_username}'] = f'{{"full_user": {str(bot_full.full_user)}, "users": {str(bot_full.users)}}}'

    peers_to_track = set()
    if not app.test_mode:
        peers_to_track.add('invoice')
        peers_to_track.add('premium')

    peers_to_track.clear()
    for peer_id in peers_to_track:
        peer = await app.resolve_peer(peer_id)
        configs[f'peer/{peer_id}'] = peer

    configs['GetPremiumPromo'].users = []
    configs['GetPremiumPromo'].status_text = 'crawler'
    configs['GetPremiumPromo'].status_entities = []
    configs['GetPremiumPromo'].period_options = []

    configs['GetAppConfig'].hash = 'crawler'

    keys_to_hide = {'access_hash', 'autologin_token', 'file_reference', 'file_reference_base64', 'pending_suggestions'}
    if app.test_mode:
        keys_to_hide.add('dialog_filters_tooltip')

    def rem_rec(config):
        if not isinstance(config, dict):
            return

        for key, value in config.items():
            if isinstance(value, dict):
                rem_rec(value)
            elif isinstance(value, list):
                for item in value:
                    rem_rec(item)
            elif key == 'key' and value in keys_to_hide:
                config['value']['value'] = 'crawler'
            elif key in keys_to_hide:
                config[key] = 'crawler'

    methods_to_filter = {'GetAppConfig', 'GetAvailableReactions', 'GetPremiumPromo'}
    sticker_sets_to_filter = {f'sticker_set/{name}' for name in sticker_set_short_names}
    bots_to_filter = {f'bot/{name}' for name in bots_usernames_to_track}
    peers_to_filter = {f'peer/{name}' for name in peers_to_track}

    combined_filter = methods_to_filter | sticker_sets_to_filter | bots_to_filter | peers_to_filter
    for config_name in combined_filter:
        configs[config_name] = json.loads(str(configs[config_name]))
        rem_rec(configs[config_name])
        configs[config_name] = json.dumps(configs[config_name], ensure_ascii=False, indent=4)

    configs['GetConfig'].date = 0
    configs['GetConfig'].expires = 0
    configs['GetConfig'].autologin_token = 'crawler'
    configs['GetConfig'].dc_options = []

    for file, content in configs.items():
        filename = os.path.join(OUTPUT_MTPROTO_FOLDER, output_dir, f'{file}.json')
        os.makedirs(os.path.dirname(filename), exist_ok=True)
        async with aiofiles.open(filename, 'w', encoding='utf-8') as w_file:
            await w_file.write(str(content))

    await app.stop()
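
# rem_rec above recursively replaces values of keys_to_hide (access hashes, file
# references, autologin tokens, ...) with the literal 'crawler', so per-session
# values don't show up as spurious diffs in the dumped configs.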


def is_hashable_only_content_type(content_type) -> bool:
    hashable_only_content_types = (
        'png',
        'jpeg',
        'x-icon',
        'gif',
        'mp4',
        'webm',
        'zip',
        'stream',
    )

    for hashable_only_content_type in hashable_only_content_types:
        if hashable_only_content_type in content_type:
            return True

    return False
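
# Example (illustrative): is_hashable_only_content_type('image/png') is True, so
# _crawl stores a .sha256 hash file instead of the PNG payload itself.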


class RetryError(Exception):
    ...


async def crawl(url: str, session: aiohttp.ClientSession, output_dir: str):
    while True:
        try:
            await _crawl(url, session, output_dir)
        except (RetryError, ServerDisconnectedError, TimeoutError, ClientConnectorError):
            logger.warning(f'Client or timeout error. Retrying {url}')
        else:
            break
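
# crawl() retries the same URL on transport errors (or an explicit RetryError from
# _crawl) and only exits the loop once _crawl completes without raising.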


async def _crawl(url: str, session: aiohttp.ClientSession, output_dir: str):
    logger.info(f'Process {url}')
    async with session.get(f'{PROTOCOL}{url}', allow_redirects=False, timeout=TIMEOUT, headers=HEADERS) as response:
        if 499 < response.status < 600:
            msg = f'Error 5XX. Retrying {url}'
            logger.warning(msg)
            raise RetryError(msg)

        if response.status not in {200, 304}:
            if response.status != 302:
                content = await response.text()
                logger.debug(f'Skip {url} because status code == {response.status}. Content: {content}')
            return

        # bypass external slashes and so on
        url_parts = [p for p in url.split('/') if p not in ILLEGAL_PATH_CHARS]

        content_type = response.content_type

        # handle pure domains and html pages without ext in url as html to enable syntax highlighting
        page_type, _ = mimetypes.guess_type(url)

        ext = ''
        if page_type:
            ext = mimetypes.guess_extension(page_type) or ''
        if ext != '' and url.endswith(ext):
            ext = ''

        if url.endswith('.tl'):
            page_type = 'text/plain'

        if page_type is None or len(url_parts) == 1:
            ext = '.html'
            content_type = 'text/html'

        if re.search(TRANSLATIONS_EN_CATEGORY_URL_REGEX, url) or 'td.telegram.org/current' in url:
            ext = '.json'
            content_type = 'application/json'

        is_hashable_only = is_hashable_only_content_type(content_type)
        # amazing dirt for media files like
        # telegram.org/file/811140591/1/q7zZHjgES6s/9d121a89ffb0015837
        # with response content type HTML instead of image.
        # shame on you.
        # sometimes it returns a correct type.
        # noice load balancing
        is_sucking_file = '/file/' in url and 'text' in content_type

        # I don't add ext by content type for images, and so on cuz TG servers suck.
        # Some servers do not return a correct content type.
        # Some servers do...
        if is_hashable_only or is_sucking_file:
            ext = '.sha256'

        filename = os.path.join(output_dir, *url_parts) + ext
        os.makedirs(os.path.dirname(filename), exist_ok=True)

        if is_sucking_file or is_hashable_only:
            content = await response.read()
            async with aiofiles.open(filename, 'w', encoding='utf-8') as f:
                await f.write(get_hash(content))
            return

        content = await response.text(encoding='UTF-8')
        if re.search(TRANSLATIONS_EN_CATEGORY_URL_REGEX, url):
            content = await collect_translations_paginated_content(url, session)

        content = re.sub(PAGE_GENERATION_TIME_REGEX, '', content)
        content = re.sub(PAGE_API_HASH_REGEX, PAGE_API_HASH_TEMPLATE, content)
        content = re.sub(PASSPORT_SSID_REGEX, PASSPORT_SSID_TEMPLATE, content)
        content = re.sub(NONCE_REGEX, NONCE_TEMPLATE, content)
        content = re.sub(PROXY_CONFIG_SUB_NET_REGEX, PROXY_CONFIG_SUB_NET_TEMPLATE, content)
        content = re.sub(SPARKLE_SIG_REGEX, SPARKLE_SIG_TEMPLATE, content)
        content = re.sub(SPARKLE_SE_REGEX, SPARKLE_SE_TEMPLATE, content)
        content = re.sub(TON_RATE_REGEX, TON_RATE_TEMPLATE, content)

        # there is a problem with the files with the same name (in the same path) but different case
        # the content is random because of the async
        # there is only one page with this problem, for now:
        # - corefork.telegram.org/constructor/Updates
        # - corefork.telegram.org/constructor/updates
        async with aiofiles.open(filename, 'w', encoding='utf-8') as f:
            logger.info(f'Write to {filename}')
            await f.write(content)
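
# _crawl pipeline: raise RetryError on 5xx, skip other non-200/304 responses, map
# the URL to a local path and extension, store binary-ish content as a .sha256
# hash, scrub the dynamic fragments defined at the top of the file, then write.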


async def _crawl_web(session: aiohttp.ClientSession, input_filename: str, output_folder=None):
    with open(input_filename, 'r') as f:
        tracked_urls = set([l.replace('\n', '') for l in f.readlines()])

    await asyncio.gather(*[crawl(url, session, output_folder) for url in tracked_urls])


async def crawl_web(session: aiohttp.ClientSession):
    await _crawl_web(session, INPUT_FILENAME, OUTPUT_SITES_FOLDER)


async def crawl_web_res(session: aiohttp.ClientSession):
    await _crawl_web(session, INPUT_RES_FILENAME, OUTPUT_RESOURCES_FOLDER)


async def _collect_and_track_all_translation_keys():
    translations = dict()

    start_folder = 'en/'
    file_format = '.json'
    output_filename = 'translation_keys.json'

    for root, folder, files in os.walk(OUTPUT_TRANSLATIONS_FOLDER):
        for file in files:
            if not file.endswith(file_format) or file == output_filename:
                continue

            async with aiofiles.open(os.path.join(root, file), encoding='utf-8') as f:
                content = json.loads(await f.read())

            client = root[root.index(start_folder) + len(start_folder):]
            if client not in translations:
                translations[client] = list()

            translations[client].extend(content.keys())

    for client in translations.keys():
        translations[client] = sorted(translations[client])

    translations = dict(sorted(translations.items()))

    async with aiofiles.open(os.path.join(OUTPUT_TRANSLATIONS_FOLDER, output_filename), 'w', encoding='utf-8') as f:
        await f.write(json.dumps(translations, indent=4))
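
# The aggregated per-client key lists are written (sorted) to translation_keys.json
# inside OUTPUT_TRANSLATIONS_FOLDER, so the file only changes when translation keys
# are added or removed.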


async def crawl_web_tr(session: aiohttp.ClientSession):
    await _crawl_web(session, INPUT_TR_FILENAME, OUTPUT_TRANSLATIONS_FOLDER)
    await _collect_and_track_all_translation_keys()


async def start(mode: str):
    async with aiohttp.ClientSession(connector=CONNECTOR) as session:
        mode == 'all' and await asyncio.gather(
            crawl_web(session),
            crawl_web_res(session),
            crawl_web_tr(session),
            track_mtproto_methods(),
            download_telegram_android_beta_and_extract_resources(session),
            download_telegram_macos_beta_and_extract_resources(session),
            download_telegram_ios_beta_and_extract_resources(session),
            crawl_mini_app_wallet(),
        )
        mode == 'web' and await asyncio.gather(
            crawl_web(session),
        )
        mode == 'web_res' and await asyncio.gather(
            crawl_web_res(session),
        )
        mode == 'web_tr' and await asyncio.gather(
            crawl_web_tr(session),
        )
        mode == 'server' and await asyncio.gather(
            track_mtproto_methods(),
        )
        mode == 'client' and await asyncio.gather(
            download_telegram_android_and_extract_resources(session),
            download_telegram_macos_beta_and_extract_resources(session),
            download_telegram_ios_beta_and_extract_resources(session),
        )
        mode == 'mini_app' and await asyncio.gather(
            crawl_mini_app_wallet(),
        )
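
# start() dispatches on the mode argument (the MODE environment variable in
# __main__): 'all', 'web', 'web_res', 'web_tr', 'server', 'client' or 'mini_app',
# using short-circuit `x and await ...` expressions instead of if-blocks.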


if __name__ == '__main__':
    run_mode = 'all'
    if 'MODE' in os.environ:
        run_mode = os.environ['MODE']

    start_time = time()
    logger.info('Start crawling content of tracked urls...')
    asyncio.get_event_loop().run_until_complete(start(run_mode))
    logger.info(f'Stop crawling content in mode {run_mode}. {time() - start_time} sec.')