Refactored self-check method, code formatting, small lint fixes (#1942)
@@ -47,6 +47,9 @@ Use the following commands to check Maigret:
 # - mypy checks
 make lint
 
+# run black formatter
+make format
+
 # run testing with coverage html report
 # current test coverage is 58%
 make test
@@ -47,9 +47,7 @@ class ParsingActivator:
         session = requests.Session()
         # 1 stage: get the redirect URL
         r = session.get(
-            "https://weibo.com/clairekuo",
-            headers=headers,
-            allow_redirects=False
+            "https://weibo.com/clairekuo", headers=headers, allow_redirects=False
         )
         logger.debug(
            f"1 stage: {'success' if r.status_code == 302 else 'no 302 redirect, fail!'}"
@@ -68,11 +66,7 @@ class ParsingActivator:
         r = session.post(
             "https://passport.weibo.com/visitor/genvisitor2",
             headers=headers,
-            data={
-                'cb': 'visitor_gray_callback',
-                'tid': '',
-                'from': 'weibo'
-            },
+            data={'cb': 'visitor_gray_callback', 'tid': '', 'from': 'weibo'},
         )
         cookies = r.headers.get('set-cookie')
         logger.debug(
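Taken together, the two hunks above reduce to a compact two-stage flow: an unauthenticated GET that must answer with a 302 redirect, followed by a POST that yields a visitor cookie. A minimal standalone sketch of that flow, assembled from the diff lines (the contents of `headers` and the wrapper function are assumptions; the diff only shows that a `headers` dict is passed in):

import logging

import requests

logger = logging.getLogger(__name__)

# Assumed headers: the diff does not show what `headers` contains.
headers = {"User-Agent": "Mozilla/5.0"}


def get_weibo_visitor_cookies() -> str:
    session = requests.Session()

    # 1 stage: the profile URL should answer with a 302 redirect
    r = session.get(
        "https://weibo.com/clairekuo", headers=headers, allow_redirects=False
    )
    logger.debug(
        f"1 stage: {'success' if r.status_code == 302 else 'no 302 redirect, fail!'}"
    )

    # 2 stage: request a visitor cookie from the passport endpoint
    r = session.post(
        "https://passport.weibo.com/visitor/genvisitor2",
        headers=headers,
        data={'cb': 'visitor_gray_callback', 'tid': '', 'from': 'weibo'},
    )
    return r.headers.get('set-cookie') or ""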
+22 -8
@@ -16,6 +16,7 @@ from aiohttp import ClientSession, TCPConnector, http_exceptions
 from aiohttp.client_exceptions import ClientConnectorError, ServerDisconnectedError
 from python_socks import _errors as proxy_errors
 from socid_extractor import extract
+
 try:
     from mock import Mock
 except ImportError:
@@ -77,7 +78,9 @@ class SimpleAiohttpChecker(CheckerBase):
     async def close(self):
         pass
 
-    async def _make_request(self, session, url, headers, allow_redirects, timeout, method, logger) -> Tuple[str, int, Optional[CheckError]]:
+    async def _make_request(
+        self, session, url, headers, allow_redirects, timeout, method, logger
+    ) -> Tuple[str, int, Optional[CheckError]]:
         try:
             request_method = session.get if method == 'get' else session.head
             async with request_method(
@@ -120,7 +123,12 @@ class SimpleAiohttpChecker(CheckerBase):
 
     async def check(self) -> Tuple[str, int, Optional[CheckError]]:
         from aiohttp_socks import ProxyConnector
-        connector = ProxyConnector.from_url(self.proxy) if self.proxy else TCPConnector(ssl=False)
+
+        connector = (
+            ProxyConnector.from_url(self.proxy)
+            if self.proxy
+            else TCPConnector(ssl=False)
+        )
         connector.verify_ssl = False
 
         async with ClientSession(
@@ -136,7 +144,7 @@ class SimpleAiohttpChecker(CheckerBase):
                 self.allow_redirects,
                 self.timeout,
                 self.method,
-                self.logger
+                self.logger,
             )
 
         if error and str(error) == "Invalid proxy response":
@@ -385,7 +393,7 @@ def process_site_result(
                 tree = ast.literal_eval(v)
                 if type(tree) == list:
                     for n in tree:
                         new_usernames[n] = "username"
             except Exception as e:
                 logger.warning(e)
             if k in SUPPORTED_IDS:
@@ -549,7 +557,7 @@ async def check_site_for_username(
     )
     # future = default_result.get("future")
     # if not future:
     #     return site.name, default_result
 
     checker = default_result.get("checker")
     if not checker:
@@ -804,6 +812,7 @@ async def site_self_check(
     tor_proxy=None,
     i2p_proxy=None,
     skip_errors=False,
+    cookies=None,
 ):
     changes = {
         "disabled": False,
@@ -830,6 +839,7 @@ async def site_self_check(
         proxy=proxy,
         tor_proxy=tor_proxy,
         i2p_proxy=i2p_proxy,
+        cookies=cookies,
     )
 
     # don't disable entries with other ids types
@@ -878,7 +888,7 @@ async def site_self_check(
 
     if changes["disabled"] != site.disabled:
         site.disabled = changes["disabled"]
-        logger.info(f"Switching disabled status of {site.name} to {site.disabled}")
+        logger.info(f"Switching property 'disabled' for {site.name} to {site.disabled}")
         db.update_site(site)
         if not silent:
             action = "Disabled" if site.disabled else "Enabled"
@@ -909,7 +919,9 @@ async def self_check(
    def disabled_count(lst):
        return len(list(filter(lambda x: x.disabled, lst)))
 
-    unchecked_old_count = len([site for site in all_sites.values() if "unchecked" in site.tags])
+    unchecked_old_count = len(
+        [site for site in all_sites.values() if "unchecked" in site.tags]
+    )
     disabled_old_count = disabled_count(all_sites.values())
 
     for _, site in all_sites.items():
@@ -925,7 +937,9 @@ async def self_check(
             await f
             progress()  # Update the progress bar
 
-    unchecked_new_count = len([site for site in all_sites.values() if "unchecked" in site.tags])
+    unchecked_new_count = len(
+        [site for site in all_sites.values() if "unchecked" in site.tags]
+    )
    disabled_new_count = disabled_count(all_sites.values())
    total_disabled = disabled_new_count - disabled_old_count
 
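The reflowed `check()` body wraps a conditional expression that picks the connector: a SOCKS/HTTP proxy connector when a proxy is configured, a plain TCP connector otherwise. A self-contained sketch of the same pattern outside the checker class (`fetch_status` and its arguments are illustrative, not the module's API):

import asyncio
from typing import Optional

from aiohttp import ClientSession, TCPConnector
from aiohttp_socks import ProxyConnector


async def fetch_status(url: str, proxy: Optional[str] = None) -> int:
    # Route through a SOCKS/HTTP proxy when one is configured,
    # otherwise fall back to a plain TCP connector without TLS verification.
    connector = (
        ProxyConnector.from_url(proxy)
        if proxy
        else TCPConnector(ssl=False)
    )
    async with ClientSession(connector=connector) as session:
        async with session.get(url) as response:
            return response.status

Usage would look like `asyncio.run(fetch_status('https://example.com', proxy='socks5://127.0.0.1:9050'))`.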
+2 -4
@@ -58,12 +58,10 @@ COMMON_ERRORS = {
     'Сайт заблокирован хостинг-провайдером': CheckError(
         'Site-specific', 'Site is disabled (Beget)'
     ),
-    'Generated by cloudfront (CloudFront)': CheckError(
-        'Request blocked', 'Cloudflare'
-    ),
+    'Generated by cloudfront (CloudFront)': CheckError('Request blocked', 'Cloudflare'),
     '/cdn-cgi/challenge-platform/h/b/orchestrate/chl_page': CheckError(
         'Just a moment: bot redirect challenge', 'Cloudflare'
-    )
+    ),
 }
 
 ERRORS_TYPES = {
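`COMMON_ERRORS` maps marker substrings to `CheckError` instances, so recognizing a blocked or disabled response comes down to a substring scan over the page body. A minimal sketch of that idea (the `detect_error` helper and the trimmed-down `CheckError` class here are illustrations, not the module's actual API):

from typing import Optional


class CheckError:
    """Trimmed-down stand-in for maigret's CheckError (type, context)."""

    def __init__(self, type_: str, context: str = ""):
        self.type = type_
        self.context = context

    def __repr__(self) -> str:
        return f"CheckError({self.type!r}, {self.context!r})"


COMMON_ERRORS = {
    'Generated by cloudfront (CloudFront)': CheckError('Request blocked', 'Cloudflare'),
    '/cdn-cgi/challenge-platform/h/b/orchestrate/chl_page': CheckError(
        'Just a moment: bot redirect challenge', 'Cloudflare'
    ),
}


def detect_error(response_text: str) -> Optional[CheckError]:
    # Return the first known error whose marker occurs in the page body.
    for marker, err in COMMON_ERRORS.items():
        if marker in response_text:
            return err
    return None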
@@ -8,6 +8,7 @@ from alive_progress import alive_bar
 
 from .types import QueryDraft
 
+
 def create_task_func():
     if sys.version_info.minor > 6:
         create_asyncio_task = asyncio.create_task
@@ -156,7 +157,9 @@ class AsyncioProgressbarQueueExecutor(AsyncExecutor):
 
         # Initialize the progress bar
         if self.progress_func:
-            with self.progress_func(len(queries_list), title="Searching", force_tty=True) as bar:
+            with self.progress_func(
+                len(queries_list), title="Searching", force_tty=True
+            ) as bar:
                 self.progress = bar  # Assign alive_bar's callable to self.progress
 
                 # Add tasks to the queue
@@ -170,4 +173,4 @@ class AsyncioProgressbarQueueExecutor(AsyncExecutor):
         for w in workers:
             w.cancel()
 
         return self.results
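Here `self.progress_func` is `alive_bar` from alive_progress, and the reflowed `with` block is the standard way to drive it: the context manager yields a callable, and each call advances the bar by one. A compact sketch of the same pattern outside the executor (the query list and the work inside the loop are placeholders):

from alive_progress import alive_bar

queries = ["alice", "bob", "carol"]  # placeholder workload

with alive_bar(len(queries), title="Searching", force_tty=True) as bar:
    for query in queries:
        # ... run one site check for `query` here ...
        bar()  # each call advances the progress bar by one step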
+20 -9
@@ -1,6 +1,7 @@
 """
 Maigret main module
 """
+
 import ast
 import asyncio
 import logging
@@ -44,7 +45,9 @@ from .settings import Settings
 from .permutator import Permute
 
 
-def notify_about_errors(search_results: QueryResultWrapper, query_notify, show_statistics=False):
+def notify_about_errors(
+    search_results: QueryResultWrapper, query_notify, show_statistics=False
+):
     errs = errors.extract_and_group(search_results)
     was_errs_displayed = False
     for e in errs:
@@ -61,7 +64,7 @@ def notify_about_errors(search_results: QueryResultWrapper, query_notify, show_s
     if show_statistics:
         query_notify.warning(f'Verbose error statistics:')
         for e in errs:
             text = f'{e["err"]}: {round(e["perc"],2)}%'
             query_notify.warning(text, '!')
 
     if was_errs_displayed:
@@ -69,6 +72,7 @@ def notify_about_errors(search_results: QueryResultWrapper, query_notify, show_s
             'You can see detailed site check errors with a flag `--print-errors`'
         )
 
+
 def extract_ids_from_page(url, logger, timeout=5) -> dict:
     results = {}
     # url, headers
@@ -100,7 +104,7 @@ def extract_ids_from_page(url, logger, timeout=5) -> dict:
             tree = ast.literal_eval(v)
             if type(tree) == list:
                 for n in tree:
                     results[n] = 'username'
         except Exception as e:
             logger.warning(e)
         if k in SUPPORTED_IDS:
@@ -566,14 +570,19 @@ async def main():
         is_submitted = await submitter.dialog(args.new_site_to_submit, args.cookie_file)
         if is_submitted:
             db.save_to_file(db_file)
+        await submitter.close()
 
     # Database self-checking
     if args.self_check:
         if len(site_data) == 0:
-            query_notify.warning('No sites to self-check with the current filters! Exiting...')
+            query_notify.warning(
+                'No sites to self-check with the current filters! Exiting...'
+            )
             return
 
-        query_notify.success(f'Maigret sites database self-check started for {len(site_data)} sites...')
+        query_notify.success(
+            f'Maigret sites database self-check started for {len(site_data)} sites...'
+        )
         is_need_update = await self_check(
             db,
             site_data,
@@ -594,7 +603,9 @@ async def main():
         print('Updates will be applied only for current search session.')
 
     if args.verbose or args.debug:
-        query_notify.info('Scan sessions flags stats: ' + str(db.get_scan_stats(site_data)))
+        query_notify.info(
+            'Scan sessions flags stats: ' + str(db.get_scan_stats(site_data))
+        )
 
     # Database statistics
     if args.stats:
@@ -613,10 +624,10 @@ async def main():
         query_notify.warning('No usernames to check, exiting.')
         sys.exit(0)
 
     if len(usernames) > 1 and args.permute and args.id_type == 'username':
         query_notify.warning(
-            f"{len(usernames)} permutations from {original_usernames} to check..." +
-            get_dict_ascii_tree(usernames, prepend="\t")
+            f"{len(usernames)} permutations from {original_usernames} to check..."
+            + get_dict_ascii_tree(usernames, prepend="\t")
         )
 
     if not site_data:
@@ -3,6 +3,7 @@
 This module defines the objects for notifying the caller about the
 results of queries.
 """
+
 import sys
 
 from colorama import Fore, Style, init
+6 -2
@@ -295,8 +295,12 @@ def generate_report_context(username_results: list):
                     first_seen = created_at
                 else:
                     try:
-                        known_time = parse_datetime_str(first_seen, tzinfos=ADDITIONAL_TZINFO)
-                        new_time = parse_datetime_str(created_at, tzinfos=ADDITIONAL_TZINFO)
+                        known_time = parse_datetime_str(
+                            first_seen, tzinfos=ADDITIONAL_TZINFO
+                        )
+                        new_time = parse_datetime_str(
+                            created_at, tzinfos=ADDITIONAL_TZINFO
+                        )
                         if new_time < known_time:
                             first_seen = created_at
                     except Exception as e:
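The `first_seen` logic keeps the earliest of two parsed timestamps. A self-contained sketch of the same comparison, assuming `parse_datetime_str` wraps `dateutil.parser.parse` and `ADDITIONAL_TZINFO` supplies offsets for non-standard timezone abbreviations (both assumptions; the diff shows only the call sites):

from dateutil import parser as date_parser

# Assumed shape: map unusual timezone abbreviations to UTC offsets in seconds.
ADDITIONAL_TZINFO = {"MSK": 3 * 3600}


def parse_datetime_str(value, tzinfos=None):
    return date_parser.parse(value, tzinfos=tzinfos)


first_seen = "2015-06-01 10:00 MSK"
created_at = "2014-01-01 09:00 MSK"

known_time = parse_datetime_str(first_seen, tzinfos=ADDITIONAL_TZINFO)
new_time = parse_datetime_str(created_at, tzinfos=ADDITIONAL_TZINFO)
if new_time < known_time:
    first_seen = created_at  # keep the earliest timestamp as first_seen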
@@ -35293,6 +35293,22 @@
             "urlMain": "https://mynickname.com",
             "usernameClaimed": "godbrithil",
             "usernameUnclaimed": "fqiakbtdhu"
+        },
+        "Substack": {
+            "absenceStrs": [
+                "Found. Redirecting to"
+            ],
+            "presenseStrs": [
+                "profile\\"
+            ],
+            "url": "https://substack.com/@{username}",
+            "urlMain": "https://substack.com",
+            "usernameClaimed": "user23",
+            "usernameUnclaimed": "noonewouldeverusethis7",
+            "checkType": "message",
+            "tags": [
+                "blog"
+            ]
         }
     },
     "engines": {
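The new Substack entry is a `message`-type check: presence or absence of an account is decided by substring markers in the response body rather than by status code. A sketch of how such an entry could resolve to a status (the `resolve_status` helper is an illustration, not maigret's internal code; the field values match the entry above):

SUBSTACK_ENTRY = {
    "absenceStrs": ["Found. Redirecting to"],
    "presenseStrs": ["profile\\"],
    "url": "https://substack.com/@{username}",
    "checkType": "message",
}


def resolve_status(entry: dict, body: str) -> str:
    # Illustration only: presence markers mean the account exists,
    # absence markers mean the username is free.
    if any(s in body for s in entry["presenseStrs"]):
        return "CLAIMED"
    if any(s in body for s in entry["absenceStrs"]):
        return "AVAILABLE"
    return "UNKNOWN"


url = SUBSTACK_ENTRY["url"].format(username="user23")
# resolve_status(SUBSTACK_ENTRY, fetched_body) -> "CLAIMED" / "AVAILABLE" / "UNKNOWN"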
@@ -2,6 +2,7 @@
 
 This module defines various objects for recording the results of queries.
 """
+
 from enum import Enum
 
 
+46 -17
@@ -115,26 +115,43 @@ class MaigretSite:
         lower_name = self.name.lower()
         lower_url_main = self.url_main.lower()
 
-        return \
-            lower_name == lower_url_or_name_str or \
-            (lower_url_main and lower_url_main == lower_url_or_name_str) or \
-            (lower_url_main and lower_url_main in lower_url_or_name_str) or \
-            (lower_url_main and lower_url_or_name_str in lower_url_main) or \
-            (lower_url and lower_url_or_name_str in lower_url)
+        return (
+            lower_name == lower_url_or_name_str
+            or (lower_url_main and lower_url_main == lower_url_or_name_str)
+            or (lower_url_main and lower_url_main in lower_url_or_name_str)
+            or (lower_url_main and lower_url_or_name_str in lower_url_main)
+            or (lower_url and lower_url_or_name_str in lower_url)
+        )
 
     def __eq__(self, other):
         if isinstance(other, MaigretSite):
             # Compare only relevant attributes, not internal state like request_future
             attrs_to_compare = [
-                'name', 'url_main', 'url_subpath', 'type', 'headers',
-                'errors', 'activation', 'regex_check', 'url_probe',
-                'check_type', 'request_head_only', 'get_params',
-                'presense_strs', 'absence_strs', 'stats', 'engine',
-                'engine_data', 'alexa_rank', 'source', 'protocol'
+                'name',
+                'url_main',
+                'url_subpath',
+                'type',
+                'headers',
+                'errors',
+                'activation',
+                'regex_check',
+                'url_probe',
+                'check_type',
+                'request_head_only',
+                'get_params',
+                'presense_strs',
+                'absence_strs',
+                'stats',
+                'engine',
+                'engine_data',
+                'alexa_rank',
+                'source',
+                'protocol',
             ]
 
-            return all(getattr(self, attr) == getattr(other, attr)
-                       for attr in attrs_to_compare)
+            return all(
+                getattr(self, attr) == getattr(other, attr) for attr in attrs_to_compare
+            )
         elif isinstance(other, str):
             # Compare only by name (exactly) or url_main (partial similarity)
             return self.__is_equal_by_url_or_name(other)
@@ -556,12 +573,24 @@ class MaigretDatabase:
 
         return separator.join(output)
 
-    def _format_top_items(self, title, items_dict, limit, is_markdown, valid_items=None):
+    def _format_top_items(
+        self, title, items_dict, limit, is_markdown, valid_items=None
+    ):
         """Helper method to format top items lists"""
         output = f"Top {limit} {title}:\n"
-        for item, count in sorted(items_dict.items(), key=lambda x: x[1], reverse=True)[:limit]:
+        for item, count in sorted(items_dict.items(), key=lambda x: x[1], reverse=True)[
+            :limit
+        ]:
            if count == 1:
                break
-            mark = " (non-standard)" if valid_items is not None and item not in valid_items else ""
-            output += f"- ({count})\t`{item}`{mark}\n" if is_markdown else f"{count}\t{item}{mark}\n"
+            mark = (
+                " (non-standard)"
+                if valid_items is not None and item not in valid_items
+                else ""
+            )
+            output += (
+                f"- ({count})\t`{item}`{mark}\n"
+                if is_markdown
+                else f"{count}\t{item}{mark}\n"
+            )
         return output
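The `__eq__` pattern above, a whitelist of attributes checked with `all()` and `getattr()`, generalizes to any object whose transient state should not affect equality. A tiny self-contained demonstration (the `Site` class is a made-up stand-in, not `MaigretSite`):

class Site:
    # Whitelist of attributes that define identity; transient fields are ignored.
    ATTRS_TO_COMPARE = ['name', 'url_main']

    def __init__(self, name, url_main, request_future=None):
        self.name = name
        self.url_main = url_main
        self.request_future = request_future  # internal state, excluded

    def __eq__(self, other):
        if not isinstance(other, Site):
            return NotImplemented
        return all(
            getattr(self, attr) == getattr(other, attr)
            for attr in self.ATTRS_TO_COMPARE
        )


# Equal despite differing internal state:
assert Site("GitHub", "https://github.com") == Site(
    "GitHub", "https://github.com", request_future=object()
)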
+36 -89
@@ -1,19 +1,19 @@
 import asyncio
 import json
 import re
-from typing import List
-from xml.etree import ElementTree
-from aiohttp import TCPConnector, ClientSession
-import requests
+from typing import Any, Dict, List, Optional
+from aiohttp import ClientSession, TCPConnector
+from aiohttp_socks import ProxyConnector
 import cloudscraper
 from colorama import Fore, Style
 
 from .activation import import_aiohttp_cookies
-from .checking import maigret
-from .result import QueryStatus
+from .result import QueryResult
 from .settings import Settings
-from .sites import MaigretDatabase, MaigretSite, MaigretEngine
-from .utils import get_random_user_agent, get_match_ratio
+from .sites import MaigretDatabase, MaigretEngine, MaigretSite
+from .utils import get_random_user_agent
 
 
 class CloudflareSession:
@@ -68,6 +68,9 @@ class Submitter:
             connector=connector, trust_env=True, cookie_jar=cookie_jar
         )
 
+    async def close(self):
+        await self.session.close()
+
     @staticmethod
     def get_alexa_rank(site_url_main):
         url = f"http://data.alexa.com/data?cli=10&url={site_url_main}"
@@ -87,78 +90,18 @@ class Submitter:
         return "/".join(url.split("/", 3)[:3])
 
     async def site_self_check(self, site, semaphore, silent=False):
-        changes = {
-            "disabled": False,
-        }
-
-        check_data = [
-            (site.username_claimed, QueryStatus.CLAIMED),
-            (site.username_unclaimed, QueryStatus.AVAILABLE),
-        ]
-
-        self.logger.info(f"Checking {site.name}...")
-
-        for username, status in check_data:
-            results_dict = await maigret(
-                username=username,
-                site_dict={site.name: site},
-                proxy=self.args.proxy,
-                logger=self.logger,
-                cookies=self.args.cookie_file,
-                timeout=30,
-                id_type=site.type,
-                forced=True,
-                no_progressbar=True,
-            )
-
-            # don't disable entries with other ids types
-            # TODO: make normal checking
-            if site.name not in results_dict:
-                self.logger.info(results_dict)
-                changes["disabled"] = True
-                continue
-
-            result = results_dict[site.name]["status"]
-
-            site_status = result.status
-
-            if site_status != status:
-                if site_status == QueryStatus.UNKNOWN:
-                    msgs = site.absence_strs
-                    etype = site.check_type
-                    self.logger.warning(
-                        "Error while searching '%s' in %s: %s, %s, check type %s",
-                        username,
-                        site.name,
-                        result.context,
-                        msgs,
-                        etype,
-                    )
-                    # don't disable in case of available username
-                    if status == QueryStatus.CLAIMED:
-                        changes["disabled"] = True
-                elif status == QueryStatus.CLAIMED:
-                    print(
-                        f"{Fore.YELLOW}[!] Not found `{username}` in {site.name}, must be claimed{Style.RESET_ALL}"
-                    )
-                    self.logger.warning(site.json)
-                    changes["disabled"] = True
-                else:
-                    print(
-                        f"{Fore.YELLOW}[!] Found `{username}` in {site.name}, must be available{Style.RESET_ALL}"
-                    )
-                    self.logger.warning(site.json)
-                    changes["disabled"] = True
-            else:
-                print(f"{Fore.GREEN}[+] {username} is successfully checked: {status} in {site.name}{Style.RESET_ALL}")
-
-        self.logger.info(f"Site {site.name} checking is finished")
-
-        # remove service tag "unchecked"
-        if "unchecked" in site.tags:
-            site.tags.remove("unchecked")
-            changes["tags"] = site.tags
-
+        # Call the general function from the checking.py
+        changes = await checking_site_self_check(
+            site=site,
+            logger=self.logger,
+            semaphore=semaphore,
+            db=self.db,
+            silent=silent,
+            proxy=self.args.proxy,
+            cookies=self.args.cookie_file,
+            # Don't skip errors in submit mode - we need check both false positives/true negatives
+            skip_errors=False,
+        )
         return changes
 
     def generate_additional_fields_dialog(self, engine: MaigretEngine, dialog):
@@ -294,8 +237,8 @@ class Submitter:
         b_minus_a = tokens_b.difference(tokens_a)
 
         # additional filtering by html response
-        a_minus_b = [t for t in a_minus_b if not t in non_exists_resp_text]
-        b_minus_a = [t for t in b_minus_a if not t in exists_resp_text]
+        a_minus_b = [t for t in a_minus_b if t not in non_exists_resp_text]
+        b_minus_a = [t for t in b_minus_a if t not in exists_resp_text]
 
         if len(a_minus_b) == len(b_minus_a) == 0:
             print("The pages for existing and non-existing account are the same!")
@@ -352,13 +295,13 @@ class Submitter:
 
     async def add_site(self, site):
         sem = asyncio.Semaphore(1)
-        print(f"{Fore.BLUE}{Style.BRIGHT}[*] Adding site {site.name}, let's check it...{Style.RESET_ALL}")
+        print(
+            f"{Fore.BLUE}{Style.BRIGHT}[*] Adding site {site.name}, let's check it...{Style.RESET_ALL}"
+        )
 
         result = await self.site_self_check(site, sem)
         if result["disabled"]:
-            print(
-                f"Checks failed for {site.name}, please, verify them manually."
-            )
+            print(f"Checks failed for {site.name}, please, verify them manually.")
             return {
                 "valid": False,
                 "reason": "checks_failed",
@@ -405,7 +348,9 @@ class Submitter:
         if choice in editable_fields:
             field = editable_fields[choice]
             current_value = getattr(site, field)
-            new_value = input(f"Enter new value for {field} (current: {current_value}): ").strip()
+            new_value = input(
+                f"Enter new value for {field} (current: {current_value}): "
+            ).strip()
 
             if field in ['tags', 'presense_strs', 'absence_strs']:
                 new_value = list(map(str.strip, new_value.split(',')))
@@ -532,8 +477,10 @@ class Submitter:
         self.logger.debug(site_data.json)
         self.db.update_site(site_data)
 
-        if self.args.db:
-            print(f"{Fore.GREEN}[+] Maigret DB is saved to {self.args.db}.{Style.RESET_ALL}")
+        if self.args.db_file != self.settings.sites_db_path:
+            print(
+                f"{Fore.GREEN}[+] Maigret DB is saved to {self.args.db}.{Style.RESET_ALL}"
+            )
             self.db.save_to_file(self.args.db)
 
         return True
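The `not t in` to `t not in` fix sits inside the submitter's page-diffing logic: tokens unique to the "account exists" page are candidate presence markers, tokens unique to the "account missing" page are candidate absence markers, and each set is then re-filtered against the opposite page's raw HTML. A distilled sketch of that idea (the function name and whitespace tokenization are illustrative):

def candidate_markers(exists_html: str, missing_html: str):
    tokens_a = set(exists_html.split())
    tokens_b = set(missing_html.split())

    a_minus_b = tokens_a.difference(tokens_b)  # only on the "exists" page
    b_minus_a = tokens_b.difference(tokens_a)  # only on the "missing" page

    # additional filtering by html response: drop tokens that still occur
    # anywhere in the opposite page's raw text
    presence = [t for t in a_minus_b if t not in missing_html]
    absence = [t for t in b_minus_a if t not in exists_html]
    return presence, absence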
Generated +75 -2
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand.
 
 [[package]]
 name = "about-time"
@@ -272,6 +272,52 @@ charset-normalizer = ["charset-normalizer"]
 html5lib = ["html5lib"]
 lxml = ["lxml"]
 
+[[package]]
+name = "black"
+version = "24.10.0"
+description = "The uncompromising code formatter."
+optional = false
+python-versions = ">=3.9"
+files = [
+    {file = "black-24.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6668650ea4b685440857138e5fe40cde4d652633b1bdffc62933d0db4ed9812"},
+    {file = "black-24.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1c536fcf674217e87b8cc3657b81809d3c085d7bf3ef262ead700da345bfa6ea"},
+    {file = "black-24.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:649fff99a20bd06c6f727d2a27f401331dc0cc861fb69cde910fe95b01b5928f"},
+    {file = "black-24.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:fe4d6476887de70546212c99ac9bd803d90b42fc4767f058a0baa895013fbb3e"},
+    {file = "black-24.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5a2221696a8224e335c28816a9d331a6c2ae15a2ee34ec857dcf3e45dbfa99ad"},
+    {file = "black-24.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f9da3333530dbcecc1be13e69c250ed8dfa67f43c4005fb537bb426e19200d50"},
+    {file = "black-24.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4007b1393d902b48b36958a216c20c4482f601569d19ed1df294a496eb366392"},
+    {file = "black-24.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:394d4ddc64782e51153eadcaaca95144ac4c35e27ef9b0a42e121ae7e57a9175"},
+    {file = "black-24.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b5e39e0fae001df40f95bd8cc36b9165c5e2ea88900167bddf258bacef9bbdc3"},
+    {file = "black-24.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d37d422772111794b26757c5b55a3eade028aa3fde43121ab7b673d050949d65"},
+    {file = "black-24.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:14b3502784f09ce2443830e3133dacf2c0110d45191ed470ecb04d0f5f6fcb0f"},
+    {file = "black-24.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:30d2c30dc5139211dda799758559d1b049f7f14c580c409d6ad925b74a4208a8"},
+    {file = "black-24.10.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cbacacb19e922a1d75ef2b6ccaefcd6e93a2c05ede32f06a21386a04cedb981"},
+    {file = "black-24.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1f93102e0c5bb3907451063e08b9876dbeac810e7da5a8bfb7aeb5a9ef89066b"},
+    {file = "black-24.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ddacb691cdcdf77b96f549cf9591701d8db36b2f19519373d60d31746068dbf2"},
+    {file = "black-24.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:680359d932801c76d2e9c9068d05c6b107f2584b2a5b88831c83962eb9984c1b"},
+    {file = "black-24.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:17374989640fbca88b6a448129cd1745c5eb8d9547b464f281b251dd00155ccd"},
+    {file = "black-24.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:63f626344343083322233f175aaf372d326de8436f5928c042639a4afbbf1d3f"},
+    {file = "black-24.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfa1d0cb6200857f1923b602f978386a3a2758a65b52e0950299ea014be6800"},
+    {file = "black-24.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:2cd9c95431d94adc56600710f8813ee27eea544dd118d45896bb734e9d7a0dc7"},
+    {file = "black-24.10.0-py3-none-any.whl", hash = "sha256:3bb2b7a1f7b685f85b11fed1ef10f8a9148bceb49853e47a294a3dd963c1dd7d"},
+    {file = "black-24.10.0.tar.gz", hash = "sha256:846ea64c97afe3bc677b761787993be4991810ecc7a4a937816dd6bddedc4875"},
+]
+
+[package.dependencies]
+click = ">=8.0.0"
+mypy-extensions = ">=0.4.3"
+packaging = ">=22.0"
+pathspec = ">=0.9.0"
+platformdirs = ">=2"
+tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
+typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""}
+
+[package.extras]
+colorama = ["colorama (>=0.4.3)"]
+d = ["aiohttp (>=3.10)"]
+jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
+uvloop = ["uvloop (>=0.15.2)"]
+
 [[package]]
 name = "certifi"
 version = "2024.8.30"
@@ -1499,6 +1545,17 @@ files = [
 qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"]
 testing = ["docopt", "pytest"]
 
+[[package]]
+name = "pathspec"
+version = "0.12.1"
+description = "Utility library for gitignore style pattern matching of file paths."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
+    {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
+]
+
 [[package]]
 name = "pexpect"
 version = "4.9.0"
@@ -1605,6 +1662,22 @@ tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "pa
 typing = ["typing-extensions"]
 xmp = ["defusedxml"]
 
+[[package]]
+name = "platformdirs"
+version = "4.3.6"
+description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"},
+    {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"},
+]
+
+[package.extras]
+docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"]
+test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"]
+type = ["mypy (>=1.11.2)"]
+
 [[package]]
 name = "pluggy"
 version = "1.5.0"
@@ -2866,4 +2939,4 @@ propcache = ">=0.2.0"
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.10"
-content-hash = "9c1c222702e0c8c11531f60480c7665247bb9338f6b2c79fed88b25213ae63fe"
+content-hash = "7e36b57d14f5feedd75778934df5a24c669fd7dd3d5f0147f566ac4ea6eb1d27"
@@ -84,6 +84,7 @@ pytest-rerunfailures = "^15.0"
 reportlab = "^4.2.0"
 mypy = "^1.13.0"
 tuna = "^0.5.11"
+black = "^24.10.0"
 
 [tool.poetry.scripts]
 # Run with: poetry run maigret <username>
@@ -1,4 +1,5 @@
 """Maigret activation test functions"""
+
 import json
 
 import aiohttp
@@ -41,7 +42,11 @@ async def test_import_aiohttp_cookies():
 
     cookie_jar = import_aiohttp_cookies(cookies_filename)
     # new aiohttp support
-    assert list(cookie_jar._cookies.keys()) in (['xss.is', 'httpbin.org'], [('xss.is', '/'), ('httpbin.org', '/')], [('xss.is', ''), ('httpbin.org', '')])
+    assert list(cookie_jar._cookies.keys()) in (
+        ['xss.is', 'httpbin.org'],
+        [('xss.is', '/'), ('httpbin.org', '/')],
+        [('xss.is', ''), ('httpbin.org', '')],
+    )
 
     url = 'https://httpbin.org/cookies'
     connector = aiohttp.TCPConnector(ssl=False)
@@ -1,4 +1,5 @@
 """Maigret command-line arguments parsing tests"""
+
 from argparse import Namespace
 from typing import Dict, Any
 
@@ -1,4 +1,5 @@
 """Maigret checking logic test functions"""
+
 import pytest
 import asyncio
 import logging
@@ -1,4 +1,5 @@
 """Maigret main module test functions"""
+
 import asyncio
 import copy
 
@@ -1,4 +1,5 @@
 """Maigret reports test functions"""
+
 import copy
 import json
 import os
@@ -1,4 +1,5 @@
 """Maigret Database test functions"""
+
 from maigret.sites import MaigretDatabase, MaigretSite
 
 EXAMPLE_DB = {
@@ -1,4 +1,5 @@
 """Maigret utils test functions"""
+
 import itertools
 import re
 
@@ -16,18 +16,29 @@ def main():
     db = maigret.MaigretDatabase().load_from_file('./maigret/resources/data.json')
 
     username = input('Enter username to search: ')
-    sites_count = int(input(
-        f'Select the number of sites to search ({TOP_SITES_COUNT} for default, {len(db.sites_dict)} max): '
-    )) or TOP_SITES_COUNT
+    sites_count = (
+        int(
+            input(
+                f'Select the number of sites to search ({TOP_SITES_COUNT} for default, {len(db.sites_dict)} max): '
+            )
+        )
+        or TOP_SITES_COUNT
+    )
     sites = db.ranked_sites_dict(top=sites_count)
 
     show_progressbar = input('Do you want to show a progressbar? [Yn] ').lower() != 'n'
-    extract_info = input(
-        'Do you want to extract additional info from accounts\' pages? [Yn] '
-    ).lower() != 'n'
-    use_notifier = input(
-        'Do you want to use notifier for displaying results while searching? [Yn] '
-    ).lower() != 'n'
+    extract_info = (
+        input(
+            'Do you want to extract additional info from accounts\' pages? [Yn] '
+        ).lower()
+        != 'n'
+    )
+    use_notifier = (
+        input(
+            'Do you want to use notifier for displaying results while searching? [Yn] '
+        ).lower()
+        != 'n'
+    )
 
     notifier = None
     if use_notifier:
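One caveat worth noting about the `int(input(...)) or TOP_SITES_COUNT` idiom that black reflowed above (behavior unchanged by the commit): `int('')` raises `ValueError`, so pressing Enter does not fall back to the default; only an explicit `0` does. A variant that also defaults on empty input would look like this (a sketch, not part of the commit; the constant's value is assumed):

TOP_SITES_COUNT = 500  # assumed default; the real constant lives in the script

raw = input(f'Select the number of sites to search ({TOP_SITES_COUNT} for default): ')
# Empty input falls back to the default instead of raising ValueError.
sites_count = int(raw) if raw.strip() else TOP_SITES_COUNT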