mirror of https://github.com/soxoj/maigret.git (synced 2026-05-07 06:24:35 +00:00)
Refactored self-check method, code formatting, small lint fixes (#1942)
@@ -47,9 +47,7 @@ class ParsingActivator:
        session = requests.Session()
        # 1 stage: get the redirect URL
        r = session.get(
-            "https://weibo.com/clairekuo",
-            headers=headers,
-            allow_redirects=False
+            "https://weibo.com/clairekuo", headers=headers, allow_redirects=False
        )
        logger.debug(
            f"1 stage: {'success' if r.status_code == 302 else 'no 302 redirect, fail!'}"
@@ -68,11 +66,7 @@ class ParsingActivator:
        r = session.post(
            "https://passport.weibo.com/visitor/genvisitor2",
            headers=headers,
-            data={
-                'cb': 'visitor_gray_callback',
-                'tid': '',
-                'from': 'weibo'
-            },
+            data={'cb': 'visitor_gray_callback', 'tid': '', 'from': 'weibo'},
        )
        cookies = r.headers.get('set-cookie')
        logger.debug(
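The two hunks above only reformat the Weibo activation flow. For readability, here is a minimal end-to-end sketch of that two-stage visitor-cookie sequence; the function name and error handling are illustrative, not taken from the commit:

import requests

def get_weibo_visitor_cookies(headers: dict) -> str:
    # Hypothetical wrapper around the two stages shown in the hunks above.
    session = requests.Session()

    # Stage 1: an anonymous profile request should answer with a 302 redirect.
    r = session.get(
        "https://weibo.com/clairekuo", headers=headers, allow_redirects=False
    )
    if r.status_code != 302:
        raise RuntimeError("no 302 redirect, fail!")

    # Stage 2: ask the passport endpoint to generate visitor cookies.
    r = session.post(
        "https://passport.weibo.com/visitor/genvisitor2",
        headers=headers,
        data={'cb': 'visitor_gray_callback', 'tid': '', 'from': 'weibo'},
    )
    # The cookies arrive in the Set-Cookie response header.
    return r.headers.get('set-cookie', '')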
+22 -8
@@ -16,6 +16,7 @@ from aiohttp import ClientSession, TCPConnector, http_exceptions
from aiohttp.client_exceptions import ClientConnectorError, ServerDisconnectedError
from python_socks import _errors as proxy_errors
from socid_extractor import extract
+
try:
    from mock import Mock
except ImportError:
@@ -77,7 +78,9 @@ class SimpleAiohttpChecker(CheckerBase):
    async def close(self):
        pass

-    async def _make_request(self, session, url, headers, allow_redirects, timeout, method, logger) -> Tuple[str, int, Optional[CheckError]]:
+    async def _make_request(
+        self, session, url, headers, allow_redirects, timeout, method, logger
+    ) -> Tuple[str, int, Optional[CheckError]]:
        try:
            request_method = session.get if method == 'get' else session.head
            async with request_method(
@@ -120,7 +123,12 @@ class SimpleAiohttpChecker(CheckerBase):

    async def check(self) -> Tuple[str, int, Optional[CheckError]]:
        from aiohttp_socks import ProxyConnector
-        connector = ProxyConnector.from_url(self.proxy) if self.proxy else TCPConnector(ssl=False)
+
+        connector = (
+            ProxyConnector.from_url(self.proxy)
+            if self.proxy
+            else TCPConnector(ssl=False)
+        )
        connector.verify_ssl = False

        async with ClientSession(
@@ -136,7 +144,7 @@ class SimpleAiohttpChecker(CheckerBase):
                self.allow_redirects,
                self.timeout,
                self.method,
-                self.logger
+                self.logger,
            )

            if error and str(error) == "Invalid proxy response":
@@ -385,7 +393,7 @@ def process_site_result(
                    tree = ast.literal_eval(v)
                    if type(tree) == list:
                        for n in tree:
-                            new_usernames[n] = "username"
+                            new_usernames[n] = "username"
                except Exception as e:
                    logger.warning(e)
            if k in SUPPORTED_IDS:
@@ -549,7 +557,7 @@ async def check_site_for_username(
    )
    # future = default_result.get("future")
    # if not future:
-    #     return site.name, default_result
+    #     return site.name, default_result

    checker = default_result.get("checker")
    if not checker:
@@ -804,6 +812,7 @@ async def site_self_check(
    tor_proxy=None,
    i2p_proxy=None,
    skip_errors=False,
+    cookies=None,
):
    changes = {
        "disabled": False,
@@ -830,6 +839,7 @@ async def site_self_check(
            proxy=proxy,
            tor_proxy=tor_proxy,
            i2p_proxy=i2p_proxy,
+            cookies=cookies,
        )

        # don't disable entries with other ids types
@@ -878,7 +888,7 @@ async def site_self_check(

    if changes["disabled"] != site.disabled:
        site.disabled = changes["disabled"]
-        logger.info(f"Switching disabled status of {site.name} to {site.disabled}")
+        logger.info(f"Switching property 'disabled' for {site.name} to {site.disabled}")
        db.update_site(site)
        if not silent:
            action = "Disabled" if site.disabled else "Enabled"
@@ -909,7 +919,9 @@ async def self_check(
    def disabled_count(lst):
        return len(list(filter(lambda x: x.disabled, lst)))

-    unchecked_old_count = len([site for site in all_sites.values() if "unchecked" in site.tags])
+    unchecked_old_count = len(
+        [site for site in all_sites.values() if "unchecked" in site.tags]
+    )
    disabled_old_count = disabled_count(all_sites.values())

    for _, site in all_sites.items():
@@ -925,7 +937,9 @@ async def self_check(
            await f
            progress()  # Update the progress bar

-    unchecked_new_count = len([site for site in all_sites.values() if "unchecked" in site.tags])
+    unchecked_new_count = len(
+        [site for site in all_sites.values() if "unchecked" in site.tags]
+    )
    disabled_new_count = disabled_count(all_sites.values())
    total_disabled = disabled_new_count - disabled_old_count
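Beyond the black-style rewrapping, the functional change in this file is the new cookies parameter threaded through site_self_check. A call sketch with placeholder values; the keyword names follow the updated signature and the call added to submit.py further down in this commit:

import asyncio
from maigret.checking import site_self_check

async def recheck_site(db, site, logger):
    # Placeholder call sketch, not code from the commit.
    return await site_self_check(
        site=site,
        logger=logger,
        semaphore=asyncio.Semaphore(10),
        db=db,
        proxy=None,
        tor_proxy=None,
        i2p_proxy=None,
        skip_errors=False,
        cookies="cookies.txt",  # the parameter introduced by this commit
    )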
+2 -4
@@ -58,12 +58,10 @@ COMMON_ERRORS = {
    'Сайт заблокирован хостинг-провайдером': CheckError(
        'Site-specific', 'Site is disabled (Beget)'
    ),
-    'Generated by cloudfront (CloudFront)': CheckError(
-        'Request blocked', 'Cloudflare'
-    ),
+    'Generated by cloudfront (CloudFront)': CheckError('Request blocked', 'Cloudflare'),
    '/cdn-cgi/challenge-platform/h/b/orchestrate/chl_page': CheckError(
        'Just a moment: bot redirect challenge', 'Cloudflare'
-    )
+    ),
}

ERRORS_TYPES = {
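COMMON_ERRORS maps response-body signatures (such as the Cloudflare challenge path above) to CheckError instances. An illustrative lookup sketch, not a helper from the codebase:

from maigret.errors import COMMON_ERRORS

def detect_common_error(html: str):
    # Return the first CheckError whose signature occurs in the response body.
    for signature, check_error in COMMON_ERRORS.items():
        if signature in html:
            return check_error
    return None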
@@ -8,6 +8,7 @@ from alive_progress import alive_bar

from .types import QueryDraft

+
def create_task_func():
    if sys.version_info.minor > 6:
        create_asyncio_task = asyncio.create_task
@@ -156,7 +157,9 @@ class AsyncioProgressbarQueueExecutor(AsyncExecutor):

        # Initialize the progress bar
        if self.progress_func:
-            with self.progress_func(len(queries_list), title="Searching", force_tty=True) as bar:
+            with self.progress_func(
+                len(queries_list), title="Searching", force_tty=True
+            ) as bar:
                self.progress = bar  # Assign alive_bar's callable to self.progress

                # Add tasks to the queue
@@ -170,4 +173,4 @@ class AsyncioProgressbarQueueExecutor(AsyncExecutor):
        for w in workers:
            w.cancel()

-        return self.results
+        return self.results
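progress_func here is alive_progress's alive_bar (per the import and the comment in the hunk). A minimal standalone sketch of the same call pattern:

from alive_progress import alive_bar

# Ten queries, ticked one by one, mirroring how the executor drives the bar.
with alive_bar(10, title="Searching", force_tty=True) as bar:
    for _ in range(10):
        bar()  # one tick per completed query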
+20 -9
@@ -1,6 +1,7 @@
"""
Maigret main module
"""
+
import ast
import asyncio
import logging
@@ -44,7 +45,9 @@ from .settings import Settings
from .permutator import Permute


-def notify_about_errors(search_results: QueryResultWrapper, query_notify, show_statistics=False):
+def notify_about_errors(
+    search_results: QueryResultWrapper, query_notify, show_statistics=False
+):
    errs = errors.extract_and_group(search_results)
    was_errs_displayed = False
    for e in errs:
@@ -61,7 +64,7 @@ def notify_about_errors(search_results: QueryResultWrapper, query_notify, show_s
    if show_statistics:
        query_notify.warning(f'Verbose error statistics:')
        for e in errs:
-            text = f'{e["err"]}: {round(e["perc"],2)}%'
+            text = f'{e["err"]}: {round(e["perc"],2)}%'
            query_notify.warning(text, '!')

    if was_errs_displayed:
@@ -69,6 +72,7 @@ def notify_about_errors(search_results: QueryResultWrapper, query_notify, show_s
            'You can see detailed site check errors with a flag `--print-errors`'
        )

+
def extract_ids_from_page(url, logger, timeout=5) -> dict:
    results = {}
    # url, headers
@@ -100,7 +104,7 @@ def extract_ids_from_page(url, logger, timeout=5) -> dict:
                tree = ast.literal_eval(v)
                if type(tree) == list:
                    for n in tree:
-                        results[n] = 'username'
+                        results[n] = 'username'
            except Exception as e:
                logger.warning(e)
            if k in SUPPORTED_IDS:
@@ -566,14 +570,19 @@ async def main():
        is_submitted = await submitter.dialog(args.new_site_to_submit, args.cookie_file)
        if is_submitted:
            db.save_to_file(db_file)
+        await submitter.close()

    # Database self-checking
    if args.self_check:
        if len(site_data) == 0:
-            query_notify.warning('No sites to self-check with the current filters! Exiting...')
+            query_notify.warning(
+                'No sites to self-check with the current filters! Exiting...'
+            )
            return

-        query_notify.success(f'Maigret sites database self-check started for {len(site_data)} sites...')
+        query_notify.success(
+            f'Maigret sites database self-check started for {len(site_data)} sites...'
+        )
        is_need_update = await self_check(
            db,
            site_data,
@@ -594,7 +603,9 @@ async def main():
            print('Updates will be applied only for current search session.')

        if args.verbose or args.debug:
-            query_notify.info('Scan sessions flags stats: ' + str(db.get_scan_stats(site_data)))
+            query_notify.info(
+                'Scan sessions flags stats: ' + str(db.get_scan_stats(site_data))
+            )

    # Database statistics
    if args.stats:
@@ -613,10 +624,10 @@ async def main():
        query_notify.warning('No usernames to check, exiting.')
        sys.exit(0)

-    if len(usernames) > 1 and args.permute and args.id_type == 'username':
+    if len(usernames) > 1 and args.permute and args.id_type == 'username':
        query_notify.warning(
-            f"{len(usernames)} permutations from {original_usernames} to check..." +
-            get_dict_ascii_tree(usernames, prepend="\t")
+            f"{len(usernames)} permutations from {original_usernames} to check..."
+            + get_dict_ascii_tree(usernames, prepend="\t")
        )

    if not site_data:
@@ -3,6 +3,7 @@
This module defines the objects for notifying the caller about the
results of queries.
"""
+
import sys

from colorama import Fore, Style, init
+6 -2
@@ -295,8 +295,12 @@ def generate_report_context(username_results: list):
                    first_seen = created_at
                else:
                    try:
-                        known_time = parse_datetime_str(first_seen, tzinfos=ADDITIONAL_TZINFO)
-                        new_time = parse_datetime_str(created_at, tzinfos=ADDITIONAL_TZINFO)
+                        known_time = parse_datetime_str(
+                            first_seen, tzinfos=ADDITIONAL_TZINFO
+                        )
+                        new_time = parse_datetime_str(
+                            created_at, tzinfos=ADDITIONAL_TZINFO
+                        )
                        if new_time < known_time:
                            first_seen = created_at
                    except Exception as e:
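A worked example of the "keep the earliest creation date" logic rewrapped above, assuming parse_datetime_str is a thin alias over dateutil's parser (the tzinfos keyword matches dateutil's API):

from dateutil.parser import parse as parse_datetime_str  # assumed alias

first_seen = "2015-03-01"
created_at = "2012-07-19"
if parse_datetime_str(created_at) < parse_datetime_str(first_seen):
    first_seen = created_at  # the earlier date wins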
@@ -35293,6 +35293,22 @@
    "urlMain": "https://mynickname.com",
    "usernameClaimed": "godbrithil",
    "usernameUnclaimed": "fqiakbtdhu"
  },
+  "Substack": {
+    "absenceStrs": [
+      "Found. Redirecting to"
+    ],
+    "presenseStrs": [
+      "profile\\"
+    ],
+    "url": "https://substack.com/@{username}",
+    "urlMain": "https://substack.com",
+    "usernameClaimed": "user23",
+    "usernameUnclaimed": "noonewouldeverusethis7",
+    "checkType": "message",
+    "tags": [
+      "blog"
+    ]
+  }
  },
  "engines": {
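For a checkType of "message", an entry like the new Substack record is judged by string markers in the fetched page. An illustrative sketch of that evaluation (not Maigret's internal code):

def message_check(page_text: str, presense_strs, absence_strs) -> bool:
    # Account exists if a presence marker is found and no absence marker is.
    found = any(s in page_text for s in presense_strs)
    gone = any(s in page_text for s in absence_strs)
    return found and not gone

# Example with the Substack markers above:
# message_check(profile_html, ["profile\\"], ["Found. Redirecting to"])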
@@ -2,6 +2,7 @@

This module defines various objects for recording the results of queries.
"""
+
from enum import Enum


+46 -17
@@ -115,26 +115,43 @@ class MaigretSite:
        lower_name = self.name.lower()
        lower_url_main = self.url_main.lower()

-        return \
-            lower_name == lower_url_or_name_str or \
-            (lower_url_main and lower_url_main == lower_url_or_name_str) or \
-            (lower_url_main and lower_url_main in lower_url_or_name_str) or \
-            (lower_url_main and lower_url_or_name_str in lower_url_main) or \
-            (lower_url and lower_url_or_name_str in lower_url)
+        return (
+            lower_name == lower_url_or_name_str
+            or (lower_url_main and lower_url_main == lower_url_or_name_str)
+            or (lower_url_main and lower_url_main in lower_url_or_name_str)
+            or (lower_url_main and lower_url_or_name_str in lower_url_main)
+            or (lower_url and lower_url_or_name_str in lower_url)
+        )

    def __eq__(self, other):
        if isinstance(other, MaigretSite):
            # Compare only relevant attributes, not internal state like request_future
            attrs_to_compare = [
-                'name', 'url_main', 'url_subpath', 'type', 'headers',
-                'errors', 'activation', 'regex_check', 'url_probe',
-                'check_type', 'request_head_only', 'get_params',
-                'presense_strs', 'absence_strs', 'stats', 'engine',
-                'engine_data', 'alexa_rank', 'source', 'protocol'
+                'name',
+                'url_main',
+                'url_subpath',
+                'type',
+                'headers',
+                'errors',
+                'activation',
+                'regex_check',
+                'url_probe',
+                'check_type',
+                'request_head_only',
+                'get_params',
+                'presense_strs',
+                'absence_strs',
+                'stats',
+                'engine',
+                'engine_data',
+                'alexa_rank',
+                'source',
+                'protocol',
            ]

-            return all(getattr(self, attr) == getattr(other, attr)
-                       for attr in attrs_to_compare)
+            return all(
+                getattr(self, attr) == getattr(other, attr) for attr in attrs_to_compare
+            )
        elif isinstance(other, str):
            # Compare only by name (exactly) or url_main (partial similarity)
            return self.__is_equal_by_url_or_name(other)
@@ -556,12 +573,24 @@ class MaigretDatabase:

        return separator.join(output)

-    def _format_top_items(self, title, items_dict, limit, is_markdown, valid_items=None):
+    def _format_top_items(
+        self, title, items_dict, limit, is_markdown, valid_items=None
+    ):
        """Helper method to format top items lists"""
        output = f"Top {limit} {title}:\n"
-        for item, count in sorted(items_dict.items(), key=lambda x: x[1], reverse=True)[:limit]:
+        for item, count in sorted(items_dict.items(), key=lambda x: x[1], reverse=True)[
+            :limit
+        ]:
            if count == 1:
                break
-            mark = " (non-standard)" if valid_items is not None and item not in valid_items else ""
-            output += f"- ({count})\t`{item}`{mark}\n" if is_markdown else f"{count}\t{item}{mark}\n"
+            mark = (
+                " (non-standard)"
+                if valid_items is not None and item not in valid_items
+                else ""
+            )
+            output += (
+                f"- ({count})\t`{item}`{mark}\n"
+                if is_markdown
+                else f"{count}\t{item}{mark}\n"
+            )
        return output
+36 -89
@@ -1,19 +1,19 @@
import asyncio
import json
import re
-from typing import List
-from xml.etree import ElementTree
-from aiohttp import TCPConnector, ClientSession
-import requests
-
+from typing import Any, Dict, List, Optional
+
+from aiohttp import ClientSession, TCPConnector
+from aiohttp_socks import ProxyConnector
+import cloudscraper
from colorama import Fore, Style

from .activation import import_aiohttp_cookies
from .checking import maigret
-from .result import QueryStatus
+from .result import QueryResult
from .settings import Settings
-from .sites import MaigretDatabase, MaigretSite, MaigretEngine
-from .utils import get_random_user_agent, get_match_ratio
+from .sites import MaigretDatabase, MaigretEngine, MaigretSite
+from .utils import get_random_user_agent


class CloudflareSession:
@@ -68,6 +68,9 @@ class Submitter:
            connector=connector, trust_env=True, cookie_jar=cookie_jar
        )

+    async def close(self):
+        await self.session.close()
+
    @staticmethod
    def get_alexa_rank(site_url_main):
        url = f"http://data.alexa.com/data?cli=10&url={site_url_main}"
@@ -87,78 +90,18 @@ class Submitter:
        return "/".join(url.split("/", 3)[:3])

    async def site_self_check(self, site, semaphore, silent=False):
-        changes = {
-            "disabled": False,
-        }
-
-        check_data = [
-            (site.username_claimed, QueryStatus.CLAIMED),
-            (site.username_unclaimed, QueryStatus.AVAILABLE),
-        ]
-
-        self.logger.info(f"Checking {site.name}...")
-
-        for username, status in check_data:
-            results_dict = await maigret(
-                username=username,
-                site_dict={site.name: site},
-                proxy=self.args.proxy,
-                logger=self.logger,
-                cookies=self.args.cookie_file,
-                timeout=30,
-                id_type=site.type,
-                forced=True,
-                no_progressbar=True,
-            )
-
-            # don't disable entries with other ids types
-            # TODO: make normal checking
-            if site.name not in results_dict:
-                self.logger.info(results_dict)
-                changes["disabled"] = True
-                continue
-
-            result = results_dict[site.name]["status"]
-
-            site_status = result.status
-
-            if site_status != status:
-                if site_status == QueryStatus.UNKNOWN:
-                    msgs = site.absence_strs
-                    etype = site.check_type
-                    self.logger.warning(
-                        "Error while searching '%s' in %s: %s, %s, check type %s",
-                        username,
-                        site.name,
-                        result.context,
-                        msgs,
-                        etype,
-                    )
-                    # don't disable in case of available username
-                    if status == QueryStatus.CLAIMED:
-                        changes["disabled"] = True
-                elif status == QueryStatus.CLAIMED:
-                    print(
-                        f"{Fore.YELLOW}[!] Not found `{username}` in {site.name}, must be claimed{Style.RESET_ALL}"
-                    )
-                    self.logger.warning(site.json)
-                    changes["disabled"] = True
-                else:
-                    print(
-                        f"{Fore.YELLOW}[!] Found `{username}` in {site.name}, must be available{Style.RESET_ALL}"
-                    )
-                    self.logger.warning(site.json)
-                    changes["disabled"] = True
-            else:
-                print(f"{Fore.GREEN}[+] {username} is successfully checked: {status} in {site.name}{Style.RESET_ALL}")
-
-        self.logger.info(f"Site {site.name} checking is finished")
-
-        # remove service tag "unchecked"
-        if "unchecked" in site.tags:
-            site.tags.remove("unchecked")
-            changes["tags"] = site.tags
-
+        # Call the general function from the checking.py
+        changes = await checking_site_self_check(
+            site=site,
+            logger=self.logger,
+            semaphore=semaphore,
+            db=self.db,
+            silent=silent,
+            proxy=self.args.proxy,
+            cookies=self.args.cookie_file,
+            # Don't skip errors in submit mode - we need check both false positives/true negatives
+            skip_errors=False,
+        )
        return changes

    def generate_additional_fields_dialog(self, engine: MaigretEngine, dialog):
@@ -294,8 +237,8 @@ class Submitter:
        b_minus_a = tokens_b.difference(tokens_a)

        # additional filtering by html response
-        a_minus_b = [t for t in a_minus_b if not t in non_exists_resp_text]
-        b_minus_a = [t for t in b_minus_a if not t in exists_resp_text]
+        a_minus_b = [t for t in a_minus_b if t not in non_exists_resp_text]
+        b_minus_a = [t for t in b_minus_a if t not in exists_resp_text]

        if len(a_minus_b) == len(b_minus_a) == 0:
            print("The pages for existing and non-existing account are the same!")
@@ -352,13 +295,13 @@ class Submitter:

    async def add_site(self, site):
        sem = asyncio.Semaphore(1)
-        print(f"{Fore.BLUE}{Style.BRIGHT}[*] Adding site {site.name}, let's check it...{Style.RESET_ALL}")
+        print(
+            f"{Fore.BLUE}{Style.BRIGHT}[*] Adding site {site.name}, let's check it...{Style.RESET_ALL}"
+        )

        result = await self.site_self_check(site, sem)
        if result["disabled"]:
-            print(
-                f"Checks failed for {site.name}, please, verify them manually."
-            )
+            print(f"Checks failed for {site.name}, please, verify them manually.")
            return {
                "valid": False,
                "reason": "checks_failed",
@@ -405,7 +348,9 @@ class Submitter:
            if choice in editable_fields:
                field = editable_fields[choice]
                current_value = getattr(site, field)
-                new_value = input(f"Enter new value for {field} (current: {current_value}): ").strip()
+                new_value = input(
+                    f"Enter new value for {field} (current: {current_value}): "
+                ).strip()

                if field in ['tags', 'presense_strs', 'absence_strs']:
                    new_value = list(map(str.strip, new_value.split(',')))
@@ -532,8 +477,10 @@ class Submitter:
            self.logger.debug(site_data.json)
            self.db.update_site(site_data)

-        if self.args.db:
-            print(f"{Fore.GREEN}[+] Maigret DB is saved to {self.args.db}.{Style.RESET_ALL}")
+        if self.args.db_file != self.settings.sites_db_path:
+            print(
+                f"{Fore.GREEN}[+] Maigret DB is saved to {self.args.db}.{Style.RESET_ALL}"
+            )
            self.db.save_to_file(self.args.db)

        return True
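The new close() method pairs with the await submitter.close() call added in main(). A usage sketch of the resulting lifecycle; the Submitter constructor arguments are assumptions for illustration, while dialog() and close() are the calls shown in this commit's hunks:

from maigret.submit import Submitter

async def submit_and_close(db, settings, logger, args, db_file):
    # Constructor arguments assumed; the dialog/close sequence mirrors main().
    submitter = Submitter(db=db, logger=logger, settings=settings, args=args)
    try:
        is_submitted = await submitter.dialog(args.new_site_to_submit, args.cookie_file)
        if is_submitted:
            db.save_to_file(db_file)
    finally:
        await submitter.close()  # always release the underlying aiohttp session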