Added separate no-extracting flag to rule page parsing

This commit is contained in:
Soxoj
2021-03-14 13:03:29 +03:00
parent f7263c9b3c
commit 731a8e01f9
2 changed files with 10 additions and 5 deletions
+3 -3
View File
@@ -289,7 +289,7 @@ def process_site_result(response, query_notify, logger, results_info, site: Maig
async def maigret(username, site_dict, query_notify, logger, async def maigret(username, site_dict, query_notify, logger,
proxy=None, timeout=None, recursive_search=False, proxy=None, timeout=None, is_parsing_enabled=False,
id_type='username', debug=False, forced=False, id_type='username', debug=False, forced=False,
max_connections=100, no_progressbar=False, max_connections=100, no_progressbar=False,
cookies=None): cookies=None):
@@ -307,7 +307,7 @@ async def maigret(username, site_dict, query_notify, logger,
proxy -- String indicating the proxy URL proxy -- String indicating the proxy URL
timeout -- Time in seconds to wait before timing out request. timeout -- Time in seconds to wait before timing out request.
Default is no timeout. Default is no timeout.
recursive_search -- Search for other usernames in website pages & recursive search by them. is_parsing_enabled -- Search for other usernames in website pages.
Return Value: Return Value:
Dictionary containing results from report. Key of dictionary is the name Dictionary containing results from report. Key of dictionary is the name
@@ -364,7 +364,7 @@ async def maigret(username, site_dict, query_notify, logger,
# Record URL of main site and username # Record URL of main site and username
results_site['username'] = username results_site['username'] = username
results_site['parsing_enabled'] = recursive_search results_site['parsing_enabled'] = is_parsing_enabled
results_site['url_main'] = site.url_main results_site['url_main'] = site.url_main
results_site['cookies'] = cookie_jar and cookie_jar.filter_cookies(site.url_main) or None results_site['cookies'] = cookie_jar and cookie_jar.filter_cookies(site.url_main) or None
+7 -2
View File
@@ -106,7 +106,11 @@ async def main():
) )
parser.add_argument("--no-recursion", parser.add_argument("--no-recursion",
action="store_true", dest="disable_recursive_search", default=False, action="store_true", dest="disable_recursive_search", default=False,
help="Disable parsing pages for other usernames and recursive search by them." help="Disable recursive search by additional data extracted from pages."
)
parser.add_argument("--no-extracting",
action="store_true", dest="disable_extracting", default=False,
help="Disable parsing pages for additional data and other usernames."
) )
parser.add_argument("--self-check", parser.add_argument("--self-check",
action="store_true", default=False, action="store_true", default=False,
@@ -203,6 +207,7 @@ async def main():
and u not in args.ignore_ids_list and u not in args.ignore_ids_list
} }
parsing_enabled = not args.disable_extracting
recursive_search_enabled = not args.disable_recursive_search recursive_search_enabled = not args.disable_recursive_search
# Make prompts # Make prompts
@@ -324,7 +329,7 @@ async def main():
query_notify, query_notify,
proxy=args.proxy, proxy=args.proxy,
timeout=args.timeout, timeout=args.timeout,
recursive_search=recursive_search_enabled, is_parsing_enabled=parsing_enabled,
id_type=id_type, id_type=id_type,
debug=args.verbose, debug=args.verbose,
logger=logger, logger=logger,