Improved tests

This commit is contained in:
Soxoj
2021-05-18 00:43:56 +03:00
parent 435db7cdc9
commit 2c04ccce57
7 changed files with 124 additions and 17 deletions
+6 -6
View File
@@ -26,7 +26,7 @@ from .executors import (
from .result import QueryResult, QueryStatus from .result import QueryResult, QueryStatus
from .sites import MaigretDatabase, MaigretSite from .sites import MaigretDatabase, MaigretSite
from .types import QueryOptions, QueryResultWrapper from .types import QueryOptions, QueryResultWrapper
from .utils import get_random_user_agent from .utils import get_random_user_agent, ascii_data_display
SUPPORTED_IDS = ( SUPPORTED_IDS = (
@@ -233,9 +233,9 @@ def process_site_result(
result = build_result(QueryStatus.CLAIMED) result = build_result(QueryStatus.CLAIMED)
else: else:
result = build_result(QueryStatus.AVAILABLE) result = build_result(QueryStatus.AVAILABLE)
elif check_type == "status_code": elif check_type in "status_code":
# Checks if the status code of the response is 2XX # Checks if the status code of the response is 2XX
if is_presense_detected and (not status_code >= 300 or status_code < 200): if 200 <= status_code < 300:
result = build_result(QueryStatus.CLAIMED) result = build_result(QueryStatus.CLAIMED)
else: else:
result = build_result(QueryStatus.AVAILABLE) result = build_result(QueryStatus.AVAILABLE)
@@ -272,7 +272,7 @@ def process_site_result(
new_usernames[v] = k new_usernames[v] = k
results_info["ids_usernames"] = new_usernames results_info["ids_usernames"] = new_usernames
links = eval(extracted_ids_data.get("links", "[]")) links = ascii_data_display(extracted_ids_data.get("links", "[]"))
if "website" in extracted_ids_data: if "website" in extracted_ids_data:
links.append(extracted_ids_data["website"]) links.append(extracted_ids_data["website"])
results_info["ids_links"] = links results_info["ids_links"] = links
@@ -456,7 +456,7 @@ async def maigret(
logger, logger,
query_notify=None, query_notify=None,
proxy=None, proxy=None,
timeout=None, timeout=3,
is_parsing_enabled=False, is_parsing_enabled=False,
id_type="username", id_type="username",
debug=False, debug=False,
@@ -478,7 +478,7 @@ async def maigret(
query results. query results.
logger -- Standard Python logger object. logger -- Standard Python logger object.
timeout -- Time in seconds to wait before timing out request. timeout -- Time in seconds to wait before timing out request.
Default is no timeout. Default is 3 seconds.
is_parsing_enabled -- Extract additional info from account pages. is_parsing_enabled -- Extract additional info from account pages.
id_type -- Type of username to search. id_type -- Type of username to search.
Default is 'username', see all supported here: Default is 'username', see all supported here:
+7 -5
View File
@@ -13035,7 +13035,7 @@
"us" "us"
], ],
"headers": { "headers": {
"authorization": "Bearer BQBeVMTwloR4yQEzyayWE7uYo1A4OHV3Oe3Uuv8nHCIJqj73fH6UOJoSfNbzqeSSfLXAFNABEUSHxTZmPe0" "authorization": "Bearer BQBFMMVu1dPwJPlnzUteNyF8xlZy7545QnhHizEHWEUQGQrRLznY5k9B9v7JdAsL-wU-Tcep51JTqBesKKY"
}, },
"errors": { "errors": {
"Spotify is currently not available in your country.": "Access denied in your country, use proxy/vpn" "Spotify is currently not available in your country.": "Access denied in your country, use proxy/vpn"
@@ -14463,7 +14463,7 @@
"sec-ch-ua": "Google Chrome\";v=\"87\", \" Not;A Brand\";v=\"99\", \"Chromium\";v=\"87\"", "sec-ch-ua": "Google Chrome\";v=\"87\", \" Not;A Brand\";v=\"99\", \"Chromium\";v=\"87\"",
"authorization": "Bearer AAAAAAAAAAAAAAAAAAAAANRILgAAAAAAnNwIzUejRCOuH5E6I8xnZz4puTs%3D1Zv7ttfk8LF81IUq16cHjhLTvJu4FA33AGWWjCpTnA", "authorization": "Bearer AAAAAAAAAAAAAAAAAAAAANRILgAAAAAAnNwIzUejRCOuH5E6I8xnZz4puTs%3D1Zv7ttfk8LF81IUq16cHjhLTvJu4FA33AGWWjCpTnA",
"user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36", "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
"x-guest-token": "1393906084428107777" "x-guest-token": "1394397954526560260"
}, },
"errors": { "errors": {
"Bad guest token": "x-guest-token update required" "Bad guest token": "x-guest-token update required"
@@ -14870,7 +14870,7 @@
"video" "video"
], ],
"headers": { "headers": {
"Authorization": "jwt eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJleHAiOjE2MjExNjkwNDAsInVzZXJfaWQiOm51bGwsImFwcF9pZCI6NTg0NzksInNjb3BlcyI6InB1YmxpYyIsInRlYW1fdXNlcl9pZCI6bnVsbH0.uANToRPWBXHTZwnk-qucbJf-7ObHhCTwu87uJbEOj-I" "Authorization": "jwt eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJleHAiOjE2MjEyODYyODAsInVzZXJfaWQiOm51bGwsImFwcF9pZCI6NTg0NzksInNjb3BlcyI6InB1YmxpYyIsInRlYW1fdXNlcl9pZCI6bnVsbH0.mxLdaOuP260WcxBvhadTTUQyn8t75pWNhTmtZLFS-W4"
}, },
"activation": { "activation": {
"url": "https://vimeo.com/_rv/viewer", "url": "https://vimeo.com/_rv/viewer",
@@ -15806,7 +15806,8 @@
"url": "https://yandex.ru/bugbounty/researchers/{username}/", "url": "https://yandex.ru/bugbounty/researchers/{username}/",
"source": "Yandex", "source": "Yandex",
"usernameClaimed": "pyrk1", "usernameClaimed": "pyrk1",
"usernameUnclaimed": "noonewouldeverusethis7" "usernameUnclaimed": "noonewouldeverusethis7",
"disabled": true
}, },
"YandexCollections API": { "YandexCollections API": {
"tags": [ "tags": [
@@ -16274,7 +16275,8 @@
}, },
"author.today": { "author.today": {
"tags": [ "tags": [
"ru" "ru",
"reading"
], ],
"checkType": "status_code", "checkType": "status_code",
"alexaRank": 12218, "alexaRank": 12218,
+8 -1
View File
@@ -1,5 +1,7 @@
import ast
import re import re
import random import random
from typing import Any
DEFAULT_USER_AGENTS = [ DEFAULT_USER_AGENTS = [
@@ -65,6 +67,10 @@ class URLMatcher:
return re.compile(regexp_str) return re.compile(regexp_str)
def ascii_data_display(data: str) -> Any:
    """Safely parse a string holding a Python literal (e.g. "['a', 'b']").

    Uses ast.literal_eval, which only accepts literal syntax, as a safe
    replacement for eval() on data extracted from account pages.
    """
    parsed = ast.literal_eval(data)
    return parsed
def get_dict_ascii_tree(items, prepend="", new_line=True): def get_dict_ascii_tree(items, prepend="", new_line=True):
text = "" text = ""
for num, item in enumerate(items): for num, item in enumerate(items):
@@ -75,7 +81,8 @@ def get_dict_ascii_tree(items, prepend="", new_line=True):
if field_value.startswith("['"): if field_value.startswith("['"):
is_last_item = num == len(items) - 1 is_last_item = num == len(items) - 1
prepend_symbols = " " * 3 if is_last_item else "" prepend_symbols = " " * 3 if is_last_item else ""
field_value = get_dict_ascii_tree(eval(field_value), prepend_symbols) data = ascii_data_display(field_value)
field_value = get_dict_ascii_tree(data, prepend_symbols)
text += f"\n{prepend}{box_symbol}{field_name}: {field_value}" text += f"\n{prepend}{box_symbol}{field_name}: {field_value}"
else: else:
text += f"\n{prepend}{box_symbol} {item}" text += f"\n{prepend}{box_symbol} {item}"
+12 -5
View File
@@ -12,6 +12,7 @@ from maigret.maigret import setup_arguments_parser
CUR_PATH = os.path.dirname(os.path.realpath(__file__)) CUR_PATH = os.path.dirname(os.path.realpath(__file__))
JSON_FILE = os.path.join(CUR_PATH, '../maigret/resources/data.json') JSON_FILE = os.path.join(CUR_PATH, '../maigret/resources/data.json')
TEST_JSON_FILE = os.path.join(CUR_PATH, 'db.json') TEST_JSON_FILE = os.path.join(CUR_PATH, 'db.json')
LOCAL_TEST_JSON_FILE = os.path.join(CUR_PATH, 'local.json')
empty_mark = Mark('', (), {}) empty_mark = Mark('', (), {})
@@ -36,16 +37,17 @@ def remove_test_reports():
@pytest.fixture(scope='session') @pytest.fixture(scope='session')
def default_db(): def default_db():
db = MaigretDatabase().load_from_file(JSON_FILE) return MaigretDatabase().load_from_file(JSON_FILE)
return db
@pytest.fixture(scope='function') @pytest.fixture(scope='function')
def test_db(): def test_db():
db = MaigretDatabase().load_from_file(TEST_JSON_FILE) return MaigretDatabase().load_from_file(TEST_JSON_FILE)
return db
@pytest.fixture(scope='function')
def local_test_db():
    """Fresh Maigret database loaded from the local-server test JSON file."""
    database = MaigretDatabase()
    return database.load_from_file(LOCAL_TEST_JSON_FILE)
@pytest.fixture(autouse=True) @pytest.fixture(autouse=True)
@@ -58,3 +60,8 @@ def reports_autoclean():
@pytest.fixture(scope='session') @pytest.fixture(scope='session')
def argparser(): def argparser():
return setup_arguments_parser() return setup_arguments_parser()
@pytest.fixture(scope="session")
def httpserver_listen_address():
return ("localhost", 8989)
+21
View File
@@ -0,0 +1,21 @@
{
"engines": {},
"sites": {
"StatusCode": {
"checkType": "status_code",
"url": "http://localhost:8989/url?id={username}",
"urlMain": "http://localhost:8989/",
"usernameClaimed": "claimed",
"usernameUnclaimed": "unclaimed"
},
"Message": {
"checkType": "message",
"url": "http://localhost:8989/url?id={username}",
"urlMain": "http://localhost:8989/",
"presenseStrs": ["user", "profile"],
"absenseStrs": ["not found", "404"],
"usernameClaimed": "claimed",
"usernameUnclaimed": "unclaimed"
}
}
}
+65
View File
@@ -0,0 +1,65 @@
from mock import Mock
import pytest
from maigret import search
def site_result_except(server, username, **kwargs):
    """Wire the mock HTTP server to answer /url?id=<username> with **kwargs.

    kwargs are forwarded verbatim to respond_with_data (e.g. status=404,
    response_data="not found").
    """
    query = f'id={username}'
    handler = server.expect_request('/url', query_string=query)
    handler.respond_with_data(**kwargs)
@pytest.mark.asyncio
async def test_checking_by_status_code(httpserver, local_test_db):
    """status_code check: HTTP 200 means claimed, HTTP 404 means available."""
    site_result_except(httpserver, 'claimed', status=200)
    site_result_except(httpserver, 'unclaimed', status=404)
    sites = local_test_db.sites_dict

    found = await search('claimed', site_dict=sites, logger=Mock())
    assert found['StatusCode']['status'].is_found() is True

    missing = await search('unclaimed', site_dict=sites, logger=Mock())
    assert missing['StatusCode']['status'].is_found() is False
@pytest.mark.asyncio
async def test_checking_by_message_positive_full(httpserver, local_test_db):
    """message check: full presence/absence marker sets resolve correctly."""
    site_result_except(httpserver, 'claimed', response_data="user profile")
    site_result_except(httpserver, 'unclaimed', response_data="404 not found")
    sites = local_test_db.sites_dict

    found = await search('claimed', site_dict=sites, logger=Mock())
    assert found['Message']['status'].is_found() is True

    missing = await search('unclaimed', site_dict=sites, logger=Mock())
    assert missing['Message']['status'].is_found() is False
@pytest.mark.asyncio
async def test_checking_by_message_positive_part(httpserver, local_test_db):
    """message check: a single matching marker from each set is sufficient."""
    site_result_except(httpserver, 'claimed', response_data="profile")
    site_result_except(httpserver, 'unclaimed', response_data="404")
    sites = local_test_db.sites_dict

    found = await search('claimed', site_dict=sites, logger=Mock())
    assert found['Message']['status'].is_found() is True

    missing = await search('unclaimed', site_dict=sites, logger=Mock())
    assert missing['Message']['status'].is_found() is False
@pytest.mark.asyncio
async def test_checking_by_message_negative(httpserver, local_test_db):
    """message check misfires: empty body -> not found; a presence marker in an
    'unclaimed' page ("user" in "user 404") -> false positive. Documents the
    known weakness of substring-based detection.
    """
    site_result_except(httpserver, 'claimed', response_data="")
    site_result_except(httpserver, 'unclaimed', response_data="user 404")
    sites = local_test_db.sites_dict

    empty_page = await search('claimed', site_dict=sites, logger=Mock())
    assert empty_page['Message']['status'].is_found() is False

    false_positive = await search('unclaimed', site_dict=sites, logger=Mock())
    assert false_positive['Message']['status'].is_found() is True
+5
View File
@@ -57,6 +57,11 @@ def test_enrich_link_str():
) )
def test_url_extract_main_part_negative():
    """A non-URL string ('None') must yield an empty main part, not raise."""
    assert URLMatcher.extract_main_part('None') == ''
def test_url_extract_main_part(): def test_url_extract_main_part():
url_main_part = 'flickr.com/photos/alexaimephotography' url_main_part = 'flickr.com/photos/alexaimephotography'