Fixed JSON report generation bug, bump version to 0.2.1

This commit is contained in:
Soxoj
2021-05-02 20:06:15 +03:00
parent ec0d3a1f70
commit 188edc1b7f
8 changed files with 172 additions and 92 deletions
+3
View File
@@ -2,6 +2,9 @@
## [Unreleased]
## [0.2.1] - 2021-05-02
* fixed JSON report generation bug, added tests
## [0.2.0] - 2021-05-02
* added `--retries` option
* added `source` feature for sites' mirrors
+1 -1
View File
@@ -36,7 +36,7 @@ from .sites import MaigretDatabase
from .submit import submit_dialog
from .utils import get_dict_ascii_tree
__version__ = '0.2.0'
__version__ = '0.2.1'
def notify_about_errors(search_results, query_notify):
+3
View File
@@ -269,6 +269,9 @@ def generate_json_report(username: str, results: dict, file, report_type):
data = dict(site_result)
data["status"] = data["status"].json()
data["site"] = data["site"].json
if "future" in data:
del data["future"]
if is_report_per_line:
data["sitename"] = sitename
+1 -1
View File
@@ -12,7 +12,7 @@ with open('requirements.txt') as rf:
requires = rf.read().splitlines()
setup(name='maigret',
version='0.2.0',
version='0.2.1',
description='Collect a dossier on a person by username from a huge number of sites',
long_description=long_description,
long_description_content_type="text/markdown",
+8
View File
@@ -9,6 +9,7 @@ from maigret.sites import MaigretDatabase
CUR_PATH = os.path.dirname(os.path.realpath(__file__))
JSON_FILE = os.path.join(CUR_PATH, '../maigret/resources/data.json')
TEST_JSON_FILE = os.path.join(CUR_PATH, 'db.json')
empty_mark = Mark('', [], {})
@@ -38,6 +39,13 @@ def default_db():
return db
@pytest.fixture(scope='session')
def test_db():
    """Session-wide MaigretDatabase loaded from the local test db.json fixture file."""
    return MaigretDatabase().load_from_file(TEST_JSON_FILE)
@pytest.fixture(autouse=True)
def reports_autoclean():
remove_test_reports()
+26
View File
@@ -0,0 +1,26 @@
{
"engines": {},
"sites": {
"GooglePlayStore": {
"tags": ["global", "us"],
"disabled": false,
"checkType": "status_code",
"alexaRank": 1,
"url": "https://play.google.com/store/apps/developer?id={username}",
"urlMain": "https://play.google.com/store",
"usernameClaimed": "Facebook_nosuchname",
"usernameUnclaimed": "noonewouldeverusethis7"
},
"Reddit": {
"tags": ["news", "social", "us"],
"checkType": "status_code",
"presenseStrs": ["totalKarma"],
"disabled": true,
"alexaRank": 17,
"url": "https://www.reddit.com/user/{username}",
"urlMain": "https://www.reddit.com/",
"usernameClaimed": "blue",
"usernameUnclaimed": "noonewouldeverusethis7"
}
}
}
+119 -82
View File
@@ -4,93 +4,130 @@ import asyncio
import pytest
from mock import Mock
from maigret.maigret import self_check
from maigret.sites import MaigretDatabase
from maigret.maigret import self_check, maigret
from maigret.sites import MaigretSite
from maigret.result import QueryResult, QueryStatus
EXAMPLE_DB = {
'engines': {},
'sites': {
"GooglePlayStore": {
"tags": ["global", "us"],
"disabled": False,
"checkType": "status_code",
"alexaRank": 1,
"url": "https://play.google.com/store/apps/developer?id={username}",
"urlMain": "https://play.google.com/store",
"usernameClaimed": "Facebook_nosuchname",
"usernameUnclaimed": "noonewouldeverusethis7",
},
"Reddit": {
"tags": ["news", "social", "us"],
"checkType": "status_code",
"presenseStrs": ["totalKarma"],
"disabled": True,
"alexaRank": 17,
"url": "https://www.reddit.com/user/{username}",
"urlMain": "https://www.reddit.com/",
"usernameClaimed": "blue",
"usernameUnclaimed": "noonewouldeverusethis7",
},
},
@pytest.mark.slow
def test_self_check_db_positive_disable(test_db):
    """Self-check should disable a site whose claimed username does not resolve."""
    mock_logger = Mock()

    # Precondition: the first site in the test database starts enabled.
    assert test_db.sites[0].disabled is False

    check_coro = self_check(test_db, test_db.sites_dict, mock_logger, silent=True)
    asyncio.get_event_loop().run_until_complete(check_coro)

    # The self-check detected the bogus claimed username and disabled the site.
    assert test_db.sites[0].disabled is True
@pytest.mark.slow
def test_self_check_db_positive_enable(test_db):
    """Self-check should re-enable a disabled site whose claimed username resolves."""
    mock_logger = Mock()

    # Force the site into a disabled state but give it a resolvable account name.
    site = test_db.sites[0]
    site.disabled = True
    site.username_claimed = 'Facebook'
    assert test_db.sites[0].disabled is True

    asyncio.get_event_loop().run_until_complete(
        self_check(test_db, test_db.sites_dict, mock_logger, silent=True)
    )

    # The claimed account exists, so self-check turned the site back on.
    assert test_db.sites[0].disabled is False
@pytest.mark.slow
def test_self_check_db_negative_disabled(test_db):
    """A disabled site with an unresolvable claimed username stays disabled."""
    mock_logger = Mock()

    test_db.sites[0].disabled = True
    assert test_db.sites[0].disabled is True

    event_loop = asyncio.get_event_loop()
    event_loop.run_until_complete(
        self_check(test_db, test_db.sites_dict, mock_logger, silent=True)
    )

    # No resolvable account was found, so the disabled flag is untouched.
    assert test_db.sites[0].disabled is True
@pytest.mark.slow
def test_self_check_db_negative_enabled(test_db):
    """An enabled site with a resolvable claimed username stays enabled."""
    mock_logger = Mock()

    first_site = test_db.sites[0]
    first_site.disabled = False
    first_site.username_claimed = 'Facebook'
    assert test_db.sites[0].disabled is False

    coro = self_check(test_db, test_db.sites_dict, mock_logger, silent=True)
    asyncio.get_event_loop().run_until_complete(coro)

    # The claimed account resolves, so nothing gets disabled.
    assert test_db.sites[0].disabled is False
@pytest.mark.slow
def test_maigret_results(test_db):
logger = Mock()
username = 'Facebook'
loop = asyncio.get_event_loop()
results = loop.run_until_complete(
maigret(username, site_dict=test_db.sites_dict, logger=logger, timeout=30)
)
assert isinstance(results, dict)
reddit_site = results['Reddit']['site']
assert isinstance(reddit_site, MaigretSite)
assert reddit_site.json == {
'tags': ['news', 'social', 'us'],
'checkType': 'status_code',
'presenseStrs': ['totalKarma'],
'disabled': True,
'alexaRank': 17,
'url': 'https://www.reddit.com/user/{username}',
'urlMain': 'https://www.reddit.com/',
'usernameClaimed': 'blue',
'usernameUnclaimed': 'noonewouldeverusethis7',
}
del results['Reddit']['site']
del results['GooglePlayStore']['site']
@pytest.mark.slow
def test_self_check_db_positive_disable():
logger = Mock()
db = MaigretDatabase()
db.load_from_json(EXAMPLE_DB)
reddit_status = results['Reddit']['status']
assert isinstance(reddit_status, QueryResult)
assert reddit_status.status == QueryStatus.ILLEGAL
assert db.sites[0].disabled == False
playstore_status = results['GooglePlayStore']['status']
assert isinstance(playstore_status, QueryResult)
assert playstore_status.status == QueryStatus.CLAIMED
loop = asyncio.get_event_loop()
loop.run_until_complete(self_check(db, db.sites_dict, logger, silent=True))
del results['Reddit']['status']
del results['GooglePlayStore']['status']
assert db.sites[0].disabled == True
assert results['Reddit'].get('future') is None
del results['GooglePlayStore']['future']
@pytest.mark.slow
def test_self_check_db_positive_enable():
logger = Mock()
db = MaigretDatabase()
db.load_from_json(EXAMPLE_DB)
db.sites[0].disabled = True
db.sites[0].username_claimed = 'Facebook'
assert db.sites[0].disabled == True
loop = asyncio.get_event_loop()
loop.run_until_complete(self_check(db, db.sites_dict, logger, silent=True))
assert db.sites[0].disabled == False
@pytest.mark.slow
def test_self_check_db_negative_disabled():
logger = Mock()
db = MaigretDatabase()
db.load_from_json(EXAMPLE_DB)
db.sites[0].disabled = True
assert db.sites[0].disabled == True
loop = asyncio.get_event_loop()
loop.run_until_complete(self_check(db, db.sites_dict, logger, silent=True))
assert db.sites[0].disabled == True
@pytest.mark.slow
def test_self_check_db_negative_enabled():
logger = Mock()
db = MaigretDatabase()
db.load_from_json(EXAMPLE_DB)
db.sites[0].disabled = False
db.sites[0].username_claimed = 'Facebook'
assert db.sites[0].disabled == False
loop = asyncio.get_event_loop()
loop.run_until_complete(self_check(db, db.sites_dict, logger, silent=True))
assert db.sites[0].disabled == False
assert results == {
'Reddit': {
'cookies': None,
'parsing_enabled': False,
'url_main': 'https://www.reddit.com/',
'username': 'Facebook',
},
'GooglePlayStore': {
'cookies': None,
'http_status': 200,
'is_similar': False,
'parsing_enabled': False,
'rank': 1,
'url_main': 'https://play.google.com/store',
'url_user': 'https://play.google.com/store/apps/developer?id=Facebook',
'username': 'Facebook',
},
}
+6 -3
View File
@@ -18,6 +18,11 @@ from maigret.report import (
generate_json_report,
)
from maigret.result import QueryResult, QueryStatus
from maigret.sites import MaigretSite
GOOD_RESULT = QueryResult('', '', '', QueryStatus.CLAIMED)
BAD_RESULT = QueryResult('', '', '', QueryStatus.AVAILABLE)
EXAMPLE_RESULTS = {
'GitHub': {
@@ -35,12 +40,10 @@ EXAMPLE_RESULTS = {
'http_status': 200,
'is_similar': False,
'rank': 78,
'site': MaigretSite('test', {}),
}
}
GOOD_RESULT = QueryResult('', '', '', QueryStatus.CLAIMED)
BAD_RESULT = QueryResult('', '', '', QueryStatus.AVAILABLE)
GOOD_500PX_RESULT = copy.deepcopy(GOOD_RESULT)
GOOD_500PX_RESULT.tags = ['photo', 'us', 'global']
GOOD_500PX_RESULT.ids_data = {