Merge pull request #154 from soxoj/tests-improving
Improved tests
soxoj authored May 17, 2021
2 parents 435db7c + 99fa58c commit 5b405c6
Showing 10 changed files with 132 additions and 18 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/python-package.yml
@@ -26,7 +26,7 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip
-python -m pip install flake8 pytest pytest-rerunfailures
+python -m pip install -r test-requirements.txt
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
- name: Test with pytest
run: |
12 changes: 6 additions & 6 deletions maigret/checking.py
@@ -26,7 +26,7 @@
from .result import QueryResult, QueryStatus
from .sites import MaigretDatabase, MaigretSite
from .types import QueryOptions, QueryResultWrapper
-from .utils import get_random_user_agent
+from .utils import get_random_user_agent, ascii_data_display


SUPPORTED_IDS = (
@@ -233,9 +233,9 @@ def build_result(status, **kwargs):
result = build_result(QueryStatus.CLAIMED)
else:
result = build_result(QueryStatus.AVAILABLE)
-elif check_type == "status_code":
+elif check_type in "status_code":
# Checks if the status code of the response is 2XX
-if is_presense_detected and (not status_code >= 300 or status_code < 200):
+if 200 <= status_code < 300:
result = build_result(QueryStatus.CLAIMED)
else:
result = build_result(QueryStatus.AVAILABLE)
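
A note on the status-code hunk above: the two conditions are not equivalent, because Python's "not" binds more loosely than comparison operators. The following standalone sketch is not part of the commit; it assumes plain integer status codes and is_presense_detected set to True, purely to show where the expressions disagree:

def old_condition(status_code, is_presense_detected=True):
    # "not status_code >= 300" parses as "not (status_code >= 300)",
    # so this expression is True for any code below 300, including 1XX.
    return is_presense_detected and (not status_code >= 300 or status_code < 200)

def new_condition(status_code):
    # Matches the "2XX" comment exactly.
    return 200 <= status_code < 300

for code in (101, 200, 204, 301, 404):
    print(code, old_condition(code), new_condition(code))
# Of these, only 101 differs: the old expression accepts it, the new one does not.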
@@ -272,7 +272,7 @@ def build_result(status, **kwargs):
new_usernames[v] = k

results_info["ids_usernames"] = new_usernames
-links = eval(extracted_ids_data.get("links", "[]"))
+links = ascii_data_display(extracted_ids_data.get("links", "[]"))
if "website" in extracted_ids_data:
links.append(extracted_ids_data["website"])
results_info["ids_links"] = links
@@ -456,7 +456,7 @@ async def maigret(
logger,
query_notify=None,
proxy=None,
-timeout=None,
+timeout=3,
is_parsing_enabled=False,
id_type="username",
debug=False,
@@ -478,7 +478,7 @@ async def maigret(
query results.
logger -- Standard Python logger object.
timeout -- Time in seconds to wait before timing out request.
-Default is no timeout.
+Default is 3 seconds.
is_parsing_enabled -- Extract additional info from account pages.
id_type -- Type of username to search.
Default is 'username', see all supported here:
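
With the default timeout changing from "no timeout" to 3 seconds, callers that relied on long waits now need to pass the value explicitly. A rough sketch of such a call, not part of this commit: the keyword arguments mirror the signature in the hunk above and the calls in tests/test_checking.py below, while the username and database path are placeholders.

import asyncio
import logging

from maigret import search
from maigret.sites import MaigretDatabase

async def main():
    db = MaigretDatabase().load_from_file("maigret/resources/data.json")
    return await search(
        "some-username",                      # placeholder
        site_dict=db.sites_dict,
        logger=logging.getLogger("maigret"),
        timeout=10,                           # override the new 3-second default
    )

# asyncio.run(main())  # would launch a full scan of every site in the database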
12 changes: 7 additions & 5 deletions maigret/resources/data.json
@@ -13035,7 +13035,7 @@
"us"
],
"headers": {
"authorization": "Bearer BQBeVMTwloR4yQEzyayWE7uYo1A4OHV3Oe3Uuv8nHCIJqj73fH6UOJoSfNbzqeSSfLXAFNABEUSHxTZmPe0"
"authorization": "Bearer BQBFMMVu1dPwJPlnzUteNyF8xlZy7545QnhHizEHWEUQGQrRLznY5k9B9v7JdAsL-wU-Tcep51JTqBesKKY"
},
"errors": {
"Spotify is currently not available in your country.": "Access denied in your country, use proxy/vpn"
@@ -14463,7 +14463,7 @@
"sec-ch-ua": "Google Chrome\";v=\"87\", \" Not;A Brand\";v=\"99\", \"Chromium\";v=\"87\"",
"authorization": "Bearer AAAAAAAAAAAAAAAAAAAAANRILgAAAAAAnNwIzUejRCOuH5E6I8xnZz4puTs%3D1Zv7ttfk8LF81IUq16cHjhLTvJu4FA33AGWWjCpTnA",
"user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
"x-guest-token": "1393906084428107777"
"x-guest-token": "1394397954526560260"
},
"errors": {
"Bad guest token": "x-guest-token update required"
@@ -14870,7 +14870,7 @@
"video"
],
"headers": {
"Authorization": "jwt eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJleHAiOjE2MjExNjkwNDAsInVzZXJfaWQiOm51bGwsImFwcF9pZCI6NTg0NzksInNjb3BlcyI6InB1YmxpYyIsInRlYW1fdXNlcl9pZCI6bnVsbH0.uANToRPWBXHTZwnk-qucbJf-7ObHhCTwu87uJbEOj-I"
"Authorization": "jwt eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJleHAiOjE2MjEyODYyODAsInVzZXJfaWQiOm51bGwsImFwcF9pZCI6NTg0NzksInNjb3BlcyI6InB1YmxpYyIsInRlYW1fdXNlcl9pZCI6bnVsbH0.mxLdaOuP260WcxBvhadTTUQyn8t75pWNhTmtZLFS-W4"
},
"activation": {
"url": "https://vimeo.com/_rv/viewer",
@@ -15806,7 +15806,8 @@
"url": "https://yandex.ru/bugbounty/researchers/{username}/",
"source": "Yandex",
"usernameClaimed": "pyrk1",
"usernameUnclaimed": "noonewouldeverusethis7"
"usernameUnclaimed": "noonewouldeverusethis7",
"disabled": true
},
"YandexCollections API": {
"tags": [
@@ -16274,7 +16275,8 @@
},
"author.today": {
"tags": [
"ru"
"ru",
"reading"
],
"checkType": "status_code",
"alexaRank": 12218,
9 changes: 8 additions & 1 deletion maigret/utils.py
@@ -1,5 +1,7 @@
+import ast
import re
import random
+from typing import Any


DEFAULT_USER_AGENTS = [
@@ -65,6 +67,10 @@ def make_profile_url_regexp(self, url: str, username_regexp: str = ""):
return re.compile(regexp_str)


+def ascii_data_display(data: str) -> Any:
+    return ast.literal_eval(data)


def get_dict_ascii_tree(items, prepend="", new_line=True):
text = ""
for num, item in enumerate(items):
@@ -75,7 +81,8 @@ def get_dict_ascii_tree(items, prepend="", new_line=True):
if field_value.startswith("['"):
is_last_item = num == len(items) - 1
prepend_symbols = " " * 3 if is_last_item else " ┃ "
-field_value = get_dict_ascii_tree(eval(field_value), prepend_symbols)
+data = ascii_data_display(field_value)
+field_value = get_dict_ascii_tree(data, prepend_symbols)
text += f"\n{prepend}{box_symbol}{field_name}: {field_value}"
else:
text += f"\n{prepend}{box_symbol} {item}"
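
The switch from eval to ast.literal_eval (wrapped here as ascii_data_display) matters because the strings being parsed come from scraped account pages. A minimal illustration, not part of the commit, using made-up input data:

import ast

# Literal data, such as a stringified list of links, parses fine.
links = ast.literal_eval("['https://example.com/user', 'https://example.org/user']")
print(links)

# Arbitrary expressions are rejected instead of being executed.
try:
    ast.literal_eval("__import__('os').system('echo pwned')")
except ValueError as err:
    print("rejected:", err)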
6 changes: 6 additions & 0 deletions test-requirements.txt
@@ -0,0 +1,6 @@
flake8==3.8.4
pytest==6.2.4
pytest-asyncio==0.14.0
pytest-cov==2.10.1
pytest-httpserver==1.0.0
pytest-rerunfailures==9.1.1
17 changes: 12 additions & 5 deletions tests/conftest.py
@@ -12,6 +12,7 @@
CUR_PATH = os.path.dirname(os.path.realpath(__file__))
JSON_FILE = os.path.join(CUR_PATH, '../maigret/resources/data.json')
TEST_JSON_FILE = os.path.join(CUR_PATH, 'db.json')
+LOCAL_TEST_JSON_FILE = os.path.join(CUR_PATH, 'local.json')
empty_mark = Mark('', (), {})


@@ -36,16 +37,17 @@ def remove_test_reports():

@pytest.fixture(scope='session')
def default_db():
-    db = MaigretDatabase().load_from_file(JSON_FILE)

-    return db
+    return MaigretDatabase().load_from_file(JSON_FILE)


@pytest.fixture(scope='function')
def test_db():
-    db = MaigretDatabase().load_from_file(TEST_JSON_FILE)
+    return MaigretDatabase().load_from_file(TEST_JSON_FILE)


-    return db
+@pytest.fixture(scope='function')
+def local_test_db():
+    return MaigretDatabase().load_from_file(LOCAL_TEST_JSON_FILE)


@pytest.fixture(autouse=True)
Expand All @@ -58,3 +60,8 @@ def reports_autoclean():
@pytest.fixture(scope='session')
def argparser():
return setup_arguments_parser()


@pytest.fixture(scope="session")
def httpserver_listen_address():
return ("localhost", 8989)
21 changes: 21 additions & 0 deletions tests/local.json
@@ -0,0 +1,21 @@
{
    "engines": {},
    "sites": {
        "StatusCode": {
            "checkType": "status_code",
            "url": "http://localhost:8989/url?id={username}",
            "urlMain": "http://localhost:8989/",
            "usernameClaimed": "claimed",
            "usernameUnclaimed": "unclaimed"
        },
        "Message": {
            "checkType": "message",
            "url": "http://localhost:8989/url?id={username}",
            "urlMain": "http://localhost:8989/",
            "presenseStrs": ["user", "profile"],
            "absenseStrs": ["not found", "404"],
            "usernameClaimed": "claimed",
            "usernameUnclaimed": "unclaimed"
        }
    }
}
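
These two entries exist only for the local checking tests; loading the file directly shows what the local_test_db fixture hands to the checks. A small sketch, not part of the commit, assuming it is run from the repository root:

from maigret.sites import MaigretDatabase

db = MaigretDatabase().load_from_file('tests/local.json')
print(sorted(db.sites_dict))  # expected: ['Message', 'StatusCode']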
1 change: 1 addition & 0 deletions tests/test_activation.py
@@ -22,6 +22,7 @@
"""


+@pytest.mark.skip(reason="periodically fails")
@pytest.mark.slow
def test_twitter_activation(default_db):
twitter_site = default_db.sites_dict['Twitter']
65 changes: 65 additions & 0 deletions tests/test_checking.py
@@ -0,0 +1,65 @@
from mock import Mock
import pytest

from maigret import search


def site_result_except(server, username, **kwargs):
    query = f'id={username}'
    server.expect_request('/url', query_string=query).respond_with_data(**kwargs)


@pytest.mark.asyncio
async def test_checking_by_status_code(httpserver, local_test_db):
    sites_dict = local_test_db.sites_dict

    site_result_except(httpserver, 'claimed', status=200)
    site_result_except(httpserver, 'unclaimed', status=404)

    result = await search('claimed', site_dict=sites_dict, logger=Mock())
    assert result['StatusCode']['status'].is_found() is True

    result = await search('unclaimed', site_dict=sites_dict, logger=Mock())
    assert result['StatusCode']['status'].is_found() is False


@pytest.mark.asyncio
async def test_checking_by_message_positive_full(httpserver, local_test_db):
    sites_dict = local_test_db.sites_dict

    site_result_except(httpserver, 'claimed', response_data="user profile")
    site_result_except(httpserver, 'unclaimed', response_data="404 not found")

    result = await search('claimed', site_dict=sites_dict, logger=Mock())
    assert result['Message']['status'].is_found() is True

    result = await search('unclaimed', site_dict=sites_dict, logger=Mock())
    assert result['Message']['status'].is_found() is False


@pytest.mark.asyncio
async def test_checking_by_message_positive_part(httpserver, local_test_db):
    sites_dict = local_test_db.sites_dict

    site_result_except(httpserver, 'claimed', response_data="profile")
    site_result_except(httpserver, 'unclaimed', response_data="404")

    result = await search('claimed', site_dict=sites_dict, logger=Mock())
    assert result['Message']['status'].is_found() is True

    result = await search('unclaimed', site_dict=sites_dict, logger=Mock())
    assert result['Message']['status'].is_found() is False


@pytest.mark.asyncio
async def test_checking_by_message_negative(httpserver, local_test_db):
    sites_dict = local_test_db.sites_dict

    site_result_except(httpserver, 'claimed', response_data="")
    site_result_except(httpserver, 'unclaimed', response_data="user 404")

    result = await search('claimed', site_dict=sites_dict, logger=Mock())
    assert result['Message']['status'].is_found() is False

    result = await search('unclaimed', site_dict=sites_dict, logger=Mock())
    assert result['Message']['status'].is_found() is True
5 changes: 5 additions & 0 deletions tests/test_utils.py
@@ -57,6 +57,11 @@ def test_enrich_link_str():
)


+def test_url_extract_main_part_negative():
+    url_main_part = 'None'
+    assert URLMatcher.extract_main_part(url_main_part) == ''


def test_url_extract_main_part():
url_main_part = 'flickr.com/photos/alexaimephotography'
