Mirror of https://github.com/soxoj/maigret.git (synced 2026-05-07 06:24:35 +00:00)
Compare commits
60 Commits
| SHA1 | Author | Date | |
|---|---|---|---|
| 3638c911a3 | |||
| 1258ee0898 | |||
| 79e93ab715 | |||
| 52c8917e2c | |||
| 846feb6e7e | |||
| c510734e5e | |||
| 03b62027f6 | |||
| f293bff417 | |||
| 341db55099 | |||
| a77a8b3e84 | |||
| 3ff05b240a | |||
| 05d1eb6fb0 | |||
| 6cf5604075 | |||
| ff0ffce427 | |||
| ac1e3d33ec | |||
| 8b5dce1d3c | |||
| f897598f98 | |||
| 606fba01b4 | |||
| 9dbefcef11 | |||
| 533884bad5 | |||
| 12c8721a16 | |||
| b79f8aca28 | |||
| 1a9fe77d6e | |||
| 1352bd35c6 | |||
| 3960510b63 | |||
| a7bda700b4 | |||
| e962b8c693 | |||
| c6cfef84ce | |||
| b0ed09eb3e | |||
| 4e3bd3ab58 | |||
| 77c11df119 | |||
| 25026e21ea | |||
| b1004588af | |||
| 4bd2f7cb35 | |||
| 5e1cc45c17 | |||
| d9b361b626 | |||
| bfc6601c96 | |||
| 53ff696707 | |||
| 0131f0b64c | |||
| a5e558c5e8 | |||
| e8393bfce3 | |||
| 519eeb4d21 | |||
| 98f03c153b | |||
| 1f823e8322 | |||
| d6905a8fd8 | |||
| 7d216638fa | |||
| fb71f26fd0 | |||
| 621b104523 | |||
| 37ce4fe728 | |||
| f74f82ee13 | |||
| 7e6d70a680 | |||
| e900d4a853 | |||
| 9ee4eb9b69 | |||
| 53f21eda98 | |||
| 1a0f36ffb6 | |||
| 14114e681c | |||
| bc0649e6a8 | |||
| 8267367bed | |||
| 28cb6c9ffb | |||
| 7a31328325 |
@@ -2,7 +2,7 @@ name: Build docker image and push to DockerHub

 on:
   push:
-    branches: [ main ]
+    branches: [ main, dev ]

 jobs:
   docker:
@@ -10,24 +10,62 @@ jobs:
     steps:
      -
        name: Set up QEMU
-       uses: docker/setup-qemu-action@v1
+       uses: docker/setup-qemu-action@v3
      -
        name: Set up Docker Buildx
-       uses: docker/setup-buildx-action@v1
+       uses: docker/setup-buildx-action@v3
      -
        name: Login to DockerHub
-       uses: docker/login-action@v1
+       uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_HUB_USERNAME }}
          password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
      -
-       name: Build and push
-       id: docker_build
-       uses: docker/build-push-action@v2
+       name: Extract metadata (CLI)
+       id: meta_cli
+       uses: docker/metadata-action@v5
        with:
+         images: ${{ secrets.DOCKER_HUB_USERNAME }}/maigret
+         tags: |
+           type=raw,value=latest,enable={{is_default_branch}}
+           type=ref,event=branch
+           type=sha,prefix=
+     -
+       name: Extract metadata (Web UI)
+       id: meta_web
+       uses: docker/metadata-action@v5
+       with:
+         images: ${{ secrets.DOCKER_HUB_USERNAME }}/maigret
+         tags: |
+           type=raw,value=web,enable={{is_default_branch}}
+           type=ref,event=branch,suffix=-web
+           type=sha,prefix=web-
+     -
+       name: Build and push (CLI, default)
+       id: docker_build_cli
+       uses: docker/build-push-action@v6
+       with:
          push: true
-         tags: ${{ secrets.DOCKER_HUB_USERNAME }}/maigret:latest
+         target: cli
+         tags: ${{ steps.meta_cli.outputs.tags }}
+         labels: ${{ steps.meta_cli.outputs.labels }}
+         platforms: linux/amd64,linux/arm64
+         cache-from: type=gha
+         cache-to: type=gha,mode=max
      -
-       name: Image digest
-       run: echo ${{ steps.docker_build.outputs.digest }}
+       name: Build and push (Web UI)
+       id: docker_build_web
+       uses: docker/build-push-action@v6
+       with:
+         push: true
+         target: web
+         tags: ${{ steps.meta_web.outputs.tags }}
+         labels: ${{ steps.meta_web.outputs.labels }}
+         platforms: linux/amd64,linux/arm64
+         cache-from: type=gha
+         cache-to: type=gha,mode=max
+     -
+       name: Image digests
+       run: |
+         echo "cli: ${{ steps.docker_build_cli.outputs.digest }}"
+         echo "web: ${{ steps.docker_build_web.outputs.digest }}"

@@ -2,41 +2,48 @@ name: Linting and testing

 on:
   push:
-    branches: [ main ]
+    branches: [main]
   pull_request:
-    branches: [ main ]
+    branches: [main]
+    types: [opened, synchronize, reopened]

 jobs:
   build:

     runs-on: ubuntu-latest

     strategy:
       fail-fast: false
       matrix:
-        python-version: ["3.10", "3.11", "3.12", "3.13"]
+        python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"]

     steps:
-    - name: Checkout
-      uses: actions/checkout@v2
-    - name: Set up Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v2
-      with:
-        python-version: ${{ matrix.python-version }}
-    - name: Install system dependencies
-      run: |
-        sudo apt-get update && sudo apt-get install -y libcairo2-dev
-    - name: Install dependencies
-      run: |
-        python -m pip install --upgrade pip
-        python -m pip install poetry
-        python -m poetry install --with dev
-    - name: Test with Coverage and Pytest (Fail if coverage is low)
-      run: |
-        poetry run coverage run --source=./maigret -m pytest --reruns 3 --reruns-delay 5 tests
-        poetry run coverage report --fail-under=60
-        poetry run coverage html
-    - name: Upload coverage report
-      uses: actions/upload-artifact@v4
-      with:
-        name: htmlcov-${{ strategy.job-index }}
-        path: htmlcov
+    - name: Checkout
+      uses: actions/checkout@v4
+
+    - name: Set up Python ${{ matrix.python-version }}
+      uses: actions/setup-python@v5
+      with:
+        python-version: ${{ matrix.python-version }}
+
+    - name: Install system dependencies
+      run: |
+        sudo apt-get update
+        sudo apt-get install -y libcairo2-dev
+
+    - name: Install dependencies
+      run: |
+        python -m pip install --upgrade pip
+        python -m pip install poetry
+        python -m poetry install --with dev
+
+    - name: Test with Coverage and Pytest (fail if coverage is low)
+      run: |
+        poetry run coverage run --source=./maigret -m pytest --reruns 3 --reruns-delay 5 tests
+        poetry run coverage report --fail-under=60
+        poetry run coverage html
+
+    - name: Upload coverage report
+      uses: actions/upload-artifact@v4
+      with:
+        name: htmlcov-${{ strategy.job-index }}
+        path: htmlcov

@@ -44,3 +44,4 @@ settings.json
 *.egg-info
 build
+LLM
 lib

CHANGELOG.md (+191)
@@ -1,5 +1,196 @@
|
||||
# Changelog
|
||||
|
||||
## [0.6.0] - 2025-04-10
|
||||
|
||||
### What's Changed
|
||||
* Updated workflows: added 3.13 to test, updated pypi upload by @soxoj in https://github.com/soxoj/maigret/pull/2111
|
||||
* Bump pypdf from 5.1.0 to 6.0.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2122
|
||||
* Bump coverage from 7.9.2 to 7.10.3 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2117
|
||||
* Bump soupsieve from 2.6 to 2.7 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2118
|
||||
* Bump mock from 5.1.0 to 5.2.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2116
|
||||
* Bump pytest-asyncio from 1.0.0 to 1.1.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2114
|
||||
* Bump pytest-cov from 6.0.0 to 6.2.1 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2115
|
||||
* Bump xhtml2pdf from 0.2.16 to 0.2.17 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2149
|
||||
* Bump requests from 2.32.4 to 2.32.5 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2165
|
||||
* Bump lxml from 5.3.0 to 6.0.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2146
|
||||
* Bump aiodns from 3.2.0 to 3.5.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2148
|
||||
* Bump alive-progress from 3.2.0 to 3.3.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2145
|
||||
* Bump certifi from 2025.6.15 to 2025.8.3 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2147
|
||||
* Disabled some sites giving false positive results by @soxoj in https://github.com/soxoj/maigret/pull/2170
|
||||
* Bump flask from 3.1.1 to 3.1.2 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2175
|
||||
* Bump pyinstaller from 6.11.1 to 6.15.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2174
|
||||
* Bump mypy from 1.14.1 to 1.17.1 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2173
|
||||
* Bump pytest from 8.3.4 to 8.4.1 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2172
|
||||
* Bump flake8 from 7.1.1 to 7.3.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2171
|
||||
* Bump aiohttp from 3.12.14 to 3.12.15 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2181
|
||||
* Bump coverage from 7.10.3 to 7.10.5 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2180
|
||||
* Bump psutil from 6.1.1 to 7.0.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2179
|
||||
* Bump lxml from 6.0.0 to 6.0.1 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2178
|
||||
* Bump multidict from 6.6.3 to 6.6.4 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2177
|
||||
* Bump soupsieve from 2.7 to 2.8 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2185
|
||||
* Bump typing-extensions from 4.14.1 to 4.15.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2182
|
||||
* Bump python-bidi from 0.6.3 to 0.6.6 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2183
|
||||
* Bump platformdirs from 4.3.8 to 4.4.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2184
|
||||
* Make web interface accessible for Docker deployment by default by @soxoj in https://github.com/soxoj/maigret/pull/2189
|
||||
* Bump coverage from 7.10.5 to 7.10.6 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2192
|
||||
* Bump pytest-rerunfailures from 15.1 to 16.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2191
|
||||
* Bump pytest-rerunfailures from 15.1 to 16.0.1 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2193
|
||||
* Bump pytest from 8.4.1 to 8.4.2 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2194
|
||||
* Bump pytest-cov from 6.2.1 to 6.3.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2195
|
||||
* Bump pytest-cov from 6.3.0 to 7.0.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2196
|
||||
* Bump mypy from 1.17.1 to 1.18.1 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2197
|
||||
* Bump black from 25.1.0 to 25.9.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2203
|
||||
* Bump mypy from 1.18.1 to 1.18.2 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2202
|
||||
* Bump pytest-asyncio from 1.1.0 to 1.2.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2200
|
||||
* Bump pyinstaller from 6.15.0 to 6.16.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2199
|
||||
* Bump reportlab from 4.4.3 to 4.4.4 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2206
|
||||
* Bump coverage from 7.10.6 to 7.10.7 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2207
|
||||
* Bump psutil from 7.0.0 to 7.1.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2201
|
||||
* Bump asgiref from 3.9.1 to 3.9.2 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2204
|
||||
* Bump lxml from 6.0.1 to 6.0.2 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2208
|
||||
* Bump platformdirs from 4.4.0 to 4.5.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2223
|
||||
* Bump asgiref from 3.9.2 to 3.10.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2220
|
||||
* Bump yarl from 1.20.1 to 1.22.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2221
|
||||
* Bump markupsafe from 3.0.2 to 3.0.3 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2209
|
||||
* Bump multidict from 6.6.4 to 6.7.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2224
|
||||
* Bump idna from 3.10 to 3.11 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2227
|
||||
* Bump aiohttp from 3.12.15 to 3.13.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2225
|
||||
* Bump coverage from 7.10.7 to 7.11.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2230
|
||||
* Bump certifi from 2025.8.3 to 2025.10.5 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2228
|
||||
* Bump pytest-rerunfailures from 16.0.1 to 16.1 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2229
|
||||
* Bump attrs from 25.3.0 to 25.4.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2226
|
||||
* Bump aiohttp from 3.13.0 to 3.13.2 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2237
|
||||
* Bump pypdf from 6.0.0 to 6.1.3 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2233
|
||||
* Bump black from 25.9.0 to 25.11.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2239
|
||||
* Bump python-bidi from 0.6.6 to 0.6.7 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2234
|
||||
* Bump psutil from 7.1.0 to 7.1.3 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2240
|
||||
* Bump coverage from 7.11.0 to 7.12.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2241
|
||||
* Bump werkzeug from 3.1.3 to 3.1.4 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2248
|
||||
* Bump pypdf from 6.1.3 to 6.4.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2245
|
||||
* Bump asgiref from 3.10.0 to 3.11.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2243
|
||||
* Bump pytest-asyncio from 1.2.0 to 1.3.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2242
|
||||
* Bump aiohttp from 3.13.2 to 3.13.3 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2261
|
||||
* Bump pytest from 8.4.2 to 9.0.1 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2244
|
||||
* Bump mypy from 1.18.2 to 1.19.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2250
|
||||
* ♻️ Refactor: Hardcoded relative path for database file by @tang-vu in https://github.com/soxoj/maigret/pull/2285
|
||||
* ✨ Quality: Missing tests for settings cascade and override logic by @tang-vu in https://github.com/soxoj/maigret/pull/2287
|
||||
* ✨ Quality: Unexpanded tilde in file path by @tang-vu in https://github.com/soxoj/maigret/pull/2283
|
||||
* Bump urllib3 from 2.5.0 to 2.6.3 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2262
|
||||
* Bump pillow from 11.0.0 to 12.1.1 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2271
|
||||
* Bump black from 25.11.0 to 26.3.1 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2280
|
||||
* Bump cryptography from 44.0.1 to 46.0.5 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2270
|
||||
* Bump pypdf from 6.4.0 to 6.9.1 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2281
|
||||
* Dockerfile fix by @soxoj in https://github.com/soxoj/maigret/pull/2290
|
||||
* Fixed false positives in top-500 by @soxoj in https://github.com/soxoj/maigret/pull/2292
|
||||
* Update Telegram bot link in README by @soxoj in https://github.com/soxoj/maigret/pull/2293
|
||||
* Pyinstaller GitHub workflow fix by @soxoj in https://github.com/soxoj/maigret/pull/2298
|
||||
* Twitter fixed, mirrors mechanism improvement by @soxoj in https://github.com/soxoj/maigret/pull/2299
|
||||
* build(deps): bump flask from 3.1.2 to 3.1.3 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2289
|
||||
* Bump reportlab from 4.4.4 to 4.4.5 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2251
|
||||
* build(deps): bump werkzeug from 3.1.4 to 3.1.6 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2288
|
||||
* Bump certifi from 2025.10.5 to 2025.11.12 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2249
|
||||
* Update Telegram bot link in README by @soxoj in https://github.com/soxoj/maigret/pull/2300
|
||||
* Improve site-check quality by @soxoj in https://github.com/soxoj/maigret/pull/2301
|
||||
* feat(sites): fix false positives: disable 74 broken sites, fix 8 with… by @soxoj in https://github.com/soxoj/maigret/pull/2302
|
||||
* Update sites list workflow by @soxoj in https://github.com/soxoj/maigret/pull/2303
|
||||
* Bump svglib from 1.5.1 to 1.6.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2205
|
||||
* feat(workflow): fix update site data workflow dependency by @soxoj in https://github.com/soxoj/maigret/pull/2306
|
||||
* Re-enable taplink.cc with browser User-Agent to bypass Cloudflare by @Copilot in https://github.com/soxoj/maigret/pull/2308
|
||||
* feat(workflow): fix update site data workflow err by @soxoj in https://github.com/soxoj/maigret/pull/2312
|
||||
* Update site data workflow fix: remove ambiguous main tag by @soxoj in https://github.com/soxoj/maigret/pull/2313
|
||||
* Automated Sites List Update by @github-actions[bot] in https://github.com/soxoj/maigret/pull/2314
|
||||
* Fix Love.Mail.ru: update to numeric-only identifiers and new profile URL by @Copilot in https://github.com/soxoj/maigret/pull/2307
|
||||
* Remove dead site xxxforum.org by @Copilot in https://github.com/soxoj/maigret/pull/2310
|
||||
* Disable forums.developer.nvidia.com (auth-gated user profiles) by @Copilot in https://github.com/soxoj/maigret/pull/2305
|
||||
* Pin requests-toolbelt>=1.0.0 to fix urllib3 v2 incompatibility by @Copilot in https://github.com/soxoj/maigret/pull/2316
|
||||
* build(deps): bump reportlab from 4.4.5 to 4.4.10 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2323
|
||||
* build(deps-dev): bump coverage from 7.12.0 to 7.13.5 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2321
|
||||
* build(deps-dev): bump pytest-cov from 7.0.0 to 7.1.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2320
|
||||
* build(deps): bump aiohttp-socks from 0.10.1 to 0.11.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2319
|
||||
* Disable false-positive site probe: amateurvoyeurforum.com by @Copilot in https://github.com/soxoj/maigret/pull/2332
|
||||
* Disable forums.stevehoffman.tv due to false positives by @Copilot in https://github.com/soxoj/maigret/pull/2331
|
||||
* [WIP] Fix false-positive probe for vegalab site by @Copilot in https://github.com/soxoj/maigret/pull/2336
|
||||
* Fix RoyalCams site check using BongaCams white-label pattern by @Copilot in https://github.com/soxoj/maigret/pull/2334
|
||||
* Fix Setlist site check: switch to message checkType with proper markers by @Copilot in https://github.com/soxoj/maigret/pull/2333
|
||||
* [WIP] Fix invalid link on forums.imore.com by @Copilot in https://github.com/soxoj/maigret/pull/2337
|
||||
* Automated Sites List Update by @github-actions[bot] in https://github.com/soxoj/maigret/pull/2315
|
||||
* Automated Sites List Update by @github-actions[bot] in https://github.com/soxoj/maigret/pull/2339
|
||||
* Fix false-positive site probe: Re-enable Taplink with message checkType by @Copilot in https://github.com/soxoj/maigret/pull/2326
|
||||
* build(deps): bump aiodns from 3.5.0 to 4.0.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2345
|
||||
* build(deps-dev): bump mypy from 1.19.0 to 1.19.1 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2347
|
||||
* Disable Librusec site check (false positive) by @Copilot in https://github.com/soxoj/maigret/pull/2349
|
||||
* Disable MirTesen site check (false positive) by @Copilot in https://github.com/soxoj/maigret/pull/2350
|
||||
* build(deps): bump attrs from 25.4.0 to 26.1.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2344
|
||||
* Automated Sites List Update by @github-actions[bot] in https://github.com/soxoj/maigret/pull/2341
|
||||
* feat: add cybersecurity platforms + re-enable Root-Me by @juliosuas in https://github.com/soxoj/maigret/pull/2318
|
||||
* Fix club.cnews.ru false positive: switch from status_code to message checkType by @Copilot in https://github.com/soxoj/maigret/pull/2342
|
||||
* Fix SoundCloud false-positive: switch to message-based check by @Copilot in https://github.com/soxoj/maigret/pull/2355
|
||||
* build(deps): bump certifi from 2025.11.12 to 2026.2.25 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2346
|
||||
* feat: add tag blacklisting via `--exclude-tags` by @Copilot in https://github.com/soxoj/maigret/pull/2352
|
||||
* Fix domain substring matching and NoneType crash in submit dialog by @Copilot in https://github.com/soxoj/maigret/pull/2367
|
||||
* feat(core): add POST request support, new sites, migrate to Majestic Million ranking by @soxoj in https://github.com/soxoj/maigret/pull/2317
|
||||
* Fix update-site-data workflow race condition on branch push by @Copilot in https://github.com/soxoj/maigret/pull/2366
|
||||
* Fix false-positive site checks reported by Maigret Bot by @soxoj in https://github.com/soxoj/maigret/pull/2376
|
||||
* build(deps): bump pycountry from 24.6.1 to 26.2.16 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2382
|
||||
* Added Max.ru check; --no-progressbar flag fixed by @soxoj in https://github.com/soxoj/maigret/pull/2386
|
||||
* build(deps): bump asgiref from 3.11.0 to 3.11.1 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2384
|
||||
* build(deps): bump yarl from 1.22.0 to 1.23.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2383
|
||||
* build(deps): bump pypdf from 6.9.1 to 6.9.2 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2392
|
||||
* build(deps-dev): bump pytest-httpserver from 1.1.0 to 1.1.5 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2397
|
||||
* Automated Sites List Update by @github-actions[bot] in https://github.com/soxoj/maigret/pull/2399
|
||||
* build(deps): bump requests from 2.32.5 to 2.33.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2394
|
||||
* Readme update: commercial use by @soxoj in https://github.com/soxoj/maigret/pull/2403
|
||||
* build(deps): bump pyinstaller from 6.16.0 to 6.19.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2405
|
||||
* build(deps): bump psutil from 7.1.3 to 7.2.2 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2406
|
||||
* build(deps-dev): bump pytest from 9.0.1 to 9.0.2 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2381
|
||||
* build(deps): bump soupsieve from 2.8 to 2.8.3 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2404
|
||||
* Sites re-check by @soxoj in https://github.com/soxoj/maigret/pull/2423
|
||||
* Add urlProbes by @soxoj in https://github.com/soxoj/maigret/pull/2425
|
||||
* build(deps): bump cryptography from 46.0.5 to 46.0.6 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2422
|
||||
* Tags and site names improvements by @soxoj in https://github.com/soxoj/maigret/pull/2427
|
||||
* Overhaul site tags and naming: add social tag to 33 networks, fill mi… by @soxoj in https://github.com/soxoj/maigret/pull/2430
|
||||
* build(deps): bump multidict from 6.7.0 to 6.7.1 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2396
|
||||
* build(deps): bump chardet from 5.2.0 to 7.4.0.post2 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2436
|
||||
* build(deps): bump platformdirs from 4.5.0 to 4.9.4 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2434
|
||||
* build(deps): bump aiohttp from 3.13.3 to 3.13.4 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2435
|
||||
* build(deps): bump pygments from 2.18.0 to 2.20.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2440
|
||||
* build(deps): bump requests from 2.33.0 to 2.33.1 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2444
|
||||
* build(deps-dev): bump mypy from 1.19.1 to 1.20.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2447
|
||||
* build(deps): bump aiohttp from 3.13.4 to 3.13.5 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2448
|
||||
* Add site protection tracking system, fix broken site checks (Instagra… by @soxoj in https://github.com/soxoj/maigret/pull/2452
|
||||
* Multiple lint and types fixes by @soxoj in https://github.com/soxoj/maigret/pull/2454
|
||||
* fix(data): update InterPals absence string to match current site response by @juliosuas in https://github.com/soxoj/maigret/pull/2442
|
||||
* Update of MIT License by @soxoj in https://github.com/soxoj/maigret/pull/2455
|
||||
* Added Crypto/Web3 site checks by @soxoj in https://github.com/soxoj/maigret/pull/2457
|
||||
* DB update mechanism by @soxoj in https://github.com/soxoj/maigret/pull/2458
|
||||
* Fix false positives by @soxoj in https://github.com/soxoj/maigret/pull/2459
|
||||
* False positive fixes by @soxoj in https://github.com/soxoj/maigret/pull/2460
|
||||
* build(deps): bump curl-cffi from 0.14.0 to 0.15.0 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2462
|
||||
* Add Markdown reports for LLM analysis by @soxoj in https://github.com/soxoj/maigret/pull/2463
|
||||
* Sites fixes by @soxoj in https://github.com/soxoj/maigret/pull/2464
|
||||
* Add installation troubleshooting for missing system dependencies by @Copilot in https://github.com/soxoj/maigret/pull/2465
|
||||
* Fix Spotify, add Spotify Community forum by @soxoj in https://github.com/soxoj/maigret/pull/2467
|
||||
* Fix crash on `-a --self-check` by adding exception handling to site check coroutines by @Copilot in https://github.com/soxoj/maigret/pull/2466
|
||||
* Fix failing test for custom DB path resolution by @soxoj in https://github.com/soxoj/maigret/pull/2468
|
||||
* Bump lxml minimum to 6.0.2 for Python 3.14 compatibility by @ocervell in https://github.com/soxoj/maigret/pull/2279
|
||||
* build(deps-dev): bump pytest from 9.0.2 to 9.0.3 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2473
|
||||
* Update HackTheBox and Wikipedia to use new API endpoints by @Copilot in https://github.com/soxoj/maigret/pull/2470
|
||||
* Automated Sites List Update by @github-actions[bot] in https://github.com/soxoj/maigret/pull/2474
|
||||
* build(deps): bump chardet from 7.4.0.post2 to 7.4.1 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2472
|
||||
* build(deps): bump cryptography from 46.0.6 to 46.0.7 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2475
|
||||
* vBulletin cleanup, Flarum sites, engine stats, UA bump by @soxoj in https://github.com/soxoj/maigret/pull/2476
|
||||
* build(deps): bump platformdirs from 4.9.4 to 4.9.6 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2477
|
||||
* Re-enable 69 stale-disabled sites validated via self-check by @soxoj in https://github.com/soxoj/maigret/pull/2478
|
||||
* Fix false positives by @soxoj in https://github.com/soxoj/maigret/pull/2499
|
||||
* build(deps): bump socid-extractor from 0.0.27 to 0.0.28 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2502
|
||||
* build(deps): bump lxml from 6.0.2 to 6.0.3 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/2501
|
||||
* Disable Kinja.com site check by @Copilot in https://github.com/soxoj/maigret/pull/2503
|
||||
* Added 3 sites, fixed 6, disabled 8 by @soxoj in https://github.com/soxoj/maigret/pull/2505
|
||||
* Bump to 0.6.0 by @soxoj in https://github.com/soxoj/maigret/pull/2506
|
||||
* Update workflow to trigger on published releases by @soxoj in https://github.com/soxoj/maigret/pull/2508
|
||||
|
||||
**Full Changelog**: https://github.com/soxoj/maigret/compare/v0.5.0...v0.6.0
|
||||
|
||||
## [0.5.0] - 2025-08-10
|
||||
* Site Supression by @C3n7ral051nt4g3ncy in https://github.com/soxoj/maigret/pull/627
|
||||
* Bump yarl from 1.7.2 to 1.8.1 by @dependabot[bot] in https://github.com/soxoj/maigret/pull/626
|
||||
|
||||
CONTRIBUTING.md (+159 / -27)
@@ -1,53 +1,185 @@
|
||||
# How to contribute
|
||||
|
||||
Hey! I'm really glad you're reading this. Maigret contains a lot of sites, and it is very hard to keep all the sites operational. That's why any fix is important.
|
||||
## Code of Conduct
|
||||
|
||||
Please read and follow the [Code of Conduct](CODE_OF_CONDUCT.md) to foster a welcoming and inclusive community.
|
||||
|
||||
## How to add a new site
|
||||
## Local setup
|
||||
|
||||
#### Beginner level
|
||||
Install Maigret with development dependencies via [Poetry](https://python-poetry.org/):
|
||||
|
||||
You can use Maigret **submit mode** (`maigret --submit URL`) to add a new site or update an existing one. In this mode Maigret does an automatic analysis of the given account URL or site main page URL to determine the site engine and the methods to check account presence. After the check, Maigret asks whether you want to add the site; answering y/Y rewrites the local database.
|
||||
```bash
|
||||
git clone https://github.com/soxoj/maigret && cd maigret
|
||||
poetry install --with dev
|
||||
```
|
||||
|
||||
#### Advanced level
|
||||
Activate the repo's git hooks **once after cloning**:
|
||||
|
||||
You can edit [the database JSON file](https://github.com/soxoj/maigret/blob/main/maigret/resources/data.json) (`./maigret/resources/data.json`) manually.
|
||||
```bash
|
||||
git config --local core.hooksPath .githooks/
|
||||
```
|
||||
|
||||
The pre-commit hook does two things every time you commit changes that touch the site database:
|
||||
|
||||
- regenerates the database signature `maigret/resources/db_meta.json` (used to detect compatible auto-updates), and
|
||||
- regenerates `sites.md` (the human-readable list of supported sites with per-engine statistics).
|
||||
|
||||
It also auto-stages the regenerated files so they land in the same commit as your edits. **Always run `git commit` from inside the repo so the hook can fire** — without it, your PR will land with a stale signature and a stale `sites.md`, and database auto-update will misbehave for users on your branch.
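A typical flow, for illustration (the regenerated file names are the ones listed above; the site and commit message are made up):

```bash
# edit a site entry, then commit from inside the repo so the pre-commit hook fires
git add maigret/resources/data.json
git commit -m "Fix absence string for ExampleSite"
# the hook regenerates maigret/resources/db_meta.json and sites.md and stages them into this same commit
```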
|
||||
|
||||
## How to contribute
|
||||
|
||||
There are two main ways to help.
|
||||
|
||||
### 1. Add a new site
|
||||
|
||||
**Beginner.** Use the `--submit` mode — Maigret takes a single existing-account URL, auto-detects the site engine, picks `presenseStrs` / `absenceStrs`, and offers to add the entry:
|
||||
|
||||
```bash
|
||||
maigret --submit https://example.com/users/alice
|
||||
```
|
||||
|
||||
`--submit` works well when the site has clean status codes and no anti-bot protection. It will *not* discover a public JSON API (`urlProbe`), classify protection (`tls_fingerprint`, `cf_js_challenge`, `ip_reputation`, ...), or recognise SPA / soft-404 pages. For those, fall back to manual editing.
|
||||
|
||||
**Advanced.** Edit `maigret/resources/data.json` by hand — see *Editing `data.json` safely* below. There is also an `add-a-site` issue template if you want a maintainer to do it for you.
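For orientation, a minimal hand-written entry might look like the sketch below. The site and all values are made up; the field names follow the existing entries, so copy a neighbouring entry from `data.json` to get the exact schema:

```json
"ExampleForum": {
  "checkType": "message",
  "url": "https://exampleforum.com/members/{username}",
  "urlMain": "https://exampleforum.com",
  "presenseStrs": ["profile-card"],
  "absenceStrs": ["The specified member cannot be found"],
  "usernameClaimed": "adam",
  "usernameUnclaimed": "noonewouldeverusethis7",
  "tags": ["forum"]
}
```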
|
||||
|
||||
### 2. Fix existing sites
|
||||
|
||||
The most useful work in this project is keeping checks accurate over time. Sites change layout, switch engines, add Cloudflare, redirect to login walls — every fix is welcome.
|
||||
|
||||
**Where to start.** Good candidates:
|
||||
|
||||
- Issues with the `false-positive` label, especially those opened automatically by the Telegram bot.
|
||||
- Sites currently `disabled: true` in `data.json` — many were disabled on a transient symptom and have since healed.
|
||||
- Sites for which `--self-check --diagnose` reports a problem.
|
||||
- A focused audit of one engine (vBulletin, XenForo, phpBB, Discourse, Flarum, ...). Engine-wide breakage usually has a single root cause and several sites can be fixed in one PR.
|
||||
|
||||
**Diagnose with built-in tools.**
|
||||
|
||||
> By default, Maigret skips entries with `disabled: true` in every mode (`--self-check`, `--site`, plain search). Whenever your target is a disabled site — diagnosing it, validating a fix, running the two-filter check below — pass **`--use-disabled-sites`** explicitly. Without the flag, the site is silently dropped from the run and you get an empty result that looks like "everything's fine".
|
||||
|
||||
- Per-site diagnosis with recommendations:
|
||||
|
||||
```bash
|
||||
maigret --self-check --site "SiteName" --diagnose
|
||||
# add --use-disabled-sites if the entry is currently disabled
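# add --auto-disable only if you want a failing result written back to data.json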
|
||||
```
|
||||
|
||||
Without `--auto-disable`, this only reports — it never edits the database. Add `--auto-disable` only when you really want to write the result back.
|
||||
|
||||
- Single-site comparison of claimed vs unclaimed responses (status, markers, headers):
|
||||
|
||||
```bash
|
||||
python utils/site_check.py --site "SiteName" --diagnose
|
||||
python utils/site_check.py --site "SiteName" --compare-methods # raw aiohttp vs Maigret's checker
|
||||
```
|
||||
|
||||
- Mass check of top-N sites:
|
||||
|
||||
```bash
|
||||
python utils/check_top_n.py --top 100 --only-broken
|
||||
```
|
||||
|
||||
### Understanding `checkType`
|
||||
|
||||
Each site entry uses one of three `checkType` modes to decide whether a profile exists. Picking the right one for your site is the most important data-modeling decision in `data.json`:
|
||||
|
||||
- **`message`** (most common, most flexible) — Maigret fetches the page and inspects the HTML body. The profile is reported as found when the body contains at least one substring from `presenseStrs` **and** none of the substrings from `absenceStrs`. Pick narrow, profile-specific markers: a `<title>` fragment unique to profile pages, a CSS class only rendered on profiles (e.g. `"profile-card"`), or a JSON field name from an embedded data blob (`"displayName":`). Avoid generic words (`name`, `email`) and HTML/ARIA boilerplate (`polite`, `alert`, `navigation`, `status`) — they match on every page including error and anti-bot challenge pages, and produce false positives. If the marker contains non-ASCII text, double-check the page is UTF-8 (some legacy sites serve KOI8-R or Windows-1251, in which case byte-level matching silently fails — prefer ASCII markers or a JSON API).
|
||||
|
||||
- **`status_code`** — Maigret only looks at the HTTP status code; 2xx means "found", anything else means "not found". Use this only when the site reliably returns proper status codes — typically clean JSON APIs that return HTTP 200 for real users and HTTP 404 for missing ones. Don't use it for sites that return HTTP 200 with a soft "user not found" page (this is the single most common cause of false-positive checks).
|
||||
|
||||
- **`response_url`** — Maigret follows the redirect chain and inspects the final URL. Useful when the server reliably redirects missing-user URLs to a different path (e.g. `/login`, `/404`, the homepage) while existing-user URLs stay put. For most sites `message` is a better fit; reach for `response_url` only when a redirect-based signal is genuinely the most stable one.
|
||||
|
||||
**`urlProbe` (optional, works with any `checkType`).** If the most reliable signal lives at a different URL than the public profile page — a JSON API, a GraphQL endpoint, a mobile-app route — set `urlProbe` to that URL. Maigret fetches `urlProbe` for the check, but reports continue to show the human-readable `url` so users see a profile link they can click. Examples: GitHub uses `https://github.com/{username}` as `url` and `https://api.github.com/users/{username}` as `urlProbe`; Picsart uses the web profile as `url` and `https://api.picsart.com/users/show/{username}.json` as `urlProbe`. A clean public API is almost always more stable than parsing HTML — it's worth probing for one before settling on `message` against the SPA shell.
|
||||
|
||||
**Errors vs absence.** Anything that means "the server can't answer right now" — rate limits, captchas, "Checking your browser", "unusual traffic", maintenance pages — belongs in `errors` (mapping the substring to a human-readable error string), not in `absenceStrs`. The `errors` mechanism produces an UNKNOWN result instead of a false CLAIMED or false AVAILABLE.
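Putting the pieces together, here is a sketch of an entry that probes a JSON API and routes a rate-limit page to `errors` (illustrative only, not a verbatim copy of the real entry; check the development guide for the exact schema):

```json
"GitHub": {
  "checkType": "status_code",
  "url": "https://github.com/{username}",
  "urlProbe": "https://api.github.com/users/{username}",
  "errors": {
    "API rate limit exceeded": "GitHub API rate limit, retry later"
  },
  "usernameClaimed": "soxoj",
  "usernameUnclaimed": "noonewouldeverusethis7"
}
```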
|
||||
|
||||
Full reference for `checkType`, `urlProbe`, `engine`, and the rest of the `data.json` schema is in the [development guide](docs/source/development.rst), section *How to fix false-positives*.
|
||||
|
||||
### Editing `data.json` safely
|
||||
|
||||
`data.json` is a single ~36 000-line JSON file. **Make surgical, line-level edits only.** Never rewrite it by reading it into a Python dict and dumping it back — `json.load` + `json.dump` reformats every entry and produces an unreviewable 70 000-line diff. The same rule applies to any helper script that touches the file: it must preserve the original formatting of untouched entries.
|
||||
|
||||
If your editor reformats JSON on save, disable that for `data.json` before editing.
|
||||
|
||||
### Two-filter validation when re-enabling a site
|
||||
|
||||
Removing `disabled: true` requires **two** independent checks. `--self-check` alone is not sufficient — it only verifies the two specific usernames recorded in the entry, so a site that returns CLAIMED for *any* arbitrary username will still pass the self-check.
|
||||
|
||||
```bash
|
||||
# Filter 1: self-check on the recorded claimed/unclaimed pair
|
||||
maigret --self-check --site "SiteName" --use-disabled-sites
|
||||
|
||||
# Filter 2: live probe with a clearly fake username — nothing should match
|
||||
maigret noonewouldeverusethis7 --site "SiteName" --use-disabled-sites --print-not-found
|
||||
```
|
||||
|
||||
Both filters need `--use-disabled-sites`, since a candidate for re-enable still has `disabled: true` in the working tree until your edit lands. If you forget the flag, both commands silently no-op.
|
||||
|
||||
If the second command reports `[+]` for the fake username, the check is a false positive — do not enable. This step takes seconds and is non-negotiable for any re-enable PR.
|
||||
|
||||
## Site naming, tags, and protection
|
||||
|
||||
- **Site naming conventions** (Title Case by default, brand-specific exceptions, no `www.` prefix, etc.) are documented in the [development guide](docs/source/development.rst), section *Site naming conventions*.
|
||||
|
||||
- **Country tags** (`us`, `ru`, `kr`, ...) attribute an account to a country of origin or residence — they're not a traffic-share label. Global services (GitHub, YouTube, Reddit) get **no** country tag; regional services (VK → `ru`, Naver → `kr`) **must** have one. Don't assign a country tag from Alexa/SimilarWeb audience stats.
|
||||
|
||||
- **Category tags** must come from the canonical `"tags"` array at the bottom of `data.json`. The `test_tags_validity` test fails if you introduce an unregistered tag. If no existing tag fits well, either pick the closest reasonable match or add the new tag to the canonical list as an explicit, separate change. Don't use platform names (`writefreely`, `pixelfed`) — use category names (`blog`, `photo`).
|
||||
|
||||
- **Protection tags** (`tls_fingerprint`, `ip_reputation`, `cf_js_challenge`, `cf_firewall`, `aws_waf_js_challenge`, `ddos_guard_challenge`, `js_challenge`, `custom_bot_protection`) describe the kind of anti-bot protection a site uses. One of them — **`tls_fingerprint`** — is load-bearing: when a site fingerprints the TLS handshake (JA3/JA4) and blocks non-browser clients, tagging it with `tls_fingerprint` makes Maigret automatically swap its HTTP client to [`curl_cffi`](https://github.com/lexiforest/curl_cffi) with Chrome browser emulation, which is usually enough to pass. The site stays `enabled` — no `disabled: true` is needed. Examples: Instagram, NPM, Codepen, Kickstarter, Letterboxd. The remaining tags are documentation-only and pair with `disabled: true` until a per-provider solver is integrated. The full taxonomy and the rules for picking the right tag are in the [development guide](docs/source/development.rst), section *protection (site protection tracking)*. Don't add a protection tag without empirical evidence it applies in the current environment.
|
||||
|
||||
## Testing
|
||||
|
||||
There are CI checks for every PR to the Maigret repository, but it is better to run `make format`, `make lint`, and `make test` locally first to ensure your changes are correct.
|
||||
CI runs the same checks on every PR, but please run them locally first:
|
||||
|
||||
```bash
|
||||
make format # auto-format with black
|
||||
make lint # flake / mypy
|
||||
make test # pytest with coverage
|
||||
```
|
||||
|
||||
## Submitting changes
|
||||
|
||||
To submit your changes, you must [send a GitHub PR](https://github.com/soxoj/maigret/pulls) to the Maigret project.
|
||||
Always write a clear log message for your commits. One-line messages are fine for small changes, but bigger changes should look like this:
|
||||
Open a [GitHub PR](https://github.com/soxoj/maigret/pulls) against `main`. Always write a clear log message:
|
||||
|
||||
```
|
||||
$ git commit -m "A brief summary of the commit
|
||||
>
|
||||
> A paragraph describing what changed and its impact."
|
||||
```
|
||||
|
||||
One-line messages are fine for small changes; bigger changes should explain the *why* in the body.
|
||||
|
||||
## Coding conventions
|
||||
|
||||
### General Guidelines
|
||||
### General
|
||||
|
||||
- Try to follow [PEP 8](https://www.python.org/dev/peps/pep-0008/) for Python code style.
|
||||
- Ensure your code passes all tests before submitting a pull request.
|
||||
- Follow [PEP 8](https://www.python.org/dev/peps/pep-0008/) for Python.
|
||||
- Make sure all tests pass before opening the PR.
|
||||
|
||||
### Code Style
|
||||
### Code style
|
||||
|
||||
- **Indentation**: Use 4 spaces per indentation level.
|
||||
- **Imports**:
|
||||
- Standard library imports should be placed at the top.
|
||||
- Third-party imports should follow.
|
||||
- Group imports logically.
|
||||
- **Indentation**: 4 spaces per level.
|
||||
- **Imports**: standard library first, third-party next, project-local last; group them logically.
|
||||
|
||||
### Naming Conventions
|
||||
### Naming
|
||||
|
||||
- **Variables and Functions**: Use `snake_case`.
|
||||
- **Classes**: Use `CamelCase`.
|
||||
- **Constants**: Use `UPPER_CASE`.
|
||||
|
||||
Start reading the code and you'll get the hang of it. ;)
|
||||
- **Variables and functions**: `snake_case`.
|
||||
- **Classes**: `CamelCase`.
|
||||
- **Constants**: `UPPER_CASE`.
|
||||
|
||||
Start reading the code and you'll get the hang of it.
|
||||
|
||||
## Getting help
|
||||
|
||||
If you're stuck on something — a check that won't behave, a setup error, an unclear field in `data.json`, or just want to discuss an approach before opening a PR — there are two places to ask:
|
||||
|
||||
- [GitHub Discussions](https://github.com/soxoj/maigret/discussions) — searchable, public, good for technical questions and design ideas. Prefer this for anything other contributors might run into too.
|
||||
- Telegram: [@soxoj](https://t.me/soxoj) — direct channel to the maintainer, good for quick questions and informal chat.
|
||||
|
||||
Bug reports and feature requests still belong in [GitHub Issues](https://github.com/soxoj/maigret/issues).
|
||||
|
||||
## License
|
||||
|
||||
Maigret is MIT-licensed; by submitting a contribution you agree to publish it under the same license. There is no CLA.
|
||||
|
||||
Dockerfile (+10 / -1)
@@ -1,4 +1,4 @@
-FROM python:3.11-slim
+FROM python:3.11-slim AS base
 LABEL maintainer="Soxoj <soxoj@protonmail.com>"
 WORKDIR /app
 RUN pip install --no-cache-dir --upgrade pip
@@ -15,4 +15,13 @@ COPY . .
 RUN YARL_NO_EXTENSIONS=1 python3 -m pip install --no-cache-dir .
 # For production use, set FLASK_HOST to a specific IP address for security
 ENV FLASK_HOST=0.0.0.0
+
+# Web UI variant: auto-launches the web interface on $PORT
+FROM base AS web
+ENV PORT=5000
+EXPOSE 5000
+ENTRYPOINT ["sh", "-c", "exec maigret --web \"$PORT\""]
+
+# Default variant (last stage = `docker build .` target): CLI, backwards-compatible
+FROM base AS cli
 ENTRYPOINT ["maigret"]

@@ -1,7 +1,7 @@
|
||||
# Maigret
|
||||
|
||||
<p align="center">
|
||||
<p align="center">
|
||||
<div align="center">
|
||||
<div>
|
||||
<a href="https://pypi.org/project/maigret/">
|
||||
<img alt="PyPI version badge for Maigret" src="https://img.shields.io/pypi/v/maigret?style=flat-square" />
|
||||
</a>
|
||||
@@ -17,158 +17,73 @@
|
||||
<a href="https://github.com/soxoj/maigret">
|
||||
<img alt="View count for Maigret project" src="https://komarev.com/ghpvc/?username=maigret&color=brightgreen&label=views&style=flat-square" />
|
||||
</a>
|
||||
</p>
|
||||
<p align="center">
|
||||
<img src="https://raw.githubusercontent.com/soxoj/maigret/main/static/maigret.png" height="300"/>
|
||||
</p>
|
||||
</p>
|
||||
</div>
|
||||
<br>
|
||||
<div>
|
||||
<img src="https://raw.githubusercontent.com/soxoj/maigret/main/static/maigret.png" height="300" alt="Maigret logo"/>
|
||||
</div>
|
||||
<br>
|
||||
<div>
|
||||
<b>English</b> · <a href="README.zh-CN.md">简体中文</a>
|
||||
</div>
|
||||
<br>
|
||||
</div>
|
||||
|
||||
<i>The Commissioner Jules Maigret is a fictional French police detective, created by Georges Simenon. His investigation method is based on understanding the personality of different people and their interactions.</i>
|
||||
**Maigret** collects a dossier on a person **by username only**, checking for accounts on a huge number of sites and gathering all the available information from web pages. No API keys required.
|
||||
|
||||
<b>👉👉👉 [Online Telegram bot](https://t.me/maigret_search_bot) | 🏢 [Commercial use & API](#commercial-use)</b>
|
||||
## Contents
|
||||
|
||||
## About
|
||||
- [In one minute](#in-one-minute)
|
||||
- [Main features](#main-features)
|
||||
- [Demo](#demo)
|
||||
- [Installation](#installation)
|
||||
- [Usage](#usage)
|
||||
- [Contributing](#contributing)
|
||||
- [Commercial Use](#commercial-use)
|
||||
- [About](#about)
|
||||
|
||||
**Maigret** collects a dossier on a person **by username only**, checking for accounts on a huge number of sites and gathering all the available information from web pages. No API keys are required. Maigret is an easy-to-use and powerful fork of [Sherlock](https://github.com/sherlock-project/sherlock).
|
||||
<a id="one-minute"></a>
|
||||
## In one minute
|
||||
|
||||
Currently supports more than 3000 sites ([full list](https://github.com/soxoj/maigret/blob/main/sites.md)); by default, the search is launched against the 500 most popular sites in descending order of popularity. Checking Tor sites, I2P sites, and domains (via DNS resolving) is also supported.
|
||||
Ensure you have Python 3.10 or higher.
|
||||
|
||||
## Powered By Maigret
|
||||
```bash
|
||||
pip install maigret
|
||||
maigret YOUR_USERNAME
|
||||
```
|
||||
|
||||
These are professional tools for social media content analysis and OSINT investigations that use Maigret (banners are clickable).
|
||||
No install? Try the [Telegram bot](https://t.me/maigret_search_bot) or a [Cloud Shell](#cloud-shells).
|
||||
|
||||
Want a web UI? See [how to launch it](#web-interface).
|
||||
|
||||
See also: [Quick start](https://maigret.readthedocs.io/en/latest/quick-start.html).
|
||||
|
||||
## Main features
|
||||
|
||||
- Supports 3,000+ sites ([see full list](https://github.com/soxoj/maigret/blob/main/sites.md)). A default run checks the 500 highest-ranked sites by traffic; pass `-a` to scan everything, or `--tags` to narrow by category/country.
|
||||
- Embeddable in Python projects — import `maigret` and run searches programmatically (see [library usage](https://maigret.readthedocs.io/en/latest/library-usage.html)).
|
||||
- [Extracts](https://github.com/soxoj/socid_extractor) all available information about the account owner from profile pages and site APIs, including links to other accounts.
|
||||
- Performs recursive search using discovered usernames and other IDs.
|
||||
- Allows filtering by tags (site categories, countries).
|
||||
- Detects and partially bypasses blocks, censorship, and CAPTCHA.
|
||||
- Fetches an [auto-updated site database](https://maigret.readthedocs.io/en/latest/settings.html#database-auto-update) from GitHub each run (once per 24 hours), and falls back to the built-in database if offline.
|
||||
- Works with Tor and I2P websites; able to check domains.
|
||||
- Ships with a [web interface](#web-interface) for browsing results as a graph and downloading reports in every format from a single page.
|
||||
- Optional [AI analysis mode](#ai-analysis) (`--ai`) that turns raw findings into a short investigation summary using an OpenAI-compatible API.
|
||||
|
||||
For the complete feature list, see the [features documentation](https://maigret.readthedocs.io/en/latest/features.html).
|
||||
|
||||
### Used by
|
||||
|
||||
Professional OSINT and social-media analysis tools built on Maigret:
|
||||
|
||||
<a href="https://github.com/SocialLinks-IO/sociallinks-api"><img height="60" alt="Social Links API" src="https://github.com/user-attachments/assets/789747b2-d7a0-4d4e-8868-ffc4427df660"></a>
|
||||
<a href="https://sociallinks.io/products/sl-crimewall"><img height="60" alt="Social Links Crimewall" src="https://github.com/user-attachments/assets/0b18f06c-2f38-477b-b946-1be1a632a9d1"></a>
|
||||
<a href="https://usersearch.ai/"><img height="60" alt="UserSearch" src="https://github.com/user-attachments/assets/66daa213-cf7d-40cf-9267-42f97cf77580"></a>
|
||||
|
||||
## Main features
|
||||
## Demo
|
||||
|
||||
* Profile page parsing, [extraction](https://github.com/soxoj/socid_extractor) of personal info, links to other profiles, etc.
|
||||
* Recursive search by new usernames and other IDs found
|
||||
* Search by tags (site categories, countries)
|
||||
* Censorship and captcha detection
|
||||
* Requests retries
|
||||
|
||||
See the full description of Maigret features [in the documentation](https://maigret.readthedocs.io/en/latest/features.html).
|
||||
|
||||
## Installation
|
||||
|
||||
‼️ Maigret is available online via the [official Telegram bot](https://t.me/maigret_search_bot). Consider using it if you don't want to install anything.
|
||||
|
||||
### Windows
|
||||
|
||||
Standalone EXE binaries for Windows are available in the [Releases section](https://github.com/soxoj/maigret/releases) of the GitHub repository.
|
||||
|
||||
Video guide on how to run it: https://youtu.be/qIgwTZOmMmM.
|
||||
|
||||
### Installation in Cloud Shells
|
||||
|
||||
You can launch Maigret using cloud shells and Jupyter notebooks. Press one of the buttons below and follow the instructions to launch it in your browser.
|
||||
|
||||
[Open in Cloud Shell](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/soxoj/maigret&tutorial=README.md)
|
||||
<a href="https://repl.it/github/soxoj/maigret"><img src="https://replit.com/badge/github/soxoj/maigret" alt="Run on Replit" height="50"></a>
|
||||
|
||||
<a href="https://colab.research.google.com/gist/soxoj/879b51bc3b2f8b695abb054090645000/maigret-collab.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab" height="45"></a>
|
||||
<a href="https://mybinder.org/v2/gist/soxoj/9d65c2f4d3bec5dd25949197ea73cf3a/HEAD"><img src="https://mybinder.org/badge_logo.svg" alt="Open In Binder" height="45"></a>
|
||||
|
||||
### Local installation
|
||||
|
||||
Maigret can be installed using pip or Docker, or launched directly from the cloned repo.
|
||||
|
||||
|
||||
**NOTE**: Python 3.10 or higher and pip are required; **Python 3.11 is recommended.**
|
||||
|
||||
```bash
|
||||
# install from pypi
|
||||
pip3 install maigret
|
||||
|
||||
# usage
|
||||
maigret username
|
||||
```
|
||||
|
||||
### Cloning a repository
|
||||
|
||||
```bash
|
||||
# or clone and install manually
|
||||
git clone https://github.com/soxoj/maigret && cd maigret
|
||||
|
||||
# build and install
|
||||
pip3 install .
|
||||
|
||||
# usage
|
||||
maigret username
|
||||
```
|
||||
|
||||
### Docker
|
||||
|
||||
```bash
|
||||
# official image
|
||||
docker pull soxoj/maigret
|
||||
|
||||
# usage
|
||||
docker run -v /mydir:/app/reports soxoj/maigret:latest username --html
|
||||
|
||||
# manual build
|
||||
docker build -t maigret .
|
||||
```
|
||||
|
||||
### Troubleshooting
|
||||
|
||||
If you encounter build errors during installation, check the [troubleshooting guide](https://maigret.readthedocs.io/en/latest/installation.html#troubleshooting).
|
||||
|
||||
## Usage examples
|
||||
|
||||
```bash
|
||||
# make HTML, PDF, and Xmind8 reports
|
||||
maigret user --html
|
||||
maigret user --pdf
|
||||
maigret user --xmind  # output not compatible with XMind 2022+
|
||||
|
||||
# search on sites marked with tags photo & dating
|
||||
maigret user --tags photo,dating
|
||||
|
||||
# search on sites marked with tag us
|
||||
maigret user --tags us
|
||||
|
||||
# search for three usernames on all available sites
|
||||
maigret user1 user2 user3 -a
|
||||
```
|
||||
|
||||
Use `maigret --help` to get full options description. Also options [are documented](https://maigret.readthedocs.io/en/latest/command-line-options.html).
|
||||
|
||||
### Web interface
|
||||
|
||||
You can run Maigret with a web interface, where you can view the graph with results and download reports of all formats on a single page.
|
||||
|
||||
<details>
|
||||
<summary>Web Interface Screenshots</summary>
|
||||
|
||||

|
||||
|
||||

|
||||
|
||||
</details>
|
||||
|
||||
Instructions:
|
||||
|
||||
1. Run Maigret with the ``--web`` flag and specify the port number.
|
||||
|
||||
```console
|
||||
maigret --web 5000
|
||||
```
|
||||
2. Open http://127.0.0.1:5000 in your browser and enter one or more usernames to make a search.
|
||||
|
||||
3. Wait a bit for the search to complete and view the graph with results, the table with all accounts found, and download reports of all formats.
|
||||
|
||||
## Contributing
|
||||
|
||||
Maigret is open source, so you can contribute your own sites by adding them to the `data.json` file, or make changes to its code!
|
||||
|
||||
For more information about development and contribution, please read the [development documentation](https://maigret.readthedocs.io/en/latest/development.html).
|
||||
|
||||
## Demo with page parsing and recursive username search
|
||||
|
||||
### Video (asciinema)
|
||||
### Video
|
||||
|
||||
<a href="https://asciinema.org/a/Ao0y7N0TTxpS0pisoprQJdylZ">
|
||||
<img src="https://asciinema.org/a/Ao0y7N0TTxpS0pisoprQJdylZ.svg" alt="asciicast" width="600">
|
||||
@@ -184,37 +99,205 @@ For more information about development and contribution, please read the [develo
|
||||
|
||||
[Full console output](https://raw.githubusercontent.com/soxoj/maigret/main/static/recursive_search.md)
|
||||
|
||||
## Disclaimer
|
||||
## Installation
|
||||
|
||||
**This tool is intended for educational and lawful purposes only.** The developers do not endorse or encourage any illegal activities or misuse of this tool. Regulations regarding the collection and use of personal data vary by country and region, including but not limited to GDPR in the EU, CCPA in the USA, and similar laws worldwide.
|
||||
Already ran the [In one minute](#one-minute) steps? You're set. Below are alternative methods.
|
||||
|
||||
It is your sole responsibility to ensure that your use of this tool complies with all applicable laws and regulations in your jurisdiction. Any illegal use of this tool is strictly prohibited, and you are fully accountable for your actions.
|
||||
Don't want to install anything? Use the [Telegram bot](https://t.me/maigret_search_bot).
|
||||
|
||||
The authors and developers of this tool bear no responsibility for any misuse or unlawful activities conducted by its users.
|
||||
### Windows
|
||||
|
||||
## Feedback
|
||||
Download a standalone EXE from [Releases](https://github.com/soxoj/maigret/releases). Video guide: https://youtu.be/qIgwTZOmMmM.
|
||||
|
||||
If you have any questions, suggestions, or feedback, please feel free to [open an issue](https://github.com/soxoj/maigret/issues), create a [GitHub discussion](https://github.com/soxoj/maigret/discussions), or contact the author directly via [Telegram](https://t.me/soxoj).
|
||||
<a id="cloud-shells"></a>
|
||||
### Cloud Shells
|
||||
|
||||
Run Maigret in the browser via cloud shells or Jupyter notebooks:
|
||||
|
||||
<a href="https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/soxoj/maigret&tutorial=cloudshell-tutorial.md"><img src="https://user-images.githubusercontent.com/27065646/92304704-8d146d80-ef80-11ea-8c29-0deaabb1c702.png" alt="Open in Cloud Shell" height="50"></a>
|
||||
<a href="https://repl.it/github/soxoj/maigret"><img src="https://replit.com/badge/github/soxoj/maigret" alt="Run on Replit" height="50"></a>
|
||||
|
||||
<a href="https://colab.research.google.com/gist/soxoj/879b51bc3b2f8b695abb054090645000/maigret-collab.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab" height="45"></a>
|
||||
<a href="https://mybinder.org/v2/gist/soxoj/9d65c2f4d3bec5dd25949197ea73cf3a/HEAD"><img src="https://mybinder.org/badge_logo.svg" alt="Open In Binder" height="45"></a>
|
||||
|
||||
### Local installation (pip)
|
||||
|
||||
```bash
|
||||
# install from pypi
|
||||
pip3 install maigret
|
||||
|
||||
# usage
|
||||
maigret username
|
||||
```
|
||||
|
||||
### From source
|
||||
|
||||
```bash
|
||||
# or clone and install manually
|
||||
git clone https://github.com/soxoj/maigret && cd maigret
|
||||
|
||||
# build and install
|
||||
pip3 install .
|
||||
|
||||
# usage
|
||||
maigret username
|
||||
```
|
||||
|
||||
### Docker
|
||||
|
||||
Two image variants are published:
|
||||
|
||||
- `soxoj/maigret:latest` — CLI mode (default)
|
||||
- `soxoj/maigret:web` — auto-launches the [web interface](#web-interface)
|
||||
|
||||
```bash
|
||||
# official image (CLI)
|
||||
docker pull soxoj/maigret
|
||||
|
||||
# CLI usage
|
||||
docker run -v /mydir:/app/reports soxoj/maigret:latest username --html
|
||||
|
||||
# Web UI (open http://localhost:5000)
|
||||
docker run -p 5000:5000 soxoj/maigret:web
|
||||
|
||||
# Web UI on a custom port
|
||||
docker run -e PORT=8080 -p 8080:8080 soxoj/maigret:web
|
||||
|
||||
# manual build
|
||||
docker build -t maigret . # CLI image (default target)
|
||||
docker build --target web -t maigret-web . # Web UI image
|
||||
```
|
||||
|
||||
### Troubleshooting
|
||||
|
||||
Build errors? See the [troubleshooting guide](https://maigret.readthedocs.io/en/latest/installation.html#troubleshooting).
|
||||
|
||||
## Usage
|
||||
|
||||
### Examples
|
||||
|
||||
```bash
|
||||
# make HTML, PDF, and XMind 8 reports
|
||||
maigret user --html
|
||||
maigret user --pdf
|
||||
maigret user --xmind # not compatible with XMind 2022+
|
||||
|
||||
# machine-readable exports
|
||||
maigret user --json ndjson # newline-delimited JSON (also: --json simple)
|
||||
maigret user --csv
|
||||
maigret user --txt
|
||||
maigret user --graph # interactive D3 graph (HTML)
|
||||
|
||||
# search on sites marked with tags photo & dating
|
||||
maigret user --tags photo,dating
|
||||
|
||||
# search on sites marked with tag us
|
||||
maigret user --tags us
|
||||
|
||||
# search for three usernames on all available sites
|
||||
maigret user1 user2 user3 -a
|
||||
|
||||
# AI-assisted investigation summary (needs OPENAI_API_KEY)
|
||||
maigret user --ai
|
||||
```
|
||||
|
||||
Run `maigret --help` for all options. Docs: [CLI options](https://maigret.readthedocs.io/en/latest/command-line-options.html), [more examples](https://maigret.readthedocs.io/en/latest/usage-examples.html). Running into 403s or timeouts? See [TROUBLESHOOTING.md](TROUBLESHOOTING.md).
|
||||
|
||||
<a id="web-interface"></a>
|
||||
### Web interface
|
||||
|
||||
Maigret has a built-in web UI with a results graph and downloadable reports.
|
||||
|
||||
<details>
|
||||
<summary>Web Interface Screenshots</summary>
|
||||
|
||||

|
||||
|
||||

|
||||
|
||||
</details>
|
||||
|
||||
```console
|
||||
maigret --web 5000
|
||||
```
|
||||
|
||||
Open http://127.0.0.1:5000, enter a username, and view results.
|
||||
|
||||
### Python library
|
||||
|
||||
**Maigret can be embedded in your own Python projects.** The CLI is a thin wrapper around an async function you can call directly — build custom pipelines, feed results into your own tooling, or run it inside a larger OSINT workflow.
|
||||
|
||||
See the full [library usage guide](https://maigret.readthedocs.io/en/latest/library-usage.html) for a working example, async patterns, and how to filter sites by tag.
|
||||
|
||||
### Useful CLI flags
|
||||
|
||||
- `--parse URL` — parse a profile page, extract IDs/usernames, and use them to kick off a recursive search.
|
||||
- `--permute` — generate likely username variants from two or more inputs (e.g. `john doe` → `johndoe`, `j.doe`, …) and search for all of them.
|
||||
- `--self-check [--auto-disable]` — verify `usernameClaimed` / `usernameUnclaimed` pairs against live sites for maintainers auditing the database.
|
||||
- `--ai` / `--ai-model` — run the [AI analysis](#ai-analysis) over the search results and stream a short investigation summary to the terminal.
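
A couple of these in action (the profile URL and the name pair below are purely illustrative placeholders):

```bash
# pull IDs/usernames out of an existing profile page and pivot into a recursive search
maigret --parse https://www.reddit.com/user/example-user/

# build and check likely permutations of two known name parts
maigret john doe --permute
```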
|
||||
|
||||
<a id="ai-analysis"></a>
|
||||
### AI analysis
|
||||
|
||||
`--ai` collects the search results, builds an internal Markdown report, and sends it to an OpenAI-compatible chat completion endpoint to produce a short, neutral investigation summary (likely real name, location, occupation, interests, languages, confidence, follow-up leads). Per-site progress is suppressed and the model's output is streamed to stdout.
|
||||
|
||||
```bash
|
||||
export OPENAI_API_KEY=sk-...
|
||||
maigret user --ai
|
||||
|
||||
# pick a different model
|
||||
maigret user --ai --ai-model gpt-4o-mini
|
||||
```
|
||||
|
||||
The key can also be set as `openai_api_key` in `settings.json`. The endpoint defaults to `https://api.openai.com/v1`, but `openai_api_base_url` in `settings.json` can point to any OpenAI-compatible API (Azure OpenAI, OpenRouter, a local server, …). See the [settings docs](https://maigret.readthedocs.io/en/latest/settings.html) for the full list of options.
|
||||
|
||||
### Tor / I2P / proxies
|
||||
|
||||
Maigret can route checks through a proxy, Tor, or I2P — useful for `.onion` / `.i2p` sites and for bypassing WAFs that block datacenter IPs.
|
||||
|
||||
```bash
|
||||
# any HTTP/SOCKS proxy
|
||||
maigret user --proxy socks5://127.0.0.1:1080
|
||||
|
||||
# Tor (default gateway socks5://127.0.0.1:9050)
|
||||
maigret user --tor-proxy socks5://127.0.0.1:9050
|
||||
|
||||
# I2P (default gateway http://127.0.0.1:4444)
|
||||
maigret user --i2p-proxy http://127.0.0.1:4444
|
||||
```
|
||||
|
||||
Start your Tor / I2P daemon before running the command — Maigret does not manage these gateways.
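
As an illustration, on a systemd-based Linux box with the `tor` package installed (service and package names vary by distribution), starting and sanity-checking the gateway might look like this:

```bash
# start the Tor daemon and confirm the SOCKS port answers
sudo systemctl start tor
curl --socks5-hostname 127.0.0.1:9050 https://check.torproject.org/api/ip

# then point Maigret at it
maigret user --tor-proxy socks5://127.0.0.1:9050
```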
|
||||
|
||||
## Contributing
|
||||
|
||||
Add new sites or fix existing ones by editing `data.json` surgically (don't rewrite the whole file with `json.load`/`json.dump`), then run `./utils/update_site_data.py` to regenerate `sites.md` and the database metadata, and open a pull request. For more details, see the [CONTRIBUTING guide](https://github.com/soxoj/maigret/blob/main/CONTRIBUTING.md) and [development docs](https://maigret.readthedocs.io/en/latest/development.html). Release history: [CHANGELOG.md](CHANGELOG.md).
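
A typical contribution loop, sketched with the paths used elsewhere in these docs (adjust to your checkout):

```bash
# edit the site entry in place, then regenerate derived files
$EDITOR maigret/resources/data.json
./utils/update_site_data.py

# verify claimed/unclaimed username pairs against the live sites
maigret --self-check
```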
|
||||
|
||||
## Commercial Use
|
||||
|
||||
The open-source Maigret is MIT-licensed and free for commercial use without restriction — but site checks break over time and need active maintenance.

For serious commercial use — with a **daily-updated site database** or a **username-check API** — reach out: 📧 [maigret@soxoj.com](mailto:maigret@soxoj.com)
|
||||
- Private site database — 5 000+ sites, updated daily (separate from the public open-source database)
|
||||
- Username check API — integrate Maigret into your product
|
||||
|
||||
## About
|
||||
### Disclaimer
|
||||
|
||||
**For educational and lawful purposes only.** You are responsible for complying with all applicable laws (GDPR, CCPA, etc.) in your jurisdiction. The authors bear no responsibility for misuse.
|
||||
|
||||
### Feedback
|
||||
|
||||
[Open an issue](https://github.com/soxoj/maigret/issues) · [GitHub Discussions](https://github.com/soxoj/maigret/discussions) · [Telegram](https://t.me/soxoj)
|
||||
|
||||
### SOWEL classification
|
||||
|
||||
OSINT techniques used:
|
||||
- [SOTL-2.2. Search For Accounts On Other Platforms](https://sowel.soxoj.com/other-platform-accounts)
|
||||
- [SOTL-6.1. Check Logins Reuse To Find Another Account](https://sowel.soxoj.com/logins-reuse)
|
||||
- [SOTL-6.2. Check Nicknames Reuse To Find Another Account](https://sowel.soxoj.com/nicknames-reuse)
|
||||
|
||||
### License
|
||||
MIT © [Maigret](https://github.com/soxoj/maigret)
|
||||
|
||||
@@ -0,0 +1,310 @@
|
||||
# Maigret
|
||||
|
||||
<div align="center">
|
||||
<div>
|
||||
<a href="https://pypi.org/project/maigret/">
|
||||
<img alt="Maigret 的 PyPI 版本" src="https://img.shields.io/pypi/v/maigret?style=flat-square" />
|
||||
</a>
|
||||
<a href="https://pypi.org/project/maigret/">
|
||||
<img alt="Maigret 的 PyPI 周下载量" src="https://img.shields.io/pypi/dw/maigret?style=flat-square" />
|
||||
</a>
|
||||
<a href="https://github.com/soxoj/maigret">
|
||||
<img alt="所需最低 Python 版本:3.10+" src="https://img.shields.io/badge/Python-3.10%2B-brightgreen?style=flat-square" />
|
||||
</a>
|
||||
<a href="https://github.com/soxoj/maigret/blob/main/LICENSE">
|
||||
<img alt="Maigret 的开源许可证" src="https://img.shields.io/github/license/soxoj/maigret?style=flat-square" />
|
||||
</a>
|
||||
<a href="https://github.com/soxoj/maigret">
|
||||
<img alt="Maigret 项目访问量" src="https://komarev.com/ghpvc/?username=maigret&color=brightgreen&label=views&style=flat-square" />
|
||||
</a>
|
||||
</div>
|
||||
<br>
|
||||
<div>
|
||||
<img src="https://raw.githubusercontent.com/soxoj/maigret/main/static/maigret.png" height="300" alt="Maigret logo"/>
|
||||
</div>
|
||||
<br>
|
||||
<div>
|
||||
<a href="README.md">English</a> · <b>简体中文</b>
|
||||
</div>
|
||||
<br>
|
||||
</div>
|
||||
|
||||
**Maigret** 仅凭一个用户名,就能在大量站点上查找其账号,并从网页中收集所有可获取的公开信息,为目标人物生成一份档案。无需任何 API 密钥。
|
||||
|
||||
## 目录
|
||||
|
||||
- [一分钟上手](#one-minute)
|
||||
- [核心特性](#main-features)
|
||||
- [演示](#demo)
|
||||
- [安装](#installation)
|
||||
- [使用](#usage)
|
||||
- [参与贡献](#contributing)
|
||||
- [商业使用](#commercial-use)
|
||||
- [关于](#about)
|
||||
|
||||
<a id="one-minute"></a>
|
||||
## 一分钟上手
|
||||
|
||||
请先确认本机的 Python 版本不低于 3.10。
|
||||
|
||||
```bash
|
||||
pip install maigret
|
||||
maigret YOUR_USERNAME
|
||||
```
|
||||
|
||||
不想本地安装?可以试试 [Telegram 机器人](https://t.me/maigret_search_bot),或者使用[云端 Shell](#cloud-shells)。
|
||||
|
||||
想要一个 Web 界面?参见[启动方式](#web-interface)。
|
||||
|
||||
延伸阅读:[快速入门](https://maigret.readthedocs.io/en/latest/quick-start.html)。
|
||||
|
||||
<a id="main-features"></a>
|
||||
## 核心特性
|
||||
|
||||
- 支持 3000+ 站点(完整列表见 [sites.md](https://github.com/soxoj/maigret/blob/main/sites.md))。默认仅检查访问量排名前 500 的站点;加上 `-a` 可全量扫描,或使用 `--tags` 按分类/国家筛选。
|
||||
- 可作为 Python 库嵌入到自己的项目中——直接 `import maigret` 即可在代码里发起搜索(参见[库使用文档](https://maigret.readthedocs.io/en/latest/library-usage.html))。
|
||||
- 通过 [socid_extractor](https://github.com/soxoj/socid_extractor) 从个人主页和站点 API 中[提取](https://github.com/soxoj/socid_extractor)账号所有者的所有可获取信息,包括指向其他账号的链接。
|
||||
- 基于已发现的用户名和其他 ID,执行递归搜索。
|
||||
- 支持按标签(站点分类、国家)进行筛选。
|
||||
- 能够检测并部分绕过封锁、审查和 CAPTCHA。
|
||||
- 每次运行时(每 24 小时一次)从 GitHub 拉取一份[自动更新的站点数据库](https://maigret.readthedocs.io/en/latest/settings.html#database-auto-update);离线时会回退到内置数据库。
|
||||
- 可访问 Tor 与 I2P 站点;支持检查域名。
|
||||
- 自带一个 [Web 界面](#web-interface),可在同一页面将结果以图谱方式浏览,并下载各种格式的报告。
|
||||
- 可选的 [AI 分析模式](#ai-analysis)(`--ai`),通过 OpenAI 兼容 API 将原始搜索结果整理成一份简短的调查摘要。
|
||||
|
||||
完整特性列表请见[特性文档](https://maigret.readthedocs.io/en/latest/features.html)。
|
||||
|
||||
### 谁在使用
|
||||
|
||||
基于 Maigret 构建的专业 OSINT 与社交媒体分析工具:
|
||||
|
||||
<a href="https://github.com/SocialLinks-IO/sociallinks-api"><img height="60" alt="Social Links API" src="https://github.com/user-attachments/assets/789747b2-d7a0-4d4e-8868-ffc4427df660"></a>
|
||||
<a href="https://sociallinks.io/products/sl-crimewall"><img height="60" alt="Social Links Crimewall" src="https://github.com/user-attachments/assets/0b18f06c-2f38-477b-b946-1be1a632a9d1"></a>
|
||||
<a href="https://usersearch.ai/"><img height="60" alt="UserSearch" src="https://github.com/user-attachments/assets/66daa213-cf7d-40cf-9267-42f97cf77580"></a>
|
||||
|
||||
<a id="demo"></a>
|
||||
## 演示
|
||||
|
||||
### 视频
|
||||
|
||||
<a href="https://asciinema.org/a/Ao0y7N0TTxpS0pisoprQJdylZ">
|
||||
<img src="https://asciinema.org/a/Ao0y7N0TTxpS0pisoprQJdylZ.svg" alt="asciicast" width="600">
|
||||
</a>
|
||||
|
||||
### 报告示例
|
||||
|
||||
[PDF 报告](https://raw.githubusercontent.com/soxoj/maigret/main/static/report_alexaimephotographycars.pdf)、[HTML 报告](https://htmlpreview.github.io/?https://raw.githubusercontent.com/soxoj/maigret/main/static/report_alexaimephotographycars.html)
|
||||
|
||||

|
||||
|
||||

|
||||
|
||||
[完整的命令行输出示例](https://raw.githubusercontent.com/soxoj/maigret/main/static/recursive_search.md)
|
||||
|
||||
<a id="installation"></a>
|
||||
## 安装
|
||||
|
||||
如果你已经按[一分钟上手](#one-minute)的步骤跑通了,就无需再装。下面列出几种可选的安装方式。
|
||||
|
||||
什么都不想装?直接用 [Telegram 机器人](https://t.me/maigret_search_bot)。
|
||||
|
||||
### Windows
|
||||
|
||||
从 [Releases](https://github.com/soxoj/maigret/releases) 下载独立的 EXE 文件。视频指引:https://youtu.be/qIgwTZOmMmM。
|
||||
|
||||
<a id="cloud-shells"></a>
|
||||
### 云端 Shell
|
||||
|
||||
通过云端 Shell 或 Jupyter Notebook 在浏览器里运行 Maigret:
|
||||
|
||||
<a href="https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/soxoj/maigret&tutorial=cloudshell-tutorial.md"><img src="https://user-images.githubusercontent.com/27065646/92304704-8d146d80-ef80-11ea-8c29-0deaabb1c702.png" alt="Open in Cloud Shell" height="50"></a>
|
||||
<a href="https://repl.it/github/soxoj/maigret"><img src="https://replit.com/badge/github/soxoj/maigret" alt="Run on Replit" height="50"></a>
|
||||
|
||||
<a href="https://colab.research.google.com/gist/soxoj/879b51bc3b2f8b695abb054090645000/maigret-collab.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab" height="45"></a>
|
||||
<a href="https://mybinder.org/v2/gist/soxoj/9d65c2f4d3bec5dd25949197ea73cf3a/HEAD"><img src="https://mybinder.org/badge_logo.svg" alt="Open In Binder" height="45"></a>
|
||||
|
||||
### 本地安装(pip)
|
||||
|
||||
```bash
|
||||
# 从 PyPI 安装
|
||||
pip3 install maigret
|
||||
|
||||
# 使用
|
||||
maigret username
|
||||
```
|
||||
|
||||
### 从源码安装
|
||||
|
||||
```bash
|
||||
# 也可以克隆仓库后手动安装
|
||||
git clone https://github.com/soxoj/maigret && cd maigret
|
||||
|
||||
# 构建并安装
|
||||
pip3 install .
|
||||
|
||||
# 使用
|
||||
maigret username
|
||||
```
|
||||
|
||||
### Docker
|
||||
|
||||
官方提供两个镜像变体:
|
||||
|
||||
- `soxoj/maigret:latest` —— CLI 模式(默认)
|
||||
- `soxoj/maigret:web` —— 自动启动 [Web 界面](#web-interface)
|
||||
|
||||
```bash
|
||||
# 拉取官方镜像(CLI)
|
||||
docker pull soxoj/maigret
|
||||
|
||||
# CLI 用法
|
||||
docker run -v /mydir:/app/reports soxoj/maigret:latest username --html
|
||||
|
||||
# Web UI(在 http://localhost:5000 打开)
|
||||
docker run -p 5000:5000 soxoj/maigret:web
|
||||
|
||||
# 自定义 Web UI 端口
|
||||
docker run -e PORT=8080 -p 8080:8080 soxoj/maigret:web
|
||||
|
||||
# 手动构建
|
||||
docker build -t maigret . # CLI 镜像(默认 target)
|
||||
docker build --target web -t maigret-web . # Web UI 镜像
|
||||
```
|
||||
|
||||
### 故障排查
|
||||
|
||||
构建报错?请见[故障排查指南](https://maigret.readthedocs.io/en/latest/installation.html#troubleshooting)。
|
||||
|
||||
<a id="usage"></a>
|
||||
## 使用
|
||||
|
||||
### 示例
|
||||
|
||||
```bash
|
||||
# 生成 HTML、PDF、XMind 8 报告
|
||||
maigret user --html
|
||||
maigret user --pdf
|
||||
maigret user --xmind # 与 XMind 2022+ 不兼容
|
||||
|
||||
# 机器可读的导出格式
|
||||
maigret user --json ndjson # 行分隔 JSON(也支持 --json simple)
|
||||
maigret user --csv
|
||||
maigret user --txt
|
||||
maigret user --graph # 交互式 D3 图谱(HTML)
|
||||
|
||||
# 仅在带有 photo 与 dating 标签的站点上搜索
|
||||
maigret user --tags photo,dating
|
||||
|
||||
# 仅在带有 us 标签的站点上搜索
|
||||
maigret user --tags us
|
||||
|
||||
# 同时在所有站点上搜索三个用户名
|
||||
maigret user1 user2 user3 -a
|
||||
|
||||
# AI 辅助调查摘要(需要 OPENAI_API_KEY)
|
||||
maigret user --ai
|
||||
```
|
||||
|
||||
完整选项请运行 `maigret --help`。文档:[命令行选项](https://maigret.readthedocs.io/en/latest/command-line-options.html)、[更多示例](https://maigret.readthedocs.io/en/latest/usage-examples.html)。遇到 403 或超时?参见 [TROUBLESHOOTING.md](TROUBLESHOOTING.md)。
|
||||
|
||||
<a id="web-interface"></a>
|
||||
### Web 界面
|
||||
|
||||
Maigret 内置一个 Web UI,提供结果图谱视图和报告下载。
|
||||
|
||||
<details>
|
||||
<summary>Web 界面截图</summary>
|
||||
|
||||

|
||||
|
||||

|
||||
|
||||
</details>
|
||||
|
||||
```console
|
||||
maigret --web 5000
|
||||
```
|
||||
|
||||
在浏览器中打开 http://127.0.0.1:5000,输入用户名即可查看结果。
|
||||
|
||||
### Python 库
|
||||
|
||||
**Maigret 可以嵌入到你自己的 Python 项目里使用。** CLI 只是对一个异步函数的薄包装,你完全可以直接调用它——构建自定义流水线、把结果接入自家工具,或将其嵌入更大的 OSINT 工作流。
|
||||
|
||||
完整示例(包含异步用法和按标签筛选站点)请参见[库使用指南](https://maigret.readthedocs.io/en/latest/library-usage.html)。
|
||||
|
||||
### 常用 CLI 参数
|
||||
|
||||
- `--parse URL` —— 解析一个个人主页,从中提取 ID/用户名,并以此为起点发起递归搜索。
|
||||
- `--permute` —— 基于两个或更多输入生成可能的用户名变体(例如 `john doe` → `johndoe`、`j.doe` …)并对其逐一搜索。
|
||||
- `--self-check [--auto-disable]` —— 维护者用于核对数据库的工具:针对线上站点验证 `usernameClaimed` / `usernameUnclaimed` 配对是否仍然有效。
|
||||
- `--ai` / `--ai-model` —— 启用 [AI 分析](#ai-analysis),将搜索结果交给 OpenAI 兼容 API,并把简短的调查摘要流式输出到终端。
|
||||
|
||||
<a id="ai-analysis"></a>
|
||||
### AI 分析
|
||||
|
||||
`--ai` 会先收集搜索结果、在内存中构建 Markdown 报告,再将其发送到一个 OpenAI 兼容的 chat completion 接口,生成一份简短、克制的调查摘要(最可能的真实姓名、所在地、职业、兴趣、语言、置信度以及后续线索)。开启该模式后,逐站点的进度输出会被静默,模型的输出会以流式方式打印到 stdout。
|
||||
|
||||
```bash
|
||||
export OPENAI_API_KEY=sk-...
|
||||
maigret user --ai
|
||||
|
||||
# 切换到其它模型
|
||||
maigret user --ai --ai-model gpt-4o-mini
|
||||
```
|
||||
|
||||
API key 也可以写入 `settings.json` 的 `openai_api_key` 字段。接口地址默认为 `https://api.openai.com/v1`,通过在 `settings.json` 中设置 `openai_api_base_url`,可以指向任何 OpenAI 兼容的服务(Azure OpenAI、OpenRouter、本地推理服务等)。完整选项见[配置文档](https://maigret.readthedocs.io/en/latest/settings.html)。
|
||||
|
||||
### Tor / I2P / 代理
|
||||
|
||||
Maigret 支持通过代理、Tor 或 I2P 转发请求——这对访问 `.onion` / `.i2p` 站点,以及绕过会拦截数据中心 IP 的 WAF 都很有用。
|
||||
|
||||
```bash
|
||||
# 任意 HTTP/SOCKS 代理
|
||||
maigret user --proxy socks5://127.0.0.1:1080
|
||||
|
||||
# Tor(默认网关 socks5://127.0.0.1:9050)
|
||||
maigret user --tor-proxy socks5://127.0.0.1:9050
|
||||
|
||||
# I2P(默认网关 http://127.0.0.1:4444)
|
||||
maigret user --i2p-proxy http://127.0.0.1:4444
|
||||
```
|
||||
|
||||
请先启动 Tor / I2P 守护进程再运行上述命令——Maigret 不会替你管理这些网关。
|
||||
|
||||
<a id="contributing"></a>
|
||||
## 参与贡献
|
||||
|
||||
请精确地在 `data.json` 里新增或修复站点(不要使用 `json.load`/`json.dump` 整体读写),然后运行 `./utils/update_site_data.py` 重新生成 `sites.md` 和数据库元数据,再提交 Pull Request。更多细节见 [CONTRIBUTING 指南](https://github.com/soxoj/maigret/blob/main/CONTRIBUTING.md) 和[开发文档](https://maigret.readthedocs.io/en/latest/development.html)。版本历史见 [CHANGELOG.md](CHANGELOG.md)。
|
||||
|
||||
<a id="commercial-use"></a>
|
||||
## 商业使用
|
||||
|
||||
开源版本的 Maigret 采用 MIT 许可证,可不受限制地用于商业用途——但站点检查会随时间失效,需要持续维护。
|
||||
|
||||
如果你有更严肃的商业需求——希望使用**每日更新的站点数据库**或**用户名查询 API**——欢迎联系:📧 [maigret@soxoj.com](mailto:maigret@soxoj.com)
|
||||
|
||||
- 私有站点数据库 —— 5000+ 站点,每日更新(独立于公开开源数据库)
|
||||
- 用户名查询 API —— 将 Maigret 集成进你的产品
|
||||
|
||||
<a id="about"></a>
|
||||
## 关于
|
||||
|
||||
### 免责声明
|
||||
|
||||
**仅供教育与合法用途。** 使用者需自行承担遵守所在司法辖区相关法律(GDPR、CCPA 等)的责任。作者不对任何滥用行为负责。
|
||||
|
||||
### 反馈
|
||||
|
||||
[提交 issue](https://github.com/soxoj/maigret/issues) · [GitHub Discussions](https://github.com/soxoj/maigret/discussions) · [Telegram](https://t.me/soxoj)
|
||||
|
||||
### SOWEL 分类
|
||||
|
||||
涉及到的 OSINT 技术:
|
||||
- [SOTL-2.2. Search For Accounts On Other Platforms](https://sowel.soxoj.com/other-platform-accounts)
|
||||
- [SOTL-6.1. Check Logins Reuse To Find Another Account](https://sowel.soxoj.com/logins-reuse)
|
||||
- [SOTL-6.2. Check Nicknames Reuse To Find Another Account](https://sowel.soxoj.com/nicknames-reuse)
|
||||
|
||||
### 许可证
|
||||
|
||||
MIT © [Maigret](https://github.com/soxoj/maigret)
|
||||
@@ -0,0 +1,91 @@
|
||||
# Troubleshooting
|
||||
|
||||
Common issues when running Maigret and how to fix them. If none of this helps, [open an issue](https://github.com/soxoj/maigret/issues) with the output of `maigret --version` and the exact command you ran.
|
||||
|
||||
## "Lots of sites fail / timeout / return 403"
|
||||
|
||||
This is by far the most common report. It almost always comes from anti-bot protection (Cloudflare, DDoS-Guard, Akamai, etc.) or a slow network — not from a bug in Maigret.
|
||||
|
||||
**Results vary a lot depending on where you run from.** The same command on the same username can produce very different output on:
|
||||
|
||||
- **Mobile internet** (4G/5G) — usually the best results. Carrier NAT shares your IP with thousands of real users, so WAFs rarely block it.
|
||||
- **Home broadband** — generally good, though some ISPs are reputation-flagged.
|
||||
- **Hosting / cloud / VPS infrastructure** (AWS, GCP, DigitalOcean, Hetzner, etc.) — the worst case. Datacenter IP ranges are blanket-blocked or challenged by most WAFs, so you will see many false negatives and 403s.
|
||||
|
||||
If a run looks suspiciously empty, **try a different network before assuming Maigret is broken**: tether from your phone, switch between Wi-Fi and mobile, or move the run off a VPS onto a residential machine. Comparing results across two networks is also the fastest way to tell whether a missing account is genuinely missing or just blocked on the current IP.
|
||||
|
||||
Once you have a sense of the baseline, try these tweaks in order:
|
||||
|
||||
1. **Raise the timeout.** The default is 30 seconds. On mobile networks or for slow sites, bump it:
|
||||
```bash
|
||||
maigret user --timeout 60
|
||||
```
|
||||
2. **Retry failed checks.** Transient 5xx / timeouts often clear on a second try:
|
||||
```bash
|
||||
maigret user --retries 2
|
||||
```
|
||||
3. **Lower parallelism.** Some WAFs rate-limit aggressively. Maigret defaults to 100 concurrent connections (`-n` / `--max-connections`) — dropping this makes you look less like a scanner:
|
||||
```bash
|
||||
maigret user -n 20
|
||||
```
|
||||
4. **Route through a residential proxy.** Datacenter IPs (AWS, GCP, DigitalOcean) are blanket-blocked by many WAFs. A residential / mobile proxy usually fixes this:
|
||||
```bash
|
||||
maigret user --proxy http://user:pass@residential-proxy:port
|
||||
```
|
||||
Note: Tor (`--tor-proxy`) rarely helps here — most WAFs block Tor exit nodes just as aggressively as datacenter IPs. Use Tor only when you actually need to reach `.onion` sites (see below).
|
||||
|
||||
If specific sites *always* fail regardless of the above, they are likely broken in the database (stale markers, new WAF, site redesign). Report them with `--print-errors` output so a maintainer can look at the check config.
|
||||
|
||||
## "No results at all" / "maigret: command not found"
|
||||
|
||||
- **`command not found`** — `pip install maigret` put the binary under `~/.local/bin` (Linux/macOS) or `%APPDATA%\Python\Scripts` (Windows). Add that directory to `PATH` (a snippet follows below), or run `python3 -m maigret user` instead.
|
||||
- **Empty output** — check that you actually passed a username; `maigret` alone prints help. Also confirm Python 3.10+ with `python3 --version`.
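
For the `command not found` case, a minimal PATH fix on Linux/macOS looks like this (the exact directory depends on how pip installed the entry point):

```bash
# make user-installed entry points visible to the shell
export PATH="$HOME/.local/bin:$PATH"
maigret --version
```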
|
||||
|
||||
## "SSL / certificate errors"
|
||||
|
||||
Usually caused by a corporate MITM proxy or an outdated `certifi` bundle.
|
||||
|
||||
```bash
|
||||
pip install --upgrade certifi
|
||||
```
|
||||
|
||||
If you are behind a corporate proxy, set `HTTPS_PROXY` / `HTTP_PROXY` environment variables and pass `--proxy "$HTTPS_PROXY"` so Maigret uses the same route.
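
For example (the proxy host and port are placeholders for your environment):

```bash
export HTTPS_PROXY=http://proxy.example.internal:3128
export HTTP_PROXY="$HTTPS_PROXY"
maigret user --proxy "$HTTPS_PROXY"
```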
|
||||
|
||||
## ".onion / .i2p sites are skipped"
|
||||
|
||||
These sites only load through the matching gateway. Start your Tor or I2P daemon first, then:
|
||||
|
||||
```bash
|
||||
# Tor
|
||||
maigret user --tor-proxy socks5://127.0.0.1:9050
|
||||
|
||||
# I2P
|
||||
maigret user --i2p-proxy http://127.0.0.1:4444
|
||||
```
|
||||
|
||||
Maigret does not launch or manage these daemons — they must already be running.
|
||||
|
||||
## "The PDF / XMind / HTML report looks wrong"
|
||||
|
||||
- **PDF** — requires `weasyprint` and its system dependencies (Pango, Cairo, GDK-PixBuf). On Debian/Ubuntu: `apt install libpango-1.0-0 libpangoft2-1.0-0`. macOS: `brew install pango`.
|
||||
- **XMind** — the `--xmind` flag generates **XMind 8** files. XMind 2022+ (Zen / XMind 2023) uses a different format and will not open them. Use XMind 8 or convert via `--html`.
|
||||
- **HTML** — if it looks unstyled, open it through a local file path (`file:///...`), not via a preview pane that strips CSS.
|
||||
|
||||
## "The site database is out of date"
|
||||
|
||||
Maigret auto-fetches a fresh `data.json` from GitHub once every 24 hours. To force-refresh now:
|
||||
|
||||
```bash
|
||||
maigret user --force-update
|
||||
```
|
||||
|
||||
To run entirely against the local built-in copy (e.g. offline):
|
||||
|
||||
```bash
|
||||
maigret user --no-autoupdate
|
||||
```
|
||||
|
||||
## Still stuck?
|
||||
|
||||
- [Open an issue](https://github.com/soxoj/maigret/issues) — include your OS, Python version, Maigret version, and the full command.
|
||||
- Ask in [GitHub Discussions](https://github.com/soxoj/maigret/discussions) or the [Telegram](https://t.me/soxoj) channel.
|
||||
@@ -0,0 +1,69 @@
|
||||
# Maigret
|
||||
|
||||
<div align="center">
|
||||
<img src="https://raw.githubusercontent.com/soxoj/maigret/main/static/maigret.png" height="220" alt="Maigret logo"/>
|
||||
</div>
|
||||
|
||||
**Maigret** collects a dossier on a person **by username only**, checking for accounts on a huge number of sites and gathering all the available information from web pages. No API keys required.
|
||||
|
||||
## Installation
|
||||
|
||||
Google Cloud Shell does not ship with all the system libraries Maigret needs (`libcairo2-dev`, `pkg-config`). The helper script below installs them and then builds Maigret from the cloned source.
|
||||
|
||||
Copy the command and run it in the Cloud Shell terminal:
|
||||
|
||||
```bash
|
||||
./utils/cloudshell_install.sh
|
||||
```
|
||||
|
||||
When the script finishes, verify the install:
|
||||
|
||||
```bash
|
||||
maigret --version
|
||||
```
|
||||
|
||||
## Usage examples
|
||||
|
||||
Run a basic search for a username. By default Maigret checks the **500 highest-ranked sites by traffic** — pass `-a` to scan the full 3,000+ database.
|
||||
|
||||
```bash
|
||||
maigret soxoj
|
||||
```
|
||||
|
||||
Search several usernames at once:
|
||||
|
||||
```bash
|
||||
maigret user1 user2 user3
|
||||
```
|
||||
|
||||
Narrow the run to sites related to cryptocurrency via the `crypto` tag (you can also use country tags):
|
||||
|
||||
```bash
|
||||
maigret vitalik.eth --tags crypto
|
||||
```
|
||||
|
||||
Generate reports in HTML, PDF, and XMind 8 formats:
|
||||
|
||||
```bash
|
||||
maigret soxoj --html
|
||||
maigret soxoj --pdf
|
||||
maigret soxoj --xmind
|
||||
```
|
||||
|
||||
Download a generated report from Cloud Shell to your local machine:
|
||||
|
||||
```bash
|
||||
cloudshell download reports/report_soxoj.pdf
|
||||
```
|
||||
|
||||
Tune reliability on flaky networks — raise the timeout and retry failed checks:
|
||||
|
||||
```bash
|
||||
maigret soxoj --timeout 60 --retries 2
|
||||
```
|
||||
|
||||
For the full list of options see `maigret --help` or the [CLI documentation](https://maigret.readthedocs.io/en/latest/command-line-options.html).
|
||||
|
||||
## Further reading
|
||||
|
||||
Full project documentation: [maigret.readthedocs.io](https://maigret.readthedocs.io/)
|
||||
@@ -161,6 +161,14 @@ ndjson (one report per username). E.g. ``--json ndjson``
|
||||
``-M``, ``--md`` - Generate a Markdown report (general report on all
|
||||
usernames). See :ref:`markdown-report` below.
|
||||
|
||||
``--ai`` - Run an AI-powered analysis of the search results using an
|
||||
OpenAI-compatible chat completion API. The internal Markdown report is
|
||||
sent to the model, which returns a short investigation summary that is
|
||||
streamed to the terminal. See :ref:`ai-analysis` below.
|
||||
|
||||
``--ai-model`` - Model name to use with ``--ai``. Defaults to
|
||||
``openai_model`` from settings (``gpt-4o`` out of the box).
|
||||
|
||||
``-fo``, ``--folderoutput`` - Results will be saved to this folder,
|
||||
``results`` by default. Will be created if it doesn't exist.
|
||||
|
||||
@@ -242,3 +250,51 @@ The Markdown format is optimized for LLM context windows. You can feed the repor
|
||||
|
||||
The structured Markdown with per-site sections makes it easy for AI tools to extract relationships, cross-reference identities, and identify patterns across accounts.
|
||||
|
||||
For a built-in alternative that calls the model for you and prints the
|
||||
summary directly, see :ref:`ai-analysis` below.
|
||||
|
||||
.. _ai-analysis:
|
||||
|
||||
AI analysis (built-in)
|
||||
----------------------
|
||||
|
||||
The ``--ai`` flag turns the search results into a short investigation
|
||||
summary by sending the internal Markdown report to an OpenAI-compatible
|
||||
chat completion API and streaming the model's reply to the terminal.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
export OPENAI_API_KEY=sk-...
|
||||
maigret username --ai
|
||||
|
||||
# use a smaller / cheaper model
|
||||
maigret username --ai --ai-model gpt-4o-mini
|
||||
|
||||
While ``--ai`` is active, per-site progress lines and the short text
|
||||
report at the end are suppressed so the streamed summary is the main
|
||||
output. The Markdown report itself is built in memory and is **not**
|
||||
written to disk by ``--ai`` alone — combine with ``--md`` if you also
|
||||
want the file on disk.
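
For example, to stream the summary and keep the Markdown report on disk in the same run:

.. code-block:: console

    maigret username --ai --md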
|
||||
|
||||
The summary follows a fixed format with sections for the most likely
|
||||
real name, location, occupation, interests, languages, main website,
|
||||
username variants, number of platforms, active years, a confidence
|
||||
rating, and a short list of follow-up leads. The model is instructed
|
||||
to rely only on what is supported by the report and to avoid mixing
|
||||
clearly unrelated profiles into the main identity.
|
||||
|
||||
**Configuration.** The API key is resolved from
|
||||
``settings.openai_api_key`` first, then from the ``OPENAI_API_KEY``
|
||||
environment variable. The endpoint defaults to
|
||||
``https://api.openai.com/v1`` and can be redirected to any
|
||||
OpenAI-compatible service (Azure OpenAI, OpenRouter, a local server,
|
||||
…) by setting ``openai_api_base_url`` in ``settings.json``. See
|
||||
:ref:`settings` for the full list of options.
|
||||
|
||||
.. note::
|
||||
|
||||
``--ai`` makes a network request to the configured chat completion
|
||||
endpoint and sends the full Markdown report (which contains the
|
||||
gathered profile data). Use it only with providers and accounts
|
||||
you trust with that data.
|
||||
|
||||
|
||||
@@ -96,7 +96,7 @@ You should make your git commits from your maigret git repo folder, or else the
|
||||
If you already know which site has a false-positive and want to fix it specifically, go to the next step.
|
||||
|
||||
Otherwise, simply run a search with a random username (e.g. `laiuhi3h4gi3u4hgt`) and check the results.
|
||||
|
||||
Alternatively, you can use `the Telegram bot <https://t.me/maigret_search_bot>`_.
|
||||
|
||||
2. Open the account link in your browser and check:
|
||||
|
||||
@@ -142,13 +142,30 @@ There are few options for sites data.json helpful in various cases:
|
||||
``protection`` (site protection tracking)
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
|
||||
The ``protection`` field records what kind of anti-bot protection a site uses. Maigret reads this field and automatically applies the appropriate bypass mechanism where one exists.
|
||||
|
||||
Two categories of tag:
|
||||
|
||||
- **Load-bearing.** Maigret changes its HTTP client or headers based on the tag. Currently only ``tls_fingerprint`` (switches to ``curl_cffi`` with Chrome-class TLS).
|
||||
- **Documentation-only.** Maigret does **not** change behavior based on the tag; it records *why* the site is hard so a future solver can target the right set of sites without re-auditing.
|
||||
|
||||
Within the documentation-only tags, there is a further split that dictates whether the site is ``disabled: true``:
|
||||
|
||||
- ``ip_reputation`` is the **only** doc-tag that **keeps the site enabled**. It means "works for most users, fails from datacenter/cloud IPs." Disabling would silently hide a working site from anyone with a clean IP. The fix is **external** to Maigret (residential IP or ``--proxy``).
|
||||
- ``cf_js_challenge``, ``cf_firewall``, ``aws_waf_js_challenge``, ``ddos_guard_challenge``, ``custom_bot_protection``, ``js_challenge`` all pair with ``disabled: true``. They mean "does not work for anyone right now"; the tag identifies the provider so that when a bypass ships, every site with that tag can be re-enabled in one pass.
|
||||
|
||||
Supported values:
|
||||
|
||||
|
||||
- ``tls_fingerprint`` *(load-bearing; site stays enabled)* — the site fingerprints the TLS handshake (JA3/JA4) and blocks non-browser clients. Maigret automatically uses ``curl_cffi`` with Chrome browser emulation to bypass this. Requires the ``curl_cffi`` package (included as a dependency). Examples: Instagram, NPM, Codepen, Kickstarter, Letterboxd.
|
||||
- ``ip_reputation`` *(documentation-only; site stays enabled)* — the site blocks requests from datacenter/cloud IPs regardless of headers or TLS. Cannot be bypassed automatically; run Maigret from a regular internet connection (not a datacenter) or use a proxy (``--proxy``). The site is **not** marked ``disabled`` because it continues to work for users on residential IPs. Examples: Reddit, Patreon, Figma, OnlyFans.
|
||||
- ``cf_js_challenge`` *(documentation-only; pair with ``disabled: true``)* — Cloudflare Managed Challenge / Turnstile JS challenge. Symptom: HTTP 403 with ``cf-mitigated: challenge`` header; body contains ``challenges.cloudflare.com``, ``_cf_chl_opt``, ``window._cf_chl``, or "Just a moment". Not bypassable via ``curl_cffi`` TLS impersonation (verified across Chrome 123/124/131, Safari 17/18, Firefox 133/135, Edge 101 — all return the same 403 challenge page); a real browser executing the challenge JS is required to obtain the clearance cookie. Sites stay ``disabled: true`` until a CF-challenge solver is integrated. Examples: DMOJ, Elakiri, Fanlore, Bdoutdoors, TheStudentRoom, forum.hr.
|
||||
- ``cf_firewall`` *(documentation-only; pair with ``disabled: true``)* — Cloudflare firewall rule / bot score block (WAF action=block, **not** action=challenge). Symptom: HTTP 403 served by Cloudflare (``server: cloudflare``, ``cf-ray`` header) **without** JS-challenge markers — body typically shows "Access denied", "Attention Required", or just a bare 1015/1016/1020 error page. Unlike ``ip_reputation``, residential IPs are **not** sufficient to bypass — Cloudflare decides based on a composite of bot score, TLS fingerprint, UA, ASN, and custom site-owner rules, so ``curl_cffi`` Chrome impersonation from a residential line still returns 403. Sites stay ``disabled: true`` until a per-site bypass (cookies, real browser, or residential+clean session) is found. Examples: Fark, Fodors, Huntingnet, Hunttalk.
|
||||
- ``aws_waf_js_challenge`` *(documentation-only; pair with ``disabled: true``)* — the site is protected by AWS WAF with a JavaScript challenge. Symptom: HTTP 202 with empty body and ``x-amzn-waf-action: challenge`` header (a token-granting challenge that requires executing the CAPTCHA/challenge JS bundle). Neither ``curl_cffi`` TLS impersonation nor User-Agent changes bypass this — a real browser or the official AWS WAF challenge-solver SDK is required. Sites stay ``disabled: true`` until a solver is integrated. Example: Dreamwidth.
|
||||
- ``ddos_guard_challenge`` *(documentation-only; pair with ``disabled: true``)* — DDoS-Guard (ddos-guard.net) anti-bot page. Symptom: HTTP 403 with ``server: ddos-guard`` header; body contains "DDoS-Guard". DDoS-Guard fingerprints different UAs per source IP, so a single User-Agent override does not work across environments; a JS-capable bypass or DDoS-Guard-aware solver is required. Sites stay ``disabled: true`` until a solver is integrated. Example: ForumHouse.
|
||||
- ``js_challenge`` *(documentation-only; pair with ``disabled: true``)* — **fallback** for JavaScript-challenge systems whose provider cannot be identified (custom in-house challenge pages that are not Cloudflare, AWS WAF, or any other recognized vendor). Prefer a provider-specific tag whenever the provider can be pinned down from response headers or body signatures.
|
||||
- ``custom_bot_protection`` *(documentation-only; pair with ``disabled: true``)* — **fallback** for non-JS-challenge bot protection served by a custom/in-house system (not Cloudflare, not AWS WAF, not DDoS-Guard). Typical symptom: HTTP 403 from the site's own origin server (``server: nginx``, AWS ELB, etc.) with a branded block page, returned regardless of TLS fingerprint or residential IP. Not generically bypassable; investigate per site (cookies, session, proxy geography). Examples: Hackerearth ("HackerEarth Guardian"), FreelanceJob (nginx-level block).
|
||||
|
||||
**Rule: prefer provider-specific protection tags.** When a site is blocked by an identifiable anti-bot vendor, always record the vendor in the tag (``cf_js_challenge``, ``cf_firewall``, ``aws_waf_js_challenge``, ``ddos_guard_challenge``, and future additions such as ``sucuri_challenge``, ``incapsula_challenge``). The generic ``js_challenge`` and ``custom_bot_protection`` tags are reserved for custom/unknown systems. Rationale: bypass solvers are inherently provider-specific (a Cloudflare Turnstile solver does not help with AWS WAF); recording the provider in advance lets us fan out fixes the moment a per-provider solver is added, without re-auditing every disabled site. The same principle applies to other protection categories when the provider is identifiable.
|
||||
|
||||
Example:
|
||||
|
||||
@@ -321,7 +338,7 @@ Documentations is auto-generated and auto-deployed from the ``docs`` directory.
|
||||
To manually update documentation:
|
||||
|
||||
1. Change something in the ``.rst`` files in the ``docs/source`` directory.
|
||||
|
||||
2. Install ``python -m pip install -e .`` in the docs directory.
|
||||
3. Run ``make singlehtml`` in the terminal in the docs directory.
|
||||
4. Open ``build/singlehtml/index.html`` in your browser to see the result.
|
||||
5. If everything is ok, commit and push your changes to GitHub.
|
||||
|
||||
@@ -147,6 +147,33 @@ Also, there is a short text report in the CLI output after the end of a searchin
|
||||
.. warning::
|
||||
XMind 8 mindmaps are incompatible with XMind 2022!
|
||||
|
||||
AI analysis
|
||||
-----------
|
||||
|
||||
Maigret can produce a short, human-readable investigation summary on top
|
||||
of the raw search results using the ``--ai`` flag. It builds the
|
||||
internal Markdown report, sends it to an OpenAI-compatible chat
|
||||
completion endpoint, and streams the model's reply directly to the
|
||||
terminal.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
export OPENAI_API_KEY=sk-...
|
||||
maigret username --ai
|
||||
|
||||
The summary uses a fixed format with the most likely real name,
|
||||
location, occupation, interests, languages, main website, username
|
||||
variants, number of platforms, active years, a confidence rating, and a
|
||||
short list of follow-up leads. While ``--ai`` is active, per-site
|
||||
progress and the short text report are suppressed so the streamed
|
||||
summary is the main output.
|
||||
|
||||
The endpoint, model, and API key are configured via ``settings.json``
|
||||
(``openai_api_key``, ``openai_model``, ``openai_api_base_url``) or the
|
||||
``OPENAI_API_KEY`` environment variable. Any OpenAI-compatible API can
|
||||
be used (Azure OpenAI, OpenRouter, a local server, …). See
|
||||
:ref:`ai-analysis` and :ref:`settings` for details.
|
||||
|
||||
Tags
|
||||
----
|
||||
|
||||
|
||||
@@ -29,6 +29,7 @@ You may be interested in:
|
||||
- :doc:`Usage examples <usage-examples>`
|
||||
- :doc:`Command line options <command-line-options>`
|
||||
- :doc:`Features list <features>`
|
||||
- :doc:`Library usage <library-usage>`
|
||||
|
||||
.. toctree::
|
||||
:hidden:
|
||||
@@ -39,8 +40,15 @@ You may be interested in:
|
||||
usage-examples
|
||||
command-line-options
|
||||
features
|
||||
library-usage
|
||||
philosophy
|
||||
supported-identifier-types
|
||||
tags
|
||||
settings
|
||||
development
|
||||
|
||||
.. toctree::
|
||||
:hidden:
|
||||
:caption: Use cases
|
||||
|
||||
use-cases/crypto
|
||||
|
||||
@@ -4,7 +4,7 @@ Installation
|
||||
============
|
||||
|
||||
Maigret can be installed using pip or Docker, or simply launched from the cloned repo.
|
||||
|
||||
Also, it is available online via `official Telegram bot <https://t.me/maigret_search_bot>`_,
|
||||
and the bot's source code is `available on GitHub <https://github.com/soxoj/maigret-tg-bot>`_.
|
||||
|
||||
Windows Standalone EXE-binaries
|
||||
@@ -45,8 +45,7 @@ Press one of the buttons below and follow the instructions to launch it in your
|
||||
Local installation from PyPi
|
||||
----------------------------
|
||||
|
||||
|
||||
Maigret ships with a bundled site database. After installation from PyPI (or any other method), it can **automatically fetch a newer compatible database from GitHub** when you run it—see :ref:`database-auto-update` in :doc:`settings`.
|
||||
|
||||
.. note::
|
||||
Python 3.10 or higher and pip are required; **Python 3.11 is recommended.**
|
||||
|
||||
@@ -0,0 +1,139 @@
|
||||
.. _library-usage:
|
||||
|
||||
Library usage
|
||||
=============
|
||||
|
||||
Maigret's CLI is a thin wrapper around an async Python API. You can embed Maigret in your own tools, pipelines, and OSINT workflows — no need to shell out.
|
||||
|
||||
This page covers the common patterns. For the full argument list of the underlying function, see ``maigret.checking.maigret`` in the source.
|
||||
|
||||
Installation
|
||||
------------
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
pip install maigret
|
||||
|
||||
Minimal example
|
||||
---------------
|
||||
|
||||
A working end-to-end search against the top 500 sites:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
from maigret import search as maigret_search
|
||||
from maigret.sites import MaigretDatabase
|
||||
|
||||
# Load the bundled site database
|
||||
db = MaigretDatabase().load_from_path(
|
||||
"maigret/resources/data.json"
|
||||
)
|
||||
|
||||
# Pick which sites to scan (same filtering the CLI uses)
|
||||
sites = db.ranked_sites_dict(top=500)
|
||||
|
||||
results = asyncio.run(
|
||||
maigret_search(
|
||||
username="soxoj",
|
||||
site_dict=sites,
|
||||
logger=logging.getLogger("maigret"),
|
||||
timeout=30,
|
||||
is_parsing_enabled=True,
|
||||
)
|
||||
)
|
||||
|
||||
for site_name, result in results.items():
|
||||
if result["status"].is_found():
|
||||
print(site_name, result["url_user"])
|
||||
|
||||
Key points:
|
||||
|
||||
- ``maigret_search`` is an ``async`` function — wrap it with ``asyncio.run(...)`` or ``await`` it from inside your own event loop.
|
||||
- ``is_parsing_enabled=True`` turns on ``socid_extractor`` so ``result["ids_data"]`` is populated with profile fields (bio, linked accounts, uids, etc.).
|
||||
- Each entry in the returned dict has a ``"status"`` object with ``is_found()``, plus ``url_user``, ``http_status``, ``rank``, ``ids_data``, and more.
|
||||
|
||||
Filtering sites
|
||||
---------------
|
||||
|
||||
``ranked_sites_dict`` accepts the same filters as the CLI:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
# All sites tagged as coding, top 200 by rank
|
||||
sites = db.ranked_sites_dict(top=200, tags=["coding"])
|
||||
|
||||
# Exclude NSFW and dating sites
|
||||
sites = db.ranked_sites_dict(excluded_tags=["nsfw", "dating"])
|
||||
|
||||
# Only specific sites by name
|
||||
sites = db.ranked_sites_dict(names=["GitHub", "Reddit", "VK"])
|
||||
|
||||
# Include disabled sites (useful for maintenance / self-check)
|
||||
sites = db.ranked_sites_dict(disabled=True)
|
||||
|
||||
Running inside an existing event loop
|
||||
-------------------------------------
|
||||
|
||||
If your application already runs an asyncio loop (FastAPI, aiohttp server, a Discord bot, etc.), ``await`` ``maigret_search`` directly instead of calling ``asyncio.run``:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
async def check_username(username: str) -> dict:
|
||||
results = await maigret_search(
|
||||
username=username,
|
||||
site_dict=sites,
|
||||
logger=logger,
|
||||
timeout=30,
|
||||
)
|
||||
return {
|
||||
name: r["url_user"]
|
||||
for name, r in results.items()
|
||||
if r["status"].is_found()
|
||||
}
|
||||
|
||||
Routing through a proxy
|
||||
-----------------------
|
||||
|
||||
The same proxy / Tor / I2P flags the CLI exposes are plain keyword arguments:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
results = await maigret_search(
|
||||
username="soxoj",
|
||||
site_dict=sites,
|
||||
logger=logger,
|
||||
proxy="socks5://127.0.0.1:1080",
|
||||
tor_proxy="socks5://127.0.0.1:9050", # used for .onion sites
|
||||
i2p_proxy="http://127.0.0.1:4444", # used for .i2p sites
|
||||
timeout=30,
|
||||
)
|
||||
|
||||
Full function signature
|
||||
-----------------------
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
async def maigret(
|
||||
username: str,
|
||||
site_dict: Dict[str, MaigretSite],
|
||||
logger,
|
||||
query_notify=None,
|
||||
proxy=None,
|
||||
tor_proxy=None,
|
||||
i2p_proxy=None,
|
||||
timeout=30,
|
||||
is_parsing_enabled=False,
|
||||
id_type="username",
|
||||
debug=False,
|
||||
forced=False,
|
||||
max_connections=100,
|
||||
no_progressbar=False,
|
||||
cookies=None,
|
||||
retries=0,
|
||||
check_domains=False,
|
||||
) -> QueryResultWrapper
|
||||
|
||||
See :doc:`command-line-options` for a description of each option — the semantics match the CLI flags one-to-one.
|
||||
@@ -3,6 +3,10 @@
|
||||
Philosophy
|
||||
==========
|
||||
|
||||
*The Commissioner Jules Maigret is a fictional French police detective, created by Georges Simenon.
|
||||
His investigation method is based on understanding the personality of different people and their
|
||||
interactions.*
|
||||
|
||||
TL;DR: Username => Dossier
|
||||
|
||||
Maigret is designed to gather all the available information about a person by their username.
|
||||
@@ -15,3 +19,23 @@ All this information forms some dossier, but it also useful for other tools and
|
||||
Each collected piece of data has a label of a certain format (for example, ``follower_count`` for the number
|
||||
of subscribers or ``created_at`` for account creation time) so that it can be parsed and analyzed by various
|
||||
systems and stored in databases.
|
||||
|
||||
Origins
|
||||
-------
|
||||
|
||||
Maigret started from studying what OSINT investigators actually use in practice — and from
|
||||
the realization that many popular tools do not deliver real investigative value. The original
|
||||
research behind this observation is summarized in the article
|
||||
`What's wrong with namecheckers <https://soxoj.medium.com/whats-wrong-with-namecheckers-981e5cba600e>`_.
|
||||
For a broader landscape of username-checking tools, see the curated
|
||||
`OSINT namecheckers list <https://github.com/soxoj/osint-namecheckers-list>`_.
|
||||
|
||||
Two ideas grew out of that research:
|
||||
|
||||
- `socid-extractor <https://github.com/soxoj/socid-extractor>`_ — a library focused on pulling
|
||||
structured identity data (user IDs, full names, linked accounts, bios, timestamps, etc.) out of
|
||||
account pages and public API responses, so that finding an account is not the end of the pipeline.
|
||||
- **Maigret** itself — which started as a fork of
|
||||
`Sherlock <https://github.com/sherlock-project/sherlock>`_ but has long since outgrown the
|
||||
original project in coverage, extraction depth, and check reliability. Today Maigret is used
|
||||
as a component by major OSINT vendors in their commercial products.
|
||||
|
||||
@@ -101,3 +101,51 @@ This is recommended for **Docker containers**, **CI pipelines**, and **air-gappe
|
||||
- URL of the metadata file (for custom mirrors)
|
||||
|
||||
**Using a custom database** with ``--db`` always skips auto-update — you are explicitly choosing your data source.
|
||||
|
||||
.. _ai-analysis-settings:
|
||||
|
||||
AI analysis
|
||||
-----------
|
||||
|
||||
The ``--ai`` flag (see :ref:`ai-analysis`) talks to an OpenAI-compatible
|
||||
chat completion API. Three settings control how that request is made:
|
||||
|
||||
.. list-table::
|
||||
:header-rows: 1
|
||||
:widths: 35 25 40
|
||||
|
||||
* - Setting
|
||||
- Default
|
||||
- Description
|
||||
* - ``openai_api_key``
|
||||
- ``""`` (empty)
|
||||
- API key. If empty, Maigret falls back to the ``OPENAI_API_KEY``
|
||||
environment variable.
|
||||
* - ``openai_model``
|
||||
- ``gpt-4o``
|
||||
- Default model name. Overridable per-run with ``--ai-model``.
|
||||
* - ``openai_api_base_url``
|
||||
- ``https://api.openai.com/v1``
|
||||
- Base URL of the chat completion API. Point this at any
|
||||
OpenAI-compatible service (Azure OpenAI, OpenRouter, a local
|
||||
server, …) to use it instead of OpenAI directly.
|
||||
|
||||
Example ``~/.maigret/settings.json`` snippet using a non-OpenAI
|
||||
endpoint:
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"openai_api_key": "sk-...",
|
||||
"openai_model": "gpt-4o-mini",
|
||||
"openai_api_base_url": "https://openrouter.ai/api/v1"
|
||||
}
|
||||
|
||||
The key resolution order is ``settings.openai_api_key`` → ``OPENAI_API_KEY``
|
||||
environment variable; the first non-empty value wins.
|
||||
|
||||
.. note::
|
||||
|
||||
``--ai`` sends the full internal Markdown report (which contains the
|
||||
gathered profile data) to the configured endpoint. Only use providers
|
||||
and accounts you trust with that data.
|
||||
|
||||
@@ -0,0 +1,147 @@
|
||||
.. _use-case-crypto:
|
||||
|
||||
Cryptocurrency & Web3 Investigations
|
||||
=====================================
|
||||
|
||||
Blockchain transactions are public, but the people behind wallets are not. Maigret helps bridge this gap by finding Web3 accounts tied to a username, revealing the person behind a pseudonymous crypto persona.
|
||||
|
||||
Why it matters
|
||||
--------------
|
||||
|
||||
Crypto investigations often start with a wallet address or an ENS name but hit a wall — the blockchain tells you *what* happened, not *who* did it. A username, however, is reused across platforms. If someone trades on OpenSea as ``zachxbt`` and posts on Warpcast as ``zachxbt``, Maigret connects the dots and builds a full profile.
|
||||
|
||||
Common scenarios:
|
||||
|
||||
- **Scam attribution.** A rug-pull promoter uses the same alias on Fragment (Telegram username marketplace), OpenSea, and a personal blog.
|
||||
- **Sanctions compliance.** Verifying whether a counterparty's online footprint matches known sanctioned individuals.
|
||||
- **Due diligence.** Before an OTC deal or DAO vote, checking whether the other party has a consistent online presence or is a freshly created sockpuppet.
|
||||
- **Stolen funds tracing.** A stolen NFT appears on OpenSea under a new account — but the username matches a Warpcast profile with real-world links.
|
||||
|
||||
Supported sites
|
||||
---------------
|
||||
|
||||
Maigret currently checks the following crypto and Web3 platforms:
|
||||
|
||||
.. list-table::
|
||||
:header-rows: 1
|
||||
:widths: 20 40 40
|
||||
|
||||
* - Site
|
||||
- What it reveals
|
||||
- Notes
|
||||
* - **OpenSea**
|
||||
- NFT collections, trading history, profile bio, linked website
|
||||
-
|
||||
* - **Rarible**
|
||||
- NFT marketplace profile, collections, listing history
|
||||
- Complements OpenSea for NFT attribution across marketplaces
|
||||
* - **Zora**
|
||||
- Zora Network profile, minted NFTs, creator activity
|
||||
- Ethereum L2 creator platform; useful for on-chain art attribution
|
||||
* - **Polymarket**
|
||||
- Prediction-market profile, positions, public portfolio P&L
|
||||
- Useful for political/financial prediction attribution
|
||||
* - **Warpcast** (Farcaster)
|
||||
- Decentralized social profile, posts, follower graph, Farcaster ID
|
||||
- Every Farcaster ID maps to an Ethereum address via the on-chain ID registry
|
||||
* - **Fragment**
|
||||
- Telegram username ownership, TON wallet address, purchase date and price
|
||||
- Valuable for linking Telegram identities to TON wallets
|
||||
* - **Paragraph**
|
||||
- Web3 blog/newsletter, ETH wallet address, linked Twitter handle
|
||||
- Richest cross-platform data among crypto sites
|
||||
* - **Tonometerbot**
|
||||
- TON wallet balance, subscriber count, NFT collection, rankings
|
||||
- TON blockchain analytics
|
||||
* - **Spatial**
|
||||
- Metaverse profile, linked social accounts (Discord, Twitter, Instagram, LinkedIn, TikTok)
|
||||
- Rich cross-platform links
|
||||
* - **Revolut.me**
|
||||
- Payment handle: first/last name, country code, base currency, supported payment methods
|
||||
- Not strictly Web3, but widely used by crypto OTC traders for fiat off-ramps; the public API returns structured KYC-adjacent data
|
||||
|
||||
Real-world example: zachxbt
|
||||
---------------------------
|
||||
|
||||
`ZachXBT <https://twitter.com/zachxbt>`_ is a well-known on-chain investigator. Let's see what Maigret can find from just the username ``zachxbt``:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
maigret zachxbt --tags crypto
|
||||
|
||||
Maigret finds 5 accounts and automatically extracts structured data from each:

**Fragment** — confirms the Telegram username ``@zachxbt`` is claimed, reveals the TON wallet address (``EQBisZrk...``), purchase price (10 TON), and date (January 2023).

**Paragraph** — the richest result. Returns the real name used on the platform (``ZachXBT``), bio (``Scam survivor turned 2D investigator``), an Ethereum wallet address (``0x23dBf066...``), and a linked Twitter handle (``zachxbt``). The ``wallet_address`` field is especially valuable — it directly links the pseudonym to an on-chain identity.

**Warpcast** — Farcaster profile with a Farcaster ID (``fid: 20931``), profile image, and social graph (33K followers). Every Farcaster ID is tied to an Ethereum address via the on-chain ID registry, so this is another on-chain anchor.

**OpenSea** — NFT marketplace profile with bio (``On-chain sleuth | 10x rug pull survivor``), avatar (hosted on ``seadn.io`` with an Ethereum address in the URL path), and a link to an external investigations page.

**Hive Blog** — blockchain-based blog account created in March 2025. Low activity (1 post), but it confirms the username is claimed across blockchain ecosystems.

From a single username, Maigret produces:

- **2 wallet addresses** — one TON (from Fragment), one Ethereum (from Paragraph)
- **1 confirmed Twitter handle** — ``zachxbt`` (from Paragraph)
- **1 Telegram username** — ``@zachxbt`` (from Fragment)
- **1 external link** — ``investigations.notion.site`` (from OpenSea)
- **Social graph data** — 33K Farcaster followers, blog activity timestamps

This is enough to pivot into blockchain analysis tools (Etherscan, Arkham, Nansen) using the wallet addresses, or into social media analysis using the Twitter handle.

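The explorer pivot itself is just URL construction. A trivial helper, using Etherscan's public address-page pattern (Arkham and Nansen have analogous pages); the address is a placeholder, not the truncated one above:

.. code-block:: python

   def pivot_links(eth_address: str) -> dict[str, str]:
       """Build explorer pivot URLs for an extracted wallet address."""
       return {
           "Etherscan": f"https://etherscan.io/address/{eth_address}",
           # Arkham / Nansen expose similar per-address pages; add as needed.
       }

   print(pivot_links("0x0000000000000000000000000000000000000000"))  # placeholder
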
Workflow: from username to wallet
---------------------------------

**Step 1: Search crypto platforms**

.. code-block:: console

   maigret <username> --tags crypto -v

Review the results. Pay attention to:

- **Fragment** — if the username is claimed, you get a TON wallet address directly.
- **Paragraph** — blog profiles often contain an ETH address and a Twitter handle.
- **Warpcast** — Farcaster IDs map to Ethereum addresses via the on-chain registry.
- **OpenSea** — avatar URLs sometimes contain wallet addresses in the path.

**Step 2: Expand with extracted identifiers**

Maigret automatically extracts additional identifiers from found profiles (real names, linked accounts, profile URLs) and recursively searches for them. This is enabled by default. If Maigret finds a linked Twitter handle on a Paragraph profile, it will automatically search for that handle across all sites.
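
The same expansion can be scripted by hand if you want to control the loop yourself. A rough sketch of one round, assuming the per-site ``ids_usernames`` mapping (extracted username to identifier type) that ``maigret.checking`` attaches to results; the real pipeline performs this recursion internally:

.. code-block:: python

   from maigret import search

   async def expand_once(username, site_dict, logger):
       """Run one search round and collect newly extracted usernames."""
       results = await search(username, site_dict=site_dict, logger=logger)
       found = set()
       for info in results.values():
           for extracted in (info.get("ids_usernames") or {}):
               if extracted != username:
                   found.add(extracted)
       # feed each candidate back into search() on the next round
       return found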

**Step 3: Cross-reference with non-crypto platforms**

The real power is connecting crypto personas to mainstream accounts. Drop the tag filter:

.. code-block:: console

   maigret <username> -a

This checks all 3000+ sites. A match on GitHub, Reddit, or a forum can reveal the person behind the wallet.

Workflow: from wallet to identity
---------------------------------

If you start with a wallet address rather than a username, you can use complementary tools to get a username first:

1. **ENS / Unstoppable Domains** — resolve the wallet address to a human-readable name (``vitalik.eth``). Then search that name in Maigret.
2. **Etherscan labels** — check if the address has a public label (exchange, known entity).
3. **Fragment** — search the TON wallet address to find which Telegram usernames it purchased.
4. **Arkham Intelligence / Nansen** — blockchain attribution platforms that may tag the address with a known identity.

Once you have a username candidate, feed it to Maigret.
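
For the first pivot, a hypothetical glue script, assuming web3.py's ENS helper (``from ens.auto import ns``, which needs a configured Ethereum provider) and the ``maigret`` CLI on ``PATH``; the address is a placeholder:

.. code-block:: python

   import subprocess

   from ens.auto import ns  # web3.py's auto-configured ENS instance

   address = "0x0000000000000000000000000000000000000000"  # placeholder
   name = ns.name(address)  # reverse record, e.g. "vitalik.eth", or None
   if name:
       subprocess.run(["maigret", name.removesuffix(".eth"), "--tags", "crypto"])
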
Tips
----

- **Username reuse is the #1 signal.** Crypto-native users often reuse their ENS name (``alice.eth``) or a variation (``alice_eth``, ``aliceeth``) across platforms. Try all variations (see the sketch after this list).
- **Fragment is uniquely valuable** because it directly links Telegram usernames to TON wallet addresses — a rare on-chain / off-chain bridge.
- **Warpcast profiles are Ethereum-native.** Every Farcaster account is tied to an Ethereum address via the ID registry contract. If you find a Warpcast profile, you implicitly have a wallet address.
- **Paragraph often has the richest data** — wallet address, Twitter handle, bio, and activity timestamps in a single API response.
- **Use** ``--exclude-tags`` **to skip irrelevant sites** when you're focused on crypto:

  .. code-block:: console

     maigret alice_eth --exclude-tags porn,dating,forum
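
A small helper for generating those variants before feeding them to Maigret; the patterns are illustrative, not exhaustive:

.. code-block:: python

   def username_variants(base: str) -> list[str]:
       """Enumerate common crypto-style username variants."""
       stem = base.removesuffix(".eth")
       return sorted({stem, f"{stem}.eth", f"{stem}_eth", f"{stem}eth"})

   print(username_variants("alice.eth"))
   # ['alice', 'alice.eth', 'alice_eth', 'aliceeth']
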
+12
-1
@@ -7,7 +7,18 @@ __author_email__ = 'soxoj@protonmail.com'

from .__version__ import __version__
from .checking import maigret as search
try:
    from .checking import maigret as search
except ImportError as e:
    raise ImportError(
        "Missing required dependency while starting Maigret.\n\n"
        "If installed from PyPI:\n"
        "  pip install -U maigret\n\n"
        "If running from a cloned repository:\n"
        "  pip install -e .\n\n"
        "Then run Maigret as:\n"
        "  python -m maigret <username>"
    ) from e
from .maigret import main as cli
from .sites import MaigretEngine, MaigretSite, MaigretDatabase
from .notify import QueryNotifyPrint as Notifier
+54
-3
@@ -7,7 +7,7 @@ from aiohttp import CookieJar

class ParsingActivator:
    @staticmethod
    def twitter(site, logger, cookies={}):
    def twitter(site, logger, cookies={}, **kwargs):
        headers = dict(site.headers)
        del headers["x-guest-token"]
        import requests

@@ -19,7 +19,7 @@ class ParsingActivator:
        site.headers["x-guest-token"] = guest_token

    @staticmethod
    def vimeo(site, logger, cookies={}):
    def vimeo(site, logger, cookies={}, **kwargs):
        headers = dict(site.headers)
        if "Authorization" in headers:
            del headers["Authorization"]

@@ -31,7 +31,58 @@ class ParsingActivator:
        site.headers["Authorization"] = "jwt " + jwt_token

    @staticmethod
    def weibo(site, logger):
    def onlyfans(site, logger, url=None, **kwargs):
        # Signing rules (static_param / checksum_indexes / checksum_constant / format / app_token)
        # live in data.json under OnlyFans.activation and rotate upstream every ~1–3 weeks.
        # If "Please refresh the page" keeps firing after activation, refresh them from:
        # https://raw.githubusercontent.com/DATAHOARDERS/dynamic-rules/main/onlyfans.json
        import hashlib
        import secrets
        import time as _time
        from urllib.parse import urlparse

        import requests

        act = site.activation
        static_param = act["static_param"]
        indexes = act["checksum_indexes"]
        constant = act["checksum_constant"]
        fmt = act["format"]
        init_url = act["url"]

        user_id = site.headers.get("user-id", "0") or "0"

        def _sign(path):
            t = str(int(_time.time() * 1000))
            msg = "\n".join([static_param, t, path, user_id]).encode()
            sha = hashlib.sha1(msg).hexdigest()
            cs = sum(ord(sha[i]) for i in indexes) + constant
            return t, fmt.format(sha, abs(cs))

        if site.headers.get("x-bc", "").strip("0") == "":
            site.headers["x-bc"] = secrets.token_hex(20)

        if not site.headers.get("cookie"):
            init_path = urlparse(init_url).path
            t, sg = _sign(init_path)
            hdrs = dict(site.headers)
            hdrs["time"] = t
            hdrs["sign"] = sg
            hdrs.pop("cookie", None)
            r = requests.get(init_url, headers=hdrs, timeout=15)
            jar = "; ".join(f"{k}={v}" for k, v in r.cookies.items())
            if jar:
                site.headers["cookie"] = jar
                logger.debug(f"OnlyFans init: got cookies {list(r.cookies.keys())}")

        target_path = urlparse(url).path if url else urlparse(init_url).path
        t, sg = _sign(target_path)
        site.headers["time"] = t
        site.headers["sign"] = sg
        logger.debug(f"OnlyFans signed {target_path} time={t}")

    @staticmethod
    def weibo(site, logger, **kwargs):
        headers = dict(site.headers)
        import requests

+162
@@ -0,0 +1,162 @@

"""Maigret AI Analysis Module

Provides AI-powered analysis of search results using OpenAI-compatible APIs.
"""

import asyncio
import json
import os
import sys
import threading

import aiohttp


def load_ai_prompt() -> str:
    """Load the AI system prompt from the resources directory."""
    maigret_path = os.path.dirname(os.path.realpath(__file__))
    prompt_path = os.path.join(maigret_path, "resources", "ai_prompt.txt")
    with open(prompt_path, "r", encoding="utf-8") as f:
        return f.read()


def resolve_api_key(settings) -> str | None:
    """Resolve OpenAI API key from settings or environment variable.

    Priority: settings.openai_api_key > OPENAI_API_KEY env var.
    """
    key = getattr(settings, "openai_api_key", None)
    if key:
        return key
    return os.environ.get("OPENAI_API_KEY")


class _Spinner:
    """Simple animated spinner for terminal output."""

    FRAMES = ["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"]

    def __init__(self, text=""):
        self.text = text
        self._stop = threading.Event()
        self._thread = None

    def start(self):
        self._thread = threading.Thread(target=self._spin, daemon=True)
        self._thread.start()

    def _spin(self):
        i = 0
        while not self._stop.is_set():
            frame = self.FRAMES[i % len(self.FRAMES)]
            sys.stderr.write(f"\r{frame} {self.text}")
            sys.stderr.flush()
            i += 1
            self._stop.wait(0.08)

    def stop(self):
        self._stop.set()
        if self._thread:
            self._thread.join()
        sys.stderr.write("\r\033[2K")
        sys.stderr.flush()


async def print_streaming(text: str, delay: float = 0.04):
    """Print text word by word with a delay, simulating streaming LLM output."""
    words = text.split(" ")
    for i, word in enumerate(words):
        if i > 0:
            sys.stdout.write(" ")
        sys.stdout.write(word)
        sys.stdout.flush()
        await asyncio.sleep(delay)
    sys.stdout.write("\n")
    sys.stdout.flush()


async def _check_response(resp):
    """Raise descriptive errors for non-success HTTP responses."""
    if resp.status == 401:
        raise RuntimeError("Invalid OpenAI API key (HTTP 401)")
    if resp.status == 429:
        raise RuntimeError("OpenAI API rate limit exceeded (HTTP 429)")
    if resp.status != 200:
        body = await resp.text()
        raise RuntimeError(f"OpenAI API error (HTTP {resp.status}): {body[:500]}")


async def _stream_response(resp, spinner, first_token):
    """Stream tokens from resp, display them, and return (first_token, full_analysis)."""
    full_response = []
    async for line in resp.content:
        decoded = line.decode("utf-8").strip()
        if not decoded or not decoded.startswith("data: "):
            continue
        data_str = decoded[len("data: "):]
        if data_str == "[DONE]":
            break
        try:
            chunk = json.loads(data_str)
        except json.JSONDecodeError:
            continue
        delta = chunk.get("choices", [{}])[0].get("delta", {})
        content = delta.get("content", "")
        if not content:
            continue
        if first_token:
            spinner.stop()
            print()
            first_token = False
        sys.stdout.write(content)
        sys.stdout.flush()
        full_response.append(content)
    return first_token, "".join(full_response)


async def get_ai_analysis(
    api_key: str,
    markdown_report: str,
    model: str = "gpt-4o",
    api_base_url: str = "https://api.openai.com/v1",
) -> str:
    """Send the markdown report to an OpenAI-compatible API and return the analysis.

    Uses streaming to display tokens as they arrive.
    Raises on HTTP errors with descriptive messages.
    """
    system_prompt = load_ai_prompt()

    url = f"{api_base_url.rstrip('/')}/chat/completions"
    headers = {
        "Authorization": f"Bearer {api_key}",
        "Content-Type": "application/json",
    }
    payload = {
        "model": model,
        "stream": True,
        "messages": [
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": markdown_report},
        ],
    }

    spinner = _Spinner("Analysing the data with AI...")
    spinner.start()
    first_token = True

    try:
        async with aiohttp.ClientSession() as session:
            async with session.post(url, json=payload, headers=headers) as resp:
                await _check_response(resp)
                first_token, analysis = await _stream_response(resp, spinner, first_token)
    except Exception:
        spinner.stop()
        raise

    if first_token:
        # No tokens received — stop spinner anyway
        spinner.stop()

    print()
    return analysis
+15
-5
@@ -61,8 +61,6 @@ class SimpleAiohttpChecker(CheckerBase):

        self.headers = None
        self.allow_redirects = True
        self.timeout = 0
        self.allow_redirects = True
        self.timeout = 0
        self.method = 'get'
        self.payload = None

@@ -249,9 +247,15 @@
    async def check(self) -> Tuple[Optional[str], int, Optional[CheckError]]:
        try:
            async with CurlCffiAsyncSession() as session:
                # Strip the User-Agent so curl_cffi can use the impersonated browser's
                # matching UA. Mixing a random UA with a Chrome TLS fingerprint trips
                # composite bot scoring (e.g. Cloudflare returns a JS challenge for
                # "Chrome 91 UA + Chrome 131 TLS"). Keep any site-specific custom headers.
                headers = {k: v for k, v in (self.headers or {}).items()
                           if k.lower() not in ('user-agent', 'connection')}
                kwargs = {
                    'url': self.url,
                    'headers': self.headers,
                    'headers': headers or None,
                    'allow_redirects': self.allow_redirects,
                    'timeout': self.timeout if self.timeout else 10,
                    'impersonate': self.browser_emulate,

@@ -347,7 +351,11 @@ def process_site_result(
    username = results_info["username"]
    is_parsing_enabled = results_info["parsing_enabled"]
    url = results_info.get("url_user")
    logger.info(url)
    url_probe = results_info.get("url_probe") or url
    if url_probe != url:
        logger.info(f"{url_probe} (display: {url})")
    else:
        logger.info(url)

    status = results_info.get("status")
    if status is not None:

@@ -605,6 +613,8 @@ def make_site_result(
        for k, v in site.get_params.items():
            url_probe += f"&{k}={v}"

    results_site["url_probe"] = url_probe

    if site.request_method:
        request_method = site.request_method.lower()
    elif site.check_type == "status_code" and site.request_head_only:

@@ -680,7 +690,7 @@ async def check_site_for_username(
            method = act["method"]
            try:
                activate_fun = getattr(ParsingActivator(), method)
                activate_fun(site, logger)
                activate_fun(site, logger, url=checker.url)
            except AttributeError as e:
                logger.warning(
                    f"Activation method {method} for site {site.name} not found!",
@@ -1,4 +1,5 @@

import asyncio
import inspect
import sys
import time
from typing import Any, Iterable, List, Callable

@@ -113,7 +114,7 @@ class AsyncioProgressbarQueueExecutor(AsyncExecutor):
    async def increment_progress(self, count):
        """Update progress by calling the provided progress function."""
        if self.progress:
            if asyncio.iscoroutinefunction(self.progress):
            if inspect.iscoroutinefunction(self.progress):
                await self.progress(count)
            else:
                self.progress(count)

@@ -124,7 +125,7 @@ class AsyncioProgressbarQueueExecutor(AsyncExecutor):
        """Stop the progress tracking."""
        if hasattr(self.progress, "close") and self.progress:
            close_func = self.progress.close
            if asyncio.iscoroutinefunction(close_func):
            if inspect.iscoroutinefunction(close_func):
                await close_func()
            else:
                close_func()

+93
-15
@@ -13,7 +13,19 @@ from argparse import ArgumentParser, RawDescriptionHelpFormatter

from typing import List, Tuple
import os.path as path

from socid_extractor import extract, parse  # type: ignore[import-not-found]
try:
    from socid_extractor import extract, parse
except ImportError as e:
    raise ImportError(
        "Missing dependency: socid_extractor\n\n"
        "If installed from PyPI:\n"
        "  pip install -U maigret\n\n"
        "If running from a cloned repository:\n"
        "  pip install -e .\n\n"
        "Then run Maigret as:\n"
        "  python -m maigret <username>"
    ) from e


from .__version__ import __version__
from .checking import (

@@ -494,6 +506,21 @@ def setup_arguments_parser(settings: Settings):
        " (one report per username).",
    )

    report_group.add_argument(
        "--ai",
        action="store_true",
        dest="ai",
        default=False,
        help="Generate an AI-powered analysis of the search results using OpenAI API. "
        "Requires OPENAI_API_KEY env var or openai_api_key in settings.",
    )
    report_group.add_argument(
        "--ai-model",
        dest="ai_model",
        default=settings.openai_model,
        help="OpenAI model to use for AI analysis (default: gpt-4o).",
    )

    parser.add_argument(
        "--reports-sorting",
        default=settings.report_sorting,

@@ -596,6 +623,7 @@ async def main():
        print_found_only=not args.print_not_found,
        skip_check_errors=not args.print_check_errors,
        color=not args.no_color,
        silent=args.ai,
    )

    # Create object with all information about sites we are aware of.

@@ -711,17 +739,33 @@ async def main():
            + get_dict_ascii_tree(usernames, prepend="\t")
        )

    if args.ai:
        from .ai import resolve_api_key

        if not resolve_api_key(settings):
            query_notify.warning(
                'AI analysis requires an OpenAI API key. '
                'Set OPENAI_API_KEY environment variable or add '
                'openai_api_key to settings.json.'
            )
            sys.exit(1)

    if not site_data:
        query_notify.warning('No sites to check, exiting!')
        sys.exit(2)

    query_notify.warning(
        f'Starting a search on top {len(site_data)} sites from the Maigret database...'
    )
    if not args.all_sites:
    if args.ai:
        query_notify.warning(
            'You can run search by full list of sites with flag `-a`', '!'
            f'Starting AI-assisted search on top {len(site_data)} sites from the Maigret database...'
        )
    else:
        query_notify.warning(
            f'Starting a search on top {len(site_data)} sites from the Maigret database...'
        )
    if not args.all_sites:
        query_notify.warning(
            'You can run search by full list of sites with flag `-a`', '!'
        )

    already_checked = set()
    general_results = []

@@ -774,11 +818,12 @@ async def main():
            check_domains=args.with_domains,
        )

        errs = errors.notify_about_errors(
            results, query_notify, show_statistics=args.verbose
        )
        for e in errs:
            query_notify.warning(*e)
        if not args.ai:
            errs = errors.notify_about_errors(
                results, query_notify, show_statistics=args.verbose
            )
            for e in errs:
                query_notify.warning(*e)

        if args.reports_sorting == "data":
            results = sort_report_by_data_points(results)

@@ -867,10 +912,43 @@ async def main():
            save_graph_report(filename, general_results, db)
            query_notify.warning(f'Graph report on all usernames saved in {filename}')

    text_report = get_plaintext_report(report_context)
    if text_report:
        query_notify.info('Short text report:')
        print(text_report)
    if not args.ai:
        text_report = get_plaintext_report(report_context)
        if text_report:
            query_notify.info('Short text report:')
            print(text_report)

    if args.ai:
        from .ai import get_ai_analysis, resolve_api_key
        from .report import generate_markdown_report

        api_key = resolve_api_key(settings)

        run_flags = []
        if args.tags:
            run_flags.append(f"--tags {args.tags}")
        if args.site_list:
            run_flags.append(f"--site {','.join(args.site_list)}")
        if args.all_sites:
            run_flags.append("--all-sites")
        run_info = {
            "sites_count": sum(len(d) for _, _, d in general_results),
            "flags": " ".join(run_flags) if run_flags else None,
        }

        md_report = generate_markdown_report(report_context, run_info=run_info)

        try:
            await get_ai_analysis(
                api_key=api_key,
                markdown_report=md_report,
                model=args.ai_model,
                api_base_url=getattr(
                    settings, 'openai_api_base_url', 'https://api.openai.com/v1'
                ),
            )
        except Exception as e:
            query_notify.warning(f'AI analysis failed: {e}')

    # update database
    db.save_to_file(db_file)

+10
-3
@@ -1,7 +1,6 @@

"""Sherlock Notify Module
"""Console and query notification helpers.

This module defines the objects for notifying the caller about the
results of queries.
This module defines objects for notifying the caller about the results of queries.
"""

import sys

@@ -124,6 +123,7 @@ class QueryNotifyPrint(QueryNotify):
        print_found_only=False,
        skip_check_errors=False,
        color=True,
        silent=False,
    ):
        """Create Query Notify Print Object.

@@ -150,6 +150,7 @@ class QueryNotifyPrint(QueryNotify):
        self.print_found_only = print_found_only
        self.skip_check_errors = skip_check_errors
        self.color = color
        self.silent = silent

        return

@@ -188,6 +189,9 @@ class QueryNotifyPrint(QueryNotify):
        Nothing.
        """

        if self.silent:
            return

        title = f"Checking {id_type}"
        if self.color:
            print(

@@ -237,6 +241,9 @@ class QueryNotifyPrint(QueryNotify):
        Return Value:
        Nothing.
        """
        if self.silent:
            return

        notify = None
        self.result = result

+16
-7
@@ -30,14 +30,18 @@ UTILS


def filter_supposed_data(data):
    # interesting fields
    allowed_fields = ["fullname", "gender", "location", "age"]
    filtered_supposed_data = {
        CaseConverter.snake_to_title(k): v[0]

    def _first(v):
        if isinstance(v, (list, tuple)):
            return v[0] if v else ""
        return v

    return {
        CaseConverter.snake_to_title(k): _first(v)
        for k, v in data.items()
        if k in allowed_fields
    }
    return filtered_supposed_data


def sort_report_by_data_points(results):

@@ -241,7 +245,7 @@ def save_graph_report(filename: str, username_results: list, db: MaigretDatabase
    # Generate interactive visualization
    from pyvis.network import Network  # type: ignore[import-untyped]

    nt = Network(notebook=True, height="750px", width="100%")
    nt = Network(notebook=True, height="100vh", width="100%")
    nt.from_nx(G)
    nt.show(filename)

@@ -267,7 +271,7 @@ def _md_format_value(value) -> str:
    return s


def save_markdown_report(filename: str, context: dict, run_info: dict = None):
def generate_markdown_report(context: dict, run_info: dict = None) -> str:
    username = context.get("username", "unknown")
    generated_at = context.get("generated_at", "")
    brief = context.get("brief", "")

@@ -391,8 +395,13 @@ def save_markdown_report(filename: str, context: dict, run_info: dict = None):
        "CCPA, and similar).\n"
    )

    return "\n".join(lines)


def save_markdown_report(filename: str, context: dict, run_info: dict = None):
    content = generate_markdown_report(context, run_info)
    with open(filename, "w", encoding="utf-8") as f:
        f.write("\n".join(lines))
        f.write(content)


"""

@@ -0,0 +1,62 @@

You are an OSINT analyst that converts raw username-investigation reports into a short, clean human-readable summary.

Your task:
Read the attached account-discovery report and produce a concise report in exactly this style:

# Investigation Summary

Name: <most likely real full name>
Location: <most likely current location>
Occupation: <short combined description based only on strong signals>
Interests: <3–6 broad interests inferred from platform types, bios, and activity>
Languages: <languages supported by strong evidence only>
Website: <main personal website if clearly present>
Username: <main username> (variant: <variant usernames if any>)
Platforms: <number> profiles, active from <first year> to <last year>
Confidence: <High / Medium / Low> — <one short explanation why>

# Other leads

- <lead 1>
- <lead 2>
- <lead 3 if needed>

Rules:
1. Use only information supported by the report.
2. Resolve identity using consistency of username, full name, bio, links, company, and location.
3. Prefer strong repeated signals over one-off weak signals.
4. If one profile clearly conflicts with the rest, mention it in "Other leads" as a likely false positive instead of mixing it into the main identity.
5. Keep the tone analytical and neutral.
6. Do not mention every platform individually.
7. Do not include raw URLs except for the main website.
8. Do not mention NSFW/adult platforms in the main summary unless they are the only source for a critical lead; if such a profile looks inconsistent, mention it only as a likely false positive.
9. "Occupation" should be a compact merged description, for example: "Chief Product Officer (CPO) at ..., entrepreneur, OSINT community founder".
10. "Interests" should be broad categories, not noisy tags. Convert raw platform/tag evidence into natural categories like OSINT, software development, blogging, gaming, streaming, etc.
11. "Languages" should only include languages clearly supported by bios, texts, country tags, or profile content.
12. For "Platforms", count the profiles reported as found by the report summary, not manually deduplicated.
13. For active years, use the earliest and latest reliable dates from the consistent identity cluster. Ignore obvious outlier dates if they belong to likely false positives or weak profiles.
14. For confidence:
    - High = strong consistency across username, name, bio, links, location, and/or company
    - Medium = partial consistency with some gaps
    - Low = mostly username-only matches
15. If some field is not reliably known, omit speculation and use the best cautious wording possible.
16. For "Name", output only the most likely real personal name in clean canonical form.
    - Remove nicknames, handles, aliases, or bracketed parts such as "(Soxoj)".
    - Example: "Dmitriy (Soxoj) Danilov" -> "Dmitriy Danilov".
17. For "Website", output only the plain domain or URL as text, not a markdown hyperlink.
18. In "Other leads", do not label conflicting profiles as "false positive", "likely unrelated", or "potentially a false positive".
    - Instead, use neutral intelligence wording such as:
      "Accounts were found that are most likely unrelated to the main identity, but may indicate possible cross-border activity and should be verified."
19. When describing anomalies in "Other leads", prefer cautious investigative phrasing:
    - "may be unrelated"
    - "requires verification"
    - "could indicate separate activity"
    - "should be checked manually"
20. Do not include nicknames or aliases inside the Name field unless they are clearly part of the legal or real-world name.

Output requirements:
- Return only the final formatted text.
- Keep it short.
- No preamble, no explanations.

Now analyze the following report
+2135
-1554
File diff suppressed because it is too large
@@ -1,8 +1,8 @@

{
    "version": 1,
    "updated_at": "2026-04-10T10:28:14Z",
    "sites_count": 3150,
    "updated_at": "2026-05-05T20:17:24Z",
    "sites_count": 3154,
    "min_maigret_version": "0.6.0",
    "data_sha256": "72a493fef4eb8958fe8ed0c9b895841ec10c335f1b8e5e9b24b50784be6ad017",
    "data_sha256": "acf9d9fef8412bf05fa09d50c1ae363e5c8394597b1aaa3f98a9a1c4e31ca356",
    "data_url": "https://raw.githubusercontent.com/soxoj/maigret/main/maigret/resources/data.json"
}
@@ -55,6 +55,9 @@

    "pdf_report": false,
    "html_report": false,
    "md_report": false,
    "openai_api_key": "",
    "openai_model": "gpt-4o",
    "openai_api_base_url": "https://api.openai.com/v1",
    "web_interface_port": 5000,
    "no_autoupdate": false,
    "db_update_meta_url": "https://raw.githubusercontent.com/soxoj/maigret/main/maigret/resources/db_meta.json",

+19
-3
@@ -181,7 +181,15 @@ class MaigretSite:

        if self.url_regexp:
            match_groups = self.url_regexp.match(url)
            if match_groups:
                return match_groups.groups()[-1].rstrip("/")
                username = next(
                    (
                        group.rstrip("/")
                        for group in reversed(match_groups.groups())
                        if isinstance(group, str) and group
                    ),
                    None,
                )
                return username

        return None

@@ -196,8 +204,16 @@ class MaigretSite:
        match_groups = self.url_regexp.match(url)
        if not match_groups:
            return None

        _id = match_groups.groups()[-1].rstrip("/")
        _id = next(
            (
                group.rstrip("/")
                for group in reversed(match_groups.groups())
                if isinstance(group, str) and group
            ),
            None,
        )
        if _id is None:
            return None
        _type = self.type

        return _id, _type

Generated
+607
-529
File diff suppressed because it is too large
@@ -1,5 +1,5 @@

maigret @ https://github.com/soxoj/maigret/archive/refs/heads/main.zip
pefile==2023.2.7  # do not bump while pyinstaller is 6.11.1, there is a conflict
psutil==7.2.2
pyinstaller==6.19.0
pyinstaller==6.20.0
pywin32-ctypes==0.2.3

+6
-1
@@ -15,6 +15,11 @@ repository = "https://github.com/soxoj/maigret"

classifiers = [
    "Development Status :: 5 - Production/Stable",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
    "Programming Language :: Python :: 3.13",
    "Programming Language :: Python :: 3.14",
    "Intended Audience :: Information Technology",
    "Operating System :: OS Independent",
    "License :: OSI Approved :: MIT License",

@@ -87,7 +92,7 @@ pytest-cov = ">=6,<8"
pytest-httpserver = "^1.0.0"
pytest-rerunfailures = ">=15.1,<17.0"
reportlab = "^4.4.3"
mypy = "^1.14.1"
mypy = ">=1.14.1,<3.0.0"
tuna = "^0.5.11"
coverage = "^7.9.2"
black = ">=25.1,<27.0"

@@ -3,4 +3,5 @@

filterwarnings =
    error
    ignore::UserWarning
    ignore:codecs.open\(\) is deprecated:DeprecationWarning:xmind.core.saver
asyncio_mode=auto
+1
-1
@@ -3,7 +3,7 @@ icon: static/maigret.png

name: maigret
summary: 🕵️♂️ Collect a dossier on a person by username from thousands of sites.
description: |
  **Maigret** collects a dossier on a person **by username only**, checking for accounts on a huge number of sites and gathering all the available information from web pages. No API keys required. Maigret is an easy-to-use and powerful fork of Sherlock.
  **Maigret** collects a dossier on a person **by username only**, checking for accounts on a huge number of sites and gathering all the available information from web pages. No API keys required.

  Currently supported more than 3000 sites, search is launched against 500 popular sites in descending order of popularity by default. Also supported checking of Tor sites, I2P sites, and domains (via DNS resolving).

@@ -56,3 +56,110 @@ async def test_import_aiohttp_cookies(cookie_test_server):

    print(f"Server response: {result}")

    assert result == {'cookies': {'a': 'b'}}


# ---- OnlyFans signing tests (pure-compute, no network) ----


class _FakeSite:
    """Minimal stand-in for MaigretSite with the attributes onlyfans() touches."""

    def __init__(self, headers=None, activation=None):
        self.headers = headers or {}
        self.activation = activation or {
            "static_param": "jLM8LXHU1CGcuCzPMNwWX9osCScVuP4D",
            "checksum_indexes": [28, 3, 16, 32, 25, 24, 23, 0, 26],
            "checksum_constant": -180,
            "format": "57203:{}:{:x}:69cfa6d8",
            "url": "https://onlyfans.com/api2/v2/init",
        }


class _FakeResponse:
    def __init__(self, cookies=None):
        self.cookies = cookies or {}


def test_onlyfans_sets_xbc_when_zero(monkeypatch):
    site = _FakeSite(headers={"x-bc": "0", "cookie": "existing=1"})

    # Prevent any real network. If _sign path still fires requests.get, fail loudly.
    import maigret.activation as act_mod

    def boom(*a, **kw):  # pragma: no cover - sanity
        raise AssertionError("requests.get should not run when cookie is present")

    monkeypatch.setattr(act_mod.__dict__.get("requests", None) or __import__("requests"), "get", boom, raising=False)

    logger = Mock()
    ParsingActivator.onlyfans(site, logger, url="https://onlyfans.com/api2/v2/users/adam")

    # x-bc must be rewritten to a non-zero hex token
    assert site.headers["x-bc"] != "0"
    assert len(site.headers["x-bc"]) == 40  # 20 bytes → 40 hex chars
    # time / sign headers set for target URL
    assert "time" in site.headers and site.headers["time"].isdigit()
    assert site.headers["sign"].startswith("57203:")


def test_onlyfans_fetches_init_cookie_when_missing(monkeypatch):
    """When cookie header is absent, init endpoint is called and its cookies stored."""
    site = _FakeSite(headers={"x-bc": "already_set_token", "user-id": "0"})

    import requests

    captured = {}

    def fake_get(url, headers=None, timeout=15):
        captured["url"] = url
        captured["headers"] = dict(headers or {})
        return _FakeResponse(cookies={"sess": "abc123", "csrf": "xyz"})

    monkeypatch.setattr(requests, "get", fake_get)

    logger = Mock()
    ParsingActivator.onlyfans(site, logger, url="https://onlyfans.com/api2/v2/users/adam")

    # init request made
    assert captured["url"] == site.activation["url"]
    # headers passed to init include freshly generated time/sign
    assert "time" in captured["headers"]
    assert captured["headers"]["sign"].startswith("57203:")
    # cookie header populated from response
    assert site.headers["cookie"] == "sess=abc123; csrf=xyz"


def test_onlyfans_signature_is_deterministic_for_same_time(monkeypatch):
    """Two calls with patched time produce identical signatures."""
    site1 = _FakeSite(headers={"x-bc": "token", "cookie": "c=1"})
    site2 = _FakeSite(headers={"x-bc": "token", "cookie": "c=1"})

    import maigret.activation
    monkeypatch.setattr(maigret.activation, "_time", __import__("time"), raising=False)

    fixed = 1_700_000_000.123
    import time as time_mod
    monkeypatch.setattr(time_mod, "time", lambda: fixed)

    logger = Mock()
    ParsingActivator.onlyfans(site1, logger, url="https://onlyfans.com/api2/v2/users/adam")
    ParsingActivator.onlyfans(site2, logger, url="https://onlyfans.com/api2/v2/users/adam")

    assert site1.headers["time"] == site2.headers["time"]
    assert site1.headers["sign"] == site2.headers["sign"]


def test_onlyfans_sign_differs_per_path(monkeypatch):
    """Different target URLs must yield different signatures."""
    site = _FakeSite(headers={"x-bc": "token", "cookie": "c=1"})

    import time as time_mod
    monkeypatch.setattr(time_mod, "time", lambda: 1_700_000_000.0)

    logger = Mock()
    ParsingActivator.onlyfans(site, logger, url="https://onlyfans.com/api2/v2/users/adam")
    sig_adam = site.headers["sign"]

    ParsingActivator.onlyfans(site, logger, url="https://onlyfans.com/api2/v2/users/bob")
    sig_bob = site.headers["sign"]

    assert sig_adam != sig_bob

@@ -1,7 +1,22 @@

from argparse import ArgumentTypeError

from mock import Mock
import pytest

from maigret import search
from maigret.checking import (
    detect_error_page,
    extract_ids_data,
    parse_usernames,
    update_results_info,
    get_failed_sites,
    timeout_check,
    debug_response_logging,
    process_site_result,
)
from maigret.errors import CheckError
from maigret.result import MaigretCheckResult, MaigretCheckStatus
from maigret.sites import MaigretSite


def site_result_except(server, username, **kwargs):

@@ -67,3 +82,386 @@ async def test_checking_by_message_negative(httpserver, local_test_db):

    result = await search('unclaimed', site_dict=sites_dict, logger=Mock())
    assert result['Message']['status'].is_found() is True


# ---- Pure-function unit tests (no network) ----


def test_detect_error_page_site_specific():
    err = detect_error_page(
        "Please enable JavaScript to proceed",
        200,
        {"Please enable JavaScript to proceed": "Scraping protection"},
        ignore_403=False,
    )
    assert err is not None
    assert err.type == "Site-specific"
    assert err.desc == "Scraping protection"


def test_detect_error_page_403():
    err = detect_error_page("some body", 403, {}, ignore_403=False)
    assert err is not None
    assert err.type == "Access denied"


def test_detect_error_page_403_ignored():
    # XenForo engine uses ignore403 because member-not-found also returns 403
    assert detect_error_page("not found body", 403, {}, ignore_403=True) is None


def test_detect_error_page_999_linkedin():
    # LinkedIn returns 999 on bot suspicion — must NOT be reported as Server error
    assert detect_error_page("", 999, {}, ignore_403=False) is None


def test_detect_error_page_500():
    err = detect_error_page("", 503, {}, ignore_403=False)
    assert err is not None
    assert err.type == "Server"
    assert "503" in err.desc


def test_detect_error_page_ok():
    assert detect_error_page("hello world", 200, {}, ignore_403=False) is None


def test_parse_usernames_single_username():
    logger = Mock()
    result = parse_usernames({"profile_username": "alice"}, logger)
    assert result == {"alice": "username"}


def test_parse_usernames_list_of_usernames():
    logger = Mock()
    result = parse_usernames({"other_usernames": "['alice', 'bob']"}, logger)
    assert result == {"alice": "username", "bob": "username"}


def test_parse_usernames_malformed_list():
    logger = Mock()
    result = parse_usernames({"other_usernames": "not-a-list"}, logger)
    # should swallow the error and just return empty
    assert result == {}
    assert logger.warning.called


def test_parse_usernames_supported_id():
    logger = Mock()
    # "telegram" is in SUPPORTED_IDS per socid_extractor
    from maigret.checking import SUPPORTED_IDS
    if SUPPORTED_IDS:
        key = next(iter(SUPPORTED_IDS))
        result = parse_usernames({key: "some_value"}, logger)
        assert result.get("some_value") == key


def test_update_results_info_links():
    info = {"username": "test"}
    result = update_results_info(
        info,
        {"links": "['https://example.com/a', 'https://example.com/b']", "website": "https://example.com/w"},
        {"alice": "username"},
    )
    assert result["ids_usernames"] == {"alice": "username"}
    assert "https://example.com/w" in result["ids_links"]
    assert "https://example.com/a" in result["ids_links"]


def test_update_results_info_no_website():
    info = {}
    result = update_results_info(info, {"links": "[]"}, {})
    assert result["ids_links"] == []


def test_extract_ids_data_bad_html_returns_empty():
    logger = Mock()
    # Random HTML should not raise — returns {} if nothing matches
    out = extract_ids_data("<html><body>nothing special</body></html>", logger, Mock(name="Site"))
    assert isinstance(out, dict)


def test_get_failed_sites_filters_permanent_errors():
    # Temporary errors (Request timeout, Connecting failure, etc.) are retryable → returned.
    # Permanent ones (Captcha, Access denied, etc.) and results without error → filtered out.
    good_status = MaigretCheckResult("u", "S1", "https://s1", MaigretCheckStatus.CLAIMED)
    timeout_err = MaigretCheckResult(
        "u", "S2", "https://s2", MaigretCheckStatus.UNKNOWN,
        error=CheckError("Request timeout", "slow server"),
    )
    captcha_err = MaigretCheckResult(
        "u", "S3", "https://s3", MaigretCheckStatus.UNKNOWN,
        error=CheckError("Captcha", "Cloudflare"),
    )
    results = {
        "S1": {"status": good_status},
        "S2": {"status": timeout_err},
        "S3": {"status": captcha_err},
        "S4": {},  # no status at all
    }
    failed = get_failed_sites(results)
    # Only the temporary-error site is retry-worthy
    assert failed == ["S2"]


def test_timeout_check_valid():
    assert timeout_check("2.5") == 2.5
    assert timeout_check("30") == 30.0


def test_timeout_check_invalid():
    with pytest.raises(ArgumentTypeError):
        timeout_check("abc")
    with pytest.raises(ArgumentTypeError):
        timeout_check("0")
    with pytest.raises(ArgumentTypeError):
        timeout_check("-1")


def test_debug_response_logging_writes(tmp_path, monkeypatch):
    monkeypatch.chdir(tmp_path)
    debug_response_logging("https://example.com", "<html>hi</html>", 200, None)
    out = (tmp_path / "debug.log").read_text()
    assert "https://example.com" in out
    assert "200" in out


def test_debug_response_logging_no_response(tmp_path, monkeypatch):
    monkeypatch.chdir(tmp_path)
    debug_response_logging("https://example.com", None, None, CheckError("Timeout"))
    out = (tmp_path / "debug.log").read_text()
    assert "No response" in out


def _make_site(data_overrides=None):
    base = {
        "url": "https://x/{username}",
        "urlMain": "https://x",
        "checkType": "status_code",
        "usernameClaimed": "a",
        "usernameUnclaimed": "b",
    }
    if data_overrides:
        base.update(data_overrides)
    return MaigretSite("TestSite", base)


def test_process_site_result_no_response_returns_info():
    site = _make_site()
    info = {"username": "a", "parsing_enabled": False, "url_user": "https://x/a"}
    out = process_site_result(None, Mock(), Mock(), info, site)
    assert out is info


def test_process_site_result_status_already_set():
    site = _make_site()
    pre = MaigretCheckResult("a", "S", "u", MaigretCheckStatus.ILLEGAL)
    info = {"username": "a", "parsing_enabled": False, "status": pre, "url_user": "u"}
    # Since status is already set, function returns without changes
    out = process_site_result(("<html/>", 200, None), Mock(), Mock(), info, site)
    assert out["status"] is pre


def test_process_site_result_status_code_claimed():
    site = _make_site({"checkType": "status_code"})
    info = {"username": "a", "parsing_enabled": False, "url_user": "https://x/a"}
    out = process_site_result(("<html/>", 200, None), Mock(), Mock(), info, site)
    assert out["status"].status == MaigretCheckStatus.CLAIMED
    assert out["http_status"] == 200


def test_process_site_result_status_code_available():
    site = _make_site({"checkType": "status_code"})
    info = {"username": "a", "parsing_enabled": False, "url_user": "https://x/a"}
    out = process_site_result(("<html/>", 404, None), Mock(), Mock(), info, site)
    assert out["status"].status == MaigretCheckStatus.AVAILABLE


def test_process_site_result_message_claimed():
    site = _make_site({
        "checkType": "message",
        "presenseStrs": ["profile-name"],
        "absenceStrs": ["not found"],
    })
    info = {"username": "a", "parsing_enabled": False, "url_user": "https://x/a"}
    out = process_site_result(("<div class='profile-name'>Alice</div>", 200, None), Mock(), Mock(), info, site)
    assert out["status"].status == MaigretCheckStatus.CLAIMED


def test_process_site_result_message_available_by_absence():
    site = _make_site({
        "checkType": "message",
        "presenseStrs": ["profile-name"],
        "absenceStrs": ["not found"],
    })
    info = {"username": "a", "parsing_enabled": False, "url_user": "https://x/a"}
    out = process_site_result(("<h1>not found</h1> profile-name too", 200, None), Mock(), Mock(), info, site)
    # absence marker wins even if presence marker also appears
    assert out["status"].status == MaigretCheckStatus.AVAILABLE


def test_process_site_result_with_error_is_unknown():
    site = _make_site({"checkType": "status_code"})
    info = {"username": "a", "parsing_enabled": False, "url_user": "https://x/a"}
    resp = ("body", 403, CheckError("Captcha", "Cloudflare"))
    out = process_site_result(resp, Mock(), Mock(), info, site)
    assert out["status"].status == MaigretCheckStatus.UNKNOWN
    assert out["status"].error is not None


# ---- CurlCffiChecker: TLS impersonation header sanitisation ----


class _FakeCurlResponse:
    def __init__(self, text="ok", status_code=200):
        self.text = text
        self.status_code = status_code


class _FakeCurlSession:
    """Captures the kwargs of the last .get/.post/.head call for assertions."""

    last_method = None
    last_kwargs = None

    async def __aenter__(self):
        return self

    async def __aexit__(self, exc_type, exc, tb):
        return False

    async def get(self, **kwargs):
        type(self).last_method = 'get'
        type(self).last_kwargs = kwargs
        return _FakeCurlResponse()

    async def post(self, **kwargs):
        type(self).last_method = 'post'
        type(self).last_kwargs = kwargs
        return _FakeCurlResponse()

    async def head(self, **kwargs):
        type(self).last_method = 'head'
        type(self).last_kwargs = kwargs
        return _FakeCurlResponse()


@pytest.fixture
def fake_curl_cffi(monkeypatch):
    """Replace CurlCffiAsyncSession with a recorder. Resets capture between tests."""
    from maigret import checking
    _FakeCurlSession.last_method = None
    _FakeCurlSession.last_kwargs = None
    monkeypatch.setattr(checking, 'CurlCffiAsyncSession', _FakeCurlSession)
    return _FakeCurlSession


@pytest.mark.asyncio
async def test_curl_cffi_strips_random_user_agent_to_let_impersonation_drive_ua(fake_curl_cffi):
    """Regression: maigret used to forward `get_random_user_agent()` (often Chrome 91)
    to curl_cffi alongside `impersonate="chrome"` (Chrome 131 TLS). Cloudflare composite
    bot scoring rejects the resulting "Chrome 91 UA + Chrome 131 TLS" combo with a JS
    challenge. The fix strips User-Agent and Connection from the headers passed to
    curl_cffi so the impersonation default UA wins.
    """
    from maigret.checking import CurlCffiChecker

    checker = CurlCffiChecker(logger=Mock(), browser_emulate='chrome')
    checker.prepare(
        url='https://example.com/u/test',
        headers={
            "User-Agent": "Mozilla/5.0 ... Chrome/91.0.4472.124 ...",  # maigret default
            "Connection": "close",  # maigret default
        },
        allow_redirects=True,
        timeout=10,
        method='get',
    )
    await checker.check()

    sent = fake_curl_cffi.last_kwargs
    assert fake_curl_cffi.last_method == 'get'
    assert sent['impersonate'] == 'chrome'
    # The whole point of the fix: random UA must not leak through.
    assert sent['headers'] is None or 'User-Agent' not in sent['headers']
    assert sent['headers'] is None or 'user-agent' not in {k.lower() for k in sent['headers']}
    # Connection: close also stripped (interferes with impersonation defaults).
    assert sent['headers'] is None or 'Connection' not in sent['headers']


@pytest.mark.asyncio
async def test_curl_cffi_preserves_site_specific_headers(fake_curl_cffi):
    """Site-specific headers (e.g. Content-Type for POST APIs, auth tokens, cookies)
    must survive the User-Agent strip — only UA and Connection are removed.
    """
    from maigret.checking import CurlCffiChecker

    checker = CurlCffiChecker(logger=Mock(), browser_emulate='chrome')
    checker.prepare(
        url='https://example.com/api',
        headers={
            "User-Agent": "Mozilla/5.0 random",
            "Connection": "close",
            "Content-Type": "application/json",
            "X-Csrf-Token": "abc123",
        },
        allow_redirects=True,
        timeout=10,
        method='get',
    )
    await checker.check()

    sent_headers = fake_curl_cffi.last_kwargs['headers']
    assert sent_headers is not None
    assert sent_headers.get("Content-Type") == "application/json"
    assert sent_headers.get("X-Csrf-Token") == "abc123"
    # Sanity: stripped pair is gone
    assert "User-Agent" not in sent_headers
    assert "Connection" not in sent_headers


@pytest.mark.asyncio
async def test_curl_cffi_handles_empty_headers(fake_curl_cffi):
    """No headers at all → headers kwarg is None (not an empty dict that could confuse
    curl_cffi's impersonation header injection)."""
    from maigret.checking import CurlCffiChecker

    checker = CurlCffiChecker(logger=Mock(), browser_emulate='chrome')
    checker.prepare(
        url='https://example.com/u/test',
        headers=None,
        allow_redirects=True,
        timeout=10,
        method='get',
    )
    await checker.check()

    assert fake_curl_cffi.last_kwargs['headers'] is None
    assert fake_curl_cffi.last_kwargs['impersonate'] == 'chrome'


@pytest.mark.asyncio
async def test_curl_cffi_strips_ua_for_post_too(fake_curl_cffi):
    """The same UA-strip must apply on POST (e.g. Discord-style POST username probes
    with `tls_fingerprint`)."""
    from maigret.checking import CurlCffiChecker

    checker = CurlCffiChecker(logger=Mock(), browser_emulate='chrome')
    checker.prepare(
        url='https://example.com/api/check',
        headers={
            "User-Agent": "Mozilla/5.0 random",
            "Content-Type": "application/json",
        },
        allow_redirects=True,
        timeout=10,
        method='post',
        payload={"username": "test"},
    )
    await checker.check()

    sent = fake_curl_cffi.last_kwargs
    assert fake_curl_cffi.last_method == 'post'
    assert sent['json'] == {"username": "test"}
    assert "User-Agent" not in sent['headers']
    assert sent['headers'].get("Content-Type") == "application/json"

@@ -49,6 +49,8 @@ DEFAULT_ARGS: Dict[str, Any] = {

    'with_domains': False,
    'xmind': False,
    'md': False,
    'ai': False,
    'ai_model': 'gpt-4o',
    'no_autoupdate': False,
    'force_update': False,
}

+11
-11
@@ -26,7 +26,7 @@ async def test_simple_asyncio_executor():
    executor = AsyncioSimpleExecutor(logger=logger)
    assert await executor.run(tasks) == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
    assert executor.execution_time > 0.2
    assert executor.execution_time < 0.3
    assert executor.execution_time < 1.0


@pytest.mark.asyncio
@@ -37,7 +37,7 @@ async def test_asyncio_progressbar_executor():
    # no guarantees for the results order
    assert sorted(await executor.run(tasks)) == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
    assert executor.execution_time > 0.2
    assert executor.execution_time < 0.3
    assert executor.execution_time < 1.0


@pytest.mark.asyncio
@@ -48,7 +48,7 @@ async def test_asyncio_progressbar_semaphore_executor():
    # no guarantees for the results order
    assert sorted(await executor.run(tasks)) == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
    assert executor.execution_time > 0.2
    assert executor.execution_time < 0.4
    assert executor.execution_time < 1.1


@pytest.mark.slow
@@ -59,12 +59,12 @@ async def test_asyncio_progressbar_queue_executor():
    executor = AsyncioProgressbarQueueExecutor(logger=logger, in_parallel=2)
    assert await executor.run(tasks) == [0, 1, 3, 2, 4, 6, 7, 5, 9, 8]
    assert executor.execution_time > 0.5
    assert executor.execution_time < 0.7
    assert executor.execution_time < 1.4

    executor = AsyncioProgressbarQueueExecutor(logger=logger, in_parallel=3)
    assert await executor.run(tasks) == [0, 3, 1, 4, 6, 2, 7, 9, 5, 8]
    assert executor.execution_time > 0.4
    assert executor.execution_time < 0.6
    assert executor.execution_time < 1.3

    executor = AsyncioProgressbarQueueExecutor(logger=logger, in_parallel=5)
    assert await executor.run(tasks) in (
@@ -72,12 +72,12 @@ async def test_asyncio_progressbar_queue_executor():
        [0, 3, 6, 1, 4, 9, 7, 2, 5, 8],
    )
    assert executor.execution_time > 0.3
    assert executor.execution_time < 0.5
    assert executor.execution_time < 1.2

    executor = AsyncioProgressbarQueueExecutor(logger=logger, in_parallel=10)
    assert await executor.run(tasks) == [0, 3, 6, 9, 1, 4, 7, 2, 5, 8]
    assert executor.execution_time > 0.2
    assert executor.execution_time < 0.4
    assert executor.execution_time < 1.1


@pytest.mark.asyncio
@@ -88,13 +88,13 @@ async def test_asyncio_queue_generator_executor():
    results = [result async for result in executor.run(tasks)]  # type: ignore[arg-type]
    assert results == [0, 1, 3, 2, 4, 6, 7, 5, 9, 8]
    assert executor.execution_time > 0.5
    assert executor.execution_time < 0.6
    assert executor.execution_time < 1.3

    executor = AsyncioQueueGeneratorExecutor(logger=logger, in_parallel=3)
    results = [result async for result in executor.run(tasks)]  # type: ignore[arg-type]
    assert results == [0, 3, 1, 4, 6, 2, 7, 9, 5, 8]
    assert executor.execution_time > 0.4
    assert executor.execution_time < 0.5
    assert executor.execution_time < 1.2

    executor = AsyncioQueueGeneratorExecutor(logger=logger, in_parallel=5)
    results = [result async for result in executor.run(tasks)]  # type: ignore[arg-type]
@@ -103,10 +103,10 @@ async def test_asyncio_queue_generator_executor():
        [0, 3, 6, 1, 4, 9, 7, 2, 5, 8],
    )
    assert executor.execution_time > 0.3
    assert executor.execution_time < 0.4
    assert executor.execution_time < 1.1

    executor = AsyncioQueueGeneratorExecutor(logger=logger, in_parallel=10)
    results = [result async for result in executor.run(tasks)]  # type: ignore[arg-type]
    assert results == [0, 3, 6, 9, 1, 4, 7, 2, 5, 8]
    assert executor.execution_time > 0.2
    assert executor.execution_time < 0.3
    assert executor.execution_time < 1.0

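Across these hunks the change is uniform: the lower bounds on `execution_time` stay as they were, while the tight upper bounds are loosened (for example `< 0.3` becomes `< 1.0`) so that slower CI runners do not trigger spurious failures. The standalone sketch below illustrates the same shape of assertion with a bounded-concurrency batch of uniform 0.1 s tasks; the task list, sleep duration, and helper names are illustrative assumptions, not the suite's actual `tasks` or executors.

import asyncio
import math
import time


async def _unit_task(i: int) -> int:
    # Stand-in for one unit of work; the 0.1 s duration is an assumption.
    await asyncio.sleep(0.1)
    return i


async def _run_batch(in_parallel: int, n_tasks: int = 10) -> float:
    # Bounded-concurrency execution, roughly the shape of a queue executor
    # (an assumption for illustration, not maigret's implementation).
    sem = asyncio.Semaphore(in_parallel)

    async def guarded(i: int) -> int:
        async with sem:
            return await _unit_task(i)

    start = time.monotonic()
    await asyncio.gather(*(guarded(i) for i in range(n_tasks)))
    return time.monotonic() - start


async def _demo() -> None:
    for in_parallel in (2, 3, 5, 10):
        elapsed = await _run_batch(in_parallel)
        floor = math.ceil(10 / in_parallel) * 0.1
        # Tight lower bound, generous upper bound: the same shape as the
        # relaxed assertions in the hunks above.
        assert elapsed >= floor
        assert elapsed < floor + 1.0
        print(f"in_parallel={in_parallel}: {elapsed:.2f}s (floor ~{floor:.1f}s)")


if __name__ == "__main__":
    asyncio.run(_demo())
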
@@ -10,8 +10,15 @@ import xmind  # type: ignore[import-untyped]
from jinja2 import Template

from maigret.report import (
    filter_supposed_data,
    sort_report_by_data_points,
    _md_format_value,
    generate_csv_report,
    generate_txt_report,
    save_csv_report,
    save_txt_report,
    save_json_report,
    save_markdown_report,
    save_xmind_report,
    save_html_report,
    save_pdf_report,
@@ -456,3 +463,223 @@ def test_text_report_broken():
    assert brief_part in report_text
    assert 'us' in report_text
    assert 'photo' in report_text


def test_filter_supposed_data():
    data = {
        'fullname': ['Alice'],
        'gender': ['female'],
        'location': ['Berlin'],
        'age': ['30'],
        'email': ['x@y.z'],  # not allowed, must be dropped
        'bio': ['hi'],  # not allowed
    }
    result = filter_supposed_data(data)
    assert result == {
        'Fullname': 'Alice',
        'Gender': 'female',
        'Location': 'Berlin',
        'Age': '30',
    }


def test_filter_supposed_data_empty():
    assert filter_supposed_data({}) == {}
    assert filter_supposed_data({'nope': ['v']}) == {}


def test_filter_supposed_data_scalar_values():
    # Strings and scalars must be kept whole — previously v[0] on "Alice"
    # silently returned "A" instead of "Alice".
    data = {
        'fullname': 'Alice',
        'gender': 'female',
        'location': 'Berlin',
        'age': 30,
    }
    assert filter_supposed_data(data) == {
        'Fullname': 'Alice',
        'Gender': 'female',
        'Location': 'Berlin',
        'Age': 30,
    }


def test_filter_supposed_data_empty_list_yields_empty_string():
    # Edge case: list value present but empty should not crash with IndexError.
    assert filter_supposed_data({'fullname': []}) == {'Fullname': ''}


def test_filter_supposed_data_mixed_values():
    # List and scalar mixed in the same payload.
    data = {'fullname': ['Alice', 'Alicia'], 'gender': 'female'}
    assert filter_supposed_data(data) == {
        'Fullname': 'Alice',
        'Gender': 'female',
    }

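Taken together, the tests above pin down the expected contract of `filter_supposed_data`: only `fullname`, `gender`, `location`, and `age` survive, keys come back capitalized, list values collapse to their first element (or an empty string when the list is empty), and scalar values pass through untouched. A minimal sketch consistent with that contract, written for illustration only and not taken from `maigret.report`:

from typing import Any, Dict

ALLOWED_SUPPOSED_FIELDS = ('fullname', 'gender', 'location', 'age')  # assumed allow-list


def filter_supposed_data_sketch(data: Dict[str, Any]) -> Dict[str, Any]:
    filtered = {}
    for key, value in data.items():
        if key not in ALLOWED_SUPPOSED_FIELDS:
            continue  # e.g. 'email' and 'bio' are dropped
        if isinstance(value, (list, tuple)):
            # First element of a list, or '' when the list is empty.
            value = value[0] if value else ''
        # Strings and other scalars are kept whole, never sliced.
        filtered[key.capitalize()] = value
    return filtered


assert filter_supposed_data_sketch({'fullname': ['Alice'], 'bio': ['hi']}) == {'Fullname': 'Alice'}
assert filter_supposed_data_sketch({'age': 30}) == {'Age': 30}
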
def test_sort_report_by_data_points():
    status_many = MaigretCheckResult('', '', '', MaigretCheckStatus.CLAIMED)
    status_many.ids_data = {'a': 1, 'b': 2, 'c': 3}
    status_one = MaigretCheckResult('', '', '', MaigretCheckStatus.CLAIMED)
    status_one.ids_data = {'a': 1}
    status_none = MaigretCheckResult('', '', '', MaigretCheckStatus.CLAIMED)

    results = {
        'few': {'status': status_one},
        'many': {'status': status_many},
        'zero': {'status': status_none},
        'nostatus': {},
    }
    sorted_out = sort_report_by_data_points(results)
    keys = list(sorted_out.keys())
    # site with 3 ids_data fields must come first
    assert keys[0] == 'many'
    # site with 1 field next
    assert keys[1] == 'few'


def test_md_format_value_list():
    assert _md_format_value(['a', 'b', 'c']) == 'a, b, c'


def test_md_format_value_url():
    assert _md_format_value('https://example.com') == '[https://example.com](https://example.com)'
    assert _md_format_value('http://x.y') == '[http://x.y](http://x.y)'


def test_md_format_value_plain():
    assert _md_format_value('hello') == 'hello'
    assert _md_format_value(42) == '42'

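The three `_md_format_value` tests describe a small rendering helper: lists are joined with commas, URLs become Markdown links, and everything else is stringified. A hypothetical equivalent, for orientation only (the actual helper in `maigret.report` may handle more cases):

def _md_format_value_sketch(value):
    # Illustrative stand-in mirroring the expectations above; the real helper
    # may also cover dicts, None, nested lists, and so on.
    if isinstance(value, (list, tuple)):
        return ', '.join(str(v) for v in value)
    text = str(value)
    if text.startswith(('http://', 'https://')):
        return f'[{text}]({text})'
    return text
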
def test_save_csv_report():
    filename = 'report_test.csv'
    save_csv_report(filename, 'test', EXAMPLE_RESULTS)
    with open(filename) as f:
        content = f.read()
    assert 'username,name,url_main' in content
    assert 'test,GitHub' in content


def test_save_txt_report():
    filename = 'report_test.txt'
    save_txt_report(filename, 'test', EXAMPLE_RESULTS)
    with open(filename) as f:
        content = f.read()
    assert 'https://www.github.com/test' in content
    assert 'Total Websites Username Detected On : 1' in content


def test_save_json_report_simple():
    filename = 'report_test.json'
    save_json_report(filename, 'test', EXAMPLE_RESULTS, 'simple')
    with open(filename) as f:
        data = json.load(f)
    assert 'GitHub' in data


def test_save_json_report_ndjson():
    filename = 'report_test_ndjson.json'
    save_json_report(filename, 'test', EXAMPLE_RESULTS, 'ndjson')
    with open(filename) as f:
        lines = f.readlines()
    assert len(lines) == 1
    assert json.loads(lines[0])['sitename'] == 'GitHub'

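The ndjson variant exercised above writes one JSON object per line, one line per site, so a result set with a single GitHub hit yields exactly one line. A small assumed reader for that layout (the function name is hypothetical; the 'sitename' field follows what the test asserts):

import json


def read_ndjson_report(path):
    # One JSON object per non-empty line.
    with open(path) as f:
        return [json.loads(line) for line in f if line.strip()]
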
def _markdown_context_with_rich_ids():
    """Build a context with found accounts, ids_data (incl. image, url, list) to exercise all branches."""
    found_result = copy.deepcopy(GOOD_RESULT)
    found_result.tags = ['photo', 'us']
    found_result.ids_data = {
        "fullname": "Alice",
        "name": "Alice A.",
        "location": "Berlin",
        "bio": "Photographer",
        "external_url": "https://example.com/profile",
        "image": "https://example.com/avatar.png",  # must be skipped
        "aliases": ["alice", "alicea"],  # list value
        "last_online": "2024-01-02 10:00:00",
    }
    data = {
        'Github': {
            'username': 'alice',
            'parsing_enabled': True,
            'url_main': 'https://github.com/',
            'url_user': 'https://github.com/alice',
            'status': found_result,
            'http_status': 200,
            'is_similar': False,
            'rank': 1,
            'site': MaigretSite('Github', {}),
            'found': True,
            'ids_data': found_result.ids_data,
        },
        'Similar': {
            'username': 'alice',
            'url_user': 'https://other.com/alice',
            'is_similar': True,
            'found': True,
            'status': copy.deepcopy(GOOD_RESULT),
        },
    }
    return {
        'username': 'alice',
        'generated_at': '2024-01-02 10:00',
        'brief': 'Search returned 1 account',
        'countries_tuple_list': [('us', 1)],
        'interests_tuple_list': [('photo', 1)],
        'first_seen': '2023-01-01',
        'results': [('alice', 'username', data)],
    }


def test_save_markdown_report():
    filename = 'report_test.md'
    context = _markdown_context_with_rich_ids()
    save_markdown_report(filename, context, run_info={'sites_count': 100, 'flags': '--top-sites 100'})
    with open(filename) as f:
        content = f.read()
    assert '# Report by searching on username "alice"' in content
    assert '## Summary' in content
    assert '## Accounts found' in content
    assert '### Github' in content
    assert '[https://github.com/alice](https://github.com/alice)' in content
    assert 'Ethical use' in content
    assert '100 sites checked' in content
    # image field must NOT appear in per-site listing
    assert 'avatar.png' not in content
    # list field rendered with join
    assert 'alice, alicea' in content
    # external url formatted as markdown link
    assert '[https://example.com/profile](https://example.com/profile)' in content


def test_save_markdown_report_minimal_context():
    """No run_info, no first_seen — exercise the fallback branches."""
    filename = 'report_test_min.md'
    context = {
        'username': 'bob',
        'brief': 'nothing found',
        'results': [],
    }
    save_markdown_report(filename, context)
    with open(filename) as f:
        content = f.read()
    assert '# Report by searching on username "bob"' in content
    assert '## Summary' in content


def test_get_plaintext_report_minimal():
    """Minimal context without countries/interests."""
    context = {
        'brief': 'Nothing to report.',
        'interests_tuple_list': [],
        'countries_tuple_list': [],
    }
    out = get_plaintext_report(context)
    assert 'Nothing to report.' in out
    assert 'Countries:' not in out
    assert 'Interests' not in out

@@ -1,5 +1,7 @@
"""Maigret Database test functions"""

import re

from typing import Any, Dict

from maigret.sites import MaigretDatabase, MaigretSite
@@ -126,6 +128,22 @@ def test_site_url_detector():
    )


def test_extract_id_from_url_skips_none_groups():
    site = MaigretSite(
        "Example",
        {
            "urlMain": "https://example.com",
            "url": "https://example.com/{username}",
        },
    )
    site.url_regexp = re.compile(r"^https://example\.com/([^/?#]+)(?:/(.*))?$")

    assert site.extract_id_from_url("https://example.com/username") == (
        "username",
        "username",
    )

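The regex used here has an optional second capture group, so `match.groups()` contains `None` for a plain profile URL; the test name states that such empty groups must be skipped rather than returned. A standalone illustration of that regex behaviour (the printout is for demonstration only):

import re

pattern = re.compile(r"^https://example\.com/([^/?#]+)(?:/(.*))?$")
match = pattern.match("https://example.com/username")

# The optional trailing-path group did not participate in the match,
# so it comes back as None and must be filtered out by the extractor.
print(match.groups())                                # ('username', None)
print([g for g in match.groups() if g is not None])  # ['username']
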
def test_ranked_sites_dict():
    db = MaigretDatabase()
    db.update_site(MaigretSite('3', {'alexaRank': 1000, 'engine': 'ucoz'}))

@@ -0,0 +1,172 @@
"""Smoke tests for the Flask web interface in maigret.web.app.

The goal is to catch breakage in the basic user flow (render index, kick off
search, redirect to results) without making real network calls. Heavy maigret
internals are mocked; the report-generation smoke test keeps `save_graph_report`
unmocked so regressions like `nt.options.groups = ...` (AttributeError on a
plain dict) are caught automatically.
"""
import os

import pytest

import maigret
import maigret.report
from maigret.web import app as web_app_module


CUR_PATH = os.path.dirname(os.path.realpath(__file__))
TEST_DB = os.path.join(CUR_PATH, 'db.json')


class _SyncThread:
    """Drop-in for threading.Thread that runs target synchronously on start()."""

    def __init__(self, target=None, args=(), kwargs=None, **_):
        self._target = target
        self._args = args
        self._kwargs = kwargs or {}

    def start(self):
        self._target(*self._args, **self._kwargs)


@pytest.fixture
def web_app(tmp_path):
    web_app_module.app.config['TESTING'] = True
    web_app_module.app.config['REPORTS_FOLDER'] = str(tmp_path)
    web_app_module.app.config['MAIGRET_DB_FILE'] = TEST_DB

    web_app_module.background_jobs.clear()
    web_app_module.job_results.clear()

    yield web_app_module

    web_app_module.background_jobs.clear()
    web_app_module.job_results.clear()


@pytest.fixture
def client(web_app):
    return web_app.app.test_client()


def test_index_renders(client):
    resp = client.get('/')
    assert resp.status_code == 200
    body = resp.get_data(as_text=True)
    assert 'name="usernames"' in body
    assert '<form' in body


def test_search_empty_input_redirects_to_index(client):
    resp = client.post('/search', data={'usernames': ''})
    assert resp.status_code == 302
    assert resp.location.endswith('/')  # redirected back to the index page


def test_search_redirects_to_status(client, web_app, monkeypatch):
    monkeypatch.setattr(web_app, 'process_search_task', lambda *a, **kw: None)
    monkeypatch.setattr(web_app, 'Thread', _SyncThread)

    resp = client.post('/search', data={'usernames': 'soxoj'})

    assert resp.status_code == 302
    assert '/status/' in resp.location


def test_invalid_timestamp_redirects_to_index(client):
    resp = client.get('/status/nonexistent_ts')
    assert resp.status_code == 302
    assert resp.location.endswith('/')


def test_status_running_renders_status_page(client, web_app, monkeypatch):
    """While the background job is still running, /status/<ts> returns 200."""

    def never_completes(usernames, options, timestamp):
        # leave background_jobs[timestamp]['completed'] as False
        pass

    monkeypatch.setattr(web_app, 'process_search_task', never_completes)
    monkeypatch.setattr(web_app, 'Thread', _SyncThread)

    post = client.post('/search', data={'usernames': 'soxoj'})
    status_resp = client.get(post.location)

    assert status_resp.status_code == 200


def test_completed_search_redirects_to_results(client, web_app, monkeypatch):
    """Happy path: POST /search → background completes → /status/<ts> → /results/<session>."""

    def fake_task(usernames, options, timestamp):
        web_app.job_results[timestamp] = {
            'status': 'completed',
            'session_folder': f'search_{timestamp}',
            'graph_file': f'search_{timestamp}/combined_graph.html',
            'usernames': usernames,
            'individual_reports': [],
        }
        web_app.background_jobs[timestamp]['completed'] = True

    monkeypatch.setattr(web_app, 'process_search_task', fake_task)
    monkeypatch.setattr(web_app, 'Thread', _SyncThread)

    post = client.post('/search', data={'usernames': 'soxoj'})
    assert post.status_code == 302

    status_resp = client.get(post.location)
    assert status_resp.status_code == 302
    assert '/results/search_' in status_resp.location

    results_resp = client.get(status_resp.location)
    assert results_resp.status_code == 200
    assert b'soxoj' in results_resp.data


def test_failed_task_redirects_to_index(client, web_app, monkeypatch):
    def failing_task(usernames, options, timestamp):
        web_app.job_results[timestamp] = {'status': 'failed', 'error': 'boom'}
        web_app.background_jobs[timestamp]['completed'] = True

    monkeypatch.setattr(web_app, 'process_search_task', failing_task)
    monkeypatch.setattr(web_app, 'Thread', _SyncThread)

    post = client.post('/search', data={'usernames': 'soxoj'})
    status_resp = client.get(post.location)

    assert status_resp.status_code == 302
    assert status_resp.location.endswith('/')


def test_real_report_generation_does_not_crash(client, web_app, monkeypatch):
    """End-to-end with mocked maigret.search but REAL report generation.

    This is the regression guard for bugs inside `save_graph_report` and friends
    (e.g. `nt.options.groups = ...` raising AttributeError on a dict). If any of
    the unmocked report functions throws, the task records a failed status and
    this assertion catches it.
    """

    async def fake_search(*args, **kwargs):
        return {}

    monkeypatch.setattr(maigret, 'search', fake_search)
    # Mock the per-username report writers — they are not what we care about here,
    # and pdf/html generation pulls in xhtml2pdf which is slow and brittle.
    monkeypatch.setattr(maigret.report, 'save_csv_report', lambda *a, **kw: None)
    monkeypatch.setattr(maigret.report, 'save_json_report', lambda *a, **kw: None)
    monkeypatch.setattr(maigret.report, 'save_pdf_report', lambda *a, **kw: None)
    monkeypatch.setattr(maigret.report, 'save_html_report', lambda *a, **kw: None)
    monkeypatch.setattr(maigret.report, 'generate_report_context', lambda *a, **kw: {})
    monkeypatch.setattr(web_app, 'Thread', _SyncThread)

    post = client.post('/search', data={'usernames': 'testuser'})
    timestamp = post.location.rsplit('/', 1)[1]

    assert timestamp in web_app.job_results, 'background task did not record any result'
    result = web_app.job_results[timestamp]
    assert result['status'] == 'completed', (
        f"report generation failed: {result.get('error')!r}"
    )

@@ -0,0 +1,5 @@
#!/bin/bash
set -e

sudo apt-get update && sudo apt-get install -y libcairo2-dev pkg-config
pip install .