Add OnlyFans with activation mechanism; updated site ranks (#2546)

This commit is contained in:
Soxoj
2026-04-21 19:03:45 +02:00
committed by GitHub
parent a5e558c5e8
commit 0131f0b64c
5 changed files with 2056 additions and 1924 deletions
+54 -3
View File
@@ -7,7 +7,7 @@ from aiohttp import CookieJar
class ParsingActivator:
    @staticmethod
    def twitter(site, logger, cookies={}, **kwargs):
        headers = dict(site.headers)
        del headers["x-guest-token"]
        import requests
@@ -19,7 +19,7 @@ class ParsingActivator:
        site.headers["x-guest-token"] = guest_token

    @staticmethod
    def vimeo(site, logger, cookies={}, **kwargs):
        headers = dict(site.headers)
        if "Authorization" in headers:
            del headers["Authorization"]
@@ -31,7 +31,58 @@ class ParsingActivator:
        site.headers["Authorization"] = "jwt " + jwt_token
@staticmethod @staticmethod
def onlyfans(site, logger, url=None, **kwargs):
    """Activate OnlyFans API access by signing the outgoing request.

    Signing rules (static_param / checksum_indexes / checksum_constant /
    format / app_token) live in data.json under OnlyFans.activation and
    rotate upstream every ~13 weeks.  If "Please refresh the page" keeps
    firing after activation, refresh them from:
    https://raw.githubusercontent.com/DATAHOARDERS/dynamic-rules/main/onlyfans.json

    Args:
        site: site object; its ``headers`` dict is mutated in place with
            ``x-bc``, ``cookie``, ``time`` and ``sign`` entries.
        logger: logger used for debug/warning output.
        url: URL that will actually be requested; its path is what gets
            signed.  Falls back to the activation init URL's path.
        **kwargs: ignored; kept for a uniform activator signature.
    """
    import hashlib
    import secrets
    import time as _time
    from urllib.parse import urlparse

    import requests

    act = site.activation
    static_param = act["static_param"]
    indexes = act["checksum_indexes"]
    constant = act["checksum_constant"]
    fmt = act["format"]
    init_url = act["url"]
    # "0" is the anonymous user id used when no account is configured.
    user_id = site.headers.get("user-id", "0") or "0"

    def _sign(path):
        # OnlyFans request signature: sha1 over a newline-joined message,
        # then a checksum built from selected hex digits plus a constant.
        t = str(int(_time.time() * 1000))
        msg = "\n".join([static_param, t, path, user_id]).encode()
        sha = hashlib.sha1(msg).hexdigest()
        cs = sum(ord(sha[i]) for i in indexes) + constant
        return t, fmt.format(sha, abs(cs))

    # x-bc is a random browser fingerprint; generate one if absent/zeroed.
    if site.headers.get("x-bc", "").strip("0") == "":
        site.headers["x-bc"] = secrets.token_hex(20)

    # Bootstrap session cookies with a signed init request, only once.
    if not site.headers.get("cookie"):
        init_path = urlparse(init_url).path
        t, sg = _sign(init_path)
        hdrs = dict(site.headers)
        hdrs["time"] = t
        hdrs["sign"] = sg
        hdrs.pop("cookie", None)
        r = requests.get(init_url, headers=hdrs, timeout=15)
        if r.status_code != 200:
            # Best-effort: keep going without cookies, but leave a trace
            # instead of failing silently.
            logger.warning(
                "OnlyFans init request failed with HTTP %s", r.status_code
            )
        jar = "; ".join(f"{k}={v}" for k, v in r.cookies.items())
        if jar:
            site.headers["cookie"] = jar
        logger.debug("OnlyFans init: got cookies %s", list(r.cookies.keys()))

    # Sign the path that will actually be requested.
    target_path = urlparse(url).path if url else urlparse(init_url).path
    t, sg = _sign(target_path)
    site.headers["time"] = t
    site.headers["sign"] = sg
    logger.debug("OnlyFans signed %s time=%s", target_path, t)
@staticmethod
def weibo(site, logger, **kwargs):
headers = dict(site.headers) headers = dict(site.headers)
import requests import requests
+1 -1
View File
@@ -678,7 +678,7 @@ async def check_site_for_username(
        method = act["method"]
        try:
            activate_fun = getattr(ParsingActivator(), method)
            activate_fun(site, logger, url=checker.url)
        except AttributeError as e:
            logger.warning(
                f"Activation method {method} for site {site.name} not found!",
+1233 -1153
View File
File diff suppressed because it is too large Load Diff
+3 -3
View File
@@ -1,8 +1,8 @@
{
  "version": 1,
  "updated_at": "2026-04-21T16:59:52Z",
  "sites_count": 3144,
  "min_maigret_version": "0.6.0",
  "data_sha256": "da83957ecfd04da788da4509ea318aa6afcc610441fab5642c9ebd376e8f3c18",
  "data_url": "https://raw.githubusercontent.com/soxoj/maigret/main/maigret/resources/data.json"
}
+765 -764
View File
File diff suppressed because it is too large Load Diff