You've already forked directdnsonly
feat: Update dependencies in poetry.lock and pyproject.toml ✨
- Added `certifi` version 2026.1.4 and `charset-normalizer` version 3.4.4 to poetry.lock. - Introduced `idna` version 3.11 to poetry.lock. - Updated `requests` to version 2.32.5 in poetry.lock and added it as a dependency in pyproject.toml. - Updated `urllib3` to version 2.6.3 in poetry.lock. - Added extras for `requests` and `urllib3` in poetry.lock.
This commit is contained in:
@@ -1,6 +1,6 @@
|
||||
from loguru import logger
|
||||
import sys
|
||||
from config import config
|
||||
from directdnsonly.config import config
|
||||
|
||||
|
||||
def configure_logging():
|
||||
|
||||
@@ -1,6 +1,12 @@
|
||||
import cherrypy
|
||||
from urllib.parse import urlencode, parse_qs
|
||||
from loguru import logger
|
||||
from directdnsonly.app.utils import (
|
||||
check_zone_exists,
|
||||
check_parent_domain_owner,
|
||||
get_domain_record,
|
||||
get_parent_domain_record,
|
||||
)
|
||||
from directdnsonly.app.utils.zone_parser import validate_and_normalize_zone
|
||||
|
||||
|
||||
@@ -14,10 +20,18 @@ class DNSAdminAPI:
|
||||
def index(self):
|
||||
return "DNS Admin API - Available endpoints: /CMD_API_DNS_ADMIN"
|
||||
|
||||
@cherrypy.expose
|
||||
def CMD_API_LOGIN_TEST(self):
|
||||
"""DirectAdmin login test — confirms credentials are valid"""
|
||||
return urlencode({"error": 0, "text": "Login OK"})
|
||||
|
||||
@cherrypy.expose
|
||||
def CMD_API_DNS_ADMIN(self, **params):
|
||||
"""Handle both DirectAdmin-style API calls and raw zone file uploads"""
|
||||
try:
|
||||
if cherrypy.request.method == "GET":
|
||||
return self._handle_exists(params)
|
||||
|
||||
if cherrypy.request.method != "POST":
|
||||
cherrypy.response.status = 405
|
||||
return urlencode({"error": 1, "text": "Method not allowed"})
|
||||
@@ -77,6 +91,42 @@ class DNSAdminAPI:
|
||||
cherrypy.response.status = 400
|
||||
return urlencode({"error": 1, "text": str(e)})
|
||||
|
||||
def _handle_exists(self, params: dict):
    """Handle GET action=exists — domain and optional parent domain lookup"""
    def bad_request(message):
        # 400 + DirectAdmin-style urlencoded error payload
        cherrypy.response.status = 400
        return urlencode({"error": 1, "text": message})

    action = params.get("action")
    if action != "exists":
        return bad_request(f"Unsupported GET action: {action}")

    domain = params.get("domain")
    if not domain:
        return bad_request("Missing 'domain' parameter")

    want_parent = bool(params.get("check_for_parent_domain"))
    found_here = check_zone_exists(domain)
    found_parent = check_parent_domain_owner(domain) if want_parent else False

    if found_here:
        # Exact zone match takes precedence over a parent match
        record = get_domain_record(domain)
        payload = {
            "error": 0,
            "exists": 1,
            "details": f"Domain exists on {record.hostname}",
        }
    elif found_parent:
        # parent match only
        parent_record = get_parent_domain_record(domain)
        payload = {
            "error": 0,
            "exists": 2,
            "details": f"Parent Domain exists on {parent_record.hostname}",
        }
    else:
        # Neither the zone nor (when requested) its parent is registered
        payload = {"error": 0, "exists": 0}
    return urlencode(payload)
|
||||
|
||||
def _handle_rawsave(self, domain: str, params: dict):
|
||||
"""Process zone file saves"""
|
||||
zone_data = params.get("zone_file")
|
||||
|
||||
426
directdnsonly/app/reconciler.py
Executable file
426
directdnsonly/app/reconciler.py
Executable file
@@ -0,0 +1,426 @@
|
||||
#!/usr/bin/env python3
|
||||
import threading
|
||||
from urllib.parse import parse_qs
|
||||
from loguru import logger
|
||||
|
||||
import requests
|
||||
import requests.exceptions
|
||||
|
||||
from directdnsonly.app.db import connect
|
||||
from directdnsonly.app.db.models import Domain
|
||||
|
||||
|
||||
class ReconciliationWorker:
|
||||
"""Periodically polls configured DirectAdmin servers and queues deletes
|
||||
for any zones in our DB that no longer exist in DirectAdmin.
|
||||
|
||||
Safety rules:
|
||||
- If a DA server is unreachable, skip it entirely — never delete on uncertainty
|
||||
- Only touches domains registered via DaDNS (present in our `domains` table)
|
||||
- Domains in CoreDNS but NOT in our DB are not our zones; left untouched
|
||||
- Pushes to the existing delete_queue so the full delete path is exercised
|
||||
"""
|
||||
|
||||
def __init__(self, delete_queue, reconciliation_config: dict):
|
||||
self.delete_queue = delete_queue
|
||||
self.enabled = reconciliation_config.get("enabled", False)
|
||||
self.interval_seconds = reconciliation_config.get("interval_minutes", 60) * 60
|
||||
self.servers = reconciliation_config.get("directadmin_servers") or []
|
||||
self.verify_ssl = reconciliation_config.get("verify_ssl", True)
|
||||
self._stop_event = threading.Event()
|
||||
self._thread = None
|
||||
|
||||
def start(self):
    """Launch the background reconciliation thread if enabled and configured."""
    if not self.enabled:
        logger.info("Reconciliation poller disabled — skipping")
        return
    if not self.servers:
        logger.warning(
            "Reconciliation enabled but no directadmin_servers configured"
        )
        return

    server_names = [s.get("hostname", "?") for s in self.servers]
    # Reset the stop flag so a stopped worker can be restarted
    self._stop_event.clear()
    worker = threading.Thread(
        target=self._run, daemon=True, name="reconciliation_worker"
    )
    self._thread = worker
    worker.start()
    logger.info(
        f"Reconciliation poller started — "
        f"interval: {self.interval_seconds // 60}m, "
        f"servers: {server_names}"
    )
|
||||
|
||||
def stop(self):
    """Signal the worker to stop and wait briefly for the thread to exit."""
    self._stop_event.set()
    worker = self._thread
    if worker:
        # Bounded join so shutdown never hangs on a stuck pass
        worker.join(timeout=10)
    logger.info("Reconciliation poller stopped")
|
||||
|
||||
@property
|
||||
def is_alive(self):
|
||||
return self._thread is not None and self._thread.is_alive()
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Internal
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _run(self):
    """Thread target: reconcile immediately, then once per interval."""
    logger.info("Reconciliation worker starting — running initial check now")
    self._reconcile_all()
    while True:
        # Event.wait returns True when stop() sets the event (exit the loop);
        # False means the interval elapsed normally, so run another pass.
        stopped = self._stop_event.wait(timeout=self.interval_seconds)
        if stopped:
            break
        self._reconcile_all()
|
||||
|
||||
def _reconcile_all(self):
    """Run one reconciliation pass across every configured DA server.

    Polls each server for its active zones, updates the recorded master
    when a domain has migrated between servers, and queues deletes for
    domains whose recorded master no longer reports them.

    Safety: a server that could not be polled this pass contributes no
    data, and domains mastered on it are NOT treated as orphans — we
    never delete on uncertainty.
    """
    logger.info(
        f"[reconciler] Starting reconciliation pass across "
        f"{len(self.servers)} server(s)"
    )
    total_queued = 0
    # Build a map of all domains seen on all DA servers
    all_da_domains = {}  # domain -> hostname
    # Hostnames whose domain list was fetched successfully this pass.
    # BUGFIX: previously an unreachable server's domains were simply absent
    # from all_da_domains, so everything mastered there got queued for
    # deletion — the opposite of the documented "skip on uncertainty" rule.
    polled_ok = set()
    for server in self.servers:
        hostname = server.get("hostname")
        if not hostname:
            logger.warning("[reconciler] Server config missing hostname — skipping")
            continue
        try:
            da_domains = self._fetch_da_domains(
                hostname,
                server.get("port", 2222),
                server.get("username"),
                server.get("password"),
                server.get("ssl", True),
            )
            if da_domains is not None:
                polled_ok.add(hostname)
                for d in da_domains:
                    all_da_domains[d] = hostname
                logger.debug(
                    f"[reconciler] {hostname}: {len(da_domains)} active domain(s) in DA"
                )
        except Exception as e:
            logger.error(
                f"[reconciler] Unexpected error polling {hostname}: {e}"
            )

    # Now check local DB for all domains, update master if needed, and queue
    # deletes only for domains whose recorded master was successfully polled
    session = connect()
    all_local_domains = session.query(Domain).all()
    migrated = 0
    for record in all_local_domains:
        domain = record.domain
        recorded_master = record.hostname
        actual_master = all_da_domains.get(domain)
        if actual_master:
            if actual_master != recorded_master:
                logger.warning(
                    f"[reconciler] Domain '{domain}' migrated: recorded master '{recorded_master}' -> new master '{actual_master}'. Updating local DB."
                )
                record.hostname = actual_master
                migrated += 1
        elif recorded_master in polled_ok:
            # Master answered and no longer reports this domain — true orphan
            self.delete_queue.put({
                "domain": record.domain,
                "hostname": record.hostname,
                "username": record.username or "",
                "source": "reconciler",
            })
            logger.debug(
                f"[reconciler] Queued delete for orphan: {record.domain} (master: {recorded_master})"
            )
            total_queued += 1
    if migrated:
        session.commit()
        logger.info(f"[reconciler] {migrated} domain(s) migrated to new master and updated in DB.")
    logger.info(
        f"[reconciler] Reconciliation pass complete — "
        f"{total_queued} domain(s) queued for deletion"
    )
|
||||
|
||||
def _reconcile_server(self, server: dict) -> int:
    """Reconcile one DA server. Returns number of domains queued for delete."""
    # NOTE(review): appears to be a legacy single-server path — no caller is
    # visible in this module; _reconcile_all implements the multi-server pass.
    hostname = server["hostname"]
    port = server.get("port", 2222)
    username = server.get("username")
    password = server.get("password")
    use_ssl = server.get("ssl", True)

    logger.info(f"[reconciler] Polling {hostname}:{port}")

    da_domains = self._fetch_da_domains(
        hostname, port, username, password, use_ssl
    )
    if da_domains is None:
        # Fetch failed — never delete on uncertainty
        return 0

    logger.debug(
        f"[reconciler] {hostname}: {len(da_domains)} active domain(s) in DA"
    )

    # Only domains this module registered for this hostname are candidates
    session = connect()
    our_domains = session.query(Domain).filter_by(hostname=hostname).all()

    if not our_domains:
        logger.debug(
            f"[reconciler] {hostname}: no domains registered from this server"
        )
        return 0

    # A registered domain that DA no longer reports is an orphan
    orphans = [d for d in our_domains if d.domain not in da_domains]

    if not orphans:
        logger.info(
            f"[reconciler] {hostname}: all {len(our_domains)} registered "
            f"domain(s) confirmed active in DA"
        )
        return 0

    logger.warning(
        f"[reconciler] {hostname}: {len(orphans)} orphaned domain(s) "
        f"no longer in DA — queuing for deletion: "
        f"{[d.domain for d in orphans]}"
    )

    for record in orphans:
        # Push through the shared delete_queue so the normal delete path
        # (ownership checks, backend cleanup) is exercised
        self.delete_queue.put({
            "domain": record.domain,
            "hostname": record.hostname,
            "username": record.username or "",
            "source": "reconciler",
        })
        logger.debug(
            f"[reconciler] Queued delete for orphan: {record.domain}"
        )

    return len(orphans)
|
||||
|
||||
def _fetch_da_domains(
    self, hostname: str, port: int, username: str, password: str, use_ssl: bool, ipp: int = 1000
):
    """Fetch all domains from a DA server via CMD_DNS_ADMIN (JSON, paging supported).

    Authentication: tries HTTP Basic Auth first; if DA redirects (DA Evo
    rejects Basic Auth with a redirect to its login page), falls back to a
    CMD_LOGIN session cookie via _da_session_login and retries the same page.

    Returns a set of domain strings on success, or None on any failure.
    """
    scheme = "https" if use_ssl else "http"
    page = 1
    all_domains = set()
    # Start at 1; updated from the JSON 'info.total_pages' after each page
    total_pages = 1
    cookies = None  # set after a successful DA Evo session login

    try:
        while page <= total_pages:
            url = f"{scheme}://{hostname}:{port}/CMD_DNS_ADMIN?json=yes&page={page}&ipp={ipp}"
            req_kwargs = dict(
                timeout=30,
                verify=self.verify_ssl,
                # Redirects are handled manually below to detect DA Evo
                allow_redirects=False,
            )
            if cookies:
                req_kwargs["cookies"] = cookies
            else:
                req_kwargs["auth"] = (username, password)

            response = requests.get(url, **req_kwargs)

            if response.is_redirect or response.status_code in (301, 302, 303, 307, 308):
                if not cookies:
                    logger.debug(
                        f"[reconciler] {hostname}:{port} redirected Basic Auth "
                        f"(HTTP {response.status_code}) — attempting session login (DA Evo)"
                    )
                    cookies = self._da_session_login(scheme, hostname, port, username, password)
                    if cookies is None:
                        return None
                    continue  # retry this page with cookies
                else:
                    # Still redirected even with a session cookie — give up
                    logger.error(
                        f"[reconciler] {hostname}:{port} still redirecting after session login — "
                        f"check that '{username}' has admin-level access. Skipping."
                    )
                    return None

            response.raise_for_status()
            content_type = response.headers.get("Content-Type", "")
            if "text/html" in content_type:
                # An HTML body here is DA's login/error page, not API output
                logger.error(
                    f"[reconciler] {hostname}:{port} returned HTML instead of API response — "
                    f"check credentials and admin-level access. Skipping."
                )
                return None

            # Try JSON first
            try:
                data = response.json()
                # Domains are in keys '0', '1', ...
                for k, v in data.items():
                    if k.isdigit() and isinstance(v, dict) and "domain" in v:
                        all_domains.add(v["domain"].strip().lower())
                # Paging info
                info = data.get("info", {})
                total_pages = int(info.get("total_pages", 1))
                page += 1
                continue
            except Exception:
                # Fallback to legacy parser
                domains = self._parse_da_domain_list(response.text)
                all_domains.update(domains)
                break  # No paging in legacy mode

        return all_domains

    except requests.exceptions.SSLError as e:
        logger.error(
            f"[reconciler] SSL error connecting to {hostname}:{port} — {e}. "
            f"Set verify_ssl: false in reconciliation config if using self-signed certs."
        )
        return None
    except requests.exceptions.ConnectionError as e:
        logger.error(
            f"[reconciler] Cannot reach {hostname}:{port} — {e}. "
            f"Skipping this server."
        )
        return None
    except requests.exceptions.Timeout:
        logger.error(
            f"[reconciler] Timeout connecting to {hostname}:{port}. "
            f"Skipping this server."
        )
        return None
    except requests.exceptions.HTTPError as e:
        # 'response' is always bound here: HTTPError only comes from
        # raise_for_status() on an already-received response
        logger.error(
            f"[reconciler] HTTP {response.status_code} from {hostname}:{port} — {e}. "
            f"Skipping this server."
        )
        return None
    except Exception as e:
        logger.error(
            f"[reconciler] Unexpected error fetching from {hostname}: {e}"
        )
        return None
|
||||
|
||||
def _da_session_login(
    self, scheme: str, hostname: str, port: int, username: str, password: str
):
    """POST to CMD_LOGIN to obtain a DA Evo session cookie.

    Returns a RequestsCookieJar on success, or None on failure.
    """
    login_url = f"{scheme}://{hostname}:{port}/CMD_LOGIN"
    form = {
        "username": username,
        "password": password,
        "referer": "/CMD_DNS_ADMIN?json=yes&page=1&ipp=500",
    }
    try:
        response = requests.post(
            login_url,
            data=form,
            timeout=30,
            verify=self.verify_ssl,
            allow_redirects=False,
        )
        # A successful login always sets a session cookie; none means
        # the credentials were rejected
        if response.cookies:
            logger.debug(f"[reconciler] {hostname}:{port} session login successful (DA Evo)")
            return response.cookies
        logger.error(
            f"[reconciler] {hostname}:{port} CMD_LOGIN returned no session cookie — "
            f"check username/password."
        )
        return None
    except Exception as e:
        logger.error(
            f"[reconciler] {hostname}:{port} session login failed: {e}"
        )
        return None
|
||||
|
||||
@staticmethod
|
||||
def _parse_da_domain_list(body: str) -> set:
|
||||
"""Parse DA's CMD_API_SHOW_ALL_DOMAINS response.
|
||||
|
||||
DA returns URL-encoded key=value pairs, either on one line or newline-
|
||||
separated. The domain list uses the key 'list[]'.
|
||||
|
||||
Example response:
|
||||
list[]=example.com&list[]=example2.com
|
||||
"""
|
||||
# Normalise newline-separated responses to a single query string
|
||||
normalised = body.replace("\n", "&").strip("&")
|
||||
params = parse_qs(normalised)
|
||||
domains = params.get("list[]", [])
|
||||
return {d.strip().lower() for d in domains if d.strip()}
|
||||
|
||||
# Standalone smoke test: fetch and print the domain list from one DA server.
# Usage: python reconciler.py --hostname da1.example.com --username admin --password ...
if __name__ == "__main__":
    import argparse
    import sys
    from queue import Queue

    parser = argparse.ArgumentParser(description="Test DirectAdmin domain fetcher (JSON/paging)")
    parser.add_argument("--hostname", required=True, help="DirectAdmin server hostname")
    parser.add_argument("--port", type=int, default=2222, help="DirectAdmin port (default: 2222)")
    parser.add_argument("--username", required=True, help="DirectAdmin admin username")
    parser.add_argument("--password", required=True, help="DirectAdmin admin password")
    parser.add_argument("--ssl", action="store_true", help="Use HTTPS (default: True)")
    parser.add_argument("--no-ssl", dest="ssl", action="store_false", help="Use HTTP (not recommended)")
    parser.set_defaults(ssl=True)
    parser.add_argument("--verify-ssl", action="store_true", help="Verify SSL certs (default: True)")
    parser.add_argument("--no-verify-ssl", dest="verify_ssl", action="store_false", help="Don't verify SSL certs")
    parser.set_defaults(verify_ssl=True)
    parser.add_argument("--ipp", type=int, default=1000, help="Items per page (default: 1000)")
    parser.add_argument("--print-json", action="store_true", help="Print raw JSON response for first page")

    args = parser.parse_args()

    # Minimal config for testing
    config = {
        "enabled": True,
        "directadmin_servers": [
            {
                "hostname": args.hostname,
                "port": args.port,
                "username": args.username,
                "password": args.password,
                "ssl": args.ssl,
            }
        ],
        "verify_ssl": args.verify_ssl,
    }
    # Throwaway queue — nothing is deleted in this smoke test
    q = Queue()
    worker = ReconciliationWorker(q, config)
    server = config["directadmin_servers"][0]
    print(f"Fetching domains from {server['hostname']}:{server['port']} (ipp={args.ipp})...")
    # Directly call the fetch method for testing
    domains = worker._fetch_da_domains(
        server["hostname"],
        server.get("port", 2222),
        server.get("username"),
        server.get("password"),
        server.get("ssl", True),
        ipp=args.ipp
    )
    if domains is None:
        print("Failed to fetch domains.", file=sys.stderr)
        sys.exit(1)
    print(f"Fetched {len(domains)} domains:")
    for d in sorted(domains):
        print(d)

    if args.print_json:
        # Print the first page's raw JSON for inspection
        # NOTE(review): this re-fetch uses Basic Auth only — against a DA Evo
        # server that requires session login it may return the login redirect
        scheme = "https" if server.get("ssl", True) else "http"
        url = f"{scheme}://{server['hostname']}:{server.get('port', 2222)}/CMD_DNS_ADMIN?json=yes&page=1&ipp={args.ipp}"
        resp = requests.get(
            url,
            auth=(server.get("username"), server.get("password")),
            timeout=30,
            verify=args.verify_ssl,
            allow_redirects=False,
        )
        try:
            print("\nRaw JSON for first page:")
            print(resp.json())
        except Exception:
            print("(Could not parse JSON)")
|
||||
@@ -23,3 +23,28 @@ def put_zone_index(zone_name, host_name, user_name):
|
||||
domain = Domain(domain=zone_name, hostname=host_name, username=user_name)
|
||||
session.add(domain)
|
||||
session.commit()
|
||||
|
||||
|
||||
def get_domain_record(zone_name):
    """Return the Domain record registered for *zone_name*, or None if absent."""
    db = connect()
    query = db.query(Domain).filter_by(domain=zone_name)
    return query.first()
|
||||
|
||||
|
||||
def check_parent_domain_owner(zone_name):
    """Return True if the immediate parent domain of zone_name exists in the DB.

    The parent is derived by stripping the left-most label, e.g.
    "sub.example.com" -> "example.com"; a single-label name has no parent.
    """
    parent_domain = ".".join(zone_name.split(".")[1:])
    if not parent_domain:
        return False
    session = connect()
    # f-string for consistency with the rest of the module (was str.format)
    logger.debug(f"Checking if parent domain {parent_domain} exists in DB")
    # Query only the id column — pure existence check, no row hydration needed
    return bool(session.query(Domain.id).filter_by(domain=parent_domain).first())
|
||||
|
||||
|
||||
def get_parent_domain_record(zone_name):
    """Return the Domain record for the parent of zone_name, or None"""
    # Strip the left-most label: "sub.example.com" -> "example.com"
    labels = zone_name.split(".")
    parent_domain = ".".join(labels[1:])
    if not parent_domain:
        return None
    db = connect()
    return db.query(Domain).filter_by(domain=parent_domain).first()
|
||||
|
||||
@@ -52,6 +52,11 @@ def load_config() -> Vyper:
|
||||
v.set_default("datastore.port", 3306)
|
||||
v.set_default("datastore.db_location", "data/directdns.db")
|
||||
|
||||
# Reconciliation poller defaults
|
||||
v.set_default("reconciliation.enabled", False)
|
||||
v.set_default("reconciliation.interval_minutes", 60)
|
||||
v.set_default("reconciliation.verify_ssl", True)
|
||||
|
||||
# Read configuration
|
||||
if not v.read_in_config():
|
||||
logger.warning("No config file found, using defaults")
|
||||
|
||||
@@ -7,6 +7,25 @@ app:
|
||||
auth_username: directdnsonly
|
||||
auth_password: changeme # Override via DADNS_APP_AUTH_PASSWORD env var
|
||||
|
||||
# Reconciliation poller — queries each DA server and removes orphaned zones
|
||||
# Disabled by default. Only touches zones registered via DaDNS (in our DB).
|
||||
# If a DA server is unreachable, that server is skipped entirely.
|
||||
#reconciliation:
|
||||
# enabled: true
|
||||
# interval_minutes: 60
|
||||
# verify_ssl: true # set false for self-signed DA certs
|
||||
# directadmin_servers:
|
||||
# - hostname: da1.example.com
|
||||
# port: 2222
|
||||
# username: admin
|
||||
# password: secret
|
||||
# ssl: true
|
||||
# - hostname: da2.example.com
|
||||
# port: 2222
|
||||
# username: admin
|
||||
# password: secret
|
||||
# ssl: true
|
||||
|
||||
dns:
|
||||
default_backend: bind
|
||||
backends:
|
||||
|
||||
@@ -37,8 +37,11 @@ def main():
|
||||
logger.info("Database Connected!")
|
||||
|
||||
# Setup worker manager
|
||||
reconciliation_config = config.get("reconciliation") or {}
|
||||
worker_manager = WorkerManager(
|
||||
queue_path=config.get("queue_location"), backend_registry=registry
|
||||
queue_path=config.get("queue_location"),
|
||||
backend_registry=registry,
|
||||
reconciliation_config=reconciliation_config,
|
||||
)
|
||||
worker_manager.start()
|
||||
logger.info(
|
||||
|
||||
@@ -10,14 +10,18 @@ from app.utils import check_zone_exists, put_zone_index
|
||||
from app.utils.zone_parser import count_zone_records
|
||||
from directdnsonly.app.db.models import Domain
|
||||
from directdnsonly.app.db import connect
|
||||
from directdnsonly.app.reconciler import ReconciliationWorker
|
||||
|
||||
|
||||
class WorkerManager:
|
||||
def __init__(self, queue_path: str, backend_registry):
|
||||
def __init__(self, queue_path: str, backend_registry, reconciliation_config: dict = None):
|
||||
self.queue_path = queue_path
|
||||
self.backend_registry = backend_registry
|
||||
self._running = False
|
||||
self._thread = None
|
||||
self._save_thread = None
|
||||
self._delete_thread = None
|
||||
self._reconciler = None
|
||||
self._reconciliation_config = reconciliation_config or {}
|
||||
|
||||
# Initialize queues with error handling
|
||||
try:
|
||||
@@ -146,6 +150,113 @@ class WorkerManager:
|
||||
except Exception as e:
|
||||
logger.error(f"Error in {backend_name}: {str(e)}")
|
||||
|
||||
def _process_delete_queue(self):
    """Worker loop for processing zone deletion requests.

    Pops items from delete_queue (dicts with 'domain' and optional
    'hostname'), verifies the request against the DB record, removes the
    row, then removes the zone from every active backend. Runs until
    self._running is cleared.
    """
    logger.info("Delete queue worker started")
    session = connect()

    while self._running:
        try:
            # Short timeout so the loop re-checks self._running regularly
            item = self.delete_queue.get(block=True, timeout=5)
            domain = item.get("domain")
            hostname = item.get("hostname", "")

            logger.debug(f"Processing delete for {domain}")

            record = session.query(Domain).filter_by(domain=domain).first()
            if not record:
                logger.warning(
                    f"Domain {domain} not found in DB — skipping delete"
                )
                self.delete_queue.task_done()
                continue

            # Ownership check: only the server the zone was registered from
            # may delete it (reconciler items carry the recorded hostname)
            if record.hostname and record.hostname != hostname:
                logger.warning(
                    f"Hostname mismatch for {domain}: registered on "
                    f"{record.hostname}, delete requested from {hostname} — rejected"
                )
                self.delete_queue.task_done()
                continue
            if not record.hostname:
                logger.warning(
                    f"No origin hostname stored for {domain} — "
                    f"skipping ownership check, proceeding with delete"
                )

            # DB row goes first; backend cleanup follows below
            session.delete(record)
            session.commit()
            logger.info(f"Removed {domain} from database")

            # Snapshot of surviving zones — BIND needs it to rebuild named.conf
            remaining_domains = [d.domain for d in session.query(Domain).all()]

            backends = self.backend_registry.get_available_backends()
            if not backends:
                logger.warning(
                    f"No active backends — {domain} removed from DB only"
                )
            elif len(backends) > 1:
                self._process_backends_delete_parallel(
                    backends, domain, remaining_domains
                )
            else:
                for backend_name, backend in backends.items():
                    self._delete_single_backend(
                        backend_name, backend, domain, remaining_domains
                    )

            self.delete_queue.task_done()
            logger.success(f"Delete completed for {domain}")

        except Empty:
            # Queue poll timed out — loop back and re-check self._running
            continue
        except Exception as e:
            # Best-effort worker: log and back off briefly rather than die
            logger.error(f"Unexpected delete worker error: {e}")
            time.sleep(1)
|
||||
|
||||
def _delete_single_backend(self, backend_name, backend, domain, remaining_domains):
    """Delete a zone from a single backend"""
    try:
        deleted = backend.delete_zone(domain)
        if not deleted:
            logger.error(f"Failed to delete {domain} from {backend_name}")
            return
        logger.debug(f"Deleted {domain} from {backend_name}")
        # BIND needs its named.conf regenerated and a full reload;
        # other backends can reload just the affected zone
        if backend.get_name() == "bind":
            backend.update_named_conf(remaining_domains)
            backend.reload_zone()
        else:
            backend.reload_zone(zone_name=domain)
    except Exception as e:
        logger.error(f"Error deleting {domain} from {backend_name}: {e}")
|
||||
|
||||
def _process_backends_delete_parallel(self, backends, domain, remaining_domains):
    """Delete a zone from multiple backends in parallel"""
    started = time.monotonic()
    with ThreadPoolExecutor(
        max_workers=len(backends),
        thread_name_prefix="backend_del",
    ) as executor:
        # Submit one delete per backend, keeping the name for error reports
        pending = {}
        for backend_name, backend in backends.items():
            fut = executor.submit(
                self._delete_single_backend,
                backend_name, backend, domain, remaining_domains,
            )
            pending[fut] = backend_name
        for fut in as_completed(pending):
            backend_name = pending[fut]
            try:
                fut.result()
            except Exception as e:
                logger.error(
                    f"Unhandled error deleting from {backend_name}: {e}"
                )
    elapsed = (time.monotonic() - started) * 1000
    logger.debug(
        f"Parallel delete of {domain} across "
        f"{len(backends)} backends completed in {elapsed:.0f}ms"
    )
|
||||
|
||||
def _process_backends_parallel(self, backends, item, session):
|
||||
"""Process zone updates across multiple backends in parallel"""
|
||||
start_time = time.monotonic()
|
||||
@@ -260,17 +371,33 @@ class WorkerManager:
|
||||
return
|
||||
|
||||
self._running = True
|
||||
self._thread = threading.Thread(
|
||||
self._save_thread = threading.Thread(
|
||||
target=self._process_save_queue, daemon=True, name="save_queue_worker"
|
||||
)
|
||||
self._thread.start()
|
||||
logger.info(f"Started worker thread {self._thread.name}")
|
||||
self._delete_thread = threading.Thread(
|
||||
target=self._process_delete_queue, daemon=True, name="delete_queue_worker"
|
||||
)
|
||||
self._save_thread.start()
|
||||
self._delete_thread.start()
|
||||
logger.info(
|
||||
f"Started worker threads: {self._save_thread.name}, {self._delete_thread.name}"
|
||||
)
|
||||
|
||||
self._reconciler = ReconciliationWorker(
|
||||
delete_queue=self.delete_queue,
|
||||
reconciliation_config=self._reconciliation_config,
|
||||
)
|
||||
self._reconciler.start()
|
||||
|
||||
def stop(self):
|
||||
"""Stop background workers gracefully"""
|
||||
self._running = False
|
||||
if self._thread:
|
||||
self._thread.join(timeout=5)
|
||||
if self._reconciler:
|
||||
self._reconciler.stop()
|
||||
if self._save_thread:
|
||||
self._save_thread.join(timeout=5)
|
||||
if self._delete_thread:
|
||||
self._delete_thread.join(timeout=5)
|
||||
logger.info("Workers stopped")
|
||||
|
||||
def queue_status(self):
|
||||
@@ -278,5 +405,7 @@ class WorkerManager:
|
||||
return {
|
||||
"save_queue_size": self.save_queue.qsize(),
|
||||
"delete_queue_size": self.delete_queue.qsize(),
|
||||
"worker_alive": self._thread and self._thread.is_alive(),
|
||||
"save_worker_alive": self._save_thread and self._save_thread.is_alive(),
|
||||
"delete_worker_alive": self._delete_thread and self._delete_thread.is_alive(),
|
||||
"reconciler_alive": self._reconciler.is_alive if self._reconciler else False,
|
||||
}
|
||||
|
||||
192
poetry.lock
generated
192
poetry.lock
generated
@@ -86,6 +86,141 @@ d = ["aiohttp (>=3.10)"]
|
||||
jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
|
||||
uvloop = ["uvloop (>=0.15.2)"]
|
||||
|
||||
[[package]]
|
||||
name = "certifi"
|
||||
version = "2026.1.4"
|
||||
description = "Python package for providing Mozilla's CA Bundle."
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c"},
|
||||
{file = "certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "charset-normalizer"
|
||||
version = "3.4.4"
|
||||
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e824f1492727fa856dd6eda4f7cee25f8518a12f3c4a56a74e8095695089cf6d"},
|
||||
{file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4bd5d4137d500351a30687c2d3971758aac9a19208fc110ccb9d7188fbe709e8"},
|
||||
{file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:027f6de494925c0ab2a55eab46ae5129951638a49a34d87f4c3eda90f696b4ad"},
|
||||
{file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f820802628d2694cb7e56db99213f930856014862f3fd943d290ea8438d07ca8"},
|
||||
{file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:798d75d81754988d2565bff1b97ba5a44411867c0cf32b77a7e8f8d84796b10d"},
|
||||
{file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d1bb833febdff5c8927f922386db610b49db6e0d4f4ee29601d71e7c2694313"},
|
||||
{file = "charset_normalizer-3.4.4-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9cd98cdc06614a2f768d2b7286d66805f94c48cde050acdbbb7db2600ab3197e"},
|
||||
{file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:077fbb858e903c73f6c9db43374fd213b0b6a778106bc7032446a8e8b5b38b93"},
|
||||
{file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:244bfb999c71b35de57821b8ea746b24e863398194a4014e4c76adc2bbdfeff0"},
|
||||
{file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:64b55f9dce520635f018f907ff1b0df1fdc31f2795a922fb49dd14fbcdf48c84"},
|
||||
{file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:faa3a41b2b66b6e50f84ae4a68c64fcd0c44355741c6374813a800cd6695db9e"},
|
||||
{file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6515f3182dbe4ea06ced2d9e8666d97b46ef4c75e326b79bb624110f122551db"},
|
||||
{file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc00f04ed596e9dc0da42ed17ac5e596c6ccba999ba6bd92b0e0aef2f170f2d6"},
|
||||
{file = "charset_normalizer-3.4.4-cp310-cp310-win32.whl", hash = "sha256:f34be2938726fc13801220747472850852fe6b1ea75869a048d6f896838c896f"},
|
||||
{file = "charset_normalizer-3.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:a61900df84c667873b292c3de315a786dd8dac506704dea57bc957bd31e22c7d"},
|
||||
{file = "charset_normalizer-3.4.4-cp310-cp310-win_arm64.whl", hash = "sha256:cead0978fc57397645f12578bfd2d5ea9138ea0fac82b2f63f7f7c6877986a69"},
|
||||
{file = "charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8"},
|
||||
{file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0"},
|
||||
{file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3"},
|
||||
{file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc"},
|
||||
{file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897"},
|
||||
{file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381"},
|
||||
{file = "charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815"},
|
||||
{file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0"},
|
||||
{file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161"},
|
||||
{file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4"},
|
||||
{file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89"},
|
||||
{file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569"},
|
||||
{file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224"},
|
||||
{file = "charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a"},
|
||||
{file = "charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016"},
|
||||
{file = "charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1"},
|
||||
{file = "charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394"},
|
||||
{file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25"},
|
||||
{file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef"},
|
||||
{file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d"},
|
||||
{file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8"},
|
||||
{file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86"},
|
||||
{file = "charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a"},
|
||||
{file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f"},
|
||||
{file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc"},
|
||||
{file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf"},
|
||||
{file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15"},
|
||||
{file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9"},
|
||||
{file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0"},
|
||||
{file = "charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26"},
|
||||
{file = "charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525"},
|
||||
{file = "charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3"},
|
||||
{file = "charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794"},
|
||||
{file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed"},
|
||||
{file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72"},
|
||||
{file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328"},
|
||||
{file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede"},
|
||||
{file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894"},
|
||||
{file = "charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1"},
|
||||
{file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490"},
|
||||
{file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44"},
|
||||
{file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133"},
|
||||
{file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3"},
|
||||
{file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e"},
|
||||
{file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc"},
|
||||
{file = "charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac"},
|
||||
{file = "charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14"},
|
||||
{file = "charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2"},
|
||||
{file = "charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd"},
|
||||
{file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb"},
|
||||
{file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e"},
|
||||
{file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14"},
|
||||
{file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191"},
|
||||
{file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838"},
|
||||
{file = "charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6"},
|
||||
{file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e"},
|
||||
{file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c"},
|
||||
{file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090"},
|
||||
{file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152"},
|
||||
{file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828"},
|
||||
{file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec"},
|
||||
{file = "charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9"},
|
||||
{file = "charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c"},
|
||||
{file = "charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2"},
|
||||
{file = "charset_normalizer-3.4.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ce8a0633f41a967713a59c4139d29110c07e826d131a316b50ce11b1d79b4f84"},
|
||||
{file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eaabd426fe94daf8fd157c32e571c85cb12e66692f15516a83a03264b08d06c3"},
|
||||
{file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c4ef880e27901b6cc782f1b95f82da9313c0eb95c3af699103088fa0ac3ce9ac"},
|
||||
{file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2aaba3b0819274cc41757a1da876f810a3e4d7b6eb25699253a4effef9e8e4af"},
|
||||
{file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:778d2e08eda00f4256d7f672ca9fef386071c9202f5e4607920b86d7803387f2"},
|
||||
{file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f155a433c2ec037d4e8df17d18922c3a0d9b3232a396690f17175d2946f0218d"},
|
||||
{file = "charset_normalizer-3.4.4-cp38-cp38-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a8bf8d0f749c5757af2142fe7903a9df1d2e8aa3841559b2bad34b08d0e2bcf3"},
|
||||
{file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:194f08cbb32dc406d6e1aea671a68be0823673db2832b38405deba2fb0d88f63"},
|
||||
{file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:6aee717dcfead04c6eb1ce3bd29ac1e22663cdea57f943c87d1eab9a025438d7"},
|
||||
{file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:cd4b7ca9984e5e7985c12bc60a6f173f3c958eae74f3ef6624bb6b26e2abbae4"},
|
||||
{file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_riscv64.whl", hash = "sha256:b7cf1017d601aa35e6bb650b6ad28652c9cd78ee6caff19f3c28d03e1c80acbf"},
|
||||
{file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:e912091979546adf63357d7e2ccff9b44f026c075aeaf25a52d0e95ad2281074"},
|
||||
{file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5cb4d72eea50c8868f5288b7f7f33ed276118325c1dfd3957089f6b519e1382a"},
|
||||
{file = "charset_normalizer-3.4.4-cp38-cp38-win32.whl", hash = "sha256:837c2ce8c5a65a2035be9b3569c684358dfbf109fd3b6969630a87535495ceaa"},
|
||||
{file = "charset_normalizer-3.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:44c2a8734b333e0578090c4cd6b16f275e07aa6614ca8715e6c038e865e70576"},
|
||||
{file = "charset_normalizer-3.4.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a9768c477b9d7bd54bc0c86dbaebdec6f03306675526c9927c0e8a04e8f94af9"},
|
||||
{file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bee1e43c28aa63cb16e5c14e582580546b08e535299b8b6158a7c9c768a1f3d"},
|
||||
{file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:fd44c878ea55ba351104cb93cc85e74916eb8fa440ca7903e57575e97394f608"},
|
||||
{file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0f04b14ffe5fdc8c4933862d8306109a2c51e0704acfa35d51598eb45a1e89fc"},
|
||||
{file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:cd09d08005f958f370f539f186d10aec3377d55b9eeb0d796025d4886119d76e"},
|
||||
{file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4fe7859a4e3e8457458e2ff592f15ccb02f3da787fcd31e0183879c3ad4692a1"},
|
||||
{file = "charset_normalizer-3.4.4-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fa09f53c465e532f4d3db095e0c55b615f010ad81803d383195b6b5ca6cbf5f3"},
|
||||
{file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7fa17817dc5625de8a027cb8b26d9fefa3ea28c8253929b8d6649e705d2835b6"},
|
||||
{file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:5947809c8a2417be3267efc979c47d76a079758166f7d43ef5ae8e9f92751f88"},
|
||||
{file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:4902828217069c3c5c71094537a8e623f5d097858ac6ca8252f7b4d10b7560f1"},
|
||||
{file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:7c308f7e26e4363d79df40ca5b2be1c6ba9f02bdbccfed5abddb7859a6ce72cf"},
|
||||
{file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2c9d3c380143a1fedbff95a312aa798578371eb29da42106a29019368a475318"},
|
||||
{file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cb01158d8b88ee68f15949894ccc6712278243d95f344770fa7593fa2d94410c"},
|
||||
{file = "charset_normalizer-3.4.4-cp39-cp39-win32.whl", hash = "sha256:2677acec1a2f8ef614c6888b5b4ae4060cc184174a938ed4e8ef690e15d3e505"},
|
||||
{file = "charset_normalizer-3.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:f8e160feb2aed042cd657a72acc0b481212ed28b1b9a95c0cee1621b524e1966"},
|
||||
{file = "charset_normalizer-3.4.4-cp39-cp39-win_arm64.whl", hash = "sha256:b5d84d37db046c5ca74ee7bb47dd6cbc13f80665fdde3e8040bdd3fb015ecb50"},
|
||||
{file = "charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f"},
|
||||
{file = "charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cheroot"
|
||||
version = "10.0.1"
|
||||
@@ -346,6 +481,21 @@ files = [
|
||||
docs = ["Sphinx", "furo"]
|
||||
test = ["objgraph", "psutil"]
|
||||
|
||||
[[package]]
|
||||
name = "idna"
|
||||
version = "3.11"
|
||||
description = "Internationalized Domain Names in Applications (IDNA)"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea"},
|
||||
{file = "idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"]
|
||||
|
||||
[[package]]
|
||||
name = "iniconfig"
|
||||
version = "2.1.0"
|
||||
@@ -825,6 +975,28 @@ files = [
|
||||
{file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "requests"
|
||||
version = "2.32.5"
|
||||
description = "Python HTTP for Humans."
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6"},
|
||||
{file = "requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
certifi = ">=2017.4.17"
|
||||
charset_normalizer = ">=2,<4"
|
||||
idna = ">=2.5,<4"
|
||||
urllib3 = ">=1.21.1,<3"
|
||||
|
||||
[package.extras]
|
||||
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
|
||||
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
|
||||
|
||||
[[package]]
|
||||
name = "setuptools"
|
||||
version = "80.7.1"
|
||||
@@ -972,6 +1144,24 @@ files = [
|
||||
{file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "urllib3"
|
||||
version = "2.6.3"
|
||||
description = "HTTP library with thread-safe connection pooling, file post, and more."
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4"},
|
||||
{file = "urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
brotli = ["brotli (>=1.2.0) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=1.2.0.0) ; platform_python_implementation != \"CPython\""]
|
||||
h2 = ["h2 (>=4,<5)"]
|
||||
socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
|
||||
zstd = ["backports-zstd (>=1.0.0) ; python_version < \"3.14\""]
|
||||
|
||||
[[package]]
|
||||
name = "vyper-config"
|
||||
version = "1.2.1"
|
||||
@@ -1070,4 +1260,4 @@ test = ["zope.testing"]
|
||||
[metadata]
|
||||
lock-version = "2.1"
|
||||
python-versions = ">=3.11,<3.14"
|
||||
content-hash = "a106c5dc27a2dbd11f79134950afb7d71c628e836b9a8bf2d78aa2cc6eb0626f"
|
||||
content-hash = "2728efc224c06cd502a28a74082eaf035b6bfea1395c803d8fa8f50148fd1222"
|
||||
|
||||
@@ -17,6 +17,7 @@ dependencies = [
|
||||
"pymysql (>=1.1.1,<2.0.0)",
|
||||
"dnspython (>=2.7.0,<3.0.0)",
|
||||
"pyyaml (>=6.0.2,<7.0.0)",
|
||||
"requests (>=2.32.0,<3.0.0)",
|
||||
]
|
||||
|
||||
[tool.poetry]
|
||||
|
||||
Reference in New Issue
Block a user