diff --git a/.gitignore b/.gitignore
index af48204..c09c4f7 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,4 +3,25 @@ venv/
.venv
.idea
build
-!build/.gitkeep
\ No newline at end of file
+!build/.gitkeep
+**/__pycache__/
+*.pyc
+*.pyo
+*.pyd
+*.egg-info
+*.egg
+*.log
+*.DS_Store
+*.swp
+*.swo
+*.bak
+*.tmp
+*.orig
+*.coverage
+*.cover
+*.tox
+*.dist-info
+*.egg-info
+*.mypy_cache
+*.pytest_cache
+/data/*
diff --git a/.python-version b/.python-version
new file mode 100644
index 0000000..3b564fa
--- /dev/null
+++ b/.python-version
@@ -0,0 +1 @@
+3.11.12
diff --git a/Dockerfile b/Dockerfile
index ca74c07..c5c1428 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,6 +1,6 @@
FROM pypy:slim-buster
-RUN mkdir -p /opt/apikeyhandler/config
+RUN mkdir -p /opt/apikeyhandler/config
VOLUME /opt/apikeyhandler/config
COPY ./src/ /opt/apikeyhandler
diff --git a/Dockerfile.deepseek b/Dockerfile.deepseek
new file mode 100644
index 0000000..e965eca
--- /dev/null
+++ b/Dockerfile.deepseek
@@ -0,0 +1,53 @@
+FROM python:3.11.12-slim
+
+# Install system dependencies
+RUN apt-get update && apt-get install -y \
+ bind9 \
+ bind9utils \
+ dnsutils \
+ gcc \
+ python3-dev \
+ default-libmysqlclient-dev \
+ && rm -rf /var/lib/apt/lists/*
+
+# Configure BIND
+RUN mkdir -p /etc/named/zones && \
+ chown -R bind:bind /etc/named && \
+ chmod 755 /etc/named/zones
+
+COPY docker/named.conf.local /etc/bind/
+COPY docker/named.conf.options /etc/bind/
+RUN chown root:bind /etc/bind/named.conf.*
+
+# Install Python dependencies
+WORKDIR /app
+COPY pyproject.toml poetry.lock README.md ./
+
+# Install specific Poetry version that matches your lock file
+RUN pip install "poetry==2.1.2" # Adjust version to match your lock file
+
+# Copy application files
+COPY directdnsonly ./directdnsonly
+COPY config ./config
+COPY schema ./schema
+
+RUN poetry config virtualenvs.create false && \
+ poetry install
+
+
+
+# Create data directories
+RUN mkdir -p /app/data/queues && \
+ mkdir -p /app/data/zones && \
+ mkdir -p /app/logs && \
+ chmod -R 755 /app/data
+
+# Configure BIND zone directory to match app config
+#RUN ln -s /app/data/zones /etc/named/zones/dadns
+
+# Start script
+COPY docker/entrypoint.sh /entrypoint.sh
+RUN chmod +x /entrypoint.sh
+
+EXPOSE 2222 53/tcp 53/udp
+CMD ["/entrypoint.sh"]
\ No newline at end of file
diff --git a/Dockerfile.scratch b/Dockerfile.scratch
index 3c2c4a2..afca59f 100644
--- a/Dockerfile.scratch
+++ b/Dockerfile.scratch
@@ -1,4 +1,4 @@
-FROM python:3.7.9 as builder
+FROM python:3.8 AS builder
# Allow Passing Version from CI
ARG VERSION
ENV LC_ALL=en_NZ.utf8
@@ -6,7 +6,7 @@ ENV LANG=en_NZ.utf8
ENV APP_NAME="directdnsonly"
RUN mkdir -p /tmp/build && apt-get update && \
- apt-get install -y libgcc1-dbg libssl-dev
+ apt-get install -y libssl-dev python3-cryptography
COPY src/ /tmp/build/
COPY requirements.txt /tmp/build
@@ -29,6 +29,7 @@ RUN pip3 install -r requirements.txt && \
--hidden-import=cheroot \
--hidden-import=cheroot.ssl.pyopenssl \
--hidden-import=cheroot.ssl.builtin \
+ --hidden-import=lib \
--noconfirm --onefile ${APP_NAME}.py && \
cd /tmp/build/dist && \
staticx ${APP_NAME} ./${APP_NAME}_static
@@ -39,10 +40,8 @@ RUN mkdir -p /tmp/approot && \
mkdir -p /tmp/approot/etc && \
mkdir -p /tmp/approot/tmp && \
mkdir -p /tmp/approot/data && \
- mkdir -p /tmp/approot/lib/x86_64-linux-gnu && \
cp /tmp/build/config/app.yml /tmp/approot/app/config/app.yml && \
- cp /tmp/build/dist/${APP_NAME}_static /tmp/approot/app/${APP_NAME} && \
- cp /usr/lib/gcc/x86_64-linux-gnu/8/libgcc_s.so.1 /tmp/approot/lib/x86_64-linux-gnu/libgcc_s.so.1
+ cp /tmp/build/dist/${APP_NAME}_static /tmp/approot/app/${APP_NAME}
FROM scratch
COPY --from=builder /tmp/approot /
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..75cdebd
--- /dev/null
+++ b/README.md
@@ -0,0 +1,40 @@
+# DaDNS - DNS Management System
+
+## Features
+- Multi-backend DNS management (BIND, CoreDNS MySQL)
+- Atomic zone updates
+- Thread-safe operations
+- Loguru-based logging
+
+## Installation
+```bash
+ poetry install
+ poetry run dadns
+```
+
+## Configuration
+
+Edit `config/app.yml` for backend settings.
+
+### Config Files
+#### `config/app.yml`
+```yaml
+timezone: Pacific/Auckland
+log_level: INFO
+queue_location: ./data/queues
+
+dns:
+ default_backend: bind
+ backends:
+ bind:
+ enabled: true
+ zones_dir: ./data/zones
+ named_conf: ./data/named.conf.include
+
+ coredns_mysql:
+ enabled: true
+ host: "127.0.0.1"
+ port: 3306
+ database: "coredns"
+ username: "coredns"
+ password: "password"
\ No newline at end of file
diff --git a/config.json b/config.json
new file mode 100644
index 0000000..9e26dfe
--- /dev/null
+++ b/config.json
@@ -0,0 +1 @@
+{}
\ No newline at end of file
diff --git a/config/app.yml b/config/app.yml
new file mode 100644
index 0000000..f581001
--- /dev/null
+++ b/config/app.yml
@@ -0,0 +1,29 @@
+---
+timezone: Pacific/Auckland
+log_level: INFO
+queue_location: ./data/queues
+
+dns:
+ # default_backend: coredns_mysql
+ backends:
+ bind_backend:
+ type: bind
+ enabled: false
+ zones_dir: /etc/named/zones/dadns
+ named_conf: /etc/bind/named.conf.local
+ coredns_primary:
+ enabled: true
+ host: "mysql" # Matches Docker service name
+ port: 3306
+ database: "coredns"
+ username: "coredns"
+ password: "coredns123"
+ table_name: "records"
+ coredns_secondary:
+ enabled: false
+ host: "mysql" # Matches Docker service name
+ port: 3306
+ database: "coredns"
+ username: "coredns"
+ password: "coredns123"
+ table_name: "records"
\ No newline at end of file
diff --git a/directdnsonly/__init__.py b/directdnsonly/__init__.py
new file mode 100644
index 0000000..8074dd0
--- /dev/null
+++ b/directdnsonly/__init__.py
@@ -0,0 +1 @@
+# Package initialization
diff --git a/directdnsonly/app/__init__.py b/directdnsonly/app/__init__.py
new file mode 100644
index 0000000..ef35a8b
--- /dev/null
+++ b/directdnsonly/app/__init__.py
@@ -0,0 +1,18 @@
+from loguru import logger
+import sys
+from directdnsonly.config import config
+
+
+def configure_logging():
+ logger.remove()
+ logger.add(
+ sys.stderr,
+ level=config.get("log_level"),
+ format="{time:YYYY-MM-DD HH:mm:ss} | {level: <8} | {name}:{function}:{line} - {message}",
+ )
+ logger.add(
+ "logs/directdnsonly_{time}.log",
+ rotation="10 MB",
+ retention="30 days",
+ level="DEBUG",
+ )
diff --git a/directdnsonly/app/api/__init__.py b/directdnsonly/app/api/__init__.py
new file mode 100644
index 0000000..8074dd0
--- /dev/null
+++ b/directdnsonly/app/api/__init__.py
@@ -0,0 +1 @@
+# Package initialization
diff --git a/directdnsonly/app/api/admin.py b/directdnsonly/app/api/admin.py
new file mode 100644
index 0000000..c154b8b
--- /dev/null
+++ b/directdnsonly/app/api/admin.py
@@ -0,0 +1,134 @@
+import cherrypy
+from urllib.parse import urlencode, parse_qs
+from loguru import logger
+from directdnsonly.app.utils.zone_parser import validate_and_normalize_zone
+
+
+class DNSAdminAPI:
+ def __init__(self, save_queue, delete_queue, backend_registry):
+ self.save_queue = save_queue
+ self.delete_queue = delete_queue
+ self.backend_registry = backend_registry
+
+ @cherrypy.expose
+ def index(self):
+ return "DNS Admin API - Available endpoints: /CMD_API_DNS_ADMIN"
+
+ @cherrypy.expose
+ def CMD_API_DNS_ADMIN(self, **params):
+ """Handle both DirectAdmin-style API calls and raw zone file uploads"""
+ try:
+ if cherrypy.request.method != "POST":
+ cherrypy.response.status = 405
+ return urlencode({"error": 1, "text": "Method not allowed"})
+
+ # Parse parameters from both query string and body
+ body_params = {}
+ if cherrypy.request.body:
+ content_type = cherrypy.request.headers.get("Content-Type", "")
+
+ if "application/x-www-form-urlencoded" in content_type:
+ raw_body = cherrypy.request.body.read()
+ if raw_body:
+ body_params = parse_qs(raw_body.decode("utf-8"))
+ body_params = {
+ k: v[0] if len(v) == 1 else v
+ for k, v in body_params.items()
+ }
+ elif "text/plain" in content_type:
+ body_params = {
+ "zone_file": cherrypy.request.body.read().decode("utf-8")
+ }
+
+ # Combine parameters (body overrides query)
+ all_params = {**params, **body_params}
+ logger.debug(f"Request parameters: {all_params}")
+
+ if "zone_file" not in all_params:
+ logger.debug(
+ "No zone file provided. Maybe in body as DirectAdmin does?"
+ )
+ # Grab from body
+ all_params["zone_file"] = str(cherrypy.request.body.read(), "utf-8")
+ logger.debug("Read zone file from body :)")
+
+ # Required parameters
+ action = all_params.get("action")
+ domain = all_params.get("domain")
+
+ if not action:
+ # DirectAdmin sends an initial request without an action
+ # parameter as a connectivity check — respond with success
+ logger.debug("Received request with no action — connectivity check")
+ return urlencode({"error": 0, "text": "OK"})
+ if not domain:
+ raise ValueError("Missing 'domain' parameter")
+
+ # Handle different actions
+ if action == "rawsave":
+ return self._handle_rawsave(domain, all_params)
+ elif action == "delete":
+ return self._handle_delete(domain, all_params)
+ else:
+ raise ValueError(f"Unsupported action: {action}")
+
+ except Exception as e:
+ logger.error(f"API error: {str(e)}")
+ cherrypy.response.status = 400
+ return urlencode({"error": 1, "text": str(e)})
+
+ def _handle_rawsave(self, domain: str, params: dict):
+ """Process zone file saves"""
+ zone_data = params.get("zone_file")
+ if not zone_data:
+ raise ValueError("Missing zone file content")
+
+ normalized_zone = validate_and_normalize_zone(zone_data, domain)
+ logger.info(f"Validated zone for {domain}")
+
+ self.save_queue.put(
+ {
+ "domain": domain,
+ "zone_file": normalized_zone,
+ "hostname": params.get("hostname", ""),
+ "username": params.get("username", ""),
+ "client_ip": cherrypy.request.remote.ip,
+ }
+ )
+
+ logger.success(f"Queued zone update for {domain}")
+ return urlencode({"error": 0})
+
+ def _handle_delete(self, domain: str, params: dict):
+ """Process zone deletions"""
+ self.delete_queue.put(
+ {
+ "domain": domain,
+ "hostname": params.get("hostname", ""),
+ "username": params.get("username", ""),
+ "client_ip": cherrypy.request.remote.ip,
+ }
+ )
+
+ logger.success(f"Queued deletion for {domain}")
+ return urlencode({"error": 0})
+
+ @cherrypy.expose
+ def queue_status(self):
+ """Debug endpoint for queue monitoring"""
+ return {
+ "save_queue_size": self.save_queue.qsize(),
+ "delete_queue_size": self.delete_queue.qsize(),
+ "last_save_item": self._get_last_item(self.save_queue),
+ "last_delete_item": self._get_last_item(self.delete_queue),
+ }
+
+ @staticmethod
+ def _get_last_item(queue):
+ """Helper to safely get last queue item"""
+ try:
+ if hasattr(queue, "last_item"):
+ return queue.last_item
+ return "Last item tracking not available"
+ except Exception:
+ return "Error retrieving last item"
diff --git a/directdnsonly/app/api/health.py b/directdnsonly/app/api/health.py
new file mode 100644
index 0000000..cf7d2ea
--- /dev/null
+++ b/directdnsonly/app/api/health.py
@@ -0,0 +1,24 @@
+import cherrypy
+from loguru import logger
+
+
+class HealthAPI:
+ def __init__(self, backend_registry):
+ self.registry = backend_registry
+
+ @cherrypy.expose
+ def health(self):
+ status = {"status": "OK", "backends": []}
+
+ for name, backend in self.registry.get_available_backends().items():
+ status["backends"].append(
+ {
+ "name": name,
+ "status": (
+                    "active" if backend.zone_exists("test") else "unavailable"
+ ),
+ }
+ )
+
+ logger.debug("Health check performed")
+ return status
diff --git a/directdnsonly/app/backends/__init__.py b/directdnsonly/app/backends/__init__.py
new file mode 100644
index 0000000..0a70700
--- /dev/null
+++ b/directdnsonly/app/backends/__init__.py
@@ -0,0 +1,89 @@
+from typing import Dict, Type, Optional
+from .base import DNSBackend
+from .bind import BINDBackend
+from .coredns_mysql import CoreDNSMySQLBackend
+from directdnsonly.config import config
+from loguru import logger
+
+
+class BackendRegistry:
+ def __init__(self):
+ self._backend_types = {
+ "bind": BINDBackend,
+ "coredns_mysql": CoreDNSMySQLBackend,
+ }
+ self._backend_instances: Dict[str, DNSBackend] = {}
+ self._initialized = False
+
+ def _initialize_backends(self):
+ """Initialize and cache all enabled backend instances"""
+ if self._initialized:
+ return
+
+ try:
+ logger.debug("Attempting to load backend configurations")
+ backend_configs = config.get("dns")
+ if not backend_configs:
+ logger.warning("No 'dns' configuration found")
+ self._initialized = True
+ return
+
+ backend_configs = backend_configs.get("backends")
+ if not backend_configs:
+ logger.warning("No 'dns.backends' configuration found")
+ self._initialized = True
+ return
+
+ logger.debug(f"Found backend configs: {backend_configs}")
+
+ for instance_name, instance_config in backend_configs.items():
+ logger.debug(f"Processing backend instance: {instance_name}")
+ backend_type = instance_config.get("type")
+
+ if not backend_type:
+ logger.warning(
+ f"No type specified for backend instance: {instance_name}"
+ )
+ continue
+
+ if backend_type not in self._backend_types:
+ logger.warning(
+ f"Unknown backend type '{backend_type}' for instance: {instance_name}"
+ )
+ continue
+
+ backend_class = self._backend_types[backend_type]
+ if not backend_class.is_available():
+ logger.warning(
+ f"Backend {backend_type} is not available for instance: {instance_name}"
+ )
+ continue
+
+ enabled = instance_config.get("enabled", False)
+ if not enabled:
+ logger.debug(f"Backend instance {instance_name} is disabled")
+ continue
+
+ logger.debug(
+ f"Initializing backend instance {instance_name} of type {backend_type}"
+ )
+ try:
+ backend = backend_class(instance_config)
+ self._backend_instances[instance_name] = backend
+ logger.info(
+ f"Successfully initialized backend instance: {instance_name}"
+ )
+ except Exception as e:
+ logger.error(
+ f"Failed to initialize backend instance {instance_name}: {e}"
+ )
+
+ except Exception as e:
+ logger.error(f"Error loading backend configurations: {e}")
+
+ self._initialized = True
+
+ def get_available_backends(self) -> Dict[str, DNSBackend]:
+ """Return cached backend instances, initializing on first call"""
+ self._initialize_backends()
+ return self._backend_instances
diff --git a/directdnsonly/app/backends/base.py b/directdnsonly/app/backends/base.py
new file mode 100644
index 0000000..90c5e1b
--- /dev/null
+++ b/directdnsonly/app/backends/base.py
@@ -0,0 +1,75 @@
+from abc import ABC, abstractmethod
+from typing import List, Optional, Dict, Any, Tuple
+
+
+class DNSBackend(ABC):
+ def __init__(self, config: Dict[str, Any]):
+ self.config = config
+ self.instance_name = config.get("instance_name", self.get_name())
+
+ @classmethod
+ @abstractmethod
+ def get_name(cls) -> str:
+ """Return the backend type name"""
+ pass
+
+ @property
+ def instance_id(self) -> str:
+ """Return the unique instance identifier"""
+ return self.instance_name
+
+ @classmethod
+ @abstractmethod
+ def is_available(cls) -> bool:
+ pass
+
+ @abstractmethod
+ def write_zone(self, zone_name: str, zone_data: str) -> bool:
+ pass
+
+ @abstractmethod
+ def delete_zone(self, zone_name: str) -> bool:
+ pass
+
+ @abstractmethod
+ def reload_zone(self, zone_name: Optional[str] = None) -> bool:
+ pass
+
+ @abstractmethod
+ def zone_exists(self, zone_name: str) -> bool:
+ pass
+
+ def verify_zone_record_count(
+ self, zone_name: str, expected_count: int
+ ) -> Tuple[bool, int]:
+ """Verify the record count in this backend matches the expected count
+ from the source zone file.
+
+ Args:
+ zone_name: The zone to verify
+ expected_count: The number of records parsed from the source zone
+
+ Returns:
+ Tuple of (matches: bool, actual_count: int)
+ """
+ raise NotImplementedError(
+ f"Backend {self.get_name()} does not implement record count verification"
+ )
+
+ def reconcile_zone_records(
+ self, zone_name: str, zone_data: str
+ ) -> Tuple[bool, int]:
+ """Reconcile backend records against the authoritative BIND zone from
+ DirectAdmin. Any records in the backend that are not present in the
+ source zone will be removed.
+
+ Args:
+ zone_name: The zone to reconcile
+ zone_data: The raw BIND zone file content (authoritative source)
+
+ Returns:
+ Tuple of (success: bool, records_removed: int)
+ """
+ raise NotImplementedError(
+ f"Backend {self.get_name()} does not implement zone reconciliation"
+ )
diff --git a/directdnsonly/app/backends/bind.py b/directdnsonly/app/backends/bind.py
new file mode 100644
index 0000000..cd43446
--- /dev/null
+++ b/directdnsonly/app/backends/bind.py
@@ -0,0 +1,124 @@
+import os
+import subprocess
+from loguru import logger
+from pathlib import Path
+from typing import Dict, List, Optional
+from .base import DNSBackend
+
+
+class BINDBackend(DNSBackend):
+ @classmethod
+ def get_name(cls) -> str:
+ return "bind"
+
+ @classmethod
+ def is_available(cls) -> bool:
+ try:
+ result = subprocess.run(["named", "-v"], capture_output=True, text=True)
+ if result.returncode == 0:
+ logger.info(f"BIND available: {result.stdout.splitlines()[0]}")
+ return True
+ return False
+ except FileNotFoundError:
+ logger.warning("BIND/named not found in PATH")
+ return False
+
+ def __init__(self, config: Dict):
+ self.zones_dir = Path(config["zones_dir"])
+ self.named_conf = Path(config["named_conf"])
+
+ # Safe directory creation handling
+ try:
+ # Check if it's a symlink first
+ if self.zones_dir.is_symlink():
+ logger.debug(f"{self.zones_dir} is already a symlink")
+ elif not self.zones_dir.exists():
+ self.zones_dir.mkdir(parents=True, mode=0o755)
+ logger.debug(f"Created zones directory: {self.zones_dir}")
+ else:
+ logger.debug(f"Directory already exists: {self.zones_dir}")
+
+ # Ensure proper permissions
+ os.chmod(self.zones_dir, 0o755)
+ logger.debug(f"Using zones directory: {self.zones_dir}")
+
+ except FileExistsError:
+ logger.debug(f"Directory already exists (safe to ignore): {self.zones_dir}")
+ except Exception as e:
+ logger.error(f"Failed to setup zones directory: {e}")
+ raise
+
+ # Verify named.conf exists
+ if not self.named_conf.exists():
+ logger.warning(f"named.conf not found at {self.named_conf}")
+ self.named_conf.touch()
+ logger.info(f"Created empty named.conf at {self.named_conf}")
+
+ logger.success(f"BIND backend initialized for {self.zones_dir}")
+
+ def write_zone(self, zone_name: str, zone_data: str) -> bool:
+ zone_file = self.zones_dir / f"{zone_name}.db"
+ try:
+ with open(zone_file, "w") as f:
+ f.write(zone_data)
+ logger.debug(f"Wrote zone file: {zone_file}")
+ return True
+ except IOError as e:
+ logger.error(f"Failed to write zone file {zone_file}: {e}")
+ return False
+
+ def delete_zone(self, zone_name: str) -> bool:
+ zone_file = self.zones_dir / f"{zone_name}.db"
+ try:
+ if zone_file.exists():
+ zone_file.unlink()
+ logger.debug(f"Deleted zone file: {zone_file}")
+ return True
+ logger.warning(f"Zone file not found: {zone_file}")
+ return False
+ except IOError as e:
+ logger.error(f"Failed to delete zone file {zone_file}: {e}")
+ return False
+
+ def reload_zone(self, zone_name: Optional[str] = None) -> bool:
+ try:
+ if zone_name:
+ cmd = ["rndc", "reload", zone_name]
+ logger.debug(f"Reloading single zone: {zone_name}")
+ else:
+ cmd = ["rndc", "reload"]
+ logger.debug("Reloading all zones")
+
+ result = subprocess.run(
+ cmd,
+ check=True,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ text=True,
+ )
+ logger.debug(f"BIND reload successful: {result.stdout}")
+ return True
+ except subprocess.CalledProcessError as e:
+ logger.error(f"BIND reload failed: {e.stderr}")
+ return False
+ except Exception as e:
+ logger.error(f"Unexpected error during BIND reload: {e}")
+ return False
+
+ def zone_exists(self, zone_name: str) -> bool:
+ zone_file = self.zones_dir / f"{zone_name}.db"
+ exists = zone_file.exists()
+ logger.debug(f"Zone existence check for {zone_name}: {exists}")
+ return exists
+
+ def update_named_conf(self, zones: List[str]) -> bool:
+ try:
+ with open(self.named_conf, "w") as f:
+ for zone in zones:
+ zone_file = self.zones_dir / f"{zone}.db"
+ f.write(f'zone "{zone}" {{ type master; file "{zone_file}"; }};\n')
+ logger.debug(f"Updated named.conf: {self.named_conf}")
+ return True
+ except IOError as e:
+ logger.error(f"Failed to update named.conf: {e}")
+ return False
diff --git a/directdnsonly/app/backends/coredns_mysql.py b/directdnsonly/app/backends/coredns_mysql.py
new file mode 100644
index 0000000..e75c7f0
--- /dev/null
+++ b/directdnsonly/app/backends/coredns_mysql.py
@@ -0,0 +1,460 @@
+from typing import Optional, Dict, Set, Tuple, Any
+
+from sqlalchemy import create_engine, Column, String, Integer, Text, ForeignKey, Boolean
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.orm import sessionmaker, scoped_session, relationship
+from dns import zone as dns_zone_module
+from dns.rdataclass import IN
+from loguru import logger
+from .base import DNSBackend
+from directdnsonly.config import config
+
+Base = declarative_base()
+
+
+class Zone(Base):
+ __tablename__ = "zones"
+ id = Column(Integer, primary_key=True)
+ zone_name = Column(String(255), nullable=False, index=True, unique=True)
+
+
+class Record(Base):
+ __tablename__ = "records"
+ id = Column(Integer, primary_key=True)
+ zone_id = Column(Integer, ForeignKey("zones.id"), nullable=False)
+ hostname = Column(String(255), nullable=False, index=True)
+ type = Column(String(10), nullable=False)
+ data = Column(Text, nullable=False)
+ ttl = Column(Integer, nullable=True)
+ online = Column(Boolean, nullable=False, default=False)
+
+ zone = relationship("Zone", backref="records")
+
+
+class CoreDNSMySQLBackend(DNSBackend):
+ def __init__(self, config: Dict[str, Any]):
+ super().__init__(config)
+ self.host = config.get("host", "localhost")
+ self.port = config.get("port", 3306)
+ self.database = config.get("database", "coredns")
+ self.username = config.get("username")
+ self.password = config.get("password")
+
+ self.engine = create_engine(
+ f"mysql+pymysql://{self.username}:{self.password}@"
+ f"{self.host}:{self.port}/{self.database}",
+ pool_pre_ping=True,
+ pool_size=5,
+ max_overflow=10,
+ )
+ self.Session = scoped_session(sessionmaker(bind=self.engine))
+ Base.metadata.create_all(self.engine)
+ logger.info(
+ f"Initialized CoreDNS MySQL backend '{self.instance_name}' "
+ f"for {self.database}@{self.host}:{self.port}"
+ )
+
+ @staticmethod
+ def dot_fqdn(zone_name):
+ return f"{zone_name}." if not zone_name.endswith(".") else zone_name
+
+ @classmethod
+ def get_name(cls) -> str:
+ return "coredns_mysql"
+
+ @classmethod
+ def is_available(cls) -> bool:
+ try:
+ import pymysql
+
+ return True
+ except ImportError:
+ logger.warning("PyMySQL not available - CoreDNS MySQL backend disabled")
+ return False
+
+ def write_zone(self, zone_name: str, zone_data: str) -> bool:
+ session = self.Session()
+ try:
+ # Ensure zone exists
+ zone = self._ensure_zone_exists(session, zone_name)
+
+ # Get existing records for this zone but track SOA records separately
+ existing_records = {}
+ existing_soa = None
+ for r in session.query(Record).filter_by(zone_id=zone.id).all():
+ if r.type == "SOA":
+ existing_soa = r
+ else:
+ existing_records[(r.hostname, r.type, r.data)] = r
+
+ # Parse the zone data into a normalised record set
+ source_records, source_soa = self._parse_zone_to_record_set(
+ zone_name, zone_data
+ )
+
+ # Track changes
+ current_records = set()
+ changes = {"added": 0, "updated": 0, "removed": 0}
+
+ # Handle SOA record
+ if source_soa:
+ soa_name, soa_content, soa_ttl = source_soa
+ soa_parts = soa_content.split()
+ if len(soa_parts) == 7:
+ if existing_soa:
+ existing_soa.data = soa_content
+ existing_soa.ttl = soa_ttl
+ existing_soa.online = True
+ changes["updated"] += 1
+ logger.debug(
+ f"Updated SOA record: {soa_name} SOA {soa_content}"
+ )
+ else:
+ existing_soa = Record(
+ zone_id=zone.id,
+ hostname=soa_name,
+ type="SOA",
+ data=soa_content,
+ ttl=soa_ttl,
+ online=True,
+ )
+ session.add(existing_soa)
+ changes["added"] += 1
+ logger.debug(
+ f"Added SOA record: {soa_name} SOA {soa_content}"
+ )
+
+ # Process all non-SOA records
+ for record_name, record_type, record_content, record_ttl in source_records:
+ key = (record_name, record_type, record_content)
+ current_records.add(key)
+
+ if key in existing_records:
+ # Update existing record if TTL changed
+ record = existing_records[key]
+ if record.ttl != record_ttl:
+ record.ttl = record_ttl
+ record.online = True
+ changes["updated"] += 1
+ logger.debug(
+ f"Updated TTL for record: {record_name} {record_type} {record_content}"
+ )
+ else:
+ # Add new record
+ new_record = Record(
+ zone_id=zone.id,
+ hostname=record_name,
+ type=record_type,
+ data=record_content,
+ ttl=record_ttl,
+ online=True,
+ )
+ session.add(new_record)
+ changes["added"] += 1
+ logger.debug(
+ f"Added new record: {record_name} {record_type} {record_content}"
+ )
+
+ # Remove records that no longer exist in the source zone
+ for key, record in existing_records.items():
+ if key not in current_records:
+ logger.debug(
+ f"Removed record: {record.hostname} {record.type} {record.data}"
+ )
+ session.delete(record)
+ changes["removed"] += 1
+
+ # Handle SOA removal if needed
+ if existing_soa and not source_soa:
+ logger.debug(
+ f"Removed SOA record: {existing_soa.hostname} SOA {existing_soa.data}"
+ )
+ session.delete(existing_soa)
+ changes["removed"] += 1
+
+ session.commit()
+ total_changes = changes['added'] + changes['updated'] + changes['removed']
+ if total_changes > 0:
+ logger.info(
+ f"[{self.instance_name}] Zone {zone_name} updated: "
+ f"{changes['added']} added, {changes['updated']} updated, "
+ f"{changes['removed']} removed"
+ )
+ else:
+ logger.debug(
+ f"[{self.instance_name}] Zone {zone_name}: no changes"
+ )
+ return True
+
+ except Exception as e:
+ logger.error(f"Error writing zone {zone_name}: {e}")
+ session.rollback()
+ return False
+ finally:
+ session.close()
+
+ def delete_zone(self, zone_name: str) -> bool:
+ session = self.Session()
+ try:
+ # First find the zone
+            zone = session.query(Zone).filter_by(zone_name=self.dot_fqdn(zone_name)).first()
+ if not zone:
+ logger.warning(f"Zone {zone_name} not found for deletion")
+ return False
+
+ # Delete all records associated with the zone
+ count = session.query(Record).filter_by(zone_id=zone.id).delete()
+
+ # Delete the zone itself
+ session.delete(zone)
+ session.commit()
+
+ logger.info(f"Deleted zone {zone_name} with {count} records")
+ return True
+ except Exception as e:
+ session.rollback()
+ logger.error(f"Zone deletion failed for {zone_name}: {e}")
+ return False
+ finally:
+ session.close()
+
+ def reload_zone(self, zone_name: Optional[str] = None) -> bool:
+ # In coredns_mysql_extend, the core plugin handles reloading automatically
+ # when database changes are detected, so we just log the request
+ if zone_name:
+ logger.debug(f"CoreDNS reload triggered for zone {zone_name}")
+ else:
+ logger.debug("CoreDNS reload triggered for all zones")
+ return True
+
+ def zone_exists(self, zone_name: str) -> bool:
+ session = self.Session()
+ try:
+ exists = (
+                session.query(Zone).filter_by(zone_name=self.dot_fqdn(zone_name)).first()
+ is not None
+ )
+ logger.debug(f"Zone existence check for {zone_name}: {exists}")
+ return exists
+ except Exception as e:
+ logger.error(f"Zone existence check failed for {zone_name}: {e}")
+ return False
+ finally:
+ session.close()
+
+ def _ensure_zone_exists(self, session, zone_name: str) -> Zone:
+ """Ensure a zone exists in the database, creating it if necessary"""
+ zone = session.query(Zone).filter_by(zone_name=self.dot_fqdn(zone_name)).first()
+ if not zone:
+ logger.debug(f"Creating new zone: {self.dot_fqdn(zone_name)}")
+ zone = Zone(zone_name=self.dot_fqdn(zone_name))
+ session.add(zone)
+ session.flush() # Get the zone ID
+ return zone
+
+ def _normalize_cname_data(self, zone_name: str, record_content: str) -> str:
+ """Normalize CNAME record data to ensure consistent FQDN format.
+
+ This ensures CNAME targets are always stored as fully-qualified domain
+ names so that record comparison between the BIND zone source and the
+ database is deterministic.
+
+ Args:
+ zone_name: The zone name for relative-name expansion
+ record_content: The raw CNAME target from the parsed zone
+
+ Returns:
+ The normalized CNAME target string
+ """
+ if record_content.startswith("@"):
+ logger.debug(
+                "CNAME target starts with '@', replacing with zone FQDN"
+ )
+ record_content = self.dot_fqdn(zone_name)
+ elif not record_content.endswith("."):
+ logger.debug(
+ f"CNAME target {record_content} is relative, appending zone"
+ )
+ record_content = ".".join(
+ [record_content, self.dot_fqdn(zone_name)]
+ )
+ return record_content
+
+ def _parse_zone_to_record_set(
+ self, zone_name: str, zone_data: str
+ ) -> Tuple[Set[Tuple[str, str, str, int]], Optional[Tuple[str, str, int]]]:
+ """Parse a BIND zone file into a set of normalised record keys.
+
+ Returns:
+ Tuple of:
+ - set of (hostname, type, data, ttl) tuples for non-SOA records
+ - (hostname, soa_data, ttl) tuple for the SOA record, or None
+ """
+        dns_zone = dns_zone_module.from_text(zone_data, origin=zone_name, check_origin=False)
+ records: Set[Tuple[str, str, str, int]] = set()
+ soa = None
+
+ for name, ttl, rdata in dns_zone.iterate_rdatas():
+ if rdata.rdclass != IN:
+ continue
+
+ record_name = str(name)
+ record_type = rdata.rdtype.name
+ record_content = rdata.to_text()
+
+ if record_type == "SOA":
+ soa = (record_name, record_content, ttl)
+ continue
+
+ if record_type == "CNAME":
+ record_content = self._normalize_cname_data(
+ zone_name, record_content
+ )
+
+ records.add((record_name, record_type, record_content, ttl))
+
+ return records, soa
+
+ def verify_zone_record_count(
+ self, zone_name: str, expected_count: int
+ ) -> tuple[bool, int]:
+ """Verify the record count in this backend matches the expected count
+ from the source (DirectAdmin) zone file.
+
+ Args:
+ zone_name: The zone to verify
+ expected_count: The number of records parsed from the source BIND zone
+
+ Returns:
+ Tuple of (matches: bool, actual_count: int)
+ """
+ session = self.Session()
+ try:
+ zone = (
+ session.query(Zone)
+ .filter_by(zone_name=self.dot_fqdn(zone_name))
+ .first()
+ )
+ if not zone:
+ logger.warning(
+ f"[{self.instance_name}] Zone {zone_name} not found "
+ f"during record count verification"
+ )
+ return False, 0
+
+ actual_count = (
+ session.query(Record).filter_by(zone_id=zone.id).count()
+ )
+ matches = actual_count == expected_count
+
+ if not matches:
+ logger.warning(
+ f"[{self.instance_name}] Record count mismatch for "
+ f"{zone_name}: source zone has {expected_count} records, "
+ f"backend has {actual_count} records "
+ f"(difference: {actual_count - expected_count:+d})"
+ )
+ else:
+ logger.debug(
+ f"[{self.instance_name}] Record count verified for "
+ f"{zone_name}: {actual_count} records match source"
+ )
+
+ return matches, actual_count
+
+ except Exception as e:
+ logger.error(
+ f"[{self.instance_name}] Error verifying record count "
+ f"for {zone_name}: {e}"
+ )
+ return False, -1
+ finally:
+ session.close()
+
+ def reconcile_zone_records(
+ self, zone_name: str, zone_data: str
+ ) -> Tuple[bool, int]:
+ """Reconcile backend records against the authoritative BIND zone from
+ DirectAdmin. Any records in the backend that are **not** present in
+ the source zone will be deleted.
+
+ This is the post-write safety net: even though ``write_zone`` already
+ removes stale records during normal processing, this method catches
+ any extras that may have crept in via race conditions, manual edits,
+ or replication drift between MySQL nodes.
+
+ Args:
+ zone_name: The zone to reconcile
+ zone_data: The raw BIND zone file content (authoritative source)
+
+ Returns:
+ Tuple of (success: bool, records_removed: int)
+ """
+ session = self.Session()
+ try:
+ zone = (
+ session.query(Zone)
+ .filter_by(zone_name=self.dot_fqdn(zone_name))
+ .first()
+ )
+ if not zone:
+ logger.warning(
+ f"[{self.instance_name}] Zone {zone_name} not found "
+ f"during reconciliation"
+ )
+ return False, 0
+
+ # Build the expected record set from the source BIND zone
+ source_records, source_soa = self._parse_zone_to_record_set(
+ zone_name, zone_data
+ )
+ # Build lookup keys (without TTL) matching write_zone's key format
+ expected_keys: Set[Tuple[str, str, str]] = {
+ (hostname, rtype, data)
+ for hostname, rtype, data, _ in source_records
+ }
+
+ # Query all records currently in the backend for this zone
+ db_records = (
+ session.query(Record).filter_by(zone_id=zone.id).all()
+ )
+
+ removed = 0
+ for record in db_records:
+ # SOA records are managed separately – skip them
+ if record.type == "SOA":
+ continue
+
+ key = (record.hostname, record.type, record.data)
+ if key not in expected_keys:
+ logger.debug(
+ f"[{self.instance_name}] Reconcile: removing extra "
+ f"record from {zone_name}: "
+ f"{record.hostname} {record.type} {record.data}"
+ )
+ session.delete(record)
+ removed += 1
+
+ if removed > 0:
+ session.commit()
+ logger.info(
+ f"[{self.instance_name}] Reconciliation for {zone_name}: "
+ f"removed {removed} extra record(s) not in source zone"
+ )
+ else:
+ logger.debug(
+ f"[{self.instance_name}] Reconciliation for {zone_name}: "
+ f"all records match source zone — no action needed"
+ )
+
+ return True, removed
+
+ except Exception as e:
+ logger.error(
+ f"[{self.instance_name}] Error reconciling records "
+ f"for {zone_name}: {e}"
+ )
+ session.rollback()
+ return False, 0
+ finally:
+ session.close()
diff --git a/directdnsonly/app/backends/powerdns_mysql.py b/directdnsonly/app/backends/powerdns_mysql.py
new file mode 100644
index 0000000..491d217
--- /dev/null
+++ b/directdnsonly/app/backends/powerdns_mysql.py
@@ -0,0 +1,332 @@
+from typing import Optional, Dict, Set, Tuple, List
+
+from sqlalchemy import (
+ create_engine,
+ Column,
+ String,
+ Integer,
+ Text,
+ Boolean,
+ DateTime,
+ func,
+)
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.orm import sessionmaker, scoped_session
+from loguru import logger
+from .base import DNSBackend
+from config import config
+import time
+
+Base = declarative_base()
+
+
+class Domain(Base):
+ __tablename__ = "domains"
+ id = Column(Integer, primary_key=True)
+ name = Column(String(255), nullable=False, index=True, unique=True)
+ master = Column(String(128), nullable=True)
+ last_check = Column(Integer, nullable=True)
+ type = Column(String(6), nullable=False, default="NATIVE")
+ notified_serial = Column(Integer, nullable=True)
+ account = Column(String(40), nullable=True)
+
+
+class Record(Base):
+ __tablename__ = "records"
+ id = Column(Integer, primary_key=True)
+ domain_id = Column(Integer, nullable=False, index=True)
+ name = Column(String(255), nullable=False, index=True)
+ type = Column(String(10), nullable=False)
+ content = Column(Text, nullable=False)
+ ttl = Column(Integer, nullable=True)
+ prio = Column(Integer, nullable=True)
+ change_date = Column(Integer, nullable=True)
+ disabled = Column(Boolean, nullable=False, default=False)
+ ordername = Column(String(255), nullable=True)
+ auth = Column(Boolean, nullable=False, default=True)
+
+
+class PowerDNSMySQLBackend(DNSBackend):
+ @classmethod
+ def get_name(cls) -> str:
+ return "powerdns_mysql"
+
+ @classmethod
+ def is_available(cls) -> bool:
+ try:
+ import pymysql
+
+ return True
+ except ImportError:
+ logger.warning("PyMySQL not available - PowerDNS MySQL backend disabled")
+ return False
+
+    @staticmethod
+    def ensure_fqdn(name: str, zone_name: str) -> str:
+        """Ensure name is fully qualified for PowerDNS"""
+        # PowerDNS stores names without a trailing dot; assumes zone_name
+        # itself carries no trailing dot — TODO confirm at call sites.
+        if name == "@" or name == "":
+            return zone_name
+        elif name.endswith("."):
+            # rstrip removes the whole trailing run of dots, not just one.
+            return name.rstrip(".")
+        elif name == zone_name:
+            return name
+        else:
+            return f"{name}.{zone_name}"
+
+ def __init__(self, config: dict = None):
+ c = config or config.get("dns.backends.powerdns_mysql")
+ self.engine = create_engine(
+ f"mysql+pymysql://{c['username']}:{c['password']}@"
+ f"{c['host']}:{c['port']}/{c['database']}",
+ pool_pre_ping=True,
+ )
+ self.Session = scoped_session(sessionmaker(bind=self.engine))
+ Base.metadata.create_all(self.engine)
+ logger.info(f"Initialized PowerDNS MySQL backend for {c['database']}")
+
+    def _ensure_domain_exists(self, session, zone_name: str) -> Domain:
+        """Ensure domain exists and return domain object"""
+        domain = session.query(Domain).filter_by(name=zone_name).first()
+        if not domain:
+            domain = Domain(name=zone_name, type="NATIVE")
+            session.add(domain)
+            # Flush (not commit) so domain.id is populated for record rows
+            # while leaving the transaction open for the caller to commit.
+            session.flush()  # Flush to get the domain ID
+            logger.info(f"Created new domain: {zone_name}")
+        return domain
+
+ def _parse_soa_content(self, soa_content: str) -> Dict[str, str]:
+ """Parse SOA record content into components"""
+ parts = soa_content.split()
+ if len(parts) >= 7:
+ return {
+ "primary_ns": parts[0],
+ "hostmaster": parts[1],
+ "serial": parts[2],
+ "refresh": parts[3],
+ "retry": parts[4],
+ "expire": parts[5],
+ "minimum": parts[6],
+ }
+ return {}
+
+ def write_zone(self, zone_name: str, zone_data: str) -> bool:
+ from dns import zone as dns_zone_module
+ from dns.rdataclass import IN
+
+ session = self.Session()
+ try:
+ # Ensure domain exists
+ domain = self._ensure_domain_exists(session, zone_name)
+
+ # Get existing records for this domain
+ existing_records = {
+ (r.name, r.type): r
+ for r in session.query(Record).filter_by(domain_id=domain.id).all()
+ }
+
+ # Parse the zone data
+ dns_zone = dns_zone_module.from_text(zone_data, check_origin=False)
+
+ # Track records we process
+ current_records: Set[Tuple[str, str]] = set()
+ changes = {"added": 0, "updated": 0, "removed": 0}
+ current_time = int(time.time())
+
+ # Process all records
+ for name, ttl, rdata in dns_zone.iterate_rdatas():
+ if rdata.rdclass != IN:
+ continue
+
+ record_name = self.ensure_fqdn(str(name), zone_name)
+ record_type = rdata.rdtype.name
+ record_content = rdata.to_text()
+ record_ttl = ttl
+ record_prio = None
+
+ # Handle MX records priority
+ if record_type == "MX":
+ parts = record_content.split(" ", 1)
+ if len(parts) == 2:
+ record_prio = int(parts[0])
+ record_content = parts[1]
+
+ # Handle SRV records priority and other fields
+ elif record_type == "SRV":
+ parts = record_content.split(" ", 3)
+ if len(parts) == 4:
+ record_prio = int(parts[0])
+ record_content = f"{parts[1]} {parts[2]} {parts[3]}"
+
+ # Ensure CNAME and other records have proper FQDN format
+ if record_type in ["CNAME", "MX", "NS"]:
+ if not record_content.endswith(".") and record_content != "@":
+ if record_content == "@":
+ record_content = zone_name
+ elif "." not in record_content:
+ record_content = f"{record_content}.{zone_name}"
+
+ key = (record_name, record_type)
+ current_records.add(key)
+
+ if key in existing_records:
+ # Update existing record if needed
+ record = existing_records[key]
+ if (
+ record.content != record_content
+ or record.ttl != record_ttl
+ or record.prio != record_prio
+ ):
+ record.content = record_content
+ record.ttl = record_ttl
+ record.prio = record_prio
+ record.change_date = current_time
+ record.disabled = False
+ changes["updated"] += 1
+ else:
+ # Add new record
+ new_record = Record(
+ domain_id=domain.id,
+ name=record_name,
+ type=record_type,
+ content=record_content,
+ ttl=record_ttl,
+ prio=record_prio,
+ change_date=current_time,
+ disabled=False,
+ auth=True,
+ )
+ session.add(new_record)
+ changes["added"] += 1
+
+ # Remove deleted records
+ for key in set(existing_records.keys()) - current_records:
+ session.delete(existing_records[key])
+ changes["removed"] += 1
+
+ session.commit()
+ logger.success(
+ f"Zone {zone_name} updated: "
+ f"+{changes['added']} ~{changes['updated']} -{changes['removed']}"
+ )
+ return True
+
+ except Exception as e:
+ session.rollback()
+ logger.error(f"Zone update failed for {zone_name}: {e}")
+ return False
+ finally:
+ session.close()
+
+    def delete_zone(self, zone_name: str) -> bool:
+        """Delete a zone's domain row and all of its records.
+
+        Returns:
+            True on success, False when the domain is missing or the
+            delete fails (transaction rolled back on error).
+        """
+        session = self.Session()
+        try:
+            # First find the domain
+            domain = session.query(Domain).filter_by(name=zone_name).first()
+            if not domain:
+                logger.warning(f"Domain {zone_name} not found for deletion")
+                return False
+
+            # Delete all records associated with the domain
+            count = session.query(Record).filter_by(domain_id=domain.id).delete()
+
+            # Delete the domain itself
+            session.delete(domain)
+            session.commit()
+
+            logger.info(f"Deleted domain {zone_name} with {count} records")
+            return True
+        except Exception as e:
+            session.rollback()
+            logger.error(f"Domain deletion failed for {zone_name}: {e}")
+            return False
+        finally:
+            session.close()
+
+ def reload_zone(self, zone_name: Optional[str] = None) -> bool:
+ """PowerDNS reload - could trigger pdns_control reload if needed"""
+ if zone_name:
+ logger.debug(f"PowerDNS reload triggered for zone {zone_name}")
+ # Optional: Call pdns_control reload-zones here if needed
+ # subprocess.run(['pdns_control', 'reload-zones'], check=True)
+ else:
+ logger.debug("PowerDNS reload triggered for all zones")
+ # Optional: Call pdns_control reload here if needed
+ # subprocess.run(['pdns_control', 'reload'], check=True)
+ return True
+
+ def zone_exists(self, zone_name: str) -> bool:
+ session = self.Session()
+ try:
+ exists = session.query(Domain).filter_by(name=zone_name).first() is not None
+ logger.debug(f"Zone existence check for {zone_name}: {exists}")
+ return exists
+ except Exception as e:
+ logger.error(f"Zone existence check failed for {zone_name}: {e}")
+ return False
+ finally:
+ session.close()
+
+    def get_zone_records(self, zone_name: str) -> List[Dict]:
+        """Get all records for a zone - useful for debugging/inspection.
+
+        Args:
+            zone_name: Domain name to look up.
+
+        Returns:
+            A list of dicts (name/type/content/ttl/prio/disabled); empty
+            when the domain is missing or the query fails.
+        """
+        session = self.Session()
+        try:
+            domain = session.query(Domain).filter_by(name=zone_name).first()
+            if not domain:
+                return []
+
+            records = session.query(Record).filter_by(domain_id=domain.id).all()
+            return [
+                {
+                    "name": r.name,
+                    "type": r.type,
+                    "content": r.content,
+                    "ttl": r.ttl,
+                    "prio": r.prio,
+                    "disabled": r.disabled,
+                }
+                for r in records
+            ]
+        except Exception as e:
+            logger.error(f"Failed to get records for {zone_name}: {e}")
+            return []
+        finally:
+            session.close()
+
+    def set_record_status(
+        self, zone_name: str, record_name: str, record_type: str, disabled: bool
+    ) -> bool:
+        """Enable/disable specific records.
+
+        NOTE(review): only the FIRST matching (name, type) row is toggled;
+        confirm this is intended for RRsets stored as multiple rows.
+
+        Returns:
+            True on success, False when the domain or record is not found
+            or the update fails (rolled back).
+        """
+        session = self.Session()
+        try:
+            domain = session.query(Domain).filter_by(name=zone_name).first()
+            if not domain:
+                logger.warning(f"Domain {zone_name} not found")
+                return False
+
+            full_name = self.ensure_fqdn(record_name, zone_name)
+            record = (
+                session.query(Record)
+                .filter_by(domain_id=domain.id, name=full_name, type=record_type)
+                .first()
+            )
+
+            if not record:
+                logger.warning(
+                    f"Record {full_name} {record_type} not found in {zone_name}"
+                )
+                return False
+
+            record.disabled = disabled
+            record.change_date = int(time.time())
+            session.commit()
+
+            status = "disabled" if disabled else "enabled"
+            logger.info(f"Record {full_name} {record_type} {status} in {zone_name}")
+            return True
+
+        except Exception as e:
+            session.rollback()
+            logger.error(f"Failed to set record status: {e}")
+            return False
+        finally:
+            session.close()
diff --git a/directdnsonly/app/db/__init__.py b/directdnsonly/app/db/__init__.py
new file mode 100644
index 0000000..3913ec5
--- /dev/null
+++ b/directdnsonly/app/db/__init__.py
@@ -0,0 +1,55 @@
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker
+from sqlalchemy.ext.declarative import declarative_base
+from vyper import v
+
+import datetime
+
+Base = declarative_base()
+
+
+def connect(dbtype="sqlite", **kwargs):
+ if dbtype == "sqlite":
+ # Start SQLite engine
+ db_location = v.get("datastore.db_location")
+ if db_location == -1:
+ raise Exception("DB Type is sqlite but db_location is not defined")
+ else:
+ engine = create_engine(
+ "sqlite:///" + db_location, connect_args={"check_same_thread": False}
+ )
+ Base.metadata.create_all(engine)
+ return sessionmaker(bind=engine)()
+ elif dbtype == "mysql":
+ # Start a MySQL engine
+ db_user = v.get_string("datastore.user")
+ db_host = v.get_string("datastore.host")
+ db_name = v.get_string("datastore.name")
+ db_pass = v.get_string("datastore.pass")
+ db_port = v.get_string("datastore.port")
+ if (
+ not v.is_set("datastore.user")
+ or not v.is_set("datastore.name")
+ or not v.is_set("datastore.pass")
+ or not v.is_set("datastore.host")
+ ):
+ raise Exception(
+ "DB Type is mysql but db_(host,name,and pass) are not populated"
+ )
+ else:
+ engine = create_engine(
+ "mysql+pymysql://"
+ + db_user
+ + ":"
+ + db_pass
+ + "@"
+ + db_host
+ + ":"
+ + db_port
+ + "/"
+ + db_name
+ )
+ Base.metadata.create_all(engine)
+ return sessionmaker(bind=engine)()
+ else:
+ raise Exception("Unknown/unimplemented database type: {}".format(dbtype))
diff --git a/directdnsonly/app/db/models/__init__.py b/directdnsonly/app/db/models/__init__.py
new file mode 100644
index 0000000..23331b1
--- /dev/null
+++ b/directdnsonly/app/db/models/__init__.py
@@ -0,0 +1,35 @@
+from directdnsonly.app.db import Base
+from sqlalchemy import Column, Integer, String, DateTime
+
+
+class Key(Base):
+ __tablename__ = "keys"
+ id = Column(Integer, primary_key=True)
+ key = Column(String(255), unique=True)
+ name = Column(String(255))
+ expires = Column(DateTime)
+ service = Column(String(255))
+
+ def __repr__(self):
+ return "" % (
+ self.key,
+ self.name,
+ self.expires,
+ self.service,
+ )
+
+
+class Domain(Base):
+ __tablename__ = "domains"
+ id = Column(Integer, primary_key=True)
+ domain = Column(String(255), unique=True)
+ hostname = Column(String(255))
+ username = Column(String(255))
+
+ def __repr__(self):
+ return "" % (
+ self.id,
+ self.domain,
+ self.hostname,
+ self.username,
+ )
diff --git a/directdnsonly/app/utils/__init__.py b/directdnsonly/app/utils/__init__.py
new file mode 100644
index 0000000..3980cd6
--- /dev/null
+++ b/directdnsonly/app/utils/__init__.py
@@ -0,0 +1,25 @@
+from loguru import logger
+
+from directdnsonly.app.db.models import *
+from directdnsonly.app.db import connect
+
+
+def check_zone_exists(zone_name):
+ # Check if zone is present in the index
+ session = connect()
+ logger.debug("Checking if {} is present in the DB".format(zone_name))
+ domain_exists = bool(session.query(Domain.id).filter_by(domain=zone_name).first())
+ logger.debug("Returned from query: {}".format(domain_exists))
+ if domain_exists:
+ return True
+ else:
+ return False
+
+
+def put_zone_index(zone_name, host_name, user_name):
+ # add a new zone to index
+ session = connect()
+ logger.debug("Placed zone into database.. {}".format(str(zone_name)))
+ domain = Domain(domain=zone_name, hostname=host_name, username=user_name)
+ session.add(domain)
+ session.commit()
diff --git a/directdnsonly/app/utils/zone_parser.py b/directdnsonly/app/utils/zone_parser.py
new file mode 100644
index 0000000..c124ce7
--- /dev/null
+++ b/directdnsonly/app/utils/zone_parser.py
@@ -0,0 +1,69 @@
+from dns import zone, name
+from dns.rdataclass import IN
+from dns.exception import DNSException
+from loguru import logger
+
+
+def validate_and_normalize_zone(zone_data: str, domain_name: str) -> str:
+    """
+    Normalize zone file content and ensure proper origin handling.
+
+    Returns normalized zone data.
+    Raises ValueError (wrapping the underlying DNSException) on
+    validation failure.
+    """
+    # Ensure domain ends with dot
+    if not domain_name.endswith("."):
+        domain_name = f"{domain_name}."
+
+    # Add $ORIGIN if missing — NOTE(review): a substring check could also
+    # match "$ORIGIN" inside TXT record data; confirm acceptable for the
+    # zones pushed by DirectAdmin.
+    if "$ORIGIN" not in zone_data:
+        zone_data = f"$ORIGIN {domain_name}\n{zone_data}"
+
+    # Add $TTL if missing (prepended, so it lands before the $ORIGIN line)
+    if "$TTL" not in zone_data:
+        zone_data = f"$TTL 300\n{zone_data}"
+
+    # Validate the zone
+    try:
+        zone.from_text(
+            zone_data, origin=name.from_text(domain_name), check_origin=False
+        )
+        return zone_data
+    except DNSException as e:
+        logger.error(f"Zone validation failed: {e}")
+        raise ValueError(f"Invalid zone data: {str(e)}")
+
+
+def count_zone_records(zone_data: str, domain_name: str) -> int:
+ """Count the number of individual DNS records in a parsed BIND zone file.
+
+ This counts every individual resource record (each A, AAAA, MX, TXT, etc.)
+ the same way the CoreDNS MySQL backend stores them — one row per record.
+
+ Args:
+ zone_data: The raw or normalized BIND zone file content
+ domain_name: The domain name for the zone
+
+ Returns:
+ The total number of individual records in the zone
+ """
+ if not domain_name.endswith("."):
+ domain_name = f"{domain_name}."
+
+ try:
+ dns_zone = zone.from_text(
+ zone_data, origin=name.from_text(domain_name), check_origin=False
+ )
+
+ count = 0
+ for _, _, rdata in dns_zone.iterate_rdatas():
+ if rdata.rdclass == IN:
+ count += 1
+
+ logger.debug(
+ f"Source zone {domain_name} contains {count} records"
+ )
+ return count
+
+ except DNSException as e:
+ logger.error(f"Failed to count records for {domain_name}: {e}")
+ return -1
diff --git a/directdnsonly/config/__init__.py b/directdnsonly/config/__init__.py
new file mode 100644
index 0000000..192a2a7
--- /dev/null
+++ b/directdnsonly/config/__init__.py
@@ -0,0 +1,63 @@
+from vyper import v, Vyper
+from loguru import logger
+
+# from vyper.config import Config
+import os
+from pathlib import Path
+from typing import Any, Dict
+
+
+def load_config() -> Vyper:
+ # Initialize Vyper
+ v.set_config_name("app") # Looks for app.yaml/app.yml
+ v.add_config_path(".") # Search in current directory
+ v.add_config_path("./config")
+ v.set_env_prefix("DADNS")
+ v.set_env_key_replacer("_", ".")
+ v.automatic_env()
+ # Set defaults for all required parameters
+ v.set_default("log_level", "info")
+ v.set_default("queue_location", "./data/queues")
+ v.set_default("timezone", "Pacific/Aucland")
+
+ # Set defaults for app
+ v.set_default("app.listen_port", 2222)
+ v.set_default("app.proxy_support", True)
+ v.set_default("app.proxy_support_base", "http://127.0.0.1")
+ v.set_default("app.log_level", "debug")
+ v.set_default("app.log_to", "file")
+ v.set_default("app.ssl_enable", "false")
+ v.set_default("app.listen_port", 2222)
+ v.set_default("app.token_valid_for_days", 30)
+ v.set_default("app.queue_location", "conf/queues")
+ v.set_default("app.auth_username", "directdnsonly")
+ v.set_default("app.auth_password", "changeme")
+ v.set_default("timezone", "Pacific/Auckland")
+
+ # DNS backend defaults
+ v.set_default("dns.backends.bind.enabled", False)
+ v.set_default("dns.backends.bind.zones_dir", "/etc/named/zones")
+ v.set_default("dns.backends.bind.named_conf", "/etc/named.conf.local")
+
+ v.set_default("dns.backends.coredns_mysql.enabled", False)
+ v.set_default("dns.backends.coredns_mysql.host", "localhost")
+ v.set_default("dns.backends.coredns_mysql.port", 3306)
+ v.set_default("dns.backends.coredns_mysql.database", "coredns")
+ v.set_default("dns.backends.coredns_mysql.username", "coredns")
+ v.set_default("dns.backends.coredns_mysql.password", "")
+ v.set_default("dns.backends.coredns_mysql.table_name", "records")
+
+ # Set Defaults Datastore
+ v.set_default("datastore.type", "sqlite")
+ v.set_default("datastore.port", 3306)
+ v.set_default("datastore.db_location", "data/directdns.db")
+
+ # Read configuration
+ if not v.read_in_config():
+ logger.warning("No config file found, using defaults")
+
+ return v
+
+
+# Global config instance
+config = load_config()
diff --git a/directdnsonly/config/app.yml b/directdnsonly/config/app.yml
new file mode 100644
index 0000000..b755b9d
--- /dev/null
+++ b/directdnsonly/config/app.yml
@@ -0,0 +1,35 @@
+---
+timezone: Pacific/Auckland
+log_level: INFO
+queue_location: ./data/queues
+
+app:
+ auth_username: directdnsonly
+ auth_password: changeme # Override via DADNS_APP_AUTH_PASSWORD env var
+
+dns:
+ default_backend: bind
+ backends:
+ bind:
+ type: bind
+ enabled: true
+ zones_dir: ./data/zones
+ named_conf: ./data/named.conf.include
+ coredns_dc1:
+ type: coredns_mysql
+ enabled: true
+ host: "mysql-dc1"
+ port: 3306
+ database: "coredns"
+ username: "coredns"
+ password: "coredns123"
+ table_name: "records"
+ coredns_dc2:
+ type: coredns_mysql
+ enabled: true
+ host: "mysql-dc2"
+ port: 3306
+ database: "coredns"
+ username: "coredns"
+ password: "coredns123"
+ table_name: "records"
\ No newline at end of file
diff --git a/directdnsonly/main.py b/directdnsonly/main.py
new file mode 100644
index 0000000..69c2d56
--- /dev/null
+++ b/directdnsonly/main.py
@@ -0,0 +1,114 @@
+from loguru import logger
+import cherrypy
+from app.backends import BackendRegistry
+from app.api.admin import DNSAdminAPI
+from app.api.health import HealthAPI
+from app import configure_logging
+from worker import WorkerManager
+from directdnsonly.config import config
+from directdnsonly.app.db import connect
+import importlib.metadata
+
+app_version = importlib.metadata.version("directdnsonly")
+
+
+class Root:
+ pass
+
+
+def main():
+    """Initialize logging, DB, workers, and the CherryPy HTTP server."""
+    try:
+        # Initialize logging
+        configure_logging()
+        logger.info("Starting DaDNS server initialization")
+
+        # Initialize backend registry
+        registry = BackendRegistry()
+        available_backends = registry.get_available_backends()
+        logger.info(f"Available backend instances: {list(available_backends.keys())}")
+
+        # Module-global DB session; exit hard if the datastore is down.
+        global session
+        try:
+            session = connect(config.get("datastore.type"))
+        except Exception as e:
+            logger.error(str(e))
+            print("ERROR: " + str(e))
+            exit(1)
+        logger.info("Database Connected!")
+
+        # Setup worker manager
+        worker_manager = WorkerManager(
+            queue_path=config.get("queue_location"), backend_registry=registry
+        )
+        worker_manager.start()
+        logger.info(
+            f"Worker manager started with queue path: {config.get('queue_location')}"
+        )
+
+        # Configure CherryPy basic-auth credentials from config
+        user_password_dict = {
+            config.get_string("app.auth_username"): config.get_string("app.auth_password")
+        }
+        check_password = cherrypy.lib.auth_basic.checkpassword_dict(user_password_dict)
+
+        cherrypy.config.update(
+            {
+                "server.socket_host": "0.0.0.0",
+                "server.socket_port": config.get_int("app.listen_port"),
+                "tools.proxy.on": config.get_bool("app.proxy_support"),
+                "tools.proxy.base": config.get_string("app.proxy_support_base"),
+                "tools.auth_basic.on": True,
+                "tools.auth_basic.realm": "dadns",
+                "tools.auth_basic.checkpassword": check_password,
+                "tools.response_headers.on": True,
+                "tools.response_headers.headers": [
+                    ("Server", "DirectDNS v" + app_version)
+                ],
+                # NOTE(review): no default is registered for "environment";
+                # this may resolve to None — confirm against load_config.
+                "environment": config.get("environment"),
+            }
+        )
+
+        if config.get_bool("app.ssl_enable"):
+            cherrypy.config.update(
+                {
+                    "server.ssl_module": "builtin",
+                    "server.ssl_certificate": config.get("app.ssl_cert"),
+                    "server.ssl_private_key": config.get("app.ssl_key"),
+                    # NOTE(review): key is "ssl_bundle", not "app.ssl_bundle"
+                    # like its siblings — confirm this is intentional.
+                    "server.ssl_certificate_chain": config.get("ssl_bundle"),
+                }
+            )
+
+        # cherrypy.log.error_log.propagate = False
+        if config.get_string("app.log_level").upper() != "DEBUG":
+            cherrypy.log.access_log.propagate = False
+
+        # Mount applications
+        # NOTE(review): this Root() instance is dead code — root is
+        # immediately rebound to DNSAdminAPI on the next statement.
+        root = Root()
+        root = DNSAdminAPI(
+            save_queue=worker_manager.save_queue,
+            delete_queue=worker_manager.delete_queue,
+            backend_registry=registry,
+        )
+        root.health = HealthAPI(registry)
+
+        # Add queue status endpoint
+        # NOTE(review): a bare lambda is not an exposed CherryPy handler
+        # (no @cherrypy.expose) — verify this endpoint actually serves.
+        root.queue_status = lambda: worker_manager.queue_status()
+
+        cherrypy.tree.mount(root, "/")
+        cherrypy.engine.start()
+        logger.success(f"Server started on port {config.get_int('app.listen_port')}")
+
+        # Add shutdown handler
+        cherrypy.engine.subscribe("stop", worker_manager.stop)
+
+        cherrypy.engine.block()
+
+    except Exception as e:
+        logger.critical(f"Server startup failed: {e}")
+        if "worker_manager" in locals():
+            worker_manager.stop()
+        raise
+
+
+if __name__ == "__main__":
+    main()
diff --git a/directdnsonly/worker.py b/directdnsonly/worker.py
new file mode 100644
index 0000000..97d1e2f
--- /dev/null
+++ b/directdnsonly/worker.py
@@ -0,0 +1,282 @@
+import os
+import threading
+import time
+from concurrent.futures import ThreadPoolExecutor, as_completed
+from loguru import logger
+from persistqueue import Queue
+from persistqueue.exceptions import Empty
+
+from app.utils import check_zone_exists, put_zone_index
+from app.utils.zone_parser import count_zone_records
+from directdnsonly.app.db.models import Domain
+from directdnsonly.app.db import connect
+
+
+class WorkerManager:
+    def __init__(self, queue_path: str, backend_registry):
+        """Set up persistent save/delete queues under queue_path.
+
+        Args:
+            queue_path: Directory that will hold the persist-queue storage.
+            backend_registry: Registry exposing get_available_backends().
+
+        Raises:
+            Exception: re-raised when queue initialization fails — the
+            service cannot run without its queues.
+        """
+        self.queue_path = queue_path
+        self.backend_registry = backend_registry
+        self._running = False
+        self._thread = None
+
+        # Initialize queues with error handling
+        try:
+            os.makedirs(queue_path, exist_ok=True)
+            self.save_queue = Queue(f"{queue_path}/save")
+            self.delete_queue = Queue(f"{queue_path}/delete")
+            logger.success(f"Initialized queues at {queue_path}")
+        except Exception as e:
+            logger.critical(f"Failed to initialize queues: {e}")
+            raise
+
+ def _process_save_queue(self):
+ """Main worker loop for processing save requests"""
+ logger.info("Save queue worker started")
+ # Get DB Connection
+ session = connect()
+
+ # Batch tracking
+ batch_start = None
+ batch_processed = 0
+ batch_failed = 0
+
+ while self._running:
+ try:
+ item = self.save_queue.get(block=True, timeout=5)
+
+ # Start a new batch timer on the first item
+ if batch_start is None:
+ batch_start = time.monotonic()
+ batch_processed = 0
+ batch_failed = 0
+ pending = self.save_queue.qsize()
+ logger.info(
+ f"📥 Batch started — {pending + 1} zone(s) queued "
+ f"for processing"
+ )
+
+ logger.debug(
+ f"Processing zone update for {item.get('domain', 'unknown')}"
+ )
+
+ if not check_zone_exists(item.get("domain")):
+ put_zone_index(
+ item.get("domain"), item.get("hostname"), item.get("username")
+ )
+ # Validate item structure
+ if not all(k in item for k in ["domain", "zone_file"]):
+ logger.error(f"Invalid queue item: {item}")
+ self.save_queue.task_done()
+ batch_failed += 1
+ continue
+
+ # Process with all available backends
+ backends = self.backend_registry.get_available_backends()
+ if not backends:
+ logger.warning("No active backends available!")
+
+ if len(backends) > 1:
+ # Process backends in parallel for faster sync
+ logger.debug(
+ f"Processing {item['domain']} across "
+ f"{len(backends)} backends concurrently: "
+ f"{', '.join(backends.keys())}"
+ )
+ self._process_backends_parallel(
+ backends, item, session
+ )
+ else:
+ # Single backend, no need for thread overhead
+ for backend_name, backend in backends.items():
+ self._process_single_backend(
+ backend_name, backend, item, session
+ )
+
+ self.save_queue.task_done()
+ batch_processed += 1
+ logger.debug(f"Completed processing for {item['domain']}")
+
+ except Empty:
+ # Queue is empty — if we were in a batch, log the summary
+ if batch_start is not None:
+ elapsed = time.monotonic() - batch_start
+ total = batch_processed + batch_failed
+ rate = batch_processed / elapsed if elapsed > 0 else 0
+ logger.success(
+ f"📦 Batch complete — {batch_processed}/{total} zone(s) "
+ f"processed successfully in {elapsed:.1f}s "
+ f"({rate:.1f} zones/sec)"
+ + (f", {batch_failed} failed" if batch_failed else "")
+ )
+ batch_start = None
+ batch_processed = 0
+ batch_failed = 0
+ continue
+ except Exception as e:
+ logger.error(f"Unexpected worker error: {e}")
+ batch_failed += 1
+ time.sleep(1) # Prevent tight error loops
+
+    def _process_single_backend(self, backend_name, backend, item, session):
+        """Process a zone update for a single backend.
+
+        Args:
+            backend_name: Display name used in log messages.
+            backend: Backend instance (write_zone/reload_zone interface).
+            item: Queue item with "domain" and "zone_file" keys.
+            session: Open DB session, used only on the BIND path to list
+                all indexed domains for named.conf regeneration.
+        """
+        try:
+            logger.debug(f"Using backend: {backend_name}")
+            if backend.write_zone(item["domain"], item["zone_file"]):
+                logger.debug(
+                    f"Successfully updated {item['domain']} in {backend_name}"
+                )
+                if backend.get_name() == "bind":
+                    # Need to update the named.conf with every indexed zone
+                    backend.update_named_conf(
+                        [d.domain for d in session.query(Domain).all()]
+                    )
+                    # Reload all zones
+                    backend.reload_zone()
+                else:
+                    backend.reload_zone(zone_name=item["domain"])
+
+                # Verify record count matches the source zone from DirectAdmin
+                self._verify_backend_record_count(
+                    backend_name, backend, item["domain"], item["zone_file"]
+                )
+            else:
+                logger.error(
+                    f"Failed to update {item['domain']} in {backend_name}"
+                )
+        except Exception as e:
+            logger.error(f"Error in {backend_name}: {str(e)}")
+
+    def _process_backends_parallel(self, backends, item, session):
+        """Process zone updates across multiple backends in parallel.
+
+        NOTE(review): the same DB session object is handed to every worker
+        thread; SQLAlchemy sessions are not thread-safe — confirm only the
+        BIND path actually touches it, or give each thread its own session.
+        """
+        start_time = time.monotonic()
+        with ThreadPoolExecutor(
+            max_workers=len(backends),
+            thread_name_prefix="backend"
+        ) as executor:
+            futures = {
+                executor.submit(
+                    self._process_single_backend,
+                    backend_name, backend, item, session
+                ): backend_name
+                for backend_name, backend in backends.items()
+            }
+            for future in as_completed(futures):
+                backend_name = futures[future]
+                try:
+                    # _process_single_backend swallows its own errors, so
+                    # this only catches failures in the submission machinery.
+                    future.result()
+                except Exception as e:
+                    logger.error(
+                        f"Unhandled error processing backend "
+                        f"{backend_name}: {str(e)}"
+                    )
+        elapsed = (time.monotonic() - start_time) * 1000
+        logger.debug(
+            f"Parallel processing of {item['domain']} across "
+            f"{len(backends)} backends completed in {elapsed:.0f}ms"
+        )
+
+    def _verify_backend_record_count(
+        self, backend_name, backend, zone_name, zone_data
+    ):
+        """Verify and reconcile the backend record count against the
+        authoritative BIND zone from DirectAdmin.
+
+        After a successful write, this method checks whether the number of
+        records stored in the backend matches the number of records parsed
+        from the source zone file. If there are **extra** records in the
+        backend (e.g. from replication drift or stale data) they are
+        automatically removed via the backend's reconcile method.
+
+        Args:
+            backend_name: Display name of the backend instance
+            backend: The backend instance
+            zone_name: The zone that was just written
+            zone_data: The raw BIND zone file content (authoritative source)
+
+        Returns:
+            None — all outcomes are reported via logging only.
+        """
+        try:
+            expected = count_zone_records(zone_data, zone_name)
+            # count_zone_records signals a parse failure with -1.
+            if expected < 0:
+                logger.warning(
+                    f"[{backend_name}] Could not parse source zone for "
+                    f"{zone_name} — skipping record count verification"
+                )
+                return
+
+            matches, actual = backend.verify_zone_record_count(
+                zone_name, expected
+            )
+
+            if matches:
+                return  # All good
+
+            if actual > expected:
+                logger.warning(
+                    f"[{backend_name}] Backend has {actual - expected} extra "
+                    f"record(s) for {zone_name} — reconciling against "
+                    f"DirectAdmin source zone"
+                )
+                success, removed = backend.reconcile_zone_records(
+                    zone_name, zone_data
+                )
+                if success and removed > 0:
+                    # Verify again after reconciliation
+                    matches, new_count = backend.verify_zone_record_count(
+                        zone_name, expected
+                    )
+                    if matches:
+                        logger.success(
+                            f"[{backend_name}] Reconciliation successful for "
+                            f"{zone_name}: removed {removed} extra record(s), "
+                            f"count now matches source ({new_count})"
+                        )
+                    else:
+                        logger.error(
+                            f"[{backend_name}] Reconciliation for {zone_name} "
+                            f"removed {removed} record(s) but count still "
+                            f"mismatched: expected {expected}, got {new_count}"
+                        )
+            else:
+                # Fewer records than the source: do not delete/rewrite here;
+                # the next push from DirectAdmin is expected to repair it.
+                logger.warning(
+                    f"[{backend_name}] Backend has fewer records than source "
+                    f"for {zone_name} (expected {expected}, got {actual}) — "
+                    f"this may indicate a write failure; the next zone push "
+                    f"from DirectAdmin should correct this"
+                )
+
+        except NotImplementedError:
+            logger.debug(
+                f"[{backend_name}] Record count verification not "
+                f"supported — skipping"
+            )
+        except Exception as e:
+            logger.error(
+                f"[{backend_name}] Error during record count verification "
+                f"for {zone_name}: {e}"
+            )
+
+ def start(self):
+ """Start background workers"""
+ if self._running:
+ return
+
+ self._running = True
+ self._thread = threading.Thread(
+ target=self._process_save_queue, daemon=True, name="save_queue_worker"
+ )
+ self._thread.start()
+ logger.info(f"Started worker thread {self._thread.name}")
+
+    def stop(self):
+        """Stop background workers gracefully"""
+        self._running = False
+        # The worker loop polls with a 5s queue timeout, so join may wait
+        # up to ~5s; the thread is a daemon and won't block interpreter exit.
+        if self._thread:
+            self._thread.join(timeout=5)
+        logger.info("Workers stopped")
+
+ def queue_status(self):
+ """Return current queue status"""
+ return {
+ "save_queue_size": self.save_queue.qsize(),
+ "delete_queue_size": self.delete_queue.qsize(),
+ "worker_alive": self._thread and self._thread.is_alive(),
+ }
diff --git a/docker-compose.yml b/docker-compose.yml
index 2fab4e7..50a4507 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,48 +1,52 @@
-version: '3.7'
-services:
- app:
- image: registry.dockerprod.ultrafast.co.nz/uff/apikeyhandler:0.10
- networks:
- - traefik-net
- volumes:
- - /etc/localtime:/etc/localtime:ro # Mount Timezone config to container
- - /data/swarm-vols/apikeyhandler:/opt/apikeyhandler/config # Store Config on Persistent drive shared between nodes
- deploy:
- mode: replicated
- replicas: 1
- placement:
- constraints:
- - node.role == worker # Place this service on Worker Nodes alternatively may specify manager if you want service on manager node.
- labels:
- - "traefik.http.routers.apikeyauth.rule=Host(`apiauth-internal.dockertest.ultrafast.co.nz`)" # This label creates a route Traefik will listen on
- - "traefik.http.routers.apikeyauth.tls=true" # Enable TLS, in this example using default TLS cert
- - "traefik.http.services.apikeyauth.loadbalancer.server.port=8080" # Set Port to proxy
- - "traefik.enable=true" # This flag enables load balancing through Traefik :)
- - "traefik.docker.network=traefik-net" # Set the network to connect to container on
- - "traefik.http.middlewares.apikeyauth.forwardauth.address=https://apiauth-internal.dockertest.ultrafast.co.nz"
- - "traefik.http.middlewares.apikeyauth.forwardauth.trustForwardHeader=true"
- - "traefik.http.middlewares.apikeyauth.forwardauth.authResponseHeaders=X-Client-Id"
- - "traefik.http.middlewares.apikeyauth.forwardauth.tls.insecureSkipVerify=true"
- test_app:
- image: containous/whoami
- networks:
- - traefik-net
- volumes:
- - /etc/localtime:/etc/localtime:ro # Mount Timezone config to container
- deploy:
- mode: replicated
- replicas: 1
- placement:
- constraints:
- - node.role == worker # Place this service on Worker Nodes alternatively may specify manager if you want service on manager node.
- labels:
- - "traefik.http.routers.testapp.rule=Host(`testapp.dockertest.ultrafast.co.nz`)" # This label creates a route Traefik will listen on
- - "traefik.http.routers.testapp.tls=true" # Enable TLS, in this example using default TLS cert
- - "traefik.http.routers.testapp.middlewares=apikeyauth"
- - "traefik.http.services.testapp.loadbalancer.server.port=80" # Set Port to proxy
- - "traefik.enable=true" # This flag enables load balancing through Traefik :)
- - "traefik.docker.network=traefik-net" # Set the network to connect to container on
+version: '3.8'
-networks:
- traefik-net:
- external: true
\ No newline at end of file
+services:
+ mysql:
+ image: mysql:8.0
+ container_name: dadns_mysql
+ environment:
+ MYSQL_ROOT_PASSWORD: rootpassword
+ MYSQL_DATABASE: coredns
+ MYSQL_USER: coredns
+ MYSQL_PASSWORD: coredns123
+ ports:
+ - "3306:3306"
+ volumes:
+ - ./schema/coredns_mysql.sql:/docker-entrypoint-initdb.d/init.sql
+ - mysql_data:/var/lib/mysql
+ healthcheck:
+ test: ["CMD", "mysqladmin", "ping", "-h", "localhost"]
+ interval: 5s
+ timeout: 5s
+ retries: 5
+
+ dadns:
+ build:
+ dockerfile: Dockerfile.deepseek
+ context: .
+ no_cache: false
+ container_name: dadns_app
+ depends_on:
+ mysql:
+ condition: service_healthy
+ ports:
+ - "2222:2222"
+ volumes:
+ - ./config:/app/config
+ - ./data:/app/data
+ - ./logs:/app/logs
+ environment:
+ - TZ=Pacific/Auckland
+ - DNS_BACKENDS__BIND__ENABLED=true
+ - DNS_BACKENDS__BIND__ZONES_DIR=/etc/named/zones/dadns
+ - DNS_BACKENDS__BIND__NAMED_CONF=/etc/bind/named.conf.local
+ - DNS_BACKENDS__COREDNS_MYSQL__ENABLED=true
+ - DNS_BACKENDS__COREDNS_MYSQL__HOST=mysql
+ - DNS_BACKENDS__COREDNS_MYSQL__PORT=3306
+ - DNS_BACKENDS__COREDNS_MYSQL__DATABASE=coredns
+ - DNS_BACKENDS__COREDNS_MYSQL__USERNAME=coredns
+ - DNS_BACKENDS__COREDNS_MYSQL__PASSWORD=coredns123
+ restart: unless-stopped
+
+volumes:
+ mysql_data:
\ No newline at end of file
diff --git a/docker/entrypoint.sh b/docker/entrypoint.sh
new file mode 100755
index 0000000..1bbaadd
--- /dev/null
+++ b/docker/entrypoint.sh
@@ -0,0 +1,12 @@
#!/bin/bash
# Container entrypoint: start BIND in the background, then hand PID 1 to the app.
set -euo pipefail

# Run named in foreground mode (-f) as a background job of this script;
# the container runtime restarts the whole container if either process dies.
/usr/sbin/named -u bind -f &

## Initialize MySQL schema if needed
#if [ -f /app/schema/coredns_mysql.sql ]; then
#    mysql -h mysql -u root -prootpassword coredns < /app/schema/coredns_mysql.sql
#fi

# exec so the application replaces this shell as PID 1 and receives the
# SIGTERM sent by `docker stop` directly (a non-exec'd child would not).
exec poetry run python directdnsonly/main.py
\ No newline at end of file
diff --git a/docker/named.conf.local b/docker/named.conf.local
new file mode 100644
index 0000000..e7457c1
--- /dev/null
+++ b/docker/named.conf.local
@@ -0,0 +1,4 @@
// Authoritative zone served by this container; the zone file under
// /etc/named/zones/dadns is written by the dadns application.
zone "guise.nz" {
    type master;
    file "/etc/named/zones/dadns/guise.nz.db";
};
\ No newline at end of file
diff --git a/docker/named.conf.options b/docker/named.conf.options
new file mode 100644
index 0000000..1d5d26f
--- /dev/null
+++ b/docker/named.conf.options
@@ -0,0 +1,8 @@
// Global BIND options for an authoritative-only server.
options {
    directory "/var/cache/bind";
    allow-query { any; };     // answer queries from anyone (authoritative data only)
    recursion no;             // do not act as a recursive resolver
    dnssec-validation no;     // validation is a resolver feature; unused here
    listen-on { any; };
    listen-on-v6 { any; };
};
\ No newline at end of file
diff --git a/justfile b/justfile
new file mode 100644
index 0000000..a9a9ab4
--- /dev/null
+++ b/justfile
@@ -0,0 +1,17 @@
#!/usr/bin/env just --justfile

# Entry-point module under src/ and the name of the produced binary.
APP_NAME := "directdnsonly"

# Build a single-file executable with PyInstaller.
# The --hidden-import flags force-bundle modules that PyInstaller's static
# analysis does not detect (dynamically imported backends of cherrypy/cheroot
# and the project's local `lib` package).
build:
    cd src && \
    pyinstaller \
    -p . \
    --hidden-import=json \
    --hidden-import=pyopenssl \
    --hidden-import=pymysql \
    --hidden-import=jaraco \
    --hidden-import=cheroot \
    --hidden-import=cheroot.ssl.pyopenssl \
    --hidden-import=cheroot.ssl.builtin \
    --hidden-import=lib \
    --hidden-import=os \
    --hidden-import=builtins \
    --noconfirm --onefile {{APP_NAME}}.py
diff --git a/poetry.lock b/poetry.lock
new file mode 100644
index 0000000..be43e7d
--- /dev/null
+++ b/poetry.lock
@@ -0,0 +1,1073 @@
+# This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand.
+
+[[package]]
+name = "altgraph"
+version = "0.17.4"
+description = "Python graph (network) package"
+optional = false
+python-versions = "*"
+groups = ["dev"]
+files = [
+ {file = "altgraph-0.17.4-py2.py3-none-any.whl", hash = "sha256:642743b4750de17e655e6711601b077bc6598dbfa3ba5fa2b2a35ce12b508dff"},
+ {file = "altgraph-0.17.4.tar.gz", hash = "sha256:1b5afbb98f6c4dcadb2e2ae6ab9fa994bbb8c1d75f4fa96d340f9437ae454406"},
+]
+
+[[package]]
+name = "autocommand"
+version = "2.2.2"
+description = "A library to create a command-line program from a function"
+optional = false
+python-versions = ">=3.7"
+groups = ["main"]
+files = [
+ {file = "autocommand-2.2.2-py3-none-any.whl", hash = "sha256:710afe251075e038e19e815e25f8155cabe02196cfb545b2185e0d9c8b2b0459"},
+ {file = "autocommand-2.2.2.tar.gz", hash = "sha256:878de9423c5596491167225c2a455043c3130fb5b7286ac83443d45e74955f34"},
+]
+
+[[package]]
+name = "backports-tarfile"
+version = "1.2.0"
+description = "Backport of CPython tarfile module"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+markers = "python_version == \"3.11\""
+files = [
+ {file = "backports.tarfile-1.2.0-py3-none-any.whl", hash = "sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34"},
+ {file = "backports_tarfile-1.2.0.tar.gz", hash = "sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991"},
+]
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9.3)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+testing = ["jaraco.test", "pytest (!=8.0.*)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)"]
+
+[[package]]
+name = "black"
+version = "25.1.0"
+description = "The uncompromising code formatter."
+optional = false
+python-versions = ">=3.9"
+groups = ["dev"]
+files = [
+ {file = "black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32"},
+ {file = "black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da"},
+ {file = "black-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:055e59b198df7ac0b7efca5ad7ff2516bca343276c466be72eb04a3bcc1f82d7"},
+ {file = "black-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:db8ea9917d6f8fc62abd90d944920d95e73c83a5ee3383493e35d271aca872e9"},
+ {file = "black-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a39337598244de4bae26475f77dda852ea00a93bd4c728e09eacd827ec929df0"},
+ {file = "black-25.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96c1c7cd856bba8e20094e36e0f948718dc688dba4a9d78c3adde52b9e6c2299"},
+ {file = "black-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce2e264d59c91e52d8000d507eb20a9aca4a778731a08cfff7e5ac4a4bb7096"},
+ {file = "black-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:172b1dbff09f86ce6f4eb8edf9dede08b1fce58ba194c87d7a4f1a5aa2f5b3c2"},
+ {file = "black-25.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b"},
+ {file = "black-25.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc"},
+ {file = "black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f"},
+ {file = "black-25.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba"},
+ {file = "black-25.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f"},
+ {file = "black-25.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3"},
+ {file = "black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171"},
+ {file = "black-25.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18"},
+ {file = "black-25.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1ee0a0c330f7b5130ce0caed9936a904793576ef4d2b98c40835d6a65afa6a0"},
+ {file = "black-25.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3df5f1bf91d36002b0a75389ca8663510cf0531cca8aa5c1ef695b46d98655f"},
+ {file = "black-25.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9e6827d563a2c820772b32ce8a42828dc6790f095f441beef18f96aa6f8294e"},
+ {file = "black-25.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:bacabb307dca5ebaf9c118d2d2f6903da0d62c9faa82bd21a33eecc319559355"},
+ {file = "black-25.1.0-py3-none-any.whl", hash = "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717"},
+ {file = "black-25.1.0.tar.gz", hash = "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666"},
+]
+
+[package.dependencies]
+click = ">=8.0.0"
+mypy-extensions = ">=0.4.3"
+packaging = ">=22.0"
+pathspec = ">=0.9.0"
+platformdirs = ">=2"
+
+[package.extras]
+colorama = ["colorama (>=0.4.3)"]
+d = ["aiohttp (>=3.10)"]
+jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
+uvloop = ["uvloop (>=0.15.2)"]
+
+[[package]]
+name = "cheroot"
+version = "10.0.1"
+description = "Highly-optimized, pure-python HTTP server"
+optional = false
+python-versions = ">=3.6"
+groups = ["main"]
+files = [
+ {file = "cheroot-10.0.1-py3-none-any.whl", hash = "sha256:6ea332f20bfcede14e66174d112b30e9807492320d737ca628badc924d997595"},
+ {file = "cheroot-10.0.1.tar.gz", hash = "sha256:e0b82f797658d26b8613ec8eb563c3b08e6bd6a7921e9d5089bd1175ad1b1740"},
+]
+
+[package.dependencies]
+"jaraco.functools" = "*"
+more-itertools = ">=2.6"
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=3.2)", "python-dateutil", "sphinx (>=1.8.2)", "sphinx-tabs (>=1.1.0)", "sphinxcontrib-apidoc (>=0.3.0)"]
+
+[[package]]
+name = "cherrypy"
+version = "18.10.0"
+description = "Object-Oriented HTTP framework"
+optional = false
+python-versions = ">=3.6"
+groups = ["main"]
+files = [
+ {file = "CherryPy-18.10.0-py3-none-any.whl", hash = "sha256:129e444b9a63cea4e765481b156376f1cfe319e64caaaec2485636532373b298"},
+ {file = "cherrypy-18.10.0.tar.gz", hash = "sha256:6c70e78ee11300e8b21c0767c542ae6b102a49cac5cfd4e3e313d7bb907c5891"},
+]
+
+[package.dependencies]
+cheroot = ">=8.2.1"
+"jaraco.collections" = "*"
+more-itertools = "*"
+portend = ">=2.1.1"
+"zc.lockfile" = "*"
+
+[package.extras]
+docs = ["alabaster", "docutils", "jaraco.packaging (>=3.2)", "rst.linker (>=1.11)", "sphinx", "sphinxcontrib-apidoc (>=0.3.0)"]
+json = ["simplejson"]
+memcached-session = ["python-memcached (>=1.58)"]
+routes-dispatcher = ["routes (>=2.3.1)"]
+ssl = ["pyOpenSSL"]
+testing = ["objgraph", "path.py", "pytest (>=5.3.5)", "pytest-cov", "pytest-forked", "pytest-services (>=2)", "pytest-sugar", "requests-toolbelt", "setuptools"]
+xcgi = ["flup"]
+
+[[package]]
+name = "click"
+version = "8.2.0"
+description = "Composable command line interface toolkit"
+optional = false
+python-versions = ">=3.10"
+groups = ["dev"]
+files = [
+ {file = "click-8.2.0-py3-none-any.whl", hash = "sha256:6b303f0b2aa85f1cb4e5303078fadcbcd4e476f114fab9b5007005711839325c"},
+ {file = "click-8.2.0.tar.gz", hash = "sha256:f5452aeddd9988eefa20f90f05ab66f17fce1ee2a36907fd30b05bbb5953814d"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "platform_system == \"Windows\""}
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+description = "Cross-platform colored terminal text."
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+groups = ["main", "dev"]
+files = [
+ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
+ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
+]
+markers = {main = "sys_platform == \"win32\"", dev = "platform_system == \"Windows\" or sys_platform == \"win32\""}
+
+[[package]]
+name = "coverage"
+version = "7.8.0"
+description = "Code coverage measurement for Python"
+optional = false
+python-versions = ">=3.9"
+groups = ["dev"]
+files = [
+ {file = "coverage-7.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2931f66991175369859b5fd58529cd4b73582461877ecfd859b6549869287ffe"},
+ {file = "coverage-7.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:52a523153c568d2c0ef8826f6cc23031dc86cffb8c6aeab92c4ff776e7951b28"},
+ {file = "coverage-7.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c8a5c139aae4c35cbd7cadca1df02ea8cf28a911534fc1b0456acb0b14234f3"},
+ {file = "coverage-7.8.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a26c0c795c3e0b63ec7da6efded5f0bc856d7c0b24b2ac84b4d1d7bc578d676"},
+ {file = "coverage-7.8.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:821f7bcbaa84318287115d54becb1915eece6918136c6f91045bb84e2f88739d"},
+ {file = "coverage-7.8.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a321c61477ff8ee705b8a5fed370b5710c56b3a52d17b983d9215861e37b642a"},
+ {file = "coverage-7.8.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:ed2144b8a78f9d94d9515963ed273d620e07846acd5d4b0a642d4849e8d91a0c"},
+ {file = "coverage-7.8.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:042e7841a26498fff7a37d6fda770d17519982f5b7d8bf5278d140b67b61095f"},
+ {file = "coverage-7.8.0-cp310-cp310-win32.whl", hash = "sha256:f9983d01d7705b2d1f7a95e10bbe4091fabc03a46881a256c2787637b087003f"},
+ {file = "coverage-7.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:5a570cd9bd20b85d1a0d7b009aaf6c110b52b5755c17be6962f8ccd65d1dbd23"},
+ {file = "coverage-7.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e7ac22a0bb2c7c49f441f7a6d46c9c80d96e56f5a8bc6972529ed43c8b694e27"},
+ {file = "coverage-7.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bf13d564d310c156d1c8e53877baf2993fb3073b2fc9f69790ca6a732eb4bfea"},
+ {file = "coverage-7.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5761c70c017c1b0d21b0815a920ffb94a670c8d5d409d9b38857874c21f70d7"},
+ {file = "coverage-7.8.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5ff52d790c7e1628241ffbcaeb33e07d14b007b6eb00a19320c7b8a7024c040"},
+ {file = "coverage-7.8.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d39fc4817fd67b3915256af5dda75fd4ee10621a3d484524487e33416c6f3543"},
+ {file = "coverage-7.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b44674870709017e4b4036e3d0d6c17f06a0e6d4436422e0ad29b882c40697d2"},
+ {file = "coverage-7.8.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8f99eb72bf27cbb167b636eb1726f590c00e1ad375002230607a844d9e9a2318"},
+ {file = "coverage-7.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b571bf5341ba8c6bc02e0baeaf3b061ab993bf372d982ae509807e7f112554e9"},
+ {file = "coverage-7.8.0-cp311-cp311-win32.whl", hash = "sha256:e75a2ad7b647fd8046d58c3132d7eaf31b12d8a53c0e4b21fa9c4d23d6ee6d3c"},
+ {file = "coverage-7.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:3043ba1c88b2139126fc72cb48574b90e2e0546d4c78b5299317f61b7f718b78"},
+ {file = "coverage-7.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bbb5cc845a0292e0c520656d19d7ce40e18d0e19b22cb3e0409135a575bf79fc"},
+ {file = "coverage-7.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4dfd9a93db9e78666d178d4f08a5408aa3f2474ad4d0e0378ed5f2ef71640cb6"},
+ {file = "coverage-7.8.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f017a61399f13aa6d1039f75cd467be388d157cd81f1a119b9d9a68ba6f2830d"},
+ {file = "coverage-7.8.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0915742f4c82208ebf47a2b154a5334155ed9ef9fe6190674b8a46c2fb89cb05"},
+ {file = "coverage-7.8.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a40fcf208e021eb14b0fac6bdb045c0e0cab53105f93ba0d03fd934c956143a"},
+ {file = "coverage-7.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a1f406a8e0995d654b2ad87c62caf6befa767885301f3b8f6f73e6f3c31ec3a6"},
+ {file = "coverage-7.8.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:77af0f6447a582fdc7de5e06fa3757a3ef87769fbb0fdbdeba78c23049140a47"},
+ {file = "coverage-7.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f2d32f95922927186c6dbc8bc60df0d186b6edb828d299ab10898ef3f40052fe"},
+ {file = "coverage-7.8.0-cp312-cp312-win32.whl", hash = "sha256:769773614e676f9d8e8a0980dd7740f09a6ea386d0f383db6821df07d0f08545"},
+ {file = "coverage-7.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:e5d2b9be5b0693cf21eb4ce0ec8d211efb43966f6657807f6859aab3814f946b"},
+ {file = "coverage-7.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5ac46d0c2dd5820ce93943a501ac5f6548ea81594777ca585bf002aa8854cacd"},
+ {file = "coverage-7.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:771eb7587a0563ca5bb6f622b9ed7f9d07bd08900f7589b4febff05f469bea00"},
+ {file = "coverage-7.8.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42421e04069fb2cbcbca5a696c4050b84a43b05392679d4068acbe65449b5c64"},
+ {file = "coverage-7.8.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:554fec1199d93ab30adaa751db68acec2b41c5602ac944bb19187cb9a41a8067"},
+ {file = "coverage-7.8.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aaeb00761f985007b38cf463b1d160a14a22c34eb3f6a39d9ad6fc27cb73008"},
+ {file = "coverage-7.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:581a40c7b94921fffd6457ffe532259813fc68eb2bdda60fa8cc343414ce3733"},
+ {file = "coverage-7.8.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f319bae0321bc838e205bf9e5bc28f0a3165f30c203b610f17ab5552cff90323"},
+ {file = "coverage-7.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04bfec25a8ef1c5f41f5e7e5c842f6b615599ca8ba8391ec33a9290d9d2db3a3"},
+ {file = "coverage-7.8.0-cp313-cp313-win32.whl", hash = "sha256:dd19608788b50eed889e13a5d71d832edc34fc9dfce606f66e8f9f917eef910d"},
+ {file = "coverage-7.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:a9abbccd778d98e9c7e85038e35e91e67f5b520776781d9a1e2ee9d400869487"},
+ {file = "coverage-7.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:18c5ae6d061ad5b3e7eef4363fb27a0576012a7447af48be6c75b88494c6cf25"},
+ {file = "coverage-7.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:95aa6ae391a22bbbce1b77ddac846c98c5473de0372ba5c463480043a07bff42"},
+ {file = "coverage-7.8.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e013b07ba1c748dacc2a80e69a46286ff145935f260eb8c72df7185bf048f502"},
+ {file = "coverage-7.8.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d766a4f0e5aa1ba056ec3496243150698dc0481902e2b8559314368717be82b1"},
+ {file = "coverage-7.8.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad80e6b4a0c3cb6f10f29ae4c60e991f424e6b14219d46f1e7d442b938ee68a4"},
+ {file = "coverage-7.8.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b87eb6fc9e1bb8f98892a2458781348fa37e6925f35bb6ceb9d4afd54ba36c73"},
+ {file = "coverage-7.8.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:d1ba00ae33be84066cfbe7361d4e04dec78445b2b88bdb734d0d1cbab916025a"},
+ {file = "coverage-7.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f3c38e4e5ccbdc9198aecc766cedbb134b2d89bf64533973678dfcf07effd883"},
+ {file = "coverage-7.8.0-cp313-cp313t-win32.whl", hash = "sha256:379fe315e206b14e21db5240f89dc0774bdd3e25c3c58c2c733c99eca96f1ada"},
+ {file = "coverage-7.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2e4b6b87bb0c846a9315e3ab4be2d52fac905100565f4b92f02c445c8799e257"},
+ {file = "coverage-7.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fa260de59dfb143af06dcf30c2be0b200bed2a73737a8a59248fcb9fa601ef0f"},
+ {file = "coverage-7.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:96121edfa4c2dfdda409877ea8608dd01de816a4dc4a0523356067b305e4e17a"},
+ {file = "coverage-7.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b8af63b9afa1031c0ef05b217faa598f3069148eeee6bb24b79da9012423b82"},
+ {file = "coverage-7.8.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89b1f4af0d4afe495cd4787a68e00f30f1d15939f550e869de90a86efa7e0814"},
+ {file = "coverage-7.8.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94ec0be97723ae72d63d3aa41961a0b9a6f5a53ff599813c324548d18e3b9e8c"},
+ {file = "coverage-7.8.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8a1d96e780bdb2d0cbb297325711701f7c0b6f89199a57f2049e90064c29f6bd"},
+ {file = "coverage-7.8.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f1d8a2a57b47142b10374902777e798784abf400a004b14f1b0b9eaf1e528ba4"},
+ {file = "coverage-7.8.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cf60dd2696b457b710dd40bf17ad269d5f5457b96442f7f85722bdb16fa6c899"},
+ {file = "coverage-7.8.0-cp39-cp39-win32.whl", hash = "sha256:be945402e03de47ba1872cd5236395e0f4ad635526185a930735f66710e1bd3f"},
+ {file = "coverage-7.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:90e7fbc6216ecaffa5a880cdc9c77b7418c1dcb166166b78dbc630d07f278cc3"},
+ {file = "coverage-7.8.0-pp39.pp310.pp311-none-any.whl", hash = "sha256:b8194fb8e50d556d5849753de991d390c5a1edeeba50f68e3a9253fbd8bf8ccd"},
+ {file = "coverage-7.8.0-py3-none-any.whl", hash = "sha256:dbf364b4c5e7bae9250528167dfe40219b62e2d573c854d74be213e1e52069f7"},
+ {file = "coverage-7.8.0.tar.gz", hash = "sha256:7a3d62b3b03b4b6fd41a085f3574874cf946cb4604d2b4d3e8dca8cd570ca501"},
+]
+
+[package.extras]
+toml = ["tomli ; python_full_version <= \"3.11.0a6\""]
+
+[[package]]
+name = "distconfig3"
+version = "1.0.1"
+description = "Library to manage configuration using Zookeeper, Etcd, Consul"
+optional = false
+python-versions = "*"
+groups = ["main"]
+files = [
+ {file = "distconfig3-1.0.1-py2.py3-none-any.whl", hash = "sha256:823e35ae044677e8aa77bed8d9be0780862a2500c63cf95ce85544b9d3d9fc89"},
+ {file = "distconfig3-1.0.1.tar.gz", hash = "sha256:7d2c7f30a57ef494c5683270587ba7593318746c6e22b9b8953e288c9c303c65"},
+]
+
+[package.extras]
+consul = ["python-consul (>=0.3.15)"]
+etcd = ["python-etcd (>=0.3.3)"]
+gevent = ["gevent (>=1.4.0)"]
+zookeeper = ["kazoo (>=2.0)"]
+
+[[package]]
+name = "dnspython"
+version = "2.7.0"
+description = "DNS toolkit"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86"},
+ {file = "dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1"},
+]
+
+[package.extras]
+dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "hypercorn (>=0.16.0)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "quart-trio (>=0.11.0)", "sphinx (>=7.2.0)", "sphinx-rtd-theme (>=2.0.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"]
+dnssec = ["cryptography (>=43)"]
+doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"]
+doq = ["aioquic (>=1.0.0)"]
+idna = ["idna (>=3.7)"]
+trio = ["trio (>=0.23)"]
+wmi = ["wmi (>=1.5.1)"]
+
+[[package]]
+name = "greenlet"
+version = "3.2.2"
+description = "Lightweight in-process concurrent programming"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""
+files = [
+ {file = "greenlet-3.2.2-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:c49e9f7c6f625507ed83a7485366b46cbe325717c60837f7244fc99ba16ba9d6"},
+ {file = "greenlet-3.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3cc1a3ed00ecfea8932477f729a9f616ad7347a5e55d50929efa50a86cb7be7"},
+ {file = "greenlet-3.2.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7c9896249fbef2c615853b890ee854f22c671560226c9221cfd27c995db97e5c"},
+ {file = "greenlet-3.2.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7409796591d879425997a518138889d8d17e63ada7c99edc0d7a1c22007d4907"},
+ {file = "greenlet-3.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7791dcb496ec53d60c7f1c78eaa156c21f402dda38542a00afc3e20cae0f480f"},
+ {file = "greenlet-3.2.2-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d8009ae46259e31bc73dc183e402f548e980c96f33a6ef58cc2e7865db012e13"},
+ {file = "greenlet-3.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:fd9fb7c941280e2c837b603850efc93c999ae58aae2b40765ed682a6907ebbc5"},
+ {file = "greenlet-3.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:00cd814b8959b95a546e47e8d589610534cfb71f19802ea8a2ad99d95d702057"},
+ {file = "greenlet-3.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:d0cb7d47199001de7658c213419358aa8937df767936506db0db7ce1a71f4a2f"},
+ {file = "greenlet-3.2.2-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:dcb9cebbf3f62cb1e5afacae90761ccce0effb3adaa32339a0670fe7805d8068"},
+ {file = "greenlet-3.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf3fc9145141250907730886b031681dfcc0de1c158f3cc51c092223c0f381ce"},
+ {file = "greenlet-3.2.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:efcdfb9df109e8a3b475c016f60438fcd4be68cd13a365d42b35914cdab4bb2b"},
+ {file = "greenlet-3.2.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4bd139e4943547ce3a56ef4b8b1b9479f9e40bb47e72cc906f0f66b9d0d5cab3"},
+ {file = "greenlet-3.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71566302219b17ca354eb274dfd29b8da3c268e41b646f330e324e3967546a74"},
+ {file = "greenlet-3.2.2-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3091bc45e6b0c73f225374fefa1536cd91b1e987377b12ef5b19129b07d93ebe"},
+ {file = "greenlet-3.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:44671c29da26539a5f142257eaba5110f71887c24d40df3ac87f1117df589e0e"},
+ {file = "greenlet-3.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c23ea227847c9dbe0b3910f5c0dd95658b607137614eb821e6cbaecd60d81cc6"},
+ {file = "greenlet-3.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:0a16fb934fcabfdfacf21d79e6fed81809d8cd97bc1be9d9c89f0e4567143d7b"},
+ {file = "greenlet-3.2.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:df4d1509efd4977e6a844ac96d8be0b9e5aa5d5c77aa27ca9f4d3f92d3fcf330"},
+ {file = "greenlet-3.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da956d534a6d1b9841f95ad0f18ace637668f680b1339ca4dcfb2c1837880a0b"},
+ {file = "greenlet-3.2.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c7b15fb9b88d9ee07e076f5a683027bc3befd5bb5d25954bb633c385d8b737e"},
+ {file = "greenlet-3.2.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:752f0e79785e11180ebd2e726c8a88109ded3e2301d40abced2543aa5d164275"},
+ {file = "greenlet-3.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ae572c996ae4b5e122331e12bbb971ea49c08cc7c232d1bd43150800a2d6c65"},
+ {file = "greenlet-3.2.2-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:02f5972ff02c9cf615357c17ab713737cccfd0eaf69b951084a9fd43f39833d3"},
+ {file = "greenlet-3.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4fefc7aa68b34b9224490dfda2e70ccf2131368493add64b4ef2d372955c207e"},
+ {file = "greenlet-3.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a31ead8411a027c2c4759113cf2bd473690517494f3d6e4bf67064589afcd3c5"},
+ {file = "greenlet-3.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:b24c7844c0a0afc3ccbeb0b807adeefb7eff2b5599229ecedddcfeb0ef333bec"},
+ {file = "greenlet-3.2.2-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:3ab7194ee290302ca15449f601036007873028712e92ca15fc76597a0aeb4c59"},
+ {file = "greenlet-3.2.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dc5c43bb65ec3669452af0ab10729e8fdc17f87a1f2ad7ec65d4aaaefabf6bf"},
+ {file = "greenlet-3.2.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:decb0658ec19e5c1f519faa9a160c0fc85a41a7e6654b3ce1b44b939f8bf1325"},
+ {file = "greenlet-3.2.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6fadd183186db360b61cb34e81117a096bff91c072929cd1b529eb20dd46e6c5"},
+ {file = "greenlet-3.2.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1919cbdc1c53ef739c94cf2985056bcc0838c1f217b57647cbf4578576c63825"},
+ {file = "greenlet-3.2.2-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3885f85b61798f4192d544aac7b25a04ece5fe2704670b4ab73c2d2c14ab740d"},
+ {file = "greenlet-3.2.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:85f3e248507125bf4af607a26fd6cb8578776197bd4b66e35229cdf5acf1dfbf"},
+ {file = "greenlet-3.2.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:1e76106b6fc55fa3d6fe1c527f95ee65e324a13b62e243f77b48317346559708"},
+ {file = "greenlet-3.2.2-cp313-cp313-win_amd64.whl", hash = "sha256:fe46d4f8e94e637634d54477b0cfabcf93c53f29eedcbdeecaf2af32029b4421"},
+ {file = "greenlet-3.2.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba30e88607fb6990544d84caf3c706c4b48f629e18853fc6a646f82db9629418"},
+ {file = "greenlet-3.2.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:055916fafad3e3388d27dd68517478933a97edc2fc54ae79d3bec827de2c64c4"},
+ {file = "greenlet-3.2.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2593283bf81ca37d27d110956b79e8723f9aa50c4bcdc29d3c0543d4743d2763"},
+ {file = "greenlet-3.2.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89c69e9a10670eb7a66b8cef6354c24671ba241f46152dd3eed447f79c29fb5b"},
+ {file = "greenlet-3.2.2-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:02a98600899ca1ca5d3a2590974c9e3ec259503b2d6ba6527605fcd74e08e207"},
+ {file = "greenlet-3.2.2-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:b50a8c5c162469c3209e5ec92ee4f95c8231b11db6a04db09bbe338176723bb8"},
+ {file = "greenlet-3.2.2-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:45f9f4853fb4cc46783085261c9ec4706628f3b57de3e68bae03e8f8b3c0de51"},
+ {file = "greenlet-3.2.2-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:9ea5231428af34226c05f927e16fc7f6fa5e39e3ad3cd24ffa48ba53a47f4240"},
+ {file = "greenlet-3.2.2-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:1e4747712c4365ef6765708f948acc9c10350719ca0545e362c24ab973017370"},
+ {file = "greenlet-3.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:782743700ab75716650b5238a4759f840bb2dcf7bff56917e9ffdf9f1f23ec59"},
+ {file = "greenlet-3.2.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:354f67445f5bed6604e493a06a9a49ad65675d3d03477d38a4db4a427e9aad0e"},
+ {file = "greenlet-3.2.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3aeca9848d08ce5eb653cf16e15bb25beeab36e53eb71cc32569f5f3afb2a3aa"},
+ {file = "greenlet-3.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cb8553ee954536500d88a1a2f58fcb867e45125e600e80f586ade399b3f8819"},
+ {file = "greenlet-3.2.2-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1592a615b598643dbfd566bac8467f06c8c8ab6e56f069e573832ed1d5d528cc"},
+ {file = "greenlet-3.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1f72667cc341c95184f1c68f957cb2d4fc31eef81646e8e59358a10ce6689457"},
+ {file = "greenlet-3.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a8fa80665b1a29faf76800173ff5325095f3e66a78e62999929809907aca5659"},
+ {file = "greenlet-3.2.2-cp39-cp39-win32.whl", hash = "sha256:6629311595e3fe7304039c67f00d145cd1d38cf723bb5b99cc987b23c1433d61"},
+ {file = "greenlet-3.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:eeb27bece45c0c2a5842ac4c5a1b5c2ceaefe5711078eed4e8043159fa05c834"},
+ {file = "greenlet-3.2.2.tar.gz", hash = "sha256:ad053d34421a2debba45aa3cc39acf454acbcd025b3fc1a9f8a0dee237abd485"},
+]
+
+[package.extras]
+docs = ["Sphinx", "furo"]
+test = ["objgraph", "psutil"]
+
+[[package]]
+name = "iniconfig"
+version = "2.1.0"
+description = "brain-dead simple config-ini parsing"
+optional = false
+python-versions = ">=3.8"
+groups = ["dev"]
+files = [
+ {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"},
+ {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"},
+]
+
+[[package]]
+name = "jaraco-collections"
+version = "5.1.0"
+description = "Collection objects similar to those in stdlib by jaraco"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+ {file = "jaraco.collections-5.1.0-py3-none-any.whl", hash = "sha256:a9480be7fe741d34639b3c32049066d7634b520746552d1a5d0fcda07ada1020"},
+ {file = "jaraco_collections-5.1.0.tar.gz", hash = "sha256:0e4829409d39ad18a40aa6754fee2767f4d9730c4ba66dc9df89f1d2756994c2"},
+]
+
+[package.dependencies]
+"jaraco.text" = "*"
+
+[package.extras]
+check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""]
+cover = ["pytest-cov"]
+doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+enabler = ["pytest-enabler (>=2.2)"]
+test = ["pytest (>=6,!=8.1.*)"]
+type = ["pytest-mypy"]
+
+[[package]]
+name = "jaraco-context"
+version = "6.0.1"
+description = "Useful decorators and context managers"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+ {file = "jaraco.context-6.0.1-py3-none-any.whl", hash = "sha256:f797fc481b490edb305122c9181830a3a5b76d84ef6d1aef2fb9b47ab956f9e4"},
+ {file = "jaraco_context-6.0.1.tar.gz", hash = "sha256:9bae4ea555cf0b14938dc0aee7c9f32ed303aa20a3b73e7dc80111628792d1b3"},
+]
+
+[package.dependencies]
+"backports.tarfile" = {version = "*", markers = "python_version < \"3.12\""}
+
+[package.extras]
+doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+test = ["portend", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""]
+
+[[package]]
+name = "jaraco-functools"
+version = "4.1.0"
+description = "Functools like those found in stdlib"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+ {file = "jaraco.functools-4.1.0-py3-none-any.whl", hash = "sha256:ad159f13428bc4acbf5541ad6dec511f91573b90fba04df61dafa2a1231cf649"},
+ {file = "jaraco_functools-4.1.0.tar.gz", hash = "sha256:70f7e0e2ae076498e212562325e805204fc092d7b4c17e0e86c959e249701a9d"},
+]
+
+[package.dependencies]
+more-itertools = "*"
+
+[package.extras]
+check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""]
+cover = ["pytest-cov"]
+doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+enabler = ["pytest-enabler (>=2.2)"]
+test = ["jaraco.classes", "pytest (>=6,!=8.1.*)"]
+type = ["pytest-mypy"]
+
+[[package]]
+name = "jaraco-text"
+version = "4.0.0"
+description = "Module for text manipulation"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+ {file = "jaraco.text-4.0.0-py3-none-any.whl", hash = "sha256:08de508939b5e681b14cdac2f1f73036cd97f6f8d7b25e96b8911a9a428ca0d1"},
+ {file = "jaraco_text-4.0.0.tar.gz", hash = "sha256:5b71fecea69ab6f939d4c906c04fee1eda76500d1641117df6ec45b865f10db0"},
+]
+
+[package.dependencies]
+autocommand = "*"
+"jaraco.context" = ">=4.1"
+"jaraco.functools" = "*"
+more-itertools = "*"
+
+[package.extras]
+doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+inflect = ["inflect"]
+test = ["pathlib2 ; python_version < \"3.10\"", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""]
+
+[[package]]
+name = "loguru"
+version = "0.7.3"
+description = "Python logging made (stupidly) simple"
+optional = false
+python-versions = "<4.0,>=3.5"
+groups = ["main"]
+files = [
+ {file = "loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c"},
+ {file = "loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6"},
+]
+
+[package.dependencies]
+colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""}
+win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""}
+
+[package.extras]
+dev = ["Sphinx (==8.1.3) ; python_version >= \"3.11\"", "build (==1.2.2) ; python_version >= \"3.11\"", "colorama (==0.4.5) ; python_version < \"3.8\"", "colorama (==0.4.6) ; python_version >= \"3.8\"", "exceptiongroup (==1.1.3) ; python_version >= \"3.7\" and python_version < \"3.11\"", "freezegun (==1.1.0) ; python_version < \"3.8\"", "freezegun (==1.5.0) ; python_version >= \"3.8\"", "mypy (==v0.910) ; python_version < \"3.6\"", "mypy (==v0.971) ; python_version == \"3.6\"", "mypy (==v1.13.0) ; python_version >= \"3.8\"", "mypy (==v1.4.1) ; python_version == \"3.7\"", "myst-parser (==4.0.0) ; python_version >= \"3.11\"", "pre-commit (==4.0.1) ; python_version >= \"3.9\"", "pytest (==6.1.2) ; python_version < \"3.8\"", "pytest (==8.3.2) ; python_version >= \"3.8\"", "pytest-cov (==2.12.1) ; python_version < \"3.8\"", "pytest-cov (==5.0.0) ; python_version == \"3.8\"", "pytest-cov (==6.0.0) ; python_version >= \"3.9\"", "pytest-mypy-plugins (==1.9.3) ; python_version >= \"3.6\" and python_version < \"3.8\"", "pytest-mypy-plugins (==3.1.0) ; python_version >= \"3.8\"", "sphinx-rtd-theme (==3.0.2) ; python_version >= \"3.11\"", "tox (==3.27.1) ; python_version < \"3.8\"", "tox (==4.23.2) ; python_version >= \"3.8\"", "twine (==6.0.1) ; python_version >= \"3.11\""]
+
+[[package]]
+name = "macholib"
+version = "1.16.3"
+description = "Mach-O header analysis and editing"
+optional = false
+python-versions = "*"
+groups = ["dev"]
+markers = "sys_platform == \"darwin\""
+files = [
+ {file = "macholib-1.16.3-py2.py3-none-any.whl", hash = "sha256:0e315d7583d38b8c77e815b1ecbdbf504a8258d8b3e17b61165c6feb60d18f2c"},
+ {file = "macholib-1.16.3.tar.gz", hash = "sha256:07ae9e15e8e4cd9a788013d81f5908b3609aa76f9b1421bae9c4d7606ec86a30"},
+]
+
+[package.dependencies]
+altgraph = ">=0.17"
+
+[[package]]
+name = "more-itertools"
+version = "10.7.0"
+description = "More routines for operating on iterables, beyond itertools"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "more_itertools-10.7.0-py3-none-any.whl", hash = "sha256:d43980384673cb07d2f7d2d918c616b30c659c089ee23953f601d6609c67510e"},
+ {file = "more_itertools-10.7.0.tar.gz", hash = "sha256:9fddd5403be01a94b204faadcff459ec3568cf110265d3c54323e1e866ad29d3"},
+]
+
+[[package]]
+name = "mypy-extensions"
+version = "1.1.0"
+description = "Type system extensions for programs checked with the mypy type checker."
+optional = false
+python-versions = ">=3.8"
+groups = ["dev"]
+files = [
+ {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"},
+ {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"},
+]
+
+[[package]]
+name = "packaging"
+version = "23.2"
+description = "Core utilities for Python packages"
+optional = false
+python-versions = ">=3.7"
+groups = ["dev"]
+files = [
+ {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"},
+ {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"},
+]
+
+[[package]]
+name = "pathspec"
+version = "0.12.1"
+description = "Utility library for gitignore style pattern matching of file paths."
+optional = false
+python-versions = ">=3.8"
+groups = ["dev"]
+files = [
+ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
+ {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
+]
+
+[[package]]
+name = "pefile"
+version = "2023.2.7"
+description = "Python PE parsing module"
+optional = false
+python-versions = ">=3.6.0"
+groups = ["dev"]
+markers = "sys_platform == \"win32\""
+files = [
+ {file = "pefile-2023.2.7-py3-none-any.whl", hash = "sha256:da185cd2af68c08a6cd4481f7325ed600a88f6a813bad9dea07ab3ef73d8d8d6"},
+ {file = "pefile-2023.2.7.tar.gz", hash = "sha256:82e6114004b3d6911c77c3953e3838654b04511b8b66e8583db70c65998017dc"},
+]
+
+[[package]]
+name = "persist-queue"
+version = "1.0.0"
+description = "A thread-safe disk based persistent queue in Python."
+optional = false
+python-versions = "*"
+groups = ["main"]
+files = [
+ {file = "persist-queue-1.0.0.tar.gz", hash = "sha256:3ffb746902d3023fd09eb46897609fdee6c77b1641f19e2fc8d98d744bdfc845"},
+ {file = "persist_queue-1.0.0-py3-none-any.whl", hash = "sha256:81bb20030b480fcacecc3abe6261480c818246f4d838fdf0217e36c2552a5f3a"},
+]
+
+[package.extras]
+extra = ["DBUtils (<3.0.0)", "PyMySQL", "cbor2 (>=5.2.0)", "msgpack (>=0.5.6)"]
+
+[[package]]
+name = "platformdirs"
+version = "4.3.8"
+description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`."
+optional = false
+python-versions = ">=3.9"
+groups = ["dev"]
+files = [
+ {file = "platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4"},
+ {file = "platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc"},
+]
+
+[package.extras]
+docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"]
+test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.4)", "pytest-cov (>=6)", "pytest-mock (>=3.14)"]
+type = ["mypy (>=1.14.1)"]
+
+[[package]]
+name = "pluggy"
+version = "1.6.0"
+description = "plugin and hook calling mechanisms for python"
+optional = false
+python-versions = ">=3.9"
+groups = ["dev"]
+files = [
+ {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"},
+ {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"},
+]
+
+[package.extras]
+dev = ["pre-commit", "tox"]
+testing = ["coverage", "pytest", "pytest-benchmark"]
+
+[[package]]
+name = "portend"
+version = "3.2.0"
+description = "TCP port monitoring and discovery"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+ {file = "portend-3.2.0-py3-none-any.whl", hash = "sha256:8b3fe3f78779df906559a21d9eaa6e21c8fa5a7a8cc76362cbbe1e16777399cf"},
+ {file = "portend-3.2.0.tar.gz", hash = "sha256:5250a352c19c959d767cac878b829d93e5dc7625a5143399a2a00dc6628ffb72"},
+]
+
+[package.dependencies]
+tempora = ">=1.8"
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+testing = ["pytest (>=6)", "pytest-black (>=0.3.7) ; platform_python_implementation != \"PyPy\"", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1) ; platform_python_implementation != \"PyPy\"", "pytest-ruff"]
+
+[[package]]
+name = "pyinstaller"
+version = "6.13.0"
+description = "PyInstaller bundles a Python application and all its dependencies into a single package."
+optional = false
+python-versions = "<3.14,>=3.8"
+groups = ["dev"]
+files = [
+ {file = "pyinstaller-6.13.0-py3-none-macosx_10_13_universal2.whl", hash = "sha256:aa404f0b02cd57948098055e76ee190b8e65ccf7a2a3f048e5000f668317069f"},
+ {file = "pyinstaller-6.13.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:92efcf2f09e78f07b568c5cb7ed48c9940f5dad627af4b49bede6320fab2a06e"},
+ {file = "pyinstaller-6.13.0-py3-none-manylinux2014_i686.whl", hash = "sha256:9f82f113c463f012faa0e323d952ca30a6f922685d9636e754bd3a256c7ed200"},
+ {file = "pyinstaller-6.13.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:db0e7945ebe276f604eb7c36e536479556ab32853412095e19172a5ec8fca1c5"},
+ {file = "pyinstaller-6.13.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:92fe7337c5aa08d42b38d7a79614492cb571489f2cb0a8f91dc9ef9ccbe01ed3"},
+ {file = "pyinstaller-6.13.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:bc09795f5954135dd4486c1535650958c8218acb954f43860e4b05fb515a21c0"},
+ {file = "pyinstaller-6.13.0-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:589937548d34978c568cfdc39f31cf386f45202bc27fdb8facb989c79dfb4c02"},
+ {file = "pyinstaller-6.13.0-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:b7260832f7501ba1d2ce1834d4cddc0f2b94315282bc89c59333433715015447"},
+ {file = "pyinstaller-6.13.0-py3-none-win32.whl", hash = "sha256:80c568848529635aa7ca46d8d525f68486d53e03f68b7bb5eba2c88d742e302c"},
+ {file = "pyinstaller-6.13.0-py3-none-win_amd64.whl", hash = "sha256:8d4296236b85aae570379488c2da833b28828b17c57c2cc21fccd7e3811fe372"},
+ {file = "pyinstaller-6.13.0-py3-none-win_arm64.whl", hash = "sha256:d9f21d56ca2443aa6a1e255e7ad285c76453893a454105abe1b4d45e92bb9a20"},
+ {file = "pyinstaller-6.13.0.tar.gz", hash = "sha256:38911feec2c5e215e5159a7e66fdb12400168bd116143b54a8a7a37f08733456"},
+]
+
+[package.dependencies]
+altgraph = "*"
+macholib = {version = ">=1.8", markers = "sys_platform == \"darwin\""}
+packaging = ">=22.0"
+pefile = {version = ">=2022.5.30,<2024.8.26 || >2024.8.26", markers = "sys_platform == \"win32\""}
+pyinstaller-hooks-contrib = ">=2025.2"
+pywin32-ctypes = {version = ">=0.2.1", markers = "sys_platform == \"win32\""}
+setuptools = ">=42.0.0"
+
+[package.extras]
+completion = ["argcomplete"]
+hook-testing = ["execnet (>=1.5.0)", "psutil", "pytest (>=2.7.3)"]
+
+[[package]]
+name = "pyinstaller-hooks-contrib"
+version = "2025.4"
+description = "Community maintained hooks for PyInstaller"
+optional = false
+python-versions = ">=3.8"
+groups = ["dev"]
+files = [
+ {file = "pyinstaller_hooks_contrib-2025.4-py3-none-any.whl", hash = "sha256:6c2d73269b4c484eb40051fc1acee0beb113c2cfb3b37437b8394faae6f0d072"},
+ {file = "pyinstaller_hooks_contrib-2025.4.tar.gz", hash = "sha256:5ce1afd1997b03e70f546207031cfdf2782030aabacc102190677059e2856446"},
+]
+
+[package.dependencies]
+packaging = ">=22.0"
+setuptools = ">=42.0.0"
+
+[[package]]
+name = "pymysql"
+version = "1.1.1"
+description = "Pure Python MySQL Driver"
+optional = false
+python-versions = ">=3.7"
+groups = ["main"]
+files = [
+ {file = "PyMySQL-1.1.1-py3-none-any.whl", hash = "sha256:4de15da4c61dc132f4fb9ab763063e693d521a80fd0e87943b9a453dd4c19d6c"},
+ {file = "pymysql-1.1.1.tar.gz", hash = "sha256:e127611aaf2b417403c60bf4dc570124aeb4a57f5f37b8e95ae399a42f904cd0"},
+]
+
+[package.extras]
+ed25519 = ["PyNaCl (>=1.4.0)"]
+rsa = ["cryptography"]
+
+[[package]]
+name = "pytest"
+version = "8.3.5"
+description = "pytest: simple powerful testing with Python"
+optional = false
+python-versions = ">=3.8"
+groups = ["dev"]
+files = [
+ {file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"},
+ {file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "sys_platform == \"win32\""}
+iniconfig = "*"
+packaging = "*"
+pluggy = ">=1.5,<2"
+
+[package.extras]
+dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
+
+[[package]]
+name = "pytest-cov"
+version = "6.1.1"
+description = "Pytest plugin for measuring coverage."
+optional = false
+python-versions = ">=3.9"
+groups = ["dev"]
+files = [
+ {file = "pytest_cov-6.1.1-py3-none-any.whl", hash = "sha256:bddf29ed2d0ab6f4df17b4c55b0a657287db8684af9c42ea546b21b1041b3dde"},
+ {file = "pytest_cov-6.1.1.tar.gz", hash = "sha256:46935f7aaefba760e716c2ebfbe1c216240b9592966e7da99ea8292d4d3e2a0a"},
+]
+
+[package.dependencies]
+coverage = {version = ">=7.5", extras = ["toml"]}
+pytest = ">=4.6"
+
+[package.extras]
+testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"]
+
+[[package]]
+name = "pytest-mock"
+version = "3.14.0"
+description = "Thin-wrapper around the mock package for easier use with pytest"
+optional = false
+python-versions = ">=3.8"
+groups = ["dev"]
+files = [
+ {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"},
+ {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"},
+]
+
+[package.dependencies]
+pytest = ">=6.2.5"
+
+[package.extras]
+dev = ["pre-commit", "pytest-asyncio", "tox"]
+
+[[package]]
+name = "python-dateutil"
+version = "2.9.0.post0"
+description = "Extensions to the standard Python datetime module"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
+groups = ["main"]
+files = [
+ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"},
+ {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"},
+]
+
+[package.dependencies]
+six = ">=1.5"
+
+[[package]]
+name = "pywin32-ctypes"
+version = "0.2.3"
+description = "A (partial) reimplementation of pywin32 using ctypes/cffi"
+optional = false
+python-versions = ">=3.6"
+groups = ["dev"]
+markers = "sys_platform == \"win32\""
+files = [
+ {file = "pywin32-ctypes-0.2.3.tar.gz", hash = "sha256:d162dc04946d704503b2edc4d55f3dba5c1d539ead017afa00142c38b9885755"},
+ {file = "pywin32_ctypes-0.2.3-py3-none-any.whl", hash = "sha256:8a1513379d709975552d202d942d9837758905c8d01eb82b8bcc30918929e7b8"},
+]
+
+[[package]]
+name = "pyyaml"
+version = "6.0.2"
+description = "YAML parser and emitter for Python"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"},
+ {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"},
+ {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"},
+ {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"},
+ {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"},
+ {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"},
+ {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"},
+ {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"},
+ {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"},
+ {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"},
+ {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"},
+ {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"},
+ {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"},
+ {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"},
+ {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"},
+ {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"},
+ {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"},
+ {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"},
+ {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"},
+ {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"},
+ {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"},
+ {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"},
+ {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"},
+ {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"},
+ {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"},
+ {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"},
+ {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"},
+ {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"},
+ {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"},
+ {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"},
+ {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"},
+ {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"},
+ {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"},
+ {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"},
+ {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"},
+ {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"},
+ {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"},
+ {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"},
+ {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"},
+ {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"},
+ {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"},
+ {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"},
+ {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"},
+ {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"},
+ {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"},
+ {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"},
+ {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"},
+ {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"},
+ {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"},
+ {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"},
+ {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"},
+ {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"},
+ {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"},
+]
+
+[[package]]
+name = "setuptools"
+version = "80.7.1"
+description = "Easily download, build, install, upgrade, and uninstall Python packages"
+optional = false
+python-versions = ">=3.9"
+groups = ["main", "dev"]
+files = [
+ {file = "setuptools-80.7.1-py3-none-any.whl", hash = "sha256:ca5cc1069b85dc23070a6628e6bcecb3292acac802399c7f8edc0100619f9009"},
+ {file = "setuptools-80.7.1.tar.gz", hash = "sha256:f6ffc5f0142b1bd8d0ca94ee91b30c0ca862ffd50826da1ea85258a06fd94552"},
+]
+
+[package.extras]
+check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""]
+core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"]
+cover = ["pytest-cov"]
+doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"]
+enabler = ["pytest-enabler (>=2.2)"]
+test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"]
+type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"]
+
+[[package]]
+name = "six"
+version = "1.17.0"
+description = "Python 2 and 3 compatibility utilities"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
+groups = ["main"]
+files = [
+ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"},
+ {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"},
+]
+
+[[package]]
+name = "sqlalchemy"
+version = "1.4.54"
+description = "Database Abstraction Library"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
+groups = ["main"]
+files = [
+ {file = "SQLAlchemy-1.4.54-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:af00236fe21c4d4f4c227b6ccc19b44c594160cc3ff28d104cdce85855369277"},
+ {file = "SQLAlchemy-1.4.54-cp310-cp310-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1183599e25fa38a1a322294b949da02b4f0da13dbc2688ef9dbe746df573f8a6"},
+ {file = "SQLAlchemy-1.4.54-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1990d5a6a5dc358a0894c8ca02043fb9a5ad9538422001fb2826e91c50f1d539"},
+ {file = "SQLAlchemy-1.4.54-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:14b3f4783275339170984cadda66e3ec011cce87b405968dc8d51cf0f9997b0d"},
+ {file = "SQLAlchemy-1.4.54-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b24364150738ce488333b3fb48bfa14c189a66de41cd632796fbcacb26b4585"},
+ {file = "SQLAlchemy-1.4.54-cp310-cp310-win32.whl", hash = "sha256:a8a72259a1652f192c68377be7011eac3c463e9892ef2948828c7d58e4829988"},
+ {file = "SQLAlchemy-1.4.54-cp310-cp310-win_amd64.whl", hash = "sha256:b67589f7955924865344e6eacfdcf70675e64f36800a576aa5e961f0008cde2a"},
+ {file = "SQLAlchemy-1.4.54-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b05e0626ec1c391432eabb47a8abd3bf199fb74bfde7cc44a26d2b1b352c2c6e"},
+ {file = "SQLAlchemy-1.4.54-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13e91d6892b5fcb94a36ba061fb7a1f03d0185ed9d8a77c84ba389e5bb05e936"},
+ {file = "SQLAlchemy-1.4.54-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb59a11689ff3c58e7652260127f9e34f7f45478a2f3ef831ab6db7bcd72108f"},
+ {file = "SQLAlchemy-1.4.54-cp311-cp311-win32.whl", hash = "sha256:1390ca2d301a2708fd4425c6d75528d22f26b8f5cbc9faba1ddca136671432bc"},
+ {file = "SQLAlchemy-1.4.54-cp311-cp311-win_amd64.whl", hash = "sha256:2b37931eac4b837c45e2522066bda221ac6d80e78922fb77c75eb12e4dbcdee5"},
+ {file = "SQLAlchemy-1.4.54-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3f01c2629a7d6b30d8afe0326b8c649b74825a0e1ebdcb01e8ffd1c920deb07d"},
+ {file = "SQLAlchemy-1.4.54-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c24dd161c06992ed16c5e528a75878edbaeced5660c3db88c820f1f0d3fe1f4"},
+ {file = "SQLAlchemy-1.4.54-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5e0d47d619c739bdc636bbe007da4519fc953393304a5943e0b5aec96c9877c"},
+ {file = "SQLAlchemy-1.4.54-cp312-cp312-win32.whl", hash = "sha256:12bc0141b245918b80d9d17eca94663dbd3f5266ac77a0be60750f36102bbb0f"},
+ {file = "SQLAlchemy-1.4.54-cp312-cp312-win_amd64.whl", hash = "sha256:f941aaf15f47f316123e1933f9ea91a6efda73a161a6ab6046d1cde37be62c88"},
+ {file = "SQLAlchemy-1.4.54-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:a41611835010ed4ea4c7aed1da5b58aac78ee7e70932a91ed2705a7b38e40f52"},
+ {file = "SQLAlchemy-1.4.54-cp36-cp36m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e8c1b9ecaf9f2590337d5622189aeb2f0dbc54ba0232fa0856cf390957584a9"},
+ {file = "SQLAlchemy-1.4.54-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0de620f978ca273ce027769dc8db7e6ee72631796187adc8471b3c76091b809e"},
+ {file = "SQLAlchemy-1.4.54-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c5a2530400a6e7e68fd1552a55515de6a4559122e495f73554a51cedafc11669"},
+ {file = "SQLAlchemy-1.4.54-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0cf7076c8578b3de4e43a046cc7a1af8466e1c3f5e64167189fe8958a4f9c02"},
+ {file = "SQLAlchemy-1.4.54-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:f1e1b92ee4ee9ffc68624ace218b89ca5ca667607ccee4541a90cc44999b9aea"},
+ {file = "SQLAlchemy-1.4.54-cp37-cp37m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41cffc63c7c83dfc30c4cab5b4308ba74440a9633c4509c51a0c52431fb0f8ab"},
+ {file = "SQLAlchemy-1.4.54-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5933c45d11cbd9694b1540aa9076816cc7406964c7b16a380fd84d3a5fe3241"},
+ {file = "SQLAlchemy-1.4.54-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cafe0ba3a96d0845121433cffa2b9232844a2609fce694fcc02f3f31214ece28"},
+ {file = "SQLAlchemy-1.4.54-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a19f816f4702d7b1951d7576026c7124b9bfb64a9543e571774cf517b7a50b29"},
+ {file = "SQLAlchemy-1.4.54-cp37-cp37m-win32.whl", hash = "sha256:76c2ba7b5a09863d0a8166fbc753af96d561818c572dbaf697c52095938e7be4"},
+ {file = "SQLAlchemy-1.4.54-cp37-cp37m-win_amd64.whl", hash = "sha256:a86b0e4be775902a5496af4fb1b60d8a2a457d78f531458d294360b8637bb014"},
+ {file = "SQLAlchemy-1.4.54-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:a49730afb716f3f675755afec109895cab95bc9875db7ffe2e42c1b1c6279482"},
+ {file = "SQLAlchemy-1.4.54-cp38-cp38-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26e78444bc77d089e62874dc74df05a5c71f01ac598010a327881a48408d0064"},
+ {file = "SQLAlchemy-1.4.54-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02d2ecb9508f16ab9c5af466dfe5a88e26adf2e1a8d1c56eb616396ccae2c186"},
+ {file = "SQLAlchemy-1.4.54-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:394b0135900b62dbf63e4809cdc8ac923182af2816d06ea61cd6763943c2cc05"},
+ {file = "SQLAlchemy-1.4.54-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed3576675c187e3baa80b02c4c9d0edfab78eff4e89dd9da736b921333a2432"},
+ {file = "SQLAlchemy-1.4.54-cp38-cp38-win32.whl", hash = "sha256:fc9ffd9a38e21fad3e8c5a88926d57f94a32546e937e0be46142b2702003eba7"},
+ {file = "SQLAlchemy-1.4.54-cp38-cp38-win_amd64.whl", hash = "sha256:a01bc25eb7a5688656c8770f931d5cb4a44c7de1b3cec69b84cc9745d1e4cc10"},
+ {file = "SQLAlchemy-1.4.54-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:0b76bbb1cbae618d10679be8966f6d66c94f301cfc15cb49e2f2382563fb6efb"},
+ {file = "SQLAlchemy-1.4.54-cp39-cp39-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdb2886c0be2c6c54d0651d5a61c29ef347e8eec81fd83afebbf7b59b80b7393"},
+ {file = "SQLAlchemy-1.4.54-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:954816850777ac234a4e32b8c88ac1f7847088a6e90cfb8f0e127a1bf3feddff"},
+ {file = "SQLAlchemy-1.4.54-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1d83cd1cc03c22d922ec94d0d5f7b7c96b1332f5e122e81b1a61fb22da77879a"},
+ {file = "SQLAlchemy-1.4.54-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1576fba3616f79496e2f067262200dbf4aab1bb727cd7e4e006076686413c80c"},
+ {file = "SQLAlchemy-1.4.54-cp39-cp39-win32.whl", hash = "sha256:3112de9e11ff1957148c6de1df2bc5cc1440ee36783412e5eedc6f53638a577d"},
+ {file = "SQLAlchemy-1.4.54-cp39-cp39-win_amd64.whl", hash = "sha256:6da60fb24577f989535b8fc8b2ddc4212204aaf02e53c4c7ac94ac364150ed08"},
+ {file = "sqlalchemy-1.4.54.tar.gz", hash = "sha256:4470fbed088c35dc20b78a39aaf4ae54fe81790c783b3264872a0224f437c31a"},
+]
+
+[package.dependencies]
+greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"}
+
+[package.extras]
+aiomysql = ["aiomysql (>=0.2.0) ; python_version >= \"3\"", "greenlet (!=0.4.17) ; python_version >= \"3\""]
+aiosqlite = ["aiosqlite ; python_version >= \"3\"", "greenlet (!=0.4.17) ; python_version >= \"3\"", "typing_extensions (!=3.10.0.1)"]
+asyncio = ["greenlet (!=0.4.17) ; python_version >= \"3\""]
+asyncmy = ["asyncmy (>=0.2.3,!=0.2.4) ; python_version >= \"3\"", "greenlet (!=0.4.17) ; python_version >= \"3\""]
+mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2) ; python_version >= \"3\"", "mariadb (>=1.0.1,!=1.1.2) ; python_version >= \"3\""]
+mssql = ["pyodbc"]
+mssql-pymssql = ["pymssql", "pymssql"]
+mssql-pyodbc = ["pyodbc", "pyodbc"]
+mypy = ["mypy (>=0.910) ; python_version >= \"3\"", "sqlalchemy2-stubs"]
+mysql = ["mysqlclient (>=1.4.0) ; python_version >= \"3\"", "mysqlclient (>=1.4.0,<2) ; python_version < \"3\""]
+mysql-connector = ["mysql-connector-python", "mysql-connector-python"]
+oracle = ["cx_oracle (>=7) ; python_version >= \"3\"", "cx_oracle (>=7,<8) ; python_version < \"3\""]
+postgresql = ["psycopg2 (>=2.7)"]
+postgresql-asyncpg = ["asyncpg ; python_version >= \"3\"", "asyncpg ; python_version >= \"3\"", "greenlet (!=0.4.17) ; python_version >= \"3\"", "greenlet (!=0.4.17) ; python_version >= \"3\""]
+postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0) ; python_version >= \"3\"", "pg8000 (>=1.16.6,!=1.29.0) ; python_version >= \"3\""]
+postgresql-psycopg2binary = ["psycopg2-binary"]
+postgresql-psycopg2cffi = ["psycopg2cffi"]
+pymysql = ["pymysql (<1) ; python_version < \"3\"", "pymysql ; python_version >= \"3\""]
+sqlcipher = ["sqlcipher3_binary ; python_version >= \"3\""]
+
+[[package]]
+name = "tempora"
+version = "5.8.0"
+description = "Objects and routines pertaining to date and time (tempora)"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "tempora-5.8.0-py3-none-any.whl", hash = "sha256:c6fa4bc090e0b3b7ce879a81fdda6e4e0e7ab20d1cd580b5c92fecdcf9e5fb65"},
+ {file = "tempora-5.8.0.tar.gz", hash = "sha256:1e9606e65a3f2063460961d68515dee07bdaca0859305a8d3e6604168175fef1"},
+]
+
+[package.dependencies]
+"jaraco.functools" = ">=1.20"
+python-dateutil = "*"
+
+[package.extras]
+check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""]
+cover = ["pytest-cov"]
+doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+enabler = ["pytest-enabler (>=2.2)"]
+test = ["backports.zoneinfo ; python_version < \"3.9\"", "pytest (>=6,!=8.1.*)", "pytest-freezer", "tzdata ; platform_system == \"Windows\""]
+type = ["pytest-mypy", "types-python-dateutil"]
+
+[[package]]
+name = "toml"
+version = "0.10.2"
+description = "Python Library for Tom's Obvious, Minimal Language"
+optional = false
+python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
+groups = ["main"]
+files = [
+ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
+ {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
+]
+
+[[package]]
+name = "vyper-config"
+version = "1.2.1"
+description = "Python configuration with (more) fangs"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+ {file = "vyper-config-1.2.1.tar.gz", hash = "sha256:d7e6ecaf363539caab4e3cb85d55a98df00a42547965b2703d1989302d30ec57"},
+ {file = "vyper_config-1.2.1-py3-none-any.whl", hash = "sha256:a9907a5707602e7c289f964498a0d5d6e477e8005a9e3797bdab90771ec421c1"},
+]
+
+[package.dependencies]
+distconfig3 = ">=1.0.1"
+PyYAML = ">=6.0.1"
+toml = ">=0.10.0"
+watchdog = ">=3.0.0"
+
+[[package]]
+name = "watchdog"
+version = "6.0.0"
+description = "Filesystem events monitoring"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d1cdb490583ebd691c012b3d6dae011000fe42edb7a82ece80965b42abd61f26"},
+ {file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bc64ab3bdb6a04d69d4023b29422170b74681784ffb9463ed4870cf2f3e66112"},
+ {file = "watchdog-6.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c897ac1b55c5a1461e16dae288d22bb2e412ba9807df8397a635d88f671d36c3"},
+ {file = "watchdog-6.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6eb11feb5a0d452ee41f824e271ca311a09e250441c262ca2fd7ebcf2461a06c"},
+ {file = "watchdog-6.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ef810fbf7b781a5a593894e4f439773830bdecb885e6880d957d5b9382a960d2"},
+ {file = "watchdog-6.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:afd0fe1b2270917c5e23c2a65ce50c2a4abb63daafb0d419fde368e272a76b7c"},
+ {file = "watchdog-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdd4e6f14b8b18c334febb9c4425a878a2ac20efd1e0b231978e7b150f92a948"},
+ {file = "watchdog-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c7c15dda13c4eb00d6fb6fc508b3c0ed88b9d5d374056b239c4ad1611125c860"},
+ {file = "watchdog-6.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f10cb2d5902447c7d0da897e2c6768bca89174d0c6e1e30abec5421af97a5b0"},
+ {file = "watchdog-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:490ab2ef84f11129844c23fb14ecf30ef3d8a6abafd3754a6f75ca1e6654136c"},
+ {file = "watchdog-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:76aae96b00ae814b181bb25b1b98076d5fc84e8a53cd8885a318b42b6d3a5134"},
+ {file = "watchdog-6.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a175f755fc2279e0b7312c0035d52e27211a5bc39719dd529625b1930917345b"},
+ {file = "watchdog-6.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e6f0e77c9417e7cd62af82529b10563db3423625c5fce018430b249bf977f9e8"},
+ {file = "watchdog-6.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:90c8e78f3b94014f7aaae121e6b909674df5b46ec24d6bebc45c44c56729af2a"},
+ {file = "watchdog-6.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e7631a77ffb1f7d2eefa4445ebbee491c720a5661ddf6df3498ebecae5ed375c"},
+ {file = "watchdog-6.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:c7ac31a19f4545dd92fc25d200694098f42c9a8e391bc00bdd362c5736dbf881"},
+ {file = "watchdog-6.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9513f27a1a582d9808cf21a07dae516f0fab1cf2d7683a742c498b93eedabb11"},
+ {file = "watchdog-6.0.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7a0e56874cfbc4b9b05c60c8a1926fedf56324bb08cfbc188969777940aef3aa"},
+ {file = "watchdog-6.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:e6439e374fc012255b4ec786ae3c4bc838cd7309a540e5fe0952d03687d8804e"},
+ {file = "watchdog-6.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7607498efa04a3542ae3e05e64da8202e58159aa1fa4acddf7678d34a35d4f13"},
+ {file = "watchdog-6.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:9041567ee8953024c83343288ccc458fd0a2d811d6a0fd68c4c22609e3490379"},
+ {file = "watchdog-6.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:82dc3e3143c7e38ec49d61af98d6558288c415eac98486a5c581726e0737c00e"},
+ {file = "watchdog-6.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:212ac9b8bf1161dc91bd09c048048a95ca3a4c4f5e5d4a7d1b1a7d5752a7f96f"},
+ {file = "watchdog-6.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:e3df4cbb9a450c6d49318f6d14f4bbc80d763fa587ba46ec86f99f9e6876bb26"},
+ {file = "watchdog-6.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:2cce7cfc2008eb51feb6aab51251fd79b85d9894e98ba847408f662b3395ca3c"},
+ {file = "watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2"},
+ {file = "watchdog-6.0.0-py3-none-win32.whl", hash = "sha256:07df1fdd701c5d4c8e55ef6cf55b8f0120fe1aef7ef39a1c6fc6bc2e606d517a"},
+ {file = "watchdog-6.0.0-py3-none-win_amd64.whl", hash = "sha256:cbafb470cf848d93b5d013e2ecb245d4aa1c8fd0504e863ccefa32445359d680"},
+ {file = "watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f"},
+ {file = "watchdog-6.0.0.tar.gz", hash = "sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282"},
+]
+
+[package.extras]
+watchmedo = ["PyYAML (>=3.10)"]
+
+[[package]]
+name = "win32-setctime"
+version = "1.2.0"
+description = "A small Python utility to set file creation time on Windows"
+optional = false
+python-versions = ">=3.5"
+groups = ["main"]
+markers = "sys_platform == \"win32\""
+files = [
+ {file = "win32_setctime-1.2.0-py3-none-any.whl", hash = "sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390"},
+ {file = "win32_setctime-1.2.0.tar.gz", hash = "sha256:ae1fdf948f5640aae05c511ade119313fb6a30d7eabe25fef9764dca5873c4c0"},
+]
+
+[package.extras]
+dev = ["black (>=19.3b0) ; python_version >= \"3.6\"", "pytest (>=4.6.2)"]
+
+[[package]]
+name = "zc-lockfile"
+version = "3.0.post1"
+description = "Basic inter-process locks"
+optional = false
+python-versions = ">=3.7"
+groups = ["main"]
+files = [
+ {file = "zc.lockfile-3.0.post1-py3-none-any.whl", hash = "sha256:ddb2d71088c061dc8a5edbaa346b637d742ca1e1564be75cb98e7dcae715de19"},
+ {file = "zc.lockfile-3.0.post1.tar.gz", hash = "sha256:adb2ee6d9e6a2333c91178dcb2c9b96a5744c78edb7712dc784a7d75648e81ec"},
+]
+
+[package.dependencies]
+setuptools = "*"
+
+[package.extras]
+test = ["zope.testing"]
+
+[metadata]
+lock-version = "2.1"
+python-versions = ">=3.11,<3.14"
+content-hash = "a106c5dc27a2dbd11f79134950afb7d71c628e836b9a8bf2d78aa2cc6eb0626f"
diff --git a/poetry.toml b/poetry.toml
new file mode 100644
index 0000000..ab1033b
--- /dev/null
+++ b/poetry.toml
@@ -0,0 +1,2 @@
+[virtualenvs]
+in-project = true
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..fee4cbf
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,34 @@
+[project]
+name = "directdnsonly"
+version = "1.0.9"
+description = "DNS Management System - DirectAdmin to multiple backends"
+authors = [
+ {name = "Aaron Guise",email = "aaron@guise.net.nz"}
+]
+license = {text = "MIT"}
+readme = "README.md"
+requires-python = ">=3.11,<3.14"
+dependencies = [
+ "vyper-config (>=1.2.1,<2.0.0)",
+ "loguru (>=0.7.3,<0.8.0)",
+ "persist-queue (>=1.0.0,<2.0.0)",
+ "cherrypy (>=18.10.0,<19.0.0)",
+ "sqlalchemy (<2.0.0)",
+ "pymysql (>=1.1.1,<2.0.0)",
+ "dnspython (>=2.7.0,<3.0.0)",
+ "pyyaml (>=6.0.2,<7.0.0)",
+]
+
+[tool.poetry]
+package-mode = true
+
+[tool.poetry.group.dev.dependencies]
+black = "^25.1.0"
+pyinstaller = "^6.13.0"
+pytest = "^8.3.5"
+pytest-cov = "^6.1.1"
+pytest-mock = "^3.14.0"
+
+[build-system]
+requires = ["poetry-core>=2.0.0,<3.0.0"]
+build-backend = "poetry.core.masonry.api"
diff --git a/schema/coredns_mysql.sql b/schema/coredns_mysql.sql
new file mode 100644
index 0000000..644850e
--- /dev/null
+++ b/schema/coredns_mysql.sql
@@ -0,0 +1,12 @@
+CREATE TABLE IF NOT EXISTS `records` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `zone` varchar(255) NOT NULL,
+ `name` varchar(255) NOT NULL,
+ `ttl` int(11) DEFAULT NULL,
+ `type` varchar(10) NOT NULL,
+ `data` text NOT NULL,
+ PRIMARY KEY (`id`),
+ KEY `idx_zone` (`zone`),
+ KEY `idx_name` (`name`),
+ KEY `idx_type` (`type`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
\ No newline at end of file
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/test_coredns_mysql.py b/tests/test_coredns_mysql.py
new file mode 100644
index 0000000..c0d9b2c
--- /dev/null
+++ b/tests/test_coredns_mysql.py
@@ -0,0 +1,47 @@
+import pytest
+from sqlalchemy import create_engine
+from sqlalchemy.orm import scoped_session, sessionmaker
+
+from directdnsonly.app.backends.coredns_mysql import CoreDNSMySQLBackend, CoreDNSRecord
+from loguru import logger
+
+
+@pytest.fixture
+def mysql_backend(tmp_path):
+ # Setup in-memory SQLite for testing (replace with test MySQL in CI)
+ engine = create_engine("sqlite:///:memory:")
+ CoreDNSRecord.metadata.create_all(engine)
+
+ class TestBackend(CoreDNSMySQLBackend):
+ def __init__(self):
+ super().__init__()
+ self.engine = engine
+ self.Session = scoped_session(sessionmaker(bind=engine))
+
+ yield TestBackend()
+ engine.dispose()
+
+
+def test_zone_operations(mysql_backend):
+ zone_data = """
+example.com. 300 IN SOA ns.example.com. admin.example.com. (2023 3600 1800 604800 86400)
+example.com. 300 IN A 192.0.2.1
+"""
+ # Test zone creation
+ assert mysql_backend.write_zone("example.com", zone_data)
+ assert mysql_backend.zone_exists("example.com")
+
+ # Test record update
+ updated_zone = """
+example.com. 3600 IN A 192.0.2.1
+example.com. 300 IN AAAA 2001:db8::1
+"""
+ assert mysql_backend.write_zone("example.com", updated_zone)
+
+ # Test record removal
+ reduced_zone = "example.com. 300 IN A 192.0.2.1"
+ assert mysql_backend.write_zone("example.com", reduced_zone)
+
+ # Test zone deletion
+ assert mysql_backend.delete_zone("example.com")
+ assert not mysql_backend.zone_exists("example.com")
\ No newline at end of file