version 1.0.9 refresh
.dockerignore (new file)
@@ -0,0 +1,2 @@
+venv/*
+build/*
.gitignore (vendored)
@@ -1,4 +1,6 @@
 *.db
 venv/
 .venv
 .idea
+build
+!build/.gitkeep
Dockerfile
@@ -3,10 +3,10 @@ FROM python:3.7.9 as builder
 ARG VERSION
 ENV LC_ALL=en_NZ.utf8
 ENV LANG=en_NZ.utf8
-ENV APP_NAME="apikeyauthhandler"
+ENV APP_NAME="directdnsonly"

 RUN mkdir -p /tmp/build && apt-get update && \
-    apt-get install -y libgcc1-dbg
+    apt-get install -y libgcc1-dbg libssl-dev

 COPY src/ /tmp/build/
 COPY requirements.txt /tmp/build
@@ -22,8 +22,13 @@ RUN wget https://github.com/NixOS/patchelf/releases/download/0.12/patchelf-0.12.

 WORKDIR /tmp/build
 RUN pip3 install -r requirements.txt && \
-    pyinstaller --hidden-import=json \
+    pyinstaller \
+    --hidden-import=json \
+    --hidden-import=pyopenssl \
     --hidden-import=jaraco \
+    --hidden-import=cheroot \
+    --hidden-import=cheroot.ssl.pyopenssl \
+    --hidden-import=cheroot.ssl.builtin \
     --noconfirm --onefile ${APP_NAME}.py && \
     cd /tmp/build/dist && \
     staticx ${APP_NAME} ./${APP_NAME}_static
@@ -47,4 +52,4 @@ WORKDIR /app

 VOLUME /app/config /data

-CMD ["/app/apikeyauthhandler"]
+CMD ["/app/directdnsonly"]
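A note on the extra --hidden-import flags above: PyInstaller only follows literal import statements, and cheroot appears to select its SSL adapter by importing a module from a string at runtime, so the pyopenssl/builtin adapters have to be declared explicitly or the frozen binary cannot find them. A minimal sketch of the pattern PyInstaller cannot see, using the stdlib json module (which is also on the hidden-import list):

    import importlib

    # PyInstaller's static analysis only records modules named in literal
    # "import x" statements. A module resolved from a string at runtime, as
    # below, is invisible to it and must be declared with --hidden-import.
    module_name = 'json'                     # e.g. chosen from config at runtime
    module = importlib.import_module(module_name)
    print(module.dumps({'frozen': True}))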
build/.gitkeep (new empty file)
requirements.txt
@@ -2,7 +2,8 @@ cherrypy==18.6.1
 pyyaml==5.3.1
 python-json-logger
 sqlalchemy==1.3.20
-pyinstaller==4.0
+pyinstaller==4.5.1
 patchelf-wrapper
 staticx
+pyopenssl
+persistqueue
Main application module
@@ -1,89 +1,100 @@
-import mmap
-
 import cherrypy
 from cherrypy import request
-from cherrypy._cpnative_server import CPHTTPServer
 from pythonjsonlogger import jsonlogger
+from persistqueue import Queue, Empty
 import logging
+from logging.handlers import TimedRotatingFileHandler
 import os
+import subprocess
 import time
 import sys
 import yaml
-import datetime
+import threading
 import lib.common
 import lib.db
 import lib.db.models
+import urllib.parse


 class DaDNS(object):

     @cherrypy.expose
-    def CMD_API_LOGIN_TEST(self, **params):
-        return 'error=0&text=Login OK&details=none'
+    def CMD_API_LOGIN_TEST(self):
+        return urllib.parse.urlencode({'error': 0,
+                                       'text': 'Login OK'})

     @cherrypy.expose
-    def CMD_API_DNS_ADMIN(self, **params):
-        applog.debug('Processing Method: ' + request.method)
+    def CMD_API_DNS_ADMIN(self):
+        applog.debug('Processing Method: '.format(request.method))

         if request.method == 'POST':
             action = request.params.get('action')
+            applog.debug('Action received via querystring: {}'.format(action))
+            body = str(request.body.read(), 'utf-8')
             decoded_params = None
             if action is None:
-                decoded_params = decode_params(str(request.body.read(), 'utf-8'))
+                applog.debug('Action was not specified, check body')
+                decoded_params = decode_params(str(body))
+                applog.debug('Parameters decoded: {}'.format(decoded_params))
                 action = decoded_params['action']
-            zone_file = str(request.body.read(), 'utf-8')
+            zone_file = body
             applog.debug(zone_file)
             if action == 'delete':
+                # TODO: Support multiple domain deletion
                 # Domain is being removed from the DNS
-                hostname = decoded_params['hostname']
-                domain = decoded_params['select0']
-                record = session.query(lib.db.models.Domain).filter_by(domain=domain).one()
-                if record.hostname == hostname:
-                    applog.debug('Hostname matches the original host {}: Delete is allowed'.format('hostname'))
-                    session.delete(record)
-                    applog.info('{} deleted from database')
-                    write_named_include()
+                queue_item('delete', {'hostname': decoded_params['hostname'],
+                                      'domain': decoded_params['select0']})
+                return urllib.parse.urlencode({'error': 0})
             if action == 'rawsave':
                 # DirectAdmin wants to add/update a domain
-                hostname = request.params.get('hostname')
-                username = request.params.get('username')
-                domain = request.params.get('domain')
-                applog.debug('Domain name to check: ' + domain)
-                applog.debug('Does zone exist? ' + str(check_zone_exists(str(domain))))
-                if not check_zone_exists(str(domain)):
-                    applog.debug('Zone is not present in db')
-                    put_zone_index(str(domain), str(hostname), str(username))
-                    write_zone_file(str(domain), zone_file)
-                else:
-                    # Domain already exists
-                    applog.debug('Zone is present in db')
-                    write_zone_file(str(domain), zone_file)
+                queue_item('save', {'hostname': request.params.get('hostname'),
+                                    'username': request.params.get('username'),
+                                    'domain': request.params.get('domain'),
+                                    'zone_file': zone_file})
+                applog.info('Enqueued {} request for {}'.format('save', request.params.get('domain')))
+                return urllib.parse.urlencode({'error': 0})
         elif request.method == 'GET':
             applog.debug('Action Type: ' + request.params.get('action'))
             action = request.params.get('action')
-            if action == 'exists':
+            check_parent = bool(request.params.get('check_for_parent_domain'))
+            if action == 'exists' and check_parent:
+                domain_result = check_zone_exists(request.params.get('domain'))
+                applog.debug('Domain result: {}'.format(domain_result))
+                parent_result = check_parent_domain_owner(request.params.get('domain'))
+                applog.debug('Domain result: {}'.format(domain_result))
+                if not domain_result and not parent_result:
+                    return urllib.parse.urlencode({'error': 0,
+                                                   'exists': 0})
+                elif domain_result:
+                    domain_record = session.query(lib.db.models.Domain).filter_by(
+                        domain=request.params.get('domain')).one()
+                    return urllib.parse.urlencode({'error': 0,
+                                                   'exists': 1,
+                                                   'details': 'Domain exists on {}'
+                                                   .format(domain_record.hostname)
+                                                   })
+                elif parent_result:
+                    parent_domain = ".".join(request.params.get('domain').split('.')[1:])
+                    domain_record = session.query(lib.db.models.Domain).filter_by(
+                        domain=parent_domain).one()
+                    return urllib.parse.urlencode({'error': 0,
+                                                   'exists': 2,
+                                                   'details': 'Parent Domain exists on {}'
+                                                   .format(domain_record.hostname)
+                                                   })
+
+            elif action == 'exists':
                 # DirectAdmin is checking whether the domain is in the cluster
                 if check_zone_exists(request.params.get('domain')):
-                    return 'result: exists=1'
+                    domain_record = session.query(lib.db.models.Domain).filter_by(
+                        domain=request.params.get('domain')).one()
+                    return urllib.parse.urlencode({'error': 0,
+                                                   'exists': 1,
+                                                   'details': 'Domain exists on {}'
+                                                   .format(domain_record.hostname)
+                                                   })
                 else:
-                    return 'result: exists=0'
-
-
-def create_zone_index():
-    # Create an index of all zones present from zone definitions
-    regex = r"(?<=\")(?P<domain>.*)(?=\"\s)"
-
-    with open(zone_index_file, 'w+') as f:
-        with open(named_conf, 'r') as named_file:
-            while True:
-                # read line
-                line = named_file.readline()
-                if not line:
-                    # Reached end of file
-                    break
-                print(line)
-                hosted_domain = re.search(regex, line).group(0)
-                f.write(hosted_domain + "\n")
+                    return urllib.parse.urlencode({'exists': 0})


 def put_zone_index(zone_name, host_name, user_name):
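A short note on the response changes above: the DirectAdmin-style replies are flat key=value&key=value strings, and the handlers now build them with urllib.parse.urlencode instead of concatenating by hand, which also takes care of escaping. A quick illustration of the output (the hostname is made up):

    import urllib.parse

    print(urllib.parse.urlencode({'error': 0, 'text': 'Login OK'}))
    # error=0&text=Login+OK  (spaces become '+', values are escaped for us)

    print(urllib.parse.urlencode({'error': 0, 'exists': 1,
                                  'details': 'Domain exists on ns1.example.com'}))
    # error=0&exists=1&details=Domain+exists+on+ns1.example.com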
@@ -94,6 +105,21 @@ def put_zone_index(zone_name, host_name, user_name):
     session.commit()


+def queue_item(action, data=None):
+    data = {'payload': data}
+    if action == 'save':
+        save_queue.put(data)
+    elif action == 'delete':
+        delete_queue.put(data)
+
+
+def delete_zone_file(zone_name):
+    # Delete the zone file
+    applog.debug('Zone Name for delete: ' + zone_name)
+    os.remove(zones_dir + '/' + zone_name + '.db')
+    applog.debug('Zone deleted: {}'.format(zones_dir + '/' + zone_name + '.db'))
+
+
 def write_zone_file(zone_name, data):
     # Write the zone to file
     applog.debug('Zone Name for write: ' + zone_name)
@@ -109,13 +135,13 @@ def write_named_include():
     with open(named_conf, 'w') as f:
         for domain in domains:
             applog.debug('Writing zone {} to named.config'.format(domain.domain))
-            f.write('zone "{}" { type master; file "/etc/pdns/zones/{}.db"; };'
-                    .format(domain.domain,
-                            domain.domain))
+            f.write('zone "' + domain.domain
+                    + '" { type master; file "' + zones_dir + '/'
+                    + domain.domain + '.db"; };\n')


-def check_parent_domain_owner(zone_name, owner):
-    applog.debug('Checking if {} is owner of parent in the DB'.format(zone_name))
+def check_parent_domain_owner(zone_name):
+    applog.debug('Checking if {} exists in the DB'.format(zone_name))
     # check try to find domain name
     parent_domain = ".".join(zone_name.split('.')[1:])
     domain_exists = session.query(session.query(lib.db.models.Domain).filter_by(domain=parent_domain).exists()).scalar()
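The move from str.format to plain concatenation in write_named_include is presumably because the zone stanza itself contains braces, which str.format treats as replacement fields unless they are doubled. A small sketch of the two working forms, with an illustrative zone name:

    domain = 'example.com'                  # illustrative zone name
    zones_dir = '/etc/named/directdnsonly'  # directory used by the new code

    # str.format only works once the stanza's literal braces are doubled ...
    stanza_fmt = 'zone "{}" {{ type master; file "{}/{}.db"; }};'.format(domain, zones_dir, domain)

    # ... while concatenation, as the commit now does, needs no escaping at all.
    stanza_cat = ('zone "' + domain + '" { type master; file "'
                  + zones_dir + '/' + domain + '.db"; };')

    assert stanza_fmt == stanza_cat
    print(stanza_cat)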
@@ -124,16 +150,60 @@ def check_parent_domain_owner(zone_name, owner):
         applog.debug('{} exists in db'.format(parent_domain))
         domain_record = session.query(lib.db.models.Domain).filter_by(domain=parent_domain).one()
         applog.debug(str(domain_record))
-        if domain_record.username == owner:
-            return True
-        else:
-            return False
+        return True
+    else:
+        return False
+
+
+def reconfigure_nameserver():
+    env = dict(os.environ)  # make a copy of the environment
+    lp_key = 'LD_LIBRARY_PATH'  # for Linux and *BSD
+    lp_orig = env.get(lp_key + '_ORIG')  # pyinstaller >= 20160820
+    if lp_orig is not None:
+        env[lp_key] = lp_orig  # restore the original
+    else:
+        env.pop(lp_key, None)  # last resort: remove the env var
+
+    reconfigure = subprocess.run(['rndc', 'reconfig'],
+                                 capture_output=True,
+                                 universal_newlines=True,
+                                 env=env)
+    applog.debug("Stdout: {}".format(reconfigure.stdout))
+    applog.info('Reloaded bind')
+
+
+def reload_nameserver(zone=None):
+    # Workaround for LD_LIBRARY_PATH/ LIBPATH issues
+    #
+    env = dict(os.environ)  # make a copy of the environment
+    lp_key = 'LD_LIBRARY_PATH'  # for Linux and *BSD
+    lp_orig = env.get(lp_key + '_ORIG')  # pyinstaller >= 20160820
+    if lp_orig is not None:
+        env[lp_key] = lp_orig  # restore the original
+    else:
+        env.pop(lp_key, None)  # last resort: remove the env var
+
+    if zone is not None:
+        reload = subprocess.run(['rndc', 'reload', zone],
+                                capture_output=True,
+                                universal_newlines=True,
+                                env=env)
+        applog.debug("Stdout: {}".format(reload.stdout))
+        applog.info('Reloaded bind for {}'.format(zone))
+    else:
+        reload = subprocess.run(['rndc', 'reload'],
+                                capture_output=True,
+                                universal_newlines=True,
+                                env=env)
+        applog.debug("Stdout: {}".format(reload.stdout))
+        applog.info('Reloaded bind')


 def check_zone_exists(zone_name):
     # Check if zone is present in the index
     applog.debug('Checking if {} is present in the DB'.format(zone_name))
-    domain_exists = session.query(session.query(lib.db.models.Domain).filter_by(domain=zone_name).exists()).scalar()
+    domain_exists = bool(session.query(lib.db.models.Domain.id).filter_by(domain=zone_name).first())
+    applog.debug('Returned from query: {}'.format(domain_exists))
     if domain_exists:
         return True
     else:
@@ -149,18 +219,64 @@ def decode_params(payload):
     return params


-@cherrypy.expose
-@cherrypy.tools.json_out()
-def health(self):
-    # Defaults to 200
-    return {"Message": "OK!"}
+def background_thread(worker_type):
+    if worker_type == 'save':
+        applog.debug('Started worker thread for save action')
+        while True:
+            try:
+                item = save_queue.get(block=True, timeout=10)
+                data = item['payload']
+                applog.info('Processing save from queue for {}'.format(data['domain']))
+                applog.debug('Domain name to check: ' + data['domain'])
+                applog.debug('Does zone exist? ' + str(check_zone_exists(str(data['domain']))))
+                if not check_zone_exists(str(data['domain'])):
+                    applog.debug('Zone is not present in db')
+                    put_zone_index(str(data['domain']), str(data['hostname']), str(data['username']))
+                    write_zone_file(str(data['domain']), data['zone_file'])
+                    write_named_include()
+                    reconfigure_nameserver()
+                    reload_nameserver(str(data['domain']))
+                else:
+                    # Domain already exists
+                    applog.debug('Zone is present in db')
+                    write_zone_file(str(data['domain']), data['zone_file'])
+                    write_named_include()
+                    reload_nameserver(str(data['domain']))
+                save_queue.task_done()
+
+            except Empty:
+                # Queue is empty
+                applog.debug('Save queue is empty')
+    elif worker_type == 'delete':
+        applog.debug('Started worker thread for delete action')
+        while True:
+            try:
+                item = delete_queue.get(block=True, timeout=10)
+                data = item['payload']
+                applog.info('Processing deletion from queue for {}'.format(data['domain']))
+                record = session.query(lib.db.models.Domain).filter_by(domain=data['domain']).one()
+                if record.hostname == data['hostname']:
+                    applog.debug('Hostname matches the original host {}: Delete is allowed'.format(data['domain']))
+                    session.delete(record)
+                    session.commit()
+                    applog.info('{} deleted from database'.format(data['domain']))
+                    delete_zone_file(data['domain'])
+                    write_named_include()
+                    reload_nameserver()
+                delete_queue.task_done()
+                time.sleep(5)
+            except Empty:
+                # Queue is empty
+                applog.debug('Delete queue is empty')
+            except Exception as e:
+                applog.error(e)


 def setup_logging():
     os.environ['TZ'] = config['timezone']
     time.tzset()
-    applog = logging.getLogger()
-    applog.setLevel(level=getattr(logging, config['log_level'].upper()))
+    _applog = logging.getLogger()
+    _applog.setLevel(level=getattr(logging, config['log_level'].upper()))
     if config['log_to'] == 'stdout':
         handler = logging.StreamHandler(sys.stdout)
         handler.setLevel(level=getattr(logging, config['log_level'].upper()))
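The new worker threads rely on persistqueue's file-backed Queue, so a request accepted over HTTP should survive a crash or restart until it has been processed and task_done() has been called (that is my reading of persistqueue's behaviour). A minimal sketch of the lifecycle, using a temporary directory instead of config['queue_location']:

    import tempfile
    from persistqueue import Queue, Empty

    queue_dir = tempfile.mkdtemp()            # the service uses config['queue_location'] + '/rawsave'
    q = Queue(queue_dir)                      # file-backed: items are written to disk
    q.put({'payload': {'domain': 'example.com'}})

    try:
        item = q.get(block=True, timeout=10)  # raises Empty if nothing arrives in time
        # ... apply the change here ...
        q.task_done()                         # acknowledge; unacked items reappear after a restart
    except Empty:
        pass                                  # queue drained within the timeout window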
@@ -168,41 +284,53 @@ def setup_logging():
             fmt='%(asctime)s %(levelname)s %(message)s'
         )
         handler.setFormatter(formatter)
-        applog.addHandler(handler)
+        _applog.addHandler(handler)
     elif config['log_to'] == 'file':
-        handler = logging.FileHandler('./config/directdns.log')
+        handler = TimedRotatingFileHandler(config['log_path'],
+                                           when='midnight',
+                                           backupCount=10)
        handler.setLevel(level=getattr(logging, config['log_level'].upper()))
         formatter = jsonlogger.JsonFormatter(
             fmt='%(asctime)s %(levelname)s %(message)s'
         )
         handler.setFormatter(formatter)
-        applog.addHandler(handler)
-    return applog
+        _applog.addHandler(handler)
+    return _applog


 if __name__ == '__main__':
-    app_version = "1.0.0"
+    app_version = "1.0.9"
     if os.path.isfile("/lib/x86_64-linux-gnu/" + "libgcc_s.so.1"):
         # Load local library
         libgcc_s = ctypes.cdll.LoadLibrary("/lib/x86_64-linux-gnu/" + "libgcc_s.so.1")
     # We are about to start our application
-    with open(r'config/app.yml') as config_file:
+    with open(r'conf/app.yml') as config_file:
         config = yaml.load(config_file, Loader=yaml.SafeLoader)
     applog = setup_logging()
     applog.info('DirectDNS Starting')
     applog.info('Timezone is {}'.format(config['timezone']))
     applog.info('Get Database Connection')
-    session = lib.db.connect()
+    session = lib.db.connect(config['db_location'])
     applog.info('Database Connected!')

-    zones_dir = "/etc/pdns/zones"
-    named_conf = "/etc/pdns/named.conf"
+    zones_dir = "/etc/named/directdnsonly"
+    named_conf = "/etc/named/directdnsonly.inc"

+    save_queue = Queue(config['queue_location'] + '/rawsave')
+    save_thread = threading.Thread(target=background_thread, args=('save',))
+    save_thread.daemon = True  # Daemonize thread
+    save_thread.start()  # Start the execution
+    delete_queue = Queue(config['queue_location'] + '/delete')
+    delete_thread = threading.Thread(target=background_thread, args=('delete',))
+    delete_thread.daemon = True  # Daemonize thread
+    delete_thread.start()  # Start the execution
+
     cherrypy.__version__ = ''
     cherrypy._cperror._HTTPErrorTemplate = cherrypy._cperror._HTTPErrorTemplate.replace(
         'Powered by <a href="http://www.cherrypy.org">CherryPy %(version)s</a>\n', '%(version)s')
-    userpassdict = {'test': 'test'}
-    checkpassword = cherrypy.lib.auth_basic.checkpassword_dict(userpassdict)
+    user_password_dict = {'test': 'test'}
+    check_password = cherrypy.lib.auth_basic.checkpassword_dict(user_password_dict)

     cherrypy.config.update({
         'server.socket_host': '0.0.0.0',
@@ -211,13 +339,23 @@ if __name__ == '__main__':
         'tools.proxy.base': config['proxy_support_base'],
         'tools.auth_basic.on': True,
         'tools.auth_basic.realm': 'dadns',
-        'tools.auth_basic.checkpassword': checkpassword,
+        'tools.auth_basic.checkpassword': check_password,
         'tools.response_headers.on': True,
         'tools.response_headers.headers': [('Server', 'DirectDNS v' + app_version)],
         'environment': config['environment']
     })

+    if bool(config['ssl_enable']):
+        cherrypy.config.update({
+            'server.ssl_module': 'builtin',
+            'server.ssl_certificate': config['ssl_cert'],
+            'server.ssl_private_key': config['ssl_key'],
+            'server.ssl_certificate_chain': config['ssl_bundle']
+        })
+
     # cherrypy.log.error_log.propagate = False
-    # cherrypy.log.access_log.propagate = False
+    if config['log_level'].upper() != 'DEBUG':
+        cherrypy.log.access_log.propagate = False

     if not lib.common.check_if_super_user_exists(session):
         password_str = lib.common.get_random_string(35)
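For reference, the refresh reads several new keys from the app config (now opened from conf/app.yml). These are the keys visible in this diff, written as the dict the YAML loads into; the values are illustrative only and the real file may define more:

    config = {
        'timezone': 'Pacific/Auckland',
        'environment': 'production',
        'proxy_support_base': 'https://dns.example.com',  # illustrative
        'log_to': 'file',                                  # or 'stdout'
        'log_level': 'info',
        'log_path': '/data/directdns.log',                 # new: TimedRotatingFileHandler target
        'db_location': '/data/directdns.db',               # new: passed to lib.db.connect()
        'queue_location': '/data/queue',                   # new: persistqueue directories live below this
        'ssl_enable': True,                                # new: enables the builtin SSL block
        'ssl_cert': '/app/config/cert.pem',
        'ssl_key': '/app/config/key.pem',
        'ssl_bundle': '/app/config/chain.pem',
    }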
lib/db module
@@ -6,9 +6,9 @@ import datetime
 Base = declarative_base()


-def connect():
+def connect(db_location):
     # Start SQLite engine
-    engine = create_engine('sqlite:///./config/keys.db', connect_args={'check_same_thread': False})
+    engine = create_engine('sqlite:///' + db_location, connect_args={'check_same_thread': False})
     Base.metadata.create_all(engine)
     Session = sessionmaker(bind=engine)
     session = Session()
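One detail on the new connect(db_location): SQLAlchemy's SQLite URLs use three slashes for a relative path and four for an absolute one, and the simple concatenation above yields the right form either way. For example, with illustrative paths:

    from sqlalchemy import create_engine

    print('sqlite:///' + 'config/keys.db')      # sqlite:///config/keys.db     (relative)
    print('sqlite:///' + '/data/directdns.db')  # sqlite:////data/directdns.db (absolute)

    # Both URL forms are accepted, so db_location may be relative or absolute.
    engine = create_engine('sqlite:///' + '/tmp/example.db',
                           connect_args={'check_same_thread': False})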