# haproxy/plugins/action/load_caddy_config.py
# Copyright: (c) 2016, Allen Sanabria <asanabria@linuxdynasty.org>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
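#
# This action plugin walks a directory of per-site YAML files and exposes the
# merged result as the `haproxy_config` fact (proxy_config, dns_hostnames and
# uptime_config). A minimal invocation sketch follows; the task name, paths and
# values are illustrative assumptions, not taken from a real playbook, and the
# plugin is assumed to be exposed under the name matching this file:
#
#   - name: Build haproxy configuration facts
#     load_caddy_config:
#       dir: vars/sites                      # mandatory
#       default_domain: example.org          # mandatory
#       default_dns_provider: some_provider  # mandatory
#       default_dns_target: lb.example.org   # mandatory
#       depth: 1                             # optional, 0 (default) = unlimited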
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from os import path, walk
import re
import ansible.constants as C
from ansible.errors import AnsibleError
from ansible.module_utils.six import string_types
from ansible.module_utils.common.text.converters import to_native, to_text
from ansible.plugins.action import ActionBase
from ansible.template import Templar
from ansible.utils.vars import combine_vars
from pathlib import Path


class ActionModule(ActionBase):

    TRANSFERS_FILES = False
    _requires_connection = False

    def run(self, tmp=None, task_vars=None):
        """ Load per-site yml files recursively from a directory and build the
            haproxy_config fact (proxy, DNS and uptime configuration).
        """
        del tmp  # tmp no longer has any effect

        if task_vars is None:
            task_vars = dict()

        self.task_var = task_vars
        self.show_content = True
        self.included_files = []

        # Validate arguments
        module_args = self._task.args.copy()
        if "dir" not in module_args:
            raise AnsibleError("'dir' option is mandatory in load_haproxy_config")
        if "default_domain" not in module_args:
            raise AnsibleError("'default_domain' option is mandatory in load_haproxy_config")
        if "default_dns_provider" not in module_args:
            raise AnsibleError("'default_dns_provider' option is mandatory in load_haproxy_config")
        if "default_dns_target" not in module_args:
            raise AnsibleError("'default_dns_target' option is mandatory in load_haproxy_config")

        self.source_dir = module_args.get('dir')
        self.default_domain = module_args.get('default_domain')
        self.default_dns_provider = module_args.get('default_dns_provider')
        self.default_dns_target = module_args.get('default_dns_target')
        self.dir = module_args.get('dir')
        self.depth = module_args.get('depth', 0)

        results = {
            "dns_hostnames": dict(),  # { provider: [ { hostname: "", domain: "", state: "", target: "" } ] }
            "proxy_config": [],
            "uptime_config": []       # [ { name: "", url: "", protected: True, state: "" } ]
        }
        failed = False

        self._set_root_dir()
        if not path.exists(self.source_dir):
            failed = True
            err_msg = ('{0} directory does not exist'.format(to_native(self.source_dir)))
        elif not path.isdir(self.source_dir):
            failed = True
            err_msg = ('{0} is not a directory'.format(to_native(self.source_dir)))
        else:
            for root_dir, filenames in self._traverse_dir_depth():
                failed, err_msg, updated_results = self._load_files_in_dir(root_dir, filenames)
                if failed:
                    break
                results['proxy_config'] = results['proxy_config'] + updated_results['proxy_config']
                for key, value in updated_results['dns_hostnames'].items():
                    results['dns_hostnames'][key] = results['dns_hostnames'].get(key, []) + value
                results['uptime_config'] = results['uptime_config'] + updated_results['uptime_config']

        result = super(ActionModule, self).run(task_vars=task_vars)
        if failed:
            result['failed'] = failed
            result['message'] = err_msg

        scope = dict()
        scope['haproxy_config'] = results
        results = scope

        result['ansible_included_var_files'] = self.included_files
        result['ansible_facts'] = results
        result['_ansible_no_log'] = not self.show_content
        return result
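
    # Shape of the fact registered by run(), assembled from the append calls in
    # _load_files_in_dir() below; the concrete values are illustrative only:
    #
    #   haproxy_config:
    #     proxy_config:
    #       - site: app.example.org
    #         upstream: 10.0.0.5:8080
    #         upstream_config: []
    #         protected: false
    #         whitelist_local: false
    #         health_uri: /health
    #     dns_hostnames:
    #       some_provider:
    #         - { hostname: app, domain: example.org, target: lb.example.org, state: present }
    #     uptime_config:
    #       - { name: app.example.org, url: "https://app.example.org/health", protected: false, state: present }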

    def _set_root_dir(self):
        if self._task._role:
            if self.source_dir.split('/')[0] == 'vars':
                path_to_use = (
                    path.join(self._task._role._role_path, self.source_dir)
                )
                if path.exists(path_to_use):
                    self.source_dir = path_to_use
            else:
                path_to_use = (
                    path.join(
                        self._task._role._role_path, 'vars', self.source_dir
                    )
                )
                self.source_dir = path_to_use
        else:
            if hasattr(self._task._ds, '_data_source'):
                current_dir = (
                    "/".join(self._task._ds._data_source.split('/')[:-1])
                )
                self.source_dir = path.join(current_dir, self.source_dir)

    def _log_walk(self, error):
        self._display.vvv('Issue with walking through "%s": %s' % (to_native(error.filename), to_native(error)))

    def _traverse_dir_depth(self):
        """ Recursively iterate over a directory and sort the files in
            alphabetical order. Do not iterate past the set depth.
            The default depth is unlimited.
        """
        current_depth = 0
        sorted_walk = list(walk(self.source_dir, onerror=self._log_walk, followlinks=True))
        sorted_walk.sort(key=lambda x: x[0])
        for current_root, current_dir, current_files in sorted_walk:
            current_depth += 1
            if current_depth <= self.depth or self.depth == 0:
                current_files.sort()
                yield (current_root, current_files)
            else:
                break
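
    # Note on depth: current_depth is incremented once per directory yielded by
    # the sorted os.walk(), so depth=1 only loads files from the first
    # (top-level) directory in the walk; depth=0 disables the limit entirely.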

    def _load_files(self, filename):
        """ Loads a file and converts the output into a valid Python dict.
        Args:
            filename (str): The source file.
        Returns:
            Tuple (bool, str, dict)
        """
        results = dict()
        failed = False
        err_msg = ''

        b_data, show_content = self._loader._get_file_contents(filename)
        data = to_text(b_data, errors='surrogate_or_strict')
        self.show_content = show_content
        data = self._loader.load(data, file_name=filename, show_content=show_content)
        if not data:
            data = dict()
        if not isinstance(data, dict):
            failed = True
            err_msg = ('{0} must be stored as a dictionary/hash'.format(to_native(filename)))
        else:
            # Apply Ansible templating to the data
            templar = Templar(loader=self._loader, variables=self.task_var)
            data = templar.template(data)
            self.included_files.append(filename)
            results.update(data)

        return failed, err_msg, results
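
    # Because the loaded data is passed through Templar with the task vars,
    # values in the site files may contain Jinja2 expressions that are resolved
    # here, e.g. (illustrative only, `backend_host` is an assumed variable):
    #   upstream: "{{ backend_host }}:8080"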

    def _load_files_in_dir(self, root_dir, var_files):
        """ Load the found yml files and update/overwrite the dictionary.
        Args:
            root_dir (str): The base directory of the list of files that is being passed.
            var_files: (list): List of files to iterate over and load into a dictionary.
        Returns:
            Tuple (bool, str, dict)
        """
        results = {
            "dns_hostnames": dict(),  # { provider: [ { hostname: "", domain: "", state: "", target: "" } ] }
            "proxy_config": [],       # { site: "xx.fr", upstream: "", upstream_config: [], protected: true, whitelist_local: true, domain: "" }
            "uptime_config": []       # [ { name: "", url: "", protected: True, state: "" } ]
        }
        failed = False
        err_msg = ''

        for filename in var_files:
            stop_iter = False
            # Never include main.yml from a role, as that is the default included by the role
            if self._task._role:
                if path.join(self._task._role._role_path, filename) == path.join(root_dir, 'vars', 'main.yml'):
                    stop_iter = True
                    continue

            filepath = path.join(root_dir, filename)
            if not stop_iter and not failed:
                if path.exists(filepath):
                    loaded_data = {}
                    failed, err_msg, loaded_data = self._load_files(filepath)
                    if not failed:
                        main_hostname = Path(filepath).stem
                        dns = loaded_data.get("dns", dict())
                        domain = dns.get("domain", self.default_domain)
                        dns_provider = dns.get("provider", self.default_dns_provider)
                        dns_target = dns.get("target", self.default_dns_target)
                        protected = loaded_data.get("protected", False)
                        whitelist_local = loaded_data.get("whitelist_local", False)
                        additionnal_hostname = loaded_data.get('additionnal_hostname', [])
                        state = loaded_data.get("state", "present")
                        uptime_raw_config = loaded_data.get("uptime", dict())

                        if "upstream" not in loaded_data:
                            failed = True
                            err_msg = ('Could not find "upstream" in {0}'.format(to_native(filename)))
                            continue

                        upstream = loaded_data.get("upstream")
                        upstream_config = loaded_data.get("upstream_config")

                        if state == "present":
                            results['proxy_config'].append({
                                "site": '{0}.{1}'.format(main_hostname, domain),
                                "upstream": upstream,
                                "upstream_config": upstream_config,
                                "protected": protected,
                                "whitelist_local": whitelist_local,
                                "health_uri": "/" + uptime_raw_config.get("endpoint", "")
                            })

                        if not dns.get("skip", False):
                            if dns_provider not in results['dns_hostnames']:
                                results['dns_hostnames'].update({dns_provider: []})
                            results['dns_hostnames'][dns_provider].append({
                                "hostname": main_hostname,
                                "domain": domain,
                                "target": dns_target,
                                "state": state,
                            })

                        if not uptime_raw_config.get('skip', False):
                            results['uptime_config'].append({
                                "name": '{0}.{1}'.format(main_hostname, domain),
                                "url": 'https://{0}.{1}/{2}'.format(main_hostname, domain, uptime_raw_config.get("endpoint", "")),
                                "protected": protected,
                                "state": uptime_raw_config.get("state", state)
                            })
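
                        # Entries under additionnal_hostname inherit the defaults
                        # computed above (dns settings, protected, state, ...)
                        # unless the individual host entry overrides them.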
                        for host in additionnal_hostname:
                            this_dns = host.get("dns", dns)
                            this_domain = this_dns.get("domain", domain)
                            this_dns_provider = this_dns.get("provider", dns_provider)
                            this_dns_target = this_dns.get("target", dns_target)
                            this_protected = host.get('protected', protected)
                            this_whitelist_local = host.get("whitelist_local", whitelist_local)
                            this_state = host.get('state', state)
                            this_uptime_raw_config = host.get("uptime", dict())
                            full_domain = '{0}.{1}'.format(host.get("hostname"), this_domain) if host.get("hostname") else this_domain
                            this_upstream_config = host.get("upstream_config")

                            if this_state == "present":
                                results['proxy_config'].append({
                                    "site": full_domain,
                                    "upstream": upstream,
                                    "upstream_config": this_upstream_config,
                                    "protected": this_protected,
                                    "whitelist_local": this_whitelist_local,
                                    "health_uri": "/" + this_uptime_raw_config.get("endpoint", "")
                                })

                            if not this_dns.get("skip", dns.get("skip", False)):
                                if this_dns_provider not in results['dns_hostnames']:
                                    results['dns_hostnames'].update({this_dns_provider: []})
                                results['dns_hostnames'][this_dns_provider].append({
                                    "hostname": host.get("hostname"),
                                    "domain": this_domain,
                                    "target": this_dns_target,
                                    "state": this_state
                                })

                            if not this_uptime_raw_config.get('skip', False):
                                results['uptime_config'].append({
                                    "name": full_domain,
                                    "url": 'https://{0}/{1}'.format(full_domain, this_uptime_raw_config.get("endpoint", "")),
                                    "protected": this_protected,
                                    "state": this_uptime_raw_config.get("state", this_state)
                                })

        return failed, err_msg, results
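
# A sketch of a per-site YAML file this plugin consumes, derived from the keys
# read in _load_files_in_dir(); the file name (here "app.yml") becomes the main
# hostname, and every value below is illustrative, not taken from a real site:
#
#   # app.yml
#   upstream: 10.0.0.5:8080        # mandatory
#   upstream_config: []
#   protected: false
#   whitelist_local: false
#   state: present
#   dns:
#     domain: example.org          # falls back to default_domain
#     provider: some_provider      # falls back to default_dns_provider
#     target: lb.example.org       # falls back to default_dns_target
#     skip: false
#   uptime:
#     endpoint: health             # health_uri becomes "/health"
#     skip: false
#   additionnal_hostname:
#     - hostname: api
#       protected: true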