Commit 186f3ca4 authored by Elena Grandi

flake8

parent 08abb49d
......@@ -14,6 +14,7 @@ class JsonableLog:
It can be used for operations executed by the manager, as well as
for events, or anything else with a method ``.to_json()``.
"""
def __init__(self, pathname):
self.pathname = pathname
os.makedirs(os.path.dirname(self.pathname), exist_ok=True)
......@@ -61,49 +62,47 @@ class Manager:
"""
# sources that require no params
for source in [
sources.DhcpdDataSource,
sources.ChaosDataSource,
]:
sources.DhcpdDataSource,
sources.ChaosDataSource,
]:
if not source.is_viable(self.config):
self.log.warn(
"%s: data source will not be started",
source.__name__
)
"%s: data source will not be started", source.__name__
)
continue
self.log.info("%s: data source started", source.__name__)
self.sources.append(
source(event_hub=self.event_hub, config=self.config)
)
)
# arpwatch data source requires a filename
# we only try to load the arpwatch datasource if the directory
# is readable
if os.path.isdir(self.config.arpwatch_datadir) and \
os.access(self.config.arpwatch_datadir, os.R_OK):
if os.path.isdir(self.config.arpwatch_datadir) and os.access(
self.config.arpwatch_datadir, os.R_OK
):
for fname in os.listdir(self.config.arpwatch_datadir):
if fname.endswith('.dat'):
source = sources.ArpwatchDataSource
dat_fname = os.path.join(
self.config.arpwatch_datadir,
fname
)
self.config.arpwatch_datadir, fname
)
if not source.is_viable(self.config, dat_fname):
self.log.warn(
"ArpwatchDataSource: data source will not be started on %s",
dat_fname
"ArpwatchDataSource: data source will not be started on %s",
dat_fname,
)
continue
self.log.info(
"ArpwatchDataSource: data source started on %s",
dat_fname
)
dat_fname,
)
self.sources.append(
source(
event_hub=self.event_hub,
config=self.config,
datafile=dat_fname,
)
)
)
async def load_user_database(self):
"""
......@@ -113,18 +112,18 @@ class Manager:
from .users import local
from .users import master
from .users import mock
# sources that require no params
for db in [
ldap.LDAP,
local.Local,
master.Master,
mock.Mock,
]:
ldap.LDAP,
local.Local,
master.Master,
mock.Mock,
]:
if not await db.is_viable(self.config):
self.log.warn(
"%s: user database will not be used",
db.__name__
)
"%s: user database will not be used", db.__name__
)
continue
return db(self.config)
raise RuntimeError("Cannot instantiate a valid user database")
......
......@@ -30,6 +30,7 @@ class PlaybookLog:
"""
Logger for results of Playbook runs
"""
def __init__(self, root):
self.root = root
......@@ -65,34 +66,44 @@ class PlaybookLog:
pb_relpath = start.strftime("%Y-%m")
os.makedirs(os.path.join(self.root, pb_relpath), exist_ok=True)
id_relpath = os.path.join("id", hashlib.md5(playbook_id.encode()).hexdigest()[:2])
id_relpath = os.path.join(
"id", hashlib.md5(playbook_id.encode()).hexdigest()[:2]
)
os.makedirs(os.path.join(self.root, id_relpath), exist_ok=True)
pb_name = "{:%d-%H%M%S-%f}-{}.json".format(start, playbook.replace("/", "-"))
pb_name = "{:%d-%H%M%S-%f}-{}.json".format(
start, playbook.replace("/", "-")
)
pb_relpathname = os.path.join(pb_relpath, pb_name)
pb_abspath = os.path.join(self.root, pb_relpathname)
with open(pb_abspath, "wt") as fd:
json.dump({
"playbook": playbook,
"results": results,
}, fd, indent=1)
json.dump(
{"playbook": playbook, "results": results}, fd, indent=1
)
with open(pb_abspath + ".summary", "wt") as fd:
json.dump({
"playbook": playbook,
"start": start.timestamp(),
"end": end.timestamp(),
"id": playbook_id,
"title": first_play["name"],
"has_ok": has_ok,
"has_changed": has_changed,
"has_failures": has_failures,
"has_unreachable": has_unreachable,
}, fd, indent=1)
os.symlink(os.path.join("..", "..", pb_relpathname), os.path.join(self.root, id_relpath, playbook_id))
json.dump(
{
"playbook": playbook,
"start": start.timestamp(),
"end": end.timestamp(),
"id": playbook_id,
"title": first_play["name"],
"has_ok": has_ok,
"has_changed": has_changed,
"has_failures": has_failures,
"has_unreachable": has_unreachable,
},
fd,
indent=1,
)
os.symlink(
os.path.join("..", "..", pb_relpathname),
os.path.join(self.root, id_relpath, playbook_id),
)
def load(self, playbook_id):
"""
......@@ -100,7 +111,9 @@ class PlaybookLog:
"""
id_relpath = hashlib.md5(playbook_id.encode()).hexdigest()[:2]
try:
with open(os.path.join(self.root, "id", id_relpath, playbook_id), "rt") as fd:
with open(
os.path.join(self.root, "id", id_relpath, playbook_id), "rt"
) as fd:
return json.load(fd)
except (OSError, json.JSONDecodeError):
return None
......@@ -126,7 +139,9 @@ class PlaybookLog:
"""
Return a list of all playbook summaries in the given month
"""
month_dir = os.path.join(self.root, "{:04d}-{:02d}".format(year, month))
month_dir = os.path.join(
self.root, "{:04d}-{:02d}".format(year, month)
)
if not os.path.exists(month_dir):
return
for fn in sorted(os.listdir(month_dir)):
......@@ -160,14 +175,20 @@ class Ansible:
try:
playbook = yaml.load(fp)
except Exception as e:
self.log.warning("Cannot load playbook %s: %s", abspath, e)
self.log.warning(
"Cannot load playbook %s: %s", abspath, e
)
playbook = None
try:
if playbook is not None:
info["title"] = playbook[0].get("name")
except Exception as e:
self.log.warning("Cannot extract information from playbook %s: %s", abspath, e)
self.log.warning(
"Cannot extract information from playbook %s: %s",
abspath,
e,
)
playbook = None
res.append(info)
......@@ -200,7 +221,7 @@ class Ansible:
'-a', 'mac=' + m,
'-i', 'localhost,',
'-c', 'local',
'all'
'all',
]
# https://docs.ansible.com/ansible/latest/modules/wakeonlan_module.html
# wakeonlan does not know whether the magic packet has
......@@ -226,6 +247,7 @@ class PlaybookRunner:
"""
Asyncronously run an ansible playbook on an inventory of hosts.
"""
def __init__(self, ansible, playbook_name, playbook):
self.ansible = ansible
self.playbook_name = playbook_name
......@@ -245,10 +267,12 @@ class PlaybookRunner:
proc = await asyncio.create_subprocess_exec(*cmd, **ANSIBLE_BASE_KW)
stdout = []
stderr = []
await asyncio.wait([
store_stream(proc.stdout, stdout),
store_stream(proc.stderr, stderr),
])
await asyncio.wait(
[
store_stream(proc.stdout, stdout),
store_stream(proc.stderr, stderr),
]
)
res = await proc.wait()
if res != 0:
self.ansible.log.error(
......@@ -295,6 +319,7 @@ class FactsCollector:
"""
Asyncronously run ansible data gathering on an inventory of hosts.
"""
def __init__(self, ansible):
self.ansible = ansible
self.stdout = []
......@@ -312,13 +337,19 @@ class FactsCollector:
cmd = self.get_cmdline(hosts)
proc = await asyncio.create_subprocess_exec(*cmd, **ANSIBLE_BASE_KW)
stderr = []
await asyncio.wait([
self.parse_stdout(proc.stdout),
store_stream(proc.stderr, stderr),
])
await asyncio.wait(
[
self.parse_stdout(proc.stdout),
store_stream(proc.stderr, stderr),
]
)
res = await proc.wait()
if res != 0:
self.ansible.log.error("gathering ansible facts on %s failed. stderr: %r", hosts, b"\n".join(stderr))
self.ansible.log.error(
"gathering ansible facts on %s failed. stderr: %r",
hosts,
b"\n".join(stderr),
)
return res == 0
async def parse_stdout(self, stream):
......@@ -344,7 +375,10 @@ class FactsCollector:
return
mo = re_head.match(self.stdout[0].decode())
if not mo:
self.ansible.log.warning("ansible facts output has malformed header line %r", self.stdout[0])
self.ansible.log.warning(
"ansible facts output has malformed header line %r",
self.stdout[0],
)
return
host = mo.group("host")
......@@ -358,17 +392,10 @@ class FactsCollector:
if result != "SUCCESS":
await self.ansible.event_hub.publish(
events.HostFactsFailed(
self.ansible,
name=host,
result=result,
details=data,
self.ansible, name=host, result=result, details=data,
)
)
else:
await self.ansible.event_hub.publish(
events.HostFactsEvent(
self.ansible,
name=host,
facts=data,
)
events.HostFactsEvent(self.ansible, name=host, facts=data,)
)
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment