Initial commit
This commit is contained in:
commit
5398ad123c
14 changed files with 2147 additions and 0 deletions
1
enroll/__init__.py
Normal file
1
enroll/__init__.py
Normal file
|
|
@ -0,0 +1 @@
|
|||
# Package marker for `enroll`; no names are re-exported at package level.
__all__ = []
|
||||
4
enroll/__main__.py
Normal file
4
enroll/__main__.py
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
# Entry point for `python -m enroll`: delegate straight to the CLI.
from .cli import main

if __name__ == "__main__":
    main()
|
||||
145
enroll/accounts.py
Normal file
145
enroll/accounts.py
Normal file
|
|
@ -0,0 +1,145 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from dataclasses import dataclass
|
||||
from typing import Dict, List, Optional, Set, Tuple
|
||||
|
||||
|
||||
@dataclass
class UserRecord:
    """Snapshot of one non-system account, assembled from /etc/passwd and /etc/group."""

    name: str                        # login name
    uid: int
    gid: int                         # numeric primary group id
    gecos: str                       # comment field (full name, etc.)
    home: str
    shell: str
    primary_group: str               # group name resolved from gid, or str(gid) when unknown
    supplementary_groups: List[str]  # other groups listing this user as a member
    ssh_files: List[str]             # harvested SSH files (currently only authorized_keys)
|
||||
|
||||
|
||||
def parse_login_defs(path: str = "/etc/login.defs") -> Dict[str, int]:
    """Extract the UID range settings from login.defs.

    Only UID_MIN / UID_MAX / SYS_UID_MIN / SYS_UID_MAX are collected.
    Comments, blank lines, and non-integer values are ignored; a missing
    file yields an empty mapping.
    """
    wanted = {"UID_MIN", "UID_MAX", "SYS_UID_MIN", "SYS_UID_MAX"}
    result: Dict[str, int] = {}
    try:
        fh = open(path, "r", encoding="utf-8", errors="replace")
    except FileNotFoundError:
        return result
    with fh:
        for raw in fh:
            raw = raw.strip()
            if not raw or raw.startswith("#"):
                continue
            fields = raw.split()
            if len(fields) < 2 or fields[0] not in wanted:
                continue
            try:
                result[fields[0]] = int(fields[1])
            except ValueError:
                pass
    return result
|
||||
|
||||
|
||||
def parse_passwd(path: str = "/etc/passwd") -> List[Tuple[str, int, int, str, str, str]]:
    """Parse passwd(5) rows into (name, uid, gid, gecos, home, shell) tuples.

    Blank lines, comment lines, short rows, and rows whose uid/gid are not
    integers are skipped.  Propagates FileNotFoundError when *path* is absent.
    """
    entries: List[Tuple[str, int, int, str, str, str]] = []
    with open(path, "r", encoding="utf-8", errors="replace") as fh:
        for raw in fh:
            raw = raw.rstrip("\n")
            if not raw or raw.startswith("#"):
                continue
            fields = raw.split(":")
            if len(fields) < 7:
                continue
            try:
                uid_num, gid_num = int(fields[2]), int(fields[3])
            except ValueError:
                continue
            entries.append((fields[0], uid_num, gid_num, fields[4], fields[5], fields[6]))
    return entries
|
||||
|
||||
|
||||
def parse_group(path: str = "/etc/group") -> Tuple[Dict[int, str], Dict[str, int], Dict[str, Set[str]]]:
    """Parse group(5) into three lookups.

    Returns (gid -> group name, group name -> gid, group name -> member set).
    Blank/comment lines, short rows, and rows with a non-numeric gid are
    skipped; empty member fields produce an empty set.
    """
    gid_to_name: Dict[int, str] = {}
    name_to_gid: Dict[str, int] = {}
    members: Dict[str, Set[str]] = {}
    with open(path, "r", encoding="utf-8", errors="replace") as fh:
        for raw in fh:
            raw = raw.rstrip("\n")
            if not raw or raw.startswith("#"):
                continue
            fields = raw.split(":")
            if len(fields) < 4:
                continue
            group_name = fields[0]
            try:
                gid = int(fields[2])
            except ValueError:
                continue
            gid_to_name[gid] = group_name
            name_to_gid[group_name] = gid
            members[group_name] = {m for m in fields[3].split(",") if m}
    return gid_to_name, name_to_gid, members
|
||||
|
||||
|
||||
def is_human_user(uid: int, shell: str, uid_min: int) -> bool:
    """Heuristic for a human (non-system) account.

    Human means: uid at or above *uid_min* and a shell that is not one of
    the well-known no-login shells.  An empty/None shell counts as human.
    """
    if uid < uid_min:
        return False
    normalized = (shell or "").strip()
    return normalized not in {"/usr/sbin/nologin", "/usr/bin/nologin", "/bin/false"}
|
||||
|
||||
|
||||
def find_user_ssh_files(home: str) -> List[str]:
    """Return harvestable SSH files under *home*/.ssh.

    Currently only a regular (non-symlink) authorized_keys file qualifies.
    Returns a sorted, de-duplicated list; empty when the .ssh directory is
    missing.
    """
    ssh_dir = os.path.join(home, ".ssh")
    if not os.path.isdir(ssh_dir):
        return []

    candidates: Set[str] = set()
    auth_keys = os.path.join(ssh_dir, "authorized_keys")
    if os.path.isfile(auth_keys) and not os.path.islink(auth_keys):
        candidates.add(auth_keys)
    return sorted(candidates)
|
||||
|
||||
|
||||
def collect_non_system_users() -> List[UserRecord]:
    """Build a UserRecord for every human account on this host.

    Reads /etc/login.defs, /etc/passwd, and /etc/group; skips root/nobody
    and anything failing the is_human_user() heuristic.
    """
    defs = parse_login_defs()
    # 1000 is the conventional Debian default when UID_MIN is not set.
    uid_min = defs.get("UID_MIN", 1000)

    passwd_rows = parse_passwd()
    gid_to_name, _, group_members = parse_group()

    users: List[UserRecord] = []
    for name, uid, gid, gecos, home, shell in passwd_rows:
        if name in {"root", "nobody"}:
            continue
        if not is_human_user(uid, shell, uid_min):
            continue

        # Fall back to the numeric gid (as a string) when the group name is unknown.
        primary_group = gid_to_name.get(gid, str(gid))

        # Supplementary groups: every group listing this user, minus the primary.
        supp: List[str] = []
        for gname, mem in group_members.items():
            if name in mem and gname != primary_group:
                supp.append(gname)
        supp = sorted(set(supp))

        # Only probe for SSH material when the home path is absolute.
        ssh_files = find_user_ssh_files(home) if home and home.startswith("/") else []

        users.append(UserRecord(
            name=name,
            uid=uid,
            gid=gid,
            gecos=gecos,
            home=home,
            shell=shell,
            primary_group=primary_group,
            supplementary_groups=supp,
            ssh_files=ssh_files,
        ))

    return users
|
||||
32
enroll/cli.py
Normal file
32
enroll/cli.py
Normal file
|
|
@ -0,0 +1,32 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
from .harvest import harvest
|
||||
from .manifest import manifest
|
||||
|
||||
|
||||
def main() -> None:
    """Command-line entry point: harvest, manifest, or both (export)."""
    parser = argparse.ArgumentParser(prog="enroll")
    commands = parser.add_subparsers(dest="cmd", required=True)

    harvest_cmd = commands.add_parser("harvest", help="Harvest service/package/config state into a bundle")
    harvest_cmd.add_argument("--out", required=True, help="Bundle output directory")

    manifest_cmd = commands.add_parser("manifest", help="Render Ansible roles from a harvested bundle")
    manifest_cmd.add_argument("--bundle", required=True, help="Path to the bundle directory created by the harvest command")
    manifest_cmd.add_argument("--out", required=True, help="Output directory for generated roles/playbook Ansible manifest")

    export_cmd = commands.add_parser("export", help="Harvest then manifest in one shot")
    export_cmd.add_argument("--bundle", required=True, help="Path to the directory to place the bundle in")
    export_cmd.add_argument("--out", required=True, help="Output directory for generated roles/playbook Ansible manifest")

    args = parser.parse_args()

    if args.cmd == "harvest":
        print(harvest(args.out))
    elif args.cmd == "manifest":
        manifest(args.bundle, args.out)
    elif args.cmd == "export":
        harvest(args.bundle)
        manifest(args.bundle, args.out)
|
||||
175
enroll/debian.py
Normal file
175
enroll/debian.py
Normal file
|
|
@ -0,0 +1,175 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import glob
|
||||
import hashlib
|
||||
import os
|
||||
import subprocess
|
||||
from typing import Dict, List, Optional, Set, Tuple
|
||||
|
||||
|
||||
def _run(cmd: list[str]) -> str:
    """Run *cmd* and return its captured stdout.

    Raises RuntimeError (with stderr in the message) on a non-zero exit.
    """
    proc = subprocess.run(cmd, check=False, text=True, capture_output=True)
    if proc.returncode == 0:
        return proc.stdout
    raise RuntimeError(f"Command failed: {cmd}\n{proc.stderr}")
|
||||
|
||||
|
||||
def dpkg_owner(path: str) -> Optional[str]:
    """Return the package that owns *path* according to `dpkg -S`.

    Returns None when dpkg exits non-zero (e.g. no package claims the
    path).  Output looks like "pkg[:arch]: /path"; only the bare package
    name is returned.
    """
    proc = subprocess.run(["dpkg", "-S", path], text=True, capture_output=True)
    if proc.returncode != 0:
        return None
    owner_field = proc.stdout.split(":", 1)[0].strip()
    package = owner_field.split(":", 1)[0].strip()
    return package or None
|
||||
|
||||
|
||||
|
||||
def list_manual_packages() -> List[str]:
    """Return packages marked as manually installed (apt-mark showmanual)."""
    proc = subprocess.run(["apt-mark", "showmanual"], text=True, capture_output=True)
    if proc.returncode != 0:
        # Best-effort: treat an apt-mark failure as "no manual packages".
        return []
    names = {
        entry.strip()
        for entry in (proc.stdout or "").splitlines()
        if entry.strip() and not entry.strip().startswith("#")
    }
    return sorted(names)
|
||||
|
||||
def build_dpkg_etc_index(
    info_dir: str = "/var/lib/dpkg/info",
) -> Tuple[Set[str], Dict[str, str], Dict[str, Set[str]], Dict[str, List[str]]]:
    """
    Returns:
        owned_etc_paths: set of /etc paths owned by dpkg
        etc_owner_map: /etc/path -> pkg
        topdir_to_pkgs: "nginx" -> {"nginx-common", ...} based on /etc/<topdir>/...
        pkg_to_etc_paths: pkg -> list of /etc paths it installs
    """
    owned_paths: Set[str] = set()
    owner_by_path: Dict[str, str] = {}
    pkgs_by_topdir: Dict[str, Set[str]] = {}
    etc_by_pkg: Dict[str, List[str]] = {}

    for list_file in glob.glob(os.path.join(info_dir, "*.list")):
        # "<pkg>[:arch].list" -> bare package name (strip ".list" and arch).
        pkg = os.path.basename(list_file)[:-5].split(":", 1)[0]

        collected: List[str] = []
        try:
            with open(list_file, "r", encoding="utf-8", errors="replace") as fh:
                for raw in fh:
                    entry = raw.rstrip("\n")
                    if not entry.startswith("/etc/"):
                        continue
                    owned_paths.add(entry)
                    owner_by_path.setdefault(entry, pkg)
                    collected.append(entry)

                    # "/etc/<topdir>/..." -> index package by its /etc top dir.
                    segments = entry.split("/", 3)
                    if len(segments) >= 3 and segments[2]:
                        pkgs_by_topdir.setdefault(segments[2], set()).add(pkg)
        except FileNotFoundError:
            continue

        if collected:
            etc_by_pkg.setdefault(pkg, []).extend(collected)

    # De-duplicate and order each package's path list.
    for pkg_name in list(etc_by_pkg):
        etc_by_pkg[pkg_name] = sorted(set(etc_by_pkg[pkg_name]))

    return owned_paths, owner_by_path, pkgs_by_topdir, etc_by_pkg
|
||||
|
||||
|
||||
def parse_status_conffiles(status_path: str = "/var/lib/dpkg/status") -> Dict[str, Dict[str, str]]:
    """
    pkg -> { "/etc/foo": md5hex, ... } based on dpkg status "Conffiles" field.
    This md5 is the packaged baseline for the conffile.
    """
    out: Dict[str, Dict[str, str]] = {}

    cur: Dict[str, str] = {}        # fields of the paragraph being parsed
    key: Optional[str] = None       # last field name, for continuation lines

    def flush() -> None:
        # Record the finished paragraph's Conffiles under its package name.
        # BUG FIX: field values keep their trailing newline (v.lstrip() only
        # strips the left side), so the package name must be stripped here —
        # otherwise `out` is keyed by "pkg\n" and callers doing
        # conffiles_by_pkg.get(pkg) never find a match.
        pkg = (cur.get("Package") or "").strip()
        if not pkg:
            return
        raw = cur.get("Conffiles")
        if not raw:
            return
        m: Dict[str, str] = {}
        for line in raw.splitlines():
            line = line.strip()
            if not line:
                continue
            # Each entry is " /etc/path md5hex[ obsolete]".
            parts = line.split()
            if len(parts) >= 2 and parts[0].startswith("/"):
                m[parts[0]] = parts[1]
        if m:
            out[pkg] = m

    with open(status_path, "r", encoding="utf-8", errors="replace") as f:
        for line in f:
            if line.strip() == "":
                # Blank line terminates a package paragraph.
                if cur:
                    flush()
                cur = {}
                key = None
                continue
            if line[0].isspace() and key:
                # Continuation of a multi-line field (e.g. Conffiles).
                cur[key] += line
            elif ":" in line:
                k, v = line.split(":", 1)
                key = k
                cur[key] = v.lstrip()

    # Final paragraph may not be followed by a blank line.
    if cur:
        flush()
    return out
|
||||
|
||||
|
||||
def read_pkg_md5sums(pkg: str) -> Dict[str, str]:
    """
    relpath -> md5hex from /var/lib/dpkg/info/<pkg>.md5sums
    relpath has no leading slash, e.g. 'etc/nginx/nginx.conf'
    """
    md5sums_path = f"/var/lib/dpkg/info/{pkg}.md5sums"
    if not os.path.exists(md5sums_path):
        return {}

    result: Dict[str, str] = {}
    with open(md5sums_path, "r", encoding="utf-8", errors="replace") as fh:
        for raw in fh:
            raw = raw.strip()
            if not raw:
                continue
            # "<md5hex>  <relative/path>"
            digest, relpath = raw.split(None, 1)
            result[relpath.strip()] = digest.strip()
    return result
|
||||
|
||||
|
||||
def file_md5(path: str) -> str:
    """Hex MD5 digest of the file at *path*, read in 1 MiB chunks."""
    digest = hashlib.md5()
    with open(path, "rb") as fh:
        while chunk := fh.read(1024 * 1024):
            digest.update(chunk)
    return digest.hexdigest()
|
||||
|
||||
|
||||
def stat_triplet(path: str) -> Tuple[str, str, str]:
    """Return (owner, group, mode) for *path*, following symlinks.

    Owner/group fall back to the numeric id as a string when there is no
    matching passwd/group entry.  Mode is the permission bits only, as a
    zero-padded octal string such as "0644".
    """
    import grp
    import pwd

    info = os.stat(path, follow_symlinks=True)
    mode = format(info.st_mode & 0o777, "04o")

    try:
        owner = pwd.getpwuid(info.st_uid).pw_name
    except KeyError:
        owner = str(info.st_uid)
    try:
        group = grp.getgrgid(info.st_gid).gr_name
    except KeyError:
        group = str(info.st_gid)
    return owner, group, mode
|
||||
474
enroll/harvest.py
Normal file
474
enroll/harvest.py
Normal file
|
|
@ -0,0 +1,474 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import glob
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
from dataclasses import dataclass, asdict
|
||||
from typing import Dict, List, Optional, Set
|
||||
|
||||
from .systemd import list_enabled_services, get_unit_info, UnitQueryError
|
||||
from .debian import (
|
||||
build_dpkg_etc_index,
|
||||
dpkg_owner,
|
||||
file_md5,
|
||||
list_manual_packages,
|
||||
parse_status_conffiles,
|
||||
read_pkg_md5sums,
|
||||
stat_triplet,
|
||||
)
|
||||
from .secrets import SecretPolicy
|
||||
from .accounts import collect_non_system_users, UserRecord
|
||||
|
||||
|
||||
|
||||
@dataclass
class ManagedFile:
    """A file captured into the bundle that generated roles will deploy."""

    path: str     # absolute path on the harvested host
    src_rel: str  # path relative to artifacts/<role>/ (no leading slash)
    owner: str
    group: str
    mode: str     # octal permission string, e.g. "0644"
    reason: str   # why it was captured, e.g. "modified_conffile", "custom_unowned"
|
||||
|
||||
|
||||
@dataclass
class ExcludedFile:
    """A candidate file that was skipped (secret policy denial or unreadable)."""

    path: str
    reason: str  # deny reason from SecretPolicy, or "unreadable"
|
||||
|
||||
|
||||
@dataclass
class ServiceSnapshot:
    """Harvest result for one enabled systemd service unit."""

    unit: str                         # systemd unit name, e.g. "nginx.service"
    role_name: str                    # sanitized role name derived from the unit
    packages: List[str]               # dpkg packages attributed to the unit
    managed_files: List[ManagedFile]
    excluded: List[ExcludedFile]
    notes: List[str]                  # human-readable diagnostics
|
||||
|
||||
|
||||
@dataclass
class PackageSnapshot:
    """Harvest result for one manually installed (apt-mark showmanual) package."""

    package: str
    role_name: str                    # "pkg_<name>", sanitized
    managed_files: List[ManagedFile]
    excluded: List[ExcludedFile]
    notes: List[str]
|
||||
|
||||
|
||||
@dataclass
class UsersSnapshot:
    """Harvest result for the synthetic non-system-users role."""

    role_name: str                    # "users"
    users: List[dict]                 # serialized account attributes (name, uid, ...)
    managed_files: List[ManagedFile]  # harvested SSH material
    excluded: List[ExcludedFile]
    notes: List[str]
|
||||
|
||||
|
||||
# File extensions an unowned /etc file may have to be considered
# configuration worth harvesting (see _is_confish).
ALLOWED_UNOWNED_EXTS = {
    ".conf", ".cfg", ".ini", ".cnf", ".yaml", ".yml", ".json", ".toml",
    ".rules", ".service", ".socket", ".timer", ".target", ".path", ".mount",
    ".network", ".netdev", ".link",
    "",  # allow extensionless (common in /etc/default and /etc/init.d)
}

# Safety cap on how many unowned files a single role may pull in
# (see _scan_unowned_under_roots).
MAX_UNOWNED_FILES_PER_ROLE = 400
|
||||
|
||||
|
||||
def _safe_name(s: str) -> str:
    """Sanitize *s* for use as a role/file name.

    Every character that is not alphanumeric or an underscore becomes an
    underscore (the original's "-" allowance was dead code: hyphens were
    replaced with underscores afterwards anyway).
    """
    return "".join(ch if ch.isalnum() or ch == "_" else "_" for ch in s)
|
||||
|
||||
|
||||
def _role_name_from_unit(unit: str) -> str:
    """Role name for a service unit: drop the ".service" suffix, sanitize."""
    return _safe_name(unit.removesuffix(".service"))
|
||||
|
||||
|
||||
def _role_name_from_pkg(pkg: str) -> str:
    """Role name for a package role: "pkg_" prefix plus sanitized name."""
    return "pkg_" + _safe_name(pkg)
|
||||
|
||||
|
||||
def _copy_into_bundle(bundle_dir: str, role_name: str, abs_path: str, src_rel: str) -> None:
    """Copy *abs_path* (metadata preserved) to artifacts/<role>/<src_rel>
    inside the bundle, creating parent directories as needed."""
    target = os.path.join(bundle_dir, "artifacts", role_name, src_rel)
    os.makedirs(os.path.dirname(target), exist_ok=True)
    shutil.copy2(abs_path, target)
|
||||
|
||||
|
||||
def _is_confish(path: str) -> bool:
    """True when the basename's extension marks the file as likely config."""
    return os.path.splitext(os.path.basename(path))[1] in ALLOWED_UNOWNED_EXTS
|
||||
|
||||
|
||||
def _hint_names(unit: str, pkgs: Set[str]) -> Set[str]:
    """Candidate base names related to *unit*.

    Includes: the unit name without ".service", its template prefix
    (before "@"), the associated packages, and the first dotted component
    of each of those.  Empty strings are dropped.
    """
    stem = unit.removesuffix(".service")
    names: Set[str] = {stem}
    if "@" in stem:
        names.add(stem.split("@", 1)[0])
    names.update(pkgs)
    # Snapshot before mutating, then add "foo" for every "foo.bar".
    names.update(n.split(".", 1)[0] for n in list(names) if "." in n)
    return {n for n in names if n}
|
||||
|
||||
|
||||
def _add_pkgs_from_etc_topdirs(hints: Set[str], topdir_to_pkgs: Dict[str, Set[str]], pkgs: Set[str]) -> None:
    """Extend *pkgs* in place with every package that installs files under
    /etc/<hint>/ for any hint name."""
    for hint in hints:
        pkgs.update(topdir_to_pkgs.get(hint, set()))
|
||||
|
||||
|
||||
def _maybe_add_specific_paths(hints: Set[str]) -> List[str]:
    """Well-known per-name config locations to probe for each hint name."""
    templates = (
        "/etc/default/{0}",
        "/etc/init.d/{0}",
        "/etc/sysctl.d/{0}.conf",
        "/etc/logrotate.d/{0}",
    )
    return [tpl.format(h) for h in hints for tpl in templates]
|
||||
|
||||
|
||||
def _scan_unowned_under_roots(roots: List[str], owned_etc: Set[str], limit: int = MAX_UNOWNED_FILES_PER_ROLE) -> List[str]:
    """Walk each root directory and collect up to *limit* config-looking
    regular files under /etc that dpkg does not own.

    Symlinks, dpkg-owned paths, and non-confish extensions are skipped.
    """
    collected: List[str] = []
    for root in roots:
        if not os.path.isdir(root):
            continue
        for dirpath, _dirs, names in os.walk(root):
            for fname in names:
                if len(collected) >= limit:
                    return collected
                candidate = os.path.join(dirpath, fname)
                if (
                    candidate.startswith("/etc/")
                    and candidate not in owned_etc
                    and os.path.isfile(candidate)
                    and not os.path.islink(candidate)
                    and _is_confish(candidate)
                ):
                    collected.append(candidate)
    return collected
|
||||
|
||||
|
||||
def _topdirs_for_package(pkg: str, pkg_to_etc_paths: Dict[str, List[str]]) -> Set[str]:
    """First-level directory names under /etc that *pkg* installs into."""
    result: Set[str] = set()
    for etc_path in pkg_to_etc_paths.get(pkg, []):
        pieces = etc_path.split("/", 3)
        if len(pieces) >= 3 and pieces[1] == "etc" and pieces[2]:
            result.add(pieces[2])
    return result
|
||||
|
||||
|
||||
def _add_modified_pkg_files(
    pkg: str,
    pkg_to_etc_paths: Dict[str, List[str]],
    conffiles_by_pkg: Dict[str, Dict[str, str]],
    candidates: Dict[str, str],
) -> None:
    """Flag /etc files of *pkg* whose content differs from the packaged
    baseline (dpkg Conffiles md5, else the package's md5sums entry).

    Matches are added to *candidates* with reason "modified_conffile" or
    "modified_packaged_file"; unreadable files are silently skipped.
    """
    conff = conffiles_by_pkg.get(pkg, {})
    md5sums = read_pkg_md5sums(pkg)
    for path in pkg_to_etc_paths.get(pkg, []):
        if not os.path.isfile(path) or os.path.islink(path):
            continue
        if path in conff:
            try:
                current = file_md5(path)
            except OSError:
                continue
            if current != conff[path]:
                candidates.setdefault(path, "modified_conffile")
            continue
        baseline = md5sums.get(path.lstrip("/"))
        if baseline:
            try:
                current = file_md5(path)
            except OSError:
                continue
            if current != baseline:
                candidates.setdefault(path, "modified_packaged_file")


def _materialize_candidates(
    bundle_dir: str,
    role: str,
    candidates: Dict[str, str],
    policy: SecretPolicy,
    managed: List[ManagedFile],
    excluded: List[ExcludedFile],
) -> None:
    """Copy policy-allowed candidate files into the bundle.

    Appends a ManagedFile (with owner/group/mode) per copied file; files
    denied by *policy* or unreadable are appended to *excluded* instead.
    """
    for path, reason in sorted(candidates.items()):
        deny = policy.deny_reason(path)
        if deny:
            excluded.append(ExcludedFile(path=path, reason=deny))
            continue
        try:
            owner, group, mode = stat_triplet(path)
        except OSError:
            excluded.append(ExcludedFile(path=path, reason="unreadable"))
            continue
        src_rel = path.lstrip("/")
        try:
            _copy_into_bundle(bundle_dir, role, path, src_rel)
        except OSError:
            excluded.append(ExcludedFile(path=path, reason="unreadable"))
            continue
        managed.append(ManagedFile(
            path=path,
            src_rel=src_rel,
            owner=owner,
            group=group,
            mode=mode,
            reason=reason,
        ))


def harvest(bundle_dir: str, policy: Optional[SecretPolicy] = None) -> str:
    """Snapshot enabled services, manual packages, and non-system users.

    Writes copied config/SSH files under <bundle_dir>/artifacts/<role>/
    and a state.json describing everything harvested.  Returns the path
    to the written state.json.
    """
    policy = policy or SecretPolicy()
    os.makedirs(bundle_dir, exist_ok=True)

    if hasattr(os, "geteuid") and os.geteuid() != 0:
        print("Warning: not running as root; harvest may miss files or metadata.", flush=True)

    owned_etc, etc_owner_map, topdir_to_pkgs, pkg_to_etc_paths = build_dpkg_etc_index()
    conffiles_by_pkg = parse_status_conffiles()

    # -------------------------
    # Service roles
    # -------------------------
    service_snaps: List[ServiceSnapshot] = []
    for unit in list_enabled_services():
        role = _role_name_from_unit(unit)

        try:
            ui = get_unit_info(unit)
        except UnitQueryError as e:
            # Record the failure as a note-only snapshot and move on.
            service_snaps.append(ServiceSnapshot(
                unit=unit,
                role_name=role,
                packages=[],
                managed_files=[],
                excluded=[],
                notes=[str(e)],
            ))
            continue

        pkgs: Set[str] = set()
        notes: List[str] = []
        excluded: List[ExcludedFile] = []
        managed: List[ManagedFile] = []
        candidates: Dict[str, str] = {}

        # Attribute the unit file and its executables to owning packages.
        if ui.fragment_path:
            p = dpkg_owner(ui.fragment_path)
            if p:
                pkgs.add(p)
        for exe in ui.exec_paths:
            p = dpkg_owner(exe)
            if p:
                pkgs.add(p)

        # Drop-ins under /etc are always candidates.
        for pth in ui.dropin_paths:
            if pth.startswith("/etc/"):
                candidates[pth] = "systemd_dropin"

        # EnvironmentFile= entries ("-" prefix marks optional; globs allowed).
        for ef in ui.env_files:
            ef = ef.lstrip("-")
            if any(ch in ef for ch in "*?["):
                for g in glob.glob(ef):
                    if g.startswith("/etc/") and os.path.isfile(g):
                        candidates[g] = "systemd_envfile"
            elif ef.startswith("/etc/") and os.path.isfile(ef):
                candidates[ef] = "systemd_envfile"

        hints = _hint_names(unit, pkgs)
        _add_pkgs_from_etc_topdirs(hints, topdir_to_pkgs, pkgs)

        # Probe well-known per-name locations; owned ones pull in their
        # package, unowned ones become direct candidates.
        for sp in _maybe_add_specific_paths(hints):
            if not os.path.exists(sp):
                continue
            if sp in etc_owner_map:
                pkgs.add(etc_owner_map[sp])
            else:
                candidates.setdefault(sp, "custom_specific_path")

        # Files shipped by the associated packages but modified locally.
        for pkg in sorted(pkgs):
            _add_modified_pkg_files(pkg, pkg_to_etc_paths, conffiles_by_pkg, candidates)

        # Unowned config under /etc/<hint>[.d]/.
        roots: List[str] = []
        for h in hints:
            roots.extend([f"/etc/{h}", f"/etc/{h}.d"])
        for pth in _scan_unowned_under_roots(roots, owned_etc):
            candidates.setdefault(pth, "custom_unowned")

        if not pkgs and not candidates:
            notes.append("No packages or /etc candidates detected (unexpected for enabled service).")

        _materialize_candidates(bundle_dir, role, candidates, policy, managed, excluded)

        service_snaps.append(ServiceSnapshot(
            unit=unit,
            role_name=role,
            packages=sorted(pkgs),
            managed_files=managed,
            excluded=excluded,
            notes=notes,
        ))

    # -------------------------
    # Manual package roles
    # -------------------------
    manual_pkgs = list_manual_packages()
    pkg_snaps: List[PackageSnapshot] = []

    for pkg in manual_pkgs:
        role = _role_name_from_pkg(pkg)
        notes = []
        excluded = []
        managed = []
        candidates = {}

        _add_modified_pkg_files(pkg, pkg_to_etc_paths, conffiles_by_pkg, candidates)

        # Probe directories and well-known single files derived from the
        # package's /etc top-level directories.
        topdirs = _topdirs_for_package(pkg, pkg_to_etc_paths)
        roots = []
        for td in sorted(topdirs):
            roots.extend([f"/etc/{td}", f"/etc/{td}.d"])
            roots.append(f"/etc/default/{td}")
            roots.append(f"/etc/init.d/{td}")
            roots.append(f"/etc/logrotate.d/{td}")
            roots.append(f"/etc/sysctl.d/{td}.conf")

        for pth in _scan_unowned_under_roots([r for r in roots if os.path.isdir(r)], owned_etc):
            candidates.setdefault(pth, "custom_unowned")

        # Some entries in roots are single files, not directories.
        for r in roots:
            if os.path.isfile(r) and not os.path.islink(r):
                if r not in owned_etc and _is_confish(r):
                    candidates.setdefault(r, "custom_specific_path")

        _materialize_candidates(bundle_dir, role, candidates, policy, managed, excluded)

        if not pkg_to_etc_paths.get(pkg, []) and not managed:
            notes.append("No /etc files detected for this package (may be a meta package).")

        pkg_snaps.append(PackageSnapshot(
            package=pkg,
            role_name=role,
            managed_files=managed,
            excluded=excluded,
            notes=notes,
        ))

    # -------------------------
    # Users role (non-system users)
    # -------------------------
    users_notes: List[str] = []
    users_excluded: List[ExcludedFile] = []
    users_managed: List[ManagedFile] = []
    users_list: List[dict] = []

    try:
        # BUG FIX: the previous revision had the bare undefined name `us`
        # here, so this always raised NameError and users were never
        # enumerated — actually collect the accounts.
        user_records = collect_non_system_users()
    except Exception as e:
        user_records = []
        users_notes.append(f"Failed to enumerate users: {e!r}")

    users_role_name = "users"

    for u in user_records:
        users_list.append({
            "name": u.name,
            "uid": u.uid,
            "gid": u.gid,
            "gecos": u.gecos,
            "home": u.home,
            "shell": u.shell,
            "primary_group": u.primary_group,
            "supplementary_groups": u.supplementary_groups,
        })

        # Copy authorized_keys
        for sf in u.ssh_files:
            deny = policy.deny_reason(sf)
            if deny:
                users_excluded.append(ExcludedFile(path=sf, reason=deny))
                continue

            # Record current owner/group/mode for reference.
            try:
                owner, group, mode = stat_triplet(sf)
            except OSError:
                users_excluded.append(ExcludedFile(path=sf, reason="unreadable"))
                continue

            src_rel = sf.lstrip("/")
            try:
                _copy_into_bundle(bundle_dir, users_role_name, sf, src_rel)
            except OSError:
                users_excluded.append(ExcludedFile(path=sf, reason="unreadable"))
                continue

            reason = "authorized_keys" if sf.endswith("/authorized_keys") else "ssh_public_key"
            users_managed.append(ManagedFile(
                path=sf,
                src_rel=src_rel,
                owner=owner,
                group=group,
                mode=mode,
                reason=reason,
            ))

    users_snapshot = UsersSnapshot(
        role_name=users_role_name,
        users=users_list,
        managed_files=users_managed,
        excluded=users_excluded,
        notes=users_notes,
    )

    state = {
        "host": {"hostname": os.uname().nodename, "os": "debian"},
        "users": asdict(users_snapshot),
        "services": [asdict(s) for s in service_snaps],
        "manual_packages": manual_pkgs,
        "package_roles": [asdict(p) for p in pkg_snaps],
    }

    state_path = os.path.join(bundle_dir, "state.json")
    with open(state_path, "w", encoding="utf-8") as f:
        json.dump(state, f, indent=2, sort_keys=True)
    return state_path
|
||||
392
enroll/manifest.py
Normal file
392
enroll/manifest.py
Normal file
|
|
@ -0,0 +1,392 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
from typing import Any, Dict, List
|
||||
|
||||
|
||||
def _yaml_list(items: List[str], indent: int = 2) -> str:
    """Render *items* as YAML sequence lines at *indent* spaces; an empty
    list renders as an inline "[]"."""
    prefix = " " * indent
    if items:
        return "\n".join(f"{prefix}- {item}" for item in items)
    return f"{prefix}[]"
|
||||
|
||||
|
||||
def _copy_artifacts(bundle_dir: str, role: str, role_dir: str) -> None:
    """Mirror the bundle's artifacts/<role>/ tree into <role_dir>/files/,
    preserving the relative layout; a missing artifacts dir is a no-op."""
    source_root = os.path.join(bundle_dir, "artifacts", role)
    if not os.path.isdir(source_root):
        return
    for dirpath, _dirs, filenames in os.walk(source_root):
        for filename in filenames:
            src = os.path.join(dirpath, filename)
            rel = os.path.relpath(src, source_root)
            dst = os.path.join(role_dir, "files", rel)
            os.makedirs(os.path.dirname(dst), exist_ok=True)
            shutil.copy2(src, dst)
|
||||
|
||||
|
||||
def _write_role_scaffold(role_dir: str) -> None:
    """Create the standard Ansible role directory skeleton under *role_dir*."""
    for sub in ("tasks", "handlers", "defaults", "meta", "files"):
        os.makedirs(os.path.join(role_dir, sub), exist_ok=True)
|
||||
|
||||
|
||||
def _write_playbook(path: str, roles: List[str]) -> None:
    """Write a minimal site playbook at *path* applying *roles* (in order)
    to all hosts with privilege escalation."""
    pb_lines = ["---", "- hosts: all", " become: true", " roles:"]
    for r in roles:
        pb_lines.append(f" - {r}")
    with open(path, "w", encoding="utf-8") as f:
        f.write("\n".join(pb_lines) + "\n")
|
||||
|
||||
|
||||
def manifest(bundle_dir: str, out_dir: str) -> None:
|
||||
state_path = os.path.join(bundle_dir, "state.json")
|
||||
with open(state_path, "r", encoding="utf-8") as f:
|
||||
state = json.load(f)
|
||||
|
||||
services: List[Dict[str, Any]] = state.get("services", [])
|
||||
package_roles: List[Dict[str, Any]] = state.get("package_roles", [])
|
||||
users_snapshot: Dict[str, Any] = state.get("users", {})
|
||||
|
||||
os.makedirs(out_dir, exist_ok=True)
|
||||
roles_root = os.path.join(out_dir, "roles")
|
||||
os.makedirs(roles_root, exist_ok=True)
|
||||
|
||||
manifested_users_roles: List[str] = []
|
||||
manifested_service_roles: List[str] = []
|
||||
manifested_pkg_roles: List[str] = []
|
||||
|
||||
# -------------------------
|
||||
# Users role (non-system users)
|
||||
# -------------------------
|
||||
if users_snapshot and users_snapshot.get("users"):
|
||||
role = users_snapshot.get("role_name", "users")
|
||||
role_dir = os.path.join(roles_root, role)
|
||||
_write_role_scaffold(role_dir)
|
||||
_copy_artifacts(bundle_dir, role, role_dir)
|
||||
|
||||
users = users_snapshot.get("users", [])
|
||||
managed_files = users_snapshot.get("managed_files", [])
|
||||
excluded = users_snapshot.get("excluded", [])
|
||||
notes = users_snapshot.get("notes", [])
|
||||
|
||||
# Build group set from users
|
||||
group_names = set()
|
||||
for u in users:
|
||||
pg = u.get("primary_group")
|
||||
if pg:
|
||||
group_names.add(pg)
|
||||
for g in u.get("supplementary_groups", []) or []:
|
||||
group_names.add(g)
|
||||
group_names = sorted(group_names)
|
||||
|
||||
# defaults: store users list (handy for later), but tasks are explicit for readability
|
||||
defaults = """---
|
||||
users_accounts:
|
||||
""" + ("\n".join([f" - name: {u.get('name')}" for u in users]) + "\n")
|
||||
with open(os.path.join(role_dir, "defaults", "main.yml"), "w", encoding="utf-8") as f:
|
||||
f.write(defaults)
|
||||
|
||||
with open(os.path.join(role_dir, "meta", "main.yml"), "w", encoding="utf-8") as f:
|
||||
f.write("---\ndependencies: []\n")
|
||||
|
||||
# tasks
|
||||
lines: List[str] = ["---"]
|
||||
# groups first (idempotent; safe even if already present)
|
||||
for g in group_names:
|
||||
lines.append(f"- name: Ensure group {g} exists")
|
||||
lines.append(" ansible.builtin.group:")
|
||||
lines.append(f" name: {g}")
|
||||
lines.append(" state: present")
|
||||
|
||||
# users
|
||||
for u in users:
|
||||
name = u["name"]
|
||||
lines.append(f"- name: Ensure user {name} exists")
|
||||
lines.append(" ansible.builtin.user:")
|
||||
lines.append(f" name: {name}")
|
||||
lines.append(f" uid: {u.get('uid')}")
|
||||
lines.append(f" group: {u.get('primary_group')}")
|
||||
supp = u.get("supplementary_groups") or []
|
||||
if supp:
|
||||
lines.append(" groups: " + ",".join(supp))
|
||||
lines.append(" append: true")
|
||||
lines.append(f" home: {u.get('home')}")
|
||||
lines.append(" create_home: true")
|
||||
if u.get("shell"):
|
||||
lines.append(f" shell: {u.get('shell')}")
|
||||
if u.get("gecos"):
|
||||
# quote to avoid YAML surprises
|
||||
gec = u.get("gecos").replace('"', '\"')
|
||||
lines.append(f' comment: "{gec}"')
|
||||
lines.append(" password_lock: true")
|
||||
lines.append(" state: present")
|
||||
|
||||
# Ensure ~/.ssh
|
||||
home = u.get("home") or f"/home/{name}"
|
||||
sshdir = home.rstrip("/") + "/.ssh"
|
||||
lines.append(f"- name: Ensure {name} .ssh directory exists")
|
||||
lines.append(" ansible.builtin.file:")
|
||||
lines.append(f" path: {sshdir}")
|
||||
lines.append(" state: directory")
|
||||
lines.append(f" owner: {name}")
|
||||
lines.append(f" group: {u.get('primary_group')}")
|
||||
lines.append(" mode: '0700'")
|
||||
|
||||
# Copy harvested SSH public material (authorized_keys + *.pub)
|
||||
for mf in managed_files:
|
||||
dest = mf["path"]
|
||||
src = mf["src_rel"]
|
||||
# Determine file owner from dest path: /home/<user>/...
|
||||
owner = None
|
||||
for u in users:
|
||||
if dest.startswith((u.get("home") or "").rstrip("/") + "/"):
|
||||
owner = u["name"]
|
||||
group = u.get("primary_group")
|
||||
break
|
||||
if owner is None:
|
||||
# fallback: try /home/<user>/
|
||||
parts = dest.split("/")
|
||||
owner = parts[2] if len(parts) > 2 and parts[1] == "home" else "root"
|
||||
group = owner
|
||||
|
||||
mode = "0600" if mf.get("reason") == "authorized_keys" else "0644"
|
||||
lines.append(f"- name: Deploy {dest}")
|
||||
lines.append(" ansible.builtin.copy:")
|
||||
lines.append(f" src: {src}")
|
||||
lines.append(f" dest: {dest}")
|
||||
lines.append(f" owner: {owner}")
|
||||
lines.append(f" group: {group}")
|
||||
lines.append(f" mode: '{mode}'")
|
||||
|
||||
tasks = "\n".join(lines).rstrip() + "\n"
|
||||
with open(os.path.join(role_dir, "tasks", "main.yml"), "w", encoding="utf-8") as f:
|
||||
f.write(tasks)
|
||||
|
||||
# handlers (none needed)
|
||||
with open(os.path.join(role_dir, "handlers", "main.yml"), "w", encoding="utf-8") as f:
|
||||
f.write("---\n")
|
||||
|
||||
readme = """# users
|
||||
|
||||
Generated non-system user accounts and SSH public material.
|
||||
|
||||
## Users
|
||||
""" + ("\n".join([f"- {u.get('name')} (uid {u.get('uid')})" for u in users]) or "- (none)") + """\n
|
||||
## Included SSH files
|
||||
""" + ("\n".join([f"- {mf.get('path')} ({mf.get('reason')})" for mf in managed_files]) or "- (none)") + """\n
|
||||
## Excluded
|
||||
""" + ("\n".join([f"- {e.get('path')} ({e.get('reason')})" for e in excluded]) or "- (none)") + """\n
|
||||
## Notes
|
||||
""" + ("\n".join([f"- {n}" for n in notes]) or "- (none)") + """\n"""
|
||||
with open(os.path.join(role_dir, "README.md"), "w", encoding="utf-8") as f:
|
||||
f.write(readme)
|
||||
|
||||
manifested_users_roles.append(role)
|
||||
|
||||
# -------------------------
|
||||
# Service roles
|
||||
# -------------------------
|
||||
for svc in services:
|
||||
role = svc["role_name"]
|
||||
unit = svc["unit"]
|
||||
pkgs = svc["packages"]
|
||||
managed_files = svc["managed_files"]
|
||||
|
||||
role_dir = os.path.join(roles_root, role)
|
||||
_write_role_scaffold(role_dir)
|
||||
_copy_artifacts(bundle_dir, role, role_dir)
|
||||
|
||||
var_prefix = role
|
||||
|
||||
defaults = f"""---
|
||||
{var_prefix}_packages:
|
||||
{_yaml_list(pkgs, indent=2)}
|
||||
"""
|
||||
with open(os.path.join(role_dir, "defaults", "main.yml"), "w", encoding="utf-8") as f:
|
||||
f.write(defaults)
|
||||
|
||||
handlers = """---
|
||||
- name: systemd daemon-reload
|
||||
ansible.builtin.systemd:
|
||||
daemon_reload: true
|
||||
|
||||
- name: Restart service
|
||||
ansible.builtin.service:
|
||||
name: "{{ unit_name }}"
|
||||
state: restarted
|
||||
"""
|
||||
with open(os.path.join(role_dir, "handlers", "main.yml"), "w", encoding="utf-8") as f:
|
||||
f.write(handlers)
|
||||
|
||||
systemd_files = [mf for mf in managed_files if mf["path"].startswith("/etc/systemd/system/")]
|
||||
other_files = [mf for mf in managed_files if mf not in systemd_files]
|
||||
|
||||
def copy_task(mf: Dict[str, Any], notify: str | None) -> str:
    """Render an `ansible.builtin.copy` task (YAML fragment) for one managed file.

    *mf* carries path/src_rel/owner/group/mode; when *notify* is given it is
    appended as a handler notification line.
    """
    lines = [
        f"- name: Deploy {mf['path']}",
        "  ansible.builtin.copy:",
        f"    src: \"{mf['src_rel']}\"",
        f"    dest: \"{mf['path']}\"",
        f"    owner: \"{mf['owner']}\"",
        f"    group: \"{mf['group']}\"",
        f"    mode: \"{mf['mode']}\"",
    ]
    if notify:
        lines.append(f"  notify: {notify}")
    return "\n".join(lines) + "\n"
|
||||
|
||||
task_parts: List[str] = []
|
||||
task_parts.append(f"""---
|
||||
- name: Set unit name
|
||||
ansible.builtin.set_fact:
|
||||
unit_name: "{unit}"
|
||||
|
||||
- name: Install packages for {role}
|
||||
ansible.builtin.apt:
|
||||
name: "{{{{ {var_prefix}_packages }}}}"
|
||||
state: present
|
||||
update_cache: true
|
||||
when: {var_prefix}_packages | length > 0
|
||||
""")
|
||||
|
||||
if systemd_files:
|
||||
for mf in systemd_files:
|
||||
task_parts.append(copy_task(mf, "[systemd daemon-reload]"))
|
||||
task_parts.append("""- name: Reload systemd to pick up unit changes
|
||||
ansible.builtin.meta: flush_handlers
|
||||
""")
|
||||
|
||||
for mf in other_files:
|
||||
task_parts.append(copy_task(mf, "[Restart service]"))
|
||||
|
||||
task_parts.append(f"""- name: Ensure {unit} is enabled and running
|
||||
ansible.builtin.service:
|
||||
name: "{{{{ unit_name }}}}"
|
||||
enabled: true
|
||||
state: started
|
||||
""")
|
||||
|
||||
tasks = "\n".join(task_parts).rstrip() + "\n"
|
||||
with open(os.path.join(role_dir, "tasks", "main.yml"), "w", encoding="utf-8") as f:
|
||||
f.write(tasks)
|
||||
|
||||
with open(os.path.join(role_dir, "meta", "main.yml"), "w", encoding="utf-8") as f:
|
||||
f.write("---\ndependencies: []\n")
|
||||
|
||||
excluded = svc.get("excluded", [])
|
||||
notes = svc.get("notes", [])
|
||||
readme = f"""# {role}
|
||||
|
||||
Generated from `{unit}`.
|
||||
|
||||
## Packages
|
||||
{os.linesep.join("- " + p for p in pkgs) or "- (none detected)"}
|
||||
|
||||
## Managed files
|
||||
{os.linesep.join("- " + mf["path"] + " (" + mf["reason"] + ")" for mf in managed_files) or "- (none)"}
|
||||
|
||||
## Excluded (possible secrets / unsafe)
|
||||
{os.linesep.join("- " + e["path"] + " (" + e["reason"] + ")" for e in excluded) or "- (none)"}
|
||||
|
||||
## Notes
|
||||
{os.linesep.join("- " + n for n in notes) or "- (none)"}
|
||||
"""
|
||||
with open(os.path.join(role_dir, "README.md"), "w", encoding="utf-8") as f:
|
||||
f.write(readme)
|
||||
|
||||
manifested_service_roles.append(role)
|
||||
|
||||
# -------------------------
|
||||
# Manual package roles
|
||||
# -------------------------
|
||||
for pr in package_roles:
|
||||
role = pr["role_name"]
|
||||
pkg = pr["package"]
|
||||
managed_files = pr["managed_files"]
|
||||
|
||||
role_dir = os.path.join(roles_root, role)
|
||||
_write_role_scaffold(role_dir)
|
||||
_copy_artifacts(bundle_dir, role, role_dir)
|
||||
|
||||
var_prefix = role
|
||||
|
||||
defaults = f"""---
|
||||
{var_prefix}_packages:
|
||||
- {pkg}
|
||||
"""
|
||||
with open(os.path.join(role_dir, "defaults", "main.yml"), "w", encoding="utf-8") as f:
|
||||
f.write(defaults)
|
||||
|
||||
handlers = """---
|
||||
- name: systemd daemon-reload
|
||||
ansible.builtin.systemd:
|
||||
daemon_reload: true
|
||||
"""
|
||||
with open(os.path.join(role_dir, "handlers", "main.yml"), "w", encoding="utf-8") as f:
|
||||
f.write(handlers)
|
||||
|
||||
systemd_files = [mf for mf in managed_files if mf["path"].startswith("/etc/systemd/system/")]
|
||||
other_files = [mf for mf in managed_files if mf not in systemd_files]
|
||||
|
||||
def copy_task(mf: Dict[str, Any], notify: str | None) -> str:
    """Render an `ansible.builtin.copy` task (YAML fragment) for one managed file.

    *mf* carries path/src_rel/owner/group/mode; when *notify* is given it is
    appended as a handler notification line.
    """
    lines = [
        f"- name: Deploy {mf['path']}",
        "  ansible.builtin.copy:",
        f"    src: \"{mf['src_rel']}\"",
        f"    dest: \"{mf['path']}\"",
        f"    owner: \"{mf['owner']}\"",
        f"    group: \"{mf['group']}\"",
        f"    mode: \"{mf['mode']}\"",
    ]
    if notify:
        lines.append(f"  notify: {notify}")
    return "\n".join(lines) + "\n"
|
||||
|
||||
task_parts: List[str] = []
|
||||
task_parts.append(f"""---
|
||||
- name: Install manual package {pkg}
|
||||
ansible.builtin.apt:
|
||||
name: "{{{{ {var_prefix}_packages }}}}"
|
||||
state: present
|
||||
update_cache: true
|
||||
""")
|
||||
|
||||
if systemd_files:
|
||||
for mf in systemd_files:
|
||||
task_parts.append(copy_task(mf, "[systemd daemon-reload]"))
|
||||
task_parts.append("""- name: Reload systemd to pick up unit changes
|
||||
ansible.builtin.meta: flush_handlers
|
||||
""")
|
||||
|
||||
for mf in other_files:
|
||||
task_parts.append(copy_task(mf, None))
|
||||
|
||||
tasks = "\n".join(task_parts).rstrip() + "\n"
|
||||
with open(os.path.join(role_dir, "tasks", "main.yml"), "w", encoding="utf-8") as f:
|
||||
f.write(tasks)
|
||||
|
||||
with open(os.path.join(role_dir, "meta", "main.yml"), "w", encoding="utf-8") as f:
|
||||
f.write("---\ndependencies: []\n")
|
||||
|
||||
excluded = pr.get("excluded", [])
|
||||
notes = pr.get("notes", [])
|
||||
readme = f"""# {role}
|
||||
|
||||
Generated for manual package `{pkg}`.
|
||||
|
||||
## Managed files
|
||||
{os.linesep.join("- " + mf["path"] + " (" + mf["reason"] + ")" for mf in managed_files) or "- (none)"}
|
||||
|
||||
## Excluded (possible secrets / unsafe)
|
||||
{os.linesep.join("- " + e["path"] + " (" + e["reason"] + ")" for e in excluded) or "- (none)"}
|
||||
|
||||
## Notes
|
||||
{os.linesep.join("- " + n for n in notes) or "- (none)"}
|
||||
|
||||
> Note: package roles do not attempt to restart or enable services automatically.
|
||||
"""
|
||||
with open(os.path.join(role_dir, "README.md"), "w", encoding="utf-8") as f:
|
||||
f.write(readme)
|
||||
|
||||
manifested_pkg_roles.append(role)
|
||||
|
||||
# Playbooks
|
||||
_write_playbook(os.path.join(out_dir, "playbook.yml"), manifested_users_roles + manifested_pkg_roles + manifested_service_roles)
|
||||
65
enroll/secrets.py
Normal file
65
enroll/secrets.py
Normal file
|
|
@ -0,0 +1,65 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import fnmatch
|
||||
import os
|
||||
import re
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional
|
||||
|
||||
|
||||
# Glob patterns of paths that must never be collected: TLS/SSH private key
# material, shadow password databases, and Let's Encrypt state.
DEFAULT_DENY_GLOBS = [
    "/etc/ssl/private/*",
    "/etc/ssh/ssh_host_*",
    "/etc/shadow",
    "/etc/gshadow",
    "/etc/*shadow",
    "/etc/letsencrypt/*",
]

# Byte-level regexes that flag file content as sensitive when found in the
# sampled prefix of a file (see SecretPolicy.deny_reason).
SENSITIVE_CONTENT_PATTERNS = [
    # PEM private-key headers (RSA / EC / OpenSSH or unqualified).
    re.compile(br"-----BEGIN (RSA |EC |OPENSSH |)PRIVATE KEY-----"),
    # "password =" style assignments, case-insensitive.
    re.compile(br"(?i)\bpassword\s*="),
    # Common credential keywords anywhere in the sample; deliberately broad,
    # so expect false positives (callers treat matches as "exclude, review").
    re.compile(br"(?i)\b(pass|passwd|token|secret|api[_-]?key)\b"),
]
|
||||
|
||||
|
||||
@dataclass
class SecretPolicy:
    """Policy deciding whether a file is safe to collect into a bundle.

    `deny_reason` returns a short machine-readable reason string when a path
    must be excluded, or ``None`` when the file looks safe.
    """

    # Glob patterns of forbidden paths.  ``None`` means "use the module
    # defaults"; resolved to a fresh list in __post_init__ so instances never
    # share or mutate DEFAULT_DENY_GLOBS.  (Was annotated ``list[str]`` with a
    # ``None`` default, which the type did not admit.)
    deny_globs: list[str] | None = None
    # Files larger than this are excluded outright.
    max_file_bytes: int = 256_000
    # How many leading bytes of a file are scanned for sensitive content.
    sample_bytes: int = 64_000

    def __post_init__(self) -> None:
        if self.deny_globs is None:
            self.deny_globs = list(DEFAULT_DENY_GLOBS)

    def deny_reason(self, path: str) -> Optional[str]:
        """Return why *path* must be excluded, or ``None`` if it may be kept.

        Possible reasons: ``denied_path``, ``unreadable``,
        ``not_regular_file``, ``too_large``, ``binary_like``,
        ``sensitive_content``.
        """
        for g in self.deny_globs:
            if fnmatch.fnmatch(path, g):
                return "denied_path"

        try:
            st = os.stat(path, follow_symlinks=True)
        except OSError:
            return "unreadable"

        # Check regularity before size so a large directory/FIFO/symlink is
        # reported as "not_regular_file" rather than the misleading
        # "too_large" (the original checked size first).
        if not os.path.isfile(path) or os.path.islink(path):
            return "not_regular_file"

        if st.st_size > self.max_file_bytes:
            return "too_large"

        try:
            with open(path, "rb") as f:
                data = f.read(min(self.sample_bytes, st.st_size))
        except OSError:
            return "unreadable"

        # NUL bytes are a cheap heuristic for binary data.
        if b"\x00" in data:
            return "binary_like"

        for pat in SENSITIVE_CONTENT_PATTERNS:
            if pat.search(data):
                return "sensitive_content"

        return None
|
||||
90
enroll/systemd.py
Normal file
90
enroll/systemd.py
Normal file
|
|
@ -0,0 +1,90 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
import subprocess
|
||||
from dataclasses import dataclass
|
||||
from typing import List, Optional
|
||||
|
||||
|
||||
@dataclass
class UnitInfo:
    """File locations and executables that systemd reports for one unit.

    Populated by `get_unit_info` from `systemctl show` properties.
    """

    # Unit name as passed to systemctl (e.g. "nginx.service").
    name: str
    # Main unit file path (FragmentPath), or None when systemd reports none.
    fragment_path: Optional[str]
    # Drop-in override file paths (DropInPaths), sorted and de-duplicated.
    dropin_paths: List[str]
    # EnvironmentFiles paths with any leading "-" (optional marker) stripped.
    env_files: List[str]
    exec_paths: List[str]  # binaries from ExecStart "path=" parts
|
||||
|
||||
|
||||
class UnitQueryError(RuntimeError):
    """Raised when `systemctl show` fails for a unit.

    Exposes the unit name and the (stripped) stderr text for callers that
    want to report or log the failure themselves.
    """

    def __init__(self, unit: str, stderr: str):
        cleaned = (stderr or "").strip()
        self.unit = unit
        self.stderr = cleaned
        super().__init__(f"systemctl show failed for {unit}: {cleaned}")
|
||||
|
||||
|
||||
def _run(cmd: list[str]) -> str:
    """Execute *cmd*, returning its stdout; raise RuntimeError on nonzero exit."""
    proc = subprocess.run(cmd, check=False, text=True, capture_output=True)
    if proc.returncode == 0:
        return proc.stdout
    raise RuntimeError(f"Command failed: {cmd}\n{proc.stderr}")
|
||||
|
||||
|
||||
def list_enabled_services() -> List[str]:
    """Return the sorted, de-duplicated names of enabled ``.service`` units.

    Template units (e.g. ``getty@.service``) are skipped: systemd lists them
    as enabled, but they are not valid arguments to ``systemctl show``.

    Raises RuntimeError (via _run) when systemctl itself fails.
    """
    out = _run(["systemctl", "list-unit-files", "--type=service", "--state=enabled", "--no-legend"])
    units: set[str] = set()
    for line in out.splitlines():
        parts = line.split()
        if not parts:
            continue
        unit = parts[0].strip()
        # The "@.service" containment test alone covers the template case;
        # the original extra endswith("@.service") check was redundant
        # (any string ending in "@.service" also contains it).
        if not unit.endswith(".service") or "@.service" in unit:
            continue
        units.add(unit)
    return sorted(units)
|
||||
|
||||
|
||||
def get_unit_info(unit: str) -> UnitInfo:
    """Query ``systemctl show`` for *unit* and parse the properties we need.

    Collects the unit's main file (FragmentPath), drop-in override files,
    environment files (with the optional "-" prefix stripped), and the
    binaries referenced by ExecStart ``path=`` parts.

    Raises UnitQueryError when systemctl exits nonzero.
    """
    cmd = [
        "systemctl", "show", unit,
        "-p", "FragmentPath",
        "-p", "DropInPaths",
        "-p", "EnvironmentFiles",
        "-p", "ExecStart",
        "--no-page",
    ]
    proc = subprocess.run(cmd, check=False, text=True, capture_output=True)
    if proc.returncode != 0:
        raise UnitQueryError(unit, proc.stderr)

    # "Key=Value" lines -> dict; values are whitespace-stripped.
    props: dict[str, str] = {}
    for raw in (proc.stdout or "").splitlines():
        key, sep, value = raw.partition("=")
        if sep:
            props[key] = value.strip()

    dropins = [d for d in (props.get("DropInPaths", "") or "").split() if d]

    # EnvironmentFiles entries may carry a leading "-" meaning "optional";
    # strip it and drop empties.
    env_files: List[str] = []
    for token in (props.get("EnvironmentFiles", "") or "").split():
        cleaned = token.lstrip("-")
        if cleaned:
            env_files.append(cleaned)

    exec_paths = re.findall(r"path=([^ ;}]+)", props.get("ExecStart", "") or "")

    return UnitInfo(
        name=unit,
        fragment_path=props.get("FragmentPath") or None,
        dropin_paths=sorted(set(dropins)),
        env_files=sorted(set(env_files)),
        exec_paths=sorted(set(exec_paths)),
    )
|
||||
Loading…
Add table
Add a link
Reference in a new issue