Compare commits
33 commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 5f6b0f49d9 | |||
| 1856e3a79d | |||
| 478b0e1b9d | |||
| f5eaac9f75 | |||
| 5754ef1aad | |||
| d172d848c4 | |||
| f84d795c49 | |||
| 95b784c1a0 | |||
| ebd30247d1 | |||
| 9a249cc973 | |||
| 9749190cd8 | |||
| ca3d958a96 | |||
| 8be821c494 | |||
| 8daed96b7c | |||
| e0ef5ede98 | |||
| 025f00f924 | |||
| 66d032d981 | |||
| 45e0d9bb16 | |||
| 9f30c56e8a | |||
| 7a9a0abcd1 | |||
| aea58c8684 | |||
| ca4cf00e84 | |||
| d3fdfc9ef7 | |||
| bcf3dd7422 | |||
| 91ec1b8791 | |||
| b5e32770a3 | |||
| e04b158c39 | |||
| a1433d645f | |||
| e68ec0bffc | |||
| 24cedc8c8d | |||
| c9003d589d | |||
| 59674d4660 | |||
| 56d0148614 |
39 changed files with 6264 additions and 643 deletions
|
|
@ -21,6 +21,7 @@ jobs:
|
|||
python3-poetry-core \
|
||||
python3-yaml \
|
||||
python3-paramiko \
|
||||
python3-jsonschema \
|
||||
rsync \
|
||||
ca-certificates
|
||||
|
||||
|
|
|
|||
|
|
@ -15,7 +15,7 @@ jobs:
|
|||
run: |
|
||||
apt-get update
|
||||
DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
|
||||
ansible ansible-lint python3-venv pipx systemctl python3-apt
|
||||
ansible ansible-lint python3-venv pipx systemctl python3-apt jq python3-jsonschema
|
||||
|
||||
- name: Install Poetry
|
||||
run: |
|
||||
|
|
|
|||
37
CHANGELOG.md
37
CHANGELOG.md
|
|
@ -1,3 +1,34 @@
|
|||
# 0.4.3
|
||||
|
||||
* Add support for AddressFamily and ConnectTimeout in the .ssh/config when using `--remote-ssh-config`.
|
||||
* Update dependencies
|
||||
|
||||
# 0.4.2
|
||||
|
||||
* Support `--remote-ssh-config [path-to-ssh-config]` as an argument in case extra params are required beyond `--remote-port` or `--remote-user`. Note: `--remote-host` must still be set, but it can be an 'alias' represented by the 'Host' value in the ssh config.
|
||||
|
||||
# 0.4.1
|
||||
|
||||
* Add interactive output when 'enroll diff --enforce' is invoking Ansible.
|
||||
|
||||
# 0.4.0
|
||||
|
||||
* Introduce `enroll validate` - a tool to validate a harvest against the state schema, or check for missing or orphaned obsolete artifacts in a harvest.
|
||||
* Attempt to generate Jinja2 templates of systemd unit files and Postfix main.cf (now that JinjaTurtle supports it)
|
||||
* Update pynacl dependency to resolve CVE-2025-69277
|
||||
* Add `--exclude-path` to `enroll diff` command, so that you can ignore certain churn from the diff (stuff you still wanted to harvest as a baseline but don't care if it changes day to day)
|
||||
* Add `--ignore-package-versions` to `enroll diff` command, to optionally ignore package upgrades (e.g due to patching) from the diff.
|
||||
* Add tags to the playbook for each role, to allow easier targeting of specific roles during play later.
|
||||
* Add `--enforce` mode to `enroll diff`. If there is diff detected between the two harvests, and it can enforce restoring the state from the older harvest, it will manifest the state and apply it with ansible. Only the specific roles that had diffed will be applied (via the new tags capability)
|
||||
|
||||
# 0.3.0
|
||||
|
||||
* Introduce `enroll explain` - a tool to analyze and explain what's in (or not in) a harvest and why.
|
||||
* Centralise the cron and logrotate stuff into their respective roles, we had a bit of duplication between roles based on harvest discovery.
|
||||
* Capture other files in the user's home directory such as `.bashrc`, `.bash_aliases`, `.profile`, if these files differ from the `/etc/skel` defaults
|
||||
* Ignore files that end with a tilde or - (probably backup files generated by editors or shadow file changes)
|
||||
* Manage certain symlinks e.g for apache2/nginx sites-enabled and so on
|
||||
|
||||
# 0.2.3
|
||||
|
||||
* Introduce --ask-become-pass or -K to support password-required sudo on remote hosts, just like Ansible. It will also fall back to this prompt if a password is required but the arg wasn't passed in.
|
||||
|
|
@ -9,7 +40,7 @@
|
|||
|
||||
# 0.2.1
|
||||
|
||||
* Don't accidentally add extra_paths role to usr_local_custom list, resulting in extra_paths appearing twice in manifested playbook
|
||||
* Don't accidentally add `extra_paths` role to `usr_local_custom` list, resulting in `extra_paths` appearing twice in manifested playbook
|
||||
* Ensure directories in the tree of anything included with --include are defined in the state and manifest so we make dirs before we try to create files
|
||||
|
||||
# 0.2.0
|
||||
|
|
@ -30,8 +61,8 @@
|
|||
# 0.1.5
|
||||
|
||||
* Consolidate logrotate and cron files into their main service/package roles if they exist.
|
||||
* Standardise on MAX_FILES_CAP in one place
|
||||
* Manage apt stuff in its own role, not in etc_custom
|
||||
* Standardise on `MAX_FILES_CAP` in one place
|
||||
* Manage apt stuff in its own role, not in `etc_custom`
|
||||
|
||||
# 0.1.4
|
||||
|
||||
|
|
|
|||
|
|
@ -26,6 +26,7 @@ RUN set -eux; \
|
|||
python3-poetry-core \
|
||||
python3-yaml \
|
||||
python3-paramiko \
|
||||
python3-jsonschema \
|
||||
rsync \
|
||||
ca-certificates \
|
||||
; \
|
||||
|
|
|
|||
|
|
@ -22,6 +22,7 @@ RUN set -eux; \
|
|||
python3-rpm-macros \
|
||||
python3-yaml \
|
||||
python3-paramiko \
|
||||
python3-jsonschema \
|
||||
openssl-devel \
|
||||
python3-poetry-core ; \
|
||||
dnf -y clean all
|
||||
|
|
@ -34,25 +35,8 @@ set -euo pipefail
|
|||
SRC="${SRC:-/src}"
|
||||
WORKROOT="${WORKROOT:-/work}"
|
||||
OUT="${OUT:-/out}"
|
||||
DEPS_DIR="${DEPS_DIR:-/deps}"
|
||||
VERSION_ID="$(grep VERSION_ID /etc/os-release | cut -d= -f2)"
|
||||
echo "Version ID is ${VERSION_ID}"
|
||||
# Install jinjaturtle from local rpm
|
||||
# Filter out .src.rpm and debug* subpackages if present.
|
||||
if [ -d "${DEPS_DIR}" ] && compgen -G "${DEPS_DIR}/*.rpm" > /dev/null; then
|
||||
mapfile -t rpms < <(ls -1 "${DEPS_DIR}"/*.rpm | grep -vE '(\.src\.rpm$|-(debuginfo|debugsource)-)' | grep "${VERSION_ID}")
|
||||
if [ "${#rpms[@]}" -gt 0 ]; then
|
||||
echo "Installing dependency RPMs from ${DEPS_DIR}:"
|
||||
printf ' - %s\n' "${rpms[@]}"
|
||||
dnf -y install "${rpms[@]}"
|
||||
dnf -y clean all
|
||||
else
|
||||
echo "NOTE: Only src/debug RPMs found in ${DEPS_DIR}; nothing installed." >&2
|
||||
fi
|
||||
else
|
||||
echo "NOTE: No RPMs found in ${DEPS_DIR}. If the build fails with missing python3dist(jinjaturtle)," >&2
|
||||
echo " mount your jinjaturtle RPM directory as -v <dir>:/deps" >&2
|
||||
fi
|
||||
|
||||
mkdir -p "${WORKROOT}" "${OUT}"
|
||||
WORK="${WORKROOT}/src"
|
||||
|
|
|
|||
202
README.md
202
README.md
|
|
@ -11,8 +11,9 @@
|
|||
- Captures config that has **changed from packaged defaults** where possible (e.g dpkg conffile hashes + package md5sums when available).
|
||||
- Also captures **service-relevant custom/unowned files** under `/etc/<service>/...` (e.g. drop-in config includes).
|
||||
- Defensively excludes likely secrets (path denylist + content sniff + size caps).
|
||||
- Captures non-system users and their SSH public keys.
|
||||
- Captures non-system users and their SSH public keys and any .bashrc or .bash_aliases or .profile files that deviate from the skel defaults.
|
||||
- Captures miscellaneous `/etc` files it can't attribute to a package and installs them in an `etc_custom` role.
|
||||
- Captures symlinks in common applications that rely on them, e.g apache2/nginx 'sites-enabled'
|
||||
- Ditto for /usr/local/bin (for non-binary files) and /usr/local/etc
|
||||
- Avoids trying to start systemd services that were detected as inactive during harvest.
|
||||
|
||||
|
|
@ -73,7 +74,7 @@ Harvest state about a host and write a harvest bundle.
|
|||
|
||||
**Common flags**
|
||||
- Remote harvesting:
|
||||
- `--remote-host`, `--remote-user`, `--remote-port`
|
||||
- `--remote-host`, `--remote-user`, `--remote-port`, `--remote-ssh-config`
|
||||
- `--no-sudo` (if you don't want/need sudo)
|
||||
- Sensitive-data behaviour:
|
||||
- default: tries to avoid likely secrets
|
||||
|
|
@ -88,6 +89,8 @@ Harvest state about a host and write a harvest bundle.
|
|||
- glob (default): supports `*` and `**` (prefix with `glob:` to force)
|
||||
- regex: prefix with `re:` or `regex:`
|
||||
- Precedence: excludes win over includes.
|
||||
* Using remote mode and sudo requires password?
|
||||
- `--ask-become-pass` (or `-K`) will prompt for the password. If you forget, and remote requires password for sudo, it'll still fall back to prompting for a password, but will be a bit slower to do so.
|
||||
|
||||
---
|
||||
|
||||
|
|
@ -105,6 +108,17 @@ Generate Ansible output from an existing harvest bundle.
|
|||
**Common flags**
|
||||
- `--fqdn <host>`: enables **multi-site** output style
|
||||
|
||||
**Role tags**
|
||||
Generated playbooks tag each role so you can target just the parts you need:
|
||||
|
||||
- Tag format: `role_<role_name>` (e.g. `role_services`, `role_users`)
|
||||
- Fallback/safe tag: `role_other`
|
||||
|
||||
Example:
|
||||
```bash
|
||||
ansible-playbook -i "localhost," -c local /tmp/enroll-ansible/playbook.yml --tags role_services,role_users
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### `enroll single-shot`
|
||||
|
|
@ -128,6 +142,26 @@ Compare two harvest bundles and report what changed.
|
|||
**Inputs**
|
||||
- `--old <harvest>` and `--new <harvest>` (directories or `state.json` paths)
|
||||
- `--sops` when comparing SOPS-encrypted harvest bundles
|
||||
- `--exclude-path <PATTERN>` (repeatable) to ignore file/dir drift under matching paths (same pattern syntax as harvest)
|
||||
- `--ignore-package-versions` to ignore package version-only drift (upgrades/downgrades)
|
||||
- `--enforce` to apply the **old** harvest state locally (requires `ansible-playbook` on `PATH`)
|
||||
|
||||
**Noise suppression**
|
||||
- `--exclude-path` is useful for things that change often but you still want in the harvest baseline (e.g. `/var/anacron`).
|
||||
- `--ignore-package-versions` keeps routine upgrades from alerting; package add/remove drift is still reported.
|
||||
|
||||
**Enforcement (`--enforce`)**
|
||||
If a diff exists and `ansible-playbook` is available, Enroll will:
|
||||
1) generate a manifest from the **old** harvest into a temporary directory
|
||||
2) run `ansible-playbook -i localhost, -c local <tmp>/playbook.yml` (often with `--tags role_<...>` to limit runtime)
|
||||
3) record in the diff report that the old harvest was enforced
|
||||
|
||||
Enforcement is intentionally “safe”:
|
||||
- reinstalls packages that were removed (`state: present`), but does **not** attempt downgrades/pinning
|
||||
- restores users, files (contents + permissions/ownership), and service enable/start state
|
||||
|
||||
If `ansible-playbook` is not on `PATH`, Enroll returns an error and does not enforce.
|
||||
|
||||
|
||||
**Output formats**
|
||||
- `--format json` (default for webhooks)
|
||||
|
|
@ -143,6 +177,72 @@ Compare two harvest bundles and report what changed.
|
|||
|
||||
---
|
||||
|
||||
### `enroll explain`
|
||||
Analyze a harvest and provide user-friendly explanations for what's in it and why.
|
||||
|
||||
This may also explain why something *wasn't* included (e.g a binary file, a file that was too large, unreadable due to permissions, or looked like a log file/secret).
|
||||
|
||||
Provide either the path to the harvest or the path to its state.json. It can also handle SOPS-encrypted harvests.
|
||||
|
||||
Output can be provided in plaintext or json.
|
||||
|
||||
---
|
||||
|
||||
### `enroll validate`
|
||||
|
||||
Validates a harvest by checking:
|
||||
|
||||
* state.json exists and is valid JSON
|
||||
* state.json validates against a JSON Schema (by default the vendored one)
|
||||
* Every `managed_file` entry has a corresponding artifact at: `artifacts/<role_name>/<src_rel>`
|
||||
* That there are no **unreferenced files** sitting in `artifacts/` that aren't in the state.
|
||||
|
||||
#### Schema location + overrides
|
||||
|
||||
The master schema lives at: `enroll/schema/state.schema.json`.
|
||||
|
||||
You can override with a local file or URL:
|
||||
|
||||
```
|
||||
enroll validate /path/to/harvest --schema ./state.schema.json
|
||||
enroll validate /path/to/harvest --schema https://enroll.sh/schema/state.schema.json
|
||||
```
|
||||
|
||||
Or skip schema checks (still does artifact consistency checks):
|
||||
|
||||
```
|
||||
enroll validate /path/to/harvest --no-schema
|
||||
```
|
||||
|
||||
#### CLI usage examples
|
||||
|
||||
Validate a local harvest:
|
||||
|
||||
```
|
||||
enroll validate ./harvest
|
||||
```
|
||||
|
||||
Validate a harvest tarball or a sops bundle:
|
||||
|
||||
```
|
||||
enroll validate ./harvest.tar.gz
|
||||
enroll validate ./harvest.sops --sops
|
||||
```
|
||||
|
||||
JSON output + write to file:
|
||||
|
||||
```
|
||||
enroll validate ./harvest --format json --out validate.json
|
||||
```
|
||||
|
||||
Return exit code 1 for any warnings, not just errors (useful for CI):
|
||||
|
||||
```
|
||||
enroll validate ./harvest --fail-on-warnings
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Sensitive data
|
||||
|
||||
By default, `enroll` does **not** assume how you handle secrets in Ansible. It will attempt to avoid harvesting likely sensitive data (private keys, passwords, tokens, etc.). This can mean it skips some config files you may ultimately want to manage.
|
||||
|
|
@ -255,6 +355,14 @@ enroll harvest --out /tmp/enroll-harvest
|
|||
enroll harvest --remote-host myhost.example.com --remote-user myuser --out /tmp/enroll-harvest
|
||||
```
|
||||
|
||||
### Remote harvest over SSH, where the SSH configuration is in ~/.ssh/config (e.g a different SSH key)
|
||||
|
||||
Note: you must still pass `--remote-host`, but in this case, its value can be the 'Host' alias of an entry in your `~/.ssh/config`.
|
||||
|
||||
```bash
|
||||
enroll harvest --remote-host myhostalias --remote-ssh-config ~/.ssh/config --out /tmp/enroll-harvest
|
||||
```
|
||||
|
||||
### Include paths (`--include-path`)
|
||||
```bash
|
||||
# Add a few dotfiles from /home (still secret-safe unless --dangerous)
|
||||
|
|
@ -330,7 +438,7 @@ enroll single-shot --remote-host myhost.example.com --remote-user myuser --har
|
|||
|
||||
## Diff
|
||||
|
||||
### Compare two harvest directories
|
||||
### Compare two harvest directories, output in json
|
||||
```bash
|
||||
enroll diff --old /path/to/harvestA --new /path/to/harvestB --format json
|
||||
```
|
||||
|
|
@ -342,6 +450,82 @@ enroll diff --old /path/to/golden/harvest --new /path/to/new/harvest --web
|
|||
|
||||
`diff` mode also supports email sending and text or markdown format, as well as `--exit-code` mode to trigger a return code of 2 (useful for crons or CI)
|
||||
|
||||
### Ignore a specific directory or file from the diff
|
||||
```bash
|
||||
enroll diff --old /path/to/harvestA --new /path/to/harvestB --exclude-path /var/anacron
|
||||
```
|
||||
|
||||
### Ignore package version drift (routine upgrades) but still alert on add/remove
|
||||
```bash
|
||||
enroll diff --old /path/to/harvestA --new /path/to/harvestB --ignore-package-versions
|
||||
```
|
||||
|
||||
### Enforce the old harvest state when drift is detected (requires Ansible)
|
||||
```bash
|
||||
enroll diff --old /path/to/harvestA --new /path/to/harvestB --enforce --ignore-package-versions --exclude-path /var/anacron
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Explain
|
||||
|
||||
### Explain a harvest
|
||||
|
||||
All of these do the same thing:
|
||||
|
||||
```bash
|
||||
enroll explain /path/to/state.json
|
||||
enroll explain /path/to/bundle_dir
|
||||
enroll explain /path/to/harvest.tar.gz
|
||||
```
|
||||
|
||||
### Explain a SOPS-encrypted harvest
|
||||
|
||||
```bash
|
||||
enroll explain /path/to/harvest.tar.gz.sops --sops
|
||||
```
|
||||
|
||||
### Explain with JSON output and more examples
|
||||
|
||||
```bash
|
||||
enroll explain /path/to/state.json --format json --max-examples 25
|
||||
```
|
||||
|
||||
### Example output
|
||||
|
||||
```
|
||||
❯ enroll explain /tmp/syrah.harvest
|
||||
Enroll explain: /tmp/syrah.harvest
|
||||
Host: syrah.mig5.net (os: debian, pkg: dpkg)
|
||||
Enroll: 0.2.3
|
||||
|
||||
Inventory
|
||||
- Packages: 254
|
||||
- Why packages were included (observed_via):
|
||||
- user_installed: 248 – Package appears explicitly installed (as opposed to only pulled in as a dependency).
|
||||
- package_role: 232 – Package was referenced by an enroll packages snapshot/role. (e.g. acl, acpid, adduser)
|
||||
- systemd_unit: 22 – Package is associated with a systemd unit that was harvested. (e.g. postfix.service, tor.service, apparmor.service)
|
||||
|
||||
Roles collected
|
||||
- users: 1 user(s), 1 file(s), 0 excluded
|
||||
- services: 19 unit(s), 111 file(s), 6 excluded
|
||||
- packages: 232 package snapshot(s), 41 file(s), 0 excluded
|
||||
- apt_config: 26 file(s), 7 dir(s), 10 excluded
|
||||
- dnf_config: 0 file(s), 0 dir(s), 0 excluded
|
||||
- etc_custom: 70 file(s), 20 dir(s), 0 excluded
|
||||
- usr_local_custom: 35 file(s), 1 dir(s), 0 excluded
|
||||
- extra_paths: 0 file(s), 0 dir(s), 0 excluded
|
||||
|
||||
Why files were included (managed_files.reason)
|
||||
- custom_unowned (179): A file not owned by any package (often custom/operator-managed).. Examples: /etc/apparmor.d/local/lsb_release, /etc/apparmor.d/local/nvidia_modprobe, /etc/apparmor.d/local/sbin.dhclient
|
||||
- usr_local_bin_script (35): Executable scripts under /usr/local/bin (often operator-installed).. Examples: /usr/local/bin/check_firewall, /usr/local/bin/awslogs
|
||||
- apt_keyring (13): Repository signing key material used by APT.. Examples: /etc/apt/keyrings/openvpn-repo-public.asc, /etc/apt/trusted.gpg, /etc/apt/trusted.gpg.d/deb.torproject.org-keyring.gpg
|
||||
- modified_conffile (10): A package-managed conffile differs from the packaged/default version.. Examples: /etc/dnsmasq.conf, /etc/ssh/moduli, /etc/tor/torrc
|
||||
- logrotate_snippet (9): logrotate snippets/configs referenced in system configuration.. Examples: /etc/logrotate.d/rsyslog, /etc/logrotate.d/tor, /etc/logrotate.d/apt
|
||||
- apt_config (7): APT configuration affecting package installation and repository behavior.. Examples: /etc/apt/apt.conf.d/01autoremove, /etc/apt/apt.conf.d/20listchanges, /etc/apt/apt.conf.d/70debconf
|
||||
[...]
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Run Ansible
|
||||
|
|
@ -356,6 +540,12 @@ ansible-playbook -i "localhost," -c local /tmp/enroll-ansible/playbook.yml
|
|||
ansible-playbook /tmp/enroll-ansible/playbooks/"$(hostname -f)".yml
|
||||
```
|
||||
|
||||
### Run only specific roles (tags)
|
||||
Generated playbooks tag each role as `role_<name>` (e.g. `role_users`, `role_services`), so you can speed up targeted runs:
|
||||
```bash
|
||||
ansible-playbook -i "localhost," -c local /tmp/enroll-ansible/playbook.yml --tags role_users
|
||||
```
|
||||
|
||||
## Configuration file
|
||||
|
||||
As can be seen above, there are a lot of powerful 'permutations' available to all four subcommands.
|
||||
|
|
@ -405,6 +595,12 @@ exclude_path = /usr/local/bin/docker-*, /usr/local/bin/some-tool
|
|||
no_jinjaturtle = true
|
||||
sops = 00AE817C24A10C2540461A9C1D7CDE0234DB458D
|
||||
|
||||
[diff]
|
||||
# ignore noisy drift
|
||||
exclude_path = /var/anacron
|
||||
ignore_package_versions = true
|
||||
# enforce = true # requires ansible-playbook on PATH
|
||||
|
||||
[single-shot]
|
||||
# if you use single-shot, put its defaults here.
|
||||
# It does not inherit those of the subsections above, so you
|
||||
|
|
|
|||
39
debian/changelog
vendored
39
debian/changelog
vendored
|
|
@ -1,3 +1,42 @@
|
|||
enroll (0.4.3) unstable; urgency=medium
|
||||
|
||||
* Add support for AddressFamily and ConnectTimeout in the .ssh/config when using `--remote-ssh-config`.
|
||||
|
||||
 -- Miguel Jacq <mig@mig5.net> Fri, 16 Jan 2026 11:00:00 +1100
|
||||
|
||||
enroll (0.4.2) unstable; urgency=medium
|
||||
|
||||
* Support `--remote-ssh-config [path-to-ssh-config]` as an argument in case extra params are required beyond `--remote-port` or `--remote-user`. Note: `--remote-host` must still be set, but it can be an 'alias' represented by the 'Host' value in the ssh config.
|
||||
|
||||
-- Miguel Jacq <mig@mig5.net> Tue, 13 Jan 2026 21:55:00 +1100
|
||||
|
||||
enroll (0.4.1) unstable; urgency=medium
|
||||
* Add interactive output when 'enroll diff --enforce' is invoking Ansible.
|
||||
|
||||
-- Miguel Jacq <mig@mig5.net> Sun, 11 Jan 2026 10:00:00 +1100
|
||||
|
||||
enroll (0.4.0) unstable; urgency=medium
|
||||
* Introduce `enroll validate` - a tool to validate a harvest against the state schema, or check for missing or orphaned obsolete artifacts in a harvest.
|
||||
* Attempt to generate Jinja2 templates of systemd unit files and Postfix main.cf (now that JinjaTurtle supports it)
|
||||
* Update pynacl dependency to resolve CVE-2025-69277
|
||||
* Add `--exclude-path` to `enroll diff` command, so that you can ignore certain churn from the diff (stuff you still wanted to harvest as a baseline but don't care if it changes day to day)
|
||||
* Add `--ignore-package-versions` to `enroll diff` command, to optionally ignore package upgrades (e.g due to patching) from the diff.
|
||||
* Add tags to the playbook for each role, to allow easier targeting of specific roles during play later.
|
||||
* Add `--enforce` mode to `enroll diff`. If there is diff detected between the two harvests, and it can enforce restoring the state from the older harvest, it will manifest the state and apply it with ansible.
|
||||
Only the specific roles that had diffed will be applied (via the new tags capability)
|
||||
|
||||
-- Miguel Jacq <mig@mig5.net> Sat, 10 Jan 2026 10:30:00 +1100
|
||||
|
||||
enroll (0.3.0) unstable; urgency=medium
|
||||
|
||||
* Introduce `enroll explain` - a tool to analyze and explain what's in (or not in) a harvest and why.
|
||||
* Centralise the cron and logrotate stuff into their respective roles, we had a bit of duplication between roles based on harvest discovery.
|
||||
* Capture other files in the user's home directory such as `.bashrc`, `.bash_aliases`, `.profile`, if these files differ from the `/etc/skel` defaults
|
||||
* Ignore files that end with a tilde or - (probably backup files generated by editors or shadow file changes)
|
||||
* Manage certain symlinks e.g for apache2/nginx sites-enabled and so on
|
||||
|
||||
-- Miguel Jacq <mig@mig5.net> Mon, 05 Jan 2026 17:00:00 +1100
|
||||
|
||||
enroll (0.2.3) unstable; urgency=medium
|
||||
|
||||
* Introduce --ask-become-pass or -K to support password-required sudo on remote hosts, just like Ansible. It will also fall back to this prompt if a password is required but the arg wasn't passed in.
|
||||
|
|
|
|||
5
debian/control
vendored
5
debian/control
vendored
|
|
@ -10,12 +10,13 @@ Build-Depends:
|
|||
python3-all,
|
||||
python3-yaml,
|
||||
python3-poetry-core,
|
||||
python3-paramiko
|
||||
python3-paramiko,
|
||||
python3-jsonschema
|
||||
Standards-Version: 4.6.2
|
||||
Homepage: https://git.mig5.net/mig5/enroll
|
||||
|
||||
Package: enroll
|
||||
Architecture: all
|
||||
Depends: ${misc:Depends}, ${python3:Depends}, python3-yaml, python3-paramiko
|
||||
Depends: ${misc:Depends}, ${python3:Depends}, python3-yaml, python3-paramiko, python3-jsonschema
|
||||
Description: Harvest a host into Ansible roles
|
||||
A tool that inspects a system and emits Ansible roles/playbooks to reproduce it.
|
||||
|
|
|
|||
283
enroll/cli.py
283
enroll/cli.py
|
|
@ -2,6 +2,7 @@ from __future__ import annotations
|
|||
|
||||
import argparse
|
||||
import configparser
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import tarfile
|
||||
|
|
@ -10,11 +11,20 @@ from pathlib import Path
|
|||
from typing import Optional
|
||||
|
||||
from .cache import new_harvest_cache_dir
|
||||
from .diff import compare_harvests, format_report, post_webhook, send_email
|
||||
from .diff import (
|
||||
compare_harvests,
|
||||
enforce_old_harvest,
|
||||
format_report,
|
||||
has_enforceable_drift,
|
||||
post_webhook,
|
||||
send_email,
|
||||
)
|
||||
from .explain import explain_state
|
||||
from .harvest import harvest
|
||||
from .manifest import manifest
|
||||
from .remote import remote_harvest, RemoteSudoPasswordRequired
|
||||
from .sopsutil import SopsError, encrypt_file_binary
|
||||
from .validate import validate_harvest
|
||||
from .version import get_enroll_version
|
||||
|
||||
|
||||
|
|
@ -340,16 +350,33 @@ def _add_remote_args(p: argparse.ArgumentParser) -> None:
|
|||
"--remote-host",
|
||||
help="SSH host to run harvesting on (if set, harvest runs remotely and is pulled locally).",
|
||||
)
|
||||
p.add_argument(
|
||||
"--remote-ssh-config",
|
||||
nargs="?",
|
||||
const=str(Path.home() / ".ssh" / "config"),
|
||||
default=None,
|
||||
help=(
|
||||
"Use OpenSSH-style ssh_config settings for --remote-host. "
|
||||
"If provided without a value, defaults to ~/.ssh/config. "
|
||||
"(Applies HostName/User/Port/IdentityFile/ProxyCommand/HostKeyAlias when supported.)"
|
||||
),
|
||||
)
|
||||
p.add_argument(
|
||||
"--remote-port",
|
||||
type=int,
|
||||
default=22,
|
||||
help="SSH port for --remote-host (default: 22).",
|
||||
default=None,
|
||||
help=(
|
||||
"SSH port for --remote-host. If omitted, defaults to 22, or a value from ssh_config when "
|
||||
"--remote-ssh-config is set."
|
||||
),
|
||||
)
|
||||
p.add_argument(
|
||||
"--remote-user",
|
||||
default=os.environ.get("USER") or None,
|
||||
help="SSH username for --remote-host (default: local $USER).",
|
||||
default=None,
|
||||
help=(
|
||||
"SSH username for --remote-host. If omitted, defaults to local $USER, or a value from ssh_config when "
|
||||
"--remote-ssh-config is set."
|
||||
),
|
||||
)
|
||||
|
||||
# Align terminology with Ansible: "become" == sudo.
|
||||
|
|
@ -547,6 +574,33 @@ def main() -> None:
|
|||
default="text",
|
||||
help="Report output format (default: text).",
|
||||
)
|
||||
d.add_argument(
|
||||
"--exclude-path",
|
||||
action="append",
|
||||
default=[],
|
||||
metavar="PATTERN",
|
||||
help=(
|
||||
"Exclude file paths from the diff report (repeatable). Supports globs (including '**') and regex via 're:<regex>'. "
|
||||
"This affects file drift reporting only (added/removed/changed files), not package/service/user diffs."
|
||||
),
|
||||
)
|
||||
d.add_argument(
|
||||
"--ignore-package-versions",
|
||||
action="store_true",
|
||||
help=(
|
||||
"Ignore package version changes in the diff report and exit status. "
|
||||
"Package additions/removals are still reported. Useful when routine upgrades would otherwise create noisy drift."
|
||||
),
|
||||
)
|
||||
d.add_argument(
|
||||
"--enforce",
|
||||
action="store_true",
|
||||
help=(
|
||||
"If differences are detected, attempt to enforce the old harvest state locally by generating a manifest and "
|
||||
"running ansible-playbook. Requires ansible-playbook on PATH. "
|
||||
"Enroll does not attempt to downgrade packages; if the only drift is package version upgrades (or newly installed packages), enforcement is skipped."
|
||||
),
|
||||
)
|
||||
d.add_argument(
|
||||
"--out",
|
||||
help="Write the report to this file instead of stdout.",
|
||||
|
|
@ -605,6 +659,75 @@ def main() -> None:
|
|||
help="Environment variable containing SMTP password (optional).",
|
||||
)
|
||||
|
||||
e = sub.add_parser("explain", help="Explain a harvest state.json")
|
||||
_add_config_args(e)
|
||||
e.add_argument(
|
||||
"harvest",
|
||||
help=(
|
||||
"Harvest input (directory, a path to state.json, a tarball, or a SOPS-encrypted bundle)."
|
||||
),
|
||||
)
|
||||
e.add_argument(
|
||||
"--sops",
|
||||
action="store_true",
|
||||
help="Treat the input as a SOPS-encrypted bundle (auto-detected if the filename ends with .sops).",
|
||||
)
|
||||
e.add_argument(
|
||||
"--format",
|
||||
choices=["text", "json"],
|
||||
default="text",
|
||||
help="Output format.",
|
||||
)
|
||||
e.add_argument(
|
||||
"--max-examples",
|
||||
type=int,
|
||||
default=3,
|
||||
help="How many example paths/refs to show per reason.",
|
||||
)
|
||||
|
||||
v = sub.add_parser(
|
||||
"validate", help="Validate a harvest bundle (state.json + artifacts)"
|
||||
)
|
||||
_add_config_args(v)
|
||||
v.add_argument(
|
||||
"harvest",
|
||||
help=(
|
||||
"Harvest input (directory, a path to state.json, a tarball, or a SOPS-encrypted bundle)."
|
||||
),
|
||||
)
|
||||
v.add_argument(
|
||||
"--sops",
|
||||
action="store_true",
|
||||
help="Treat the input as a SOPS-encrypted bundle (auto-detected if the filename ends with .sops).",
|
||||
)
|
||||
v.add_argument(
|
||||
"--schema",
|
||||
help=(
|
||||
"Optional JSON schema source (file path or https:// URL). "
|
||||
"If omitted, uses the schema vendored in the enroll codebase."
|
||||
),
|
||||
)
|
||||
v.add_argument(
|
||||
"--no-schema",
|
||||
action="store_true",
|
||||
help="Skip JSON schema validation and only perform bundle consistency checks.",
|
||||
)
|
||||
v.add_argument(
|
||||
"--fail-on-warnings",
|
||||
action="store_true",
|
||||
help="Exit non-zero if validation produces warnings.",
|
||||
)
|
||||
v.add_argument(
|
||||
"--format",
|
||||
choices=["text", "json"],
|
||||
default="text",
|
||||
help="Output format.",
|
||||
)
|
||||
v.add_argument(
|
||||
"--out",
|
||||
help="Write the report to this file instead of stdout.",
|
||||
)
|
||||
|
||||
argv = sys.argv[1:]
|
||||
cfg_path = _discover_config_path(argv)
|
||||
argv = _inject_config_argv(
|
||||
|
|
@ -616,10 +739,23 @@ def main() -> None:
|
|||
"manifest": m,
|
||||
"single-shot": s,
|
||||
"diff": d,
|
||||
"explain": e,
|
||||
"validate": v,
|
||||
},
|
||||
)
|
||||
args = ap.parse_args(argv)
|
||||
|
||||
# Preserve historical defaults for remote harvesting unless ssh_config lookup is enabled.
|
||||
# This lets ssh_config values take effect when the user did not explicitly set
|
||||
# --remote-user / --remote-port.
|
||||
if hasattr(args, "remote_host"):
|
||||
rsc = getattr(args, "remote_ssh_config", None)
|
||||
if not rsc:
|
||||
if getattr(args, "remote_port", None) is None:
|
||||
setattr(args, "remote_port", 22)
|
||||
if getattr(args, "remote_user", None) is None:
|
||||
setattr(args, "remote_user", os.environ.get("USER") or None)
|
||||
|
||||
try:
|
||||
if args.cmd == "harvest":
|
||||
sops_fps = getattr(args, "sops", None)
|
||||
|
|
@ -637,8 +773,9 @@ def main() -> None:
|
|||
ask_become_pass=args.ask_become_pass,
|
||||
local_out_dir=tmp_bundle,
|
||||
remote_host=args.remote_host,
|
||||
remote_port=int(args.remote_port),
|
||||
remote_port=args.remote_port,
|
||||
remote_user=args.remote_user,
|
||||
remote_ssh_config=args.remote_ssh_config,
|
||||
dangerous=bool(args.dangerous),
|
||||
no_sudo=bool(args.no_sudo),
|
||||
include_paths=list(getattr(args, "include_path", []) or []),
|
||||
|
|
@ -658,8 +795,9 @@ def main() -> None:
|
|||
ask_become_pass=args.ask_become_pass,
|
||||
local_out_dir=out_dir,
|
||||
remote_host=args.remote_host,
|
||||
remote_port=int(args.remote_port),
|
||||
remote_port=args.remote_port,
|
||||
remote_user=args.remote_user,
|
||||
remote_ssh_config=args.remote_ssh_config,
|
||||
dangerous=bool(args.dangerous),
|
||||
no_sudo=bool(args.no_sudo),
|
||||
include_paths=list(getattr(args, "include_path", []) or []),
|
||||
|
|
@ -702,6 +840,42 @@ def main() -> None:
|
|||
exclude_paths=list(getattr(args, "exclude_path", []) or []),
|
||||
)
|
||||
print(path)
|
||||
elif args.cmd == "explain":
|
||||
out = explain_state(
|
||||
args.harvest,
|
||||
sops_mode=bool(getattr(args, "sops", False)),
|
||||
fmt=str(getattr(args, "format", "text")),
|
||||
max_examples=int(getattr(args, "max_examples", 3)),
|
||||
)
|
||||
sys.stdout.write(out)
|
||||
|
||||
elif args.cmd == "validate":
|
||||
res = validate_harvest(
|
||||
args.harvest,
|
||||
sops_mode=bool(getattr(args, "sops", False)),
|
||||
schema=getattr(args, "schema", None),
|
||||
no_schema=bool(getattr(args, "no_schema", False)),
|
||||
)
|
||||
|
||||
fmt = str(getattr(args, "format", "text"))
|
||||
if fmt == "json":
|
||||
txt = json.dumps(res.to_dict(), indent=2, sort_keys=True) + "\n"
|
||||
else:
|
||||
txt = res.to_text()
|
||||
|
||||
out_path = getattr(args, "out", None)
|
||||
if out_path:
|
||||
p = Path(out_path).expanduser()
|
||||
p.parent.mkdir(parents=True, exist_ok=True)
|
||||
p.write_text(txt, encoding="utf-8")
|
||||
else:
|
||||
sys.stdout.write(txt)
|
||||
|
||||
if res.errors:
|
||||
raise SystemExit(1)
|
||||
if res.warnings and bool(getattr(args, "fail_on_warnings", False)):
|
||||
raise SystemExit(1)
|
||||
|
||||
elif args.cmd == "manifest":
|
||||
out_enc = manifest(
|
||||
args.harvest,
|
||||
|
|
@ -717,8 +891,47 @@ def main() -> None:
|
|||
args.old,
|
||||
args.new,
|
||||
sops_mode=bool(getattr(args, "sops", False)),
|
||||
exclude_paths=list(getattr(args, "exclude_path", []) or []),
|
||||
ignore_package_versions=bool(
|
||||
getattr(args, "ignore_package_versions", False)
|
||||
),
|
||||
)
|
||||
|
||||
# Optional enforcement: if drift is detected, attempt to restore the
|
||||
# system to the *old* (baseline) state using ansible-playbook.
|
||||
if bool(getattr(args, "enforce", False)):
|
||||
if has_changes:
|
||||
if not has_enforceable_drift(report):
|
||||
report["enforcement"] = {
|
||||
"requested": True,
|
||||
"status": "skipped",
|
||||
"reason": (
|
||||
"no enforceable drift detected (only additions and/or package version changes); "
|
||||
"enroll does not attempt to downgrade packages"
|
||||
),
|
||||
}
|
||||
else:
|
||||
try:
|
||||
info = enforce_old_harvest(
|
||||
args.old,
|
||||
sops_mode=bool(getattr(args, "sops", False)),
|
||||
report=report,
|
||||
)
|
||||
except Exception as e:
|
||||
raise SystemExit(
|
||||
f"error: could not enforce old harvest state: {e}"
|
||||
) from e
|
||||
report["enforcement"] = {
|
||||
"requested": True,
|
||||
**(info or {}),
|
||||
}
|
||||
else:
|
||||
report["enforcement"] = {
|
||||
"requested": True,
|
||||
"status": "skipped",
|
||||
"reason": "no differences detected",
|
||||
}
|
||||
|
||||
txt = format_report(report, fmt=str(getattr(args, "format", "text")))
|
||||
out_path = getattr(args, "out", None)
|
||||
if out_path:
|
||||
|
|
@ -785,8 +998,9 @@ def main() -> None:
|
|||
ask_become_pass=args.ask_become_pass,
|
||||
local_out_dir=tmp_bundle,
|
||||
remote_host=args.remote_host,
|
||||
remote_port=int(args.remote_port),
|
||||
remote_port=args.remote_port,
|
||||
remote_user=args.remote_user,
|
||||
remote_ssh_config=args.remote_ssh_config,
|
||||
dangerous=bool(args.dangerous),
|
||||
no_sudo=bool(args.no_sudo),
|
||||
include_paths=list(getattr(args, "include_path", []) or []),
|
||||
|
|
@ -815,8 +1029,9 @@ def main() -> None:
|
|||
ask_become_pass=args.ask_become_pass,
|
||||
local_out_dir=harvest_dir,
|
||||
remote_host=args.remote_host,
|
||||
remote_port=int(args.remote_port),
|
||||
remote_port=args.remote_port,
|
||||
remote_user=args.remote_user,
|
||||
remote_ssh_config=args.remote_ssh_config,
|
||||
dangerous=bool(args.dangerous),
|
||||
no_sudo=bool(args.no_sudo),
|
||||
include_paths=list(getattr(args, "include_path", []) or []),
|
||||
|
|
@ -877,56 +1092,6 @@ def main() -> None:
|
|||
fqdn=args.fqdn,
|
||||
jinjaturtle=_jt_mode(args),
|
||||
)
|
||||
elif args.cmd == "diff":
|
||||
report, has_changes = compare_harvests(
|
||||
args.old, args.new, sops_mode=bool(getattr(args, "sops", False))
|
||||
)
|
||||
|
||||
rendered = format_report(report, fmt=str(args.format))
|
||||
if args.out:
|
||||
Path(args.out).expanduser().write_text(rendered, encoding="utf-8")
|
||||
else:
|
||||
print(rendered, end="")
|
||||
|
||||
do_notify = bool(has_changes or getattr(args, "notify_always", False))
|
||||
|
||||
if do_notify and getattr(args, "webhook", None):
|
||||
wf = str(getattr(args, "webhook_format", "json"))
|
||||
body = format_report(report, fmt=wf).encode("utf-8")
|
||||
headers = {"User-Agent": "enroll"}
|
||||
if wf == "json":
|
||||
headers["Content-Type"] = "application/json"
|
||||
else:
|
||||
headers["Content-Type"] = "text/plain; charset=utf-8"
|
||||
for hv in getattr(args, "webhook_header", []) or []:
|
||||
if ":" not in hv:
|
||||
raise SystemExit(
|
||||
"error: --webhook-header must be in the form 'K:V'"
|
||||
)
|
||||
k, v = hv.split(":", 1)
|
||||
headers[k.strip()] = v.strip()
|
||||
status, _ = post_webhook(str(args.webhook), body, headers=headers)
|
||||
if status and status >= 400:
|
||||
raise SystemExit(f"error: webhook returned HTTP {status}")
|
||||
|
||||
if do_notify and (getattr(args, "email_to", []) or []):
|
||||
subject = getattr(args, "email_subject", None) or "enroll diff report"
|
||||
smtp_password = None
|
||||
pw_env = getattr(args, "smtp_password_env", None)
|
||||
if pw_env:
|
||||
smtp_password = os.environ.get(str(pw_env))
|
||||
send_email(
|
||||
to_addrs=list(getattr(args, "email_to", []) or []),
|
||||
subject=str(subject),
|
||||
body=rendered,
|
||||
from_addr=getattr(args, "email_from", None),
|
||||
smtp=getattr(args, "smtp", None),
|
||||
smtp_user=getattr(args, "smtp_user", None),
|
||||
smtp_password=smtp_password,
|
||||
)
|
||||
|
||||
if getattr(args, "exit_code", False) and has_changes:
|
||||
raise SystemExit(2)
|
||||
except RemoteSudoPasswordRequired:
|
||||
raise SystemExit(
|
||||
"error: remote sudo requires a password. Re-run with --ask-become-pass."
|
||||
|
|
|
|||
506
enroll/diff.py
506
enroll/diff.py
|
|
@ -3,10 +3,15 @@ from __future__ import annotations
|
|||
import hashlib
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import subprocess # nosec
|
||||
import tarfile
|
||||
import tempfile
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
import itertools
|
||||
import urllib.request
|
||||
from contextlib import ExitStack
|
||||
from dataclasses import dataclass
|
||||
|
|
@ -16,9 +21,73 @@ from pathlib import Path
|
|||
from typing import Any, Dict, Iterable, List, Optional, Tuple
|
||||
|
||||
from .remote import _safe_extract_tar
|
||||
from .pathfilter import PathFilter
|
||||
from .sopsutil import decrypt_file_binary_to, require_sops_cmd
|
||||
|
||||
|
||||
def _progress_enabled() -> bool:
|
||||
"""Return True if we should display interactive progress UI on the CLI.
|
||||
|
||||
We only emit progress when stderr is a TTY, so it won't pollute JSON/text reports
|
||||
captured by systemd, CI, webhooks, etc. Users can also disable this explicitly via
|
||||
ENROLL_NO_PROGRESS=1.
|
||||
"""
|
||||
if os.environ.get("ENROLL_NO_PROGRESS", "").strip() in {"1", "true", "yes"}:
|
||||
return False
|
||||
try:
|
||||
return sys.stderr.isatty()
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
class _Spinner:
|
||||
"""A tiny terminal spinner with an elapsed-time counter (stderr-only)."""
|
||||
|
||||
def __init__(self, message: str, *, interval: float = 0.12) -> None:
|
||||
self.message = message.rstrip()
|
||||
self.interval = interval
|
||||
self._stop = threading.Event()
|
||||
self._thread: Optional[threading.Thread] = None
|
||||
self._last_len = 0
|
||||
self._start = 0.0
|
||||
|
||||
def start(self) -> None:
|
||||
if self._thread is not None:
|
||||
return
|
||||
self._start = time.monotonic()
|
||||
self._thread = threading.Thread(
|
||||
target=self._run, name="enroll-spinner", daemon=True
|
||||
)
|
||||
self._thread.start()
|
||||
|
||||
def stop(self, final_line: Optional[str] = None) -> None:
|
||||
self._stop.set()
|
||||
if self._thread is not None:
|
||||
self._thread.join(timeout=1.0)
|
||||
|
||||
# Clear spinner line.
|
||||
try:
|
||||
sys.stderr.write("\r" + (" " * max(self._last_len, 0)) + "\r")
|
||||
if final_line:
|
||||
sys.stderr.write(final_line.rstrip() + "\n")
|
||||
sys.stderr.flush()
|
||||
except Exception:
|
||||
pass # nosec
|
||||
|
||||
def _run(self) -> None:
|
||||
frames = itertools.cycle("|/-\\")
|
||||
while not self._stop.is_set():
|
||||
elapsed = time.monotonic() - self._start
|
||||
line = f"{self.message} {next(frames)} {elapsed:0.1f}s"
|
||||
try:
|
||||
sys.stderr.write("\r" + line)
|
||||
sys.stderr.flush()
|
||||
self._last_len = max(self._last_len, len(line))
|
||||
except Exception:
|
||||
return
|
||||
self._stop.wait(self.interval)
|
||||
|
||||
|
||||
def _utc_now_iso() -> str:
|
||||
return datetime.now(tz=timezone.utc).isoformat()
|
||||
|
||||
|
|
@ -289,6 +358,8 @@ def compare_harvests(
|
|||
new_path: str,
|
||||
*,
|
||||
sops_mode: bool = False,
|
||||
exclude_paths: Optional[List[str]] = None,
|
||||
ignore_package_versions: bool = False,
|
||||
) -> Tuple[Dict[str, Any], bool]:
|
||||
"""Compare two harvests.
|
||||
|
||||
|
|
@ -315,10 +386,14 @@ def compare_harvests(
|
|||
pkgs_removed = sorted(old_pkgs - new_pkgs)
|
||||
|
||||
pkgs_version_changed: List[Dict[str, Any]] = []
|
||||
pkgs_version_changed_ignored_count = 0
|
||||
for pkg in sorted(old_pkgs & new_pkgs):
|
||||
a = old_inv.get(pkg) or {}
|
||||
b = new_inv.get(pkg) or {}
|
||||
if _pkg_version_key(a) != _pkg_version_key(b):
|
||||
if ignore_package_versions:
|
||||
pkgs_version_changed_ignored_count += 1
|
||||
else:
|
||||
pkgs_version_changed.append(
|
||||
{
|
||||
"package": pkg,
|
||||
|
|
@ -387,6 +462,17 @@ def compare_harvests(
|
|||
|
||||
old_files = _file_index(old_b.dir, old_state)
|
||||
new_files = _file_index(new_b.dir, new_state)
|
||||
|
||||
# Optional user-supplied path exclusions (same semantics as harvest --exclude-path),
|
||||
# applied only to file drift reporting.
|
||||
diff_filter = PathFilter(include=(), exclude=exclude_paths or ())
|
||||
if exclude_paths:
|
||||
old_files = {
|
||||
p: r for p, r in old_files.items() if not diff_filter.is_excluded(p)
|
||||
}
|
||||
new_files = {
|
||||
p: r for p, r in new_files.items() if not diff_filter.is_excluded(p)
|
||||
}
|
||||
old_paths_set = set(old_files)
|
||||
new_paths_set = set(new_files)
|
||||
|
||||
|
|
@ -462,6 +548,10 @@ def compare_harvests(
|
|||
|
||||
report: Dict[str, Any] = {
|
||||
"generated_at": _utc_now_iso(),
|
||||
"filters": {
|
||||
"exclude_paths": list(exclude_paths or []),
|
||||
"ignore_package_versions": bool(ignore_package_versions),
|
||||
},
|
||||
"old": {
|
||||
"input": old_path,
|
||||
"bundle_dir": str(old_b.dir),
|
||||
|
|
@ -478,6 +568,9 @@ def compare_harvests(
|
|||
"added": pkgs_added,
|
||||
"removed": pkgs_removed,
|
||||
"version_changed": pkgs_version_changed,
|
||||
"version_changed_ignored_count": int(
|
||||
pkgs_version_changed_ignored_count
|
||||
),
|
||||
},
|
||||
"services": {
|
||||
"enabled_added": units_added,
|
||||
|
|
@ -513,6 +606,302 @@ def compare_harvests(
|
|||
return report, has_changes
|
||||
|
||||
|
||||
def has_enforceable_drift(report: Dict[str, Any]) -> bool:
|
||||
"""Return True if the diff report contains drift that is safe/meaningful to enforce.
|
||||
|
||||
Enforce mode is intended to restore *state* (files/users/services) and to
|
||||
reinstall packages that were removed.
|
||||
|
||||
It is deliberately conservative about package drift:
|
||||
- Package *version* changes alone are not enforced (no downgrades).
|
||||
- Newly installed packages are not removed.
|
||||
|
||||
This helper lets the CLI decide whether `--enforce` should actually run.
|
||||
"""
|
||||
|
||||
pk = report.get("packages", {}) or {}
|
||||
if pk.get("removed"):
|
||||
return True
|
||||
|
||||
sv = report.get("services", {}) or {}
|
||||
# We do not try to disable newly-enabled services; we only restore units
|
||||
# that were enabled in the baseline but are now missing.
|
||||
if sv.get("enabled_removed") or []:
|
||||
return True
|
||||
|
||||
for ch in sv.get("changed", []) or []:
|
||||
changes = ch.get("changes") or {}
|
||||
# Ignore package set drift for enforceability decisions; package
|
||||
# enforcement is handled via reinstalling removed packages, and we
|
||||
# avoid trying to "undo" upgrades/renames.
|
||||
for k in changes.keys():
|
||||
if k != "packages":
|
||||
return True
|
||||
|
||||
us = report.get("users", {}) or {}
|
||||
# We restore baseline users (missing/changed). We do not remove newly-added users.
|
||||
if (us.get("removed") or []) or (us.get("changed") or []):
|
||||
return True
|
||||
|
||||
fl = report.get("files", {}) or {}
|
||||
# We restore baseline files (missing/changed). We do not delete newly-managed files.
|
||||
if (fl.get("removed") or []) or (fl.get("changed") or []):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def _role_tag(role: str) -> str:
|
||||
"""Return the Ansible tag name for a role (must match manifest generation)."""
|
||||
r = str(role or "").strip()
|
||||
safe = re.sub(r"[^A-Za-z0-9_-]+", "_", r).strip("_")
|
||||
if not safe:
|
||||
safe = "other"
|
||||
return f"role_{safe}"
|
||||
|
||||
|
||||
def _enforcement_plan(
|
||||
report: Dict[str, Any],
|
||||
old_state: Dict[str, Any],
|
||||
old_bundle_dir: Path,
|
||||
) -> Dict[str, Any]:
|
||||
"""Return a best-effort enforcement plan (roles/tags) for this diff report.
|
||||
|
||||
We only plan for drift that the baseline manifest can safely restore:
|
||||
- packages that were removed (reinstall, no downgrades)
|
||||
- baseline users that were removed/changed
|
||||
- baseline files that were removed/changed
|
||||
- baseline systemd units that were disabled/changed
|
||||
|
||||
We do NOT plan to remove newly-added packages/users/files/services.
|
||||
"""
|
||||
roles: set[str] = set()
|
||||
|
||||
# --- Packages (only removals)
|
||||
pk = report.get("packages", {}) or {}
|
||||
removed_pkgs = set(pk.get("removed") or [])
|
||||
if removed_pkgs:
|
||||
pkg_to_roles: Dict[str, set[str]] = {}
|
||||
|
||||
for svc in _roles(old_state).get("services") or []:
|
||||
r = str(svc.get("role_name") or "").strip()
|
||||
for p in svc.get("packages", []) or []:
|
||||
if p:
|
||||
pkg_to_roles.setdefault(str(p), set()).add(r)
|
||||
|
||||
for pr in _roles(old_state).get("packages") or []:
|
||||
r = str(pr.get("role_name") or "").strip()
|
||||
p = pr.get("package")
|
||||
if p:
|
||||
pkg_to_roles.setdefault(str(p), set()).add(r)
|
||||
|
||||
for p in removed_pkgs:
|
||||
for r in pkg_to_roles.get(str(p), set()):
|
||||
if r:
|
||||
roles.add(r)
|
||||
|
||||
# --- Users (removed/changed)
|
||||
us = report.get("users", {}) or {}
|
||||
if (us.get("removed") or []) or (us.get("changed") or []):
|
||||
u = _roles(old_state).get("users") or {}
|
||||
u_role = str(u.get("role_name") or "users")
|
||||
if u_role:
|
||||
roles.add(u_role)
|
||||
|
||||
# --- Files (removed/changed)
|
||||
fl = report.get("files", {}) or {}
|
||||
file_paths: List[str] = []
|
||||
for e in fl.get("removed", []) or []:
|
||||
if isinstance(e, dict):
|
||||
p = e.get("path")
|
||||
else:
|
||||
p = e
|
||||
if p:
|
||||
file_paths.append(str(p))
|
||||
for e in fl.get("changed", []) or []:
|
||||
if isinstance(e, dict):
|
||||
p = e.get("path")
|
||||
else:
|
||||
p = e
|
||||
if p:
|
||||
file_paths.append(str(p))
|
||||
|
||||
if file_paths:
|
||||
idx = _file_index(old_bundle_dir, old_state)
|
||||
for p in file_paths:
|
||||
rec = idx.get(p)
|
||||
if rec and rec.role:
|
||||
roles.add(str(rec.role))
|
||||
|
||||
# --- Services (enabled_removed + meaningful changes)
|
||||
sv = report.get("services", {}) or {}
|
||||
units: List[str] = []
|
||||
for u in sv.get("enabled_removed", []) or []:
|
||||
if u:
|
||||
units.append(str(u))
|
||||
for ch in sv.get("changed", []) or []:
|
||||
if not isinstance(ch, dict):
|
||||
continue
|
||||
unit = ch.get("unit")
|
||||
changes = ch.get("changes") or {}
|
||||
if unit and any(k != "packages" for k in changes.keys()):
|
||||
units.append(str(unit))
|
||||
|
||||
if units:
|
||||
old_units = _service_units(old_state)
|
||||
for u in units:
|
||||
snap = old_units.get(u)
|
||||
if snap and snap.get("role_name"):
|
||||
roles.add(str(snap.get("role_name")))
|
||||
|
||||
# Drop empty/unknown roles.
|
||||
roles = {r for r in roles if r and str(r).strip() and str(r).strip() != "unknown"}
|
||||
|
||||
tags = sorted({_role_tag(r) for r in roles})
|
||||
return {
|
||||
"roles": sorted(roles),
|
||||
"tags": tags,
|
||||
}
|
||||
|
||||
|
||||
def enforce_old_harvest(
|
||||
old_path: str,
|
||||
*,
|
||||
sops_mode: bool = False,
|
||||
report: Optional[Dict[str, Any]] = None,
|
||||
) -> Dict[str, Any]:
|
||||
"""Enforce the *old* (baseline) harvest state on the current machine.
|
||||
|
||||
When Ansible is available, this:
|
||||
1) renders a temporary manifest from the old harvest, and
|
||||
2) runs ansible-playbook locally to apply it.
|
||||
|
||||
Returns a dict suitable for attaching to the diff report under
|
||||
report['enforcement'].
|
||||
"""
|
||||
|
||||
ansible_playbook = shutil.which("ansible-playbook")
|
||||
if not ansible_playbook:
|
||||
raise RuntimeError(
|
||||
"ansible-playbook not found on PATH (cannot enforce; install Ansible)"
|
||||
)
|
||||
|
||||
# Import lazily to avoid heavy import cost and potential CLI cycles.
|
||||
from .manifest import manifest
|
||||
|
||||
started_at = _utc_now_iso()
|
||||
|
||||
with ExitStack() as stack:
|
||||
old_b = _bundle_from_input(old_path, sops_mode=sops_mode)
|
||||
if old_b.tempdir:
|
||||
stack.callback(old_b.tempdir.cleanup)
|
||||
|
||||
old_state = _load_state(old_b.dir)
|
||||
|
||||
plan: Optional[Dict[str, Any]] = None
|
||||
tags: Optional[List[str]] = None
|
||||
roles: List[str] = []
|
||||
if report is not None:
|
||||
plan = _enforcement_plan(report, old_state, old_b.dir)
|
||||
roles = list(plan.get("roles") or [])
|
||||
t = list(plan.get("tags") or [])
|
||||
tags = t if t else None
|
||||
|
||||
with tempfile.TemporaryDirectory(prefix="enroll-enforce-") as td:
|
||||
td_path = Path(td)
|
||||
try:
|
||||
os.chmod(td_path, 0o700)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
# 1) Generate a manifest in a temp directory.
|
||||
manifest(str(old_b.dir), str(td_path))
|
||||
|
||||
playbook = td_path / "playbook.yml"
|
||||
if not playbook.exists():
|
||||
raise RuntimeError(
|
||||
f"manifest did not produce expected playbook.yml at {playbook}"
|
||||
)
|
||||
|
||||
# 2) Apply it locally.
|
||||
env = dict(os.environ)
|
||||
cfg = td_path / "ansible.cfg"
|
||||
if cfg.exists():
|
||||
env["ANSIBLE_CONFIG"] = str(cfg)
|
||||
|
||||
cmd = [
|
||||
ansible_playbook,
|
||||
"-i",
|
||||
"localhost,",
|
||||
"-c",
|
||||
"local",
|
||||
str(playbook),
|
||||
]
|
||||
if tags:
|
||||
cmd.extend(["--tags", ",".join(tags)])
|
||||
|
||||
spinner: Optional[_Spinner] = None
|
||||
p: Optional[subprocess.CompletedProcess[str]] = None
|
||||
t0 = time.monotonic()
|
||||
if _progress_enabled():
|
||||
if tags:
|
||||
sys.stderr.write(
|
||||
f"Enforce: running ansible-playbook (tags: {','.join(tags)})\n",
|
||||
)
|
||||
else:
|
||||
sys.stderr.write("Enforce: running ansible-playbook\n")
|
||||
sys.stderr.flush()
|
||||
spinner = _Spinner(" ansible-playbook")
|
||||
spinner.start()
|
||||
|
||||
try:
|
||||
p = subprocess.run(
|
||||
cmd,
|
||||
cwd=str(td_path),
|
||||
env=env,
|
||||
capture_output=True,
|
||||
text=True,
|
||||
check=False,
|
||||
) # nosec
|
||||
finally:
|
||||
if spinner:
|
||||
elapsed = time.monotonic() - t0
|
||||
rc = p.returncode if p is not None else None
|
||||
spinner.stop(
|
||||
final_line=(
|
||||
f"Enforce: ansible-playbook finished in {elapsed:0.1f}s"
|
||||
+ (f" (rc={rc})" if rc is not None else ""),
|
||||
),
|
||||
)
|
||||
|
||||
finished_at = _utc_now_iso()
|
||||
|
||||
info: Dict[str, Any] = {
|
||||
"status": "applied" if p.returncode == 0 else "failed",
|
||||
"started_at": started_at,
|
||||
"finished_at": finished_at,
|
||||
"ansible_playbook": ansible_playbook,
|
||||
"command": cmd,
|
||||
"returncode": int(p.returncode),
|
||||
}
|
||||
|
||||
# Record tag selection (if we could attribute drift to specific roles).
|
||||
info["roles"] = roles
|
||||
info["tags"] = list(tags or [])
|
||||
if not tags:
|
||||
info["scope"] = "full_playbook"
|
||||
|
||||
if p.returncode != 0:
|
||||
err = (p.stderr or p.stdout or "").strip()
|
||||
raise RuntimeError(
|
||||
"ansible-playbook failed"
|
||||
+ (f" (rc={p.returncode})" if p.returncode is not None else "")
|
||||
+ (f": {err}" if err else "")
|
||||
)
|
||||
|
||||
return info
|
||||
|
||||
|
||||
def format_report(report: Dict[str, Any], *, fmt: str = "text") -> str:
|
||||
fmt = (fmt or "text").lower()
|
||||
if fmt == "json":
|
||||
|
|
@ -532,11 +921,60 @@ def _report_text(report: Dict[str, Any]) -> str:
|
|||
f"new: {new.get('input')} (host={new.get('host')}, state_mtime={new.get('state_mtime')})"
|
||||
)
|
||||
|
||||
filt = report.get("filters", {}) or {}
|
||||
ex_paths = filt.get("exclude_paths", []) or []
|
||||
if ex_paths:
|
||||
lines.append(f"file exclude patterns: {', '.join(str(p) for p in ex_paths)}")
|
||||
|
||||
if filt.get("ignore_package_versions"):
|
||||
ignored = int(
|
||||
(report.get("packages", {}) or {}).get("version_changed_ignored_count") or 0
|
||||
)
|
||||
msg = "package version drift: ignored (--ignore-package-versions)"
|
||||
if ignored:
|
||||
msg += f" (ignored {ignored} change{'s' if ignored != 1 else ''})"
|
||||
lines.append(msg)
|
||||
|
||||
enf = report.get("enforcement") or {}
|
||||
if enf:
|
||||
lines.append("\nEnforcement")
|
||||
status = str(enf.get("status") or "").strip().lower()
|
||||
if status == "applied":
|
||||
extra = ""
|
||||
tags = enf.get("tags") or []
|
||||
scope = enf.get("scope")
|
||||
if tags:
|
||||
extra = f" (tags={','.join(str(t) for t in tags)})"
|
||||
elif scope:
|
||||
extra = f" ({scope})"
|
||||
lines.append(
|
||||
f" applied old harvest via ansible-playbook (rc={enf.get('returncode')})"
|
||||
+ extra
|
||||
+ (
|
||||
f" (finished {enf.get('finished_at')})"
|
||||
if enf.get("finished_at")
|
||||
else ""
|
||||
)
|
||||
)
|
||||
elif status == "failed":
|
||||
lines.append(
|
||||
f" attempted enforcement but ansible-playbook failed (rc={enf.get('returncode')})"
|
||||
)
|
||||
elif status == "skipped":
|
||||
r = enf.get("reason")
|
||||
lines.append(" skipped" + (f": {r}" if r else ""))
|
||||
else:
|
||||
# Best-effort formatting for future fields.
|
||||
lines.append(" " + json.dumps(enf, sort_keys=True))
|
||||
|
||||
pk = report.get("packages", {})
|
||||
lines.append("\nPackages")
|
||||
lines.append(f" added: {len(pk.get('added', []) or [])}")
|
||||
lines.append(f" removed: {len(pk.get('removed', []) or [])}")
|
||||
lines.append(f" version_changed: {len(pk.get('version_changed', []) or [])}")
|
||||
ignored_v = int(pk.get("version_changed_ignored_count") or 0)
|
||||
vc = len(pk.get("version_changed", []) or [])
|
||||
suffix = f" (ignored {ignored_v})" if ignored_v else ""
|
||||
lines.append(f" version_changed: {vc}{suffix}")
|
||||
for p in pk.get("added", []) or []:
|
||||
lines.append(f" + {p}")
|
||||
for p in pk.get("removed", []) or []:
|
||||
|
|
@ -638,6 +1076,67 @@ def _report_markdown(report: Dict[str, Any]) -> str:
|
|||
f"- **New**: `{new.get('input')}` (host={new.get('host')}, state_mtime={new.get('state_mtime')})\n"
|
||||
)
|
||||
|
||||
filt = report.get("filters", {}) or {}
|
||||
ex_paths = filt.get("exclude_paths", []) or []
|
||||
if ex_paths:
|
||||
out.append(
|
||||
"- **File exclude patterns**: "
|
||||
+ ", ".join(f"`{p}`" for p in ex_paths)
|
||||
+ "\n"
|
||||
)
|
||||
|
||||
if filt.get("ignore_package_versions"):
|
||||
ignored = int(
|
||||
(report.get("packages", {}) or {}).get("version_changed_ignored_count") or 0
|
||||
)
|
||||
msg = "- **Package version drift**: ignored (`--ignore-package-versions`)"
|
||||
if ignored:
|
||||
msg += f" (ignored {ignored} change{'s' if ignored != 1 else ''})"
|
||||
out.append(msg + "\n")
|
||||
|
||||
enf = report.get("enforcement") or {}
|
||||
if enf:
|
||||
out.append("\n## Enforcement\n")
|
||||
status = str(enf.get("status") or "").strip().lower()
|
||||
if status == "applied":
|
||||
extra = ""
|
||||
tags = enf.get("tags") or []
|
||||
scope = enf.get("scope")
|
||||
if tags:
|
||||
extra = " (tags=" + ",".join(str(t) for t in tags) + ")"
|
||||
elif scope:
|
||||
extra = f" ({scope})"
|
||||
out.append(
|
||||
"- ✅ Applied old harvest via ansible-playbook"
|
||||
+ extra
|
||||
+ (
|
||||
f" (rc={enf.get('returncode')})"
|
||||
if enf.get("returncode") is not None
|
||||
else ""
|
||||
)
|
||||
+ (
|
||||
f" (finished `{enf.get('finished_at')}`)"
|
||||
if enf.get("finished_at")
|
||||
else ""
|
||||
)
|
||||
+ "\n"
|
||||
)
|
||||
elif status == "failed":
|
||||
out.append(
|
||||
"- ⚠️ Attempted enforcement but ansible-playbook failed"
|
||||
+ (
|
||||
f" (rc={enf.get('returncode')})"
|
||||
if enf.get("returncode") is not None
|
||||
else ""
|
||||
)
|
||||
+ "\n"
|
||||
)
|
||||
elif status == "skipped":
|
||||
r = enf.get("reason")
|
||||
out.append("- Skipped" + (f": {r}" if r else "") + "\n")
|
||||
else:
|
||||
out.append(f"- {json.dumps(enf, sort_keys=True)}\n")
|
||||
|
||||
pk = report.get("packages", {})
|
||||
out.append("## Packages\n")
|
||||
out.append(f"- Added: {len(pk.get('added', []) or [])}\n")
|
||||
|
|
@ -647,7 +1146,10 @@ def _report_markdown(report: Dict[str, Any]) -> str:
|
|||
for p in pk.get("removed", []) or []:
|
||||
out.append(f" - `- {p}`\n")
|
||||
|
||||
out.append(f"- Version changed: {len(pk.get('version_changed', []) or [])}\n")
|
||||
ignored_v = int(pk.get("version_changed_ignored_count") or 0)
|
||||
vc = len(pk.get("version_changed", []) or [])
|
||||
suffix = f" (ignored {ignored_v})" if ignored_v else ""
|
||||
out.append(f"- Version changed: {vc}{suffix}\n")
|
||||
for ch in pk.get("version_changed", []) or []:
|
||||
out.append(
|
||||
f" - `~ {ch.get('package')}`: `{ch.get('old')}` → `{ch.get('new')}`\n"
|
||||
|
|
|
|||
578
enroll/explain.py
Normal file
578
enroll/explain.py
Normal file
|
|
@ -0,0 +1,578 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from collections import Counter, defaultdict
|
||||
from dataclasses import dataclass
|
||||
from typing import Any, Dict, Iterable, List, Tuple
|
||||
|
||||
from .diff import _bundle_from_input, _load_state # reuse existing bundle handling
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class ReasonInfo:
|
||||
title: str
|
||||
why: str
|
||||
|
||||
|
||||
_MANAGED_FILE_REASONS: Dict[str, ReasonInfo] = {
|
||||
# Package manager / repo config
|
||||
"apt_config": ReasonInfo(
|
||||
"APT configuration",
|
||||
"APT configuration affecting package installation and repository behavior.",
|
||||
),
|
||||
"apt_source": ReasonInfo(
|
||||
"APT repository source",
|
||||
"APT source list entries (e.g. sources.list or sources.list.d).",
|
||||
),
|
||||
"apt_keyring": ReasonInfo(
|
||||
"APT keyring",
|
||||
"Repository signing key material used by APT.",
|
||||
),
|
||||
"apt_signed_by_keyring": ReasonInfo(
|
||||
"APT Signed-By keyring",
|
||||
"Keyring referenced via a Signed-By directive in an APT source.",
|
||||
),
|
||||
"yum_conf": ReasonInfo(
|
||||
"YUM/DNF main config",
|
||||
"Primary YUM configuration (often /etc/yum.conf).",
|
||||
),
|
||||
"yum_config": ReasonInfo(
|
||||
"YUM/DNF config",
|
||||
"YUM/DNF configuration files (including conf.d).",
|
||||
),
|
||||
"yum_repo": ReasonInfo(
|
||||
"YUM/DNF repository",
|
||||
"YUM/DNF repository definitions (e.g. yum.repos.d).",
|
||||
),
|
||||
"dnf_config": ReasonInfo(
|
||||
"DNF configuration",
|
||||
"DNF configuration affecting package installation and repositories.",
|
||||
),
|
||||
"rpm_gpg_key": ReasonInfo(
|
||||
"RPM GPG key",
|
||||
"Repository signing keys used by RPM/YUM/DNF.",
|
||||
),
|
||||
# SSH
|
||||
"authorized_keys": ReasonInfo(
|
||||
"SSH authorized keys",
|
||||
"User authorized_keys files (controls who can log in with SSH keys).",
|
||||
),
|
||||
"ssh_public_key": ReasonInfo(
|
||||
"SSH public key",
|
||||
"SSH host/user public keys relevant to authentication.",
|
||||
),
|
||||
# System config / security
|
||||
"system_security": ReasonInfo(
|
||||
"Security configuration",
|
||||
"Security-sensitive configuration (SSH, sudoers, PAM, auth, etc.).",
|
||||
),
|
||||
"system_network": ReasonInfo(
|
||||
"Network configuration",
|
||||
"Network configuration (interfaces, resolv.conf, network managers, etc.).",
|
||||
),
|
||||
"system_firewall": ReasonInfo(
|
||||
"Firewall configuration",
|
||||
"Firewall rules/configuration (ufw, nftables, iptables, etc.).",
|
||||
),
|
||||
"system_sysctl": ReasonInfo(
|
||||
"sysctl configuration",
|
||||
"Kernel sysctl tuning (sysctl.conf / sysctl.d).",
|
||||
),
|
||||
"system_modprobe": ReasonInfo(
|
||||
"modprobe configuration",
|
||||
"Kernel module configuration (modprobe.d).",
|
||||
),
|
||||
"system_mounts": ReasonInfo(
|
||||
"Mount configuration",
|
||||
"Mount configuration (e.g. /etc/fstab and related).",
|
||||
),
|
||||
"system_rc": ReasonInfo(
|
||||
"Startup/rc configuration",
|
||||
"Startup scripts / rc configuration that can affect boot behavior.",
|
||||
),
|
||||
# systemd + timers
|
||||
"systemd_dropin": ReasonInfo(
|
||||
"systemd drop-in",
|
||||
"systemd override/drop-in files that modify a unit's behavior.",
|
||||
),
|
||||
"systemd_envfile": ReasonInfo(
|
||||
"systemd EnvironmentFile",
|
||||
"Files referenced by systemd units via EnvironmentFile.",
|
||||
),
|
||||
"related_timer": ReasonInfo(
|
||||
"Related systemd timer",
|
||||
"A systemd timer captured because it is related to a unit/service.",
|
||||
),
|
||||
# cron / logrotate
|
||||
"system_cron": ReasonInfo(
|
||||
"System cron",
|
||||
"System cron configuration (crontab, cron.d, etc.).",
|
||||
),
|
||||
"cron_snippet": ReasonInfo(
|
||||
"Cron snippet",
|
||||
"Cron snippets referenced/used by harvested services or configs.",
|
||||
),
|
||||
"system_logrotate": ReasonInfo(
|
||||
"System logrotate",
|
||||
"System logrotate configuration.",
|
||||
),
|
||||
"logrotate_snippet": ReasonInfo(
|
||||
"logrotate snippet",
|
||||
"logrotate snippets/configs referenced in system configuration.",
|
||||
),
|
||||
# Custom paths / drift signals
|
||||
"modified_conffile": ReasonInfo(
|
||||
"Modified package conffile",
|
||||
"A package-managed conffile differs from the packaged/default version.",
|
||||
),
|
||||
"modified_packaged_file": ReasonInfo(
|
||||
"Modified packaged file",
|
||||
"A file owned by a package differs from the packaged version.",
|
||||
),
|
||||
"custom_unowned": ReasonInfo(
|
||||
"Unowned custom file",
|
||||
"A file not owned by any package (often custom/operator-managed).",
|
||||
),
|
||||
"custom_specific_path": ReasonInfo(
|
||||
"Custom specific path",
|
||||
"A specific path included by a custom rule or snapshot.",
|
||||
),
|
||||
"usr_local_bin_script": ReasonInfo(
|
||||
"/usr/local/bin script",
|
||||
"Executable scripts under /usr/local/bin (often operator-installed).",
|
||||
),
|
||||
"usr_local_etc_custom": ReasonInfo(
|
||||
"/usr/local/etc custom",
|
||||
"Custom configuration under /usr/local/etc.",
|
||||
),
|
||||
# User includes
|
||||
"user_include": ReasonInfo(
|
||||
"User-included path",
|
||||
"Included because you specified it via --include-path / include patterns.",
|
||||
),
|
||||
}
|
||||
|
||||
_MANAGED_DIR_REASONS: Dict[str, ReasonInfo] = {
|
||||
"parent_of_managed_file": ReasonInfo(
|
||||
"Parent directory",
|
||||
"Included so permissions/ownership can be recreated for managed files.",
|
||||
),
|
||||
"user_include_dir": ReasonInfo(
|
||||
"User-included directory",
|
||||
"Included because you specified it via --include-path / include patterns.",
|
||||
),
|
||||
}
|
||||
|
||||
_EXCLUDED_REASONS: Dict[str, ReasonInfo] = {
|
||||
"user_excluded": ReasonInfo(
|
||||
"User excluded",
|
||||
"Excluded because you explicitly excluded it (e.g. --exclude-path / patterns).",
|
||||
),
|
||||
"unreadable": ReasonInfo(
|
||||
"Unreadable",
|
||||
"Enroll could not read this path with the permissions it had.",
|
||||
),
|
||||
"log_file": ReasonInfo(
|
||||
"Log file",
|
||||
"Excluded because it appears to be a log file (usually noisy/large).",
|
||||
),
|
||||
"denied_path": ReasonInfo(
|
||||
"Denied path",
|
||||
"Excluded because the path is in a denylist for safety.",
|
||||
),
|
||||
"too_large": ReasonInfo(
|
||||
"Too large",
|
||||
"Excluded because it exceeded the size limit for harvested files.",
|
||||
),
|
||||
"not_regular_file": ReasonInfo(
|
||||
"Not a regular file",
|
||||
"Excluded because it was not a regular file (device, socket, etc.).",
|
||||
),
|
||||
"binary_like": ReasonInfo(
|
||||
"Binary-like",
|
||||
"Excluded because it looked like binary content (not useful for config management).",
|
||||
),
|
||||
"sensitive_content": ReasonInfo(
|
||||
"Sensitive content",
|
||||
"Excluded because it likely contains secrets (e.g. shadow, private keys).",
|
||||
),
|
||||
}
|
||||
|
||||
_OBSERVED_VIA: Dict[str, ReasonInfo] = {
|
||||
"user_installed": ReasonInfo(
|
||||
"User-installed",
|
||||
"Package appears explicitly installed (as opposed to only pulled in as a dependency).",
|
||||
),
|
||||
"systemd_unit": ReasonInfo(
|
||||
"Referenced by systemd unit",
|
||||
"Package is associated with a systemd unit that was harvested.",
|
||||
),
|
||||
"package_role": ReasonInfo(
|
||||
"Referenced by package role",
|
||||
"Package was referenced by an enroll packages snapshot/role.",
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
def _ri(mapping: Dict[str, ReasonInfo], key: str) -> ReasonInfo:
    """Look up `key` in `mapping`, falling back to a generic ReasonInfo.

    The fallback reuses the raw reason code as the title so unknown reason
    codes still render something meaningful in reports.
    """
    known = mapping.get(key)
    if known:
        return known
    return ReasonInfo(key, f"Captured with reason '{key}'")
|
||||
|
||||
|
||||
def _role_common_counts(role_obj: Dict[str, Any]) -> Tuple[int, int, int, int]:
    """Return (managed_files, managed_dirs, excluded, notes) counts for a RoleCommon object."""

    def _count(key: str) -> int:
        # Missing keys and explicit None both count as empty.
        return len(role_obj.get(key) or [])

    return (
        _count("managed_files"),
        _count("managed_dirs"),
        _count("excluded"),
        _count("notes"),
    )
|
||||
|
||||
|
||||
def _summarize_reasons(
    items: Iterable[Dict[str, Any]],
    reason_key: str,
    *,
    mapping: Dict[str, ReasonInfo],
    max_examples: int,
) -> List[Dict[str, Any]]:
    """Group `items` by their `reason_key` value and describe each group.

    Returns one dict per distinct reason code, ordered by descending count
    (ties in insertion order, per Counter.most_common), each carrying the
    human-readable title/why from `mapping` plus up to `max_examples`
    example paths taken from the items' "path" fields.
    """
    counts: Counter[str] = Counter()
    example_paths: Dict[str, List[str]] = {}

    for entry in items:
        if not isinstance(entry, dict):
            continue
        raw_reason = entry.get(reason_key)
        if not raw_reason:
            continue
        reason = str(raw_reason)
        counts[reason] += 1

        bucket = example_paths.setdefault(reason, [])
        path = entry.get("path")
        keep_example = (
            max_examples > 0
            and isinstance(path, str)
            and bool(path)
            and len(bucket) < max_examples
        )
        if keep_example:
            bucket.append(path)

    summaries: List[Dict[str, Any]] = []
    for reason, count in counts.most_common():
        info = _ri(mapping, reason)
        summaries.append(
            {
                "reason": reason,
                "count": count,
                "title": info.title,
                "why": info.why,
                "examples": example_paths.get(reason, []),
            }
        )
    return summaries
|
||||
|
||||
|
||||
def explain_state(
    harvest: str,
    *,
    sops_mode: bool = False,
    fmt: str = "text",
    max_examples: int = 3,
) -> str:
    """Explain a harvest bundle's state.json.

    `harvest` may be:
    - a bundle directory
    - a path to state.json
    - a tarball (.tar.gz/.tgz)
    - a SOPS-encrypted bundle (.sops)

    Builds a structured report (host/enroll metadata, per-role summaries,
    reason breakdowns for managed/excluded paths, and the inventory's
    observed_via breakdown) and renders it either as JSON (`fmt="json"`)
    or as a human-readable text report (any other `fmt` value).

    `max_examples` caps how many example paths/refs/notes are shown per
    category in both the JSON and text output.
    """
    bundle = _bundle_from_input(harvest, sops_mode=sops_mode)
    state = _load_state(bundle.dir)

    # Top-level sections of state.json; each defaults to empty when absent.
    host = state.get("host") or {}
    enroll = state.get("enroll") or {}
    roles = state.get("roles") or {}
    inv = state.get("inventory") or {}
    inv_pkgs = (inv.get("packages") or {}) if isinstance(inv, dict) else {}

    role_summaries: List[Dict[str, Any]] = []

    # Users
    users_obj = roles.get("users") or {}
    user_entries = users_obj.get("users") or []
    mf, md, ex, _nt = (
        _role_common_counts(users_obj) if isinstance(users_obj, dict) else (0, 0, 0, 0)
    )
    role_summaries.append(
        {
            "role": "users",
            "summary": f"{len(user_entries)} user(s), {mf} file(s), {ex} excluded",
            "notes": users_obj.get("notes") or [],
        }
    )

    # Services: one summary entry covering all harvested systemd units.
    services_list = roles.get("services") or []
    if isinstance(services_list, list):
        total_mf = sum(
            len((s.get("managed_files") or []))
            for s in services_list
            if isinstance(s, dict)
        )
        total_ex = sum(
            len((s.get("excluded") or [])) for s in services_list if isinstance(s, dict)
        )
        role_summaries.append(
            {
                "role": "services",
                "summary": f"{len(services_list)} unit(s), {total_mf} file(s), {total_ex} excluded",
                "units": [
                    {
                        "unit": s.get("unit"),
                        "active_state": s.get("active_state"),
                        "sub_state": s.get("sub_state"),
                        "unit_file_state": s.get("unit_file_state"),
                        "condition_result": s.get("condition_result"),
                    }
                    for s in services_list
                    if isinstance(s, dict)
                ],
            }
        )

    # Package snapshots: one summary entry covering all package roles.
    pkgs_list = roles.get("packages") or []
    if isinstance(pkgs_list, list):
        total_mf = sum(
            len((p.get("managed_files") or []))
            for p in pkgs_list
            if isinstance(p, dict)
        )
        total_ex = sum(
            len((p.get("excluded") or [])) for p in pkgs_list if isinstance(p, dict)
        )
        role_summaries.append(
            {
                "role": "packages",
                "summary": f"{len(pkgs_list)} package snapshot(s), {total_mf} file(s), {total_ex} excluded",
                "packages": [
                    p.get("package") for p in pkgs_list if isinstance(p, dict)
                ],
            }
        )

    # Single snapshots: roles that appear at most once in state.json.
    for rname in [
        "apt_config",
        "dnf_config",
        "etc_custom",
        "usr_local_custom",
        "extra_paths",
    ]:
        robj = roles.get(rname) or {}
        if not isinstance(robj, dict):
            continue
        mf, md, ex, _nt = _role_common_counts(robj)
        extra: Dict[str, Any] = {}
        if rname == "extra_paths":
            # extra_paths also records the user-supplied glob patterns.
            extra = {
                "include_patterns": robj.get("include_patterns") or [],
                "exclude_patterns": robj.get("exclude_patterns") or [],
            }
        role_summaries.append(
            {
                "role": rname,
                "summary": f"{mf} file(s), {md} dir(s), {ex} excluded",
                "notes": robj.get("notes") or [],
                **extra,
            }
        )

    # Flatten managed/excluded across roles
    all_managed_files: List[Dict[str, Any]] = []
    all_managed_dirs: List[Dict[str, Any]] = []
    all_excluded: List[Dict[str, Any]] = []

    def _consume_role(role_obj: Dict[str, Any]) -> None:
        # Accumulate this role's file/dir/exclusion entries into the
        # cross-role lists above (only well-formed dict entries count).
        for f in role_obj.get("managed_files") or []:
            if isinstance(f, dict):
                all_managed_files.append(f)
        for d in role_obj.get("managed_dirs") or []:
            if isinstance(d, dict):
                all_managed_dirs.append(d)
        for e in role_obj.get("excluded") or []:
            if isinstance(e, dict):
                all_excluded.append(e)

    if isinstance(users_obj, dict):
        _consume_role(users_obj)
    if isinstance(services_list, list):
        for s in services_list:
            if isinstance(s, dict):
                _consume_role(s)
    if isinstance(pkgs_list, list):
        for p in pkgs_list:
            if isinstance(p, dict):
                _consume_role(p)
    for rname in [
        "apt_config",
        "dnf_config",
        "etc_custom",
        "usr_local_custom",
        "extra_paths",
    ]:
        robj = roles.get(rname)
        if isinstance(robj, dict):
            _consume_role(robj)

    # Per-reason breakdowns, each annotated from the matching ReasonInfo table.
    managed_file_reasons = _summarize_reasons(
        all_managed_files,
        "reason",
        mapping=_MANAGED_FILE_REASONS,
        max_examples=max_examples,
    )
    managed_dir_reasons = _summarize_reasons(
        all_managed_dirs,
        "reason",
        mapping=_MANAGED_DIR_REASONS,
        max_examples=max_examples,
    )
    excluded_reasons = _summarize_reasons(
        all_excluded,
        "reason",
        mapping=_EXCLUDED_REASONS,
        max_examples=max_examples,
    )

    # Inventory observed_via breakdown (count packages that contain at least one entry for that kind)
    observed_kinds: Counter[str] = Counter()
    observed_refs: Dict[str, Counter[str]] = defaultdict(Counter)
    for _pkg, entry in inv_pkgs.items():
        if not isinstance(entry, dict):
            continue
        # De-duplicate kinds per package so each package counts once per kind.
        seen_kinds = set()
        for ov in entry.get("observed_via") or []:
            if not isinstance(ov, dict):
                continue
            kind = ov.get("kind")
            if not kind:
                continue
            kind = str(kind)
            seen_kinds.add(kind)
            ref = ov.get("ref")
            if isinstance(ref, str) and ref:
                observed_refs[kind][ref] += 1
        for k in seen_kinds:
            observed_kinds[k] += 1

    observed_via_summary: List[Dict[str, Any]] = []
    for kind, cnt in observed_kinds.most_common():
        info = _ri(_OBSERVED_VIA, kind)
        top_refs = [
            r for r, _ in observed_refs.get(kind, Counter()).most_common(max_examples)
        ]
        observed_via_summary.append(
            {
                "kind": kind,
                "count": cnt,
                "title": info.title,
                "why": info.why,
                "top_refs": top_refs,
            }
        )

    # Full structured report; this is exactly what the JSON output serializes.
    report: Dict[str, Any] = {
        "bundle_dir": str(bundle.dir),
        "host": host,
        "enroll": enroll,
        "inventory": {
            "package_count": len(inv_pkgs),
            "observed_via": observed_via_summary,
        },
        "roles": role_summaries,
        "reasons": {
            "managed_files": managed_file_reasons,
            "managed_dirs": managed_dir_reasons,
            "excluded": excluded_reasons,
        },
    }

    if fmt == "json":
        return json.dumps(report, indent=2, sort_keys=True)

    # Text rendering
    out: List[str] = []
    out.append(f"Enroll explained: {harvest}")
    hn = host.get("hostname") or "(unknown host)"
    os_family = host.get("os") or "unknown"
    pkg_backend = host.get("pkg_backend") or "?"
    ver = enroll.get("version") or "?"
    out.append(f"Host: {hn} (os: {os_family}, pkg: {pkg_backend})")
    out.append(f"Enroll: {ver}")
    out.append("")

    out.append("Inventory")
    out.append(f"- Packages: {len(inv_pkgs)}")
    if observed_via_summary:
        out.append("- Why packages were included (observed_via):")
        for ov in observed_via_summary:
            extra = ""
            if ov.get("top_refs"):
                extra = f" (e.g. {', '.join(ov['top_refs'])})"
            out.append(f" - {ov['kind']}: {ov['count']} – {ov['why']}{extra}")
    out.append("")

    out.append("Roles collected")
    for rs in role_summaries:
        out.append(f"- {rs['role']}: {rs['summary']}")
        if rs["role"] == "extra_paths":
            # Show (truncated) include/exclude glob patterns for extra_paths.
            inc = rs.get("include_patterns") or []
            exc = rs.get("exclude_patterns") or []
            if inc:
                suffix = "…" if len(inc) > max_examples else ""
                out.append(
                    f" include_patterns: {', '.join(map(str, inc[:max_examples]))}{suffix}"
                )
            if exc:
                suffix = "…" if len(exc) > max_examples else ""
                out.append(
                    f" exclude_patterns: {', '.join(map(str, exc[:max_examples]))}{suffix}"
                )
        notes = rs.get("notes") or []
        if notes:
            for n in notes[:max_examples]:
                out.append(f" note: {n}")
            if len(notes) > max_examples:
                out.append(
                    f" note: (+{len(notes) - max_examples} more. Use --format json to see them all)"
                )
    out.append("")

    out.append("Why files were included (managed_files.reason)")
    if managed_file_reasons:
        # Cap the text view at 15 reasons; JSON output carries them all.
        for r in managed_file_reasons[:15]:
            exs = r.get("examples") or []
            ex_txt = f" Examples: {', '.join(exs)}" if exs else ""
            out.append(f"- {r['reason']} ({r['count']}): {r['why']}.{ex_txt}")
        if len(managed_file_reasons) > 15:
            out.append(
                f"- (+{len(managed_file_reasons) - 15} more reasons. Use --format json to see them all)"
            )
    else:
        out.append("- (no managed files)")

    if managed_dir_reasons:
        out.append("")
        out.append("Why directories were included (managed_dirs.reason)")
        for r in managed_dir_reasons:
            out.append(f"- {r['reason']} ({r['count']}): {r['why']}")

    out.append("")
    out.append("Why paths were excluded")
    if excluded_reasons:
        for r in excluded_reasons:
            exs = r.get("examples") or []
            ex_txt = f" Examples: {', '.join(exs)}" if exs else ""
            out.append(f"- {r['reason']} ({r['count']}): {r['why']}.{ex_txt}")
    else:
        out.append("- (no excluded paths)")

    return "\n".join(out) + "\n"
|
||||
|
|
@ -5,6 +5,7 @@ import json
|
|||
import os
|
||||
import re
|
||||
import shutil
|
||||
import stat
|
||||
import time
|
||||
from dataclasses import dataclass, asdict, field
|
||||
from typing import Dict, List, Optional, Set
|
||||
|
|
@ -34,6 +35,19 @@ class ManagedFile:
|
|||
reason: str
|
||||
|
||||
|
||||
@dataclass
class ManagedLink:
    """A symlink we want to materialise on the target host.

    For configuration enablement patterns (e.g. sites-enabled), the symlink is
    meaningful state even when the link target is captured elsewhere.
    """

    # Absolute path of the symlink itself.
    path: str
    # Link target exactly as returned by os.readlink() (may be relative).
    target: str
    # Why the link was captured (e.g. "enabled_symlink").
    reason: str
|
||||
|
||||
|
||||
@dataclass
|
||||
class ManagedDir:
|
||||
path: str
|
||||
|
|
@ -60,6 +74,7 @@ class ServiceSnapshot:
|
|||
condition_result: Optional[str]
|
||||
managed_dirs: List[ManagedDir] = field(default_factory=list)
|
||||
managed_files: List[ManagedFile] = field(default_factory=list)
|
||||
managed_links: List[ManagedLink] = field(default_factory=list)
|
||||
excluded: List[ExcludedFile] = field(default_factory=list)
|
||||
notes: List[str] = field(default_factory=list)
|
||||
|
||||
|
|
@ -70,6 +85,7 @@ class PackageSnapshot:
|
|||
role_name: str
|
||||
managed_dirs: List[ManagedDir] = field(default_factory=list)
|
||||
managed_files: List[ManagedFile] = field(default_factory=list)
|
||||
managed_links: List[ManagedLink] = field(default_factory=list)
|
||||
excluded: List[ExcludedFile] = field(default_factory=list)
|
||||
notes: List[str] = field(default_factory=list)
|
||||
|
||||
|
|
@ -123,12 +139,13 @@ class UsrLocalCustomSnapshot:
|
|||
@dataclass
|
||||
class ExtraPathsSnapshot:
|
||||
role_name: str
|
||||
include_patterns: List[str]
|
||||
exclude_patterns: List[str]
|
||||
managed_dirs: List[ManagedDir]
|
||||
managed_files: List[ManagedFile]
|
||||
excluded: List[ExcludedFile]
|
||||
notes: List[str]
|
||||
include_patterns: List[str] = field(default_factory=list)
|
||||
exclude_patterns: List[str] = field(default_factory=list)
|
||||
managed_dirs: List[ManagedDir] = field(default_factory=list)
|
||||
managed_files: List[ManagedFile] = field(default_factory=list)
|
||||
managed_links: List[ManagedLink] = field(default_factory=list)
|
||||
excluded: List[ExcludedFile] = field(default_factory=list)
|
||||
notes: List[str] = field(default_factory=list)
|
||||
|
||||
|
||||
ALLOWED_UNOWNED_EXTS = {
|
||||
|
|
@ -157,11 +174,60 @@ MAX_FILES_CAP = 4000
|
|||
MAX_UNOWNED_FILES_PER_ROLE = 500
|
||||
|
||||
|
||||
def _files_differ(a: str, b: str, *, max_bytes: int = 2_000_000) -> bool:
    """Return True if file `a` differs from file `b`.

    Best-effort and conservative:
    - If `b` (baseline) does not exist or is not a regular file, treat as
      "different" so we err on the side of capturing user state.
    - If we can't stat/read either file, treat as "different" (capture will
      later be filtered via IgnorePolicy).
    - If files are large, avoid reading them fully.
    """

    def _regular_stat(path: str):
        # os.stat() of the path, or None when it is missing/unreadable
        # or not a regular file.
        try:
            st = os.stat(path, follow_symlinks=True)
        except OSError:
            return None
        if not stat.S_ISREG(st.st_mode):
            return None
        return st

    st_a = _regular_stat(a)
    if st_a is None:
        return True
    st_b = _regular_stat(b)
    if st_b is None:
        return True

    # Different sizes always differ; equal-but-huge files are treated as
    # different to avoid an expensive full read.
    if st_a.st_size != st_b.st_size or st_a.st_size > max_bytes:
        return True

    try:
        with open(a, "rb") as fa, open(b, "rb") as fb:
            while True:
                chunk = fa.read(1024 * 64)
                if chunk != fb.read(1024 * 64):
                    return True
                if not chunk:  # EOF on both
                    return False
    except OSError:
        return True
|
||||
|
||||
|
||||
def _merge_parent_dirs(
|
||||
existing_dirs: List[ManagedDir],
|
||||
managed_files: List[ManagedFile],
|
||||
*,
|
||||
policy: IgnorePolicy,
|
||||
extra_paths: Optional[List[str]] = None,
|
||||
) -> List[ManagedDir]:
|
||||
"""Ensure parent directories for managed_files are present in managed_dirs.
|
||||
|
||||
|
|
@ -177,8 +243,18 @@ def _merge_parent_dirs(
|
|||
d.path: d for d in (existing_dirs or []) if d.path
|
||||
}
|
||||
|
||||
def _iter_paths() -> List[str]:
|
||||
paths: List[str] = []
|
||||
for mf in managed_files or []:
|
||||
p = str(mf.path or "").rstrip("/")
|
||||
if mf and mf.path:
|
||||
paths.append(str(mf.path))
|
||||
for p in extra_paths or []:
|
||||
if p:
|
||||
paths.append(str(p))
|
||||
return paths
|
||||
|
||||
for p0 in _iter_paths():
|
||||
p = str(p0 or "").rstrip("/")
|
||||
if not p:
|
||||
continue
|
||||
dpath = os.path.dirname(p)
|
||||
|
|
@ -365,6 +441,72 @@ def _capture_file(
|
|||
return True
|
||||
|
||||
|
||||
def _capture_link(
    *,
    role_name: str,
    abs_path: str,
    reason: str,
    policy: IgnorePolicy,
    path_filter: PathFilter,
    managed_out: List[ManagedLink],
    excluded_out: List[ExcludedFile],
    seen_role: Optional[Set[str]] = None,
    seen_global: Optional[Set[str]] = None,
) -> bool:
    """Try to capture a symlink into the manifest.

    NOTE: Symlinks are *not* copied into artifacts; we record their link target
    and materialise them via ansible.builtin.file state=link.

    Appends a ManagedLink to `managed_out` on success, or an ExcludedFile
    (with the deny reason) to `excluded_out` otherwise. Returns True only
    when the link was captured. `seen_role`/`seen_global` deduplicate paths
    within a role and across the whole harvest; a path already seen is
    silently skipped. `role_name` is accepted for signature parity with
    _capture_file but is not used in the body.
    """

    # Already handled elsewhere (by this role or any other role): skip
    # without recording an exclusion.
    if seen_global is not None and abs_path in seen_global:
        return False
    if seen_role is not None and abs_path in seen_role:
        return False

    def _mark_seen() -> None:
        # Record the path in both dedupe sets (whichever were provided).
        if seen_role is not None:
            seen_role.add(abs_path)
        if seen_global is not None:
            seen_global.add(abs_path)

    # User-supplied exclusion patterns win before any policy checks.
    if path_filter.is_excluded(abs_path):
        excluded_out.append(ExcludedFile(path=abs_path, reason="user_excluded"))
        _mark_seen()
        return False

    # Prefer the symlink-aware policy hook when the policy provides it;
    # getattr keeps this working against older IgnorePolicy objects.
    deny_link = getattr(policy, "deny_reason_link", None)
    if callable(deny_link):
        deny = deny_link(abs_path)
    else:
        # Fallback: apply deny_reason() but treat "not_regular_file" as acceptable
        # for symlinks.
        deny = policy.deny_reason(abs_path)
        if deny in ("not_regular_file", "not_file", "not_regular"):
            deny = None

    if deny:
        excluded_out.append(ExcludedFile(path=abs_path, reason=deny))
        _mark_seen()
        return False

    if not os.path.islink(abs_path):
        excluded_out.append(ExcludedFile(path=abs_path, reason="not_symlink"))
        _mark_seen()
        return False

    try:
        target = os.readlink(abs_path)
    except OSError:
        excluded_out.append(ExcludedFile(path=abs_path, reason="unreadable"))
        _mark_seen()
        return False

    managed_out.append(ManagedLink(path=abs_path, target=target, reason=reason))
    _mark_seen()
    return True
|
||||
|
||||
|
||||
def _is_confish(path: str) -> bool:
|
||||
base = os.path.basename(path)
|
||||
_, ext = os.path.splitext(base)
|
||||
|
|
@ -490,23 +632,12 @@ _SYSTEM_CAPTURE_GLOBS: List[tuple[str, str]] = [
|
|||
# mounts
|
||||
("/etc/fstab", "system_mounts"),
|
||||
("/etc/crypttab", "system_mounts"),
|
||||
# logrotate
|
||||
("/etc/logrotate.conf", "system_logrotate"),
|
||||
("/etc/logrotate.d/*", "system_logrotate"),
|
||||
# sysctl / modules
|
||||
("/etc/sysctl.conf", "system_sysctl"),
|
||||
("/etc/sysctl.d/*", "system_sysctl"),
|
||||
("/etc/modprobe.d/*", "system_modprobe"),
|
||||
("/etc/modules", "system_modprobe"),
|
||||
("/etc/modules-load.d/*", "system_modprobe"),
|
||||
# cron
|
||||
("/etc/crontab", "system_cron"),
|
||||
("/etc/cron.d/*", "system_cron"),
|
||||
("/etc/anacrontab", "system_cron"),
|
||||
("/etc/anacron/*", "system_cron"),
|
||||
("/var/spool/cron/crontabs/*", "system_cron"),
|
||||
("/var/spool/crontabs/*", "system_cron"),
|
||||
("/var/spool/cron/*", "system_cron"),
|
||||
# network
|
||||
("/etc/netplan/*", "system_network"),
|
||||
("/etc/systemd/network/*", "system_network"),
|
||||
|
|
@ -762,6 +893,135 @@ def harvest(
|
|||
# This avoids multiple Ansible roles managing the same destination file.
|
||||
captured_global: Set[str] = set()
|
||||
|
||||
# -------------------------
|
||||
# Cron / logrotate unification
|
||||
#
|
||||
# If cron/logrotate are installed, capture all related configuration/state into
|
||||
# dedicated package roles ("cron" and "logrotate") so the same destination path
|
||||
# is never managed by unrelated roles.
|
||||
#
|
||||
# This includes user-specific crontabs under /var/spool, which means the cron role
|
||||
# should be applied after users have been created (handled in manifest ordering).
|
||||
# -------------------------
|
||||
|
||||
installed_pkgs = backend.installed_packages() or {}
|
||||
installed_names: Set[str] = set(installed_pkgs.keys())
|
||||
|
||||
def _pick_installed(cands: List[str]) -> Optional[str]:
|
||||
for c in cands:
|
||||
if c in installed_names:
|
||||
return c
|
||||
return None
|
||||
|
||||
cron_pkg = _pick_installed(
|
||||
["cron", "cronie", "cronie-anacron", "vixie-cron", "fcron"]
|
||||
)
|
||||
logrotate_pkg = _pick_installed(["logrotate"])
|
||||
|
||||
cron_role_name = "cron"
|
||||
logrotate_role_name = "logrotate"
|
||||
|
||||
def _is_cron_path(p: str) -> bool:
    """True when `p` belongs to cron/anacron configuration or spool state."""
    exact_matches = (
        "/etc/crontab",
        "/etc/anacrontab",
        "/etc/cron.allow",
        "/etc/cron.deny",
    )
    prefixes = (
        "/etc/cron.",
        "/etc/cron.d/",
        "/etc/anacron/",
        "/var/spool/cron/",
        "/var/spool/crontabs/",
        "/var/spool/anacron/",
    )
    return p in exact_matches or p.startswith(prefixes)
|
||||
|
||||
def _is_logrotate_path(p: str) -> bool:
    """True for logrotate's main config or any drop-in under /etc/logrotate.d/."""
    if p == "/etc/logrotate.conf":
        return True
    return p.startswith("/etc/logrotate.d/")
|
||||
|
||||
cron_snapshot: Optional[PackageSnapshot] = None
|
||||
logrotate_snapshot: Optional[PackageSnapshot] = None
|
||||
|
||||
if cron_pkg:
|
||||
cron_managed: List[ManagedFile] = []
|
||||
cron_excluded: List[ExcludedFile] = []
|
||||
cron_notes: List[str] = []
|
||||
cron_seen: Set[str] = set()
|
||||
|
||||
cron_globs = [
|
||||
"/etc/crontab",
|
||||
"/etc/cron.d/*",
|
||||
"/etc/cron.hourly/*",
|
||||
"/etc/cron.daily/*",
|
||||
"/etc/cron.weekly/*",
|
||||
"/etc/cron.monthly/*",
|
||||
"/etc/cron.allow",
|
||||
"/etc/cron.deny",
|
||||
"/etc/anacrontab",
|
||||
"/etc/anacron/*",
|
||||
# user crontabs / spool state
|
||||
"/var/spool/cron/*",
|
||||
"/var/spool/cron/crontabs/*",
|
||||
"/var/spool/crontabs/*",
|
||||
"/var/spool/anacron/*",
|
||||
]
|
||||
for spec in cron_globs:
|
||||
for path in _iter_matching_files(spec):
|
||||
if not os.path.isfile(path) or os.path.islink(path):
|
||||
continue
|
||||
_capture_file(
|
||||
bundle_dir=bundle_dir,
|
||||
role_name=cron_role_name,
|
||||
abs_path=path,
|
||||
reason="system_cron",
|
||||
policy=policy,
|
||||
path_filter=path_filter,
|
||||
managed_out=cron_managed,
|
||||
excluded_out=cron_excluded,
|
||||
seen_role=cron_seen,
|
||||
seen_global=captured_global,
|
||||
)
|
||||
|
||||
cron_snapshot = PackageSnapshot(
|
||||
package=cron_pkg,
|
||||
role_name=cron_role_name,
|
||||
managed_files=cron_managed,
|
||||
excluded=cron_excluded,
|
||||
notes=cron_notes,
|
||||
)
|
||||
|
||||
if logrotate_pkg:
|
||||
lr_managed: List[ManagedFile] = []
|
||||
lr_excluded: List[ExcludedFile] = []
|
||||
lr_notes: List[str] = []
|
||||
lr_seen: Set[str] = set()
|
||||
|
||||
lr_globs = [
|
||||
"/etc/logrotate.conf",
|
||||
"/etc/logrotate.d/*",
|
||||
]
|
||||
for spec in lr_globs:
|
||||
for path in _iter_matching_files(spec):
|
||||
if not os.path.isfile(path) or os.path.islink(path):
|
||||
continue
|
||||
_capture_file(
|
||||
bundle_dir=bundle_dir,
|
||||
role_name=logrotate_role_name,
|
||||
abs_path=path,
|
||||
reason="system_logrotate",
|
||||
policy=policy,
|
||||
path_filter=path_filter,
|
||||
managed_out=lr_managed,
|
||||
excluded_out=lr_excluded,
|
||||
seen_role=lr_seen,
|
||||
seen_global=captured_global,
|
||||
)
|
||||
|
||||
logrotate_snapshot = PackageSnapshot(
|
||||
package=logrotate_pkg,
|
||||
role_name=logrotate_role_name,
|
||||
managed_files=lr_managed,
|
||||
excluded=lr_excluded,
|
||||
notes=lr_notes,
|
||||
)
|
||||
# -------------------------
|
||||
# Service roles
|
||||
# -------------------------
|
||||
|
|
@ -777,6 +1037,17 @@ def harvest(
|
|||
excluded_by_role: Dict[str, List[ExcludedFile]] = {}
|
||||
|
||||
enabled_services = list_enabled_services()
|
||||
|
||||
# Avoid role-name collisions with dedicated cron/logrotate package roles.
|
||||
if cron_snapshot is not None or logrotate_snapshot is not None:
|
||||
blocked_roles = set()
|
||||
if cron_snapshot is not None:
|
||||
blocked_roles.add(cron_role_name)
|
||||
if logrotate_snapshot is not None:
|
||||
blocked_roles.add(logrotate_role_name)
|
||||
enabled_services = [
|
||||
u for u in enabled_services if _role_name_from_unit(u) not in blocked_roles
|
||||
]
|
||||
enabled_set = set(enabled_services)
|
||||
|
||||
def _service_sort_key(unit: str) -> tuple[int, str, str]:
|
||||
|
|
@ -886,6 +1157,10 @@ def harvest(
|
|||
for path, reason in backend.modified_paths(pkg, etc_paths).items():
|
||||
if not os.path.isfile(path) or os.path.islink(path):
|
||||
continue
|
||||
if cron_snapshot is not None and _is_cron_path(path):
|
||||
continue
|
||||
if logrotate_snapshot is not None and _is_logrotate_path(path):
|
||||
continue
|
||||
if backend.is_pkg_config_path(path):
|
||||
continue
|
||||
candidates.setdefault(path, reason)
|
||||
|
|
@ -1074,7 +1349,20 @@ def harvest(
|
|||
manual_pkgs_skipped: List[str] = []
|
||||
pkg_snaps: List[PackageSnapshot] = []
|
||||
|
||||
# Add dedicated cron/logrotate roles (if detected) as package roles.
|
||||
# These roles centralise all cron/logrotate managed files so they aren't scattered
|
||||
# across unrelated roles.
|
||||
if cron_snapshot is not None:
|
||||
pkg_snaps.append(cron_snapshot)
|
||||
if logrotate_snapshot is not None:
|
||||
pkg_snaps.append(logrotate_snapshot)
|
||||
for pkg in sorted(manual_pkgs):
|
||||
if cron_snapshot is not None and pkg == cron_pkg:
|
||||
manual_pkgs_skipped.append(pkg)
|
||||
continue
|
||||
if logrotate_snapshot is not None and pkg == logrotate_pkg:
|
||||
manual_pkgs_skipped.append(pkg)
|
||||
continue
|
||||
if pkg in covered_by_services:
|
||||
manual_pkgs_skipped.append(pkg)
|
||||
continue
|
||||
|
|
@ -1091,6 +1379,10 @@ def harvest(
|
|||
for path, reason in backend.modified_paths(pkg, etc_paths).items():
|
||||
if not os.path.isfile(path) or os.path.islink(path):
|
||||
continue
|
||||
if cron_snapshot is not None and _is_cron_path(path):
|
||||
continue
|
||||
if logrotate_snapshot is not None and _is_logrotate_path(path):
|
||||
continue
|
||||
if backend.is_pkg_config_path(path):
|
||||
continue
|
||||
candidates.setdefault(path, reason)
|
||||
|
|
@ -1147,11 +1439,72 @@ def harvest(
|
|||
package=pkg,
|
||||
role_name=role,
|
||||
managed_files=managed,
|
||||
managed_links=[],
|
||||
excluded=excluded,
|
||||
notes=notes,
|
||||
)
|
||||
)
|
||||
|
||||
# -------------------------
|
||||
# Web server enablement symlinks (nginx/apache2)
|
||||
#
|
||||
# Debian-style nginx/apache2 configurations often use *-enabled directories
|
||||
# populated with symlinks pointing back into *-available. The symlinks
|
||||
# represent the enablement state and are important to reproduce.
|
||||
#
|
||||
# We only harvest these when the relevant service/package has already been
|
||||
# detected in this run (i.e. we have a role that will manage nginx/apache2).
|
||||
# -------------------------
|
||||
|
||||
def _find_role_snapshot(role_name: str):
|
||||
for s in service_snaps:
|
||||
if s.role_name == role_name:
|
||||
return s
|
||||
for p in pkg_snaps:
|
||||
if p.role_name == role_name:
|
||||
return p
|
||||
return None
|
||||
|
||||
def _capture_enabled_symlinks(role_name: str, dirs: List[str]) -> None:
|
||||
snap = _find_role_snapshot(role_name)
|
||||
if snap is None:
|
||||
return
|
||||
|
||||
role_seen = seen_by_role.setdefault(role_name, set())
|
||||
for d in dirs:
|
||||
if not os.path.isdir(d):
|
||||
continue
|
||||
for pth in sorted(glob.glob(os.path.join(d, "*"))):
|
||||
if not os.path.islink(pth):
|
||||
continue
|
||||
_capture_link(
|
||||
role_name=role_name,
|
||||
abs_path=pth,
|
||||
reason="enabled_symlink",
|
||||
policy=policy,
|
||||
path_filter=path_filter,
|
||||
managed_out=snap.managed_links,
|
||||
excluded_out=snap.excluded,
|
||||
seen_role=role_seen,
|
||||
seen_global=captured_global,
|
||||
)
|
||||
|
||||
_capture_enabled_symlinks(
|
||||
"nginx",
|
||||
[
|
||||
"/etc/nginx/modules-enabled",
|
||||
"/etc/nginx/sites-enabled",
|
||||
],
|
||||
)
|
||||
_capture_enabled_symlinks(
|
||||
"apache2",
|
||||
[
|
||||
"/etc/apache2/conf-enabled",
|
||||
"/etc/apache2/mods-enabled",
|
||||
"/etc/apache2/sites-enabled",
|
||||
],
|
||||
)
|
||||
|
||||
# -------------------------
|
||||
# Users role (non-system users)
|
||||
# -------------------------
|
||||
|
|
@ -1169,6 +1522,18 @@ def harvest(
|
|||
users_role_name = "users"
|
||||
users_role_seen = seen_by_role.setdefault(users_role_name, set())
|
||||
|
||||
skel_dir = "/etc/skel"
|
||||
# Dotfiles to harvest for non-system users. For the common "skeleton"
|
||||
# files, only capture if the user's copy differs from /etc/skel.
|
||||
skel_dotfiles = [
|
||||
(".bashrc", "user_shell_rc"),
|
||||
(".profile", "user_profile"),
|
||||
(".bash_logout", "user_shell_logout"),
|
||||
]
|
||||
extra_dotfiles = [
|
||||
(".bash_aliases", "user_shell_aliases"),
|
||||
]
|
||||
|
||||
for u in user_records:
|
||||
users_list.append(
|
||||
{
|
||||
|
|
@ -1203,6 +1568,48 @@ def harvest(
|
|||
seen_global=captured_global,
|
||||
)
|
||||
|
||||
# Capture common per-user shell dotfiles when they differ from /etc/skel.
|
||||
# These still go through IgnorePolicy and user path filters.
|
||||
home = (u.home or "").rstrip("/")
|
||||
if home and home.startswith("/"):
|
||||
for rel, reason in skel_dotfiles:
|
||||
upath = os.path.join(home, rel)
|
||||
if not os.path.exists(upath):
|
||||
continue
|
||||
skel_path = os.path.join(skel_dir, rel)
|
||||
if not _files_differ(upath, skel_path, max_bytes=policy.max_file_bytes):
|
||||
continue
|
||||
_capture_file(
|
||||
bundle_dir=bundle_dir,
|
||||
role_name=users_role_name,
|
||||
abs_path=upath,
|
||||
reason=reason,
|
||||
policy=policy,
|
||||
path_filter=path_filter,
|
||||
managed_out=users_managed,
|
||||
excluded_out=users_excluded,
|
||||
seen_role=users_role_seen,
|
||||
seen_global=captured_global,
|
||||
)
|
||||
|
||||
# Capture other common per-user shell files unconditionally if present.
|
||||
for rel, reason in extra_dotfiles:
|
||||
upath = os.path.join(home, rel)
|
||||
if not os.path.exists(upath):
|
||||
continue
|
||||
_capture_file(
|
||||
bundle_dir=bundle_dir,
|
||||
role_name=users_role_name,
|
||||
abs_path=upath,
|
||||
reason=reason,
|
||||
policy=policy,
|
||||
path_filter=path_filter,
|
||||
managed_out=users_managed,
|
||||
excluded_out=users_excluded,
|
||||
seen_role=users_role_seen,
|
||||
seen_global=captured_global,
|
||||
)
|
||||
|
||||
users_snapshot = UsersSnapshot(
|
||||
role_name=users_role_name,
|
||||
users=users_list,
|
||||
|
|
@ -1693,7 +2100,7 @@ def harvest(
|
|||
# -------------------------
|
||||
# Inventory: packages (SBOM-ish)
|
||||
# -------------------------
|
||||
installed = backend.installed_packages() or {}
|
||||
installed = installed_pkgs
|
||||
|
||||
manual_set: Set[str] = set(manual_pkgs or [])
|
||||
|
||||
|
|
@ -1748,11 +2155,17 @@ def harvest(
|
|||
)
|
||||
for s in service_snaps:
|
||||
s.managed_dirs = _merge_parent_dirs(
|
||||
s.managed_dirs, s.managed_files, policy=policy
|
||||
s.managed_dirs,
|
||||
s.managed_files,
|
||||
policy=policy,
|
||||
extra_paths=[ml.path for ml in (s.managed_links or [])],
|
||||
)
|
||||
for p in pkg_snaps:
|
||||
p.managed_dirs = _merge_parent_dirs(
|
||||
p.managed_dirs, p.managed_files, policy=policy
|
||||
p.managed_dirs,
|
||||
p.managed_files,
|
||||
policy=policy,
|
||||
extra_paths=[ml.path for ml in (p.managed_links or [])],
|
||||
)
|
||||
|
||||
if apt_config_snapshot:
|
||||
|
|
|
|||
|
|
@ -100,6 +100,12 @@ class IgnorePolicy:
|
|||
# Always ignore plain *.log files (rarely useful as config, often noisy).
|
||||
if path.endswith(".log"):
|
||||
return "log_file"
|
||||
# Ignore editor/backup files that end with a trailing tilde.
|
||||
if path.endswith("~"):
|
||||
return "backup_file"
|
||||
# Ignore backup shadow files
|
||||
if path.startswith("/etc/") and path.endswith("-"):
|
||||
return "backup_file"
|
||||
|
||||
if not self.dangerous:
|
||||
for g in self.deny_globs or []:
|
||||
|
|
@ -167,3 +173,45 @@ class IgnorePolicy:
|
|||
return "not_directory"
|
||||
|
||||
return None
|
||||
|
||||
def deny_reason_link(self, path: str) -> Optional[str]:
|
||||
"""Symlink-specific deny logic.
|
||||
|
||||
Symlinks are meaningful configuration state (e.g. Debian-style
|
||||
*-enabled directories). deny_reason() is file-oriented and rejects
|
||||
symlinks as "not_regular_file".
|
||||
|
||||
For symlinks we:
|
||||
- apply the usual deny_globs (unless dangerous)
|
||||
- ensure the path is a symlink and we can readlink() it
|
||||
|
||||
No size checks or content scanning are performed for symlinks.
|
||||
"""
|
||||
|
||||
# Keep the same fast-path filename ignores as deny_reason().
|
||||
if path.endswith(".log"):
|
||||
return "log_file"
|
||||
if path.endswith("~"):
|
||||
return "backup_file"
|
||||
if path.startswith("/etc/") and path.endswith("-"):
|
||||
return "backup_file"
|
||||
|
||||
if not self.dangerous:
|
||||
for g in self.deny_globs or []:
|
||||
if fnmatch.fnmatch(path, g):
|
||||
return "denied_path"
|
||||
|
||||
try:
|
||||
os.lstat(path)
|
||||
except OSError:
|
||||
return "unreadable"
|
||||
|
||||
if not os.path.islink(path):
|
||||
return "not_symlink"
|
||||
|
||||
try:
|
||||
os.readlink(path)
|
||||
except OSError:
|
||||
return "unreadable"
|
||||
|
||||
return None
|
||||
|
|
|
|||
|
|
@ -8,7 +8,45 @@ from pathlib import Path
|
|||
from typing import Optional
|
||||
|
||||
|
||||
SUPPORTED_EXTS = {".ini", ".json", ".toml", ".yaml", ".yml", ".xml"}
|
||||
SYSTEMD_SUFFIXES = {
|
||||
".service",
|
||||
".socket",
|
||||
".target",
|
||||
".timer",
|
||||
".path",
|
||||
".mount",
|
||||
".automount",
|
||||
".slice",
|
||||
".swap",
|
||||
".scope",
|
||||
".link",
|
||||
".netdev",
|
||||
".network",
|
||||
}
|
||||
|
||||
SUPPORTED_SUFFIXES = {
|
||||
".ini",
|
||||
".cfg",
|
||||
".json",
|
||||
".toml",
|
||||
".yaml",
|
||||
".yml",
|
||||
".xml",
|
||||
".repo",
|
||||
} | SYSTEMD_SUFFIXES
|
||||
|
||||
|
||||
def infer_other_formats(dest_path: str) -> Optional[str]:
|
||||
p = Path(dest_path)
|
||||
name = p.name.lower()
|
||||
suffix = p.suffix.lower()
|
||||
# postfix
|
||||
if name == "main.cf":
|
||||
return "postfix"
|
||||
# systemd units
|
||||
if suffix in SYSTEMD_SUFFIXES:
|
||||
return "systemd"
|
||||
return None
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
|
|
@ -22,9 +60,15 @@ def find_jinjaturtle_cmd() -> Optional[str]:
|
|||
return shutil.which("jinjaturtle")
|
||||
|
||||
|
||||
def can_jinjify_path(path: str) -> bool:
|
||||
p = Path(path)
|
||||
return p.suffix.lower() in SUPPORTED_EXTS
|
||||
def can_jinjify_path(dest_path: str) -> bool:
|
||||
p = Path(dest_path)
|
||||
suffix = p.suffix.lower()
|
||||
if infer_other_formats(dest_path):
|
||||
return True
|
||||
# allow unambiguous structured formats
|
||||
if suffix in SUPPORTED_SUFFIXES:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def run_jinjaturtle(
|
||||
|
|
|
|||
|
|
@ -11,8 +11,9 @@ from pathlib import Path
|
|||
from typing import Any, Dict, List, Optional, Set, Tuple
|
||||
|
||||
from .jinjaturtle import (
|
||||
find_jinjaturtle_cmd,
|
||||
can_jinjify_path,
|
||||
find_jinjaturtle_cmd,
|
||||
infer_other_formats,
|
||||
run_jinjaturtle,
|
||||
)
|
||||
|
||||
|
|
@ -162,6 +163,19 @@ def _write_role_scaffold(role_dir: str) -> None:
|
|||
os.makedirs(os.path.join(role_dir, "templates"), exist_ok=True)
|
||||
|
||||
|
||||
def _role_tag(role: str) -> str:
|
||||
"""Return a stable Ansible tag name for a role.
|
||||
|
||||
Used by `enroll diff --enforce` to run only the roles needed to repair drift.
|
||||
"""
|
||||
r = str(role or "").strip()
|
||||
# Ansible tag charset is fairly permissive, but keep it portable and consistent.
|
||||
safe = re.sub(r"[^A-Za-z0-9_-]+", "_", r).strip("_")
|
||||
if not safe:
|
||||
safe = "other"
|
||||
return f"role_{safe}"
|
||||
|
||||
|
||||
def _write_playbook_all(path: str, roles: List[str]) -> None:
|
||||
pb_lines = [
|
||||
"---",
|
||||
|
|
@ -172,7 +186,8 @@ def _write_playbook_all(path: str, roles: List[str]) -> None:
|
|||
" roles:",
|
||||
]
|
||||
for r in roles:
|
||||
pb_lines.append(f" - {r}")
|
||||
pb_lines.append(f" - role: {r}")
|
||||
pb_lines.append(f" tags: [{_role_tag(r)}]")
|
||||
with open(path, "w", encoding="utf-8") as f:
|
||||
f.write("\n".join(pb_lines) + "\n")
|
||||
|
||||
|
|
@ -187,7 +202,8 @@ def _write_playbook_host(path: str, fqdn: str, roles: List[str]) -> None:
|
|||
" roles:",
|
||||
]
|
||||
for r in roles:
|
||||
pb_lines.append(f" - {r}")
|
||||
pb_lines.append(f" - role: {r}")
|
||||
pb_lines.append(f" tags: [{_role_tag(r)}]")
|
||||
with open(path, "w", encoding="utf-8") as f:
|
||||
f.write("\n".join(pb_lines) + "\n")
|
||||
|
||||
|
|
@ -309,7 +325,10 @@ def _jinjify_managed_files(
|
|||
continue
|
||||
|
||||
try:
|
||||
res = run_jinjaturtle(jt_exe, artifact_path, role_name=role)
|
||||
force_fmt = infer_other_formats(dest_path)
|
||||
res = run_jinjaturtle(
|
||||
jt_exe, artifact_path, role_name=role, force_format=force_fmt
|
||||
)
|
||||
except Exception:
|
||||
# If jinjaturtle cannot process a file for any reason, skip silently.
|
||||
# (Enroll's core promise is to be optimistic and non-interactive.)
|
||||
|
|
@ -406,6 +425,20 @@ def _build_managed_files_var(
|
|||
return out
|
||||
|
||||
|
||||
def _build_managed_links_var(
|
||||
managed_links: List[Dict[str, Any]],
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""Convert enroll managed_links into an Ansible-friendly list of dicts."""
|
||||
out: List[Dict[str, Any]] = []
|
||||
for ml in managed_links or []:
|
||||
dest = ml.get("path") or ""
|
||||
src = ml.get("target") or ""
|
||||
if not dest or not src:
|
||||
continue
|
||||
out.append({"dest": dest, "src": src})
|
||||
return out
|
||||
|
||||
|
||||
def _render_generic_files_tasks(
|
||||
var_prefix: str, *, include_restart_notify: bool
|
||||
) -> str:
|
||||
|
|
@ -495,6 +528,14 @@ def _render_generic_files_tasks(
|
|||
| selectattr('kind', 'equalto', 'copy')
|
||||
| list }}}}
|
||||
notify: "{{{{ item.notify | default([]) }}}}"
|
||||
|
||||
- name: Ensure managed symlinks exist
|
||||
ansible.builtin.file:
|
||||
src: "{{{{ item.src }}}}"
|
||||
dest: "{{{{ item.dest }}}}"
|
||||
state: link
|
||||
force: true
|
||||
loop: "{{{{ {var_prefix}_managed_links | default([]) }}}}"
|
||||
"""
|
||||
|
||||
|
||||
|
|
@ -819,7 +860,12 @@ def _manifest_from_bundle_dir(
|
|||
group = str(u.get("primary_group") or owner)
|
||||
break
|
||||
|
||||
mode = "0600" if mf.get("reason") == "authorized_keys" else "0644"
|
||||
# Prefer the harvested file mode so we preserve any deliberate
|
||||
# permissions (e.g. 0600 for certain dotfiles). For authorized_keys,
|
||||
# enforce 0600 regardless.
|
||||
mode = mf.get("mode") or "0644"
|
||||
if mf.get("reason") == "authorized_keys":
|
||||
mode = "0600"
|
||||
ssh_files.append(
|
||||
{
|
||||
"dest": dest,
|
||||
|
|
@ -1647,6 +1693,7 @@ User-requested extra file harvesting.
|
|||
pkgs = svc.get("packages", []) or []
|
||||
managed_files = svc.get("managed_files", []) or []
|
||||
managed_dirs = svc.get("managed_dirs", []) or []
|
||||
managed_links = svc.get("managed_links", []) or []
|
||||
|
||||
role_dir = os.path.join(roles_root, role)
|
||||
_write_role_scaffold(role_dir)
|
||||
|
|
@ -1691,6 +1738,8 @@ User-requested extra file harvesting.
|
|||
notify_systemd="Run systemd daemon-reload",
|
||||
)
|
||||
|
||||
links_var = _build_managed_links_var(managed_links)
|
||||
|
||||
dirs_var = _build_managed_dirs_var(managed_dirs)
|
||||
|
||||
jt_map = _yaml_load_mapping(jt_vars) if jt_vars.strip() else {}
|
||||
|
|
@ -1699,6 +1748,7 @@ User-requested extra file harvesting.
|
|||
f"{var_prefix}_packages": pkgs,
|
||||
f"{var_prefix}_managed_files": files_var,
|
||||
f"{var_prefix}_managed_dirs": dirs_var,
|
||||
f"{var_prefix}_managed_links": links_var,
|
||||
f"{var_prefix}_manage_unit": True,
|
||||
f"{var_prefix}_systemd_enabled": bool(enabled_at_harvest),
|
||||
f"{var_prefix}_systemd_state": desired_state,
|
||||
|
|
@ -1714,6 +1764,7 @@ User-requested extra file harvesting.
|
|||
f"{var_prefix}_packages": [],
|
||||
f"{var_prefix}_managed_files": [],
|
||||
f"{var_prefix}_managed_dirs": [],
|
||||
f"{var_prefix}_managed_links": [],
|
||||
f"{var_prefix}_manage_unit": False,
|
||||
f"{var_prefix}_systemd_enabled": False,
|
||||
f"{var_prefix}_systemd_state": "stopped",
|
||||
|
|
@ -1799,6 +1850,9 @@ Generated from `{unit}`.
|
|||
## Managed files
|
||||
{os.linesep.join("- " + mf["path"] + " (" + mf["reason"] + ")" for mf in managed_files) or "- (none)"}
|
||||
|
||||
## Managed symlinks
|
||||
{os.linesep.join("- " + ml["path"] + " -> " + ml["target"] + " (" + ml.get("reason", "") + ")" for ml in managed_links) or "- (none)"}
|
||||
|
||||
## Excluded (possible secrets / unsafe)
|
||||
{os.linesep.join("- " + e["path"] + " (" + e["reason"] + ")" for e in excluded) or "- (none)"}
|
||||
|
||||
|
|
@ -1818,6 +1872,7 @@ Generated from `{unit}`.
|
|||
pkg = pr.get("package") or ""
|
||||
managed_files = pr.get("managed_files", []) or []
|
||||
managed_dirs = pr.get("managed_dirs", []) or []
|
||||
managed_links = pr.get("managed_links", []) or []
|
||||
|
||||
role_dir = os.path.join(roles_root, role)
|
||||
_write_role_scaffold(role_dir)
|
||||
|
|
@ -1859,6 +1914,8 @@ Generated from `{unit}`.
|
|||
notify_systemd="Run systemd daemon-reload",
|
||||
)
|
||||
|
||||
links_var = _build_managed_links_var(managed_links)
|
||||
|
||||
dirs_var = _build_managed_dirs_var(managed_dirs)
|
||||
|
||||
jt_map = _yaml_load_mapping(jt_vars) if jt_vars.strip() else {}
|
||||
|
|
@ -1866,6 +1923,7 @@ Generated from `{unit}`.
|
|||
f"{var_prefix}_packages": pkgs,
|
||||
f"{var_prefix}_managed_files": files_var,
|
||||
f"{var_prefix}_managed_dirs": dirs_var,
|
||||
f"{var_prefix}_managed_links": links_var,
|
||||
}
|
||||
base_vars = _merge_mappings_overwrite(base_vars, jt_map)
|
||||
|
||||
|
|
@ -1876,6 +1934,7 @@ Generated from `{unit}`.
|
|||
f"{var_prefix}_packages": [],
|
||||
f"{var_prefix}_managed_files": [],
|
||||
f"{var_prefix}_managed_dirs": [],
|
||||
f"{var_prefix}_managed_links": [],
|
||||
},
|
||||
)
|
||||
_write_hostvars(out_dir, fqdn or "", role, base_vars)
|
||||
|
|
@ -1918,6 +1977,9 @@ Generated for package `{pkg}`.
|
|||
## Managed files
|
||||
{os.linesep.join("- " + mf["path"] + " (" + mf["reason"] + ")" for mf in managed_files) or "- (none)"}
|
||||
|
||||
## Managed symlinks
|
||||
{os.linesep.join("- " + ml["path"] + " -> " + ml["target"] + " (" + ml.get("reason", "") + ")" for ml in managed_links) or "- (none)"}
|
||||
|
||||
## Excluded (possible secrets / unsafe)
|
||||
{os.linesep.join("- " + e["path"] + " (" + e["reason"] + ")" for e in excluded) or "- (none)"}
|
||||
|
||||
|
|
@ -1930,15 +1992,26 @@ Generated for package `{pkg}`.
|
|||
f.write(readme)
|
||||
|
||||
manifested_pkg_roles.append(role)
|
||||
# Place cron/logrotate at the end of the playbook so:
|
||||
# - users exist before we restore per-user crontabs in /var/spool
|
||||
# - most packages/services are installed/configured first
|
||||
tail_roles: List[str] = []
|
||||
for r in ("cron", "logrotate"):
|
||||
if r in manifested_pkg_roles:
|
||||
tail_roles.append(r)
|
||||
|
||||
main_pkg_roles = [r for r in manifested_pkg_roles if r not in set(tail_roles)]
|
||||
|
||||
all_roles = (
|
||||
manifested_apt_config_roles
|
||||
+ manifested_dnf_config_roles
|
||||
+ manifested_pkg_roles
|
||||
+ main_pkg_roles
|
||||
+ manifested_service_roles
|
||||
+ manifested_etc_custom_roles
|
||||
+ manifested_usr_local_custom_roles
|
||||
+ manifested_extra_paths_roles
|
||||
+ manifested_users_roles
|
||||
+ tail_roles
|
||||
)
|
||||
|
||||
if site_mode:
|
||||
|
|
|
|||
110
enroll/remote.py
110
enroll/remote.py
|
|
@ -330,8 +330,9 @@ def _remote_harvest(
|
|||
*,
|
||||
local_out_dir: Path,
|
||||
remote_host: str,
|
||||
remote_port: int = 22,
|
||||
remote_port: Optional[int] = None,
|
||||
remote_user: Optional[str] = None,
|
||||
remote_ssh_config: Optional[str] = None,
|
||||
remote_python: str = "python3",
|
||||
dangerous: bool = False,
|
||||
no_sudo: bool = False,
|
||||
|
|
@ -370,12 +371,113 @@ def _remote_harvest(
|
|||
# Users should add the key to known_hosts.
|
||||
ssh.set_missing_host_key_policy(paramiko.RejectPolicy())
|
||||
|
||||
# Resolve SSH connection parameters.
|
||||
connect_host = remote_host
|
||||
connect_port = int(remote_port) if remote_port is not None else 22
|
||||
connect_user = remote_user
|
||||
key_filename = None
|
||||
sock = None
|
||||
hostkey_name = connect_host
|
||||
|
||||
# Timeouts derived from ssh_config if set (ConnectTimeout).
|
||||
# Used both for socket connect (when we create one) and Paramiko handshake/auth.
|
||||
connect_timeout: Optional[float] = None
|
||||
|
||||
if remote_ssh_config:
|
||||
from paramiko.config import SSHConfig # type: ignore
|
||||
from paramiko.proxy import ProxyCommand # type: ignore
|
||||
import socket as _socket
|
||||
|
||||
cfg_path = Path(str(remote_ssh_config)).expanduser()
|
||||
if not cfg_path.exists():
|
||||
raise RuntimeError(f"SSH config file not found: {cfg_path}")
|
||||
|
||||
cfg = SSHConfig()
|
||||
with cfg_path.open("r", encoding="utf-8") as _fp:
|
||||
cfg.parse(_fp)
|
||||
hcfg = cfg.lookup(remote_host)
|
||||
|
||||
connect_host = str(hcfg.get("hostname") or remote_host)
|
||||
hostkey_name = str(hcfg.get("hostkeyalias") or connect_host)
|
||||
|
||||
if remote_port is None and hcfg.get("port"):
|
||||
try:
|
||||
connect_port = int(str(hcfg.get("port")))
|
||||
except ValueError:
|
||||
pass
|
||||
if connect_user is None and hcfg.get("user"):
|
||||
connect_user = str(hcfg.get("user"))
|
||||
|
||||
ident = hcfg.get("identityfile")
|
||||
if ident:
|
||||
if isinstance(ident, (list, tuple)):
|
||||
key_filename = [str(Path(p).expanduser()) for p in ident]
|
||||
else:
|
||||
key_filename = str(Path(str(ident)).expanduser())
|
||||
|
||||
# Honour OpenSSH ConnectTimeout (seconds) if present.
|
||||
if hcfg.get("connecttimeout"):
|
||||
try:
|
||||
connect_timeout = float(str(hcfg.get("connecttimeout")))
|
||||
except (TypeError, ValueError):
|
||||
connect_timeout = None
|
||||
|
||||
proxycmd = hcfg.get("proxycommand")
|
||||
|
||||
# AddressFamily support: inet (IPv4 only), inet6 (IPv6 only), any (default).
|
||||
addrfam = str(hcfg.get("addressfamily") or "any").strip().lower()
|
||||
family: Optional[int] = None
|
||||
if addrfam == "inet":
|
||||
family = _socket.AF_INET
|
||||
elif addrfam == "inet6":
|
||||
family = _socket.AF_INET6
|
||||
|
||||
if proxycmd:
|
||||
# ProxyCommand provides the transport; AddressFamily doesn't apply here.
|
||||
sock = ProxyCommand(str(proxycmd))
|
||||
elif family is not None:
|
||||
# Enforce the requested address family by pre-connecting the socket and
|
||||
# passing it into Paramiko via sock=.
|
||||
last_err: Optional[OSError] = None
|
||||
infos = _socket.getaddrinfo(
|
||||
connect_host, connect_port, family, _socket.SOCK_STREAM
|
||||
)
|
||||
for af, socktype, proto, _, sa in infos:
|
||||
s = _socket.socket(af, socktype, proto)
|
||||
if connect_timeout is not None:
|
||||
s.settimeout(connect_timeout)
|
||||
try:
|
||||
s.connect(sa)
|
||||
sock = s
|
||||
break
|
||||
except OSError as e:
|
||||
last_err = e
|
||||
try:
|
||||
s.close()
|
||||
except Exception:
|
||||
pass # nosec
|
||||
if sock is None and last_err is not None:
|
||||
raise last_err
|
||||
elif hostkey_name != connect_host:
|
||||
# If HostKeyAlias is used, connect to HostName via a socket but
|
||||
# use HostKeyAlias for known_hosts lookups.
|
||||
sock = _socket.create_connection(
|
||||
(connect_host, connect_port), timeout=connect_timeout
|
||||
)
|
||||
|
||||
# If we created a socket (sock!=None), pass hostkey_name as hostname so
|
||||
# known_hosts lookup uses HostKeyAlias (or whatever hostkey_name resolved to).
|
||||
ssh.connect(
|
||||
hostname=remote_host,
|
||||
port=int(remote_port),
|
||||
username=remote_user,
|
||||
hostname=hostkey_name if sock is not None else connect_host,
|
||||
port=connect_port,
|
||||
username=connect_user,
|
||||
key_filename=key_filename,
|
||||
sock=sock,
|
||||
allow_agent=True,
|
||||
look_for_keys=True,
|
||||
timeout=connect_timeout,
|
||||
banner_timeout=connect_timeout,
|
||||
auth_timeout=connect_timeout,
|
||||
)
|
||||
|
||||
# If no username was explicitly provided, SSH may have selected a default.
|
||||
|
|
|
|||
4
enroll/schema/__init__.py
Normal file
4
enroll/schema/__init__.py
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
"""Vendored JSON schemas.
|
||||
|
||||
These are used by `enroll validate` so validation can run offline.
|
||||
"""
|
||||
712
enroll/schema/state.schema.json
Normal file
712
enroll/schema/state.schema.json
Normal file
|
|
@ -0,0 +1,712 @@
|
|||
{
|
||||
"$defs": {
|
||||
"AptConfigSnapshot": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/$defs/RoleCommon"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"role_name": {
|
||||
"const": "apt_config"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
}
|
||||
],
|
||||
"unevaluatedProperties": false
|
||||
},
|
||||
"DnfConfigSnapshot": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/$defs/RoleCommon"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"role_name": {
|
||||
"const": "dnf_config"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
}
|
||||
],
|
||||
"unevaluatedProperties": false
|
||||
},
|
||||
"EtcCustomSnapshot": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/$defs/RoleCommon"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"role_name": {
|
||||
"const": "etc_custom"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
}
|
||||
],
|
||||
"unevaluatedProperties": false
|
||||
},
|
||||
"ExcludedFile": {
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"path": {
|
||||
"minLength": 1,
|
||||
"pattern": "^/.*",
|
||||
"type": "string"
|
||||
},
|
||||
"reason": {
|
||||
"enum": [
|
||||
"user_excluded",
|
||||
"unreadable",
|
||||
"backup_file",
|
||||
"log_file",
|
||||
"denied_path",
|
||||
"too_large",
|
||||
"not_regular_file",
|
||||
"not_symlink",
|
||||
"binary_like",
|
||||
"sensitive_content"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"path",
|
||||
"reason"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"ExtraPathsSnapshot": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/$defs/RoleCommon"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"exclude_patterns": {
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"include_patterns": {
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"role_name": {
|
||||
"const": "extra_paths"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"include_patterns",
|
||||
"exclude_patterns"
|
||||
],
|
||||
"type": "object"
|
||||
}
|
||||
],
|
||||
"unevaluatedProperties": false
|
||||
},
|
||||
"InstalledPackageInstance": {
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"arch": {
|
||||
"minLength": 1,
|
||||
"type": "string"
|
||||
},
|
||||
"version": {
|
||||
"minLength": 1,
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"version",
|
||||
"arch"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"ManagedDir": {
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"group": {
|
||||
"minLength": 1,
|
||||
"type": "string"
|
||||
},
|
||||
"mode": {
|
||||
"pattern": "^[0-7]{4}$",
|
||||
"type": "string"
|
||||
},
|
||||
"owner": {
|
||||
"minLength": 1,
|
||||
"type": "string"
|
||||
},
|
||||
"path": {
|
||||
"minLength": 1,
|
||||
"pattern": "^/.*",
|
||||
"type": "string"
|
||||
},
|
||||
"reason": {
|
||||
"enum": [
|
||||
"parent_of_managed_file",
|
||||
"user_include_dir"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"path",
|
||||
"owner",
|
||||
"group",
|
||||
"mode",
|
||||
"reason"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"ManagedFile": {
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"group": {
|
||||
"minLength": 1,
|
||||
"type": "string"
|
||||
},
|
||||
"mode": {
|
||||
"pattern": "^[0-7]{4}$",
|
||||
"type": "string"
|
||||
},
|
||||
"owner": {
|
||||
"minLength": 1,
|
||||
"type": "string"
|
||||
},
|
||||
"path": {
|
||||
"minLength": 1,
|
||||
"pattern": "^/.*",
|
||||
"type": "string"
|
||||
},
|
||||
"reason": {
|
||||
"enum": [
|
||||
"apt_config",
|
||||
"apt_keyring",
|
||||
"apt_signed_by_keyring",
|
||||
"apt_source",
|
||||
"authorized_keys",
|
||||
"cron_snippet",
|
||||
"custom_specific_path",
|
||||
"custom_unowned",
|
||||
"dnf_config",
|
||||
"logrotate_snippet",
|
||||
"modified_conffile",
|
||||
"modified_packaged_file",
|
||||
"related_timer",
|
||||
"rpm_gpg_key",
|
||||
"ssh_public_key",
|
||||
"system_cron",
|
||||
"system_firewall",
|
||||
"system_logrotate",
|
||||
"system_modprobe",
|
||||
"system_mounts",
|
||||
"system_network",
|
||||
"system_rc",
|
||||
"system_security",
|
||||
"system_sysctl",
|
||||
"systemd_dropin",
|
||||
"systemd_envfile",
|
||||
"user_include",
|
||||
"user_profile",
|
||||
"user_shell_aliases",
|
||||
"user_shell_logout",
|
||||
"user_shell_rc",
|
||||
"usr_local_bin_script",
|
||||
"usr_local_etc_custom",
|
||||
"yum_conf",
|
||||
"yum_config",
|
||||
"yum_repo"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"src_rel": {
|
||||
"minLength": 1,
|
||||
"pattern": "^[^/].*",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"path",
|
||||
"src_rel",
|
||||
"owner",
|
||||
"group",
|
||||
"mode",
|
||||
"reason"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"ManagedLink": {
|
||||
"additionalProperties": false,
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"path": {
|
||||
"type": "string",
|
||||
"minLength": 1,
|
||||
"pattern": "^/.*"
|
||||
},
|
||||
"target": {
|
||||
"type": "string",
|
||||
"minLength": 1
|
||||
},
|
||||
"reason": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"enabled_symlink"
|
||||
]
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"path",
|
||||
"target",
|
||||
"reason"
|
||||
]
|
||||
},
|
||||
"ObservedVia": {
|
||||
"oneOf": [
|
||||
{
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"kind": {
|
||||
"const": "user_installed"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"kind"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"kind": {
|
||||
"const": "systemd_unit"
|
||||
},
|
||||
"ref": {
|
||||
"minLength": 1,
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"kind",
|
||||
"ref"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"kind": {
|
||||
"const": "package_role"
|
||||
},
|
||||
"ref": {
|
||||
"minLength": 1,
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"kind",
|
||||
"ref"
|
||||
],
|
||||
"type": "object"
|
||||
}
|
||||
]
|
||||
},
|
||||
"PackageInventoryEntry": {
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"arches": {
|
||||
"items": {
|
||||
"minLength": 1,
|
||||
"type": "string"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"installations": {
|
||||
"items": {
|
||||
"$ref": "#/$defs/InstalledPackageInstance"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"observed_via": {
|
||||
"items": {
|
||||
"$ref": "#/$defs/ObservedVia"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"roles": {
|
||||
"items": {
|
||||
"minLength": 1,
|
||||
"type": "string"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"version": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"version",
|
||||
"arches",
|
||||
"installations",
|
||||
"observed_via",
|
||||
"roles"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"PackageSnapshot": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/$defs/RoleCommon"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"package": {
|
||||
"minLength": 1,
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"package"
|
||||
],
|
||||
"type": "object"
|
||||
}
|
||||
],
|
||||
"unevaluatedProperties": false
|
||||
},
|
||||
"RoleCommon": {
|
||||
"properties": {
|
||||
"excluded": {
|
||||
"items": {
|
||||
"$ref": "#/$defs/ExcludedFile"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"managed_dirs": {
|
||||
"items": {
|
||||
"$ref": "#/$defs/ManagedDir"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"managed_files": {
|
||||
"items": {
|
||||
"$ref": "#/$defs/ManagedFile"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"managed_links": {
|
||||
"items": {
|
||||
"$ref": "#/$defs/ManagedLink"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"notes": {
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"role_name": {
|
||||
"minLength": 1,
|
||||
"pattern": "^[A-Za-z0-9_]+$",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"role_name",
|
||||
"managed_dirs",
|
||||
"managed_files",
|
||||
"excluded",
|
||||
"notes"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"ServiceSnapshot": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/$defs/RoleCommon"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"active_state": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"condition_result": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"packages": {
|
||||
"items": {
|
||||
"minLength": 1,
|
||||
"type": "string"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"role_name": {
|
||||
"minLength": 1,
|
||||
"pattern": "^[a-z_][a-z0-9_]*$",
|
||||
"type": "string"
|
||||
},
|
||||
"sub_state": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"unit": {
|
||||
"minLength": 1,
|
||||
"type": "string"
|
||||
},
|
||||
"unit_file_state": {
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
]
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"unit",
|
||||
"packages",
|
||||
"active_state",
|
||||
"sub_state",
|
||||
"unit_file_state",
|
||||
"condition_result"
|
||||
],
|
||||
"type": "object"
|
||||
}
|
||||
],
|
||||
"unevaluatedProperties": false
|
||||
},
|
||||
"UserEntry": {
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"gecos": {
|
||||
"type": "string"
|
||||
},
|
||||
"gid": {
|
||||
"minimum": 0,
|
||||
"type": "integer"
|
||||
},
|
||||
"home": {
|
||||
"type": "string"
|
||||
},
|
||||
"name": {
|
||||
"minLength": 1,
|
||||
"type": "string"
|
||||
},
|
||||
"primary_group": {
|
||||
"minLength": 1,
|
||||
"type": "string"
|
||||
},
|
||||
"shell": {
|
||||
"type": "string"
|
||||
},
|
||||
"supplementary_groups": {
|
||||
"items": {
|
||||
"minLength": 1,
|
||||
"type": "string"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"uid": {
|
||||
"minimum": 0,
|
||||
"type": "integer"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"name",
|
||||
"uid",
|
||||
"gid",
|
||||
"gecos",
|
||||
"home",
|
||||
"shell",
|
||||
"primary_group",
|
||||
"supplementary_groups"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"UsersSnapshot": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/$defs/RoleCommon"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"role_name": {
|
||||
"const": "users"
|
||||
},
|
||||
"users": {
|
||||
"items": {
|
||||
"$ref": "#/$defs/UserEntry"
|
||||
},
|
||||
"type": "array"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"users"
|
||||
],
|
||||
"type": "object"
|
||||
}
|
||||
],
|
||||
"unevaluatedProperties": false
|
||||
},
|
||||
"UsrLocalCustomSnapshot": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/$defs/RoleCommon"
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"role_name": {
|
||||
"const": "usr_local_custom"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
}
|
||||
],
|
||||
"unevaluatedProperties": false
|
||||
}
|
||||
},
|
||||
"$id": "https://enroll.sh/schema/state.schema.json",
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"enroll": {
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"harvest_time": {
|
||||
"minimum": 0,
|
||||
"type": "integer"
|
||||
},
|
||||
"version": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"version",
|
||||
"harvest_time"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"host": {
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"hostname": {
|
||||
"minLength": 1,
|
||||
"type": "string"
|
||||
},
|
||||
"os": {
|
||||
"enum": [
|
||||
"debian",
|
||||
"redhat",
|
||||
"unknown"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"os_release": {
|
||||
"additionalProperties": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"pkg_backend": {
|
||||
"enum": [
|
||||
"dpkg",
|
||||
"rpm"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"hostname",
|
||||
"os",
|
||||
"pkg_backend",
|
||||
"os_release"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"inventory": {
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"packages": {
|
||||
"additionalProperties": {
|
||||
"$ref": "#/$defs/PackageInventoryEntry"
|
||||
},
|
||||
"type": "object"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"packages"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"roles": {
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"apt_config": {
|
||||
"$ref": "#/$defs/AptConfigSnapshot"
|
||||
},
|
||||
"dnf_config": {
|
||||
"$ref": "#/$defs/DnfConfigSnapshot"
|
||||
},
|
||||
"etc_custom": {
|
||||
"$ref": "#/$defs/EtcCustomSnapshot"
|
||||
},
|
||||
"extra_paths": {
|
||||
"$ref": "#/$defs/ExtraPathsSnapshot"
|
||||
},
|
||||
"packages": {
|
||||
"items": {
|
||||
"$ref": "#/$defs/PackageSnapshot"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"services": {
|
||||
"items": {
|
||||
"$ref": "#/$defs/ServiceSnapshot"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"users": {
|
||||
"$ref": "#/$defs/UsersSnapshot"
|
||||
},
|
||||
"usr_local_custom": {
|
||||
"$ref": "#/$defs/UsrLocalCustomSnapshot"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"users",
|
||||
"services",
|
||||
"packages",
|
||||
"apt_config",
|
||||
"dnf_config",
|
||||
"etc_custom",
|
||||
"usr_local_custom",
|
||||
"extra_paths"
|
||||
],
|
||||
"type": "object"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"enroll",
|
||||
"host",
|
||||
"inventory",
|
||||
"roles"
|
||||
],
|
||||
"title": "Enroll harvest state.json schema (latest)",
|
||||
"type": "object"
|
||||
}
|
||||
223
enroll/validate.py
Normal file
223
enroll/validate.py
Normal file
|
|
@ -0,0 +1,223 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import urllib.request
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional, Set, Tuple
|
||||
|
||||
import jsonschema
|
||||
|
||||
from .diff import BundleRef, _bundle_from_input
|
||||
|
||||
|
||||
@dataclass
class ValidationResult:
    """Outcome of a harvest validation run.

    Errors make the result a failure; warnings alone still count as OK.
    """

    errors: List[str]
    warnings: List[str]

    @property
    def ok(self) -> bool:
        """True when no errors were recorded (warnings do not fail validation)."""
        return len(self.errors) == 0

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the result as a plain dict; the lists are defensively copied."""
        return {
            "ok": self.ok,
            "errors": list(self.errors),
            "warnings": list(self.warnings),
        }

    def to_text(self) -> str:
        """Render a human-readable report, always ending with a newline."""
        if self.errors:
            header = f"ERROR: {len(self.errors)} validation error(s)"
        elif self.warnings:
            header = f"WARN: {len(self.warnings)} warning(s)"
        else:
            header = "OK: harvest bundle validated"

        out: List[str] = [header]
        # Each non-empty section gets a blank separator line, a title, and bullets.
        for title, items in (("Errors", self.errors), ("Warnings", self.warnings)):
            if items:
                out.append("")
                out.append(f"{title}:")
                out.extend(f"- {item}" for item in items)
        return "\n".join(out) + "\n"
|
||||
|
||||
|
||||
def _default_schema_path() -> Path:
|
||||
# Keep the schema vendored with the codebase so enroll can validate offline.
|
||||
return Path(__file__).resolve().parent / "schema" / "state.schema.json"
|
||||
|
||||
|
||||
def _load_schema(schema: Optional[str]) -> Dict[str, Any]:
|
||||
"""Load a JSON schema.
|
||||
|
||||
If schema is None, load the vendored schema.
|
||||
If schema begins with http(s)://, fetch it.
|
||||
Otherwise, treat it as a local file path.
|
||||
"""
|
||||
|
||||
if not schema:
|
||||
p = _default_schema_path()
|
||||
with open(p, "r", encoding="utf-8") as f:
|
||||
return json.load(f)
|
||||
|
||||
if schema.startswith("http://") or schema.startswith("https://"):
|
||||
with urllib.request.urlopen(schema, timeout=10) as resp: # nosec
|
||||
data = resp.read()
|
||||
return json.loads(data.decode("utf-8"))
|
||||
|
||||
p = Path(schema).expanduser()
|
||||
with open(p, "r", encoding="utf-8") as f:
|
||||
return json.load(f)
|
||||
|
||||
|
||||
def _json_pointer(err: jsonschema.ValidationError) -> str:
|
||||
# Build a JSON pointer-ish path that is easy to read.
|
||||
if err.absolute_path:
|
||||
parts = [str(p) for p in err.absolute_path]
|
||||
return "/" + "/".join(parts)
|
||||
return "/"
|
||||
|
||||
|
||||
def _iter_managed_files(state: Dict[str, Any]) -> List[Tuple[str, Dict[str, Any]]]:
|
||||
"""Return (role_name, managed_file_dict) tuples across all roles."""
|
||||
|
||||
roles = state.get("roles") or {}
|
||||
out: List[Tuple[str, Dict[str, Any]]] = []
|
||||
|
||||
# Singleton roles
|
||||
for rn in [
|
||||
"users",
|
||||
"apt_config",
|
||||
"dnf_config",
|
||||
"etc_custom",
|
||||
"usr_local_custom",
|
||||
"extra_paths",
|
||||
]:
|
||||
snap = roles.get(rn) or {}
|
||||
for mf in snap.get("managed_files") or []:
|
||||
if isinstance(mf, dict):
|
||||
out.append((rn, mf))
|
||||
|
||||
# Array roles
|
||||
for s in roles.get("services") or []:
|
||||
if not isinstance(s, dict):
|
||||
continue
|
||||
role_name = str(s.get("role_name") or "unknown")
|
||||
for mf in s.get("managed_files") or []:
|
||||
if isinstance(mf, dict):
|
||||
out.append((role_name, mf))
|
||||
|
||||
for p in roles.get("packages") or []:
|
||||
if not isinstance(p, dict):
|
||||
continue
|
||||
role_name = str(p.get("role_name") or "unknown")
|
||||
for mf in p.get("managed_files") or []:
|
||||
if isinstance(mf, dict):
|
||||
out.append((role_name, mf))
|
||||
|
||||
return out
|
||||
|
||||
|
||||
def validate_harvest(
    harvest_input: str,
    *,
    sops_mode: bool = False,
    schema: Optional[str] = None,
    no_schema: bool = False,
) -> ValidationResult:
    """Validate an enroll harvest bundle.

    Checks:
    - state.json parses
    - state.json validates against the schema (unless no_schema)
    - every managed_file src_rel exists in artifacts/<role>/<src_rel>

    Args:
        harvest_input: Bundle reference passed to ``_bundle_from_input``
            (presumably a directory or archive path; confirm against diff.py).
        sops_mode: Forwarded to ``_bundle_from_input``; semantics live there.
        schema: Optional schema override (vendored default, URL, or file path —
            see ``_load_schema``).
        no_schema: Skip JSON-schema validation entirely when True.

    Returns:
        A ValidationResult; missing/unparseable state.json short-circuits with
        a single error, otherwise all errors and warnings are accumulated.
    """

    errors: List[str] = []
    warnings: List[str] = []

    bundle: BundleRef = _bundle_from_input(harvest_input, sops_mode=sops_mode)
    try:
        state_path = bundle.state_path
        # Without state.json nothing else can be checked — fail fast.
        if not state_path.exists():
            return ValidationResult(
                errors=[f"missing state.json at {state_path}"], warnings=[]
            )

        try:
            state = json.loads(state_path.read_text(encoding="utf-8"))
        except Exception as e:  # noqa: BLE001
            # Any parse/read failure is terminal; report it as the sole error.
            return ValidationResult(
                errors=[f"failed to parse state.json: {e!r}"], warnings=[]
            )

        if not no_schema:
            try:
                sch = _load_schema(schema)
                validator = jsonschema.Draft202012Validator(sch)
                # Sort by string form so the error order is deterministic.
                for err in sorted(validator.iter_errors(state), key=str):
                    ptr = _json_pointer(err)
                    msg = err.message
                    errors.append(f"schema {ptr}: {msg}")
            except Exception as e:  # noqa: BLE001
                # Schema load/validation problems are recorded but do not stop
                # the artifact checks below.
                errors.append(f"failed to load/validate schema: {e!r}")

        # Artifact existence checks: every managed_file's src_rel must resolve
        # to a regular file under artifacts/<role>/.
        artifacts_dir = bundle.dir / "artifacts"
        referenced: Set[Tuple[str, str]] = set()
        for role_name, mf in _iter_managed_files(state):
            src_rel = str(mf.get("src_rel") or "")
            if not src_rel:
                errors.append(
                    f"managed_file missing src_rel for role {role_name} (path={mf.get('path')!r})"
                )
                continue
            # Reject absolute paths and ".." segments so a crafted src_rel
            # cannot escape the artifacts directory.
            if src_rel.startswith("/") or ".." in src_rel.split("/"):
                errors.append(
                    f"managed_file has suspicious src_rel for role {role_name}: {src_rel!r}"
                )
                continue

            referenced.add((role_name, src_rel))
            p = artifacts_dir / role_name / src_rel
            if not p.exists():
                errors.append(
                    f"missing artifact for role {role_name}: artifacts/{role_name}/{src_rel}"
                )
                continue
            if not p.is_file():
                errors.append(
                    f"artifact is not a file for role {role_name}: artifacts/{role_name}/{src_rel}"
                )

        # Warn if there are extra files in artifacts not referenced.
        if artifacts_dir.exists() and artifacts_dir.is_dir():
            for fp in artifacts_dir.rglob("*"):
                if not fp.is_file():
                    continue
                try:
                    rel = fp.relative_to(artifacts_dir)
                except ValueError:
                    continue
                parts = rel.parts
                # Files directly under artifacts/ (no role dir) are ignored.
                if len(parts) < 2:
                    continue
                role_name = parts[0]
                src_rel = "/".join(parts[1:])
                if (role_name, src_rel) not in referenced:
                    warnings.append(
                        f"unreferenced artifact present: artifacts/{role_name}/{src_rel}"
                    )

        return ValidationResult(errors=errors, warnings=warnings)
    finally:
        # Ensure any temp extraction dirs are cleaned up, even on the early
        # returns above.
        if bundle.tempdir is not None:
            bundle.tempdir.cleanup()
|
||||
531
poetry.lock
generated
531
poetry.lock
generated
|
|
@ -1,5 +1,16 @@
|
|||
# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
|
||||
|
||||
[[package]]
|
||||
name = "attrs"
|
||||
version = "25.4.0"
|
||||
description = "Classes Without Boilerplate"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
files = [
|
||||
{file = "attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373"},
|
||||
{file = "attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bcrypt"
|
||||
version = "5.0.0"
|
||||
|
|
@ -78,13 +89,13 @@ typecheck = ["mypy"]
|
|||
|
||||
[[package]]
|
||||
name = "certifi"
|
||||
version = "2025.11.12"
|
||||
version = "2026.1.4"
|
||||
description = "Python package for providing Mozilla's CA Bundle."
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b"},
|
||||
{file = "certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316"},
|
||||
{file = "certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c"},
|
||||
{file = "certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -318,103 +329,103 @@ files = [
|
|||
|
||||
[[package]]
|
||||
name = "coverage"
|
||||
version = "7.13.0"
|
||||
version = "7.13.1"
|
||||
description = "Code coverage measurement for Python"
|
||||
optional = false
|
||||
python-versions = ">=3.10"
|
||||
files = [
|
||||
{file = "coverage-7.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:02d9fb9eccd48f6843c98a37bd6817462f130b86da8660461e8f5e54d4c06070"},
|
||||
{file = "coverage-7.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:367449cf07d33dc216c083f2036bb7d976c6e4903ab31be400ad74ad9f85ce98"},
|
||||
{file = "coverage-7.13.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cdb3c9f8fef0a954c632f64328a3935988d33a6604ce4bf67ec3e39670f12ae5"},
|
||||
{file = "coverage-7.13.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d10fd186aac2316f9bbb46ef91977f9d394ded67050ad6d84d94ed6ea2e8e54e"},
|
||||
{file = "coverage-7.13.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f88ae3e69df2ab62fb0bc5219a597cb890ba5c438190ffa87490b315190bb33"},
|
||||
{file = "coverage-7.13.0-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c4be718e51e86f553bcf515305a158a1cd180d23b72f07ae76d6017c3cc5d791"},
|
||||
{file = "coverage-7.13.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a00d3a393207ae12f7c49bb1c113190883b500f48979abb118d8b72b8c95c032"},
|
||||
{file = "coverage-7.13.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a7b1cd820e1b6116f92c6128f1188e7afe421c7e1b35fa9836b11444e53ebd9"},
|
||||
{file = "coverage-7.13.0-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:37eee4e552a65866f15dedd917d5e5f3d59805994260720821e2c1b51ac3248f"},
|
||||
{file = "coverage-7.13.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:62d7c4f13102148c78d7353c6052af6d899a7f6df66a32bddcc0c0eb7c5326f8"},
|
||||
{file = "coverage-7.13.0-cp310-cp310-win32.whl", hash = "sha256:24e4e56304fdb56f96f80eabf840eab043b3afea9348b88be680ec5986780a0f"},
|
||||
{file = "coverage-7.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:74c136e4093627cf04b26a35dab8cbfc9b37c647f0502fc313376e11726ba303"},
|
||||
{file = "coverage-7.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0dfa3855031070058add1a59fdfda0192fd3e8f97e7c81de0596c145dea51820"},
|
||||
{file = "coverage-7.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4fdb6f54f38e334db97f72fa0c701e66d8479af0bc3f9bfb5b90f1c30f54500f"},
|
||||
{file = "coverage-7.13.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7e442c013447d1d8d195be62852270b78b6e255b79b8675bad8479641e21fd96"},
|
||||
{file = "coverage-7.13.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1ed5630d946859de835a85e9a43b721123a8a44ec26e2830b296d478c7fd4259"},
|
||||
{file = "coverage-7.13.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f15a931a668e58087bc39d05d2b4bf4b14ff2875b49c994bbdb1c2217a8daeb"},
|
||||
{file = "coverage-7.13.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:30a3a201a127ea57f7e14ba43c93c9c4be8b7d17a26e03bb49e6966d019eede9"},
|
||||
{file = "coverage-7.13.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7a485ff48fbd231efa32d58f479befce52dcb6bfb2a88bb7bf9a0b89b1bc8030"},
|
||||
{file = "coverage-7.13.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:22486cdafba4f9e471c816a2a5745337742a617fef68e890d8baf9f3036d7833"},
|
||||
{file = "coverage-7.13.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:263c3dbccc78e2e331e59e90115941b5f53e85cfcc6b3b2fbff1fd4e3d2c6ea8"},
|
||||
{file = "coverage-7.13.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e5330fa0cc1f5c3c4c3bb8e101b742025933e7848989370a1d4c8c5e401ea753"},
|
||||
{file = "coverage-7.13.0-cp311-cp311-win32.whl", hash = "sha256:0f4872f5d6c54419c94c25dd6ae1d015deeb337d06e448cd890a1e89a8ee7f3b"},
|
||||
{file = "coverage-7.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51a202e0f80f241ccb68e3e26e19ab5b3bf0f813314f2c967642f13ebcf1ddfe"},
|
||||
{file = "coverage-7.13.0-cp311-cp311-win_arm64.whl", hash = "sha256:d2a9d7f1c11487b1c69367ab3ac2d81b9b3721f097aa409a3191c3e90f8f3dd7"},
|
||||
{file = "coverage-7.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0b3d67d31383c4c68e19a88e28fc4c2e29517580f1b0ebec4a069d502ce1e0bf"},
|
||||
{file = "coverage-7.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:581f086833d24a22c89ae0fe2142cfaa1c92c930adf637ddf122d55083fb5a0f"},
|
||||
{file = "coverage-7.13.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0a3a30f0e257df382f5f9534d4ce3d4cf06eafaf5192beb1a7bd066cb10e78fb"},
|
||||
{file = "coverage-7.13.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:583221913fbc8f53b88c42e8dbb8fca1d0f2e597cb190ce45916662b8b9d9621"},
|
||||
{file = "coverage-7.13.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f5d9bd30756fff3e7216491a0d6d520c448d5124d3d8e8f56446d6412499e74"},
|
||||
{file = "coverage-7.13.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a23e5a1f8b982d56fa64f8e442e037f6ce29322f1f9e6c2344cd9e9f4407ee57"},
|
||||
{file = "coverage-7.13.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9b01c22bc74a7fb44066aaf765224c0d933ddf1f5047d6cdfe4795504a4493f8"},
|
||||
{file = "coverage-7.13.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:898cce66d0836973f48dda4e3514d863d70142bdf6dfab932b9b6a90ea5b222d"},
|
||||
{file = "coverage-7.13.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:3ab483ea0e251b5790c2aac03acde31bff0c736bf8a86829b89382b407cd1c3b"},
|
||||
{file = "coverage-7.13.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1d84e91521c5e4cb6602fe11ece3e1de03b2760e14ae4fcf1a4b56fa3c801fcd"},
|
||||
{file = "coverage-7.13.0-cp312-cp312-win32.whl", hash = "sha256:193c3887285eec1dbdb3f2bd7fbc351d570ca9c02ca756c3afbc71b3c98af6ef"},
|
||||
{file = "coverage-7.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:4f3e223b2b2db5e0db0c2b97286aba0036ca000f06aca9b12112eaa9af3d92ae"},
|
||||
{file = "coverage-7.13.0-cp312-cp312-win_arm64.whl", hash = "sha256:086cede306d96202e15a4b77ace8472e39d9f4e5f9fd92dd4fecdfb2313b2080"},
|
||||
{file = "coverage-7.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:28ee1c96109974af104028a8ef57cec21447d42d0e937c0275329272e370ebcf"},
|
||||
{file = "coverage-7.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d1e97353dcc5587b85986cda4ff3ec98081d7e84dd95e8b2a6d59820f0545f8a"},
|
||||
{file = "coverage-7.13.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:99acd4dfdfeb58e1937629eb1ab6ab0899b131f183ee5f23e0b5da5cba2fec74"},
|
||||
{file = "coverage-7.13.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:ff45e0cd8451e293b63ced93161e189780baf444119391b3e7d25315060368a6"},
|
||||
{file = "coverage-7.13.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f4f72a85316d8e13234cafe0a9f81b40418ad7a082792fa4165bd7d45d96066b"},
|
||||
{file = "coverage-7.13.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:11c21557d0e0a5a38632cbbaca5f008723b26a89d70db6315523df6df77d6232"},
|
||||
{file = "coverage-7.13.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:76541dc8d53715fb4f7a3a06b34b0dc6846e3c69bc6204c55653a85dd6220971"},
|
||||
{file = "coverage-7.13.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:6e9e451dee940a86789134b6b0ffbe31c454ade3b849bb8a9d2cca2541a8e91d"},
|
||||
{file = "coverage-7.13.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:5c67dace46f361125e6b9cace8fe0b729ed8479f47e70c89b838d319375c8137"},
|
||||
{file = "coverage-7.13.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f59883c643cb19630500f57016f76cfdcd6845ca8c5b5ea1f6e17f74c8e5f511"},
|
||||
{file = "coverage-7.13.0-cp313-cp313-win32.whl", hash = "sha256:58632b187be6f0be500f553be41e277712baa278147ecb7559983c6d9faf7ae1"},
|
||||
{file = "coverage-7.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:73419b89f812f498aca53f757dd834919b48ce4799f9d5cad33ca0ae442bdb1a"},
|
||||
{file = "coverage-7.13.0-cp313-cp313-win_arm64.whl", hash = "sha256:eb76670874fdd6091eedcc856128ee48c41a9bbbb9c3f1c7c3cf169290e3ffd6"},
|
||||
{file = "coverage-7.13.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:6e63ccc6e0ad8986386461c3c4b737540f20426e7ec932f42e030320896c311a"},
|
||||
{file = "coverage-7.13.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:494f5459ffa1bd45e18558cd98710c36c0b8fbfa82a5eabcbe671d80ecffbfe8"},
|
||||
{file = "coverage-7.13.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:06cac81bf10f74034e055e903f5f946e3e26fc51c09fc9f584e4a1605d977053"},
|
||||
{file = "coverage-7.13.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f2ffc92b46ed6e6760f1d47a71e56b5664781bc68986dbd1836b2b70c0ce2071"},
|
||||
{file = "coverage-7.13.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0602f701057c6823e5db1b74530ce85f17c3c5be5c85fc042ac939cbd909426e"},
|
||||
{file = "coverage-7.13.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:25dc33618d45456ccb1d37bce44bc78cf269909aa14c4db2e03d63146a8a1493"},
|
||||
{file = "coverage-7.13.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:71936a8b3b977ddd0b694c28c6a34f4fff2e9dd201969a4ff5d5fc7742d614b0"},
|
||||
{file = "coverage-7.13.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:936bc20503ce24770c71938d1369461f0c5320830800933bc3956e2a4ded930e"},
|
||||
{file = "coverage-7.13.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:af0a583efaacc52ae2521f8d7910aff65cdb093091d76291ac5820d5e947fc1c"},
|
||||
{file = "coverage-7.13.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f1c23e24a7000da892a312fb17e33c5f94f8b001de44b7cf8ba2e36fbd15859e"},
|
||||
{file = "coverage-7.13.0-cp313-cp313t-win32.whl", hash = "sha256:5f8a0297355e652001015e93be345ee54393e45dc3050af4a0475c5a2b767d46"},
|
||||
{file = "coverage-7.13.0-cp313-cp313t-win_amd64.whl", hash = "sha256:6abb3a4c52f05e08460bd9acf04fec027f8718ecaa0d09c40ffbc3fbd70ecc39"},
|
||||
{file = "coverage-7.13.0-cp313-cp313t-win_arm64.whl", hash = "sha256:3ad968d1e3aa6ce5be295ab5fe3ae1bf5bb4769d0f98a80a0252d543a2ef2e9e"},
|
||||
{file = "coverage-7.13.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:453b7ec753cf5e4356e14fe858064e5520c460d3bbbcb9c35e55c0d21155c256"},
|
||||
{file = "coverage-7.13.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:af827b7cbb303e1befa6c4f94fd2bf72f108089cfa0f8abab8f4ca553cf5ca5a"},
|
||||
{file = "coverage-7.13.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:9987a9e4f8197a1000280f7cc089e3ea2c8b3c0a64d750537809879a7b4ceaf9"},
|
||||
{file = "coverage-7.13.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:3188936845cd0cb114fa6a51842a304cdbac2958145d03be2377ec41eb285d19"},
|
||||
{file = "coverage-7.13.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a2bdb3babb74079f021696cb46b8bb5f5661165c385d3a238712b031a12355be"},
|
||||
{file = "coverage-7.13.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7464663eaca6adba4175f6c19354feea61ebbdd735563a03d1e472c7072d27bb"},
|
||||
{file = "coverage-7.13.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8069e831f205d2ff1f3d355e82f511eb7c5522d7d413f5db5756b772ec8697f8"},
|
||||
{file = "coverage-7.13.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:6fb2d5d272341565f08e962cce14cdf843a08ac43bd621783527adb06b089c4b"},
|
||||
{file = "coverage-7.13.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:5e70f92ef89bac1ac8a99b3324923b4749f008fdbd7aa9cb35e01d7a284a04f9"},
|
||||
{file = "coverage-7.13.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:4b5de7d4583e60d5fd246dd57fcd3a8aa23c6e118a8c72b38adf666ba8e7e927"},
|
||||
{file = "coverage-7.13.0-cp314-cp314-win32.whl", hash = "sha256:a6c6e16b663be828a8f0b6c5027d36471d4a9f90d28444aa4ced4d48d7d6ae8f"},
|
||||
{file = "coverage-7.13.0-cp314-cp314-win_amd64.whl", hash = "sha256:0900872f2fdb3ee5646b557918d02279dc3af3dfb39029ac4e945458b13f73bc"},
|
||||
{file = "coverage-7.13.0-cp314-cp314-win_arm64.whl", hash = "sha256:3a10260e6a152e5f03f26db4a407c4c62d3830b9af9b7c0450b183615f05d43b"},
|
||||
{file = "coverage-7.13.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:9097818b6cc1cfb5f174e3263eba4a62a17683bcfe5c4b5d07f4c97fa51fbf28"},
|
||||
{file = "coverage-7.13.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0018f73dfb4301a89292c73be6ba5f58722ff79f51593352759c1790ded1cabe"},
|
||||
{file = "coverage-7.13.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:166ad2a22ee770f5656e1257703139d3533b4a0b6909af67c6b4a3adc1c98657"},
|
||||
{file = "coverage-7.13.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f6aaef16d65d1787280943f1c8718dc32e9cf141014e4634d64446702d26e0ff"},
|
||||
{file = "coverage-7.13.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e999e2dcc094002d6e2c7bbc1fb85b58ba4f465a760a8014d97619330cdbbbf3"},
|
||||
{file = "coverage-7.13.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:00c3d22cf6fb1cf3bf662aaaa4e563be8243a5ed2630339069799835a9cc7f9b"},
|
||||
{file = "coverage-7.13.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22ccfe8d9bb0d6134892cbe1262493a8c70d736b9df930f3f3afae0fe3ac924d"},
|
||||
{file = "coverage-7.13.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:9372dff5ea15930fea0445eaf37bbbafbc771a49e70c0aeed8b4e2c2614cc00e"},
|
||||
{file = "coverage-7.13.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:69ac2c492918c2461bc6ace42d0479638e60719f2a4ef3f0815fa2df88e9f940"},
|
||||
{file = "coverage-7.13.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:739c6c051a7540608d097b8e13c76cfa85263ced467168dc6b477bae3df7d0e2"},
|
||||
{file = "coverage-7.13.0-cp314-cp314t-win32.whl", hash = "sha256:fe81055d8c6c9de76d60c94ddea73c290b416e061d40d542b24a5871bad498b7"},
|
||||
{file = "coverage-7.13.0-cp314-cp314t-win_amd64.whl", hash = "sha256:445badb539005283825959ac9fa4a28f712c214b65af3a2c464f1adc90f5fcbc"},
|
||||
{file = "coverage-7.13.0-cp314-cp314t-win_arm64.whl", hash = "sha256:de7f6748b890708578fc4b7bb967d810aeb6fcc9bff4bb77dbca77dab2f9df6a"},
|
||||
{file = "coverage-7.13.0-py3-none-any.whl", hash = "sha256:850d2998f380b1e266459ca5b47bc9e7daf9af1d070f66317972f382d46f1904"},
|
||||
{file = "coverage-7.13.0.tar.gz", hash = "sha256:a394aa27f2d7ff9bc04cf703817773a59ad6dfbd577032e690f961d2460ee936"},
|
||||
{file = "coverage-7.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e1fa280b3ad78eea5be86f94f461c04943d942697e0dac889fa18fff8f5f9147"},
|
||||
{file = "coverage-7.13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c3d8c679607220979434f494b139dfb00131ebf70bb406553d69c1ff01a5c33d"},
|
||||
{file = "coverage-7.13.1-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:339dc63b3eba969067b00f41f15ad161bf2946613156fb131266d8debc8e44d0"},
|
||||
{file = "coverage-7.13.1-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:db622b999ffe49cb891f2fff3b340cdc2f9797d01a0a202a0973ba2562501d90"},
|
||||
{file = "coverage-7.13.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1443ba9acbb593fa7c1c29e011d7c9761545fe35e7652e85ce7f51a16f7e08d"},
|
||||
{file = "coverage-7.13.1-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c832ec92c4499ac463186af72f9ed4d8daec15499b16f0a879b0d1c8e5cf4a3b"},
|
||||
{file = "coverage-7.13.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:562ec27dfa3f311e0db1ba243ec6e5f6ab96b1edfcfc6cf86f28038bc4961ce6"},
|
||||
{file = "coverage-7.13.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4de84e71173d4dada2897e5a0e1b7877e5eefbfe0d6a44edee6ce31d9b8ec09e"},
|
||||
{file = "coverage-7.13.1-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:a5a68357f686f8c4d527a2dc04f52e669c2fc1cbde38f6f7eb6a0e58cbd17cae"},
|
||||
{file = "coverage-7.13.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:77cc258aeb29a3417062758975521eae60af6f79e930d6993555eeac6a8eac29"},
|
||||
{file = "coverage-7.13.1-cp310-cp310-win32.whl", hash = "sha256:bb4f8c3c9a9f34423dba193f241f617b08ffc63e27f67159f60ae6baf2dcfe0f"},
|
||||
{file = "coverage-7.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:c8e2706ceb622bc63bac98ebb10ef5da80ed70fbd8a7999a5076de3afaef0fb1"},
|
||||
{file = "coverage-7.13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a55d509a1dc5a5b708b5dad3b5334e07a16ad4c2185e27b40e4dba796ab7f88"},
|
||||
{file = "coverage-7.13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4d010d080c4888371033baab27e47c9df7d6fb28d0b7b7adf85a4a49be9298b3"},
|
||||
{file = "coverage-7.13.1-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d938b4a840fb1523b9dfbbb454f652967f18e197569c32266d4d13f37244c3d9"},
|
||||
{file = "coverage-7.13.1-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bf100a3288f9bb7f919b87eb84f87101e197535b9bd0e2c2b5b3179633324fee"},
|
||||
{file = "coverage-7.13.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef6688db9bf91ba111ae734ba6ef1a063304a881749726e0d3575f5c10a9facf"},
|
||||
{file = "coverage-7.13.1-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0b609fc9cdbd1f02e51f67f51e5aee60a841ef58a68d00d5ee2c0faf357481a3"},
|
||||
{file = "coverage-7.13.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c43257717611ff5e9a1d79dce8e47566235ebda63328718d9b65dd640bc832ef"},
|
||||
{file = "coverage-7.13.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e09fbecc007f7b6afdfb3b07ce5bd9f8494b6856dd4f577d26c66c391b829851"},
|
||||
{file = "coverage-7.13.1-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:a03a4f3a19a189919c7055098790285cc5c5b0b3976f8d227aea39dbf9f8bfdb"},
|
||||
{file = "coverage-7.13.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3820778ea1387c2b6a818caec01c63adc5b3750211af6447e8dcfb9b6f08dbba"},
|
||||
{file = "coverage-7.13.1-cp311-cp311-win32.whl", hash = "sha256:ff10896fa55167371960c5908150b434b71c876dfab97b69478f22c8b445ea19"},
|
||||
{file = "coverage-7.13.1-cp311-cp311-win_amd64.whl", hash = "sha256:a998cc0aeeea4c6d5622a3754da5a493055d2d95186bad877b0a34ea6e6dbe0a"},
|
||||
{file = "coverage-7.13.1-cp311-cp311-win_arm64.whl", hash = "sha256:fea07c1a39a22614acb762e3fbbb4011f65eedafcb2948feeef641ac78b4ee5c"},
|
||||
{file = "coverage-7.13.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6f34591000f06e62085b1865c9bc5f7858df748834662a51edadfd2c3bfe0dd3"},
|
||||
{file = "coverage-7.13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b67e47c5595b9224599016e333f5ec25392597a89d5744658f837d204e16c63e"},
|
||||
{file = "coverage-7.13.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3e7b8bd70c48ffb28461ebe092c2345536fb18bbbf19d287c8913699735f505c"},
|
||||
{file = "coverage-7.13.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c223d078112e90dc0e5c4e35b98b9584164bea9fbbd221c0b21c5241f6d51b62"},
|
||||
{file = "coverage-7.13.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:794f7c05af0763b1bbd1b9e6eff0e52ad068be3b12cd96c87de037b01390c968"},
|
||||
{file = "coverage-7.13.1-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0642eae483cc8c2902e4af7298bf886d605e80f26382124cddc3967c2a3df09e"},
|
||||
{file = "coverage-7.13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9f5e772ed5fef25b3de9f2008fe67b92d46831bd2bc5bdc5dd6bfd06b83b316f"},
|
||||
{file = "coverage-7.13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:45980ea19277dc0a579e432aef6a504fe098ef3a9032ead15e446eb0f1191aee"},
|
||||
{file = "coverage-7.13.1-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:e4f18eca6028ffa62adbd185a8f1e1dd242f2e68164dba5c2b74a5204850b4cf"},
|
||||
{file = "coverage-7.13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f8dca5590fec7a89ed6826fce625595279e586ead52e9e958d3237821fbc750c"},
|
||||
{file = "coverage-7.13.1-cp312-cp312-win32.whl", hash = "sha256:ff86d4e85188bba72cfb876df3e11fa243439882c55957184af44a35bd5880b7"},
|
||||
{file = "coverage-7.13.1-cp312-cp312-win_amd64.whl", hash = "sha256:16cc1da46c04fb0fb128b4dc430b78fa2aba8a6c0c9f8eb391fd5103409a6ac6"},
|
||||
{file = "coverage-7.13.1-cp312-cp312-win_arm64.whl", hash = "sha256:8d9bc218650022a768f3775dd7fdac1886437325d8d295d923ebcfef4892ad5c"},
|
||||
{file = "coverage-7.13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cb237bfd0ef4d5eb6a19e29f9e528ac67ac3be932ea6b44fb6cc09b9f3ecff78"},
|
||||
{file = "coverage-7.13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1dcb645d7e34dcbcc96cd7c132b1fc55c39263ca62eb961c064eb3928997363b"},
|
||||
{file = "coverage-7.13.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3d42df8201e00384736f0df9be2ced39324c3907607d17d50d50116c989d84cd"},
|
||||
{file = "coverage-7.13.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fa3edde1aa8807de1d05934982416cb3ec46d1d4d91e280bcce7cca01c507992"},
|
||||
{file = "coverage-7.13.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9edd0e01a343766add6817bc448408858ba6b489039eaaa2018474e4001651a4"},
|
||||
{file = "coverage-7.13.1-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:985b7836931d033570b94c94713c6dba5f9d3ff26045f72c3e5dbc5fe3361e5a"},
|
||||
{file = "coverage-7.13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ffed1e4980889765c84a5d1a566159e363b71d6b6fbaf0bebc9d3c30bc016766"},
|
||||
{file = "coverage-7.13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:8842af7f175078456b8b17f1b73a0d16a65dcbdc653ecefeb00a56b3c8c298c4"},
|
||||
{file = "coverage-7.13.1-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:ccd7a6fca48ca9c131d9b0a2972a581e28b13416fc313fb98b6d24a03ce9a398"},
|
||||
{file = "coverage-7.13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0403f647055de2609be776965108447deb8e384fe4a553c119e3ff6bfbab4784"},
|
||||
{file = "coverage-7.13.1-cp313-cp313-win32.whl", hash = "sha256:549d195116a1ba1e1ae2f5ca143f9777800f6636eab917d4f02b5310d6d73461"},
|
||||
{file = "coverage-7.13.1-cp313-cp313-win_amd64.whl", hash = "sha256:5899d28b5276f536fcf840b18b61a9fce23cc3aec1d114c44c07fe94ebeaa500"},
|
||||
{file = "coverage-7.13.1-cp313-cp313-win_arm64.whl", hash = "sha256:868a2fae76dfb06e87291bcbd4dcbcc778a8500510b618d50496e520bd94d9b9"},
|
||||
{file = "coverage-7.13.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:67170979de0dacac3f3097d02b0ad188d8edcea44ccc44aaa0550af49150c7dc"},
|
||||
{file = "coverage-7.13.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f80e2bb21bfab56ed7405c2d79d34b5dc0bc96c2c1d2a067b643a09fb756c43a"},
|
||||
{file = "coverage-7.13.1-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f83351e0f7dcdb14d7326c3d8d8c4e915fa685cbfdc6281f9470d97a04e9dfe4"},
|
||||
{file = "coverage-7.13.1-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bb3f6562e89bad0110afbe64e485aac2462efdce6232cdec7862a095dc3412f6"},
|
||||
{file = "coverage-7.13.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:77545b5dcda13b70f872c3b5974ac64c21d05e65b1590b441c8560115dc3a0d1"},
|
||||
{file = "coverage-7.13.1-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a4d240d260a1aed814790bbe1f10a5ff31ce6c21bc78f0da4a1e8268d6c80dbd"},
|
||||
{file = "coverage-7.13.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d2287ac9360dec3837bfdad969963a5d073a09a85d898bd86bea82aa8876ef3c"},
|
||||
{file = "coverage-7.13.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:0d2c11f3ea4db66b5cbded23b20185c35066892c67d80ec4be4bab257b9ad1e0"},
|
||||
{file = "coverage-7.13.1-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:3fc6a169517ca0d7ca6846c3c5392ef2b9e38896f61d615cb75b9e7134d4ee1e"},
|
||||
{file = "coverage-7.13.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d10a2ed46386e850bb3de503a54f9fe8192e5917fcbb143bfef653a9355e9a53"},
|
||||
{file = "coverage-7.13.1-cp313-cp313t-win32.whl", hash = "sha256:75a6f4aa904301dab8022397a22c0039edc1f51e90b83dbd4464b8a38dc87842"},
|
||||
{file = "coverage-7.13.1-cp313-cp313t-win_amd64.whl", hash = "sha256:309ef5706e95e62578cda256b97f5e097916a2c26247c287bbe74794e7150df2"},
|
||||
{file = "coverage-7.13.1-cp313-cp313t-win_arm64.whl", hash = "sha256:92f980729e79b5d16d221038dbf2e8f9a9136afa072f9d5d6ed4cb984b126a09"},
|
||||
{file = "coverage-7.13.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:97ab3647280d458a1f9adb85244e81587505a43c0c7cff851f5116cd2814b894"},
|
||||
{file = "coverage-7.13.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8f572d989142e0908e6acf57ad1b9b86989ff057c006d13b76c146ec6a20216a"},
|
||||
{file = "coverage-7.13.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d72140ccf8a147e94274024ff6fd8fb7811354cf7ef88b1f0a988ebaa5bc774f"},
|
||||
{file = "coverage-7.13.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d3c9f051b028810f5a87c88e5d6e9af3c0ff32ef62763bf15d29f740453ca909"},
|
||||
{file = "coverage-7.13.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f398ba4df52d30b1763f62eed9de5620dcde96e6f491f4c62686736b155aa6e4"},
|
||||
{file = "coverage-7.13.1-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:132718176cc723026d201e347f800cd1a9e4b62ccd3f82476950834dad501c75"},
|
||||
{file = "coverage-7.13.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:9e549d642426e3579b3f4b92d0431543b012dcb6e825c91619d4e93b7363c3f9"},
|
||||
{file = "coverage-7.13.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:90480b2134999301eea795b3a9dbf606c6fbab1b489150c501da84a959442465"},
|
||||
{file = "coverage-7.13.1-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:e825dbb7f84dfa24663dd75835e7257f8882629fc11f03ecf77d84a75134b864"},
|
||||
{file = "coverage-7.13.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:623dcc6d7a7ba450bbdbeedbaa0c42b329bdae16491af2282f12a7e809be7eb9"},
|
||||
{file = "coverage-7.13.1-cp314-cp314-win32.whl", hash = "sha256:6e73ebb44dca5f708dc871fe0b90cf4cff1a13f9956f747cc87b535a840386f5"},
|
||||
{file = "coverage-7.13.1-cp314-cp314-win_amd64.whl", hash = "sha256:be753b225d159feb397bd0bf91ae86f689bad0da09d3b301478cd39b878ab31a"},
|
||||
{file = "coverage-7.13.1-cp314-cp314-win_arm64.whl", hash = "sha256:228b90f613b25ba0019361e4ab81520b343b622fc657daf7e501c4ed6a2366c0"},
|
||||
{file = "coverage-7.13.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:60cfb538fe9ef86e5b2ab0ca8fc8d62524777f6c611dcaf76dc16fbe9b8e698a"},
|
||||
{file = "coverage-7.13.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:57dfc8048c72ba48a8c45e188d811e5efd7e49b387effc8fb17e97936dde5bf6"},
|
||||
{file = "coverage-7.13.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3f2f725aa3e909b3c5fdb8192490bdd8e1495e85906af74fe6e34a2a77ba0673"},
|
||||
{file = "coverage-7.13.1-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9ee68b21909686eeb21dfcba2c3b81fee70dcf38b140dcd5aa70680995fa3aa5"},
|
||||
{file = "coverage-7.13.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:724b1b270cb13ea2e6503476e34541a0b1f62280bc997eab443f87790202033d"},
|
||||
{file = "coverage-7.13.1-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:916abf1ac5cf7eb16bc540a5bf75c71c43a676f5c52fcb9fe75a2bd75fb944e8"},
|
||||
{file = "coverage-7.13.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:776483fd35b58d8afe3acbd9988d5de592ab6da2d2a865edfdbc9fdb43e7c486"},
|
||||
{file = "coverage-7.13.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:b6f3b96617e9852703f5b633ea01315ca45c77e879584f283c44127f0f1ec564"},
|
||||
{file = "coverage-7.13.1-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:bd63e7b74661fed317212fab774e2a648bc4bb09b35f25474f8e3325d2945cd7"},
|
||||
{file = "coverage-7.13.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:933082f161bbb3e9f90d00990dc956120f608cdbcaeea15c4d897f56ef4fe416"},
|
||||
{file = "coverage-7.13.1-cp314-cp314t-win32.whl", hash = "sha256:18be793c4c87de2965e1c0f060f03d9e5aff66cfeae8e1dbe6e5b88056ec153f"},
|
||||
{file = "coverage-7.13.1-cp314-cp314t-win_amd64.whl", hash = "sha256:0e42e0ec0cd3e0d851cb3c91f770c9301f48647cb2877cb78f74bdaa07639a79"},
|
||||
{file = "coverage-7.13.1-cp314-cp314t-win_arm64.whl", hash = "sha256:eaecf47ef10c72ece9a2a92118257da87e460e113b83cc0d2905cbbe931792b4"},
|
||||
{file = "coverage-7.13.1-py3-none-any.whl", hash = "sha256:2016745cb3ba554469d02819d78958b571792bb68e31302610e898f80dd3a573"},
|
||||
{file = "coverage-7.13.1.tar.gz", hash = "sha256:b7593fe7eb5feaa3fbb461ac79aac9f9fc0387a5ca8080b0c6fe2ca27b091afd"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -567,6 +578,41 @@ files = [
|
|||
{file = "invoke-2.2.1.tar.gz", hash = "sha256:515bf49b4a48932b79b024590348da22f39c4942dff991ad1fb8b8baea1be707"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "jsonschema"
|
||||
version = "4.26.0"
|
||||
description = "An implementation of JSON Schema validation for Python"
|
||||
optional = false
|
||||
python-versions = ">=3.10"
|
||||
files = [
|
||||
{file = "jsonschema-4.26.0-py3-none-any.whl", hash = "sha256:d489f15263b8d200f8387e64b4c3a75f06629559fb73deb8fdfb525f2dab50ce"},
|
||||
{file = "jsonschema-4.26.0.tar.gz", hash = "sha256:0c26707e2efad8aa1bfc5b7ce170f3fccc2e4918ff85989ba9ffa9facb2be326"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
attrs = ">=22.2.0"
|
||||
jsonschema-specifications = ">=2023.03.6"
|
||||
referencing = ">=0.28.4"
|
||||
rpds-py = ">=0.25.0"
|
||||
|
||||
[package.extras]
|
||||
format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"]
|
||||
format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "rfc3987-syntax (>=1.1.0)", "uri-template", "webcolors (>=24.6.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "jsonschema-specifications"
|
||||
version = "2025.9.1"
|
||||
description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
files = [
|
||||
{file = "jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe"},
|
||||
{file = "jsonschema_specifications-2025.9.1.tar.gz", hash = "sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
referencing = ">=0.31.0"
|
||||
|
||||
[[package]]
|
||||
name = "packaging"
|
||||
version = "25.0"
|
||||
|
|
@ -640,38 +686,36 @@ windows-terminal = ["colorama (>=0.4.6)"]
|
|||
|
||||
[[package]]
|
||||
name = "pynacl"
|
||||
version = "1.6.1"
|
||||
version = "1.6.2"
|
||||
description = "Python binding to the Networking and Cryptography (NaCl) library"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "pynacl-1.6.1-cp314-cp314t-macosx_10_10_universal2.whl", hash = "sha256:7d7c09749450c385301a3c20dca967a525152ae4608c0a096fe8464bfc3df93d"},
|
||||
{file = "pynacl-1.6.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fc734c1696ffd49b40f7c1779c89ba908157c57345cf626be2e0719488a076d3"},
|
||||
{file = "pynacl-1.6.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3cd787ec1f5c155dc8ecf39b1333cfef41415dc96d392f1ce288b4fe970df489"},
|
||||
{file = "pynacl-1.6.1-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b35d93ab2df03ecb3aa506be0d3c73609a51449ae0855c2e89c7ed44abde40b"},
|
||||
{file = "pynacl-1.6.1-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dece79aecbb8f4640a1adbb81e4aa3bfb0e98e99834884a80eb3f33c7c30e708"},
|
||||
{file = "pynacl-1.6.1-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:c2228054f04bf32d558fb89bb99f163a8197d5a9bf4efa13069a7fa8d4b93fc3"},
|
||||
{file = "pynacl-1.6.1-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:2b12f1b97346f177affcdfdc78875ff42637cb40dcf79484a97dae3448083a78"},
|
||||
{file = "pynacl-1.6.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e735c3a1bdfde3834503baf1a6d74d4a143920281cb724ba29fb84c9f49b9c48"},
|
||||
{file = "pynacl-1.6.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:3384a454adf5d716a9fadcb5eb2e3e72cd49302d1374a60edc531c9957a9b014"},
|
||||
{file = "pynacl-1.6.1-cp314-cp314t-win32.whl", hash = "sha256:d8615ee34d01c8e0ab3f302dcdd7b32e2bcf698ba5f4809e7cc407c8cdea7717"},
|
||||
{file = "pynacl-1.6.1-cp314-cp314t-win_amd64.whl", hash = "sha256:5f5b35c1a266f8a9ad22525049280a600b19edd1f785bccd01ae838437dcf935"},
|
||||
{file = "pynacl-1.6.1-cp314-cp314t-win_arm64.whl", hash = "sha256:d984c91fe3494793b2a1fb1e91429539c6c28e9ec8209d26d25041ec599ccf63"},
|
||||
{file = "pynacl-1.6.1-cp38-abi3-macosx_10_10_universal2.whl", hash = "sha256:a6f9fd6d6639b1e81115c7f8ff16b8dedba1e8098d2756275d63d208b0e32021"},
|
||||
{file = "pynacl-1.6.1-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e49a3f3d0da9f79c1bec2aa013261ab9fa651c7da045d376bd306cf7c1792993"},
|
||||
{file = "pynacl-1.6.1-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7713f8977b5d25f54a811ec9efa2738ac592e846dd6e8a4d3f7578346a841078"},
|
||||
{file = "pynacl-1.6.1-cp38-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5a3becafc1ee2e5ea7f9abc642f56b82dcf5be69b961e782a96ea52b55d8a9fc"},
|
||||
{file = "pynacl-1.6.1-cp38-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4ce50d19f1566c391fedc8dc2f2f5be265ae214112ebe55315e41d1f36a7f0a9"},
|
||||
{file = "pynacl-1.6.1-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:543f869140f67d42b9b8d47f922552d7a967e6c116aad028c9bfc5f3f3b3a7b7"},
|
||||
{file = "pynacl-1.6.1-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:a2bb472458c7ca959aeeff8401b8efef329b0fc44a89d3775cffe8fad3398ad8"},
|
||||
{file = "pynacl-1.6.1-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:3206fa98737fdc66d59b8782cecc3d37d30aeec4593d1c8c145825a345bba0f0"},
|
||||
{file = "pynacl-1.6.1-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:53543b4f3d8acb344f75fd4d49f75e6572fce139f4bfb4815a9282296ff9f4c0"},
|
||||
{file = "pynacl-1.6.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:319de653ef84c4f04e045eb250e6101d23132372b0a61a7acf91bac0fda8e58c"},
|
||||
{file = "pynacl-1.6.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:262a8de6bba4aee8a66f5edf62c214b06647461c9b6b641f8cd0cb1e3b3196fe"},
|
||||
{file = "pynacl-1.6.1-cp38-abi3-win32.whl", hash = "sha256:9fd1a4eb03caf8a2fe27b515a998d26923adb9ddb68db78e35ca2875a3830dde"},
|
||||
{file = "pynacl-1.6.1-cp38-abi3-win_amd64.whl", hash = "sha256:a569a4069a7855f963940040f35e87d8bc084cb2d6347428d5ad20550a0a1a21"},
|
||||
{file = "pynacl-1.6.1-cp38-abi3-win_arm64.whl", hash = "sha256:5953e8b8cfadb10889a6e7bd0f53041a745d1b3d30111386a1bb37af171e6daf"},
|
||||
{file = "pynacl-1.6.1.tar.gz", hash = "sha256:8d361dac0309f2b6ad33b349a56cd163c98430d409fa503b10b70b3ad66eaa1d"},
|
||||
{file = "pynacl-1.6.2-cp314-cp314t-macosx_10_10_universal2.whl", hash = "sha256:622d7b07cc5c02c666795792931b50c91f3ce3c2649762efb1ef0d5684c81594"},
|
||||
{file = "pynacl-1.6.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d071c6a9a4c94d79eb665db4ce5cedc537faf74f2355e4d502591d850d3913c0"},
|
||||
{file = "pynacl-1.6.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe9847ca47d287af41e82be1dd5e23023d3c31a951da134121ab02e42ac218c9"},
|
||||
{file = "pynacl-1.6.2-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:04316d1fc625d860b6c162fff704eb8426b1a8bcd3abacea11142cbd99a6b574"},
|
||||
{file = "pynacl-1.6.2-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:44081faff368d6c5553ccf55322ef2819abb40e25afaec7e740f159f74813634"},
|
||||
{file = "pynacl-1.6.2-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:a9f9932d8d2811ce1a8ffa79dcbdf3970e7355b5c8eb0c1a881a57e7f7d96e88"},
|
||||
{file = "pynacl-1.6.2-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:bc4a36b28dd72fb4845e5d8f9760610588a96d5a51f01d84d8c6ff9849968c14"},
|
||||
{file = "pynacl-1.6.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:3bffb6d0f6becacb6526f8f42adfb5efb26337056ee0831fb9a7044d1a964444"},
|
||||
{file = "pynacl-1.6.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:2fef529ef3ee487ad8113d287a593fa26f48ee3620d92ecc6f1d09ea38e0709b"},
|
||||
{file = "pynacl-1.6.2-cp314-cp314t-win32.whl", hash = "sha256:a84bf1c20339d06dc0c85d9aea9637a24f718f375d861b2668b2f9f96fa51145"},
|
||||
{file = "pynacl-1.6.2-cp314-cp314t-win_amd64.whl", hash = "sha256:320ef68a41c87547c91a8b58903c9caa641ab01e8512ce291085b5fe2fcb7590"},
|
||||
{file = "pynacl-1.6.2-cp314-cp314t-win_arm64.whl", hash = "sha256:d29bfe37e20e015a7d8b23cfc8bd6aa7909c92a1b8f41ee416bbb3e79ef182b2"},
|
||||
{file = "pynacl-1.6.2-cp38-abi3-macosx_10_10_universal2.whl", hash = "sha256:c949ea47e4206af7c8f604b8278093b674f7c79ed0d4719cc836902bf4517465"},
|
||||
{file = "pynacl-1.6.2-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8845c0631c0be43abdd865511c41eab235e0be69c81dc66a50911594198679b0"},
|
||||
{file = "pynacl-1.6.2-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:22de65bb9010a725b0dac248f353bb072969c94fa8d6b1f34b87d7953cf7bbe4"},
|
||||
{file = "pynacl-1.6.2-cp38-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:46065496ab748469cdd999246d17e301b2c24ae2fdf739132e580a0e94c94a87"},
|
||||
{file = "pynacl-1.6.2-cp38-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8a66d6fb6ae7661c58995f9c6435bda2b1e68b54b598a6a10247bfcdadac996c"},
|
||||
{file = "pynacl-1.6.2-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:26bfcd00dcf2cf160f122186af731ae30ab120c18e8375684ec2670dccd28130"},
|
||||
{file = "pynacl-1.6.2-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:c8a231e36ec2cab018c4ad4358c386e36eede0319a0c41fed24f840b1dac59f6"},
|
||||
{file = "pynacl-1.6.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:68be3a09455743ff9505491220b64440ced8973fe930f270c8e07ccfa25b1f9e"},
|
||||
{file = "pynacl-1.6.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:8b097553b380236d51ed11356c953bf8ce36a29a3e596e934ecabe76c985a577"},
|
||||
{file = "pynacl-1.6.2-cp38-abi3-win32.whl", hash = "sha256:5811c72b473b2f38f7e2a3dc4f8642e3a3e9b5e7317266e4ced1fba85cae41aa"},
|
||||
{file = "pynacl-1.6.2-cp38-abi3-win_amd64.whl", hash = "sha256:62985f233210dee6548c223301b6c25440852e13d59a8b81490203c3227c5ba0"},
|
||||
{file = "pynacl-1.6.2-cp38-abi3-win_arm64.whl", hash = "sha256:834a43af110f743a754448463e8fd61259cd4ab5bbedcf70f9dabad1d28a394c"},
|
||||
{file = "pynacl-1.6.2.tar.gz", hash = "sha256:018494d6d696ae03c7e656e5e74cdfd8ea1326962cc401bcf018f1ed8436811c"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -820,6 +864,22 @@ files = [
|
|||
{file = "pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "referencing"
|
||||
version = "0.37.0"
|
||||
description = "JSON Referencing + Python"
|
||||
optional = false
|
||||
python-versions = ">=3.10"
|
||||
files = [
|
||||
{file = "referencing-0.37.0-py3-none-any.whl", hash = "sha256:381329a9f99628c9069361716891d34ad94af76e461dcb0335825aecc7692231"},
|
||||
{file = "referencing-0.37.0.tar.gz", hash = "sha256:44aefc3142c5b842538163acb373e24cce6632bd54bdb01b21ad5863489f50d8"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
attrs = ">=22.2.0"
|
||||
rpds-py = ">=0.7.0"
|
||||
typing-extensions = {version = ">=4.4.0", markers = "python_version < \"3.13\""}
|
||||
|
||||
[[package]]
|
||||
name = "requests"
|
||||
version = "2.32.5"
|
||||
|
|
@ -841,55 +901,184 @@ urllib3 = ">=1.21.1,<3"
|
|||
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
|
||||
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
|
||||
|
||||
[[package]]
|
||||
name = "rpds-py"
|
||||
version = "0.30.0"
|
||||
description = "Python bindings to Rust's persistent data structures (rpds)"
|
||||
optional = false
|
||||
python-versions = ">=3.10"
|
||||
files = [
|
||||
{file = "rpds_py-0.30.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:679ae98e00c0e8d68a7fda324e16b90fd5260945b45d3b824c892cec9eea3288"},
|
||||
{file = "rpds_py-0.30.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4cc2206b76b4f576934f0ed374b10d7ca5f457858b157ca52064bdfc26b9fc00"},
|
||||
{file = "rpds_py-0.30.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:389a2d49eded1896c3d48b0136ead37c48e221b391c052fba3f4055c367f60a6"},
|
||||
{file = "rpds_py-0.30.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:32c8528634e1bf7121f3de08fa85b138f4e0dc47657866630611b03967f041d7"},
|
||||
{file = "rpds_py-0.30.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f207f69853edd6f6700b86efb84999651baf3789e78a466431df1331608e5324"},
|
||||
{file = "rpds_py-0.30.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:67b02ec25ba7a9e8fa74c63b6ca44cf5707f2fbfadae3ee8e7494297d56aa9df"},
|
||||
{file = "rpds_py-0.30.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0e95f6819a19965ff420f65578bacb0b00f251fefe2c8b23347c37174271f3"},
|
||||
{file = "rpds_py-0.30.0-cp310-cp310-manylinux_2_31_riscv64.whl", hash = "sha256:a452763cc5198f2f98898eb98f7569649fe5da666c2dc6b5ddb10fde5a574221"},
|
||||
{file = "rpds_py-0.30.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e0b65193a413ccc930671c55153a03ee57cecb49e6227204b04fae512eb657a7"},
|
||||
{file = "rpds_py-0.30.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:858738e9c32147f78b3ac24dc0edb6610000e56dc0f700fd5f651d0a0f0eb9ff"},
|
||||
{file = "rpds_py-0.30.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:da279aa314f00acbb803da1e76fa18666778e8a8f83484fba94526da5de2cba7"},
|
||||
{file = "rpds_py-0.30.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7c64d38fb49b6cdeda16ab49e35fe0da2e1e9b34bc38bd78386530f218b37139"},
|
||||
{file = "rpds_py-0.30.0-cp310-cp310-win32.whl", hash = "sha256:6de2a32a1665b93233cde140ff8b3467bdb9e2af2b91079f0333a0974d12d464"},
|
||||
{file = "rpds_py-0.30.0-cp310-cp310-win_amd64.whl", hash = "sha256:1726859cd0de969f88dc8673bdd954185b9104e05806be64bcd87badbe313169"},
|
||||
{file = "rpds_py-0.30.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a2bffea6a4ca9f01b3f8e548302470306689684e61602aa3d141e34da06cf425"},
|
||||
{file = "rpds_py-0.30.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dc4f992dfe1e2bc3ebc7444f6c7051b4bc13cd8e33e43511e8ffd13bf407010d"},
|
||||
{file = "rpds_py-0.30.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:422c3cb9856d80b09d30d2eb255d0754b23e090034e1deb4083f8004bd0761e4"},
|
||||
{file = "rpds_py-0.30.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07ae8a593e1c3c6b82ca3292efbe73c30b61332fd612e05abee07c79359f292f"},
|
||||
{file = "rpds_py-0.30.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12f90dd7557b6bd57f40abe7747e81e0c0b119bef015ea7726e69fe550e394a4"},
|
||||
{file = "rpds_py-0.30.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99b47d6ad9a6da00bec6aabe5a6279ecd3c06a329d4aa4771034a21e335c3a97"},
|
||||
{file = "rpds_py-0.30.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33f559f3104504506a44bb666b93a33f5d33133765b0c216a5bf2f1e1503af89"},
|
||||
{file = "rpds_py-0.30.0-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:946fe926af6e44f3697abbc305ea168c2c31d3e3ef1058cf68f379bf0335a78d"},
|
||||
{file = "rpds_py-0.30.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:495aeca4b93d465efde585977365187149e75383ad2684f81519f504f5c13038"},
|
||||
{file = "rpds_py-0.30.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9a0ca5da0386dee0655b4ccdf46119df60e0f10da268d04fe7cc87886872ba7"},
|
||||
{file = "rpds_py-0.30.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8d6d1cc13664ec13c1b84241204ff3b12f9bb82464b8ad6e7a5d3486975c2eed"},
|
||||
{file = "rpds_py-0.30.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3896fa1be39912cf0757753826bc8bdc8ca331a28a7c4ae46b7a21280b06bb85"},
|
||||
{file = "rpds_py-0.30.0-cp311-cp311-win32.whl", hash = "sha256:55f66022632205940f1827effeff17c4fa7ae1953d2b74a8581baaefb7d16f8c"},
|
||||
{file = "rpds_py-0.30.0-cp311-cp311-win_amd64.whl", hash = "sha256:a51033ff701fca756439d641c0ad09a41d9242fa69121c7d8769604a0a629825"},
|
||||
{file = "rpds_py-0.30.0-cp311-cp311-win_arm64.whl", hash = "sha256:47b0ef6231c58f506ef0b74d44e330405caa8428e770fec25329ed2cb971a229"},
|
||||
{file = "rpds_py-0.30.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a161f20d9a43006833cd7068375a94d035714d73a172b681d8881820600abfad"},
|
||||
{file = "rpds_py-0.30.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6abc8880d9d036ecaafe709079969f56e876fcf107f7a8e9920ba6d5a3878d05"},
|
||||
{file = "rpds_py-0.30.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca28829ae5f5d569bb62a79512c842a03a12576375d5ece7d2cadf8abe96ec28"},
|
||||
{file = "rpds_py-0.30.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a1010ed9524c73b94d15919ca4d41d8780980e1765babf85f9a2f90d247153dd"},
|
||||
{file = "rpds_py-0.30.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8d1736cfb49381ba528cd5baa46f82fdc65c06e843dab24dd70b63d09121b3f"},
|
||||
{file = "rpds_py-0.30.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d948b135c4693daff7bc2dcfc4ec57237a29bd37e60c2fabf5aff2bbacf3e2f1"},
|
||||
{file = "rpds_py-0.30.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47f236970bccb2233267d89173d3ad2703cd36a0e2a6e92d0560d333871a3d23"},
|
||||
{file = "rpds_py-0.30.0-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:2e6ecb5a5bcacf59c3f912155044479af1d0b6681280048b338b28e364aca1f6"},
|
||||
{file = "rpds_py-0.30.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a8fa71a2e078c527c3e9dc9fc5a98c9db40bcc8a92b4e8858e36d329f8684b51"},
|
||||
{file = "rpds_py-0.30.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:73c67f2db7bc334e518d097c6d1e6fed021bbc9b7d678d6cc433478365d1d5f5"},
|
||||
{file = "rpds_py-0.30.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5ba103fb455be00f3b1c2076c9d4264bfcb037c976167a6047ed82f23153f02e"},
|
||||
{file = "rpds_py-0.30.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7cee9c752c0364588353e627da8a7e808a66873672bcb5f52890c33fd965b394"},
|
||||
{file = "rpds_py-0.30.0-cp312-cp312-win32.whl", hash = "sha256:1ab5b83dbcf55acc8b08fc62b796ef672c457b17dbd7820a11d6c52c06839bdf"},
|
||||
{file = "rpds_py-0.30.0-cp312-cp312-win_amd64.whl", hash = "sha256:a090322ca841abd453d43456ac34db46e8b05fd9b3b4ac0c78bcde8b089f959b"},
|
||||
{file = "rpds_py-0.30.0-cp312-cp312-win_arm64.whl", hash = "sha256:669b1805bd639dd2989b281be2cfd951c6121b65e729d9b843e9639ef1fd555e"},
|
||||
{file = "rpds_py-0.30.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f83424d738204d9770830d35290ff3273fbb02b41f919870479fab14b9d303b2"},
|
||||
{file = "rpds_py-0.30.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e7536cd91353c5273434b4e003cbda89034d67e7710eab8761fd918ec6c69cf8"},
|
||||
{file = "rpds_py-0.30.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2771c6c15973347f50fece41fc447c054b7ac2ae0502388ce3b6738cd366e3d4"},
|
||||
{file = "rpds_py-0.30.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0a59119fc6e3f460315fe9d08149f8102aa322299deaa5cab5b40092345c2136"},
|
||||
{file = "rpds_py-0.30.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:76fec018282b4ead0364022e3c54b60bf368b9d926877957a8624b58419169b7"},
|
||||
{file = "rpds_py-0.30.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:692bef75a5525db97318e8cd061542b5a79812d711ea03dbc1f6f8dbb0c5f0d2"},
|
||||
{file = "rpds_py-0.30.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9027da1ce107104c50c81383cae773ef5c24d296dd11c99e2629dbd7967a20c6"},
|
||||
{file = "rpds_py-0.30.0-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:9cf69cdda1f5968a30a359aba2f7f9aa648a9ce4b580d6826437f2b291cfc86e"},
|
||||
{file = "rpds_py-0.30.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a4796a717bf12b9da9d3ad002519a86063dcac8988b030e405704ef7d74d2d9d"},
|
||||
{file = "rpds_py-0.30.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5d4c2aa7c50ad4728a094ebd5eb46c452e9cb7edbfdb18f9e1221f597a73e1e7"},
|
||||
{file = "rpds_py-0.30.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ba81a9203d07805435eb06f536d95a266c21e5b2dfbf6517748ca40c98d19e31"},
|
||||
{file = "rpds_py-0.30.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:945dccface01af02675628334f7cf49c2af4c1c904748efc5cf7bbdf0b579f95"},
|
||||
{file = "rpds_py-0.30.0-cp313-cp313-win32.whl", hash = "sha256:b40fb160a2db369a194cb27943582b38f79fc4887291417685f3ad693c5a1d5d"},
|
||||
{file = "rpds_py-0.30.0-cp313-cp313-win_amd64.whl", hash = "sha256:806f36b1b605e2d6a72716f321f20036b9489d29c51c91f4dd29a3e3afb73b15"},
|
||||
{file = "rpds_py-0.30.0-cp313-cp313-win_arm64.whl", hash = "sha256:d96c2086587c7c30d44f31f42eae4eac89b60dabbac18c7669be3700f13c3ce1"},
|
||||
{file = "rpds_py-0.30.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:eb0b93f2e5c2189ee831ee43f156ed34e2a89a78a66b98cadad955972548be5a"},
|
||||
{file = "rpds_py-0.30.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:922e10f31f303c7c920da8981051ff6d8c1a56207dbdf330d9047f6d30b70e5e"},
|
||||
{file = "rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdc62c8286ba9bf7f47befdcea13ea0e26bf294bda99758fd90535cbaf408000"},
|
||||
{file = "rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:47f9a91efc418b54fb8190a6b4aa7813a23fb79c51f4bb84e418f5476c38b8db"},
|
||||
{file = "rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f3587eb9b17f3789ad50824084fa6f81921bbf9a795826570bda82cb3ed91f2"},
|
||||
{file = "rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39c02563fc592411c2c61d26b6c5fe1e51eaa44a75aa2c8735ca88b0d9599daa"},
|
||||
{file = "rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51a1234d8febafdfd33a42d97da7a43f5dcb120c1060e352a3fbc0c6d36e2083"},
|
||||
{file = "rpds_py-0.30.0-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:eb2c4071ab598733724c08221091e8d80e89064cd472819285a9ab0f24bcedb9"},
|
||||
{file = "rpds_py-0.30.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6bdfdb946967d816e6adf9a3d8201bfad269c67efe6cefd7093ef959683c8de0"},
|
||||
{file = "rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c77afbd5f5250bf27bf516c7c4a016813eb2d3e116139aed0096940c5982da94"},
|
||||
{file = "rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:61046904275472a76c8c90c9ccee9013d70a6d0f73eecefd38c1ae7c39045a08"},
|
||||
{file = "rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4c5f36a861bc4b7da6516dbdf302c55313afa09b81931e8280361a4f6c9a2d27"},
|
||||
{file = "rpds_py-0.30.0-cp313-cp313t-win32.whl", hash = "sha256:3d4a69de7a3e50ffc214ae16d79d8fbb0922972da0356dcf4d0fdca2878559c6"},
|
||||
{file = "rpds_py-0.30.0-cp313-cp313t-win_amd64.whl", hash = "sha256:f14fc5df50a716f7ece6a80b6c78bb35ea2ca47c499e422aa4463455dd96d56d"},
|
||||
{file = "rpds_py-0.30.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:68f19c879420aa08f61203801423f6cd5ac5f0ac4ac82a2368a9fcd6a9a075e0"},
|
||||
{file = "rpds_py-0.30.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ec7c4490c672c1a0389d319b3a9cfcd098dcdc4783991553c332a15acf7249be"},
|
||||
{file = "rpds_py-0.30.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f251c812357a3fed308d684a5079ddfb9d933860fc6de89f2b7ab00da481e65f"},
|
||||
{file = "rpds_py-0.30.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac98b175585ecf4c0348fd7b29c3864bda53b805c773cbf7bfdaffc8070c976f"},
|
||||
{file = "rpds_py-0.30.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3e62880792319dbeb7eb866547f2e35973289e7d5696c6e295476448f5b63c87"},
|
||||
{file = "rpds_py-0.30.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4e7fc54e0900ab35d041b0601431b0a0eb495f0851a0639b6ef90f7741b39a18"},
|
||||
{file = "rpds_py-0.30.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47e77dc9822d3ad616c3d5759ea5631a75e5809d5a28707744ef79d7a1bcfcad"},
|
||||
{file = "rpds_py-0.30.0-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:b4dc1a6ff022ff85ecafef7979a2c6eb423430e05f1165d6688234e62ba99a07"},
|
||||
{file = "rpds_py-0.30.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4559c972db3a360808309e06a74628b95eaccbf961c335c8fe0d590cf587456f"},
|
||||
{file = "rpds_py-0.30.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:0ed177ed9bded28f8deb6ab40c183cd1192aa0de40c12f38be4d59cd33cb5c65"},
|
||||
{file = "rpds_py-0.30.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:ad1fa8db769b76ea911cb4e10f049d80bf518c104f15b3edb2371cc65375c46f"},
|
||||
{file = "rpds_py-0.30.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:46e83c697b1f1c72b50e5ee5adb4353eef7406fb3f2043d64c33f20ad1c2fc53"},
|
||||
{file = "rpds_py-0.30.0-cp314-cp314-win32.whl", hash = "sha256:ee454b2a007d57363c2dfd5b6ca4a5d7e2c518938f8ed3b706e37e5d470801ed"},
|
||||
{file = "rpds_py-0.30.0-cp314-cp314-win_amd64.whl", hash = "sha256:95f0802447ac2d10bcc69f6dc28fe95fdf17940367b21d34e34c737870758950"},
|
||||
{file = "rpds_py-0.30.0-cp314-cp314-win_arm64.whl", hash = "sha256:613aa4771c99f03346e54c3f038e4cc574ac09a3ddfb0e8878487335e96dead6"},
|
||||
{file = "rpds_py-0.30.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:7e6ecfcb62edfd632e56983964e6884851786443739dbfe3582947e87274f7cb"},
|
||||
{file = "rpds_py-0.30.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a1d0bc22a7cdc173fedebb73ef81e07faef93692b8c1ad3733b67e31e1b6e1b8"},
|
||||
{file = "rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d08f00679177226c4cb8c5265012eea897c8ca3b93f429e546600c971bcbae7"},
|
||||
{file = "rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5965af57d5848192c13534f90f9dd16464f3c37aaf166cc1da1cae1fd5a34898"},
|
||||
{file = "rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a4e86e34e9ab6b667c27f3211ca48f73dba7cd3d90f8d5b11be56e5dbc3fb4e"},
|
||||
{file = "rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5d3e6b26f2c785d65cc25ef1e5267ccbe1b069c5c21b8cc724efee290554419"},
|
||||
{file = "rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:626a7433c34566535b6e56a1b39a7b17ba961e97ce3b80ec62e6f1312c025551"},
|
||||
{file = "rpds_py-0.30.0-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:acd7eb3f4471577b9b5a41baf02a978e8bdeb08b4b355273994f8b87032000a8"},
|
||||
{file = "rpds_py-0.30.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fe5fa731a1fa8a0a56b0977413f8cacac1768dad38d16b3a296712709476fbd5"},
|
||||
{file = "rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:74a3243a411126362712ee1524dfc90c650a503502f135d54d1b352bd01f2404"},
|
||||
{file = "rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:3e8eeb0544f2eb0d2581774be4c3410356eba189529a6b3e36bbbf9696175856"},
|
||||
{file = "rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:dbd936cde57abfee19ab3213cf9c26be06d60750e60a8e4dd85d1ab12c8b1f40"},
|
||||
{file = "rpds_py-0.30.0-cp314-cp314t-win32.whl", hash = "sha256:dc824125c72246d924f7f796b4f63c1e9dc810c7d9e2355864b3c3a73d59ade0"},
|
||||
{file = "rpds_py-0.30.0-cp314-cp314t-win_amd64.whl", hash = "sha256:27f4b0e92de5bfbc6f86e43959e6edd1425c33b5e69aab0984a72047f2bcf1e3"},
|
||||
{file = "rpds_py-0.30.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c2262bdba0ad4fc6fb5545660673925c2d2a5d9e2e0fb603aad545427be0fc58"},
|
||||
{file = "rpds_py-0.30.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:ee6af14263f25eedc3bb918a3c04245106a42dfd4f5c2285ea6f997b1fc3f89a"},
|
||||
{file = "rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3adbb8179ce342d235c31ab8ec511e66c73faa27a47e076ccc92421add53e2bb"},
|
||||
{file = "rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:250fa00e9543ac9b97ac258bd37367ff5256666122c2d0f2bc97577c60a1818c"},
|
||||
{file = "rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9854cf4f488b3d57b9aaeb105f06d78e5529d3145b1e4a41750167e8c213c6d3"},
|
||||
{file = "rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:993914b8e560023bc0a8bf742c5f303551992dcb85e247b1e5c7f4a7d145bda5"},
|
||||
{file = "rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58edca431fb9b29950807e301826586e5bbf24163677732429770a697ffe6738"},
|
||||
{file = "rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:dea5b552272a944763b34394d04577cf0f9bd013207bc32323b5a89a53cf9c2f"},
|
||||
{file = "rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ba3af48635eb83d03f6c9735dfb21785303e73d22ad03d489e88adae6eab8877"},
|
||||
{file = "rpds_py-0.30.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:dff13836529b921e22f15cb099751209a60009731a68519630a24d61f0b1b30a"},
|
||||
{file = "rpds_py-0.30.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:1b151685b23929ab7beec71080a8889d4d6d9fa9a983d213f07121205d48e2c4"},
|
||||
{file = "rpds_py-0.30.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ac37f9f516c51e5753f27dfdef11a88330f04de2d564be3991384b2f3535d02e"},
|
||||
{file = "rpds_py-0.30.0.tar.gz", hash = "sha256:dd8ff7cf90014af0c0f787eea34794ebf6415242ee1d6fa91eaba725cc441e84"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tomli"
|
||||
version = "2.3.0"
|
||||
version = "2.4.0"
|
||||
description = "A lil' TOML parser"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45"},
|
||||
{file = "tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba"},
|
||||
{file = "tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf"},
|
||||
{file = "tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441"},
|
||||
{file = "tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845"},
|
||||
{file = "tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c"},
|
||||
{file = "tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456"},
|
||||
{file = "tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be"},
|
||||
{file = "tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac"},
|
||||
{file = "tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22"},
|
||||
{file = "tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f"},
|
||||
{file = "tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52"},
|
||||
{file = "tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8"},
|
||||
{file = "tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6"},
|
||||
{file = "tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876"},
|
||||
{file = "tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878"},
|
||||
{file = "tomli-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5192f562738228945d7b13d4930baffda67b69425a7f0da96d360b0a3888136b"},
|
||||
{file = "tomli-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:be71c93a63d738597996be9528f4abe628d1adf5e6eb11607bc8fe1a510b5dae"},
|
||||
{file = "tomli-2.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4665508bcbac83a31ff8ab08f424b665200c0e1e645d2bd9ab3d3e557b6185b"},
|
||||
{file = "tomli-2.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf"},
|
||||
{file = "tomli-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4ea38c40145a357d513bffad0ed869f13c1773716cf71ccaa83b0fa0cc4e42f"},
|
||||
{file = "tomli-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad805ea85eda330dbad64c7ea7a4556259665bdf9d2672f5dccc740eb9d3ca05"},
|
||||
{file = "tomli-2.3.0-cp313-cp313-win32.whl", hash = "sha256:97d5eec30149fd3294270e889b4234023f2c69747e555a27bd708828353ab606"},
|
||||
{file = "tomli-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c95ca56fbe89e065c6ead5b593ee64b84a26fca063b5d71a1122bf26e533999"},
|
||||
{file = "tomli-2.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cebc6fe843e0733ee827a282aca4999b596241195f43b4cc371d64fc6639da9e"},
|
||||
{file = "tomli-2.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4c2ef0244c75aba9355561272009d934953817c49f47d768070c3c94355c2aa3"},
|
||||
{file = "tomli-2.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c22a8bf253bacc0cf11f35ad9808b6cb75ada2631c2d97c971122583b129afbc"},
|
||||
{file = "tomli-2.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0eea8cc5c5e9f89c9b90c4896a8deefc74f518db5927d0e0e8d4a80953d774d0"},
|
||||
{file = "tomli-2.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b74a0e59ec5d15127acdabd75ea17726ac4c5178ae51b85bfe39c4f8a278e879"},
|
||||
{file = "tomli-2.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5870b50c9db823c595983571d1296a6ff3e1b88f734a4c8f6fc6188397de005"},
|
||||
{file = "tomli-2.3.0-cp314-cp314-win32.whl", hash = "sha256:feb0dacc61170ed7ab602d3d972a58f14ee3ee60494292d384649a3dc38ef463"},
|
||||
{file = "tomli-2.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:b273fcbd7fc64dc3600c098e39136522650c49bca95df2d11cf3b626422392c8"},
|
||||
{file = "tomli-2.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:940d56ee0410fa17ee1f12b817b37a4d4e4dc4d27340863cc67236c74f582e77"},
|
||||
{file = "tomli-2.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f85209946d1fe94416debbb88d00eb92ce9cd5266775424ff81bc959e001acaf"},
|
||||
{file = "tomli-2.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a56212bdcce682e56b0aaf79e869ba5d15a6163f88d5451cbde388d48b13f530"},
|
||||
{file = "tomli-2.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f3ffd1e098dfc032d4d3af5c0ac64f6d286d98bc148698356847b80fa4de1b"},
|
||||
{file = "tomli-2.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5e01decd096b1530d97d5d85cb4dff4af2d8347bd35686654a004f8dea20fc67"},
|
||||
{file = "tomli-2.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8a35dd0e643bb2610f156cca8db95d213a90015c11fee76c946aa62b7ae7e02f"},
|
||||
{file = "tomli-2.3.0-cp314-cp314t-win32.whl", hash = "sha256:a1f7f282fe248311650081faafa5f4732bdbfef5d45fe3f2e702fbc6f2d496e0"},
|
||||
{file = "tomli-2.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:70a251f8d4ba2d9ac2542eecf008b3c8a9fc5c3f9f02c56a9d7952612be2fdba"},
|
||||
{file = "tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b"},
|
||||
{file = "tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549"},
|
||||
{file = "tomli-2.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b5ef256a3fd497d4973c11bf142e9ed78b150d36f5773f1ca6088c230ffc5867"},
|
||||
{file = "tomli-2.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5572e41282d5268eb09a697c89a7bee84fae66511f87533a6f88bd2f7b652da9"},
|
||||
{file = "tomli-2.4.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:551e321c6ba03b55676970b47cb1b73f14a0a4dce6a3e1a9458fd6d921d72e95"},
|
||||
{file = "tomli-2.4.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5e3f639a7a8f10069d0e15408c0b96a2a828cfdec6fca05296ebcdcc28ca7c76"},
|
||||
{file = "tomli-2.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1b168f2731796b045128c45982d3a4874057626da0e2ef1fdd722848b741361d"},
|
||||
{file = "tomli-2.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:133e93646ec4300d651839d382d63edff11d8978be23da4cc106f5a18b7d0576"},
|
||||
{file = "tomli-2.4.0-cp311-cp311-win32.whl", hash = "sha256:b6c78bdf37764092d369722d9946cb65b8767bfa4110f902a1b2542d8d173c8a"},
|
||||
{file = "tomli-2.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:d3d1654e11d724760cdb37a3d7691f0be9db5fbdaef59c9f532aabf87006dbaa"},
|
||||
{file = "tomli-2.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:cae9c19ed12d4e8f3ebf46d1a75090e4c0dc16271c5bce1c833ac168f08fb614"},
|
||||
{file = "tomli-2.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:920b1de295e72887bafa3ad9f7a792f811847d57ea6b1215154030cf131f16b1"},
|
||||
{file = "tomli-2.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d6d9a4aee98fac3eab4952ad1d73aee87359452d1c086b5ceb43ed02ddb16b8"},
|
||||
{file = "tomli-2.4.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36b9d05b51e65b254ea6c2585b59d2c4cb91c8a3d91d0ed0f17591a29aaea54a"},
|
||||
{file = "tomli-2.4.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1c8a885b370751837c029ef9bc014f27d80840e48bac415f3412e6593bbc18c1"},
|
||||
{file = "tomli-2.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8768715ffc41f0008abe25d808c20c3d990f42b6e2e58305d5da280ae7d1fa3b"},
|
||||
{file = "tomli-2.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b438885858efd5be02a9a133caf5812b8776ee0c969fea02c45e8e3f296ba51"},
|
||||
{file = "tomli-2.4.0-cp312-cp312-win32.whl", hash = "sha256:0408e3de5ec77cc7f81960c362543cbbd91ef883e3138e81b729fc3eea5b9729"},
|
||||
{file = "tomli-2.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:685306e2cc7da35be4ee914fd34ab801a6acacb061b6a7abca922aaf9ad368da"},
|
||||
{file = "tomli-2.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:5aa48d7c2356055feef06a43611fc401a07337d5b006be13a30f6c58f869e3c3"},
|
||||
{file = "tomli-2.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84d081fbc252d1b6a982e1870660e7330fb8f90f676f6e78b052ad4e64714bf0"},
|
||||
{file = "tomli-2.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9a08144fa4cba33db5255f9b74f0b89888622109bd2776148f2597447f92a94e"},
|
||||
{file = "tomli-2.4.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c73add4bb52a206fd0c0723432db123c0c75c280cbd67174dd9d2db228ebb1b4"},
|
||||
{file = "tomli-2.4.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1fb2945cbe303b1419e2706e711b7113da57b7db31ee378d08712d678a34e51e"},
|
||||
{file = "tomli-2.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bbb1b10aa643d973366dc2cb1ad94f99c1726a02343d43cbc011edbfac579e7c"},
|
||||
{file = "tomli-2.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4cbcb367d44a1f0c2be408758b43e1ffb5308abe0ea222897d6bfc8e8281ef2f"},
|
||||
{file = "tomli-2.4.0-cp313-cp313-win32.whl", hash = "sha256:7d49c66a7d5e56ac959cb6fc583aff0651094ec071ba9ad43df785abc2320d86"},
|
||||
{file = "tomli-2.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:3cf226acb51d8f1c394c1b310e0e0e61fecdd7adcb78d01e294ac297dd2e7f87"},
|
||||
{file = "tomli-2.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:d20b797a5c1ad80c516e41bc1fb0443ddb5006e9aaa7bda2d71978346aeb9132"},
|
||||
{file = "tomli-2.4.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:26ab906a1eb794cd4e103691daa23d95c6919cc2fa9160000ac02370cc9dd3f6"},
|
||||
{file = "tomli-2.4.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:20cedb4ee43278bc4f2fee6cb50daec836959aadaf948db5172e776dd3d993fc"},
|
||||
{file = "tomli-2.4.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:39b0b5d1b6dd03684b3fb276407ebed7090bbec989fa55838c98560c01113b66"},
|
||||
{file = "tomli-2.4.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a26d7ff68dfdb9f87a016ecfd1e1c2bacbe3108f4e0f8bcd2228ef9a766c787d"},
|
||||
{file = "tomli-2.4.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:20ffd184fb1df76a66e34bd1b36b4a4641bd2b82954befa32fe8163e79f1a702"},
|
||||
{file = "tomli-2.4.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:75c2f8bbddf170e8effc98f5e9084a8751f8174ea6ccf4fca5398436e0320bc8"},
|
||||
{file = "tomli-2.4.0-cp314-cp314-win32.whl", hash = "sha256:31d556d079d72db7c584c0627ff3a24c5d3fb4f730221d3444f3efb1b2514776"},
|
||||
{file = "tomli-2.4.0-cp314-cp314-win_amd64.whl", hash = "sha256:43e685b9b2341681907759cf3a04e14d7104b3580f808cfde1dfdb60ada85475"},
|
||||
{file = "tomli-2.4.0-cp314-cp314-win_arm64.whl", hash = "sha256:3d895d56bd3f82ddd6faaff993c275efc2ff38e52322ea264122d72729dca2b2"},
|
||||
{file = "tomli-2.4.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:5b5807f3999fb66776dbce568cc9a828544244a8eb84b84b9bafc080c99597b9"},
|
||||
{file = "tomli-2.4.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c084ad935abe686bd9c898e62a02a19abfc9760b5a79bc29644463eaf2840cb0"},
|
||||
{file = "tomli-2.4.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f2e3955efea4d1cfbcb87bc321e00dc08d2bcb737fd1d5e398af111d86db5df"},
|
||||
{file = "tomli-2.4.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e0fe8a0b8312acf3a88077a0802565cb09ee34107813bba1c7cd591fa6cfc8d"},
|
||||
{file = "tomli-2.4.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:413540dce94673591859c4c6f794dfeaa845e98bf35d72ed59636f869ef9f86f"},
|
||||
{file = "tomli-2.4.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0dc56fef0e2c1c470aeac5b6ca8cc7b640bb93e92d9803ddaf9ea03e198f5b0b"},
|
||||
{file = "tomli-2.4.0-cp314-cp314t-win32.whl", hash = "sha256:d878f2a6707cc9d53a1be1414bbb419e629c3d6e67f69230217bb663e76b5087"},
|
||||
{file = "tomli-2.4.0-cp314-cp314t-win_amd64.whl", hash = "sha256:2add28aacc7425117ff6364fe9e06a183bb0251b03f986df0e78e974047571fd"},
|
||||
{file = "tomli-2.4.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2b1e3b80e1d5e52e40e9b924ec43d81570f0e7d09d11081b797bc4692765a3d4"},
|
||||
{file = "tomli-2.4.0-py3-none-any.whl", hash = "sha256:1f776e7d669ebceb01dee46484485f43a4048746235e683bcdffacdf1fb4785a"},
|
||||
{file = "tomli-2.4.0.tar.gz", hash = "sha256:aa89c3f6c277dd275d8e243ad24f3b5e701491a860d5121f2cdd399fbb31fc9c"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -905,13 +1094,13 @@ files = [
|
|||
|
||||
[[package]]
|
||||
name = "urllib3"
|
||||
version = "2.6.2"
|
||||
version = "2.6.3"
|
||||
description = "HTTP library with thread-safe connection pooling, file post, and more."
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
files = [
|
||||
{file = "urllib3-2.6.2-py3-none-any.whl", hash = "sha256:ec21cddfe7724fc7cb4ba4bea7aa8e2ef36f607a4bab81aa6ce42a13dc3f03dd"},
|
||||
{file = "urllib3-2.6.2.tar.gz", hash = "sha256:016f9c98bb7e98085cb2b4b17b87d2c702975664e4f060c6532e64d1c1a5e797"},
|
||||
{file = "urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4"},
|
||||
{file = "urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
|
|
@ -923,4 +1112,4 @@ zstd = ["backports-zstd (>=1.0.0)"]
|
|||
[metadata]
|
||||
lock-version = "2.0"
|
||||
python-versions = "^3.10"
|
||||
content-hash = "20623104a1a5f4c6d4aaa759f25b2591d5de345d1464e727eb4140a6ef9a5b6e"
|
||||
content-hash = "30e16396439f2cdd69005a5b7bdf8144aac33422a77a63accbc9eaa74151d851"
|
||||
|
|
|
|||
|
|
@ -1,17 +1,21 @@
|
|||
[tool.poetry]
|
||||
name = "enroll"
|
||||
version = "0.2.3"
|
||||
version = "0.4.3"
|
||||
description = "Enroll a server's running state retrospectively into Ansible"
|
||||
authors = ["Miguel Jacq <mig@mig5.net>"]
|
||||
license = "GPL-3.0-or-later"
|
||||
readme = "README.md"
|
||||
packages = [{ include = "enroll" }]
|
||||
repository = "https://git.mig5.net/mig5/enroll"
|
||||
include = [
|
||||
{ path = "enroll/schema/state.schema.json", format = ["sdist", "wheel"] }
|
||||
]
|
||||
|
||||
[tool.poetry.dependencies]
|
||||
python = "^3.10"
|
||||
pyyaml = "^6"
|
||||
paramiko = ">=3.5"
|
||||
jsonschema = "^4.23.0"
|
||||
|
||||
[tool.poetry.scripts]
|
||||
enroll = "enroll.cli:main"
|
||||
|
|
|
|||
|
|
@ -72,7 +72,7 @@ for dist in ${DISTS[@]}; do
|
|||
rm -rf "$PWD/dist/rpm"/*
|
||||
mkdir -p "$PWD/dist/rpm"
|
||||
|
||||
docker run --rm -v "$PWD":/src -v "$PWD/dist/rpm":/out -v "$HOME/git/jinjaturtle/dist/rpm":/deps:ro enroll-rpm:${release}
|
||||
docker run --rm -v "$PWD":/src -v "$PWD/dist/rpm":/out enroll-rpm:${release}
|
||||
sudo chown -R "${USER}" "$PWD/dist"
|
||||
|
||||
for file in `ls -1 "${BUILD_OUTPUT}/rpm"`; do
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
%global upstream_version 0.2.3
|
||||
%global upstream_version 0.4.3
|
||||
|
||||
Name: enroll
|
||||
Version: %{upstream_version}
|
||||
|
|
@ -17,8 +17,8 @@ BuildRequires: python3-poetry-core
|
|||
|
||||
Requires: python3-yaml
|
||||
Requires: python3-paramiko
|
||||
Requires: python3-jsonschema
|
||||
|
||||
# Make sure private repo dependency is pulled in by package name as well.
|
||||
Recommends: jinjaturtle
|
||||
|
||||
%description
|
||||
|
|
@ -43,6 +43,28 @@ Enroll a server's running state retrospectively into Ansible.
|
|||
%{_bindir}/enroll
|
||||
|
||||
%changelog
|
||||
* Fri Jan 16 2026 Miguel Jacq <mig@mig5.net> - %{version}-%{release}
|
||||
- Add support for AddressFamily and ConnectTimeout in the .ssh/config when using `--remote-ssh-config`.
|
||||
* Tue Jan 13 2026 Miguel Jacq <mig@mig5.net> - %{version}-%{release}
|
||||
- Support `--remote-ssh-config [path-to-ssh-config]` as an argument in case extra params are required beyond `--remote-port` or `--remote-user`. Note: `--remote-host` must still be s
|
||||
et, but it can be an 'alias' represented by the 'Host' value in the ssh config.
|
||||
* Sun Jan 11 2026 Miguel Jacq <mig@mig5.net> - %{version}-%{release}
|
||||
- Add interactive output when 'enroll diff --enforce' is invoking Ansible.
|
||||
* Sat Jan 10 2026 Miguel Jacq <mig@mig5.net> - %{version}-%{release}
|
||||
- Introduce `enroll validate` - a tool to validate a harvest against the state schema, or check for missing or orphaned obsolete artifacts in a harvest.
|
||||
- Attempt to generate Jinja2 templates of systemd unit files and Postfix main.cf (now that JinjaTurtle supports it)
|
||||
- Update pynacl dependency to resolve CVE-2025-69277
|
||||
- Add `--exclude-path` to `enroll diff` command, so that you can ignore certain churn from the diff (stuff you still wanted to harvest as a baseline but don't care if it changes day to day)
|
||||
- Add `--ignore-package-versions` to `enroll diff` command, to optionally ignore package upgrades (e.g due to patching) from the diff.
|
||||
- Add tags to the playbook for each role, to allow easier targeting of specific roles during play later.
|
||||
- Add `--enforce` mode to `enroll diff`. If there is diff detected between the two harvests, and it can enforce restoring the state from the older harvest, it will manifest the state and apply it with ansible.
|
||||
Only the specific roles that had diffed will be applied (via the new tags capability)
|
||||
* Mon Jan 05 2026 Miguel Jacq <mig@mig5.net> - %{version}-%{release}
|
||||
- Introduce `enroll explain` - a tool to analyze and explain what's in (or not in) a harvest and why.
|
||||
- Centralise the cron and logrotate stuff into their respective roles, we had a bit of duplication between roles based on harvest discovery.
|
||||
- Capture other files in the user's home directory such as `.bashrc`, `.bash_aliases`, `.profile`, if these files differ from the `/etc/skel` defaults
|
||||
- Ignore files that end with a tilde or - (probably backup files generated by editors or shadow file changes)
|
||||
- Manage certain symlinks e.g for apache2/nginx sites-enabled and so on
|
||||
* Sun Jan 04 2026 Miguel Jacq <mig@mig5.net> - %{version}-%{release}
|
||||
- Introduce --ask-become-pass or -K to support password-required sudo on remote hosts, just like Ansible. It will also fall back to this prompt if a password is required but the arg wasn't passed in.
|
||||
* Sat Jan 03 2026 Miguel Jacq <mig@mig5.net> - %{version}-%{release}
|
||||
|
|
|
|||
31
tests.sh
31
tests.sh
|
|
@ -9,14 +9,43 @@ BUNDLE_DIR="/tmp/bundle"
|
|||
ANSIBLE_DIR="/tmp/ansible"
|
||||
rm -rf "${BUNDLE_DIR}" "${ANSIBLE_DIR}"
|
||||
|
||||
# Install something that has symlinks like apache2,
|
||||
# to extend the manifests that will be linted later
|
||||
DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends apache2
|
||||
|
||||
# Generate data
|
||||
poetry run \
|
||||
enroll single-shot \
|
||||
--harvest "${BUNDLE_DIR}" \
|
||||
--out "${ANSIBLE_DIR}"
|
||||
|
||||
builtin cd "${ANSIBLE_DIR}"
|
||||
# Analyse
|
||||
poetry run \
|
||||
enroll explain "${BUNDLE_DIR}"
|
||||
poetry run \
|
||||
enroll explain "${BUNDLE_DIR}" --format json | jq
|
||||
|
||||
# Validate
|
||||
poetry run \
|
||||
enroll validate --fail-on-warnings "${BUNDLE_DIR}"
|
||||
|
||||
# Install/remove something, harvest again and diff the harvests
|
||||
DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends cowsay
|
||||
poetry run \
|
||||
enroll harvest --out "${BUNDLE_DIR}2"
|
||||
# Validate
|
||||
poetry run \
|
||||
enroll validate --fail-on-warnings "${BUNDLE_DIR}2"
|
||||
# Diff
|
||||
poetry run \
|
||||
enroll diff \
|
||||
--old "${BUNDLE_DIR}" \
|
||||
--new "${BUNDLE_DIR}2" \
|
||||
--format json | jq
|
||||
DEBIAN_FRONTEND=noninteractive apt-get remove -y --purge cowsay
|
||||
|
||||
# Ansible test
|
||||
builtin cd "${ANSIBLE_DIR}"
|
||||
# Lint
|
||||
ansible-lint "${ANSIBLE_DIR}"
|
||||
|
||||
|
|
|
|||
|
|
@ -1,9 +1,14 @@
|
|||
from __future__ import annotations
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
import enroll.cli as cli
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
from enroll.remote import RemoteSudoPasswordRequired
|
||||
from enroll.sopsutil import SopsError
|
||||
|
||||
|
||||
def test_cli_harvest_subcommand_calls_harvest(monkeypatch, capsys, tmp_path):
|
||||
called = {}
|
||||
|
|
@ -398,3 +403,286 @@ def test_cli_manifest_common_args(monkeypatch, tmp_path):
|
|||
cli.main()
|
||||
assert called["fqdn"] == "example.test"
|
||||
assert called["jinjaturtle"] == "off"
|
||||
|
||||
|
||||
def test_cli_explain_passes_args_and_writes_stdout(monkeypatch, capsys, tmp_path):
|
||||
called = {}
|
||||
|
||||
def fake_explain_state(
|
||||
harvest: str,
|
||||
*,
|
||||
sops_mode: bool = False,
|
||||
fmt: str = "text",
|
||||
max_examples: int = 3,
|
||||
):
|
||||
called["harvest"] = harvest
|
||||
called["sops_mode"] = sops_mode
|
||||
called["fmt"] = fmt
|
||||
called["max_examples"] = max_examples
|
||||
return "EXPLAINED\n"
|
||||
|
||||
monkeypatch.setattr(cli, "explain_state", fake_explain_state)
|
||||
|
||||
monkeypatch.setattr(
|
||||
sys,
|
||||
"argv",
|
||||
[
|
||||
"enroll",
|
||||
"explain",
|
||||
"--sops",
|
||||
"--format",
|
||||
"json",
|
||||
"--max-examples",
|
||||
"7",
|
||||
str(tmp_path / "bundle" / "state.json"),
|
||||
],
|
||||
)
|
||||
|
||||
cli.main()
|
||||
out = capsys.readouterr().out
|
||||
assert out == "EXPLAINED\n"
|
||||
assert called["sops_mode"] is True
|
||||
assert called["fmt"] == "json"
|
||||
assert called["max_examples"] == 7
|
||||
|
||||
|
||||
def test_discover_config_path_missing_config_value_returns_none(monkeypatch):
|
||||
# Covers the "--config" flag present with no value.
|
||||
monkeypatch.delenv("ENROLL_CONFIG", raising=False)
|
||||
monkeypatch.delenv("XDG_CONFIG_HOME", raising=False)
|
||||
assert cli._discover_config_path(["--config"]) is None
|
||||
|
||||
|
||||
def test_discover_config_path_defaults_to_home_config(monkeypatch, tmp_path: Path):
|
||||
# Covers the Path.home() / ".config" fallback.
|
||||
monkeypatch.delenv("ENROLL_CONFIG", raising=False)
|
||||
monkeypatch.delenv("XDG_CONFIG_HOME", raising=False)
|
||||
monkeypatch.setattr(cli.Path, "home", lambda: tmp_path)
|
||||
monkeypatch.setattr(cli.Path, "cwd", lambda: tmp_path)
|
||||
|
||||
cp = tmp_path / ".config" / "enroll" / "enroll.ini"
|
||||
cp.parent.mkdir(parents=True)
|
||||
cp.write_text("[enroll]\n", encoding="utf-8")
|
||||
|
||||
assert cli._discover_config_path(["harvest"]) == cp
|
||||
|
||||
|
||||
def test_cli_harvest_local_sops_encrypts_and_prints_path(
|
||||
monkeypatch, tmp_path: Path, capsys
|
||||
):
|
||||
out_dir = tmp_path / "out"
|
||||
out_dir.mkdir()
|
||||
calls: dict[str, object] = {}
|
||||
|
||||
def fake_harvest(bundle_dir: str, **kwargs):
|
||||
calls["bundle"] = bundle_dir
|
||||
# Create a minimal state.json so tooling that expects it won't break.
|
||||
Path(bundle_dir).mkdir(parents=True, exist_ok=True)
|
||||
(Path(bundle_dir) / "state.json").write_text("{}", encoding="utf-8")
|
||||
return str(Path(bundle_dir) / "state.json")
|
||||
|
||||
def fake_encrypt(bundle_dir: Path, out_file: Path, fps: list[str]):
|
||||
calls["encrypt"] = (bundle_dir, out_file, fps)
|
||||
out_file.write_text("encrypted", encoding="utf-8")
|
||||
return out_file
|
||||
|
||||
monkeypatch.setattr(cli, "harvest", fake_harvest)
|
||||
monkeypatch.setattr(cli, "_encrypt_harvest_dir_to_sops", fake_encrypt)
|
||||
|
||||
monkeypatch.setattr(
|
||||
sys,
|
||||
"argv",
|
||||
[
|
||||
"enroll",
|
||||
"harvest",
|
||||
"--sops",
|
||||
"ABCDEF",
|
||||
"--out",
|
||||
str(out_dir),
|
||||
],
|
||||
)
|
||||
cli.main()
|
||||
|
||||
printed = capsys.readouterr().out.strip()
|
||||
assert printed.endswith("harvest.tar.gz.sops")
|
||||
assert Path(printed).exists()
|
||||
assert calls.get("encrypt")
|
||||
|
||||
|
||||
def test_cli_harvest_remote_sops_encrypts_and_prints_path(
|
||||
monkeypatch, tmp_path: Path, capsys
|
||||
):
|
||||
out_dir = tmp_path / "out"
|
||||
out_dir.mkdir()
|
||||
calls: dict[str, object] = {}
|
||||
|
||||
def fake_remote_harvest(**kwargs):
|
||||
calls["remote"] = kwargs
|
||||
# Create a minimal state.json in the temp bundle.
|
||||
out = Path(kwargs["local_out_dir"]) / "state.json"
|
||||
out.write_text("{}", encoding="utf-8")
|
||||
return out
|
||||
|
||||
def fake_encrypt(bundle_dir: Path, out_file: Path, fps: list[str]):
|
||||
calls["encrypt"] = (bundle_dir, out_file, fps)
|
||||
out_file.write_text("encrypted", encoding="utf-8")
|
||||
return out_file
|
||||
|
||||
monkeypatch.setattr(cli, "remote_harvest", fake_remote_harvest)
|
||||
monkeypatch.setattr(cli, "_encrypt_harvest_dir_to_sops", fake_encrypt)
|
||||
|
||||
monkeypatch.setattr(
|
||||
sys,
|
||||
"argv",
|
||||
[
|
||||
"enroll",
|
||||
"harvest",
|
||||
"--remote-host",
|
||||
"example.com",
|
||||
"--remote-user",
|
||||
"root",
|
||||
"--sops",
|
||||
"ABCDEF",
|
||||
"--out",
|
||||
str(out_dir),
|
||||
],
|
||||
)
|
||||
cli.main()
|
||||
|
||||
printed = capsys.readouterr().out.strip()
|
||||
assert printed.endswith("harvest.tar.gz.sops")
|
||||
assert Path(printed).exists()
|
||||
assert calls.get("remote")
|
||||
assert calls.get("encrypt")
|
||||
|
||||
|
||||
def test_cli_harvest_remote_password_required_exits_cleanly(monkeypatch):
|
||||
def boom(**kwargs):
|
||||
raise RemoteSudoPasswordRequired("pw required")
|
||||
|
||||
monkeypatch.setattr(cli, "remote_harvest", boom)
|
||||
monkeypatch.setattr(
|
||||
sys,
|
||||
"argv",
|
||||
[
|
||||
"enroll",
|
||||
"harvest",
|
||||
"--remote-host",
|
||||
"example.com",
|
||||
"--remote-user",
|
||||
"root",
|
||||
],
|
||||
)
|
||||
with pytest.raises(SystemExit) as e:
|
||||
cli.main()
|
||||
assert "--ask-become-pass" in str(e.value)
|
||||
|
||||
|
||||
def test_cli_runtime_error_is_wrapped_as_user_friendly_system_exit(monkeypatch):
|
||||
def boom(*args, **kwargs):
|
||||
raise RuntimeError("nope")
|
||||
|
||||
monkeypatch.setattr(cli, "harvest", boom)
|
||||
monkeypatch.setattr(sys, "argv", ["enroll", "harvest", "--out", "/tmp/x"])
|
||||
with pytest.raises(SystemExit) as e:
|
||||
cli.main()
|
||||
assert str(e.value) == "error: nope"
|
||||
|
||||
|
||||
def test_cli_sops_error_is_wrapped_as_user_friendly_system_exit(monkeypatch):
|
||||
def boom(*args, **kwargs):
|
||||
raise SopsError("sops broke")
|
||||
|
||||
monkeypatch.setattr(cli, "manifest", boom)
|
||||
monkeypatch.setattr(
|
||||
sys, "argv", ["enroll", "manifest", "--harvest", "/tmp/x", "--out", "/tmp/y"]
|
||||
)
|
||||
with pytest.raises(SystemExit) as e:
|
||||
cli.main()
|
||||
assert str(e.value) == "error: sops broke"
|
||||
|
||||
|
||||
def test_cli_diff_notifies_webhook_and_email_and_respects_exit_code(
|
||||
monkeypatch, capsys
|
||||
):
|
||||
calls: dict[str, object] = {}
|
||||
|
||||
def fake_compare(old, new, sops_mode=False, **kwargs):
|
||||
calls["compare"] = (old, new, sops_mode)
|
||||
return {"dummy": True}, True
|
||||
|
||||
def fake_format(report, fmt="text"):
|
||||
calls.setdefault("format", []).append((report, fmt))
|
||||
return "REPORT\n"
|
||||
|
||||
def fake_post(url, body, headers=None):
|
||||
calls["webhook"] = (url, body, headers)
|
||||
return 200, b"ok"
|
||||
|
||||
def fake_email(**kwargs):
|
||||
calls["email"] = kwargs
|
||||
|
||||
monkeypatch.setattr(cli, "compare_harvests", fake_compare)
|
||||
monkeypatch.setattr(cli, "format_report", fake_format)
|
||||
monkeypatch.setattr(cli, "post_webhook", fake_post)
|
||||
monkeypatch.setattr(cli, "send_email", fake_email)
|
||||
monkeypatch.setenv("SMTPPW", "secret")
|
||||
|
||||
monkeypatch.setattr(
|
||||
sys,
|
||||
"argv",
|
||||
[
|
||||
"enroll",
|
||||
"diff",
|
||||
"--old",
|
||||
"/tmp/old",
|
||||
"--new",
|
||||
"/tmp/new",
|
||||
"--webhook",
|
||||
"https://example.invalid/h",
|
||||
"--webhook-header",
|
||||
"X-Test: ok",
|
||||
"--email-to",
|
||||
"a@example.com",
|
||||
"--smtp-password-env",
|
||||
"SMTPPW",
|
||||
"--exit-code",
|
||||
],
|
||||
)
|
||||
|
||||
with pytest.raises(SystemExit) as e:
|
||||
cli.main()
|
||||
assert e.value.code == 2
|
||||
|
||||
assert calls.get("compare")
|
||||
assert calls.get("webhook")
|
||||
assert calls.get("email")
|
||||
# No report printed when exiting via --exit-code? (we still render and print).
|
||||
_ = capsys.readouterr()
|
||||
|
||||
|
||||
def test_cli_diff_webhook_http_error_raises_system_exit(monkeypatch):
|
||||
def fake_compare(old, new, sops_mode=False, **kwargs):
|
||||
return {"dummy": True}, True
|
||||
|
||||
monkeypatch.setattr(cli, "compare_harvests", fake_compare)
|
||||
monkeypatch.setattr(cli, "format_report", lambda report, fmt="text": "R\n")
|
||||
monkeypatch.setattr(cli, "post_webhook", lambda url, body, headers=None: (500, b""))
|
||||
|
||||
monkeypatch.setattr(
|
||||
sys,
|
||||
"argv",
|
||||
[
|
||||
"enroll",
|
||||
"diff",
|
||||
"--old",
|
||||
"/tmp/old",
|
||||
"--new",
|
||||
"/tmp/new",
|
||||
"--webhook",
|
||||
"https://example.invalid/h",
|
||||
],
|
||||
)
|
||||
with pytest.raises(SystemExit) as e:
|
||||
cli.main()
|
||||
assert "HTTP 500" in str(e.value)
|
||||
|
|
|
|||
400
tests/test_diff_ignore_versions_exclude_enforce.py
Normal file
400
tests/test_diff_ignore_versions_exclude_enforce.py
Normal file
|
|
@ -0,0 +1,400 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import sys
|
||||
import types
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
def _write_bundle(
|
||||
root: Path, state: dict, artifacts: dict[str, bytes] | None = None
|
||||
) -> None:
|
||||
root.mkdir(parents=True, exist_ok=True)
|
||||
(root / "state.json").write_text(json.dumps(state, indent=2), encoding="utf-8")
|
||||
artifacts = artifacts or {}
|
||||
for rel, data in artifacts.items():
|
||||
p = root / rel
|
||||
p.parent.mkdir(parents=True, exist_ok=True)
|
||||
p.write_bytes(data)
|
||||
|
||||
|
||||
def _minimal_roles() -> dict:
|
||||
"""A small roles structure that's sufficient for enroll.diff file indexing."""
|
||||
return {
|
||||
"users": {
|
||||
"role_name": "users",
|
||||
"users": [],
|
||||
"managed_files": [],
|
||||
"excluded": [],
|
||||
"notes": [],
|
||||
},
|
||||
"services": [],
|
||||
"packages": [],
|
||||
"apt_config": {
|
||||
"role_name": "apt_config",
|
||||
"managed_files": [],
|
||||
"excluded": [],
|
||||
"notes": [],
|
||||
},
|
||||
"etc_custom": {
|
||||
"role_name": "etc_custom",
|
||||
"managed_files": [],
|
||||
"excluded": [],
|
||||
"notes": [],
|
||||
},
|
||||
"usr_local_custom": {
|
||||
"role_name": "usr_local_custom",
|
||||
"managed_files": [],
|
||||
"excluded": [],
|
||||
"notes": [],
|
||||
},
|
||||
"extra_paths": {
|
||||
"role_name": "extra_paths",
|
||||
"include_patterns": [],
|
||||
"exclude_patterns": [],
|
||||
"managed_files": [],
|
||||
"excluded": [],
|
||||
"notes": [],
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
def test_diff_ignore_package_versions_suppresses_version_drift(tmp_path: Path):
|
||||
from enroll.diff import compare_harvests
|
||||
|
||||
old = tmp_path / "old"
|
||||
new = tmp_path / "new"
|
||||
|
||||
old_state = {
|
||||
"schema_version": 3,
|
||||
"host": {"hostname": "h1"},
|
||||
"inventory": {
|
||||
"packages": {
|
||||
"curl": {
|
||||
"version": "1.0",
|
||||
"installations": [{"version": "1.0", "arch": "amd64"}],
|
||||
}
|
||||
}
|
||||
},
|
||||
"roles": _minimal_roles(),
|
||||
}
|
||||
new_state = {
|
||||
**old_state,
|
||||
"inventory": {
|
||||
"packages": {
|
||||
"curl": {
|
||||
"version": "1.1",
|
||||
"installations": [{"version": "1.1", "arch": "amd64"}],
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
_write_bundle(old, old_state)
|
||||
_write_bundle(new, new_state)
|
||||
|
||||
# Without ignore flag, version drift is reported and counts as changes.
|
||||
report, has_changes = compare_harvests(str(old), str(new))
|
||||
assert has_changes is True
|
||||
assert report["packages"]["version_changed"]
|
||||
|
||||
# With ignore flag, version drift is suppressed and does not count as changes.
|
||||
report2, has_changes2 = compare_harvests(
|
||||
str(old), str(new), ignore_package_versions=True
|
||||
)
|
||||
assert has_changes2 is False
|
||||
assert report2["packages"]["version_changed"] == []
|
||||
assert report2["packages"]["version_changed_ignored_count"] == 1
|
||||
assert report2["filters"]["ignore_package_versions"] is True
|
||||
|
||||
|
||||
def test_diff_exclude_path_filters_file_drift_and_affects_has_changes(tmp_path: Path):
|
||||
from enroll.diff import compare_harvests
|
||||
|
||||
old = tmp_path / "old"
|
||||
new = tmp_path / "new"
|
||||
|
||||
# Only file drift is under /var/anacron, which is excluded.
|
||||
old_state = {
|
||||
"schema_version": 3,
|
||||
"host": {"hostname": "h1"},
|
||||
"inventory": {"packages": {}},
|
||||
"roles": {
|
||||
**_minimal_roles(),
|
||||
"extra_paths": {
|
||||
**_minimal_roles()["extra_paths"],
|
||||
"managed_files": [
|
||||
{
|
||||
"path": "/var/anacron/daily.stamp",
|
||||
"src_rel": "var/anacron/daily.stamp",
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"mode": "0644",
|
||||
"reason": "extra_path",
|
||||
}
|
||||
],
|
||||
},
|
||||
},
|
||||
}
|
||||
new_state = json.loads(json.dumps(old_state))
|
||||
|
||||
_write_bundle(
|
||||
old,
|
||||
old_state,
|
||||
{"artifacts/extra_paths/var/anacron/daily.stamp": b"yesterday\n"},
|
||||
)
|
||||
_write_bundle(
|
||||
new,
|
||||
new_state,
|
||||
{"artifacts/extra_paths/var/anacron/daily.stamp": b"today\n"},
|
||||
)
|
||||
|
||||
report, has_changes = compare_harvests(
|
||||
str(old), str(new), exclude_paths=["/var/anacron"]
|
||||
)
|
||||
assert has_changes is False
|
||||
assert report["files"]["changed"] == []
|
||||
assert report["filters"]["exclude_paths"] == ["/var/anacron"]
|
||||
|
||||
|
||||
def test_diff_exclude_path_only_filters_files_not_packages(tmp_path: Path):
|
||||
from enroll.diff import compare_harvests
|
||||
|
||||
old = tmp_path / "old"
|
||||
new = tmp_path / "new"
|
||||
|
||||
old_state = {
|
||||
"schema_version": 3,
|
||||
"host": {"hostname": "h1"},
|
||||
"inventory": {"packages": {"curl": {"version": "1.0"}}},
|
||||
"roles": {
|
||||
**_minimal_roles(),
|
||||
"extra_paths": {
|
||||
**_minimal_roles()["extra_paths"],
|
||||
"managed_files": [
|
||||
{
|
||||
"path": "/var/anacron/daily.stamp",
|
||||
"src_rel": "var/anacron/daily.stamp",
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"mode": "0644",
|
||||
"reason": "extra_path",
|
||||
}
|
||||
],
|
||||
},
|
||||
},
|
||||
}
|
||||
new_state = {
|
||||
**old_state,
|
||||
"inventory": {
|
||||
"packages": {
|
||||
"curl": {"version": "1.0"},
|
||||
"htop": {"version": "3.0"},
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
_write_bundle(
|
||||
old,
|
||||
old_state,
|
||||
{"artifacts/extra_paths/var/anacron/daily.stamp": b"yesterday\n"},
|
||||
)
|
||||
_write_bundle(
|
||||
new,
|
||||
new_state,
|
||||
{
|
||||
"artifacts/extra_paths/var/anacron/daily.stamp": b"today\n",
|
||||
},
|
||||
)
|
||||
|
||||
report, has_changes = compare_harvests(
|
||||
str(old), str(new), exclude_paths=["/var/anacron"]
|
||||
)
|
||||
assert has_changes is True
|
||||
# File drift is filtered, but package drift remains.
|
||||
assert report["files"]["changed"] == []
|
||||
assert report["packages"]["added"] == ["htop"]
|
||||
|
||||
|
||||
def test_enforce_old_harvest_requires_ansible_playbook(monkeypatch, tmp_path: Path):
|
||||
import enroll.diff as d
|
||||
|
||||
monkeypatch.setattr(d.shutil, "which", lambda name: None)
|
||||
|
||||
old = tmp_path / "old"
|
||||
_write_bundle(old, {"inventory": {"packages": {}}, "roles": _minimal_roles()})
|
||||
|
||||
with pytest.raises(RuntimeError, match="ansible-playbook not found"):
|
||||
d.enforce_old_harvest(str(old))
|
||||
|
||||
|
||||
def test_enforce_old_harvest_runs_ansible_with_tags_from_file_drift(
|
||||
monkeypatch, tmp_path: Path
|
||||
):
|
||||
import enroll.diff as d
|
||||
import enroll.manifest as mf
|
||||
|
||||
# Pretend ansible-playbook is installed.
|
||||
monkeypatch.setattr(d.shutil, "which", lambda name: "/usr/bin/ansible-playbook")
|
||||
|
||||
calls: dict[str, object] = {}
|
||||
|
||||
# Stub manifest generation to only create playbook.yml (fast, no real roles needed).
|
||||
def fake_manifest(_harvest_dir: str, out_dir: str, **_kwargs):
|
||||
out = Path(out_dir)
|
||||
(out / "playbook.yml").write_text(
|
||||
"---\n- hosts: all\n gather_facts: false\n roles: []\n",
|
||||
encoding="utf-8",
|
||||
)
|
||||
|
||||
monkeypatch.setattr(mf, "manifest", fake_manifest)
|
||||
|
||||
def fake_run(
|
||||
argv, cwd=None, env=None, capture_output=False, text=False, check=False
|
||||
):
|
||||
calls["argv"] = list(argv)
|
||||
calls["cwd"] = cwd
|
||||
return types.SimpleNamespace(returncode=0, stdout="ok", stderr="")
|
||||
|
||||
monkeypatch.setattr(d.subprocess, "run", fake_run)
|
||||
|
||||
old = tmp_path / "old"
|
||||
old_state = {
|
||||
"schema_version": 3,
|
||||
"host": {"hostname": "h1"},
|
||||
"inventory": {"packages": {}},
|
||||
"roles": {
|
||||
**_minimal_roles(),
|
||||
"usr_local_custom": {
|
||||
**_minimal_roles()["usr_local_custom"],
|
||||
"managed_files": [
|
||||
{
|
||||
"path": "/etc/myapp.conf",
|
||||
"src_rel": "etc/myapp.conf",
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"mode": "0644",
|
||||
"reason": "custom",
|
||||
}
|
||||
],
|
||||
},
|
||||
},
|
||||
}
|
||||
_write_bundle(old, old_state)
|
||||
|
||||
# Minimal report containing enforceable drift: a baseline file is "removed".
|
||||
report = {
|
||||
"packages": {"added": [], "removed": [], "version_changed": []},
|
||||
"services": {"enabled_added": [], "enabled_removed": [], "changed": []},
|
||||
"users": {"added": [], "removed": [], "changed": []},
|
||||
"files": {
|
||||
"added": [],
|
||||
"removed": [{"path": "/etc/myapp.conf", "role": "usr_local_custom"}],
|
||||
"changed": [],
|
||||
},
|
||||
}
|
||||
|
||||
info = d.enforce_old_harvest(str(old), report=report)
|
||||
assert info["status"] == "applied"
|
||||
assert "--tags" in info["command"]
|
||||
assert "role_usr_local_custom" in ",".join(info.get("tags") or [])
|
||||
|
||||
argv = calls.get("argv")
|
||||
assert argv and argv[0].endswith("ansible-playbook")
|
||||
assert "--tags" in argv
|
||||
# Ensure we pass the computed tag.
|
||||
i = argv.index("--tags")
|
||||
assert "role_usr_local_custom" in str(argv[i + 1])
|
||||
|
||||
|
||||
def test_cli_diff_forwards_exclude_and_ignore_flags(monkeypatch, capsys):
|
||||
import enroll.cli as cli
|
||||
|
||||
calls: dict[str, object] = {}
|
||||
|
||||
def fake_compare(
|
||||
old, new, *, sops_mode=False, exclude_paths=None, ignore_package_versions=False
|
||||
):
|
||||
calls["compare"] = {
|
||||
"old": old,
|
||||
"new": new,
|
||||
"sops_mode": sops_mode,
|
||||
"exclude_paths": exclude_paths,
|
||||
"ignore_package_versions": ignore_package_versions,
|
||||
}
|
||||
# No changes -> should not try to enforce.
|
||||
return {"packages": {}, "services": {}, "users": {}, "files": {}}, False
|
||||
|
||||
monkeypatch.setattr(cli, "compare_harvests", fake_compare)
|
||||
monkeypatch.setattr(cli, "format_report", lambda report, fmt="text": "R\n")
|
||||
monkeypatch.setattr(
|
||||
sys,
|
||||
"argv",
|
||||
[
|
||||
"enroll",
|
||||
"diff",
|
||||
"--old",
|
||||
"/tmp/old",
|
||||
"--new",
|
||||
"/tmp/new",
|
||||
"--exclude-path",
|
||||
"/var/anacron",
|
||||
"--ignore-package-versions",
|
||||
],
|
||||
)
|
||||
|
||||
cli.main()
|
||||
_ = capsys.readouterr()
|
||||
assert calls["compare"]["exclude_paths"] == ["/var/anacron"]
|
||||
assert calls["compare"]["ignore_package_versions"] is True
|
||||
|
||||
|
||||
def test_cli_diff_enforce_skips_when_no_enforceable_drift(monkeypatch):
|
||||
import enroll.cli as cli
|
||||
|
||||
# Drift exists, but is not enforceable (only additions / version changes).
|
||||
report = {
|
||||
"packages": {"added": ["htop"], "removed": [], "version_changed": []},
|
||||
"services": {
|
||||
"enabled_added": ["x.service"],
|
||||
"enabled_removed": [],
|
||||
"changed": [],
|
||||
},
|
||||
"users": {"added": ["bob"], "removed": [], "changed": []},
|
||||
"files": {"added": [{"path": "/tmp/new"}], "removed": [], "changed": []},
|
||||
}
|
||||
|
||||
monkeypatch.setattr(cli, "compare_harvests", lambda *a, **k: (report, True))
|
||||
monkeypatch.setattr(cli, "has_enforceable_drift", lambda r: False)
|
||||
called = {"enforce": False}
|
||||
monkeypatch.setattr(
|
||||
cli, "enforce_old_harvest", lambda *a, **k: called.update({"enforce": True})
|
||||
)
|
||||
|
||||
captured = {}
|
||||
|
||||
def fake_format(rep, fmt="text"):
|
||||
captured["report"] = rep
|
||||
return "R\n"
|
||||
|
||||
monkeypatch.setattr(cli, "format_report", fake_format)
|
||||
|
||||
monkeypatch.setattr(
|
||||
sys,
|
||||
"argv",
|
||||
[
|
||||
"enroll",
|
||||
"diff",
|
||||
"--old",
|
||||
"/tmp/old",
|
||||
"--new",
|
||||
"/tmp/new",
|
||||
"--enforce",
|
||||
],
|
||||
)
|
||||
|
||||
cli.main()
|
||||
assert called["enforce"] is False
|
||||
assert captured["report"]["enforcement"]["status"] == "skipped"
|
||||
222
tests/test_explain.py
Normal file
222
tests/test_explain.py
Normal file
|
|
@ -0,0 +1,222 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
import enroll.explain as ex
|
||||
|
||||
|
||||
def _write_state(bundle: Path, state: dict) -> Path:
|
||||
bundle.mkdir(parents=True, exist_ok=True)
|
||||
(bundle / "state.json").write_text(json.dumps(state, indent=2), encoding="utf-8")
|
||||
return bundle / "state.json"
|
||||
|
||||
|
||||
def test_explain_state_text_renders_roles_inventory_and_reasons(tmp_path: Path):
|
||||
bundle = tmp_path / "bundle"
|
||||
state = {
|
||||
"schema_version": 3,
|
||||
"host": {"hostname": "h1", "os": "debian", "pkg_backend": "dpkg"},
|
||||
"enroll": {"version": "0.0.0"},
|
||||
"inventory": {
|
||||
"packages": {
|
||||
"foo": {
|
||||
"installations": [{"version": "1.0", "arch": "amd64"}],
|
||||
"observed_via": [
|
||||
{"kind": "systemd_unit", "ref": "foo.service"},
|
||||
{"kind": "package_role", "ref": "foo"},
|
||||
],
|
||||
"roles": ["foo"],
|
||||
},
|
||||
"bar": {
|
||||
"installations": [{"version": "2.0", "arch": "amd64"}],
|
||||
"observed_via": [{"kind": "user_installed", "ref": "manual"}],
|
||||
"roles": ["bar"],
|
||||
},
|
||||
}
|
||||
},
|
||||
"roles": {
|
||||
"users": {
|
||||
"role_name": "users",
|
||||
"users": [{"name": "alice"}],
|
||||
"managed_files": [
|
||||
{
|
||||
"path": "/home/alice/.ssh/authorized_keys",
|
||||
"src_rel": "home/alice/.ssh/authorized_keys",
|
||||
"owner": "alice",
|
||||
"group": "alice",
|
||||
"mode": "0600",
|
||||
"reason": "authorized_keys",
|
||||
}
|
||||
],
|
||||
"managed_dirs": [
|
||||
{
|
||||
"path": "/home/alice/.ssh",
|
||||
"owner": "alice",
|
||||
"group": "alice",
|
||||
"mode": "0700",
|
||||
"reason": "parent_of_managed_file",
|
||||
}
|
||||
],
|
||||
"excluded": [{"path": "/etc/shadow", "reason": "sensitive_content"}],
|
||||
"notes": ["n1", "n2"],
|
||||
},
|
||||
"services": [
|
||||
{
|
||||
"unit": "foo.service",
|
||||
"role_name": "foo",
|
||||
"packages": ["foo"],
|
||||
"managed_files": [
|
||||
{
|
||||
"path": "/etc/foo.conf",
|
||||
"src_rel": "etc/foo.conf",
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"mode": "0644",
|
||||
"reason": "modified_conffile",
|
||||
},
|
||||
# Unknown reason should fall back to generic text.
|
||||
{
|
||||
"path": "/etc/odd.conf",
|
||||
"src_rel": "etc/odd.conf",
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"mode": "0644",
|
||||
"reason": "mystery_reason",
|
||||
},
|
||||
],
|
||||
"excluded": [],
|
||||
"notes": [],
|
||||
}
|
||||
],
|
||||
"packages": [
|
||||
{
|
||||
"package": "bar",
|
||||
"role_name": "bar",
|
||||
"managed_files": [],
|
||||
"excluded": [],
|
||||
"notes": [],
|
||||
}
|
||||
],
|
||||
"extra_paths": {
|
||||
"role_name": "extra_paths",
|
||||
"include_patterns": ["/etc/a", "/etc/b"],
|
||||
"exclude_patterns": ["/etc/x", "/etc/y"],
|
||||
"managed_files": [],
|
||||
"excluded": [],
|
||||
"notes": [],
|
||||
},
|
||||
"apt_config": {
|
||||
"role_name": "apt_config",
|
||||
"managed_files": [],
|
||||
"excluded": [],
|
||||
"notes": [],
|
||||
},
|
||||
"dnf_config": {
|
||||
"role_name": "dnf_config",
|
||||
"managed_files": [],
|
||||
"excluded": [],
|
||||
"notes": [],
|
||||
},
|
||||
"etc_custom": {
|
||||
"role_name": "etc_custom",
|
||||
"managed_files": [],
|
||||
"excluded": [],
|
||||
"notes": [],
|
||||
},
|
||||
"usr_local_custom": {
|
||||
"role_name": "usr_local_custom",
|
||||
"managed_files": [],
|
||||
"excluded": [],
|
||||
"notes": [],
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
state_path = _write_state(bundle, state)
|
||||
|
||||
out = ex.explain_state(str(state_path), fmt="text", max_examples=1)
|
||||
|
||||
assert "Enroll explained:" in out
|
||||
assert "Host: h1" in out
|
||||
assert "Inventory" in out
|
||||
# observed_via summary should include both kinds (order not strictly guaranteed)
|
||||
assert "observed_via" in out
|
||||
assert "systemd_unit" in out
|
||||
assert "user_installed" in out
|
||||
|
||||
# extra_paths include/exclude patterns should be rendered with max_examples truncation.
|
||||
assert "include_patterns:" in out
|
||||
assert "/etc/a" in out
|
||||
assert "exclude_patterns:" in out
|
||||
|
||||
# Reasons section should mention known and unknown reasons.
|
||||
assert "modified_conffile" in out
|
||||
assert "mystery_reason" in out
|
||||
assert "Captured with reason 'mystery_reason'" in out
|
||||
|
||||
# Excluded paths section.
|
||||
assert "Why paths were excluded" in out
|
||||
assert "sensitive_content" in out
|
||||
|
||||
|
||||
def test_explain_state_json_contains_structured_report(tmp_path: Path):
    """explain_state(fmt="json") should emit a machine-readable report
    mirroring the host, enroll version, inventory count, roles and reasons."""
    bundle = tmp_path / "bundle"

    # The role sections below differ only by name; build them from one template.
    def empty_role(name: str) -> dict:
        return {
            "role_name": name,
            "managed_files": [],
            "excluded": [],
            "notes": [],
        }

    roles: dict = {
        "users": {**empty_role("users"), "users": []},
        "services": [],
        "packages": [],
    }
    for section in ("apt_config", "dnf_config", "etc_custom", "usr_local_custom"):
        roles[section] = empty_role(section)
    roles["extra_paths"] = {
        **empty_role("extra_paths"),
        "include_patterns": [],
        "exclude_patterns": [],
    }

    state = {
        "schema_version": 3,
        "host": {"hostname": "h2", "os": "rhel", "pkg_backend": "rpm"},
        "enroll": {"version": "1.2.3"},
        "inventory": {"packages": {}},
        "roles": roles,
    }
    state_path = _write_state(bundle, state)

    raw = ex.explain_state(str(state_path), fmt="json", max_examples=2)
    rep = json.loads(raw)

    assert rep["host"]["hostname"] == "h2"
    assert rep["enroll"]["version"] == "1.2.3"
    assert rep["inventory"]["package_count"] == 0
    assert isinstance(rep["roles"], list)
    assert "reasons" in rep
|
||||
164
tests/test_harvest_cron_logrotate.py
Normal file
164
tests/test_harvest_cron_logrotate.py
Normal file
|
|
@ -0,0 +1,164 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
import enroll.harvest as h
|
||||
from enroll.platform import PlatformInfo
|
||||
from enroll.systemd import UnitInfo
|
||||
|
||||
|
||||
class AllowAllPolicy:
    """Ignore-policy stub for harvest tests: no path is ever denied."""

    def deny_reason(self, path: str):
        """Always allow *path* (a real policy would return a reason string)."""
        return None
|
||||
|
||||
|
||||
class FakeBackend:
    """Package-backend stub with a fixed inventory and no ownership data.

    Keeps harvest deterministic: installed/manual package lists are supplied
    by the test, every ownership/config query answers "unknown/none".
    """

    def __init__(
        self,
        *,
        name: str,
        installed: dict[str, list[dict[str, str]]],
        manual: list[str],
    ):
        # Defensive copies so the test's literals can't be mutated by harvest.
        self.name = name
        self._installed = dict(installed)
        self._manual = list(manual)

    # --- inventory ---------------------------------------------------------

    def installed_packages(self):
        """Return a fresh copy of the configured installed-package map."""
        return dict(self._installed)

    def list_manual_packages(self):
        """Return a fresh copy of the configured manually-installed list."""
        return list(self._manual)

    # --- ownership / config queries (all inert) ----------------------------

    def build_etc_index(self):
        # No package ownership information needed for this test.
        return set(), {}, {}, {}

    def owner_of_path(self, path: str):
        return None

    def specific_paths_for_hints(self, hints: set[str]):
        return []

    def is_pkg_config_path(self, path: str) -> bool:
        return False

    def modified_paths(self, pkg: str, etc_paths: list[str]):
        return {}
|
||||
|
||||
|
||||
def test_harvest_unifies_cron_and_logrotate_into_dedicated_package_roles(
    monkeypatch, tmp_path: Path
):
    """End-to-end harvest check: cron and logrotate files are collected into
    dedicated package roles ("cron", "logrotate"), and the colliding
    cron.service is skipped so it cannot shadow the package role.
    """
    bundle = tmp_path / "bundle"

    # Fake files we want harvested.
    files = {
        "/etc/crontab": b"* * * * * root echo hi\n",
        "/etc/cron.d/php": b"# php cron\n",
        "/var/spool/cron/crontabs/alice": b"@daily echo user\n",
        "/etc/logrotate.conf": b"weekly\n",
        "/etc/logrotate.d/rsyslog": b"/var/log/syslog { rotate 7 }\n",
    }

    # Simulated filesystem: only the paths in `files` exist, nothing is a
    # symlink or directory.
    monkeypatch.setattr(h.os.path, "islink", lambda p: False)
    monkeypatch.setattr(h.os.path, "isfile", lambda p: p in files)
    monkeypatch.setattr(h.os.path, "isdir", lambda p: False)
    monkeypatch.setattr(h.os.path, "exists", lambda p: (p in files) or False)

    # Expand cron/logrotate globs deterministically.
    def fake_iter_matching(spec: str, cap: int = 10000):
        mapping = {
            "/etc/crontab": ["/etc/crontab"],
            "/etc/cron.d/*": ["/etc/cron.d/php"],
            "/etc/cron.hourly/*": [],
            "/etc/cron.daily/*": [],
            "/etc/cron.weekly/*": [],
            "/etc/cron.monthly/*": [],
            "/etc/cron.allow": [],
            "/etc/cron.deny": [],
            "/etc/anacrontab": [],
            "/etc/anacron/*": [],
            "/var/spool/cron/*": [],
            "/var/spool/cron/crontabs/*": ["/var/spool/cron/crontabs/alice"],
            "/var/spool/crontabs/*": [],
            "/var/spool/anacron/*": [],
            "/etc/logrotate.conf": ["/etc/logrotate.conf"],
            "/etc/logrotate.d/*": ["/etc/logrotate.d/rsyslog"],
        }
        return list(mapping.get(spec, []))[:cap]

    monkeypatch.setattr(h, "_iter_matching_files", fake_iter_matching)

    # Avoid real system probing.
    monkeypatch.setattr(
        h, "detect_platform", lambda: PlatformInfo("debian", "dpkg", {})
    )
    backend = FakeBackend(
        name="dpkg",
        installed={
            "cron": [{"version": "1", "arch": "amd64"}],
            "logrotate": [{"version": "1", "arch": "amd64"}],
        },
        # Include cron/logrotate in manual packages to ensure they are skipped in the generic loop.
        manual=["cron", "logrotate"],
    )
    monkeypatch.setattr(h, "get_backend", lambda info=None: backend)

    # Include a service that would collide with cron role naming.
    monkeypatch.setattr(
        h, "list_enabled_services", lambda: ["cron.service", "foo.service"]
    )
    monkeypatch.setattr(h, "list_enabled_timers", lambda: [])
    monkeypatch.setattr(
        h,
        "get_unit_info",
        lambda unit: UnitInfo(
            name=unit,
            fragment_path=None,
            dropin_paths=[],
            env_files=[],
            exec_paths=[],
            active_state="active",
            sub_state="running",
            unit_file_state="enabled",
            condition_result=None,
        ),
    )
    monkeypatch.setattr(h, "collect_non_system_users", lambda: [])
    # alice owns her crontab; everything else is root-owned 0644.
    monkeypatch.setattr(
        h,
        "stat_triplet",
        lambda p: ("alice" if "alice" in p else "root", "root", "0644"),
    )

    # Avoid needing real source files by implementing our own bundle copier.
    def fake_copy(bundle_dir: str, role_name: str, abs_path: str, src_rel: str):
        dst = Path(bundle_dir) / "artifacts" / role_name / src_rel
        dst.parent.mkdir(parents=True, exist_ok=True)
        dst.write_bytes(files.get(abs_path, b""))

    monkeypatch.setattr(h, "_copy_into_bundle", fake_copy)

    state_path = h.harvest(str(bundle), policy=AllowAllPolicy())
    st = json.loads(Path(state_path).read_text(encoding="utf-8"))

    # cron.service must be skipped to avoid colliding with the dedicated "cron" package role.
    svc_units = [s["unit"] for s in st["roles"]["services"]]
    assert "cron.service" not in svc_units
    assert "foo.service" in svc_units

    pkgs = st["roles"]["packages"]
    cron = next(p for p in pkgs if p["role_name"] == "cron")
    logrotate = next(p for p in pkgs if p["role_name"] == "logrotate")

    cron_paths = {mf["path"] for mf in cron["managed_files"]}
    assert "/etc/crontab" in cron_paths
    assert "/etc/cron.d/php" in cron_paths
    # user crontab captured
    assert "/var/spool/cron/crontabs/alice" in cron_paths

    lr_paths = {mf["path"] for mf in logrotate["managed_files"]}
    assert "/etc/logrotate.conf" in lr_paths
    assert "/etc/logrotate.d/rsyslog" in lr_paths
|
||||
170
tests/test_harvest_helpers.py
Normal file
170
tests/test_harvest_helpers.py
Normal file
|
|
@ -0,0 +1,170 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
import enroll.harvest as h
|
||||
|
||||
|
||||
def test_iter_matching_files_skips_symlinks_and_walks_dirs(monkeypatch, tmp_path: Path):
    """_iter_matching_files must recurse into directories returned by the
    glob, collect regular files, and silently skip symlinks.
    """
    # Layout:
    #   root/real.txt (file)
    #   root/sub/nested.txt
    #   root/link -> ... (ignored)
    root = tmp_path / "root"
    (root / "sub").mkdir(parents=True)
    (root / "real.txt").write_text("a", encoding="utf-8")
    (root / "sub" / "nested.txt").write_text("b", encoding="utf-8")

    # Map each path to its simulated filesystem type; the stubbed os.path
    # predicates below answer from this map.
    paths = {
        str(root): "dir",
        str(root / "real.txt"): "file",
        str(root / "sub"): "dir",
        str(root / "sub" / "nested.txt"): "file",
        str(root / "link"): "link",
    }

    # The glob yields one directory and one symlink.
    monkeypatch.setattr(h.glob, "glob", lambda spec: [str(root), str(root / "link")])
    monkeypatch.setattr(h.os.path, "islink", lambda p: paths.get(p) == "link")
    monkeypatch.setattr(h.os.path, "isfile", lambda p: paths.get(p) == "file")
    monkeypatch.setattr(h.os.path, "isdir", lambda p: paths.get(p) == "dir")
    monkeypatch.setattr(
        h.os,
        "walk",
        lambda p: [
            (str(root), ["sub"], ["real.txt", "link"]),
            (str(root / "sub"), [], ["nested.txt"]),
        ],
    )

    out = h._iter_matching_files("/whatever/*", cap=100)
    assert str(root / "real.txt") in out
    assert str(root / "sub" / "nested.txt") in out
    # The symlink is skipped even though os.walk listed it as a file name.
    assert str(root / "link") not in out
|
||||
|
||||
|
||||
def test_parse_apt_signed_by_extracts_keyrings(tmp_path: Path):
    """Keyring paths referenced via signed-by/Signed-By are extracted from
    both one-line .list sources and deb822 .sources files; inline keys
    (Signed-By: | ...) yield no paths."""
    list_file = tmp_path / "a.list"
    list_file.write_text(
        "deb [signed-by=/usr/share/keyrings/foo.gpg] https://example.invalid stable main\n",
        encoding="utf-8",
    )

    deb822_file = tmp_path / "b.sources"
    deb822_file.write_text(
        "Types: deb\nSigned-By: /etc/apt/keyrings/bar.gpg, /usr/share/keyrings/baz.gpg\n",
        encoding="utf-8",
    )

    inline_file = tmp_path / "c.sources"
    inline_file.write_text("Signed-By: | /bin/echo nope\n", encoding="utf-8")

    found = h._parse_apt_signed_by(
        [str(list_file), str(deb822_file), str(inline_file)]
    )

    for keyring in (
        "/usr/share/keyrings/foo.gpg",
        "/etc/apt/keyrings/bar.gpg",
        "/usr/share/keyrings/baz.gpg",
    ):
        assert keyring in found
|
||||
|
||||
|
||||
def test_iter_apt_capture_paths_includes_signed_by_keyring(monkeypatch):
    """APT capture paths must include Signed-By keyrings even when they live
    outside /etc/apt.

    Simulated layout:
      /etc/apt/apt.conf.d/00test
      /etc/apt/sources.list.d/test.list  (signed-by outside /etc/apt)
      /usr/share/keyrings/ext.gpg
    """
    files = {
        "/etc/apt/apt.conf.d/00test": "file",
        "/etc/apt/sources.list.d/test.list": "file",
        "/usr/share/keyrings/ext.gpg": "file",
    }

    monkeypatch.setattr(h.os.path, "isdir", lambda p: p in {"/etc/apt"})
    monkeypatch.setattr(
        h.os,
        "walk",
        lambda root: [
            ("/etc/apt", ["apt.conf.d", "sources.list.d"], []),
            ("/etc/apt/apt.conf.d", [], ["00test"]),
            ("/etc/apt/sources.list.d", [], ["test.list"]),
        ],
    )
    monkeypatch.setattr(h.os.path, "islink", lambda p: False)
    monkeypatch.setattr(h.os.path, "isfile", lambda p: files.get(p) == "file")

    # Only treat the sources glob as having a hit.
    def fake_iter_matching(spec: str, cap: int = 10000):
        if spec == "/etc/apt/sources.list.d/*.list":
            return ["/etc/apt/sources.list.d/test.list"]
        return []

    monkeypatch.setattr(h, "_iter_matching_files", fake_iter_matching)

    # Patch _parse_apt_signed_by directly so no filesystem reads are needed.
    # (The original version also defined an unused fake_open/real_open pair
    # for this purpose; that dead code is removed here.)
    monkeypatch.setattr(
        h, "_parse_apt_signed_by", lambda sfs: {"/usr/share/keyrings/ext.gpg"}
    )

    out = h._iter_apt_capture_paths()
    paths = {p for p, _r in out}
    reasons = {p: r for p, r in out}
    assert "/etc/apt/apt.conf.d/00test" in paths
    assert "/etc/apt/sources.list.d/test.list" in paths
    assert "/usr/share/keyrings/ext.gpg" in paths
    # External keyrings are tagged with a dedicated capture reason.
    assert reasons["/usr/share/keyrings/ext.gpg"] == "apt_signed_by_keyring"
|
||||
|
||||
|
||||
def test_iter_dnf_capture_paths(monkeypatch):
    """DNF/YUM capture must cover dnf.conf, both yum.conf locations, *.repo
    files and RPM GPG keys, using stubbed filesystem predicates.
    """
    # Simulated filesystem: path -> type.
    files = {
        "/etc/dnf/dnf.conf": "file",
        "/etc/yum/yum.conf": "file",
        "/etc/yum.conf": "file",
        "/etc/yum.repos.d/test.repo": "file",
        "/etc/pki/rpm-gpg/RPM-GPG-KEY": "file",
    }

    def isdir(p):
        # Only the four capture directories exist.
        return p in {"/etc/dnf", "/etc/yum", "/etc/yum.repos.d", "/etc/pki/rpm-gpg"}

    def walk(root):
        # Deterministic walk results per capture directory.
        if root == "/etc/dnf":
            return [("/etc/dnf", [], ["dnf.conf"])]
        if root == "/etc/yum":
            return [("/etc/yum", [], ["yum.conf"])]
        if root == "/etc/pki/rpm-gpg":
            return [("/etc/pki/rpm-gpg", [], ["RPM-GPG-KEY"])]
        return []

    monkeypatch.setattr(h.os.path, "isdir", isdir)
    monkeypatch.setattr(h.os, "walk", walk)
    monkeypatch.setattr(h.os.path, "islink", lambda p: False)
    monkeypatch.setattr(h.os.path, "isfile", lambda p: files.get(p) == "file")
    # Only the *.repo glob produces a hit.
    monkeypatch.setattr(
        h,
        "_iter_matching_files",
        lambda spec, cap=10000: (
            ["/etc/yum.repos.d/test.repo"] if spec.endswith("*.repo") else []
        ),
    )

    out = h._iter_dnf_capture_paths()
    paths = {p for p, _r in out}
    assert "/etc/dnf/dnf.conf" in paths
    assert "/etc/yum/yum.conf" in paths
    assert "/etc/yum.conf" in paths
    assert "/etc/yum.repos.d/test.repo" in paths
    assert "/etc/pki/rpm-gpg/RPM-GPG-KEY" in paths
|
||||
|
||||
|
||||
def test_iter_system_capture_paths_dedupes_first_reason(monkeypatch):
    """When multiple capture globs match the same file, only one entry is
    kept and it carries the reason of the first matching glob."""
    monkeypatch.setattr(h, "_SYSTEM_CAPTURE_GLOBS", [("/a", "r1"), ("/b", "r2")])

    def fake_matches(spec, cap=10000):
        # Both globs resolve to the very same file.
        return ["/dup"] if spec in {"/a", "/b"} else []

    monkeypatch.setattr(h, "_iter_matching_files", fake_matches)

    assert h._iter_system_capture_paths() == [("/dup", "r1")]
|
||||
263
tests/test_harvest_symlinks.py
Normal file
263
tests/test_harvest_symlinks.py
Normal file
|
|
@ -0,0 +1,263 @@
|
|||
import json
|
||||
from pathlib import Path
|
||||
|
||||
import enroll.harvest as h
|
||||
from enroll.platform import PlatformInfo
|
||||
from enroll.systemd import UnitInfo
|
||||
|
||||
|
||||
class AllowAllPolicy:
    """Policy stub for symlink-harvest tests: nothing is ever denied."""

    def deny_reason(self, path: str):
        """Allow every regular file path."""
        return None

    def deny_reason_link(self, path: str):
        """Allow every symlink path."""
        return None
|
||||
|
||||
|
||||
class FakeBackend:
    """Minimal backend stub for harvest tests.

    Keep harvest deterministic and avoid enumerating the real system: every
    query reports an empty inventory and no package ownership.
    """

    name = "dpkg"

    # Inventory queries: nothing installed, nothing manual.
    def installed_packages(self):
        return {}

    def list_manual_packages(self):
        return []

    # Ownership / config queries: everything is unknown or unmanaged.
    def build_etc_index(self):
        return (set(), {}, {}, {})

    def owner_of_path(self, path: str):
        return None

    def specific_paths_for_hints(self, hints: set[str]):
        return []

    def is_pkg_config_path(self, path: str) -> bool:
        return False

    def modified_paths(self, pkg: str, etc_paths: list[str]):
        return {}
|
||||
|
||||
|
||||
def _base_monkeypatches(monkeypatch, *, unit: str):
    """Patch harvest to avoid live system access.

    Sets up a deterministic sandbox for the symlink-harvest tests: a fixed
    Debian/dpkg platform, a stub backend, no timers/users, an empty
    filesystem view, and exactly one enabled service (*unit*, or none when
    the empty string is passed). Individual tests layer their own
    filesystem stubs on top of these defaults.
    """

    monkeypatch.setattr(
        h, "detect_platform", lambda: PlatformInfo("debian", "dpkg", {})
    )
    monkeypatch.setattr(h, "get_backend", lambda info=None: FakeBackend())

    monkeypatch.setattr(h, "list_enabled_timers", lambda: [])
    # Every unit reports the same inert, enabled-but-dead state.
    monkeypatch.setattr(
        h,
        "get_unit_info",
        lambda u: UnitInfo(
            name=u,
            fragment_path=None,
            dropin_paths=[],
            env_files=[],
            exec_paths=[],
            active_state="inactive",
            sub_state="dead",
            unit_file_state="enabled",
            condition_result=None,
        ),
    )

    # Keep users empty and avoid touching /etc/skel.
    monkeypatch.setattr(h, "collect_non_system_users", lambda: [])

    # Avoid warning spam from non-root test runs.
    if hasattr(h.os, "geteuid"):
        monkeypatch.setattr(h.os, "geteuid", lambda: 0)

    # Avoid walking the real filesystem.
    monkeypatch.setattr(h.os, "walk", lambda root: iter(()))
    monkeypatch.setattr(h, "_copy_into_bundle", lambda *a, **k: None)

    # Default to a "no files exist" view of the world unless a test overrides.
    monkeypatch.setattr(h.os.path, "isfile", lambda p: False)
    monkeypatch.setattr(h.os.path, "exists", lambda p: False)

    # Minimal enabled services list.
    monkeypatch.setattr(h, "list_enabled_services", lambda: [unit] if unit else [])
|
||||
|
||||
|
||||
def test_harvest_captures_nginx_enabled_symlinks(monkeypatch, tmp_path: Path):
    """Symlinks in nginx sites-enabled/modules-enabled are recorded on the
    nginx service role as managed_links with reason 'enabled_symlink';
    non-symlink entries (README) are ignored.
    """
    bundle = tmp_path / "bundle"
    unit = "nginx.service"
    _base_monkeypatches(monkeypatch, unit=unit)

    # Fake filesystem for nginx enabled dirs.
    dirs = {
        "/etc",
        "/etc/nginx",
        "/etc/nginx/sites-enabled",
        "/etc/nginx/modules-enabled",
    }
    # symlink path -> link target
    links = {
        "/etc/nginx/sites-enabled/default": "../sites-available/default",
        "/etc/nginx/modules-enabled/mod-http": "../modules-available/mod-http",
    }

    monkeypatch.setattr(h.os.path, "isdir", lambda p: p in dirs)
    monkeypatch.setattr(h.os.path, "islink", lambda p: p in links)
    monkeypatch.setattr(h.os, "readlink", lambda p: links[p])

    def fake_glob(pat: str):
        # The sites-enabled listing includes a README that is not a symlink
        # and must therefore not be captured.
        if pat == "/etc/nginx/sites-enabled/*":
            return [
                "/etc/nginx/sites-enabled/default",
                "/etc/nginx/sites-enabled/README",
            ]
        if pat == "/etc/nginx/modules-enabled/*":
            return ["/etc/nginx/modules-enabled/mod-http"]
        return []

    monkeypatch.setattr(h.glob, "glob", fake_glob)

    state_path = h.harvest(str(bundle), policy=AllowAllPolicy())
    st = json.loads(Path(state_path).read_text(encoding="utf-8"))

    svc = next(s for s in st["roles"]["services"] if s["role_name"] == "nginx")
    managed_links = svc.get("managed_links") or []
    assert {(ml["path"], ml["target"], ml["reason"]) for ml in managed_links} == {
        (
            "/etc/nginx/sites-enabled/default",
            "../sites-available/default",
            "enabled_symlink",
        ),
        (
            "/etc/nginx/modules-enabled/mod-http",
            "../modules-available/mod-http",
            "enabled_symlink",
        ),
    }
|
||||
|
||||
|
||||
def test_harvest_does_not_capture_enabled_symlinks_without_role(
    monkeypatch, tmp_path: Path
):
    """If no nginx service role exists, enabled-dir symlinks are not
    captured anywhere, even though the directories and links are present.
    """
    bundle = tmp_path / "bundle"
    # unit="" => no enabled services at all.
    _base_monkeypatches(monkeypatch, unit="")

    # Dirs exist but nginx isn't detected, so nothing should be captured.
    monkeypatch.setattr(
        h.os.path,
        "isdir",
        lambda p: p
        in {
            "/etc",
            "/etc/nginx/sites-enabled",
            "/etc/nginx/modules-enabled",
        },
    )
    monkeypatch.setattr(
        h.glob, "glob", lambda pat: ["/etc/nginx/sites-enabled/default"]
    )
    monkeypatch.setattr(h.os.path, "islink", lambda p: True)
    monkeypatch.setattr(h.os, "readlink", lambda p: "../sites-available/default")

    state_path = h.harvest(str(bundle), policy=AllowAllPolicy())
    st = json.loads(Path(state_path).read_text(encoding="utf-8"))

    # No services => no place to attach nginx links.
    assert st["roles"]["services"] == []
    # And no package snapshots either.
    assert st["roles"]["packages"] == []
|
||||
|
||||
|
||||
def test_harvest_symlink_capture_respects_ignore_policy(monkeypatch, tmp_path: Path):
    """The policy's deny_reason_link is consulted for every enabled symlink:
    denied links go to `excluded` with the policy's reason, allowed links
    become managed_links.
    """
    bundle = tmp_path / "bundle"
    _base_monkeypatches(monkeypatch, unit="nginx.service")

    dirs = {"/etc", "/etc/nginx/sites-enabled", "/etc/nginx/modules-enabled"}
    # Two symlinks: "default" will be denied by the policy, "ok" allowed.
    links = {
        "/etc/nginx/sites-enabled/default": "../sites-available/default",
        "/etc/nginx/sites-enabled/ok": "../sites-available/ok",
    }

    monkeypatch.setattr(h.os.path, "isdir", lambda p: p in dirs)
    monkeypatch.setattr(h.os.path, "islink", lambda p: p in links)
    monkeypatch.setattr(h.os, "readlink", lambda p: links[p])
    monkeypatch.setattr(
        h.glob,
        "glob",
        lambda pat: (
            sorted(list(links.keys())) if pat == "/etc/nginx/sites-enabled/*" else []
        ),
    )

    # Record which paths the policy was asked about.
    calls: list[str] = []

    class Policy:
        def deny_reason(self, path: str):
            return None

        def deny_reason_link(self, path: str):
            # Deny only the "default" link; record every consultation.
            calls.append(path)
            if path.endswith("/default"):
                return "denied_path"
            return None

    state_path = h.harvest(str(bundle), policy=Policy())
    st = json.loads(Path(state_path).read_text(encoding="utf-8"))

    svc = next(s for s in st["roles"]["services"] if s["role_name"] == "nginx")
    managed_links = svc.get("managed_links") or []
    excluded = svc.get("excluded") or []

    # Both links were checked against the policy.
    assert any(p.endswith("/default") for p in calls)
    assert any(p.endswith("/ok") for p in calls)
    # Allowed link captured; denied link excluded with the policy's reason.
    assert {ml["path"] for ml in managed_links} == {"/etc/nginx/sites-enabled/ok"}
    assert {ex["path"] for ex in excluded} == {"/etc/nginx/sites-enabled/default"}
    assert (
        next(ex["reason"] for ex in excluded if ex["path"].endswith("/default"))
        == "denied_path"
    )
|
||||
|
||||
|
||||
def test_harvest_captures_apache2_enabled_symlinks(monkeypatch, tmp_path: Path):
    """Apache's conf-/mods-/sites-enabled symlinks are all captured on the
    apache2 service role with reason 'enabled_symlink'.
    """
    bundle = tmp_path / "bundle"
    _base_monkeypatches(monkeypatch, unit="apache2.service")

    dirs = {
        "/etc",
        "/etc/apache2/conf-enabled",
        "/etc/apache2/mods-enabled",
        "/etc/apache2/sites-enabled",
    }
    # symlink path -> link target (one per enabled dir).
    links = {
        "/etc/apache2/sites-enabled/000-default.conf": "../sites-available/000-default.conf",
        "/etc/apache2/mods-enabled/rewrite.load": "../mods-available/rewrite.load",
        "/etc/apache2/conf-enabled/security.conf": "../conf-available/security.conf",
    }

    monkeypatch.setattr(h.os.path, "isdir", lambda p: p in dirs)
    monkeypatch.setattr(h.os.path, "islink", lambda p: p in links)
    monkeypatch.setattr(h.os, "readlink", lambda p: links[p])

    def fake_glob(pat: str):
        # Deterministic glob expansion per enabled directory.
        if pat == "/etc/apache2/sites-enabled/*":
            return ["/etc/apache2/sites-enabled/000-default.conf"]
        if pat == "/etc/apache2/mods-enabled/*":
            return ["/etc/apache2/mods-enabled/rewrite.load"]
        if pat == "/etc/apache2/conf-enabled/*":
            return ["/etc/apache2/conf-enabled/security.conf"]
        return []

    monkeypatch.setattr(h.glob, "glob", fake_glob)

    state_path = h.harvest(str(bundle), policy=AllowAllPolicy())
    st = json.loads(Path(state_path).read_text(encoding="utf-8"))

    svc = next(s for s in st["roles"]["services"] if s["role_name"] == "apache2")
    managed_links = svc.get("managed_links") or []
    assert {ml["path"] for ml in managed_links} == set(links.keys())
    assert {ml["target"] for ml in managed_links} == set(links.values())
    assert all(ml["reason"] == "enabled_symlink" for ml in managed_links)
|
||||
|
|
@ -3,7 +3,8 @@ from enroll.ignore import IgnorePolicy
|
|||
|
||||
def test_ignore_policy_denies_common_backup_files():
    """Backup copies of sensitive /etc files are denied as 'backup_file'.

    The source text contained stale duplicate assertions expecting
    'denied_path' for the same paths alongside the current 'backup_file'
    expectations; the same call cannot equal both, so the stale trio is
    removed and only the current reason is asserted.
    """
    pol = IgnorePolicy()
    assert pol.deny_reason("/etc/shadow-") == "backup_file"
    assert pol.deny_reason("/etc/passwd-") == "backup_file"
    assert pol.deny_reason("/etc/group-") == "backup_file"
    assert pol.deny_reason("/etc/something~") == "backup_file"
    # Paths that do not exist / cannot be read are reported as unreadable.
    assert pol.deny_reason("/foobar") == "unreadable"
|
||||
|
|
|
|||
|
|
@ -1,7 +1,12 @@
|
|||
import json
|
||||
from pathlib import Path
|
||||
|
||||
from enroll.manifest import manifest
|
||||
import os
|
||||
import stat
|
||||
import tarfile
|
||||
import pytest
|
||||
|
||||
import enroll.manifest as manifest
|
||||
|
||||
|
||||
def test_manifest_writes_roles_and_playbook_with_clean_when(tmp_path: Path):
|
||||
|
|
@ -176,7 +181,7 @@ def test_manifest_writes_roles_and_playbook_with_clean_when(tmp_path: Path):
|
|||
bundle / "artifacts" / "usr_local_custom" / "usr" / "local" / "bin" / "myscript"
|
||||
).write_text("#!/bin/sh\necho hi\n", encoding="utf-8")
|
||||
|
||||
manifest(str(bundle), str(out))
|
||||
manifest.manifest(str(bundle), str(out))
|
||||
|
||||
# Service role: systemd management should be gated on foo_manage_unit and a probe.
|
||||
tasks = (out / "roles" / "foo" / "tasks" / "main.yml").read_text(encoding="utf-8")
|
||||
|
|
@ -201,11 +206,11 @@ def test_manifest_writes_roles_and_playbook_with_clean_when(tmp_path: Path):
|
|||
|
||||
# Playbook should include users, etc_custom, packages, and services
|
||||
pb = (out / "playbook.yml").read_text(encoding="utf-8")
|
||||
assert "- users" in pb
|
||||
assert "- etc_custom" in pb
|
||||
assert "- usr_local_custom" in pb
|
||||
assert "- curl" in pb
|
||||
assert "- foo" in pb
|
||||
assert "role: users" in pb
|
||||
assert "role: etc_custom" in pb
|
||||
assert "role: usr_local_custom" in pb
|
||||
assert "role: curl" in pb
|
||||
assert "role: foo" in pb
|
||||
|
||||
|
||||
def test_manifest_site_mode_creates_host_inventory_and_raw_files(tmp_path: Path):
|
||||
|
|
@ -345,7 +350,7 @@ def test_manifest_site_mode_creates_host_inventory_and_raw_files(tmp_path: Path)
|
|||
/ "myapp.conf"
|
||||
).write_text("myapp=1\n", encoding="utf-8")
|
||||
|
||||
manifest(str(bundle), str(out), fqdn=fqdn)
|
||||
manifest.manifest(str(bundle), str(out), fqdn=fqdn)
|
||||
|
||||
# Host playbook exists.
|
||||
assert (out / "playbooks" / f"{fqdn}.yml").exists()
|
||||
|
|
@ -482,10 +487,10 @@ def test_manifest_includes_dnf_config_role_when_present(tmp_path: Path):
|
|||
bundle.mkdir(parents=True, exist_ok=True)
|
||||
(bundle / "state.json").write_text(json.dumps(state, indent=2), encoding="utf-8")
|
||||
|
||||
manifest(str(bundle), str(out))
|
||||
manifest.manifest(str(bundle), str(out))
|
||||
|
||||
pb = (out / "playbook.yml").read_text(encoding="utf-8")
|
||||
assert "- dnf_config" in pb
|
||||
assert "role: dnf_config" in pb
|
||||
|
||||
tasks = (out / "roles" / "dnf_config" / "tasks" / "main.yml").read_text(
|
||||
encoding="utf-8"
|
||||
|
|
@ -502,3 +507,291 @@ def test_render_install_packages_tasks_contains_dnf_branch():
|
|||
assert "ansible.builtin.dnf" in txt
|
||||
assert "ansible.builtin.package" in txt
|
||||
assert "pkg_mgr" in txt
|
||||
|
||||
|
||||
def test_manifest_orders_cron_and_logrotate_at_playbook_tail(tmp_path: Path):
    """Cron/logrotate roles should appear at the end.

    The cron role may restore per-user crontabs under /var/spool, so it should
    run after users have been created.
    """

    bundle = tmp_path / "bundle"
    out = tmp_path / "ansible"

    # Most role sections share the same empty skeleton; build them from one
    # helper instead of repeating the literal.
    def role(name: str, **extra) -> dict:
        section = {
            "role_name": name,
            "managed_files": [],
            "excluded": [],
            "notes": [],
        }
        section.update(extra)
        return section

    cron_mf = {
        "path": "/var/spool/cron/crontabs/alice",
        "src_rel": "var/spool/cron/crontabs/alice",
        "owner": "alice",
        "group": "root",
        "mode": "0600",
        "reason": "system_cron",
    }
    logrotate_mf = {
        "path": "/etc/logrotate.conf",
        "src_rel": "etc/logrotate.conf",
        "owner": "root",
        "group": "root",
        "mode": "0644",
        "reason": "system_logrotate",
    }

    state = {
        "schema_version": 3,
        "host": {"hostname": "test", "os": "debian", "pkg_backend": "dpkg"},
        "inventory": {"packages": {}},
        "roles": {
            "users": role("users", users=[{"name": "alice"}]),
            "services": [],
            "packages": [
                {**role("curl"), "package": "curl"},
                {**role("cron", managed_files=[cron_mf]), "package": "cron"},
                {
                    **role("logrotate", managed_files=[logrotate_mf]),
                    "package": "logrotate",
                },
            ],
            "apt_config": role("apt_config"),
            "dnf_config": role("dnf_config"),
            "etc_custom": role("etc_custom"),
            "usr_local_custom": role("usr_local_custom"),
            "extra_paths": role(
                "extra_paths", include_patterns=[], exclude_patterns=[]
            ),
        },
    }

    # Minimal artifacts for managed files.
    crontab_artifact = (
        bundle / "artifacts" / "cron" / "var" / "spool" / "cron" / "crontabs" / "alice"
    )
    crontab_artifact.parent.mkdir(parents=True, exist_ok=True)
    crontab_artifact.write_text("@daily echo hi\n", encoding="utf-8")

    logrotate_artifact = bundle / "artifacts" / "logrotate" / "etc" / "logrotate.conf"
    logrotate_artifact.parent.mkdir(parents=True, exist_ok=True)
    logrotate_artifact.write_text("weekly\n", encoding="utf-8")

    bundle.mkdir(parents=True, exist_ok=True)
    (bundle / "state.json").write_text(json.dumps(state, indent=2), encoding="utf-8")

    manifest.manifest(str(bundle), str(out))

    pb = (out / "playbook.yml").read_text(encoding="utf-8").splitlines()
    # Roles are emitted as indented list items under the `roles:` key.
    roles = [
        ln.strip().removeprefix("- ").strip() for ln in pb if ln.startswith(" - ")
    ]

    # Ensure tail ordering.
    assert roles[-2:] == ["role: cron", "role: logrotate"]
    assert "role: users" in roles
    assert roles.index("role: users") < roles.index("role: cron")
|
||||
|
||||
|
||||
def test_yaml_helpers_fallback_when_yaml_unavailable(monkeypatch):
    """Without PyYAML the YAML helpers degrade to a best-effort text form."""
    # Simulate PyYAML being unavailable.
    monkeypatch.setattr(manifest, "_try_yaml", lambda: None)

    # Loading silently yields an empty mapping without a parser.
    assert manifest._yaml_load_mapping("foo: 1\n") == {}

    dumped = manifest._yaml_dump_mapping({"b": 2, "a": 1})
    # Best-effort fallback is key: repr(value)
    first_line = dumped.splitlines()[0]
    assert first_line.startswith("a: ")
    assert dumped.endswith("\n")
|
||||
|
||||
|
||||
def test_copy2_replace_makes_readonly_sources_user_writable(
    monkeypatch, tmp_path: Path
):
    """The destination of _copy2_replace must be user-writable even when the
    source mode is read-only (copy2 would otherwise propagate it)."""
    source = tmp_path / "src.txt"
    dest = tmp_path / "dst.txt"
    source.write_text("hello", encoding="utf-8")
    # Make source read-only; copy2 preserves mode, so tmp will be read-only too.
    os.chmod(source, 0o444)

    manifest._copy2_replace(str(source), str(dest))

    mode = stat.S_IMODE(os.stat(dest, follow_symlinks=False).st_mode)
    assert mode & stat.S_IWUSR
|
||||
|
||||
|
||||
def test_prepare_bundle_dir_sops_decrypts_and_extracts(monkeypatch, tmp_path: Path):
    """In sops mode the bundle is decrypted, extracted, and state.json is
    available inside the resulting temporary bundle directory."""
    encrypted = tmp_path / "harvest.tar.gz.sops"
    encrypted.write_text("ignored", encoding="utf-8")

    def fake_decrypt(src: str, dst: str, *, mode: int = 0o600):
        # Stand in for sops: write a minimal tar.gz containing state.json.
        with tarfile.open(dst, "w:gz") as tf:
            member = tmp_path / "state.json"
            member.write_text("{}", encoding="utf-8")
            tf.add(member, arcname="state.json")

    monkeypatch.setattr(manifest, "require_sops_cmd", lambda: None)
    monkeypatch.setattr(manifest, "decrypt_file_binary_to", fake_decrypt)

    bundle_dir, td = manifest._prepare_bundle_dir(str(encrypted), sops_mode=True)
    try:
        assert (Path(bundle_dir) / "state.json").exists()
    finally:
        # The caller owns the TemporaryDirectory handle and must clean it up.
        td.cleanup()
|
||||
|
||||
|
||||
def test_prepare_bundle_dir_rejects_non_dir_without_sops(tmp_path: Path):
|
||||
fp = tmp_path / "bundle.tar.gz"
|
||||
fp.write_text("x", encoding="utf-8")
|
||||
with pytest.raises(RuntimeError):
|
||||
manifest._prepare_bundle_dir(str(fp), sops_mode=False)
|
||||
|
||||
|
||||
def test_tar_dir_to_with_progress_writes_progress_when_tty(monkeypatch, tmp_path: Path):
|
||||
src = tmp_path / "dir"
|
||||
src.mkdir()
|
||||
(src / "a.txt").write_text("a", encoding="utf-8")
|
||||
(src / "b.txt").write_text("b", encoding="utf-8")
|
||||
|
||||
out = tmp_path / "out.tar.gz"
|
||||
writes: list[bytes] = []
|
||||
|
||||
monkeypatch.setattr(manifest.os, "isatty", lambda fd: True)
|
||||
monkeypatch.setattr(manifest.os, "write", lambda fd, b: writes.append(b) or len(b))
|
||||
|
||||
manifest._tar_dir_to_with_progress(str(src), str(out), desc="tarring")
|
||||
assert out.exists()
|
||||
assert writes # progress was written
|
||||
assert writes[-1].endswith(b"\n")
|
||||
|
||||
|
||||
def test_encrypt_manifest_out_dir_to_sops_handles_missing_tmp_cleanup(
|
||||
monkeypatch, tmp_path: Path
|
||||
):
|
||||
src_dir = tmp_path / "manifest"
|
||||
src_dir.mkdir()
|
||||
(src_dir / "x.txt").write_text("x", encoding="utf-8")
|
||||
|
||||
out = tmp_path / "manifest.tar.gz.sops"
|
||||
|
||||
monkeypatch.setattr(manifest, "require_sops_cmd", lambda: None)
|
||||
|
||||
def fake_encrypt(in_fp, out_fp, *args, **kwargs):
|
||||
Path(out_fp).write_text("enc", encoding="utf-8")
|
||||
|
||||
monkeypatch.setattr(manifest, "encrypt_file_binary", fake_encrypt)
|
||||
# Simulate race where tmp tar is already removed.
|
||||
monkeypatch.setattr(
|
||||
manifest.os, "unlink", lambda p: (_ for _ in ()).throw(FileNotFoundError())
|
||||
)
|
||||
|
||||
res = manifest._encrypt_manifest_out_dir_to_sops(str(src_dir), str(out), ["ABC"]) # type: ignore[arg-type]
|
||||
assert str(res).endswith(".sops")
|
||||
assert out.exists()
|
||||
|
||||
|
||||
def test_manifest_applies_jinjaturtle_to_jinjifyable_managed_file(
|
||||
monkeypatch, tmp_path: Path
|
||||
):
|
||||
# Create a minimal bundle with just an apt_config snapshot.
|
||||
bundle = tmp_path / "bundle"
|
||||
(bundle / "artifacts" / "apt_config" / "etc" / "apt").mkdir(parents=True)
|
||||
(bundle / "artifacts" / "apt_config" / "etc" / "apt" / "foo.ini").write_text(
|
||||
"key=VALUE\n", encoding="utf-8"
|
||||
)
|
||||
|
||||
state = {
|
||||
"schema_version": 1,
|
||||
"inventory": {"packages": {}},
|
||||
"roles": {
|
||||
"services": [],
|
||||
"packages": [],
|
||||
"apt_config": {
|
||||
"role_name": "apt_config",
|
||||
"managed_files": [
|
||||
{
|
||||
"path": "/etc/apt/foo.ini",
|
||||
"src_rel": "etc/apt/foo.ini",
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"mode": "0644",
|
||||
"reason": "apt_config",
|
||||
}
|
||||
],
|
||||
"managed_dirs": [],
|
||||
"excluded": [],
|
||||
"notes": [],
|
||||
},
|
||||
},
|
||||
}
|
||||
(bundle / "state.json").write_text(
|
||||
__import__("json").dumps(state), encoding="utf-8"
|
||||
)
|
||||
|
||||
monkeypatch.setattr(manifest, "find_jinjaturtle_cmd", lambda: "jinjaturtle")
|
||||
|
||||
class _Res:
|
||||
template_text = "key={{ foo }}\n"
|
||||
vars_text = "foo: 123\n"
|
||||
|
||||
monkeypatch.setattr(manifest, "run_jinjaturtle", lambda *a, **k: _Res())
|
||||
|
||||
out_dir = tmp_path / "out"
|
||||
manifest.manifest(str(bundle), str(out_dir), jinjaturtle="on")
|
||||
|
||||
tmpl = out_dir / "roles" / "apt_config" / "templates" / "etc" / "apt" / "foo.ini.j2"
|
||||
assert tmpl.exists()
|
||||
assert "{{ foo }}" in tmpl.read_text(encoding="utf-8")
|
||||
|
||||
defaults = out_dir / "roles" / "apt_config" / "defaults" / "main.yml"
|
||||
txt = defaults.read_text(encoding="utf-8")
|
||||
assert "foo: 123" in txt
|
||||
# Non-templated file should not exist under files/.
|
||||
assert not (
|
||||
out_dir / "roles" / "apt_config" / "files" / "etc" / "apt" / "foo.ini"
|
||||
).exists()
|
||||
|
|
|
|||
96
tests/test_manifest_symlinks.py
Normal file
96
tests/test_manifest_symlinks.py
Normal file
|
|
@ -0,0 +1,96 @@
|
|||
import json
|
||||
from pathlib import Path
|
||||
|
||||
import enroll.manifest as manifest
|
||||
|
||||
|
||||
def test_manifest_emits_symlink_tasks_and_vars(tmp_path: Path):
|
||||
bundle = tmp_path / "bundle"
|
||||
out = tmp_path / "ansible"
|
||||
|
||||
state = {
|
||||
"host": {"hostname": "test", "os": "debian", "pkg_backend": "dpkg"},
|
||||
"inventory": {"packages": {}},
|
||||
"roles": {
|
||||
"users": {
|
||||
"role_name": "users",
|
||||
"users": [],
|
||||
"managed_files": [],
|
||||
"excluded": [],
|
||||
"notes": [],
|
||||
},
|
||||
"services": [
|
||||
{
|
||||
"unit": "nginx.service",
|
||||
"role_name": "nginx",
|
||||
"packages": ["nginx"],
|
||||
"active_state": "active",
|
||||
"sub_state": "running",
|
||||
"unit_file_state": "enabled",
|
||||
"condition_result": None,
|
||||
"managed_files": [],
|
||||
"managed_links": [
|
||||
{
|
||||
"path": "/etc/nginx/sites-enabled/default",
|
||||
"target": "../sites-available/default",
|
||||
"reason": "enabled_symlink",
|
||||
}
|
||||
],
|
||||
"excluded": [],
|
||||
"notes": [],
|
||||
}
|
||||
],
|
||||
"packages": [],
|
||||
"apt_config": {
|
||||
"role_name": "apt_config",
|
||||
"managed_files": [],
|
||||
"excluded": [],
|
||||
"notes": [],
|
||||
},
|
||||
"dnf_config": {
|
||||
"role_name": "dnf_config",
|
||||
"managed_files": [],
|
||||
"excluded": [],
|
||||
"notes": [],
|
||||
},
|
||||
"etc_custom": {
|
||||
"role_name": "etc_custom",
|
||||
"managed_files": [],
|
||||
"excluded": [],
|
||||
"notes": [],
|
||||
},
|
||||
"usr_local_custom": {
|
||||
"role_name": "usr_local_custom",
|
||||
"managed_files": [],
|
||||
"excluded": [],
|
||||
"notes": [],
|
||||
},
|
||||
"extra_paths": {
|
||||
"role_name": "extra_paths",
|
||||
"include_patterns": [],
|
||||
"exclude_patterns": [],
|
||||
"managed_files": [],
|
||||
"managed_links": [],
|
||||
"excluded": [],
|
||||
"notes": [],
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
bundle.mkdir(parents=True, exist_ok=True)
|
||||
(bundle / "artifacts").mkdir(parents=True, exist_ok=True)
|
||||
(bundle / "state.json").write_text(json.dumps(state), encoding="utf-8")
|
||||
|
||||
manifest.manifest(str(bundle), str(out))
|
||||
|
||||
tasks = (out / "roles" / "nginx" / "tasks" / "main.yml").read_text(encoding="utf-8")
|
||||
assert "- name: Ensure managed symlinks exist" in tasks
|
||||
assert 'loop: "{{ nginx_managed_links | default([]) }}"' in tasks
|
||||
|
||||
defaults = (out / "roles" / "nginx" / "defaults" / "main.yml").read_text(
|
||||
encoding="utf-8"
|
||||
)
|
||||
# The role defaults should include the converted link mapping.
|
||||
assert "nginx_managed_links:" in defaults
|
||||
assert "dest: /etc/nginx/sites-enabled/default" in defaults
|
||||
assert "src: ../sites-available/default" in defaults
|
||||
|
|
@ -1,5 +1,13 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import os
|
||||
import stat
|
||||
import subprocess
|
||||
import sys
|
||||
import types
|
||||
from pathlib import Path
|
||||
from types import SimpleNamespace
|
||||
|
||||
import pytest
|
||||
|
||||
|
|
@ -94,3 +102,315 @@ def test_sops_pgp_arg_and_encrypt_decrypt_roundtrip(tmp_path: Path, monkeypatch)
|
|||
# Sanity: we invoked encrypt and decrypt.
|
||||
assert any("--encrypt" in c for c in calls)
|
||||
assert any("--decrypt" in c for c in calls)
|
||||
|
||||
|
||||
def test_cache_dir_defaults_to_home_cache(monkeypatch, tmp_path: Path):
|
||||
# Ensure default path uses ~/.cache when XDG_CACHE_HOME is unset.
|
||||
from enroll.cache import enroll_cache_dir
|
||||
|
||||
monkeypatch.delenv("XDG_CACHE_HOME", raising=False)
|
||||
monkeypatch.setattr(Path, "home", lambda: tmp_path)
|
||||
|
||||
p = enroll_cache_dir()
|
||||
assert str(p).startswith(str(tmp_path))
|
||||
assert p.name == "enroll"
|
||||
|
||||
|
||||
def test_harvest_cache_state_json_property(tmp_path: Path):
|
||||
from enroll.cache import HarvestCache
|
||||
|
||||
hc = HarvestCache(tmp_path / "h1")
|
||||
assert hc.state_json == hc.dir / "state.json"
|
||||
|
||||
|
||||
def test_cache_dir_security_rejects_symlink(tmp_path: Path):
|
||||
from enroll.cache import _ensure_dir_secure
|
||||
|
||||
real = tmp_path / "real"
|
||||
real.mkdir()
|
||||
link = tmp_path / "link"
|
||||
link.symlink_to(real, target_is_directory=True)
|
||||
|
||||
with pytest.raises(RuntimeError, match="Refusing to use symlink"):
|
||||
_ensure_dir_secure(link)
|
||||
|
||||
|
||||
def test_cache_dir_chmod_failures_are_ignored(monkeypatch, tmp_path: Path):
|
||||
from enroll import cache
|
||||
|
||||
# Make the cache base path deterministic and writable.
|
||||
monkeypatch.setattr(cache, "enroll_cache_dir", lambda: tmp_path)
|
||||
|
||||
# Force os.chmod to fail to cover the "except OSError: pass" paths.
|
||||
monkeypatch.setattr(
|
||||
os, "chmod", lambda *a, **k: (_ for _ in ()).throw(OSError("nope"))
|
||||
)
|
||||
|
||||
hc = cache.new_harvest_cache_dir()
|
||||
assert hc.dir.exists()
|
||||
assert hc.dir.is_dir()
|
||||
|
||||
|
||||
def test_stat_triplet_falls_back_to_numeric_ids(monkeypatch, tmp_path: Path):
|
||||
from enroll.fsutil import stat_triplet
|
||||
import pwd
|
||||
import grp
|
||||
|
||||
p = tmp_path / "x"
|
||||
p.write_text("x", encoding="utf-8")
|
||||
|
||||
# Force username/group resolution failures.
|
||||
monkeypatch.setattr(
|
||||
pwd, "getpwuid", lambda _uid: (_ for _ in ()).throw(KeyError("no user"))
|
||||
)
|
||||
monkeypatch.setattr(
|
||||
grp, "getgrgid", lambda _gid: (_ for _ in ()).throw(KeyError("no group"))
|
||||
)
|
||||
|
||||
owner, group, mode = stat_triplet(str(p))
|
||||
assert owner.isdigit()
|
||||
assert group.isdigit()
|
||||
assert len(mode) == 4
|
||||
|
||||
|
||||
def test_ignore_policy_iter_effective_lines_removes_block_comments():
|
||||
from enroll.ignore import IgnorePolicy
|
||||
|
||||
pol = IgnorePolicy()
|
||||
data = b"""keep1
|
||||
/*
|
||||
drop me
|
||||
*/
|
||||
keep2
|
||||
"""
|
||||
assert list(pol.iter_effective_lines(data)) == [b"keep1", b"keep2"]
|
||||
|
||||
|
||||
def test_ignore_policy_deny_reason_dir_variants(tmp_path: Path):
|
||||
from enroll.ignore import IgnorePolicy
|
||||
|
||||
pol = IgnorePolicy()
|
||||
|
||||
# denied by glob
|
||||
assert pol.deny_reason_dir("/etc/shadow") == "denied_path"
|
||||
|
||||
# symlink rejected
|
||||
d = tmp_path / "d"
|
||||
d.mkdir()
|
||||
link = tmp_path / "l"
|
||||
link.symlink_to(d, target_is_directory=True)
|
||||
assert pol.deny_reason_dir(str(link)) == "symlink"
|
||||
|
||||
# not a directory
|
||||
f = tmp_path / "f"
|
||||
f.write_text("x", encoding="utf-8")
|
||||
assert pol.deny_reason_dir(str(f)) == "not_directory"
|
||||
|
||||
# ok
|
||||
assert pol.deny_reason_dir(str(d)) is None
|
||||
|
||||
|
||||
def test_run_jinjaturtle_parses_outputs(monkeypatch, tmp_path: Path):
|
||||
# Fully unit-test enroll.jinjaturtle.run_jinjaturtle by stubbing subprocess.run.
|
||||
from enroll.jinjaturtle import run_jinjaturtle
|
||||
|
||||
def fake_run(cmd, **kwargs): # noqa: ARG001
|
||||
# cmd includes "-d <defaults> -t <template>"
|
||||
d_idx = cmd.index("-d") + 1
|
||||
t_idx = cmd.index("-t") + 1
|
||||
defaults = Path(cmd[d_idx])
|
||||
template = Path(cmd[t_idx])
|
||||
defaults.write_text("---\nfoo: 1\n", encoding="utf-8")
|
||||
template.write_text("value={{ foo }}\n", encoding="utf-8")
|
||||
return SimpleNamespace(returncode=0, stdout="ok", stderr="")
|
||||
|
||||
monkeypatch.setattr(subprocess, "run", fake_run)
|
||||
|
||||
src = tmp_path / "src.ini"
|
||||
src.write_text("foo=1\n", encoding="utf-8")
|
||||
|
||||
res = run_jinjaturtle("/bin/jinjaturtle", str(src), role_name="role1")
|
||||
assert "foo: 1" in res.vars_text
|
||||
assert "value=" in res.template_text
|
||||
|
||||
|
||||
def test_run_jinjaturtle_raises_on_failure(monkeypatch, tmp_path: Path):
|
||||
from enroll.jinjaturtle import run_jinjaturtle
|
||||
|
||||
def fake_run(cmd, **kwargs): # noqa: ARG001
|
||||
return SimpleNamespace(returncode=2, stdout="out", stderr="bad")
|
||||
|
||||
monkeypatch.setattr(subprocess, "run", fake_run)
|
||||
|
||||
src = tmp_path / "src.ini"
|
||||
src.write_text("x", encoding="utf-8")
|
||||
with pytest.raises(RuntimeError, match="jinjaturtle failed"):
|
||||
run_jinjaturtle("/bin/jinjaturtle", str(src), role_name="role1")
|
||||
|
||||
|
||||
def test_require_sops_cmd_errors_when_missing(monkeypatch):
|
||||
from enroll.sopsutil import require_sops_cmd, SopsError
|
||||
|
||||
monkeypatch.setattr("enroll.sopsutil.shutil.which", lambda _: None)
|
||||
with pytest.raises(SopsError, match="not found on PATH"):
|
||||
require_sops_cmd()
|
||||
|
||||
|
||||
def test_get_enroll_version_reports_unknown_on_metadata_failure(monkeypatch):
|
||||
import enroll.version as v
|
||||
|
||||
fake_meta = types.ModuleType("importlib.metadata")
|
||||
|
||||
def boom():
|
||||
raise RuntimeError("boom")
|
||||
|
||||
fake_meta.packages_distributions = boom
|
||||
fake_meta.version = lambda _dist: boom()
|
||||
|
||||
monkeypatch.setitem(sys.modules, "importlib.metadata", fake_meta)
|
||||
assert v.get_enroll_version() == "unknown"
|
||||
|
||||
|
||||
def test_get_enroll_version_returns_unknown_if_importlib_metadata_unavailable(
|
||||
monkeypatch,
|
||||
):
|
||||
import builtins
|
||||
import enroll.version as v
|
||||
|
||||
real_import = builtins.__import__
|
||||
|
||||
def fake_import(
|
||||
name, globals=None, locals=None, fromlist=(), level=0
|
||||
): # noqa: A002
|
||||
if name == "importlib.metadata":
|
||||
raise ImportError("no metadata")
|
||||
return real_import(name, globals, locals, fromlist, level)
|
||||
|
||||
monkeypatch.setattr(builtins, "__import__", fake_import)
|
||||
assert v.get_enroll_version() == "unknown"
|
||||
|
||||
|
||||
def test_compare_harvests_and_format_report(tmp_path: Path):
|
||||
from enroll.diff import compare_harvests, format_report
|
||||
|
||||
old = tmp_path / "old"
|
||||
new = tmp_path / "new"
|
||||
(old / "artifacts").mkdir(parents=True)
|
||||
(new / "artifacts").mkdir(parents=True)
|
||||
|
||||
def write_state(base: Path, state: dict) -> None:
|
||||
base.mkdir(parents=True, exist_ok=True)
|
||||
(base / "state.json").write_text(json.dumps(state, indent=2), encoding="utf-8")
|
||||
|
||||
# Old bundle: pkg a@1.0, pkg b@1.0, one service, one user, one managed file.
|
||||
old_state = {
|
||||
"schema_version": 3,
|
||||
"host": {"hostname": "h1"},
|
||||
"inventory": {"packages": {"a": {"version": "1.0"}, "b": {"version": "1.0"}}},
|
||||
"roles": {
|
||||
"services": [
|
||||
{
|
||||
"unit": "svc.service",
|
||||
"role_name": "svc",
|
||||
"packages": ["a"],
|
||||
"active_state": "inactive",
|
||||
"sub_state": "dead",
|
||||
"unit_file_state": "enabled",
|
||||
"condition_result": None,
|
||||
"managed_files": [
|
||||
{
|
||||
"path": "/etc/foo.conf",
|
||||
"src_rel": "etc/foo.conf",
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"mode": "0644",
|
||||
"reason": "modified_conffile",
|
||||
}
|
||||
],
|
||||
}
|
||||
],
|
||||
"packages": [],
|
||||
"users": {
|
||||
"role_name": "users",
|
||||
"users": [{"name": "alice", "shell": "/bin/sh"}],
|
||||
},
|
||||
"apt_config": {"role_name": "apt_config", "managed_files": []},
|
||||
"etc_custom": {"role_name": "etc_custom", "managed_files": []},
|
||||
"usr_local_custom": {"role_name": "usr_local_custom", "managed_files": []},
|
||||
"extra_paths": {"role_name": "extra_paths", "managed_files": []},
|
||||
},
|
||||
}
|
||||
(old / "artifacts" / "svc" / "etc").mkdir(parents=True, exist_ok=True)
|
||||
(old / "artifacts" / "svc" / "etc" / "foo.conf").write_text("old", encoding="utf-8")
|
||||
write_state(old, old_state)
|
||||
|
||||
# New bundle: pkg a@2.0, pkg c@1.0, service changed, user changed, file moved role+content.
|
||||
new_state = {
|
||||
"schema_version": 3,
|
||||
"host": {"hostname": "h2"},
|
||||
"inventory": {"packages": {"a": {"version": "2.0"}, "c": {"version": "1.0"}}},
|
||||
"roles": {
|
||||
"services": [
|
||||
{
|
||||
"unit": "svc.service",
|
||||
"role_name": "svc",
|
||||
"packages": ["a", "c"],
|
||||
"active_state": "active",
|
||||
"sub_state": "running",
|
||||
"unit_file_state": "enabled",
|
||||
"condition_result": None,
|
||||
"managed_files": [],
|
||||
}
|
||||
],
|
||||
"packages": [],
|
||||
"users": {
|
||||
"role_name": "users",
|
||||
"users": [{"name": "alice", "shell": "/bin/bash"}, {"name": "bob"}],
|
||||
},
|
||||
"apt_config": {"role_name": "apt_config", "managed_files": []},
|
||||
"etc_custom": {"role_name": "etc_custom", "managed_files": []},
|
||||
"usr_local_custom": {"role_name": "usr_local_custom", "managed_files": []},
|
||||
"extra_paths": {
|
||||
"role_name": "extra_paths",
|
||||
"managed_files": [
|
||||
{
|
||||
"path": "/etc/foo.conf",
|
||||
"src_rel": "etc/foo.conf",
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"mode": "0600",
|
||||
"reason": "user_include",
|
||||
},
|
||||
{
|
||||
"path": "/etc/added.conf",
|
||||
"src_rel": "etc/added.conf",
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"mode": "0644",
|
||||
"reason": "user_include",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
}
|
||||
(new / "artifacts" / "extra_paths" / "etc").mkdir(parents=True, exist_ok=True)
|
||||
(new / "artifacts" / "extra_paths" / "etc" / "foo.conf").write_text(
|
||||
"new", encoding="utf-8"
|
||||
)
|
||||
(new / "artifacts" / "extra_paths" / "etc" / "added.conf").write_text(
|
||||
"x", encoding="utf-8"
|
||||
)
|
||||
write_state(new, new_state)
|
||||
|
||||
report, changed = compare_harvests(str(old), str(new))
|
||||
assert changed is True
|
||||
|
||||
txt = format_report(report, fmt="text")
|
||||
assert "Packages" in txt
|
||||
|
||||
md = format_report(report, fmt="markdown")
|
||||
assert "# enroll diff report" in md
|
||||
|
||||
js = format_report(report, fmt="json")
|
||||
parsed = json.loads(js)
|
||||
assert parsed["packages"]["added"] == ["c"]
|
||||
|
|
|
|||
|
|
@ -1,323 +0,0 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
import types
|
||||
from pathlib import Path
|
||||
from types import SimpleNamespace
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
def test_cache_dir_defaults_to_home_cache(monkeypatch, tmp_path: Path):
|
||||
# Ensure default path uses ~/.cache when XDG_CACHE_HOME is unset.
|
||||
from enroll.cache import enroll_cache_dir
|
||||
|
||||
monkeypatch.delenv("XDG_CACHE_HOME", raising=False)
|
||||
monkeypatch.setattr(Path, "home", lambda: tmp_path)
|
||||
|
||||
p = enroll_cache_dir()
|
||||
assert str(p).startswith(str(tmp_path))
|
||||
assert p.name == "enroll"
|
||||
|
||||
|
||||
def test_harvest_cache_state_json_property(tmp_path: Path):
|
||||
from enroll.cache import HarvestCache
|
||||
|
||||
hc = HarvestCache(tmp_path / "h1")
|
||||
assert hc.state_json == hc.dir / "state.json"
|
||||
|
||||
|
||||
def test_cache_dir_security_rejects_symlink(tmp_path: Path):
|
||||
from enroll.cache import _ensure_dir_secure
|
||||
|
||||
real = tmp_path / "real"
|
||||
real.mkdir()
|
||||
link = tmp_path / "link"
|
||||
link.symlink_to(real, target_is_directory=True)
|
||||
|
||||
with pytest.raises(RuntimeError, match="Refusing to use symlink"):
|
||||
_ensure_dir_secure(link)
|
||||
|
||||
|
||||
def test_cache_dir_chmod_failures_are_ignored(monkeypatch, tmp_path: Path):
|
||||
from enroll import cache
|
||||
|
||||
# Make the cache base path deterministic and writable.
|
||||
monkeypatch.setattr(cache, "enroll_cache_dir", lambda: tmp_path)
|
||||
|
||||
# Force os.chmod to fail to cover the "except OSError: pass" paths.
|
||||
monkeypatch.setattr(
|
||||
os, "chmod", lambda *a, **k: (_ for _ in ()).throw(OSError("nope"))
|
||||
)
|
||||
|
||||
hc = cache.new_harvest_cache_dir()
|
||||
assert hc.dir.exists()
|
||||
assert hc.dir.is_dir()
|
||||
|
||||
|
||||
def test_stat_triplet_falls_back_to_numeric_ids(monkeypatch, tmp_path: Path):
|
||||
from enroll.fsutil import stat_triplet
|
||||
import pwd
|
||||
import grp
|
||||
|
||||
p = tmp_path / "x"
|
||||
p.write_text("x", encoding="utf-8")
|
||||
|
||||
# Force username/group resolution failures.
|
||||
monkeypatch.setattr(
|
||||
pwd, "getpwuid", lambda _uid: (_ for _ in ()).throw(KeyError("no user"))
|
||||
)
|
||||
monkeypatch.setattr(
|
||||
grp, "getgrgid", lambda _gid: (_ for _ in ()).throw(KeyError("no group"))
|
||||
)
|
||||
|
||||
owner, group, mode = stat_triplet(str(p))
|
||||
assert owner.isdigit()
|
||||
assert group.isdigit()
|
||||
assert len(mode) == 4
|
||||
|
||||
|
||||
def test_ignore_policy_iter_effective_lines_removes_block_comments():
|
||||
from enroll.ignore import IgnorePolicy
|
||||
|
||||
pol = IgnorePolicy()
|
||||
data = b"""keep1
|
||||
/*
|
||||
drop me
|
||||
*/
|
||||
keep2
|
||||
"""
|
||||
assert list(pol.iter_effective_lines(data)) == [b"keep1", b"keep2"]
|
||||
|
||||
|
||||
def test_ignore_policy_deny_reason_dir_variants(tmp_path: Path):
|
||||
from enroll.ignore import IgnorePolicy
|
||||
|
||||
pol = IgnorePolicy()
|
||||
|
||||
# denied by glob
|
||||
assert pol.deny_reason_dir("/etc/shadow") == "denied_path"
|
||||
|
||||
# symlink rejected
|
||||
d = tmp_path / "d"
|
||||
d.mkdir()
|
||||
link = tmp_path / "l"
|
||||
link.symlink_to(d, target_is_directory=True)
|
||||
assert pol.deny_reason_dir(str(link)) == "symlink"
|
||||
|
||||
# not a directory
|
||||
f = tmp_path / "f"
|
||||
f.write_text("x", encoding="utf-8")
|
||||
assert pol.deny_reason_dir(str(f)) == "not_directory"
|
||||
|
||||
# ok
|
||||
assert pol.deny_reason_dir(str(d)) is None
|
||||
|
||||
|
||||
def test_run_jinjaturtle_parses_outputs(monkeypatch, tmp_path: Path):
|
||||
# Fully unit-test enroll.jinjaturtle.run_jinjaturtle by stubbing subprocess.run.
|
||||
from enroll.jinjaturtle import run_jinjaturtle
|
||||
|
||||
def fake_run(cmd, **kwargs): # noqa: ARG001
|
||||
# cmd includes "-d <defaults> -t <template>"
|
||||
d_idx = cmd.index("-d") + 1
|
||||
t_idx = cmd.index("-t") + 1
|
||||
defaults = Path(cmd[d_idx])
|
||||
template = Path(cmd[t_idx])
|
||||
defaults.write_text("---\nfoo: 1\n", encoding="utf-8")
|
||||
template.write_text("value={{ foo }}\n", encoding="utf-8")
|
||||
return SimpleNamespace(returncode=0, stdout="ok", stderr="")
|
||||
|
||||
monkeypatch.setattr(subprocess, "run", fake_run)
|
||||
|
||||
src = tmp_path / "src.ini"
|
||||
src.write_text("foo=1\n", encoding="utf-8")
|
||||
|
||||
res = run_jinjaturtle("/bin/jinjaturtle", str(src), role_name="role1")
|
||||
assert "foo: 1" in res.vars_text
|
||||
assert "value=" in res.template_text
|
||||
|
||||
|
||||
def test_run_jinjaturtle_raises_on_failure(monkeypatch, tmp_path: Path):
|
||||
from enroll.jinjaturtle import run_jinjaturtle
|
||||
|
||||
def fake_run(cmd, **kwargs): # noqa: ARG001
|
||||
return SimpleNamespace(returncode=2, stdout="out", stderr="bad")
|
||||
|
||||
monkeypatch.setattr(subprocess, "run", fake_run)
|
||||
|
||||
src = tmp_path / "src.ini"
|
||||
src.write_text("x", encoding="utf-8")
|
||||
with pytest.raises(RuntimeError, match="jinjaturtle failed"):
|
||||
run_jinjaturtle("/bin/jinjaturtle", str(src), role_name="role1")
|
||||
|
||||
|
||||
def test_require_sops_cmd_errors_when_missing(monkeypatch):
|
||||
from enroll.sopsutil import require_sops_cmd, SopsError
|
||||
|
||||
monkeypatch.setattr("enroll.sopsutil.shutil.which", lambda _: None)
|
||||
with pytest.raises(SopsError, match="not found on PATH"):
|
||||
require_sops_cmd()
|
||||
|
||||
|
||||
def test_get_enroll_version_reports_unknown_on_metadata_failure(monkeypatch):
|
||||
import enroll.version as v
|
||||
|
||||
fake_meta = types.ModuleType("importlib.metadata")
|
||||
|
||||
def boom():
|
||||
raise RuntimeError("boom")
|
||||
|
||||
fake_meta.packages_distributions = boom
|
||||
fake_meta.version = lambda _dist: boom()
|
||||
|
||||
monkeypatch.setitem(sys.modules, "importlib.metadata", fake_meta)
|
||||
assert v.get_enroll_version() == "unknown"
|
||||
|
||||
|
||||
def test_get_enroll_version_returns_unknown_if_importlib_metadata_unavailable(
|
||||
monkeypatch,
|
||||
):
|
||||
import builtins
|
||||
import enroll.version as v
|
||||
|
||||
real_import = builtins.__import__
|
||||
|
||||
def fake_import(
|
||||
name, globals=None, locals=None, fromlist=(), level=0
|
||||
): # noqa: A002
|
||||
if name == "importlib.metadata":
|
||||
raise ImportError("no metadata")
|
||||
return real_import(name, globals, locals, fromlist, level)
|
||||
|
||||
monkeypatch.setattr(builtins, "__import__", fake_import)
|
||||
assert v.get_enroll_version() == "unknown"
|
||||
|
||||
|
||||
def test_compare_harvests_and_format_report(tmp_path: Path):
|
||||
from enroll.diff import compare_harvests, format_report
|
||||
|
||||
old = tmp_path / "old"
|
||||
new = tmp_path / "new"
|
||||
(old / "artifacts").mkdir(parents=True)
|
||||
(new / "artifacts").mkdir(parents=True)
|
||||
|
||||
def write_state(base: Path, state: dict) -> None:
|
||||
base.mkdir(parents=True, exist_ok=True)
|
||||
(base / "state.json").write_text(json.dumps(state, indent=2), encoding="utf-8")
|
||||
|
||||
# Old bundle: pkg a@1.0, pkg b@1.0, one service, one user, one managed file.
|
||||
old_state = {
|
||||
"schema_version": 3,
|
||||
"host": {"hostname": "h1"},
|
||||
"inventory": {"packages": {"a": {"version": "1.0"}, "b": {"version": "1.0"}}},
|
||||
"roles": {
|
||||
"services": [
|
||||
{
|
||||
"unit": "svc.service",
|
||||
"role_name": "svc",
|
||||
"packages": ["a"],
|
||||
"active_state": "inactive",
|
||||
"sub_state": "dead",
|
||||
"unit_file_state": "enabled",
|
||||
"condition_result": None,
|
||||
"managed_files": [
|
||||
{
|
||||
"path": "/etc/foo.conf",
|
||||
"src_rel": "etc/foo.conf",
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"mode": "0644",
|
||||
"reason": "modified_conffile",
|
||||
}
|
||||
],
|
||||
}
|
||||
],
|
||||
"packages": [],
|
||||
"users": {
|
||||
"role_name": "users",
|
||||
"users": [{"name": "alice", "shell": "/bin/sh"}],
|
||||
},
|
||||
"apt_config": {"role_name": "apt_config", "managed_files": []},
|
||||
"etc_custom": {"role_name": "etc_custom", "managed_files": []},
|
||||
"usr_local_custom": {"role_name": "usr_local_custom", "managed_files": []},
|
||||
"extra_paths": {"role_name": "extra_paths", "managed_files": []},
|
||||
},
|
||||
}
|
||||
(old / "artifacts" / "svc" / "etc").mkdir(parents=True, exist_ok=True)
|
||||
(old / "artifacts" / "svc" / "etc" / "foo.conf").write_text("old", encoding="utf-8")
|
||||
write_state(old, old_state)
|
||||
|
||||
# New bundle: pkg a@2.0, pkg c@1.0, service changed, user changed, file moved role+content.
|
||||
new_state = {
|
||||
"schema_version": 3,
|
||||
"host": {"hostname": "h2"},
|
||||
"inventory": {"packages": {"a": {"version": "2.0"}, "c": {"version": "1.0"}}},
|
||||
"roles": {
|
||||
"services": [
|
||||
{
|
||||
"unit": "svc.service",
|
||||
"role_name": "svc",
|
||||
"packages": ["a", "c"],
|
||||
"active_state": "active",
|
||||
"sub_state": "running",
|
||||
"unit_file_state": "enabled",
|
||||
"condition_result": None,
|
||||
"managed_files": [],
|
||||
}
|
||||
],
|
||||
"packages": [],
|
||||
"users": {
|
||||
"role_name": "users",
|
||||
"users": [{"name": "alice", "shell": "/bin/bash"}, {"name": "bob"}],
|
||||
},
|
||||
"apt_config": {"role_name": "apt_config", "managed_files": []},
|
||||
"etc_custom": {"role_name": "etc_custom", "managed_files": []},
|
||||
"usr_local_custom": {"role_name": "usr_local_custom", "managed_files": []},
|
||||
"extra_paths": {
|
||||
"role_name": "extra_paths",
|
||||
"managed_files": [
|
||||
{
|
||||
"path": "/etc/foo.conf",
|
||||
"src_rel": "etc/foo.conf",
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"mode": "0600",
|
||||
"reason": "user_include",
|
||||
},
|
||||
{
|
||||
"path": "/etc/added.conf",
|
||||
"src_rel": "etc/added.conf",
|
||||
"owner": "root",
|
||||
"group": "root",
|
||||
"mode": "0644",
|
||||
"reason": "user_include",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
}
|
||||
(new / "artifacts" / "extra_paths" / "etc").mkdir(parents=True, exist_ok=True)
|
||||
(new / "artifacts" / "extra_paths" / "etc" / "foo.conf").write_text(
|
||||
"new", encoding="utf-8"
|
||||
)
|
||||
(new / "artifacts" / "extra_paths" / "etc" / "added.conf").write_text(
|
||||
"x", encoding="utf-8"
|
||||
)
|
||||
write_state(new, new_state)
|
||||
|
||||
report, changed = compare_harvests(str(old), str(new))
|
||||
assert changed is True
|
||||
|
||||
txt = format_report(report, fmt="text")
|
||||
assert "Packages" in txt
|
||||
|
||||
md = format_report(report, fmt="markdown")
|
||||
assert "# enroll diff report" in md
|
||||
|
||||
js = format_report(report, fmt="json")
|
||||
parsed = json.loads(js)
|
||||
assert parsed["packages"]["added"] == ["c"]
|
||||
|
|
@ -3,6 +3,8 @@ from __future__ import annotations
|
|||
import os
|
||||
from pathlib import Path
|
||||
|
||||
import enroll.pathfilter as pf
|
||||
|
||||
|
||||
def test_compile_and_match_prefix_glob_and_regex(tmp_path: Path):
|
||||
from enroll.pathfilter import PathFilter, compile_path_pattern
|
||||
|
|
@ -78,3 +80,107 @@ def test_expand_includes_notes_on_no_matches(tmp_path: Path):
|
|||
paths, notes = expand_includes(pats, max_files=10)
|
||||
assert paths == []
|
||||
assert any("matched no files" in n.lower() for n in notes)
|
||||
|
||||
|
||||
def test_expand_includes_supports_regex_with_inferred_root(tmp_path: Path):
|
||||
"""Regex includes are expanded by walking an inferred literal prefix root."""
|
||||
from enroll.pathfilter import compile_path_pattern, expand_includes
|
||||
|
||||
root = tmp_path / "root"
|
||||
(root / "home" / "alice" / ".config" / "myapp").mkdir(parents=True)
|
||||
target = root / "home" / "alice" / ".config" / "myapp" / "settings.ini"
|
||||
target.write_text("x=1\n", encoding="utf-8")
|
||||
|
||||
# This is anchored and begins with an absolute path, so expand_includes should
|
||||
# infer a narrow walk root instead of scanning '/'.
|
||||
rex = rf"re:^{root}/home/[^/]+/\.config/myapp/.*$"
|
||||
pat = compile_path_pattern(rex)
|
||||
paths, notes = expand_includes([pat], max_files=10)
|
||||
assert str(target) in paths
|
||||
assert notes == []
|
||||
|
||||
|
||||
def test_compile_path_pattern_normalises_relative_prefix():
|
||||
from enroll.pathfilter import compile_path_pattern
|
||||
|
||||
p = compile_path_pattern("etc/ssh")
|
||||
assert p.kind == "prefix"
|
||||
assert p.value == "/etc/ssh"
|
||||
|
||||
|
||||
def test_norm_abs_empty_string_is_root():
|
||||
assert pf._norm_abs("") == "/"
|
||||
|
||||
|
||||
def test_posix_match_invalid_pattern_fails_closed(monkeypatch):
|
||||
# Force PurePosixPath.match to raise to cover the exception handler.
|
||||
real_match = pf.PurePosixPath.match
|
||||
|
||||
def boom(self, pat):
|
||||
raise ValueError("bad pattern")
|
||||
|
||||
monkeypatch.setattr(pf.PurePosixPath, "match", boom)
|
||||
try:
|
||||
assert pf._posix_match("/etc/hosts", "[bad") is False
|
||||
finally:
|
||||
monkeypatch.setattr(pf.PurePosixPath, "match", real_match)
|
||||
|
||||
|
||||
def test_regex_literal_prefix_handles_escapes():
|
||||
# Prefix stops at meta chars but includes escaped literals.
|
||||
assert pf._regex_literal_prefix(r"^/etc/\./foo") == "/etc/./foo"
|
||||
|
||||
|
||||
def test_expand_includes_maybe_add_file_skips_non_files(monkeypatch, tmp_path: Path):
    """Paths that are neither files, links nor dirs produce a 'no files' note."""
    patterns = [pf.compile_path_pattern(str(tmp_path / "missing"))]

    # Make every filesystem predicate deny the path so _maybe_add_file rejects it.
    for predicate in ("isfile", "islink", "isdir"):
        monkeypatch.setattr(pf.os.path, predicate, lambda p: False)

    found, notes = pf.expand_includes(patterns, max_files=10)
    assert found == []
    assert any("matched no files" in note for note in notes)
|
||||
|
||||
|
||||
def test_expand_includes_prunes_excluded_dirs(monkeypatch):
    """Directories matching an exclude pattern are pruned from the walk."""
    include = [pf.compile_path_pattern("/root/**")]
    exclude = pf.PathFilter(exclude=["/root/skip/**"])

    # Fake a tiny filesystem: /root contains 'skip' and 'keep'; everything
    # listed is a directory, every leaf entry is a regular file.
    known_dirs = {"/root", "/root/keep", "/root/skip"}
    monkeypatch.setattr(pf.os.path, "isdir", lambda p: p in known_dirs)
    monkeypatch.setattr(pf.os.path, "islink", lambda p: False)
    monkeypatch.setattr(pf.os.path, "isfile", lambda p: True)

    def fake_walk(root, followlinks=False):
        assert root == "/root"
        yield ("/root", ["skip", "keep"], [])
        yield ("/root/keep", [], ["a.txt"])
        # If pruning works, /root/skip is never descended into.

    monkeypatch.setattr(pf.os, "walk", fake_walk)

    found, _ = pf.expand_includes(include, exclude=exclude, max_files=10)
    assert "/root/keep/a.txt" in found
    assert all(not p.startswith("/root/skip") for p in found)
|
||||
|
||||
|
||||
def test_expand_includes_respects_max_files(monkeypatch):
    """Expansion stops collecting paths once max_files is reached."""
    include = [pf.compile_path_pattern("/root/**")]
    monkeypatch.setattr(pf.os.path, "isdir", lambda p: p == "/root")
    monkeypatch.setattr(pf.os.path, "islink", lambda p: False)
    monkeypatch.setattr(pf.os.path, "isfile", lambda p: True)

    # A single directory listing with three candidate files.
    fake_listing = [("/root", [], ["a", "b", "c"])]
    monkeypatch.setattr(pf.os, "walk", lambda root, followlinks=False: fake_listing)

    found, notes = pf.expand_includes(include, max_files=2)
    assert len(found) == 2
    assert "/root/c" not in found
|
||||
|
|
|
|||
|
|
@ -129,3 +129,50 @@ def test_rpm_config_files_and_modified_files_parsing(monkeypatch):
|
|||
rpm, "_run", lambda cmd, allow_fail=False, merge_err=False: (1, out)
|
||||
)
|
||||
assert rpm.rpm_modified_files("mypkg") == {"/etc/foo.conf", "/etc/bar"}
|
||||
|
||||
|
||||
def test_list_manual_packages_uses_yum_fallback(monkeypatch):
    """When dnf is absent but yum exists, package history is read via yum."""
    # Only 'yum' resolves on PATH; everything else (including dnf) is missing.
    monkeypatch.setattr(
        rpm.shutil, "which", lambda exe: "/usr/bin/yum" if exe == "yum" else None
    )

    def fake_run(cmd, allow_fail=False, merge_err=False):
        assert cmd[:3] == ["yum", "-q", "history"]
        return 0, "Installed Packages\nvim-enhanced.x86_64\nhtop\n"

    monkeypatch.setattr(rpm, "_run", fake_run)
    assert rpm.list_manual_packages() == ["htop", "vim-enhanced"]
|
||||
|
||||
|
||||
def test_list_installed_packages_parses_epoch_and_sorts(monkeypatch):
    """Non-zero epochs become 'E:' prefixes and entries sort deterministically."""
    query_output = (
        "bash\t0\t5.2.26\t1.el9\tx86_64\n"
        "bash\t1\t5.2.26\t1.el9\taarch64\n"
        "coreutils\t(none)\t9.1\t2.el9\tx86_64\n"
    )
    monkeypatch.setattr(
        rpm, "_run", lambda cmd, allow_fail=False, merge_err=False: (0, query_output)
    )

    packages = rpm.list_installed_packages()
    first_bash = packages["bash"][0]
    assert first_bash["arch"] == "aarch64"  # sorted by arch then version
    assert first_bash["version"].startswith("1:")
    assert packages["coreutils"][0]["version"] == "9.1-2.el9"
|
||||
|
||||
|
||||
def test_rpm_config_files_returns_empty_on_failure(monkeypatch):
    """A failing rpm query yields an empty config-file set, not an error."""
    failing_run = lambda cmd, allow_fail=False, merge_err=False: (1, "")
    monkeypatch.setattr(rpm, "_run", failing_run)
    assert rpm.rpm_config_files("missing") == set()
|
||||
|
||||
|
||||
def test_rpm_owner_strips_epoch_prefix_when_present(monkeypatch):
    """Defensive: an epoch-like 'N:' token in rpm output is stripped."""
    epoch_output = lambda cmd, allow_fail=False, merge_err=False: (0, "1:bash\n")
    monkeypatch.setattr(rpm, "_run", epoch_output)
    assert rpm.rpm_owner("/bin/bash") == "bash"
|
||||
|
|
|
|||
31
tests/test_rpm_run.py
Normal file
31
tests/test_rpm_run.py
Normal file
|
|
@ -0,0 +1,31 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import types
|
||||
|
||||
import pytest
|
||||
|
||||
import enroll.rpm as rpm
|
||||
|
||||
|
||||
def test_run_raises_on_nonzero_returncode_when_not_allow_fail(monkeypatch):
    """A non-zero exit without allow_fail raises with stdout/stderr attached."""

    def fake_run(cmd, check, text, stdout, stderr):
        return types.SimpleNamespace(returncode=1, stdout="OUT", stderr="ERR")

    monkeypatch.setattr(rpm.subprocess, "run", fake_run)
    with pytest.raises(RuntimeError) as excinfo:
        rpm._run(["rpm", "-q"])  # type: ignore[attr-defined]

    message = str(excinfo.value)
    assert "Command failed" in message
    assert "ERR" in message
    assert "OUT" in message
|
||||
|
||||
|
||||
def test_run_merge_err_includes_stderr_in_stdout(monkeypatch):
    """With merge_err=True, stderr is folded into stdout by the subprocess call."""

    def fake_run(cmd, check, text, stdout, stderr):
        # stderr is redirected to STDOUT in that mode, so the wrapper only
        # ever reads the stdout attribute.
        return types.SimpleNamespace(returncode=0, stdout="COMBINED", stderr=None)

    monkeypatch.setattr(rpm.subprocess, "run", fake_run)
    result = rpm._run(["rpm", "-q"], merge_err=True)
    assert result == (0, "COMBINED")
|
||||
182
tests/test_validate.py
Normal file
182
tests/test_validate.py
Normal file
|
|
@ -0,0 +1,182 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
import enroll.cli as cli
|
||||
from enroll.validate import validate_harvest
|
||||
|
||||
|
||||
def _base_state() -> dict:
|
||||
return {
|
||||
"enroll": {"version": "0.0.test", "harvest_time": 0},
|
||||
"host": {
|
||||
"hostname": "testhost",
|
||||
"os": "unknown",
|
||||
"pkg_backend": "dpkg",
|
||||
"os_release": {},
|
||||
},
|
||||
"inventory": {"packages": {}},
|
||||
"roles": {
|
||||
"users": {
|
||||
"role_name": "users",
|
||||
"users": [],
|
||||
"managed_dirs": [],
|
||||
"managed_files": [],
|
||||
"excluded": [],
|
||||
"notes": [],
|
||||
},
|
||||
"services": [],
|
||||
"packages": [],
|
||||
"apt_config": {
|
||||
"role_name": "apt_config",
|
||||
"managed_dirs": [],
|
||||
"managed_files": [],
|
||||
"excluded": [],
|
||||
"notes": [],
|
||||
},
|
||||
"dnf_config": {
|
||||
"role_name": "dnf_config",
|
||||
"managed_dirs": [],
|
||||
"managed_files": [],
|
||||
"excluded": [],
|
||||
"notes": [],
|
||||
},
|
||||
"etc_custom": {
|
||||
"role_name": "etc_custom",
|
||||
"managed_dirs": [],
|
||||
"managed_files": [],
|
||||
"excluded": [],
|
||||
"notes": [],
|
||||
},
|
||||
"usr_local_custom": {
|
||||
"role_name": "usr_local_custom",
|
||||
"managed_dirs": [],
|
||||
"managed_files": [],
|
||||
"excluded": [],
|
||||
"notes": [],
|
||||
},
|
||||
"extra_paths": {
|
||||
"role_name": "extra_paths",
|
||||
"include_patterns": [],
|
||||
"exclude_patterns": [],
|
||||
"managed_dirs": [],
|
||||
"managed_files": [],
|
||||
"managed_links": [],
|
||||
"excluded": [],
|
||||
"notes": [],
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
def _write_bundle(tmp_path: Path, state: dict) -> Path:
|
||||
bundle = tmp_path / "bundle"
|
||||
bundle.mkdir(parents=True)
|
||||
(bundle / "artifacts").mkdir()
|
||||
(bundle / "state.json").write_text(json.dumps(state, indent=2), encoding="utf-8")
|
||||
return bundle
|
||||
|
||||
|
||||
def test_validate_ok_bundle(tmp_path: Path):
    """A bundle whose managed file has a matching artifact validates cleanly."""
    state = _base_state()
    entry = {
        "path": "/etc/hosts",
        "src_rel": "etc/hosts",
        "owner": "root",
        "group": "root",
        "mode": "0644",
        "reason": "custom_specific_path",
    }
    state["roles"]["etc_custom"]["managed_files"].append(entry)

    bundle = _write_bundle(tmp_path, state)
    artifact = bundle / "artifacts" / "etc_custom" / "etc" / "hosts"
    artifact.parent.mkdir(parents=True, exist_ok=True)
    artifact.write_text("127.0.0.1 localhost\n", encoding="utf-8")

    result = validate_harvest(str(bundle))
    assert result.ok
    assert result.errors == []
|
||||
|
||||
|
||||
def test_validate_missing_artifact_is_error(tmp_path: Path):
    """Referencing an artifact absent from the bundle is a validation error."""
    state = _base_state()
    entry = {
        "path": "/etc/hosts",
        "src_rel": "etc/hosts",
        "owner": "root",
        "group": "root",
        "mode": "0644",
        "reason": "custom_specific_path",
    }
    state["roles"]["etc_custom"]["managed_files"].append(entry)

    # No artifact is written for the entry above.
    result = validate_harvest(str(_write_bundle(tmp_path, state)))
    assert not result.ok
    assert any("missing artifact" in err for err in result.errors)
|
||||
|
||||
|
||||
def test_validate_schema_error_is_reported(tmp_path: Path):
    """An out-of-enum host/os value is surfaced as a schema error."""
    state = _base_state()
    state["host"]["os"] = "not_a_real_os"

    result = validate_harvest(str(_write_bundle(tmp_path, state)))
    assert not result.ok
    assert any(err.startswith("schema /host/os") for err in result.errors)
|
||||
|
||||
|
||||
def test_cli_validate_exits_1_on_validation_error(monkeypatch, tmp_path: Path):
    """'enroll validate' exits with status 1 when the bundle fails validation."""
    state = _base_state()
    entry = {
        "path": "/etc/hosts",
        "src_rel": "etc/hosts",
        "owner": "root",
        "group": "root",
        "mode": "0644",
        "reason": "custom_specific_path",
    }
    state["roles"]["etc_custom"]["managed_files"].append(entry)
    bundle = _write_bundle(tmp_path, state)

    # The referenced artifact is deliberately missing -> validation error.
    monkeypatch.setattr(sys, "argv", ["enroll", "validate", str(bundle)])
    with pytest.raises(SystemExit) as excinfo:
        cli.main()
    assert excinfo.value.code == 1
|
||||
|
||||
|
||||
def test_cli_validate_exits_1_on_validation_warning_with_flag(
    monkeypatch, tmp_path: Path
):
    """--fail-on-warnings escalates warnings (orphaned artifact) to exit 1."""
    state = _base_state()
    entry = {
        "path": "/etc/hosts",
        "src_rel": "etc/hosts",
        "owner": "root",
        "group": "root",
        "mode": "0644",
        "reason": "custom_specific_path",
    }
    state["roles"]["etc_custom"]["managed_files"].append(entry)

    bundle = _write_bundle(tmp_path, state)
    managed = bundle / "artifacts" / "etc_custom" / "etc" / "hosts"
    managed.parent.mkdir(parents=True, exist_ok=True)
    managed.write_text("127.0.0.1 localhost\n", encoding="utf-8")

    # An artifact that nothing in state references -> warning, which the
    # flag below should turn into a non-zero exit.
    orphan = managed.with_name("hosts2")
    orphan.write_text("hello\n", encoding="utf-8")

    monkeypatch.setattr(
        sys, "argv", ["enroll", "validate", str(bundle), "--fail-on-warnings"]
    )
    with pytest.raises(SystemExit) as excinfo:
        cli.main()
    assert excinfo.value.code == 1
|
||||
Loading…
Add table
Add a link
Reference in a new issue