Compare commits

...

19 Commits

Author SHA1 Message Date
Prox
06b54c97f1 fix: use correct event field name setup_key_name (not setup_key) 2026-03-06 19:49:59 +02:00
9ffe1ba7b5 chore: mark GS-Enroll-Test as enrolled
All checks were successful
Reconcile / reconcile (push) Successful in 5s
2026-03-06 17:47:04 +00:00
5f9da63fa0 Merge pull request 'add GS-Enroll-Test setup key for enrollment test' (#5) from test-scenario-4 into main
All checks were successful
Reconcile / reconcile (push) Successful in 5s
Reviewed-on: #5
2026-03-06 17:31:51 +00:00
Prox
6433452c9d add GS-Enroll-Test setup key for enrollment test
All checks were successful
Dry Run / dry-run (pull_request) Successful in 5s
2026-03-06 19:30:25 +02:00
Prox
2a3e033b56 log setup key values in reconcile job output 2026-03-06 19:29:59 +02:00
Prox
bb7fc8dc16 flat table with action column in dry-run PR comment 2026-03-06 19:20:54 +02:00
a93eebdf18 Merge pull request 'removed setup key' (#4) from test-scenario-3 into main
All checks were successful
Reconcile / reconcile (push) Successful in 4s
Reviewed-on: #4
2026-03-06 17:19:12 +00:00
Prox
062e228902 removed setup key
All checks were successful
Dry Run / dry-run (pull_request) Successful in 5s
2026-03-06 19:17:04 +02:00
Prox
ad8557d543 add field-level change details to dry-run and reconcile output 2026-03-06 19:11:36 +02:00
490ae09a3f Merge pull request 'disabled pilots-to-gs policy' (#3) from test-scenario-2 into main
All checks were successful
Reconcile / reconcile (push) Successful in 5s
Reviewed-on: #3
2026-03-06 17:02:43 +00:00
Prox
bd1e792281 disabled pilots-to-gs policy
All checks were successful
Dry Run / dry-run (pull_request) Successful in 5s
2026-03-06 19:00:12 +02:00
565bd35a6e Merge pull request 'added group and policy' (#2) from test-scenario-1 into main
All checks were successful
Reconcile / reconcile (push) Successful in 5s
Reviewed-on: #2
2026-03-06 16:56:13 +00:00
Prox
066cc830e5 added group and policy
All checks were successful
Dry Run / dry-run (pull_request) Successful in 5s
2026-03-06 18:53:45 +02:00
Prox
0eef866266 markdown improvement 2026-03-06 18:34:59 +02:00
Prox
aa4aa32655 updated dry-run job 2026-03-06 18:34:59 +02:00
Prox
9a29a9cc0c improved jobs 2026-03-06 18:34:59 +02:00
Prox
19bf70034c updated dry-run.yml and reconcile jobs 2026-03-06 18:34:59 +02:00
Prox
42c1eb6f73 added test setup key 2026-03-06 18:34:59 +02:00
Prox
a316e39cae added TEST_SCENARIOS 2026-03-06 18:29:39 +02:00
13 changed files with 663 additions and 269 deletions

50
.beads/.gitignore vendored
View File

@ -1,45 +1,30 @@
# Dolt database (managed by Dolt, not git) # SQLite databases
dolt/ *.db
dolt-access.lock *.db?*
*.db-journal
*.db-wal
*.db-shm
# Runtime files # Daemon runtime files
daemon.lock
daemon.log
daemon.pid
bd.sock bd.sock
bd.sock.startlock
sync-state.json sync-state.json
last-touched last-touched
# Local version tracking (prevents upgrade notification spam after git ops) # Local version tracking (prevents upgrade notification spam after git ops)
.local_version .local_version
# Legacy database files
db.sqlite
bd.db
# Worktree redirect file (contains relative path to main repo's .beads/) # Worktree redirect file (contains relative path to main repo's .beads/)
# Must not be committed as paths would be wrong in other clones # Must not be committed as paths would be wrong in other clones
redirect redirect
# Sync state (local-only, per-machine) # Merge artifacts (temporary files from 3-way merge)
# These files are machine-specific and should not be shared across clones
.sync.lock
.jsonl.lock
sync_base.jsonl
export-state/
# Ephemeral store (SQLite - wisps/molecules, intentionally not versioned)
ephemeral.sqlite3
ephemeral.sqlite3-journal
ephemeral.sqlite3-wal
ephemeral.sqlite3-shm
# Legacy files (from pre-Dolt versions)
*.db
*.db?*
*.db-journal
*.db-wal
*.db-shm
db.sqlite
bd.db
daemon.lock
daemon.log
daemon-*.log.gz
daemon.pid
beads.base.jsonl beads.base.jsonl
beads.base.meta.json beads.base.meta.json
beads.left.jsonl beads.left.jsonl
@ -47,6 +32,11 @@ beads.left.meta.json
beads.right.jsonl beads.right.jsonl
beads.right.meta.json beads.right.meta.json
# Sync state (local-only, per-machine)
# These files are machine-specific and should not be shared across clones
.sync.lock
sync_base.jsonl
# NOTE: Do NOT add negation patterns (e.g., !issues.jsonl) here. # NOTE: Do NOT add negation patterns (e.g., !issues.jsonl) here.
# They would override fork protection in .git/info/exclude, allowing # They would override fork protection in .git/info/exclude, allowing
# contributors to accidentally commit upstream issue databases. # contributors to accidentally commit upstream issue databases.

0
.beads/issues.jsonl Normal file
View File

View File

@ -1,7 +1,4 @@
{ {
"database": "dolt", "database": "dolt",
"jsonl_export": "issues.jsonl", "jsonl_export": "issues.jsonl"
"backend": "dolt",
"dolt_mode": "server",
"dolt_database": "beads_netbird-gitops"
} }

3
.gitattributes vendored Normal file
View File

@ -0,0 +1,3 @@
# Use bd merge for beads JSONL files
.beads/issues.jsonl merge=beads

View File

@ -6,101 +6,150 @@ on:
- "state/*.json" - "state/*.json"
jobs: jobs:
detect: dry-run:
runs-on: ubuntu-latest runs-on: ubuntu-latest
outputs:
envs: ${{ steps.changed.outputs.envs }}
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with: with:
fetch-depth: 0 fetch-depth: 0
- name: Detect changed environments - name: Dry-run reconcile for changed environments
id: changed
run: |
FILES=$(git diff --name-only ${{ github.event.pull_request.base.sha }} ${{ github.sha }} -- 'state/*.json')
ENVS="[]"
for f in $FILES; do
ENV=$(basename "$f" .json)
ENVS=$(echo "$ENVS" | jq -c ". + [\"$ENV\"]")
done
echo "envs=$ENVS" >> "$GITHUB_OUTPUT"
echo "Changed environments: $ENVS"
dry-run:
needs: detect
runs-on: ubuntu-latest
if: needs.detect.outputs.envs != '[]'
strategy:
matrix:
env: ${{ fromJson(needs.detect.outputs.envs) }}
steps:
- uses: actions/checkout@v4
- name: Resolve environment secrets
id: env
run: |
ENV_UPPER=$(echo "${{ matrix.env }}" | tr '[:lower:]-' '[:upper:]_')
echo "token_key=${ENV_UPPER}_RECONCILER_TOKEN" >> "$GITHUB_OUTPUT"
echo "url_key=${ENV_UPPER}_RECONCILER_URL" >> "$GITHUB_OUTPUT"
- name: Run dry-run reconcile
id: plan
env: env:
RECONCILER_TOKEN: ${{ secrets[steps.env.outputs.token_key] }} BASE_SHA: ${{ github.event.pull_request.base.sha }}
RECONCILER_URL: ${{ secrets[steps.env.outputs.url_key] }} HEAD_SHA: ${{ github.sha }}
TEST_RECONCILER_TOKEN: ${{ secrets.TEST_RECONCILER_TOKEN }}
TEST_RECONCILER_URL: ${{ secrets.TEST_RECONCILER_URL }}
DEV_RECONCILER_TOKEN: ${{ secrets.DEV_RECONCILER_TOKEN }}
DEV_RECONCILER_URL: ${{ secrets.DEV_RECONCILER_URL }}
PROD_RECONCILER_TOKEN: ${{ secrets.PROD_RECONCILER_TOKEN }}
PROD_RECONCILER_URL: ${{ secrets.PROD_RECONCILER_URL }}
GIT_TOKEN: ${{ secrets.GIT_TOKEN }}
GIT_URL: ${{ secrets.GIT_URL }}
REPO: ${{ github.repository }}
PR_NUMBER: ${{ github.event.pull_request.number }}
run: | run: |
if [ -z "$RECONCILER_URL" ] || [ -z "$RECONCILER_TOKEN" ]; then python3 <<'SCRIPT'
echo "No secrets configured for environment '${{ matrix.env }}' — skipping" import json, os, subprocess, urllib.request
echo "response={}" >> "$GITHUB_OUTPUT"
exit 0
fi
RESPONSE=$(curl -sf \
-X POST \
-H "Authorization: Bearer ${RECONCILER_TOKEN}" \
-H "Content-Type: application/json" \
-d @state/${{ matrix.env }}.json \
"${RECONCILER_URL}/reconcile?dry_run=true")
echo "response<<EOF" >> "$GITHUB_OUTPUT"
echo "$RESPONSE" >> "$GITHUB_OUTPUT"
echo "EOF" >> "$GITHUB_OUTPUT"
- name: Format plan as markdown # Detect changed state files
id: format diff = subprocess.run(
if: steps.plan.outputs.response != '{}' ["git", "diff", "--name-only", os.environ["BASE_SHA"], os.environ["HEAD_SHA"], "--", "state/*.json"],
run: | capture_output=True, text=True, check=True,
cat <<'SCRIPT' > format.py )
import json, sys envs = [os.path.basename(f).replace(".json", "") for f in diff.stdout.strip().split("\n") if f.strip()]
data = json.loads(sys.stdin.read())
if not envs:
print("No state files changed")
exit(0)
print(f"Changed environments: {envs}")
for env in envs:
key = env.upper().replace("-", "_")
token = os.environ.get(f"{key}_RECONCILER_TOKEN", "")
url = os.environ.get(f"{key}_RECONCILER_URL", "")
if not token or not url:
print(f"[{env}] No secrets configured — skipping")
continue
# Call reconciler dry-run
with open(f"state/{env}.json", "rb") as f:
state_data = f.read()
req = urllib.request.Request(
f"{url}/reconcile?dry_run=true",
data=state_data,
method="POST",
headers={
"Authorization": f"Bearer {token}",
"Content-Type": "application/json",
},
)
try:
resp = urllib.request.urlopen(req)
data = json.loads(resp.read())
except Exception as e:
print(f"[{env}] Reconciler call failed: {e}")
continue
# Format as markdown table
ops = data.get("operations", []) ops = data.get("operations", [])
summary = data.get("summary", {}) summary = data.get("summary", {})
env = sys.argv[1]
lines = [f"## Reconciliation Plan: `{env}`\n"] lines = [f"## Reconciliation Plan: `{env}`\n"]
if not ops: if not ops:
lines.append("No changes detected.\n") lines.append("No changes detected.\n")
else: else:
lines.append("| Operation | Name |") def fmt_val(v):
lines.append("|-----------|------|") if isinstance(v, bool):
for op in ops: return str(v).lower()
lines.append(f"| `{op['type']}` | {op['name']} |") if isinstance(v, list):
lines.append("") return ", ".join(str(x) for x in v) if v else "(empty)"
s = summary if v is None:
lines.append(f"**Summary:** {s.get('created',0)} create, {s.get('updated',0)} update, {s.get('deleted',0)} delete") return "(none)"
print("\n".join(lines)) return str(v)
SCRIPT
COMMENT=$(echo '${{ steps.plan.outputs.response }}' | python3 format.py "${{ matrix.env }}")
echo "comment<<EOF" >> "$GITHUB_OUTPUT"
echo "$COMMENT" >> "$GITHUB_OUTPUT"
echo "EOF" >> "$GITHUB_OUTPUT"
- name: Post PR comment def get_action(op):
if: steps.plan.outputs.response != '{}' """Derive a human-readable action from the operation."""
env: t = op["type"]
GIT_TOKEN: ${{ secrets.GIT_TOKEN }} changes = op.get("changes", [])
run: | # For updates, check if it's just an enable/disable toggle
curl -sf \ if not t.startswith("create") and not t.startswith("delete"):
-X POST \ if len(changes) == 1 and changes[0]["field"] == "enabled":
-H "Authorization: token ${GIT_TOKEN}" \ return "disable" if changes[0]["to"] is False else "enable"
-H "Content-Type: application/json" \ return "update"
-d "{\"body\": $(echo '${{ steps.format.outputs.comment }}' | jq -Rs .)}" \ return t.split("_", 1)[0]
"${{ secrets.GIT_URL }}/api/v1/repos/${{ github.repository }}/issues/${{ github.event.pull_request.number }}/comments"
def fmt_details(op, action):
"""Format details based on action type."""
if action == "delete":
return ""
if action in ("enable", "disable"):
return ""
changes = op.get("changes", [])
if changes:
parts = []
for c in changes:
parts.append(f"`{c['field']}`: {fmt_val(c['from'])} → {fmt_val(c['to'])}")
return "; ".join(parts)
d = op.get("details", {})
if not d:
return ""
parts = []
if "sources" in d and "destinations" in d:
parts.append(f"{fmt_val(d['sources'])} → {fmt_val(d['destinations'])}")
if "auto_groups" in d:
parts.append(f"groups: {fmt_val(d['auto_groups'])}")
if "type" in d and "address" in d:
parts.append(f"{d['type']}:{d['address']}")
if "peers" in d:
parts.append(f"peers: {fmt_val(d['peers'])}")
return "; ".join(parts) if parts else ""
lines.append("| Action | Resource | Name | Details |")
lines.append("|--------|----------|------|---------|")
for op in ops:
resource = op["type"].split("_", 1)[1] if "_" in op["type"] else op["type"]
action = get_action(op)
detail = fmt_details(op, action)
lines.append(f"| {action} | {resource} | {op['name']} | {detail} |")
lines.append("")
c, u, d = summary.get("created", 0), summary.get("updated", 0), summary.get("deleted", 0)
lines.append(f"**Total: {c} create, {u} update, {d} delete**")
comment = "\n".join(lines)
print(comment)
# Post PR comment
git_token = os.environ.get("GIT_TOKEN", "")
git_url = os.environ.get("GIT_URL", "")
if git_token and git_url:
api_url = f"{git_url}/api/v1/repos/{os.environ['REPO']}/issues/{os.environ['PR_NUMBER']}/comments"
body = json.dumps({"body": comment}).encode()
req = urllib.request.Request(api_url, data=body, method="POST", headers={
"Authorization": f"token {git_token}",
"Content-Type": "application/json",
})
urllib.request.urlopen(req)
print(f"Posted comment to PR #{os.environ['PR_NUMBER']}")
SCRIPT

View File

@ -8,103 +8,121 @@ on:
- "state/*.json" - "state/*.json"
jobs: jobs:
detect: reconcile:
runs-on: ubuntu-latest runs-on: ubuntu-latest
outputs:
envs: ${{ steps.changed.outputs.envs }}
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with: with:
fetch-depth: 2 fetch-depth: 2
- name: Detect changed environments - name: Reconcile changed environments
id: changed
run: |
FILES=$(git diff --name-only HEAD~1 HEAD -- 'state/*.json')
ENVS="[]"
for f in $FILES; do
ENV=$(basename "$f" .json)
ENVS=$(echo "$ENVS" | jq -c ". + [\"$ENV\"]")
done
echo "envs=$ENVS" >> "$GITHUB_OUTPUT"
echo "Changed environments: $ENVS"
reconcile:
needs: detect
runs-on: ubuntu-latest
if: needs.detect.outputs.envs != '[]'
strategy:
matrix:
env: ${{ fromJson(needs.detect.outputs.envs) }}
steps:
- uses: actions/checkout@v4
- name: Resolve environment secrets
id: env
run: |
ENV_UPPER=$(echo "${{ matrix.env }}" | tr '[:lower:]-' '[:upper:]_')
echo "token_key=${ENV_UPPER}_RECONCILER_TOKEN" >> "$GITHUB_OUTPUT"
echo "url_key=${ENV_UPPER}_RECONCILER_URL" >> "$GITHUB_OUTPUT"
echo "age_key=${ENV_UPPER}_AGE_PUBLIC_KEY" >> "$GITHUB_OUTPUT"
- name: Sync events
env: env:
RECONCILER_TOKEN: ${{ secrets[steps.env.outputs.token_key] }} TEST_RECONCILER_TOKEN: ${{ secrets.TEST_RECONCILER_TOKEN }}
RECONCILER_URL: ${{ secrets[steps.env.outputs.url_key] }} TEST_RECONCILER_URL: ${{ secrets.TEST_RECONCILER_URL }}
DEV_RECONCILER_TOKEN: ${{ secrets.DEV_RECONCILER_TOKEN }}
DEV_RECONCILER_URL: ${{ secrets.DEV_RECONCILER_URL }}
PROD_RECONCILER_TOKEN: ${{ secrets.PROD_RECONCILER_TOKEN }}
PROD_RECONCILER_URL: ${{ secrets.PROD_RECONCILER_URL }}
run: | run: |
if [ -z "$RECONCILER_URL" ] || [ -z "$RECONCILER_TOKEN" ]; then python3 <<'SCRIPT'
echo "No secrets configured for environment '${{ matrix.env }}' — skipping" import json, os, subprocess, urllib.request, sys
exit 0
fi
curl -sf \
-X POST \
-H "Authorization: Bearer ${RECONCILER_TOKEN}" \
"${RECONCILER_URL}/sync-events"
- name: Pull latest (poller may have committed) # Detect changed state files
run: git pull --rebase diff = subprocess.run(
["git", "diff", "--name-only", "HEAD~1", "HEAD", "--", "state/*.json"],
capture_output=True, text=True, check=True,
)
envs = [os.path.basename(f).replace(".json", "") for f in diff.stdout.strip().split("\n") if f.strip()]
- name: Apply reconcile if not envs:
id: reconcile print("No state files changed")
env: exit(0)
RECONCILER_TOKEN: ${{ secrets[steps.env.outputs.token_key] }}
RECONCILER_URL: ${{ secrets[steps.env.outputs.url_key] }}
run: |
RESPONSE=$(curl -sf \
-X POST \
-H "Authorization: Bearer ${RECONCILER_TOKEN}" \
-H "Content-Type: application/json" \
-d @state/${{ matrix.env }}.json \
"${RECONCILER_URL}/reconcile")
echo "response<<EOF" >> "$GITHUB_OUTPUT"
echo "$RESPONSE" >> "$GITHUB_OUTPUT"
echo "EOF" >> "$GITHUB_OUTPUT"
STATUS=$(echo "$RESPONSE" | jq -r '.status') print(f"Changed environments: {envs}")
if [ "$STATUS" = "error" ]; then failed = []
echo "Reconcile failed for ${{ matrix.env }}"
echo "$RESPONSE" | jq .
exit 1
fi
- name: Encrypt and upload setup keys for env in envs:
if: success() key = env.upper().replace("-", "_")
env: token = os.environ.get(f"{key}_RECONCILER_TOKEN", "")
AGE_PUBLIC_KEY: ${{ secrets[steps.env.outputs.age_key] }} url = os.environ.get(f"{key}_RECONCILER_URL", "")
run: |
KEYS=$(echo '${{ steps.reconcile.outputs.response }}' | jq -r '.created_keys // empty')
if [ -n "$KEYS" ] && [ "$KEYS" != "{}" ] && [ "$KEYS" != "null" ] && [ -n "$AGE_PUBLIC_KEY" ]; then
echo "$KEYS" | age -r "$AGE_PUBLIC_KEY" -o setup-keys-${{ matrix.env }}.age
echo "Setup keys for ${{ matrix.env }} encrypted"
else
echo "No new keys created for ${{ matrix.env }}"
exit 0
fi
- name: Upload artifact if not token or not url:
if: success() print(f"[{env}] No secrets configured — skipping")
uses: actions/upload-artifact@v4 continue
with:
name: setup-keys-${{ matrix.env }} # Sync events first
path: setup-keys-${{ matrix.env }}.age try:
if-no-files-found: ignore req = urllib.request.Request(
f"{url}/sync-events", method="POST",
headers={"Authorization": f"Bearer {token}"},
)
urllib.request.urlopen(req)
print(f"[{env}] Synced events")
except Exception as e:
print(f"[{env}] Sync events failed (non-fatal): {e}")
# Apply reconcile
with open(f"state/{env}.json", "rb") as f:
state_data = f.read()
req = urllib.request.Request(
f"{url}/reconcile",
data=state_data,
method="POST",
headers={
"Authorization": f"Bearer {token}",
"Content-Type": "application/json",
},
)
try:
resp = urllib.request.urlopen(req)
data = json.loads(resp.read())
except Exception as e:
print(f"[{env}] Reconcile FAILED: {e}")
failed.append(env)
continue
status = data.get("status", "ok")
if status == "error":
print(f"[{env}] Reconcile returned error:")
print(json.dumps(data, indent=2))
failed.append(env)
continue
summary = data.get("summary", {})
print(f"[{env}] Reconcile OK: "
f"{summary.get('created',0)} created, "
f"{summary.get('updated',0)} updated, "
f"{summary.get('deleted',0)} deleted")
# Log each operation with details
for op in data.get("operations", []):
t = op["type"]
n = op["name"]
s = op.get("status", "?")
action = "CREATE" if t.startswith("create") else "DELETE" if t.startswith("delete") else "UPDATE"
resource = t.split("_", 1)[1] if "_" in t else t
prefix = f"[{env}] {action} {resource} '{n}' -> {s}"
changes = op.get("changes", [])
if changes:
def fmt(v):
if isinstance(v, bool): return str(v).lower()
if isinstance(v, list): return ", ".join(str(x) for x in v) if v else "(empty)"
if v is None: return "(none)"
return str(v)
parts = [f"{c['field']}: {fmt(c['from'])} -> {fmt(c['to'])}" for c in changes]
print(f" {prefix} [{'; '.join(parts)}]")
else:
print(f" {prefix}")
keys = data.get("created_keys", {})
if keys:
print(f"[{env}] Created setup keys:")
for name, value in keys.items():
print(f" {name}: {value}")
if failed:
print(f"\nFailed environments: {failed}")
sys.exit(1)
SCRIPT

227
poc/TEST-SCENARIOS.md Normal file
View File

@ -0,0 +1,227 @@
# Test Scenarios for NetBird GitOps PoC
Test instance: `vps-a.networkmonitor.cc`
State file: `state/test.json`
Gitea: `gitea.vps-a.networkmonitor.cc`
Current state on the instance: 2 groups, 3 setup keys, 1 policy, 1 user.
Each scenario: create a branch, edit `state/test.json`, push, open PR (dry-run),
review plan, merge (apply), verify on NetBird dashboard.
---
## Scenario 1: Add a new group and policy
**Goal:** Verify creating multiple resources in one PR.
**Changes to `state/test.json`:**
Add a new group `observers` and a policy allowing observers to see the
`ground-stations` group:
```json
"groups": {
"ground-stations": { "peers": [] },
"pilots": { "peers": [] },
"observers": { "peers": [] }
},
"policies": {
"pilots-to-gs": { ... },
"observers-to-gs": {
"description": "Observers can view ground stations",
"enabled": true,
"sources": ["observers"],
"destinations": ["ground-stations"],
"bidirectional": false,
"protocol": "all",
"action": "accept"
}
}
```
**Expected dry-run:**
- Create: group `observers`, policy `observers-to-gs`
**Verify after merge:**
- Dashboard shows the `observers` group
- Policy `observers-to-gs` exists with correct sources/destinations
---
## Scenario 2: Update an existing policy
**Goal:** Verify update detection works.
**Changes to `state/test.json`:**
Disable the `pilots-to-gs` policy:
```json
"pilots-to-gs": {
"enabled": false,
...
}
```
**Expected dry-run:**
- Update: policy `pilots-to-gs`
**Verify after merge:**
- Policy shows as disabled on the dashboard
---
## Scenario 3: Delete a resource
**Goal:** Verify deletion works safely.
**Changes to `state/test.json`:**
Remove `Pilot-Vlad-2` from `setup_keys` (delete the entire key).
**Expected dry-run:**
- Delete: setup_key `Pilot-Vlad-2`
**Verify after merge:**
- Setup key no longer appears on the dashboard
---
## Scenario 4: Enroll a peer (full lifecycle)
**Goal:** Verify the enrollment detection and peer rename flow.
**Prerequisite:** The runner and a Gitea token must be configured for the
reconciler poller. Run the Ansible playbook with a filled-in `vault.yml` first.
**Steps:**
1. Make sure `state/test.json` has an unenrolled setup key, e.g.:
```json
"GS-TestHawk-1": {
"type": "one-off",
"expires_in": 604800,
"usage_limit": 1,
"auto_groups": ["ground-stations"],
"enrolled": false
}
```
2. Copy the setup key value from the NetBird dashboard (or from a previous
   reconcile run's `created_keys` output)
3. Enroll a peer:
```bash
sudo netbird up --management-url https://vps-a.networkmonitor.cc --setup-key <KEY>
```
4. Wait for the poller to detect enrollment (~30 seconds)
5. Verify:
- Peer is renamed to `GS-TestHawk-1` on the dashboard
- `state/test.json` in Gitea repo has `"enrolled": true` for that key
- The commit was made by the reconciler automatically
---
## Scenario 5: Multi-resource create (bigger change)
**Goal:** Test a realistic initial deployment scenario.
**Changes to `state/test.json`:**
Add network, posture check, and DNS in one PR:
```json
"posture_checks": {
"geo-restrict-ua": {
"description": "Allow only UA/PL locations",
"checks": {
"geo_location_check": {
"locations": [
{ "country_code": "UA" },
{ "country_code": "PL" }
],
"action": "allow"
}
}
}
},
"dns": {
"nameserver_groups": {
"cloudflare": {
"nameservers": [
{ "ip": "1.1.1.1", "ns_type": "udp", "port": 53 }
],
"domains": [],
"enabled": true,
"primary": true,
"groups": ["pilots", "ground-stations"]
}
}
}
```
**Expected dry-run:**
- Create: posture_check `geo-restrict-ua`, dns `cloudflare`
**Verify after merge:**
- Posture check appears in dashboard
- DNS nameserver group exists
---
## Scenario 6: No-op (idempotency check)
**Goal:** Verify that pushing state that matches what's already deployed
produces no operations.
**Steps:**
1. Export current state:
```bash
deno task export -- \
--netbird-api-url https://vps-a.networkmonitor.cc/api \
--netbird-api-token <TOKEN> > state/test.json
```
2. Push to a branch, open PR
3. **Expected dry-run:** "No changes detected."
---
## Scenario 7: Conflicting change (error handling)
**Goal:** Verify the reconciler handles errors gracefully.
**Steps:**
1. Reference a group that doesn't exist in a policy:
```json
"bad-policy": {
"enabled": true,
"sources": ["nonexistent-group"],
"destinations": ["pilots"],
"bidirectional": true
}
```
2. This should fail schema validation before hitting the API.
3. **Expected:** CI job fails with a clear error message.
---
## Quick reference
```bash
# Create test branch
git checkout -b test-scenario-N
# Edit state/test.json
# Push and open PR
git push poc test-scenario-N
# After testing, clean up
git checkout main && git branch -D test-scenario-N
```

View File

@ -12,7 +12,7 @@ function makeEvent(overrides: Partial<NbEvent> = {}): NbEvent {
initiator_id: "init-1", initiator_id: "init-1",
initiator_name: "admin", initiator_name: "admin",
target_id: "peer-1", target_id: "peer-1",
meta: { setup_key: "drone-key", name: "drone-01" }, meta: { setup_key_name: "drone-key", name: "drone-01" },
...overrides, ...overrides,
}; };
} }
@ -66,7 +66,7 @@ Deno.test("processEnrollmentEvents filters by lastTimestamp", () => {
Deno.test("processEnrollmentEvents ignores unknown keys", () => { Deno.test("processEnrollmentEvents ignores unknown keys", () => {
const events: NbEvent[] = [ const events: NbEvent[] = [
makeEvent({ makeEvent({
meta: { setup_key: "rogue-key", name: "rogue-host" }, meta: { setup_key_name: "rogue-key", name: "rogue-host" },
target_id: "peer-x", target_id: "peer-x",
}), }),
]; ];

View File

@ -21,10 +21,10 @@ export function processEnrollmentEvents(
.filter((e) => { .filter((e) => {
if (e.activity_code !== "peer.setupkey.add") return false; if (e.activity_code !== "peer.setupkey.add") return false;
if (lastTimestamp && e.timestamp <= lastTimestamp) return false; if (lastTimestamp && e.timestamp <= lastTimestamp) return false;
if (!knownKeyNames.has(e.meta.setup_key)) { if (!knownKeyNames.has(e.meta.setup_key_name)) {
console.log(JSON.stringify({ console.log(JSON.stringify({
msg: "unknown_enrollment", msg: "unknown_enrollment",
setup_key: e.meta.setup_key, setup_key_name: e.meta.setup_key_name,
peer_id: e.target_id, peer_id: e.target_id,
})); }));
return false; return false;
@ -32,7 +32,7 @@ export function processEnrollmentEvents(
return true; return true;
}) })
.map((e) => ({ .map((e) => ({
setupKeyName: e.meta.setup_key, setupKeyName: e.meta.setup_key_name,
peerId: e.target_id, peerId: e.target_id,
peerHostname: e.meta.name, peerHostname: e.meta.name,
timestamp: e.timestamp, timestamp: e.timestamp,

View File

@ -1,7 +1,11 @@
import type { DesiredState } from "../state/schema.ts"; import type { DesiredState } from "../state/schema.ts";
import type { ActualState } from "../state/actual.ts"; import type { ActualState } from "../state/actual.ts";
import type { NbPolicyRule } from "../netbird/types.ts"; import type { NbPolicyRule } from "../netbird/types.ts";
import { EXECUTION_ORDER, type Operation } from "./operations.ts"; import {
EXECUTION_ORDER,
type FieldChange,
type Operation,
} from "./operations.ts";
/** /**
* Compares desired state against actual state and returns an ordered list of * Compares desired state against actual state and returns an ordered list of
@ -53,10 +57,14 @@ function diffPostureChecks(
continue; continue;
} }
if ( const pcChanges: FieldChange[] = [];
existing.description !== config.description || if (existing.description !== config.description) {
JSON.stringify(existing.checks) !== JSON.stringify(config.checks) pcChanges.push({ field: "description", from: existing.description, to: config.description });
) { }
if (JSON.stringify(existing.checks) !== JSON.stringify(config.checks)) {
pcChanges.push({ field: "checks", from: existing.checks, to: config.checks });
}
if (pcChanges.length > 0) {
ops.push({ ops.push({
type: "update_posture_check", type: "update_posture_check",
name, name,
@ -64,6 +72,7 @@ function diffPostureChecks(
description: config.description, description: config.description,
checks: config.checks, checks: config.checks,
}, },
changes: pcChanges,
}); });
} }
} }
@ -108,6 +117,9 @@ function diffGroups(
desired_peers: desiredPeerNames, desired_peers: desiredPeerNames,
actual_peers: actualPeerNames, actual_peers: actualPeerNames,
}, },
changes: [
{ field: "peers", from: actualPeerNames, to: desiredPeerNames },
],
}); });
} }
} }
@ -219,6 +231,9 @@ function diffNetworks(
type: "update_network", type: "update_network",
name, name,
details: { description: config.description }, details: { description: config.description },
changes: [
{ field: "description", from: existing.description, to: config.description },
],
}); });
} }
@ -293,13 +308,23 @@ function diffNetworkResources(
const actualGroupNames = existing.groups.map((g) => g.name).sort(); const actualGroupNames = existing.groups.map((g) => g.name).sort();
const desiredGroupNames = [...res.groups].sort(); const desiredGroupNames = [...res.groups].sort();
if ( const resChanges: FieldChange[] = [];
existing.description !== res.description || if (existing.description !== res.description) {
existing.type !== res.type || resChanges.push({ field: "description", from: existing.description, to: res.description });
existing.address !== res.address || }
existing.enabled !== res.enabled || if (existing.type !== res.type) {
!arraysEqual(actualGroupNames, desiredGroupNames) resChanges.push({ field: "type", from: existing.type, to: res.type });
) { }
if (existing.address !== res.address) {
resChanges.push({ field: "address", from: existing.address, to: res.address });
}
if (existing.enabled !== res.enabled) {
resChanges.push({ field: "enabled", from: existing.enabled, to: res.enabled });
}
if (!arraysEqual(actualGroupNames, desiredGroupNames)) {
resChanges.push({ field: "groups", from: actualGroupNames, to: desiredGroupNames });
}
if (resChanges.length > 0) {
ops.push({ ops.push({
type: "update_network_resource", type: "update_network_resource",
name: res.name, name: res.name,
@ -312,6 +337,7 @@ function diffNetworkResources(
enabled: res.enabled, enabled: res.enabled,
groups: res.groups, groups: res.groups,
}, },
changes: resChanges,
}); });
} }
} }
@ -362,11 +388,17 @@ function diffNetworkRouters(
} }
// Compare mutable fields // Compare mutable fields
if ( const routerChanges: FieldChange[] = [];
existing.metric !== router.metric || if (existing.metric !== router.metric) {
existing.masquerade !== router.masquerade || routerChanges.push({ field: "metric", from: existing.metric, to: router.metric });
existing.enabled !== router.enabled }
) { if (existing.masquerade !== router.masquerade) {
routerChanges.push({ field: "masquerade", from: existing.masquerade, to: router.masquerade });
}
if (existing.enabled !== router.enabled) {
routerChanges.push({ field: "enabled", from: existing.enabled, to: router.enabled });
}
if (routerChanges.length > 0) {
ops.push({ ops.push({
type: "update_network_router", type: "update_network_router",
name: key, name: key,
@ -379,6 +411,7 @@ function diffNetworkRouters(
masquerade: router.masquerade, masquerade: router.masquerade,
enabled: router.enabled, enabled: router.enabled,
}, },
changes: routerChanges,
}); });
} }
} }
@ -441,7 +474,7 @@ function diffPeers(
const existing = actual.peersByName.get(name); const existing = actual.peersByName.get(name);
if (!existing) continue; // Never create or delete peers if (!existing) continue; // Never create or delete peers
let changed = false; const peerChanges: FieldChange[] = [];
// Compare groups (excluding "All"), resolve actual peer group names // Compare groups (excluding "All"), resolve actual peer group names
const actualGroupNames = existing.groups const actualGroupNames = existing.groups
@ -450,19 +483,20 @@ function diffPeers(
.sort(); .sort();
const desiredGroupNames = [...config.groups].sort(); const desiredGroupNames = [...config.groups].sort();
if (!arraysEqual(actualGroupNames, desiredGroupNames)) { if (!arraysEqual(actualGroupNames, desiredGroupNames)) {
changed = true; peerChanges.push({ field: "groups", from: actualGroupNames, to: desiredGroupNames });
} }
if ( if (existing.login_expiration_enabled !== config.login_expiration_enabled) {
existing.login_expiration_enabled !== config.login_expiration_enabled || peerChanges.push({ field: "login_expiration_enabled", from: existing.login_expiration_enabled, to: config.login_expiration_enabled });
existing.inactivity_expiration_enabled !== }
config.inactivity_expiration_enabled || if (existing.inactivity_expiration_enabled !== config.inactivity_expiration_enabled) {
existing.ssh_enabled !== config.ssh_enabled peerChanges.push({ field: "inactivity_expiration_enabled", from: existing.inactivity_expiration_enabled, to: config.inactivity_expiration_enabled });
) { }
changed = true; if (existing.ssh_enabled !== config.ssh_enabled) {
peerChanges.push({ field: "ssh_enabled", from: existing.ssh_enabled, to: config.ssh_enabled });
} }
if (changed) { if (peerChanges.length > 0) {
ops.push({ ops.push({
type: "update_peer", type: "update_peer",
name, name,
@ -472,6 +506,7 @@ function diffPeers(
inactivity_expiration_enabled: config.inactivity_expiration_enabled, inactivity_expiration_enabled: config.inactivity_expiration_enabled,
ssh_enabled: config.ssh_enabled, ssh_enabled: config.ssh_enabled,
}, },
changes: peerChanges,
}); });
} }
} }
@ -511,10 +546,14 @@ function diffUsers(
).sort(); ).sort();
const desiredAutoGroupNames = [...config.auto_groups].sort(); const desiredAutoGroupNames = [...config.auto_groups].sort();
if ( const userChanges: FieldChange[] = [];
existing.role !== config.role || if (existing.role !== config.role) {
!arraysEqual(actualAutoGroupNames, desiredAutoGroupNames) userChanges.push({ field: "role", from: existing.role, to: config.role });
) { }
if (!arraysEqual(actualAutoGroupNames, desiredAutoGroupNames)) {
userChanges.push({ field: "auto_groups", from: actualAutoGroupNames, to: desiredAutoGroupNames });
}
if (userChanges.length > 0) {
ops.push({ ops.push({
type: "update_user", type: "update_user",
name: email, name: email,
@ -523,6 +562,7 @@ function diffUsers(
role: config.role, role: config.role,
auto_groups: config.auto_groups, auto_groups: config.auto_groups,
}, },
changes: userChanges,
}); });
} }
} }
@ -609,12 +649,30 @@ function diffPolicies(
desiredPostureChecks, desiredPostureChecks,
); );
if ( const changes: FieldChange[] = [];
existing.enabled !== config.enabled || if (existing.enabled !== config.enabled) {
!arraysEqual(actualSources, desiredSources) || changes.push({ field: "enabled", from: existing.enabled, to: config.enabled });
destsChanged || }
postureChecksChanged if (!arraysEqual(actualSources, desiredSources)) {
) { changes.push({ field: "sources", from: actualSources, to: desiredSources });
}
if (destsChanged) {
if (config.destination_resource) {
const actualDestRes = existing.rules[0]?.destinationResource;
changes.push({ field: "destination_resource", from: actualDestRes ?? null, to: config.destination_resource });
} else {
const actualDests = extractGroupNames(
existing.rules.flatMap((r) => r.destinations ?? []),
actual,
).sort();
changes.push({ field: "destinations", from: actualDests, to: [...config.destinations].sort() });
}
}
if (postureChecksChanged) {
changes.push({ field: "source_posture_checks", from: actualPostureChecks, to: desiredPostureChecks });
}
if (changes.length > 0) {
ops.push({ ops.push({
type: "update_policy", type: "update_policy",
name, name,
@ -625,6 +683,7 @@ function diffPolicies(
destination_resource: config.destination_resource, destination_resource: config.destination_resource,
source_posture_checks: config.source_posture_checks, source_posture_checks: config.source_posture_checks,
}, },
changes,
}); });
} }
} }
@ -682,11 +741,17 @@ function diffRoutes(
continue; continue;
} }
if ( const routeChanges: FieldChange[] = [];
existing.enabled !== config.enabled || if (existing.enabled !== config.enabled) {
existing.description !== config.description || routeChanges.push({ field: "enabled", from: existing.enabled, to: config.enabled });
existing.network !== config.network }
) { if (existing.description !== config.description) {
routeChanges.push({ field: "description", from: existing.description, to: config.description });
}
if (existing.network !== config.network) {
routeChanges.push({ field: "network", from: existing.network, to: config.network });
}
if (routeChanges.length > 0) {
ops.push({ ops.push({
type: "update_route", type: "update_route",
name: networkId, name: networkId,
@ -695,6 +760,7 @@ function diffRoutes(
description: config.description, description: config.description,
network: config.network, network: config.network,
}, },
changes: routeChanges,
}); });
} }
} }
@ -741,11 +807,17 @@ function diffDns(
config.nameservers, config.nameservers,
); );
if ( const dnsChanges: FieldChange[] = [];
existing.enabled !== config.enabled || if (existing.enabled !== config.enabled) {
existing.primary !== config.primary || dnsChanges.push({ field: "enabled", from: existing.enabled, to: config.enabled });
nsChanged }
) { if (existing.primary !== config.primary) {
dnsChanges.push({ field: "primary", from: existing.primary, to: config.primary });
}
if (nsChanged) {
dnsChanges.push({ field: "nameservers", from: existing.nameservers, to: config.nameservers });
}
if (dnsChanges.length > 0) {
ops.push({ ops.push({
type: "update_dns", type: "update_dns",
name, name,
@ -754,6 +826,7 @@ function diffDns(
primary: config.primary, primary: config.primary,
nameservers: config.nameservers, nameservers: config.nameservers,
}, },
changes: dnsChanges,
}); });
} }
} }

View File

@ -33,10 +33,17 @@ export type OperationType =
| "update_user" | "update_user"
| "delete_user"; | "delete_user";
export interface FieldChange {
field: string;
from: unknown;
to: unknown;
}
export interface Operation { export interface Operation {
type: OperationType; type: OperationType;
name: string; name: string;
details?: Record<string, unknown>; details?: Record<string, unknown>;
changes?: FieldChange[];
} }
export interface OperationResult extends Operation { export interface OperationResult extends Operation {

View File

@ -109,6 +109,8 @@ async function handleReconcile(
operations: ops.map((op) => ({ operations: ops.map((op) => ({
type: op.type, type: op.type,
name: op.name, name: op.name,
...(op.details && { details: op.details }),
...(op.changes && { changes: op.changes }),
})), })),
summary: summarize(ops), summary: summarize(ops),
}); });
@ -141,6 +143,8 @@ async function handleReconcile(
type: r.type, type: r.type,
name: r.name, name: r.name,
status: r.status, status: r.status,
...(r.details && { details: r.details }),
...(r.changes && { changes: r.changes }),
})), })),
created_keys: createdKeysObj, created_keys: createdKeysObj,
summary: summarize(results), summary: summarize(results),

View File

@ -5,6 +5,9 @@
}, },
"pilots": { "pilots": {
"peers": [] "peers": []
},
"observers": {
"peers": []
} }
}, },
"setup_keys": { "setup_keys": {
@ -25,12 +28,21 @@
"pilots" "pilots"
], ],
"enrolled": false "enrolled": false
},
"GS-Enroll-Test": {
"type": "one-off",
"expires_in": 604800,
"usage_limit": 1,
"auto_groups": [
"ground-stations"
],
"enrolled": true
} }
}, },
"policies": { "policies": {
"pilots-to-gs": { "pilots-to-gs": {
"description": "", "description": "",
"enabled": true, "enabled": false,
"sources": [ "sources": [
"pilots" "pilots"
], ],
@ -41,8 +53,26 @@
"protocol": "all", "protocol": "all",
"action": "accept", "action": "accept",
"source_posture_checks": [] "source_posture_checks": []
},
"observers-to-gs": {
"description": "",
"enabled": true,
"sources": [
"observers"
],
"destinations": [
"ground-stations"
],
"bidirectional": false,
"protocol": "all",
"action": "accept",
"source_posture_checks": []
} }
}, },
"routes": {},
"dns": {
"nameserver_groups": {}
},
"posture_checks": {}, "posture_checks": {},
"networks": {}, "networks": {},
"peers": {}, "peers": {},
@ -52,9 +82,5 @@
"role": "owner", "role": "owner",
"auto_groups": [] "auto_groups": []
} }
},
"routes": {},
"dns": {
"nameserver_groups": {}
} }
} }