Workstation reorganization:
- All build/deploy/helper scripts moved into scripts/ (paths updated to use
REPO_ROOT instead of SCRIPT_DIR so they resolve sibling dirs from the new
depth)
- New config/ directory placeholder for site-specific overrides
- Removed stale: mok-keys/, test-vm.sh, test-lab.sh, setup-guide-original.txt,
unattend/ (duplicate of moved playbook/FlatUnattendW10.xml)
- README.md and SETUP.md structure listings updated, dead "Testing with KVM"
section removed
- .claude/ gitignored
Enrollment share internal taxonomy (forward-looking; existing servers
unaffected since they keep their current boot.wim with flat paths):
- Single SMB share kept (WinPE only mounts one Y: drive), but content now
organised into ppkgs/, scripts/, config/, shopfloor-setup/, pre-install/{bios,
installers}, installers-post/cmm/, blancco/, logs/
- README.md deployed to share root explaining each subdir
- New playbook tasks deploy site-config.json + wait-for-internet.ps1 +
migrate-to-wifi.ps1 explicitly (were ad-hoc on legacy servers)
- BIOS subdir moved into pre-install/bios/, preinstall/ renamed to pre-install/
- startnet.cmd + startnet-template.cmd updated with new Y:\subdir\ paths
- Bumped GCCH PPKG references v4.9 -> v4.10
Blancco USB-build fixes (so next fresh USB install boots Blancco end-to-end
without the manual fixup we did against GOLD):
- grub-blancco.cfg: kernel/initrd switched HTTP -> TFTP (GRUB's HTTP module
times out on multi-MB files); added modprobe.blacklist=iwlwifi,iwlmvm,btusb
(WiFi drivers hang udev on Intel business PCs)
- grubx64.efi rebuilt from updated cfg
- Playbook task added to create /srv/tftp/blancco/ symlinks pointing at the
HTTP-served binaries
run-enrollment.ps1: OOBEComplete is now set AFTER PPKG install (Win11 22H2+
hangs indefinitely if OOBEComplete is set before the bulk-enrollment PPKG runs).
Also includes deploy-bios.sh / pull-bios.sh / busybox-static / models.txt
that were sitting untracked at the repo root.
178 lines, 6.3 KiB, Python
#!/usr/bin/env python3
"""Sync HardwareDriver.json and user_selections.json across all PXE image types.

Reads all HardwareDriver.json files, builds a unified driver catalog,
then updates each image to include all known hardware models.

Run after adding new driver packs to the shared Out-of-box Drivers directory.
"""
|
|
import json
|
|
import os
|
|
import sys
|
|
from pathlib import Path
|
|
from collections import OrderedDict
|
|
|
|
# Root of the Samba share; each non-underscore subdirectory is one PXE image type.
WINPEAPPS = Path("/srv/samba/winpeapps")
# Shared driver-pack store that every image's HardwareDriver.json points into.
SHARED_DRIVERS = WINPEAPPS / "_shared" / "Out-of-box Drivers"
def normalize_entry(entry):
    """Return *entry* rebuilt with a fixed key set and legacy-key fallbacks.

    Older images used alternate key spellings (``models``, ``FileName``,
    ``DestinationDir``); the result always exposes the canonical names,
    with sensible defaults for anything missing.
    """
    return {
        "manufacturer": entry.get("manufacturer", "Dell"),
        # Fall back to the friendly manufacturer name when "product" is absent/empty.
        "product": entry.get("product") or entry.get("manufacturerfriendlyname", "Dell"),
        "family": entry.get("family", ""),
        "modelswminame": entry.get("modelswminame") or entry.get("models", ""),
        "modelsfriendlyname": entry.get("modelsfriendlyname", ""),
        "fileName": entry.get("fileName") or entry.get("FileName", ""),
        "destinationDir": entry.get("destinationDir") or entry.get("DestinationDir", ""),
        "url": entry.get("url", ""),
        "hash": entry.get("hash", ""),
        "size": entry.get("size", 0),
        "modifiedDate": entry.get("modifiedDate", "0001-01-01T00:00:00"),
        "osId": entry.get("osId", ""),
        "imagedisk": entry.get("imagedisk", 0),
    }
def merge_os_ids(a, b):
    """Merge two comma-separated osId strings (e.g. '18' + '20,21' -> '18,20,21').

    Whitespace around each id is stripped and empty parts are dropped.
    Numeric ids sort numerically; non-numeric ids keep the legacy key of 0
    but now use the string itself as a tie-breaker, so the result is
    deterministic (previously ties ordered by set iteration order, which
    varies under hash randomization).
    """
    ids = set()
    for oid in (a, b):
        for part in str(oid).split(","):
            part = part.strip()
            if part:
                ids.add(part)
    # Tuple key: primary numeric value (0 for non-numeric), secondary string.
    return ",".join(sorted(ids, key=lambda x: (int(x) if x.isdigit() else 0, x)))
def check_driver_exists(entry, root=None):
    """Return True if the driver zip referenced by *entry* exists on disk.

    entry -- normalized dict with "destinationDir" and "fileName" keys,
             where destinationDir is a Windows-style path possibly
             containing the PESetup "*destinationdir*" placeholder.
    root  -- base directory of the extracted driver tree; defaults to the
             shared Out-of-box Drivers directory (backward compatible).
    """
    base = SHARED_DRIVERS if root is None else Path(root)
    dest = entry["destinationDir"]
    # Drop the placeholder token and normalize Windows separators to POSIX.
    dest = dest.replace("*destinationdir*", "").lstrip("\\").replace("\\", "/")
    # Strip leading path components that are already part of the shared root.
    for prefix in ("Deploy/Out-of-box Drivers/", "Out-of-box Drivers/"):
        if dest.startswith(prefix):
            dest = dest[len(prefix):]
            break
    zip_path = base / dest.lstrip("/") / entry["fileName"]
    return zip_path.exists()
def _build_catalog(image_dirs):
    """Read every image's HardwareDriver.json and merge into one catalog.

    Entries are keyed on (family, fileName); duplicate keys get their osId
    lists merged and keep the longest model-name strings seen.
    """
    catalog = OrderedDict()
    for img_dir in image_dirs:
        hw_file = img_dir / "Deploy" / "Control" / "HardwareDriver.json"
        if not hw_file.exists():
            continue
        with open(hw_file) as f:
            entries = json.load(f)
        print(" Read {} entries from {}".format(len(entries), img_dir.name))
        for entry in entries:
            norm = normalize_entry(entry)
            key = (norm["family"], norm["fileName"])
            if key in catalog:
                catalog[key]["osId"] = merge_os_ids(
                    catalog[key]["osId"], norm["osId"]
                )
                # Prefer longer/more complete model names
                if len(norm["modelswminame"]) > len(catalog[key]["modelswminame"]):
                    catalog[key]["modelswminame"] = norm["modelswminame"]
                if len(norm["modelsfriendlyname"]) > len(
                    catalog[key]["modelsfriendlyname"]
                ):
                    catalog[key]["modelsfriendlyname"] = norm["modelsfriendlyname"]
            else:
                catalog[key] = norm
    return list(catalog.values())


def _report_missing(unified):
    """Warn about catalog entries whose zip is absent from the shared store."""
    missing = []
    found = 0
    for entry in unified:
        if check_driver_exists(entry):
            found += 1
        else:
            missing.append(
                " {}: {}".format(entry["family"], entry["fileName"])
            )
    print(" {} drivers found on disk".format(found))
    if missing:
        print(" WARNING: {} driver zips NOT found on disk:".format(len(missing)))
        for m in missing[:15]:
            print(m)
        if len(missing) > 15:
            print(" ... and {} more".format(len(missing) - 15))
        print(" (Entries still included - PESetup may download them)")


def _build_models(unified):
    """Build the deduplicated, sorted model-selection list from the catalog."""
    models = []
    seen = set()
    for entry in unified:
        family = entry["family"]
        for name in entry["modelsfriendlyname"].split(","):
            name = name.strip()
            if not name:
                continue
            key = (name, family)
            if key not in seen:
                seen.add(key)
                models.append({"Model": name, "Id": family})
    models.sort(key=lambda x: x["Model"])
    return models


def _update_images(image_dirs, unified, models):
    """Rewrite each image's HardwareDriver.json and user_selections.json.

    Preserves everything else in user_selections.json (e.g.
    OperatingSystemSelection); only HardwareModelSelection is replaced.
    """
    for img_dir in image_dirs:
        hw_file = img_dir / "Deploy" / "Control" / "HardwareDriver.json"
        us_file = img_dir / "Tools" / "user_selections.json"
        if not hw_file.exists() or not us_file.exists():
            continue

        # Write unified HardwareDriver.json
        with open(hw_file, "w") as f:
            json.dump(unified, f, indent=2)
            f.write("\n")

        with open(us_file) as f:
            user_sel = json.load(f)
        if not user_sel:
            # Malformed/empty selections file: skip rather than IndexError.
            print(" WARNING: {} has empty user_selections.json, skipped".format(
                img_dir.name
            ))
            continue
        old_count = len(user_sel[0].get("HardwareModelSelection", []))
        user_sel[0]["HardwareModelSelection"] = models
        with open(us_file, "w") as f:
            json.dump(user_sel, f, indent=2)
            f.write("\n")

        print(
            " {}: {} -> {} models, {} driver entries".format(
                img_dir.name, old_count, len(models), len(unified)
            )
        )


def main():
    """Sync driver catalogs and model selections across all PXE images."""
    print("=== PXE Hardware Model Sync ===")
    print()

    # Step 1: Build unified catalog from all images
    print("Reading driver catalogs...")
    image_dirs = sorted(
        [d for d in WINPEAPPS.iterdir() if d.is_dir() and not d.name.startswith("_")]
    )
    unified = _build_catalog(image_dirs)
    print()
    print("Unified catalog: {} unique driver entries".format(len(unified)))

    # Step 2: Check which drivers actually exist on disk
    _report_missing(unified)

    # Step 3: Build unified model selection from all driver entries
    models = _build_models(unified)
    print()
    print("Unified model selection: {} models".format(len(models)))

    # Step 4: Update each image
    print()
    print("Updating images...")
    _update_images(image_dirs, unified, models)

    print()
    print("Done!")
# Script entry point: run the sync when executed directly (not on import).
if __name__ == "__main__":
    main()