diff --git a/README.md b/README.md
index d42366cc..71030cff 100644
--- a/README.md
+++ b/README.md
@@ -287,6 +287,7 @@ These commands are intended to be run as part of build systems / deployment pipe
   `cfbs set-input` and `cfbs get-input` can be thought of as ways to save and load the input file.
   Similar to `cfbs get-input` the JSON contains both the specification (what the module accepts and how it's presented to the user) as well as the user's responses (if present).
   Expected usage is to run `cfbs get-input` to get the JSON, and then fill out the response part and run `cfbs set-input`.
+* `cfbs generate-release-information`: An internal command used to generate JSON release information files from the [official CFEngine masterfiles](https://github.com/cfengine/masterfiles/).
 * `cfbs validate`: Used to validate the [index JSON file](https://github.com/cfengine/build-index/blob/master/cfbs.json).
   May be expanded to validate other files and formats in the future.
   **Note:** If you use `cfbs validate` as part of your automation, scripts, and build systems, be aware that we might add more strict validation rules in the future, so be prepared to sometimes have it fail after upgrading the version of cfbs.
diff --git a/cfbs/args.py b/cfbs/args.py
index bfd48960..1e608fae 100644
--- a/cfbs/args.py
+++ b/cfbs/args.py
@@ -100,6 +100,11 @@ def get_arg_parser():
         help="Ignore versions.json. Necessary in case of a custom index or testing changes to the default index.",
         action="store_true",
     )
+    parser.add_argument(
+        "--omit-download",
+        help="Use existing masterfiles instead of downloading in 'cfbs generate-release-information'",
+        action="store_true",
+    )
     parser.add_argument(
         "--masterfiles", help="Add masterfiles on cfbs init choose between"
     )
diff --git a/cfbs/cfbs.1 b/cfbs/cfbs.1
index 049ef06e..1a0bca30 100644
--- a/cfbs/cfbs.1
+++ b/cfbs/cfbs.1
@@ -1,4 +1,4 @@
-.TH CFBS "1" "2024\-06\-07" "cfbs" "CFEngine Build System manual"
+.TH CFBS "1" "2024\-11\-22" "cfbs" "CFEngine Build System manual"
 .SH NAME
 cfbs \- combines multiple modules into 1 policy set to deploy on your infrastructure. Modules can be custom promise types, JSON files which enable certain functionality, or reusable CFEngine policy. The modules you use can be written by the CFEngine team, others in the community, your colleagues, or yourself.
 .SH SYNOPSIS
@@ -9,7 +9,7 @@ CFEngine Build System.

 .TP
 \fBcmd\fR
-The command to perform (pretty, init, status, search, add, remove, clean, update, validate, download, build, install, help, info, show, input, set\-input, get\-input)
+The command to perform (pretty, init, status, search, add, remove, clean, update, validate, download, build, install, help, info, show, input, set\-input, get\-input, generate\-release\-information)

 .TP
 \fBargs\fR
@@ -72,6 +72,10 @@ Specify git commit message
 \fB\-\-ignore\-versions\-json\fR
 Ignore versions.json. Necessary in case of a custom index or testing changes to the default index.

+.TP
+\fB\-\-omit\-download\fR
+Use existing masterfiles instead of downloading in 'cfbs generate-release-information'
+
 .TP
 \fB\-\-masterfiles\fR \fI\,MASTERFILES\/\fR
 Add masterfiles on cfbs init choose between
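For orientation, the three generated release information files share this overall shape (a sketch based on `analyze.py` later in this diff; the version, path, and digests below are placeholders, not real release data):

```python
# Illustrative shape only -- the real files are written by `cfbs generate-release-information`.
versions_json = {
    "versions": {"3.24.0": {"files": {"masterfiles/promises.cf": "<sha256>"}}}
}
checksums_json = {
    "checksums": {"<sha256>": [{"file": "masterfiles/promises.cf", "version": "3.24.0"}]}
}
files_json = {
    "files": {"masterfiles/promises.cf": [{"checksum": "<sha256>", "version": "3.24.0"}]}
}
```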
diff --git a/cfbs/commands.py b/cfbs/commands.py
index 11d8d461..324d793d 100644
--- a/cfbs/commands.py
+++ b/cfbs/commands.py
@@ -65,6 +65,7 @@
 from cfbs.git_magic import Result, commit_after_command, git_commit_maybe_prompt
 from cfbs.prompts import YES_NO_CHOICES, prompt_user
 from cfbs.module import Module, is_module_added_manually
+from cfbs.masterfiles.generate_release_information import generate_release_information


 class InputDataUpdateFailed(Exception):
@@ -1204,3 +1205,8 @@ def get_input_command(name, outfile):
         log.error("Failed to write json: %s" % e)
         return 1
     return 0
+
+
+@cfbs_command("generate-release-information")
+def generate_release_information_command(omit_download=False):
+    generate_release_information(omit_download)
diff --git a/cfbs/main.py b/cfbs/main.py
index ca5d1e3d..83bd1018 100644
--- a/cfbs/main.py
+++ b/cfbs/main.py
@@ -58,6 +58,12 @@ def main() -> int:
             % args.command
         )

+    if args.omit_download and args.command != "generate-release-information":
+        user_error(
+            "The option --omit-download is only for 'cfbs generate-release-information', not 'cfbs %s'"
+            % args.command
+        )
+
     if args.non_interactive and args.command not in (
         "init",
         "add",
@@ -91,6 +97,11 @@ def main() -> int:
     if args.command in ("info", "show"):
         return commands.info_command(args.args)

+    if args.command == "generate-release-information":
+        return commands.generate_release_information_command(
+            omit_download=args.omit_download
+        )
+
     if not is_cfbs_repo():
         user_error("This is not a cfbs repo, to get started, type: cfbs init")
diff --git a/cfbs/masterfiles/__init__.py b/cfbs/masterfiles/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/cfbs/masterfiles/analyze.py b/cfbs/masterfiles/analyze.py
new file mode 100644
index 00000000..86867d98
--- /dev/null
+++ b/cfbs/masterfiles/analyze.py
@@ -0,0 +1,127 @@
+from collections import OrderedDict
+import os
+
+from cfbs.utils import dict_sorted_by_key, file_sha256
+
+
+def initialize_vcf():
+    versions_dict = {"versions": {}}
+    checksums_dict = {"checksums": {}}
+    files_dict = {"files": {}}
+
+    return versions_dict, checksums_dict, files_dict
+
+
+def versions_checksums_files(
+    files_dir_path, version, versions_dict, checksums_dict, files_dict
+):
+    for root, _, files in os.walk(files_dir_path):
+        for name in files:
+            full_relpath = os.path.join(root, name)
+            tarball_relpath = os.path.relpath(full_relpath, files_dir_path)
+            file_checksum = file_sha256(full_relpath)
+
+            if version not in versions_dict["versions"]:
+                versions_dict["versions"][version] = {}
+            if "files" not in versions_dict["versions"][version]:
+                versions_dict["versions"][version]["files"] = {}
+            versions_dict["versions"][version]["files"][tarball_relpath] = file_checksum
+
+            if file_checksum not in checksums_dict["checksums"]:
+                checksums_dict["checksums"][file_checksum] = []
+            checksums_dict["checksums"][file_checksum].append(
+                {
+                    "file": tarball_relpath,
+                    "version": version,
+                }
+            )
+
+            if tarball_relpath not in files_dict["files"]:
+                files_dict["files"][tarball_relpath] = []
+            files_dict["files"][tarball_relpath].append(
+                {
+                    "checksum": file_checksum,
+                    "version": version,
+                }
+            )
+
+    return versions_dict, checksums_dict, files_dict
+
+
+def finalize_vcf(versions_dict, checksums_dict, files_dict):
+    # explicitly sort VCF data to ensure determinism
+
+    # checksums.json:
+    working_dict = checksums_dict["checksums"]
+    # sort each list, first by version descending, then by filepath alphabetically
+    for k in working_dict.keys():
+        working_dict[k] = sorted(
+            working_dict[k],
+            key=lambda d: (
+                version_as_comparable_list_negated(d["version"]),
+                d["file"],
+            ),
+        )
+    # sort checksums
+    checksums_dict["checksums"] = dict_sorted_by_key(working_dict)
+
+    # files.json:
+    working_dict = files_dict["files"]
+    # sort each list, first by version descending, then by checksum
+    for k in working_dict.keys():
+        working_dict[k] = sorted(
+            working_dict[k],
+            key=lambda d: (
+                version_as_comparable_list_negated(d["version"]),
+                d["checksum"],
+            ),
+        )
+    # sort files, alphabetically
+    files_dict["files"] = dict_sorted_by_key(working_dict)
+
+    # versions.json:
+    working_dict = versions_dict["versions"]
+    # sort files of each version
+    for k in working_dict.keys():
+        working_dict[k]["files"] = dict_sorted_by_key(working_dict[k]["files"])
+    # sort version numbers, in decreasing order
+    versions_dict["versions"] = OrderedDict(
+        sorted(
+            versions_dict["versions"].items(),
+            key=lambda p: (version_as_comparable_list(p[0]), p[1]),
+            reverse=True,
+        )
+    )
+
+    return versions_dict, checksums_dict, files_dict
+
+
+def version_as_comparable_list(version: str):
+    """Also supports versions containing exactly one of `b` or `-`.
+
+    Example of the version ordering: `3.24.0b1 < 3.24.0 < 3.24.0-1`.
+
+    Examples:
+    * `version_as_comparable_list("3.24.0b1")` is `[[3, 24, 0], [-1, 1]]`
+    * `version_as_comparable_list("3.24.0-2")` is `[[3, 24, 0], [1, 2]]`
+    * `version_as_comparable_list("3.24.x")` is `[[3, 24, 99999], [0, 0]]`"""
+    if "b" not in version:
+        if "-" not in version:
+            version += "|0.0"
+    version = version.replace("x", "99999").replace("-", "|1.").replace("b", "|-1.")
+    versionpair = version.split("|")
+    versionlist = [versionpair[0].split("."), versionpair[1].split(".")]
+
+    versionlist[0] = [int(s) for s in versionlist[0]]
+    versionlist[1] = [int(s) for s in versionlist[1]]
+
+    return versionlist
+
+
+def version_as_comparable_list_negated(version):
+    vcl = version_as_comparable_list(version)
+
+    vcl[0] = [-x for x in vcl[0]]
+    vcl[1] = [-x for x in vcl[1]]
+
+    return vcl
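A small illustration of the ordering `version_as_comparable_list` produces when used as a sort key (the version strings are just examples; the result follows the ordering described in its docstring):

```python
from cfbs.masterfiles.analyze import version_as_comparable_list

# Pre-releases ("b") sort before the final release, package revisions ("-") after it:
versions = ["3.24.0-1", "3.18.3", "3.24.0", "3.24.0b1"]
print(sorted(versions, key=version_as_comparable_list))
# ['3.18.3', '3.24.0b1', '3.24.0', '3.24.0-1']
```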
diff --git a/cfbs/masterfiles/check_download_matches_git.py b/cfbs/masterfiles/check_download_matches_git.py
new file mode 100644
index 00000000..a90cc87c
--- /dev/null
+++ b/cfbs/masterfiles/check_download_matches_git.py
@@ -0,0 +1,46 @@
+import os
+
+from cfbs.utils import dict_diff, read_json, user_error
+
+
+def check_download_matches_git(versions):
+    """Check that the downloadable files match the git files.
+
+    This can be used to monitor / detect if something has been changed, accidentally or maliciously.
+
+    Generates a `differences/difference-*.txt` file for each version.
+    """
+
+    download_versions_dict = read_json("versions.json")
+    git_versions_dict = read_json("versions-git.json")
+
+    os.makedirs("differences", exist_ok=True)
+
+    for version in versions:
+        download_version_dict = download_versions_dict["versions"][version]["files"]
+        git_version_dict = git_versions_dict["versions"][version]["files"]
+
+        # normalize downloaded version dictionary filepaths
+        # necessary because the downloaded version and git version dictionaries have filepaths of different forms
+        new_download_dict = {}
+        for key, value in download_version_dict.items():
+            if key.startswith("masterfiles/"):
+                key = key[12:]
+            new_download_dict[key] = value
+        download_version_dict = new_download_dict
+
+        with open("differences/difference-" + version + ".txt", "w") as f:
+            only_dl, only_git, value_diff = dict_diff(
+                download_version_dict, git_version_dict
+            )
+
+            print("Files only in the downloaded version:", only_dl, file=f)
+            print("Files only in the git version:", only_git, file=f)
+            print("Files with different contents:", value_diff, file=f)
+
+        if len(only_dl) > 0 or len(value_diff) > 0:
+            user_error(
+                "Downloadable files of version "
+                + version
+                + " do not match git files"
+            )
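For a version where the two sides agree, the generated report (e.g. `differences/difference-3.24.0.txt`; the version number here is illustrative) ends up looking like this, with the lists populated only when the downloaded and git files diverge:

```
Files only in the downloaded version: []
Files only in the git version: []
Files with different contents: []
```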
diff --git a/cfbs/masterfiles/download_all_versions.py b/cfbs/masterfiles/download_all_versions.py
new file mode 100644
index 00000000..fad92cff
--- /dev/null
+++ b/cfbs/masterfiles/download_all_versions.py
@@ -0,0 +1,115 @@
+import os
+import shutil
+
+from cfbs.utils import FetchError, fetch_url, get_json, mkdir, user_error
+
+ENTERPRISE_RELEASES_URL = "https://cfengine.com/release-data/enterprise/releases.json"
+
+
+def get_download_urls_enterprise():
+    download_urls = {}
+    reported_checksums = {}
+
+    print("* gathering download URLs...")
+
+    data = get_json(ENTERPRISE_RELEASES_URL)
+
+    for release_data in data["releases"]:
+        version = release_data["version"]
+
+        if version == "3.10.0":
+            # for 3.10.0, for some reason, the "Masterfiles ready-to-install tarball" is a .tar.gz tarball, rather than a .pkg.tar.gz tarball
+            # download the .pkg.tar.gz tarball from an unlisted analogous URL instead
+            download_url = "https://cfengine-package-repos.s3.amazonaws.com/tarballs/cfengine-masterfiles-3.10.0.pkg.tar.gz"
+            digest = "7b5e237529e11ce4ae295922dad1a681f13b95f3a7d247d39d3f5088f1a1d7d3"
+            download_urls[version] = download_url
+            reported_checksums[version] = digest
+            continue
+        if version == "3.9.2":
+            # for 3.9.2, no masterfiles are listed, but an unlisted analogous URL exists
+            download_url = "https://cfengine-package-repos.s3.amazonaws.com/tarballs/cfengine-masterfiles-3.9.2.pkg.tar.gz"
+            digest = "ae1a758530d4a4aad5b6812b61fc37ad1b5900b755f88a1ab98da7fd05a9f5cc"
+            download_urls[version] = download_url
+            reported_checksums[version] = digest
+            continue
+
+        release_url = release_data["URL"]
+        subdata = get_json(release_url)
+        artifacts_data = subdata["artifacts"]
+
+        if "Additional Assets" not in artifacts_data:
+            # happens for 3.9.0b1, 3.8.0b1, 3.6.1, 3.6.0
+            continue
+
+        assets_data = artifacts_data["Additional Assets"]
+        masterfiles_data = None
+
+        for asset in assets_data:
+            if asset["Title"] == "Masterfiles ready-to-install tarball":
+                masterfiles_data = asset
+
+        if masterfiles_data is None:
+            # happens for 3.9.2, 3.9.0, 3.8.2, 3.8.1, 3.8.0, 3.7.4--3.6.2
+            # 3.9.2: see above
+            # 3.9.0 and below: no masterfiles listed, and unlisted analogous URLs seemingly do not exist
+            continue
+
+        download_urls[version] = masterfiles_data["URL"]
+        reported_checksums[version] = masterfiles_data["SHA256"]
+
+    return download_urls, reported_checksums
+
+
+def download_versions_from_urls(download_path, download_urls, reported_checksums):
+    downloaded_versions = []
+
+    mkdir(download_path)
+
+    for version, url in download_urls.items():
+        # ignore master and .x versions
+        if url.startswith("http://buildcache"):
+            continue
+
+        print("* downloading from", url)
+        downloaded_versions.append(version)
+
+        version_path = os.path.join(download_path, version)
+        mkdir(version_path)
+
+        # download a version, and verify the reported checksum matches
+        filename = url.split("/")[-1]
+        tarball_path = os.path.join(version_path, filename)
+        checksum = reported_checksums[version]
+        try:
+            fetch_url(url, tarball_path, checksum)
+        except FetchError as e:
+            user_error("For version " + version + ": " + str(e))
+
+        tarball_dir_path = os.path.join(version_path, "tarball")
+        shutil.unpack_archive(tarball_path, tarball_dir_path)
+
+    return downloaded_versions
+
+
+def download_all_versions(download_path):
+    download_urls, reported_checksums = get_download_urls_enterprise()
+
+    # add masterfiles versions which do not appear in Enterprise releases but appear in Community releases
+    # 3.12.0b1
+    version = "3.12.0b1"
+    download_url = "https://cfengine-package-repos.s3.amazonaws.com/community_binaries/Community-3.12.0b1/misc/cfengine-masterfiles-3.12.0b1.pkg.tar.gz"
+    digest = "ede305dae7be3edfac04fc5b7f63b46adb3a5b1612f4755e855ee8e6b8d344d7"
+    download_urls[version] = download_url
+    reported_checksums[version] = digest
+    # 3.10.0b1
+    version = "3.10.0b1"
+    download_url = "https://cfengine-package-repos.s3.amazonaws.com/tarballs/cfengine-masterfiles-3.10.0b1.pkg.tar.gz"
+    digest = "09291617254705d79dea2531b23dbd0754f09029e90ce0b43b275aa02c1223a3"
+    download_urls[version] = download_url
+    reported_checksums[version] = digest
+
+    downloaded_versions = download_versions_from_urls(
+        download_path, download_urls, reported_checksums
+    )
+
+    return downloaded_versions
diff --git a/cfbs/masterfiles/generate_release_information.py b/cfbs/masterfiles/generate_release_information.py
new file mode 100644
index 00000000..d8eff440
--- /dev/null
+++ b/cfbs/masterfiles/generate_release_information.py
@@ -0,0 +1,31 @@
+from cfbs.masterfiles.download_all_versions import download_all_versions
+from cfbs.masterfiles.generate_vcf_download import generate_vcf_download
+from cfbs.masterfiles.generate_vcf_git_checkout import generate_vcf_git_checkout
+from cfbs.masterfiles.check_download_matches_git import check_download_matches_git
+from cfbs.utils import immediate_subdirectories
+
+DOWNLOAD_PATH = "downloaded_masterfiles"
+
+
+def generate_release_information(omit_download=False):
+    if not omit_download:
+        print("Downloading masterfiles...")
+
+        downloaded_versions = download_all_versions(DOWNLOAD_PATH)
+
+        print("Download finished. Every reported checksum matches.")
+    else:
+        downloaded_versions = immediate_subdirectories(DOWNLOAD_PATH)
+
+    print("Generating release information...")
+
+    generate_vcf_download(DOWNLOAD_PATH, downloaded_versions)
+    generate_vcf_git_checkout(downloaded_versions)
+
+    print("Candidate release information generated.")
+    print("Checking that downloadable files match git files...")
+
+    check_download_matches_git(downloaded_versions)
+
+    print("Downloadable files match git files.")
+    print("Release information generation successfully finished.")
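Together with the `commands.py` and `main.py` changes above, the new subcommand is a thin wrapper around this function. A minimal sketch of driving it directly from Python (run from a scratch directory, since all output files are written to the current working directory):

```python
from cfbs.masterfiles.generate_release_information import generate_release_information

# Full run: download every published masterfiles tarball, then analyze it.
generate_release_information()

# Re-analysis only (cfbs generate-release-information --omit-download):
# reuse an existing downloaded_masterfiles/ directory instead of downloading.
generate_release_information(omit_download=True)
```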
diff --git a/cfbs/masterfiles/generate_vcf_download.py b/cfbs/masterfiles/generate_vcf_download.py
new file mode 100644
index 00000000..0e758369
--- /dev/null
+++ b/cfbs/masterfiles/generate_vcf_download.py
@@ -0,0 +1,31 @@
+import os
+
+from cfbs.utils import write_json
+from cfbs.masterfiles.analyze import (
+    finalize_vcf,
+    initialize_vcf,
+    versions_checksums_files,
+)
+
+
+def generate_vcf_download(dir_path, downloaded_versions):
+    """`dir_path`: the path of the directory containing masterfiles version subdirectories, in the form `dir_path/x.y.z/tarball/`
+
+    The `tarball` folder should contain the `masterfiles` folder (older tarballs also have a `modules` folder alongside the `masterfiles` folder).
+    """
+    versions_dict, checksums_dict, files_dict = initialize_vcf()
+
+    for version in downloaded_versions:
+        files_dir_path = os.path.join(dir_path, version, "tarball")
+
+        versions_dict, checksums_dict, files_dict = versions_checksums_files(
+            files_dir_path, version, versions_dict, checksums_dict, files_dict
+        )
+
+    versions_dict, checksums_dict, files_dict = finalize_vcf(
+        versions_dict, checksums_dict, files_dict
+    )
+
+    write_json("versions.json", versions_dict)
+    write_json("checksums.json", checksums_dict)
+    write_json("files.json", files_dict)
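The layout `generate_vcf_download` expects is the one `download_all_versions` leaves behind; roughly (version and file names are illustrative):

```
downloaded_masterfiles/
└── 3.24.0/
    ├── cfengine-masterfiles-3.24.0.pkg.tar.gz
    └── tarball/
        └── masterfiles/
            ├── promises.cf
            └── ...
```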
+"""The path of the working directory.""" + +MPF_URL = "https://github.com/cfengine/masterfiles" +MPF_PATH = os.path.join(DIR_PATH, "masterfiles") + + +def check_required_command(command): + if not shutil.which(command): + print("`%s` was not found" % command) + sys.exit(1) + + +def check_required_commands(commands): + for c in commands: + check_required_command(c) + + +def generate_vcf_git_checkout(checkout_tags): + required_commands = ["git", "make", "automake", "autoconf"] + check_required_commands(required_commands) + + # get the current version of the MPF repo + if not os.path.isdir(MPF_PATH): + subprocess.run( + ["git", "clone", "--no-checkout", MPF_URL], + cwd=DIR_PATH, + check=True, + ) + else: + subprocess.run( + ["git", "fetch", "--all"], + cwd=MPF_PATH, + check=True, + ) + + versions_dict, checksums_dict, files_dict = initialize_vcf() + + for tag in checkout_tags: + print("Checking out tag", tag) + + # checking out some tags equal to the downloaded version doesn't result in the same files + # the downloadable files are reproducible by checking out specific tags + if tag == "3.18.0": + checkout_tag = "3.18.0-2" + elif tag == "3.15.4": + checkout_tag = "3.15.4-2-build2" + elif tag == "3.12.3": + checkout_tag = "3.12.3-build7" + elif tag == "3.7.7": + checkout_tag = "3.7.7-build1" + else: + checkout_tag = tag + + # check out the version + subprocess.run( + ["git", "checkout", checkout_tag], + cwd=MPF_PATH, + check=True, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) + + # build masterfiles from git as they are in the tarball packages + # for the files of this version to be reproducible, the `EXPLICIT_RELEASE` environment variable needs to be set to what it was when the downloadable files were built + if tag == "3.18.3": + release_number = "2" + else: + release_number = "1" + subprocess.run( + ["./autogen.sh"], + cwd=MPF_PATH, + check=True, + env=dict( + os.environ.copy(), EXPLICIT_VERSION=tag, EXPLICIT_RELEASE=release_number + ), + ) + # older masterfiles version READMEs instruct to use `make install` and newer `make` - always use `make` instead + subprocess.run(["make"], cwd=MPF_PATH, check=True) + + # compute VCF data for all the files + versions_dict, checksums_dict, files_dict = versions_checksums_files( + MPF_PATH, tag, versions_dict, checksums_dict, files_dict + ) + + # clean the files to prevent spillage to other versions + subprocess.run( + ["git", "clean", "-dfx"], + cwd=MPF_PATH, + check=True, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) + + versions_dict, checksums_dict, files_dict = finalize_vcf( + versions_dict, checksums_dict, files_dict + ) + + write_json("versions-git.json", versions_dict) + write_json("checksums-git.json", checksums_dict) + write_json("files-git.json", files_dict) diff --git a/cfbs/utils.py b/cfbs/utils.py index 74b329c0..a143c0da 100644 --- a/cfbs/utils.py +++ b/cfbs/utils.py @@ -236,6 +236,33 @@ def deduplicate_list(l): return list(OrderedDict.fromkeys(l)) +def dict_sorted_by_key(the_dict): + sorted_dict = OrderedDict(sorted(the_dict.items())) + + return sorted_dict + + +def dict_diff(A, B): + """Returns three sorted lists: + * first: list of keys only in `A` + * second: list of keys only in `B` + * third: list of tuples `(k, A[k], B[k])` for keys `k` in both with differing values + """ + keys_A = set(A.keys()) + keys_B = set(B.keys()) + keys_in_both = keys_A & keys_B + keys_only_A = keys_A - keys_in_both + keys_only_B = keys_B - keys_in_both + + values_different = set((k, A[k], B[k]) for k in keys_in_both if A[k] 
diff --git a/cfbs/utils.py b/cfbs/utils.py
index 74b329c0..a143c0da 100644
--- a/cfbs/utils.py
+++ b/cfbs/utils.py
@@ -236,6 +236,33 @@ def deduplicate_list(l):
     return list(OrderedDict.fromkeys(l))


+def dict_sorted_by_key(the_dict):
+    sorted_dict = OrderedDict(sorted(the_dict.items()))
+
+    return sorted_dict
+
+
+def dict_diff(A, B):
+    """Returns three sorted lists:
+    * first: list of keys only in `A`
+    * second: list of keys only in `B`
+    * third: list of tuples `(k, A[k], B[k])` for keys `k` in both with differing values
+    """
+    keys_A = set(A.keys())
+    keys_B = set(B.keys())
+    keys_in_both = keys_A & keys_B
+    keys_only_A = keys_A - keys_in_both
+    keys_only_B = keys_B - keys_in_both
+
+    values_different = set((k, A[k], B[k]) for k in keys_in_both if A[k] != B[k])
+
+    keys_only_A = sorted(keys_only_A)
+    keys_only_B = sorted(keys_only_B)
+    values_different = sorted(values_different)
+
+    return keys_only_A, keys_only_B, values_different
+
+
 def cfbs_filename() -> str:
     return "cfbs.json"

@@ -244,6 +271,14 @@ def is_cfbs_repo() -> bool:
     return os.path.isfile(cfbs_filename())


+def immediate_subdirectories(path):
+    return [f.name for f in os.scandir(path) if f.is_dir()]
+
+
+def immediate_files(path):
+    return [f.name for f in os.scandir(path) if not f.is_dir()]
+
+
 def path_append(dir, subdir):
     dir = os.path.abspath(os.path.expanduser(dir))
     return dir if not subdir else os.path.join(dir, subdir)
@@ -278,6 +313,19 @@ def cfbs_dir(append=None) -> str:
     return os.path.join(directory, append)


+def string_sha256(input):
+    return hashlib.sha256(input.encode("utf-8")).hexdigest()
+
+
+def file_sha256(file):
+    h = hashlib.sha256()
+
+    with open(file, "rb") as f:
+        h.update(f.read())
+
+    return h.hexdigest()
+
+
 class FetchError(Exception):
     pass
diff --git a/tests/test_utils.py b/tests/test_utils.py
index d883e858..edba41a6 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -1,10 +1,12 @@
-from cfbs.utils import canonify, deduplicate_def_json, merge_json, loads_bundlenames
-
-
-def test_canonify():
-    assert canonify("Hello CFEngine!") == "Hello_CFEngine_"
-    assert canonify("/etc/os-release") == "_etc_os_release"
-    assert canonify("my-example-module") == "my_example_module"
+from cfbs.utils import (
+    canonify,
+    deduplicate_def_json,
+    dict_diff,
+    file_sha256,
+    merge_json,
+    loads_bundlenames,
+    string_sha256,
+)


 def test_merge_json():
@@ -140,6 +142,33 @@ def test_deduplicate_def_json():
     assert deduplicated == expected


+def test_dict_diff():
+    A = {"A": "a", "B": "b", "C": "c"}
+    B = {"A": "a", "B": "c", "D": "d"}
+
+    assert dict_diff(A, B) == (["C"], ["D"], [("B", "b", "c")])
+
+
+def test_string_sha256():
+    s = "cfbs/masterfiles/"
+    checksum = "9e63d3266f80328fb6547b3462e81ab55b13f689d6b0944e242e2b3a0f3a32a3"
+
+    assert string_sha256(s) == checksum
+
+
+def test_file_sha256():
+    file_path = "tests/sample/foo/main.cf"
+    checksum = "da90bdfe7b5ee30e4d7871496e8434603315fb1b267660e2d49aee8ef47b246d"
+
+    assert file_sha256(file_path) == checksum
+
+
+def test_canonify():
+    assert canonify("Hello CFEngine!") == "Hello_CFEngine_"
+    assert canonify("/etc/os-release") == "_etc_os_release"
+    assert canonify("my-example-module") == "my_example_module"
+
+
 def test_loads_bundlenames_single_bundle():
     policy = """bundle agent bogus {