aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorMike Crute <mike@crute.us>2020-05-26 18:09:37 -0700
committerMike Crute <mike@crute.us>2020-05-28 17:55:36 -0700
commit7e60c7fb6a8b1a0e4508c4995fd5e4c780a70f68 (patch)
tree1f1525c0e2d512bbd2b482e5fc2c4b8632b0b2fc
parent1241cb5aaf519ac3d9187383e6784210afb1c9a9 (diff)
downloadalpine-ec2-ami-7e60c7fb6a8b1a0e4508c4995fd5e4c780a70f68.tar.bz2
alpine-ec2-ami-7e60c7fb6a8b1a0e4508c4995fd5e4c780a70f68.tar.xz
alpine-ec2-ami-7e60c7fb6a8b1a0e4508c4995fd5e4c780a70f68.zip
Combine all commands into a meta-command
There is an increasing need to share components among the various commands, especially with the introduction of the identity broker API. Rather than trying to assemble an importable python library of code for the build process I think we can just combine everything into one file and use argparse sub-commands to integrate them into a set of individually callable scripts. This change does that integration.
-rw-r--r--Makefile4
-rw-r--r--scripts/builder.py607
-rw-r--r--scripts/gen-release-readme.py.in131
-rw-r--r--scripts/make-amis.py.in68
-rw-r--r--scripts/prune-amis.py.in168
-rw-r--r--scripts/resolve-profile.py.in133
-rw-r--r--scripts/update-release.py.in80
7 files changed, 609 insertions, 582 deletions
diff --git a/Makefile b/Makefile
index 3aa7464..e6f879c 100644
--- a/Makefile
+++ b/Makefile
@@ -25,7 +25,7 @@ __check_defined = \
25 25
26.PHONY: amis prune release-readme clean 26.PHONY: amis prune release-readme clean
27 27
28amis: build/packer.json build/profile/$(PROFILE) build build/setup-ami $(NVME_SCRIPTS) 28amis: build/packer.json build/profile/$(PROFILE) build
29 @:$(call check_defined, PROFILE, target profile name) 29 @:$(call check_defined, PROFILE, target profile name)
30 build/builder make-amis $(PROFILE) $(BUILDS) 30 build/builder make-amis $(PROFILE) $(BUILDS)
31 31
@@ -54,7 +54,7 @@ build/packer.json: packer.conf build
54.PHONY: build/profile/$(PROFILE) 54.PHONY: build/profile/$(PROFILE)
55build/profile/$(PROFILE): build $(CORE_PROFILES) $(TARGET_PROFILES) 55build/profile/$(PROFILE): build $(CORE_PROFILES) $(TARGET_PROFILES)
56 @:$(call check_defined, PROFILE, target profile name) 56 @:$(call check_defined, PROFILE, target profile name)
57 build/builder resolve-profile $(PROFILE) 57 build/builder resolve-profiles $(PROFILE)
58 58
59clean: 59clean:
60 rm -rf build 60 rm -rf build
diff --git a/scripts/builder.py b/scripts/builder.py
new file mode 100644
index 0000000..f4e1472
--- /dev/null
+++ b/scripts/builder.py
@@ -0,0 +1,607 @@
1import io
2import os
3import re
4import sys
5import glob
6import json
7import time
8import shutil
9import logging
10import argparse
11import textwrap
12import subprocess
13import urllib.error
14
15from collections import defaultdict
16from datetime import datetime, timedelta
17from distutils.version import StrictVersion
18from urllib.request import Request, urlopen
19
20import yaml
21import boto3
22import pyhocon
23
24
class IdentityBrokerClient:
    """Client for the identity broker API used to obtain per-region AWS
    credentials.

    The broker endpoint and API key may be overridden via the
    IDENTITY_BROKER_ENDPOINT / IDENTITY_BROKER_API_KEY environment
    variables.
    """

    _DEFAULT_ENDPOINT = "https://aws-access.crute.us/api/account"
    _DEFAULT_ACCOUNT = "alpine-amis-user"

    def __init__(self, endpoint=None, key=None, account=None):
        self.endpoint = endpoint or self._DEFAULT_ENDPOINT
        self.account = account or self._DEFAULT_ACCOUNT
        self.key = key
        self._logger = logging.getLogger(__class__.__name__)

        # environment override takes precedence over the constructor arg
        if override_endpoint := os.environ.get("IDENTITY_BROKER_ENDPOINT"):
            self.endpoint = override_endpoint

        if not self.key:
            self.key = os.environ.get("IDENTITY_BROKER_API_KEY")

        if not self.key:
            raise Exception("No identity broker key found")

    def _get(self, path):
        """GET *path* from the broker, retrying on rate limits.

        Returns the parsed JSON body. Raises on auth expiry or any other
        HTTP error.
        """
        while True:  # to handle rate limits
            try:
                res = urlopen(Request(path, headers={"X-API-Key": self.key}))
            except urllib.error.HTTPError as ex:
                if ex.headers.get("Location") == "/logout":
                    raise Exception("Identity broker token is expired")

                # urlopen raises HTTPError for every non-2xx status, so the
                # rate-limit response must be handled here, on the
                # exception, not on a (never-assigned) response object.
                if ex.code == 429:
                    self._logger.warning(
                        "Rate-limited by identity broker, sleeping 30 seconds")
                    time.sleep(30)
                    continue

                raise Exception(ex.reason)

            # defensive check; urlopen only returns on success
            if res.status not in {200, 429}:
                raise Exception(res.reason)

            return json.load(res)

    def get_credentials_url(self):
        """Return the credentials URL for the configured account."""
        for account in self._get(self.endpoint):
            if account["short_name"] == self.account:
                return account["credentials_url"]

        raise Exception("No account found")

    def get_regions(self):
        """Return {region_name: credentials_url} for all enabled regions."""
        out = {}

        for region in self._get(self.get_credentials_url()):
            if region["enabled"]:
                out[region["name"]] = region["credentials_url"]

        return out

    def get_credentials(self, region):
        """Return temporary credentials for *region*."""
        return self._get(self.get_regions()[region])

    def _boto3_session_from_creds(self, creds, region):
        return boto3.session.Session(
            aws_access_key_id=creds["access_key"],
            aws_secret_access_key=creds["secret_key"],
            aws_session_token=creds["session_token"],
            region_name=region)

    def boto3_session_for_region(self, region):
        """Return a boto3 session authenticated for *region*."""
        return self._boto3_session_from_creds(
            self.get_credentials(region), region)

    def iter_regions(self):
        """Yield an authenticated boto3 session for every enabled region."""
        for region, cred_url in self.get_regions().items():
            yield self._boto3_session_from_creds(self._get(cred_url), region)
98
class ReleaseReadmeUpdater:
    """Regenerates the "## AMIs" section of releases/README.md from a
    profile's release YAML data.
    """

    SECTION_TPL = textwrap.dedent("""
        ### Alpine Linux {release} ({date})
        <details><summary><i>click to show/hide</i></summary><p>

        {rows}

        </p></details>
        """)

    AMI_TPL = (
        " [{id}](https://{r}.console.aws.amazon.com/ec2/home"
        "#Images:visibility=public-images;imageId={id}) "
        "([launch](https://{r}.console.aws.amazon.com/ec2/home"
        "#launchAmi={id})) |"
    )

    def __init__(self, repo_root, profile, archs=None):
        self.repo_root = repo_root
        self.profile = profile
        self.archs = archs or ["x86_64", "aarch64"]

    @staticmethod
    def extract_ver(x):
        """Sort key for release versions.

        "edge" sorts below every numbered release; numbered releases
        compare component-wise as integers ((3, 10) > (3, 9)).
        distutils.version.StrictVersion was removed in Python 3.12
        (PEP 632), so a plain tuple key is used instead.
        """
        release = "0.0" if x["release"] == "edge" else x["release"]
        return tuple(int(part) for part in release.split("."))

    def get_sorted_releases(self, release_data):
        """Collapse per-build release data into one section per version,
        keeping only the most recently built AMI per architecture, sorted
        newest version first.
        """
        sections = defaultdict(lambda: {
            "release": "",
            "built": {},
            "name": {},
            "ami": defaultdict(dict)
        })

        for build, releases in release_data.items():
            for release, amis in releases.items():
                for name, info in amis.items():
                    arch = info["arch"]
                    built = info["build_time"]
                    ver = sections[info["version"]]

                    # newest build per arch wins
                    if arch not in ver["built"] or ver["built"][arch] < built:
                        ver["release"] = release
                        ver["name"][arch] = name
                        ver["built"][arch] = built

                        for region, ami in info["artifacts"].items():
                            ver["ami"][region][arch] = ami

        return sorted(sections.values(), key=self.extract_ver, reverse=True)

    def make_ami_list(self, sorted_releases):
        """Render the markdown "## AMIs" section from sorted release data."""
        ami_list = "## AMIs\n"

        for info in sorted_releases:
            rows = ["| Region |", "| ------ |"]

            for arch in self.archs:
                if arch in info["name"]:
                    rows[0] += f" {info['name'][arch]} |"
                    rows[1] += " --- |"

            for region, amis in info["ami"].items():
                row = f"| {region} |"
                for arch in self.archs:
                    if arch in amis:
                        row += self.AMI_TPL.format(r=region, id=amis[arch])
                rows.append(row)

            ami_list += self.SECTION_TPL.format(
                release=info["release"].capitalize(),
                date=datetime.utcfromtimestamp(
                    max(info["built"].values())).date(),
                rows="\n".join(rows))

        return ami_list

    def update_markdown(self):
        """Rewrite releases/README.md, replacing everything from "## AMIs"
        to end-of-file with the freshly generated AMI tables.
        """
        release_dir = os.path.join(self.repo_root, "releases")
        profile_file = os.path.join(release_dir, f"{self.profile}.yaml")

        with open(profile_file, "r") as data:
            sorted_releases = self.get_sorted_releases(yaml.safe_load(data))

        readme_md = os.path.join(release_dir, "README.md")

        with open(readme_md, "r") as file:
            readme = file.read()

        # raw string: \Z is a regex anchor, not a string escape.
        # NOTE(review): the replacement text is assumed to contain no
        # backslashes (re.sub would interpret them).
        with open(readme_md, "w") as file:
            file.write(
                re.sub(r"## AMIs.*\Z", self.make_ami_list(sorted_releases),
                       readme, flags=re.S))
193
194
class GenReleaseReadme:
    """Update release README
    """

    # sub-command name under which this command is registered
    command_name = "gen-release-readme"

    @staticmethod
    def add_args(parser):
        # the only CLI argument is the profile whose README section to rebuild
        parser.add_argument("profile", help="name of profile to update")

    def run(self, args, root):
        # delegate all of the work to the updater
        updater = ReleaseReadmeUpdater(root, args.profile)
        updater.update_markdown()
207
208
class MakeAMIs:
    """Build AMIs from HOCON build profiles with Packer
    """
    # NOTE: the docstring above is shown as this sub-command's CLI help;
    # the previous text was a copy-paste of the resolve-profiles help.

    command_name = "make-amis"

    @staticmethod
    def add_args(parser):
        parser.add_argument("profile", help="name of profile to build")
        parser.add_argument("builds", nargs="*",
            help="name of builds within a profile to build")

    def run(self, args, root):
        """Run Packer for each requested build of the profile, then record
        successful builds via the update-releases sub-command.
        """
        os.chdir(os.path.join(root, "build"))

        # no explicit builds means every build dir under the profile
        builds = args.builds or os.listdir(
            os.path.join("profile", args.profile))

        for build in builds:
            print(f"\n*** Building {args.profile}/{build} ***\n\n")

            build_dir = os.path.join("profile", args.profile, build)
            if not os.path.exists(build_dir):
                print(f"Build dir '{build_dir}' does not exist")
                break

            # keep a copy of packer's output for the error check below
            out = io.StringIO()

            res = subprocess.Popen([
                os.environ.get("PACKER", "packer"),
                "build",
                f"-var-file={build_dir}/vars.json",
                "packer.json"
            ], stdout=subprocess.PIPE, encoding="utf-8")

            # tee packer's output to the console while capturing it.
            # Iterating stdout to EOF (instead of polling + readline)
            # guarantees no buffered trailing output is dropped when the
            # process exits between the poll and the read.
            for text in res.stdout:
                out.write(text)
                print(text, end="")
            res.wait()

            if res.returncode == 0:
                subprocess.run([os.path.join(root, "build", "builder"),
                    "update-releases", args.profile, build])
            else:
                # an AMI that already exists is not fatal; move on
                if "is used by an existing AMI" in out.getvalue():
                    continue
                else:
                    sys.exit(res.returncode)

        print("\n=== DONE ===\n")
259
260
class PruneAMIs:
    """Prune AMIs from AWS
    """

    command_name = "prune-amis"

    @staticmethod
    def add_args(parser):
        LEVEL_HELP = textwrap.dedent("""\
            revision - keep only the latest revision per release
            release - keep only the latest release per version
            version - keep only the versions that aren't end-of-life
            """)

        parser.add_argument(
            "level", choices=["revision", "release", "version"],
            help=LEVEL_HELP)
        parser.add_argument("profile", help="profile to prune")
        parser.add_argument(
            "build", nargs="?", help="build within profile to prune")

    @staticmethod
    def delete_image(ec2, image):
        """Deregister *image* and delete its EBS snapshots via *ec2*."""
        ec2.deregister_image(ImageId=image["ImageId"])

        for blockdev in image["BlockDeviceMappings"]:
            # ephemeral/instance-store mappings have no snapshot
            if "Ebs" not in blockdev:
                continue

            ec2.delete_snapshot(SnapshotId=blockdev["Ebs"]["SnapshotId"])

    def run(self, args, root):
        now = datetime.utcnow()
        release_yaml = os.path.join(root, "releases", f"{args.profile}.yaml")

        with open(release_yaml, "r") as data:
            before = yaml.safe_load(data)

        known = defaultdict(list)    # region -> AMI ids we track
        prune = defaultdict(list)    # region -> AMI ids to remove
        after = defaultdict(lambda: defaultdict(dict))  # surviving data

        # for all builds in the profile...
        for build_name, releases in before.items():
            # this is not the build that was specified
            if args.build is not None and args.build != build_name:
                print(f"< skipping {args.profile}/{build_name}")
                # ensure its release data remains intact
                after[build_name] = before[build_name]
                continue
            else:
                print(f"> PRUNING {args.profile}/{build_name} for {args.level}")

            criteria = {}

            # scan releases for pruning criteria
            for release, amis in releases.items():
                for ami_name, info in amis.items():
                    version = info["version"]
                    built = info["build_time"]

                    if eol := info.get("end_of_life"):
                        eol = datetime.fromisoformat(info["end_of_life"])

                    for region, ami_id in info["artifacts"].items():
                        known[region].append(ami_id)

                    if args.level == "revision":
                        # find build timestamp of most recent revision, per release
                        if release not in criteria or built > criteria[release]:
                            criteria[release] = built
                    elif args.level == "release":
                        # find build timestamp of most recent revision, per version
                        if version not in criteria or built > criteria[version]:
                            criteria[version] = built
                    elif args.level == "version":
                        # find latest EOL date, per version
                        if (
                            version not in criteria or
                            (not criteria[version]) or
                            (eol and eol > criteria[version])
                        ):
                            criteria[version] = eol

            # rescan again to determine what doesn't make the cut
            for release, amis in releases.items():
                for ami_name, info in amis.items():
                    version = info["version"]

                    if eol := info.get("end_of_life"):
                        eol = datetime.fromisoformat(info["end_of_life"])

                    # an AMI either matches exactly one prune condition for
                    # the chosen level, or it survives into `after` -- the
                    # keep branch must cover ALL levels, not just "version",
                    # or surviving AMIs are silently dropped from the YAML.
                    if args.level == "revision" and \
                            info["build_time"] < criteria[release]:
                        doomed = True
                    elif args.level == "release" and \
                            info["build_time"] < criteria[version]:
                        doomed = True
                    elif args.level == "version" and criteria[version] and (
                        (version != "edge" and criteria[version] < now) or
                        (version == "edge" and ((not eol) or (eol < now)))
                    ):
                        doomed = True
                    else:
                        doomed = False

                    if doomed:
                        for region, ami_id in info["artifacts"].items():
                            prune[region].append(ami_id)
                    else:
                        after[build_name][release][ami_name] = info

        for session in IdentityBrokerClient().iter_regions():
            region = session.region_name

            print(f"* scanning: {region} ...")

            ec2 = session.client("ec2")
            for image in ec2.describe_images(Owners=["self"])["Images"]:
                image_name, image_id = image["Name"], image["ImageId"]

                if region in prune and image_id in prune[region]:
                    print(f"REMOVE: {image_name} = {image_id}")
                    # delete_image is a staticmethod taking (ec2, image);
                    # it was previously called without the client.
                    self.delete_image(ec2, image)
                elif region in known and image_id in known[region]:
                    print(f"KEEP: {image_name} = {image_id}")
                else:
                    print(f"UNKNOWN: {image_name} = {image_id}")

        # PyYAML cannot represent defaultdicts; flatten to plain dicts
        after = {
            build: {release: dict(amis) for release, amis in rel.items()}
            for build, rel in after.items()
        }

        # update releases/<profile>.yaml
        with open(release_yaml, "w") as data:
            yaml.dump(after, data, sort_keys=False)
391
392
class ConfigBuilder:
    """Resolves HOCON build profiles into per-build Packer vars.json files."""

    # timestamps are fixed at class-creation time so every build in a run
    # resolves to the same values
    now = datetime.utcnow()
    tomorrow = now + timedelta(days=1)

    @staticmethod
    def unquote(x):
        # strip the double quotes HOCON keys may carry
        return x.strip('"')

    @staticmethod
    def force_iso_date(input):
        return datetime.fromisoformat(input).isoformat(timespec="seconds")

    @classmethod
    def resolve_tomorrow(cls, input):
        # NOTE(review): ignores its input and always yields tomorrow's
        # timestamp -- confirm that is the intended end_of_life behavior
        return cls.tomorrow.isoformat(timespec="seconds")

    @classmethod
    def resolve_now(cls, input):
        return cls.now.strftime("%Y%m%d%H%M%S")

    @classmethod
    def fold_comma(cls, input):
        return ",".join(map(cls.unquote, input.keys()))

    @classmethod
    def fold_space(cls, input):
        return " ".join(map(cls.unquote, input.keys()))

    @classmethod
    def fold_repos(cls, input):
        # a string value is a pin tag: "@<tag> <repo>"; anything else is
        # just the bare repo URL
        lines = []
        for repo, tag in input.items():
            repo = cls.unquote(repo)
            lines.append(f"@{tag} {repo}" if isinstance(tag, str) else repo)
        return "\n".join(lines)

    @staticmethod
    def fold_packages(input):
        # a string value pins the package version: "<pkg>@<ver>"
        words = []
        for pkg, ver in input.items():
            words.append(f"{pkg}@{ver}" if isinstance(ver, str) else pkg)
        return " ".join(words)

    @staticmethod
    def fold_services(input):
        # "<svc>=<level1>,<level2>,..." per service
        return " ".join(
            f"{svc}={','.join(levels.keys())}"
            for svc, levels in input.items())

    def __init__(self, config_path, out_dir):
        self.config_path = config_path
        self.out_dir = out_dir

        # maps config keys to the transform applied to their raw value
        self._keys_to_transform = {
            "ami_access": self.fold_comma,
            "ami_regions": self.fold_comma,
            "kernel_modules": self.fold_comma,
            "kernel_options": self.fold_space,
            "repos": self.fold_repos,
            "pkgs": self.fold_packages,
            "svcs": self.fold_services,
            "revision": self.resolve_now,
            "end_of_life": lambda x: self.force_iso_date(
                self.resolve_tomorrow(x)),
        }

    def build_all(self):
        """Resolve every *.conf profile found under config_path."""
        pattern = os.path.join(self.config_path, "*.conf")
        for conf_file in glob.glob(pattern):
            name = os.path.splitext(os.path.basename(conf_file))[0]
            self.build_profile(name)

    def build_profile(self, profile):
        """Resolve one profile, writing a vars.json per BUILDS entry."""
        conf_file = os.path.join(self.config_path, f"{profile}.conf")
        build_config = pyhocon.ConfigFactory.parse_file(conf_file)

        for build, cfg in build_config["BUILDS"].items():
            build_dir = os.path.join(self.out_dir, profile, build)

            # Always start fresh
            shutil.rmtree(build_dir, ignore_errors=True)
            os.makedirs(build_dir)

            cfg["profile"] = profile
            cfg["profile_build"] = build

            # Order of operations is important here
            for key, value in cfg.items():
                transform = self._keys_to_transform.get(key)
                if transform:
                    cfg[key] = transform(value)

                # late interpolation of {var.*} references
                if isinstance(value, str) and "{var." in value:
                    cfg[key] = value.format(var=cfg)

            with open(os.path.join(build_dir, "vars.json"), "w") as out:
                json.dump(cfg, out, indent=4, separators=(",", ": "))
487
488
class ResolveProfiles:
    """Build Packer JSON variable files from HOCON build profiles
    """

    # sub-command name under which this command is registered
    command_name = "resolve-profiles"

    @staticmethod
    def add_args(parser):
        parser.add_argument(
            "profile", help="name of profile to build", nargs="*")

    def run(self, args, root):
        # resolve from <root>/profiles into <root>/build/profile
        builder = ConfigBuilder(
            os.path.join(root, "profiles"),
            os.path.join(root, "build", "profile"))

        # no profile names means resolve everything
        if not args.profile:
            builder.build_all()
            return

        for name in args.profile:
            builder.build_profile(name)
510
511
class UpdateReleases:
    """Update release YAML
    """

    # sub-command name under which this command is registered
    command_name = "update-releases"

    @staticmethod
    def add_args(parser):
        parser.add_argument("profile", help="name of profile to update")
        parser.add_argument("build", help="name of build to update")

    @staticmethod
    def parse_ids(ids):
        # packer artifact ids look like "region:ami,region:ami,...";
        # alternate tokens become the dict's keys and values
        tokens = re.split(":|,", ids)
        return dict(zip(tokens[::2], tokens[1::2]))

    def run(self, args, root):
        # make sure the releases directory exists before writing into it
        release_dir = os.path.join(root, "releases")
        if not os.path.exists(release_dir):
            os.makedirs(release_dir)

        # load existing release data for this profile, if any
        release_yaml = os.path.join(release_dir, f"{args.profile}.yaml")
        releases = {}
        if os.path.exists(release_yaml):
            with open(release_yaml, "r") as data:
                releases = yaml.safe_load(data)

        # read the packer manifest the build just produced
        manifest_json = os.path.join(
            root, "build", "profile", args.profile, args.build,
            "manifest.json")
        with open(manifest_json, "r") as data:
            manifest = json.load(data)

        build_info = manifest["builds"][0]
        data = build_info["custom_data"]
        release = data["release"]

        # ensure the nested build/release containers exist
        releases.setdefault(args.build, {})
        releases[args.build].setdefault(release, {})

        releases[args.build][release][data["ami_name"]] = {
            "description": data["ami_desc"],
            "profile": args.profile,
            "profile_build": args.build,
            "version": data["version"],
            "release": release,
            "arch": data["arch"],
            "revision": data["revision"],
            "end_of_life": data["end_of_life"],
            "build_time": build_info["build_time"],
            "artifacts": self.parse_ids(build_info["artifact_id"]),
        }

        with open(release_yaml, "w") as data:
            yaml.dump(releases, data, sort_keys=False)
569
570
def find_repo_root():
    """Walk upward from the CWD to the first directory containing .git.

    Raises if the filesystem root is reached without finding one.
    """
    path = os.getcwd()

    while path != "/":
        if ".git" in os.listdir(path):
            return path
        path = os.path.dirname(path)

    raise Exception("No repo found, stopping at /")
581
582
def main():
    # CLI entry point: register every class in this module that declares a
    # ``command_name`` attribute as an argparse sub-command, then dispatch.
    dispatch = {}

    parser = argparse.ArgumentParser()
    subs = parser.add_subparsers(dest="command_name", required=True)

    for command in sys.modules[__name__].__dict__.values():
        # only command classes carry a command_name attribute
        if not hasattr(command, "command_name"):
            continue

        dispatch[command.command_name] = command()

        # the class docstring doubles as the sub-command's help text
        doc = getattr(command, "__doc__", "")
        subparser = subs.add_parser(
            command.command_name, help=doc, description=doc)

        # commands may optionally declare extra CLI arguments
        if add_args := getattr(command, "add_args", None):
            command.add_args(subparser)

    args = parser.parse_args()
    # every command's run() receives the parsed args and the repo root
    dispatch[args.command_name].run(args, find_repo_root())
604
605
606if __name__ == "__main__":
607 main()
diff --git a/scripts/gen-release-readme.py.in b/scripts/gen-release-readme.py.in
deleted file mode 100644
index c2af953..0000000
--- a/scripts/gen-release-readme.py.in
+++ /dev/null
@@ -1,131 +0,0 @@
1@PYTHON@
2# vim: ts=4 et:
3
4import os
5import re
6import argparse
7import textwrap
8from datetime import datetime
9from collections import defaultdict
10from distutils.version import StrictVersion
11
12import yaml
13
14
15def find_repo_root():
16 path = os.getcwd()
17
18 while ".git" not in set(os.listdir(path)) and path != "/":
19 path = os.path.dirname(path)
20
21 if path == "/":
22 raise Exception("No repo found, stopping at /")
23
24 return path
25
26
27class ReleaseReadmeUpdater:
28
29 SECTION_TPL = textwrap.dedent("""
30 ### Alpine Linux {release} ({date})
31 <details><summary><i>click to show/hide</i></summary><p>
32
33 {rows}
34
35 </p></details>
36 """)
37
38 AMI_TPL = (
39 " [{id}](https://{r}.console.aws.amazon.com/ec2/home"
40 "#Images:visibility=public-images;imageId={id}) "
41 "([launch](https://{r}.console.aws.amazon.com/ec2/home"
42 "#launchAmi={id})) |"
43 )
44
45 def __init__(self, profile, archs=None):
46 self.profile = profile
47 self.archs = archs or ["x86_64", "aarch64"]
48
49 def get_sorted_releases(self, release_data):
50 sections = defaultdict(lambda: {
51 "release": "",
52 "built": {},
53 "name": {},
54 "ami": defaultdict(dict)
55 })
56
57 for build, releases in release_data.items():
58 for release, amis in releases.items():
59 for name, info in amis.items():
60 arch = info["arch"]
61 built = info["build_time"]
62 ver = sections[info["version"]]
63
64 if arch not in ver["built"] or ver["built"][arch] < built:
65 ver["release"] = release
66 ver["name"][arch] = name
67 ver["built"][arch] = built
68
69 for region, ami in info["artifacts"].items():
70 ver["ami"][region][arch] = ami
71
72 extract_ver = lambda x: StrictVersion(
73 "0.0" if x["release"] == "edge" else x["release"])
74
75 return sorted(sections.values(), key=extract_ver, reverse=True)
76
77 def make_ami_list(self, sorted_releases):
78 ami_list = "## AMIs\n"
79
80 for info in sorted_releases:
81 rows = ["| Region |", "| ------ |"]
82
83 for arch in self.archs:
84 if arch in info["name"]:
85 rows[0] += f" {info['name'][arch]} |"
86 rows[1] += " --- |"
87
88 for region, amis in info["ami"].items():
89 row = f"| {region} |"
90 for arch in self.archs:
91 if arch in amis:
92 row += self.AMI_TPL.format(r=region, id=amis[arch])
93 rows.append(row)
94
95 ami_list += self.SECTION_TPL.format(
96 release=info["release"].capitalize(),
97 date=datetime.utcfromtimestamp(
98 max(info["built"].values())).date(),
99 rows="\n".join(rows))
100
101 return ami_list
102
103 def update_markdown(self):
104 release_dir = os.path.join(find_repo_root(), "releases")
105 profile_file = os.path.join(release_dir, f"{self.profile}.yaml")
106
107 with open(profile_file, "r") as data:
108 sorted_releases = self.get_sorted_releases(yaml.safe_load(data))
109
110 readme_md = os.path.join(release_dir, "README.md")
111
112 with open(readme_md, "r") as file:
113 readme = file.read()
114
115 with open(readme_md, "w") as file:
116 file.write(
117 re.sub("## AMIs.*\Z", self.make_ami_list(sorted_releases),
118 readme, flags=re.S))
119
120
121def main():
122 parser = argparse.ArgumentParser(description="Update release README")
123 parser.add_argument("profile", help="name of profile to update")
124 args = parser.parse_args()
125
126 ReleaseReadmeUpdater(args.profile).update_markdown()
127
128
129
130if __name__ == "__main__":
131 main()
diff --git a/scripts/make-amis.py.in b/scripts/make-amis.py.in
deleted file mode 100644
index c7f9f98..0000000
--- a/scripts/make-amis.py.in
+++ /dev/null
@@ -1,68 +0,0 @@
1@PYTHON@
2# vim: set ts=4 et:
3
4import os
5import io
6import sys
7import argparse
8import subprocess
9
10
11def find_repo_root():
12 path = os.getcwd()
13
14 while ".git" not in set(os.listdir(path)) and path != "/":
15 path = os.path.dirname(path)
16
17 if path == "/":
18 raise Exception("No repo found, stopping at /")
19
20 return path
21
22
23def main(args):
24 parser = argparse.ArgumentParser(description="Build Packer JSON variable "
25 "files from HOCON build profiles")
26 parser.add_argument("profile", help="name of profile to build")
27 parser.add_argument("builds", nargs="*",
28 help="name of builds within a profile to build")
29 args = parser.parse_args()
30
31 os.chdir(os.path.join(find_repo_root(), "build"))
32
33 builds = args.builds or os.listdir(os.path.join("profile", args.profile))
34 for build in builds:
35 print(f"\n*** Building {args.profile}/{build} ***\n\n")
36
37 build_dir = os.path.join("profile", args.profile, build)
38 if not os.path.exists(build_dir):
39 print(f"Build dir '{build_dir}' does not exist")
40 break
41
42 out = io.StringIO()
43
44 res = subprocess.Popen([
45 os.environ.get("PACKER", "packer"),
46 "build",
47 f"-var-file={build_dir}/vars.json",
48 "packer.json"
49 ], stdout=subprocess.PIPE, encoding="utf-8")
50
51 while res.poll() is None:
52 text = res.stdout.readline()
53 out.write(text)
54 print(text, end="")
55
56 if res.returncode == 0:
57 subprocess.run(["./update-release.py", args.profile, build])
58 else:
59 if "is used by an existing AMI" in out.getvalue():
60 continue
61 else:
62 sys.exit(res.returncode)
63
64 print("\n=== DONE ===\n")
65
66
67if __name__ == "__main__":
68 main(sys.argv)
diff --git a/scripts/prune-amis.py.in b/scripts/prune-amis.py.in
deleted file mode 100644
index 06ef567..0000000
--- a/scripts/prune-amis.py.in
+++ /dev/null
@@ -1,168 +0,0 @@
1#@PYTHON@
2# vim: ts=4 et:
3
4import os
5import sys
6import argparse
7from datetime import datetime
8
9import yaml
10import boto3
11from botocore.exceptions import ClientError
12
13LEVEL_HELP = """\
14revision - keep only the latest revision per release
15release - keep only the latest release per version
16version - keep only the versions that aren't end-of-life
17"""
18
19
20def find_repo_root():
21 path = os.getcwd()
22
23 while ".git" not in set(os.listdir(path)) and path != "/":
24 path = os.path.dirname(path)
25
26 if path == "/":
27 raise Exception("No repo found, stopping at /")
28
29 return path
30
31
32def main(args):
33 parser = argparse.ArgumentParser(
34 description="Prune AMIs from AWS",
35 formatter_class=argparse.RawTextHelpFormatter)
36 parser.add_argument(
37 "level", choices=["revision", "release", "version"], help=LEVEL_HELP)
38 parser.add_argument("profile", help="profile to prune")
39 parser.add_argument(
40 "build", nargs="?", help="build within profile to prune")
41 args = parser.parse_args()
42
43 now = datetime.utcnow()
44
45 release_yaml = os.path.join(
46 find_repo_root() "releases", f"{args.profile}.yaml")
47
48 with open(release_yaml, "r") as data:
49 before = yaml.safe_load(data)
50
51 known = {}
52 prune = {}
53 after = {}
54
55 # for all builds in the profile...
56 for build_name, releases in before.items():
57
58 # this is not the build that was specified
59 if args.build is not None and args.build != build_name:
60 print(f"< skipping {args.profile}/{build_name}")
61 # ensure its release data remains intact
62 after[build_name] = before[build_name]
63 continue
64 else:
65 print(f"> PRUNING {args.profile}/{build_name} for {args.level}")
66
67 criteria = {}
68
69 # scan releases for pruning criteria
70 for release, amis in releases.items():
71 for ami_name, info in amis.items():
72 version = info["version"]
73 built = info["build_time"]
74
75 if info["end_of_life"]:
76 eol = datetime.fromisoformat(info["end_of_life"])
77 else:
78 eol = None
79
80 for region, ami_id in info["artifacts"].items():
81 if region not in known:
82 known[region] = []
83 known[region].append(ami_id)
84
85 if args.level == "revision":
86 # find build timestamp of most recent revision, per release
87 if release not in criteria or built > criteria[release]:
88 criteria[release] = built
89 elif args.level == "release":
90 # find build timestamp of most recent revision, per version
91 if version not in criteria or built > criteria[version]:
92 criteria[version] = built
93 elif args.level == "version":
94 # find latest EOL date, per version
95 if (version not in criteria or not criteria[version]) or (
96 eol and eol > criteria[version]):
97 criteria[version] = eol
98
99 # rescan again to determine what doesn't make the cut
100 for release, amis in releases.items():
101 for ami_name, info in amis.items():
102 version = info["version"]
103 built = info["build_time"]
104
105 if info["end_of_life"]:
106 eol = datetime.fromisoformat(info["end_of_life"])
107 else:
108 eol = None
109
110 if ((args.level == "revision" and built < criteria[release]) or
111 (args.level == "release" and built < criteria[version]) or
112 (args.level == "version" and criteria[version] and (
113 (version != "edge" and criteria[version] < now) or
114 (version == "edge" and ((not eol) or (eol < now)))
115 ))):
116 for region, ami_id in info["artifacts"].items():
117 if region not in prune:
118 prune[region] = []
119
120 prune[region].append(ami_id)
121 else:
122 if build_name not in after:
123 after[build_name] = {}
124
125 if release not in after[build_name]:
126 after[build_name][release] = {}
127
128 after[build_name][release][ami_name] = info
129
130 # scan all regions for AMIs
131 AWS = boto3.session.Session()
132 for region in AWS.get_available_regions("ec2"):
133 print(f"* scanning: {region} ...")
134 EC2 = AWS.client("ec2", region_name=region)
135
136 try:
137 for image in EC2.describe_images(Owners=["self"])["Images"]:
138
139 action = "? UNKNOWN"
140 if region in prune and image["ImageId"] in prune[region]:
141 action = "- REMOVING"
142 elif region in known and image["ImageId"] in known[region]:
143 action = "+ KEEPING"
144
145 print(f" {action}: {image['Name']}\n = {image['ImageId']}",
146 end="", flush=True)
147
148 if action[0] == "-":
149 EC2.deregister_image(ImageId=image["ImageId"])
150
151 for blockdev in image["BlockDeviceMappings"]:
152 if "Ebs" in blockdev:
153 print(", {blockdev['Ebs']['SnapshotId']}",
154 end="", flush=True)
155 if action[0] == "-":
156 EC2.delete_snapshot(
157 SnapshotId=blockdev["Ebs"]["SnapshotId"])
158 print()
159 except ClientError as e:
160 print(e)
161
162 # update releases/<profile>.yaml
163 with open(release_yaml, "w") as data:
164 yaml.dump(after, data, sort_keys=False)
165
166
167if __name__ == "__main__":
168 main(sys.argv)
diff --git a/scripts/resolve-profile.py.in b/scripts/resolve-profile.py.in
deleted file mode 100644
index 2905423..0000000
--- a/scripts/resolve-profile.py.in
+++ /dev/null
@@ -1,133 +0,0 @@
1@PYTHON@
2# vim: set ts=4 et:
3
4import os
5import sys
6import json
7import shutil
8import argparse
9from datetime import datetime, timedelta
10
11from pyhocon import ConfigFactory
12
13
14# Just group together our transforms
# Just group together our transforms
class Transforms:
    """Value transforms applied to parsed HOCON profile keys.

    Each transform takes the raw parsed value and returns the string
    form written into the Packer vars.json.  All transforms accept one
    positional argument so callers can invoke them uniformly, even
    where the argument is ignored.
    """

    # Captured once at class-creation time so every build resolved in a
    # single run shares the same timestamp.
    NOW = datetime.utcnow()
    TOMORROW = NOW + timedelta(days=1)

    @staticmethod
    def unquote(value):
        """Strip the surrounding double quotes pyhocon keeps on quoted keys."""
        # Was a bare lambda class attribute (PEP 8 E731); a staticmethod
        # is equivalent for both Transforms.unquote and cls.unquote calls.
        return value.strip('"')

    @staticmethod
    def force_iso_date(value):
        """Normalize an ISO-8601 datetime string to second precision."""
        # Renamed from `input`, which shadowed the builtin.
        return datetime.fromisoformat(value).isoformat(timespec="seconds")

    @classmethod
    def resolve_tomorrow(cls, value):
        """Return tomorrow's timestamp; *value* is ignored (uniform API)."""
        return cls.TOMORROW.isoformat(timespec="seconds")

    @classmethod
    def resolve_now(cls, value):
        """Return the shared run timestamp as YYYYMMDDHHMMSS; *value* ignored."""
        return cls.NOW.strftime("%Y%m%d%H%M%S")

    @classmethod
    def fold_comma(cls, value):
        """Join dict keys with commas (e.g. AMI access/region lists)."""
        return ",".join(cls.unquote(k) for k in value.keys())

    @classmethod
    def fold_space(cls, value):
        """Join dict keys with spaces (e.g. kernel options)."""
        return " ".join(cls.unquote(k) for k in value.keys())

    @classmethod
    def fold_repos(cls, value):
        """Render a repo dict one per line, as '@tag url' when pinned to a tag."""
        return "\n".join(
            f"@{v} {cls.unquote(k)}" if isinstance(v, str) else cls.unquote(k)
            for k, v in value.items())

    @staticmethod
    def fold_packages(value):
        """Render a package dict as 'pkg@version' when a version is pinned."""
        return " ".join(
            f"{k}@{v}" if isinstance(v, str) else k
            for k, v in value.items())

    @staticmethod
    def fold_services(value):
        """Render services as 'runlevel=svc1,svc2 ...' pairs."""
        return " ".join(
            "{}={}".format(k, ",".join(v.keys()))
            for k, v in value.items())
59
60
class ConfigBuilder:
    """Resolves a HOCON build profile into per-build Packer vars.json files.

    For every build defined under the profile's BUILDS key, writes a
    fully-resolved <out_dir>/<build>/vars.json.
    """

    # Per-key value transforms; keys not listed here pass through
    # untouched.  NOTE(review): "end_of_life" ignores its input and
    # always resolves to tomorrow's date -- confirm that overriding any
    # configured end_of_life value is intended.
    _CFG_TRANSFORMS = {
        "ami_access"     : Transforms.fold_comma,
        "ami_regions"    : Transforms.fold_comma,
        "kernel_modules" : Transforms.fold_comma,
        "kernel_options" : Transforms.fold_space,
        "repos"          : Transforms.fold_repos,
        "pkgs"           : Transforms.fold_packages,
        "svcs"           : Transforms.fold_services,
        "revision"       : Transforms.resolve_now,
        "end_of_life"    : lambda x: \
            Transforms.force_iso_date(Transforms.resolve_tomorrow(x)),
    }

    def __init__(self, config_path, out_dir):
        # config_path: the HOCON profile file to parse.
        # out_dir: root directory receiving one subdirectory per build.
        self.config_path = config_path
        self.out_dir = out_dir

    def build(self, profile):
        """Render vars.json for every build in the parsed profile.

        profile: the profile name, injected into each build's config as
        "profile" (the build's own name becomes "profile_build").
        """
        build_config = ConfigFactory.parse_file(self.config_path)

        for build, cfg in build_config["BUILDS"].items():
            build_dir = os.path.join(self.out_dir, build)

            # Always start fresh
            shutil.rmtree(build_dir, ignore_errors=True)
            os.makedirs(build_dir)

            cfg["profile"] = profile
            cfg["profile_build"] = build

            # Order of operations is important here
            # Transforms run first, then "{var.*}" placeholders are
            # expanded against the (partially transformed) config.
            # NOTE(review): the placeholder branch tests and formats the
            # pre-transform value v, so a key that is both transformed
            # and contains "{var." would be overwritten with the
            # formatted raw value -- confirm no profile key needs both.
            for k, v in cfg.items():
                transform = self._CFG_TRANSFORMS.get(k)
                if transform:
                    cfg[k] = transform(v)

                if isinstance(v, str) and "{var." in v:
                    cfg[k] = v.format(var=cfg)

            with open(os.path.join(build_dir, "vars.json"), "w") as out:
                json.dump(cfg, out, indent=4, separators=(",", ": "))
104
105
def find_repo_root():
    """Locate the root of the enclosing git checkout.

    Walks upward from the current working directory until a directory
    containing ".git" is found.

    Returns:
        The path of the repository root.

    Raises:
        Exception: if no ".git" exists anywhere between the CWD and the
            filesystem root.
    """
    path = os.getcwd()

    while ".git" not in set(os.listdir(path)):
        parent = os.path.dirname(path)
        # dirname() is a fixed point only at the filesystem root; the
        # old `path != "/"` test looped forever on non-POSIX roots and
        # raised even when "/" itself was the repo root.
        if parent == path:
            raise Exception(f"No repo found, stopping at {path}")
        path = parent

    return path
116
117
def main(args):
    """Entry point: resolve one HOCON profile into per-build vars.json files.

    Args:
        args: argv-style list; args[1:] supplies the profile name
            (parsed with argparse).
    """
    parser = argparse.ArgumentParser(description="Build Packer JSON variable "
                                     "files from HOCON build profiles")
    parser.add_argument("profile", help="name of profile to build")
    # Parse the argv we were handed instead of implicitly reading
    # sys.argv (the original shadowed and ignored its parameter), so
    # main() is callable with explicit arguments.
    args = parser.parse_args(args[1:])

    root = find_repo_root()

    ConfigBuilder(
        os.path.join(root, "profiles", f"{args.profile}.conf"),
        os.path.join(root, "build", "profile", args.profile)
    ).build(args.profile)
130
131
# Script entry point; the full argv is forwarded to main().
if __name__ == "__main__":
    main(sys.argv)
diff --git a/scripts/update-release.py.in b/scripts/update-release.py.in
deleted file mode 100644
index b8f4d00..0000000
--- a/scripts/update-release.py.in
+++ /dev/null
@@ -1,80 +0,0 @@
1@PYTHON@
2# vim: set ts=4 et:
3
4import os
5import re
6import sys
7import json
8import argparse
9
10import yaml
11
12
def find_repo_root():
    """Locate the root of the enclosing git checkout.

    Walks upward from the current working directory until a directory
    containing ".git" is found.

    Returns:
        The path of the repository root.

    Raises:
        Exception: if no ".git" exists anywhere between the CWD and the
            filesystem root.
    """
    path = os.getcwd()

    while ".git" not in set(os.listdir(path)):
        parent = os.path.dirname(path)
        # dirname() is a fixed point only at the filesystem root; the
        # old `path != "/"` test looped forever on non-POSIX roots and
        # raised even when "/" itself was the repo root.
        if parent == path:
            raise Exception(f"No repo found, stopping at {path}")
        path = parent

    return path
23
24
def parse_artifact_ids(ids):
    """Turn a Packer artifact_id string ("region:ami,region:ami,...")
    into a {region: ami_id} dict."""
    tokens = iter(re.split(":|,", ids))
    # Zipping an iterator with itself pairs up consecutive tokens.
    return dict(zip(tokens, tokens))
28
29
def main(args):
    """Fold one build's Packer manifest into releases/<profile>.yaml.

    Reads build/profile/<profile>/<build>/manifest.json and records the
    build's metadata plus its per-region AMI ids under
    releases/<profile>.yaml, keyed build -> release -> AMI name.

    Args:
        args: argv-style list; args[1:] supplies the profile and build
            names (parsed with argparse).
    """
    parser = argparse.ArgumentParser(description="Update release YAML")
    parser.add_argument("profile", help="name of profile to update")
    parser.add_argument("build", help="name of build to update")
    # Parse the argv we were handed instead of implicitly reading
    # sys.argv (the original shadowed and ignored its parameter).
    args = parser.parse_args(args[1:])

    root = find_repo_root()

    release_dir = os.path.join(root, "releases")
    # exist_ok avoids the check-then-create race of the original.
    os.makedirs(release_dir, exist_ok=True)

    release_yaml = os.path.join(release_dir, f"{args.profile}.yaml")
    releases = {}
    if os.path.exists(release_yaml):
        with open(release_yaml, "r") as f:
            # "or {}" guards against an existing-but-empty YAML file,
            # for which safe_load returns None.
            releases = yaml.safe_load(f) or {}

    manifest_json = os.path.join(
        root, "build", "profile", args.profile, args.build, "manifest.json")
    with open(manifest_json, "r") as f:
        manifest = json.load(f)

    build_info = manifest["builds"][0]
    data = build_info["custom_data"]
    release = data["release"]

    # Nested layout: build name -> release -> AMI name -> metadata.
    releases.setdefault(args.build, {}).setdefault(release, {})[
            data["ami_name"]] = {
        "description": data["ami_desc"],
        "profile": args.profile,
        "profile_build": args.build,
        "version": data["version"],
        "release": release,
        "arch": data["arch"],
        "revision": data["revision"],
        "end_of_life": data["end_of_life"],
        "build_time": build_info["build_time"],
        "artifacts": parse_artifact_ids(build_info["artifact_id"]),
    }

    with open(release_yaml, "w") as f:
        yaml.dump(releases, f, sort_keys=False)
77
78
# Script entry point; the full argv is forwarded to main().
if __name__ == "__main__":
    main(sys.argv)