diff options
author | Mike Crute <mike@crute.us> | 2020-05-21 18:22:26 -0700 |
---|---|---|
committer | Mike Crute <mike@crute.us> | 2020-05-26 18:10:03 -0700 |
commit | a36d0616bfee07e903e27a63348b7a65e2504c22 (patch) | |
tree | ea21be780f5129142c070bee53cd25382644ef7d | |
parent | d63409acce1750db32781146d8366a13923262d0 (diff) | |
download | alpine-ec2-ami-a36d0616bfee07e903e27a63348b7a65e2504c22.tar.bz2 alpine-ec2-ami-a36d0616bfee07e903e27a63348b7a65e2504c22.tar.xz alpine-ec2-ami-a36d0616bfee07e903e27a63348b7a65e2504c22.zip |
Convert python scripts to argparse
This removes the manual command line handling and reformats the scripts
into main methods. This is paving the way for a more unified build tool.
-rw-r--r-- | scripts/gen-release-readme.py.in | 228 | ||||
-rw-r--r-- | scripts/prune-amis.py.in | 291 | ||||
-rw-r--r-- | scripts/update-release.py.in | 126 |
3 files changed, 353 insertions, 292 deletions
diff --git a/scripts/gen-release-readme.py.in b/scripts/gen-release-readme.py.in index 679d4f3..c2af953 100644 --- a/scripts/gen-release-readme.py.in +++ b/scripts/gen-release-readme.py.in | |||
@@ -1,119 +1,131 @@ | |||
1 | @PYTHON@ | 1 | @PYTHON@ |
2 | # vim: ts=4 et: | 2 | # vim: ts=4 et: |
3 | 3 | ||
4 | from datetime import datetime | ||
5 | from distutils.version import StrictVersion | ||
6 | import functools | ||
7 | import os | 4 | import os |
8 | import re | 5 | import re |
9 | import sys | 6 | import argparse |
7 | import textwrap | ||
8 | from datetime import datetime | ||
9 | from collections import defaultdict | ||
10 | from distutils.version import StrictVersion | ||
11 | |||
10 | import yaml | 12 | import yaml |
11 | 13 | ||
12 | if len(sys.argv) != 2: | 14 | |
13 | sys.exit("Usage: " + os.path.basename(__file__) + "<profile>") | 15 | def find_repo_root(): |
14 | 16 | path = os.getcwd() | |
15 | PROFILE = sys.argv[1] | 17 | |
16 | 18 | while ".git" not in set(os.listdir(path)) and path != "/": | |
17 | RELEASE_DIR = os.path.join( | 19 | path = os.path.dirname(path) |
18 | os.path.dirname(os.path.realpath(__file__)), | 20 | |
19 | '..', 'releases' | 21 | if path == "/": |
20 | ) | 22 | raise Exception("No repo found, stopping at /") |
21 | 23 | ||
22 | README_MD = os.path.join( RELEASE_DIR, 'README.md') | 24 | return path |
23 | RELEASE_YAML = os.path.join( RELEASE_DIR, PROFILE + '.yaml') | 25 | |
24 | 26 | ||
25 | # read in releases/<profile>.yaml | 27 | class ReleaseReadmeUpdater: |
26 | with open(RELEASE_YAML, 'r') as data: | 28 | |
27 | RELEASES = yaml.safe_load(data) | 29 | SECTION_TPL = textwrap.dedent(""" |
28 | 30 | ### Alpine Linux {release} ({date}) | |
29 | sections = {} | 31 | <details><summary><i>click to show/hide</i></summary><p> |
30 | 32 | ||
31 | for build, releases in RELEASES.items(): | 33 | {rows} |
32 | for release, amis in releases.items(): | 34 | |
33 | for name, info in amis.items(): | 35 | </p></details> |
34 | version = info['version'] | 36 | """) |
35 | arch = info['arch'] | 37 | |
36 | built = info['build_time'] | 38 | AMI_TPL = ( |
37 | if version in sections: | 39 | " [{id}](https://{r}.console.aws.amazon.com/ec2/home" |
38 | ver = sections[version] | 40 | "#Images:visibility=public-images;imageId={id}) " |
39 | else: | 41 | "([launch](https://{r}.console.aws.amazon.com/ec2/home" |
40 | ver = { | 42 | "#launchAmi={id})) |" |
41 | 'release': '', | ||
42 | 'built': {}, | ||
43 | 'name': {}, | ||
44 | 'ami': {} | ||
45 | } | ||
46 | if (arch not in ver['built'] or | ||
47 | ver['built'][arch] < built): | ||
48 | ver['release'] = release | ||
49 | ver['name'][arch] = name | ||
50 | ver['built'][arch] = built | ||
51 | for region, ami in info['artifacts'].items(): | ||
52 | if region not in ver['ami']: | ||
53 | ver['ami'][region] = {} | ||
54 | ver['ami'][region][arch] = ami | ||
55 | sections[version] = ver | ||
56 | |||
57 | SECTION = """ | ||
58 | ### Alpine Linux {release} ({date}) | ||
59 | <details><summary><i>click to show/hide</i></summary><p> | ||
60 | |||
61 | {rows} | ||
62 | |||
63 | </p></details> | ||
64 | """ | ||
65 | |||
66 | AMI = " [{id}](https://{r}.console.aws.amazon.com/ec2/home#Images:visibility=public-images;imageId={id}) " + \ | ||
67 | "([launch](https://{r}.console.aws.amazon.com/ec2/home#launchAmi={id})) |" | ||
68 | |||
69 | ARCHS = ['x86_64', 'aarch64'] | ||
70 | |||
71 | |||
72 | # most -> least recent version, edge at end | ||
73 | def ver_cmp(a, b): | ||
74 | try: | ||
75 | if StrictVersion(a) < StrictVersion(b): | ||
76 | return 1 | ||
77 | if StrictVersion(a) > StrictVersion(b): | ||
78 | return -1 | ||
79 | return 0 | ||
80 | except ValueError: | ||
81 | # "edge" doesn't work with StrictVersion | ||
82 | if a == 'edge': | ||
83 | return 1 | ||
84 | if b == 'edge': | ||
85 | return -1 | ||
86 | return 0 | ||
87 | |||
88 | |||
89 | ami_list = "## AMIs\n" | ||
90 | |||
91 | for version in sorted(list(sections.keys()), key=functools.cmp_to_key(ver_cmp)): | ||
92 | info = sections[version] | ||
93 | release = info['release'] | ||
94 | rows = [] | ||
95 | rows.append('| Region |') | ||
96 | rows.append('| ------ |') | ||
97 | for arch in ARCHS: | ||
98 | if arch in info['name']: | ||
99 | rows[0] += ' {n} |'.format(n=info['name'][arch]) | ||
100 | rows[1] += ' --- |' | ||
101 | for region, amis in info['ami'].items(): | ||
102 | row = '| {r} |'.format(r=region) | ||
103 | for arch in ARCHS: | ||
104 | if arch in amis: | ||
105 | row += AMI.format(r=region, id=amis[arch]) | ||
106 | rows.append(row) | ||
107 | ami_list += SECTION.format( | ||
108 | release=release.capitalize(), | ||
109 | date=datetime.utcfromtimestamp(max(info['built'].values())).date(), | ||
110 | rows="\n".join(rows) | ||
111 | ) | 43 | ) |
112 | 44 | ||
113 | with open(README_MD, 'r') as file: | 45 | def __init__(self, profile, archs=None): |
114 | readme = file.read() | 46 | self.profile = profile |
47 | self.archs = archs or ["x86_64", "aarch64"] | ||
48 | |||
49 | def get_sorted_releases(self, release_data): | ||
50 | sections = defaultdict(lambda: { | ||
51 | "release": "", | ||
52 | "built": {}, | ||
53 | "name": {}, | ||
54 | "ami": defaultdict(dict) | ||
55 | }) | ||
56 | |||
57 | for build, releases in release_data.items(): | ||
58 | for release, amis in releases.items(): | ||
59 | for name, info in amis.items(): | ||
60 | arch = info["arch"] | ||
61 | built = info["build_time"] | ||
62 | ver = sections[info["version"]] | ||
63 | |||
64 | if arch not in ver["built"] or ver["built"][arch] < built: | ||
65 | ver["release"] = release | ||
66 | ver["name"][arch] = name | ||
67 | ver["built"][arch] = built | ||
68 | |||
69 | for region, ami in info["artifacts"].items(): | ||
70 | ver["ami"][region][arch] = ami | ||
71 | |||
72 | extract_ver = lambda x: StrictVersion( | ||
73 | "0.0" if x["release"] == "edge" else x["release"]) | ||
74 | |||
75 | return sorted(sections.values(), key=extract_ver, reverse=True) | ||
76 | |||
77 | def make_ami_list(self, sorted_releases): | ||
78 | ami_list = "## AMIs\n" | ||
79 | |||
80 | for info in sorted_releases: | ||
81 | rows = ["| Region |", "| ------ |"] | ||
82 | |||
83 | for arch in self.archs: | ||
84 | if arch in info["name"]: | ||
85 | rows[0] += f" {info['name'][arch]} |" | ||
86 | rows[1] += " --- |" | ||
87 | |||
88 | for region, amis in info["ami"].items(): | ||
89 | row = f"| {region} |" | ||
90 | for arch in self.archs: | ||
91 | if arch in amis: | ||
92 | row += self.AMI_TPL.format(r=region, id=amis[arch]) | ||
93 | rows.append(row) | ||
94 | |||
95 | ami_list += self.SECTION_TPL.format( | ||
96 | release=info["release"].capitalize(), | ||
97 | date=datetime.utcfromtimestamp( | ||
98 | max(info["built"].values())).date(), | ||
99 | rows="\n".join(rows)) | ||
100 | |||
101 | return ami_list | ||
102 | |||
103 | def update_markdown(self): | ||
104 | release_dir = os.path.join(find_repo_root(), "releases") | ||
105 | profile_file = os.path.join(release_dir, f"{self.profile}.yaml") | ||
106 | |||
107 | with open(profile_file, "r") as data: | ||
108 | sorted_releases = self.get_sorted_releases(yaml.safe_load(data)) | ||
109 | |||
110 | readme_md = os.path.join(release_dir, "README.md") | ||
111 | |||
112 | with open(readme_md, "r") as file: | ||
113 | readme = file.read() | ||
114 | |||
115 | with open(readme_md, "w") as file: | ||
116 | file.write( | ||
117 | re.sub(r"## AMIs.*\Z", self.make_ami_list(sorted_releases), | ||
118 | readme, flags=re.S)) | ||
119 | |||
120 | |||
121 | def main(): | ||
122 | parser = argparse.ArgumentParser(description="Update release README") | ||
123 | parser.add_argument("profile", help="name of profile to update") | ||
124 | args = parser.parse_args() | ||
125 | |||
126 | ReleaseReadmeUpdater(args.profile).update_markdown() | ||
127 | |||
115 | 128 | ||
116 | readme_re = re.compile('## AMIs.*\Z', re.S) | ||
117 | 129 | ||
118 | with open(README_MD, 'w') as file: | 130 | if __name__ == "__main__": |
119 | file.write(readme_re.sub(ami_list, readme)) | 131 | main() |
diff --git a/scripts/prune-amis.py.in b/scripts/prune-amis.py.in index 3009204..06ef567 100644 --- a/scripts/prune-amis.py.in +++ b/scripts/prune-amis.py.in | |||
@@ -1,137 +1,168 @@ | |||
1 | @PYTHON@ | 1 | @PYTHON@ |
2 | # vim: ts=4 et: | 2 | # vim: ts=4 et: |
3 | 3 | ||
4 | from datetime import datetime | ||
5 | import os | 4 | import os |
6 | import sys | 5 | import sys |
6 | import argparse | ||
7 | from datetime import datetime | ||
8 | |||
9 | import yaml | ||
7 | import boto3 | 10 | import boto3 |
8 | from botocore.exceptions import ClientError | 11 | from botocore.exceptions import ClientError |
9 | import yaml | ||
10 | 12 | ||
11 | LEVELS = ['revision', 'release', 'version'] | 13 | LEVEL_HELP = """\ |
12 | 14 | revision - keep only the latest revision per release | |
13 | if 3 < len(sys.argv) > 4 or sys.argv[1] not in LEVELS: | 15 | release - keep only the latest release per version |
14 | sys.exit("Usage: " + os.path.basename(__file__) + """ <level> <profile> [<build>] | 16 | version - keep only the versions that aren't end-of-life |
15 | <level> :- | 17 | """ |
16 | revision - keep only the latest revision per release | 18 | |
17 | release - keep only the latest release per version | 19 | |
18 | version - keep only the versions that aren't end-of-life""") | 20 | def find_repo_root(): |
19 | 21 | path = os.getcwd() | |
20 | NOW = datetime.utcnow() | 22 | |
21 | LEVEL = sys.argv[1] | 23 | while ".git" not in set(os.listdir(path)) and path != "/": |
22 | PROFILE = sys.argv[2] | 24 | path = os.path.dirname(path) |
23 | BUILD = None if len(sys.argv) == 3 else sys.argv[3] | 25 | |
24 | 26 | if path == "/": | |
25 | RELEASE_YAML = os.path.join( | 27 | raise Exception("No repo found, stopping at /") |
26 | os.path.dirname(os.path.realpath(__file__)), | 28 | |
27 | '..', 'releases', PROFILE + '.yaml' | 29 | return path |
28 | ) | 30 | |
29 | 31 | ||
30 | with open(RELEASE_YAML, 'r') as data: | 32 | def main(args): |
31 | BEFORE = yaml.safe_load(data) | 33 | parser = argparse.ArgumentParser( |
32 | 34 | description="Prune AMIs from AWS", | |
33 | known = {} | 35 | formatter_class=argparse.RawTextHelpFormatter) |
34 | prune = {} | 36 | parser.add_argument( |
35 | after = {} | 37 | "level", choices=["revision", "release", "version"], help=LEVEL_HELP) |
36 | 38 | parser.add_argument("profile", help="profile to prune") | |
37 | # for all builds in the profile... | 39 | parser.add_argument( |
38 | for build_name, releases in BEFORE.items(): | 40 | "build", nargs="?", help="build within profile to prune") |
39 | 41 | args = parser.parse_args() | |
40 | # this is not the build that was specified | 42 | |
41 | if BUILD is not None and BUILD != build_name: | 43 | now = datetime.utcnow() |
42 | print('< skipping {0}/{1}'.format(PROFILE, build_name)) | 44 | |
43 | # ensure its release data remains intact | 45 | release_yaml = os.path.join( |
44 | after[build_name] = BEFORE[build_name] | 46 | find_repo_root(), "releases", f"{args.profile}.yaml") |
45 | continue | 47 | |
46 | else: | 48 | with open(release_yaml, "r") as data: |
47 | print('> PRUNING {0}/{1} for {2}'.format(PROFILE, build_name, LEVEL)) | 49 | before = yaml.safe_load(data) |
48 | 50 | ||
49 | criteria = {} | 51 | known = {} |
50 | 52 | prune = {} | |
51 | # scan releases for pruning criteria | 53 | after = {} |
52 | for release, amis in releases.items(): | 54 | |
53 | for ami_name, info in amis.items(): | 55 | # for all builds in the profile... |
54 | version = info['version'] | 56 | for build_name, releases in before.items(): |
55 | if info['end_of_life']: | 57 | |
56 | eol = datetime.fromisoformat(info['end_of_life']) | 58 | # this is not the build that was specified |
57 | else: | 59 | if args.build is not None and args.build != build_name: |
58 | eol = None | 60 | print(f"< skipping {args.profile}/{build_name}") |
59 | built = info['build_time'] | 61 | # ensure its release data remains intact |
60 | for region, ami_id in info['artifacts'].items(): | 62 | after[build_name] = before[build_name] |
61 | if region not in known: | 63 | continue |
62 | known[region] = [] | 64 | else: |
63 | known[region].append(ami_id) | 65 | print(f"> PRUNING {args.profile}/{build_name} for {args.level}") |
64 | 66 | ||
65 | if LEVEL == 'revision': | 67 | criteria = {} |
66 | # find build timestamp of most recent revision, per release | 68 | |
67 | if release not in criteria or built > criteria[release]: | 69 | # scan releases for pruning criteria |
68 | criteria[release] = built | 70 | for release, amis in releases.items(): |
69 | elif LEVEL == 'release': | 71 | for ami_name, info in amis.items(): |
70 | # find build timestamp of most recent revision, per version | 72 | version = info["version"] |
71 | if version not in criteria or built > criteria[version]: | 73 | built = info["build_time"] |
72 | criteria[version] = built | 74 | |
73 | elif LEVEL == 'version': | 75 | if info["end_of_life"]: |
74 | # find latest EOL date, per version | 76 | eol = datetime.fromisoformat(info["end_of_life"]) |
75 | if (version not in criteria or not criteria[version]) or ( | 77 | else: |
76 | eol and eol > criteria[version]): | 78 | eol = None |
77 | criteria[version] = eol | 79 | |
78 | 80 | for region, ami_id in info["artifacts"].items(): | |
79 | # rescan again to determine what doesn't make the cut | 81 | if region not in known: |
80 | for release, amis in releases.items(): | 82 | known[region] = [] |
81 | for ami_name, info in amis.items(): | 83 | known[region].append(ami_id) |
82 | version = info['version'] | 84 | |
83 | if info['end_of_life']: | 85 | if args.level == "revision": |
84 | eol = datetime.fromisoformat(info['end_of_life']) | 86 | # find build timestamp of most recent revision, per release |
85 | else: | 87 | if release not in criteria or built > criteria[release]: |
86 | eol = None | 88 | criteria[release] = built |
87 | built = info['build_time'] | 89 | elif args.level == "release": |
88 | if ((LEVEL == 'revision' and built < criteria[release]) or | 90 | # find build timestamp of most recent revision, per version |
89 | (LEVEL == 'release' and built < criteria[version]) or | 91 | if version not in criteria or built > criteria[version]: |
90 | (LEVEL == 'version' and criteria[version] and ( | 92 | criteria[version] = built |
91 | (version != 'edge' and criteria[version] < NOW) or | 93 | elif args.level == "version": |
92 | (version == 'edge' and ((not eol) or (eol < NOW))) | 94 | # find latest EOL date, per version |
93 | ))): | 95 | if (version not in criteria or not criteria[version]) or ( |
94 | for region, ami_id in info['artifacts'].items(): | 96 | eol and eol > criteria[version]): |
95 | if region not in prune: | 97 | criteria[version] = eol |
96 | prune[region] = [] | 98 | |
97 | prune[region].append(ami_id) | 99 | # rescan again to determine what doesn't make the cut |
98 | else: | 100 | for release, amis in releases.items(): |
99 | if build_name not in after: | 101 | for ami_name, info in amis.items(): |
100 | after[build_name] = {} | 102 | version = info["version"] |
101 | if release not in after[build_name]: | 103 | built = info["build_time"] |
102 | after[build_name][release] = {} | 104 | |
103 | after[build_name][release][ami_name] = info | 105 | if info["end_of_life"]: |
104 | 106 | eol = datetime.fromisoformat(info["end_of_life"]) | |
105 | # scan all regions for AMIs | 107 | else: |
106 | AWS = boto3.session.Session() | 108 | eol = None |
107 | for region in AWS.get_available_regions('ec2'): | 109 | |
108 | print("* scanning: " + region + '...') | 110 | if ((args.level == "revision" and built < criteria[release]) or |
109 | EC2 = AWS.client('ec2', region_name=region) | 111 | (args.level == "release" and built < criteria[version]) or |
110 | 112 | (args.level == "version" and criteria[version] and ( | |
111 | try: | 113 | (version != "edge" and criteria[version] < now) or |
112 | for image in EC2.describe_images(Owners=['self'])['Images']: | 114 | (version == "edge" and ((not eol) or (eol < now))) |
113 | 115 | ))): | |
114 | action = '? UNKNOWN' | 116 | for region, ami_id in info["artifacts"].items(): |
115 | if region in prune and image['ImageId'] in prune[region]: | 117 | if region not in prune: |
116 | action = '- REMOVING' | 118 | prune[region] = [] |
117 | elif region in known and image['ImageId'] in known[region]: | 119 | |
118 | action = '+ KEEPING' | 120 | prune[region].append(ami_id) |
119 | 121 | else: | |
120 | print(' ' + action + ': ' + image['Name'] + | 122 | if build_name not in after: |
121 | "\n = " + image['ImageId'], end='', flush=True) | 123 | after[build_name] = {} |
122 | if action[0] == '-': | 124 | |
123 | EC2.deregister_image(ImageId=image['ImageId']) | 125 | if release not in after[build_name]: |
124 | for blockdev in image['BlockDeviceMappings']: | 126 | after[build_name][release] = {} |
125 | if 'Ebs' in blockdev: | 127 | |
126 | print(', ' + blockdev['Ebs']['SnapshotId'], | 128 | after[build_name][release][ami_name] = info |
127 | end='', flush=True) | 129 | |
128 | if action[0] == '-': | 130 | # scan all regions for AMIs |
129 | EC2.delete_snapshot( | 131 | AWS = boto3.session.Session() |
130 | SnapshotId=blockdev['Ebs']['SnapshotId']) | 132 | for region in AWS.get_available_regions("ec2"): |
131 | print() | 133 | print(f"* scanning: {region} ...") |
132 | except ClientError as e: | 134 | EC2 = AWS.client("ec2", region_name=region) |
133 | print(e) | 135 | |
134 | 136 | try: | |
135 | # update releases/<profile>.yaml | 137 | for image in EC2.describe_images(Owners=["self"])["Images"]: |
136 | with open(RELEASE_YAML, 'w') as data: | 138 | |
137 | yaml.dump(after, data, sort_keys=False) | 139 | action = "? UNKNOWN" |
140 | if region in prune and image["ImageId"] in prune[region]: | ||
141 | action = "- REMOVING" | ||
142 | elif region in known and image["ImageId"] in known[region]: | ||
143 | action = "+ KEEPING" | ||
144 | |||
145 | print(f" {action}: {image['Name']}\n = {image['ImageId']}", | ||
146 | end="", flush=True) | ||
147 | |||
148 | if action[0] == "-": | ||
149 | EC2.deregister_image(ImageId=image["ImageId"]) | ||
150 | |||
151 | for blockdev in image["BlockDeviceMappings"]: | ||
152 | if "Ebs" in blockdev: | ||
153 | print(", {blockdev['Ebs']['SnapshotId']}", | ||
154 | end="", flush=True) | ||
155 | if action[0] == "-": | ||
156 | EC2.delete_snapshot( | ||
157 | SnapshotId=blockdev["Ebs"]["SnapshotId"]) | ||
158 | print() | ||
159 | except ClientError as e: | ||
160 | print(e) | ||
161 | |||
162 | # update releases/<profile>.yaml | ||
163 | with open(release_yaml, "w") as data: | ||
164 | yaml.dump(after, data, sort_keys=False) | ||
165 | |||
166 | |||
167 | if __name__ == "__main__": | ||
168 | main(sys.argv) | ||
diff --git a/scripts/update-release.py.in b/scripts/update-release.py.in index 95350c9..b8f4d00 100644 --- a/scripts/update-release.py.in +++ b/scripts/update-release.py.in | |||
@@ -1,62 +1,80 @@ | |||
1 | @PYTHON@ | 1 | @PYTHON@ |
2 | # vim: set ts=4 et: | 2 | # vim: set ts=4 et: |
3 | 3 | ||
4 | import json | ||
5 | import os | 4 | import os |
6 | import re | 5 | import re |
7 | import sys | 6 | import sys |
7 | import json | ||
8 | import argparse | ||
9 | |||
8 | import yaml | 10 | import yaml |
9 | 11 | ||
10 | if len(sys.argv) != 3: | 12 | |
11 | sys.exit("Usage: " + os.path.basename(__file__) + " <profile> <build>") | 13 | def find_repo_root(): |
12 | 14 | path = os.getcwd() | |
13 | PROFILE = sys.argv[1] | 15 | |
14 | BUILD = sys.argv[2] | 16 | while ".git" not in set(os.listdir(path)) and path != "/": |
15 | 17 | path = os.path.dirname(path) | |
16 | SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__)) | 18 | |
17 | MANIFEST_JSON = os.path.join( | 19 | if path == "/": |
18 | SCRIPT_DIR, 'profile', PROFILE, BUILD, 'manifest.json' | 20 | raise Exception("No repo found, stopping at /") |
19 | ) | 21 | |
20 | 22 | return path | |
21 | RELEASE_DIR = os.path.join(SCRIPT_DIR, '..', 'releases') | 23 | |
22 | RELEASE_YAML = os.path.join(RELEASE_DIR, PROFILE + '.yaml') | 24 | |
23 | 25 | def parse_artifact_ids(ids): | |
24 | if not os.path.exists(RELEASE_DIR): | 26 | parsed = re.split(":|,", ids) |
25 | os.makedirs(RELEASE_DIR) | 27 | return dict(zip(parsed[0::2], parsed[1::2])) |
26 | 28 | ||
27 | releases = {} | 29 | |
28 | if os.path.exists(RELEASE_YAML): | 30 | def main(args): |
29 | with open(RELEASE_YAML, 'r') as data: | 31 | parser = argparse.ArgumentParser(description="Update release YAML") |
30 | releases = yaml.safe_load(data) | 32 | parser.add_argument("profile", help="name of profile to update") |
31 | 33 | parser.add_argument("build", help="name of build to update") | |
32 | with open(MANIFEST_JSON, 'r') as data: | 34 | args = parser.parse_args() |
33 | MANIFEST = json.load(data) | 35 | |
34 | 36 | root = find_repo_root() | |
35 | A = re.split(':|,', MANIFEST['builds'][0]['artifact_id']) | 37 | |
36 | ARTIFACTS = dict(zip(A[0::2], A[1::2])) | 38 | release_dir = os.path.join(root, "releases") |
37 | BUILD_TIME = MANIFEST['builds'][0]['build_time'] | 39 | if not os.path.exists(release_dir): |
38 | DATA = MANIFEST['builds'][0]['custom_data'] | 40 | os.makedirs(release_dir) |
39 | RELEASE = DATA['release'] | 41 | |
40 | 42 | release_yaml = os.path.join(release_dir, f"{args.profile}.yaml") | |
41 | if BUILD not in releases: | 43 | releases = {} |
42 | releases[BUILD] = {} | 44 | if os.path.exists(release_yaml): |
43 | if RELEASE not in releases[BUILD]: | 45 | with open(release_yaml, "r") as data: |
44 | releases[BUILD][RELEASE] = {} | 46 | releases = yaml.safe_load(data) |
45 | 47 | ||
46 | REVISION = { | 48 | manifest_json = os.path.join( |
47 | 'description': DATA['ami_desc'], | 49 | root, "build", "profile", args.profile, args.build, "manifest.json") |
48 | 'profile': PROFILE, | 50 | with open(manifest_json, "r") as data: |
49 | 'profile_build': BUILD, | 51 | manifest = json.load(data) |
50 | 'version': DATA['version'], | 52 | |
51 | 'release': RELEASE, | 53 | data = manifest["builds"][0]["custom_data"] |
52 | 'arch': DATA['arch'], | 54 | release = data["release"] |
53 | 'revision': DATA['revision'], | 55 | |
54 | 'end_of_life': DATA['end_of_life'], | 56 | if args.build not in releases: |
55 | 'build_time': BUILD_TIME, | 57 | releases[args.build] = {} |
56 | 'artifacts': ARTIFACTS | 58 | |
57 | } | 59 | if release not in releases[args.build]: |
58 | 60 | releases[args.build][release] = {} | |
59 | releases[BUILD][RELEASE][DATA['ami_name']] = REVISION | 61 | |
60 | 62 | releases[args.build][release][data["ami_name"]] = { | |
61 | with open(RELEASE_YAML, 'w') as data: | 63 | "description": data["ami_desc"], |
62 | yaml.dump(releases, data, sort_keys=False) | 64 | "profile": args.profile, |
65 | "profile_build": args.build, | ||
66 | "version": data["version"], | ||
67 | "release": release, | ||
68 | "arch": data["arch"], | ||
69 | "revision": data["revision"], | ||
70 | "end_of_life": data["end_of_life"], | ||
71 | "build_time": manifest["builds"][0]["build_time"], | ||
72 | "artifacts": parse_artifact_ids(manifest["builds"][0]["artifact_id"]), | ||
73 | } | ||
74 | |||
75 | with open(release_yaml, "w") as data: | ||
76 | yaml.dump(releases, data, sort_keys=False) | ||
77 | |||
78 | |||
79 | if __name__ == "__main__": | ||
80 | main(sys.argv) | ||