Move scripts used to bootstrap this repo to a dedicated folder

Yuri Sizov
2023-10-04 15:45:58 +02:00
parent 7499c6c2e8
commit a68806e782
3 changed files with 39 additions and 1 deletion


@@ -0,0 +1,68 @@
### This script has been used to generate initial data for this repository
### and is preserved as a reference. DO NOT USE IT.

### Generate the commit history, adding each official release in order.
###
### Make sure the JSON metadata files are put into the releases folder.
### Using these files, this script recreates the commit history,
### placing each release at its approximate date and time of
### publication. Each commit is tagged with the release identity.
###
### Make sure you do not rebase or otherwise change the history
### afterwards, as that destroys git tags (they remain assigned
### to old commits).
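###
### For reference, each metadata file in the releases folder is expected to
### look roughly like this. The shape is inferred from the fields accessed
### below and from the metadata generator script; the values are purely
### illustrative placeholders, not real release data:
###
###   {
###       "name": "1.0-stable",
###       "version": "1.0",
###       "status": "stable",
###       "release_date": <unix timestamp>,
###       "git_reference": "1.0-stable",
###
###       "files": [
###           { "filename": "<release file name>", "checksum": "<sha512>" }
###       ]
###   }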
import json
import os
import subprocess
from datetime import datetime

releases = []

# Read JSON files and generate correct release history.
releases_path = "./releases"
dir_contents = os.listdir(releases_path)

for filename in dir_contents:
    filepath = os.path.join(releases_path, filename)
    if not os.path.isfile(filepath):
        continue

    with open(filepath, 'r') as json_data:
        release_data = json.load(json_data)
        print(f"Reading release '{release_data['name']}' data.")

        releases.append({
            "file": filepath,
            "data": release_data
        })

# Sort by release date so we can create commits in order
releases.sort(key=lambda x: x['data']['release_date'])

# Generate a commit for each release, spoof the commit date to
# match the release date.
for release_data in releases:
    commit_datetime = datetime.fromtimestamp(release_data['data']['release_date'])
    # Thu, 07 Apr 2005 22:13:13 +0200
    commit_date = commit_datetime.strftime('%a, %d %b %Y %H:%M:%S +0000')
    release_tag = f"{release_data['data']['version']}-{release_data['data']['status']}"

    cmd_add_file = f"git add {release_data['file']}"
    cmd_commit_release = f"git commit -m \"Add Godot {release_tag}\""
    cmd_amend_time = f"git commit --amend --no-edit --date \"{commit_date}\""
    cmd_tag_release = f"git tag {release_tag}"

    extra_env = os.environ.copy()
    extra_env['GIT_COMMITTER_DATE'] = commit_date

    # The commands are plain strings, so run them through the shell.
    subprocess.run(cmd_add_file, shell=True)
    subprocess.run(cmd_commit_release, env=extra_env, shell=True)
    subprocess.run(cmd_amend_time, env=extra_env, shell=True)
    subprocess.run(cmd_tag_release, env=extra_env, shell=True)

    print(f"Committed release '{release_data['data']['name']}'.")


@@ -0,0 +1,216 @@
### This script has been used to generate initial data for this repository
### and is preserved as a reference. DO NOT USE IT.

### Generate JSON metadata files for each official release of Godot.
###
### Files are put into a temporary folder, temp/releases. To generate
### the data, we extract dates and commit hashes from releases published
### on TuxFamily. We also extract SHA512 checksums for release files
### where possible.
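###
### The directory layout assumed by this script, based on how the URLs are
### built below (folder names are examples only):
###
###   https://downloads.tuxfamily.org/godotengine/
###       3.1/            <- stable release files and SHA512-SUMS.txt
###           mono/       <- Mono variant files and SHA512-SUMS.txt
###           rc1/        <- pre-release files, README.txt, SHA512-SUMS.txt
###           rc1/mono/
###       3.2/
###       ...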
import os
import urllib.request
import urllib.error
import re
from datetime import datetime
url = 'https://downloads.tuxfamily.org/godotengine/'

skip_versions = [
    "2.1.1-fixup",
    "2.1.7-rc",
    "3.1.2-hotfix",
    "4.0-pre-alpha",
]

correct_dates = {
    "1.1": "2015-May-21 00:00:00",
    "2.0": "2016-Feb-23 00:00:00",
}

missing_hashes = {
    "2.1.6-rc1": "9ef833ec6d275e6271811f256acf23e29b2ccc33",
    "3.0.3-rc1": "63e70e3cd19d4ef42a293de40ffaf11aa735dad6",
    "3.0.3-rc2": "6635f2a1085a85f4195401d27d079a27bd98f3e0",
    "3.0.3-rc3": "f6406398670f41b55cd8e47bf5d8a1e764fb0c02",
    "3.1-alpha1": "2881a8e431308647fde21f9744b81269d0323922",
}

# Helpers.
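# Pre-release folders are assumed to contain a README.txt with a line in the
# form matched by the regular expression below, e.g.:
#   Built from commit <commit hash>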
def find_commit_hash(release_url):
    commit_hash = ""

    readme_url = f"{release_url}/README.txt"
    try:
        with urllib.request.urlopen(readme_url) as response:
            readme_text = response.read().decode()

            commit_pattern = re.compile(r'Built from commit ([a-f0-9]+)')
            commit_match = commit_pattern.search(readme_text)
            if commit_match:
                commit_hash = commit_match.group(1)
    except urllib.error.HTTPError:
        pass

    return commit_hash
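
# SHA512-SUMS.txt is assumed to list one file per line, checksum first and
# file name second, separated by whitespace, e.g.:
#   <sha512 checksum>  <release file name>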

def find_file_checksums(release_url):
    files = []

    checksums_url = f"{release_url}/SHA512-SUMS.txt"
    try:
        with urllib.request.urlopen(checksums_url) as response:
            checksums_text = response.read().decode()

            checksums_lines = checksums_text.splitlines()
            for line in checksums_lines:
                # Split on any run of whitespace, skipping malformed or empty lines.
                split_line = line.split()
                if len(split_line) < 2:
                    continue

                files.append({
                    "filename": split_line[1],
                    "checksum": split_line[0]
                })
    except urllib.error.HTTPError:
        pass

    return files

def find_release_date(page_html):
    # <td class="n"><a href="Godot_v3.1-stable_export_templates.tpz">Godot_v3.1-stable_export_templates.tpz</a></td><td class="m">2019-Mar-13 13:23:30</td><td class="s">429.2M</td><td class="t">application/octet-stream</td>
    pattern = re.compile(r'<td class="n"><a href="(.+)_export_templates.tpz">(.+)_export_templates.tpz</a></td><td class="m">([A-Za-z0-9\-: ]+)</td><td class="s">([A-Z0-9\.]+)</td><td class="t">application/octet-stream</td>')
    matches = pattern.findall(page_html)
    if not matches:
        return None

    # 2016-Mar-07 20:33:34
    return datetime.strptime(matches[0][2], '%Y-%b-%d %H:%M:%S')

def generate_file(version_name, release_name, release_status, release_url):
    # Navigate to the release's sub-directory for parsing.
    with urllib.request.urlopen(release_url) as response:
        release_html = response.read().decode()

    # Get the release date.
    release_date = None
    if release_name in correct_dates:
        release_date = datetime.strptime(correct_dates[release_name], '%Y-%b-%d %H:%M:%S')
    else:
        # Extract the release date from the export templates file listed on the page.
        release_date = find_release_date(release_html)

    if not release_date:
        print(f"Skipped version '{release_name}' because it's not released")
        return release_html # Return raw HTML for further parsing.

    # Open the file for writing.
    output_path = f"./temp/releases/godot-{release_name}.json"
    if release_status == "stable":
        output_path = f"./temp/releases/godot-{release_name}-stable.json"

    with open(output_path, 'w') as f:
        # Get the commit hash / git reference.
        commit_hash = ""
        if release_status == "stable":
            commit_hash = f"{version_name}-stable"
        else:
            # Extract the commit hash for this release from the README.txt file.
            commit_hash = find_commit_hash(release_url)
            if not commit_hash and release_name in missing_hashes:
                commit_hash = missing_hashes[release_name]
            if not commit_hash:
                print(f"Version '{release_name}' has no commit hash!")

        # Start writing the file with basic meta information.
        f.write(
            f'{{\n'
            f' "name": "{release_name}",\n'
            f' "version": "{version_name}",\n'
            f' "status": "{release_status}",\n'
            f' "release_date": {int(release_date.timestamp())},\n'
            f' "git_reference": "{commit_hash}",\n'
            f'\n'
            f' "files": [\n'
        )

        # Generate the list of files.
        # Extract file names and checksums from SHA512-SUMS.txt.
        standard_files = find_file_checksums(release_url)
        mono_files = find_file_checksums(f"{release_url}/mono")

        for i, file in enumerate(standard_files):
            f.write(
                f' {{\n'
                f' "filename": "{file["filename"]}",\n'
                f' "checksum": "{file["checksum"]}"\n'
                f' }}{"" if i == len(standard_files) - 1 and len(mono_files) == 0 else ","}\n'
            )

        for i, file in enumerate(mono_files):
            f.write(
                f' {{\n'
                f' "filename": "{file["filename"]}",\n'
                f' "checksum": "{file["checksum"]}"\n'
                f' }}{"" if i == len(mono_files) - 1 else ","}\n'
            )

        # Finish the file.
        f.write(
            f' ]\n'
            f'}}\n'
        )

    print(f"Written config '{output_path}'")
    return release_html # Return raw HTML for further parsing.

# Main routine.

# Request the download repository on TuxFamily.
with urllib.request.urlopen(url) as response:
    html = response.read().decode()

# Parse the directory index and find all the links that look like versions.
pattern = re.compile(r'<a href="(\d\.\d(\.\d(\.\d)?)?/)">')
matches = pattern.findall(html)

version_names = []
for match in matches:
    subfolder_name = match[0]
    if subfolder_name.endswith('/'):
        version_names.append(subfolder_name[:-1])

# Create the output directory if it doesn't exist.
if not os.path.exists("./temp/releases"):
    os.makedirs("./temp/releases")

for version_name in version_names:
    version_url = url + version_name

    # Generate a file for the stable release.
    version_html = generate_file(version_name, version_name, "stable", version_url)

    # Generate files for pre-releases of the stable release.
    # Look for potential builds.
    subfolder_pattern = re.compile(r'<a href="([^"]+/)">')
    subfolder_matches = subfolder_pattern.findall(version_html)
    folder_names = [match[:-1] for match in subfolder_matches if match not in ['mono/', '../']]

    for folder_name in folder_names:
        release_name = f"{version_name}-{folder_name}"
        if release_name in skip_versions:
            continue

        release_url = f"{version_url}/{folder_name}"
        generate_file(version_name, release_name, folder_name, release_url)


@@ -0,0 +1,192 @@
### This script has been used to generate initial data for this repository
### and is preserved as a reference. DO NOT USE IT.

### Generate GitHub releases for each official Godot release.
###
### For each release, this script creates a canned release summary based
### on the release's characteristics, then publishes a new GitHub release
### in the linked repository. Make sure to use gh to configure the
### default repository for this project's folder.
###
### Generated release notes are available in temp/notes for examination.
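###
### The expected shape of temp/versions.yml, inferred from the fields
### accessed in generate_notes() below (values are illustrative):
###
###   - name: "3.1"
###     flavor: "stable"
###     release_notes: "/article/<release notes post>"
###     releases:
###       - name: "rc1"
###         release_notes: "/article/<release notes post>"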
import json
import os
import subprocess
import yaml
website_versions = []
# Helpers.

def generate_notes(release_data):
    notes = ""

    version_version = release_data["version"]
    version_status = release_data["status"]
    version_tag = f"{version_version}-{version_status}"

    version_bits = version_version.split(".")
    version_flavor = "patch"
    if len(version_bits) == 2 and version_bits[1] == "0":
        version_flavor = "major"
    elif len(version_bits) == 2 and version_bits[1] != "0":
        version_flavor = "minor"

    # Add the intro line.
    version_name = version_version
    if version_status != "stable":
        version_name += " "
        if version_status.startswith("rc"):
            version_name += f"RC {version_status.removeprefix('rc')}"
        elif version_status.startswith("beta"):
            version_name += f"beta {version_status.removeprefix('beta')}"
        elif version_status.startswith("alpha"):
            version_name += f"alpha {version_status.removeprefix('alpha')}"
        elif version_status.startswith("dev"):
            version_name += f"dev {version_status.removeprefix('dev')}"
        else:
            version_name += version_status

    version_description = ""
    if version_status == "stable":
        version_bits = version_version.split(".")
        if version_flavor == "major":
            version_description = "a major release introducing new features and considerable changes to core systems. **Major version releases contain compatibility breaking changes.**"
        elif version_flavor == "minor":
            version_description = "a feature release improving upon the previous version in many aspects, such as usability and performance. Feature releases also contain new features, but preserve compatibility with previous releases."
        else:
            version_description = "a maintenance release addressing stability and usability issues, and fixing all sorts of bugs. Maintenance releases are compatible with previous releases and are recommended for adoption."
    else:
        flavor_name = "maintenance"
        if version_flavor == "major":
            flavor_name = "major"
        elif version_flavor == "minor":
            flavor_name = "feature"

        if version_status.startswith("rc"):
            version_description = f"a release candidate for the {version_version} {flavor_name} release. Release candidates focus on finalizing the release and fixing remaining critical bugs."
        elif version_status.startswith("beta"):
            version_description = f"a beta snapshot for the {version_version} {flavor_name} release. Beta snapshots are feature-complete and provided for public beta testing to catch as many bugs as possible ahead of the stable release."
        else: # alphas and devs go here.
            version_description = f"a dev snapshot for the {version_version} {flavor_name} release. Dev snapshots are in-development builds of the engine provided for early testing and feature evaluation while the engine is still being worked on."

    notes += f"**Godot {version_name}** is {version_description}\n\n"

    # Link to the bug tracker.
    notes += "Report bugs on GitHub after checking that they haven't been reported:\n"
    notes += "- https://github.com/godotengine/godot/issues\n"
    notes += "\n"

    # Add build information.
    # Only for pre-releases.
    if version_status != "stable":
        commit_hash = release_data["git_reference"]
        notes += f"Built from commit [{commit_hash}](https://github.com/godotengine/godot/commit/{commit_hash}).\n"
        notes += f"To make a custom build which would also be recognized as {version_status}, you should define `GODOT_VERSION_STATUS={version_status}` in your build environment prior to compiling.\n"
        notes += "\n"

    # Add useful links.
    notes += "----\n"
    notes += "\n"

    release_notes_url = ""
    release_notes_version = version_version
    if version_version == "3.2.4":
        release_notes_version = "3.3"

    for web_version in website_versions:
        if web_version["name"] != release_notes_version:
            continue

        if web_version["flavor"] == version_status:
            release_notes_url = f"https://godotengine.org{web_version['release_notes']}"
            break

        for web_release in web_version["releases"]:
            if web_release["name"] != version_status:
                continue

            release_notes_url = f"https://godotengine.org{web_release['release_notes']}"
            break

    notes += f"- [Release notes]({release_notes_url})\n"

    if version_status == "stable":
        notes += f"- [Complete changelog](https://godotengine.github.io/godot-interactive-changelog/#{version_version})\n"
        notes += f"- [Curated changelog](https://github.com/godotengine/godot/blob/{version_tag}/CHANGELOG.md)\n"
    else:
        notes += f"- [Complete changelog](https://godotengine.github.io/godot-interactive-changelog/#{version_tag})\n"

    notes += "- Download (GitHub): Expand **Assets** below\n"
    if version_status == "stable":
        notes += f"- [Download (TuxFamily)](https://downloads.tuxfamily.org/godotengine/{version_version})\n"
    else:
        notes += f"- [Download (TuxFamily)](https://downloads.tuxfamily.org/godotengine/{version_version}/{version_status})\n"
    notes += "\n"

    notes += "*All files for this release are mirrored under **Assets** below.*\n"

    return notes
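
# For example (derived from the logic above, values illustrative): a release
# with version "4.1" and status "rc2" is treated as a "minor"/"feature" flavor,
# and its notes start with:
#   **Godot 4.1 RC 2** is a release candidate for the 4.1 feature release. ...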

with open("./temp/versions.yml", "r") as f:
    try:
        website_versions = yaml.safe_load(f)
    except yaml.YAMLError as exc:
        print(f"Failed to parse versions.yml: {exc}")

releases = []

# Read JSON files and generate GitHub releases in order.
releases_path = "./releases"
dir_contents = os.listdir(releases_path)

for filename in dir_contents:
    filepath = os.path.join(releases_path, filename)
    if not os.path.isfile(filepath):
        continue

    with open(filepath, 'r') as json_data:
        release_data = json.load(json_data)
        print(f"Reading release '{release_data['name']}' data.")

        releases.append({
            "file": filepath,
            "data": release_data
        })

# Sort by release date so we can publish the releases in order.
releases.sort(key=lambda x: x['data']['release_date'])

# Create the output directory if it doesn't exist.
if not os.path.exists("./temp/notes"):
    os.makedirs("./temp/notes")

# Generate a GitHub release for each entry, with canned release notes.
for release_data in releases:
    release_tag = f"{release_data['data']['version']}-{release_data['data']['status']}"
    release_title = f"{release_data['data']['version']}-{release_data['data']['status']}"

    prerelease_flag = ""
    if release_data['data']['status'] != "stable":
        prerelease_flag = "--prerelease"

    release_notes = generate_notes(release_data['data'])
    release_notes_file = f"./temp/notes/release-notes-{release_tag}.txt"
    with open(release_notes_file, 'w') as temp_notes:
        temp_notes.write(release_notes)

    cmd_create_release = f"gh release create {release_tag} --verify-tag --title \"{release_title}\" --notes-file {release_notes_file} {prerelease_flag}"
    print(cmd_create_release)
    # The command is a plain string, so run it through the shell.
    subprocess.run(cmd_create_release, shell=True)