Mirror of https://github.com/godotengine/godot-interactive-changelog.git (synced 2025-12-31 01:49:28 +03:00)
Track merge commits to correctly identify releases
@@ -28,6 +28,7 @@ const API_RATE_LIMIT = `
`;

const GIT_HEAD_COMMIT_RE = RegExp("^commit ([a-zA-Z0-9-_]+)$");
const GIT_HEAD_MERGE_RE = RegExp("^Merge: (.+) (.+)$");
const GIT_HEAD_AUTHOR_RE = RegExp("^Author: (.+)$");
const GIT_HEAD_COMMITTER_RE = RegExp("^Commit: (.+)$");
const GIT_BODY_LINE_RE = RegExp("^[\\s]{2,}(.*)$");
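For context on the regular expression added here: in `git log --pretty=full` output, a merge commit carries an extra "Merge:" header listing its abbreviated parent hashes, and GIT_HEAD_MERGE_RE captures both of them. A minimal illustration (the sample hashes below are made up):

// Sample header block for a merge commit as printed by `git log --pretty=full`:
//
//   commit 0123456789abcdef0123456789abcdef01234567
//   Merge: aaaaaaaaaa bbbbbbbbbb
//   Author: Example Author <author@example.com>
//   Commit: Example Committer <committer@example.com>
//
// GIT_HEAD_MERGE_RE captures the two parent hashes from the "Merge:" line:
const matches = "Merge: aaaaaaaaaa bbbbbbbbbb".match(GIT_HEAD_MERGE_RE);
// matches[1] === "aaaaaaaaaa", matches[2] === "bbbbbbbbbb"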

@@ -74,7 +75,7 @@ class DataFetcher {
        if (typeof data["errors"] === "undefined") {
            return;
        }

        console.warn(` Server handled the request, but there were errors:`);
        data.errors.forEach((item) => {
            console.log(` [${item.type}] ${item.message}`);
@@ -98,7 +99,7 @@ class DataFetcher {

    async countCommitHistory(fromCommit, toCommit) {
        try {
            const { stdout, stderr } = await exec(`git log --pretty=oneline --no-merges ${fromCommit}..${toCommit}`, { cwd: `./temp/${this.data_repo}` });
            const { stdout, stderr } = await exec(`git log --pretty=oneline ${fromCommit}..${toCommit}`, { cwd: `./temp/${this.data_repo}` });

            const commitHistory = stdout.trimEnd();
            await this._logResponse(commitHistory, "_commit_shortlog", LogFormat.Raw);
@@ -112,7 +113,7 @@ class DataFetcher {

    async getCommitHistory(fromCommit, toCommit) {
        try {
            const { stdout, stderr } = await exec(`git log --pretty=full --no-merges ${fromCommit}..${toCommit}`, { cwd: `./temp/${this.data_repo}` });
            const { stdout, stderr } = await exec(`git log --pretty=full ${fromCommit}..${toCommit}`, { cwd: `./temp/${this.data_repo}` });

            const commitHistory = stdout;
            await this._logResponse(commitHistory, "_commit_history", LogFormat.Raw);
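To illustrate what dropping `--no-merges` changes in the two hunks above: `git log` now also lists merge commits, so the script receives them and can flag them via the "Merge:" header instead of never seeing them at all. A minimal standalone sketch (hypothetical refs and repository path, not part of this project):

// Hypothetical sketch: count how many merge commits a range contains by comparing
// the log with and without --no-merges. Refs and cwd are placeholders.
const util = require("util");
const exec = util.promisify(require("child_process").exec);

async function countMergeCommits(fromRef, toRef, cwd) {
    const all = await exec(`git log --pretty=oneline ${fromRef}..${toRef}`, { cwd });
    const nonMerges = await exec(`git log --pretty=oneline --no-merges ${fromRef}..${toRef}`, { cwd });

    const countLines = (out) => out.trimEnd().split("\n").filter((line) => line !== "").length;
    // Every line of --pretty=oneline output is one commit; the difference is the merges.
    return countLines(all.stdout) - countLines(nonMerges.stdout);
}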

@@ -134,11 +135,11 @@ class DataFetcher {
        } else if (process.env.GITHUB_TOKEN) {
            init.headers["Authorization"] = `token ${process.env.GITHUB_TOKEN}`;
        }

        init.body = JSON.stringify({
            query,
        });

        return await fetch("https://api.github.com/graphql", init);
    }
@@ -152,7 +153,7 @@ class DataFetcher {
        } else if (process.env.GITHUB_TOKEN) {
            init.headers["Authorization"] = `token ${process.env.GITHUB_TOKEN}`;
        }

        return await fetch(`${this.api_rest_path}${query}`, init);
    }
@@ -163,18 +164,18 @@ class DataFetcher {
            ${API_RATE_LIMIT}
        }
        `;

        const res = await this.fetchGithub(query);
        if (res.status !== 200) {
            this._handleResponseErrors(this.api_repository_id, res);
            process.exitCode = ExitCodes.RequestFailure;
            return;
        }

        const data = await res.json();
        await this._logResponse(data, "_rate_limit");
        this._handleDataErrors(data);

        const rate_limit = data.data["rateLimit"];
        console.log(` [$${rate_limit.cost}][${rate_limit.nodeCount}] Available API calls: ${rate_limit.remaining}/${rate_limit.limit}; resets at ${rate_limit.resetAt}`);
    } catch (err) {
@@ -269,7 +270,7 @@ class DataFetcher {
        `;

        console.log(` Requesting batch ${page}/${totalPages} of commit and pull request data.`);

        const res = await this.fetchGithub(query);
        if (res.status !== 200) {
            this._handleResponseErrors(this.api_repository_id, res);
@@ -312,6 +313,7 @@ class DataProcessor {
    _getCommitObject() {
        return {
            "hash": "",
            "is_merge": false,

            "authored_by": [],
            "author_raw": "",
@@ -426,6 +428,13 @@ class DataProcessor {
                continue;
            }

            // Check if this is a merge commit.
            matches = line.match(GIT_HEAD_MERGE_RE);
            if (matches) {
                commit.is_merge = true;
                continue;
            }

            // Parse the authorship information.
            matches = line.match(GIT_HEAD_AUTHOR_RE);
            if (matches) {
@@ -483,8 +492,6 @@ class DataProcessor {
            console.error(` Error parsing commit log: Expected to received ${logSize} commits, but got ${this.log.length} instead.`);
            process.exitCode = ExitCodes.ParseFailure;
        }

        return Object.keys(this.commits);
    }

    processCommits(commitsRaw, targetRepo) {
@@ -576,6 +583,21 @@ class DataProcessor {
            process.exitCode = ExitCodes.ParseFailure;
        }
    }

    getCommitHashes() {
        const commitHashes = [];

        for (let commitHash in this.commits) {
            const commit = this.commits[commitHash];
            if (commit.is_merge) {
                continue;
            }

            commitHashes.push(commitHash);
        }

        return commitHashes;
    }
}

class DataIO {
@@ -586,7 +608,7 @@ class DataIO {
        this.data_version = "";
        this.skip_checkout = false;

        //
        //
        this.config = null;
        this.first_commit = ""
        this.last_commit = "";
@@ -755,14 +777,18 @@ async function main() {
    // cherry-pick, and not the original commit. We can rely on the commit message body
    // containing a certain string, from which we can take the original commit hash.

    const commitHashes = dataProcessor.processLog(commitLog, commitLogSize);
    dataProcessor.processLog(commitLog, commitLogSize);
    checkForExit();

    // This method returns only non-merge commits; we don't need to fetch anything about
    // merge commits. We only need them for commit history.
    const commitHashes = dataProcessor.getCommitHashes();

    // Third, we generate a query to the GraphQL API to fetch the information about
    // linked PRs. GraphQL API doesn't have a filter to extract data for a list of
    // commit hashes, but it supports having multiple sub-queries within the same request,
    // which is our way in.
    //
    //
    // While paginated queries are limited to 100 entries per page, sub-queries do not
    // appear to be similarly limited. We are still limited by the total number of nodes
    // we can theoretically fetch, which is 500 000. As such, we still want to do this
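A minimal sketch of the sub-query approach described in the comment above. The exact query the script builds is not shown in this hunk; the field names below (`object(expression:)`, `associatedPullRequests`) follow GitHub's public GraphQL schema and are used here only for illustration:

// Hypothetical helper: turn a list of commit hashes into one GraphQL query by giving
// each hash its own aliased sub-query inside a single repository() block.
function buildCommitBatchQuery(owner, repo, commitHashes) {
    const subQueries = commitHashes.map((hash, index) => `
        commit_${index}: object(expression: "${hash}") {
            ... on Commit {
                associatedPullRequests(first: 1) {
                    nodes { number title url }
                }
            }
        }`
    ).join("\n");

    return `{
        repository(owner: "${owner}", name: "${repo}") {
            ${subQueries}
        }
    }`;
}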
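Likewise, for the cherry-pick note near the top of this hunk: the "certain string" it refers to is not shown in this diff. For illustration only, `git cherry-pick -x` appends a conventional trailer to the message body from which the original hash could be recovered; whether this script matches exactly that trailer is an assumption here:

// Illustration only: the conventional trailer added by `git cherry-pick -x` looks like
// "(cherry picked from commit <full hash>)". A body line can be matched like this:
const CHERRY_PICK_RE = RegExp("^\\(cherry picked from commit ([a-zA-Z0-9]+)\\)$");

const bodyLine = "(cherry picked from commit d23922ffebe48f29126c003411495737d07e5a9f)";
const matches = bodyLine.match(CHERRY_PICK_RE);
if (matches) {
    const originalHash = matches[1]; // the hash of the original commit, not the cherry-pick
}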

@@ -784,7 +810,7 @@ async function main() {
    //
    // For intermediate releases (developer previews) we have preconfigured hashes and
    // can simply pass them to the final data. Frontend will handle the rest.

    dataProcessor.processCommits(commitsRaw, `${dataIO.data_owner}/${dataIO.data_repo}`);
    checkForExit();
@@ -2,22 +2,26 @@
        "name": "4.0.1",
        "ref": "4.0.1-stable",
        "from_ref": "4.0-stable",
        "article": "https://godotengine.org/article/maintenance-release-godot-4-0-1/",

        "releases": [
            {
                "name": "rc1",
                "ref": "d23922ffebe48f29126c003411495737d07e5a9f",
                "from_ref": "4.0-stable"
                "from_ref": "4.0-stable",
                "article": "https://godotengine.org/article/release-candidate-godot-4-0-1-rc-1/"
            },
            {
                "name": "rc2",
                "ref": "6970257cffc6790f4d7e847e87e5cab9e252874e",
                "from_ref": "d23922ffebe48f29126c003411495737d07e5a9f"
                "from_ref": "d23922ffebe48f29126c003411495737d07e5a9f",
                "article": "https://godotengine.org/article/release-candidate-godot-4-0-1-rc-2/"
            },
            {
                "name": "stable",
                "ref": "4.0.1-stable",
                "from_ref": "6970257cffc6790f4d7e847e87e5cab9e252874e"
                "from_ref": "6970257cffc6790f4d7e847e87e5cab9e252874e",
                "article": "https://godotengine.org/article/maintenance-release-godot-4-0-1/"
            }
        ]
    }
}
@@ -107,7 +107,7 @@ export default class EntryComponent extends LitElement {
    async _requestVersionData(version) {
        // Start loading, show the indicator.
        this._loadingVersions.push(version.name);

        const versionData = await greports.api.getVersionData(this._selectedRepository, version.name);
        versionData.config = version;
        this._versionData[version.name] = versionData;
@@ -116,13 +116,25 @@ export default class EntryComponent extends LitElement {
        const [...commitLog] = versionData.log;
        commitLog.reverse();

        version.commit_log = commitLog;
        // We need to filter out all merge commits for display and the count.
        version.commit_log = [];
        commitLog.forEach((commitHash) => {
            const commit = versionData.commits[commitHash];
            if (commit.is_merge) {
                return; // Continue.
            }

            version.commit_log.push(commitHash);
        });

        version.releases.forEach((release) => {
            release.commit_log = [];

            let counting = false;
            commitLog.forEach((commitHash, index) => {
                if (counting) {
                const commit = versionData.commits[commitHash];
                // We need to filter out all merge commits for display and the count.
                if (counting && !commit.is_merge) {
                    release.commit_log.push(commitHash);
                }
@@ -133,7 +145,11 @@ export default class EntryComponent extends LitElement {
                if (release.from_ref === version.from_ref && index === 0) {
                    counting = true;
                    // HACK: Exclude the lower end by default, but include for the first range.
                    release.commit_log.push(commitHash);
                    if (!commit.is_merge) {
                        // It shouldn't be possible for the first commit to be a merge commit,
                        // but let's guard anyway.
                        release.commit_log.push(commitHash);
                    }
                }
                else if (commitHash === release.from_ref) {
                    counting = true;
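Condensed, the per-release logic in the two hunks above amounts to slicing the chronologically ordered commit log into ranges while skipping merge commits. The sketch below is a simplification; in particular, the handling of a range's upper bound is not visible in this diff and is assumed here to stop at the release's own ref:

// Simplified sketch of the range logic (upper-bound handling is an assumption):
// walk the chronologically ordered log, start counting after the release's from_ref
// (the very first range includes its lower bound), and skip merge commits throughout.
function collectReleaseRange(commitLog, commits, release, versionFromRef) {
    const range = [];
    let counting = release.from_ref === versionFromRef; // first range includes its lower bound

    for (const commitHash of commitLog) {
        if (counting && !commits[commitHash].is_merge) {
            range.push(commitHash);
        }
        if (commitHash === release.from_ref) {
            counting = true; // later ranges exclude the lower bound itself
        }
        if (commitHash === release.ref) {
            break; // assumed upper bound, not shown in the diff
        }
    }
    return range;
}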