Mirror of https://github.com/godotengine/godot-interactive-changelog.git
Make API calls more robust for bigger queries
- Also fix sorting for different version formats
@@ -17,6 +17,7 @@ const LogFormat = {
 };
 
 const COMMITS_PER_PAGE = 150;
+const API_DELAY_MSEC = 2500;
 const API_RATE_LIMIT = `
   rateLimit {
     limit
@@ -82,6 +83,10 @@ class DataFetcher {
         });
     }
 
+    async delay(msec) {
+        return new Promise(resolve => setTimeout(resolve, msec));
+    }
+
     async checkoutRepo(atCommit) {
         try {
             // Make sure that the temp folder exists and is empty.
@@ -125,7 +130,7 @@ class DataFetcher {
         }
     }
 
-    async fetchGithub(query) {
+    async fetchGithub(query, retries = 0) {
         const init = {};
         init.method = "POST";
         init.headers = {};
@@ -140,7 +145,18 @@ class DataFetcher {
             query,
         });
 
-        return await fetch("https://api.github.com/graphql", init);
+        let res = await fetch("https://api.github.com/graphql", init);
+        let attempt = 0;
+        while (res.status !== 200 && attempt < retries) {
+            attempt += 1;
+            console.log(`    Failed with status ${res.status}, retrying (${attempt}/${retries})...`);
+
+            // GitHub API is flaky, so we add an extra delay to let it calm down a bit.
+            await this.delay(API_DELAY_MSEC);
+            res = await fetch("https://api.github.com/graphql", init);
+        }
+
+        return res;
     }
 
     async fetchGithubRest(query) {
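Note on the hunk above: fetchGithub() now retries a failed GraphQL call up to `retries` times, sleeping between attempts, and returns the last response either way. A minimal standalone sketch of the same bounded-retry pattern (the function name, endpoint, and default values here are illustrative, not part of the commit):

// Hypothetical helper sketching the bounded-retry pattern used above.
async function fetchWithRetries(url, init, retries = 2, delayMsec = 2500) {
    const delay = (msec) => new Promise(resolve => setTimeout(resolve, msec));

    let res = await fetch(url, init);
    let attempt = 0;
    while (res.status !== 200 && attempt < retries) {
        attempt += 1;
        // Back off before retrying to give a flaky API time to recover.
        await delay(delayMsec);
        res = await fetch(url, init);
    }
    // Exhausting retries returns the last failed response rather than
    // throwing, so callers must still check res.status themselves.
    return res;
}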
@@ -271,7 +287,7 @@ class DataFetcher {
 
             console.log(`    Requesting batch ${page}/${totalPages} of commit and pull request data.`);
 
-            const res = await this.fetchGithub(query);
+            const res = await this.fetchGithub(query, 2);
             if (res.status !== 200) {
                 this._handleResponseErrors(this.api_repository_id, res);
                 process.exitCode = ExitCodes.RequestFailure;
@@ -291,7 +307,8 @@ class DataFetcher {
             }
 
             const rate_limit = data.data["rateLimit"];
-            console.log(`    [$${rate_limit.cost}][${rate_limit.nodeCount}] Retrieved ${Object.keys(commit_data).length} commits; processing...`);
+            console.log(`    [$${rate_limit.cost}][${rate_limit.nodeCount}] Retrieved ${Object.keys(commit_data).length} commits.`);
+            console.log(`    --`);
             return commit_data;
         } catch (err) {
             console.error("    Error fetching pull request data: " + err);
@@ -497,11 +514,16 @@ class DataProcessor {
     processCommits(commitsRaw, targetRepo) {
         try {
             for (let commitHash in commitsRaw) {
-                if (typeof this.commits[commitHash] === "undefined") {
-                    throw new Error(`Received data for a commit hash "${commitHash}", but this commit is unknown.`);
+                if (commitsRaw[commitHash] == null) {
+                    console.warn(`    Requested data for a commit hash "${commitHash}", but received nothing.`);
+                    continue;
+                }
+                if (typeof this.commits[commitHash] === "undefined") {
+                    console.warn(`    Received data for a commit hash "${commitHash}", but this commit is unknown.`);
+                    continue;
                 }
-                const commit = this.commits[commitHash];
                 const item = commitsRaw[commitHash];
+                const commit = this.commits[commitHash];
 
                 // Commits can have multiple authors, we will list all of them. Also, associated PRs
                 // can be authored by somebody else entirely. We will store them with the PR, and will
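Note on the `== null` guard above: loose equality against null is true for both null and undefined, so a single check covers a response that contains an explicit null node as well as a key that is absent entirely. A quick illustration (the payload shape is hypothetical):

const commitsRaw = { "abc123": null };      // hypothetical response payload
console.log(commitsRaw["abc123"] == null);  // true  (explicit null)
console.log(commitsRaw["missing"] == null); // true  (undefined)
console.log(commitsRaw["abc123"] === null); // true, but would miss undefined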
@@ -579,7 +601,7 @@ class DataProcessor {
                 });
             }
         } catch (err) {
-            console.error("    Error parsing pull request data: " + err);
+            console.error("    Error parsing commit and pull request data: " + err);
             process.exitCode = ExitCodes.ParseFailure;
         }
     }
@@ -723,9 +745,6 @@ async function main() {
             process.exit();
         }
     };
-    const delay = async (msec) => {
-        return new Promise(resolve => setTimeout(resolve, msec));
-    }
 
     // Getting PRs between two commits is a complicated task, and must be done in
     // multiple steps. GitHub API does not have a method for that, so we must improvise.
@@ -795,32 +814,42 @@ async function main() {
     // in batches, so the number of nodes in each request is manageable.
 
     console.log("[*] Fetching commit data from GitHub.");
+    let commitsRaw = {};
 
     const totalPages = Math.ceil(commitHashes.length / COMMITS_PER_PAGE);
     // Pages are starting with 1 for better presentation.
     let page = 1;
     while (page <= totalPages) {
         const batchHashes = commitHashes.splice(0, COMMITS_PER_PAGE);
-        const commitsRaw = await dataFetcher.fetchCommits(batchHashes, page, totalPages);
-        checkForExit();
-
-        // Fourth, we consolidate the information. Commits are populated with links to their
-        // respective PRs, and PRs store references to their commits. We will save this to
-        // a file for the specified range, which should be between two stable releases.
-        //
-        // For intermediate releases (developer previews) we have preconfigured hashes and
-        // can simply pass them to the final data. Frontend will handle the rest.
-
-        dataProcessor.processCommits(commitsRaw, `${dataIO.data_owner}/${dataIO.data_repo}`);
+        const batchCommits = await dataFetcher.fetchCommits(batchHashes, page, totalPages);
         checkForExit();
 
+        Object.assign(commitsRaw, batchCommits);
         page++;
 
         // Wait for a bit before proceeding to avoid hitting the secondary rate limit in GitHub API.
         // See https://docs.github.com/en/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits.
-        await delay(1500);
+        await dataFetcher.delay(API_DELAY_MSEC);
+
+        // Add an extra delay every few requests, because the chance to trigger the hidden rate issue
+        // seems to grow with the number of queries.
+        if (page % 8 === 0) {
+            console.log("[*] Waiting a bit for the API to cool down...");
+            await dataFetcher.delay(API_DELAY_MSEC * 4);
+        }
     }
 
+    // Fourth, we consolidate the information. Commits are populated with links to their
+    // respective PRs, and PRs store references to their commits. We will save this to
+    // a file for the specified range, which should be between two stable releases.
+    //
+    // For intermediate releases (developer previews) we have preconfigured hashes and
+    // can simply pass them to the final data. Frontend will handle the rest.
+
+    console.log(`[*] Processing ${Object.keys(commitsRaw).length} commits.`);
+    dataProcessor.processCommits(commitsRaw, `${dataIO.data_owner}/${dataIO.data_repo}`);
+    checkForExit();
+
     console.log("[*] Checking the rate limits after.")
     await dataFetcher.checkRates();
     checkForExit();
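Note on the pacing above: every batch now waits API_DELAY_MSEC (2.5 s), and because `page % 8 === 0` is evaluated after the increment, every eighth batch adds an extra four-fold cooldown. A back-of-envelope estimate of the total deliberate delay, assuming an illustrative 24 pages:

const API_DELAY_MSEC = 2500;
const totalPages = 24;                              // illustrative only
const baseWait = totalPages * API_DELAY_MSEC;       // 60000 ms
// page runs 2..totalPages+1 after the increment, so the cooldown fires
// at page 8, 16, 24 -> floor((totalPages + 1) / 8) times.
const cooldowns = Math.floor((totalPages + 1) / 8); // 3
const extraWait = cooldowns * API_DELAY_MSEC * 4;   // 30000 ms
console.log(`~${(baseWait + extraWait) / 1000}s of deliberate delay`); // ~90s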
@@ -77,7 +77,7 @@ async function publishConfigs() {
 
     await fs.access(configPath, fsConstants.R_OK);
     const configContent = await fs.readFile(configPath);
 
     const config = JSON.parse(configContent);
     const [ config_owner, config_repo ] = nameBits;
 
@@ -101,10 +101,16 @@ async function publishConfigs() {
     const envalueVersion = (versionString) => {
         let value = 0;
         const versionBits = versionString.split(".");
+        while (versionBits.length < 4) {
+            // Make sure we have exactly 4 values in the version. Normally Godot
+            // versions are either X.Y or X.Y.Z, but there are exceptions, so we
+            // normalize the size to match them.
+            versionBits.push("0");
+        }
 
         let i = 0;
         while (versionBits.length > 0) {
-            value += versionBits.pop() * Math.pow(100, i);
+            value += parseInt(versionBits.pop(), 10) * Math.pow(100, i);
             i++;
         }
 
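Note on envalueVersion above: it right-pads the version to four components and packs them into a single base-100 integer, so version strings can be compared numerically. Switching to parseInt(..., 10) is what makes suffixed components survive: parseInt("0-stable", 10) is 0, while the old implicit coercion ("0-stable" * 1) is NaN and would poison the whole value. A self-contained copy for experimentation (the return statement and the "-stable" input are assumptions, as the hunk does not show the function's tail):

const envalueVersion = (versionString) => {
    let value = 0;
    const versionBits = versionString.split(".");
    while (versionBits.length < 4) {
        versionBits.push("0");
    }
    let i = 0;
    while (versionBits.length > 0) {
        // parseInt tolerates suffixes: parseInt("0-stable", 10) === 0.
        value += parseInt(versionBits.pop(), 10) * Math.pow(100, i);
        i++;
    }
    return value; // assumed tail; not visible in the hunk above
};

console.log(envalueVersion("4.1"));        // 4010000
console.log(envalueVersion("4.1.2"));      // 4010200 -> sorts after "4.1"
console.log(envalueVersion("4.0-stable")); // 4000000 (NaN with the old coercion)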