diff --git a/index.js b/index.js
index 1b9d7ed..8027ddc 100644
--- a/index.js
+++ b/index.js
@@ -1,314 +1,248 @@
 const core = require("@actions/core");
 const github = require("@actions/github");
 const fs = require("fs");
-const path = require("path");
+const https = require("https");
+const url = require("url");
 const {
+  PutObjectCommand,
   S3Client,
   ListObjectsCommand,
-  GetObjectCommand,
-  HeadObjectCommand,
 } = require("@aws-sdk/client-s3");
 const { Octokit, App } = require("octokit");
-const exec = require("@actions/exec");
 
 var bucketName = core.getInput("bucketName");
 let client = new S3Client();
 let octokit = new Octokit({ auth: core.getInput("token") });
-var depPath = core.getInput("depPath");
-var repo = core.getInput("repo");
-var owner = core.getInput("owner");
-var main_branch = core.getInput("main_branch");
+var repo_list_string = core.getInput("repo");
+var repo_list = repo_list_string.split(",");
 
-// opens folder where dependency configs are stored
-const dir = fs.opendirSync(depPath);
-
-async function getMainRef() {
-  // create ref of the main branch
+async function writeToS3(response, FILE_NAME, path) {
   try {
-    var ref = await octokit.request("GET /repos/{owner}/{repo}/git/ref/{ref}", {
-      owner: owner,
-      repo: repo,
-      ref: "heads/" + main_branch,
-    });
-  } catch {
-    console.log(err);
-    throw err;
-  }
-  return ref;
-}
+    const writeStream = fs.createWriteStream(FILE_NAME);
+    // writing tarball to file
+
+    response.pipe(writeStream).on("finish", async function () {
+      writeStream.close();
+      var fileData = fs.readFileSync(FILE_NAME);
+      // getting downloaded tarfile to send to s3 bucket
+
+      var putParams = {
+        Bucket: bucketName,
+        Key: path,
+        Body: fileData,
+      };
+      try {
+        const data = await client.send(new PutObjectCommand(putParams));
+        console.log("File Successfully Uploaded");
+        return data;
+      } catch (err) {
+        console.log("Error", err);
+      }
+      // sending to s3 bucket
 
-async function createRef(hash, branchName) {
-  // creating a new branch with name: branchName
-  // based on hash taken from the branch we want the new one to be based on
-  try {
-    console.log("creating ref");
-    var res = await octokit.request("POST /repos/{owner}/{repo}/git/refs", {
-      owner: owner,
-      repo: repo,
-      ref: "refs/heads/" + branchName,
-      sha: hash,
     });
-  } catch {
+  } catch (err) {
     console.log(err);
-    throw err;
   }
-  return res;
 }
 
-async function createBranch(branchName) {
+async function updateDependencies(FILE_NAME, tag_name, repo, owner) {
+  var TAR_URL =
+    "https://api.github.com/repos/" +
+    owner +
+    "/" +
+    repo +
+    "/tarball/" +
+    tag_name;
+  // download location of the tarfile of a repo for a specific release
+
+  var path = "Dependencies/" + repo + "/" + FILE_NAME;
+  // path where to store tar file on s3 bucket
+
+  var options = {
+    host: "api.github.com",
+    path: TAR_URL,
+    method: "GET",
+    headers: { "user-agent": "node.js" },
+  };
+  console.log(TAR_URL);
   try {
-    // get ref of branch we want the new branch to be based on
-    var ref = await getMainRef();
-
-    var hash = ref.data.object.sha;
-
-    // pass in the hash
-    var res = await createRef(hash, branchName);
+    await https.get(options, (response) => {
+      if (
+        response.statusCode > 300 &&
+        response.statusCode < 400 &&
+        response.headers.location
+      ) {
+        if (url.parse(response.headers.location).hostname) {
+          https.get(response.headers.location, (response) => {
+            writeToS3(response, FILE_NAME, path);
+          });
+        } else {
+          https.get(
+            url.resolve(url.parse(TAR_URL).hostname, response.headers.location),
+            (response) => {
+              writeToS3(response, FILE_NAME, path);
+            }
+          );
+        }
+      } else {
+        writeToS3(response, FILE_NAME, path);
+      }
+    });
   } catch (err) {
     console.log(err);
-    throw err;
   }
 }
-
-async function listDependenciesS3(path) {
+async function ListDependenciesS3(path) {
   var params = {
     Bucket: bucketName,
     Prefix: path + "/",
   };
-
-  // gets all objects in the bucket specified by path
-  const data = await client.send(new ListObjectsCommand(params));
-  if (data.length < 0) {
-    return data;
-  }
-
-  // gets all the file names that end with the file extension .gz and sorts them by LastModified Desc
-  // result is an array with the most recent versions of the tar files coming first
-  var files = data.Contents?.filter((file) => {
-    file.Key.includes(".gz");
-  }).sort((file1, file2) => file2.LastModified - file1.LastModified);
-
-  return files;
-}
-
-async function generateHash(key) {
-  var params = {
-    Bucket: bucketName,
-    Key: key,
-  };
   try {
-    // Create a helper function to convert a ReadableStream to a string.
-    const streamToString = (stream) =>
-      new Promise((resolve, reject) => {
-        const chunks = [];
-        stream.on("data", (chunk) => chunks.push(chunk));
-        stream.on("error", reject);
-        stream.on("end", () => resolve(Buffer.concat(chunks).toString("utf8")));
-      });
-
-    // Get the object from the Amazon S3 bucket. It is returned as a ReadableStream.
-    const data = await client.send(new GetObjectCommand(params));
-
-    // Convert the ReadableStream to a string.
-    const bodyContents = await streamToString(data.Body);
-
-    var crypto = require("crypto");
-
-    // creating hash object
-    var hash = crypto.createHash("sha512");
+    const data = await client.send(new ListObjectsCommand(params));
+    if (data.length < 0) {
+      return data;
+    }
+    // gets all objects in the bucket folder specified by path
 
-    // passing the data to be hashed
-    hash_data = hash.update(bodyContents, "utf-8");
+    var files = data.Contents?.filter((file) => {
+      return file.Key.indexOf(".gz") > 0;
+    }).sort((file1, file2) => -1 * (file1.LastModified - file2.LastModified));
+    // gets files that have .gz in file name sorted by last modified date desc
 
-    // Creating the hash in the required format
-    gen_hash = hash_data.digest("hex");
-    return gen_hash;
+    return files;
   } catch (err) {
-    console.log("Error", err);
-    throw err;
+    console.log(err);
+    return [];
   }
 }
-
-async function getLastModified(key) {
-  var params = {
-    Bucket: bucketName,
-    Key: key,
-  };
-  // getting last modified time of an object in s3 bucket
-  const data = await client.send(new HeadObjectCommand(params));
-  return data.LastModified;
-}
-
-async function existsPR() {
-  var res = await octokit.request("GET /repos/{owner}/{repo}/pulls", {
-    owner: owner,
-    repo: repo,
-  });
-  // checking if a pull request with "Automated Config Update" as the title exists
-  return (
-    res.data.filter((e) => e.title === "Automated Config Update").length > 0
-  );
-}
-
-async function deleteBranch(branchName) {
+async function getLatest(repo, owner) {
   try {
-    return await octokit.request(
-      "DELETE /repos/{owner}/{repo}/git/refs/{ref}",
+    var latest = await octokit.request(
+      "GET /repos/{owner}/{repo}/releases/latest",
       {
         owner: owner,
         repo: repo,
-        ref: "heads/" + branchName,
       }
     );
-  } catch {
+    return latest;
+  } catch (err) {
     console.log(err);
-    throw err;
+    return null;
   }
 }
 
-async function getBranches() {
-  try {
-    var res = await octokit.request("GET /repos/{owner}/{repo}/branches", {
-      owner: owner,
-      repo: repo,
-    });
-    return res.data.filter((e) => e.name.includes("AutomatedConfigUpdate"));
-  } catch {
-    console.log(err);
-    throw err;
+function compareVersions(v1, v2) {
+  let v1_split = v1.split(".");
+  let v2_split = v2.split(".");
+  if (v1_split.length == v2_split.length) {
+    for (let i = 0; i < v1_split.length; i++) {
+      if (v1_split[i] > v2_split[i]) {
+        return 1;
+      }
+      if (v1_split[i] < v2_split[i]) {
+        return -1;
+      }
+    }
+    return 0;
+  } else {
+    return 0;
   }
 }
 
-async function cleanUpBranches() {
-  var branchList = await getBranches();
-  branchList.forEach((branch) => {
-    deleteBranch(branch.name);
-  });
-}
-
-async function updateConfig() {
+function getConfig(repo) {
   try {
-    var exists_PR = await existsPR();
-
-    // if a pull request exists, exit early
-    if (exists_PR) {
-      console.log("A Pull Request Already Exists");
-
-      return;
-    }
-    console.log("Cleaning Up Branches");
-    await cleanUpBranches();
-
-    // generate new branch name with current time
-    var branchName = "AutomatedConfigUpdate_" + new Date().getTime().toString();
-
-    // create new branch
-    await createBranch(branchName);
+    var depPath = core.getInput("depPath");
+    var config = JSON.parse(fs.readFileSync(depPath, "utf8"));
+    // opening dependency json file
 
-    // fetch changes
-    await exec.exec("git", ["fetch"]);
-
-    // checkout created branch
-    console.log("checking out Code");
-    await exec.exec("git", ["checkout", branchName]);
-
-    let dirent;
-
-    // reading all the files in folder where dependency configs are stored
-    while ((dirent = dir.readSync()) !== null) {
-      console.log(dirent.name);
-      var current_repo = dirent.name.replace(".json", "");
-
-      // opening dependency json file
-      var config = JSON.parse(
-        fs.readFileSync(path.join(depPath, dirent.name)),
-        "utf8"
-      );
-      if (config["freeze"]) {
-        console.log("Version Freeze Enabled Skipping Updates");
-        continue;
-      }
-
-      // getting list of tar files stored on s3 sorted by version descending
-      var s3_dep_list = await listDependenciesS3(
-        "Dependencies/" + current_repo
-      );
-
-      // if there are no tar files stored on s3, no pull request is needed
-      if (!s3_dep_list) {
-        console.log("No Dependencies on S3 storage");
-        continue;
-      }
+    return config[repo];
+  } catch (err) {
+    console.log(err);
+    return {};
+  }
+}
+function parseConfig(cfg) {
+  try {
+    var path = cfg["path"];
+    var url = cfg["github_url"];
+    var org = url.split("/")[0];
+    return [path, org];
+  } catch (err) {
+    console.log(err);
+    return [];
+  }
+}
 
-      // getting the newest version of the tar file
-      var s3_latest = s3_dep_list[0];
+async function syncDependencies(repo) {
+  var cfg = getConfig(repo);
+  if (JSON.stringify(cfg) === "{}") {
+    console.log("Dependency Config is Empty");
+    return;
+  }
+  // read info about repo to update from config file
 
-      // getting the last modified time of the newest version of the tar file
-      var lastModified = await getLastModified(s3_latest.Key);
+  var path_and_org = parseConfig(cfg);
+  if (path_and_org.length == 0) {
+    console.log("Could not parse config file");
+    return;
+  }
+  var owner = path_and_org[1];
+  var path = path_and_org[0];
 
-      // if config has been updated before
-      if (config["last_updated"] != "") {
-        // get time last updated
-        var last_updated = new Date(config["last_updated"]);
+  var s3_dep_list = await ListDependenciesS3(path);
+  // get latest versions of tar file on s3 bucket
 
-        // if the newest tar file was uploaded after the last time the config file was updated then config file needs to be updated
-        if (lastModified > last_updated) {
-          // change last updated time to current time
-          config["last_updated"] = new Date().toUTCString();
-        } else {
-          console.log("config already up to date");
-          continue;
-        }
-      } else {
-        // config has never been updated, so last updated time must be now
-        config["last_updated"] = new Date().toUTCString();
-      }
-      // generate hash of latest tar file stored on s3
-      var hash = await generateHash(s3_latest.Key);
+  var gh_latest_release = await getLatest(repo, owner);
+  // gets latest version of the repo on Github
 
-      console.log("hash:" + hash);
-      console.log(s3_latest.Key);
-      config["SHA512"] = hash;
-      var version =
-        "v" +
-        s3_latest.Key.replace(
-          "Dependencies/" + current_repo + "/" + current_repo + "-",
-          ""
-        ).replace(".tar.gz", "");
-      config["version"] = version;
+  if (gh_latest_release == null) {
+    console.log("Could not fetch latest release on Github");
+    return;
+  }
 
-      // writing changes to file
-      await fs.writeFile(
-        path.join(depPath, dirent.name),
-        JSON.stringify(config),
-        function writeJSON(err) {
-          if (err) return console.log(err);
-        }
-      );
-    } // add changes to git
-    await exec.exec("git", ["add", "."]);
+  var g_tag = gh_latest_release.data.tag_name.replace("v", "");
+  // remove the v and leave just the version number
 
-    // commit changes
-    await exec.exec("git", ["commit", "-m", "Automated Config Update"]);
+  if (!s3_dep_list) {
+    // if there are no versions stored on the s3 bucket of this repo
 
-    // push to remote origin
-    await exec.exec("git", ["push"]);
+    updateDependencies(
+      repo + "-" + g_tag + ".tar.gz",
+      gh_latest_release.data.tag_name,
+      repo,
+      owner
+    );
+    return;
+  }
 
-    // create pull request from newly created branch to the main branch
-    await octokit.request("POST /repos/{owner}/{repo}/pulls", {
-      owner: owner,
-      repo: repo,
-      title: "Automated Config Update",
-      body: "Approve Changes",
-      head: branchName,
-      base: main_branch,
-    });
-  } catch (err) {
-    // Commiting and Pushing Changes failed
-    // Abort Creating Pull request
-    // Delete newly created branch
+  var s3_latest = s3_dep_list[0];
+  // s3_dep_list is sorted by last modified date descending, so the first element is the latest version in the s3 bucket
 
-    await deleteBranch(branchName);
+  var s3_latest_tag = s3_latest.Key.substring(
+    s3_latest.Key.indexOf("-") + 1,
+    s3_latest.Key.indexOf(".tar")
+  );
+  // getting version number of latest tar file stored in s3 bucket
+
+  console.log("Latest Version on S3: " + s3_latest_tag);
+  console.log("Latest Version on Github: " + g_tag);
+
+  if (compareVersions(g_tag, s3_latest_tag)) {
+    // if version on Github is newer than one stored on s3, update dependency
+
+    console.log("Updating Dependency");
+    updateDependencies(
+      repo + "-" + g_tag + ".tar.gz",
+      gh_latest_release.data.tag_name,
+      repo,
+      owner
+    );
+  } else {
+    console.log("Dependency Already Up to Date");
  }
 }
-updateConfig();
+
+repo_list.forEach((element) => {
+  syncDependencies(element);
+});
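
Note on the new compareVersions: it compares the dot-separated segments as strings, so "10" sorts before "9"; it returns 0 whenever the two tags have a different number of segments; and syncDependencies uses its return value as a boolean, so a result of -1 (GitHub release older than the S3 copy) also triggers an update. A minimal numeric sketch, assuming missing segments should count as zero (that behavior is not in the original):

// Compare dot-separated version strings numerically; returns 1, -1, or 0.
// Missing segments are treated as 0, so "1.2" compares equal to "1.2.0".
function compareVersions(v1, v2) {
  const a = v1.split(".").map(Number);
  const b = v2.split(".").map(Number);
  const len = Math.max(a.length, b.length);
  for (let i = 0; i < len; i++) {
    const x = a[i] || 0; // absent or non-numeric segment counts as 0
    const y = b[i] || 0;
    if (x > y) return 1;
    if (x < y) return -1;
  }
  return 0;
}

The caller would then test the sign explicitly, e.g. if (compareVersions(g_tag, s3_latest_tag) > 0), so an older GitHub tag does not overwrite a newer tarball already in the bucket.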
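Note on ListDependenciesS3: the ListObjectsCommand response is a plain object, so data.length is undefined and the data.length < 0 guard can never fire; when the prefix matches nothing, Contents is absent and the function falls through the optional chain and returns undefined. A sketch with an explicit guard, assuming an empty array is the sensible empty result (callers would then check s3_dep_list.length === 0 rather than !s3_dep_list):

const { S3Client, ListObjectsCommand } = require("@aws-sdk/client-s3");

const client = new S3Client();

async function listDependenciesS3(bucketName, prefix) {
  const params = { Bucket: bucketName, Prefix: prefix + "/" };
  try {
    const data = await client.send(new ListObjectsCommand(params));
    // Contents is undefined when no object matches the prefix
    if (!data.Contents) {
      return [];
    }
    // keep only the tarballs, newest first by LastModified
    return data.Contents
      .filter((file) => file.Key.endsWith(".gz"))
      .sort((a, b) => b.LastModified - a.LastModified);
  } catch (err) {
    console.log(err);
    return [];
  }
}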
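Note on updateDependencies: https.get returns a ClientRequest, not a promise, so await https.get(options, ...) resolves before the download completes, and errors raised inside the callback never reach the surrounding try/catch; the repo_list.forEach at the bottom likewise fires syncDependencies without awaiting it. One way to make the download awaitable, sketched with redirect handling folded into a recursive call (the GitHub tarball endpoint answers with a redirect to the archive host):

const fs = require("fs");
const https = require("https");

function downloadToFile(requestUrl, fileName) {
  return new Promise((resolve, reject) => {
    https
      .get(requestUrl, { headers: { "user-agent": "node.js" } }, (response) => {
        // follow redirects by recursing on the Location header
        if (
          response.statusCode >= 300 &&
          response.statusCode < 400 &&
          response.headers.location
        ) {
          resolve(downloadToFile(response.headers.location, fileName));
          return;
        }
        if (response.statusCode !== 200) {
          reject(new Error("Unexpected status " + response.statusCode));
          return;
        }
        const writeStream = fs.createWriteStream(fileName);
        response.pipe(writeStream);
        writeStream.on("finish", () => resolve(fileName));
        writeStream.on("error", reject);
      })
      .on("error", reject);
  });
}

updateDependencies could then await downloadToFile(TAR_URL, FILE_NAME) before reading the file back and issuing the PutObjectCommand, and the forEach could become a for...of loop that awaits each syncDependencies call.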