mirror of
https://github.com/jquery/jquery.git
synced 2024-12-09 08:04:24 +00:00
2cf659189e
*Authors*
- Checking and updating authors has been migrated to a custom script in the repo

*Changelog*
- changelogplease is no longer maintained
- generate changelog in markdown for GitHub releases
- generate changelog in HTML for blog posts
- generate contributors list in HTML for blog posts

*dist*
- clone dist repo, copy files, and commit/push
- commit tag with dist files on main branch; remove dist files from main branch after release

*cdn*
- clone cdn repo, copy files, and commit/push
- create versioned and unversioned copies in cdn/
- generate md5 sums and archives for Google and MSFT

*build*
- implement reproducible builds and verify release builds
  * uses the last modified date for the latest commit
  * See https://reproducible-builds.org/
- the verify workflow also ensures all files were properly published to the CDN and npm

*docs*
- the new release workflow is documented at build/release/README.md

*verify*
- use the last modified date of the commit before the tag
- use versioned filenames when checking map files on the CDN
- skip factory and package.json files when verifying the CDN

*misc*
- now that we don't need the jquery-release script and no longer need to build on Node 10, we can use ESM in all files in the build folder
- limit certain workflows to the main repo (not forks)
- version has been set to the previously released version 3.7.1, as release-it expects
- release-it added the `preReleaseBase` option and we now always set it to `1` in the npm script; this is a noop for stable releases
- include a post-release script to be run manually after a release, with further steps that should be verified manually

Ref jquery/jquery-release#114
Closes gh-5522
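The reproducible-builds item above hinges on deriving embedded timestamps from the latest commit rather than from the build machine's clock. As a rough sketch of that idea only (not the repo's actual implementation; the use of `SOURCE_DATE_EPOCH` here is an assumption based on the reproducible-builds convention), a build step could pin its date like this:

```js
// Hypothetical sketch: pin build timestamps to the latest commit so rebuilding
// the same commit yields byte-identical artifacts (see reproducible-builds.org).
import { execSync } from "node:child_process";

// %cI = committer date of the latest commit, strict ISO 8601.
const lastModified = execSync( "git log -1 --format=%cI" ).toString().trim();

// SOURCE_DATE_EPOCH is the reproducible-builds convention for a fixed timestamp.
process.env.SOURCE_DATE_EPOCH =
	String( Math.floor( Date.parse( lastModified ) / 1000 ) );
```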
194 lines
5.1 KiB
JavaScript
import fs from "node:fs/promises";
import { promisify } from "node:util";
import zlib from "node:zlib";
import { exec as nodeExec } from "node:child_process";
import chalk from "chalk";
import isCleanWorkingDir from "./isCleanWorkingDir.js";

const VERSION = 1;
const lastRunBranch = " last run";

const gzip = promisify( zlib.gzip );
const exec = promisify( nodeExec );

async function getBranchName() {
	const { stdout } = await exec( "git rev-parse --abbrev-ref HEAD" );
	return stdout.trim();
}

async function getCommitHash() {
	const { stdout } = await exec( "git rev-parse HEAD" );
	return stdout.trim();
}

function getBranchHeader( branch, commit ) {
	let branchHeader = branch.trim();
	if ( commit ) {
		branchHeader = chalk.bold( branchHeader ) + chalk.gray( ` @${ commit }` );
	} else {
		branchHeader = chalk.italic( branchHeader );
	}
	return branchHeader;
}

async function getCache( loc ) {
	let cache;
	try {
		const contents = await fs.readFile( loc, "utf8" );
		cache = JSON.parse( contents );
	} catch ( _ ) {
		return {};
	}

	const lastRun = cache[ lastRunBranch ];
	if ( !lastRun || !lastRun.meta || lastRun.meta.version !== VERSION ) {
		console.log( "Compare cache version mismatch. Rewriting..." );
		return {};
	}
	return cache;
}

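// For illustration only (editorial addition; file names and numbers are
// hypothetical): the cache file read by getCache() above and written by
// saveCache() below maps branch names to size entries, with the special
// " last run" key always holding the most recent run, e.g.:
//
// {
//   " last run": {
//     "meta": { "version": 1 },
//     "files": { "dist/jquery.min.js": { "raw": 87533, "gz": 30200 } }
//   },
//   "main": {
//     "meta": { "commit": "<full sha>" },
//     "files": { "dist/jquery.min.js": { "raw": 87533, "gz": 30200 } }
//   }
// }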
function cacheResults( results ) {
	const files = Object.create( null );
	results.forEach( function( result ) {
		files[ result.filename ] = {
			raw: result.raw,
			gz: result.gz
		};
	} );
	return files;
}

function saveCache( loc, cache ) {

	// Keep cache readable for manual edits
	return fs.writeFile( loc, JSON.stringify( cache, null, " " ) + "\n" );
}

function compareSizes( existing, current, padLength ) {
	if ( typeof current !== "number" ) {
		return chalk.grey( `${ existing }`.padStart( padLength ) );
	}
	const delta = current - existing;
	if ( delta > 0 ) {
		return chalk.red( `+${ delta }`.padStart( padLength ) );
	}
	return chalk.green( `${ delta }`.padStart( padLength ) );
}

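// Illustrative examples (editorial addition; numbers are hypothetical) of how
// compareSizes() renders the current size against a cached size:
//   existing 87533, current 87600   -> "+67" in red (size grew)
//   existing 87533, current 87500   -> "-33" in green (size shrank or is equal)
//   existing 87533, current missing -> "87533" in grey (file not in this run)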
function sortBranches( a, b ) {
	if ( a === lastRunBranch ) {
		return 1;
	}
	if ( b === lastRunBranch ) {
		return -1;
	}
	if ( a < b ) {
		return -1;
	}
	if ( a > b ) {
		return 1;
	}
	return 0;
}

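// Editorial note with a hypothetical example: branches sort alphabetically,
// except that the " last run" entry is always pushed to the end (its leading
// space would otherwise sort it first):
//   [ " last run", "main", "3.x-stable" ].sort( sortBranches )
//   // => [ "3.x-stable", "main", " last run" ]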
export async function compareSize( { cache = ".sizecache.json", files } = {} ) {
	if ( !files || !files.length ) {
		throw new Error( "No files specified" );
	}

	const branch = await getBranchName();
	const commit = await getCommitHash();
	const sizeCache = await getCache( cache );

	let rawPadLength = 0;
	let gzPadLength = 0;
	const results = await Promise.all(
		files.map( async function( filename ) {

			let contents = await fs.readFile( filename, "utf8" );

			// Remove the short SHA and .dirty from comparisons.
			// The short SHA so commits can be compared against each other
			// and .dirty to compare with the existing branch during development.
			const sha = /jQuery v\d+.\d+.\d+(?:-\w+)?(?:\+slim\.|\+)?([^ \.]+(?:\.dirty)?)?/.exec( contents )[ 1 ];
			contents = contents.replace( new RegExp( sha, "g" ), "" );

			const size = Buffer.byteLength( contents, "utf8" );
			const gzippedSize = ( await gzip( contents ) ).length;

			// Add one to give space for the `+` or `-` in the comparison
			rawPadLength = Math.max( rawPadLength, size.toString().length + 1 );
			gzPadLength = Math.max( gzPadLength, gzippedSize.toString().length + 1 );

			return { filename, raw: size, gz: gzippedSize };
		} )
	);

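	// Editorial illustration (the banner below is hypothetical): for a header
	// such as "jQuery v4.0.0-pre+slim.abc1234.dirty", the capture group above
	// picks up "abc1234.dirty", which is then stripped throughout the file so
	// that builds of different commits (or of a dirty working tree) are
	// compared by content size only.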
const sizeHeader = "raw".padStart( rawPadLength ) +
|
|
"gz".padStart( gzPadLength + 1 ) +
|
|
" Filename";
|
|
|
|
const sizes = results.map( function( result ) {
|
|
const rawSize = result.raw.toString().padStart( rawPadLength );
|
|
const gzSize = result.gz.toString().padStart( gzPadLength );
|
|
return `${ rawSize } ${ gzSize } ${ result.filename }`;
|
|
} );
|
|
|
|
const comparisons = Object.keys( sizeCache ).sort( sortBranches ).map( function( branch ) {
|
|
const meta = sizeCache[ branch ].meta || {};
|
|
const commit = meta.commit;
|
|
|
|
const files = sizeCache[ branch ].files;
|
|
const branchSizes = Object.keys( files ).map( function( filename ) {
|
|
const branchResult = files[ filename ];
|
|
const compareResult = results.find( function( result ) {
|
|
return result.filename === filename;
|
|
} ) || {};
|
|
|
|
const compareRaw = compareSizes( branchResult.raw, compareResult.raw, rawPadLength );
|
|
const compareGz = compareSizes( branchResult.gz, compareResult.gz, gzPadLength );
|
|
return `${ compareRaw } ${ compareGz } ${ filename }`;
|
|
} );
|
|
|
|
return [
|
|
"", // New line before each branch
|
|
getBranchHeader( branch, commit ),
|
|
sizeHeader,
|
|
...branchSizes
|
|
].join( "\n" );
|
|
} );
|
|
|
|
const output = [
|
|
"", // Opening new line
|
|
chalk.bold( "Sizes" ),
|
|
sizeHeader,
|
|
...sizes,
|
|
...comparisons,
|
|
"" // Closing new line
|
|
].join( "\n" );
|
|
|
|
console.log( output );
|
|
|
|
// Always save the last run
|
|
// Save version under last run
|
|
sizeCache[ lastRunBranch ] = {
|
|
meta: { version: VERSION },
|
|
files: cacheResults( results )
|
|
};
|
|
|
|
// Only save cache for the current branch
|
|
// if the working directory is clean.
|
|
if ( await isCleanWorkingDir() ) {
|
|
sizeCache[ branch ] = {
|
|
meta: { commit },
|
|
files: cacheResults( results )
|
|
};
|
|
console.log( `Saved cache for ${ branch }.` );
|
|
}
|
|
|
|
await saveCache( cache, sizeCache );
|
|
|
|
return results;
|
|
}
|
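// Usage sketch (editorial addition; the module path and file names are
// illustrative, not an assertion about the build config that calls this):
//
//   import { compareSize } from "./compare_size.mjs";
//
//   const results = await compareSize( {
//       files: [ "dist/jquery.min.js", "dist/jquery.slim.min.js" ]
//   } );
//
// Prints a size table for the current build, a comparison against every cached
// branch plus the " last run" entry, and resolves with the per-file raw/gzip
// byte counts.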