Release: migrate release process to release-it

*Authors*
- Checking and updating authors has been migrated
  to a custom script in the repo

*Changelog*
- changelogplease is no longer maintained
- generate changelog in markdown for GitHub releases
- generate changelog in HTML for blog posts
- generate contributors list in HTML for blog posts

*dist*
- clone dist repo, copy files, and commit/push
- commit tag with dist files on main branch;
  remove dist files from main branch after release

*cdn*
- clone cdn repo, copy files, and commit/push
- create versioned and unversioned copies in cdn/
- generate md5 sums and archives for the Google and Microsoft CDNs

*build*
- implement reproducible builds and verify release builds
  * uses the last modified date for the latest commit
  * See https://reproducible-builds.org/
- the verify workflow also ensures all files were
  properly published to the CDN and npm

*docs*
- the new release workflow is documented at build/release/README.md

*verify*
- use the last modified date of the commit before the tag
- use versioned filenames when checking map files on the CDN
- skip factory and package.json files when verifying CDN

*misc*
- now that we don't need the jquery-release script and
  now that we no longer need to build on Node 10, we can
  use ESM in all files in the build folder
- limit certain workflows to the main repo (not forks)
- version has been set to the previously released version 3.7.1,
  as release-it expects
- release-it added the `preReleaseBase` option and we
  now always set it to `1` in the npm script. This is
  a noop for stable releases.
- include post-release script to be run manually after a release,
  with further steps that should be verified manually

Ref jquery/jquery-release#114
Closes gh-5522
This commit is contained in:
Timmy Willison 2023-07-27 11:24:49 -04:00
parent 3b2330240c
commit 2cf659189e
35 changed files with 4697 additions and 632 deletions

View File

@ -13,4 +13,3 @@ insert_final_newline = true
[*.{json,yml}]
indent_style = space
indent_size = 2

View File

@ -10,11 +10,13 @@ permissions:
jobs:
update:
name: Update Filestash
runs-on: ubuntu-latest
# skip on forks
if: ${{ github.repository == 'jquery/jquery' }}
environment: filestash
env:
NODE_VERSION: 20.x
name: Update Filestash
steps:
- name: Checkout
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7

37
.github/workflows/verify-release.yml vendored Normal file
View File

@ -0,0 +1,37 @@
name: Reproducible Builds

on:
  push:
    # On tags
    tags:
      - '*'
  # Or manually
  workflow_dispatch:
    inputs:
      version:
        description: 'Version to verify (>= 4.0.0-rc.1)'
        required: false

jobs:
  run:
    name: Verify release
    runs-on: ubuntu-latest
    # skip on forks
    if: ${{ github.repository == 'jquery/jquery' }}
    env:
      NODE_VERSION: 20.x
    steps:
      - name: Checkout
        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7

      - name: Use Node.js ${{ env.NODE_VERSION }}
        uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4.0.3
        with:
          node-version: ${{ env.NODE_VERSION }}

      - name: Install dependencies
        run: npm ci

      # On tag pushes the version comes from the tag name;
      # manual runs may pass an explicit version input.
      - run: npm run release:verify
        env:
          VERSION: ${{ github.event.inputs.version || github.ref_name }}

8
.gitignore vendored
View File

@ -3,12 +3,12 @@
*~
*.diff
*.patch
/*.html
.DS_Store
.bower.json
.sizecache.json
yarn.lock
.eslintcache
tmp
npm-debug.log*
@ -23,6 +23,10 @@ npm-debug.log*
/test/data/core/jquery-iterability-transpiled.js
/test/data/qunit-fixture.js
# Ignore BrowserStack files
# Release artifacts
changelog.html
contributors.html
# Ignore BrowserStack testing files
local.log
browserstack.err

View File

@ -1,12 +1,15 @@
.eslintignore
.eslintcache
eslint.config.js
/.editorconfig
/.gitattributes
/.mailmap
/.sizecache.json
/build
/external
/speed
/test
/Gruntfile.js
/tmp
/changelog.html
/contributors.html

44
.release-it.js Normal file
View File

@ -0,0 +1,44 @@
"use strict";
const blogURL = process.env.BLOG_URL;
if ( !blogURL || !blogURL.startsWith( "https://blog.jquery.com/" ) ) {
throw new Error( "A valid BLOG_URL must be set in the environment" );
}
// This is needed because until the release-it migration,
// all the previous release tags were disconnected from the 3.x branch.
// We can remove this if/when we do a 3.x release with the new setup.
const from = process.env.FROM_VERSION;
module.exports = {
preReleaseBase: 1,
hooks: {
"before:init": "bash ./build/release/pre-release.sh",
"after:version:bump":
"sed -i 's/main\\/AUTHORS.txt/${version}\\/AUTHORS.txt/' package.json",
"after:bump": "cross-env VERSION=${version} npm run build:all",
"before:git:release": "git add -f dist/ dist-module/ changelog.md",
"after:release": "echo 'Run the following to complete the release:' && " +
`echo './build/release/post-release.sh $\{version} ${ blogURL }'`
},
git: {
// Use the node script directly to avoid an npm script
// command log entry in the GH release notes
changelog: `node build/release/changelog.js ${ from ? from : "${from}" } $\{to}`,
commitMessage: "Release: ${version}",
getLatestTagFromAllRefs: true,
pushRepo: "git@github.com:jquery/jquery.git",
requireBranch: "main",
requireCleanWorkingDir: true
},
github: {
pushRepo: "git@github.com:jquery/jquery.git",
release: true,
tokenRef: "JQUERY_GITHUB_TOKEN"
},
npm: {
publish: true
}
};

View File

@ -1,8 +1,6 @@
"use strict";
const { build } = require( "./tasks/build" );
const yargs = require( "yargs/yargs" );
const slimExclude = require( "./tasks/lib/slim-exclude" );
import yargs from "yargs/yargs";
import { build } from "./tasks/build.js";
import slimExclude from "./tasks/lib/slim-exclude.js";
const argv = yargs( process.argv.slice( 2 ) )
.version( false )

3
build/package.json Normal file
View File

@ -0,0 +1,3 @@
{
"type": "module"
}

View File

@ -1,82 +0,0 @@
"use strict";
const fs = require( "node:fs" );
module.exports = function( Release ) {
const distFiles = [
"dist/jquery.js",
"dist/jquery.min.js",
"dist/jquery.min.map",
"dist/jquery.slim.js",
"dist/jquery.slim.min.js",
"dist/jquery.slim.min.map"
];
const filesToCommit = [
...distFiles,
"src/core.js"
];
const cdn = require( "./release/cdn" );
const dist = require( "./release/dist" );
const { buildDefaultFiles } = require( "./tasks/build" );
const npmTags = Release.npmTags;
Release.define( {
npmPublish: true,
issueTracker: "github",
/**
* Set the version in the src folder for distributing AMD
*/
_setSrcVersion: function() {
var corePath = __dirname + "/../src/core.js",
contents = fs.readFileSync( corePath, "utf8" );
contents = contents.replace( /@VERSION/g, Release.newVersion );
fs.writeFileSync( corePath, contents, "utf8" );
},
/**
* Generates any release artifacts that should be included in the release.
* The callback must be invoked with an array of files that should be
* committed before creating the tag.
* @param {Function} callback
*/
generateArtifacts: async function( callback ) {
await buildDefaultFiles( { version: Release.newVersion } );
cdn.makeReleaseCopies( Release );
Release._setSrcVersion();
callback( filesToCommit );
},
/**
* Acts as insertion point for restoring Release.dir.repo
* It was changed to reuse npm publish code in jquery-release
* for publishing the distribution repo instead
*/
npmTags: function() {
// origRepo is not defined if dist was skipped
Release.dir.repo = Release.dir.origRepo || Release.dir.repo;
return npmTags();
},
/**
* Publish to distribution repo and npm
* @param {Function} callback
*/
dist: function( callback ) {
cdn.makeArchives( Release, function() {
dist( Release, distFiles, callback );
} );
}
} );
};
module.exports.dependencies = [
"archiver@5.2.0",
"shelljs@0.8.4",
"inquirer@8.0.0",
"chalk@4.1.0"
];

123
build/release/README.md Normal file
View File

@ -0,0 +1,123 @@
# Releasing jQuery
This document describes the process for releasing a new version of jQuery. It is intended for jQuery team members and collaborators who have been granted permission to release new versions.
## Prerequisites
Before you can release a new version of jQuery, you need to have the following tools installed:
- [Node.js](https://nodejs.org/) (latest LTS version)
- [npm](https://www.npmjs.com/) (comes with Node.js)
- [git](https://git-scm.com/)
## Setup
1. Clone the jQuery repo:
```sh
git clone git@github.com:jquery/jquery.git
cd jquery
```
1. Install the dependencies:
```sh
npm install
```
1. Log into npm with a user that has access to the `jquery` package.
```sh
npm login
```
The release script will not run if not logged in.
1. Set `JQUERY_GITHUB_TOKEN` in the shell environment that will be used to run `npm run release`. The token can be [created on GitHub](https://github.com/settings/tokens/new?scopes=repo&description=release-it) and only needs the `repo` scope. This token is used to publish GitHub release notes and generate a list of contributors for the blog post.
```sh
export JQUERY_GITHUB_TOKEN=...
```
The release script will not run without this token.
## Release Process
1. Ensure all milestoned issues/PRs are closed, or reassign to a new milestone.
1. Verify all tests are passing in [CI](https://github.com/jquery/jquery/actions).
1. Run any release-only tests, such as those in the [`test/integration`](../../test/integration/) folder.
1. Ensure AUTHORS.txt file is up to date (this will be verified by the release script).
- Use `npm run authors:update` to update.
1. Create a draft blog post on blog.jquery.com and copy its permalink before publishing; the link is required to run the release.
- Highlight major changes and reason for release.
- Add HTML from the `changelog.html` generated in the below release script.
- Use HTML from the `contributors.html` generated in the below release script in the "Thanks" section.
1. Run a dry run of the release script:
```sh
BLOG_URL=https://blog.jquery.com/... npm run release -- -d
```
1. If the dry run is successful, run the release script:
```sh
BLOG_URL=https://blog.jquery.com/... npm run release
```
This will run the pre-release script, which includes checking authors, running tests, running the build, and cloning the CDN and jquery-dist repos in the `tmp/` folder.
It will then walk you through the rest of the release process: creating the tag, publishing to npm, publishing release notes on GitHub, and pushing the updated branch and new tag to the jQuery repo.
Finally, it will run the post-release script, which will ask you to confirm the files prepared in `tmp/release/cdn` and `tmp/release/dist` are correct before pushing to the respective repos. It will also prepare a commit for the jQuery repo to remove the release files and update the AUTHORS.txt URL in the package.json. It will ask for confirmation before pushing that commit as well.
For a pre-release, run:
```sh
BLOG_URL=https://blog.jquery.com/... npm run release -- --preRelease=beta
```
`preRelease` can also be set to `alpha` or `rc`.
**Note**: `preReleaseBase` is set in the npm script to `1` to ensure any pre-releases start at `.1` instead of `.0`. This does not interfere with stable releases.
1. Run the post-release script:
```sh
./build/release/post-release.sh $VERSION $BLOG_URL
```
This will push the release files to the CDN and jquery-dist repos, and push the commit to the jQuery repo to remove the release files and update the AUTHORS.txt URL in the package.json.
1. Once the release is complete, publish the blog post.
## Stable releases
Stable releases have a few more steps:
1. Close the milestone matching the current release: https://github.com/jquery/jquery/milestones. Ensure there is a new milestone for the next release.
1. Update jQuery on https://github.com/jquery/jquery-wp-content.
1. Update jQuery on https://github.com/jquery/blog.jquery.com-theme.
1. Update latest jQuery version for [healthyweb.org](https://github.com/jquery/healthyweb.org/blob/main/wrangler.toml).
1. Update the shipping version on [jquery.com home page](https://github.com/jquery/jquery.com).
```sh
git pull jquery/jquery.com
# Edit index.html and download.md
git commit
npm version patch
git push origin main --tags
```
1. Update the version used in [jQuery docs demos](https://github.com/jquery/api.jquery.com/blob/main/entries2html.xsl).
1. Email archives to CDNs.
| CDN | Emails | Include |
| --- | ------ | ------- |
| Google | hosted-libraries@google | `tmp/archives/googlecdn-jquery-*.zip` |
| Microsoft | damian.edwards@microsoft, Chris.Sfanos@microsoft | `tmp/archives/mscdn-jquery-*.zip` |
| CDNJS | ryan@ryankirkman, thomasalwyndavis@gmail | Blog post link |

59
build/release/archive.js Normal file
View File

@ -0,0 +1,59 @@
import { readdir, writeFile } from "node:fs/promises";
import { createReadStream, createWriteStream } from "node:fs";
import path from "node:path";
import util from "node:util";
import os from "node:os";
import { exec as nodeExec } from "node:child_process";
import archiver from "archiver";
const exec = util.promisify( nodeExec );
// Build the md5 manifest ("<hash> <file>" per line) for a set of files.
//
// On Windows, certutil is used and the hash is extracted from its
// output; elsewhere `md5 -r` prints the lines directly.
// NOTE(review): the non-Windows branch assumes a BSD/macOS-style `md5`
// binary — confirm this is never run on Linux (which ships md5sum).
async function md5sum( files, folder ) {
	if ( os.platform() === "win32" ) {
		const rmd5 = /[a-f0-9]{32}/;
		const lines = [];
		for ( const file of files ) {
			const { stdout } = await exec( `certutil -hashfile ${ file } MD5`, {
				cwd: folder
			} );
			lines.push( `${ rmd5.exec( stdout )[ 0 ] } ${ file }` );
		}
		return lines.join( "\n" );
	}
	const { stdout } = await exec( `md5 -r ${ files.join( " " ) }`, { cwd: folder } );
	return stdout;
}
/**
 * Create the zip archive for a CDN, containing every file in `folder`
 * plus a generated md5 manifest.
 *
 * The previous implementation used an async Promise executor: any error
 * thrown by readdir/md5sum/writeFile was swallowed and the returned
 * promise never settled. The async filesystem work is now done before
 * the stream is wired up, and the archiver stream gets its own error
 * handler, so all failures reject the returned promise.
 *
 * @param {Object} options
 * @param {string} options.cdn CDN identifier, used in filenames (e.g. "googlecdn")
 * @param {string} options.folder Folder whose files are archived
 * @param {string} options.version Release version, used in the zip filename
 * @returns {Promise<void>} resolves when the zip has been fully written
 */
export default async function archive( { cdn, folder, version } ) {
	console.log( `Creating production archive for ${ cdn }...` );

	const md5file = cdn + "-md5.txt";

	// Generate and write the md5 manifest first; failures here now
	// properly reject instead of being lost in an async executor.
	const files = await readdir( folder );
	const sum = await md5sum( files, folder );
	await writeFile( path.join( folder, md5file ), sum );
	files.push( md5file );

	return new Promise( ( resolve, reject ) => {
		const output = createWriteStream(
			path.join( folder, cdn + "-jquery-" + version + ".zip" )
		);
		output.on( "close", resolve );
		output.on( "error", reject );

		const zip = archiver( "zip" );

		// Surface archiver failures (previously unhandled).
		zip.on( "error", reject );
		zip.pipe( output );

		files.forEach( ( file ) => {
			const stream = createReadStream( path.join( folder, file ) );
			zip.append( stream, {
				name: path.basename( file )
			} );
		} );
		zip.finalize();
	} );
}

View File

@ -1,8 +1,11 @@
"use strict";
const fs = require( "node:fs/promises" );
const util = require( "node:util" );
const exec = util.promisify( require( "node:child_process" ).exec );
import fs from "node:fs/promises";
import util from "node:util";
import { exec as nodeExec } from "node:child_process";
const exec = util.promisify( nodeExec );
const rnewline = /\r?\n/;
const rdate = /^\[(\d+)\] /;
@ -47,7 +50,7 @@ async function getLastAuthor() {
async function logAuthors( preCommand ) {
let command = "git log --pretty=format:\"[%at] %aN <%aE>\"";
if ( preCommand ) {
command = preCommand + " && " + command;
command = `${ preCommand } && ${ command }`;
}
const { stdout } = await exec( command );
return uniq( stdout.trim().split( rnewline ).reverse() );
@ -63,21 +66,21 @@ async function getSizzleAuthors() {
function sortAuthors( a, b ) {
const [ , aDate ] = rdate.exec( a );
const [ , bDate ] = rdate.exec( b );
return parseInt( aDate ) - parseInt( bDate );
return Number( aDate ) - Number( bDate );
}
function formatAuthor( author ) {
return author.replace( rdate, "" );
}
async function getAuthors() {
export async function getAuthors() {
console.log( "Getting authors..." );
const authors = await logAuthors();
const sizzleAuthors = await getSizzleAuthors();
return uniq( authors.concat( sizzleAuthors ) ).sort( sortAuthors ).map( formatAuthor );
}
async function checkAuthors() {
export async function checkAuthors() {
const authors = await getAuthors();
const lastAuthor = await getLastAuthor();
@ -89,7 +92,7 @@ async function checkAuthors() {
console.log( "AUTHORS.txt is up to date" );
}
async function updateAuthors() {
export async function updateAuthors() {
const authors = await getAuthors();
const authorsTxt = "Authors ordered by first contribution.\n\n" + authors.join( "\n" ) + "\n";
@ -97,9 +100,3 @@ async function updateAuthors() {
console.log( "AUTHORS.txt updated" );
}
module.exports = {
checkAuthors,
getAuthors,
updateAuthors
};

View File

@ -1,151 +1,128 @@
"use strict";
import { mkdir, readFile, writeFile } from "node:fs/promises";
import path from "node:path";
import { argv } from "node:process";
import util from "node:util";
import { exec as nodeExec } from "node:child_process";
import { rimraf } from "rimraf";
import archive from "./archive.js";
var fs = require( "node:fs" ),
shell = require( "shelljs" ),
path = require( "node:path" ),
os = require( "node:os" ),
cdnFolder = "dist/cdn",
releaseFiles = {
"jquery-VER.js": "dist/jquery.js",
"jquery-VER.min.js": "dist/jquery.min.js",
"jquery-VER.min.map": "dist/jquery.min.map",
"jquery-VER.slim.js": "dist/jquery.slim.js",
"jquery-VER.slim.min.js": "dist/jquery.slim.min.js",
"jquery-VER.slim.min.map": "dist/jquery.slim.min.map"
},
googleFilesCDN = [
"jquery.js",
"jquery.min.js",
"jquery.min.map",
"jquery.slim.js",
"jquery.slim.min.js",
"jquery.slim.min.map"
],
msFilesCDN = [
"jquery-VER.js",
"jquery-VER.min.js",
"jquery-VER.min.map",
"jquery-VER.slim.js",
"jquery-VER.slim.min.js",
"jquery-VER.slim.min.map"
];
const exec = util.promisify( nodeExec );
/**
* Generates copies for the CDNs
*/
function makeReleaseCopies( Release ) {
shell.mkdir( "-p", cdnFolder );
const version = argv[ 2 ];
Object.keys( releaseFiles ).forEach( function( key ) {
var text,
builtFile = releaseFiles[ key ],
unpathedFile = key.replace( /VER/g, Release.newVersion ),
releaseFile = cdnFolder + "/" + unpathedFile;
if ( /\.map$/.test( releaseFile ) ) {
// Map files need to reference the new uncompressed name;
// assume that all files reside in the same directory.
// "file":"jquery.min.js" ... "sources":["jquery.js"]
text = fs
.readFileSync( builtFile, "utf8" )
.replace(
/"file":"([^"]+)"/,
`"file":"${ unpathedFile.replace( /\.min\.map/, ".min.js" ) }"`
)
.replace(
/"sources":\["([^"]+)"\]/,
`"sources":["${ unpathedFile.replace( /\.min\.map/, ".js" ) }"]`
);
fs.writeFileSync( releaseFile, text );
} else if ( builtFile !== releaseFile ) {
shell.cp( "-f", builtFile, releaseFile );
}
} );
if ( !version ) {
throw new Error( "No version specified" );
}
function makeArchives( Release, callback ) {
Release.chdir( Release.dir.repo );
const archivesFolder = "tmp/archives";
const versionedFolder = `${ archivesFolder }/versioned`;
const unversionedFolder = `${ archivesFolder }/unversioned`;
function makeArchive( cdn, files, callback ) {
if ( Release.preRelease ) {
console.log(
`Skipping archive creation for ${ cdn }; this is a beta release.`
);
callback();
return;
}
// The cdn repo is cloned during release
const cdnRepoFolder = "tmp/release/cdn";
console.log( "Creating production archive for " + cdn );
// .min.js and .min.map files are expected
// in the same directory as the uncompressed files.
const sources = [
"dist/jquery.js",
"dist/jquery.slim.js"
];
var i,
sum,
result,
archiver = require( "archiver" )( "zip" ),
md5file = cdnFolder + "/" + cdn + "-md5.txt",
output = fs.createWriteStream(
cdnFolder + "/" + cdn + "-jquery-" + Release.newVersion + ".zip"
),
rmd5 = /[a-f0-9]{32}/,
rver = /VER/;
const rminmap = /\.min\.map$/;
const rjs = /\.js$/;
output.on( "close", callback );
output.on( "error", function( err ) {
throw err;
} );
archiver.pipe( output );
files = files.map( function( item ) {
return (
"dist" +
( rver.test( item ) ? "/cdn" : "" ) +
"/" +
item.replace( rver, Release.newVersion )
);
} );
if ( os.platform() === "win32" ) {
sum = [];
for ( i = 0; i < files.length; i++ ) {
result = Release.exec(
"certutil -hashfile " + files[ i ] + " MD5",
"Error retrieving md5sum"
);
sum.push( rmd5.exec( result )[ 0 ] + " " + files[ i ] );
}
sum = sum.join( "\n" );
} else {
sum = Release.exec(
"md5 -r " + files.join( " " ),
"Error retrieving md5sum"
);
}
fs.writeFileSync( md5file, sum );
files.push( md5file );
files.forEach( function( file ) {
archiver.append( fs.createReadStream( file ), { name: path.basename( file ) } );
} );
archiver.finalize();
}
function buildGoogleCDN( callback ) {
makeArchive( "googlecdn", googleFilesCDN, callback );
}
function buildMicrosoftCDN( callback ) {
makeArchive( "mscdn", msFilesCDN, callback );
}
buildGoogleCDN( function() {
buildMicrosoftCDN( callback );
} );
// Delete any previously generated archives so stale files
// cannot end up in the new zips.
function clean() {
	console.log( "Cleaning any existing archives..." );
	return rimraf( archivesFolder );
}
module.exports = {
makeReleaseCopies: makeReleaseCopies,
makeArchives: makeArchives
};
// Write a versioned copy of a built file's source map.
// Map files need to reference the new uncompressed name; all files are
// assumed to reside in the same directory, e.g.
// "file":"jquery.min.js" ... "sources":["jquery.js"].
// Only the versioned copies need this rewrite.
async function convertMapToVersioned( file, folder ) {
	const sourceMapPath = file.replace( /\.js$/, ".min.map" );
	const versionedName = path
		.basename( sourceMapPath )
		.replace( "jquery", "jquery-" + version );

	const map = JSON.parse( await readFile( sourceMapPath, "utf8" ) );
	map.file = versionedName.replace( rminmap, ".min.js" );
	map.sources = [ versionedName.replace( rminmap, ".js" ) ];

	return writeFile(
		path.join( folder, versionedName ),
		JSON.stringify( map )
	);
}
// Copy the uncompressed, minified and source-map variants of each
// source file into the unversioned folder, keeping their original
// (unversioned) filenames. Files are processed in parallel; the three
// copies per file run sequentially.
async function makeUnversionedCopies() {
	await mkdir( unversionedFolder, { recursive: true } );

	return Promise.all(
		sources.map( async( source ) => {
			const base = path.basename( source );

			for ( const suffix of [ ".js", ".min.js", ".min.map" ] ) {
				const fromPath = source.replace( rjs, suffix );
				const toPath = `${ unversionedFolder }/${ base.replace( rjs, suffix ) }`;
				await exec( `cp -f ${ fromPath } ${ toPath }` );
			}
		} )
	);
}
// Copy each source file and its minified variant into the versioned
// folder under versioned filenames (jquery-X.Y.Z…), and write a
// versioned source map with its internal references rewritten.
async function makeVersionedCopies() {
	await mkdir( versionedFolder, { recursive: true } );

	return Promise.all(
		sources.map( async( source ) => {
			const versionedName = path
				.basename( source )
				.replace( "jquery", "jquery-" + version );

			await exec( `cp -f ${ source } ${ versionedFolder }/${ versionedName }` );
			await exec(
				`cp -f ${ source.replace(
					rjs,
					".min.js"
				) } ${ versionedFolder }/${ versionedName.replace( rjs, ".min.js" ) }`
			);

			// The map file also needs its file/sources entries versioned.
			await convertMapToVersioned( source, versionedFolder );
		} )
	);
}
// Sync the given folder's files into the cloned CDN repo's cdn/ dir.
// The repo itself is cloned earlier in the release (see cdnRepoFolder).
async function copyToRepo( folder ) {
	return exec( `cp -f ${ folder }/* ${ cdnRepoFolder }/cdn/` );
}
/**
 * Orchestrate the CDN build: wipe old archives, create the versioned
 * and unversioned copies in parallel, sync the versioned files into
 * the cloned CDN repo, then zip up the Google and Microsoft bundles.
 */
async function cdn() {
	await clean();
	await Promise.all( [ makeUnversionedCopies(), makeVersionedCopies() ] );
	await copyToRepo( versionedFolder );
	await Promise.all( [
		archive( { cdn: "googlecdn", folder: unversionedFolder, version } ),
		archive( { cdn: "mscdn", folder: versionedFolder, version } )
	] );
	console.log( "Files ready for CDNs." );
}

// The promise was previously left floating; report failures explicitly
// and exit nonzero instead of relying on Node's default
// unhandled-rejection behavior.
cdn().catch( ( error ) => {
	console.error( error );
	process.exitCode = 1;
} );

239
build/release/changelog.js Normal file
View File

@ -0,0 +1,239 @@
import { writeFile } from "node:fs/promises";
import { argv } from "node:process";
import { exec as nodeExec } from "node:child_process";
import util from "node:util";
import { marked } from "marked";
const exec = util.promisify( nodeExec );
// Ref has something before the trailing "#" → cross-repo GitHub ref.
const rbeforeHash = /.#$/;
const rendsWithHash = /#$/;

// Git's "(cherry picked from commit …)" annotation, stripped from messages.
const rcherry = / \(cherry picked from commit [^)]+\)/;

// Ticket references: "Fixes gh-123", "Fixes #123", "Fixes owner/repo#123".
const rcommit = /Fix(?:e[sd])? ((?:[a-zA-Z0-9_-]{1,39}\/[a-zA-Z0-9_-]{1,100}#)|#|gh-)(\d+)/g;

// "Component: message" subject-line prefix.
const rcomponent = /^([^ :]+):\s*([^\n]+)/;
const rnewline = /\r?\n/;

// Versions come from the CLI: node changelog.js PREV_VERSION NEXT_VERSION
const prevVersion = argv[ 2 ];
const nextVersion = argv[ 3 ];

// Optional; when set, the link is prepended to the changelog output.
const blogUrl = process.env.BLOG_URL;

if ( !prevVersion || !nextVersion ) {
	throw new Error( "Usage: `node changelog.js PREV_VERSION NEXT_VERSION`" );
}
// Build the issue-tracker URL for a jQuery core ticket id.
function ticketUrl( ticketId ) {
	return "https://github.com/jquery/jquery/issues/" + ticketId;
}
// Extract all ticket references ("Fixes gh-123", "Fixes owner/repo#123", …)
// from a commit message as { url, label } objects.
function getTicketsForCommit( commit ) {
	const tickets = [];

	for ( const [ , matchedRef, ticketId ] of commit.matchAll( rcommit ) ) {
		let refType = matchedRef;
		const ticket = {
			url: ticketUrl( ticketId ),
			label: "#" + ticketId
		};

		// If the refType has anything before the #, assume it's a GitHub ref
		if ( rbeforeHash.test( refType ) ) {
			refType = refType.replace( rendsWithHash, "" );
			ticket.url = `https://github.com/${ refType }/issues/${ ticketId }`;
			ticket.label = refType + ticket.label;
		}

		tickets.push( ticket );
	}

	return tickets;
}
// Collect the commits between the two release tags, parsed and sorted.
//
// The custom git format marks commit boundaries with "__COMMIT__" and
// embeds a "__TICKETREF__" placeholder that parseCommit later replaces
// with linked ticket references.
async function getCommits() {
	const format =
		"__COMMIT__%n%s (__TICKETREF__[%h](https://github.com/jquery/jquery/commit/%H))%n%b";
	const { stdout } = await exec(
		`git log --format="${ format }" ${ prevVersion }..${ nextVersion }`
	);

	// slice( 1 ) drops the empty chunk before the first "__COMMIT__" marker.
	const commits = stdout.split( "__COMMIT__" ).slice( 1 );
	return removeReverts( commits.map( parseCommit ).sort( sortCommits ) );
}
// Reduce a raw commit chunk to a single changelog line: keep only the
// subject, splice in linked ticket references, and strip any
// cherry-pick annotation.
function parseCommit( commit ) {
	const ticketRefs = getTicketsForCommit( commit )
		.map( ( { label, url } ) => `[${ label }](${ url })` )
		.join( ", " );

	// Drop the commit message body
	const [ subject ] = commit.trim().split( rnewline );

	return subject

		// Add any ticket references
		.replace( "__TICKETREF__", ticketRefs ? `${ ticketRefs }, ` : "" )

		// Remove cherry pick references
		.replace( rcherry, "" );
}
// Comparator: order commits by their "Component:" prefix when both
// have one, otherwise by the full message.
function sortCommits( a, b ) {
	const aMatch = rcomponent.exec( a );
	const bMatch = rcomponent.exec( b );

	const [ aKey, bKey ] = aMatch && bMatch ?
		[ aMatch[ 1 ], bMatch[ 1 ] ] :
		[ a, b ];

	if ( aKey < bKey ) {
		return -1;
	}
	if ( aKey > bKey ) {
		return 1;
	}
	return 0;
}
/**
 * Remove all revert commits and the commit it is reverting.
 * Mutates and returns the given array.
 *
 * NOTE(review): the double-revert guard tests match[ 0 ], which always
 * begins with "*" and so can never match /^Revert/ — the guard appears
 * to be dead code. Also, match[ 0 ] (not the inner title match[ 1 ]) is
 * pushed for removal, so the reverted commit's own entry may not be
 * found. Confirm intended behavior before changing.
 */
function removeReverts( commits ) {
	const toRemove = [];

	for ( const commit of commits ) {
		const match = /\*\s*Revert "([^"]*)"/.exec( commit );

		// Ignore double reverts
		if ( match && !/^Revert "([^"]*)"/.test( match[ 0 ] ) ) {
			toRemove.push( commit, match[ 0 ] );
		}
	}

	for ( const message of toRemove ) {
		const index = commits.findIndex( ( commit ) => commit.includes( message ) );
		if ( index > -1 ) {
			commits.splice( index, 1 );
		}
	}

	return commits;
}
// Render the sorted commit lines as markdown, inserting an "## Component"
// header the first time each component prefix is seen (case-insensitive).
// Components are capitalized unless they already start with an uppercase
// letter. Commits without a component prefix become plain list items.
function addHeaders( commits ) {
	const seenComponents = {};
	let markdown = "";

	for ( const commit of commits ) {
		const match = rcomponent.exec( commit );

		if ( !match ) {
			markdown += `- ${ commit }\n`;
			continue;
		}

		let component = match[ 1 ];
		if ( !/^[A-Z]/.test( component ) ) {
			component =
				component.slice( 0, 1 ).toUpperCase() +
				component.slice( 1 ).toLowerCase();
		}

		const key = component.toLowerCase();
		if ( !seenComponents[ key ] ) {
			markdown += "\n## " + component + "\n\n";
			seenComponents[ key ] = true;
		}

		markdown += `- ${ match[ 2 ] }\n`;
	}

	return markdown;
}
// Resolve a commit's author via the GitHub API, returning their display
// name and profile URL.
//
// Requires JQUERY_GITHUB_TOKEN in the environment; throws when the
// response lacks commit/author data (bad token, rate limiting, or an
// author with no linked GitHub account).
async function getGitHubContributor( sha ) {
	const response = await fetch(
		`https://api.github.com/repos/jquery/jquery/commits/${ sha }`,
		{
			headers: {
				Accept: "application/vnd.github+json",
				Authorization: `Bearer ${ process.env.JQUERY_GITHUB_TOKEN }`,
				"X-GitHub-Api-Version": "2022-11-28"
			}
		}
	);
	const data = await response.json();
	if ( !data.commit || !data.author ) {

		// The data may contain multiple helpful fields
		throw new Error( JSON.stringify( data ) );
	}
	return { name: data.commit.author.name, url: data.author.html_url };
}
/**
 * De-duplicate contributors by name, keeping the first occurrence.
 *
 * Uses a Set rather than a plain object for the seen-names lookup so
 * that names colliding with Object.prototype properties (e.g.
 * "constructor", "toString") are not silently dropped.
 *
 * @param {Array<{name: string, url: string}>} contributors
 * @returns {Array<{name: string, url: string}>} first occurrence of each name
 */
function uniqueContributors( contributors ) {
	const seen = new Set();

	return contributors.filter( ( { name } ) => {
		if ( seen.has( name ) ) {
			return false;
		}
		seen.add( name );
		return true;
	} );
}
// Build the de-duplicated, last-name-sorted HTML list of contributor
// links for the blog post. Bot and release-manager entries are excluded.
async function getContributors() {
	const { stdout } = await exec(
		`git log --format="%H" ${ prevVersion }..${ nextVersion }`
	);
	const shas = stdout.split( rnewline ).filter( Boolean );
	const contributors = await Promise.all( shas.map( getGitHubContributor ) );

	// Sort by last name
	const byLastName = ( a, b ) => {
		const aParts = a.name.split( " " );
		const bParts = b.name.split( " " );
		return aParts.at( -1 ).localeCompare( bParts.at( -1 ) );
	};

	return uniqueContributors( contributors )
		.sort( byLastName )
		.filter( ( { name } ) =>
			name !== "Timmy Willison" && !name.includes( "dependabot" )
		)
		.map( ( { name, url } ) => `<a href="${ url }">${ name }</a>` )
		.join( "\n" );
}
// Entry point: build the changelog and contributor list for a release.
//
// Writes three artifacts to the working directory:
//   changelog.md      markdown for the GitHub release notes
//   changelog.html    HTML for the blog post
//   contributors.html HTML "thanks" list for the blog post
// The markdown is also logged to stdout for release-it to capture.
async function generate() {
	const commits = await getCommits();
	const contributors = await getContributors();
	let changelog = "# Changelog\n";
	if ( blogUrl ) {
		changelog += `\n${ blogUrl }\n`;
	}
	changelog += addHeaders( commits );

	// Write markdown to changelog.md
	await writeFile( "changelog.md", changelog );

	// Write HTML to changelog.html for blog post
	await writeFile( "changelog.html", marked.parse( changelog ) );

	// Write contributors HTML for blog post
	await writeFile( "contributors.html", contributors );

	// Log regular changelog for release-it
	console.log( changelog );
	return changelog;
}

// NOTE(review): this promise is left floating; failures rely on Node's
// unhandled-rejection exit behavior. Consider adding a .catch().
generate();

View File

@ -1,177 +1,125 @@
"use strict";
import { readFile, writeFile } from "node:fs/promises";
import util from "node:util";
import { argv } from "node:process";
import { exec as nodeExec } from "node:child_process";
import { rimraf } from "rimraf";
module.exports = function( Release, files, complete ) {
const pkg = JSON.parse( await readFile( "./package.json", "utf8" ) );
const fs = require( "node:fs/promises" );
const shell = require( "shelljs" );
const inquirer = require( "inquirer" );
const pkg = require( `${ Release.dir.repo }/package.json` );
const distRemote = Release.remote
const exec = util.promisify( nodeExec );
// For local and github dists
.replace( /jquery(\.git|$)/, "jquery-dist$1" );
const version = argv[ 2 ];
const blogURL = argv[ 3 ];
// These files are included with the distribution
const extras = [
"src",
"LICENSE.txt",
"AUTHORS.txt"
];
if ( !version ) {
throw new Error( "No version specified" );
}
/**
* Clone the distribution repo
*/
function clone() {
Release.chdir( Release.dir.base );
Release.dir.dist = `${ Release.dir.base }/dist`;
if ( !blogURL || !blogURL.startsWith( "https://blog.jquery.com/" ) ) {
throw new Error( "Invalid blog post URL" );
}
console.log( "Using distribution repo: ", distRemote );
Release.exec( `git clone ${ distRemote } ${ Release.dir.dist }`,
"Error cloning repo." );
// The dist repo is cloned during release
const distRepoFolder = "tmp/release/dist";
// Distribution always works on main
Release.chdir( Release.dir.dist );
Release.exec( "git checkout main", "Error checking out branch." );
console.log();
}
// Files to be included in the dist repo.
// README.md and bower.json are generated.
// package.json is a simplified version of the original.
const files = [
"dist",
"src",
"LICENSE.txt",
"AUTHORS.txt",
"changelog.md"
];
/**
* Generate bower file for jquery-dist
*/
function generateBower() {
return JSON.stringify( {
async function generateBower() {
return JSON.stringify(
{
name: pkg.name,
main: pkg.main,
license: "MIT",
ignore: [
"package.json"
],
ignore: [ "package.json" ],
keywords: pkg.keywords
}, null, 2 );
}
},
null,
2
);
}
/**
* Replace the version in the README
* @param {string} readme
* @param {string} blogPostLink
*/
function editReadme( readme, blogPostLink ) {
return readme
.replace( /@VERSION/g, Release.newVersion )
.replace( /@BLOG_POST_LINK/g, blogPostLink );
}
async function generateReadme() {
const readme = await readFile(
"./build/fixtures/README.md",
"utf8"
);
/**
* Copy necessary files over to the dist repo
*/
async function copy() {
return readme
.replace( /@VERSION/g, version )
.replace( /@BLOG_POST_LINK/g, blogURL );
}
// Copy dist files
const distFolder = `${ Release.dir.dist }/dist`;
const readme = await fs.readFile(
`${ Release.dir.repo }/build/fixtures/README.md`, "utf8" );
const rmIgnore = [ ...files, "node_modules" ]
.map( file => `${ Release.dir.dist }/${ file }` );
/**
* Copy necessary files over to the dist repo
*/
async function copyFiles() {
shell.config.globOptions = {
ignore: rmIgnore
};
// Remove any extraneous files before copy
await rimraf( [
`${ distRepoFolder }/dist`,
`${ distRepoFolder }/dist-module`,
`${ distRepoFolder }/src`
] );
const { blogPostLink } = await inquirer.prompt( [ {
type: "input",
name: "blogPostLink",
message: "Enter URL of the blog post announcing the jQuery release...\n"
} ] );
// Copy all files
await Promise.all(
files.map( function( path ) {
console.log( `Copying ${ path }...` );
return exec( `cp -rf ${ path } ${ distRepoFolder }/${ path }` );
} )
);
// Remove extraneous files before copy
shell.rm( "-rf", `${ Release.dir.dist }/**/*` );
// Remove the wrapper from the dist repo
await rimraf( [
`${ distRepoFolder }/src/wrapper.js`
] );
shell.mkdir( "-p", distFolder );
files.forEach( function( file ) {
shell.cp( "-f", `${ Release.dir.repo }/${ file }`, distFolder );
} );
// Set the version in src/core.js
const core = await readFile( `${ distRepoFolder }/src/core.js`, "utf8" );
await writeFile(
`${ distRepoFolder }/src/core.js`,
core.replace( /@VERSION/g, version )
);
// Copy other files
extras.forEach( function( file ) {
shell.cp( "-rf", `${ Release.dir.repo }/${ file }`, Release.dir.dist );
} );
// Write generated README
console.log( "Generating README.md..." );
const readme = await generateReadme();
await writeFile( `${ distRepoFolder }/README.md`, readme );
// Remove the wrapper & the ESLint config from the dist repo
shell.rm( "-f", `${ Release.dir.dist }/src/wrapper.js` );
shell.rm( "-f", `${ Release.dir.dist }/src/.eslintrc.json` );
// Write generated Bower file
console.log( "Generating bower.json..." );
const bower = await generateBower();
await writeFile( `${ distRepoFolder }/bower.json`, bower );
// Write package.json
// Remove scripts and other superfluous properties,
// especially the prepare script, which fails on the dist repo
const packageJson = Object.assign( {}, pkg );
delete packageJson.scripts;
delete packageJson.devDependencies;
delete packageJson.dependencies;
delete packageJson.commitplease;
packageJson.version = Release.newVersion;
await fs.writeFile(
`${ Release.dir.dist }/package.json`,
JSON.stringify( packageJson, null, 2 )
);
// Write simplified package.json
console.log( "Writing package.json..." );
await writeFile(
`${ distRepoFolder }/package.json`,
JSON.stringify(
{
...pkg,
scripts: undefined,
dependencies: undefined,
devDependencies: undefined,
commitplease: undefined
},
null,
2
// Write generated bower file
await fs.writeFile( `${ Release.dir.dist }/bower.json`, generateBower() );
// Add final newline
) + "\n"
);
await fs.writeFile( `${ Release.dir.dist }/README.md`,
editReadme( readme, blogPostLink ) );
console.log( "Files copied to dist repo." );
}
console.log( "Files ready to add." );
}
/**
 * Add, commit, and tag the dist files
 *
 * Stages everything in the dist repo working tree, commits with the
 * release version in the message, tags the commit with the version,
 * and records the tag's timestamp on the Release object.
 */
function commit() {
	console.log( "Adding files to dist..." );
	Release.exec( "git add -A", "Error adding files." );
	Release.exec(
		`git commit -m "Release ${ Release.newVersion }"`,
		"Error committing files."
	);

	console.log();
	console.log( "Tagging release on dist..." );
	Release.exec( `git tag ${ Release.newVersion }`,
		`Error tagging ${ Release.newVersion } on dist repo.` );

	// Store the timestamp of the tagged commit on the Release object
	// (presumably read by later release steps — TODO confirm)
	Release.tagTime = Release.exec( "git log -1 --format=\"%ad\"",
		"Error getting tag timestamp." ).trim();
}
/**
 * Push files to dist repo
 *
 * Pushes the main branch and tags from the dist checkout, then points
 * Release.dir.repo at the dist folder so the subsequent npm publish
 * runs from the dist repo.
 */
function push() {
	Release.chdir( Release.dir.dist );

	console.log( "Pushing release to dist repo..." );

	// During a test release use --dry-run so nothing is actually pushed
	Release.exec(
		`git push ${
			Release.isTest ? " --dry-run" : ""
		} ${ distRemote } main --tags`,
		"Error pushing main and tags to git repo."
	);

	// Set repo for npm publish
	Release.dir.origRepo = Release.dir.repo;
	Release.dir.repo = Release.dir.dist;
}
Release.walk( [
Release._section( "Copy files to distribution repo" ),
clone,
copy,
Release.confirmReview,
Release._section( "Add, commit, and tag files in distribution repo" ),
commit,
Release.confirmReview,
Release._section( "Pushing files to distribution repo" ),
push
], complete );
};
copyFiles();

View File

@ -0,0 +1,60 @@
#!/bin/bash

# Post-release script: push built release files to the CDN and dist
# repos, then remove dist files from the main branch.
#
# $1: Version
# $2: Blog URL
#
# Note: [[ ]], read -p, and `set -o pipefail` are bash features,
# so this script requires a bash shebang (not /bin/sh).

set -euo pipefail

cdn=tmp/release/cdn
dist=tmp/release/dist

# Use ${1:-} so a missing argument prints the friendly message
# instead of tripping `set -u` with "unbound variable".
if [[ -z "${1:-}" ]]; then
	echo "Version is not set (1st argument)"
	exit 1
fi

if [[ -z "${2:-}" ]]; then
	echo "Blog URL is not set (2nd argument)"
	exit 1
fi

# Push files to cdn repo
npm run release:cdn "$1"
cd "$cdn"
git add -A
git commit -m "jquery: Add version $1"

# Wait for confirmation from user to push changes to cdn repo
read -p "Press enter to push changes to cdn repo"
git push
cd -

# Push files to dist repo
npm run release:dist "$1" "$2"
cd "$dist"
git add -A
git commit -m "Release: $1"

# -s to sign and annotate tag (recommended for releases)
git tag -s "$1" -m "Release: $1"

# Wait for confirmation from user to push changes to dist repo
read -p "Press enter to push changes to dist repo"
git push --follow-tags
cd -

# Restore AUTHORS URL
sed -i "s/$1\/AUTHORS.txt/main\/AUTHORS.txt/" package.json
git add package.json

# Remove built files from tracking.
# Leave the changelog.md committed.
# Leave the tmp folder as some files are needed
# after the release (such as for emailing archives).
npm run build:clean
git rm --cached -r dist/ dist-module/
git add dist/package.json dist/wrappers dist-module/package.json dist-module/wrappers
git commit -m "Release: remove dist files from main branch"

# Wait for confirmation from user to push changes
read -p "Press enter to push changes to main branch"
git push

View File

@ -0,0 +1,21 @@
#!/bin/bash

# Pre-release setup: install dependencies, clean old artifacts,
# run checks and tests, and clone the dist/cdn repos into tmp/.
#
# Note: `set -o pipefail` is a bash feature, so this script requires
# a bash shebang (not /bin/sh).

set -euo pipefail

# Install dependencies
npm ci

# Clean all release and build artifacts
npm run build:clean
npm run release:clean

# Check authors
npm run authors:check

# Run tests
npm test

# Clone dist and cdn repos to the tmp/release directory
mkdir -p tmp/release
git clone https://github.com/jquery/jquery-dist tmp/release/dist
git clone https://github.com/jquery/codeorigin.jquery.com tmp/release/cdn

243
build/release/verify.js Normal file
View File

@ -0,0 +1,243 @@
/**
* Verify the latest release is reproducible
*/
import { exec as nodeExec } from "node:child_process";
import crypto from "node:crypto";
import { createWriteStream } from "node:fs";
import { mkdir, readdir, readFile } from "node:fs/promises";
import path from "node:path";
import { Readable } from "node:stream";
import { finished } from "node:stream/promises";
import util from "node:util";
import { gunzip as nodeGunzip } from "node:zlib";
import { rimraf } from "rimraf";
// Promisified variants of Node's callback-style APIs
const exec = util.promisify( nodeExec );
const gunzip = util.promisify( nodeGunzip );

const SRC_REPO = "https://github.com/jquery/jquery.git";
const CDN_URL = "https://code.jquery.com";
const REGISTRY_URL = "https://registry.npmjs.org/jquery";

// Files published to npm but never to the CDN;
// verifyRelease skips these when checking the CDN
const excludeFromCDN = [
	/^package\.json$/,
	/^jquery\.factory\./
];

// Matches the "jquery" prefix of built filenames,
// used to construct versioned CDN filenames
const rjquery = /^jquery/;
/**
 * Verify that a published release matches a local rebuild.
 *
 * Rebuilds the given version from its git tag, then compares every
 * built file against the copy served from the CDN and the copy inside
 * the npm tarball. Prints all matching and mismatching files and
 * throws if anything differs.
 *
 * @param {Object} [options]
 * @param {string} [options.version] - Version to verify. Falls back to
 *   the VERSION environment variable, then to the latest git tag.
 * @throws {Error} if a CDN download fails, the version is missing from
 *   npm, or any file differs from the local build
 */
async function verifyRelease( { version } = {} ) {
	if ( !version ) {
		version = process.env.VERSION || ( await getLatestVersion() );
	}

	// Reproduce the build locally; this is the reference to compare against
	const release = await buildRelease( { version } );

	console.log( `Verifying jQuery ${ version }...` );
	let verified = true;
	const matchingFiles = [];
	const mismatchingFiles = [];

	// Check all files against the CDN.
	// Files matched by excludeFromCDN are never published there.
	await Promise.all(
		release.files
			.filter( ( file ) => excludeFromCDN.every( ( re ) => !re.test( file.name ) ) )
			.map( async( file ) => {
				const url = new URL( file.cdnName, CDN_URL );
				const response = await fetch( url );
				if ( !response.ok ) {
					throw new Error(
						`Failed to download ${
							file.cdnName
						} from the CDN: ${ response.statusText }`
					);
				}
				const cdnContents = await response.text();

				// Compare against cdnContents, which accounts for the
				// versioned filenames inside .map files
				if ( cdnContents !== file.cdnContents ) {
					mismatchingFiles.push( url.href );
					verified = false;
				} else {
					matchingFiles.push( url.href );
				}
			} )
	);

	// Check all files against npm.
	// First, download npm tarball for version
	const npmPackage = await fetch( REGISTRY_URL ).then( ( res ) => res.json() );
	if ( !npmPackage.versions[ version ] ) {
		throw new Error( `jQuery ${ version } not found on npm!` );
	}
	const npmTarball = npmPackage.versions[ version ].dist.tarball;

	// Write npm tarball to file
	const npmTarballPath = path.join( "tmp/verify", version, "npm.tgz" );
	await downloadFile( npmTarball, npmTarballPath );

	// Check the tarball checksum
	const tgzSum = await sumTarball( npmTarballPath );
	if ( tgzSum !== release.tgz.contents ) {
		mismatchingFiles.push( `npm:${ version }.tgz` );
		verified = false;
	} else {
		matchingFiles.push( `npm:${ version }.tgz` );
	}

	// Compare each built file with its copy inside the npm tarball
	await Promise.all(
		release.files.map( async( file ) => {

			// Get file contents from tarball
			const { stdout: npmContents } = await exec(
				`tar -xOf ${ npmTarballPath } package/${ file.path }/${ file.name }`
			);
			if ( npmContents !== file.contents ) {
				mismatchingFiles.push( `npm:${ file.path }/${ file.name }` );
				verified = false;
			} else {
				matchingFiles.push( `npm:${ file.path }/${ file.name }` );
			}
		} )
	);

	if ( verified ) {
		console.log( `jQuery ${ version } is reproducible! All files match!` );
	} else {

		// Print matching files first, then the mismatching ones
		console.log();
		for ( const file of matchingFiles ) {
			console.log( `${ file }` );
		}
		console.log();
		for ( const file of mismatchingFiles ) {
			console.log( `${ file }` );
		}
		throw new Error( `jQuery ${ version } is NOT reproducible!` );
	}
}
/**
 * Rebuild a released version from its git tag.
 *
 * Clones the tag into tmp/verify/<version>, builds it using the commit
 * date from the commit before the tag (matching the reproducible-build
 * process), packs the npm tarball, and collects the contents of all
 * top-level dist/ and dist-module/ files.
 *
 * @param {Object} options
 * @param {string} options.version - Tag to clone and rebuild
 * @returns {Promise<Object>} release data { files, tgz, version };
 *   each file entry carries name, path, contents, plus the
 *   CDN-equivalent cdnName/cdnContents for comparison
 */
async function buildRelease( { version } ) {
	const releaseFolder = path.join( "tmp/verify", version );

	// Clone the release repo
	console.log( `Cloning jQuery ${ version }...` );
	await rimraf( releaseFolder );
	await mkdir( releaseFolder, { recursive: true } );

	// Uses a depth of 2 so we can get the commit date of
	// the commit used to build, which is the commit before the tag
	await exec(
		`git clone -q -b ${ version } --depth=2 ${ SRC_REPO } ${ releaseFolder }`
	);

	// Install node dependencies
	console.log( `Installing dependencies for jQuery ${ version }...` );
	await exec( "npm ci", { cwd: releaseFolder } );

	// Find the date of the commit just before the release,
	// which was used as the date in the built files
	const { stdout: date } = await exec( "git log -1 --format=%ci HEAD~1", {
		cwd: releaseFolder
	} );

	// Build the release
	console.log( `Building jQuery ${ version }...` );
	const { stdout: buildOutput } = await exec( "npm run build:all", {
		cwd: releaseFolder,
		env: {

			// Keep existing environment variables
			...process.env,
			RELEASE_DATE: date,
			VERSION: version
		}
	} );
	console.log( buildOutput );

	// Pack the npm tarball
	console.log( `Packing jQuery ${ version }...` );
	const { stdout: packOutput } = await exec( "npm pack", { cwd: releaseFolder } );
	console.log( packOutput );

	// Get all top-level /dist and /dist-module files
	const distFiles = await readdir(
		path.join( releaseFolder, "dist" ),
		{ withFileTypes: true }
	);
	const distModuleFiles = await readdir(
		path.join( releaseFolder, "dist-module" ),
		{ withFileTypes: true }
	);
	const files = await Promise.all(
		[ ...distFiles, ...distModuleFiles ]
			.filter( ( dirent ) => dirent.isFile() )
			.map( async( dirent ) => {
				const contents = await readFile(
					path.join( dirent.parentPath, dirent.name ),
					"utf8"
				);
				return {
					name: dirent.name,
					path: path.basename( dirent.parentPath ),
					contents,
					cdnName: dirent.name.replace( rjquery, `jquery-${ version }` ),
					cdnContents: dirent.name.endsWith( ".map" ) ?

						// The CDN has versioned filenames in the maps
						convertMapToVersioned( contents, version ) :
						contents
				};
			} )
	);

	// Get checksum of the tarball
	const tgzFilename = `jquery-${ version }.tgz`;
	const sum = await sumTarball( path.join( releaseFolder, tgzFilename ) );

	return {
		files,
		tgz: {
			name: tgzFilename,
			contents: sum
		},
		version
	};
}
/**
 * Download a remote file to disk.
 * @param {string|URL} url - URL to fetch
 * @param {string} dest - Local filesystem path to write to
 * @returns {Promise<void>} resolves once the file is fully written
 * @throws {Error} if the HTTP response is not OK
 */
async function downloadFile( url, dest ) {
	const response = await fetch( url );

	// Fail loudly instead of silently writing an error page to disk
	if ( !response.ok ) {
		throw new Error(
			`Failed to download ${ url }: ${ response.statusText }`
		);
	}
	const fileStream = createWriteStream( dest );
	const stream = Readable.fromWeb( response.body ).pipe( fileStream );
	return finished( stream );
}
/**
 * Resolve the most recently created tag in the current repository,
 * which corresponds to the latest release.
 * @returns {Promise<string>} the tag name, e.g. "3.7.1"
 */
async function getLatestVersion() {
	const revList = await exec( "git rev-list --tags --max-count=1" );
	const latestSha = revList.stdout.trim();
	const describeOut = await exec( `git describe --tags ${ latestSha }` );
	return describeOut.stdout.trim();
}
/**
 * Compute the sha256 digest of the given data.
 * @param {string|Buffer} data - Data to digest
 * @returns {string} hex-encoded sha256 digest
 */
function shasum( data ) {
	return crypto.createHash( "sha256" ).update( data ).digest( "hex" );
}
/**
 * Checksum a gzipped tarball by hashing its *uncompressed* bytes,
 * so gzip metadata (e.g. timestamps) does not affect the result.
 * @param {string} filepath - Path to a .tgz file
 * @returns {Promise<string>} hex sha256 digest of the unzipped data
 */
async function sumTarball( filepath ) {
	const zipped = await readFile( filepath );
	const unzipped = await gunzip( zipped );
	return shasum( unzipped );
}
/**
 * Rewrite a source map's "file" and "sources" fields to use the
 * versioned filenames served from the CDN.
 * @param {string} contents - Raw JSON text of the source map
 * @param {string} version - Release version, e.g. "3.7.1"
 * @returns {string} JSON text of the adjusted source map
 */
function convertMapToVersioned( contents, version ) {
	const versionedPrefix = `jquery-${ version }`;
	const map = JSON.parse( contents );
	map.file = map.file.replace( rjquery, versionedPrefix );
	map.sources = map.sources.map(
		( source ) => source.replace( rjquery, versionedPrefix )
	);
	return JSON.stringify( map );
}
// Entry point: run verification immediately when this script is executed
verifyRelease();

View File

@ -4,20 +4,20 @@
* and includes/excludes specified modules
*/
"use strict";
import { exec as nodeExec } from "node:child_process";
import { writeFileSync } from "node:fs";
import fs from "node:fs/promises";
import path from "node:path";
import util from "node:util";
import requirejs from "requirejs";
import { compareSize } from "./lib/compareSize.js";
import getTimestamp from "./lib/getTimestamp.js";
import isCleanWorkingDir from "./lib/isCleanWorkingDir.js";
import excludedFromSlim from "./lib/slim-exclude.js";
import minify from "./minify.js";
const fs = require( "node:fs/promises" );
const path = require( "node:path" );
const util = require( "node:util" );
const exec = util.promisify( require( "node:child_process" ).exec );
const requirejs = require( "requirejs" );
const excludedFromSlim = require( "./lib/slim-exclude" );
const pkg = require( "../../package.json" );
const isCleanWorkingDir = require( "./lib/isCleanWorkingDir" );
const minify = require( "./minify" );
const getTimestamp = require( "./lib/getTimestamp" );
const verifyNodeVersion = require( "./lib/verifyNodeVersion" );
const srcFolder = path.resolve( __dirname, "../../src" );
const exec = util.promisify( nodeExec );
const pkg = JSON.parse( await fs.readFile( "./package.json", "utf8" ) );
const rdefineEnd = /\}\s*?\);[^}\w]*$/;
@ -38,14 +38,14 @@ const removeWith = {
};
async function read( filename ) {
return fs.readFile( path.join( srcFolder, filename ), "utf8" );
return fs.readFile( path.join( "./src", filename ), "utf8" );
}
// Remove the src folder and file extension
// and ensure unix-style path separators
function moduleName( filename ) {
return filename
.replace( `${ srcFolder }${ path.sep }`, "" )
.replace( new RegExp( `.*\\${ path.sep }src\\${ path.sep }` ), "" )
.replace( /\.js$/, "" )
.split( path.sep )
.join( path.posix.sep );
@ -54,7 +54,7 @@ function moduleName( filename ) {
async function readdirRecursive( dir, all = [] ) {
let files;
try {
files = await fs.readdir( path.join( srcFolder, dir ), {
files = await fs.readdir( path.join( "./src", dir ), {
withFileTypes: true
} );
} catch ( _ ) {
@ -212,7 +212,12 @@ async function checkExclude( exclude, include ) {
return [ unique( excluded ), unique( included ) ];
}
async function build( {
async function getLastModifiedDate() {
const { stdout } = await exec( "git log -1 --format=\"%at\"" );
return new Date( parseInt( stdout, 10 ) * 1000 );
}
export async function build( {
amd,
dir = "dist",
exclude = [],
@ -242,6 +247,11 @@ async function build( {
);
const config = await getRequireConfig( { amd } );
// Use the last modified date so builds are reproducible
const date = process.env.RELEASE_DATE ?
new Date( process.env.RELEASE_DATE ) :
await getLastModifiedDate();
// Replace exports/global with a noop noConflict
if ( excluded.includes( "exports/global" ) ) {
const index = excluded.indexOf( "exports/global" );
@ -286,7 +296,7 @@ async function build( {
* Handle Final output from the optimizer
* @param {String} compiled
*/
config.out = async function( compiled ) {
config.out = function( compiled ) {
const compiledContents = compiled
// Embed Version
@ -294,10 +304,11 @@ async function build( {
// Embed Date
// yyyy-mm-ddThh:mmZ
.replace( /@DATE/g, new Date().toISOString().replace( /:\d+\.\d+Z$/, "Z" ) );
.replace( /@DATE/g, date.toISOString().replace( /:\d+\.\d+Z$/, "Z" ) );
// Write concatenated source to file
await fs.writeFile(
// Cannot use async in config.out
writeFileSync(
path.join( dir, filename ),
compiledContents
);
@ -320,7 +331,7 @@ async function build( {
await minify( { filename, dir } );
}
async function buildDefaultFiles( {
export async function buildDefaultFiles( {
version = process.env.VERSION
} = {} ) {
await Promise.all( [
@ -328,12 +339,6 @@ async function buildDefaultFiles( {
build( { filename: "jquery.slim.js", slim: true, version } )
] );
// Earlier Node.js versions do not support the ESM format.
if ( !verifyNodeVersion() ) {
return;
}
const { compareSize } = await import( "./compare_size.mjs" );
return compareSize( {
files: [
"dist/jquery.min.js",
@ -341,5 +346,3 @@ async function buildDefaultFiles( {
]
} );
}
module.exports = { build, buildDefaultFiles };

View File

@ -1,7 +1,5 @@
"use strict";
// Process files for distribution.
module.exports = function processForDist( text, filename ) {
export default function processForDist( text, filename ) {
if ( !text ) {
throw new Error( "text required for processForDist" );
}
@ -28,4 +26,4 @@ module.exports = function processForDist( text, filename ) {
}
throw new Error( message );
}
};
}

View File

@ -1,9 +1,9 @@
import chalk from "chalk";
import fs from "node:fs/promises";
import { promisify } from "node:util";
import zlib from "node:zlib";
import { exec as nodeExec } from "node:child_process";
import isCleanWorkingDir from "./lib/isCleanWorkingDir.js";
import chalk from "chalk";
import isCleanWorkingDir from "./isCleanWorkingDir.js";
const VERSION = 1;
const lastRunBranch = " last run";

View File

@ -1,9 +1,7 @@
"use strict";
module.exports = function getTimestamp() {
export default function getTimestamp() {
const now = new Date();
const hours = now.getHours().toString().padStart( 2, "0" );
const minutes = now.getMinutes().toString().padStart( 2, "0" );
const seconds = now.getSeconds().toString().padStart( 2, "0" );
return `${ hours }:${ minutes }:${ seconds }`;
};
}

View File

@ -1,9 +1,9 @@
"use strict";
import util from "node:util";
import { exec as nodeExec } from "node:child_process";
const util = require( "node:util" );
const exec = util.promisify( require( "node:child_process" ).exec );
const exec = util.promisify( nodeExec );
module.exports = async function isCleanWorkingDir() {
export default async function isCleanWorkingDir() {
const { stdout } = await exec( "git status --untracked-files=no --porcelain" );
return !stdout.trim();
};
}

View File

@ -1,7 +1,5 @@
"use strict";
// NOTE: keep it in sync with test/data/testinit.js
module.exports = [
export default [
"ajax",
"effects"
];

View File

@ -1,12 +0,0 @@
"use strict";
const { version } = require( "process" );
const nodeV18OrNewer = !/^v1[0-7]\./.test( version );
module.exports = function verifyNodeVersion() {
if ( !nodeV18OrNewer ) {
console.log( "Old Node.js detected, task skipped..." );
return false;
}
return true;
};

View File

@ -1,14 +1,12 @@
"use strict";
const UglifyJS = require( "uglify-js" );
const fs = require( "node:fs/promises" );
const path = require( "node:path" );
const processForDist = require( "./dist" );
const getTimestamp = require( "./lib/getTimestamp" );
import fs from "node:fs/promises";
import path from "node:path";
import UglifyJS from "uglify-js";
import processForDist from "./dist.js";
import getTimestamp from "./lib/getTimestamp.js";
const rjs = /\.js$/;
module.exports = async function minify( { dir, filename } ) {
export default async function minify( { dir, filename } ) {
const filepath = path.join( dir, filename );
const contents = await fs.readFile( filepath, "utf8" );
const version = /jQuery JavaScript Library ([^\n]+)/.exec( contents )[ 1 ];
@ -82,4 +80,4 @@ module.exports = async function minify( { dir, filename } ) {
console.log( `[${ getTimestamp() }] ${ minFilename } ${ version } with ${
mapFilename
} created.` );
};
}

View File

@ -1,13 +1,8 @@
"use strict";
import fs from "node:fs/promises";
import util from "node:util";
import { exec as nodeExec } from "node:child_process";
const fs = require( "node:fs/promises" );
const util = require( "node:util" );
const exec = util.promisify( require( "node:child_process" ).exec );
const verifyNodeVersion = require( "./lib/verifyNodeVersion" );
if ( !verifyNodeVersion() ) {
return;
}
const exec = util.promisify( nodeExec );
// Fire up all tests defined in test/node_smoke_tests/*.js in spawned sub-processes.
// All the files under test/node_smoke_tests/*.js are supposed to exit with 0 code

View File

@ -1,9 +1,7 @@
"use strict";
import fs from "node:fs/promises";
import path from "node:path";
const fs = require( "node:fs/promises" );
const path = require( "node:path" );
const projectDir = path.resolve( __dirname, "..", ".." );
const projectDir = path.resolve( "." );
const files = {
"bootstrap/bootstrap.css": "bootstrap/dist/css/bootstrap.css",

View File

@ -1,22 +1,14 @@
"use strict";
const { spawn } = require( "node:child_process" );
const verifyNodeVersion = require( "./lib/verifyNodeVersion" );
const path = require( "node:path" );
const os = require( "node:os" );
if ( !verifyNodeVersion() ) {
return;
}
import path from "node:path";
import os from "node:os";
import { spawn } from "node:child_process";
const command = path.resolve(
__dirname,
`../../node_modules/.bin/promises-aplus-tests${ os.platform() === "win32" ? ".cmd" : "" }`
`node_modules/.bin/promises-aplus-tests${ os.platform() === "win32" ? ".cmd" : "" }`
);
const args = [ "--reporter", "dot", "--timeout", "2000" ];
const tests = [
"test/promises_aplus_adapters/deferred.js",
"test/promises_aplus_adapters/when.js"
"test/promises_aplus_adapters/deferred.cjs",
"test/promises_aplus_adapters/when.cjs"
];
async function runTests() {

View File

@ -1,6 +1,4 @@
"use strict";
const fs = require( "node:fs/promises" );
import fs from "node:fs/promises";
async function generateFixture() {
const fixture = await fs.readFile( "./test/data/qunit-fixture.html", "utf8" );

View File

@ -11,6 +11,7 @@ module.exports = [
// See https://github.com/eslint/eslint/discussions/17412
ignores: [
"external",
"tmp",
"test/data/json_obj.js",
"test/data/jquery-*.js",
@ -264,8 +265,9 @@ module.exports = [
{
files: [
"build/**",
"eslint.config.js",
".release-it.js",
"build/**",
"test/node_smoke_tests/**",
"test/bundler_smoke_tests/**/*",
"test/promises_aplus_adapters/**",
@ -292,6 +294,7 @@ module.exports = [
{
files: [
"build/**/*.js",
"**/*.mjs"
],
languageOptions: {

3663
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@ -2,15 +2,16 @@
"name": "jquery",
"title": "jQuery",
"description": "JavaScript library for DOM operations",
"version": "3.7.2-pre",
"version": "3.7.1",
"main": "dist/jquery.js",
"scripts": {
"authors:check": "node -e \"require('./build/release/authors.js').checkAuthors()\"",
"authors:update": "node -e \"require('./build/release/authors.js').updateAuthors()\"",
"authors:check": "node -e \"(async () => { const { checkAuthors } = await import('./build/release/authors.js'); checkAuthors() })()\"",
"authors:update": "node -e \"(async () => { const { updateAuthors } = await import('./build/release/authors.js'); updateAuthors() })()\"",
"babel:tests": "babel test/data/core/jquery-iterability-transpiled-es6.js --out-file test/data/core/jquery-iterability-transpiled.js",
"build": "node ./build/command.js",
"build:all": "node -e \"require('./build/tasks/build.js').buildDefaultFiles()\"",
"build:main": "node -e \"require('./build/tasks/build.js').build()\"",
"build:all": "node -e \"(async () => { const { buildDefaultFiles } = await import('./build/tasks/build.js'); buildDefaultFiles() })()\"",
"build:clean": "rimraf --glob dist/*.{js,map} --glob dist-module/*.{js,map}",
"build:main": "node -e \"(async () => { const { build } = await import('./build/tasks/build.js'); build() })()\"",
"lint:dev": "eslint --cache .",
"lint:json": "jsonlint --quiet package.json",
"lint": "concurrently -r \"npm:lint:dev\" \"npm:lint:json\"",
@ -18,6 +19,11 @@
"prepare": "husky",
"pretest": "npm run qunit-fixture && npm run babel:tests && npm run npmcopy",
"qunit-fixture": "node build/tasks/qunit-fixture.js",
"release": "release-it",
"release:cdn": "node build/release/cdn.js",
"release:clean": "rimraf tmp changelog.html contributors.html",
"release:dist": "node build/release/dist.js",
"release:verify": "node build/release/verify.js",
"start": "nodemon --watch src -x \"npm run build:all\"",
"test:browser": "npm run pretest && npm run build:main && npm run test:unit -- -b chrome -b firefox -h",
"test:browserless": "npm run pretest && npm run build:all && node build/tasks/node_smoke_tests.js && node build/tasks/promises_aplus_tests.js && npm run test:unit -- -b jsdom -m basic",
@ -67,23 +73,27 @@
"commitplease": "3.2.0",
"concurrently": "8.2.2",
"core-js-bundle": "3.37.1",
"cross-env": "7.0.3",
"diff": "5.2.0",
"eslint": "9.4.0",
"eslint-config-jquery": "3.0.2",
"exit-hook": "4.0.0",
"express": "4.19.2",
"express-body-parser-error-handler": "1.0.7",
"globals": "15.4.0",
"globals": "15.8.0",
"husky": "9.0.11",
"jsdom": "24.1.0",
"marked": "13.0.2",
"native-promise-only": "0.8.1",
"nodemon": "3.1.3",
"promises-aplus-tests": "2.1.2",
"q": "1.5.1",
"qunit": "2.21.0",
"raw-body": "2.5.2",
"release-it": "17.5.0",
"requirejs": "2.3.6",
"selenium-webdriver": "4.21.0",
"rimraf": "6.0.0",
"sinon": "7.5.0",
"uglify-js": "3.7.7",
"yargs": "17.7.2"