Release: migrate release process to release-it

*Authors*
- Checking and updating authors has been migrated
  to a custom script in the repo

*Changelog*
- changelogplease is no longer maintained
- generate changelog in markdown for GitHub releases
- generate changelog in HTML for blog posts
- generate contributors list in HTML for blog posts

*dist*
- clone dist repo, copy files, and commit/push
- commit tag with dist files on main branch;
  remove dist files from main branch after release

*cdn*
- clone cdn repo, copy files, and commit/push
- create versioned and unversioned copies in cdn/
- generate md5 sums and archives for Google and MSFT

*build*
- implement reproducible builds and verify release builds
  * uses the last modified date for the latest commit
  * See https://reproducible-builds.org/
- the verify workflow also ensures all files were
  properly published to the CDN and npm

*docs*
- the new release workflow is documented at build/release/README.md

*misc*
- now that we no longer need the jquery-release script or
  builds on Node 10, we can use ESM in all files in the
  build folder
- move dist wrappers to "wrappers" folders for easy removal
  of all built files
- limit certain workflows to the main repo (not forks)
- version in package.json has been set to beta.1 so that
  the next release will be beta.2
- release-it added the `preReleaseBase` option and we
  now always set it to `1` in the npm script. This is
  a noop for stable releases.

Fixes jquery/jquery-release#114
Closes gh-5512
This commit is contained in:
Timmy Willison 2023-07-27 11:24:49 -04:00
parent 3a98ef91df
commit 2646a8b07f
38 changed files with 5154 additions and 1061 deletions

View File

@ -13,4 +13,3 @@ insert_final_newline = true
[*.{json,yml}] [*.{json,yml}]
indent_style = space indent_style = space
indent_size = 2 indent_size = 2

View File

@ -10,11 +10,13 @@ permissions:
jobs: jobs:
update: update:
name: Update Filestash
runs-on: ubuntu-latest runs-on: ubuntu-latest
# skip on forks
if: ${{ github.repository == 'jquery/jquery' }}
environment: filestash environment: filestash
env: env:
NODE_VERSION: 20.x NODE_VERSION: 20.x
name: Update Filestash
steps: steps:
- name: Checkout - name: Checkout
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7

33
.github/workflows/verify-release.yml vendored Normal file
View File

@ -0,0 +1,33 @@
name: Reproducible Builds

on:
  # On tags
  push:
    tags:
      - '*'
  # Or manually
  workflow_dispatch:
    inputs:
      version:
        description: 'Version to verify (>= 4.0.0-beta.2)'
        required: false

jobs:
  run:
    name: Verify release
    runs-on: ubuntu-latest
    # skip on forks
    if: ${{ github.repository == 'jquery/jquery' }}
    env:
      NODE_VERSION: 20.x
    steps:
      - name: Checkout
        uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
      - name: Use Node.js ${{ env.NODE_VERSION }}
        uses: actions/setup-node@60edb5dd545a775178f52524783378180af0d1f8 # v4.0.2
        with:
          node-version: ${{ env.NODE_VERSION }}
      - run: npm run release:verify
        env:
          VERSION: ${{ github.event.inputs.version || github.ref_name }}

21
.gitignore vendored
View File

@ -3,7 +3,6 @@
*~ *~
*.diff *.diff
*.patch *.patch
/*.html
.DS_Store .DS_Store
.bower.json .bower.json
.sizecache.json .sizecache.json
@ -13,19 +12,17 @@ tmp
npm-debug.log* npm-debug.log*
# Ignore everything in `dist` folder except for # Ignore built files in `dist` folder
# the ESLint config & package.json files # Leave the package.json and wrappers
/dist/* /dist/*
!/dist/package.json !/dist/package.json
!/dist/jquery.bundler-require-wrapper.js !/dist/wrappers
!/dist/jquery.bundler-require-wrapper.slim.js
# Ignore everything in the `dist-module` folder except for the ESLint config, # Ignore built files in `dist-module` folder
# package.json & Node module wrapper files # Leave the package.json and wrappers
/dist-module/* /dist-module/*
!/dist-module/package.json !/dist-module/package.json
!/dist-module/jquery.node-module-wrapper.js !/dist-module/wrappers
!/dist-module/jquery.node-module-wrapper.slim.js
/external /external
/node_modules /node_modules
@ -33,6 +30,10 @@ npm-debug.log*
/test/data/core/jquery-iterability-transpiled.js /test/data/core/jquery-iterability-transpiled.js
/test/data/qunit-fixture.js /test/data/qunit-fixture.js
# Ignore BrowserStack files # Release artifacts
changelog.*
contributors.*
# Ignore BrowserStack testing files
local.log local.log
browserstack.err browserstack.err

View File

@ -1,12 +1,15 @@
.eslintignore .eslintignore
.eslintcache
eslint.config.js eslint.config.js
/.editorconfig /.editorconfig
/.gitattributes /.gitattributes
/.mailmap /.mailmap
/.sizecache.json
/build /build
/external /external
/speed
/test /test
/Gruntfile.cjs /tmp
/changelog.html
/contributors.html

31
.release-it.cjs Normal file
View File

@ -0,0 +1,31 @@
"use strict";
const blogURL = process.env.BLOG_URL;
if ( !blogURL || !blogURL.startsWith( "https://blog.jquery.com/" ) ) {
throw new Error( "A valid BLOG_URL must be set in the environment" );
}
module.exports = {
hooks: {
"before:init": "./build/release/pre-release.sh",
"before:git:release": "git add -f dist/ dist-module/ changelog.md",
"after:version:bump":
"sed -i 's/main/AUTHORS.txt/${version}/AUTHORS.txt/' package.json",
"after:release": `./build/release/post-release.sh \${version} ${ blogURL }`
},
git: {
changelog: "npm run release:changelog -- ${from} ${to}",
commitMessage: "Release: ${version}",
getLatestTagFromAllRefs: true,
requireBranch: "main",
requireCleanWorkingDir: true
},
github: {
release: true,
tokenRef: "JQUERY_GITHUB_TOKEN"
},
npm: {
publish: true
}
};

View File

@ -1,8 +1,6 @@
"use strict"; import yargs from "yargs/yargs";
import { build } from "./tasks/build.js";
const { build } = require( "./tasks/build" ); import slimExclude from "./tasks/lib/slim-exclude.js";
const yargs = require( "yargs/yargs" );
const slimExclude = require( "./tasks/lib/slim-exclude" );
const argv = yargs( process.argv.slice( 2 ) ) const argv = yargs( process.argv.slice( 2 ) )
.version( false ) .version( false )

View File

@ -1,3 +0,0 @@
{
"type": "commonjs"
}

View File

@ -1,96 +0,0 @@
"use strict";
const fs = require( "node:fs" );
module.exports = function( Release ) {
const distFiles = [
"dist/jquery.js",
"dist/jquery.min.js",
"dist/jquery.min.map",
"dist/jquery.slim.js",
"dist/jquery.slim.min.js",
"dist/jquery.slim.min.map",
"dist/jquery.factory.js",
"dist/jquery.factory.slim.js",
"dist-module/jquery.module.js",
"dist-module/jquery.module.min.js",
"dist-module/jquery.module.min.map",
"dist-module/jquery.slim.module.js",
"dist-module/jquery.slim.module.min.js",
"dist-module/jquery.slim.module.min.map",
"dist-module/jquery.factory.module.js",
"dist-module/jquery.factory.slim.module.js"
];
const filesToCommit = [
...distFiles,
"src/core.js"
];
const cdn = require( "./release/cdn" );
const dist = require( "./release/dist" );
const { buildDefaultFiles } = require( "./tasks/build" );
const npmTags = Release.npmTags;
function setSrcVersion( filepath ) {
var contents = fs.readFileSync( filepath, "utf8" );
contents = contents.replace( /@VERSION/g, Release.newVersion );
fs.writeFileSync( filepath, contents, "utf8" );
}
Release.define( {
npmPublish: true,
issueTracker: "github",
// Update cdn location to versioned files
cdnPublish: "dist/cdn/versioned",
/**
* Set the version in the src folder for distributing ES modules.
*/
_setSrcVersion: function() {
setSrcVersion( `${ __dirname }/../src/core.js` );
},
/**
* Generates any release artifacts that should be included in the release.
* The callback must be invoked with an array of files that should be
* committed before creating the tag.
* @param {Function} callback
*/
generateArtifacts: async function( callback ) {
await buildDefaultFiles( { version: Release.newVersion } );
cdn.makeReleaseCopies( Release );
Release._setSrcVersion();
callback( filesToCommit );
},
/**
* Acts as insertion point for restoring Release.dir.repo
* It was changed to reuse npm publish code in jquery-release
* for publishing the distribution repo instead
*/
npmTags: function() {
// origRepo is not defined if dist was skipped
Release.dir.repo = Release.dir.origRepo || Release.dir.repo;
return npmTags();
},
/**
* Publish to distribution repo and npm
* @param {Function} callback
*/
dist: async function( callback ) {
await cdn.makeArchives( Release );
dist( Release, distFiles, callback );
}
} );
};
module.exports.dependencies = [
"archiver@5.2.0",
"shelljs@0.8.4",
"inquirer@8.0.0"
];

116
build/release/README.md Normal file
View File

@ -0,0 +1,116 @@
# Releasing jQuery
This document describes the process for releasing a new version of jQuery. It is intended for jQuery team members and collaborators who have been granted permission to release new versions.
## Prerequisites
Before you can release a new version of jQuery, you need to have the following tools installed:
- [Node.js](https://nodejs.org/) (latest LTS version)
- [npm](https://www.npmjs.com/) (comes with Node.js)
- [git](https://git-scm.com/)
## Setup
1. Clone the jQuery repo:
```sh
git clone git@github.com:jquery/jquery.git
cd jquery
```
1. Install the dependencies:
```sh
npm install
```
1. Log into npm with a user that has access to the `jquery` package.
```sh
npm login
```
The release script will not run if not logged in.
1. Set `JQUERY_GITHUB_TOKEN` in the shell environment that will be used to run `npm run release`. The token can be [created on GitHub](https://github.com/settings/tokens/new?scopes=repo&description=release-it) and only needs the `repo` scope. This token is used to publish GitHub release notes and generate a list of contributors for the blog post.
```sh
export JQUERY_GITHUB_TOKEN=...
```
The release script will not run without this token.
## Release Process
1. Ensure all milestoned issues/PRs are closed, or reassign to a new milestone.
1. Verify all tests are passing in [CI](https://github.com/jquery/jquery/actions).
1. Run any release-only tests, such as those in the [`test/integration`](../../test/integration/) folder.
1. Ensure AUTHORS.txt file is up to date (this will be verified by the release script).
- Use `npm run authors:update` to update.
1. Create draft blog post on blog.jquery.com; save the link before publishing. The link is required to run the release.
- Highlight major changes and reason for release.
- Add contributor list generated in the below release script.
- Add HTML from the `changelog.html` generated in the below release script.
- Use HTML from the `contributors.html` generated in the below release script in the "Thanks" section.
1. Run a dry run of the release script:
```sh
BLOG_URL=https://blog.jquery.com/... npm run release -- -d
```
1. If the dry run is successful, run the release script:
```sh
BLOG_URL=https://blog.jquery.com/... npm run release
```
This will run the pre-release script, which includes checking authors, running tests, running the build, and cloning the CDN and jquery-dist repos in the `tmp/` folder.
It will then walk you through the rest of the release process: creating the tag, publishing to npm, publishing release notes on GitHub, and pushing the updated branch and new tag to the jQuery repo.
Finally, it will run the post-release script, which will ask you to confirm the files prepared in `tmp/release/cdn` and `tmp/release/dist` are correct before pushing to the respective repos. It will also prepare a commit for the jQuery repo to remove the release files and update the AUTHORS.txt URL in the package.json. It will ask for confirmation before pushing that commit as well.
For a pre-release, run:
```sh
BLOG_URL=https://blog.jquery.com/... npm run release -- --preRelease=beta
```
`preRelease` can also be set to `alpha` or `rc`.
**Note**: `preReleaseBase` is set in the npm script to `1` to ensure any pre-releases start at `.1` instead of `.0`. This does not interfere with stable releases.
1. Once the release is complete, publish the blog post.
## Stable releases
Stable releases have a few more steps:
1. Close the milestone matching the current release: https://github.com/jquery/jquery/milestones. Ensure there is a new milestone for the next release.
1. Update jQuery on https://github.com/jquery/jquery-wp-content.
1. Update jQuery on https://github.com/jquery/blog.jquery.com-theme.
1. Update latest jQuery version for [healthyweb.org](https://github.com/jquery/healthyweb.org/blob/main/wrangler.toml).
1. Update the shipping version on [jquery.com home page](https://github.com/jquery/jquery.com).
```sh
git pull jquery/jquery.com
# Edit index.html and download.md
git commit
npm version patch
git push origin main --tags
```
1. Update the version used in [jQuery docs demos](https://github.com/jquery/api.jquery.com/blob/main/entries2html.xsl).
1. Email archives to CDNs.
| CDN | Emails | Include |
| --- | ------ | ------- |
| Google | hosted-libraries@google | `tmp/archives/googlecdn-jquery-*.zip` |
| Microsoft | damian.edwards@microsoft, Chris.Sfanos@microsoft | `tmp/archives/mscdn-jquery-*.zip` |
| CDNJS | ryan@ryankirkman, thomasalwyndavis@gmail | Blog post link |

59
build/release/archive.js Normal file
View File

@ -0,0 +1,59 @@
import { readdir, writeFile } from "node:fs/promises";
import { createReadStream, createWriteStream } from "node:fs";
import path from "node:path";
import util from "node:util";
import os from "node:os";
import { exec as nodeExec } from "node:child_process";
import archiver from "archiver";
const exec = util.promisify( nodeExec );
/**
 * Compute md5 sums for a list of files.
 * @param {string[]} files - filenames relative to `folder`
 * @param {string} folder - working directory for the hashing commands
 * @returns {Promise<string>} newline-separated "HASH filename" lines
 */
async function md5sum( files, folder ) {

	// Windows ships certutil; parse the hex digest out of its output
	if ( os.platform() === "win32" ) {
		const rmd5 = /[a-f0-9]{32}/;
		const lines = [];

		for ( const file of files ) {
			const { stdout } = await exec( `certutil -hashfile ${ file } MD5`, {
				cwd: folder
			} );
			lines.push( `${ rmd5.exec( stdout )[ 0 ] } ${ file }` );
		}

		return lines.join( "\n" );
	}

	// `md5 -r` prints "HASH filename" lines
	// NOTE(review): `md5` is the macOS invocation; presumably releases are
	// not cut from Linux, where the command is `md5sum` — confirm.
	const { stdout } = await exec( "md5 -r " + files.join( " " ), { cwd: folder } );
	return stdout;
}
/**
 * Create a production zip archive for a CDN, containing every file already
 * in `folder` plus a generated md5 manifest.
 * @param {Object} options
 * @param {string} options.cdn - CDN name; used in the zip and manifest names
 * @param {string} options.folder - folder whose files are archived
 * @param {string} options.version - release version; used in the zip name
 * @returns {Promise<void>} resolves once the zip stream has closed
 */
export default async function archive( { cdn, folder, version } ) {
	console.log( `Creating production archive for ${ cdn }...` );

	const md5file = cdn + "-md5.txt";
	const output = createWriteStream(
		path.join( folder, cdn + "-jquery-" + version + ".zip" )
	);

	// Settle when the zip has been fully written. Attach listeners before
	// writing so an early "error" is not missed. Avoids the async
	// promise-executor anti-pattern of the previous version, where a
	// rejection from an awaited call inside the executor was swallowed.
	const done = new Promise( ( resolve, reject ) => {
		output.on( "close", resolve );
		output.on( "error", reject );
	} );

	const zip = archiver( "zip" );
	zip.pipe( output );

	// Hash the existing files, then add the manifest itself to the archive
	const files = await readdir( folder );
	const sum = await md5sum( files, folder );
	await writeFile( path.join( folder, md5file ), sum );
	files.push( md5file );

	for ( const file of files ) {
		zip.append( createReadStream( path.join( folder, file ) ), {
			name: path.basename( file )
		} );
	}

	zip.finalize();
	return done;
}

View File

@ -1,8 +1,11 @@
"use strict";
const fs = require( "node:fs/promises" );
const util = require( "node:util" ); import fs from "node:fs/promises";
const exec = util.promisify( require( "node:child_process" ).exec ); import util from "node:util";
import { exec as nodeExec } from "node:child_process";
const exec = util.promisify( nodeExec );
const rnewline = /\r?\n/; const rnewline = /\r?\n/;
const rdate = /^\[(\d+)\] /; const rdate = /^\[(\d+)\] /;
@ -70,14 +73,14 @@ function formatAuthor( author ) {
return author.replace( rdate, "" ); return author.replace( rdate, "" );
} }
async function getAuthors() { export async function getAuthors() {
console.log( "Getting authors..." ); console.log( "Getting authors..." );
const authors = await logAuthors(); const authors = await logAuthors();
const sizzleAuthors = await getSizzleAuthors(); const sizzleAuthors = await getSizzleAuthors();
return uniq( authors.concat( sizzleAuthors ) ).sort( sortAuthors ).map( formatAuthor ); return uniq( authors.concat( sizzleAuthors ) ).sort( sortAuthors ).map( formatAuthor );
} }
async function checkAuthors() { export async function checkAuthors() {
const authors = await getAuthors(); const authors = await getAuthors();
const lastAuthor = await getLastAuthor(); const lastAuthor = await getLastAuthor();
@ -89,7 +92,7 @@ async function checkAuthors() {
console.log( "AUTHORS.txt is up to date" ); console.log( "AUTHORS.txt is up to date" );
} }
async function updateAuthors() { export async function updateAuthors() {
const authors = await getAuthors(); const authors = await getAuthors();
const authorsTxt = "Authors ordered by first contribution.\n\n" + authors.join( "\n" ) + "\n"; const authorsTxt = "Authors ordered by first contribution.\n\n" + authors.join( "\n" ) + "\n";
@ -97,9 +100,3 @@ async function updateAuthors() {
console.log( "AUTHORS.txt updated" ); console.log( "AUTHORS.txt updated" );
} }
module.exports = {
checkAuthors,
getAuthors,
updateAuthors
};

View File

@ -1,175 +1,130 @@
"use strict"; import { mkdir, readFile, writeFile } from "node:fs/promises";
import path from "node:path";
import { argv } from "node:process";
import util from "node:util";
import { exec as nodeExec } from "node:child_process";
import { rimraf } from "rimraf";
import archive from "./archive.js";
const fs = require( "node:fs" ); const exec = util.promisify( nodeExec );
const shell = require( "shelljs" );
const path = require( "node:path" );
const os = require( "node:os" );
const cdnFolderContainer = "dist/cdn"; const version = argv[ 2 ];
const cdnFolderVersioned = `${ cdnFolderContainer }/versioned`;
const cdnFolderUnversioned = `${ cdnFolderContainer }/unversioned`;
const versionedReleaseFiles = { if ( !version ) {
"jquery-@VER.js": "dist/jquery.js", throw new Error( "No version specified" );
"jquery-@VER.min.js": "dist/jquery.min.js", }
"jquery-@VER.min.map": "dist/jquery.min.map",
"jquery-@VER.slim.js": "dist/jquery.slim.js",
"jquery-@VER.slim.min.js": "dist/jquery.slim.min.js",
"jquery-@VER.slim.min.map": "dist/jquery.slim.min.map",
"jquery-@VER.module.js": "dist-module/jquery.module.js",
"jquery-@VER.module.min.js": "dist-module/jquery.module.min.js",
"jquery-@VER.module.min.map": "dist-module/jquery.module.min.map",
"jquery-@VER.slim.module.js": "dist-module/jquery.slim.module.js",
"jquery-@VER.slim.module.min.js": "dist-module/jquery.slim.module.min.js",
"jquery-@VER.slim.module.min.map": "dist-module/jquery.slim.module.min.map"
};
const unversionedReleaseFiles = { const archivesFolder = "tmp/archives";
"jquery.js": "dist/jquery.js", const versionedFolder = `${ archivesFolder }/versioned`;
"jquery.min.js": "dist/jquery.min.js", const unversionedFolder = `${ archivesFolder }/unversioned`;
"jquery.min.map": "dist/jquery.min.map",
"jquery.slim.js": "dist/jquery.slim.js",
"jquery.slim.min.js": "dist/jquery.slim.min.js",
"jquery.slim.min.map": "dist/jquery.slim.min.map",
"jquery.module.js": "dist-module/jquery.module.js",
"jquery.module.min.js": "dist-module/jquery.module.min.js",
"jquery.module.min.map": "dist-module/jquery.module.min.map",
"jquery.slim.module.js": "dist-module/jquery.slim.module.js",
"jquery.slim.module.min.js": "dist-module/jquery.slim.module.min.js",
"jquery.slim.module.min.map": "dist-module/jquery.slim.module.min.map"
};
/** // The cdn repo is cloned during release
* Generates copies for the CDNs const cdnRepoFolder = "tmp/release/cdn";
*/
function makeReleaseCopies( Release ) {
[
{ filesMap: versionedReleaseFiles, cdnFolder: cdnFolderVersioned },
{ filesMap: unversionedReleaseFiles, cdnFolder: cdnFolderUnversioned }
].forEach( ( { filesMap, cdnFolder } ) => {
shell.mkdir( "-p", cdnFolder );
Object.keys( filesMap ).forEach( ( key ) => { // .min.js and .min.map files are expected
let text; // in the same directory as the uncompressed files.
const builtFile = filesMap[ key ]; const sources = [
const unpathedFile = key.replace( /@VER/g, Release.newVersion ); "dist/jquery.js",
const releaseFile = cdnFolder + "/" + unpathedFile; "dist/jquery.slim.js",
"dist-module/jquery.module.js",
"dist-module/jquery.slim.module.js"
];
if ( /\.map$/.test( releaseFile ) ) { const rminmap = /\.min\.map$/;
const rjs = /\.js$/;
function clean() {
console.log( "Cleaning any existing archives..." );
return rimraf( archivesFolder );
}
// Map files need to reference the new uncompressed name; // Map files need to reference the new uncompressed name;
// assume that all files reside in the same directory. // assume that all files reside in the same directory.
// "file":"jquery.min.js" ... "sources":["jquery.js"] // "file":"jquery.min.js" ... "sources":["jquery.js"]
text = fs // This is only necessary for the versioned files.
.readFileSync( builtFile, "utf8" ) async function convertMapToVersioned( file, folder ) {
.replace( const mapFile = file.replace( /\.js$/, ".min.map" );
/"file":"([^"]+)"/, const filename = path
`"file":"${ unpathedFile.replace( /\.min\.map/, ".min.js" ) }"` .basename( mapFile )
) .replace( "jquery", "jquery-" + version );
.replace(
/"sources":\["([^"]+)"\]/,
`"sources":["${ unpathedFile.replace( /\.min\.map/, ".js" ) }"]`
);
fs.writeFileSync( releaseFile, text );
} else if ( builtFile !== releaseFile ) {
shell.cp( "-f", builtFile, releaseFile );
}
} );
} );
}
async function makeArchives( Release ) { const contents = JSON.parse( await readFile( mapFile, "utf8" ) );
Release.chdir( Release.dir.repo );
async function makeArchive( { cdn, filesMap, cdnFolder } ) { return writeFile(
return new Promise( ( resolve, reject ) => { path.join( folder, filename ),
if ( Release.preRelease ) { JSON.stringify( {
console.log( ...contents,
`Skipping archive creation for ${ cdn }; this is a beta release.` file: filename.replace( rminmap, ".min.js" ),
); sources: [ filename.replace( rminmap, ".js" ) ]
resolve(); } )
return;
}
console.log( "Creating production archive for " + cdn );
let i, sum, result;
const archiver = require( "archiver" )( "zip" );
const md5file = cdnFolder + "/" + cdn + "-md5.txt";
const output = fs.createWriteStream(
cdnFolder + "/" + cdn + "-jquery-" + Release.newVersion + ".zip"
);
const rmd5 = /[a-f0-9]{32}/;
const rver = /@VER/;
output.on( "close", resolve );
output.on( "error", ( err ) => {
reject( err );
} );
archiver.pipe( output );
let finalFilesMap = Object.create( null );
for ( const [ releaseFile, builtFile ] of Object.entries( filesMap ) ) {
finalFilesMap[ releaseFile.replace( rver, Release.newVersion ) ] =
builtFile;
}
const files = Object.keys( filesMap ).map(
( item ) => `${ cdnFolder }/${ item.replace( rver, Release.newVersion ) }`
);
if ( os.platform() === "win32" ) {
sum = [];
for ( i = 0; i < files.length; i++ ) {
result = Release.exec(
"certutil -hashfile " + files[ i ] + " MD5",
"Error retrieving md5sum"
);
sum.push( rmd5.exec( result )[ 0 ] + " " + files[ i ] );
}
sum = sum.join( "\n" );
} else {
sum = Release.exec(
"md5 -r " + files.join( " " ),
"Error retrieving md5sum"
); );
} }
fs.writeFileSync( md5file, sum );
files.push( md5file );
files.forEach( ( file ) => { async function makeUnversionedCopies() {
archiver.append( fs.createReadStream( file ), { await mkdir( unversionedFolder, { recursive: true } );
name: path.basename( file )
} );
} );
archiver.finalize(); return Promise.all(
} ); sources.map( async( file ) => {
const filename = path.basename( file );
const minFilename = filename.replace( rjs, ".min.js" );
const mapFilename = filename.replace( rjs, ".min.map" );
await exec( `cp -f ${ file } ${ unversionedFolder }/${ filename }` );
await exec(
`cp -f ${ file.replace(
rjs,
".min.js"
) } ${ unversionedFolder }/${ minFilename }`
);
await exec(
`cp -f ${ file.replace(
rjs,
".min.map"
) } ${ unversionedFolder }/${ mapFilename }`
);
} )
);
} }
async function buildGoogleCDN() { async function makeVersionedCopies() {
await makeArchive( { await mkdir( versionedFolder, { recursive: true } );
cdn: "googlecdn",
filesMap: unversionedReleaseFiles, return Promise.all(
cdnFolder: cdnFolderUnversioned sources.map( async( file ) => {
} ); const filename = path
.basename( file )
.replace( "jquery", "jquery-" + version );
const minFilename = filename.replace( rjs, ".min.js" );
await exec( `cp -f ${ file } ${ versionedFolder }/${ filename }` );
await exec(
`cp -f ${ file.replace(
rjs,
".min.js"
) } ${ versionedFolder }/${ minFilename }`
);
await convertMapToVersioned( file, versionedFolder );
} )
);
} }
async function buildMicrosoftCDN() { async function copyToRepo( folder ) {
await makeArchive( { return exec( `cp -f ${ folder }/* ${ cdnRepoFolder }/cdn/` );
cdn: "mscdn",
filesMap: versionedReleaseFiles,
cdnFolder: cdnFolderVersioned
} );
} }
await buildGoogleCDN(); async function cdn() {
await buildMicrosoftCDN(); await clean();
await Promise.all( [ makeUnversionedCopies(), makeVersionedCopies() ] );
await copyToRepo( versionedFolder );
await Promise.all( [
archive( { cdn: "googlecdn", folder: unversionedFolder, version } ),
archive( { cdn: "mscdn", folder: versionedFolder, version } )
] );
console.log( "Files ready for CDNs." );
} }
module.exports = { cdn();
makeReleaseCopies: makeReleaseCopies,
makeArchives: makeArchives
};

239
build/release/changelog.js Normal file
View File

@ -0,0 +1,239 @@
import { writeFile } from "node:fs/promises";
import { argv } from "node:process";
import { exec as nodeExec } from "node:child_process";
import util from "node:util";
import { marked } from "marked";
const exec = util.promisify( nodeExec );
const rbeforeHash = /.#$/;
const rendsWithHash = /#$/;
const rcherry = / \(cherry picked from commit [^)]+\)/;
const rcommit = /Fix(?:e[sd])? ((?:[a-zA-Z0-9_-]{1,39}\/[a-zA-Z0-9_-]{1,100}#)|#|gh-)(\d+)/g;
const rcomponent = /^([^ :]+):\s*([^\n]+)/;
const rnewline = /\r?\n/;
const prevVersion = argv[ 2 ];
const nextVersion = argv[ 3 ];
const blogUrl = process.env.BLOG_URL;
if ( !prevVersion || !nextVersion ) {
throw new Error( "Usage: `node changelog.js PREV_VERSION NEXT_VERSION`" );
}
/**
 * Build the issue-tracker URL for a jQuery core ticket.
 * @param {string|number} ticketId
 * @returns {string} GitHub issue URL
 */
function ticketUrl( ticketId ) {
	return "https://github.com/jquery/jquery/issues/" + ticketId;
}
/**
 * Extract ticket references ("Fixes #123", "Fixes gh-123",
 * "Fixes owner/repo#123") from a commit message.
 * @param {string} commit - full commit message
 * @returns {Array<{url: string, label: string}>} one entry per reference
 */
function getTicketsForCommit( commit ) {
	const tickets = [];

	for ( const [ , refType, ticketId ] of commit.matchAll( rcommit ) ) {
		const ticket = {
			url: ticketUrl( ticketId ),
			label: "#" + ticketId
		};

		// If the refType has anything before the #, assume it's a GitHub ref
		if ( rbeforeHash.test( refType ) ) {
			const repo = refType.replace( rendsWithHash, "" );
			ticket.url = `https://github.com/${ repo }/issues/${ ticketId }`;
			ticket.label = repo + ticket.label;
		}

		tickets.push( ticket );
	}

	return tickets;
}
/**
 * Collect all commits between prevVersion and nextVersion and convert them
 * to sorted markdown lines, with revert pairs removed.
 * @returns {Promise<string[]>} parsed commit messages
 */
async function getCommits() {

	// %s subject, %h/%H abbreviated/full hash, %b body.
	// __COMMIT__ and __TICKETREF__ are internal markers: the first splits
	// the log output into individual commits, the second is replaced with
	// ticket links by parseCommit().
	const format =
		"__COMMIT__%n%s (__TICKETREF__[%h](https://github.com/jquery/jquery/commit/%H))%n%b";
	const { stdout } = await exec(
		`git log --format="${ format }" ${ prevVersion }..${ nextVersion }`
	);

	// Drop the empty chunk before the first __COMMIT__ marker
	const commits = stdout.split( "__COMMIT__" ).slice( 1 );

	return removeReverts( commits.map( parseCommit ).sort( sortCommits ) );
}
/**
 * Convert one raw commit into a single markdown line: subject only,
 * with ticket links inserted and cherry-pick notes removed.
 * @param {string} commit - raw commit text from `git log`
 * @returns {string} markdown-formatted commit line
 */
function parseCommit( commit ) {
	const ticketLinks = getTicketsForCommit( commit )
		.map( ( ticket ) => `[${ ticket.label }](${ ticket.url })` )
		.join( ", " );

	// Drop the commit message body; keep the subject line only
	const subject = commit.trim().split( rnewline )[ 0 ];

	// Add any ticket references, then remove cherry pick references
	return subject
		.replace( "__TICKETREF__", ticketLinks ? `${ ticketLinks }, ` : "" )
		.replace( rcherry, "" );
}
/**
 * Comparator for commit lines: sort by leading "Component:" prefix when
 * both lines have one, otherwise by the whole line.
 * @param {string} a
 * @param {string} b
 * @returns {number} -1, 0 or 1
 */
function sortCommits( a, b ) {
	const aMatch = rcomponent.exec( a );
	const bMatch = rcomponent.exec( b );

	// Both commits have a component prefix: compare the components
	if ( aMatch && bMatch ) {
		if ( aMatch[ 1 ] === bMatch[ 1 ] ) {
			return 0;
		}
		return aMatch[ 1 ] < bMatch[ 1 ] ? -1 : 1;
	}

	// Fall back to comparing the full commit lines
	if ( a === b ) {
		return 0;
	}
	return a < b ? -1 : 1;
}
/**
 * Remove all revert commits and the commit it is reverting.
 * Mutates and returns the given array.
 * @param {string[]} commits - parsed commit lines
 * @returns {string[]} the same array with revert pairs removed
 */
function removeReverts( commits ) {
	const rrevert = /\*\s*Revert "([^"]*)"/;
	const toRemove = [];

	for ( const commit of commits ) {
		const match = rrevert.exec( commit );

		// Ignore double reverts
		if ( match && !/^Revert "([^"]*)"/.test( match[ 0 ] ) ) {
			toRemove.push( commit, match[ 0 ] );
		}
	}

	for ( const message of toRemove ) {
		const index = commits.findIndex( ( commit ) => commit.includes( message ) );
		if ( index > -1 ) {

			// console.log( "Removing ", commits[ index ] );
			commits.splice( index, 1 );
		}
	}

	return commits;
}
/**
 * Group commit lines under "## Component" markdown headers.
 * Commits without a recognizable component are listed as-is.
 * @param {string[]} commits - sorted commit lines
 * @returns {string} markdown changelog body
 */
function addHeaders( commits ) {
	const seen = {};
	let markdown = "";

	for ( const commit of commits ) {
		const match = rcomponent.exec( commit );

		if ( !match ) {
			markdown += `- ${ commit }\n`;
			continue;
		}

		let component = match[ 1 ];

		// Capitalize lowercased components, e.g. "ajax" -> "Ajax"
		if ( !/^[A-Z]/.test( component ) ) {
			component =
				component.slice( 0, 1 ).toUpperCase() +
				component.slice( 1 ).toLowerCase();
		}

		// Emit each component header only once (case-insensitively)
		const key = component.toLowerCase();
		if ( !seen[ key ] ) {
			seen[ key ] = true;
			markdown += "\n## " + component + "\n\n";
		}

		markdown += `- ${ match[ 2 ] }\n`;
	}

	return markdown;
}
/**
 * Fetch the author name and GitHub profile URL for one commit via the
 * GitHub commits API. Requires JQUERY_GITHUB_TOKEN in the environment.
 * @param {string} sha - commit hash
 * @returns {Promise<{name: string, url: string}>}
 * @throws {Error} when the response lacks commit/author data (bad token,
 *   rate limiting, or presumably a commit with no linked GitHub account)
 */
async function getGitHubContributor( sha ) {
	const response = await fetch(
		`https://api.github.com/repos/jquery/jquery/commits/${ sha }`,
		{
			headers: {
				Accept: "application/vnd.github+json",
				Authorization: `Bearer ${ process.env.JQUERY_GITHUB_TOKEN }`,
				"X-GitHub-Api-Version": "2022-11-28"
			}
		}
	);
	const data = await response.json();

	if ( !data.commit || !data.author ) {

		// The data may contain multiple helpful fields
		throw new Error( JSON.stringify( data ) );
	}

	return { name: data.commit.author.name, url: data.author.html_url };
}
/**
 * Deduplicate contributors by name, keeping the first occurrence.
 * @param {Array<{name: string, url: string}>} contributors
 * @returns {Array<{name: string, url: string}>} new array, order preserved
 */
function uniqueContributors( contributors ) {
	const seen = new Set();
	const unique = [];

	for ( const contributor of contributors ) {
		if ( !seen.has( contributor.name ) ) {
			seen.add( contributor.name );
			unique.push( contributor );
		}
	}

	return unique;
}
/**
 * Build an HTML list of contributor links for the release blog post.
 * Contributors are deduped by name, sorted by last name, and the release
 * manager / dependabot are excluded.
 * @returns {Promise<string>} newline-separated anchor tags
 */
async function getContributors() {
	const { stdout } = await exec(
		`git log --format="%H" ${ prevVersion }..${ nextVersion }`
	);
	const shas = stdout.split( rnewline ).filter( Boolean );

	// One GitHub API request per commit, all in parallel
	const contributors = await Promise.all( shas.map( getGitHubContributor ) );

	return uniqueContributors( contributors )

		// Sort by last name
		.sort( ( a, b ) => {
			const aName = a.name.split( " " );
			const bName = b.name.split( " " );
			return aName[ aName.length - 1 ].localeCompare( bName[ bName.length - 1 ] );
		} )
		.map( ( { name, url } ) => {

			// Exclude the release manager and dependency bots
			if ( name === "Timmy Willison" || name.includes( "dependabot" ) ) {
				return;
			}
			return `<a href="${ url }">${ name }</a>`;
		} )
		.filter( Boolean ).join( "\n" );
}
/**
 * Generate all changelog artifacts:
 * - changelog.md (markdown, committed with the release)
 * - changelog.html (for the blog post)
 * - contributors.html (for the blog post "Thanks" section)
 * The markdown changelog is also printed so release-it can capture it.
 * @returns {Promise<string>} the markdown changelog
 */
async function generate() {
	const commits = await getCommits();
	const contributors = await getContributors();
	let changelog = "# Changelog\n";

	// Link the announcement post at the top when BLOG_URL is set
	if ( blogUrl ) {
		changelog += `\n${ blogUrl }\n`;
	}
	changelog += addHeaders( commits );

	// Write markdown to changelog.md
	await writeFile( "changelog.md", changelog );

	// Write HTML to changelog.html for blog post
	await writeFile( "changelog.html", marked.parse( changelog ) );

	// Write contributors HTML for blog post
	await writeFile( "contributors.html", contributors );

	// Log regular changelog for release-it
	console.log( changelog );
	return changelog;
}
generate();

View File

@ -1,190 +1,125 @@
"use strict"; import { readFile, writeFile } from "node:fs/promises";
import util from "node:util";
import { argv } from "node:process";
import { exec as nodeExec } from "node:child_process";
import { rimraf } from "rimraf";
module.exports = function( Release, files, complete ) { const pkg = JSON.parse( await readFile( "./package.json", "utf8" ) );
const fs = require( "node:fs/promises" ); const exec = util.promisify( nodeExec );
const shell = require( "shelljs" );
const inquirer = require( "inquirer" );
const pkg = require( `${ Release.dir.repo }/package.json` );
const distRemote = Release.remote
// For local and github dists const version = argv[ 2 ];
.replace( /jquery(\.git|$)/, "jquery-dist$1" ); const blogURL = argv[ 3 ];
// These files are included with the distribution if ( !version ) {
const extras = [ throw new Error( "No version specified" );
}
if ( !blogURL || !blogURL.startsWith( "https://blog.jquery.com/" ) ) {
throw new Error( "Invalid blog post URL" );
}
// The dist repo is cloned during release
const distRepoFolder = "tmp/release/dist";
// Files to be included in the dist repo.
// README.md and bower.json are generated.
const files = [
"dist",
"dist-module",
"src", "src",
"LICENSE.txt", "LICENSE.txt",
"AUTHORS.txt", "AUTHORS.txt",
"dist/package.json", "changelog.md"
"dist/jquery.bundler-require-wrapper.js",
"dist/jquery.bundler-require-wrapper.slim.js",
"dist-module/package.json",
"dist-module/jquery.node-module-wrapper.js",
"dist-module/jquery.node-module-wrapper.slim.js"
]; ];
/** async function generateBower() {
* Clone the distribution repo return JSON.stringify(
*/ {
function clone() {
Release.chdir( Release.dir.base );
Release.dir.dist = `${ Release.dir.base }/dist`;
console.log( "Using distribution repo: ", distRemote );
Release.exec( `git clone ${ distRemote } ${ Release.dir.dist }`,
"Error cloning repo." );
// Distribution always works on main
Release.chdir( Release.dir.dist );
Release.exec( "git checkout main", "Error checking out branch." );
console.log();
}
/**
* Generate bower file for jquery-dist
*/
function generateBower() {
return JSON.stringify( {
name: pkg.name, name: pkg.name,
main: pkg.main, main: pkg.main,
license: "MIT", license: "MIT",
ignore: [ ignore: [ "package.json" ],
"package.json"
],
keywords: pkg.keywords keywords: pkg.keywords
}, null, 2 ); },
null,
2
);
} }
/** async function generateReadme() {
* Replace the version in the README const readme = await readFile(
* @param {string} readme "./build/fixtures/README.md",
* @param {string} blogPostLink "utf8"
*/ );
function editReadme( readme, blogPostLink ) {
return readme return readme
.replace( /@VERSION/g, Release.newVersion ) .replace( /@VERSION/g, version )
.replace( /@BLOG_POST_LINK/g, blogPostLink ); .replace( /@BLOG_POST_LINK/g, blogURL );
} }
/** /**
* Copy necessary files over to the dist repo * Copy necessary files over to the dist repo
*/ */
async function copy() { async function copyFiles() {
const readme = await fs.readFile(
`${ Release.dir.repo }/build/fixtures/README.md`, "utf8" );
const rmIgnore = [ ...files, "node_modules" ]
.map( file => `${ Release.dir.dist }/${ file }` );
shell.config.globOptions = { // Remove any extraneous files before copy
ignore: rmIgnore await rimraf( [
}; `${ distRepoFolder }/dist`,
`${ distRepoFolder }/dist-module`,
`${ distRepoFolder }/src`
] );
const { blogPostLink } = await inquirer.prompt( [ { // Copy all files
type: "input", await Promise.all(
name: "blogPostLink", files.map( function( path ) {
message: "Enter URL of the blog post announcing the jQuery release...\n" console.log( `Copying ${ path }...` );
} ] ); return exec( `cp -rf ${ path } ${ distRepoFolder }/${ path }` );
} )
// Remove extraneous files before copy
shell.rm( "-rf", `${ Release.dir.dist }/**/*` );
// Copy dist files
shell.mkdir( "-p", `${ Release.dir.dist }/dist` );
shell.mkdir( "-p", `${ Release.dir.dist }/dist-module` );
files.forEach( function( file ) {
shell.cp(
"-f",
`${ Release.dir.repo }/${ file }`,
`${ Release.dir.dist }/${ file }`
);
} );
// Copy other files
extras.forEach( function( file ) {
shell.cp(
"-rf",
`${ Release.dir.repo }/${ file }`,
`${ Release.dir.dist }/${ file }`
);
} );
// Remove the wrapper & the ESLint config from the dist repo
shell.rm( "-f", `${ Release.dir.dist }/src/wrapper.js` );
shell.rm( "-f", `${ Release.dir.dist }/src/.eslintrc.json` );
// Write package.json
// Remove scripts and other superfluous properties,
// especially the prepare script, which fails on the dist repo
const packageJson = Object.assign( {}, pkg );
delete packageJson.scripts;
delete packageJson.devDependencies;
delete packageJson.dependencies;
delete packageJson.commitplease;
packageJson.version = Release.newVersion;
await fs.writeFile(
`${ Release.dir.dist }/package.json`,
JSON.stringify( packageJson, null, 2 )
); );
// Write generated bower file // Remove the wrapper from the dist repo
await fs.writeFile( `${ Release.dir.dist }/bower.json`, generateBower() ); await rimraf( [
`${ distRepoFolder }/src/wrapper.js`
] );
await fs.writeFile( `${ Release.dir.dist }/README.md`, // Set the version in src/core.js
editReadme( readme, blogPostLink ) ); const core = await readFile( `${ distRepoFolder }/src/core.js`, "utf8" );
await writeFile(
`${ distRepoFolder }/src/core.js`,
core.replace( /@VERSION/g, version )
);
console.log( "Files ready to add." ); // Write generated README
console.log( "Generating README.md..." );
const readme = await generateReadme();
await writeFile( `${ distRepoFolder }/README.md`, readme );
// Write generated Bower file
console.log( "Generating bower.json..." );
const bower = await generateBower();
await writeFile( `${ distRepoFolder }/bower.json`, bower );
// Write simplified package.json
console.log( "Writing package.json..." );
await writeFile(
`${ distRepoFolder }/package.json`,
JSON.stringify(
{
...pkg,
scripts: undefined,
dependencies: undefined,
devDependencies: undefined,
commitplease: undefined
},
null,
2
// Add final newline
) + "\n"
);
console.log( "Files copied to dist repo." );
} }
/** copyFiles();
* Add, commit, and tag the dist files
*/
function commit() {
console.log( "Adding files to dist..." );
Release.exec( "git add -A", "Error adding files." );
Release.exec(
`git commit -m "Release ${ Release.newVersion }"`,
"Error committing files."
);
console.log();
console.log( "Tagging release on dist..." );
Release.exec( `git tag ${ Release.newVersion }`,
`Error tagging ${ Release.newVersion } on dist repo.` );
Release.tagTime = Release.exec( "git log -1 --format=\"%ad\"",
"Error getting tag timestamp." ).trim();
}
/**
* Push files to dist repo
*/
function push() {
Release.chdir( Release.dir.dist );
console.log( "Pushing release to dist repo..." );
Release.exec(
`git push ${
Release.isTest ? " --dry-run" : ""
} ${ distRemote } main --tags`,
"Error pushing main and tags to git repo."
);
// Set repo for npm publish
Release.dir.origRepo = Release.dir.repo;
Release.dir.repo = Release.dir.dist;
}
Release.walk( [
Release._section( "Copy files to distribution repo" ),
clone,
copy,
Release.confirmReview,
Release._section( "Add, commit, and tag files in distribution repo" ),
commit,
Release.confirmReview,
Release._section( "Pushing files to distribution repo" ),
push
], complete );
};

View File

@ -0,0 +1,56 @@
#!/bin/bash

# Push built release artifacts to the CDN and dist repos,
# then remove generated files from the main branch.
#
# Uses bash (not POSIX sh): [[ ]], read -p, and `set -o pipefail`
# are bash features, so the shebang must be bash.
#
# $1: Version
# $2: Blog URL
set -euo pipefail

cdn=tmp/release/cdn
dist=tmp/release/dist

if [[ -z "$1" ]]; then
	echo "Version is not set (1st argument)"
	exit 1
fi

if [[ -z "$2" ]]; then
	echo "Blog URL is not set (2nd argument)"
	exit 1
fi

# Push files to cdn repo
npm run release:cdn "$1"
cd $cdn
git add -A
git commit -m "jquery: Add version $1"

# Wait for confirmation from user to push changes to cdn repo
read -p "Press enter to push changes to cdn repo"
git push
cd -

# Push files to dist repo
npm run release:dist "$1" "$2"
cd $dist
git add -A
npm version "$1"

# Wait for confirmation from user to push changes to dist repo
read -p "Press enter to push changes to dist repo"
git push --follow-tags
cd -

# Restore AUTHORS URL
sed -i "s/$1\/AUTHORS.txt/main\/AUTHORS.txt/" package.json
git add package.json

# Remove built files from tracking.
# Leave the changelog.md committed.
# Leave the tmp folder as some files are needed
# after the release (such as for emailing archives).
npm run build:clean
git commit -m "Release: remove dist files from main branch"

# Wait for confirmation from user to push changes
read -p "Press enter to push changes to main branch"
git push

View File

@ -0,0 +1,15 @@
#!/bin/bash

# Prepare the workspace for a release: clean all artifacts,
# verify authors, run the test suite, and clone the dist and
# cdn repos into tmp/release.
#
# `set -o pipefail` is a bash feature (POSIX sh rejects it),
# so the shebang must be bash.
set -euo pipefail

# Cleans all release and build artifacts
npm run build:clean
npm run release:clean

npm ci
npm run authors:check
npm test

# Clone dist and cdn repos to the tmp/release directory
mkdir -p tmp/release
git clone https://github.com/jquery/jquery-dist tmp/release/dist
git clone https://github.com/jquery/codeorigin.jquery.com tmp/release/cdn

198
build/release/verify.js Normal file
View File

@ -0,0 +1,198 @@
/**
* Verify the latest release is reproducible
* Works with versions 4.0.0-beta.2 and later
*/
import chalk from "chalk";
import * as Diff from "diff";
import { exec as nodeExec } from "node:child_process";
import crypto from "node:crypto";
import { createWriteStream } from "node:fs";
import { mkdir, readdir, readFile } from "node:fs/promises";
import path from "node:path";
import { Readable } from "node:stream";
import { finished } from "node:stream/promises";
import util from "node:util";
import { gunzip as nodeGunzip } from "node:zlib";
import { rimraf } from "rimraf";
const exec = util.promisify( nodeExec );
const gunzip = util.promisify( nodeGunzip );
const SRC_REPO = "https://github.com/jquery/jquery.git";
const CDN_URL = "https://code.jquery.com";
const REGISTRY_URL = "https://registry.npmjs.org/jquery";
const rstable = /^(\d+\.\d+\.\d+)$/;
/**
 * Verify that a published release is reproducible: rebuild it from the
 * tagged source and compare the artifacts against the CDN (stable
 * versions only) and the npm registry.
 *
 * @param {Object} [options]
 * @param {string} [options.version] - Version to check. Falls back to
 *   the VERSION env var, then to the latest git tag.
 * @throws {Error} if the version is missing from npm or any artifact
 *   differs from the locally rebuilt copy.
 */
export async function verifyRelease( { version } = {} ) {
	if ( !version ) {
		version = process.env.VERSION || ( await getLatestVersion() );
	}
	console.log( `Checking jQuery ${ version }...` );

	const release = await buildRelease( { version } );

	let verified = true;

	// Only check stable versions against the CDN
	if ( rstable.test( version ) ) {
		await Promise.all(
			release.files.map( async( file ) => {
				const cdnContents = await fetch( new URL( file.name, CDN_URL ) ).then(
					( res ) => res.text()
				);
				if ( cdnContents !== file.contents ) {
					console.log( `${ file.name } is different from the CDN:` );
					diffFiles( file.contents, cdnContents );
					verified = false;
				}
			} )
		);
	}

	// Check all releases against npm.
	// First, download npm tarball for version
	const npmPackage = await fetch( REGISTRY_URL ).then( ( res ) => res.json() );

	if ( !npmPackage.versions[ version ] ) {
		throw new Error( `jQuery ${ version } not found on npm!` );
	}
	const npmTarball = npmPackage.versions[ version ].dist.tarball;

	// Write npm tarball to file
	const npmTarballPath = path.join( "tmp/verify", version, "npm.tgz" );
	await downloadFile( npmTarball, npmTarballPath );

	// Check the tarball checksum (checksums are taken over the
	// gunzipped contents; see sumTarball)
	const tgzSum = await sumTarball( npmTarballPath );
	if ( tgzSum !== release.tgz.contents ) {
		console.log( `${ version }.tgz is different from npm:` );
		diffFiles( release.tgz.contents, tgzSum );
		verified = false;
	}

	await Promise.all(
		release.files.map( async( file ) => {

			// Get file contents from tarball
			const { stdout: npmContents } = await exec(
				`tar -xOf ${ npmTarballPath } package/${ file.path }/${ file.name }`
			);

			if ( npmContents !== file.contents ) {

				// Fixed message: this loop compares against npm, not the CDN
				console.log( `${ file.name } is different from npm:` );
				diffFiles( file.contents, npmContents );
				verified = false;
			}
		} )
	);

	if ( verified ) {
		console.log( `jQuery ${ version } is reproducible!` );
	} else {
		throw new Error( `jQuery ${ version } is NOT reproducible!` );
	}
}
/**
 * Rebuild a tagged jQuery release from source and collect its
 * artifacts for comparison.
 *
 * Clones the tag into tmp/verify/<version>, runs `npm ci`,
 * `npm run build:all` (with VERSION set), and `npm pack`, then reads
 * every top-level file in dist/ and dist-module/ plus the checksum of
 * the packed tarball.
 *
 * @param {Object} options
 * @param {string} options.version - git tag to clone and build
 * @returns {Promise<{files: Array<{name: string, path: string, contents: string}>,
 *   tgz: {name: string, contents: string}, version: string}>}
 */
async function buildRelease( { version } ) {
	const releaseFolder = path.join( "tmp/verify", version );

	// Clone the release repo (fresh checkout: remove any previous run)
	console.log( `Cloning jQuery ${ version }...` );
	await rimraf( releaseFolder );
	await mkdir( releaseFolder, { recursive: true } );
	await exec(
		`git clone -q -b ${ version } --depth=1 ${ SRC_REPO } ${ releaseFolder }`
	);

	// Install node dependencies
	console.log( `Installing dependencies for jQuery ${ version }...` );
	await exec( "npm ci", { cwd: releaseFolder } );

	// Build the release.
	// VERSION is passed so the build stamps the same version string
	// as the original release.
	console.log( `Building jQuery ${ version }...` );
	const { stdout: buildOutput } = await exec( "npm run build:all", {
		cwd: releaseFolder,
		env: {
			VERSION: version
		}
	} );
	console.log( buildOutput );

	// Pack the npm tarball
	console.log( `Packing jQuery ${ version }...` );
	const { stdout: packOutput } = await exec( "npm pack", { cwd: releaseFolder } );
	console.log( packOutput );

	// Get all top-level /dist and /dist-module files
	const distFiles = await readdir( path.join( releaseFolder, "dist" ), {
		withFileTypes: true
	} );
	const distModuleFiles = await readdir(
		path.join( releaseFolder, "dist-module" ),
		{
			withFileTypes: true
		}
	);
	const files = await Promise.all(
		[ ...distFiles, ...distModuleFiles ]
			.filter( ( dirent ) => dirent.isFile() )
			.map( async( dirent ) => ( {
				name: dirent.name,

				// dirent.path is the directory containing the entry,
				// so this yields "dist" or "dist-module"
				// NOTE(review): dirent.path is deprecated in newer
				// Node.js in favor of dirent.parentPath — confirm the
				// supported Node range before changing.
				path: path.basename( dirent.path ),
				contents: await readFile( path.join( dirent.path, dirent.name ), "utf8" )
			} ) )
	);

	// Get checksum of the tarball
	const tgzFilename = `jquery-${ version }.tgz`;
	const sum = await sumTarball( path.join( releaseFolder, tgzFilename ) );

	return {
		files,
		tgz: {
			name: tgzFilename,
			contents: sum
		},
		version
	};
}
/**
 * Download a URL to a local file.
 *
 * @param {string} url - source URL
 * @param {string} dest - destination file path (parent dir must exist)
 * @returns {Promise<void>} resolves when the file is fully written
 * @throws {Error} if the HTTP response is not OK
 */
async function downloadFile( url, dest ) {
	const response = await fetch( url );

	// Fail loudly on HTTP errors rather than silently writing an
	// error page to disk (which would corrupt later comparisons).
	if ( !response.ok ) {
		throw new Error(
			`Failed to download ${ url }: ${ response.status } ${ response.statusText }`
		);
	}

	const fileStream = createWriteStream( dest );
	const stream = Readable.fromWeb( response.body ).pipe( fileStream );
	return finished( stream );
}
/**
 * Print a colorized line diff between two strings: additions in
 * green, removals in red, unchanged context in the default color.
 *
 * @param {string} a - original text
 * @param {string} b - text to compare against
 */
async function diffFiles( a, b ) {
	for ( const part of Diff.diffLines( a, b ) ) {
		if ( part.added ) {
			console.log( chalk.green( part.value ) );
		} else if ( part.removed ) {
			console.log( chalk.red( part.value ) );
		} else {
			console.log( part.value );
		}
	}
}
/**
 * Resolve the most recently created tag in the local git repo.
 *
 * @returns {Promise<string>} the tag name, e.g. "4.0.0-beta.2"
 */
async function getLatestVersion() {
	const revList = await exec( "git rev-list --tags --max-count=1" );
	const latestTagSha = revList.stdout.trim();
	const described = await exec( `git describe --tags ${ latestTagSha }` );
	return described.stdout.trim();
}
/**
 * Compute the hex-encoded SHA-256 digest of the given data.
 *
 * @param {string|Buffer} data - contents to hash
 * @returns {string} 64-character lowercase hex digest
 */
function shasum( data ) {
	return crypto.createHash( "sha256" ).update( data ).digest( "hex" );
}
/**
 * Checksum a gzipped tarball by its decompressed contents, so the
 * gzip header (which embeds a timestamp) does not affect the result.
 *
 * @param {string} filepath - path to a .tgz file on disk
 * @returns {Promise<string>} sha256 hex digest of the gunzipped data
 */
async function sumTarball( filepath ) {
	const zipped = await readFile( filepath );
	const unzipped = await gunzip( zipped );
	return shasum( unzipped );
}

View File

@ -4,22 +4,21 @@
* and includes/excludes specified modules * and includes/excludes specified modules
*/ */
"use strict"; import fs from "node:fs/promises";
import path from "node:path";
import util from "node:util";
import { exec as nodeExec } from "node:child_process";
import * as rollup from "rollup";
import excludedFromSlim from "./lib/slim-exclude.js";
import rollupFileOverrides from "./lib/rollupFileOverridesPlugin.js";
import isCleanWorkingDir from "./lib/isCleanWorkingDir.js";
import processForDist from "./dist.js";
import minify from "./minify.js";
import getTimestamp from "./lib/getTimestamp.js";
import { compareSize } from "./lib/compareSize.js";
const fs = require( "node:fs/promises" ); const exec = util.promisify( nodeExec );
const path = require( "node:path" ); const pkg = JSON.parse( await fs.readFile( "./package.json", "utf8" ) );
const util = require( "node:util" );
const exec = util.promisify( require( "node:child_process" ).exec );
const rollup = require( "rollup" );
const excludedFromSlim = require( "./lib/slim-exclude" );
const rollupFileOverrides = require( "./lib/rollup-plugin-file-overrides" );
const pkg = require( "../../package.json" );
const isCleanWorkingDir = require( "./lib/isCleanWorkingDir" );
const processForDist = require( "./dist" );
const minify = require( "./minify" );
const getTimestamp = require( "./lib/getTimestamp" );
const verifyNodeVersion = require( "./lib/verifyNodeVersion" );
const srcFolder = path.resolve( __dirname, "../../src" );
const minimum = [ "core" ]; const minimum = [ "core" ];
@ -38,14 +37,14 @@ const removeWith = {
}; };
async function read( filename ) { async function read( filename ) {
return fs.readFile( path.join( srcFolder, filename ), "utf8" ); return fs.readFile( path.join( "./src", filename ), "utf8" );
} }
// Remove the src folder and file extension // Remove the src folder and file extension
// and ensure unix-style path separators // and ensure unix-style path separators
function moduleName( filename ) { function moduleName( filename ) {
return filename return filename
.replace( `${ srcFolder }${ path.sep }`, "" ) .replace( new RegExp( `.*\\${ path.sep }src\\${ path.sep }` ), "" )
.replace( /\.js$/, "" ) .replace( /\.js$/, "" )
.split( path.sep ) .split( path.sep )
.join( path.posix.sep ); .join( path.posix.sep );
@ -54,7 +53,7 @@ function moduleName( filename ) {
async function readdirRecursive( dir, all = [] ) { async function readdirRecursive( dir, all = [] ) {
let files; let files;
try { try {
files = await fs.readdir( path.join( srcFolder, dir ), { files = await fs.readdir( path.join( "./src", dir ), {
withFileTypes: true withFileTypes: true
} ); } );
} catch ( e ) { } catch ( e ) {
@ -141,7 +140,15 @@ async function checkExclude( exclude, include ) {
return [ unique( excluded ), unique( included ) ]; return [ unique( excluded ), unique( included ) ];
} }
async function getLastModifiedDate() {
const { stdout } = await exec( "git log -1 --format=\"%at\"" );
return new Date( parseInt( stdout, 10 ) * 1000 );
}
async function writeCompiled( { code, dir, filename, version } ) { async function writeCompiled( { code, dir, filename, version } ) {
// Use the last modified date so builds are reproducible
const date = await getLastModifiedDate();
const compiledContents = code const compiledContents = code
// Embed Version // Embed Version
@ -149,14 +156,14 @@ async function writeCompiled( { code, dir, filename, version } ) {
// Embed Date // Embed Date
// yyyy-mm-ddThh:mmZ // yyyy-mm-ddThh:mmZ
.replace( /@DATE/g, new Date().toISOString().replace( /:\d+\.\d+Z$/, "Z" ) ); .replace( /@DATE/g, date.toISOString().replace( /:\d+\.\d+Z$/, "Z" ) );
await fs.writeFile( path.join( dir, filename ), compiledContents ); await fs.writeFile( path.join( dir, filename ), compiledContents );
console.log( `[${ getTimestamp() }] ${ filename } v${ version } created.` ); console.log( `[${ getTimestamp() }] ${ filename } v${ version } created.` );
} }
// Build jQuery ECMAScript modules // Build jQuery ECMAScript modules
async function build( { export async function build( {
amd, amd,
dir = "dist", dir = "dist",
exclude = [], exclude = [],
@ -206,7 +213,7 @@ async function build( {
if ( excluded.includes( "exports/global" ) ) { if ( excluded.includes( "exports/global" ) ) {
const index = excluded.indexOf( "exports/global" ); const index = excluded.indexOf( "exports/global" );
setOverride( setOverride(
`${ srcFolder }/exports/global.js`, "./src/exports/global.js",
"import { jQuery } from \"../core.js\";\n\n" + "import { jQuery } from \"../core.js\";\n\n" +
"jQuery.noConflict = function() {};" "jQuery.noConflict = function() {};"
); );
@ -225,7 +232,7 @@ async function build( {
// No name means an anonymous define // No name means an anonymous define
const amdExportContents = await read( "exports/amd.js" ); const amdExportContents = await read( "exports/amd.js" );
setOverride( setOverride(
`${ srcFolder }/exports/amd.js`, "./src/exports/amd.js",
amdExportContents.replace( amdExportContents.replace(
// Remove the comma for anonymous defines // Remove the comma for anonymous defines
@ -248,7 +255,7 @@ async function build( {
} }
const inputOptions = { const inputOptions = {
input: `${ srcFolder }/jquery.js` input: "./src/jquery.js"
}; };
const includedImports = included const includedImports = included
@ -274,7 +281,7 @@ async function build( {
// Replace excluded modules with empty sources. // Replace excluded modules with empty sources.
for ( const module of excluded ) { for ( const module of excluded ) {
setOverride( setOverride(
`${ srcFolder }/${ module }.js`, `./src/${ module }.js`,
// The `selector` module is not removed, but replaced // The `selector` module is not removed, but replaced
// with `selector-native`. // with `selector-native`.
@ -290,7 +297,7 @@ async function build( {
output: [ outputOptions ], output: [ outputOptions ],
plugins: [ rollupFileOverrides( fileOverrides ) ], plugins: [ rollupFileOverrides( fileOverrides ) ],
watch: { watch: {
include: `${ srcFolder }/**`, include: "./src/**",
skipWrite: true skipWrite: true
} }
} ); } );
@ -352,7 +359,7 @@ async function build( {
} }
} }
async function buildDefaultFiles( { export async function buildDefaultFiles( {
version = process.env.VERSION, version = process.env.VERSION,
watch watch
} = {} ) { } = {} ) {
@ -407,12 +414,9 @@ async function buildDefaultFiles( {
} ) } )
] ); ] );
// Earlier Node.js versions do not support the ESM format. if ( watch ) {
if ( !verifyNodeVersion() ) { console.log( "Watching files..." );
return; } else {
}
const { compareSize } = await import( "./compare_size.mjs" );
return compareSize( { return compareSize( {
files: [ files: [
"dist/jquery.min.js", "dist/jquery.min.js",
@ -422,5 +426,4 @@ async function buildDefaultFiles( {
] ]
} ); } );
} }
}
module.exports = { build, buildDefaultFiles };

View File

@ -1,7 +1,5 @@
"use strict";
// Process files for distribution. // Process files for distribution.
module.exports = function processForDist( text, filename ) { export default function processForDist( text, filename ) {
if ( !text ) { if ( !text ) {
throw new Error( "text required for processForDist" ); throw new Error( "text required for processForDist" );
} }
@ -28,4 +26,4 @@ module.exports = function processForDist( text, filename ) {
} }
throw new Error( message ); throw new Error( message );
} }
}; }

View File

@ -1,9 +1,9 @@
import chalk from "chalk";
import fs from "node:fs/promises"; import fs from "node:fs/promises";
import { promisify } from "node:util"; import { promisify } from "node:util";
import zlib from "node:zlib"; import zlib from "node:zlib";
import { exec as nodeExec } from "node:child_process"; import { exec as nodeExec } from "node:child_process";
import isCleanWorkingDir from "./lib/isCleanWorkingDir.js"; import chalk from "chalk";
import isCleanWorkingDir from "./isCleanWorkingDir.js";
const VERSION = 1; const VERSION = 1;
const lastRunBranch = " last run"; const lastRunBranch = " last run";

View File

@ -1,9 +1,7 @@
"use strict"; export default function getTimestamp() {
module.exports = function getTimestamp() {
const now = new Date(); const now = new Date();
const hours = now.getHours().toString().padStart( 2, "0" ); const hours = now.getHours().toString().padStart( 2, "0" );
const minutes = now.getMinutes().toString().padStart( 2, "0" ); const minutes = now.getMinutes().toString().padStart( 2, "0" );
const seconds = now.getSeconds().toString().padStart( 2, "0" ); const seconds = now.getSeconds().toString().padStart( 2, "0" );
return `${ hours }:${ minutes }:${ seconds }`; return `${ hours }:${ minutes }:${ seconds }`;
}; }

View File

@ -1,9 +1,9 @@
"use strict"; import util from "node:util";
import { exec as nodeExec } from "node:child_process";
const util = require( "node:util" ); const exec = util.promisify( nodeExec );
const exec = util.promisify( require( "node:child_process" ).exec );
module.exports = async function isCleanWorkingDir() { export default async function isCleanWorkingDir() {
const { stdout } = await exec( "git status --untracked-files=no --porcelain" ); const { stdout } = await exec( "git status --untracked-files=no --porcelain" );
return !stdout.trim(); return !stdout.trim();
}; }

View File

@ -1,5 +1,3 @@
"use strict";
/** /**
* A Rollup plugin accepting a file overrides map and changing * A Rollup plugin accepting a file overrides map and changing
* module sources to the overridden ones where provided. Files * module sources to the overridden ones where provided. Files
@ -7,7 +5,7 @@
* *
* @param {Map<string, string>} fileOverrides * @param {Map<string, string>} fileOverrides
*/ */
module.exports = ( fileOverrides ) => { export default function rollupFileOverrides( fileOverrides ) {
return { return {
name: "jquery-file-overrides", name: "jquery-file-overrides",
load( id ) { load( id ) {
@ -21,4 +19,4 @@ module.exports = ( fileOverrides ) => {
return null; return null;
} }
}; };
}; }

View File

@ -1,7 +1,5 @@
"use strict";
// NOTE: keep it in sync with test/data/testinit.js // NOTE: keep it in sync with test/data/testinit.js
module.exports = [ export default [
"ajax", "ajax",
"callbacks", "callbacks",
"deferred", "deferred",

View File

@ -1,12 +0,0 @@
"use strict";
const { version } = require( "process" );
const nodeV18OrNewer = !/^v1[0-7]\./.test( version );
module.exports = function verifyNodeVersion() {
if ( !nodeV18OrNewer ) {
console.log( "Old Node.js detected, task skipped..." );
return false;
}
return true;
};

View File

@ -1,14 +1,12 @@
"use strict"; import fs from "node:fs/promises";
import path from "node:path";
const swc = require( "@swc/core" ); import swc from "@swc/core";
const fs = require( "node:fs/promises" ); import processForDist from "./dist.js";
const path = require( "node:path" ); import getTimestamp from "./lib/getTimestamp.js";
const processForDist = require( "./dist" );
const getTimestamp = require( "./lib/getTimestamp" );
const rjs = /\.js$/; const rjs = /\.js$/;
module.exports = async function minify( { filename, dir, esm } ) { export default async function minify( { filename, dir, esm } ) {
const contents = await fs.readFile( path.join( dir, filename ), "utf8" ); const contents = await fs.readFile( path.join( dir, filename ), "utf8" );
const version = /jQuery JavaScript Library ([^\n]+)/.exec( contents )[ 1 ]; const version = /jQuery JavaScript Library ([^\n]+)/.exec( contents )[ 1 ];
@ -67,4 +65,4 @@ module.exports = async function minify( { filename, dir, esm } ) {
console.log( `[${ getTimestamp() }] ${ minFilename } ${ version } with ${ console.log( `[${ getTimestamp() }] ${ minFilename } ${ version } with ${
mapFilename mapFilename
} created.` ); } created.` );
}; }

View File

@ -1,17 +1,12 @@
"use strict"; import fs from "node:fs/promises";
import util from "node:util";
import { exec as nodeExec } from "node:child_process";
const fs = require( "node:fs/promises" ); const exec = util.promisify( nodeExec );
const util = require( "node:util" );
const exec = util.promisify( require( "node:child_process" ).exec );
const verifyNodeVersion = require( "./lib/verifyNodeVersion" );
const allowedLibraryTypes = new Set( [ "regular", "factory" ] ); const allowedLibraryTypes = new Set( [ "regular", "factory" ] );
const allowedSourceTypes = new Set( [ "commonjs", "module", "dual" ] ); const allowedSourceTypes = new Set( [ "commonjs", "module", "dual" ] );
if ( !verifyNodeVersion() ) {
return;
}
// Fire up all tests defined in test/node_smoke_tests/*.js in spawned sub-processes. // Fire up all tests defined in test/node_smoke_tests/*.js in spawned sub-processes.
// All the files under test/node_smoke_tests/*.js are supposed to exit with 0 code // All the files under test/node_smoke_tests/*.js are supposed to exit with 0 code
// on success or another one on failure. Spawning in sub-processes is // on success or another one on failure. Spawning in sub-processes is

View File

@ -1,9 +1,7 @@
"use strict"; import fs from "node:fs/promises";
import path from "node:path";
const fs = require( "node:fs/promises" ); const projectDir = path.resolve( "." );
const path = require( "node:path" );
const projectDir = path.resolve( __dirname, "..", ".." );
const files = { const files = {
"bootstrap/bootstrap.css": "bootstrap/dist/css/bootstrap.css", "bootstrap/bootstrap.css": "bootstrap/dist/css/bootstrap.css",

View File

@ -1,17 +1,9 @@
"use strict"; import path from "node:path";
import os from "node:os";
const { spawn } = require( "node:child_process" ); import { spawn } from "node:child_process";
const verifyNodeVersion = require( "./lib/verifyNodeVersion" );
const path = require( "node:path" );
const os = require( "node:os" );
if ( !verifyNodeVersion() ) {
return;
}
const command = path.resolve( const command = path.resolve(
__dirname, `node_modules/.bin/promises-aplus-tests${ os.platform() === "win32" ? ".cmd" : "" }`
`../../node_modules/.bin/promises-aplus-tests${ os.platform() === "win32" ? ".cmd" : "" }`
); );
const args = [ "--reporter", "dot", "--timeout", "2000" ]; const args = [ "--reporter", "dot", "--timeout", "2000" ];
const tests = [ const tests = [

View File

@ -1,6 +1,4 @@
"use strict"; import fs from "node:fs/promises";
const fs = require( "node:fs/promises" );
async function generateFixture() { async function generateFixture() {
const fixture = await fs.readFile( "./test/data/qunit-fixture.html", "utf8" ); const fixture = await fs.readFile( "./test/data/qunit-fixture.html", "utf8" );

View File

@ -1,5 +1,5 @@
// Node.js is able to import from a CommonJS module in an ESM one. // Node.js is able to import from a CommonJS module in an ESM one.
import jQuery from "../dist/jquery.js"; import jQuery from "../../dist/jquery.js";
export { jQuery, jQuery as $ }; export { jQuery, jQuery as $ };
export default jQuery; export default jQuery;

View File

@ -1,5 +1,5 @@
// Node.js is able to import from a CommonJS module in an ESM one. // Node.js is able to import from a CommonJS module in an ESM one.
import jQuery from "../dist/jquery.slim.js"; import jQuery from "../../dist/jquery.slim.js";
export { jQuery, jQuery as $ }; export { jQuery, jQuery as $ };
export default jQuery; export default jQuery;

View File

@ -1,5 +1,5 @@
"use strict"; "use strict";
// Bundlers are able to synchronously require an ESM module from a CommonJS one. // Bundlers are able to synchronously require an ESM module from a CommonJS one.
const { jQuery } = require( "../dist-module/jquery.module.js" ); const { jQuery } = require( "../../dist-module/jquery.module.js" );
module.exports = jQuery; module.exports = jQuery;

View File

@ -1,5 +1,5 @@
"use strict"; "use strict";
// Bundlers are able to synchronously require an ESM module from a CommonJS one. // Bundlers are able to synchronously require an ESM module from a CommonJS one.
const { jQuery } = require( "../dist-module/jquery.slim.module.js" ); const { jQuery } = require( "../../dist-module/jquery.slim.module.js" );
module.exports = jQuery; module.exports = jQuery;

View File

@ -280,8 +280,9 @@ export default [
{ {
files: [ files: [
"build/**",
"eslint.config.js", "eslint.config.js",
".release-it.cjs",
"build/**",
"test/node_smoke_tests/**", "test/node_smoke_tests/**",
"test/bundler_smoke_tests/**/*", "test/bundler_smoke_tests/**/*",
"test/promises_aplus_adapters/**", "test/promises_aplus_adapters/**",
@ -301,15 +302,6 @@ export default [
} }
}, },
{
files: [
"build/**/*.js"
],
languageOptions: {
sourceType: "commonjs"
}
},
{ {
files: [ files: [
"dist/jquery.js", "dist/jquery.js",
@ -320,10 +312,8 @@ export default [
"dist-module/jquery.slim.module.js", "dist-module/jquery.slim.module.js",
"dist-module/jquery.factory.module.js", "dist-module/jquery.factory.module.js",
"dist-module/jquery.factory.slim.module.js", "dist-module/jquery.factory.slim.module.js",
"dist/jquery.bundler-require-wrapper.js", "dist/wrappers/*.js",
"dist/jquery.bundler-require-wrapper.slim.js", "dist-module/wrappers/*.js"
"dist-module/jquery.node-module-wrapper.js",
"dist-module/jquery.node-module-wrapper.slim.js"
], ],
languageOptions: { languageOptions: {
ecmaVersion: 2015, ecmaVersion: 2015,
@ -406,8 +396,7 @@ export default [
{ {
files: [ files: [
"dist/jquery.bundler-require-wrapper.js", "dist/wrappers/*.js"
"dist/jquery.bundler-require-wrapper.slim.js"
], ],
languageOptions: { languageOptions: {
ecmaVersion: 2015, ecmaVersion: 2015,

4495
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@ -2,29 +2,29 @@
"name": "jquery", "name": "jquery",
"title": "jQuery", "title": "jQuery",
"description": "JavaScript library for DOM operations", "description": "JavaScript library for DOM operations",
"version": "4.0.0-pre", "version": "4.0.0-beta.1",
"type": "module", "type": "module",
"exports": { "exports": {
".": { ".": {
"node": { "node": {
"import": "./dist-module/jquery.node-module-wrapper.js", "import": "./dist-module/wrappers/jquery.node-module-wrapper.js",
"default": "./dist/jquery.js" "default": "./dist/jquery.js"
}, },
"module": { "module": {
"import": "./dist-module/jquery.module.js", "import": "./dist-module/jquery.module.js",
"default": "./dist/jquery.bundler-require-wrapper.js" "default": "./dist/wrappers/jquery.bundler-require-wrapper.js"
}, },
"import": "./dist-module/jquery.module.js", "import": "./dist-module/jquery.module.js",
"default": "./dist/jquery.js" "default": "./dist/jquery.js"
}, },
"./slim": { "./slim": {
"node": { "node": {
"import": "./dist-module/jquery.node-module-wrapper.slim.js", "import": "./dist-module/wrappers/jquery.node-module-wrapper.slim.js",
"default": "./dist/jquery.slim.js" "default": "./dist/jquery.slim.js"
}, },
"module": { "module": {
"import": "./dist-module/jquery.slim.module.js", "import": "./dist-module/jquery.slim.module.js",
"default": "./dist/jquery.bundler-require-wrapper.slim.js" "default": "./dist/wrappers/jquery.bundler-require-wrapper.slim.js"
}, },
"import": "./dist-module/jquery.slim.module.js", "import": "./dist-module/jquery.slim.module.js",
"default": "./dist/jquery.slim.js" "default": "./dist/jquery.slim.js"
@ -45,12 +45,13 @@
}, },
"main": "dist/jquery.js", "main": "dist/jquery.js",
"scripts": { "scripts": {
"authors:check": "node -e \"require('./build/release/authors.js').checkAuthors()\"", "authors:check": "node -e \"(async () => { const { checkAuthors } = await import('./build/release/authors.js'); checkAuthors() })()\"",
"authors:update": "node -e \"require('./build/release/authors.js').updateAuthors()\"", "authors:update": "node -e \"(async () => { const { updateAuthors } = await import('./build/release/authors.js'); updateAuthors() })()\"",
"babel:tests": "babel test/data/core/jquery-iterability-transpiled-es6.js --out-file test/data/core/jquery-iterability-transpiled.js", "babel:tests": "babel test/data/core/jquery-iterability-transpiled-es6.js --out-file test/data/core/jquery-iterability-transpiled.js",
"build": "node ./build/command.js", "build": "node ./build/command.js",
"build:all": "node -e \"require('./build/tasks/build.js').buildDefaultFiles()\"", "build:all": "node -e \"(async () => { const { buildDefaultFiles } = await import('./build/tasks/build.js'); buildDefaultFiles() })()\"",
"build:main": "node -e \"require('./build/tasks/build.js').build()\"", "build:clean": "rimraf dist/*.{js,map} dist-module/*.{js,map}",
"build:main": "node -e \"(async () => { const { build } = await import('./build/tasks/build.js'); build() })()\"",
"lint:dev": "eslint --cache .", "lint:dev": "eslint --cache .",
"lint:json": "jsonlint --quiet package.json", "lint:json": "jsonlint --quiet package.json",
"lint": "concurrently -r \"npm:lint:dev\" \"npm:lint:json\"", "lint": "concurrently -r \"npm:lint:dev\" \"npm:lint:json\"",
@ -58,7 +59,13 @@
"prepare": "husky", "prepare": "husky",
"pretest": "npm run qunit-fixture && npm run babel:tests && npm run npmcopy", "pretest": "npm run qunit-fixture && npm run babel:tests && npm run npmcopy",
"qunit-fixture": "node build/tasks/qunit-fixture.js", "qunit-fixture": "node build/tasks/qunit-fixture.js",
"start": "node -e \"require('./build/tasks/build.js').buildDefaultFiles({ watch: true })\"", "release": "release-it --preReleaseBase=1",
"release:cdn": "node build/release/cdn.js",
"release:changelog": "node build/release/changelog.js",
"release:clean": "rimraf tmp --glob changelog.{md,html} contributors.html",
"release:dist": "node build/release/dist.js",
"release:verify": "node -e \"(async () => { const { verifyRelease } = await import('./build/release/verify.js'); verifyRelease() })()\"",
"start": "node -e \"(async () => { const { buildDefaultFiles } = await import('./build/tasks/build.js'); buildDefaultFiles({ watch: true }) })()\"",
"test:bundlers": "npm run pretest && npm run build:all && node test/bundler_smoke_tests/run-jsdom-tests.js", "test:bundlers": "npm run pretest && npm run build:all && node test/bundler_smoke_tests/run-jsdom-tests.js",
"test:browser": "npm run pretest && npm run build:main && npm run test:unit -- -b chrome -b firefox -h", "test:browser": "npm run pretest && npm run build:main && npm run test:unit -- -b chrome -b firefox -h",
"test:browserless": "npm run pretest && npm run build:all && node test/bundler_smoke_tests/run-jsdom-tests.js && node build/tasks/node_smoke_tests.js && node build/tasks/promises_aplus_tests.js && npm run test:unit -- -b jsdom -m basic", "test:browserless": "npm run pretest && npm run build:all && node test/bundler_smoke_tests/run-jsdom-tests.js && node build/tasks/node_smoke_tests.js && node build/tasks/promises_aplus_tests.js && npm run test:unit -- -b jsdom -m basic",
@ -101,8 +108,9 @@
"@prantlf/jsonlint": "14.0.3", "@prantlf/jsonlint": "14.0.3",
"@rollup/plugin-commonjs": "26.0.1", "@rollup/plugin-commonjs": "26.0.1",
"@rollup/plugin-node-resolve": "15.2.3", "@rollup/plugin-node-resolve": "15.2.3",
"@swc/core": "1.5.28", "@swc/core": "1.6.13",
"@types/selenium-webdriver": "4.1.23", "@types/selenium-webdriver": "4.1.24",
"archiver": "7.0.1",
"body-parser": "1.20.2", "body-parser": "1.20.2",
"bootstrap": "5.3.3", "bootstrap": "5.3.3",
"browserstack-local": "1.5.5", "browserstack-local": "1.5.5",
@ -118,20 +126,23 @@
"exit-hook": "4.0.0", "exit-hook": "4.0.0",
"express": "4.19.2", "express": "4.19.2",
"express-body-parser-error-handler": "1.0.7", "express-body-parser-error-handler": "1.0.7",
"globals": "15.4.0", "globals": "15.8.0",
"husky": "9.0.11", "husky": "9.0.11",
"jsdom": "24.1.0", "jsdom": "24.1.0",
"marked": "13.0.2",
"multiparty": "4.2.3", "multiparty": "4.2.3",
"native-promise-only": "0.8.1", "native-promise-only": "0.8.1",
"promises-aplus-tests": "2.1.2", "promises-aplus-tests": "2.1.2",
"q": "1.5.1", "q": "1.5.1",
"qunit": "2.21.0", "qunit": "2.21.0",
"raw-body": "2.5.2", "raw-body": "2.5.2",
"release-it": "17.5.0",
"requirejs": "2.3.6", "requirejs": "2.3.6",
"rollup": "4.18.0", "rimraf": "6.0.0",
"selenium-webdriver": "4.21.0", "rollup": "4.18.1",
"selenium-webdriver": "4.22.0",
"sinon": "9.2.4", "sinon": "9.2.4",
"webpack": "5.92.0", "webpack": "5.92.1",
"yargs": "17.7.2" "yargs": "17.7.2"
}, },
"commitplease": { "commitplease": {