Build: migrate more uses of fs.promises; use node: protocol

Ref gh-5440
This commit is contained in:
Timmy Willison 2024-03-10 12:19:15 -04:00
parent 822362e6ef
commit ae7f6139cc
12 changed files with 65 additions and 56 deletions

View File

@ -1,6 +1,6 @@
"use strict"; "use strict";
var fs = require( "fs" ); const fs = require( "node:fs" );
module.exports = function( Release ) { module.exports = function( Release ) {

View File

@ -1,6 +1,6 @@
"use strict"; "use strict";
const fs = require( "node:fs" ); const fs = require( "node:fs/promises" );
const util = require( "node:util" ); const util = require( "node:util" );
const exec = util.promisify( require( "node:child_process" ).exec ); const exec = util.promisify( require( "node:child_process" ).exec );
const rnewline = /\r?\n/; const rnewline = /\r?\n/;
@ -40,7 +40,7 @@ function cloneSizzle() {
} }
async function getLastAuthor() { async function getLastAuthor() {
const authorsTxt = await fs.promises.readFile( "AUTHORS.txt", "utf8" ); const authorsTxt = await fs.readFile( "AUTHORS.txt", "utf8" );
return authorsTxt.trim().split( rnewline ).pop(); return authorsTxt.trim().split( rnewline ).pop();
} }
@ -93,7 +93,7 @@ async function updateAuthors() {
const authors = await getAuthors(); const authors = await getAuthors();
const authorsTxt = "Authors ordered by first contribution.\n\n" + authors.join( "\n" ) + "\n"; const authorsTxt = "Authors ordered by first contribution.\n\n" + authors.join( "\n" ) + "\n";
await fs.promises.writeFile( "AUTHORS.txt", authorsTxt ); await fs.writeFile( "AUTHORS.txt", authorsTxt );
console.log( "AUTHORS.txt updated" ); console.log( "AUTHORS.txt updated" );
} }

View File

@ -1,9 +1,9 @@
"use strict"; "use strict";
const fs = require( "fs" ); const fs = require( "node:fs" );
const shell = require( "shelljs" ); const shell = require( "shelljs" );
const path = require( "path" ); const path = require( "node:path" );
const os = require( "os" ); const os = require( "node:os" );
const cdnFolderContainer = "dist/cdn"; const cdnFolderContainer = "dist/cdn";
const cdnFolderVersioned = `${ cdnFolderContainer }/versioned`; const cdnFolderVersioned = `${ cdnFolderContainer }/versioned`;
@ -49,7 +49,7 @@ function makeReleaseCopies( Release ) {
].forEach( ( { filesMap, cdnFolder } ) => { ].forEach( ( { filesMap, cdnFolder } ) => {
shell.mkdir( "-p", cdnFolder ); shell.mkdir( "-p", cdnFolder );
Object.keys( filesMap ).forEach( key => { Object.keys( filesMap ).forEach( ( key ) => {
let text; let text;
const builtFile = filesMap[ key ]; const builtFile = filesMap[ key ];
const unpathedFile = key.replace( /@VER/g, Release.newVersion ); const unpathedFile = key.replace( /@VER/g, Release.newVersion );
@ -60,28 +60,33 @@ function makeReleaseCopies( Release ) {
// Map files need to reference the new uncompressed name; // Map files need to reference the new uncompressed name;
// assume that all files reside in the same directory. // assume that all files reside in the same directory.
// "file":"jquery.min.js" ... "sources":["jquery.js"] // "file":"jquery.min.js" ... "sources":["jquery.js"]
text = fs.readFileSync( builtFile, "utf8" ) text = fs
.replace( /"file":"([^"]+)"/, .readFileSync( builtFile, "utf8" )
"\"file\":\"" + unpathedFile.replace( /\.min\.map/, ".min.js\"" ) ) .replace(
.replace( /"sources":\["([^"]+)"\]/, /"file":"([^"]+)"/,
"\"sources\":[\"" + unpathedFile.replace( /\.min\.map/, ".js" ) + "\"]" ); `"file":"${ unpathedFile.replace( /\.min\.map/, ".min.js" ) }"`
)
.replace(
/"sources":\["([^"]+)"\]/,
`"sources":["${ unpathedFile.replace( /\.min\.map/, ".js" ) }"]`
);
fs.writeFileSync( releaseFile, text ); fs.writeFileSync( releaseFile, text );
} else if ( builtFile !== releaseFile ) { } else if ( builtFile !== releaseFile ) {
shell.cp( "-f", builtFile, releaseFile ); shell.cp( "-f", builtFile, releaseFile );
} }
} ); } );
} ); } );
} }
async function makeArchives( Release ) { async function makeArchives( Release ) {
Release.chdir( Release.dir.repo ); Release.chdir( Release.dir.repo );
async function makeArchive( { cdn, filesMap, cdnFolder } ) { async function makeArchive( { cdn, filesMap, cdnFolder } ) {
return new Promise( ( resolve, reject ) => { return new Promise( ( resolve, reject ) => {
if ( Release.preRelease ) { if ( Release.preRelease ) {
console.log( "Skipping archive creation for " + cdn + "; this is a beta release." ); console.log(
`Skipping archive creation for ${ cdn }; this is a beta release.`
);
resolve(); resolve();
return; return;
} }
@ -99,7 +104,7 @@ async function makeArchives( Release ) {
output.on( "close", resolve ); output.on( "close", resolve );
output.on( "error", err => { output.on( "error", ( err ) => {
reject( err ); reject( err );
} ); } );
@ -107,33 +112,37 @@ async function makeArchives( Release ) {
let finalFilesMap = Object.create( null ); let finalFilesMap = Object.create( null );
for ( const [ releaseFile, builtFile ] of Object.entries( filesMap ) ) { for ( const [ releaseFile, builtFile ] of Object.entries( filesMap ) ) {
finalFilesMap[ releaseFile.replace( rver, Release.newVersion ) ] = builtFile; finalFilesMap[ releaseFile.replace( rver, Release.newVersion ) ] =
builtFile;
} }
const files = Object const files = Object.keys( filesMap ).map(
.keys( filesMap ) ( item ) => `${ cdnFolder }/${ item.replace( rver, Release.newVersion ) }`
.map( item => `${ cdnFolder }/${ );
item.replace( rver, Release.newVersion )
}` );
if ( os.platform() === "win32" ) { if ( os.platform() === "win32" ) {
sum = []; sum = [];
for ( i = 0; i < files.length; i++ ) { for ( i = 0; i < files.length; i++ ) {
result = Release.exec( result = Release.exec(
"certutil -hashfile " + files[ i ] + " MD5", "Error retrieving md5sum" "certutil -hashfile " + files[ i ] + " MD5",
"Error retrieving md5sum"
); );
sum.push( rmd5.exec( result )[ 0 ] + " " + files[ i ] ); sum.push( rmd5.exec( result )[ 0 ] + " " + files[ i ] );
} }
sum = sum.join( "\n" ); sum = sum.join( "\n" );
} else { } else {
sum = Release.exec( "md5 -r " + files.join( " " ), "Error retrieving md5sum" ); sum = Release.exec(
"md5 -r " + files.join( " " ),
"Error retrieving md5sum"
);
} }
fs.writeFileSync( md5file, sum ); fs.writeFileSync( md5file, sum );
files.push( md5file ); files.push( md5file );
files.forEach( file => { files.forEach( ( file ) => {
archiver.append( fs.createReadStream( file ), archiver.append( fs.createReadStream( file ), {
{ name: path.basename( file ) } ); name: path.basename( file )
} );
} ); } );
archiver.finalize(); archiver.finalize();

View File

@ -2,7 +2,7 @@
module.exports = function( Release, files, complete ) { module.exports = function( Release, files, complete ) {
const fs = require( "fs" ).promises; const fs = require( "node:fs/promises" );
const shell = require( "shelljs" ); const shell = require( "shelljs" );
const inquirer = require( "inquirer" ); const inquirer = require( "inquirer" );
const pkg = require( `${ Release.dir.repo }/package.json` ); const pkg = require( `${ Release.dir.repo }/package.json` );

View File

@ -1,5 +1,5 @@
import chalk from "chalk"; import chalk from "chalk";
import fs from "node:fs"; import fs from "node:fs/promises";
import { promisify } from "node:util"; import { promisify } from "node:util";
import zlib from "node:zlib"; import zlib from "node:zlib";
import { exec as nodeExec } from "node:child_process"; import { exec as nodeExec } from "node:child_process";
@ -34,7 +34,7 @@ function getBranchHeader( branch, commit ) {
async function getCache( loc ) { async function getCache( loc ) {
let cache; let cache;
try { try {
const contents = await fs.promises.readFile( loc, "utf8" ); const contents = await fs.readFile( loc, "utf8" );
cache = JSON.parse( contents ); cache = JSON.parse( contents );
} catch ( err ) { } catch ( err ) {
return {}; return {};
@ -60,7 +60,7 @@ function cacheResults( results ) {
} }
function saveCache( loc, cache ) { function saveCache( loc, cache ) {
return fs.promises.writeFile( loc, JSON.stringify( cache ) ); return fs.writeFile( loc, JSON.stringify( cache ) );
} }
function compareSizes( existing, current, padLength ) { function compareSizes( existing, current, padLength ) {
@ -104,7 +104,7 @@ export async function compareSize( { cache = ".sizecache.json", files } = {} ) {
const results = await Promise.all( const results = await Promise.all(
files.map( async function( filename ) { files.map( async function( filename ) {
let contents = await fs.promises.readFile( filename, "utf8" ); let contents = await fs.readFile( filename, "utf8" );
// Remove the short SHA and .dirty from comparisons. // Remove the short SHA and .dirty from comparisons.
// The short SHA so commits can be compared against each other // The short SHA so commits can be compared against each other

View File

@ -1,7 +1,7 @@
"use strict"; "use strict";
const util = require( "util" ); const util = require( "node:util" );
const exec = util.promisify( require( "child_process" ).exec ); const exec = util.promisify( require( "node:child_process" ).exec );
module.exports = async function isCleanWorkingDir() { module.exports = async function isCleanWorkingDir() {
const { stdout } = await exec( "git status --untracked-files=no --porcelain" ); const { stdout } = await exec( "git status --untracked-files=no --porcelain" );

View File

@ -1,15 +1,15 @@
"use strict"; "use strict";
const swc = require( "@swc/core" ); const swc = require( "@swc/core" );
const fs = require( "fs" ); const fs = require( "node:fs/promises" );
const path = require( "path" ); const path = require( "node:path" );
const processForDist = require( "./dist" ); const processForDist = require( "./dist" );
const getTimestamp = require( "./lib/getTimestamp" ); const getTimestamp = require( "./lib/getTimestamp" );
const rjs = /\.js$/; const rjs = /\.js$/;
module.exports = async function minify( { filename, dir, esm } ) { module.exports = async function minify( { filename, dir, esm } ) {
const contents = await fs.promises.readFile( path.join( dir, filename ), "utf8" ); const contents = await fs.readFile( path.join( dir, filename ), "utf8" );
const version = /jQuery JavaScript Library ([^\n]+)/.exec( contents )[ 1 ]; const version = /jQuery JavaScript Library ([^\n]+)/.exec( contents )[ 1 ];
const { code, map: incompleteMap } = await swc.minify( const { code, map: incompleteMap } = await swc.minify(
@ -48,11 +48,11 @@ module.exports = async function minify( { filename, dir, esm } ) {
} ); } );
await Promise.all( [ await Promise.all( [
fs.promises.writeFile( fs.writeFile(
path.join( dir, minFilename ), path.join( dir, minFilename ),
code code
), ),
fs.promises.writeFile( fs.writeFile(
path.join( dir, mapFilename ), path.join( dir, mapFilename ),
map map
) )

View File

@ -1,8 +1,8 @@
"use strict"; "use strict";
const fs = require( "fs" ); const fs = require( "node:fs/promises" );
const util = require( "util" ); const util = require( "node:util" );
const exec = util.promisify( require( "child_process" ).exec ); const exec = util.promisify( require( "node:child_process" ).exec );
const verifyNodeVersion = require( "./lib/verifyNodeVersion" ); const verifyNodeVersion = require( "./lib/verifyNodeVersion" );
const allowedLibraryTypes = [ "regular", "factory" ]; const allowedLibraryTypes = [ "regular", "factory" ];
@ -26,7 +26,7 @@ async function runTests( { libraryType, sourceType, module } ) {
} ${ sourceType } "${ module }"` ); } ${ sourceType } "${ module }"` );
} }
const dir = `./test/node_smoke_tests/${ sourceType }/${ libraryType }`; const dir = `./test/node_smoke_tests/${ sourceType }/${ libraryType }`;
const files = await fs.promises.readdir( dir, { withFileTypes: true } ); const files = await fs.readdir( dir, { withFileTypes: true } );
const testFiles = files.filter( ( testFilePath ) => testFilePath.isFile() ); const testFiles = files.filter( ( testFilePath ) => testFilePath.isFile() );
if ( !testFiles.length ) { if ( !testFiles.length ) {

View File

@ -1,7 +1,7 @@
"use strict"; "use strict";
const fs = require( "fs" ); const fs = require( "node:fs/promises" );
const path = require( "path" ); const path = require( "node:path" );
const projectDir = path.resolve( __dirname, "..", ".." ); const projectDir = path.resolve( __dirname, "..", ".." );
@ -26,15 +26,15 @@ const files = {
}; };
async function npmcopy() { async function npmcopy() {
await fs.promises.mkdir( path.resolve( projectDir, "external" ), { await fs.mkdir( path.resolve( projectDir, "external" ), {
recursive: true recursive: true
} ); } );
for ( const [ dest, source ] of Object.entries( files ) ) { for ( const [ dest, source ] of Object.entries( files ) ) {
const from = path.resolve( projectDir, "node_modules", source ); const from = path.resolve( projectDir, "node_modules", source );
const to = path.resolve( projectDir, "external", dest ); const to = path.resolve( projectDir, "external", dest );
const toDir = path.dirname( to ); const toDir = path.dirname( to );
await fs.promises.mkdir( toDir, { recursive: true } ); await fs.mkdir( toDir, { recursive: true } );
await fs.promises.copyFile( from, to ); await fs.copyFile( from, to );
console.log( `${ source } → ${ dest }` ); console.log( `${ source } → ${ dest }` );
} }
} }

View File

@ -1,9 +1,9 @@
"use strict"; "use strict";
const { spawn } = require( "child_process" ); const { spawn } = require( "node:child_process" );
const verifyNodeVersion = require( "./lib/verifyNodeVersion" ); const verifyNodeVersion = require( "./lib/verifyNodeVersion" );
const path = require( "path" ); const path = require( "node:path" );
const os = require( "os" ); const os = require( "node:os" );
if ( !verifyNodeVersion() ) { if ( !verifyNodeVersion() ) {
return; return;

View File

@ -1,10 +1,10 @@
"use strict"; "use strict";
const fs = require( "fs" ); const fs = require( "node:fs/promises" );
async function generateFixture() { async function generateFixture() {
const fixture = await fs.promises.readFile( "./test/data/qunit-fixture.html", "utf8" ); const fixture = await fs.readFile( "./test/data/qunit-fixture.html", "utf8" );
await fs.promises.writeFile( await fs.writeFile(
"./test/data/qunit-fixture.js", "./test/data/qunit-fixture.js",
"// Generated by build/tasks/qunit-fixture.js\n" + "// Generated by build/tasks/qunit-fixture.js\n" +
"QUnit.config.fixture = " + "QUnit.config.fixture = " +

View File

@ -1,7 +1,7 @@
import bodyParser from "body-parser"; import bodyParser from "body-parser";
import express from "express"; import express from "express";
import bodyParserErrorHandler from "express-body-parser-error-handler"; import bodyParserErrorHandler from "express-body-parser-error-handler";
import { readFile } from "fs/promises"; import { readFile } from "node:fs/promises";
import mockServer from "../middleware-mockserver.cjs"; import mockServer from "../middleware-mockserver.cjs";
export async function createTestServer( report ) { export async function createTestServer( report ) {