Mirror of https://github.com/jquery/jquery.git (synced 2024-11-23 02:54:22 +00:00)
Build: migrate more uses of fs.promises; use node: protocol
Ref gh-5440
commit ae7f6139cc
parent 822362e6ef
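Every hunk below applies the same two mechanical changes: requires and imports of Node built-ins gain the node: prefix, and the various spellings of the promise-based fs API in use (fs.promises.* on the core module, require( "fs" ).promises, import fs from "node:fs" plus fs.promises.*) are normalized to a single require or import of node:fs/promises. A few hunks also pick up formatting changes along the way (parenthesized single arrow parameters, template literals instead of concatenation). A minimal before/after sketch of the fs migration, shown as two alternative versions of the same illustrative call site:

// Before
const fs = require( "fs" );
async function readAuthors() {
	return fs.promises.readFile( "AUTHORS.txt", "utf8" );
}

// After
const fs = require( "node:fs/promises" );
async function readAuthors() {
	return fs.readFile( "AUTHORS.txt", "utf8" );
}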
@@ -1,6 +1,6 @@
 "use strict";

-var fs = require( "fs" );
+const fs = require( "node:fs" );

 module.exports = function( Release ) {
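For core modules the node: prefix changes nothing about what gets loaded; it marks the dependency as a built-in and keeps it from ever resolving to a node_modules package of the same name. A quick check that should hold on any Node version supporting the prefix:

const fsClassic = require( "fs" );
const fsPrefixed = require( "node:fs" );

// Both references point at the same built-in module object.
console.log( fsClassic === fsPrefixed ); // true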
@@ -1,6 +1,6 @@
 "use strict";

-const fs = require( "node:fs" );
+const fs = require( "node:fs/promises" );
 const util = require( "node:util" );
 const exec = util.promisify( require( "node:child_process" ).exec );
 const rnewline = /\r?\n/;
@@ -40,7 +40,7 @@ function cloneSizzle() {
 }

 async function getLastAuthor() {
-	const authorsTxt = await fs.promises.readFile( "AUTHORS.txt", "utf8" );
+	const authorsTxt = await fs.readFile( "AUTHORS.txt", "utf8" );
 	return authorsTxt.trim().split( rnewline ).pop();
 }

@@ -93,7 +93,7 @@ async function updateAuthors() {
 	const authors = await getAuthors();

 	const authorsTxt = "Authors ordered by first contribution.\n\n" + authors.join( "\n" ) + "\n";
-	await fs.promises.writeFile( "AUTHORS.txt", authorsTxt );
+	await fs.writeFile( "AUTHORS.txt", authorsTxt );

 	console.log( "AUTHORS.txt updated" );
 }
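The two call-site hunks above are the typical follow-up: once fs is bound to node:fs/promises, the .promises hop disappears while the rest of the call stays identical. A small self-contained sketch of the read and write halves, reusing only names that appear in the hunks:

const fs = require( "node:fs/promises" );
const rnewline = /\r?\n/;

async function getLastAuthor() {
	const authorsTxt = await fs.readFile( "AUTHORS.txt", "utf8" );
	return authorsTxt.trim().split( rnewline ).pop();
}

async function writeAuthors( authors ) {

	// Hypothetical wrapper around the writeFile call shown in the hunk.
	const authorsTxt = "Authors ordered by first contribution.\n\n" + authors.join( "\n" ) + "\n";
	await fs.writeFile( "AUTHORS.txt", authorsTxt );
}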
@@ -1,9 +1,9 @@
 "use strict";

-const fs = require( "fs" );
+const fs = require( "node:fs" );
 const shell = require( "shelljs" );
-const path = require( "path" );
-const os = require( "os" );
+const path = require( "node:path" );
+const os = require( "node:os" );

 const cdnFolderContainer = "dist/cdn";
 const cdnFolderVersioned = `${ cdnFolderContainer }/versioned`;
@@ -49,7 +49,7 @@ function makeReleaseCopies( Release ) {
 	].forEach( ( { filesMap, cdnFolder } ) => {
 		shell.mkdir( "-p", cdnFolder );

-		Object.keys( filesMap ).forEach( key => {
+		Object.keys( filesMap ).forEach( ( key ) => {
 			let text;
 			const builtFile = filesMap[ key ];
 			const unpathedFile = key.replace( /@VER/g, Release.newVersion );
@@ -60,28 +60,33 @@ function makeReleaseCopies( Release ) {
 				// Map files need to reference the new uncompressed name;
 				// assume that all files reside in the same directory.
 				// "file":"jquery.min.js" ... "sources":["jquery.js"]
-				text = fs.readFileSync( builtFile, "utf8" )
-					.replace( /"file":"([^"]+)"/,
-						"\"file\":\"" + unpathedFile.replace( /\.min\.map/, ".min.js\"" ) )
-					.replace( /"sources":\["([^"]+)"\]/,
-						"\"sources\":[\"" + unpathedFile.replace( /\.min\.map/, ".js" ) + "\"]" );
+				text = fs
+					.readFileSync( builtFile, "utf8" )
+					.replace(
+						/"file":"([^"]+)"/,
+						`"file":"${ unpathedFile.replace( /\.min\.map/, ".min.js" ) }"`
+					)
+					.replace(
+						/"sources":\["([^"]+)"\]/,
+						`"sources":["${ unpathedFile.replace( /\.min\.map/, ".js" ) }"]`
+					);
 				fs.writeFileSync( releaseFile, text );
 			} else if ( builtFile !== releaseFile ) {
 				shell.cp( "-f", builtFile, releaseFile );
 			}
 		} );

 	} );
 }

 async function makeArchives( Release ) {

 	Release.chdir( Release.dir.repo );

 	async function makeArchive( { cdn, filesMap, cdnFolder } ) {
 		return new Promise( ( resolve, reject ) => {
 			if ( Release.preRelease ) {
-				console.log( "Skipping archive creation for " + cdn + "; this is a beta release." );
+				console.log(
+					`Skipping archive creation for ${ cdn }; this is a beta release.`
+				);
 				resolve();
 				return;
 			}
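Beyond the rewrapping, the hunk above moves the source-map patching from escaped string concatenation to template literals. A hedged, self-contained illustration of what that patching does; the versioned file name and the one-line map are made up:

const unpathedFile = "jquery-9.9.9.min.map"; // hypothetical versioned name

const text = '{"version":3,"file":"jquery.min.js","sources":["jquery.js"]}'
	.replace(
		/"file":"([^"]+)"/,
		`"file":"${ unpathedFile.replace( /\.min\.map/, ".min.js" ) }"`
	)
	.replace(
		/"sources":\["([^"]+)"\]/,
		`"sources":["${ unpathedFile.replace( /\.min\.map/, ".js" ) }"]`
	);

console.log( text );
// {"version":3,"file":"jquery-9.9.9.min.js","sources":["jquery-9.9.9.js"]}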
@@ -99,7 +104,7 @@ async function makeArchives( Release ) {

 			output.on( "close", resolve );

-			output.on( "error", err => {
+			output.on( "error", ( err ) => {
 				reject( err );
 			} );

@@ -107,33 +112,37 @@ async function makeArchives( Release ) {

 			let finalFilesMap = Object.create( null );
 			for ( const [ releaseFile, builtFile ] of Object.entries( filesMap ) ) {
-				finalFilesMap[ releaseFile.replace( rver, Release.newVersion ) ] = builtFile;
+				finalFilesMap[ releaseFile.replace( rver, Release.newVersion ) ] =
+					builtFile;
 			}

-			const files = Object
-				.keys( filesMap )
-				.map( item => `${ cdnFolder }/${
-					item.replace( rver, Release.newVersion )
-				}` );
+			const files = Object.keys( filesMap ).map(
+				( item ) => `${ cdnFolder }/${ item.replace( rver, Release.newVersion ) }`
+			);

 			if ( os.platform() === "win32" ) {
 				sum = [];
 				for ( i = 0; i < files.length; i++ ) {
 					result = Release.exec(
-						"certutil -hashfile " + files[ i ] + " MD5", "Error retrieving md5sum"
+						"certutil -hashfile " + files[ i ] + " MD5",
+						"Error retrieving md5sum"
 					);
 					sum.push( rmd5.exec( result )[ 0 ] + " " + files[ i ] );
 				}
 				sum = sum.join( "\n" );
 			} else {
-				sum = Release.exec( "md5 -r " + files.join( " " ), "Error retrieving md5sum" );
+				sum = Release.exec(
+					"md5 -r " + files.join( " " ),
+					"Error retrieving md5sum"
+				);
 			}
 			fs.writeFileSync( md5file, sum );
 			files.push( md5file );

-			files.forEach( file => {
-				archiver.append( fs.createReadStream( file ),
-					{ name: path.basename( file ) } );
+			files.forEach( ( file ) => {
+				archiver.append( fs.createReadStream( file ), {
+					name: path.basename( file )
+				} );
 			} );

 			archiver.finalize();
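This file keeps the stream/sync flavour of node:fs rather than node:fs/promises because the archive step feeds read streams into archiver. A hedged sketch of the wiring the hunk implies; only the append and finalize calls mirror the diff, the setup lines and file list are assumptions:

const fs = require( "node:fs" );
const path = require( "node:path" );
const archiver = require( "archiver" )( "zip" );

const output = fs.createWriteStream( "dist/cdn/example.zip" ); // hypothetical target
output.on( "close", () => console.log( "archive written" ) );
archiver.pipe( output );

// Hypothetical file list; the real one comes from filesMap.
[ "dist/jquery.js", "dist/jquery.min.js" ].forEach( ( file ) => {
	archiver.append( fs.createReadStream( file ), {
		name: path.basename( file )
	} );
} );

archiver.finalize();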
@@ -2,7 +2,7 @@

 module.exports = function( Release, files, complete ) {

-	const fs = require( "fs" ).promises;
+	const fs = require( "node:fs/promises" );
 	const shell = require( "shelljs" );
 	const inquirer = require( "inquirer" );
 	const pkg = require( `${ Release.dir.repo }/package.json` );
@@ -1,5 +1,5 @@
 import chalk from "chalk";
-import fs from "node:fs";
+import fs from "node:fs/promises";
 import { promisify } from "node:util";
 import zlib from "node:zlib";
 import { exec as nodeExec } from "node:child_process";
@@ -34,7 +34,7 @@ function getBranchHeader( branch, commit ) {
 async function getCache( loc ) {
 	let cache;
 	try {
-		const contents = await fs.promises.readFile( loc, "utf8" );
+		const contents = await fs.readFile( loc, "utf8" );
 		cache = JSON.parse( contents );
 	} catch ( err ) {
 		return {};
@@ -60,7 +60,7 @@ function cacheResults( results ) {
 }

 function saveCache( loc, cache ) {
-	return fs.promises.writeFile( loc, JSON.stringify( cache ) );
+	return fs.writeFile( loc, JSON.stringify( cache ) );
 }

 function compareSizes( existing, current, padLength ) {
@@ -104,7 +104,7 @@ export async function compareSize( { cache = ".sizecache.json", files } = {} ) {
 	const results = await Promise.all(
 		files.map( async function( filename ) {

-			let contents = await fs.promises.readFile( filename, "utf8" );
+			let contents = await fs.readFile( filename, "utf8" );

 			// Remove the short SHA and .dirty from comparisons.
 			// The short SHA so commits can be compared against each other
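The size-comparison module is ESM, so the same switch lands on the import line and top-level await is available. A short sketch of the read-with-fallback pattern from getCache, assuming the .sizecache.json location shown above:

import fs from "node:fs/promises";

async function getCache( loc ) {
	try {
		return JSON.parse( await fs.readFile( loc, "utf8" ) );
	} catch ( err ) {

		// A missing or unreadable cache just means there is no baseline yet.
		return {};
	}
}

const cache = await getCache( ".sizecache.json" );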
@@ -1,7 +1,7 @@
 "use strict";

-const util = require( "util" );
-const exec = util.promisify( require( "child_process" ).exec );
+const util = require( "node:util" );
+const exec = util.promisify( require( "node:child_process" ).exec );

 module.exports = async function isCleanWorkingDir() {
 	const { stdout } = await exec( "git status --untracked-files=no --porcelain" );
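util.promisify turns the callback-style child_process.exec into a function returning a promise of { stdout, stderr }. A hedged completion of the helper above; the hunk ends after the exec call, so the return line is an assumption about the obvious remainder:

"use strict";

const util = require( "node:util" );
const exec = util.promisify( require( "node:child_process" ).exec );

module.exports = async function isCleanWorkingDir() {
	const { stdout } = await exec( "git status --untracked-files=no --porcelain" );

	// Porcelain output is empty when nothing is modified or staged.
	return !stdout.trim();
};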
@@ -1,15 +1,15 @@
 "use strict";

 const swc = require( "@swc/core" );
-const fs = require( "fs" );
-const path = require( "path" );
+const fs = require( "node:fs/promises" );
+const path = require( "node:path" );
 const processForDist = require( "./dist" );
 const getTimestamp = require( "./lib/getTimestamp" );

 const rjs = /\.js$/;

 module.exports = async function minify( { filename, dir, esm } ) {
-	const contents = await fs.promises.readFile( path.join( dir, filename ), "utf8" );
+	const contents = await fs.readFile( path.join( dir, filename ), "utf8" );
 	const version = /jQuery JavaScript Library ([^\n]+)/.exec( contents )[ 1 ];

 	const { code, map: incompleteMap } = await swc.minify(
@@ -48,11 +48,11 @@ module.exports = async function minify( { filename, dir, esm } ) {
 	} );

 	await Promise.all( [
-		fs.promises.writeFile(
+		fs.writeFile(
 			path.join( dir, minFilename ),
 			code
 		),
-		fs.promises.writeFile(
+		fs.writeFile(
 			path.join( dir, mapFilename ),
 			map
 		)
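The minify task is a read, minify, write pipeline around @swc/core. A hedged sketch of that flow; the swc option values and the map filename derivation are illustrative, not the project's actual configuration:

const swc = require( "@swc/core" );
const fs = require( "node:fs/promises" );
const path = require( "node:path" );

async function minifyFile( dir, filename ) {
	const contents = await fs.readFile( path.join( dir, filename ), "utf8" );

	const { code, map } = await swc.minify( contents, {
		compress: true,
		mangle: true,
		sourceMap: true
	} );

	const minFilename = filename.replace( /\.js$/, ".min.js" );
	const mapFilename = minFilename + ".map";

	// Write the minified file and its source map in parallel.
	await Promise.all( [
		fs.writeFile( path.join( dir, minFilename ), code ),
		fs.writeFile( path.join( dir, mapFilename ), map )
	] );
}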
@@ -1,8 +1,8 @@
 "use strict";

-const fs = require( "fs" );
-const util = require( "util" );
-const exec = util.promisify( require( "child_process" ).exec );
+const fs = require( "node:fs/promises" );
+const util = require( "node:util" );
+const exec = util.promisify( require( "node:child_process" ).exec );
 const verifyNodeVersion = require( "./lib/verifyNodeVersion" );

 const allowedLibraryTypes = [ "regular", "factory" ];
@@ -26,7 +26,7 @@ async function runTests( { libraryType, sourceType, module } ) {
 		} ${ sourceType } "${ module }"` );
 	}
 	const dir = `./test/node_smoke_tests/${ sourceType }/${ libraryType }`;
-	const files = await fs.promises.readdir( dir, { withFileTypes: true } );
+	const files = await fs.readdir( dir, { withFileTypes: true } );
 	const testFiles = files.filter( ( testFilePath ) => testFilePath.isFile() );

 	if ( !testFiles.length ) {
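The smoke-test runner scans a directory for test files; with { withFileTypes: true } the promise-based readdir resolves to Dirent objects, so entries can be filtered to plain files without extra stat calls. A small sketch with a hypothetical concrete path:

const fs = require( "node:fs/promises" );

async function listTestFiles( dir ) {
	const entries = await fs.readdir( dir, { withFileTypes: true } );

	return entries
		.filter( ( entry ) => entry.isFile() )
		.map( ( entry ) => entry.name );
}

listTestFiles( "./test/node_smoke_tests/commonjs/regular" ).then( console.log ); // hypothetical path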
@@ -1,7 +1,7 @@
 "use strict";

-const fs = require( "fs" );
-const path = require( "path" );
+const fs = require( "node:fs/promises" );
+const path = require( "node:path" );

 const projectDir = path.resolve( __dirname, "..", ".." );

@@ -26,15 +26,15 @@ const files = {
 };

 async function npmcopy() {
-	await fs.promises.mkdir( path.resolve( projectDir, "external" ), {
+	await fs.mkdir( path.resolve( projectDir, "external" ), {
 		recursive: true
 	} );
 	for ( const [ dest, source ] of Object.entries( files ) ) {
 		const from = path.resolve( projectDir, "node_modules", source );
 		const to = path.resolve( projectDir, "external", dest );
 		const toDir = path.dirname( to );
-		await fs.promises.mkdir( toDir, { recursive: true } );
-		await fs.promises.copyFile( from, to );
+		await fs.mkdir( toDir, { recursive: true } );
+		await fs.copyFile( from, to );
 		console.log( `${ source } → ${ dest }` );
 	}
 }
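The copy task mirrors selected files from node_modules into external/. The detail worth noting above is that fs.mkdir with { recursive: true } succeeds even when the directory already exists, so every copy can recreate its destination folder unconditionally. A condensed sketch with a hypothetical mapping entry:

const fs = require( "node:fs/promises" );
const path = require( "node:path" );

async function copyDependency( projectDir, dest, source ) {
	const from = path.resolve( projectDir, "node_modules", source );
	const to = path.resolve( projectDir, "external", dest );

	await fs.mkdir( path.dirname( to ), { recursive: true } );
	await fs.copyFile( from, to );
	console.log( `${ source } → ${ dest }` );
}

copyDependency( ".", "qunit/qunit.js", "qunit/qunit/qunit.js" ); // hypothetical entry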
@@ -1,9 +1,9 @@
 "use strict";

-const { spawn } = require( "child_process" );
+const { spawn } = require( "node:child_process" );
 const verifyNodeVersion = require( "./lib/verifyNodeVersion" );
-const path = require( "path" );
-const os = require( "os" );
+const path = require( "node:path" );
+const os = require( "node:os" );

 if ( !verifyNodeVersion() ) {
 	return;
@@ -1,10 +1,10 @@
 "use strict";

-const fs = require( "fs" );
+const fs = require( "node:fs/promises" );

 async function generateFixture() {
-	const fixture = await fs.promises.readFile( "./test/data/qunit-fixture.html", "utf8" );
-	await fs.promises.writeFile(
+	const fixture = await fs.readFile( "./test/data/qunit-fixture.html", "utf8" );
+	await fs.writeFile(
 		"./test/data/qunit-fixture.js",
 		"// Generated by build/tasks/qunit-fixture.js\n" +
 			"QUnit.config.fixture = " +
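The fixture task inlines test/data/qunit-fixture.html into a generated script. The hunk cuts off mid-expression; the sketch below guesses at the remainder, and the JSON.stringify serialization step is an assumption rather than something shown in the diff:

const fs = require( "node:fs/promises" );

async function generateFixture() {
	const fixture = await fs.readFile( "./test/data/qunit-fixture.html", "utf8" );
	await fs.writeFile(
		"./test/data/qunit-fixture.js",
		"// Generated by build/tasks/qunit-fixture.js\n" +
			"QUnit.config.fixture = " +

			// Assumed: serialize the markup into a valid JS string literal.
			JSON.stringify( fixture.trim() ) + ";\n"
	);
}

generateFixture();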
@@ -1,7 +1,7 @@
 import bodyParser from "body-parser";
 import express from "express";
 import bodyParserErrorHandler from "express-body-parser-error-handler";
-import { readFile } from "fs/promises";
+import { readFile } from "node:fs/promises";
 import mockServer from "../middleware-mockserver.cjs";

 export async function createTestServer( report ) {
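For the test server only the ESM import changes; "fs/promises" already resolved to the built-in module, the node: prefix just states that explicitly. A one-line usage sketch with a hypothetical file:

import { readFile } from "node:fs/promises";

const page = await readFile( "./test/index.html", "utf8" ); // hypothetical path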