Mirror of https://github.com/jquery/jquery.git
Build: migrate more uses of fs.promises; use node: protocol
Ref gh-5441
This commit is contained in:
parent 2d8208e007
commit 79a19b412c
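The hunks below apply the same two mechanical changes across the build scripts: built-in modules are loaded through the `node:` protocol, and code that previously reached the promise API via `fs.promises` now requires `node:fs/promises` directly. A minimal sketch of the pattern (illustrative only, not copied from any single file below):

```js
// Before: promise methods reached through the fs.promises namespace.
// const fs = require( "fs" );
// const text = await fs.promises.readFile( "AUTHORS.txt", "utf8" );

// After: the node: prefix marks the module as a Node.js built-in, and
// node:fs/promises exposes the promise-returning methods directly.
const fs = require( "node:fs/promises" );

async function readAuthors() {
	const text = await fs.readFile( "AUTHORS.txt", "utf8" );
	return text.trim().split( /\r?\n/ );
}
```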
@@ -1,6 +1,6 @@
 "use strict";
 
-var fs = require( "fs" );
+const fs = require( "node:fs" );
 
 module.exports = function( Release ) {
 
@@ -1,6 +1,6 @@
 "use strict";
 
-const fs = require( "node:fs" );
+const fs = require( "node:fs/promises" );
 const util = require( "node:util" );
 const exec = util.promisify( require( "node:child_process" ).exec );
 const rnewline = /\r?\n/;
@@ -40,7 +40,7 @@ function cloneSizzle() {
 }
 
 async function getLastAuthor() {
-	const authorsTxt = await fs.promises.readFile( "AUTHORS.txt", "utf8" );
+	const authorsTxt = await fs.readFile( "AUTHORS.txt", "utf8" );
 	return authorsTxt.trim().split( rnewline ).pop();
 }
 
@@ -93,7 +93,7 @@ async function updateAuthors() {
 	const authors = await getAuthors();
 
 	const authorsTxt = "Authors ordered by first contribution.\n\n" + authors.join( "\n" ) + "\n";
-	await fs.promises.writeFile( "AUTHORS.txt", authorsTxt );
+	await fs.writeFile( "AUTHORS.txt", authorsTxt );
 
 	console.log( "AUTHORS.txt updated" );
 }
@@ -1,13 +1,10 @@
 "use strict";
 
-var
-	fs = require( "fs" ),
+var fs = require( "node:fs" ),
 	shell = require( "shelljs" ),
-	path = require( "path" ),
-	os = require( "os" ),
+	path = require( "node:path" ),
+	os = require( "node:os" ),
 
 	cdnFolder = "dist/cdn",
 
 	releaseFiles = {
 		"jquery-VER.js": "dist/jquery.js",
 		"jquery-VER.min.js": "dist/jquery.min.js",
@@ -16,15 +13,21 @@ var
 		"jquery-VER.slim.min.js": "dist/jquery.slim.min.js",
 		"jquery-VER.slim.min.map": "dist/jquery.slim.min.map"
 	},
 
 	googleFilesCDN = [
-		"jquery.js", "jquery.min.js", "jquery.min.map",
-		"jquery.slim.js", "jquery.slim.min.js", "jquery.slim.min.map"
+		"jquery.js",
+		"jquery.min.js",
+		"jquery.min.map",
+		"jquery.slim.js",
+		"jquery.slim.min.js",
+		"jquery.slim.min.map"
 	],
 
 	msFilesCDN = [
-		"jquery-VER.js", "jquery-VER.min.js", "jquery-VER.min.map",
-		"jquery-VER.slim.js", "jquery-VER.slim.min.js", "jquery-VER.slim.min.map"
+		"jquery-VER.js",
+		"jquery-VER.min.js",
+		"jquery-VER.min.map",
+		"jquery-VER.slim.js",
+		"jquery-VER.slim.min.js",
+		"jquery-VER.slim.min.map"
 	];
 
 /**
@@ -44,11 +47,16 @@ function makeReleaseCopies( Release ) {
 			// Map files need to reference the new uncompressed name;
 			// assume that all files reside in the same directory.
 			// "file":"jquery.min.js" ... "sources":["jquery.js"]
-			text = fs.readFileSync( builtFile, "utf8" )
-				.replace( /"file":"([^"]+)"/,
-					"\"file\":\"" + unpathedFile.replace( /\.min\.map/, ".min.js\"" ) )
-				.replace( /"sources":\["([^"]+)"\]/,
-					"\"sources\":[\"" + unpathedFile.replace( /\.min\.map/, ".js" ) + "\"]" );
+			text = fs
+				.readFileSync( builtFile, "utf8" )
+				.replace(
+					/"file":"([^"]+)"/,
+					`"file":"${ unpathedFile.replace( /\.min\.map/, ".min.js" ) }"`
+				)
+				.replace(
+					/"sources":\["([^"]+)"\]/,
+					`"sources":["${ unpathedFile.replace( /\.min\.map/, ".js" ) }"]`
+				);
 			fs.writeFileSync( releaseFile, text );
 		} else if ( builtFile !== releaseFile ) {
 			shell.cp( "-f", builtFile, releaseFile );
@@ -57,19 +65,22 @@ function makeReleaseCopies( Release ) {
 }
 
 function makeArchives( Release, callback ) {
 
 	Release.chdir( Release.dir.repo );
 
 	function makeArchive( cdn, files, callback ) {
 		if ( Release.preRelease ) {
-			console.log( "Skipping archive creation for " + cdn + "; this is a beta release." );
+			console.log(
+				`Skipping archive creation for ${ cdn }; this is a beta release.`
+			);
 			callback();
 			return;
 		}
 
 		console.log( "Creating production archive for " + cdn );
 
-		var i, sum, result,
+		var i,
+			sum,
+			result,
 			archiver = require( "archiver" )( "zip" ),
 			md5file = cdnFolder + "/" + cdn + "-md5.txt",
 			output = fs.createWriteStream(
@@ -87,28 +98,35 @@ function makeArchives( Release, callback ) {
 		archiver.pipe( output );
 
 		files = files.map( function( item ) {
-			return "dist" + ( rver.test( item ) ? "/cdn" : "" ) + "/" +
-				item.replace( rver, Release.newVersion );
+			return (
+				"dist" +
+				( rver.test( item ) ? "/cdn" : "" ) +
+				"/" +
+				item.replace( rver, Release.newVersion )
+			);
 		} );
 
 		if ( os.platform() === "win32" ) {
 			sum = [];
 			for ( i = 0; i < files.length; i++ ) {
 				result = Release.exec(
-					"certutil -hashfile " + files[ i ] + " MD5", "Error retrieving md5sum"
+					"certutil -hashfile " + files[ i ] + " MD5",
+					"Error retrieving md5sum"
 				);
 				sum.push( rmd5.exec( result )[ 0 ] + " " + files[ i ] );
 			}
 			sum = sum.join( "\n" );
 		} else {
-			sum = Release.exec( "md5 -r " + files.join( " " ), "Error retrieving md5sum" );
+			sum = Release.exec(
+				"md5 -r " + files.join( " " ),
+				"Error retrieving md5sum"
+			);
 		}
 		fs.writeFileSync( md5file, sum );
 		files.push( md5file );
 
 		files.forEach( function( file ) {
-			archiver.append( fs.createReadStream( file ),
-				{ name: path.basename( file ) } );
+			archiver.append( fs.createReadStream( file ), { name: path.basename( file ) } );
 		} );
 
 		archiver.finalize();
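In the makeReleaseCopies hunk above, the chained .replace() calls rewrite the `"file"` and `"sources"` fields of a minified source map so they point at the renamed, versioned files. A small standalone sketch of that rewrite, using a hypothetical versioned map name of the kind the releaseFiles keys produce, just to show the input and output shapes:

```js
// Hypothetical versioned map name; in the release script the unpathed name
// comes from the "jquery-VER.*" keys with the new version filled in.
const unpathedFile = "jquery-3.7.1.min.map";

const text = '{"version":3,"file":"jquery.min.js","sources":["jquery.js"]}'
	.replace(
		/"file":"([^"]+)"/,
		`"file":"${ unpathedFile.replace( /\.min\.map/, ".min.js" ) }"`
	)
	.replace(
		/"sources":\["([^"]+)"\]/,
		`"sources":["${ unpathedFile.replace( /\.min\.map/, ".js" ) }"]`
	);

console.log( text );
// {"version":3,"file":"jquery-3.7.1.min.js","sources":["jquery-3.7.1.js"]}
```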
@@ -2,7 +2,7 @@
 
 module.exports = function( Release, files, complete ) {
 
-	const fs = require( "fs" ).promises;
+	const fs = require( "node:fs/promises" );
	const shell = require( "shelljs" );
 	const inquirer = require( "inquirer" );
 	const pkg = require( `${ Release.dir.repo }/package.json` );
@@ -1,5 +1,5 @@
 import chalk from "chalk";
-import fs from "node:fs";
+import fs from "node:fs/promises";
 import { promisify } from "node:util";
 import zlib from "node:zlib";
 import { exec as nodeExec } from "node:child_process";
@@ -34,7 +34,7 @@ function getBranchHeader( branch, commit ) {
 async function getCache( loc ) {
 	let cache;
 	try {
-		const contents = await fs.promises.readFile( loc, "utf8" );
+		const contents = await fs.readFile( loc, "utf8" );
 		cache = JSON.parse( contents );
 	} catch ( err ) {
 		return {};
@@ -60,7 +60,7 @@ function cacheResults( results ) {
 }
 
 function saveCache( loc, cache ) {
-	return fs.promises.writeFile( loc, JSON.stringify( cache ) );
+	return fs.writeFile( loc, JSON.stringify( cache ) );
 }
 
 function compareSizes( existing, current, padLength ) {
@@ -104,7 +104,7 @@ export async function compareSize( { cache = ".sizecache.json", files } = {} ) {
 	const results = await Promise.all(
 		files.map( async function( filename ) {
 
-			let contents = await fs.promises.readFile( filename, "utf8" );
+			let contents = await fs.readFile( filename, "utf8" );
 
 			// Remove the short SHA and .dirty from comparisons.
 			// The short SHA so commits can be compared against each other
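The same migration applies in the ES module changed above: with `import fs from "node:fs/promises"`, the default export already is the promise API, so fs.readFile and friends return promises without the fs.promises indirection. A minimal ESM sketch of the cache-reading pattern shown in the hunk, assuming a JSON cache file such as .sizecache.json that may or may not exist:

```js
import fs from "node:fs/promises";

// Returns the parsed cache, or an empty object when the file is missing
// or unreadable, mirroring the getCache() shape in the diff above.
async function getCache( loc ) {
	try {
		const contents = await fs.readFile( loc, "utf8" );
		return JSON.parse( contents );
	} catch ( err ) {
		return {};
	}
}

console.log( await getCache( ".sizecache.json" ) );
```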
@@ -1,7 +1,7 @@
 "use strict";
 
-const util = require( "util" );
-const exec = util.promisify( require( "child_process" ).exec );
+const util = require( "node:util" );
+const exec = util.promisify( require( "node:child_process" ).exec );
 
 module.exports = async function isCleanWorkingDir() {
 	const { stdout } = await exec( "git status --untracked-files=no --porcelain" );
@@ -1,8 +1,8 @@
 "use strict";
 
 const UglifyJS = require( "uglify-js" );
-const fs = require( "fs" );
-const path = require( "path" );
+const fs = require( "node:fs/promises" );
+const path = require( "node:path" );
 const processForDist = require( "./dist" );
 const getTimestamp = require( "./lib/getTimestamp" );
 
@@ -10,7 +10,7 @@ const rjs = /\.js$/;
 
 module.exports = async function minify( { dir, filename } ) {
 	const filepath = path.join( dir, filename );
-	const contents = await fs.promises.readFile( filepath, "utf8" );
+	const contents = await fs.readFile( filepath, "utf8" );
 	const version = /jQuery JavaScript Library ([^\n]+)/.exec( contents )[ 1 ];
 	const banner = `/*! jQuery ${ version }` +
 		" | (c) OpenJS Foundation and other contributors" +
@@ -63,11 +63,11 @@ module.exports = async function minify( { dir, filename } ) {
 	} );
 
 	await Promise.all( [
-		fs.promises.writeFile(
+		fs.writeFile(
 			path.join( dir, minFilename ),
 			code
 		),
-		fs.promises.writeFile(
+		fs.writeFile(
 			path.join( dir, mapFilename ),
 			map
 		)
@@ -1,8 +1,8 @@
 "use strict";
 
-const fs = require( "fs" );
-const util = require( "util" );
-const exec = util.promisify( require( "child_process" ).exec );
+const fs = require( "node:fs/promises" );
+const util = require( "node:util" );
+const exec = util.promisify( require( "node:child_process" ).exec );
 const verifyNodeVersion = require( "./lib/verifyNodeVersion" );
 
 if ( !verifyNodeVersion() ) {
@@ -17,7 +17,7 @@ if ( !verifyNodeVersion() ) {
 
 async function runTests( { module } ) {
 	const dir = "./test/node_smoke_tests";
-	const files = await fs.promises.readdir( dir, { withFileTypes: true } );
+	const files = await fs.readdir( dir, { withFileTypes: true } );
 	const testFiles = files.filter( ( testFilePath ) => testFilePath.isFile() );
 
 	if ( !testFiles.length ) {
@@ -1,7 +1,7 @@
 "use strict";
 
-const fs = require( "fs" );
-const path = require( "path" );
+const fs = require( "node:fs/promises" );
+const path = require( "node:path" );
 
 const projectDir = path.resolve( __dirname, "..", ".." );
 
@@ -26,15 +26,15 @@ const files = {
 };
 
 async function npmcopy() {
-	await fs.promises.mkdir( path.resolve( projectDir, "external" ), {
+	await fs.mkdir( path.resolve( projectDir, "external" ), {
 		recursive: true
 	} );
 	for ( const [ dest, source ] of Object.entries( files ) ) {
 		const from = path.resolve( projectDir, "node_modules", source );
 		const to = path.resolve( projectDir, "external", dest );
 		const toDir = path.dirname( to );
-		await fs.promises.mkdir( toDir, { recursive: true } );
-		await fs.promises.copyFile( from, to );
+		await fs.mkdir( toDir, { recursive: true } );
+		await fs.copyFile( from, to );
 		console.log( `${ source } → ${ dest }` );
 	}
 }
@@ -1,9 +1,9 @@
 "use strict";
 
-const { spawn } = require( "child_process" );
+const { spawn } = require( "node:child_process" );
 const verifyNodeVersion = require( "./lib/verifyNodeVersion" );
-const path = require( "path" );
-const os = require( "os" );
+const path = require( "node:path" );
+const os = require( "node:os" );
 
 if ( !verifyNodeVersion() ) {
 	return;
@@ -1,10 +1,10 @@
 "use strict";
 
-const fs = require( "fs" );
+const fs = require( "node:fs/promises" );
 
 async function generateFixture() {
-	const fixture = await fs.promises.readFile( "./test/data/qunit-fixture.html", "utf8" );
-	await fs.promises.writeFile(
+	const fixture = await fs.readFile( "./test/data/qunit-fixture.html", "utf8" );
+	await fs.writeFile(
 		"./test/data/qunit-fixture.js",
 		"// Generated by build/tasks/qunit-fixture.js\n" +
 		"QUnit.config.fixture = " +
@@ -1,7 +1,7 @@
 import bodyParser from "body-parser";
 import express from "express";
 import bodyParserErrorHandler from "express-body-parser-error-handler";
-import { readFile } from "fs/promises";
+import { readFile } from "node:fs/promises";
 import mockServer from "../middleware-mockserver.js";
 
 export async function createTestServer( report ) {