.editorconfig: two spaces for .js files

Most other scripting languages we use in tree have the same.
This commit is contained in:
Wolfgang Walther 2025-06-21 21:29:50 +02:00
parent d543504ef8
commit 3d4c1c0d26
No known key found for this signature in database
GPG Key ID: B39893FA5F65CAE1
5 changed files with 217 additions and 221 deletions

View File

@@ -24,7 +24,7 @@ insert_final_newline = false
# see https://nixos.org/nixpkgs/manual/#chap-conventions # see https://nixos.org/nixpkgs/manual/#chap-conventions
# Match json/lockfiles/markdown/nix/perl/python/ruby/shell/docbook files, set indent to spaces # Match json/lockfiles/markdown/nix/perl/python/ruby/shell/docbook files, set indent to spaces
[*.{bash,json,lock,md,nix,pl,pm,py,rb,sh,xml}] [*.{bash,js,json,lock,md,nix,pl,pm,py,rb,sh,xml}]
indent_style = space indent_style = space
# Match docbook files, set indent width of one # Match docbook files, set indent width of one
@ -32,7 +32,7 @@ indent_style = space
indent_size = 1 indent_size = 1
# Match json/lockfiles/markdown/nix/ruby files, set indent width of two # Match json/lockfiles/markdown/nix/ruby files, set indent width of two
[*.{json,lock,md,nix,rb}] [*.{js,json,lock,md,nix,rb}]
indent_size = 2 indent_size = 2
# Match all the Bash code in Nix files, set indent width of two # Match all the Bash code in Nix files, set indent width of two

View File

@@ -1,4 +0,0 @@
# TODO: Move to top-level via staging PR
[*.js]
indent_style = space
indent_size = 2

View File

@ -3,17 +3,17 @@ const path = require('path')
// This has to match the logic in pkgs/development/tools/yarn2nix-moretea/yarn2nix/lib/urlToName.js // This has to match the logic in pkgs/development/tools/yarn2nix-moretea/yarn2nix/lib/urlToName.js
// so that fixup_yarn_lock produces the same paths // so that fixup_yarn_lock produces the same paths
// Derive the local store file name for a resolved dependency URL.
// This has to match the logic in pkgs/development/tools/yarn2nix-moretea/yarn2nix/lib/urlToName.js
// so that fixup_yarn_lock produces the same paths.
const urlToName = url => {
  // Local paths are used verbatim.
  if (url.startsWith('file:')) return url

  const isCodeloadGitTarball =
    url.startsWith('https://codeload.github.com/') && url.includes('/tar.gz/')
  // Git-ish sources: just keep the last path component.
  if (url.startsWith('git+') || isCodeloadGitTarball) return path.basename(url)

  // Registry tarballs: strip the host to avoid long directory names, then
  // replace @ / % : - characters (awkward in file names) with underscores.
  const withoutHost = url.replace(/https:\/\/(.)*(.com)\//g, '')
  return withoutHost.replace(/[@/%:-]/g, '_')
}
module.exports = { urlToName }; module.exports = { urlToName };

View File

@@ -7,79 +7,79 @@ const lockfile = require('./yarnpkg-lockfile.js')
const { urlToName } = require('./common.js') const { urlToName } = require('./common.js')
// Rewrite every "resolved" URL in the given yarn.lock contents to the local
// file name produced by urlToName, so the lockfile matches the prefetched
// store layout. Git dependencies lose their integrity field (the archive is
// repacked locally, so the original hash cannot match). Returns the fixed
// lockfile data object.
const fixupYarnLock = async (lockContents, verbose) => {
  const lockData = lockfile.parse(lockContents)

  const fixedData = Object.fromEntries(
    Object.entries(lockData.object)
      .map(([dep, pkg]) => {
        if (pkg.resolved === undefined) {
          console.warn(`no resolved URL for package ${dep}`)
          // Keys look like "name@spec" or "@scope/name@spec"; take the text
          // after the LAST '@' so scoped packages are handled too. The
          // previous `dep.split("@", 2)[1]` returned the scope segment for
          // "@scope/name@file:..." keys and never rewrote them.
          const maybeFile = dep.slice(dep.lastIndexOf('@') + 1)
          if (maybeFile.startsWith('file:')) {
            console.log(`Rewriting URL for local file dependency ${dep}`)
            pkg.resolved = maybeFile
          }
          return [dep, pkg]
        }

        const [ url, hash ] = pkg.resolved.split('#', 2)
        if (hash || url.startsWith('https://codeload.github.com/')) {
          if (verbose) console.log(`Removing integrity for git dependency ${dep}`)
          delete pkg.integrity
        }

        if (verbose) console.log(`Rewriting URL ${url} for dependency ${dep}`)
        pkg.resolved = urlToName(url)
        if (hash)
          pkg.resolved += `#${hash}`

        return [dep, pkg]
      })
  )

  if (verbose) console.log('Done')
  return fixedData
}
// Print CLI usage to stderr and terminate with a non-zero exit code.
// Never returns.
const showUsage = async () => {
  process.stderr.write(`
syntax: fixup-yarn-lock [path to yarn.lock] [options]

Options:
  -h --help         Show this help
  -v --verbose      Verbose output
`)
  process.exit(1)
}
// CLI entry point: parse arguments, read the lockfile, fix it up with
// fixupYarnLock and write the result back in place.
const main = async () => {
  const args = process.argv.slice(2)
  let arg
  let lockFile
  let verbose
  // args.shift() returns undefined when exhausted, ending the loop.
  while ((arg = args.shift())) {
    if (arg == '--verbose' || arg == '-v') {
      verbose = true
    } else if (arg == '--help' || arg == '-h') {
      showUsage()
    } else if (!lockFile) {
      lockFile = arg
    } else {
      showUsage()
    }
  }

  let lockContents
  try {
    lockContents = await fs.promises.readFile(lockFile || 'yarn.lock', 'utf-8')
  } catch {
    // Missing/unreadable lockfile: report usage and exit.
    showUsage()
  }

  const fixedData = await fixupYarnLock(lockContents, verbose)
  await fs.promises.writeFile(lockFile || 'yarn.lock', lockfile.stringify(fixedData))
}

main()
  .catch(e => {
    console.error(e)
    process.exit(1)
  })

View File

@@ -15,155 +15,155 @@ const { urlToName } = require('./common.js')
const execFile = promisify(child_process.execFile)

/**
 * Run a command and resolve with its { stdout, stderr }.
 *
 * promisify(execFile) already rejects with an Error (carrying the exit code
 * and captured stderr) when the command fails, so the former
 * `if (res.error) throw new Error(res.stderr)` check was unreachable dead
 * code and has been removed; behavior is unchanged.
 */
const exec = async (...args) => execFile(...args)
// Download `url` to `fileName` over HTTPS, following up to 10 redirects,
// and verify the stream against `expectedHash` (hex digest of `hashType`,
// sha1 by default). Resolves on success; rejects on hash mismatch, network
// error or a redirect loop. If no expected hash is given, only warns.
const downloadFileHttps = (fileName, url, expectedHash, hashType = 'sha1') => {
  return new Promise((resolve, reject) => {
    const get = (target, redirects = 0) => https.get(target, (res) => {
      if (redirects > 10) {
        // Fix: reject with an Error, not a bare string.
        reject(new Error('Too many redirects!'))
        return
      }
      // Fix: also follow 307/308, and resolve a possibly-relative Location
      // header against the current URL (RFC 7231 allows relative refs).
      if ([301, 302, 307, 308].includes(res.statusCode)) {
        return get(new URL(res.headers.location, target).toString(), redirects + 1)
      }
      const file = fs.createWriteStream(fileName)
      const hash = crypto.createHash(hashType)
      // Stream the body simultaneously to disk and into the hash.
      res.pipe(file)
      res.pipe(hash).setEncoding('hex')
      res.on('end', () => {
        file.close()
        const h = hash.read()
        if (expectedHash === undefined){
          console.log(`Warning: lockfile url ${url} doesn't end in "#<hash>" to validate against. Downloaded file had hash ${h}.`);
        } else if (h != expectedHash) return reject(new Error(`hash mismatch, expected ${expectedHash}, got ${h} for ${url}`))
        resolve()
      })
      res.on('error', e => reject(e))
    })
    get(url)
  })
}
// Fetch a git revision via nix-prefetch-git and pack it into `fileName`
// as a deterministic GNU tar archive (fixed owner/group/mtime/sort order).
const downloadGit = async (fileName, url, rev) => {
  const tmpDir = fileName + '.tmp'

  await exec('nix-prefetch-git', [
    '--out', tmpDir,
    '--url', url,
    '--rev', rev,
    '--builder'
  ])

  await exec('tar', [
    // hopefully make it reproducible across runs and systems
    '--owner=0', '--group=0', '--numeric-owner', '--format=gnu', '--sort=name', '--mtime=@1',
    // Set u+w because tar-fs can't unpack archives with read-only dirs: https://github.com/mafintosh/tar-fs/issues/79
    '--mode', 'u+w',
    '-C', tmpDir,
    '-cf', fileName, '.'
  ])

  await exec('rm', [ '-rf', tmpDir ])
}
// Heuristic port of yarn's git-URL detection:
// https://github.com/yarnpkg/yarn/blob/3119382885ea373d3c13d6a846de743eca8c914b/src/resolvers/exotics/git-resolver.js#L15-L47
const isGitUrl = pattern => {
  const gitHosts = ['github.com', 'gitlab.com', 'bitbucket.com', 'bitbucket.org']
  const gitSchemes = [/^git:/, /^git\+.+:/, /^ssh:/, /^https?:.+\.git$/, /^https?:.+\.git#.+/]

  if (gitSchemes.some(re => re.test(pattern))) return true

  const { hostname, path } = url.parse(pattern)
  if (!hostname || !path || gitHosts.indexOf(hostname) < 0) return false

  // Only a bare "host/owner/repo" URL counts as a git repo; deeper paths
  // (e.g. facebook/flow/archive/v1.0.0.tar.gz) are plain file downloads.
  return path.split('/').filter(p => !!p).length === 2
}
// Dispatch a single lockfile entry to the right download strategy:
// github codeload tarball, github archive tarball, generic git URL, or a
// plain https fetch (validated via the entry's integrity hash when present).
// Local @file:/@link: entries are skipped with a notice.
const downloadPkg = (pkg, verbose) => {
  for (const marker of ['@file:', '@link:']) {
    const split = pkg.key.split(marker)
    if (split.length == 2) {
      console.info(`ignoring lockfile entry "${split[0]}" which points at path "${split[1]}"`)
      return
    } else if (split.length > 2) {
      throw new Error(`The lockfile entry key "${pkg.key}" contains "${marker}" more than once. Processing is not implemented.`)
    }
  }

  if (pkg.resolved === undefined) {
    throw new Error(`The lockfile entry with key "${pkg.key}" cannot be downloaded because it is missing the "resolved" attribute, which should contain the URL to download from. The lockfile might be invalid.`)
  }

  const [ url, hash ] = pkg.resolved.split('#')
  if (verbose) console.log('downloading ' + url)
  const fileName = urlToName(url)
  const parts = url.split('/')

  const isCodeloadTarball =
    url.startsWith('https://codeload.github.com/') && url.includes('/tar.gz/')
  const isGithubArchive =
    url.startsWith('https://github.com/') && url.endsWith('.tar.gz') &&
    (
      parts.length <= 5 ||    // https://github.com/owner/repo.tgz#feedface...
      parts[5] == "archive"   // https://github.com/owner/repo/archive/refs/tags/v0.220.1.tar.gz
    )

  if (isCodeloadTarball) {
    return downloadGit(fileName, `https://github.com/${parts[3]}/${parts[4]}.git`, parts[parts.length - 1])
  }
  if (isGithubArchive) {
    return downloadGit(fileName, `https://github.com/${parts[3]}/${parts[4]}.git`, parts[parts.length - 1].replace(/.tar.gz$/, ''))
  }
  if (isGitUrl(url)) {
    return downloadGit(fileName, url.replace(/^git\+/, ''), hash)
  }
  if (url.startsWith('https://')) {
    if (typeof pkg.integrity === 'string' || pkg.integrity instanceof String) {
      // integrity is "<type>-<base64 digest>"; downloadFileHttps wants hex.
      const [ type, checksum ] = pkg.integrity.split('-')
      return downloadFileHttps(fileName, url, Buffer.from(checksum, 'base64').toString('hex'), type)
    }
    return downloadFileHttps(fileName, url, hash)
  }
  if (url.startsWith('file:')) {
    console.warn(`ignoring unsupported file:path url "${url}"`)
    return
  }
  throw new Error('don\'t know how to download "' + url + '"')
}
// Run `tasks` (an array of zero-argument async functions) with at most
// `concurrency` tasks in flight at a time. The shared `tasks` array acts as
// the work queue; each worker pulls from its front until it is empty.
// `concurrency` generalizes the previously hard-coded worker count; the
// default of 4 keeps existing callers unchanged.
const performParallel = (tasks, concurrency = 4) => {
  const worker = async () => {
    while (tasks.length > 0) await tasks.shift()()
  }

  const workers = []
  for (let i = 0; i < concurrency; i++) {
    workers.push(worker())
  }
  return Promise.all(workers)
}
// Deduplicate `arr` by the key produced by `callback`, keeping the LAST
// element seen for each key; output order follows the first occurrence of
// each key. This could be implemented using
// [`Map.groupBy`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map/groupBy),
// but that method is only supported starting with Node 21.
const uniqueBy = (arr, callback) => {
  const seen = new Map()
  arr.forEach(elem => seen.set(callback(elem), elem))
  return [...seen.values()]
}
// Download every distinct resolved URL from the given yarn.lock contents
// into the current directory (deduplicated by resolved URL, downloads run
// in parallel), then write the unmodified lockfile alongside the downloads.
const prefetchYarnDeps = async (lockContents, verbose) => {
  const lockData = lockfile.parse(lockContents)
  const entries = uniqueBy(Object.entries(lockData.object), ([, value]) => value.resolved)
  await performParallel(
    entries.map(([key, value]) => () => downloadPkg({ key, ...value }, verbose))
  )
  await fs.promises.writeFile('yarn.lock', lockContents)
  if (verbose) console.log('Done')
}
// Print CLI usage to stderr and terminate with a non-zero exit code.
// Never returns.
const showUsage = async () => {
  process.stderr.write(`
syntax: prefetch-yarn-deps [path to yarn.lock] [options]

Options:
  -h --help         Show this help
  -v --verbose      Verbose output
  --builder         Only perform the download to current directory, then exit
`)
  process.exit(1)
}
// CLI entry point: parse arguments, then either run the prefetch directly in
// the current directory (--builder, used inside the Nix build) or in a temp
// directory whose recursive nix-hash is printed (the default, used to
// compute the fixed-output hash).
const main = async () => {
  const args = process.argv.slice(2)
  let arg
  let lockFile
  let verbose
  let isBuilder
  // args.shift() returns undefined when exhausted, ending the loop.
  while ((arg = args.shift())) {
    if (arg == '--builder') {
      isBuilder = true
    } else if (arg == '--verbose' || arg == '-v') {
      verbose = true
    } else if (arg == '--help' || arg == '-h') {
      showUsage()
    } else if (!lockFile) {
      lockFile = arg
    } else {
      showUsage()
    }
  }

  let lockContents
  try {
    lockContents = await fs.promises.readFile(lockFile || 'yarn.lock', 'utf-8')
  } catch {
    // Missing/unreadable lockfile: report usage and exit.
    showUsage()
  }

  if (isBuilder) {
    await prefetchYarnDeps(lockContents, verbose)
    return
  }

  const { stdout: tmpDir } = await exec('mktemp', [ '-d' ])
  try {
    process.chdir(tmpDir.trim())
    await prefetchYarnDeps(lockContents, verbose)
    const { stdout: hash } = await exec('nix-hash', [ '--type', 'sha256', '--base32', tmpDir.trim() ])
    console.log(hash)
  } finally {
    // Always clean up the temp dir, even if prefetching failed.
    await exec('rm', [ '-rf', tmpDir.trim() ])
  }
}

main()
  .catch(e => {
    console.error(e)
    process.exit(1)
  })