'use strict'
const events = require('events')
const contentPath = require('./path')
const fs = require('fs/promises')
const { moveFile } = require('@npmcli/fs')
const { Minipass } = require('minipass')
const Pipeline = require('minipass-pipeline')
const Flush = require('minipass-flush')
const path = require('path')
const ssri = require('ssri')
const uniqueFilename = require('unique-filename')
const fsm = require('fs-minipass')
module.exports = write
// Cache of in-progress move operations, so concurrent writes to the same
// destination aren't duplicated
const moveOperations = new Map()
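
// Writes an in-memory buffer into the cache: validates the expected size
// and integrity when provided, hashes the data, writes it to a temp file,
// and moves it into its content-addressed location.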
async function write (cache, data, opts = {}) {
const { algorithms, size, integrity } = opts
if (typeof size === 'number' && data.length !== size) {
throw sizeError(size, data.length)
}
const sri = ssri.fromData(data, algorithms ? { algorithms } : {})
if (integrity && !ssri.checkData(data, integrity, opts)) {
throw checksumError(integrity, sri)
}
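  // write one content file for every algorithm the data was hashed with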
for (const algo in sri) {
const tmp = await makeTmp(cache, opts)
const hash = sri[algo].toString()
try {
await fs.writeFile(tmp.target, data, { flag: 'wx' })
await moveToDestination(tmp, cache, hash, opts)
} finally {
if (!tmp.moved) {
await fs.rm(tmp.target, { recursive: true, force: true })
}
}
}
return { integrity: sri, size: data.length }
}
module.exports.stream = writeStream
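
// Example usage (illustrative sketch; the cache path, data, and
// `someReadable` below are placeholders, not part of this module):
//
//   const { integrity, size } = await write('/path/to/cache', Buffer.from('data'))
//
//   const stream = write.stream('/path/to/cache', { algorithms: ['sha512'] })
//   stream.on('integrity', i => console.log('stored as', i.toString()))
//   someReadable.pipe(stream)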
// Writes are proxied to 'inputStream', which is handed off to the
// content-handling promise; 'end' is deferred until the content has been
// fully handled.
class CacacheWriteStream extends Flush {
constructor (cache, opts) {
super()
this.opts = opts
this.cache = cache
this.inputStream = new Minipass()
this.inputStream.on('error', er => this.emit('error', er))
this.inputStream.on('drain', () => this.emit('drain'))
this.handleContentP = null
}
write (chunk, encoding, cb) {
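    // lazily start handling content on the first write; flush() relies on
    // handleContentP being unset to detect an empty stream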
if (!this.handleContentP) {
this.handleContentP = handleContent(
this.inputStream,
this.cache,
this.opts
)
this.handleContentP.catch(error => this.emit('error', error))
}
return this.inputStream.write(chunk, encoding, cb)
}
flush (cb) {
this.inputStream.end(() => {
if (!this.handleContentP) {
const e = new Error('Cache input stream was empty')
e.code = 'ENODATA'
        // empty streams emit 'end' right away, so defer the error one tick
        // by rejecting a promise with it.
return Promise.reject(e).catch(cb)
}
// eslint-disable-next-line promise/catch-or-return
this.handleContentP.then(
(res) => {
res.integrity && this.emit('integrity', res.integrity)
// eslint-disable-next-line promise/always-return
res.size !== null && this.emit('size', res.size)
cb()
},
(er) => cb(er)
)
})
}
}
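
// Returns a writable stream that hashes and stores whatever is written to
// it, emitting 'integrity' and 'size' once the content has been committed.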
function writeStream (cache, opts = {}) {
return new CacacheWriteStream(cache, opts)
}
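
// Streams the input into a temp file, then moves it into its
// content-addressed location; the temp file is cleaned up on failure.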
async function handleContent (inputStream, cache, opts) {
const tmp = await makeTmp(cache, opts)
try {
const res = await pipeToTmp(inputStream, cache, tmp.target, opts)
await moveToDestination(
tmp,
cache,
res.integrity,
opts
)
return res
} finally {
if (!tmp.moved) {
await fs.rm(tmp.target, { recursive: true, force: true })
}
}
}
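
// Pipes the input stream into the temp file. When the caller provides an
// integrityEmitter that already computes integrity and size, its events are
// reused instead of hashing the data a second time.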
async function pipeToTmp (inputStream, cache, tmpTarget, opts) {
const outStream = new fsm.WriteStream(tmpTarget, {
flags: 'wx',
})
if (opts.integrityEmitter) {
// we need to create these all simultaneously since they can fire in any order
const [integrity, size] = await Promise.all([
events.once(opts.integrityEmitter, 'integrity').then(res => res[0]),
events.once(opts.integrityEmitter, 'size').then(res => res[0]),
new Pipeline(inputStream, outStream).promise(),
])
return { integrity, size }
}
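  // otherwise, hash the content ourselves while it is written to disk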
let integrity
let size
const hashStream = ssri.integrityStream({
integrity: opts.integrity,
algorithms: opts.algorithms,
size: opts.size,
})
hashStream.on('integrity', i => {
integrity = i
})
hashStream.on('size', s => {
size = s
})
const pipeline = new Pipeline(inputStream, hashStream, outStream)
await pipeline.promise()
return { integrity, size }
}
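
// Creates a unique temp filename under <cache>/tmp (creating the directory
// if needed) and tracks whether the file has been moved into place yet.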
async function makeTmp (cache, opts) {
const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
await fs.mkdir(path.dirname(tmpTarget), { recursive: true })
return {
target: tmpTarget,
moved: false,
}
}
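
// Moves a temp file into its content-addressed destination. Concurrent
// moves to the same destination share a single in-flight operation, and a
// destination that already exists is treated as success.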
async function moveToDestination (tmp, cache, sri, opts) {
const destination = contentPath(cache, sri)
const destDir = path.dirname(destination)
if (moveOperations.has(destination)) {
return moveOperations.get(destination)
}
moveOperations.set(
destination,
fs.mkdir(destDir, { recursive: true })
.then(async () => {
await moveFile(tmp.target, destination, { overwrite: false })
tmp.moved = true
return tmp.moved
})
.catch(err => {
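        // moveFile rejects when the destination already exists (overwrite is
        // false); that means the content is already in the cache, so swallow
        // that error and re-throw anything else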
if (!err.message.startsWith('The destination file exists')) {
throw Object.assign(err, { code: 'EEXIST' })
}
}).finally(() => {
moveOperations.delete(destination)
})
)
return moveOperations.get(destination)
}
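
// Builds the EBADSIZE error thrown when the declared size does not match
// the number of bytes actually received.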
function sizeError (expected, found) {
/* eslint-disable-next-line max-len */
const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
err.expected = expected
err.found = found
err.code = 'EBADSIZE'
return err
}
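
// Builds the EINTEGRITY error thrown when the computed digest does not
// match the expected integrity value.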
function checksumError (expected, found) {
const err = new Error(`Integrity check failed:
Wanted: ${expected}
Found: ${found}`)
err.code = 'EINTEGRITY'
err.expected = expected
err.found = found
return err
}