- 'use strict'
-
- // XXX: This shares a lot in common with extract.js
- // maybe some DRY opportunity here?
-
- // tar -t
- const hlo = require('./high-level-opt.js')
- const Parser = require('./parse.js')
- const fs = require('fs')
- const fsm = require('fs-minipass')
- const path = require('path')
- const stripSlash = require('./strip-trailing-slashes.js')
-
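- // main export: normalize the flexible (opt, files, cb) argument shapes,
- // then dispatch to the sync, promise/callback, or plain streaming variant
- // below. Rough usage, assuming the usual tar.t API:
- //   tar.t({ file: 'archive.tgz', onentry: e => console.log(e.path) })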
- module.exports = (opt_, files, cb) => {
- if (typeof opt_ === 'function') {
- cb = opt_, files = null, opt_ = {}
- } else if (Array.isArray(opt_)) {
- files = opt_, opt_ = {}
- }
-
- if (typeof files === 'function') {
- cb = files, files = null
- }
-
- if (!files) {
- files = []
- } else {
- files = Array.from(files)
- }
-
- const opt = hlo(opt_)
-
- if (opt.sync && typeof cb === 'function') {
- throw new TypeError('callback not supported for sync tar functions')
- }
-
- if (!opt.file && typeof cb === 'function') {
- throw new TypeError('callback only supported with file option')
- }
-
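-   // limit the listing to the requested paths (and their children)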
- if (files.length) {
- filesFilter(opt, files)
- }
-
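-   // unless the caller opts out, auto-resume each entry so the parse
-   // stream keeps flowing even when onentry never consumes the data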
- if (!opt.noResume) {
- onentryFunction(opt)
- }
-
- return opt.file && opt.sync ? listFileSync(opt)
- : opt.file ? listFile(opt, cb)
- : list(opt)
- }
-
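- // wrap any user-supplied onentry handler so that every entry is resumed
- // after it is reported; an unconsumed entry would otherwise stall the parser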
- const onentryFunction = opt => {
- const onentry = opt.onentry
- opt.onentry = onentry ? e => {
- onentry(e)
- e.resume()
- } : e => e.resume()
- }
-
- // construct a filter that limits the file entries listed
- // include child entries if a dir is included
- const filesFilter = (opt, files) => {
- const map = new Map(files.map(f => [stripSlash(f), true]))
- const filter = opt.filter
-
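-   // walk up from the entry's path toward its root; an entry matches if it
-   // or any parent directory was requested. results are memoized in the map.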
- const mapHas = (file, r) => {
- const root = r || path.parse(file).root || '.'
- const ret = file === root ? false
- : map.has(file) ? map.get(file)
- : mapHas(path.dirname(file), root)
-
- map.set(file, ret)
- return ret
- }
-
- opt.filter = filter
- ? (file, entry) => filter(file, entry) && mapHas(stripSlash(file))
- : file => mapHas(stripSlash(file))
- }
-
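- // synchronous file listing: slurp small archives with a single readFileSync,
- // otherwise read the file in fixed-size chunks and feed them to the parser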
- const listFileSync = opt => {
- const p = list(opt)
- const file = opt.file
-   let fd
- try {
- const stat = fs.statSync(file)
- const readSize = opt.maxReadSize || 16 * 1024 * 1024
- if (stat.size < readSize) {
- p.end(fs.readFileSync(file))
- } else {
- let pos = 0
- const buf = Buffer.allocUnsafe(readSize)
- fd = fs.openSync(file, 'r')
- while (pos < stat.size) {
- const bytesRead = fs.readSync(fd, buf, 0, readSize, pos)
- pos += bytesRead
- p.write(buf.slice(0, bytesRead))
- }
- p.end()
- }
-   } finally {
-     // always close the fd if one was opened, so that listing a large
-     // archive does not leak a file descriptor on the success path
-     if (typeof fd === 'number') {
-       try {
-         fs.closeSync(fd)
-       } catch (er) {}
-     }
-   }
- }
-
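- // asynchronous file listing: stat the file, stream it through the parser,
- // and resolve the returned promise (or invoke the callback) when parsing ends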
- const listFile = (opt, cb) => {
- const parse = new Parser(opt)
- const readSize = opt.maxReadSize || 16 * 1024 * 1024
-
- const file = opt.file
- const p = new Promise((resolve, reject) => {
- parse.on('error', reject)
- parse.on('end', resolve)
-
- fs.stat(file, (er, stat) => {
- if (er) {
- reject(er)
- } else {
- const stream = new fsm.ReadStream(file, {
- readSize: readSize,
- size: stat.size,
- })
- stream.on('error', reject)
- stream.pipe(parse)
- }
- })
- })
- return cb ? p.then(cb, cb) : p
- }
-
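- // with no file option, just return a Parser for the caller to write
- // archive data into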
- const list = opt => new Parser(opt)