
replace.js (5.6 KB)

'use strict'

// tar -r
const hlo = require('./high-level-opt.js')
const Pack = require('./pack.js')
const fs = require('fs')
const fsm = require('fs-minipass')
const t = require('./list.js')
const path = require('path')

// starting at the head of the file, read a Header
// If the checksum is invalid, that's our position to start writing
// If it is valid, jump forward by the specified size (round up to 512)
// and try again.
// Write the new Pack stream starting there.

const Header = require('./header.js')

module.exports = (opt_, files, cb) => {
  const opt = hlo(opt_)

  if (!opt.file) {
    throw new TypeError('file is required')
  }

  if (opt.gzip || opt.brotli || opt.file.endsWith('.br') || opt.file.endsWith('.tbr')) {
    throw new TypeError('cannot append to compressed archives')
  }

  if (!files || !Array.isArray(files) || !files.length) {
    throw new TypeError('no files or directories specified')
  }

  files = Array.from(files)

  return opt.sync ? replaceSync(opt, files)
    : replace(opt, files, cb)
}

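// Synchronous variant: open the archive (creating it if absent), scan
// the existing entries one 512-byte header at a time to find where valid
// data ends, then stream the new Pack output into the file there.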
const replaceSync = (opt, files) => {
  const p = new Pack.Sync(opt)

  let threw = true
  let fd
  let position

  try {
    try {
      fd = fs.openSync(opt.file, 'r+')
    } catch (er) {
      if (er.code === 'ENOENT') {
        fd = fs.openSync(opt.file, 'w+')
      } else {
        throw er
      }
    }

    const st = fs.fstatSync(fd)
    const headBuf = Buffer.alloc(512)

    POSITION: for (position = 0; position < st.size; position += 512) {
      for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) {
        bytes = fs.readSync(
          fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos
        )

        // 0x1f 0x8b is the gzip magic number; refuse to append to gzipped data
        if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b) {
          throw new Error('cannot append to compressed archives')
        }

        if (!bytes) {
          break POSITION
        }
      }

      const h = new Header(headBuf)
      if (!h.cksumValid) {
        break
      }

      const entryBlockSize = 512 * Math.ceil(h.size / 512)
      if (position + entryBlockSize + 512 > st.size) {
        break
      }

      // the 512 for the header we just parsed will be added as well
      // also jump ahead all the blocks for the body
      position += entryBlockSize
      if (opt.mtimeCache) {
        opt.mtimeCache.set(h.path, h.mtime)
      }
    }

    threw = false
    streamSync(opt, p, position, fd, files)
  } finally {
    if (threw) {
      try {
        fs.closeSync(fd)
      } catch (er) {}
    }
  }
}

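// Attach the pack output to the already-open fd at the computed offset,
// so the new entries overwrite the old end-of-archive blocks.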
const streamSync = (opt, p, position, fd, files) => {
  const stream = new fsm.WriteStreamSync(opt.file, {
    fd: fd,
    start: position,
  })
  p.pipe(stream)
  addFilesSync(p, files)
}

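// Async variant: the same header scan as replaceSync, driven by
// callback-based fs.read calls and wrapped in a Promise.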
const replace = (opt, files, cb) => {
  files = Array.from(files)
  const p = new Pack(opt)

  const getPos = (fd, size, cb_) => {
    const cb = (er, pos) => {
      if (er) {
        fs.close(fd, _ => cb_(er))
      } else {
        cb_(null, pos)
      }
    }

    let position = 0
    if (size === 0) {
      return cb(null, 0)
    }

    let bufPos = 0
    const headBuf = Buffer.alloc(512)
    const onread = (er, bytes) => {
      if (er) {
        return cb(er)
      }
      bufPos += bytes
      if (bufPos < 512 && bytes) {
        return fs.read(
          fd, headBuf, bufPos, headBuf.length - bufPos,
          position + bufPos, onread
        )
      }

      if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b) {
        return cb(new Error('cannot append to compressed archives'))
      }

      // truncated header
      if (bufPos < 512) {
        return cb(null, position)
      }

      const h = new Header(headBuf)
      if (!h.cksumValid) {
        return cb(null, position)
      }

      const entryBlockSize = 512 * Math.ceil(h.size / 512)
      if (position + entryBlockSize + 512 > size) {
        return cb(null, position)
      }

      position += entryBlockSize + 512
      if (position >= size) {
        return cb(null, position)
      }

      if (opt.mtimeCache) {
        opt.mtimeCache.set(h.path, h.mtime)
      }
      bufPos = 0
      fs.read(fd, headBuf, 0, 512, position, onread)
    }
    fs.read(fd, headBuf, 0, 512, position, onread)
  }

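  // Open r+ first so existing bytes are preserved; if the file does not
  // exist yet, retry with w+ to create it, mirroring the sync path.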
  const promise = new Promise((resolve, reject) => {
    p.on('error', reject)
    let flag = 'r+'
    const onopen = (er, fd) => {
      if (er && er.code === 'ENOENT' && flag === 'r+') {
        flag = 'w+'
        return fs.open(opt.file, flag, onopen)
      }

      if (er) {
        return reject(er)
      }

      fs.fstat(fd, (er, st) => {
        if (er) {
          return fs.close(fd, () => reject(er))
        }

        getPos(fd, st.size, (er, position) => {
          if (er) {
            return reject(er)
          }
          const stream = new fsm.WriteStream(opt.file, {
            fd: fd,
            start: position,
          })
          p.pipe(stream)
          stream.on('error', reject)
          stream.on('close', resolve)
          addFilesAsync(p, files)
        })
      })
    }
    fs.open(opt.file, flag, onopen)
  })
  return cb ? promise.then(cb, cb) : promise
}

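// An argument starting with '@' names another tar archive: its entries
// are read via the list parser and re-added to this pack one by one.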
const addFilesSync = (p, files) => {
  files.forEach(file => {
    if (file.charAt(0) === '@') {
      t({
        file: path.resolve(p.cwd, file.slice(1)),
        sync: true,
        noResume: true,
        onentry: entry => p.add(entry),
      })
    } else {
      p.add(file)
    }
  })
  p.end()
}

const addFilesAsync = (p, files) => {
  while (files.length) {
    const file = files.shift()
    if (file.charAt(0) === '@') {
      return t({
        file: path.resolve(p.cwd, file.slice(1)),
        noResume: true,
        onentry: entry => p.add(entry),
      }).then(_ => addFilesAsync(p, files))
    } else {
      p.add(file)
    }
  }
  p.end()
}
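
// Usage sketch (assumption: node-tar wires this module up as
// `tar.replace`, a.k.a. `tar.r`, per the `tar -r` note at the top):
//
//   const tar = require('tar')
//
//   // Append entries to archive.tar, creating it if it does not exist.
//   // With no callback, a promise is returned.
//   tar.replace({ file: 'archive.tar' }, ['a.txt', 'b.txt'])
//     .then(() => console.log('entries appended'))
//
//   // Synchronous form:
//   tar.replace({ file: 'archive.tar', sync: true }, ['c.txt'])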