Skip to content

Commit

Permalink
init
Browse files Browse the repository at this point in the history
  • Loading branch information
firien committed Aug 2, 2024
1 parent 8b57607 commit 9f1fbcc
Showing 1 changed file with 126 additions and 3 deletions.
129 changes: 126 additions & 3 deletions src/zip.js
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
import { createReadStream, createWriteStream, statSync } from 'node:fs'
import { ReadableStream } from 'node:stream/web'
class Entry {
static #localFileHeaderLength = 30
static #centralDirectoryFileHeaderLength = 46
Expand Down Expand Up @@ -44,12 +46,16 @@ class Entry {
* Generate localFileHeader
* @return {Blob}
*/
localFileHeader () {
localFileHeader ({ stream = false }) {
const buffer = new ArrayBuffer(this.constructor.#localFileHeaderLength)
const dv = new DataView(buffer)
dv.setUint32(0, 0x04034b50, true) // Local file header signature
dv.setUint16(4, 0x1400) // Version needed to extract (minimum)
dv.setUint16(6, 0) // General purpose bit flag
let genFlag = 0b0000100000000000
if (stream) {
genFlag |= 0b1000
}
dv.setUint16(6, genFlag) // General purpose bit flag
this.commonHeaders(dv, 8)
dv.setUint16(28, 0, true) // Extra field length
return new Blob([buffer, this.encodedName])
Expand Down Expand Up @@ -83,7 +89,7 @@ class Entry {
dv.setUint32(offsetStart + 6, this.crc32) // CRC-32 of uncompressed data
dv.setUint32(offsetStart + 10, this.compressedByteSize, true) // Compressed size
dv.setUint32(offsetStart + 14, this.uncompressedByteSize, true) // Uncompressed size
dv.setUint16(offsetStart + 18, this.name.length, true) // File name length
dv.setUint16(offsetStart + 18, this.encodedName.length, true) // File name length
}

get dateWord () {
Expand Down Expand Up @@ -190,3 +196,120 @@ export default class {
return new Blob(blobs, { type: 'application/zip' })
}
}

export class ZipStream {
  /**
   * Incrementally stream a zip archive to a file on disk.
   * @param {string} file - path the archive is written to
   */
  constructor (file) {
    this.zip = createWriteStream(file)
    this.entries = []
  }

  /**
   * Write bytes to the underlying stream; resolves when the chunk has been
   * handed off, rejects on a write error (the original passed `resolve` as
   * the callback, silently swallowing errors).
   * @param {Uint8Array} bytes
   * @return {Promise<void>}
   */
  #write (bytes) {
    return new Promise((resolve, reject) => {
      this.zip.write(bytes, (err) => (err ? reject(err) : resolve()))
    })
  }

  /**
   * Deflate-compress `path` and append it to the archive as a streamed
   * entry: local header with bit 3 set, raw deflate data, then a data
   * descriptor carrying the CRC-32 and sizes.
   * @param {string} path - file to add; also used as the entry name
   * @return {Promise<void>}
   */
  async addFile (path) {
    const file = new Entry()
    file.timeStamp = new Date()
    const stats = statSync(path)
    // Stored byte-swapped on purpose: Entry#commonHeaders writes this word
    // big-endian, so 0x0800 lands on disk as 08 00 = little-endian 8 (deflate).
    file.compressionMethod = 0x0800
    file.externalFileAttributes = 0x0000A481
    const utf8Encode = new TextEncoder()
    file.encodedName = utf8Encode.encode(path)
    // CRC and sizes are unknown until the stream is consumed; the real
    // values are emitted in the data descriptor below.
    file.crc32 = 0
    file.uncompressedByteSize = 0
    file.compressedByteSize = 0
    file.localFileHeaderOffset = this.zip.bytesWritten

    const localFileHeader = file.localFileHeader({ stream: true })
    await this.#write(new Uint8Array(await localFileHeader.arrayBuffer()))

    // Zip entries need raw deflate, but CompressionStream only offers gzip
    // with its CRC trailer — so strip the 10-byte gzip header and the
    // 8-byte trailer, harvesting the CRC-32 from the trailer on the way.
    const headerBytes = 10
    const trailingBytes = 8
    const gzip = new CompressionStream('gzip')
    const compressedStream = ReadableStream.from(createReadStream(path)).pipeThrough(gzip)

    let header
    let previousChunk

    // Each chunk is written one iteration late so the final chunk — the one
    // holding the trailer — can be trimmed before it reaches the archive.
    // NOTE(review): assumes the first chunk is >= 10 bytes and the last
    // chunk is >= 8 bytes; a chunk boundary inside the gzip header or
    // trailer would corrupt the entry — confirm against expected inputs.
    const start = this.zip.bytesWritten
    for await (const chunk of compressedStream) {
      if (previousChunk) {
        await this.#write(previousChunk)
      }
      if (!header) {
        header = chunk.slice(0, headerBytes)
        if (chunk.length > headerBytes) {
          previousChunk = chunk.subarray(headerBytes)
        }
      } else {
        previousChunk = chunk
      }
    }
    file.uncompressedByteSize = stats.size
    const footer = previousChunk.slice(-trailingBytes)
    const footerView = new DataView(footer.buffer)
    // The gzip trailer stores the CRC-32 little-endian; reading it big-endian
    // byte-swaps it, and the matching big-endian writes below (and in
    // Entry#commonHeaders) swap it back, so the bytes on disk are correct.
    file.crc32 = footerView.getUint32(0)
    // Flush the final chunk minus the gzip trailer
    await this.#write(previousChunk.subarray(0, previousChunk.length - trailingBytes))
    file.compressedByteSize = this.zip.bytesWritten - start

    // Data descriptor: the sizes/CRC the streamed local header omitted
    const buffer = new ArrayBuffer(16)
    const dv = new DataView(buffer)
    dv.setUint32(0, 0x08074b50, true) // Data descriptor signature
    dv.setUint32(4, file.crc32) // CRC-32 (big-endian write undoes the big-endian read above)
    dv.setUint32(8, file.compressedByteSize, true) // Compressed size
    dv.setUint32(12, file.uncompressedByteSize, true) // Uncompressed size
    // write the data descriptor
    await this.#write(new Uint8Array(buffer))
    this.entries.push(file)
  }

  /**
   * Write the central directory plus the end-of-central-directory record,
   * then close the underlying stream. Call exactly once, after the final
   * `addFile` has resolved.
   * @return {Promise<void>}
   */
  async close () {
    // The EOCD must point at the FIRST central directory header, so record
    // the offset once, before the loop. (The original assigned it inside the
    // loop, leaving it — and the derived size — describing only the last
    // entry's header, which corrupts archives with more than one entry.)
    this.centralDirectoryOffset = this.zip.bytesWritten
    for (const entry of this.entries) {
      const headerBuffer = await entry.centralDirectoryFileHeader().arrayBuffer()
      await this.#write(new Uint8Array(headerBuffer))
    }
    this.centralDirectorySize = this.zip.bytesWritten - this.centralDirectoryOffset
    await this.#write(new Uint8Array(this.endOfCentralDirectoryRecord()))
    this.zip.close()
  }

  /**
   * Generate the end-of-central-directory record.
   * @return {ArrayBuffer} the 22-byte EOCD (single disk, no archive comment)
   */
  endOfCentralDirectoryRecord () {
    const buffer = new ArrayBuffer(22)
    const dv = new DataView(buffer)
    dv.setUint32(0, 0x06054b50, true) // End of central directory signature
    dv.setUint16(4, 0) // Number of this disk
    dv.setUint16(6, 0) // Disk where central directory starts
    dv.setUint16(8, this.entries.length, true) // Number of central directory records on this disk
    dv.setUint16(10, this.entries.length, true) // Total number of central directory records
    dv.setUint32(12, this.centralDirectorySize, true) // Size of central directory
    dv.setUint32(16, this.centralDirectoryOffset, true) // Offset of start of central directory
    dv.setUint16(20, 0) // Comment length
    return buffer
  }
}

0 comments on commit 9f1fbcc

Please sign in to comment.