test for Archive adding directories
richytong committed Feb 4, 2024
1 parent 2970290 commit f999c04
Showing 1 changed file with 48 additions and 37 deletions.
85 changes: 48 additions & 37 deletions Archive.test.js
@@ -6,42 +6,53 @@ const pathResolve = require('./internal/pathResolve')
 const map = require('rubico/map')
 const reduce = require('rubico/reduce')
 
-module.exports = Test('Archive', Archive)
-.case(async archive => {
-  const pack = await archive.tar(pathResolve(__dirname), {
-    ignore: ['Dockerfile', 'node_modules', '.git', '.nyc_output'],
-  })
-  const extracted = await archive.untar(pack)
-  assert(extracted.size > 0)
-  for (const [path, stream] of extracted) {
-    assert('header' in stream)
-    assert(!path.startsWith('/'))
-    assert.equal(typeof path, 'string')
-    assert.equal(typeof stream[Symbol.asyncIterator], 'function')
-  }
-})
-.case({
-  Dockerfile: 'FROM node:15-alpine'
-}, async archive => {
-  const pack = await archive.tar(pathResolve(__dirname, 'internal'))
-  const extracted = await archive.untar(pack)
-  const dir = await fs.readdir(pathResolve(__dirname, 'internal'))
-  assert.equal(extracted.size, dir.length + 1) // extra Dockerfile
-  assert(extracted.has('Dockerfile'))
-  assert.equal(
-    await reduce((a, b) => a + b, '')(extracted.get('Dockerfile')),
-    'FROM node:15-alpine')
-})
-.case({
-  Dockerfile: 'FROM busybox:1.32'
-}, async archive => {
-  const pack = await archive.tar(`${pathResolve(__dirname, 'internal')}/`, {
-    ignore: ['hashJSON.js'],
-  })
-  const extracted = await archive.untar(pack)
-  assert(extracted.size > 0)
-  assert(extracted.has('Dockerfile'))
-  assert.equal(
-    await reduce((a, b) => a + b, '')(extracted.get('Dockerfile')),
-    'FROM busybox:1.32')
-})
+const test = new Test('Archive', Archive)
+
+.case(async archive => {
+  const pack = await archive.tar(pathResolve(__dirname), {
+    ignore: ['Dockerfile', 'node_modules', '.git', '.nyc_output'],
+  })
+  const extracted = await archive.untar(pack)
+  assert(extracted.size > 0)
+  for (const [path, stream] of extracted) {
+    assert('header' in stream)
+    assert(!path.startsWith('/'))
+    assert.equal(typeof path, 'string')
+    assert.equal(typeof stream[Symbol.asyncIterator], 'function')
+  }
+})
+
+.case({
+  Dockerfile: 'FROM node:15-alpine'
+}, async archive => {
+  const pack = await archive.tar(pathResolve(__dirname, 'internal'))
+  const extracted = await archive.untar(pack)
+  const dir = await fs.readdir(pathResolve(__dirname, 'internal'))
+  assert.equal(extracted.size, dir.length + 1) // extra Dockerfile
+  assert(extracted.has('Dockerfile'))
+  assert.equal(
+    await reduce((a, b) => a + b, '')(extracted.get('Dockerfile')),
+    'FROM node:15-alpine')
+})
+
+.case({
+  Dockerfile: 'FROM busybox:1.32',
+  '.aws/credentials': '[claimyr]\naccessKeyId\nsecretAccessKey',
+}, async archive => {
+  const pack = await archive.tar(`${pathResolve(__dirname, 'internal')}/`, {
+    ignore: ['hashJSON.js'],
+  })
+  const extracted = await archive.untar(pack)
+  assert(extracted.size > 0)
+  assert(extracted.has('Dockerfile'))
+  assert(extracted.has('.aws/credentials'))
+  assert.equal(
+    await reduce((a, b) => a + b, '')(extracted.get('Dockerfile')),
+    'FROM busybox:1.32')
+})
+
+if (process.argv[1] == __filename) {
+  test()
+}
+
+module.exports = test
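
Note (not part of the commit): the new cases exercise Archive's tar/untar round trip on directories. Below is a minimal usage sketch of that API outside the test harness. It assumes, as the cases above suggest, that the Archive constructor accepts an optional map of extra entries (path -> contents), that tar(path, { ignore }) resolves to a pack, and that untar(pack) resolves to a Map of relative paths to async-iterable entry streams.

// usage-sketch.js -- illustrative only, based on the API exercised in Archive.test.js
const Archive = require('./Archive')

// assumed: extra entries passed to the constructor are bundled into every tarball
const archive = new Archive({
  Dockerfile: 'FROM node:15-alpine',
})

const main = async function () {
  // pack this directory, skipping paths that should not be archived
  const pack = await archive.tar(__dirname, {
    ignore: ['node_modules', '.git'],
  })

  // unpack back into a Map of relative path -> entry stream
  const extracted = await archive.untar(pack)
  for (const [path, stream] of extracted) {
    let content = ''
    for await (const chunk of stream) {
      content += chunk
    }
    console.log(path, `${content.length} chars`)
  }
}

main()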
