'use strict'
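
// Index of cache entries: maps arbitrary keys to content addresses (ssri
// integrity strings). Keys are hashed into bucket files under
// `index-v<cache-version>`; each bucket is an append-only log of
// hash-prefixed JSON lines, and reads keep the last valid entry per key.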

const BB = require('bluebird')

const contentPath = require('./content/path')
const crypto = require('crypto')
const figgyPudding = require('figgy-pudding')
const fixOwner = require('./util/fix-owner')
const fs = require('graceful-fs')
const hashToSegments = require('./util/hash-to-segments')
const ms = require('mississippi')
const path = require('path')
const ssri = require('ssri')
const Y = require('./util/y.js')

const indexV = require('../package.json')['cache-version'].index

const appendFileAsync = BB.promisify(fs.appendFile)
const readFileAsync = BB.promisify(fs.readFile)
const readdirAsync = BB.promisify(fs.readdir)
const concat = ms.concat
const from = ms.from

module.exports.NotFoundError = class NotFoundError extends Error {
  constructor (cache, key) {
    super(Y`No cache entry for \`${key}\` found in \`${cache}\``)
    this.code = 'ENOENT'
    this.cache = cache
    this.key = key
  }
}

const IndexOpts = figgyPudding({
  metadata: {},
  size: {}
})

module.exports.insert = insert
function insert (cache, key, integrity, opts) {
  opts = IndexOpts(opts)
  const bucket = bucketPath(cache, key)
  const entry = {
    key,
    integrity: integrity && ssri.stringify(integrity),
    time: Date.now(),
    size: opts.size,
    metadata: opts.metadata
  }
  return fixOwner.mkdirfix(
    cache, path.dirname(bucket)
  ).then(() => {
    const stringified = JSON.stringify(entry)
    // NOTE - Cleverness ahoy!
    //
    // This works because it's tremendously unlikely for a concurrent append to
    // corrupt an entry in a way that still matches the hash written alongside
    // it. So, we just prefix each entry with a hash of its JSON and verify it
    // on read, silently dropping any line that fails the check.
    //
    // Thanks to @isaacs for the whiteboarding session that ended up with this.
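    //
    // For illustration only (hypothetical values), an appended line looks like:
    //
    //   <sha1 hex of the JSON>\t{"key":"my-key","integrity":"sha512-...","time":1234567890123,"size":1024,"metadata":{}}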
    return appendFileAsync(
      bucket, `\n${hashEntry(stringified)}\t${stringified}`
    )
  }).then(
    () => fixOwner.chownr(cache, bucket)
  ).catch({ code: 'ENOENT' }, () => {
    // There's a class of race conditions that happen when things get deleted
    // during fixOwner, or between the two mkdirfix/chownr calls.
    //
    // It's perfectly fine to just not bother in those cases and lie
    // that the index entry was written. Because it's a cache.
  }).then(() => {
    return formatEntry(cache, entry)
  })
}
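
// A minimal usage sketch (the cache path, key, and integrity below are
// hypothetical, not part of this module):
//
//   insert('/tmp/my-cache', 'my-key', 'sha512-deadbeef...', {
//     size: 1024,
//     metadata: { note: 'anything JSON-serializable' }
//   }).then(entry => {
//     // `entry` is the formatted index entry, including `entry.path`
//   })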

module.exports.insert.sync = insertSync
function insertSync (cache, key, integrity, opts) {
  opts = IndexOpts(opts)
  const bucket = bucketPath(cache, key)
  const entry = {
    key,
    integrity: integrity && ssri.stringify(integrity),
    time: Date.now(),
    size: opts.size,
    metadata: opts.metadata
  }
  fixOwner.mkdirfix.sync(cache, path.dirname(bucket))
  const stringified = JSON.stringify(entry)
  fs.appendFileSync(
    bucket, `\n${hashEntry(stringified)}\t${stringified}`
  )
  try {
    fixOwner.chownr.sync(cache, bucket)
  } catch (err) {
    if (err.code !== 'ENOENT') {
      throw err
    }
  }
  return formatEntry(cache, entry)
}
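
// Lookups scan the whole bucket and keep the *last* entry whose key matches:
// buckets are append-only logs, so later writes shadow earlier ones.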

module.exports.find = find
function find (cache, key) {
  const bucket = bucketPath(cache, key)
  return bucketEntries(bucket).then(entries => {
    return entries.reduce((latest, next) => {
      if (next && next.key === key) {
        return formatEntry(cache, next)
      } else {
        return latest
      }
    }, null)
  }).catch(err => {
    if (err.code === 'ENOENT') {
      return null
    } else {
      throw err
    }
  })
}

module.exports.find.sync = findSync
function findSync (cache, key) {
  const bucket = bucketPath(cache, key)
  try {
    return bucketEntriesSync(bucket).reduce((latest, next) => {
      if (next && next.key === key) {
        return formatEntry(cache, next)
      } else {
        return latest
      }
    }, null)
  } catch (err) {
    if (err.code === 'ENOENT') {
      return null
    } else {
      throw err
    }
  }
}
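
// "Deleting" a key just appends a tombstone entry with a null integrity.
// formatEntry() maps such entries to null, so reads treat the key as absent.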

module.exports.delete = del
function del (cache, key, opts) {
  return insert(cache, key, null, opts)
}

module.exports.delete.sync = delSync
function delSync (cache, key, opts) {
  return insertSync(cache, key, null, opts)
}

module.exports.lsStream = lsStream
function lsStream (cache) {
  const indexDir = bucketDir(cache)
  const stream = from.obj()

  // "/cachename/*"
  readdirOrEmpty(indexDir).map(bucket => {
    const bucketPath = path.join(indexDir, bucket)

    // "/cachename/<bucket 0xFF>/*"
    return readdirOrEmpty(bucketPath).map(subbucket => {
      const subbucketPath = path.join(bucketPath, subbucket)

      // "/cachename/<bucket 0xFF>/<bucket 0xFF>/*"
      return readdirOrEmpty(subbucketPath).map(entry => {
        const getKeyToEntry = bucketEntries(
          path.join(subbucketPath, entry)
        ).reduce((acc, entry) => {
          acc.set(entry.key, entry)
          return acc
        }, new Map())

        return getKeyToEntry.then(reduced => {
          for (let entry of reduced.values()) {
            const formatted = formatEntry(cache, entry)
            formatted && stream.push(formatted)
          }
        }).catch({ code: 'ENOENT' }, nop)
      })
    })
  }).then(() => {
    stream.push(null)
  }, err => {
    stream.emit('error', err)
  })

  return stream
}

module.exports.ls = ls
function ls (cache) {
  return BB.fromNode(cb => {
    lsStream(cache).on('error', cb).pipe(concat(entries => {
      cb(null, entries.reduce((acc, xs) => {
        acc[xs.key] = xs
        return acc
      }, {}))
    }))
  })
}
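
// A small usage sketch (hypothetical cache path and key): ls() resolves to an
// object keyed by cache key, each value being a formatted index entry.
//
//   ls('/tmp/my-cache').then(index => {
//     const entry = index['my-key']
//     // entry => { key, integrity, path, size, time, metadata } or undefined
//   })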

function bucketEntries (bucket, filter) {
  return readFileAsync(
    bucket, 'utf8'
  ).then(data => _bucketEntries(data, filter))
}

function bucketEntriesSync (bucket, filter) {
  const data = fs.readFileSync(bucket, 'utf8')
  return _bucketEntries(data, filter)
}

function _bucketEntries (data, filter) {
  let entries = []
  data.split('\n').forEach(entry => {
    if (!entry) { return }
    const pieces = entry.split('\t')
    if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) {
      // Hash is no good! Corruption or malice? Doesn't matter!
      // EJECT EJECT
      return
    }
    let obj
    try {
      obj = JSON.parse(pieces[1])
    } catch (e) {
      // Entry is corrupted!
      return
    }
    if (obj) {
      entries.push(obj)
    }
  })
  return entries
}

module.exports._bucketDir = bucketDir
function bucketDir (cache) {
  return path.join(cache, `index-v${indexV}`)
}

module.exports._bucketPath = bucketPath
function bucketPath (cache, key) {
  const hashed = hashKey(key)
  return path.join.apply(path, [bucketDir(cache)].concat(
    hashToSegments(hashed)
  ))
}
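
// For illustration only: assuming hashToSegments() splits a hex digest into
// two 2-character prefixes plus the remainder (an assumption about that
// helper, which lives outside this file), a key hashing to "abcdef012345..."
// would map to:
//
//   <cache>/index-v<indexV>/ab/cd/ef012345...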

module.exports._hashKey = hashKey
function hashKey (key) {
  return hash(key, 'sha256')
}

module.exports._hashEntry = hashEntry
function hashEntry (str) {
  return hash(str, 'sha1')
}

function hash (str, digest) {
  return crypto
    .createHash(digest)
    .update(str)
    .digest('hex')
}

function formatEntry (cache, entry) {
  // Treat null digests as deletions. They'll shadow any previous entries.
  if (!entry.integrity) { return null }
  return {
    key: entry.key,
    integrity: entry.integrity,
    path: contentPath(cache, entry.integrity),
    size: entry.size,
    time: entry.time,
    metadata: entry.metadata
  }
}

function readdirOrEmpty (dir) {
  return readdirAsync(dir)
    .catch({ code: 'ENOENT' }, () => [])
    .catch({ code: 'ENOTDIR' }, () => [])
}

function nop () {
}