// caching-compiler.js (from a fork of meteor/meteor)
const fs = Plugin.fs;
const path = Plugin.path;
const createHash = Npm.require('crypto').createHash;
const assert = Npm.require('assert');
const Future = Npm.require('fibers/future');
const LRU = Npm.require('lru-cache');
const async = Npm.require('async');
// Base class for CachingCompiler and MultiFileCachingCompiler.
CachingCompilerBase = class CachingCompilerBase {
constructor({
compilerName,
defaultCacheSize,
maxParallelism = 20,
}) {
this._compilerName = compilerName;
this._maxParallelism = maxParallelism;
const envVarPrefix = 'METEOR_' + compilerName.toUpperCase() + '_CACHE_';
const debugEnvVar = envVarPrefix + 'DEBUG';
this._cacheDebugEnabled = !! process.env[debugEnvVar];
const cacheSizeEnvVar = envVarPrefix + 'SIZE';
this._cacheSize = +process.env[cacheSizeEnvVar] || defaultCacheSize;
this._diskCache = null;
// For testing.
this._callCount = 0;
}
// Your subclass must override this method to define the key used to identify
// a particular version of an InputFile.
//
// Given an InputFile (the data type passed to processFilesForTarget as part
// of the Plugin.registerCompiler API), returns a cache key that represents
// it. This cache key can be any JSON value (it will be converted internally
// into a hash). This should reflect any aspect of the InputFile that affects
// the output of `compileOneFile`. Typically you'll want to include
// `inputFile.getSourceHash()`, and perhaps
// `inputFile.getPathInPackage()` or `inputFile.getDeclaredExports()` if
// `compileOneFile` pays attention to them.
//
// Note that for MultiFileCachingCompiler, your cache key doesn't need to
// include the file's path, because that is automatically taken into account
// by the implementation. CachingCompiler subclasses can choose whether or not
// to include the file's path in the cache key.
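//
// For example, a subclass might implement it roughly as follows (an
// illustrative sketch, not part of this file; getSourceHash and
// getDeclaredExports are real InputFile methods):
//
//   getCacheKey(inputFile) {
//     return [inputFile.getSourceHash(), inputFile.getDeclaredExports()];
//   }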
getCacheKey(inputFile) {
throw Error('CachingCompiler subclass should implement getCacheKey!');
}
// Your subclass must override this method to define how a CompileResult
// translates into adding assets to the bundle.
//
// This method is given an InputFile (the data type passed to
// processFilesForTarget as part of the Plugin.registerCompiler API) and a
// CompileResult (either returned directly from compileOneFile or read from
// the cache). It should call methods like `inputFile.addJavaScript`
// and `inputFile.error`.
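//
// For example (an illustrative sketch; it assumes a CompileResult of the
// shape { code, sourceMap }, which is entirely up to your subclass):
//
//   addCompileResult(inputFile, compileResult) {
//     inputFile.addJavaScript({
//       path: inputFile.getPathInPackage() + '.js',
//       data: compileResult.code,
//       sourceMap: compileResult.sourceMap,
//     });
//   }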
addCompileResult(inputFile, compileResult) {
throw Error('CachingCompiler subclass should implement addCompileResult!');
}
// Your subclass must override this method to define the size of a
// CompileResult (used by the in-memory cache to limit the total amount of
// data cached).
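//
// For example (illustrative only, again assuming a { code, sourceMap }
// CompileResult):
//
//   compileResultSize(compileResult) {
//     return compileResult.code.length +
//       this.sourceMapSize(compileResult.sourceMap);
//   }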
compileResultSize(compileResult) {
throw Error('CachingCompiler subclass should implement compileResultSize!');
}
// Your subclass may override this method to define an alternate way of
// stringifying CompileResults. Takes a CompileResult and returns a string.
stringifyCompileResult(compileResult) {
return JSON.stringify(compileResult);
}
// Your subclass may override this method to define an alternate way of
// parsing CompileResults from a string. Takes a string and returns a
// CompileResult. If the string doesn't represent a valid CompileResult, you
// may want to return null instead of throwing, which will make
// CachingCompiler ignore the cache.
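//
// As an illustrative sketch (not part of this file), a subclass could wrap
// both methods to prepend a format version, so that disk cache entries
// written by an older release of the plugin are ignored rather than reused:
//
//   stringifyCompileResult(compileResult) {
//     return JSON.stringify({ version: 2, compileResult });
//   }
//   parseCompileResult(stringified) {
//     const parsed = this._parseJSONOrNull(stringified);
//     return (parsed && parsed.version === 2) ? parsed.compileResult : null;
//   }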
parseCompileResult(stringifiedCompileResult) {
return this._parseJSONOrNull(stringifiedCompileResult);
}
_parseJSONOrNull(json) {
try {
return JSON.parse(json);
} catch (e) {
if (e instanceof SyntaxError)
return null;
throw e;
}
}
_cacheDebug(message) {
if (!this._cacheDebugEnabled)
return;
console.log(`CACHE(${ this._compilerName }): ${ message }`);
}
setDiskCacheDirectory(diskCache) {
if (this._diskCache)
throw Error('setDiskCacheDirectory called twice?');
this._diskCache = diskCache;
}
// Because many compilers need to calculate the size of a SourceMap in their
// compileResultSize, this helper method is provided.
sourceMapSize(sm) {
if (! sm) return 0;
// Sum the lengths of the sources and the mappings; the size of the
// remaining metadata is ignored, which is not a big deal.
return sm.mappings.length
+ (sm.sourcesContent || []).reduce(function (soFar, current) {
return soFar + (current ? current.length : 0);
}, 0);
}
// Borrowed from another MIT-licensed project that benjamn wrote:
// https://github.com/reactjs/commoner/blob/235d54a12c/lib/util.js#L136-L168
_deepHash(val) {
const hash = createHash('sha1');
let type = typeof val;
if (val === null) {
type = 'null';
}
hash.update(type + '\0');
switch (type) {
case 'object':
const keys = Object.keys(val);
// Array keys will already be sorted.
if (! Array.isArray(val)) {
keys.sort();
}
keys.forEach((key) => {
if (typeof val[key] === 'function') {
// Silently ignore nested methods, but nevertheless complain below
// if the root value is a function.
return;
}
hash.update(key + '\0').update(this._deepHash(val[key]));
});
break;
case 'function':
assert.ok(false, 'cannot hash function objects');
break;
default:
hash.update('' + val);
break;
}
return hash.digest('hex');
}
// We want to write the file atomically. But we also don't want to block
// processing on the file write.
_writeFileAsync(filename, contents) {
const tempFilename = filename + '.tmp.' + Random.id();
fs.writeFile(tempFilename, contents, (err) => {
// ignore errors, it's just a cache
if (err) {
return;
}
fs.rename(tempFilename, filename, (err) => {
// ignore this error too.
});
});
}
// Helper function. Returns the body of the file as a string, or null if it
// doesn't exist.
_readFileOrNull(filename) {
try {
return fs.readFileSync(filename, 'utf8');
} catch (e) {
if (e && e.code === 'ENOENT')
return null;
throw e;
}
}
}
// CachingCompiler is a class designed to be used with Plugin.registerCompiler
// which implements in-memory and on-disk caches for the files that it
// processes. You should subclass CachingCompiler and define the following
// methods: getCacheKey, compileOneFile, addCompileResult, and
// compileResultSize.
//
// CachingCompiler assumes that files are processed independently of each other;
// there is no 'import' directive allowing one file to reference another. That
// is, editing one file should only require that file to be rebuilt, not other
// files.
//
// The data that is cached for each file is of a type that is (implicitly)
// defined by your subclass. CachingCompiler refers to this type as
// `CompileResult`, but this isn't a single type: it's up to your subclass to
// decide what type of data this is. You should document what your subclass's
// CompileResult type is.
//
// Your subclass's constructor should call the superclass constructor with the
// compiler name (used to generate environment variables for debugging and
// tweaking in-memory cache size) and the default cache size.
//
// By default, CachingCompiler processes each file in "parallel". That is, if it
// needs to yield to read from the disk cache, or if getCacheKey,
// compileOneFile, or addCompileResult yields, it will start processing the next
// few files. To set how many files can be processed in parallel (including
// setting it to 1 if your subclass doesn't support any parallelism), pass the
// maxParallelism option to the superclass constructor.
//
// For example (using ES2015 via the ecmascript package):
//
// class AwesomeCompiler extends CachingCompiler {
// constructor() {
// super({
// compilerName: 'awesome',
// defaultCacheSize: 1024*1024*10,
// });
// }
// // ... define the other methods
// }
// Plugin.registerCompiler({
// extensions: ['awesome'],
// }, () => new AwesomeCompiler());
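//
// With compilerName 'awesome' as above, the resulting cache can be tuned and
// debugged from the environment (see the CachingCompilerBase constructor), eg:
//
//   METEOR_AWESOME_CACHE_DEBUG=1 METEOR_AWESOME_CACHE_SIZE=20971520 meteor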
//
// XXX maybe compileResultSize and stringifyCompileResult should just be methods
// on CompileResult? Sort of hard to do that with parseCompileResult.
CachingCompiler = class CachingCompiler extends CachingCompilerBase {
constructor({
compilerName,
defaultCacheSize,
maxParallelism = 20,
}) {
super({compilerName, defaultCacheSize, maxParallelism});
// Maps from a hashed cache key to a compileResult.
this._cache = new LRU({
max: this._cacheSize,
length: (value) => this.compileResultSize(value),
});
}
// Your subclass must override this method to define the transformation from
// InputFile to its cacheable CompileResult.
//
// Given an InputFile (the data type passed to processFilesForTarget as part
// of the Plugin.registerCompiler API), compiles the file and returns a
// CompileResult (the cacheable data type specific to your subclass).
//
// This method is not called on files when a valid cache entry exists in
// memory or on disk.
//
// On a compile error, you should call `inputFile.error` appropriately and
// return null; this will not be cached.
//
// This method should not call `inputFile.addJavaScript` or similar methods!
// That's what addCompileResult is for.
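//
// For example (an illustrative sketch; `awesomeLib.compile` stands in for
// whatever compiler your plugin wraps and is not a real API):
//
//   compileOneFile(inputFile) {
//     try {
//       const { code, map } = awesomeLib.compile(inputFile.getContentsAsString());
//       return { code, sourceMap: map };
//     } catch (e) {
//       inputFile.error({ message: e.message });
//       return null;
//     }
//   }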
compileOneFile(inputFile) {
throw Error('CachingCompiler subclass should implement compileOneFile!');
}
// The processFilesForTarget method from the Plugin.registerCompiler API. If
// you have processing you want to perform at the beginning or end of a
// processing phase, you may want to override this method and call the
// superclass implementation from within your method.
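//
// For example (illustrative only; the before/after hooks are hypothetical
// methods on your subclass):
//
//   processFilesForTarget(inputFiles) {
//     this.beforeBatch(inputFiles);
//     super.processFilesForTarget(inputFiles);
//     this.afterBatch(inputFiles);
//   }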
processFilesForTarget(inputFiles) {
const cacheMisses = [];
const future = new Future;
async.eachLimit(inputFiles, this._maxParallelism, (inputFile, cb) => {
let error = null;
try {
const cacheKey = this._deepHash(this.getCacheKey(inputFile));
let compileResult = this._cache.get(cacheKey);
if (! compileResult) {
compileResult = this._readCache(cacheKey);
if (compileResult) {
this._cacheDebug(`Loaded ${ inputFile.getDisplayPath() }`);
}
}
if (! compileResult) {
cacheMisses.push(inputFile.getDisplayPath());
compileResult = this.compileOneFile(inputFile);
if (! compileResult) {
// compileOneFile should have called inputFile.error.
// We don't cache failures for now.
return;
}
// Save what we've compiled.
this._cache.set(cacheKey, compileResult);
this._writeCacheAsync(cacheKey, compileResult);
}
this.addCompileResult(inputFile, compileResult);
} catch (e) {
error = e;
} finally {
cb(error);
}
}, future.resolver());
future.wait();
if (this._cacheDebugEnabled) {
cacheMisses.sort();
this._cacheDebug(
`Ran (#${ ++this._callCount }) on: ${ JSON.stringify(cacheMisses) }`);
}
}
_cacheFilename(cacheKey) {
// We want cacheKeys to be hex so that they work on any FS and never end in
// .cache.
if (!/^[a-f0-9]+$/.test(cacheKey)) {
throw Error('bad cacheKey: ' + cacheKey);
}
return path.join(this._diskCache, cacheKey + '.cache');
}
// Load a cache entry from disk. Returns the compileResult object
// and loads it into the in-memory cache too.
_readCache(cacheKey) {
if (! this._diskCache) {
return null;
}
const cacheFilename = this._cacheFilename(cacheKey);
const compileResult = this._readAndParseCompileResultOrNull(cacheFilename);
if (! compileResult) {
return null;
}
this._cache.set(cacheKey, compileResult);
return compileResult;
}
_writeCacheAsync(cacheKey, compileResult) {
if (! this._diskCache)
return;
const cacheFilename = this._cacheFilename(cacheKey);
const cacheContents = this.stringifyCompileResult(compileResult);
this._writeFileAsync(cacheFilename, cacheContents);
}
// Returns null if the file does not exist or can't be parsed; otherwise
// returns the parsed compileResult in the file.
_readAndParseCompileResultOrNull(filename) {
const raw = this._readFileOrNull(filename);
return this.parseCompileResult(raw);
}
}