|
| 1 | +/** |
| 2 | + * Copyright (c) 2016-present, Facebook, Inc. |
| 3 | + * All rights reserved. |
| 4 | + * |
| 5 | + * This source code is licensed under the BSD-style license found in the |
| 6 | + * LICENSE file in the root directory of this source tree. An additional grant |
| 7 | + * of patent rights can be found in the PATENTS file in the same directory. |
| 8 | + * |
| 9 | + * @flow |
| 10 | + */ |
| 11 | + |
| 12 | +'use strict'; |
| 13 | + |
| 14 | +const debounce = require('lodash/debounce'); |
| 15 | +const imurmurhash = require('imurmurhash'); |
| 16 | +const jsonStableStringify = require('json-stable-stringify'); |
| 17 | +const path = require('path'); |
| 18 | +const request = require('request'); |
| 19 | +const toFixedHex = require('./toFixedHex'); |
| 20 | + |
| 21 | +import type {CachedResult} from './TransformCache'; |
| 22 | + |
// Maximum number of keys we bundle into a single batched request.
const SINGLE_REQUEST_MAX_KEYS = 100;
// How long we keep aggregating keys before firing a batch request.
const AGGREGATION_DELAY_MS = 100;

/**
 * Maps a batch of cache keys to the URIs from which the corresponding
 * transformed results can be downloaded. Keys missing from `results` are
 * treated as cache misses by the callers in this file.
 */
type FetchResultURIs = (
  keys: Array<string>,
  callback: (error?: Error, results?: Map<string, string>) => void,
) => mixed;

// Everything that identifies one transform of one file; see keyOf() below.
type FetchProps = {
  filePath: string,
  sourceCode: string,
  transformCacheKey: string,
  transformOptions: mixed,
};

// Callback receiving the downloaded transform result (null-ish on cache miss).
type FetchCallback = (error?: Error, resultURI?: ?CachedResult) => mixed;
// Callback receiving the URI for a single key (null-ish on cache miss).
type FetchURICallback = (error?: Error, resultURI?: ?string) => mixed;
| 41 | +/** |
| 42 | + * We aggregate the requests to do a single request for many keys. It also |
| 43 | + * ensures we do a single request at a time to avoid pressuring the I/O. |
| 44 | + */ |
| 45 | +class KeyURIFetcher { |
| 46 | + |
| 47 | + _fetchResultURIs: FetchResultURIs; |
| 48 | + _pendingQueries: Array<{key: string, callback: FetchURICallback}>; |
| 49 | + _isProcessing: boolean; |
| 50 | + _processQueriesDebounced: () => void; |
| 51 | + _processQueries: () => void; |
| 52 | + |
| 53 | + /** |
| 54 | + * Fetch the pending keys right now, if any and if we're not already doing |
| 55 | + * so in parallel. At the end of the fetch, we trigger a new batch fetching |
| 56 | + * recursively. |
| 57 | + */ |
| 58 | + _processQueries() { |
| 59 | + const {_pendingQueries} = this; |
| 60 | + if (_pendingQueries.length === 0 || this._isProcessing) { |
| 61 | + return; |
| 62 | + } |
| 63 | + this._isProcessing = true; |
| 64 | + const queries = _pendingQueries.splice(0, SINGLE_REQUEST_MAX_KEYS); |
| 65 | + const keys = queries.map(query => query.key); |
| 66 | + this._fetchResultURIs(keys, (error, results) => { |
| 67 | + queries.forEach(query => { |
| 68 | + query.callback(error, results && results.get(query.key)); |
| 69 | + }); |
| 70 | + this._isProcessing = false; |
| 71 | + process.nextTick(this._processQueries); |
| 72 | + }); |
| 73 | + } |
| 74 | + |
| 75 | + /** |
| 76 | + * Enqueue the fetching of a particular key. |
| 77 | + */ |
| 78 | + fetch(key: string, callback: FetchURICallback) { |
| 79 | + this._pendingQueries.push({key, callback}); |
| 80 | + this._processQueriesDebounced(); |
| 81 | + } |
| 82 | + |
| 83 | + constructor(fetchResultURIs: FetchResultURIs) { |
| 84 | + this._fetchResultURIs = fetchResultURIs; |
| 85 | + this._pendingQueries = []; |
| 86 | + this._isProcessing = false; |
| 87 | + this._processQueries = this._processQueries.bind(this); |
| 88 | + this._processQueriesDebounced = |
| 89 | + debounce(this._processQueries, AGGREGATION_DELAY_MS); |
| 90 | + } |
| 91 | + |
| 92 | +} |
| 93 | + |
| 94 | +function validateCachedResult(cachedResult: mixed): ?CachedResult { |
| 95 | + if ( |
| 96 | + cachedResult != null && |
| 97 | + typeof cachedResult === 'object' && |
| 98 | + typeof cachedResult.code === 'string' && |
| 99 | + Array.isArray(cachedResult.dependencies) && |
| 100 | + cachedResult.dependencies.every(dep => typeof dep === 'string') && |
| 101 | + Array.isArray(cachedResult.dependencyOffsets) && |
| 102 | + cachedResult.dependencyOffsets.every(offset => typeof offset === 'number') |
| 103 | + ) { |
| 104 | + return (cachedResult: any); |
| 105 | + } |
| 106 | + return undefined; |
| 107 | +} |
| 108 | + |
| 109 | +/** |
| 110 | + * One can enable the global cache by calling configure() from a custom CLI |
| 111 | + * script. Eventually we may make it more flexible. |
| 112 | + */ |
| 113 | +class GlobalTransformCache { |
| 114 | + |
| 115 | + _fetcher: KeyURIFetcher; |
| 116 | + static _global: ?GlobalTransformCache; |
| 117 | + |
| 118 | + constructor(fetchResultURIs: FetchResultURIs) { |
| 119 | + this._fetcher = new KeyURIFetcher(fetchResultURIs); |
| 120 | + } |
| 121 | + |
| 122 | + /** |
| 123 | + * Return a key for identifying uniquely a source file. |
| 124 | + */ |
| 125 | + static keyOf(props: FetchProps) { |
| 126 | + const sourceDigest = toFixedHex(8, imurmurhash(props.sourceCode).result()); |
| 127 | + const optionsHash = imurmurhash() |
| 128 | + .hash(jsonStableStringify(props.transformOptions) || '') |
| 129 | + .hash(props.transformCacheKey) |
| 130 | + .result(); |
| 131 | + const optionsDigest = toFixedHex(8, optionsHash); |
| 132 | + return ( |
| 133 | + `${optionsDigest}${sourceDigest}` + |
| 134 | + `${path.basename(props.filePath)}` |
| 135 | + ); |
| 136 | + } |
| 137 | + |
| 138 | + /** |
| 139 | + * We may want to improve that logic to return a stream instead of the whole |
| 140 | + * blob of transformed results. However the results are generally only a few |
| 141 | + * megabytes each. |
| 142 | + */ |
| 143 | + _fetchFromURI(uri: string, callback: FetchCallback) { |
| 144 | + request.get({uri, json: true}, (error, response, unvalidatedResult) => { |
| 145 | + if (error != null) { |
| 146 | + callback(error); |
| 147 | + return; |
| 148 | + } |
| 149 | + if (response.statusCode !== 200) { |
| 150 | + callback(new Error( |
| 151 | + `Unexpected HTTP status code: ${response.statusCode}`, |
| 152 | + )); |
| 153 | + return; |
| 154 | + } |
| 155 | + const result = validateCachedResult(unvalidatedResult); |
| 156 | + if (result == null) { |
| 157 | + callback(new Error('Invalid result returned by server.')); |
| 158 | + return; |
| 159 | + } |
| 160 | + callback(undefined, result); |
| 161 | + }); |
| 162 | + } |
| 163 | + |
| 164 | + fetch(props: FetchProps, callback: FetchCallback) { |
| 165 | + this._fetcher.fetch(GlobalTransformCache.keyOf(props), (error, uri) => { |
| 166 | + if (error != null) { |
| 167 | + callback(error); |
| 168 | + } else { |
| 169 | + if (uri == null) { |
| 170 | + callback(); |
| 171 | + return; |
| 172 | + } |
| 173 | + this._fetchFromURI(uri, callback); |
| 174 | + } |
| 175 | + }); |
| 176 | + } |
| 177 | + |
| 178 | + /** |
| 179 | + * For using the global cache one needs to have some kind of central key-value |
| 180 | + * store that gets prefilled using keyOf() and the transformed results. The |
| 181 | + * fetching function should provide a mapping of keys to URIs. The files |
| 182 | + * referred by these URIs contains the transform results. Using URIs instead |
| 183 | + * of returning the content directly allows for independent fetching of each |
| 184 | + * result. |
| 185 | + */ |
| 186 | + static configure(fetchResultURIs: FetchResultURIs) { |
| 187 | + GlobalTransformCache._global = new GlobalTransformCache(fetchResultURIs); |
| 188 | + } |
| 189 | + |
| 190 | + static get() { |
| 191 | + return GlobalTransformCache._global; |
| 192 | + } |
| 193 | + |
| 194 | +} |
| 195 | + |
// The global cache is disabled by default; configure() installs an instance.
GlobalTransformCache._global = null;

module.exports = GlobalTransformCache;
0 commit comments