GSI - Employee Self Service Mobile
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

207 lines
8.2 KiB

2 months ago
  1. "use strict";
  2. Object.defineProperty(exports, "__esModule", { value: true });
  3. exports.Uploader = void 0;
  4. const lodash_1 = require("lodash");
  5. const abort_controller_1 = require("abort-controller");
  6. const clc = require("colorette");
  7. const crypto = require("crypto");
  8. const fs = require("fs");
  9. const path = require("path");
  10. const zlib = require("zlib");
  11. const apiv2_1 = require("../../apiv2");
  12. const queue_1 = require("../../throttler/queue");
  13. const api_1 = require("../../api");
  14. const hashcache_1 = require("./hashcache");
  15. const logger_1 = require("../../logger");
  16. const error_1 = require("../../error");
  17. const MIN_UPLOAD_TIMEOUT = 30000;
  18. const MAX_UPLOAD_TIMEOUT = 7200000;
  19. function progressMessage(message, current, total) {
  20. current = Math.min(current, total);
  21. const percent = Math.floor(((current * 1.0) / total) * 100).toString();
  22. return `${message} [${current}/${total}] (${clc.bold(clc.green(`${percent}%`))})`;
  23. }
  24. class Uploader {
  25. constructor(options) {
  26. this.hashClient = new apiv2_1.Client({
  27. urlPrefix: api_1.hostingApiOrigin,
  28. auth: true,
  29. apiVersion: "v1beta1",
  30. });
  31. this.version = options.version;
  32. this.cwd = options.cwd || process.cwd();
  33. this.projectRoot = options.projectRoot;
  34. this.gzipLevel = options.gzipLevel || 9;
  35. this.hashQueue = new queue_1.Queue({
  36. name: "hashQueue",
  37. concurrency: options.hashConcurrency || 50,
  38. handler: this.hashHandler.bind(this),
  39. });
  40. this.populateBatchSize = options.populateBatchSize || 1000;
  41. this.populateBatch = {};
  42. this.populateQueue = new queue_1.Queue({
  43. name: "populateQueue",
  44. concurrency: options.populateConcurrency || 10,
  45. handler: this.populateHandler.bind(this),
  46. retries: 3,
  47. });
  48. this.uploadQueue = new queue_1.Queue({
  49. name: "uploadQueue",
  50. concurrency: options.uploadConcurrency || 200,
  51. handler: this.uploadHandler.bind(this),
  52. retries: 5,
  53. });
  54. this.public = options.public || this.cwd;
  55. this.files = options.files;
  56. this.fileCount = this.files.length;
  57. this.cache = (0, hashcache_1.load)(this.projectRoot, this.hashcacheName());
  58. this.cacheNew = new Map();
  59. this.sizeMap = {};
  60. this.hashMap = {};
  61. this.pathMap = {};
  62. }
  63. hashcacheName() {
  64. return Buffer.from(path.relative(this.projectRoot, this.public))
  65. .toString("base64")
  66. .replace(/=+$/, "");
  67. }
  68. async start() {
  69. if (this.files.length === 0) {
  70. return;
  71. }
  72. for (const f of this.files) {
  73. this.hashQueue.add(f);
  74. }
  75. this.hashQueue.close();
  76. this.hashQueue.process();
  77. this.hashQueue
  78. .wait()
  79. .then(this.queuePopulate.bind(this))
  80. .then(() => {
  81. (0, hashcache_1.dump)(this.projectRoot, this.hashcacheName(), this.cacheNew);
  82. logger_1.logger.debug("[hosting][hash queue][FINAL]", this.hashQueue.stats());
  83. this.populateQueue.close();
  84. return this.populateQueue.wait();
  85. })
  86. .then(() => {
  87. logger_1.logger.debug("[hosting][populate queue][FINAL]", this.populateQueue.stats());
  88. logger_1.logger.debug("[hosting] uploads queued:", this.uploadQueue.stats().total);
  89. this.uploadQueue.close();
  90. });
  91. this.uploadQueue.wait().catch((err) => {
  92. if (err.message.includes("content hash")) {
  93. logger_1.logger.debug("[hosting][upload queue] upload failed with content hash error. Deleting hash cache");
  94. (0, hashcache_1.dump)(this.projectRoot, this.hashcacheName(), new Map());
  95. }
  96. });
  97. const fin = (err) => {
  98. logger_1.logger.debug("[hosting][upload queue][FINAL]", this.uploadQueue.stats());
  99. if (err) {
  100. throw err;
  101. }
  102. };
  103. return this.wait().then(fin).catch(fin);
  104. }
  105. async wait() {
  106. await Promise.all([this.hashQueue.wait(), this.populateQueue.wait(), this.uploadQueue.wait()]);
  107. }
  108. statusMessage() {
  109. if (!this.hashQueue.finished) {
  110. return progressMessage("hashing files", this.hashQueue.complete, this.fileCount);
  111. }
  112. else if (!this.populateQueue.finished) {
  113. return progressMessage("adding files to version", this.populateQueue.complete * 1000, this.fileCount);
  114. }
  115. else if (!this.uploadQueue.finished) {
  116. return progressMessage("uploading new files", this.uploadQueue.complete, this.uploadQueue.stats().total);
  117. }
  118. else {
  119. return "upload complete";
  120. }
  121. }
  122. async hashHandler(filePath) {
  123. const stats = fs.statSync(path.resolve(this.public, filePath));
  124. const mtime = stats.mtime.getTime();
  125. this.sizeMap[filePath] = stats.size;
  126. const cached = this.cache.get(filePath);
  127. if (cached && cached.mtime === mtime) {
  128. this.cacheNew.set(filePath, cached);
  129. this.addHash(filePath, cached.hash);
  130. return;
  131. }
  132. const fstream = this.zipStream(filePath);
  133. const hash = crypto.createHash("sha256");
  134. fstream.pipe(hash);
  135. return new Promise((resolve, reject) => {
  136. fstream.on("end", resolve);
  137. fstream.on("error", reject);
  138. }).then(() => {
  139. const hashVal = hash.read().toString("hex");
  140. this.cacheNew.set(filePath, { mtime: mtime, hash: hashVal });
  141. this.addHash(filePath, hashVal);
  142. });
  143. }
  144. addHash(filePath, hash) {
  145. this.hashMap[hash] = filePath;
  146. this.pathMap[filePath] = hash;
  147. this.populateBatch["/" + filePath] = hash;
  148. const curBatchSize = (0, lodash_1.size)(this.populateBatch);
  149. if (curBatchSize > 0 && curBatchSize % this.populateBatchSize === 0) {
  150. this.queuePopulate();
  151. }
  152. }
  153. queuePopulate() {
  154. const pop = this.populateBatch;
  155. this.populateQueue.add(pop);
  156. this.populateBatch = {};
  157. this.populateQueue.process();
  158. }
  159. async populateHandler(batch) {
  160. const res = await this.hashClient.post(`/${this.version}:populateFiles`, { files: batch });
  161. this.uploadUrl = res.body.uploadUrl;
  162. this.uploadClient = new apiv2_1.Client({ urlPrefix: this.uploadUrl, auth: true });
  163. this.addUploads(res.body.uploadRequiredHashes || []);
  164. }
  165. addUploads(hashes) {
  166. for (const hash of hashes) {
  167. this.uploadQueue.add(hash);
  168. }
  169. this.uploadQueue.process();
  170. }
  171. async uploadHandler(toUpload) {
  172. if (!this.uploadClient) {
  173. throw new error_1.FirebaseError("No upload client available.", { exit: 2 });
  174. }
  175. const controller = new abort_controller_1.default();
  176. const timeout = setTimeout(() => {
  177. controller.abort();
  178. }, this.uploadTimeout(this.hashMap[toUpload]));
  179. const res = await this.uploadClient.request({
  180. method: "POST",
  181. path: `/${toUpload}`,
  182. body: this.zipStream(this.hashMap[toUpload]),
  183. resolveOnHTTPError: true,
  184. responseType: "stream",
  185. signal: controller.signal,
  186. });
  187. clearTimeout(timeout);
  188. if (this.uploadQueue.cursor % 100 === 0) {
  189. logger_1.logger.debug("[hosting][upload]", this.uploadQueue.stats());
  190. }
  191. if (res.status !== 200) {
  192. const errorMessage = await res.response.text();
  193. logger_1.logger.debug(`[hosting][upload] ${this.hashMap[toUpload]} (${toUpload}) HTTP ERROR ${res.status}: headers=${JSON.stringify(res.response.headers)} ${errorMessage}`);
  194. throw new Error(`Unexpected error while uploading file: ${errorMessage}`);
  195. }
  196. }
  197. zipStream(filePath) {
  198. const file = fs.createReadStream(path.resolve(this.public, filePath));
  199. const gzip = zlib.createGzip({ level: this.gzipLevel });
  200. return file.pipe(gzip);
  201. }
  202. uploadTimeout(filePath) {
  203. const size = this.sizeMap[filePath] || 0;
  204. return Math.min(Math.max(Math.round(size / 1000) * 20, MIN_UPLOAD_TIMEOUT), MAX_UPLOAD_TIMEOUT);
  205. }
  206. }
  207. exports.Uploader = Uploader;