GSI - Employee Self Service Mobile
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

114 lines
5.4 KiB

2 months ago
  1. "use strict";
  2. Object.defineProperty(exports, "__esModule", { value: true });
  3. exports.command = void 0;
  4. const csv_parse_1 = require("csv-parse");
  5. const Chain = require("stream-chain");
  6. const clc = require("colorette");
  7. const fs = require("fs-extra");
  8. const Pick = require("stream-json/filters/Pick");
  9. const StreamArray = require("stream-json/streamers/StreamArray");
  10. const command_1 = require("../command");
  11. const error_1 = require("../error");
  12. const logger_1 = require("../logger");
  13. const projectUtils_1 = require("../projectUtils");
  14. const requirePermissions_1 = require("../requirePermissions");
  15. const accountImporter_1 = require("../accountImporter");
  16. const MAX_BATCH_SIZE = 1000;
  17. exports.command = new command_1.Command("auth:import [dataFile]")
  18. .description("import users into your Firebase project from a data file(.csv or .json)")
  19. .option("--hash-algo <hashAlgo>", "specify the hash algorithm used in password for these accounts")
  20. .option("--hash-key <hashKey>", "specify the key used in hash algorithm")
  21. .option("--salt-separator <saltSeparator>", "specify the salt separator which will be appended to salt when verifying password. only used by SCRYPT now.")
  22. .option("--rounds <rounds>", "specify how many rounds for hash calculation.")
  23. .option("--mem-cost <memCost>", "specify the memory cost for firebase scrypt, or cpu/memory cost for standard scrypt")
  24. .option("--parallelization <parallelization>", "specify the parallelization for standard scrypt.")
  25. .option("--block-size <blockSize>", "specify the block size (normally is 8) for standard scrypt.")
  26. .option("--dk-len <dkLen>", "specify derived key length for standard scrypt.")
  27. .option("--hash-input-order <hashInputOrder>", "specify the order of password and salt. Possible values are SALT_FIRST and PASSWORD_FIRST. " +
  28. "MD5, SHA1, SHA256, SHA512, HMAC_MD5, HMAC_SHA1, HMAC_SHA256, HMAC_SHA512 support this flag.")
  29. .before(requirePermissions_1.requirePermissions, ["firebaseauth.users.create", "firebaseauth.users.update"])
  30. .action(async (dataFile, options) => {
  31. const projectId = (0, projectUtils_1.needProjectId)(options);
  32. const checkRes = (0, accountImporter_1.validateOptions)(options);
  33. if (!checkRes.valid) {
  34. return checkRes;
  35. }
  36. const hashOptions = checkRes;
  37. if (!dataFile.endsWith(".csv") && !dataFile.endsWith(".json")) {
  38. throw new error_1.FirebaseError("Data file must end with .csv or .json");
  39. }
  40. const stats = await fs.stat(dataFile);
  41. const fileSizeInBytes = stats.size;
  42. logger_1.logger.info(`Processing ${clc.bold(dataFile)} (${fileSizeInBytes} bytes)`);
  43. const batches = [];
  44. let currentBatch = [];
  45. let counter = 0;
  46. let userListArr = [];
  47. const inStream = fs.createReadStream(dataFile);
  48. if (dataFile.endsWith(".csv")) {
  49. userListArr = await new Promise((resolve, reject) => {
  50. const parser = (0, csv_parse_1.parse)();
  51. parser
  52. .on("readable", () => {
  53. let record = [];
  54. while ((record = parser.read()) !== null) {
  55. counter++;
  56. const trimmed = record.map((s) => {
  57. const str = s.trim().replace(/^["|'](.*)["|']$/, "$1");
  58. return str === "" ? undefined : str;
  59. });
  60. const user = (0, accountImporter_1.transArrayToUser)(trimmed);
  61. const err = user.error;
  62. if (err) {
  63. return reject(new error_1.FirebaseError(`Line ${counter} (${record.join(",")}) has invalid data format: ${err}`));
  64. }
  65. currentBatch.push(user);
  66. if (currentBatch.length === MAX_BATCH_SIZE) {
  67. batches.push(currentBatch);
  68. currentBatch = [];
  69. }
  70. }
  71. })
  72. .on("end", () => {
  73. if (currentBatch.length) {
  74. batches.push(currentBatch);
  75. }
  76. resolve(batches);
  77. });
  78. inStream.pipe(parser);
  79. });
  80. }
  81. else {
  82. userListArr = await new Promise((resolve, reject) => {
  83. const pipeline = new Chain([
  84. Pick.withParser({ filter: /^users$/ }),
  85. StreamArray.streamArray(),
  86. ({ value }) => {
  87. counter++;
  88. const user = (0, accountImporter_1.validateUserJson)(value);
  89. const err = user.error;
  90. if (err) {
  91. throw new error_1.FirebaseError(`Validation Error: ${err}`);
  92. }
  93. currentBatch.push(value);
  94. if (currentBatch.length === MAX_BATCH_SIZE) {
  95. batches.push(currentBatch);
  96. currentBatch = [];
  97. }
  98. },
  99. ]);
  100. pipeline.once("error", reject);
  101. pipeline.on("finish", () => {
  102. if (currentBatch.length) {
  103. batches.push(currentBatch);
  104. }
  105. resolve(batches);
  106. });
  107. inStream.pipe(pipeline);
  108. });
  109. }
  110. logger_1.logger.debug(`Preparing to import ${counter} user records in ${userListArr.length} batches.`);
  111. if (userListArr.length) {
  112. return (0, accountImporter_1.serialImportUsers)(projectId, hashOptions, userListArr, 0);
  113. }
  114. });