GSI - Employee Self Service Mobile
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

347 lines
16 KiB

2 months ago
"use strict";
// Compiled client for the Cloud Functions v2 (GCFv2) REST API.
Object.defineProperty(exports, "__esModule", { value: true });
exports.endpointFromFunction = exports.functionFromEndpoint = exports.deleteFunction = exports.updateFunction = exports.listAllFunctions = exports.listFunctions = exports.getFunction = exports.createFunction = exports.generateUploadUrl = exports.mebibytes = exports.API_VERSION = void 0;
const clc = require("colorette");
const apiv2_1 = require("../apiv2");
const error_1 = require("../error");
const api_1 = require("../api");
const logger_1 = require("../logger");
const v2_1 = require("../functions/events/v2");
const backend = require("../deploy/functions/backend");
const runtimes = require("../deploy/functions/runtimes");
const proto = require("./proto");
const utils = require("../utils");
const projectConfig = require("../functions/projectConfig");
const constants_1 = require("../functions/constants");
// API version segment used in every request URL.
exports.API_VERSION = "v2";
// Authenticated HTTP client bound to the Cloud Functions v2 origin.
const client = new apiv2_1.Client({
    urlPrefix: api_1.functionsV2Origin,
    auth: true,
    apiVersion: exports.API_VERSION,
});
  22. const BYTES_PER_UNIT = {
  23. "": 1,
  24. k: 1e3,
  25. M: 1e6,
  26. G: 1e9,
  27. T: 1e12,
  28. Ki: 1 << 10,
  29. Mi: 1 << 20,
  30. Gi: 1 << 30,
  31. Ti: 1 << 40,
  32. };
  33. function mebibytes(memory) {
  34. const re = /^([0-9]+(\.[0-9]*)?)(Ki|Mi|Gi|Ti|k|M|G|T|([eE]([0-9]+)))?$/;
  35. const matches = re.exec(memory);
  36. if (!matches) {
  37. throw new Error(`Invalid memory quantity "${memory}""`);
  38. }
  39. const quantity = Number.parseFloat(matches[1]);
  40. let bytes;
  41. if (matches[5]) {
  42. bytes = quantity * Math.pow(10, Number.parseFloat(matches[5]));
  43. }
  44. else {
  45. const suffix = matches[3] || "";
  46. bytes = quantity * BYTES_PER_UNIT[suffix];
  47. }
  48. return bytes / (1 << 20);
  49. }
  50. exports.mebibytes = mebibytes;
  51. function functionsOpLogReject(funcName, type, err) {
  52. var _a, _b;
  53. utils.logWarning(clc.bold(clc.yellow("functions:")) + ` ${err === null || err === void 0 ? void 0 : err.message}`);
  54. if (((_b = (_a = err === null || err === void 0 ? void 0 : err.context) === null || _a === void 0 ? void 0 : _a.response) === null || _b === void 0 ? void 0 : _b.statusCode) === 429) {
  55. utils.logWarning(`${clc.bold(clc.yellow("functions:"))} got "Quota Exceeded" error while trying to ${type} ${funcName}. Waiting to retry...`);
  56. }
  57. else if (err === null || err === void 0 ? void 0 : err.message.includes("If you recently started to use Eventarc, it may take a few minutes before all necessary permissions are propagated to the Service Agent")) {
  58. utils.logWarning(`${clc.bold(clc.yellow("functions:"))} since this is your first time using functions v2, we need a little bit longer to finish setting everything up, please retry the deployment in a few minutes.`);
  59. }
  60. else {
  61. utils.logWarning(clc.bold(clc.yellow("functions:")) + " failed to " + type + " function " + funcName);
  62. }
  63. throw new error_1.FirebaseError(`Failed to ${type} function ${funcName}`, {
  64. original: err,
  65. context: { function: funcName },
  66. });
  67. }
  68. async function generateUploadUrl(projectId, location) {
  69. try {
  70. const res = await client.post(`projects/${projectId}/locations/${location}/functions:generateUploadUrl`);
  71. return res.body;
  72. }
  73. catch (err) {
  74. logger_1.logger.info("\n\nThere was an issue deploying your functions. Verify that your project has a Google App Engine instance setup at https://console.cloud.google.com/appengine and try again. If this issue persists, please contact support.");
  75. throw err;
  76. }
  77. }
  78. exports.generateUploadUrl = generateUploadUrl;
  79. async function createFunction(cloudFunction) {
  80. const components = cloudFunction.name.split("/");
  81. const functionId = components.splice(-1, 1)[0];
  82. try {
  83. const res = await client.post(components.join("/"), cloudFunction, { queryParams: { functionId } });
  84. return res.body;
  85. }
  86. catch (err) {
  87. throw functionsOpLogReject(cloudFunction.name, "create", err);
  88. }
  89. }
  90. exports.createFunction = createFunction;
  91. async function getFunction(projectId, location, functionId) {
  92. const name = `projects/${projectId}/locations/${location}/functions/${functionId}`;
  93. const res = await client.get(name);
  94. return res.body;
  95. }
  96. exports.getFunction = getFunction;
  97. async function listFunctions(projectId, region) {
  98. const res = await listFunctionsInternal(projectId, region);
  99. if (res.unreachable.includes(region)) {
  100. throw new error_1.FirebaseError(`Cloud Functions region ${region} is unavailable`);
  101. }
  102. return res.functions;
  103. }
  104. exports.listFunctions = listFunctions;
  105. async function listAllFunctions(projectId) {
  106. return await listFunctionsInternal(projectId, "-");
  107. }
  108. exports.listAllFunctions = listAllFunctions;
  109. async function listFunctionsInternal(projectId, region) {
  110. const functions = [];
  111. const unreacahble = new Set();
  112. let pageToken = "";
  113. while (true) {
  114. const url = `projects/${projectId}/locations/${region}/functions`;
  115. const opts = { queryParams: { filter: `environment="GEN_2"` } };
  116. if (pageToken !== "") {
  117. opts.queryParams = Object.assign(Object.assign({}, opts.queryParams), { pageToken });
  118. }
  119. const res = await client.get(url, opts);
  120. functions.push(...(res.body.functions || []));
  121. for (const region of res.body.unreachable || []) {
  122. unreacahble.add(region);
  123. }
  124. if (!res.body.nextPageToken) {
  125. return {
  126. functions,
  127. unreachable: Array.from(unreacahble),
  128. };
  129. }
  130. pageToken = res.body.nextPageToken;
  131. }
  132. }
  133. async function updateFunction(cloudFunction) {
  134. const fieldMasks = proto.fieldMasks(cloudFunction, "labels", "serviceConfig.environmentVariables", "serviceConfig.secretEnvironmentVariables");
  135. try {
  136. const queryParams = {
  137. updateMask: fieldMasks.join(","),
  138. };
  139. const res = await client.patch(cloudFunction.name, cloudFunction, { queryParams });
  140. return res.body;
  141. }
  142. catch (err) {
  143. throw functionsOpLogReject(cloudFunction.name, "update", err);
  144. }
  145. }
  146. exports.updateFunction = updateFunction;
  147. async function deleteFunction(cloudFunction) {
  148. try {
  149. const res = await client.delete(cloudFunction);
  150. return res.body;
  151. }
  152. catch (err) {
  153. throw functionsOpLogReject(cloudFunction, "update", err);
  154. }
  155. }
  156. exports.deleteFunction = deleteFunction;
/**
 * Converts a backend endpoint into the CloudFunction proto shape expected
 * by the Cloud Functions v2 API.
 * @param endpoint the backend endpoint; must have platform "gcfv2".
 * @param source the storage source describing the uploaded code archive.
 * @returns the CloudFunction resource to send to the API.
 * @throws {FirebaseError} if the endpoint is not gcfv2, its runtime is
 *   invalid, or a Pub/Sub trigger is missing its topic.
 */
function functionFromEndpoint(endpoint, source) {
    var _a, _b;
    if (endpoint.platform !== "gcfv2") {
        throw new error_1.FirebaseError("Trying to create a v2 CloudFunction with v1 API. This should never happen");
    }
    if (!runtimes.isValidRuntime(endpoint.runtime)) {
        throw new error_1.FirebaseError("Failed internal assertion. Trying to deploy a new function with a deprecated runtime." +
            " This should never happen");
    }
    const gcfFunction = {
        name: backend.functionName(endpoint),
        buildConfig: {
            runtime: endpoint.runtime,
            entryPoint: endpoint.entryPoint,
            source: {
                storageSource: source,
            },
            // No build-time environment variables are set by default.
            environmentVariables: {},
        },
        serviceConfig: {},
    };
    proto.copyIfPresent(gcfFunction, endpoint, "labels");
    proto.copyIfPresent(gcfFunction.serviceConfig, endpoint, "environmentVariables", "secretEnvironmentVariables", "ingressSettings", "timeoutSeconds");
    proto.renameIfPresent(gcfFunction.serviceConfig, endpoint, "serviceAccountEmail", "serviceAccount");
    // Memory is sent as a quantity string; amounts over 1024 MB use the Gi suffix.
    const mem = endpoint.availableMemoryMb || backend.DEFAULT_MEMORY;
    gcfFunction.serviceConfig.availableMemory = mem > 1024 ? `${mem / 1024}Gi` : `${mem}Mi`;
    proto.renameIfPresent(gcfFunction.serviceConfig, endpoint, "minInstanceCount", "minInstances");
    proto.renameIfPresent(gcfFunction.serviceConfig, endpoint, "maxInstanceCount", "maxInstances");
    if (endpoint.vpc) {
        proto.renameIfPresent(gcfFunction.serviceConfig, endpoint.vpc, "vpcConnector", "connector");
        proto.renameIfPresent(gcfFunction.serviceConfig, endpoint.vpc, "vpcConnectorEgressSettings", "egressSettings");
    }
    else if (endpoint.vpc === null) {
        // An explicit null vpc means "clear any existing connector settings".
        gcfFunction.serviceConfig.vpcConnector = null;
        gcfFunction.serviceConfig.vpcConnectorEgressSettings = null;
    }
    if (backend.isEventTriggered(endpoint)) {
        gcfFunction.eventTrigger = {
            eventType: endpoint.eventTrigger.eventType,
        };
        if (gcfFunction.eventTrigger.eventType === v2_1.PUBSUB_PUBLISH_EVENT) {
            // Pub/Sub triggers carry the topic in the dedicated pubsubTopic
            // field; the remaining filters become attribute filters.
            if (!((_a = endpoint.eventTrigger.eventFilters) === null || _a === void 0 ? void 0 : _a.topic)) {
                throw new error_1.FirebaseError("Error: Pub/Sub event trigger is missing topic: " +
                    JSON.stringify(endpoint.eventTrigger, null, 2));
            }
            gcfFunction.eventTrigger.pubsubTopic = endpoint.eventTrigger.eventFilters.topic;
            gcfFunction.eventTrigger.eventFilters = [];
            for (const [attribute, value] of Object.entries(endpoint.eventTrigger.eventFilters)) {
                if (attribute === "topic")
                    continue;
                gcfFunction.eventTrigger.eventFilters.push({ attribute, value });
            }
        }
        else {
            // Exact-match filters, then path-pattern filters marked with the
            // "match-path-pattern" operator.
            gcfFunction.eventTrigger.eventFilters = [];
            for (const [attribute, value] of Object.entries(endpoint.eventTrigger.eventFilters || {})) {
                gcfFunction.eventTrigger.eventFilters.push({ attribute, value });
            }
            for (const [attribute, value] of Object.entries(endpoint.eventTrigger.eventFilterPathPatterns || {})) {
                gcfFunction.eventTrigger.eventFilters.push({
                    attribute,
                    value,
                    operator: "match-path-pattern",
                });
            }
        }
        proto.renameIfPresent(gcfFunction.eventTrigger, endpoint.eventTrigger, "triggerRegion", "region");
        proto.copyIfPresent(gcfFunction.eventTrigger, endpoint.eventTrigger, "channel");
        if (endpoint.eventTrigger.retry) {
            // Retry policies are not propagated; warn instead of failing.
            logger_1.logger.warn("Cannot set a retry policy on Cloud Function", endpoint.id);
        }
        gcfFunction.serviceConfig.environmentVariables = Object.assign(Object.assign({}, gcfFunction.serviceConfig.environmentVariables), { FUNCTION_SIGNATURE_TYPE: "cloudevent" });
    }
    else if (backend.isScheduleTriggered(endpoint)) {
        // Trigger kinds with no proto representation are recorded as labels so
        // endpointFromFunction can recover them.
        gcfFunction.labels = Object.assign(Object.assign({}, gcfFunction.labels), { "deployment-scheduled": "true" });
    }
    else if (backend.isTaskQueueTriggered(endpoint)) {
        gcfFunction.labels = Object.assign(Object.assign({}, gcfFunction.labels), { "deployment-taskqueue": "true" });
    }
    else if (backend.isCallableTriggered(endpoint)) {
        gcfFunction.labels = Object.assign(Object.assign({}, gcfFunction.labels), { "deployment-callable": "true" });
    }
    else if (backend.isBlockingTriggered(endpoint)) {
        gcfFunction.labels = Object.assign(Object.assign({}, gcfFunction.labels), { [constants_1.BLOCKING_LABEL]: constants_1.BLOCKING_EVENT_TO_LABEL_KEY[endpoint.blockingTrigger.eventType] });
    }
    // Tag the function with its codebase unless it is the default codebase,
    // in which case any stale codebase label is removed.
    const codebase = endpoint.codebase || projectConfig.DEFAULT_CODEBASE;
    if (codebase !== projectConfig.DEFAULT_CODEBASE) {
        gcfFunction.labels = Object.assign(Object.assign({}, gcfFunction.labels), { [constants_1.CODEBASE_LABEL]: codebase });
    }
    else {
        (_b = gcfFunction.labels) === null || _b === void 0 ? true : delete _b[constants_1.CODEBASE_LABEL];
    }
    if (endpoint.hash) {
        gcfFunction.labels = Object.assign(Object.assign({}, gcfFunction.labels), { [constants_1.HASH_LABEL]: endpoint.hash });
    }
    return gcfFunction;
}
exports.functionFromEndpoint = functionFromEndpoint;
/**
 * Converts a CloudFunction proto returned by the Cloud Functions v2 API back
 * into a backend endpoint (the inverse of functionFromEndpoint).
 * @param gcfFunction the CloudFunction resource from the API.
 * @returns the reconstructed endpoint with platform "gcfv2".
 */
function endpointFromFunction(gcfFunction) {
    var _a, _b, _c, _d, _e, _f;
    // name has the shape "projects/<project>/locations/<region>/functions/<id>".
    const [, project, , region, , id] = gcfFunction.name.split("/");
    let trigger;
    // Recover the trigger type: deployment labels first, then the eventTrigger
    // proto; anything else is a plain HTTPS function.
    if (((_a = gcfFunction.labels) === null || _a === void 0 ? void 0 : _a["deployment-scheduled"]) === "true") {
        trigger = {
            scheduleTrigger: {},
        };
    }
    else if (((_b = gcfFunction.labels) === null || _b === void 0 ? void 0 : _b["deployment-taskqueue"]) === "true") {
        trigger = {
            taskQueueTrigger: {},
        };
    }
    else if (((_c = gcfFunction.labels) === null || _c === void 0 ? void 0 : _c["deployment-callable"]) === "true") {
        trigger = {
            callableTrigger: {},
        };
    }
    else if ((_d = gcfFunction.labels) === null || _d === void 0 ? void 0 : _d[constants_1.BLOCKING_LABEL]) {
        trigger = {
            blockingTrigger: {
                eventType: constants_1.BLOCKING_LABEL_KEY_TO_EVENT[gcfFunction.labels[constants_1.BLOCKING_LABEL]],
            },
        };
    }
    else if (gcfFunction.eventTrigger) {
        const eventFilters = {};
        const eventFilterPathPatterns = {};
        if (gcfFunction.eventTrigger.pubsubTopic) {
            // The Pub/Sub topic lives in a dedicated proto field; surface it as
            // the "topic" event filter.
            eventFilters.topic = gcfFunction.eventTrigger.pubsubTopic;
        }
        else {
            // Split proto filters into exact-match filters and path-pattern
            // filters (identified by the "match-path-pattern" operator).
            for (const eventFilter of gcfFunction.eventTrigger.eventFilters || []) {
                if (eventFilter.operator === "match-path-pattern") {
                    eventFilterPathPatterns[eventFilter.attribute] = eventFilter.value;
                }
                else {
                    eventFilters[eventFilter.attribute] = eventFilter.value;
                }
            }
        }
        trigger = {
            eventTrigger: {
                eventType: gcfFunction.eventTrigger.eventType,
                retry: false,
            },
        };
        // Only attach filter maps when they are non-empty.
        if (Object.keys(eventFilters).length) {
            trigger.eventTrigger.eventFilters = eventFilters;
        }
        if (Object.keys(eventFilterPathPatterns).length) {
            trigger.eventTrigger.eventFilterPathPatterns = eventFilterPathPatterns;
        }
        proto.copyIfPresent(trigger.eventTrigger, gcfFunction.eventTrigger, "channel");
        proto.renameIfPresent(trigger.eventTrigger, gcfFunction.eventTrigger, "region", "triggerRegion");
    }
    else {
        trigger = { httpsTrigger: {} };
    }
    if (!runtimes.isValidRuntime(gcfFunction.buildConfig.runtime)) {
        logger_1.logger.debug("GCFv2 function has a deprecated runtime:", JSON.stringify(gcfFunction, null, 2));
    }
    const endpoint = Object.assign(Object.assign({ platform: "gcfv2", id,
        project,
        region }, trigger), { entryPoint: gcfFunction.buildConfig.entryPoint, runtime: gcfFunction.buildConfig.runtime, uri: gcfFunction.serviceConfig.uri });
    proto.copyIfPresent(endpoint, gcfFunction.serviceConfig, "ingressSettings", "environmentVariables", "secretEnvironmentVariables", "timeoutSeconds");
    proto.renameIfPresent(endpoint, gcfFunction.serviceConfig, "serviceAccount", "serviceAccountEmail");
    // availableMemory is a quantity string (e.g. "256Mi"); convert back to MiB.
    proto.convertIfPresent(endpoint, gcfFunction.serviceConfig, "availableMemoryMb", "availableMemory", (prod) => {
        if (prod === null) {
            logger_1.logger.debug("Prod should always return a valid memory amount");
            return prod;
        }
        const mem = mebibytes(prod);
        if (!backend.isValidMemoryOption(mem)) {
            logger_1.logger.warn("Converting a function to an endpoint with an invalid memory option", mem);
        }
        return mem;
    });
    proto.renameIfPresent(endpoint, gcfFunction.serviceConfig, "minInstances", "minInstanceCount");
    proto.renameIfPresent(endpoint, gcfFunction.serviceConfig, "maxInstances", "maxInstanceCount");
    proto.copyIfPresent(endpoint, gcfFunction, "labels");
    if (gcfFunction.serviceConfig.vpcConnector) {
        endpoint.vpc = { connector: gcfFunction.serviceConfig.vpcConnector };
        proto.renameIfPresent(endpoint.vpc, gcfFunction.serviceConfig, "egressSettings", "vpcConnectorEgressSettings");
    }
    // Codebase and hash round-trip through labels (see functionFromEndpoint).
    endpoint.codebase = ((_e = gcfFunction.labels) === null || _e === void 0 ? void 0 : _e[constants_1.CODEBASE_LABEL]) || projectConfig.DEFAULT_CODEBASE;
    if ((_f = gcfFunction.labels) === null || _f === void 0 ? void 0 : _f[constants_1.HASH_LABEL]) {
        endpoint.hash = gcfFunction.labels[constants_1.HASH_LABEL];
    }
    return endpoint;
}
exports.endpointFromFunction = endpointFromFunction;
  347. exports.endpointFromFunction = endpointFromFunction;