diff --git a/.gitignore b/.gitignore index f0130a07..f683dbcf 100644 --- a/.gitignore +++ b/.gitignore @@ -50,3 +50,5 @@ generated .env.prod .env.test todo.md + +/generated \ No newline at end of file diff --git a/DeleteUserDataActivity/backupAndDelete.ts b/DeleteUserDataActivity/backupAndDelete.ts new file mode 100644 index 00000000..4e888e43 --- /dev/null +++ b/DeleteUserDataActivity/backupAndDelete.ts @@ -0,0 +1,461 @@ +import { BlobService } from "azure-storage"; +import { sequenceT } from "fp-ts/lib/Apply"; +import { Either, fromOption, left, toError } from "fp-ts/lib/Either"; +import { none, Option, some } from "fp-ts/lib/Option"; +import { + fromEither, + fromLeft, + TaskEither, + taskEither, + taskEitherSeq, + tryCatch +} from "fp-ts/lib/TaskEither"; + +import { array } from "fp-ts/lib/Array"; +import { MessageContent } from "io-functions-commons/dist/generated/definitions/MessageContent"; +import { + RetrievedMessageWithContent, + RetrievedMessageWithoutContent +} from "io-functions-commons/dist/src/models/message"; +import { RetrievedMessageStatus } from "io-functions-commons/dist/src/models/message_status"; +import { RetrievedNotification } from "io-functions-commons/dist/src/models/notification"; +import { RetrievedNotificationStatus } from "io-functions-commons/dist/src/models/notification_status"; +import { RetrievedProfile } from "io-functions-commons/dist/src/models/profile"; +import { FiscalCode } from "italia-ts-commons/lib/strings"; +import { + IResultIterator, + iteratorToArray +} from "../utils/extensions/documentdb"; +import { MessageDeletableModel } from "../utils/extensions/models/message"; +import { MessageStatusDeletableModel } from "../utils/extensions/models/message_status"; +import { NotificationDeletableModel } from "../utils/extensions/models/notification"; +import { NotificationStatusDeletableModel } from "../utils/extensions/models/notification_status"; +import { ProfileDeletableModel } from "../utils/extensions/models/profile"; +import { DataFailure, IBlobServiceInfo } from "./types"; +import { saveDataToBlob } from "./utils"; +import { toDocumentDeleteFailure, toQueryFailure } from "./utils"; + +import { + fromQueryEither, + QueryError +} from "io-functions-commons/dist/src/utils/documentdb"; + +/** + * Recursively consumes an iterator and executes operations on every item + * @param deleteSingle takes an item and delete it + * @param userDataBackup references about where to save data + * @param makeBackupBlobName takes an item and construct a name for the backup blob + * @param iterator an iterator of every result from the db + */ +const executeRecursiveBackupAndDelete = ( + deleteSingle: (item: T) => Promise>, + userDataBackup: IBlobServiceInfo, + makeBackupBlobName: (item: T) => string, + iterator: IResultIterator +): TaskEither< + // tslint:disable-next-line: use-type-alias + DataFailure, + readonly T[] +> => + tryCatch(iterator.executeNext, toError) + // this is just type lifting + .foldTaskEither>( + e => fromLeft(toQueryFailure(e)), + e => fromEither(e).mapLeft(toQueryFailure) + ) + .chain(maybeResults => + maybeResults.fold( + // if the iterator content is none, exit the recursion + taskEither.of([]), + items => + // executes backup&delete for this set of items + array.sequence(taskEither)( + items.map((item: T) => + sequenceT(taskEitherSeq)< + DataFailure, + // tslint:disable-next-line: readonly-array + [ + TaskEither, + TaskEither, + // tslint:disable-next-line: readonly-array + TaskEither + ] + >( + saveDataToBlob( + userDataBackup, + 
makeBackupBlobName(item), + item + ), + tryCatch(() => deleteSingle(item), toError) + .mapLeft(toDocumentDeleteFailure) + .chain(_ => fromEither(_).mapLeft(toDocumentDeleteFailure)), + // recursive step + executeRecursiveBackupAndDelete( + deleteSingle, + userDataBackup, + makeBackupBlobName, + iterator + ) + ) + // aggregates the results at the end of the recursion + .map(([_, __, nextResults]) => [item, ...nextResults]) + ) + ) + ) + ); + +/** + * Backup and delete every version of the profile + * + * @param param0.profileModel instance of ProfileModel + * @param param0.userDataBackup information about the blob storage account to place backup into + * @param param0.fiscalCode the identifier of the user + */ +const backupAndDeleteProfile = ({ + fiscalCode, + profileModel, + userDataBackup +}: { + profileModel: ProfileDeletableModel; + userDataBackup: IBlobServiceInfo; + fiscalCode: FiscalCode; +}) => + executeRecursiveBackupAndDelete( + item => profileModel.deleteProfileVersion(item.fiscalCode, item.id), + userDataBackup, + item => `profile/${item.id}.json`, + profileModel.findAllVersionsByModelId(fiscalCode) + ); + +/** + * Backup and delete a given notification + * + * @param param0.notificationModel instance of NotificationModel + * @param param0.userDataBackup information about the blob storage account to place backup into + * @param param0.notification the notification + */ +const backupAndDeleteNotification = ({ + notificationModel, + userDataBackup, + notification +}: { + notificationModel: NotificationDeletableModel; + userDataBackup: IBlobServiceInfo; + notification: RetrievedNotification; +}): TaskEither => + sequenceT(taskEitherSeq)< + DataFailure, + // tslint:disable-next-line: readonly-array + [ + TaskEither, + TaskEither + ] + >( + saveDataToBlob( + userDataBackup, + `notification/${notification.id}.json`, + notification + ), + fromQueryEither(() => + notificationModel.deleteNotification( + notification.messageId, + notification.id + ) + ).mapLeft(toDocumentDeleteFailure) + ).map(_ => notification); + +/** + * Find all versions of a notification status, then backup and delete each document + * @param param0.notificationStatusModel instance of NotificationStatusModel + * @param param0.userDataBackup information about the blob storage account to place backup into + * @param param0.notification parent notification + * + */ +const backupAndDeleteNotificationStatus = ({ + notificationStatusModel, + userDataBackup, + notification +}: { + notificationStatusModel: NotificationStatusDeletableModel; + userDataBackup: IBlobServiceInfo; + notification: RetrievedNotification; +}): TaskEither => { + return executeRecursiveBackupAndDelete( + item => + notificationStatusModel.deleteNotificationStatusVersion( + item.notificationId, + item.id + ), + userDataBackup, + item => `notification-status/${item.id}.json`, + notificationStatusModel.findAllVersionsByNotificationId(notification.id) + ); +}; + +/** + * Backup and delete a given message + * + * @param param0.messageStatusModel instance of MessageStatusModel + * @param param0.userDataBackup information about the blob storage account to place backup into + * @param param0.message the message + */ +const backupAndDeleteMessage = ({ + messageModel, + userDataBackup, + message +}: { + messageModel: MessageDeletableModel; + userDataBackup: IBlobServiceInfo; + message: RetrievedMessageWithoutContent; +}): TaskEither => + sequenceT(taskEitherSeq)< + DataFailure, + // tslint:disable-next-line: readonly-array + [ + TaskEither, + TaskEither + ] + >( 
+ saveDataToBlob( + userDataBackup, + `message/${message.id}.json`, + message + ), + fromQueryEither(() => + messageModel.deleteMessage(message.fiscalCode, message.id) + ).mapLeft(toDocumentDeleteFailure) + ).map(_ => message); + +const backupAndDeleteMessageContent = ({ + messageContentBlobService, + messageModel, + userDataBackup, + message +}: { + messageContentBlobService: BlobService; + messageModel: MessageDeletableModel; + userDataBackup: IBlobServiceInfo; + message: RetrievedMessageWithoutContent; +}): TaskEither> => + tryCatch( + () => + messageModel.getContentFromBlob(messageContentBlobService, message.id), + toError + ) + // type lift + // from TaskEither of Either of Option of X + // to TaskEither of X + // this way we collaps every left/none case into the same path + .chain(fromEither) + .chain(e => fromEither(fromOption(undefined)(e))) + .foldTaskEither>( + _ => { + // unfortunately, a document not found is threated like a query error + return taskEither.of(none); + }, + content => + taskEither + .of(void 0) + .chain(_ => + saveDataToBlob( + userDataBackup, + `message-content/${message.id}.json`, + content + ) + ) + .chain(_ => + tryCatch( + () => + messageModel.deleteContentFromBlob( + messageContentBlobService, + message.id + ), + toError + ).mapLeft(toDocumentDeleteFailure) + ) + .map(_ => some(content)) + ); + +/** + * Find all versions of a message status, then backup and delete each document + * @param param0.messageStatusModel instance of MessageStatusModel + * @param param0.userDataBackup information about the blob storage account to place backup into + * @param param0.message parent message + * + */ +const backupAndDeleteMessageStatus = ({ + messageStatusModel, + userDataBackup, + message +}: { + messageStatusModel: MessageStatusDeletableModel; + userDataBackup: IBlobServiceInfo; + message: RetrievedMessageWithoutContent; +}): TaskEither => + executeRecursiveBackupAndDelete( + item => + messageStatusModel.deleteMessageStatusVersion(item.messageId, item.id), + userDataBackup, + item => `message-status/${item.id}.json`, + messageStatusModel.findAllVersionsByModelId(message.id) + ); + +/** + * For a given message, search all its notifications and backup&delete each one including its own notification status + * + * @param param0.message the message to search notification for + * @param param0.notificationModel instance of NotificationModel + * @param param0.notificationStatusModel instance of NotificationStatusModel + * @param param0.userDataBackup information about the blob storage account to place backup into + */ +const backupAndDeleteAllNotificationsData = ({ + message, + notificationModel, + notificationStatusModel, + userDataBackup +}: { + message: RetrievedMessageWithoutContent; + notificationModel: NotificationDeletableModel; + notificationStatusModel: NotificationStatusDeletableModel; + userDataBackup: IBlobServiceInfo; +}) => + fromQueryEither>(() => + iteratorToArray(notificationModel.findNotificationsForMessage(message.id)) + ) + .mapLeft(toQueryFailure) + .foldTaskEither( + e => fromEither(left(e)), + notifications => + array.sequence(taskEitherSeq)( + notifications.map(notification => + sequenceT(taskEitherSeq)( + backupAndDeleteNotificationStatus({ + notification, + notificationStatusModel, + userDataBackup + }), + backupAndDeleteNotification({ + notification, + notificationModel, + userDataBackup + }) + ) + ) + ) + ); + +/** + * For a given user, search all its messages and backup&delete each one including its own child models (messagestatus, 
notifications, message content)
+ * @param param0.messageContentBlobService instance of blob service where message contents are stored
+ * @param param0.messageModel instance of MessageModel
+ * @param param0.messageStatusModel instance of MessageStatusModel
+ * @param param0.notificationModel instance of NotificationModel
+ * @param param0.notificationStatusModel instance of NotificationStatusModel
+ * @param param0.userDataBackup information about the blob storage account to place backup into
+ * @param param0.fiscalCode identifier of the user
+ */
+const backupAndDeleteAllMessagesData = ({
+  messageContentBlobService,
+  messageModel,
+  messageStatusModel,
+  notificationModel,
+  notificationStatusModel,
+  userDataBackup,
+  fiscalCode
+}: {
+  messageContentBlobService: BlobService;
+  messageModel: MessageDeletableModel;
+  messageStatusModel: MessageStatusDeletableModel;
+  notificationModel: NotificationDeletableModel;
+  notificationStatusModel: NotificationStatusDeletableModel;
+  userDataBackup: IBlobServiceInfo;
+  fiscalCode: FiscalCode;
+}) =>
+  fromQueryEither(() =>
+    iteratorToArray(messageModel.findMessages(fiscalCode))
+  )
+    .mapLeft(toQueryFailure)
+    .foldTaskEither(
+      e => fromEither(left(e)),
+      messages => {
+        return array.sequence(taskEitherSeq)(
+          messages.map(message => {
+            // cast needed because findMessages has a wrong signature
+            // tslint:disable-next-line: no-any
+            const retrievedMessage = (message as any) as RetrievedMessageWithoutContent;
+            return sequenceT(taskEitherSeq)(
+              backupAndDeleteMessageContent({
+                message: retrievedMessage,
+                messageContentBlobService,
+                messageModel,
+                userDataBackup
+              }),
+              backupAndDeleteMessageStatus({
+                message: retrievedMessage,
+                messageStatusModel,
+                userDataBackup
+              }),
+              backupAndDeleteAllNotificationsData({
+                message: retrievedMessage,
+                notificationModel,
+                notificationStatusModel,
+                userDataBackup
+              })
+            ).chain(_ =>
+              backupAndDeleteMessage({
+                message: retrievedMessage,
+                messageModel,
+                userDataBackup
+              })
+            );
+          })
+        );
+      }
+    );
+
+/**
+ * Explores the user data structures and deletes all documents and blobs. Before deleting, it saves a backup blob for every document found, in a dedicated storage folder.
+ * Versioned models are backed up with one blob per document version.
+ * Deletions happen only after the respective document has been successfully backed up.
+ * Backups and deletions of parent models happen only after every child model has been backed up and deleted successfully (example: Message and MessageStatus).
+ * This is important because children are found starting from their parents; deleting parents first could leave dangling child models behind if an error occurred.
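 *
 * Illustrative usage (a sketch only: the model and blob-service instances are
 * assumed to be built as in DeleteUserDataActivity/index.ts, and the real call
 * site is DeleteUserDataActivity/handler.ts):
 *
 * @example
 * backupAndDeleteAllUserData({
 *   messageContentBlobService,
 *   messageModel,
 *   messageStatusModel,
 *   notificationModel,
 *   notificationStatusModel,
 *   profileModel,
 *   userDataBackup: { blobService, containerName, folder },
 *   fiscalCode
 * })
 *   // the result is a lazy TaskEither: nothing happens until run() is called
 *   .run()
 *   .then(result => result.fold(console.error, () => console.log("all user data deleted")));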
+ *
+ * @param param0.messageContentBlobService instance of blob service where message contents are stored
+ * @param param0.messageModel instance of MessageModel
+ * @param param0.messageStatusModel instance of MessageStatusModel
+ * @param param0.notificationModel instance of NotificationModel
+ * @param param0.notificationStatusModel instance of NotificationStatusModel
+ * @param param0.profileModel instance of ProfileModel
+ * @param param0.userDataBackup information about the blob storage account to place backup into
+ * @param param0.fiscalCode identifier of the user
+ */
+export const backupAndDeleteAllUserData = ({
+  messageContentBlobService,
+  messageModel,
+  messageStatusModel,
+  notificationModel,
+  notificationStatusModel,
+  profileModel,
+  userDataBackup,
+  fiscalCode
+}: {
+  messageContentBlobService: BlobService;
+  messageModel: MessageDeletableModel;
+  messageStatusModel: MessageStatusDeletableModel;
+  notificationModel: NotificationDeletableModel;
+  notificationStatusModel: NotificationStatusDeletableModel;
+  profileModel: ProfileDeletableModel;
+  userDataBackup: IBlobServiceInfo;
+  fiscalCode: FiscalCode;
+}) =>
+  backupAndDeleteAllMessagesData({
+    fiscalCode,
+    messageContentBlobService,
+    messageModel,
+    messageStatusModel,
+    notificationModel,
+    notificationStatusModel,
+    userDataBackup
+  }).chain(_ =>
+    backupAndDeleteProfile({ profileModel, userDataBackup, fiscalCode })
+  );
diff --git a/DeleteUserDataActivity/function.json b/DeleteUserDataActivity/function.json
new file mode 100644
index 00000000..daef6592
--- /dev/null
+++ b/DeleteUserDataActivity/function.json
@@ -0,0 +1,10 @@
+{
+  "bindings": [
+    {
+      "name": "name",
+      "type": "activityTrigger",
+      "direction": "in"
+    }
+  ],
+  "scriptFile": "../dist/DeleteUserDataActivity/index.js"
+}
\ No newline at end of file
diff --git a/DeleteUserDataActivity/handler.ts b/DeleteUserDataActivity/handler.ts
new file mode 100644
index 00000000..24f56ef2
--- /dev/null
+++ b/DeleteUserDataActivity/handler.ts
@@ -0,0 +1,100 @@
+/**
+ * This activity backs up and then deletes all the data about a user contained in our db.
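 *
 * A minimal test-style sketch of how the exported factory is used (mock wiring
 * is assumed here, mirroring the other activity tests in this repo):
 *
 * @example
 * const handler = createDeleteUserDataActivityHandler({
 *   messageContentBlobService,
 *   messageModel,
 *   messageStatusModel,
 *   notificationModel,
 *   notificationStatusModel,
 *   profileModel,
 *   userDataBackupBlobService,
 *   userDataBackupContainerName
 * });
 * const result = await handler(contextMock, {
 *   backupFolder: "a-backup-folder",
 *   fiscalCode: aFiscalCode
 * });
 * expect(ActivityResultSuccess.decode(result).isRight()).toBe(true);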
+ */ + +import * as t from "io-ts"; + +import { fromEither } from "fp-ts/lib/TaskEither"; + +import { Context } from "@azure/functions"; + +import { BlobService } from "azure-storage"; +import { readableReport } from "italia-ts-commons/lib/reporters"; +import { NonEmptyString } from "italia-ts-commons/lib/strings"; + +import { MessageDeletableModel } from "../utils/extensions/models/message"; +import { MessageStatusDeletableModel } from "../utils/extensions/models/message_status"; +import { NotificationDeletableModel } from "../utils/extensions/models/notification"; +import { NotificationStatusDeletableModel } from "../utils/extensions/models/notification_status"; +import { ProfileDeletableModel } from "../utils/extensions/models/profile"; +import { backupAndDeleteAllUserData } from "./backupAndDelete"; +import { + ActivityInput, + ActivityResult, + ActivityResultFailure, + ActivityResultSuccess, + InvalidInputFailure +} from "./types"; +import { logFailure } from "./utils"; + +const logPrefix = `DeleteUserDataActivity`; + +export interface IActivityHandlerInput { + messageModel: MessageDeletableModel; + messageStatusModel: MessageStatusDeletableModel; + notificationModel: NotificationDeletableModel; + notificationStatusModel: NotificationStatusDeletableModel; + profileModel: ProfileDeletableModel; + messageContentBlobService: BlobService; + userDataBackupBlobService: BlobService; + userDataBackupContainerName: NonEmptyString; +} + +/** + * Factory methods that builds an activity function + */ +export function createDeleteUserDataActivityHandler({ + messageContentBlobService, + messageModel, + messageStatusModel, + notificationModel, + notificationStatusModel, + profileModel, + userDataBackupBlobService, + userDataBackupContainerName +}: IActivityHandlerInput): ( + context: Context, + input: unknown +) => Promise { + return (context: Context, input: unknown) => + // validates the input + fromEither( + ActivityInput.decode(input).mapLeft( + (reason: t.Errors) => + InvalidInputFailure.encode({ + kind: "INVALID_INPUT_FAILURE", + reason: readableReport(reason) + }) + ) + ) + // then perform backup&delete on all user data + .chain(({ fiscalCode, backupFolder }) => + backupAndDeleteAllUserData({ + fiscalCode, + messageContentBlobService, + messageModel, + messageStatusModel, + notificationModel, + notificationStatusModel, + profileModel, + userDataBackup: { + blobService: userDataBackupBlobService, + containerName: userDataBackupContainerName, + folder: backupFolder + } + }) + ) + .bimap( + failure => { + logFailure(context, logPrefix)(failure); + return failure; + }, + _ => + ActivityResultSuccess.encode({ + kind: "SUCCESS" + }) + ) + .run() + // unfold the value from the either + .then(e => e.value); +} diff --git a/DeleteUserDataActivity/index.ts b/DeleteUserDataActivity/index.ts new file mode 100644 index 00000000..ad989d00 --- /dev/null +++ b/DeleteUserDataActivity/index.ts @@ -0,0 +1,89 @@ +import * as documentDbUtils from "io-functions-commons/dist/src/utils/documentdb"; + +import { getRequiredStringEnv } from "io-functions-commons/dist/src/utils/env"; + +import { documentClient } from "../utils/cosmosdb"; + +import { createDeleteUserDataActivityHandler } from "./handler"; + +import { createBlobService } from "azure-storage"; +import { MESSAGE_COLLECTION_NAME } from "io-functions-commons/dist/src/models/message"; +import { MESSAGE_STATUS_COLLECTION_NAME } from "io-functions-commons/dist/src/models/message_status"; +import { NOTIFICATION_COLLECTION_NAME } from 
"io-functions-commons/dist/src/models/notification"; +import { NOTIFICATION_STATUS_COLLECTION_NAME } from "io-functions-commons/dist/src/models/notification_status"; +import { PROFILE_COLLECTION_NAME } from "io-functions-commons/dist/src/models/profile"; +import { MessageDeletableModel } from "../utils/extensions/models/message"; +import { MessageStatusDeletableModel } from "../utils/extensions/models/message_status"; +import { NotificationDeletableModel } from "../utils/extensions/models/notification"; +import { NotificationStatusDeletableModel } from "../utils/extensions/models/notification_status"; +import { ProfileDeletableModel } from "../utils/extensions/models/profile"; + +const cosmosDbName = getRequiredStringEnv("COSMOSDB_NAME"); + +const documentDbDatabaseUrl = documentDbUtils.getDatabaseUri(cosmosDbName); + +const messageModel = new MessageDeletableModel( + documentClient, + documentDbUtils.getCollectionUri( + documentDbDatabaseUrl, + MESSAGE_COLLECTION_NAME + ), + getRequiredStringEnv("MESSAGE_CONTAINER_NAME") +); + +const messageStatusModel = new MessageStatusDeletableModel( + documentClient, + documentDbUtils.getCollectionUri( + documentDbDatabaseUrl, + MESSAGE_STATUS_COLLECTION_NAME + ) +); + +const notificationModel = new NotificationDeletableModel( + documentClient, + documentDbUtils.getCollectionUri( + documentDbDatabaseUrl, + NOTIFICATION_COLLECTION_NAME + ) +); + +const notificationStatusModel = new NotificationStatusDeletableModel( + documentClient, + documentDbUtils.getCollectionUri( + documentDbDatabaseUrl, + NOTIFICATION_STATUS_COLLECTION_NAME + ) +); + +const profileModel = new ProfileDeletableModel( + documentClient, + documentDbUtils.getCollectionUri( + documentDbDatabaseUrl, + PROFILE_COLLECTION_NAME + ) +); + +const userDataBackupBlobService = createBlobService( + getRequiredStringEnv("UserDataBackupStorageConnection") +); + +const messageContentBlobService = createBlobService( + getRequiredStringEnv("StorageConnection") +); + +const userDataBackupContainerName = getRequiredStringEnv( + "USER_DATA_BACKUP_CONTAINER_NAME" +); + +const activityFunctionHandler = createDeleteUserDataActivityHandler({ + messageContentBlobService, + messageModel, + messageStatusModel, + notificationModel, + notificationStatusModel, + profileModel, + userDataBackupBlobService, + userDataBackupContainerName +}); + +export default activityFunctionHandler; diff --git a/DeleteUserDataActivity/types.ts b/DeleteUserDataActivity/types.ts new file mode 100644 index 00000000..a0529454 --- /dev/null +++ b/DeleteUserDataActivity/types.ts @@ -0,0 +1,81 @@ +import { BlobService } from "azure-storage"; +import * as t from "io-ts"; +import { FiscalCode, NonEmptyString } from "italia-ts-commons/lib/strings"; + +// Activity input +export const ActivityInput = t.interface({ + backupFolder: NonEmptyString, + fiscalCode: FiscalCode +}); +export type ActivityInput = t.TypeOf; + +// Activity success result +export const ActivityResultSuccess = t.interface({ + kind: t.literal("SUCCESS") +}); +export type ActivityResultSuccess = t.TypeOf; + +// Activity failed because of invalid input +export const InvalidInputFailure = t.interface({ + kind: t.literal("INVALID_INPUT_FAILURE"), + reason: t.string +}); +export type InvalidInputFailure = t.TypeOf; + +// Activity failed because of an error on a query +export const QueryFailure = t.intersection([ + t.interface({ + kind: t.literal("QUERY_FAILURE"), + reason: t.string + }), + t.partial({ query: t.string }) +]); +export type QueryFailure = t.TypeOf; + +// activity 
failed for user not found +export const UserNotFound = t.interface({ + kind: t.literal("USER_NOT_FOUND_FAILURE") +}); +export type UserNotFound = t.TypeOf; + +// activity failed while deleting a document from the db +export const DocumentDeleteFailure = t.interface({ + kind: t.literal("DELETE_FAILURE"), + reason: t.string +}); +export type DocumentDeleteFailure = t.TypeOf; + +// activity failed while creating a new blob on storage +export const BlobCreationFailure = t.interface({ + kind: t.literal("BLOB_FAILURE"), + reason: t.string +}); +export type BlobCreationFailure = t.TypeOf; + +export const ActivityResultFailure = t.taggedUnion("kind", [ + UserNotFound, + QueryFailure, + InvalidInputFailure, + BlobCreationFailure, + DocumentDeleteFailure +]); +export type ActivityResultFailure = t.TypeOf; + +export const ActivityResult = t.taggedUnion("kind", [ + ActivityResultSuccess, + ActivityResultFailure +]); +export type ActivityResult = t.TypeOf; + +// type alias for fetch, delete and backup of data +export type DataFailure = + | QueryFailure + | BlobCreationFailure + | DocumentDeleteFailure; + +// define a value object with the info related to the blob storage for backup files +export interface IBlobServiceInfo { + blobService: BlobService; + containerName: string; + folder?: NonEmptyString; +} diff --git a/DeleteUserDataActivity/utils.ts b/DeleteUserDataActivity/utils.ts new file mode 100644 index 00000000..4d148c9f --- /dev/null +++ b/DeleteUserDataActivity/utils.ts @@ -0,0 +1,108 @@ +import { Context } from "@azure/functions"; +import { BlobService } from "azure-storage"; +import { toString } from "fp-ts/lib/function"; +import { TaskEither, taskify } from "fp-ts/lib/TaskEither"; +import { QueryError } from "io-functions-commons/dist/src/utils/documentdb"; +import { + ActivityResultFailure, + BlobCreationFailure, + DocumentDeleteFailure, + IBlobServiceInfo, + QueryFailure +} from "./types"; + +/** + * To be used for exhaustive checks + */ +export function assertNever(_: never): never { + throw new Error("should not have executed this"); +} + +/** + * to cast an error to QueryFailure + * @param err + */ +export const toQueryFailure = (err: Error | QueryError): QueryFailure => + QueryFailure.encode({ + kind: "QUERY_FAILURE", + reason: err instanceof Error ? err.message : `QueryError: ${toString(err)}` + }); + +/** + * to cast an error to a DocumentDeleteFailure + * @param err + */ +export const toDocumentDeleteFailure = ( + err: Error | QueryError +): DocumentDeleteFailure => + DocumentDeleteFailure.encode({ + kind: "DELETE_FAILURE", + reason: err instanceof Error ? 
err.message : toString(err) + }); + +/** + * Logs depending on failure type + * @param context the Azure functions context + * @param failure the failure to log + */ +export const logFailure = (context: Context, logPrefix: string) => ( + failure: ActivityResultFailure +): void => { + switch (failure.kind) { + case "INVALID_INPUT_FAILURE": + context.log.error( + `${logPrefix}|Error decoding input|ERROR=${failure.reason}` + ); + break; + case "QUERY_FAILURE": + context.log.error( + `${logPrefix}|Error ${failure.query} query error|ERROR=${failure.reason}` + ); + break; + case "BLOB_FAILURE": + context.log.error( + `${logPrefix}|Error saving blob|ERROR=${failure.reason}` + ); + break; + case "USER_NOT_FOUND_FAILURE": + context.log.error(`${logPrefix}|Error user not found|ERROR=`); + break; + case "DELETE_FAILURE": + context.log.error( + `${logPrefix}|Error deleting data|ERROR=${failure.reason}` + ); + break; + default: + assertNever(failure); + } +}; + +/** + * Saves data into a dedicated blob + * @param blobServiceInfo references about where to save data + * @param blobName name of the blob to be saved. It might not include a folder if specified in blobServiceInfo + * @param data serializable data to be saved + * + * @returns either a blob failure or the saved object + */ +export const saveDataToBlob = ( + { blobService, containerName, folder }: IBlobServiceInfo, + blobName: string, + data: T +): TaskEither => { + return taskify(cb => + blobService.createBlockBlobFromText( + containerName, + `${folder}${folder ? "/" : ""}${blobName}`, + JSON.stringify(data), + cb + ) + )().bimap( + err => + BlobCreationFailure.encode({ + kind: "BLOB_FAILURE", + reason: err.message + }), + _ => data + ); +}; diff --git a/UserDataDeleteOrchestrator/GetUserDataProcessing/__tests__/handler.test.ts b/GetUserDataProcessingActivity/__tests__/handler.test.ts similarity index 50% rename from UserDataDeleteOrchestrator/GetUserDataProcessing/__tests__/handler.test.ts rename to GetUserDataProcessingActivity/__tests__/handler.test.ts index f72d45dd..6da21b51 100644 --- a/UserDataDeleteOrchestrator/GetUserDataProcessing/__tests__/handler.test.ts +++ b/GetUserDataProcessingActivity/__tests__/handler.test.ts @@ -2,38 +2,55 @@ import { left, right } from "fp-ts/lib/Either"; -import { context as contextMock } from "../../../__mocks__/durable-functions"; -import { aUserDataProcessing } from "../../../__mocks__/mocks"; +import { context as contextMock } from "../../__mocks__/durable-functions"; +import { aFiscalCode, aUserDataProcessing } from "../../__mocks__/mocks"; import { ActivityInput, - ActivityResultFailure, - createGetUserDataProcessingHandler + ActivityResultInvalidInputFailure, + ActivityResultNotFoundFailure, + ActivityResultQueryFailure, + ActivityResultSuccess, + createSetUserDataProcessingStatusActivityHandler } from "../handler"; import { QueryError } from "documentdb"; import { none, some } from "fp-ts/lib/Option"; import { UserDataProcessingModel } from "io-functions-commons/dist/src/models/user_data_processing"; -const aFiscalCode = aUserDataProcessing.fiscalCode; const aChoice = aUserDataProcessing.choice; -describe("GetUserDataProcessingHandler", () => { - it("should retrieve an existing record", async () => { +describe("SetUserDataProcessingStatusActivityHandler", () => { + it("should handle a result", async () => { const mockModel = ({ findOneUserDataProcessingById: jest.fn(async () => right(some(aUserDataProcessing)) ) } as any) as UserDataProcessingModel; - const handler = 
createGetUserDataProcessingHandler(mockModel); + const handler = createSetUserDataProcessingStatusActivityHandler(mockModel); const input: ActivityInput = { choice: aChoice, fiscalCode: aFiscalCode }; const result = await handler(contextMock, input); - expect(result.kind).toEqual("SUCCESS"); + expect(ActivityResultSuccess.decode(result).isRight()).toBe(true); + }); + + it("should handle a record not found failure", async () => { + const mockModel = ({ + findOneUserDataProcessingById: jest.fn(async () => right(none)) + } as any) as UserDataProcessingModel; + + const handler = createSetUserDataProcessingStatusActivityHandler(mockModel); + const input: ActivityInput = { + choice: aChoice, + fiscalCode: aFiscalCode + }; + const result = await handler(contextMock, input); + + expect(ActivityResultNotFoundFailure.decode(result).isRight()).toBe(true); }); it("should handle a query error", async () => { @@ -45,56 +62,45 @@ describe("GetUserDataProcessingHandler", () => { ) } as any) as UserDataProcessingModel; - const handler = createGetUserDataProcessingHandler(mockModel); + const handler = createSetUserDataProcessingStatusActivityHandler(mockModel); const input: ActivityInput = { choice: aChoice, fiscalCode: aFiscalCode }; const result = await handler(contextMock, input); - ActivityResultFailure.decode(result).fold( - err => fail(`Failing decoding result, response: ${JSON.stringify(err)}`), - failure => { - expect(failure.kind).toEqual(expect.any(String)); - } - ); + expect(ActivityResultQueryFailure.decode(result).isRight()).toBe(true); }); - it("should handle a record not found", async () => { + it("should handle a rejection", async () => { const mockModel = ({ - findOneUserDataProcessingById: jest.fn(async () => right(none)) + findOneUserDataProcessingById: jest.fn(async () => { + throw new Error("my unhandled rejection"); + }) } as any) as UserDataProcessingModel; - const handler = createGetUserDataProcessingHandler(mockModel); + const handler = createSetUserDataProcessingStatusActivityHandler(mockModel); const input: ActivityInput = { choice: aChoice, fiscalCode: aFiscalCode }; const result = await handler(contextMock, input); - ActivityResultFailure.decode(result).fold( - err => fail(`Failing decoding result, response: ${JSON.stringify(err)}`), - failure => { - expect(failure.kind).toEqual(expect.any(String)); - } - ); + expect(ActivityResultQueryFailure.decode(result).isRight()).toBe(true); }); it("should handle an invalid input", async () => { const mockModel = ({} as any) as UserDataProcessingModel; - const handler = createGetUserDataProcessingHandler(mockModel); + const handler = createSetUserDataProcessingStatusActivityHandler(mockModel); // @ts-ignore to force bad behavior const result = await handler(contextMock, { invalid: "input" }); - ActivityResultFailure.decode(result).fold( - err => fail(`Failing decoding result, response: ${JSON.stringify(err)}`), - failure => { - expect(failure.kind).toEqual(expect.any(String)); - } + expect(ActivityResultInvalidInputFailure.decode(result).isRight()).toBe( + true ); }); }); diff --git a/GetUserDataProcessingActivity/function.json b/GetUserDataProcessingActivity/function.json new file mode 100644 index 00000000..aacf89dd --- /dev/null +++ b/GetUserDataProcessingActivity/function.json @@ -0,0 +1,10 @@ +{ + "bindings": [ + { + "name": "name", + "type": "activityTrigger", + "direction": "in" + } + ], + "scriptFile": "../dist/GetUserDataProcessingActivity/index.js" +} \ No newline at end of file diff --git 
a/GetUserDataProcessingActivity/handler.ts b/GetUserDataProcessingActivity/handler.ts new file mode 100644 index 00000000..a8378c55 --- /dev/null +++ b/GetUserDataProcessingActivity/handler.ts @@ -0,0 +1,161 @@ +/** + * Updates the status of a UserDataProcessing record + */ + +import * as t from "io-ts"; + +import { fromEither, fromLeft, taskEither } from "fp-ts/lib/TaskEither"; + +import { Context } from "@azure/functions"; + +import { UserDataProcessingChoice } from "io-functions-commons/dist/generated/definitions/UserDataProcessingChoice"; +import { + makeUserDataProcessingId, + UserDataProcessing, + UserDataProcessingModel +} from "io-functions-commons/dist/src/models/user_data_processing"; +import { fromQueryEither } from "io-functions-commons/dist/src/utils/documentdb"; +import { readableReport } from "italia-ts-commons/lib/reporters"; +import { FiscalCode } from "italia-ts-commons/lib/strings"; + +// Activity input +export const ActivityInput = t.interface({ + choice: UserDataProcessingChoice, + fiscalCode: FiscalCode +}); +export type ActivityInput = t.TypeOf; + +// Activity result +export const ActivityResultSuccess = t.interface({ + kind: t.literal("SUCCESS"), + value: UserDataProcessing +}); +export type ActivityResultSuccess = t.TypeOf; + +// Activity failed because of invalid input +export const ActivityResultInvalidInputFailure = t.interface({ + kind: t.literal("INVALID_INPUT_FAILURE"), + reason: t.string +}); +export type ActivityResultInvalidInputFailure = t.TypeOf< + typeof ActivityResultInvalidInputFailure +>; + +// Activity failed because of invalid input +export const ActivityResultNotFoundFailure = t.interface({ + kind: t.literal("NOT_FOUND_FAILURE") +}); +export type ActivityResultNotFoundFailure = t.TypeOf< + typeof ActivityResultNotFoundFailure +>; + +// Activity failed because of an error on a query +export const ActivityResultQueryFailure = t.intersection([ + t.interface({ + kind: t.literal("QUERY_FAILURE"), + reason: t.string + }), + t.partial({ query: t.string }) +]); +export type ActivityResultQueryFailure = t.TypeOf< + typeof ActivityResultQueryFailure +>; + +export const ActivityResultFailure = t.taggedUnion("kind", [ + ActivityResultQueryFailure, + ActivityResultInvalidInputFailure, + ActivityResultNotFoundFailure +]); +export type ActivityResultFailure = t.TypeOf; + +export const ActivityResult = t.taggedUnion("kind", [ + ActivityResultSuccess, + ActivityResultFailure +]); + +export type ActivityResult = t.TypeOf; + +const logPrefix = `GetUserDataProcessingActivity`; + +function assertNever(_: never): void { + throw new Error("should not have executed this"); +} + +/** + * Logs depending on failure type + * @param context the Azure functions context + * @param failure the failure to log + */ +const logFailure = (context: Context) => ( + failure: ActivityResultFailure +): void => { + switch (failure.kind) { + case "INVALID_INPUT_FAILURE": + context.log.error( + `${logPrefix}|Error decoding input|ERROR=${failure.reason}` + ); + break; + case "QUERY_FAILURE": + context.log.error( + `${logPrefix}|Error ${failure.query} query error |ERROR=${failure.reason}` + ); + break; + case "NOT_FOUND_FAILURE": + // it might not be a failure + context.log.warn(`${logPrefix}|Error UserDataProcessing not found`); + break; + default: + assertNever(failure); + } +}; + +export const createSetUserDataProcessingStatusActivityHandler = ( + userDataProcessingModel: UserDataProcessingModel +) => (context: Context, input: unknown) => { + // the actual handler + return 
fromEither(ActivityInput.decode(input)) + .mapLeft((reason: t.Errors) => + ActivityResultInvalidInputFailure.encode({ + kind: "INVALID_INPUT_FAILURE", + reason: readableReport(reason) + }) + ) + .chain(({ fiscalCode, choice }) => + fromQueryEither(() => + userDataProcessingModel.findOneUserDataProcessingById( + fiscalCode, + makeUserDataProcessingId(choice, fiscalCode) + ) + ).foldTaskEither( + error => + fromLeft( + ActivityResultQueryFailure.encode({ + kind: "QUERY_FAILURE", + query: "findOneUserDataProcessingById", + reason: JSON.stringify(error) + }) + ), + maybeRecord => + maybeRecord.fold( + fromLeft( + ActivityResultNotFoundFailure.encode({ + kind: "NOT_FOUND_FAILURE" + }) + ), + _ => taskEither.of(_) + ) + ) + ) + .map(record => + ActivityResultSuccess.encode({ + kind: "SUCCESS", + value: record + }) + ) + .mapLeft(failure => { + logFailure(context)(failure); + return failure; + }) + .run() + .then(e => e.value); +}; diff --git a/UserDataDeleteOrchestrator/GetUserDataProcessing/index.ts b/GetUserDataProcessingActivity/index.ts similarity index 73% rename from UserDataDeleteOrchestrator/GetUserDataProcessing/index.ts rename to GetUserDataProcessingActivity/index.ts index ab1ab34e..1072543e 100644 --- a/UserDataDeleteOrchestrator/GetUserDataProcessing/index.ts +++ b/GetUserDataProcessingActivity/index.ts @@ -1,4 +1,4 @@ -import { +import { USER_DATA_PROCESSING_COLLECTION_NAME, UserDataProcessingModel } from "io-functions-commons/dist/src/models/user_data_processing"; @@ -7,9 +7,9 @@ import * as documentDbUtils from "io-functions-commons/dist/src/utils/documentdb import { getRequiredStringEnv } from "io-functions-commons/dist/src/utils/env"; -import { documentClient } from "../../utils/cosmosdb"; +import { documentClient } from "../utils/cosmosdb"; -import { createGetUserDataProcessingHandler } from "./handler"; +import { createSetUserDataProcessingStatusActivityHandler } from "./handler"; const cosmosDbName = getRequiredStringEnv("COSMOSDB_NAME"); @@ -24,8 +24,8 @@ const userDataProcessingModel = new UserDataProcessingModel( userDataProcessingsCollectionUrl ); -const functionHandler = createGetUserDataProcessingHandler( +const activityFunctionHandler = createSetUserDataProcessingStatusActivityHandler( userDataProcessingModel ); -export default functionHandler; +export default activityFunctionHandler; diff --git a/README.md b/README.md index e846e8d4..49f53f03 100644 --- a/README.md +++ b/README.md @@ -8,28 +8,43 @@ The implementation is based on the Azure Functions v2 runtime. The table lists some of the environment variables needed by the application; they may be customized as needed. 
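For instance (an illustrative sketch, not part of this change), required values are read at startup with `getRequiredStringEnv` from `io-functions-commons`, which is expected to fail fast when a variable is missing:

```typescript
import { getRequiredStringEnv } from "io-functions-commons/dist/src/utils/env";

// assumed to throw at startup if COSMOSDB_NAME is unset, so misconfiguration surfaces early
const cosmosDbName = getRequiredStringEnv("COSMOSDB_NAME");
```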
-| Variable name | Description | type | -| --------------------------- | ------------------------------------------------------------------------------------------------ | ------ | -| StorageConnection | Storage connection string to store computed visible-service.json (retrieved by io-functions-app) | string | -| COSMOSDB_CONNECTION_STRING | CosmosDB connection string (needed in triggers) | string | -| COSMOSDB_URI | CosmosDB connection URI | string | -| COSMOSDB_KEY | CosmosDB connection key | string | -| COSMOSDB_NAME | CosmosDB database name | string | -| LOGOS_URL | The url of the service logos storage | string | -| AssetsStorageConnection | The connection string used to connect to Azure Blob Storage containing the service cache | string | -| SERVICE_PRINCIPAL_CLIENT_ID | The service principal name used to get the token credentials to connect to the APIM | string | -| SERVICE_PRINCIPAL_SECRET | The service principal secret used to get the token credentials to connect to the APIM | string | -| SERVICE_PRINCIPAL_TENANT_ID | The service principal tenant id used to get the token credentials to connect to the APIM | string | -| ADB2C_CLIENT_ID | The application client id used to get the token credentials to connect to the ADB2C | string | -| ADB2C_CLIENT_KEY | The application secret used to get the token credentials to connect to the ADB2C | string | -| ADB2C_TENANT_ID | The ADB2C tenant id | string | -| AZURE_APIM | The name of the API Management service used to get the subscriptions | string | -| AZURE_APIM_HOST | The host name of the API Management service | string | -| AZURE_APIM_RESOURCE_GROUP | The name of the resource group used to get the subscriptions | string | -| AZURE_SUBSCRIPTION_ID | Credentials which identify the Azure subscription, used to init the APIM client | string | -| UserDataArchiveStorageConnection | Storage connection string to store zip file for user to download their data | string | -| USER_DATA_CONTAINER_NAME | Name of the container on which zip files with usr data are stored | string | -| MESSAGE_CONTAINER_NAME | Name of the container which stores message content | string | -| PUBLIC_API_URL | Internal URL of the API management used to send messages | string | -| PUBLIC_API_KEY | GDPR service access key for the message API | string | -| PUBLIC_DOWNLOAD_BASE_URL | Public URL of user's data zip bundle storage | string | +| Variable name | Description | type | +|----------------------------------|--------------------------------------------------------------------------------------------------|--------| +| StorageConnection | Storage connection string to store computed visible-service.json (retrieved by io-functions-app) | string | +| COSMOSDB_CONNECTION_STRING | CosmosDB connection string (needed in triggers) | string | +| COSMOSDB_URI | CosmosDB connection URI | string | +| COSMOSDB_KEY | CosmosDB connection key | string | +| COSMOSDB_NAME | CosmosDB database name | string | +| LOGOS_URL | The url of the service logos storage | string | +| AssetsStorageConnection | The connection string used to connect to Azure Blob Storage containing the service cache | string | +| SERVICE_PRINCIPAL_CLIENT_ID | The service principal name used to get the token credentials to connect to the APIM | string | +| SERVICE_PRINCIPAL_SECRET | The service principal secret used to get the token credentials to connect to the APIM | string | +| SERVICE_PRINCIPAL_TENANT_ID | The service principal tenant id used to get the token credentials to connect to the APIM | string | +| ADB2C_CLIENT_ID 
| The application client id used to get the token credentials to connect to the ADB2C | string | +| ADB2C_CLIENT_KEY | The application secret used to get the token credentials to connect to the ADB2C | string | +| ADB2C_TENANT_ID | The ADB2C tenant id | string | +| AZURE_APIM | The name of the API Management service used to get the subscriptions | string | +| AZURE_APIM_HOST | The host name of the API Management service | string | +| AZURE_APIM_RESOURCE_GROUP | The name of the resource group used to get the subscriptions | string | +| AZURE_SUBSCRIPTION_ID | Credentials which identify the Azure subscription, used to init the APIM client | string | +| UserDataArchiveStorageConnection | Storage connection string to store zip file for user to download their data | string | +| USER_DATA_CONTAINER_NAME | Name of the container on which zip files with usr data are stored | string | +| MESSAGE_CONTAINER_NAME | Name of the container which stores message content | string | +| PUBLIC_API_URL | Internal URL of the API management used to send messages | string | +| PUBLIC_API_KEY | GDPR service access key for the message API | string | +| PUBLIC_DOWNLOAD_BASE_URL | Public URL of user's data zip bundle storage | string | +| SESSION_API_URL | Internal URL of the BACKEND API used to handle session lock/unlock requests | string | +| SESSION_API_KEY | service access key for the session API | string | +| USER_DATA_BACKUP_CONTAINER_NAME | Name of the storage container in which user data is backuped before being permanently deleted | string | +| USER_DATA_DELETE_DELAY_DAYS | How many days to wait when a user asks for cancellation before effectively delete her data | number | +| UserDataBackupStorageConnection | Storage connection string for GDPR user data storage | string | + + +#### Feature flags + +This flags enable/disable some features and are expected to be boolean. 
To set them true, assign them the literal value `1` + +| Variable name | Description | default | +|------------------------------|---------------------------------------------------|---------| +| FF_ENABLE_USER_DATA_DOWNLOAD | Users' GDPR data access claims are processed | true | +| FF_ENABLE_USER_DATA_DELETE | Users' GDPR right to erasure claims are processed | true | diff --git a/SendUserDataDownloadMessageActivity/handler.ts b/SendUserDataDownloadMessageActivity/handler.ts index b0011c99..9b84cafa 100644 --- a/SendUserDataDownloadMessageActivity/handler.ts +++ b/SendUserDataDownloadMessageActivity/handler.ts @@ -98,19 +98,15 @@ async function sendMessage( apiKey: string, newMessage: NewMessage, timeoutFetch: typeof fetch -): Promise { - const response = await timeoutFetch( - `${apiUrl}/api/v1/messages/${fiscalCode}`, - { - body: JSON.stringify(newMessage), - headers: { - "Content-Type": "application/json", - "Ocp-Apim-Subscription-Key": apiKey - }, - method: "POST" - } - ); - return response.status; +): Promise { + return timeoutFetch(`${apiUrl}/api/v1/messages/${fiscalCode}`, { + body: JSON.stringify(newMessage), + headers: { + "Content-Type": "application/json", + "Ocp-Apim-Subscription-Key": apiKey + }, + method: "POST" + }); } // Activity result @@ -172,7 +168,7 @@ export const getActivityFunction = ( // throws in case of timeout so // the orchestrator can schedule a retry - const status = await sendMessage( + const response = await sendMessage( fiscalCode, publicApiUrl, publicApiKey, @@ -180,8 +176,10 @@ export const getActivityFunction = ( timeoutFetch ); + const status = response.status; + if (status !== 201) { - const msg = `${logPrefix}|ERROR=${status}`; + const msg = `${logPrefix}|ERROR=${status},${await response.text()}`; if (status >= 500) { throw new Error(msg); } else { diff --git a/SendUserDataDownloadMessageActivity/index.ts b/SendUserDataDownloadMessageActivity/index.ts index 1c364c0e..1166eaa4 100644 --- a/SendUserDataDownloadMessageActivity/index.ts +++ b/SendUserDataDownloadMessageActivity/index.ts @@ -1,31 +1,12 @@ import { getRequiredStringEnv } from "io-functions-commons/dist/src/utils/env"; -import { agent } from "italia-ts-commons"; -import { - AbortableFetch, - setFetchTimeout, - toFetch -} from "italia-ts-commons/lib/fetch"; -import { Millisecond } from "italia-ts-commons/lib/units"; +import { timeoutFetch } from "../utils/fetch"; import { getActivityFunction } from "./handler"; -// HTTP external requests timeout in milliseconds -const DEFAULT_REQUEST_TIMEOUT_MS = 10000; - // Needed to call notifications API const publicApiUrl = getRequiredStringEnv("PUBLIC_API_URL"); const publicApiKey = getRequiredStringEnv("PUBLIC_API_KEY"); const publicDownloadBaseUrl = getRequiredStringEnv("PUBLIC_DOWNLOAD_BASE_URL"); -// HTTP-only fetch with optional keepalive agent -// @see https://github.com/pagopa/io-ts-commons/blob/master/src/agent.ts#L10 -const httpApiFetch = agent.getHttpFetch(process.env); - -// a fetch that can be aborted and that gets cancelled after fetchTimeoutMs -const abortableFetch = AbortableFetch(httpApiFetch); -const timeoutFetch = toFetch( - setFetchTimeout(DEFAULT_REQUEST_TIMEOUT_MS as Millisecond, abortableFetch) -); - const index = getActivityFunction( publicApiUrl, publicApiKey, diff --git a/SetUserDataProcessingStatusActivity/__tests__/handler.test.ts b/SetUserDataProcessingStatusActivity/__tests__/handler.test.ts index d97ce826..92e3a7ed 100644 --- a/SetUserDataProcessingStatusActivity/__tests__/handler.test.ts +++ 
b/SetUserDataProcessingStatusActivity/__tests__/handler.test.ts @@ -37,9 +37,6 @@ describe("SetUserDataProcessingStatusActivityHandler", () => { const result = await handler(contextMock, input); expect(result.kind).toEqual("SUCCESS"); - if (result.kind === "SUCCESS") { - expect(result.value.status === UserDataProcessingStatusEnum.WIP); - } }); it("should handle a query error", async () => { diff --git a/SetUserDataProcessingStatusActivity/handler.ts b/SetUserDataProcessingStatusActivity/handler.ts index e0f6451b..969bac52 100644 --- a/SetUserDataProcessingStatusActivity/handler.ts +++ b/SetUserDataProcessingStatusActivity/handler.ts @@ -26,8 +26,7 @@ export type ActivityInput = t.TypeOf; // Activity result export const ActivityResultSuccess = t.interface({ - kind: t.literal("SUCCESS"), - value: UserDataProcessing + kind: t.literal("SUCCESS") }); export type ActivityResultSuccess = t.TypeOf; @@ -146,7 +145,8 @@ export const createSetUserDataProcessingStatusActivityHandler = ( () => userDataProcessingModel.createOrUpdateByNewOne({ ...currentRecord, - status: nextStatus + status: nextStatus, + updatedAt: new Date() }), "userDataProcessingModel.createOrUpdateByNewOne" ); @@ -162,8 +162,7 @@ export const createSetUserDataProcessingStatusActivityHandler = ( .chain(saveNewStatusOnDb) .map(newRecord => ActivityResultSuccess.encode({ - kind: "SUCCESS", - value: newRecord + kind: "SUCCESS" }) ) .mapLeft(failure => { diff --git a/SetUserSessionLockActivity/__tests__/handler.test.ts b/SetUserSessionLockActivity/__tests__/handler.test.ts new file mode 100644 index 00000000..ec97aab7 --- /dev/null +++ b/SetUserSessionLockActivity/__tests__/handler.test.ts @@ -0,0 +1,187 @@ +/* tslint:disable: no-any no-identical-functions */ + +import { right } from "fp-ts/lib/Either"; +import * as t from "io-ts"; +import { readableReport } from "italia-ts-commons/lib/reporters"; +import { context } from "../../__mocks__/durable-functions"; +import { aFiscalCode } from "../../__mocks__/mocks"; +import { SuccessResponse } from "../../generated/session-api/SuccessResponse"; +import { Client } from "../../utils/sessionApiClient"; +import { + ActivityInput, + ApiCallFailure, + BadApiRequestFailure, + createSetUserSessionLockActivityHandler, + InvalidInputFailure, + TransientFailure +} from "../handler"; + +// dummy but effective +const aDecodingFailure = t.number.decode("abc"); + +const aSuccessResponse = SuccessResponse.decode({ message: "ok" }).getOrElseL( + err => { + throw new Error(`Invalid mock fr SuccessResponse: ${readableReport(err)}`); + } +); + +const mockLockUserSession = jest.fn().mockImplementation(async () => + right({ + status: 200, + value: aSuccessResponse + }) +); +const mockUnlockUserSession = jest.fn().mockImplementation(async () => + right({ + status: 200, + value: aSuccessResponse + }) +); + +const mockClient = { + lockUserSession: mockLockUserSession, + unlockUserSession: mockUnlockUserSession +} as Client<"token">; + +describe("createSetUserSessionLockActivityHandler", () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + it("should fail on invalid input", async () => { + const handler = createSetUserSessionLockActivityHandler(mockClient); + + const result = await handler(context, "invalid"); + expect(InvalidInputFailure.decode(result).isRight()).toBe(true); + }); + + it("should execute correct api operation when action is LOCK", async () => { + const handler = createSetUserSessionLockActivityHandler(mockClient); + + await handler( + context, + ActivityInput.encode({ + action: "LOCK", + 
fiscalCode: aFiscalCode + }) + ); + expect(mockLockUserSession).toHaveBeenCalledTimes(1); + expect(mockUnlockUserSession).not.toHaveBeenCalled(); + }); + + it("should execute correct api operation when action is UNLOCK", async () => { + const handler = createSetUserSessionLockActivityHandler(mockClient); + + await handler( + context, + ActivityInput.encode({ + action: "UNLOCK", + fiscalCode: aFiscalCode + }) + ); + expect(mockUnlockUserSession).toHaveBeenCalledTimes(1); + expect(mockLockUserSession).not.toHaveBeenCalled(); + }); + + it("should fail when api operation fails", async () => { + mockLockUserSession.mockImplementationOnce(async () => { + throw new Error("any error"); + }); + + const handler = createSetUserSessionLockActivityHandler(mockClient); + + // the handler may throw depending on what we consider to be a transient failure + // we wrap in a try/catch so we can test both cases + try { + const result = await handler( + context, + ActivityInput.encode({ + action: "LOCK", + fiscalCode: aFiscalCode + }) + ); + expect(TransientFailure.decode(result).isRight()).toBe(false); + expect(ApiCallFailure.decode(result).isRight()).toBe(true); + } catch (result) { + expect(TransientFailure.decode(result).isRight()).toBe(true); + expect(ApiCallFailure.decode(result).isRight()).toBe(true); + } + }); + + it("should fail when api operation returns an unparsable payload", async () => { + mockLockUserSession.mockImplementationOnce(async () => aDecodingFailure); + + const handler = createSetUserSessionLockActivityHandler(mockClient); + + // the handler may throw depending on what we consider to be a transient failure + // we wrap in a try/catch so we can test both cases + try { + const result = await handler( + context, + ActivityInput.encode({ + action: "LOCK", + fiscalCode: aFiscalCode + }) + ); + expect(TransientFailure.decode(result).isRight()).toBe(false); + expect(ApiCallFailure.decode(result).isRight()).toBe(true); + } catch (result) { + expect(TransientFailure.decode(result).isRight()).toBe(true); + expect(ApiCallFailure.decode(result).isRight()).toBe(true); + } + }); + + it("should fail when api operation returns an error response", async () => { + mockLockUserSession.mockImplementationOnce(async () => + right({ + status: 500 + }) + ); + + const handler = createSetUserSessionLockActivityHandler(mockClient); + + // the handler may throw depending on what we consider to be a transient failure + // we wrap in a try/catch so we can test both cases + try { + const result = await handler( + context, + ActivityInput.encode({ + action: "LOCK", + fiscalCode: aFiscalCode + }) + ); + expect(TransientFailure.decode(result).isRight()).toBe(false); + expect(ApiCallFailure.decode(result).isRight()).toBe(true); + } catch (result) { + expect(TransientFailure.decode(result).isRight()).toBe(true); + expect(ApiCallFailure.decode(result).isRight()).toBe(true); + } + }); + + it("should fail when api operation is called badly", async () => { + mockLockUserSession.mockImplementationOnce(async () => + right({ + status: 400 + }) + ); + + const handler = createSetUserSessionLockActivityHandler(mockClient); + + // the handler may throw depending on what we consider to be a transient failure + // we wrap in a try/catch so we can test both cases + try { + const result = await handler( + context, + ActivityInput.encode({ + action: "LOCK", + fiscalCode: aFiscalCode + }) + ); + expect(TransientFailure.decode(result).isRight()).toBe(false); + expect(BadApiRequestFailure.decode(result).isRight()).toBe(true); + } catch 
(result) { + expect(TransientFailure.decode(result).isRight()).toBe(true); + expect(BadApiRequestFailure.decode(result).isRight()).toBe(true); + } + }); +}); diff --git a/SetUserSessionLockActivity/function.json b/SetUserSessionLockActivity/function.json new file mode 100644 index 00000000..11a58320 --- /dev/null +++ b/SetUserSessionLockActivity/function.json @@ -0,0 +1,10 @@ +{ + "bindings": [ + { + "name": "name", + "type": "activityTrigger", + "direction": "in" + } + ], + "scriptFile": "../dist/SetUserSessionLockActivity/index.js" +} \ No newline at end of file diff --git a/SetUserSessionLockActivity/handler.ts b/SetUserSessionLockActivity/handler.ts new file mode 100644 index 00000000..ade551dc --- /dev/null +++ b/SetUserSessionLockActivity/handler.ts @@ -0,0 +1,196 @@ +/** + * Interacts with Session API to lock/unlock user + */ + +import { Context } from "@azure/functions"; +import { toError } from "fp-ts/lib/Either"; +import { + fromEither, + fromLeft, + taskEither, + TaskEither, + tryCatch +} from "fp-ts/lib/TaskEither"; +import * as t from "io-ts"; +import { readableReport } from "italia-ts-commons/lib/reporters"; +import { FiscalCode } from "italia-ts-commons/lib/strings"; +import { SuccessResponse } from "../generated/session-api/SuccessResponse"; +import { Client } from "../utils/sessionApiClient"; + +function assertNever(_: never): void { + throw new Error("should not have executed this"); +} + +// Activity input +export const ActivityInput = t.interface({ + action: t.union([t.literal("LOCK"), t.literal("UNLOCK")]), + fiscalCode: FiscalCode +}); +export type ActivityInput = t.TypeOf; + +// Activity result +export const ActivityResultSuccess = t.interface({ + kind: t.literal("SUCCESS") +}); +export type ActivityResultSuccess = t.TypeOf; + +// Activity failed because of invalid input +export const InvalidInputFailure = t.interface({ + kind: t.literal("INVALID_INPUT_FAILURE"), + reason: t.string +}); +export type InvalidInputFailure = t.TypeOf; + +// Activity failed because of an error on an api call +export const ApiCallFailure = t.interface({ + kind: t.literal("API_CALL_FAILURE"), + reason: t.string +}); +export type ApiCallFailure = t.TypeOf; + +// Activity failed because the api has been called badly +export const BadApiRequestFailure = t.interface({ + kind: t.literal("BAD_API_REQUEST_FAILURE"), + reason: t.string +}); +export type BadApiRequestFailure = t.TypeOf; + +// maps domain errors that are considered transient and thus may allow a retry +export const TransientFailure = ApiCallFailure; +export type TransientFailure = t.TypeOf; + +export const ActivityResultFailure = t.taggedUnion("kind", [ + ApiCallFailure, + BadApiRequestFailure, + InvalidInputFailure +]); +export type ActivityResultFailure = t.TypeOf; + +export const ActivityResult = t.taggedUnion("kind", [ + ActivityResultSuccess, + ActivityResultFailure +]); + +export type ActivityResult = t.TypeOf; + +const logPrefix = `SetUserSessionLockActivity`; + +/** + * Wraps the logic to call Session API and lift errors to the correct domain values + * @param sessionApiClient + * @param action + * @param fiscalCode + */ +const callSessionApi = ( + context: Context, + sessionApiClient: Client<"token">, + action: ActivityInput["action"], + fiscalCode: FiscalCode +): TaskEither => + taskEither + .of(void 0) + .chain(_ => + tryCatch( + () => { + switch (action) { + case "LOCK": + return sessionApiClient.lockUserSession({ fiscalCode }); + case "UNLOCK": + return sessionApiClient.unlockUserSession({ fiscalCode }); + default: + 
assertNever(action); + } + }, + error => { + context.log.error( + `${logPrefix}|ERROR|failed using api`, + action, + error + ); + return ApiCallFailure.encode({ + kind: "API_CALL_FAILURE", + reason: toError(error).message + }); + } + ) + ) + .chain(decodeErrorOrResponse => + fromEither(decodeErrorOrResponse).mapLeft(error => { + context.log.error( + `${logPrefix}|ERROR|failed decoding api payload`, + action, + error + ); + return ApiCallFailure.encode({ + kind: "API_CALL_FAILURE", + reason: readableReport(error) + }); + }) + ) + .chain(({ status, value }) => { + switch (status) { + case 200: + return taskEither.of(value); + case 400: + case 401: + case 404: + context.log.error( + `${logPrefix}|ERROR|API bad request ${status}`, + action, + value + ); + return fromLeft( + BadApiRequestFailure.encode({ + kind: "BAD_API_REQUEST_FAILURE", + reason: `Session Api called badly, action: ${action} code: ${status}` + }) + ); + case 500: + context.log.error( + `${logPrefix}|ERROR|API error response ${status}`, + action, + value + ); + return fromLeft( + ApiCallFailure.encode({ + kind: "API_CALL_FAILURE", + reason: `Session Api unexpected error, action: ${action}` + }) + ); + default: + assertNever(status); + } + }); + +export const createSetUserSessionLockActivityHandler = ( + sessionApiClient: Client<"token"> +) => (context: Context, input: unknown) => + taskEither + .of(void 0) + .chain(_ => + fromEither(ActivityInput.decode(input)).mapLeft(err => + InvalidInputFailure.encode({ + kind: "INVALID_INPUT_FAILURE", + reason: readableReport(err) + }) + ) + ) + .chain(({ action, fiscalCode }) => + callSessionApi(context, sessionApiClient, action, fiscalCode) + ) + .fold( + failure => { + context.log.error(`${logPrefix}|ERROR|Activity failed`, failure); + + // in case of transient failures we let the activity throw, so the orchestrator can retry + if (TransientFailure.is(failure)) { + throw failure; + } + return failure; + }, + _ => { + context.log.info(`${logPrefix}|INFO|Activity succeeded`); + return ActivityResultSuccess.encode({ kind: "SUCCESS" }); + } + ) + .run(); diff --git a/SetUserSessionLockActivity/index.ts b/SetUserSessionLockActivity/index.ts new file mode 100644 index 00000000..ed1d326d --- /dev/null +++ b/SetUserSessionLockActivity/index.ts @@ -0,0 +1,26 @@ +import { getRequiredStringEnv } from "io-functions-commons/dist/src/utils/env"; +import { timeoutFetch } from "../utils/fetch"; +import { + ApiOperation, + Client, + createClient, + WithDefaultsT +} from "../utils/sessionApiClient"; +import { createSetUserSessionLockActivityHandler } from "./handler"; + +const sessionApiUrl = getRequiredStringEnv("SESSION_API_URL"); +const sessionApiKey = getRequiredStringEnv("SESSION_API_KEY"); + +const withDefaultApiKey: WithDefaultsT<"token"> = ( + apiOperation: ApiOperation +) => params => apiOperation({ ...params, token: sessionApiKey }); + +const client: Client<"token"> = createClient({ + baseUrl: sessionApiUrl, + fetchApi: timeoutFetch, + withDefaults: withDefaultApiKey +}); + +const activityFunctionHandler = createSetUserSessionLockActivityHandler(client); + +export default activityFunctionHandler; diff --git a/UpdateVisibleServicesCacheOrchestrator/index.ts b/UpdateVisibleServicesCacheOrchestrator/index.ts index f4dc732b..5e3e84bc 100644 --- a/UpdateVisibleServicesCacheOrchestrator/index.ts +++ b/UpdateVisibleServicesCacheOrchestrator/index.ts @@ -4,15 +4,18 @@ * one JSON into the blob storage for each visible service found. 
*/ -import { IFunctionContext, Task } from "durable-functions/lib/src/classes"; +import { + IOrchestrationFunctionContext, + Task +} from "durable-functions/lib/src/classes"; import * as df from "durable-functions"; import { isLeft } from "fp-ts/lib/Either"; import { VisibleServices } from "../UpdateVisibleServicesCache"; const UpdateVisibleServicesCacheOrchestrator = df.orchestrator(function*( - context: IFunctionContext -): IterableIterator { + context: IOrchestrationFunctionContext +): Generator { const visibleServicesJson = context.df.getInput(); const errorOrVisibleServices = VisibleServices.decode(visibleServicesJson); diff --git a/UpsertServiceOrchestrator/handler.ts b/UpsertServiceOrchestrator/handler.ts index 2bd08251..50356999 100644 --- a/UpsertServiceOrchestrator/handler.ts +++ b/UpsertServiceOrchestrator/handler.ts @@ -1,6 +1,6 @@ import * as df from "durable-functions"; -import { IFunctionContext } from "durable-functions/lib/src/classes"; +import { IOrchestrationFunctionContext } from "durable-functions/lib/src/classes"; import { isLeft } from "fp-ts/lib/Either"; import { isSome, none, Option, some } from "fp-ts/lib/Option"; @@ -43,8 +43,8 @@ function computeMaybeAction( } export const handler = function*( - context: IFunctionContext -): IterableIterator { + context: IOrchestrationFunctionContext +): Generator { const input = context.df.getInput(); const retryOptions = new df.RetryOptions(5000, 10); diff --git a/UserDataDeleteOrchestrator/GetUserDataProcessing/handler.ts b/UserDataDeleteOrchestrator/GetUserDataProcessing/handler.ts deleted file mode 100644 index 22bfef1d..00000000 --- a/UserDataDeleteOrchestrator/GetUserDataProcessing/handler.ts +++ /dev/null @@ -1,212 +0,0 @@ -/** - * Updates the status of a UserDataProcessing record - */ - -import * as t from "io-ts"; - -import { Either, left, right } from "fp-ts/lib/Either"; -import { Option } from "fp-ts/lib/Option"; -import { fromEither, TaskEither, tryCatch } from "fp-ts/lib/TaskEither"; - -import { Context } from "@azure/functions"; - -import { QueryError } from "documentdb"; -import { UserDataProcessingChoice } from "io-functions-commons/dist/generated/definitions/UserDataProcessingChoice"; -import { - makeUserDataProcessingId, - UserDataProcessing, - UserDataProcessingModel -} from "io-functions-commons/dist/src/models/user_data_processing"; -import { readableReport } from "italia-ts-commons/lib/reporters"; -import { FiscalCode } from "italia-ts-commons/lib/strings"; - -// Activity input -export const ActivityInput = t.interface({ - choice: UserDataProcessingChoice, - fiscalCode: FiscalCode -}); -export type ActivityInput = t.TypeOf; - -// Activity result -const ActivityResultSuccess = t.interface({ - kind: t.literal("SUCCESS"), - value: UserDataProcessing -}); -export type ActivityResultSuccess = t.TypeOf; - -// Activity failed because of invalid input -const ActivityResultInvalidInputFailure = t.interface({ - kind: t.literal("INVALID_INPUT_FAILURE"), - reason: t.string -}); -export type ActivityResultInvalidInputFailure = t.TypeOf< - typeof ActivityResultInvalidInputFailure ->; - -// Activity failed because of record not found -const ActivityResultRecordNotFound = t.interface({ - kind: t.literal("RECORD_NOT_FOUND") -}); -export type ActivityResultRecordNotFound = t.TypeOf< - typeof ActivityResultRecordNotFound ->; - -// Activity failed because of an error on a query -const ActivityResultQueryFailure = t.intersection([ - t.interface({ - kind: t.literal("QUERY_FAILURE"), - reason: t.string - }), - t.partial({ query: 
t.string }) -]); -export type ActivityResultQueryFailure = t.TypeOf< - typeof ActivityResultQueryFailure ->; - -export const ActivityResultFailure = t.taggedUnion("kind", [ - ActivityResultQueryFailure, - ActivityResultInvalidInputFailure, - ActivityResultRecordNotFound -]); -export type ActivityResultFailure = t.TypeOf; - -export const ActivityResult = t.taggedUnion("kind", [ - ActivityResultSuccess, - ActivityResultFailure -]); - -export type ActivityResult = t.TypeOf; - -const logPrefix = `GetUserDataProcessingActivity`; - -function assertNever(_: never): void { - throw new Error("should not have executed this"); -} - -/** - * Converts a Promise into a TaskEither - * This is needed because our models return unconvenient type. Both left and rejection cases are handled as a TaskEither left - * @param lazyPromise a lazy promise to convert - * @param queryName an optional name for the query, for logging purpose - * - * @returns either the query result or a query failure - */ -const fromQueryEither = ( - lazyPromise: () => Promise>, - queryName: string = "" -) => - tryCatch(lazyPromise, (err: Error) => - ActivityResultQueryFailure.encode({ - kind: "QUERY_FAILURE", - query: queryName, - reason: err.message - }) - ).chain((queryErrorOrRecord: Either) => - fromEither( - queryErrorOrRecord.mapLeft(queryError => - ActivityResultQueryFailure.encode({ - kind: "QUERY_FAILURE", - query: queryName, - reason: JSON.stringify(queryError) - }) - ) - ) - ); - -/** - * Logs depending on failure type - * @param context the Azure functions context - * @param failure the failure to log - */ -const logFailure = (context: Context) => ( - failure: ActivityResultFailure -): void => { - switch (failure.kind) { - case "INVALID_INPUT_FAILURE": - context.log.error( - `${logPrefix}|Error decoding input|ERROR=${failure.reason}` - ); - break; - case "QUERY_FAILURE": - context.log.error( - `${logPrefix}|Error ${failure.query} query error |ERROR=${failure.reason}` - ); - break; - case "RECORD_NOT_FOUND": - context.log.error(`${logPrefix}|Error record not found |ERROR=`); - break; - default: - assertNever(failure); - } -}; - -/** - * Updates a UserDataProcessing record by creating a new version of it with a chenged status - * @param param0.currentRecord the record to be modified - * @param param0.nextStatus: the status to assign the record to - * - * @returns either an Error or the new created record - */ -const getUserDataProcessingRequest = ({ - userDataProcessingModel, - fiscalCode, - choice -}: { - userDataProcessingModel: UserDataProcessingModel; - fiscalCode: FiscalCode; - choice: UserDataProcessingChoice; -}): TaskEither> => - fromQueryEither( - () => - userDataProcessingModel.findOneUserDataProcessingById( - fiscalCode, - makeUserDataProcessingId(choice, fiscalCode) - ), - "userDataProcessingModel.findOneUserDataProcessingById" - ); - -export const createGetUserDataProcessingHandler = ( - userDataProcessingModel: UserDataProcessingModel -) => (context: Context, input: unknown): Promise => { - // the actual handler - return fromEither(ActivityInput.decode(input)) - .mapLeft((reason: t.Errors) => - ActivityResultInvalidInputFailure.encode({ - kind: "INVALID_INPUT_FAILURE", - reason: readableReport(reason) - }) - ) - .chain(({ fiscalCode, choice }) => - getUserDataProcessingRequest({ - choice, - fiscalCode, - userDataProcessingModel - }) - ) - .foldTaskEither( - e => fromEither(left(e)), - maybeRecord => - maybeRecord.fold( - fromEither( - left( - ActivityResultRecordNotFound.encode({ kind: "RECORD_NOT_FOUND" }) - ) - 
), - foundRecord => - fromEither( - right( - ActivityResultSuccess.encode({ - kind: "SUCCESS", - value: foundRecord - }) - ) - ) - ) - ) - .mapLeft(failure => { - logFailure(context)(failure); - return failure; - }) - .run() - .then((e: Either) => e.value); -}; diff --git a/UserDataDeleteOrchestrator/__tests__/handler.test.ts b/UserDataDeleteOrchestrator/__tests__/handler.test.ts new file mode 100644 index 00000000..2a170e06 --- /dev/null +++ b/UserDataDeleteOrchestrator/__tests__/handler.test.ts @@ -0,0 +1,374 @@ +// tslint:disable: no-any + +import { IOrchestrationFunctionContext } from "durable-functions/lib/src/classes"; +import { + mockOrchestratorCallActivity, + mockOrchestratorCallActivityWithRetry, + mockOrchestratorCancelTimer, + mockOrchestratorContext, + mockOrchestratorGetInput, + mockOrchestratorTaskAny +} from "../../__mocks__/durable-functions"; +import { + createUserDataDeleteOrchestratorHandler, + InvalidInputFailure, + OrchestratorSuccess +} from "../handler"; + +import { UserDataProcessingChoiceEnum } from "io-functions-commons/dist/generated/definitions/UserDataProcessingChoice"; +import { UserDataProcessingStatusEnum } from "io-functions-commons/dist/generated/definitions/UserDataProcessingStatus"; +import { readableReport } from "italia-ts-commons/lib/reporters"; +import { Day, Hour } from "italia-ts-commons/lib/units"; +import { aUserDataProcessing } from "../../__mocks__/mocks"; +import { ActivityResultSuccess as DeleteUserDataActivityResultSuccess } from "../../DeleteUserDataActivity/types"; +import { + ActivityResultNotFoundFailure as GetUserDataProcessingActivityResultNotFoundFailure, + ActivityResultSuccess as GetUserDataProcessingActivityResultSuccess +} from "../../GetUserDataProcessingActivity/handler"; +import { ActivityResultSuccess as SetUserDataProcessingStatusActivityResultSuccess } from "../../SetUserDataProcessingStatusActivity/handler"; +import { ActivityResultSuccess as SetUserSessionLockActivityResultSuccess } from "../../SetUserSessionLockActivity/handler"; +import { OrchestratorFailure } from "../../UserDataDownloadOrchestrator/handler"; +import { ProcessableUserDataDelete } from "../../UserDataProcessingTrigger"; + +const aProcessableUserDataDelete = ProcessableUserDataDelete.decode({ + ...aUserDataProcessing, + choice: UserDataProcessingChoiceEnum.DELETE, + status: UserDataProcessingStatusEnum.PENDING +}).getOrElseL(e => + fail(`Failed creating a mock input document: ${readableReport(e)}`) +); + +const aUserDataDownloadPending = { + ...aUserDataProcessing, + choice: UserDataProcessingChoiceEnum.DOWNLOAD, + status: UserDataProcessingStatusEnum.PENDING +}; + +const aUserDataDownloadWip = { + ...aUserDataProcessing, + choice: UserDataProcessingChoiceEnum.DOWNLOAD, + status: UserDataProcessingStatusEnum.WIP +}; + +const aUserDataDownloadClosed = { + ...aUserDataProcessing, + choice: UserDataProcessingChoiceEnum.DOWNLOAD, + status: UserDataProcessingStatusEnum.CLOSED +}; + +const setUserDataProcessingStatusActivity = jest.fn().mockImplementation(() => + SetUserDataProcessingStatusActivityResultSuccess.encode({ + kind: "SUCCESS" + }) +); + +const getUserDataProcessingActivity = jest.fn().mockImplementation(() => + GetUserDataProcessingActivityResultNotFoundFailure.encode({ + kind: "NOT_FOUND_FAILURE" + }) +); + +const setUserSessionLockActivity = jest.fn().mockImplementation(() => + SetUserSessionLockActivityResultSuccess.encode({ + kind: "SUCCESS" + }) +); + +const deleteUserDataActivity = jest.fn().mockImplementation(() => + 
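+  // defaults to a successful outcome; tests exercising the failure path
+  // override single calls via mockImplementationOnce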
DeleteUserDataActivityResultSuccess.encode({ + kind: "SUCCESS" + }) +); + +// A mock implementation proxy for df.callActivity/df.df.callActivityWithRetry that routes each call to the correct mock implentation +const switchMockImplementation = (name: string, ...args: readonly unknown[]) => + (name === "SetUserDataProcessingStatusActivity" + ? setUserDataProcessingStatusActivity + : name === "GetUserDataProcessingActivity" + ? getUserDataProcessingActivity + : name === "SetUserSessionLockActivity" + ? setUserSessionLockActivity + : name === "DeleteUserDataActivity" + ? deleteUserDataActivity + : jest.fn())(name, ...args); + +// I assign switchMockImplementation to both because +// I don't want tests to depend on implementation details +// such as which activity is called with retry and which is not +mockOrchestratorCallActivity.mockImplementation(switchMockImplementation); +mockOrchestratorCallActivityWithRetry.mockImplementation( + switchMockImplementation +); + +/** + * Util function that takes an orchestrator and executes each step until is done + * @param orch an orchestrator + * + * @returns the last value yielded by the orchestrator + */ +const consumeOrchestrator = (orch: any) => { + // tslint:disable-next-line: no-let + let prevValue: unknown; + while (true) { + const { done, value } = orch.next(prevValue); + if (done) { + return value; + } + prevValue = value; + } +}; + +// just a convenient cast, good for every test case +const context = (mockOrchestratorContext as unknown) as IOrchestrationFunctionContext; + +const waitForAbortInterval = 0 as Day; +const waitForDownloadInterval = 0 as Hour; + +// tslint:disable-next-line: no-big-function +describe("createUserDataDeleteOrchestratorHandler", () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + it("should fail on invalid input", () => { + mockOrchestratorGetInput.mockReturnValueOnce("invalid input"); + + const result = consumeOrchestrator( + createUserDataDeleteOrchestratorHandler( + waitForAbortInterval, + waitForDownloadInterval + )(context) + ); + + expect(InvalidInputFailure.decode(result).isRight()).toBe(true); + + expect(setUserDataProcessingStatusActivity).not.toHaveBeenCalled(); + expect(deleteUserDataActivity).not.toHaveBeenCalled(); + expect(setUserSessionLockActivity).not.toHaveBeenCalled(); + }); + + it("should set processing ad FAILED if fails to lock the user session", () => { + mockOrchestratorGetInput.mockReturnValueOnce(aProcessableUserDataDelete); + setUserSessionLockActivity.mockImplementationOnce( + // tslint:disable-next-line: no-duplicate-string + () => "any unsuccessful value" + ); + + const result = consumeOrchestrator( + createUserDataDeleteOrchestratorHandler( + waitForAbortInterval, + waitForDownloadInterval + )(context) + ); + + expect(OrchestratorFailure.decode(result).isRight()).toBe(true); + expect(setUserDataProcessingStatusActivity).toHaveBeenCalledWith( + expect.any(String), + expect.objectContaining({ + nextStatus: UserDataProcessingStatusEnum.FAILED + }) + ); + }); + + it("should set processing ad FAILED if fails to set the operation as WIP", () => { + mockOrchestratorGetInput.mockReturnValueOnce(aProcessableUserDataDelete); + setUserDataProcessingStatusActivity.mockImplementationOnce( + () => "any unsuccessful value" + ); + + const result = consumeOrchestrator( + createUserDataDeleteOrchestratorHandler( + waitForAbortInterval, + waitForDownloadInterval + )(context) + ); + + expect(OrchestratorFailure.decode(result).isRight()).toBe(true); + 
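+    // whichever step fails, the orchestrator is still expected to mark the request as FAILED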
expect(setUserDataProcessingStatusActivity).toHaveBeenCalledWith( + expect.any(String), + expect.objectContaining({ + nextStatus: UserDataProcessingStatusEnum.FAILED + }) + ); + }); + + it("should set processing ad FAILED if fails delete user data", () => { + mockOrchestratorGetInput.mockReturnValueOnce(aProcessableUserDataDelete); + deleteUserDataActivity.mockImplementationOnce( + () => "any unsuccessful value" + ); + + const result = consumeOrchestrator( + createUserDataDeleteOrchestratorHandler( + waitForAbortInterval, + waitForDownloadInterval + )(context) + ); + + expect(OrchestratorFailure.decode(result).isRight()).toBe(true); + expect(setUserDataProcessingStatusActivity).toHaveBeenCalledWith( + expect.any(String), + expect.objectContaining({ + nextStatus: UserDataProcessingStatusEnum.FAILED + }) + ); + }); + + it("should set processing ad FAILED if fails to unlock the user session", () => { + mockOrchestratorGetInput.mockReturnValueOnce(aProcessableUserDataDelete); + setUserSessionLockActivity.mockImplementationOnce(() => + SetUserSessionLockActivityResultSuccess.encode({ + kind: "SUCCESS" + }) + ); + setUserSessionLockActivity.mockImplementationOnce( + () => "any unsuccessful value" + ); + + const result = consumeOrchestrator( + createUserDataDeleteOrchestratorHandler( + waitForAbortInterval, + waitForDownloadInterval + )(context) + ); + + expect(OrchestratorFailure.decode(result).isRight()).toBe(true); + expect(setUserDataProcessingStatusActivity).toHaveBeenCalledWith( + expect.any(String), + expect.objectContaining({ + nextStatus: UserDataProcessingStatusEnum.FAILED + }) + ); + }); + + it("should set processing ad FAILED if fails to set the operation as WIP", () => { + mockOrchestratorGetInput.mockReturnValueOnce(aProcessableUserDataDelete); + setUserDataProcessingStatusActivity.mockImplementationOnce(() => + SetUserDataProcessingStatusActivityResultSuccess.encode({ + kind: "SUCCESS" + }) + ); + setUserDataProcessingStatusActivity.mockImplementationOnce( + () => "any unsuccessful value" + ); + + const result = consumeOrchestrator( + createUserDataDeleteOrchestratorHandler( + waitForAbortInterval, + waitForDownloadInterval + )(context) + ); + + expect(OrchestratorFailure.decode(result).isRight()).toBe(true); + expect(setUserDataProcessingStatusActivity).toHaveBeenCalledWith( + expect.any(String), + expect.objectContaining({ + nextStatus: UserDataProcessingStatusEnum.FAILED + }) + ); + }); + + it("should set status as CLOSED if wait interval expires", () => { + mockOrchestratorGetInput.mockReturnValueOnce(aProcessableUserDataDelete); + + const result = consumeOrchestrator( + createUserDataDeleteOrchestratorHandler( + waitForAbortInterval, + waitForDownloadInterval + )(context) + ); + + expect(OrchestratorSuccess.decode(result).isRight()).toBe(true); + expect(setUserDataProcessingStatusActivity).toHaveBeenCalledTimes(2); + expect(setUserDataProcessingStatusActivity).toHaveBeenCalledWith( + expect.any(String), + expect.objectContaining({ + nextStatus: UserDataProcessingStatusEnum.WIP + }) + ); + expect(setUserDataProcessingStatusActivity).toHaveBeenCalledWith( + expect.any(String), + expect.objectContaining({ + nextStatus: UserDataProcessingStatusEnum.CLOSED + }) + ); + expect(setUserSessionLockActivity).toHaveBeenCalledTimes(2); + expect(setUserSessionLockActivity).toHaveBeenCalledWith( + expect.any(String), + expect.objectContaining({ + action: "LOCK" + }) + ); + expect(setUserSessionLockActivity).toHaveBeenCalledWith( + expect.any(String), + expect.objectContaining({ + action: 
"UNLOCK" + }) + ); + expect(deleteUserDataActivity).toHaveBeenCalledTimes(1); + }); + + it("should set status as CLOSED if abort request comes before wait interval expires", () => { + mockOrchestratorGetInput.mockReturnValueOnce(aProcessableUserDataDelete); + + // I trick the implementation of Task.any to return the second event, not the first + mockOrchestratorTaskAny.mockImplementationOnce(([, _]) => _); + + const result = consumeOrchestrator( + createUserDataDeleteOrchestratorHandler( + waitForAbortInterval, + waitForDownloadInterval + )(context) + ); + + expect(OrchestratorSuccess.decode(result).isRight()).toBe(true); + expect(mockOrchestratorCancelTimer).toHaveBeenCalledTimes(1); + expect(setUserDataProcessingStatusActivity).toHaveBeenCalledTimes(1); + expect(setUserDataProcessingStatusActivity).toHaveBeenCalledWith( + expect.any(String), + expect.objectContaining({ + nextStatus: UserDataProcessingStatusEnum.ABORTED + }) + ); + expect(setUserSessionLockActivity).not.toHaveBeenCalled(); + expect(deleteUserDataActivity).not.toHaveBeenCalled(); + }); + + it("should wait if there are pending downloads", () => { + mockOrchestratorGetInput.mockReturnValueOnce(aProcessableUserDataDelete); + + // call 1: it's pending + getUserDataProcessingActivity.mockImplementationOnce(() => + GetUserDataProcessingActivityResultSuccess.encode({ + kind: "SUCCESS", + value: aUserDataDownloadPending + }) + ); + + // call 2: it's wip + getUserDataProcessingActivity.mockImplementationOnce(() => + GetUserDataProcessingActivityResultSuccess.encode({ + kind: "SUCCESS", + value: aUserDataDownloadWip + }) + ); + + // call 3: it's closed (so we can continue with delete) + getUserDataProcessingActivity.mockImplementationOnce(() => + GetUserDataProcessingActivityResultSuccess.encode({ + kind: "SUCCESS", + value: aUserDataDownloadClosed + }) + ); + + const result = consumeOrchestrator( + createUserDataDeleteOrchestratorHandler( + waitForAbortInterval, + waitForDownloadInterval + )(context) + ); + + expect(OrchestratorSuccess.decode(result).isRight()).toBe(true); + expect(getUserDataProcessingActivity).toHaveBeenCalledTimes(3); + }); +}); diff --git a/UserDataDeleteOrchestrator/cli.ts b/UserDataDeleteOrchestrator/cli.ts deleted file mode 100644 index 448f02ab..00000000 --- a/UserDataDeleteOrchestrator/cli.ts +++ /dev/null @@ -1,218 +0,0 @@ -/** - * Exposes ExtractUserDataActivity as a cli command for local usage - */ - -// tslint:disable: no-console no-any - -import * as dotenv from "dotenv"; -dotenv.config(); - -import { readableReport } from "italia-ts-commons/lib/reporters"; -import { FiscalCode } from "italia-ts-commons/lib/strings"; -import RedisSessionStorage from "./session-utils/redisSessionStorage"; - -import SetUserDataProcessingStatusActivity from "../SetUserDataProcessingStatusActivity"; -import getUserDataProcessing from "./GetUserDataProcessing"; - -import { sequenceT } from "fp-ts/lib/Apply"; -import { Either, toError } from "fp-ts/lib/Either"; -import { taskEither, TaskEither, tryCatch } from "fp-ts/lib/TaskEither"; -import { UserDataProcessingChoiceEnum } from "io-functions-commons/dist/generated/definitions/UserDataProcessingChoice"; -import { UserDataProcessingStatusEnum } from "io-functions-commons/dist/generated/definitions/UserDataProcessingStatus"; -import { UserDataProcessing } from "io-functions-commons/dist/src/models/user_data_processing"; -import { getRequiredStringEnv } from "io-functions-commons/dist/src/utils/env"; -import { NodeEnvironmentEnum } from "italia-ts-commons/lib/environment"; 
-import { getNodeEnvironmentFromProcessEnv } from "italia-ts-commons/lib/environment"; - -import { Context } from "@azure/functions"; -import { - createClusterRedisClient, - createSimpleRedisClient -} from "./session-utils/redis"; -const trace = (l: string) => (e: any) => { - console.log(l, e); - return e; -}; - -const context = ({ - log: { - error: console.error, - info: console.log, - verbose: console.log - } - // tslint:disable-next-line: no-any -} as any) as Context; - -const REDIS_CLIENT = - getNodeEnvironmentFromProcessEnv(process.env) === - NodeEnvironmentEnum.DEVELOPMENT - ? createSimpleRedisClient(process.env.REDIS_URL) - : createClusterRedisClient( - getRequiredStringEnv("REDIS_URL"), - process.env.REDIS_PASSWORD, - process.env.REDIS_PORT - ); -// Create the Session Storage service -const SESSION_STORAGE = new RedisSessionStorage(REDIS_CLIENT); - -// placeholder fot methods steps to be implemented -const notImplementedTask = (name: string): TaskEither => { - console.warn(`task ${name} hasn't been implemented yet!`); - return taskEither.of(true); -}; - -// before deleting data we block the user and clrear all its session data -const blockUser = (fiscalCode: FiscalCode): TaskEither => { - const delByFiscalCode = tryCatch( - () => - SESSION_STORAGE.delByFiscalCode(fiscalCode) - .then(trace("delByFiscalCode")) - .then(e => - e.getOrElseL((err: any) => { - throw err; - }) - ), - toError - ); - - const delUserMetadataByFiscalCode = tryCatch( - () => - SESSION_STORAGE.delUserMetadataByFiscalCode(fiscalCode) - .then(trace("delUserMetadataByFiscalCode")) - .then(e => - e.getOrElseL((err: any) => { - throw err; - }) - ), - toError - ); - - const setBlockedUser = tryCatch( - () => - SESSION_STORAGE.setBlockedUser(fiscalCode) - .then(trace("setBlockedUser")) - .then(e => - e.getOrElseL((err: any) => { - throw err; - }) - ), - toError - ); - - return sequenceT(taskEither)( - delByFiscalCode, - delUserMetadataByFiscalCode - ).chain(_ => setBlockedUser); -}; - -// creates a bundle with user data and save it to a dedicated storage -const saveUserDataToStorage = ( - // tslint:disable-next-line: variable-name - _fiscalCode: FiscalCode -): TaskEither => notImplementedTask("saveUserDataToStorage"); - -// delete all user data from our db -const deleteUserData = ( - // tslint:disable-next-line: variable-name - _fiscalCode: FiscalCode -): TaskEither => notImplementedTask("deleteUserData"); - -// change status on user request -const setUserDataProcessingStatus = ( - currentUserDataProcessing: UserDataProcessing, - nextStatus: UserDataProcessingStatusEnum -): TaskEither => - tryCatch( - () => - SetUserDataProcessingStatusActivity(context, { - currentRecord: currentUserDataProcessing, - nextStatus - }).then(result => { - if (result.kind !== "SUCCESS") { - throw new Error( - `SetUserDataProcessingStatusActivity to ${nextStatus} failed: ${result.kind} error` - ); - } - return true; - }), - toError - ); - -// after the operation, unblock the user to allow another login -const unblockUser = (fiscalCode: FiscalCode): TaskEither => - tryCatch( - () => - SESSION_STORAGE.unsetBlockedUser(fiscalCode) - .then(trace("unsetBlockedUser")) - .then(e => - e.getOrElseL((err: Error) => { - throw err; - }) - ), - toError - ); - -async function run(): Promise> { - const fiscalCode = FiscalCode.decode(process.argv[2]).getOrElseL(reason => { - throw new Error(`Invalid input: ${readableReport(reason)}`); - }); - - const userDataProcessingResult = await getUserDataProcessing(context, { - choice: 
UserDataProcessingChoiceEnum.DELETE, - fiscalCode - }); - - if (userDataProcessingResult.kind === "RECORD_NOT_FOUND") { - throw new Error(`No data delete has been requested for the current user`); - } else if (userDataProcessingResult.kind !== "SUCCESS") { - throw new Error("Failed retrieving userDataProcessing"); - } else if ( - userDataProcessingResult.value.status !== - UserDataProcessingStatusEnum.PENDING && - userDataProcessingResult.value.status !== - UserDataProcessingStatusEnum.FAILED - ) { - throw new Error("User data processing status !== PENDING & != FAILED"); - } else { - console.log( - "Found user data processing request with status %s", - userDataProcessingResult.value.status - ); - } - - return blockUser(fiscalCode) - .chain(_ => - setUserDataProcessingStatus( - userDataProcessingResult.value, - UserDataProcessingStatusEnum.WIP - ) - ) - .chain(_ => saveUserDataToStorage(fiscalCode)) - .chain(_ => deleteUserData(fiscalCode)) - .chain(_ => unblockUser(fiscalCode)) - .chain(_ => - setUserDataProcessingStatus( - userDataProcessingResult.value, - UserDataProcessingStatusEnum.CLOSED - ) - ) - .foldTaskEither( - _ => { - console.log("Something went wrong. Mark the requeste as FAILED"); - // mark as failed - return setUserDataProcessingStatus( - userDataProcessingResult.value, - UserDataProcessingStatusEnum.FAILED - ); - }, - // just pass - e => taskEither.of(e) - ) - .run(); -} - -// tslint:disable-next-line: no-floating-promises -run() - .then(result => console.log("OK", result)) - .catch(ex => console.error("KO", ex)) - .then(_ => REDIS_CLIENT.quit()); diff --git a/UserDataDeleteOrchestrator/function.json b/UserDataDeleteOrchestrator/function.json new file mode 100644 index 00000000..12bc2a59 --- /dev/null +++ b/UserDataDeleteOrchestrator/function.json @@ -0,0 +1,10 @@ +{ + "bindings": [ + { + "name": "context", + "type": "orchestrationTrigger", + "direction": "in" + } + ], + "scriptFile": "../dist/UserDataDeleteOrchestrator/index.js" +} \ No newline at end of file diff --git a/UserDataDeleteOrchestrator/handler.ts b/UserDataDeleteOrchestrator/handler.ts new file mode 100644 index 00000000..05c90595 --- /dev/null +++ b/UserDataDeleteOrchestrator/handler.ts @@ -0,0 +1,388 @@ +import { + IOrchestrationFunctionContext, + Task, + TaskSet +} from "durable-functions/lib/src/classes"; +import { isLeft, toError } from "fp-ts/lib/Either"; +import { toString } from "fp-ts/lib/function"; +import { UserDataProcessingChoiceEnum } from "io-functions-commons/dist/generated/definitions/UserDataProcessingChoice"; +import { UserDataProcessingStatusEnum } from "io-functions-commons/dist/generated/definitions/UserDataProcessingStatus"; +import { UserDataProcessing } from "io-functions-commons/dist/src/models/user_data_processing"; +import * as t from "io-ts"; +import { readableReport } from "italia-ts-commons/lib/reporters"; +import { FiscalCode, NonEmptyString } from "italia-ts-commons/lib/strings"; +import { Day, Hour } from "italia-ts-commons/lib/units"; +import { + ActivityInput as DeleteUserDataActivityInput, + ActivityResultSuccess as DeleteUserDataActivityResultSuccess +} from "../DeleteUserDataActivity/types"; +import { + ActivityInput as GetUserDataProcessingStatusActivityInput, + ActivityResult as GetUserDataProcessingStatusActivityResult, + ActivityResultNotFoundFailure as GetUserDataProcessingStatusActivityResultNotFoundFailure, + ActivityResultSuccess as GetUserDataProcessingStatusActivityResultSuccess +} from "../GetUserDataProcessingActivity/handler"; +import { ActivityResultSuccess 
as SetUserDataProcessingStatusActivityResultSuccess } from "../SetUserDataProcessingStatusActivity/handler"; +import { + ActivityInput as SetUserSessionLockActivityInput, + ActivityResultSuccess as SetUserSessionLockActivityResultSuccess +} from "../SetUserSessionLockActivity/handler"; +import { ProcessableUserDataDelete } from "../UserDataProcessingTrigger"; +import { + trackUserDataDeleteEvent, + trackUserDataDeleteException +} from "../utils/appinsightsEvents"; +import { ABORT_EVENT, addDays, addHours } from "./utils"; + +const logPrefix = "UserDataDeleteOrchestrator"; + +const printableError = (error: Error | unknown): string => + error instanceof Error ? error.message : toString(error); + +export type InvalidInputFailure = t.TypeOf; +export const InvalidInputFailure = t.interface({ + kind: t.literal("INVALID_INPUT"), + reason: t.string +}); + +export type UnhanldedFailure = t.TypeOf; +export const UnhanldedFailure = t.interface({ + kind: t.literal("UNHANDLED"), + reason: t.string +}); + +export type ActivityFailure = t.TypeOf; +export const ActivityFailure = t.intersection([ + t.interface({ + activityName: t.string, + kind: t.literal("ACTIVITY"), + reason: t.string + }), + t.partial({ extra: t.object }) +]); + +export type OrchestratorFailure = t.TypeOf; +export const OrchestratorFailure = t.taggedUnion("kind", [ + InvalidInputFailure, + UnhanldedFailure, + ActivityFailure +]); + +export type OrchestratorSuccess = t.TypeOf; +export const OrchestratorSuccess = t.interface({ + kind: t.literal("SUCCESS"), + type: t.keyof({ ABORTED: null, DELETED: null }) +}); + +export type SkippedDocument = t.TypeOf; +export const SkippedDocument = t.interface({ + kind: t.literal("SKIPPED") +}); + +export type OrchestratorResult = t.TypeOf; +export const OrchestratorResult = t.union([ + OrchestratorFailure, + SkippedDocument, + OrchestratorSuccess +]); + +const toActivityFailure = ( + err: { kind: string }, + activityName: string, + extra?: object +) => + ActivityFailure.encode({ + activityName, + extra, + kind: "ACTIVITY", + reason: err.kind + }); + +function* setUserSessionLock( + context: IOrchestrationFunctionContext, + { action, fiscalCode }: SetUserSessionLockActivityInput +): Generator { + const result = yield context.df.callActivity( + "SetUserSessionLockActivity", + SetUserSessionLockActivityInput.encode({ + action, + fiscalCode + }) + ); + return SetUserSessionLockActivityResultSuccess.decode(result).getOrElseL( + _ => { + context.log.error( + `${logPrefix}|ERROR|SetUserSessionLockActivity fail|${readableReport( + _ + )}` + ); + throw toActivityFailure( + { kind: "SET_USER_SESSION_LOCK" }, + "SetUserSessionLockActivity", + { + action + } + ); + } + ); +} + +function* setUserDataProcessingStatus( + context: IOrchestrationFunctionContext, + currentRecord: UserDataProcessing, + nextStatus: UserDataProcessingStatusEnum +): Generator { + const result = yield context.df.callActivity( + "SetUserDataProcessingStatusActivity", + { + currentRecord, + nextStatus + } + ); + return SetUserDataProcessingStatusActivityResultSuccess.decode( + result + ).getOrElseL(_ => { + throw toActivityFailure( + { kind: "SET_USER_DATA_PROCESSING_STATUS_ACTIVITY_RESULT" }, + "SetUserDataProcessingStatusActivity", + { + status: nextStatus + } + ); + }); +} + +function* hasPendingDownload( + context: IOrchestrationFunctionContext, + fiscalCode: FiscalCode +): Generator { + const result = yield context.df.callActivity( + "GetUserDataProcessingActivity", + GetUserDataProcessingStatusActivityInput.encode({ + choice: 
UserDataProcessingChoiceEnum.DOWNLOAD, + fiscalCode + }) + ); + + return GetUserDataProcessingStatusActivityResult.decode(result).fold( + _ => { + throw toActivityFailure( + { kind: "GET_USER_DATA_PROCESSING_ACTIVITY_RESULT" }, + "GetUserDataProcessingActivity" + ); + }, // check if + response => { + if (GetUserDataProcessingStatusActivityResultSuccess.is(response)) { + return [ + UserDataProcessingStatusEnum.PENDING, + UserDataProcessingStatusEnum.WIP + ].includes(response.value.status); + } else if ( + GetUserDataProcessingStatusActivityResultNotFoundFailure.is(response) + ) { + return false; + } + + throw toActivityFailure(response, "GetUserDataProcessingActivity"); + } + ); +} + +function* deleteUserData( + context: IOrchestrationFunctionContext, + currentRecord: UserDataProcessing +): Generator { + const backupFolder = `${ + currentRecord.userDataProcessingId + }-${context.df.currentUtcDateTime.getTime()}` as NonEmptyString; + const result = yield context.df.callActivity( + "DeleteUserDataActivity", + DeleteUserDataActivityInput.encode({ + backupFolder, + fiscalCode: currentRecord.fiscalCode + }) + ); + return DeleteUserDataActivityResultSuccess.decode(result).getOrElseL(_ => { + context.log.error( + `${logPrefix}|ERROR|DeleteUserDataActivity fail`, + result, + readableReport(_) + ); + throw toActivityFailure( + { kind: "DELETE_USER_DATA" }, + "DeleteUserDataActivity" + ); + }); +} + +/** + * Create a handler for the orchestrator + * + * @param waitForAbortInterval Indicates how many days the request must be left pending, waiting for an eventual abort request + * @param waitForDownloadInterval Indicates how many hours the request must be postponed in case a download request is being processing meanwhile + */ +export const createUserDataDeleteOrchestratorHandler = ( + waitForAbortInterval: Day, + waitForDownloadInterval: Hour = 12 as Hour +) => + function*(context: IOrchestrationFunctionContext): Generator { + const document = context.df.getInput(); + // This check has been done on the trigger, so it should never fail. 
+ // However, it's worth the effort to check it twice + const invalidInputOrCurrentUserDataProcessing = ProcessableUserDataDelete.decode( + document + ).mapLeft(err => { + context.log.error( + `${logPrefix}|WARN|Cannot decode ProcessableUserDataDelete document: ${readableReport( + err + )}` + ); + return InvalidInputFailure.encode({ + kind: "INVALID_INPUT", + reason: readableReport(err) + }); + }); + + if (isLeft(invalidInputOrCurrentUserDataProcessing)) { + return invalidInputOrCurrentUserDataProcessing.value; + } + + const currentUserDataProcessing = + invalidInputOrCurrentUserDataProcessing.value; + + context.log.verbose( + `${logPrefix}|VERBOSE|Executing delete`, + currentUserDataProcessing + ); + + try { + // we have an interval on which we wait for eventual cancellation by the user + const intervalExpiredEvent = context.df.createTimer( + addDays(context.df.currentUtcDateTime, waitForAbortInterval) + ); + + // we wait for eventually abort message from the user + const canceledRequestEvent = context.df.waitForExternalEvent(ABORT_EVENT); + + context.log.verbose( + `${logPrefix}|VERBOSE|Operation stopped for ${waitForAbortInterval} days` + ); + + trackUserDataDeleteEvent("paused", currentUserDataProcessing); + + // the first that get triggered + const triggeredEvent = yield context.df.Task.any([ + intervalExpiredEvent, + canceledRequestEvent + ]); + + if (triggeredEvent === intervalExpiredEvent) { + context.log.verbose( + `${logPrefix}|VERBOSE|Operation resumed after ${waitForAbortInterval} days` + ); + + // lock user session + yield* setUserSessionLock(context, { + action: "LOCK", + fiscalCode: currentUserDataProcessing.fiscalCode + }); + + // set as wip + yield* setUserDataProcessingStatus( + context, + currentUserDataProcessing, + UserDataProcessingStatusEnum.WIP + ); + + // If there's a working download request, we postpone delete of one day + while ( + yield* hasPendingDownload( + context, + currentUserDataProcessing.fiscalCode + ) + ) { + // we wait some more time for the download process to end + context.log.verbose( + `${logPrefix}|VERBOSE|Found an active DOWNLOAD procedure, wait for ${waitForDownloadInterval} hours` + ); + const waitForDownloadEvent = context.df.createTimer( + addHours(context.df.currentUtcDateTime, waitForDownloadInterval) + ); + trackUserDataDeleteEvent("postponed", currentUserDataProcessing); + yield waitForDownloadEvent; + } + + // backup&delete data + yield* deleteUserData(context, currentUserDataProcessing); + + // set as closed + yield* setUserDataProcessingStatus( + context, + currentUserDataProcessing, + UserDataProcessingStatusEnum.CLOSED + ); + + // unlock user + yield* setUserSessionLock(context, { + action: "UNLOCK", + fiscalCode: currentUserDataProcessing.fiscalCode + }); + + trackUserDataDeleteEvent("deleted", currentUserDataProcessing); + return OrchestratorSuccess.encode({ kind: "SUCCESS", type: "DELETED" }); + } else { + // stop the timer to let the orchestrator end + intervalExpiredEvent.cancel(); + + context.log.verbose( + `${logPrefix}|VERBOSE|Operation resumed because of abort event` + ); + + // set as aborted + yield* setUserDataProcessingStatus( + context, + currentUserDataProcessing, + UserDataProcessingStatusEnum.ABORTED + ); + + trackUserDataDeleteEvent("aborted", currentUserDataProcessing); + return OrchestratorSuccess.encode({ kind: "SUCCESS", type: "ABORTED" }); + } + } catch (error) { + context.log.error( + `${logPrefix}|ERROR|Failed processing user data for download: ${printableError( + error + )}` + ); + 
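+      // track the exception, then try to persist the FAILED status so the request
+      // is not left in a non-terminal state; if even that activity fails, throw
+      // rather than swallow the error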
trackUserDataDeleteException( + "failed", + toError(error), + currentUserDataProcessing + ); + + SetUserDataProcessingStatusActivityResultSuccess.decode( + yield context.df.callActivity("SetUserDataProcessingStatusActivity", { + currentRecord: currentUserDataProcessing, + nextStatus: UserDataProcessingStatusEnum.FAILED + }) + ).getOrElseL(err => { + trackUserDataDeleteException( + "unhandled_failed_status", + new Error(readableReport(err)), + currentUserDataProcessing + ); + throw new Error( + `Activity SetUserDataProcessingStatusActivity (status=FAILED) failed: ${readableReport( + err + )}` + ); + }); + + return OrchestratorFailure.decode(error).getOrElse( + UnhanldedFailure.encode({ + kind: "UNHANDLED", + reason: printableError(error) + }) + ); + } + }; diff --git a/UserDataDeleteOrchestrator/index.ts b/UserDataDeleteOrchestrator/index.ts new file mode 100644 index 00000000..71856a4a --- /dev/null +++ b/UserDataDeleteOrchestrator/index.ts @@ -0,0 +1,14 @@ +import * as df from "durable-functions"; +import { getRequiredStringEnv } from "io-functions-commons/dist/src/utils/env"; +import { Day } from "italia-ts-commons/lib/units"; +import { createUserDataDeleteOrchestratorHandler } from "./handler"; + +const waitInterval = (getRequiredStringEnv( + "USER_DATA_DELETE_DELAY_DAYS" +) as unknown) as Day; + +const orchestrator = df.orchestrator( + createUserDataDeleteOrchestratorHandler(waitInterval) +); + +export default orchestrator; diff --git a/UserDataDeleteOrchestrator/session-utils/SessionInfo.ts b/UserDataDeleteOrchestrator/session-utils/SessionInfo.ts deleted file mode 100644 index 1b85efa1..00000000 --- a/UserDataDeleteOrchestrator/session-utils/SessionInfo.ts +++ /dev/null @@ -1,28 +0,0 @@ -/** - * Do not edit this file it is auto-generated by italia-utils / gen-api-models. - * See https://github.com/teamdigitale/italia-utils - */ -/* tslint:disable */ - -import { Timestamp } from "./Timestamp"; -import * as t from "io-ts"; - -/** - * Decribe a session of an authenticated user. - */ - -// required attributes -const SessionInfoR = t.interface({ - createdAt: Timestamp, - - sessionToken: t.string -}); - -// optional attributes -const SessionInfoO = t.partial({}); - -export const SessionInfo = t.exact( - t.intersection([SessionInfoR, SessionInfoO], "SessionInfo") -); - -export type SessionInfo = t.TypeOf; diff --git a/UserDataDeleteOrchestrator/session-utils/SessionsList.ts b/UserDataDeleteOrchestrator/session-utils/SessionsList.ts deleted file mode 100644 index 66619f48..00000000 --- a/UserDataDeleteOrchestrator/session-utils/SessionsList.ts +++ /dev/null @@ -1,26 +0,0 @@ -/** - * Do not edit this file it is auto-generated by italia-utils / gen-api-models. - * See https://github.com/teamdigitale/italia-utils - */ -/* tslint:disable */ - -import { SessionInfo } from "./SessionInfo"; -import * as t from "io-ts"; - -/** - * Contains all active sessions for an authenticated user. 
- */ - -// required attributes -const SessionsListR = t.interface({ - sessions: t.readonlyArray(SessionInfo, "array of SessionInfo") -}); - -// optional attributes -const SessionsListO = t.partial({}); - -export const SessionsList = t.exact( - t.intersection([SessionsListR, SessionsListO], "SessionsList") -); - -export type SessionsList = t.TypeOf; diff --git a/UserDataDeleteOrchestrator/session-utils/Timestamp.ts b/UserDataDeleteOrchestrator/session-utils/Timestamp.ts deleted file mode 100644 index be3c5f87..00000000 --- a/UserDataDeleteOrchestrator/session-utils/Timestamp.ts +++ /dev/null @@ -1,15 +0,0 @@ -/** - * Do not edit this file it is auto-generated by italia-utils / gen-api-models. - * See https://github.com/teamdigitale/italia-utils - */ -/* tslint:disable */ - -import { UTCISODateFromString as UTCISODateFromStringT } from "italia-ts-commons/lib/dates"; -import * as t from "io-ts"; - -/** - * A date-time field in ISO-8601 format and UTC timezone. - */ - -export type Timestamp = t.TypeOf; -export const Timestamp = UTCISODateFromStringT; diff --git a/UserDataDeleteOrchestrator/session-utils/UserMetadata.ts b/UserDataDeleteOrchestrator/session-utils/UserMetadata.ts deleted file mode 100644 index 7571873d..00000000 --- a/UserDataDeleteOrchestrator/session-utils/UserMetadata.ts +++ /dev/null @@ -1,23 +0,0 @@ -/** - * Do not edit this file it is auto-generated by italia-utils / gen-api-models. - * See https://github.com/teamdigitale/italia-utils - */ -/* tslint:disable */ - -import * as t from "io-ts"; - -// required attributes -const UserMetadataR = t.interface({ - version: t.number, - - metadata: t.string -}); - -// optional attributes -const UserMetadataO = t.partial({}); - -export const UserMetadata = t.exact( - t.intersection([UserMetadataR, UserMetadataO], "UserMetadata") -); - -export type UserMetadata = t.TypeOf; diff --git a/UserDataDeleteOrchestrator/session-utils/errorsFormatter.ts b/UserDataDeleteOrchestrator/session-utils/errorsFormatter.ts deleted file mode 100644 index 7488e998..00000000 --- a/UserDataDeleteOrchestrator/session-utils/errorsFormatter.ts +++ /dev/null @@ -1,14 +0,0 @@ -/** - * Merge into one single Error several errors provided in input and add a context description - * @param errors - * @param context - * @returns A single Error instance with a formatted message. 
- */ -export function multipleErrorsFormatter( - errors: ReadonlyArray, - context: string -): Error { - return new Error( - errors.map(_ => `value [${_.message}]`).join(` at [context: ${context}]\n`) - ); -} diff --git a/UserDataDeleteOrchestrator/session-utils/redis.ts b/UserDataDeleteOrchestrator/session-utils/redis.ts deleted file mode 100644 index 40a0eb94..00000000 --- a/UserDataDeleteOrchestrator/session-utils/redis.ts +++ /dev/null @@ -1,34 +0,0 @@ -import * as redis from "redis"; -import RedisClustr = require("redis-clustr"); -const log = console; - -export function createSimpleRedisClient(redisUrl?: string): redis.RedisClient { - const redisUrlOrDefault = redisUrl || "redis://redis"; - log.info("Creating SIMPLE redis client", { url: redisUrlOrDefault }); - return redis.createClient(redisUrlOrDefault); -} - -export function createClusterRedisClient( - redisUrl: string, - password?: string, - port?: string -): redis.RedisClient { - const DEFAULT_REDIS_PORT = "6379"; - - const redisPort: number = parseInt(port || DEFAULT_REDIS_PORT, 10); - log.info("Creating CLUSTER redis client", { url: redisUrl }); - return new RedisClustr({ - redisOptions: { - auth_pass: password, - tls: { - servername: redisUrl - } - }, - servers: [ - { - host: redisUrl, - port: redisPort - } - ] - }); -} diff --git a/UserDataDeleteOrchestrator/session-utils/redisSessionStorage.ts b/UserDataDeleteOrchestrator/session-utils/redisSessionStorage.ts deleted file mode 100644 index f2ad1f3d..00000000 --- a/UserDataDeleteOrchestrator/session-utils/redisSessionStorage.ts +++ /dev/null @@ -1,270 +0,0 @@ -/** - * This service uses the Redis client to store and retrieve session information. - */ -import { array } from "fp-ts/lib/Array"; -import { - Either, - isLeft, - left, - parseJSON, - right, - toError -} from "fp-ts/lib/Either"; -import { fromEither, taskEither, tryCatch } from "fp-ts/lib/TaskEither"; -import * as t from "io-ts"; -import { errorsToReadableMessages } from "italia-ts-commons/lib/reporters"; -import { FiscalCode } from "italia-ts-commons/lib/strings"; -import * as redis from "redis"; -import { isArray } from "util"; -import { multipleErrorsFormatter } from "./errorsFormatter"; -import { SessionToken, WalletToken } from "./token"; - -import RedisStorageUtils from "./redisStorageUtils"; - -const sessionKeyPrefix = "SESSION-"; -const walletKeyPrefix = "WALLET-"; -const userSessionsSetKeyPrefix = "USERSESSIONS-"; -const sessionInfoKeyPrefix = "SESSIONINFO-"; -const blockedUserSetKey = "BLOCKEDUSERS"; -const userMetadataPrefix = "USERMETA-"; - -export const sessionNotFoundError = new Error("Session not found"); - -// a partial representation of an User, to not include the full model -const User = t.interface({ - fiscal_code: FiscalCode, - session_token: SessionToken, - wallet_token: WalletToken -}); -type User = t.TypeOf; - -const log = console; - -export default class RedisSessionStorage extends RedisStorageUtils { - constructor(private readonly redisClient: redis.RedisClient) { - super(); - } - - /** - * Delete all user session data - * @param fiscalCode - */ - public async delByFiscalCode( - fiscalCode: FiscalCode - ): Promise> { - const sessionsOrError = await this.readSessionInfoKeys(fiscalCode); - - const delSingleSession = ( - token: SessionToken - ): Promise> => - this.loadSessionBySessionToken(token) - .then(e => { - const user: User = e.getOrElseL(err => { - throw err; - }); - return this.del(user.session_token, user.wallet_token); - }) - .catch(_ => { - // if I didn't find a user by it's token, I 
assume there's nothing about that user, so its data is deleted already - return right(true); - }); - - const delEverySession = sessionTokens => - array - .sequence(taskEither)( - sessionTokens.map(sessionInfoKey => - fromEither( - SessionToken.decode(sessionInfoKey).mapLeft( - _ => new Error("Error decoding token") - ) - ).chain((token: SessionToken) => - tryCatch(() => delSingleSession(token), toError).chain(fromEither) - ) - ) - ) - .chain(_ => - tryCatch(() => this.delSessionInfoKeys(fiscalCode), toError).chain( - fromEither - ) - ); - - return fromEither(sessionsOrError) - .foldTaskEither( - _ => fromEither(right(true)), - sessionInfoKeys => - delEverySession( - sessionInfoKeys.map(sessionInfoKey => - sessionInfoKey.replace(sessionInfoKeyPrefix, "") - ) - ) - ) - .run(); - } - - public delUserMetadataByFiscalCode( - fiscalCode: string - ): Promise> { - return new Promise>(resolve => { - log.info(`Deleting metadata for ${fiscalCode}`); - this.redisClient.del(`${userMetadataPrefix}${fiscalCode}`, err => { - if (err) { - resolve(left(err)); - } else { - resolve(right(true)); - } - }); - }); - } - - public setBlockedUser(fiscalCode: string): Promise> { - return new Promise>(resolve => { - log.info(`Adding ${fiscalCode} to ${blockedUserSetKey} set`); - this.redisClient.sadd(blockedUserSetKey, fiscalCode, err => - resolve(err ? left(err) : right(true)) - ); - }); - } - public unsetBlockedUser(fiscalCode: string): Promise> { - return new Promise>(resolve => { - log.info(`Removing ${fiscalCode} from ${blockedUserSetKey} set`); - this.redisClient.srem(blockedUserSetKey, fiscalCode, (err, response) => - resolve( - this.falsyResponseToError( - this.integerReply(err, response, 1), - new Error( - "Unexpected response from redis client deleting blockedUserKey" - ) - ) - ) - ); - }); - } - - /** - * Return a Session for this token. - */ - private async loadSessionBySessionToken( - token: SessionToken - ): Promise> { - return new Promise(resolve => { - log.info(`Reading user session for token ${token}`); - this.redisClient.get(`${sessionKeyPrefix}${token}`, (err, value) => { - if (err) { - // Client returns an error. - return resolve(left(err)); - } - - if (value === null) { - return resolve(left(sessionNotFoundError)); - } - const errorOrDeserializedUser = this.parseUser(value); - return resolve(errorOrDeserializedUser); - }); - }); - } - - /** - * {@inheritDoc} - */ - private async del( - sessionToken: SessionToken, - walletToken: WalletToken - ): Promise> { - const deleteSessionTokens = new Promise>(resolve => { - log.info(`Deleting session token ${sessionToken}`); - // Remove the specified key. A key is ignored if it does not exist. - // @see https://redis.io/commands/del - this.redisClient.del( - `${sessionKeyPrefix}${sessionToken}`, - (err, response) => - resolve( - this.falsyResponseToError( - this.integerReply(err, response, 1), - new Error( - "Unexpected response from redis client deleting sessionInfoKey and sessionToken." - ) - ) - ) - ); - }); - - const deleteWalletToken = new Promise>(resolve => { - log.info(`Deleting wallet token ${walletToken}`); - // Remove the specified key. A key is ignored if it does not exist. - // @see https://redis.io/commands/del - this.redisClient.del( - `${walletKeyPrefix}${walletToken}`, - (err, response) => - resolve( - this.falsyResponseToError( - this.integerReply(err, response, 1), - new Error( - "Unexpected response from redis client deleting walletToken." 
- ) - ) - ) - ); - }); - - const deletePromises = await Promise.all([ - deleteSessionTokens, - deleteWalletToken - ]); - - const isDeleteFailed = deletePromises.some(isLeft); - if (isDeleteFailed) { - return left( - multipleErrorsFormatter( - deletePromises.filter(isLeft).map(_ => _.value), - "RedisSessionStorage.del" - ) - ); - } - return right(true); - } - - private readSessionInfoKeys( - fiscalCode: FiscalCode - ): Promise>> { - return new Promise>>(resolve => { - log.info(`Reading session list ${userSessionsSetKeyPrefix}${fiscalCode}`); - this.redisClient.smembers( - `${userSessionsSetKeyPrefix}${fiscalCode}`, - (err, response) => resolve(this.arrayStringReply(err, response)) - ); - }); - } - - private delSessionInfoKeys( - fiscalCode: FiscalCode - ): Promise> { - return new Promise>(resolve => { - log.info( - `Deleting session info ${userSessionsSetKeyPrefix}${fiscalCode}` - ); - this.redisClient.del(`${userSessionsSetKeyPrefix}${fiscalCode}`, err => - resolve(err ? left(err) : right(true)) - ); - }); - } - - private arrayStringReply( - err: Error | null, - replay: ReadonlyArray | undefined - ): Either> { - if (err) { - return left(err); - } else if (!isArray(replay) || replay.length === 0) { - return left(sessionNotFoundError); - } - return right(replay); - } - - private parseUser(value: string): Either { - return parseJSON(value, toError).chain(data => { - return User.decode(data).mapLeft(err => { - return new Error(errorsToReadableMessages(err).join("/")); - }); - }); - } -} diff --git a/UserDataDeleteOrchestrator/session-utils/redisStorageUtils.ts b/UserDataDeleteOrchestrator/session-utils/redisStorageUtils.ts deleted file mode 100644 index 073326f0..00000000 --- a/UserDataDeleteOrchestrator/session-utils/redisStorageUtils.ts +++ /dev/null @@ -1,53 +0,0 @@ -import { Either, isLeft, left, right } from "fp-ts/lib/Either"; -import { isNumber } from "util"; - -export default class RedisStorageUtils { - /** - * Parse a Redis single string reply. - * - * @see https://redis.io/topics/protocol#simple-string-reply. - */ - protected singleStringReply( - err: Error | null, - reply: "OK" | undefined - ): Either { - if (err) { - return left(err); - } - - return right(reply === "OK"); - } - - /** - * Parse a Redis integer reply. 
- * - * @see https://redis.io/topics/protocol#integer-reply - */ - protected integerReply( - err: Error | null, - reply: unknown, - expectedReply?: number - ): Either { - if (err) { - return left(err); - } - if (expectedReply !== undefined && expectedReply !== reply) { - return right(false); - } - return right(isNumber(reply)); - } - - protected falsyResponseToError( - response: Either, - error: Error - ): Either { - if (isLeft(response)) { - return left(response.value); - } else { - if (response.value) { - return right(true); - } - return left(error); - } - } -} diff --git a/UserDataDeleteOrchestrator/session-utils/token.ts b/UserDataDeleteOrchestrator/session-utils/token.ts deleted file mode 100644 index 755da262..00000000 --- a/UserDataDeleteOrchestrator/session-utils/token.ts +++ /dev/null @@ -1,14 +0,0 @@ -import * as t from "io-ts"; -import { tag } from "italia-ts-commons/lib/types"; - -interface ISessionTokenTag { - readonly kind: "SessionToken"; -} -export const SessionToken = tag()(t.string); -export type SessionToken = t.TypeOf; - -interface IWalletTokenTag { - readonly kind: "WalletToken"; -} -export const WalletToken = tag()(t.string); -export type WalletToken = t.TypeOf; diff --git a/UserDataDeleteOrchestrator/utils.ts b/UserDataDeleteOrchestrator/utils.ts new file mode 100644 index 00000000..d9bada7b --- /dev/null +++ b/UserDataDeleteOrchestrator/utils.ts @@ -0,0 +1,15 @@ +import { FiscalCode } from "italia-ts-commons/lib/strings"; +import { Day, Hour } from "italia-ts-commons/lib/units"; + +export const ABORT_EVENT = "user-data-processing-delete-abort"; + +export const makeOrchestratorId = (fiscalCode: FiscalCode): string => + `${fiscalCode}-USER-DATA-DELETE`; + +const aHourInMilliseconds = 60 * 60 * 1000; +export const addHours = (now: Date, hours: Hour) => + new Date(now.getTime() + hours * aHourInMilliseconds); + +const aDayInMilliseconds = 24 * aHourInMilliseconds; +export const addDays = (now: Date, days: Day) => + new Date(now.getTime() + days * aDayInMilliseconds); diff --git a/UserDataDownloadOrchestrator/__tests__/handler.test.ts b/UserDataDownloadOrchestrator/__tests__/handler.test.ts index 92be435f..f187c7f3 100644 --- a/UserDataDownloadOrchestrator/__tests__/handler.test.ts +++ b/UserDataDownloadOrchestrator/__tests__/handler.test.ts @@ -1,6 +1,6 @@ // tslint:disable: no-any -import { IFunctionContext } from "durable-functions/lib/src/classes"; +import { IOrchestrationFunctionContext } from "durable-functions/lib/src/classes"; import { UserDataProcessingStatusEnum } from "io-functions-commons/dist/generated/definitions/UserDataProcessingStatus"; import { mockOrchestratorCallActivity, @@ -77,7 +77,7 @@ const consumeOrchestrator = (orch: any) => { }; // just a convenient cast, good for every test case -const context = (mockOrchestratorContext as unknown) as IFunctionContext; +const context = (mockOrchestratorContext as unknown) as IOrchestrationFunctionContext; // tslint:disable-next-line: no-big-function describe("UserDataDownloadOrchestrator", () => { diff --git a/UserDataDownloadOrchestrator/handler.ts b/UserDataDownloadOrchestrator/handler.ts index c997f0e5..ac48a0a3 100644 --- a/UserDataDownloadOrchestrator/handler.ts +++ b/UserDataDownloadOrchestrator/handler.ts @@ -1,8 +1,8 @@ import { - IFunctionContext, + IOrchestrationFunctionContext, RetryOptions } from "durable-functions/lib/src/classes"; -import { isLeft } from "fp-ts/lib/Either"; +import { isLeft, toError } from "fp-ts/lib/Either"; import { UserDataProcessingStatusEnum } from 
"io-functions-commons/dist/generated/definitions/UserDataProcessingStatus"; import * as t from "io-ts"; import { readableReport } from "italia-ts-commons/lib/reporters"; @@ -10,7 +10,10 @@ import { ActivityResultSuccess as ExtractUserDataActivityResultSuccess } from ". import { ActivityResultSuccess as SendUserDataDownloadMessageActivityResultSuccess } from "../SendUserDataDownloadMessageActivity/handler"; import { ActivityResultSuccess as SetUserDataProcessingStatusActivityResultSuccess } from "../SetUserDataProcessingStatusActivity/handler"; import { ProcessableUserDataDownload } from "../UserDataProcessingTrigger"; -import { trackEvent, trackException } from "../utils/appinsights"; +import { + trackUserDataDownloadEvent, + trackUserDataDownloadException +} from "../utils/appinsightsEvents"; const logPrefix = "UserDataDownloadOrchestrator"; @@ -73,8 +76,8 @@ const toActivityFailure = ( }); export const handler = function*( - context: IFunctionContext -): IterableIterator { + context: IOrchestrationFunctionContext +): Generator { const document = context.df.getInput(); // This check has been done on the trigger, so it should never fail. // However, it's worth the effort to check it twice @@ -144,44 +147,28 @@ export const handler = function*( }); }); - trackEvent({ - // tslint:disable-next-line: no-duplicate-string - name: "user.data.download", - properties: { - userDataProcessingId: currentUserDataProcessing.userDataProcessingId - }, - tagOverrides: { - "ai.operation.id": currentUserDataProcessing.userDataProcessingId, - "ai.operation.parentId": currentUserDataProcessing.userDataProcessingId - } - }); + trackUserDataDownloadEvent("done", currentUserDataProcessing); return OrchestratorSuccess.encode({ kind: "SUCCESS" }); } catch (error) { - trackException({ - exception: new Error(error), - properties: { - name: "user.data.download", - userDataProcessingId: currentUserDataProcessing.userDataProcessingId - } - }); - context.log.error( - `${logPrefix}|ERROR|Failed processing user data for download: ${error.message}` + trackUserDataDownloadException( + "failed", + toError(error), + currentUserDataProcessing ); + context.log.error(`${logPrefix}|ERROR|${JSON.stringify(error)}`); SetUserDataProcessingStatusActivityResultSuccess.decode( yield context.df.callActivity("SetUserDataProcessingStatusActivity", { currentRecord: currentUserDataProcessing, nextStatus: UserDataProcessingStatusEnum.FAILED }) ).getOrElseL(err => { - trackException({ - exception: new Error(readableReport(err)), - properties: { - name: "user.data.download", - type: "unhandled exception when trying to set document as FAILED", - userDataProcessingId: currentUserDataProcessing.userDataProcessingId - } - }); + trackUserDataDownloadException( + "unhandled_failed_status", + new Error(readableReport(err)), + currentUserDataProcessing + ); + throw new Error( `Activity SetUserDataProcessingStatusActivity (status=FAILED) failed: ${readableReport( err diff --git a/UserDataDownloadOrchestrator/utils.ts b/UserDataDownloadOrchestrator/utils.ts new file mode 100644 index 00000000..7ee0e069 --- /dev/null +++ b/UserDataDownloadOrchestrator/utils.ts @@ -0,0 +1,4 @@ +import { FiscalCode } from "italia-ts-commons/lib/strings"; + +export const makeOrchestratorId = (fiscalCode: FiscalCode): string => + `${fiscalCode}-USER-DATA-DOWNLOAD`; diff --git a/UserDataProcessingTrigger/__tests__/index.test.ts b/UserDataProcessingTrigger/__tests__/index.test.ts index cf931de1..352b8081 100644 --- a/UserDataProcessingTrigger/__tests__/index.test.ts +++ 
b/UserDataProcessingTrigger/__tests__/index.test.ts @@ -3,9 +3,25 @@ import { UserDataProcessingChoiceEnum } from "io-functions-commons/dist/generated/definitions/UserDataProcessingChoice"; import { UserDataProcessingStatusEnum } from "io-functions-commons/dist/generated/definitions/UserDataProcessingStatus"; import { UserDataProcessing } from "io-functions-commons/dist/src/models/user_data_processing"; -import { context, mockStartNew } from "../../__mocks__/durable-functions"; +import { + context, + mockRaiseEvent, + mockStartNew +} from "../../__mocks__/durable-functions"; import { aUserDataProcessing } from "../../__mocks__/mocks"; -import { index } from "../index"; +import { + index, + ProcessableUserDataDelete, + ProcessableUserDataDeleteAbort, + ProcessableUserDataDownload +} from "../index"; + +// converts a UserDataProcessing object in a form as it would come from the database +const toUndecoded = (doc: UserDataProcessing) => ({ + ...doc, + createdAt: doc.createdAt.toISOString(), + updatedAt: doc.updatedAt ? doc.updatedAt.toISOString() : undefined +}); const aProcessableDownload = { ...aUserDataProcessing, @@ -13,16 +29,37 @@ const aProcessableDownload = { status: UserDataProcessingStatusEnum.PENDING }; +const aProcessableDelete = { + ...aUserDataProcessing, + choice: UserDataProcessingChoiceEnum.DELETE, + status: UserDataProcessingStatusEnum.PENDING +}; + const aNonProcessableDownloadWrongStatus = { ...aUserDataProcessing, + choice: UserDataProcessingChoiceEnum.DOWNLOAD, + status: UserDataProcessingStatusEnum.WIP +}; + +const aNonProcessableDeleteWrongStatus = { + ...aUserDataProcessing, + choice: UserDataProcessingChoiceEnum.DELETE, status: UserDataProcessingStatusEnum.WIP }; -const aNonProcessableDownloadWrongChoice = { +const aProcessableDeleteAbort = { ...aUserDataProcessing, - choice: UserDataProcessingChoiceEnum.DELETE + choice: UserDataProcessingChoiceEnum.DELETE, + status: UserDataProcessingStatusEnum.ABORTED }; +jest.mock("../../utils/featureFlags", () => ({ + flags: { + ENABLE_USER_DATA_DELETE: true, + ENABLE_USER_DATA_DOWNLOAD: true + } +})); + describe("UserDataProcessingTrigger", () => { beforeEach(() => { jest.clearAllMocks(); @@ -43,17 +80,92 @@ describe("UserDataProcessingTrigger", () => { const processableDocs: ReadonlyArray = [ aProcessableDownload, aProcessableDownload, - aProcessableDownload + aProcessableDelete ]; const input: ReadonlyArray = [ ...processableDocs, aNonProcessableDownloadWrongStatus, - aNonProcessableDownloadWrongChoice + aNonProcessableDeleteWrongStatus + ]; + + await index(context, input); + + expect(mockStartNew).toHaveBeenCalledTimes(processableDocs.length); + }); + + it("should process every processable document (with undecoded data)", async () => { + const processableDocs: ReadonlyArray = [ + aProcessableDownload, + aProcessableDownload, + aProcessableDelete ]; + const processableDocsAbort: ReadonlyArray = [ + aProcessableDeleteAbort, + aProcessableDeleteAbort + ]; + + const input: ReadonlyArray = [ + ...processableDocs, + aNonProcessableDownloadWrongStatus, + ...processableDocsAbort, + aNonProcessableDeleteWrongStatus + ].map(toUndecoded); + await index(context, input); expect(mockStartNew).toHaveBeenCalledTimes(processableDocs.length); + expect(mockRaiseEvent).toHaveBeenCalledTimes(processableDocsAbort.length); + }); +}); + +describe("ProcessableUserDataDownload", () => { + it("should map processable download records", () => { + expect( + ProcessableUserDataDownload.decode(aProcessableDownload).isRight() + ).toBe(true); + }); + it.each` 
+ name | value + ${"delete wrong status"} | ${aNonProcessableDeleteWrongStatus} + ${"download wrong status"} | ${aNonProcessableDownloadWrongStatus} + ${"processable delete"} | ${aProcessableDelete} + `("should not map unprocessable record '$name'", ({ value }) => { + expect(ProcessableUserDataDownload.decode(value).isLeft()).toBe(true); + }); +}); + +describe("ProcessableUserDataDelete", () => { + it("should map processable delete records", () => { + expect(ProcessableUserDataDelete.decode(aProcessableDelete).isRight()).toBe( + true + ); + }); + it.each` + name | value + ${"delete wrong status"} | ${aNonProcessableDeleteWrongStatus} + ${"download wrong status"} | ${aNonProcessableDownloadWrongStatus} + ${"processable download"} | ${aProcessableDownload} + ${"processable delete abort"} | ${aProcessableDeleteAbort} + `("should not map unprocessable record '$name'", ({ value }) => { + expect(ProcessableUserDataDelete.decode(value).isLeft()).toBe(true); + }); +}); + +describe("ProcessableUserDataDeleteAbort", () => { + it("should map processable delete records", () => { + expect( + ProcessableUserDataDeleteAbort.decode(aProcessableDeleteAbort).isRight() + ).toBe(true); + }); + it.each` + name | value + ${"delete wrong status"} | ${aNonProcessableDeleteWrongStatus} + ${"download wrong status"} | ${aNonProcessableDownloadWrongStatus} + ${"processable download"} | ${aProcessableDownload} + ${"processable delete"} | ${aProcessableDelete} + `("should not map unprocessable record '$name'", ({ value }) => { + expect(ProcessableUserDataDeleteAbort.decode(value).isLeft()).toBe(true); }); }); diff --git a/UserDataProcessingTrigger/function.json b/UserDataProcessingTrigger/function.json index c53c927a..ecd24219 100644 --- a/UserDataProcessingTrigger/function.json +++ b/UserDataProcessingTrigger/function.json @@ -5,7 +5,7 @@ "name": "documents", "direction": "in", "leaseCollectionName": "change-feed-leases", - "leaseCollectionPrefix": "userDataDownload", + "leaseCollectionPrefix": "userDataProcessing", "connectionStringSetting": "COSMOSDB_CONNECTION_STRING", "databaseName": "%COSMOSDB_NAME%", "collectionName": "user-data-processing", diff --git a/UserDataProcessingTrigger/index.ts b/UserDataProcessingTrigger/index.ts index 508a92fe..00c9fda6 100644 --- a/UserDataProcessingTrigger/index.ts +++ b/UserDataProcessingTrigger/index.ts @@ -1,10 +1,24 @@ import { Context } from "@azure/functions"; import * as df from "durable-functions"; +import { DurableOrchestrationClient } from "durable-functions/lib/src/classes"; +import { fromNullable, toError } from "fp-ts/lib/Either"; +import { Lazy } from "fp-ts/lib/function"; import { UserDataProcessingChoiceEnum } from "io-functions-commons/dist/generated/definitions/UserDataProcessingChoice"; import { UserDataProcessingStatusEnum } from "io-functions-commons/dist/generated/definitions/UserDataProcessingStatus"; import { UserDataProcessing } from "io-functions-commons/dist/src/models/user_data_processing"; import * as t from "io-ts"; import { readableReport } from "italia-ts-commons/lib/reporters"; +import { + ABORT_EVENT as ABORT_DELETE_EVENT, + makeOrchestratorId as makeDeleteOrchestratorId +} from "../UserDataDeleteOrchestrator/utils"; +import { makeOrchestratorId as makeDownloadOrchestratorId } from "../UserDataDownloadOrchestrator/utils"; +import { + trackUserDataDeleteEvent, + trackUserDataDownloadEvent +} from "../utils/appinsightsEvents"; +import { flags } from "../utils/featureFlags"; +import { isOrchestratorRunning } from "../utils/orchestrator"; const 
logPrefix = "UserDataProcessingTrigger"; @@ -22,57 +36,153 @@ export const ProcessableUserDataDownload = t.intersection([ }) ]); +// models the subset of UserDataProcessing documents that this orchestrator accepts +export type ProcessableUserDataDelete = t.TypeOf< + typeof ProcessableUserDataDelete +>; +export const ProcessableUserDataDelete = t.intersection([ + UserDataProcessing, + // ony the subset of UserDataProcessing documents + // with the following characteristics must be processed + t.interface({ + choice: t.literal(UserDataProcessingChoiceEnum.DELETE), + status: t.literal(UserDataProcessingStatusEnum.PENDING) + }) +]); + +// models the subset of UserDataProcessing documents that are delete abort requests +export type ProcessableUserDataDeleteAbort = t.TypeOf< + typeof ProcessableUserDataDeleteAbort +>; +export const ProcessableUserDataDeleteAbort = t.intersection([ + UserDataProcessing, + // ony the subset of UserDataProcessing documents + // with the following characteristics must be processed + t.interface({ + choice: t.literal(UserDataProcessingChoiceEnum.DELETE), + status: t.literal(UserDataProcessingStatusEnum.ABORTED) + }) +]); + const CosmosDbDocumentCollection = t.readonlyArray(t.readonly(t.UnknownRecord)); type CosmosDbDocumentCollection = t.TypeOf; -interface ITaskDescriptor { - orchestrator: string; - id: ProcessableUserDataDownload["userDataProcessingId"]; - input: ProcessableUserDataDownload; -} +const startOrchestrator = async ( + dfClient: DurableOrchestrationClient, + orchestratorName: + | "UserDataDownloadOrchestrator" + | "UserDataDeleteOrchestrator", + orchestratorId: string, + orchestratorInput: unknown +) => { + return isOrchestratorRunning(dfClient, orchestratorId) + .fold( + error => { + throw error; + }, + _ => + !_.isRunning + ? dfClient.startNew( + orchestratorName, + orchestratorId, + orchestratorInput + ) + : null + ) + .run(); +}; export function index( context: Context, input: unknown -): Promise { +): Promise> { const dfClient = df.getClient(context); - const tasksDescriptors = CosmosDbDocumentCollection.decode(input) + const operations = CosmosDbDocumentCollection.decode(input) .getOrElseL(err => { throw Error(`${logPrefix}: cannot decode input [${readableReport(err)}]`); }) .reduce( - (tasks, maybeProcessable) => - ProcessableUserDataDownload.decode(maybeProcessable).fold( - _ => { - context.log.warn( - `${logPrefix}: skipping document [${JSON.stringify( - maybeProcessable - )}]` - ); - return tasks; - }, - processable => [ - ...tasks, - { - id: processable.userDataProcessingId, - input: processable, - orchestrator: "UserDataDownloadOrchestrator" - } - ] - ), - [] as readonly ITaskDescriptor[] + (lazyOperations, processableOrNot) => + t + .union([ + ProcessableUserDataDownload, + ProcessableUserDataDelete, + ProcessableUserDataDeleteAbort + ]) + .decode(processableOrNot) + .chain(processable => + fromNullable(undefined)( + flags.ENABLE_USER_DATA_DOWNLOAD && + ProcessableUserDataDownload.is(processable) + ? () => { + context.log.info( + `${logPrefix}: starting UserDataDownloadOrchestrator with ${processable.fiscalCode}` + ); + trackUserDataDownloadEvent("started", processable); + const orchestratorId = makeDownloadOrchestratorId( + processable.fiscalCode + ); + return startOrchestrator( + dfClient, + "UserDataDownloadOrchestrator", + orchestratorId, + processable + ); + } + : flags.ENABLE_USER_DATA_DELETE && + ProcessableUserDataDelete.is(processable) + ? 
() => { + context.log.info( + `${logPrefix}: starting UserDataDeleteOrchestrator with ${processable.fiscalCode}` + ); + trackUserDataDeleteEvent("started", processable); + const orchestratorId = makeDeleteOrchestratorId( + processable.fiscalCode + ); + return startOrchestrator( + dfClient, + "UserDataDeleteOrchestrator", + orchestratorId, + processable + ); + } + : ProcessableUserDataDeleteAbort.is(processable) + ? () => { + context.log.info( + `${logPrefix}: aborting UserDataDeleteOrchestrator with ${processable.fiscalCode}` + ); + trackUserDataDeleteEvent("abort_requested", processable); + const orchestratorId = makeDeleteOrchestratorId( + processable.fiscalCode + ); + return dfClient.raiseEvent( + orchestratorId, + ABORT_DELETE_EVENT, + {} + ); + } + : undefined + ) + ) + .fold( + _ => { + context.log.warn( + `${logPrefix}: skipping document [${JSON.stringify( + processableOrNot + )}]` + ); + return lazyOperations; + }, + lazyOp => [...lazyOperations, lazyOp] + ), + [] as ReadonlyArray>> ); context.log.info( - `${logPrefix}: processing ${tasksDescriptors.length} document${ - tasksDescriptors.length === 1 ? "" : "s" + `${logPrefix}: processing ${operations.length} document${ + operations.length === 1 ? "" : "s" }` ); - const startAllNew = () => - tasksDescriptors.map(({ orchestrator, id, input: orchestratorInput }) => - dfClient.startNew(orchestrator, id, orchestratorInput) - ); - - return Promise.all(startAllNew()); + return Promise.all(operations.map(op => op())); } diff --git a/__mocks__/durable-functions.ts b/__mocks__/durable-functions.ts index d34331bf..ed910a38 100644 --- a/__mocks__/durable-functions.ts +++ b/__mocks__/durable-functions.ts @@ -22,8 +22,11 @@ export const mockTerminate = jest.fn(async (_, __) => { return; }); +export const mockRaiseEvent = jest.fn().mockImplementation(async () => void 0); + export const getClient = jest.fn().mockImplementation(() => ({ getStatus: mockGetStatus, + raiseEvent: mockRaiseEvent, startNew: mockStartNew, terminate: mockTerminate })); @@ -71,14 +74,25 @@ export const mockCallSubOrchestrator = jest name })); export const mockOrchestratorSetCustomStatus = jest.fn(); -export const mockOrchestratorCreateTimer = jest.fn(); +export const mockOrchestratorCancelTimer = jest.fn(); +export const mockOrchestratorCreateTimer = () => ({ + cancel: mockOrchestratorCancelTimer +}); +export const mockWaitForExternalEvent = jest + .fn() + .mockReturnValue("mockWaitForExternalEvent"); + +export const mockOrchestratorTaskAny = jest + .fn() + // mock implementation: return the first task + .mockImplementation(([_]) => _); export const mockOrchestratorContext = { ...context, df: { Task: { all: jest.fn(), - any: jest.fn() + any: mockOrchestratorTaskAny }, callActivity: mockOrchestratorCallActivity, callActivityWithRetry: mockOrchestratorCallActivityWithRetry, @@ -87,7 +101,8 @@ export const mockOrchestratorContext = { currentUtcDateTime: new Date(), getClient, getInput: mockOrchestratorGetInput, - setCustomStatus: mockOrchestratorSetCustomStatus + setCustomStatus: mockOrchestratorSetCustomStatus, + waitForExternalEvent: mockWaitForExternalEvent } }; diff --git a/__mocks__/node-fetch.ts b/__mocks__/node-fetch.ts new file mode 100644 index 00000000..14cc63c8 --- /dev/null +++ b/__mocks__/node-fetch.ts @@ -0,0 +1,48 @@ +const mockJsonBody = { foo: "bar" }; +const mockTextBody = "foobar"; + +export const mockResponseJson = jest + .fn() + .mockImplementation(async () => mockJsonBody); + +export const mockResponseText = jest + .fn() + .mockImplementation(async () => 
mockTextBody); + +interface IMockResponseValues { + jsonImpl?: () => Promise; + status?: number; + textImpl?: () => Promise; +} + +const getMockResponse = ({ + jsonImpl = async () => mockJsonBody, + status = 100, + textImpl = async () => mockTextBody +}: IMockResponseValues = {}): Response => + (({ + clone: jest.fn(() => getMockResponse({ jsonImpl, status, textImpl })), + json: jest.fn(jsonImpl), + status, + text: jest.fn(textImpl) + } as unknown) as Response); + +export const mockResponse: Response = getMockResponse(); + +// use this method to create an instance of fetch which is bound to predefined values +export const createMockFetch = ({ + jsonImpl = async () => mockJsonBody, + status = 100, + textImpl = async () => mockTextBody +}: IMockResponseValues = {}): typeof fetch => + jest + .fn() + .mockImplementation(async (_: RequestInfo, __?: RequestInit) => + getMockResponse({ jsonImpl, status, textImpl }) + ); + +const mockFetch = jest + .fn() + .mockImplementation(async (_: RequestInfo, __?: RequestInit) => mockResponse); + +export default mockFetch; diff --git a/package.json b/package.json index a705b966..55d31079 100644 --- a/package.json +++ b/package.json @@ -10,7 +10,7 @@ "start": "dotenv -e .env func start --javascript", "predev": "npm-run-all generate build", "dev": "npm-run-all --parallel start watch", - "pretest": "npm-run-all generate:*", + "pretest": "npm-run-all generate", "test": "yarn test:only", "test:only": "jest", "pretest:coverage": "npm-run-all generate:*", @@ -21,6 +21,7 @@ "dist:zip": "zip -r dist.zip . --exclude @.funcignore --exclude .funcignore", "predeploy": "npm-run-all generate build dist:modules", "generate:definitions": "rimraf ./generated/definitions && shx mkdir -p ./generated/definitions && gen-api-models --api-spec ./openapi/index.yaml --out-dir ./generated/definitions", + "generate:session-api": "rimraf ./generated/session-api && shx mkdir -p ./generated/session-api && gen-api-models --api-spec https://raw.githubusercontent.com/pagopa/io-backend/master/api_session.yaml --out-dir ./generated/session-api --request-types --response-decoders", "generate": "npm-run-all generate:*" }, "description": "", @@ -61,10 +62,10 @@ "archiver-zip-encrypted": "^1.0.8", "azure-storage": "^2.10.3", "documentdb": "^1.12.2", - "durable-functions": "^1.2.4", + "durable-functions": "^1.4.3", "express": "^4.15.3", "fp-ts": "1.17.0", - "io-functions-commons": "^10.0.0", + "io-functions-commons": "^10.7.0", "io-functions-express": "^0.1.0", "io-ts": "1.8.5", "italia-ts-commons": "^8.5.0", diff --git a/tslint.json b/tslint.json index a16035d5..bbb08940 100644 --- a/tslint.json +++ b/tslint.json @@ -7,5 +7,11 @@ "rules": { "no-submodule-imports": false }, - "rulesDirectory": [] + "rulesDirectory": [], + "linterOptions": { + "exclude": [ + "node_modules/**/*", + "generated/**/*.ts" + ] + } } \ No newline at end of file diff --git a/utils/__tests__/sessionApiClient.test.ts b/utils/__tests__/sessionApiClient.test.ts new file mode 100644 index 00000000..e93e9d77 --- /dev/null +++ b/utils/__tests__/sessionApiClient.test.ts @@ -0,0 +1,145 @@ +// tslint:disable: no-duplicate-string + +import { readableReport } from "italia-ts-commons/lib/reporters"; +import { aFiscalCode } from "../../__mocks__/mocks"; +import { createMockFetch } from "../../__mocks__/node-fetch"; +import { ProblemJson } from "../../generated/session-api/ProblemJson"; +import { SuccessResponse } from "../../generated/session-api/SuccessResponse"; +import { createClient, WithDefaultsT } from "../sessionApiClient"; + 
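
Before the fixtures that follow, here is a minimal sketch of how the generated session API client is presumably meant to be wired up outside the tests. It is illustrative only and not part of this patch: the SESSION_API_URL and SESSION_API_KEY environment variable names are hypothetical placeholders, and using timeoutFetch from utils/fetch.ts is an assumption based on that module being added in this same changeset (import paths follow the test file's location).

import { createClient, WithDefaultsT } from "../sessionApiClient";
import { timeoutFetch } from "../fetch";

// hypothetical configuration names, only for illustration
const sessionApiUrl = process.env.SESSION_API_URL || "";
const sessionApiKey = process.env.SESSION_API_KEY || "";

// bind the "token" parameter once, so callers only pass the fiscalCode
const withApiKey: WithDefaultsT<"token"> = apiOperation => ({ fiscalCode }) =>
  apiOperation({ fiscalCode, token: sessionApiKey });

const sessionClient = createClient({
  baseUrl: sessionApiUrl,
  fetchApi: timeoutFetch,
  withDefaults: withApiKey
});

// each operation resolves to an Either of decoding errors or a typed
// { status, value } response, which is what the tests below assert on, e.g.:
// const result = await sessionClient.lockUserSession({ fiscalCode });
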
+const baseUrl = ""; + +const anApyKey = "QWERTTYUIP12334"; + +const aSuccessResponse = SuccessResponse.decode({ message: "ok" }).getOrElseL( + err => { + throw new Error(`Invalid mock fr SuccessResponse: ${readableReport(err)}`); + } +); +const aProblemJson500 = ProblemJson.decode({ + status: 400, + title: "Server Error" +}).getOrElseL(err => { + throw new Error(`Invalid mock fr ProblemJson400: ${readableReport(err)}`); +}); + +const withDefaultApiKey: WithDefaultsT<"token"> = apiOperation => ({ + fiscalCode +}) => apiOperation({ fiscalCode, token: anApyKey }); + +describe("sessionApiClient#lockUserSession", () => { + it.each` + name | status | payload + ${"Success"} | ${200} | ${aSuccessResponse} + ${"Not Found"} | ${404} | ${undefined} + ${"Server Error"} | ${500} | ${aProblemJson500} + ${"Bad Request"} | ${400} | ${undefined} + ${"Not Authorized"} | ${401} | ${undefined} + `("should handle $name response", async ({ status, payload }) => { + const fetchApi = createMockFetch({ + jsonImpl: async () => payload, + status + }); + const client = createClient({ baseUrl, fetchApi }); + + const result = await client.lockUserSession({ + fiscalCode: aFiscalCode, + token: anApyKey + }); + + expect(result.isRight()).toBe(true); + expect(result.value).toEqual({ + status, + value: payload + }); + }); + + it("should work with a default parameter", async () => { + // just any working case + const fetchApi = createMockFetch({ + jsonImpl: async () => aSuccessResponse, + status: 200 + }); + + const client = createClient({ + baseUrl, + fetchApi, + withDefaults: withDefaultApiKey + }); + + await client.lockUserSession({ + fiscalCode: aFiscalCode + }); + + // fetchApi is actually a jest.Mock, can be spied + const spiedFetch = fetchApi as jest.Mock; + + // check that arguments are correctly passed to fetch + expect(spiedFetch).toHaveBeenCalledWith( + expect.stringContaining(aFiscalCode), + expect.any(Object) + ); + expect(spiedFetch).toHaveBeenCalledWith( + expect.stringContaining(anApyKey), + expect.any(Object) + ); + }); +}); + +describe("sessionApiClient#unlockUserSession", () => { + it.each` + name | status | payload + ${"Success"} | ${200} | ${aSuccessResponse} + ${"Server Error"} | ${500} | ${aProblemJson500} + ${"Bad Request"} | ${400} | ${undefined} + ${"Not Authorized"} | ${401} | ${undefined} + `("should handle $name response", async ({ status, payload }) => { + const fetchApi = createMockFetch({ + jsonImpl: async () => payload, + status + }); + const client = createClient({ baseUrl, fetchApi }); + + const result = await client.unlockUserSession({ + fiscalCode: aFiscalCode, + token: anApyKey + }); + + expect(result.isRight()).toBe(true); + expect(result.value).toEqual({ + status, + value: payload + }); + }); + + it("should work with a default parameter", async () => { + // just any working case + const fetchApi = createMockFetch({ + jsonImpl: async () => aSuccessResponse, + status: 200 + }); + + const client = createClient({ + baseUrl, + fetchApi, + withDefaults: withDefaultApiKey + }); + + await client.unlockUserSession({ + fiscalCode: aFiscalCode + }); + + // fetchApi is actually a jest.Mock, can be spied + const spiedFetch = fetchApi as jest.Mock; + + // check that arguments are correctly passed to fetch + expect(spiedFetch).toHaveBeenCalledWith( + expect.stringContaining(aFiscalCode), + expect.any(Object) + ); + expect(spiedFetch).toHaveBeenCalledWith( + expect.stringContaining(anApyKey), + expect.any(Object) + ); + }); +}); diff --git a/utils/appinsightsEvents.ts b/utils/appinsightsEvents.ts new 
file mode 100644 index 00000000..d8a45605 --- /dev/null +++ b/utils/appinsightsEvents.ts @@ -0,0 +1,70 @@ +// tslint:disable: no-duplicate-string + +import { UserDataProcessing } from "io-functions-commons/dist/src/models/user_data_processing"; +import { trackEvent, trackException } from "./appinsights"; + +export const trackUserDataDeleteEvent = ( + eventName: string, + userDataProcessing: UserDataProcessing +) => + trackEvent({ + // tslint:disable-next-line: no-duplicate-string + name: `user.data.delete.${eventName}`, + properties: { + userDataProcessingId: userDataProcessing.userDataProcessingId + }, + tagOverrides: { + "ai.operation.id": userDataProcessing.userDataProcessingId, + "ai.operation.parentId": userDataProcessing.userDataProcessingId + } + }); + +export const trackUserDataDeleteException = ( + eventName: string, + exception: Error, + userDataProcessing: UserDataProcessing +) => + trackException({ + exception, + properties: { + name: `user.data.delete.${eventName}`, + userDataProcessingId: userDataProcessing.userDataProcessingId + }, + tagOverrides: { + "ai.operation.id": userDataProcessing.userDataProcessingId, + "ai.operation.parentId": userDataProcessing.userDataProcessingId + } + }); + +export const trackUserDataDownloadEvent = ( + eventName: string, + userDataProcessing: UserDataProcessing +) => + trackEvent({ + // tslint:disable-next-line: no-duplicate-string + name: `user.data.download.${eventName}`, + properties: { + userDataProcessingId: userDataProcessing.userDataProcessingId + }, + tagOverrides: { + "ai.operation.id": userDataProcessing.userDataProcessingId, + "ai.operation.parentId": userDataProcessing.userDataProcessingId + } + }); + +export const trackUserDataDownloadException = ( + eventName: string, + exception: Error, + userDataProcessing: UserDataProcessing +) => + trackException({ + exception, + properties: { + name: `user.data.download.${eventName}`, + userDataProcessingId: userDataProcessing.userDataProcessingId + }, + tagOverrides: { + "ai.operation.id": userDataProcessing.userDataProcessingId, + "ai.operation.parentId": userDataProcessing.userDataProcessingId + } + }); diff --git a/utils/extensions/README.md b/utils/extensions/README.md new file mode 100644 index 00000000..cffdb446 --- /dev/null +++ b/utils/extensions/README.md @@ -0,0 +1 @@ +This folder contains modules that extend functionalities of `io-ts-commons` package. They are developed locally but they might better be included in the common package. \ No newline at end of file diff --git a/utils/extensions/azure_storage.ts b/utils/extensions/azure_storage.ts new file mode 100644 index 00000000..00c2780d --- /dev/null +++ b/utils/extensions/azure_storage.ts @@ -0,0 +1,28 @@ +import * as azureStorage from "azure-storage"; +import { Either, left, right } from "fp-ts/lib/Either"; + +export * from "io-functions-commons/dist/src/utils/azure_storage"; + +/** + * Deletes a blof if exists + * Assumes that the container already exists. 
+ * + * @param blobService the Azure blob service + * @param containerName the name of the Azure blob storage container + * @param blobName blob storage container name + */ +export function deleteBlob( + blobService: azureStorage.BlobService, + containerName: string, + blobName: string +): Promise> { + return new Promise(resolve => + blobService.deleteBlobIfExists(containerName, blobName, err => { + if (err) { + return resolve(left(err)); + } else { + return resolve(right(true)); + } + }) + ); +} diff --git a/utils/extensions/documentdb.ts b/utils/extensions/documentdb.ts new file mode 100644 index 00000000..337ddaa4 --- /dev/null +++ b/utils/extensions/documentdb.ts @@ -0,0 +1,109 @@ +import * as DocumentDb from "documentdb"; +import { Either, left, right } from "fp-ts/lib/Either"; +import { Option } from "fp-ts/lib/Option"; +import * as DocumentDbUtilsBase from "io-functions-commons/dist/src/utils/documentdb"; + +export * from "io-functions-commons/dist/src/utils/documentdb"; + +export const deleteDocument = ( + client: DocumentDb.DocumentClient, + collectionUri: DocumentDbUtilsBase.IDocumentDbCollectionUri, + documentId: string, + partitionKey?: string +): Promise> => { + const documentUri = DocumentDbUtilsBase.getDocumentUri( + collectionUri, + documentId + ); + return new Promise(resolve => + client.deleteDocument(documentUri.uri, { partitionKey }, err => + resolve(err ? left(err) : right(documentId)) + ) + ); +}; + +/** + * Find all versions of a document. + * + * Pass the partitionKey field / values if it differs from the modelId + * to avoid multi-partition queries. + */ +export function findAllVersionsByModelId( + client: DocumentDb.DocumentClient, + collectionUri: DocumentDbUtilsBase.IDocumentDbCollectionUri, + modelIdField: string, + modelIdValue: string, + partitionKeyField: string, + partitionKeyValue: string +): DocumentDbUtilsBase.IResultIterator { + return DocumentDbUtilsBase.queryDocuments( + client, + collectionUri, + { + parameters: [ + { + name: "@modelId", + value: modelIdValue + }, + { + name: "@partitionKey", + value: partitionKeyValue + } + ], + // do not use ${collectionName} here as it may contain special characters + query: `SELECT * FROM m WHERE (m.${modelIdField} = @modelId + AND m.${partitionKeyField} = @partitionKey)` + }, + partitionKeyValue + ); +} + +export function deleteAllDocuments( + client: DocumentDb.DocumentClient, + collectionUri: DocumentDbUtilsBase.IDocumentDbCollectionUri, + documentIterator: DocumentDbUtilsBase.IResultIterator +): DocumentDbUtilsBase.IFoldableResultIterator< + Promise>> +> { + return DocumentDbUtilsBase.reduceResultIterator( + documentIterator, + ( + prev: Promise>>, + curr: T + ) => + Promise.all([prev, deleteDocument(client, collectionUri, curr.id)]).then( + ([prevResult, currResult]) => [...prevResult, currResult] + ) + ); +} + +export function deleteAllDocumentVersions( + client: DocumentDb.DocumentClient, + collectionUri: DocumentDbUtilsBase.IDocumentDbCollectionUri, + modelIdField: string, + modelIdValue: string, + partitionKeyField: string, + partitionKeyValue: string +): Promise< + Either< + DocumentDb.QueryError, + Option>>> + > +> { + // find all docs to delete + const documentIterator = findAllVersionsByModelId( + client, + collectionUri, + modelIdField, + modelIdValue, + partitionKeyField, + partitionKeyValue + ); + + // then delete + return deleteAllDocuments( + client, + collectionUri, + documentIterator + ).executeNext(Promise.resolve([])); +} diff --git a/utils/extensions/models/message.ts 
b/utils/extensions/models/message.ts new file mode 100644 index 00000000..65dfd722 --- /dev/null +++ b/utils/extensions/models/message.ts @@ -0,0 +1,41 @@ +import { BlobService } from "azure-storage"; +import * as DocumentDb from "documentdb"; +import { Either } from "fp-ts/lib/Either"; +import { MessageModel as MessageModelBase } from "io-functions-commons/dist/src/models/message"; +import { FiscalCode, NonEmptyString } from "italia-ts-commons/lib/strings"; +import { deleteBlob } from "../azure_storage"; +import * as DocumentDbUtils from "../documentdb"; + +// duplicated from base calss module, as it is not exposed +const MESSAGE_BLOB_STORAGE_SUFFIX = ".json"; +// duplicated from base calss module, as it is not exposed +function blobIdFromMessageId(messageId: string): string { + return `${messageId}${MESSAGE_BLOB_STORAGE_SUFFIX}`; +} +/** + * Extends MessageModel with deleting operations + */ +export class MessageDeletableModel extends MessageModelBase { + public async deleteMessage( + fiscalCode: FiscalCode, + messageId: NonEmptyString + ): Promise> { + return DocumentDbUtils.deleteDocument( + this.dbClient, + this.collectionUri, + messageId, + fiscalCode + ); + } + + public async deleteContentFromBlob( + blobService: BlobService, + messageId: string + ): Promise> { + return deleteBlob( + blobService, + this.containerName, + blobIdFromMessageId(messageId) + ); + } +} diff --git a/utils/extensions/models/message_status.ts b/utils/extensions/models/message_status.ts new file mode 100644 index 00000000..54287ab9 --- /dev/null +++ b/utils/extensions/models/message_status.ts @@ -0,0 +1,44 @@ +import * as DocumentDb from "documentdb"; +import { Either } from "fp-ts/lib/Either"; +import { + MESSAGE_STATUS_MODEL_ID_FIELD, + MESSAGE_STATUS_MODEL_PK_FIELD, + MessageStatusModel as MessageStatusModelBase, + RetrievedMessageStatus +} from "io-functions-commons/dist/src/models/message_status"; +import { NonEmptyString } from "italia-ts-commons/lib/strings"; +import * as DocumentDbUtils from "../documentdb"; + +/** + * Extends MessageStatusModel with deleting operations + */ +export class MessageStatusDeletableModel extends MessageStatusModelBase { + public async deleteMessageStatusVersion( + messageId: NonEmptyString, + documentId: NonEmptyString + ): Promise> { + return DocumentDbUtils.deleteDocument( + this.dbClient, + this.collectionUri, + documentId, + messageId + ); + } + + /** + * Retrieves a list of every version of the requested model + * @param modelId + */ + public findAllVersionsByModelId( + modelId: NonEmptyString + ): DocumentDbUtils.IResultIterator { + return DocumentDbUtils.findAllVersionsByModelId( + this.dbClient, + this.collectionUri, + MESSAGE_STATUS_MODEL_ID_FIELD, + modelId, + MESSAGE_STATUS_MODEL_PK_FIELD, + modelId + ); + } +} diff --git a/utils/extensions/models/notification.ts b/utils/extensions/models/notification.ts new file mode 100644 index 00000000..89f03a0c --- /dev/null +++ b/utils/extensions/models/notification.ts @@ -0,0 +1,72 @@ +/** + * An extensions of io-functions-commons/dist/src/models/notification to implement missing query methods + * Ideally they will be integrated in the common module + */ + +import * as DocumentDb from "documentdb"; +import { Either } from "fp-ts/lib/Either"; +import { + NotificationModel as NotificationModelBase, + RetrievedNotification +} from "io-functions-commons/dist/src/models/notification"; +import { NonEmptyString } from "italia-ts-commons/lib/strings"; +import * as DocumentDbUtils from "../documentdb"; + +/** + * Extends 
NotificationModel with deleting operations + */ +export class NotificationDeletableModel extends NotificationModelBase { + /** + * Creates a new Notification model + * + * @param dbClient the DocumentDB client + * @param collectionUrl the collection URL + */ + constructor( + dbClient: DocumentDb.DocumentClient, + collectionUrl: DocumentDbUtils.IDocumentDbCollectionUri + ) { + super(dbClient, collectionUrl); + } + + /** + * Returns the notifications for the provided message id + * + * @param messageId The message the notifications refer to + */ + public findNotificationsForMessage( + messageId: string + ): DocumentDbUtils.IResultIterator { + return DocumentDbUtils.queryDocuments( + this.dbClient, + this.collectionUri, + { + parameters: [ + { + name: "@messageId", + value: messageId + } + ], + query: `SELECT * FROM m WHERE m.messageId = @messageId` + }, + messageId + ); + } + + /** + * Deletes a single notification + * @param messageId message identifier of the notification (is partition key) + * @param notificationId notification identifier + */ + public async deleteNotification( + messageId: NonEmptyString, + notificationId: NonEmptyString + ): Promise> { + return DocumentDbUtils.deleteDocument( + this.dbClient, + this.collectionUri, + notificationId, + messageId + ); + } +} diff --git a/utils/extensions/models/notification_status.ts b/utils/extensions/models/notification_status.ts new file mode 100644 index 00000000..03d3fb46 --- /dev/null +++ b/utils/extensions/models/notification_status.ts @@ -0,0 +1,62 @@ +import * as DocumentDb from "documentdb"; +import { Either } from "fp-ts/lib/Either"; +import { + NOTIFICATION_STATUS_MODEL_ID_FIELD, + NOTIFICATION_STATUS_MODEL_PK_FIELD, + NotificationStatusModel as NotificationStatusModelBase, + RetrievedNotificationStatus +} from "io-functions-commons/dist/src/models/notification_status"; +import { NonEmptyString } from "italia-ts-commons/lib/strings"; +import * as DocumentDbUtils from "../documentdb"; + +/** + * Extends NotificationStatusModel with deleting operations + */ +export class NotificationStatusDeletableModel extends NotificationStatusModelBase { + public async deleteNotificationStatusVersion( + notificationId: NonEmptyString, + documentId: NonEmptyString + ): Promise> { + return DocumentDbUtils.deleteDocument( + this.dbClient, + this.collectionUri, + documentId, + notificationId + ); + } + + /** + * Retrieves a list of every version of the requested model + * @param modelId + */ + public findAllVersionsByModelId( + notificationId: NonEmptyString, + modelId: NonEmptyString + ): DocumentDbUtils.IResultIterator { + return DocumentDbUtils.findAllVersionsByModelId( + this.dbClient, + this.collectionUri, + NOTIFICATION_STATUS_MODEL_ID_FIELD, + modelId, + NOTIFICATION_STATUS_MODEL_PK_FIELD, + notificationId + ); + } + + /** + * Retrieves a list of every version of the requested model + * @param modelId + */ + public findAllVersionsByNotificationId( + notificationId: NonEmptyString + ): DocumentDbUtils.IResultIterator { + return DocumentDbUtils.findAllVersionsByModelId( + this.dbClient, + this.collectionUri, + NOTIFICATION_STATUS_MODEL_PK_FIELD, + notificationId, + NOTIFICATION_STATUS_MODEL_PK_FIELD, + notificationId + ); + } +} diff --git a/utils/extensions/models/profile.ts b/utils/extensions/models/profile.ts new file mode 100644 index 00000000..6e0a9f54 --- /dev/null +++ b/utils/extensions/models/profile.ts @@ -0,0 +1,43 @@ +import * as DocumentDb from "documentdb"; +import { Either } from "fp-ts/lib/Either"; +import { + 
PROFILE_MODEL_PK_FIELD, + ProfileModel as ProfileModelBase, + RetrievedProfile +} from "io-functions-commons/dist/src/models/profile"; +import { FiscalCode, NonEmptyString } from "italia-ts-commons/lib/strings"; +import * as DocumentDbUtils from "../documentdb"; + +/** + * Extends ProfileModel with deleting operations + */ +export class ProfileDeletableModel extends ProfileModelBase { + public async deleteProfileVersion( + fiscalCode: FiscalCode, + documentId: NonEmptyString + ): Promise> { + return DocumentDbUtils.deleteDocument( + this.dbClient, + this.collectionUri, + documentId, + fiscalCode + ); + } + + /** + * Retrieves a list of every version of the requested model + * @param modelId + */ + public findAllVersionsByModelId( + fiscalCode: FiscalCode + ): DocumentDbUtils.IResultIterator { + return DocumentDbUtils.findAllVersionsByModelId( + this.dbClient, + this.collectionUri, + PROFILE_MODEL_PK_FIELD, + fiscalCode, + PROFILE_MODEL_PK_FIELD, + fiscalCode + ); + } +} diff --git a/utils/featureFlags.ts b/utils/featureFlags.ts new file mode 100644 index 00000000..36abbcea --- /dev/null +++ b/utils/featureFlags.ts @@ -0,0 +1,18 @@ +/** + * Contains feature flags for the app + */ + +import { fromNullable } from "fp-ts/lib/Option"; + +const getFlagFromEnv = (name: string, defaultValue: boolean) => + fromNullable(process.env[name]) + .map(value => value === "1") + .getOrElse(defaultValue); + +export const flags = { + ENABLE_USER_DATA_DELETE: getFlagFromEnv("FF_ENABLE_USER_DATA_DELETE", true), + ENABLE_USER_DATA_DOWNLOAD: getFlagFromEnv( + "FF_ENABLE_USER_DATA_DOWNLOAD", + true + ) +}; diff --git a/utils/fetch.ts b/utils/fetch.ts new file mode 100644 index 00000000..4d112d0d --- /dev/null +++ b/utils/fetch.ts @@ -0,0 +1,21 @@ +import { agent } from "italia-ts-commons"; +import { + AbortableFetch, + setFetchTimeout, + toFetch +} from "italia-ts-commons/lib/fetch"; +import { Millisecond } from "italia-ts-commons/lib/units"; + +// HTTP external requests timeout in milliseconds +const DEFAULT_REQUEST_TIMEOUT_MS = 10000; + +// HTTP-only fetch with optional keepalive agent +// @see https://github.com/pagopa/io-ts-commons/blob/master/src/agent.ts#L10 +const httpApiFetch = agent.getHttpFetch(process.env); + +// a fetch that can be aborted and that gets cancelled after fetchTimeoutMs +const abortableFetch = AbortableFetch(httpApiFetch); + +export const timeoutFetch = toFetch( + setFetchTimeout(DEFAULT_REQUEST_TIMEOUT_MS as Millisecond, abortableFetch) +); diff --git a/utils/orchestrator.ts b/utils/orchestrator.ts new file mode 100644 index 00000000..798e3f08 --- /dev/null +++ b/utils/orchestrator.ts @@ -0,0 +1,21 @@ +import * as df from "durable-functions"; +import { DurableOrchestrationClient } from "durable-functions/lib/src/classes"; +import { toError } from "fp-ts/lib/Either"; +import { TaskEither, tryCatch } from "fp-ts/lib/TaskEither"; +import { PromiseType } from "italia-ts-commons/lib/types"; + +export const isOrchestratorRunning = ( + client: DurableOrchestrationClient, + orchestratorId: string +): TaskEither< + Error, + PromiseType> & { + isRunning: boolean; + } +> => + tryCatch(() => client.getStatus(orchestratorId), toError).map(status => ({ + ...status, + isRunning: + status.runtimeStatus === df.OrchestrationRuntimeStatus.Running || + status.runtimeStatus === df.OrchestrationRuntimeStatus.Pending + })); diff --git a/utils/sessionApiClient.ts b/utils/sessionApiClient.ts new file mode 100644 index 00000000..58ac4f37 --- /dev/null +++ b/utils/sessionApiClient.ts @@ -0,0 +1,155 @@ +/** + * Do 
not edit this file it is auto-generated by italia-utils / gen-api-models. + * See https://github.com/pagopa/italia-utils + */ +/* tslint:disable */ + +import { + RequestParams, + TypeofApiCall, + createFetchRequestForApi, + ReplaceRequestParams, + ApiHeaderJson, + TypeofApiParams +} from "italia-ts-commons/lib/requests"; + +import { + LockUserSessionT, + lockUserSessionDefaultDecoder, + UnlockUserSessionT, + unlockUserSessionDefaultDecoder +} from "../generated/session-api/requestTypes"; +import { identity } from "fp-ts/lib/function"; + +export type ApiOperation = TypeofApiCall & + TypeofApiCall; + +export type ParamKeys = keyof (TypeofApiParams & + TypeofApiParams); + +/** + * Defines an adapter for TypeofApiCall which omit one or more parameters in the signature + * @param ApiT the type which defines the operation to expose + * @param K the parameter to omit. undefined means no parameters will be omitted + */ +export type OmitApiCallParams< + ApiT, + K extends ParamKeys | undefined = undefined +> = ( + op: TypeofApiCall +) => K extends string + ? TypeofApiCall, K>>> + : TypeofApiCall; + +/** + * Defines an adapter for TypeofApiCall which omit one or more parameters in the signature + * @param ApiT the type which defines the operation to expose + * @param K the parameter to omit. undefined means no parameters will be omitted + */ +export type WithDefaultsT< + K extends ParamKeys | undefined = undefined +> = OmitApiCallParams; + +/** + * Defines a collection of api operations + * @param K name of the parameters that the Clients masks from the operations + */ +export type Client = { + readonly lockUserSession: TypeofApiCall< + ReplaceRequestParams< + LockUserSessionT, + Omit, K> + > + >; + + readonly unlockUserSession: TypeofApiCall< + ReplaceRequestParams< + UnlockUserSessionT, + Omit, K> + > + >; +}; + +/** + * Create an instance of a client + * @param params hash map of parameters thata define the client: + * - baseUrl: the base url for every api call (required) + * - fetchApi: an implementation of the fetch() web API, depending on the platform (required) + * - basePath: optional path to be appended to the baseUrl + * - withDefaults: optional adapter to be applied to every operation, to omit some paramenters + * @returns a collection of api operations + */ +export function createClient(params: { + baseUrl: string; + // tslint:disable-next-line:no-any + fetchApi: typeof fetch; + withDefaults: WithDefaultsT; + basePath?: string; +}): Client; +export function createClient(params: { + baseUrl: string; + // tslint:disable-next-line:no-any + fetchApi: typeof fetch; + withDefaults?: undefined; + basePath?: string; +}): Client; +export function createClient({ + baseUrl, + // tslint:disable-next-line:no-any + fetchApi, + withDefaults, + basePath = "/api/v1" +}: { + baseUrl: string; + // tslint:disable-next-line:no-any + fetchApi: typeof fetch; + withDefaults?: WithDefaultsT; + + basePath?: string; +}) { + const options = { + baseUrl, + fetchApi + }; + + const lockUserSessionT: ReplaceRequestParams< + LockUserSessionT, + RequestParams + > = { + method: "post", + + headers: ApiHeaderJson, + + response_decoder: lockUserSessionDefaultDecoder(), + url: ({ fiscalCode }) => `${basePath}/sessions/${fiscalCode}/lock`, + + body: () => "{}", + + query: ({ token }) => ({ token }) + }; + const lockUserSession = (withDefaults || identity)( + createFetchRequestForApi(lockUserSessionT, options) + ); + + const unlockUserSessionT: ReplaceRequestParams< + UnlockUserSessionT, + RequestParams + > = { + method: 
"delete", + + headers: ApiHeaderJson, + + response_decoder: unlockUserSessionDefaultDecoder(), + url: ({ fiscalCode }) => `${basePath}/sessions/${fiscalCode}/lock`, + + query: ({ token }) => ({ token }) + }; + const unlockUserSession = (withDefaults || identity)( + createFetchRequestForApi(unlockUserSessionT, options) + ); + + return { + lockUserSession, + unlockUserSession + }; +} diff --git a/yarn.lock b/yarn.lock index bbc263d8..5af94cf1 100644 --- a/yarn.lock +++ b/yarn.lock @@ -865,7 +865,7 @@ dependencies: "@types/node" "*" -"@types/unist@*", "@types/unist@^2.0.0", "@types/unist@^2.0.2": +"@types/unist@^2.0.0", "@types/unist@^2.0.2": version "2.0.3" resolved "https://registry.yarnpkg.com/@types/unist/-/unist-2.0.3.tgz#9c088679876f374eb5983f150d4787aa6fb32d7e" integrity sha512-FvUupuM3rlRsRtCN+fDudtmytGO6iHJuuRKS1Ss0pG5z8oX0diNEw94UEL7hgDbpN94rgaK5R7sWm6RrSkZuAQ== @@ -882,22 +882,6 @@ resolved "https://registry.yarnpkg.com/@types/validator/-/validator-9.4.4.tgz#67c745e988f721ea2a1e4cc5b4cd76e6bb3a76b1" integrity sha512-7bWNKQ3lDMhRS2lxe1aHGTBijZ/a6wQfZmCtKJDefpb81sYd+FrfNqj6Gda1Tcw8bYK0gG1CVuNLWV2JS7K8Dw== -"@types/vfile-message@*": - version "2.0.0" - resolved "https://registry.yarnpkg.com/@types/vfile-message/-/vfile-message-2.0.0.tgz#690e46af0fdfc1f9faae00cd049cc888957927d5" - integrity sha512-GpTIuDpb9u4zIO165fUy9+fXcULdD8HFRNli04GehoMVbeNq7D6OBnqSmg3lxZnC+UvgUhEWKxdKiwYUkGltIw== - dependencies: - vfile-message "*" - -"@types/vfile@^3.0.0": - version "3.0.2" - resolved "https://registry.yarnpkg.com/@types/vfile/-/vfile-3.0.2.tgz#19c18cd232df11ce6fa6ad80259bc86c366b09b9" - integrity sha512-b3nLFGaGkJ9rzOcuXRfHkZMdjsawuDD0ENL9fzTophtBg8FJHSGbH7daXkEpcwy3v7Xol3pAvsmlYyFhR4pqJw== - dependencies: - "@types/node" "*" - "@types/unist" "*" - "@types/vfile-message" "*" - "@types/yargs@^12.0.2", "@types/yargs@^12.0.9": version "12.0.12" resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-12.0.12.tgz#45dd1d0638e8c8f153e87d296907659296873916" @@ -2930,10 +2914,10 @@ duplexer3@^0.1.4: resolved "https://registry.yarnpkg.com/duplexer3/-/duplexer3-0.1.4.tgz#ee01dd1cac0ed3cbc7fdbea37dc0a8f1ce002ce2" integrity sha1-7gHdHKwO08vH/b6jfcCo8c4ALOI= -durable-functions@^1.2.4: - version "1.2.4" - resolved "https://registry.yarnpkg.com/durable-functions/-/durable-functions-1.2.4.tgz#1e480edc6bddf73282050c823ac0eb1775d43d22" - integrity sha512-X3H3DREmvGfOepdSlf2iuOLU+Ca16A9g1WYc9O/Hp/JndDCenSg4t+aPbaU3GJQqQ/CINt+n8DExWE8oWoARGA== +durable-functions@^1.4.3: + version "1.4.3" + resolved "https://registry.yarnpkg.com/durable-functions/-/durable-functions-1.4.3.tgz#e8258f62fd9ee80829fc7e6fa31425a26065612b" + integrity sha512-MJMnqpHmAuZt+IK6uTij7DE+yzMa+++YPhdb1FOIwm+AeBI/R4CYZnnsmOo6DnrmOIfnF0XBewL0nEhx23se8g== dependencies: "@azure/functions" "^1.0.2-beta2" "@types/lodash" "^4.14.119" @@ -2944,6 +2928,7 @@ durable-functions@^1.2.4: debug "~2.6.9" lodash "^4.17.15" rimraf "~2.5.4" + typedoc "^0.17.1" uuid "~3.3.2" validator "~10.8.0" @@ -3428,6 +3413,13 @@ fastq@^1.6.0: dependencies: reusify "^1.0.4" +fault@^1.0.1: + version "1.0.4" + resolved "https://registry.yarnpkg.com/fault/-/fault-1.0.4.tgz#eafcfc0a6d214fc94601e170df29954a4f842f13" + integrity sha512-CJ0HCB5tL5fYTEA7ToAq5+kTwd++Borf1/bifxd9iT70QcXr4MRrO3Llf8Ifs70q+SJcGHFtnIE/Nw6giCtECA== + dependencies: + format "^0.2.0" + fb-watchman@^1.8.0, fb-watchman@^1.9.0: version "1.9.2" resolved "https://registry.yarnpkg.com/fb-watchman/-/fb-watchman-1.9.2.tgz#a24cf47827f82d38fb59a69ad70b76e3b6ae7383" @@ -3615,6 +3607,11 @@ format-util@^1.0.3: resolved 
"https://registry.yarnpkg.com/format-util/-/format-util-1.0.5.tgz#1ffb450c8a03e7bccffe40643180918cc297d271" integrity sha512-varLbTj0e0yVyRpqQhuWV+8hlePAgaoFRhNFj50BNjEIrw1/DphHSObtqwskVCPWNgzwPoQrZAbfa/SBiicNeg== +format@^0.2.0: + version "0.2.2" + resolved "https://registry.yarnpkg.com/format/-/format-0.2.2.tgz#d6170107e9efdc4ed30c9dc39016df942b5cb58b" + integrity sha1-1hcBB+nv3E7TDJ3DkBbflCtctYs= + formidable@^1.2.0: version "1.2.2" resolved "https://registry.yarnpkg.com/formidable/-/formidable-1.2.2.tgz#bf69aea2972982675f00865342b982986f6b8dd9" @@ -3666,6 +3663,15 @@ fs-extra@^6.0.0: jsonfile "^4.0.0" universalify "^0.1.0" +fs-extra@^8.1.0: + version "8.1.0" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-8.1.0.tgz#49d43c45a88cd9677668cb7be1b46efdb8d2e1c0" + integrity sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g== + dependencies: + graceful-fs "^4.2.0" + jsonfile "^4.0.0" + universalify "^0.1.0" + fs-extra@^9.0.1: version "9.0.1" resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-9.0.1.tgz#910da0062437ba4c39fedd863f1675ccfefcb9fc" @@ -3950,7 +3956,7 @@ growly@^1.2.0, growly@^1.3.0: resolved "https://registry.yarnpkg.com/growly/-/growly-1.3.0.tgz#f10748cbe76af964b7c96c93c6bcc28af120c081" integrity sha1-8QdIy+dq+WS3yWyTxrzCivEgwIE= -handlebars@^4.0.3, handlebars@^4.1.2, handlebars@~4.5.3: +handlebars@^4.0.3, handlebars@^4.1.2, handlebars@^4.7.6, handlebars@~4.5.3: version "4.5.3" resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.5.3.tgz#5cf75bd8714f7605713511a56be7c349becb0482" integrity sha512-3yPecJoJHK/4c6aZhSvxOyG4vJKDshV36VHp0iVCDVh7o9w2vwi3NSnL2MMPj3YdduqaBcu7cGbggJQM0br9xA== @@ -4130,6 +4136,11 @@ hide-powered-by@1.1.0: resolved "https://registry.yarnpkg.com/hide-powered-by/-/hide-powered-by-1.1.0.tgz#be3ea9cab4bdb16f8744be873755ca663383fa7a" integrity sha512-Io1zA2yOA1YJslkr+AJlWSf2yWFkKjvkcL9Ni1XSUqnGLr/qRQe2UI3Cn/J9MsJht7yEVCe0SscY1HgVMujbgg== +highlight.js@^10.0.0: + version "10.1.2" + resolved "https://registry.yarnpkg.com/highlight.js/-/highlight.js-10.1.2.tgz#c20db951ba1c22c055010648dfffd7b2a968e00c" + integrity sha512-Q39v/Mn5mfBlMff9r+zzA+gWxRsCRKwEMvYTiisLr/XUiFI/4puWt0Ojdko3R3JCNWGdOWaA5g/Yxqa23kC5AA== + home-or-tmp@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/home-or-tmp/-/home-or-tmp-2.0.0.tgz#e36c3f2d2cae7d746a857e38d18d5f32a7882db8" @@ -4380,10 +4391,10 @@ invert-kv@^2.0.0: resolved "https://registry.yarnpkg.com/invert-kv/-/invert-kv-2.0.0.tgz#7393f5afa59ec9ff5f67a27620d11c226e3eec02" integrity sha512-wPVv/y/QQ/Uiirj/vh3oP+1Ww+AWehmi1g5fFWGPF6IpCBCDVrhgHRMvrLfdYcwDh3QJbGXDW4JAuzxElLSqKA== -io-functions-commons@^10.0.0: - version "10.1.0" - resolved "https://registry.yarnpkg.com/io-functions-commons/-/io-functions-commons-10.1.0.tgz#c65e819ce461051a2aa2689db8c2025758e78557" - integrity sha512-r9HO4/K/mjt/7GrEgiEv6nRRx8pmb8LYMnd40ERPzmeXJlEV2C0MGOmEGU3A+tb1JhK3ew6Irhb4uxNAWvbDgw== +io-functions-commons@^10.7.0: + version "10.7.0" + resolved "https://registry.yarnpkg.com/io-functions-commons/-/io-functions-commons-10.7.0.tgz#b0ba6c8683f6aea06d1969a96aacd78452dd1181" + integrity sha512-KXiwoaDsaYTtA+vVT3VogVXcseWB5vr3Kt3tNdlk+v+7CcOwQwSgAFJOyf/VFDlVBSj5MgaxyApR/9N6BIZdww== dependencies: "@types/node-fetch" "^2.5.6" applicationinsights "^1.7.3" @@ -4400,11 +4411,12 @@ io-functions-commons@^10.0.0: nodemailer "^4.6.7" referrer-policy "^1.1.0" rehype-stringify "^3.0.0" + remark-frontmatter "^2.0.0" remark-parse "^5.0.0" remark-rehype "^3.0.0" request-ip "^2.1.3" ulid 
"^2.3.0" - unified "^7.1.0" + unified "^9.0.0" winston "^3.1.0" io-functions-express@^0.1.0, io-functions-express@^0.1.1: @@ -4698,10 +4710,10 @@ is-path-inside@^3.0.1: resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.2.tgz#f5220fc82a3e233757291dddc9c5877f2a1f3017" integrity sha512-/2UGPSgmtqwo1ktx8NDHjuPwZWmHhO+gj0f93EkhLB5RgW9RZevWYYlIkS6zePc6U2WpOdQYIwHe9YC4DWEBVg== -is-plain-obj@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e" - integrity sha1-caUMhCnfync8kqOQpKA7OfzVHT4= +is-plain-obj@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-2.1.0.tgz#45e42e37fccf1f40da8e5f76ee21515840c09287" + integrity sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA== is-plain-object@^2.0.3, is-plain-object@^2.0.4: version "2.0.4" @@ -6310,6 +6322,11 @@ lru-cache@^4.0.1: pseudomap "^1.0.2" yallist "^2.1.2" +lunr@^2.3.8: + version "2.3.8" + resolved "https://registry.yarnpkg.com/lunr/-/lunr-2.3.8.tgz#a8b89c31f30b5a044b97d2d28e2da191b6ba2072" + integrity sha512-oxMeX/Y35PNFuZoHp+jUj5OSEmLCaIH4KTFJh7a93cHBoFmpw2IoPs22VIz7vyO2YUnx2Tn9dzIwO2P/4quIRg== + macos-release@^2.2.0: version "2.3.0" resolved "https://registry.yarnpkg.com/macos-release/-/macos-release-2.3.0.tgz#eb1930b036c0800adebccd5f17bc4c12de8bb71f" @@ -6383,6 +6400,11 @@ marked-terminal@^3.2.0: node-emoji "^1.4.1" supports-hyperlinks "^1.0.1" +marked@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/marked/-/marked-1.0.0.tgz#d35784245a04871e5988a491e28867362e941693" + integrity sha512-Wo+L1pWTVibfrSr+TTtMuiMfNzmZWiOPeO7rZsQUY5bgsxpHesBEcIWJloWVTFnrMXnf/TL30eTFSGJddmQAng== + marked@^0.6.2: version "0.6.3" resolved "https://registry.yarnpkg.com/marked/-/marked-0.6.3.tgz#79babad78af638ba4d522a9e715cdfdd2429e946" @@ -6588,7 +6610,7 @@ mimic-response@^2.0.0, mimic-response@^2.1.0: resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-2.1.0.tgz#d13763d35f613d09ec37ebb30bac0469c0ee8f43" integrity sha512-wXqjST+SLt7R009ySCglWBCFpjUygmCIfD790/kVbiGmUgfYGuB14PiTd5DwVxSV4NcYHjzMkoj5LjQZwTQLEA== -minimatch@^3.0.2, minimatch@^3.0.3, minimatch@^3.0.4: +minimatch@^3.0.0, minimatch@^3.0.2, minimatch@^3.0.3, minimatch@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== @@ -7647,7 +7669,7 @@ process-nextick-args@~2.0.0: resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== -progress@^2.0.0: +progress@^2.0.0, progress@^2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8" integrity sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA== @@ -8066,6 +8088,13 @@ release-it@^13.1.1: yaml "1.8.2" yargs-parser "18.1.0" +remark-frontmatter@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/remark-frontmatter/-/remark-frontmatter-2.0.0.tgz#c9b8539c27cd23b1672c7e0fcbd5795eeedb4dc1" + integrity sha512-uNOQt4tO14qBFWXenF0MLC4cqo3dv8qiHPGyjCl1rwOT0LomSHpcElbjjVh5CwzElInB38HD8aSRVugKQjeyHA== + dependencies: + fault "^1.0.1" + 
remark-parse@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/remark-parse/-/remark-parse-5.0.0.tgz#4c077f9e499044d1d5c13f80d7a98cf7b9285d95" @@ -8552,6 +8581,15 @@ shelljs@0.8.3, shelljs@^0.8.1: interpret "^1.0.0" rechoir "^0.6.2" +shelljs@^0.8.4: + version "0.8.4" + resolved "https://registry.yarnpkg.com/shelljs/-/shelljs-0.8.4.tgz#de7684feeb767f8716b326078a8a00875890e3c2" + integrity sha512-7gk3UZ9kOfPLIAbslLzyWeGiEqx9e3rxwZM0KE6EL8GlGwjym9Mrlx5/p33bWTu9YG6vcS4MBxYZDHYr5lr8BQ== + dependencies: + glob "^7.0.0" + interpret "^1.0.0" + rechoir "^0.6.2" + shellwords@^0.1.0, shellwords@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/shellwords/-/shellwords-0.1.1.tgz#d6b9181c1a48d397324c84871efbcfc73fc0654b" @@ -9416,6 +9454,29 @@ typedarray-to-buffer@^3.1.5: dependencies: is-typedarray "^1.0.0" +typedoc-default-themes@^0.10.2: + version "0.10.2" + resolved "https://registry.yarnpkg.com/typedoc-default-themes/-/typedoc-default-themes-0.10.2.tgz#743380a80afe62c5ef92ca1bd4abe2ac596be4d2" + integrity sha512-zo09yRj+xwLFE3hyhJeVHWRSPuKEIAsFK5r2u47KL/HBKqpwdUSanoaz5L34IKiSATFrjG5ywmIu98hPVMfxZg== + dependencies: + lunr "^2.3.8" + +typedoc@^0.17.1: + version "0.17.8" + resolved "https://registry.yarnpkg.com/typedoc/-/typedoc-0.17.8.tgz#96b67e9454aa7853bfc4dc9a55c8a07adfd5478e" + integrity sha512-/OyrHCJ8jtzu+QZ+771YaxQ9s4g5Z3XsQE3Ma7q+BL392xxBn4UMvvCdVnqKC2T/dz03/VXSLVKOP3lHmDdc/w== + dependencies: + fs-extra "^8.1.0" + handlebars "^4.7.6" + highlight.js "^10.0.0" + lodash "^4.17.15" + lunr "^2.3.8" + marked "1.0.0" + minimatch "^3.0.0" + progress "^2.0.3" + shelljs "^0.8.4" + typedoc-default-themes "^0.10.2" + typescript@^3.6.2: version "3.6.2" resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.6.2.tgz#105b0f1934119dde543ac8eb71af3a91009efe54" @@ -9465,19 +9526,17 @@ unherit@^1.0.4: inherits "^2.0.0" xtend "^4.0.0" -unified@^7.1.0: - version "7.1.0" - resolved "https://registry.yarnpkg.com/unified/-/unified-7.1.0.tgz#5032f1c1ee3364bd09da12e27fdd4a7553c7be13" - integrity sha512-lbk82UOIGuCEsZhPj8rNAkXSDXd6p0QLzIuSsCdxrqnqU56St4eyOB+AlXsVgVeRmetPTYydIuvFfpDIed8mqw== +unified@^9.0.0: + version "9.0.0" + resolved "https://registry.yarnpkg.com/unified/-/unified-9.0.0.tgz#12b099f97ee8b36792dbad13d278ee2f696eed1d" + integrity sha512-ssFo33gljU3PdlWLjNp15Inqb77d6JnJSfyplGJPT/a+fNRNyCBeveBAYJdO5khKdF6WVHa/yYCC7Xl6BDwZUQ== dependencies: - "@types/unist" "^2.0.0" - "@types/vfile" "^3.0.0" bail "^1.0.0" extend "^3.0.0" - is-plain-obj "^1.1.0" + is-buffer "^2.0.0" + is-plain-obj "^2.0.0" trough "^1.0.0" - vfile "^3.0.0" - x-is-string "^0.1.0" + vfile "^4.0.0" union-value@^1.0.0: version "1.0.1" @@ -9530,11 +9589,6 @@ unist-util-remove-position@^1.0.0: dependencies: unist-util-visit "^1.1.0" -unist-util-stringify-position@^1.0.0, unist-util-stringify-position@^1.1.1: - version "1.1.2" - resolved "https://registry.yarnpkg.com/unist-util-stringify-position/-/unist-util-stringify-position-1.1.2.tgz#3f37fcf351279dcbca7480ab5889bb8a832ee1c6" - integrity sha512-pNCVrk64LZv1kElr0N1wPiHEUoXNVFERp+mlTg/s9R5Lwg87f9bM/3sQB99w+N9D/qnM9ar3+AKDBwo/gm/iQQ== - unist-util-stringify-position@^2.0.0: version "2.0.3" resolved "https://registry.yarnpkg.com/unist-util-stringify-position/-/unist-util-stringify-position-2.0.3.tgz#cce3bfa1cdf85ba7375d1d5b17bdc4cada9bd9da" @@ -9748,30 +9802,24 @@ vfile-location@^2.0.0: resolved "https://registry.yarnpkg.com/vfile-location/-/vfile-location-2.0.6.tgz#8a274f39411b8719ea5728802e10d9e0dff1519e" integrity 
sha512-sSFdyCP3G6Ka0CEmN83A2YCMKIieHx0EDaj5IDP4g1pa5ZJ4FJDvpO0WODLxo4LUX4oe52gmSCK7Jw4SBghqxA== -vfile-message@*: - version "2.0.3" - resolved "https://registry.yarnpkg.com/vfile-message/-/vfile-message-2.0.3.tgz#0dd4f6879fb240a8099b22bd3755536c92e59ba5" - integrity sha512-qQg/2z8qnnBHL0psXyF72kCjb9YioIynvyltuNKFaUhRtqTIcIMP3xnBaPzirVZNuBrUe1qwFciSx2yApa4byw== +vfile-message@^2.0.0: + version "2.0.4" + resolved "https://registry.yarnpkg.com/vfile-message/-/vfile-message-2.0.4.tgz#5b43b88171d409eae58477d13f23dd41d52c371a" + integrity sha512-DjssxRGkMvifUOJre00juHoP9DPWuzjxKuMDrhNbk2TdaYYBNMStsNhEOt3idrtI12VQYM/1+iM0KOzXi4pxwQ== dependencies: "@types/unist" "^2.0.0" unist-util-stringify-position "^2.0.0" -vfile-message@^1.0.0: - version "1.1.1" - resolved "https://registry.yarnpkg.com/vfile-message/-/vfile-message-1.1.1.tgz#5833ae078a1dfa2d96e9647886cd32993ab313e1" - integrity sha512-1WmsopSGhWt5laNir+633LszXvZ+Z/lxveBf6yhGsqnQIhlhzooZae7zV6YVM1Sdkw68dtAW3ow0pOdPANugvA== - dependencies: - unist-util-stringify-position "^1.1.1" - -vfile@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/vfile/-/vfile-3.0.1.tgz#47331d2abe3282424f4a4bb6acd20a44c4121803" - integrity sha512-y7Y3gH9BsUSdD4KzHsuMaCzRjglXN0W2EcMf0gpvu6+SbsGhMje7xDc8AEoeXy6mIwCKMI6BkjMsRjzQbhMEjQ== +vfile@^4.0.0: + version "4.1.1" + resolved "https://registry.yarnpkg.com/vfile/-/vfile-4.1.1.tgz#282d28cebb609183ac51703001bc18b3e3f17de9" + integrity sha512-lRjkpyDGjVlBA7cDQhQ+gNcvB1BGaTHYuSOcY3S7OhDmBtnzX95FhtZZDecSTDm6aajFymyve6S5DN4ZHGezdQ== dependencies: + "@types/unist" "^2.0.0" is-buffer "^2.0.0" replace-ext "1.0.0" - unist-util-stringify-position "^1.0.0" - vfile-message "^1.0.0" + unist-util-stringify-position "^2.0.0" + vfile-message "^2.0.0" vm2@^3.6.3: version "3.8.2" @@ -10019,11 +10067,6 @@ ws@^5.2.0: dependencies: async-limiter "~1.0.0" -x-is-string@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/x-is-string/-/x-is-string-0.1.0.tgz#474b50865af3a49a9c4657f05acd145458f77d82" - integrity sha1-R0tQhlrzpJqcRlfwWs0UVFj3fYI= - x-xss-protection@1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/x-xss-protection/-/x-xss-protection-1.3.0.tgz#3e3a8dd638da80421b0e9fff11a2dbe168f6d52c"
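
Taken together, the trigger, feature-flag and orchestrator utilities introduced above implement a simple routing policy for each document arriving on the user-data-processing change feed. The sketch below restates that policy as a plain async helper, outside the fp-ts pipeline used in UserDataProcessingTrigger/index.ts; it is illustrative only (it silently skips undecodable documents instead of logging a warning and omits the Application Insights tracking), and the relative import paths assume the helper sits next to the modules added in this diff.

import * as t from "io-ts";
import { DurableOrchestrationClient } from "durable-functions/lib/src/classes";
import {
  ABORT_EVENT,
  makeOrchestratorId as makeDeleteOrchestratorId
} from "./UserDataDeleteOrchestrator/utils";
import { makeOrchestratorId as makeDownloadOrchestratorId } from "./UserDataDownloadOrchestrator/utils";
import {
  ProcessableUserDataDelete,
  ProcessableUserDataDeleteAbort,
  ProcessableUserDataDownload
} from "./UserDataProcessingTrigger";
import { flags } from "./utils/featureFlags";
import { isOrchestratorRunning } from "./utils/orchestrator";

// Routes a single change-feed document:
// - PENDING download -> start UserDataDownloadOrchestrator (if enabled and not already running)
// - PENDING delete   -> start UserDataDeleteOrchestrator   (if enabled and not already running)
// - ABORTED delete   -> raise the abort event on the running delete orchestrator
// Anything else is skipped.
const routeUserDataProcessingDocument = async (
  dfClient: DurableOrchestrationClient,
  rawDocument: unknown
): Promise<void> => {
  const decoded = t
    .union([
      ProcessableUserDataDownload,
      ProcessableUserDataDelete,
      ProcessableUserDataDeleteAbort
    ])
    .decode(rawDocument);
  if (decoded.isLeft()) {
    return; // not a document this trigger is interested in
  }
  const doc = decoded.value;

  if (flags.ENABLE_USER_DATA_DOWNLOAD && ProcessableUserDataDownload.is(doc)) {
    const orchestratorId = makeDownloadOrchestratorId(doc.fiscalCode);
    const status = await isOrchestratorRunning(dfClient, orchestratorId).run();
    if (status.isRight() && !status.value.isRunning) {
      await dfClient.startNew("UserDataDownloadOrchestrator", orchestratorId, doc);
    }
  } else if (flags.ENABLE_USER_DATA_DELETE && ProcessableUserDataDelete.is(doc)) {
    const orchestratorId = makeDeleteOrchestratorId(doc.fiscalCode);
    const status = await isOrchestratorRunning(dfClient, orchestratorId).run();
    if (status.isRight() && !status.value.isRunning) {
      await dfClient.startNew("UserDataDeleteOrchestrator", orchestratorId, doc);
    }
  } else if (ProcessableUserDataDeleteAbort.is(doc)) {
    // signal the long-running delete orchestrator to stop waiting and roll back
    await dfClient.raiseEvent(
      makeDeleteOrchestratorId(doc.fiscalCode),
      ABORT_EVENT,
      {}
    );
  }
};

Because the orchestrator id is derived deterministically from the fiscal code and isOrchestratorRunning is consulted before startNew, reprocessing the same change-feed document cannot spawn a duplicate orchestration.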