import {Uppy, AwsS3, generateFileID} from "@/import/uppy";
import GenericUserHelper from "@/shared/helper/user/generic";
import * as Sentry from "@sentry/nextjs";
import {BehaviorSubject, of, ReplaySubject} from "rxjs";
import {distinctUntilChanged, share, filter, map, tap, switchMap} from "rxjs/operators";
import {bind} from "@react-rxjs/core";
import { graphQLClient, graphQLClientPOST } from "@/shared/lib/endpoint/api";
import {parseId} from "@/shared/helper/general/id";
import {dateToLocale} from "@/shared/helper/general/dateConverter.mjs";
import {createImageUploadUrl, downloadUrl} from "@/shared/endpoint/graphQLTemplate";
import getStore from "@/shared/lib/store";
import { nanoid } from 'nanoid';
// BIG TODO: enable offline support. Currently that will lead to errors.
// Prefix for placeholder attachment names of files whose upload has not completed yet
// (see attachAddedFilesToStore / retrieveFileName / useFileUploadStatus).
export const PLACEHOLDER_PRE_FILENAME = "PLACEHOLDER_C";
/**
 * Hacky workaround to avoid too many uppy instances being spawned and to avoid having dynamic uploadHandler functions by using a static processRequest function.
 * @type {{readonly uppy: Uppy, readonly token: string, processRequest(...[*]): Promise<void>}}
 */
const FileSyncHelper = (function () {
// Keep the upload (POST) GraphQL client authenticated with the most recent user token.
GenericUserHelper.token$.subscribe((token) => {
graphQLClientPOST.setHeader("Authorization", token);
});
/**
 * Strip the directory part and the leading "<id>-" prefix from a file path,
 * leaving only the original file name.
 * @param {string} [filePath]
 * @returns {string} The original file name, or "" when it cannot be derived.
 */
const filePathToFileName = (filePath) => {
  if (filePath == null) {
    return "";
  }
  const baseName = filePath.split("/").at(-1);
  // Drop the first dash-separated segment (the id) and re-join the rest.
  const [, ...nameParts] = baseName.split("-");
  return nameParts.join("-");
};
/**
 * Derive a locale-formatted creation date from the id prefix of a file path.
 * File names are prefixed with a sortable id: "<id>-<original name>".
 * @param {string} [filePath]
 * @returns {string|null} Null when no creation date can be derived.
 */
const filePathToCreationDate = (filePath) => {
  // Placeholder paths (upload not finished yet) carry no id and therefore no date.
  // Robustness fix: also bail out on nullish/empty filePath instead of throwing on .startsWith.
  if (!filePath || filePath.startsWith(PLACEHOLDER_PRE_FILENAME)) {
    return null;
  }
  const id = filePath?.split("/")?.slice(-1)?.[0]?.split("-")?.slice(0, 1)?.[0] ?? "";
  if (!id) {
    return null;
  }
  try {
    // parseId is assumed to expose the creation epoch (ms) embedded in the id — TODO confirm.
    const epochInMs = parseId(id).epoch;
    return dateToLocale(epochInMs);
  } catch (e) {
    Sentry.withScope(function (scope) {
      scope.setContext("Id", { value: id });
      Sentry.captureException(e);
    });
    return null;
  }
};
// Single shared uppy instance. requiredMetaFields ensures every file carries the
// store context (storeMetaData) needed to request an upload url.
const uppy = new Uppy({
  id: "files",
  requiredMetaFields: ["storeMetaData"],
}).use(AwsS3, {
  shouldUseMultipart: (file) => file.size > 100 * 2 ** 20, // Note: we have not implemented the createMultipartUpload function yet!
  /**
   * Request a signed S3 upload url for the given file from the backend.
   * @param {Object} file - Uppy file object; must carry meta.storeMetaData (storeType + storeId).
   * @returns {Promise<Object>|Object} Upload parameters for AwsS3, or an empty object on error.
   */
  getUploadParameters(file) {
    if (!file.meta?.storeMetaData?.storeType || !file.meta?.storeMetaData?.storeId) {
      Sentry.withScope(function (scope) {
        scope.setContext("Info", { fileId: file.id });
        scope.setTag("attachments", "requiredDetailsMissing");
        // Typo fix: "becuase" -> "because".
        Sentry.captureMessage("Could not upload file because of missing store context", "fatal");
      });
      // TODO: throw exception in general
      return {};
    }
    return graphQLClientPOST
      .request(createImageUploadUrl, {
        fileName: file.name,
        municipalityCode: process.env.NEXT_PUBLIC_MUNICIPALITY_CODE ?? sessionStorage.getItem("municipalityCode"),
        mimeType: file.type,
        type: file.meta.storeMetaData.storeType,
        typeId: file.meta.storeMetaData.storeId,
      })
      .then(({ createImageUploadUrl: { uploadJson, cdnUrl, filePath } = {}, errors } = {}) => {
        if (errors) {
          Sentry.withScope(function (scope) {
            scope.setContext("Error", errors);
            Sentry.captureMessage("Uploading file created a server error!", "fatal");
          });
          // Consistency fix: return an empty object like the missing-meta branch above,
          // instead of resolving to undefined.
          return {};
        }
        const uploadObject = JSON.parse(uploadJson);
        // Remember the server-assigned file path / cdn url on the file's meta data.
        uppy.setFileMeta(file.id, {
          filePath,
          fileName: filePathToFileName(filePath),
          cdnUrl,
        });
        if (resolvers[file.id]) {
          // Flag: a non-expired S3 upload url has been received for this file.
          resolvers[file.id][3] = true;
        }
        uploadProgress$.next(resolvers);
        // Return an object in the correct shape.
        return {
          method: "POST",
          ...uploadObject,
        };
      });
  },
});
/**
 * Request a signed download url for the given file path.
 * As it is not safe to have download urls floating around without proper
 * cleaning, the urls are deliberately not stored centrally.
 * @param {string} [filePath]
 * @returns {Promise<Object>} The server result, or a null-filled fallback object on any failure.
 */
const createDownloadUrl = async (filePath) => {
  const fallback = {
    downloadUrl: null,
    expiresAt: null
  };
  if (!filePath) {
    return fallback;
  }
  let requestErrors;
  let requestResult;
  try {
    // TODO: place the decodeURIComponent on the server-side to support all kind of weird file names
    const response = await graphQLClient.request(downloadUrl, { filePath: decodeURIComponent(filePath) });
    requestErrors = response.errors;
    requestResult = response.downloadUrl;
  } catch (e) {
    Sentry.withScope(function (scope) {
      scope.setContext("Info", { filePath });
      Sentry.captureException(e);
    });
    return fallback;
  }
  if (requestErrors) {
    Sentry.withScope(function (scope) {
      scope.setContext("Info", { filePath });
      scope.setContext("Error", requestErrors);
      Sentry.captureMessage("Could not create a download url", "error");
    });
    return fallback;
  }
  return requestResult;
};
// Emits "idle" when no upload is in flight, otherwise the total progress percentage.
const totalUploadProgress$ = new BehaviorSubject("idle");
uppy.on('progress', (progress) => {
// Luckily uppy updates the progress every x (low) seconds, so not every percentage is a trigger.
totalUploadProgress$.next(progress >= 100 ? "idle" : progress);
});
// Shared, replayed (buffer size 1) variant so late subscribers immediately get the latest value.
const shareableTotalUploadProgress$ = totalUploadProgress$.pipe(
distinctUntilChanged(),
share({
connector: () => new ReplaySubject(1),
resetOnError: true,
resetOnRefCountZero: false, // Do not reset if ref count becomes zero
resetOnComplete: false, // On complete keep the ReplaySubject (with its value)
})
)
// Map of fileId -> [callbackIfDownloadEnabled, isDownloaded, uploadProgress, receivedNonExpiredS3UploadUrl].
const resolvers = {};
// Emits the (mutated in place) resolvers map whenever any per-file upload state changes.
const uploadProgress$ = new BehaviorSubject(resolvers);
uppy.on('upload', (data) => {
  // Mark every file of this batch as started (progress 0).
  // Idiom fix: forEach instead of map — the callback is used for side effects only.
  data.fileIDs.forEach((fileId) => {
    if (resolvers[fileId]) {
      resolvers[fileId][2] = 0;
    }
  });
  uploadProgress$.next(resolvers);
});
uppy.on('upload-progress', (file, progress) => {
  if (resolvers[file.id]) {
    // Clamp to 100 in case bytesUploaded overshoots bytesTotal.
    resolvers[file.id][2] = Math.min(Math.floor((progress.bytesUploaded / progress.bytesTotal) * 100), 100);
  }
  uploadProgress$.next(resolvers);
});
uppy.on('complete', (result) => {
  result.successful.forEach((file) => {
    if (resolvers[file.id]) {
      resolvers[file.id][2] = 100;
    }
  });
  // Result includes failed and successful uploads
  totalUploadProgress$.next("idle");
  uploadProgress$.next(resolvers);
});
/**
 * Download a just-uploaded file from the cdn and resolve the registered callback with its data url.
 * Registered as uppy "upload-success" handler when downloadAndCacheFile is enabled (see addFiles).
 * @param {Object} file - Uppy file object.
 * @param {Object} responseObject - Upload response; only the status code is inspected.
 */
const processRequest = async (file, responseObject) => {
  // Robustness fix: "upload-success" fires for every file on the shared uppy instance;
  // ignore files that have no resolver registered (e.g. uploads started elsewhere),
  // which previously caused an unhandled rejection on resolvers[file.id][1].
  if (!resolvers[file.id]) {
    return;
  }
  let retrievedFile = file;
  if (retrievedFile.meta.cdnUrl) {
    if (resolvers[file.id][1] === true) {
      // Already processed or processing.
      return;
    }
    try {
      // Getting content of file
      const fileContent = await fetch(retrievedFile.meta.cdnUrl, {
        headers: {
          Authorization: `Bearer ${GenericUserHelper.token}`,
          "Content-Type": retrievedFile.meta.type,
          Accept: retrievedFile.meta.type,
        },
        mode: "cors",
        credentials: "omit", // Required as we currently use a * wildcard in our CORS header
      });
      // Uppy has the behaviour to call twice according to online topics, therefore ensure that we do not waste bandwidth by setting a flag.
      resolvers[file.id][1] = true;
      const fileBlob = await fileContent.blob();
      const reader = new FileReader();
      reader.onloadend = () => {
        uppy.setFileMeta(file.id, { dataUrl: reader.result });
        const { name, ...relevantMetaData } = uppy.getFile(file.id)?.meta ?? {};
        const obj = {
          src: reader.result,
          // @deprecated
          filename: retrievedFile.meta.name,
          fileId: file.id
        };
        Object.assign(obj, relevantMetaData);
        // Resolve the waiting addFiles promise via the registered callback.
        resolvers[file.id][0](obj);
      };
      reader.readAsDataURL(fileBlob);
    } catch (e) {
      resolvers[file.id][0]({
        error: e,
        fileId: file.id,
      });
    }
  } else if (responseObject.status > 299 || responseObject.status < 200) {
    resolvers[file.id][0]({
      error: "Server error",
      fileId: file.id
    });
  }
};
/**
 * Resolve the display file name for either a known uppy file id or a raw file path.
 * @param {Object} param0
 * @param {string} [param0.fileId]
 * @param {string} [param0.filePath]
 * @returns {string} The best-known file name, or "" when nothing can be derived.
 */
const retrieveFileName = ({ fileId, filePath }) => {
  // File known to uppy: prefer the server-assigned fileName, then the original name.
  if (fileId) {
    const meta = uppy.getFile(fileId)?.meta ?? {};
    return meta.fileName ?? meta.name ?? filePathToFileName(filePath);
  }
  if (!filePath) {
    return "";
  }
  // Placeholder names carry a "PLACEHOLDER_C<6-char id>_" prefix that must be stripped.
  const placeholderPattern = /^PLACEHOLDER_C([0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz-]){6}_/;
  if (placeholderPattern.test(filePath)) {
    return filePath.replace(placeholderPattern, "");
  }
  return filePathToFileName(filePath);
};
/**
 * Look up the uploaded file path stored on uppy's meta for the given file id.
 * @param {string} [fileId]
 * @returns {string} Empty string when the file or its path is unknown.
 */
const retrieveFilePath = (fileId) => (fileId ? (uppy.getFile(fileId)?.meta?.filePath ?? "") : "");
/**
 * Archive or restore an attachment entry (identified by itemReferenceId) within a store list.
 * @param {Object} param0
 * @param {boolean} [param0.archive=true] - True to archive, false to restore.
 * @returns {Promise<{error: boolean}>} error is true when the store or item could not be found.
 */
const archiveOrRestoreFile = async ({ storeType, storeId, itemReferenceId, storeRef, archive = true }) => {
  const { rawUpdateStoreValue, getSnapshot } = getStore(storeType, storeId, { createInstance: false }) ?? {};
  if (!getSnapshot) {
    return { error: true };
  }
  const snapshot = getSnapshot(storeRef);
  const referenceIdIndex = snapshot.findIndex((obj) => obj?.referenceId === itemReferenceId);
  // Bugfix: the stored property is `archived` (see updateFn below), not `archive` —
  // the "already in desired state" short-circuit never triggered before.
  if (referenceIdIndex === -1 || snapshot[referenceIdIndex].archived === archive) {
    return {
      error: referenceIdIndex === -1
    };
  }
  const updateFn = (doc) => {
    // Triple check if the store is still in the same array position.
    const foundReferenceAgain = getSnapshot(storeRef).findIndex(({ referenceId: rId }) => rId === itemReferenceId);
    if (foundReferenceAgain === -1) {
      return;
    }
    doc[storeRef][foundReferenceAgain].archived = archive;
  };
  const message = JSON.stringify({
    e: archive ? "ATTACHMENT_ARCHIVED" : "ATTACHMENT_RESTORED",
    p: [`${storeRef}[${referenceIdIndex}].archived`],
    a: "C"
  });
  await rawUpdateStoreValue(updateFn, message);
  return {
    error: false
  };
};
/**
 * Remove an attachment entry from the store (not from uppy or S3).
 * Archived entries are deliberately left untouched.
 * @returns {Promise<{error: boolean}>} error is true when the store or item could not be found.
 */
const removeFile = async ({ storeType, storeId, itemReferenceId, storeRef }) => {
  // Currently: only simply remove from store, not from uppy or s3.
  const { rawUpdateStoreValue, getSnapshot } = getStore(storeType, storeId, { createInstance: false }) ?? {};
  if (!getSnapshot) {
    return { error: true };
  }
  const snapshot = getSnapshot(storeRef);
  const referenceIdIndex = snapshot.findIndex((obj) => obj?.referenceId === itemReferenceId);
  // Bugfix: the stored property is `archived`, not `archive` — archived entries
  // were previously removable despite this guard's intent.
  if (referenceIdIndex === -1 || snapshot[referenceIdIndex].archived === true) {
    return {
      error: referenceIdIndex === -1
    };
  }
  const updateFn = (doc) => {
    // Triple check if the store is still in the same array position.
    const foundReferenceAgain = getSnapshot(storeRef).findIndex(({ referenceId: rId }) => rId === itemReferenceId);
    if (foundReferenceAgain === -1) {
      return;
    }
    doc[storeRef].splice(foundReferenceAgain, 1);
  };
  const message = JSON.stringify({
    e: "ATTACHMENT_REMOVED",
    p: [storeRef],
    a: "C"
  });
  await rawUpdateStoreValue(updateFn, message);
  return {
    error: false
  };
};
/**
 * Attach added/uploading files to the given store under storeRef, assigning each a unique
 * referenceId and a placeholder file path until the upload finishes (then the real path is patched in).
 * @param {Object} param0
 * @param {Array} [param0.addFilesUploadArray=[]] - Entries of [fileId, resultPromise, fileHasAlreadyBeenRegisteredByUser].
 * @returns {Promise<[Array, Promise]|undefined>} [allItems, promiseOfPerFileResults], or undefined on invalid input.
 */
const attachAddedFilesToStore = async ({ storeId, storeType, addFilesUploadArray = [], storeRef, groupName = "RESERVED_NONE" }) => {
  if (!storeId || !storeType || !storeRef || !Array.isArray(addFilesUploadArray)) {
    Sentry.withScope(function (scope) {
      scope.setContext("Info", {storeId, storeType, id: storeRef, groupName});
      scope.setTag("attachments", "attachToStore");
      scope.setContext("Function", { name: "attachAddedFilesToStore" });
      Sentry.captureMessage("Could not save attachment in store because of invalid parameters", "fatal");
    });
    return;
  } else if (addFilesUploadArray.length === 0) {
    return;
  }
  const {rawUpdateStoreValue, getSnapshot} = getStore(storeType, storeId, {createInstance: false}) ?? {};
  if (!rawUpdateStoreValue) {
    Sentry.withScope(function (scope) {
      scope.setContext("Info", {storeId, storeType, id: storeRef});
      scope.setTag("attachments", "save");
      scope.setContext("Function", { name: "attachAddedFilesToStore" });
      Sentry.captureMessage("Attachments is called without valid store reference", "fatal");
    });
    return;
  }
  const snapshot = getSnapshot(storeRef);
  const knownReferenceIds = snapshot?.map((obj) => obj?.referenceId || "") ?? [];
  // filePath -> referenceId of entries already present in the same group.
  const knownFilePathWithinGroupName = Object.fromEntries(snapshot?.filter((obj) => obj.groupName === groupName).map((obj) => [obj.filePath, obj.referenceId]) ?? []);
  const allItems = addFilesUploadArray.map(([fileId, resultPromise, fileHasAlreadyBeenRegisteredByUser]) => {
    // Check if reference id doesn't already exist, to prevent duplicate ids within store (within same storeRef)
    let referenceId;
    let skipped = false;
    do {
      // Length = 6 character, 37K ids can be generator with this setting. https://zelark.github.io/nano-id-cc/
      referenceId = nanoid(6);
    } while (knownReferenceIds.includes(referenceId));
    knownReferenceIds.push(referenceId);
    let name = `${PLACEHOLDER_PRE_FILENAME}${referenceId}_${retrieveFileName({fileId})}`;
    let promise = resultPromise;
    if (fileHasAlreadyBeenRegisteredByUser && retrieveFilePath(fileId)) {
      const filePath = retrieveFilePath(fileId);
      // The file is registered and already fully uploaded (because a file path is known), check if the file path is already in the snapshot
      if (knownFilePathWithinGroupName[filePath]) {
        // filePath exists in same group, so it makes no sense to add to the group again.
        skipped = true;
        // Provide the reference id of the original item
        referenceId = knownFilePathWithinGroupName[filePath];
      }
      // If known and the file has been uploaded, return the filePath
      name = filePath;
      promise = undefined;
    }
    return [name, fileId, referenceId, promise, skipped];
  });
  const allItemsWithoutSkipped = allItems.filter((arr) => !arr[4]);
  if (allItemsWithoutSkipped.length === 0) {
    // If there are no items to add to the store, resolve with an early abort.
    // Bugfix: destructure the item tuple ([name, fileId, referenceId, ...]) —
    // previously the map callback's (element, index, array) parameters were
    // misused, so fileId was the index and referenceId the whole array.
    return [allItems, Promise.resolve(allItems.map(([, fileId, referenceId]) => ({
      duplicate: true,
      skipped: true,
      metaData: {
        filePath: retrieveFilePath(fileId),
        fileId,
        referenceId
      }})))];
  }
  const onlyNewlyUploadedItems = allItemsWithoutSkipped.filter(([_, __, ___, uploadPromise]) => !!uploadPromise);
  const updateFn = (doc) => {
    if (!doc[storeRef]) {
      doc[storeRef] = [];
    }
    allItemsWithoutSkipped.forEach(([filePath, _, referenceId]) => {
      doc[storeRef].push({filePath, groupName, archived: false, referenceId, addedAt: Math.floor(Date.now() / 1000)});
    });
  };
  const message = JSON.stringify({
    e: "ATTACHMENT_ADD",
    p: [storeRef],
    a: "C"
  });
  await rawUpdateStoreValue(updateFn, message);
  // Note: deliberately not awaiting Promise.all, so this functions returns as fast as possible an array of files being attached to the store.
  return [allItems, Promise.all(onlyNewlyUploadedItems.map(async ([_, fileId, referenceId, promise]) =>
    promise.then(async () => {
      const currentAttachments = getSnapshot(storeRef);
      const foundStore = currentAttachments.findIndex(({referenceId: rId}) => rId === referenceId);
      if (foundStore === -1) {
        return { error: true };
      }
      const filePath = retrieveFilePath(fileId);
      if (!filePath) {
        Sentry.withScope(function (scope) {
          scope.setContext("Info", {storeId, storeType, id: storeRef, groupName, filePath, fileId, referenceId});
          scope.setTag("attachments", "attachToStore");
          scope.setContext("Function", { name: "attachAddedFilesToStore", part: "Processing only newly uploaded items" });
          Sentry.captureMessage("Could not save attachment in store (after uploading) because the file path could not be found", "fatal");
        });
        return {
          error: true,
          metaData: {
            filePath,
            fileId,
            referenceId
          }
        };
      } else if (currentAttachments[foundStore].filePath === filePath) {
        // The real file path is already stored; nothing to patch.
        return {
          duplicate: true,
          error: false,
          metaData: {
            filePath,
            fileId,
            referenceId
          }
        };
      }
      const updateFn = (doc) => {
        // Triple check if the store is still in the same array position.
        const foundStoreAgain = getSnapshot(storeRef).findIndex(({referenceId: rId}) => rId === referenceId);
        if (foundStoreAgain === -1) {
          return;
        }
        doc[storeRef][foundStoreAgain].filePath = filePath;
      };
      const message = JSON.stringify({
        e: "ATTACHMENT_UPLOADED",
        p: [`${storeRef}[${foundStore}].filePath`],
        a: "C"
      });
      return {
        error: false,
        metaData: {
          filePath,
          fileId,
          referenceId
        },
        store: await rawUpdateStoreValue(updateFn, message)
      };
    })
  ))];
};
/**
 * Add one or more files/blobs to uppy and start uploading them.
 * @param {File|Blob|FileList|DataTransferItemList|Array|Object} input - Single file or list of files.
 * @param {Object} inputMetaData - Must contain storeId and storeType; may contain attachAddedFilesToStore, storeRef, groupName.
 * @param {Function} [callback] - Invoked with the per-file result object.
 * @param {boolean} [downloadAndCacheFile=false] - If true, download the file after uploading and resolve with its data url.
 * @returns {Promise<[Array, (Promise|undefined)]>} [uploadQueue, attachToStorePromise?].
 * @throws {Error} When storeId and/or storeType are missing.
 */
const addFiles = async (input, inputMetaData, callback, downloadAndCacheFile = false) => {
  const metaData = {
    attachAddedFilesToStore: false,
    storeRef: undefined,
    groupName: undefined,
    ...inputMetaData
  };
  if (!metaData?.storeId || !metaData?.storeType) {
    Sentry.captureMessage("FileSyncHelper.addFiles was called without storeId or storeType", "fatal");
    throw Error("Missing storeId and/or storeType to process upload");
  }
  const files = []; // never reassigned -> const
  // If not already a list, make it a list
  const processInput = (!input?.length) ? [input] : input;
  // Iterate manually to accommodate Array, FileList, DataTransferItemList
  for (let i = 0; i < processInput.length; i += 1) {
    const file = processInput[i];
    if (Blob && file instanceof Blob) {
      // Safari, Firefox, IE land here
      files.push(file);
    } else if (file.webkitGetAsEntry) {
      // Chrome wraps Files in DataTransferItems
      files.push(file.webkitGetAsEntry());
    } else if (typeof file === "object") {
      files.push(file);
    }
  }
  const uploadFileIfNotDoneAlready = async (fileId, file) => {
    if (uppy.checkIfFileAlreadyExists(fileId)) {
      // File is known, so we don't need to do much. Note: we do not check race conditions
      const retrievedFile = uppy.getFile(fileId);
      const { name, ...relevantMetaData } = retrievedFile.meta ?? {};
      const result = {
        // src is only filled if file was downloaded
        src: retrievedFile.meta.dataUrl,
        // @deprecated
        filename: retrievedFile.meta.name,
        fileId,
        ...relevantMetaData,
      };
      callback?.(result);
      return result;
    }
    uppy.addFile(file);
    uppy.setFileMeta(fileId, {
      storeMetaData: {
        storeId: metaData.storeId,
        storeType: metaData.storeType,
      },
    });
    if (downloadAndCacheFile) {
      uppy.on("upload-success", processRequest);
    }
    return new Promise((resolve, reject) => {
      // Callback if on download is enabled, otherwise promise is returned
      const internalCallback = (obj) => {
        uppy.off("upload-success", processRequest);
        callback?.(obj);
        if (obj?.error) {
          reject(obj);
        } else {
          resolve(obj);
        }
      };
      // [callbackIfDownloadIfEnabled, isDownloaded, uploadCounter, receivedNonExpiredS3UploadUrl]
      resolvers[fileId] = [downloadAndCacheFile ? internalCallback : undefined, false, null, false];
      const promise = uppy.upload();
      if (!downloadAndCacheFile) {
        return promise.then(({ successful, failed }) => {
          // Cleanup: removed a dead `uppy.getFile(fileId);` statement whose result was discarded.
          const obj = {
            fileId
          };
          let test = successful.filter((f) => f.id === fileId);
          if (test.length > 0) {
            // Successful!
            const { name, ...relevantMetaData } = test[0].meta ?? {};
            Object.assign(obj, relevantMetaData);
            callback?.(obj);
            return resolve(obj);
          }
          // Failure :(
          test = failed.filter((f) => f.id === fileId);
          const { name, ...relevantMetaData } = test[0]?.meta ?? {};
          Object.assign(obj, relevantMetaData);
          obj.error = true;
          callback?.(obj);
          reject(obj);
        });
      }
    });
  };
  const queue = files.map((file) => {
    // Minimal shape needed by generateFileID to produce a deterministic id.
    const convertedFileObject = {
      name: file.name,
      type: file.type,
      data: {
        lastModified: file.lastModified,
        size: file.size
      },
      meta: {
        relativePath: file.webkitRelativePath || undefined
      }
    };
    const fileId = generateFileID(convertedFileObject);
    // TODO: In the future check on duplicates (so uploading the exact same file in one request) and handle that casus
    return [fileId, uploadFileIfNotDoneAlready(fileId, file), uppy.checkIfFileAlreadyExists(fileId)];
  });
  // Note: deliberately not awaiting attachAddedFilesToStore, so this functions returns as fast as possible an array of files being uploaded (and attached to a store)
  return [queue, metaData.attachAddedFilesToStore ? attachAddedFilesToStore({ ...metaData, addFilesUploadArray: queue }) : undefined];
};
// Public facade of the FileSyncHelper singleton.
return {
// Escape hatch: direct access to the shared uppy instance.
get uppy() {
return uppy;
},
processRequest(...args) {
return processRequest(...args);
},
addFiles(...args) {
return addFiles(...args);
},
// Stream of the overall upload progress ("idle" or a number), shared with replay(1).
get totalUploadProgress$() {
return shareableTotalUploadProgress$;
},
// Stream of the per-file resolvers map: fileId -> [callback, isDownloaded, progress, receivedUploadUrl].
get uploadProgress$() {
return uploadProgress$;
},
retrieveFileName({ fileId, filePath }) {
return retrieveFileName({ fileId, filePath });
},
retrieveFilePath(fileId) {
return retrieveFilePath(fileId);
},
archiveFile({ storeType, storeId, itemReferenceId, storeRef }) {
return archiveOrRestoreFile({ storeType, storeId, itemReferenceId, storeRef, archive: true })
},
restoreFile({ storeType, storeId, itemReferenceId, storeRef }) {
return archiveOrRestoreFile({ storeType, storeId, itemReferenceId, storeRef, archive: false })
},
removeFile({ storeType, storeId, itemReferenceId, storeRef }) {
return removeFile({ storeType, storeId, itemReferenceId, storeRef });
},
/**
* Provide either the fileId or filePath and the upload date is provided.
* @param {string} [filePath]
* @param {string} [fileId]
* @returns {string|null} Null if no upload date could be found, otherwise formatted date.
*/
getUploadDate({ filePath, fileId }) {
if (!fileId && !filePath) {
return null;
}
return filePathToCreationDate(filePath ?? retrieveFilePath(fileId));
},
// NOTE: download urls are short-lived; they are requested on demand and never cached here.
createDownloadUrl: async ({ filePath, fileId }) => {
return createDownloadUrl(filePath ?? retrieveFilePath(fileId))
}
};
})();
/**
 * React hook (react-rxjs bind) providing the upload progress (0-100) for the
 * given file id, or null when no file id is provided.
 * Cleanup: removed a redundant `switchMap((v) => of(v))` — an identity operator with no effect.
 */
export const useFileUploadProcess = bind((fileId) => !fileId ? of(null) : FileSyncHelper.uploadProgress$.pipe(
  filter((obj) => obj[fileId]),
  map((obj) => obj[fileId]?.[2] ?? 0),
  distinctUntilChanged()
), null)[0];
/**
 * React hook (react-rxjs bind) providing the upload status of a file:
 * "EMPTY" | "IDLE" | "OTHER_USER_UPLOADING" | "ADDED" | "UPLOADING" | "UPLOAD_COMPLETED".
 * @param {string} [fileId]
 * @param {string} [filePath]
 */
export const useFileUploadStatus = bind((fileId, filePath) => {
  if (!fileId && !filePath) {
    return of("EMPTY");
  }
  if (!fileId && filePath) {
    // Placeholder paths mean another client is still uploading the file.
    return filePath.startsWith(PLACEHOLDER_PRE_FILENAME) ? of("OTHER_USER_UPLOADING") : of("IDLE");
  }
  return FileSyncHelper.uploadProgress$.pipe(
    filter((obj) => obj[fileId]),
    map((obj) => {
      if (!obj[fileId]) {
        // This should not happen, as we think we are the one uploading, but we know nothing.
        return "OTHER_USER_UPLOADING";
      }
      // Index 3 of the resolver tuple: receivedNonExpiredS3UploadUrl.
      const hasBeenAddedToBeUploaded = obj[fileId]?.[3];
      if (hasBeenAddedToBeUploaded === null || typeof hasBeenAddedToBeUploaded === "undefined") {
        return "IDLE";
      }
      if (hasBeenAddedToBeUploaded === false) {
        return "ADDED";
      }
      if (obj[fileId]?.[2] === 100) {
        // Bugfix: previously assigned to an implicit global `status` (window.status in
        // browsers) and fell through to return "UPLOADING" regardless.
        return "UPLOAD_COMPLETED";
      }
      return "UPLOADING";
    }),
    distinctUntilChanged(),
    switchMap((v) => of(v))
  );
}, "EMPTY")[0];
// Singleton export: all consumers share the same uppy instance and streams.
export default FileSyncHelper;