// Resolve the target page index from the requested cursor action; `index`,
// `pageCount`, and (below) `pageSize` are assumed to come from the cursor's
// unpacked data.
const newIndex = (() => {
  if (action === 'next') {
    return index + 1;
  }
  if (action === 'prev') {
    return index - 1;
  }
  if (action === 'first') {
    return 0;
  }
  if (action === 'last') {
    return pageCount;
  }
})();
// TODO: stop assuming cursors are for collections
const depth = getCollectionDepth(collection);
const allEntries = getFolderEntries(
  window.repoFiles,
  collection.get('folder'),
  extension,
  depth,
);
const entries = allEntries.slice(newIndex * pageSize, newIndex * pageSize + pageSize);
const newCursor = getCursor(collection, extension, allEntries, newIndex);
return Promise.resolve({ entries, cursor: newCursor });
}
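
// A minimal usage sketch, assuming the block above is the body of a
// backend's traverseCursor(cursor, action) method: the caller hands the
// cursor back together with the action it wants to perform.
async function loadLastPage(backend, cursor) {
  // Re-slices the full entry list and returns the final page together
  // with a fresh cursor for further traversal.
  const { entries, cursor: nextCursor } = await backend.traverseCursor(cursor, 'last');
  return { entries, cursor: nextCursor };
}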
} else {
  // No media integration: derive the asset's repo path from the entry
  // currently being edited, then wrap the file in an asset proxy.
  const entry = state.entryDraft.get('entry');
  const entryPath = entry?.get('path');
  const collection = state.collections.get(entry?.get('collection'));
  const path = selectMediaFilePath(state.config, collection, entryPath, file.name);
  assetProxy = createAssetProxy({
    file,
    path,
  });
}
dispatch(addAsset(assetProxy));
let mediaFile: MediaFile;
if (integration) {
  const id = await getBlobSHA(file);
  // integration assets are persisted immediately, thus draft is false
  mediaFile = createMediaFileFromAsset({ id, file, assetProxy, draft: false });
} else if (editingDraft) {
  const id = await getBlobSHA(file);
  // Draft media is only attached to the entry draft here; it is persisted
  // to the backend together with the entry when the draft is saved.
  mediaFile = createMediaFileFromAsset({ id, file, assetProxy, draft: editingDraft });
  return dispatch(addDraftEntryMediaFile(mediaFile));
} else {
  mediaFile = await backend.persistMedia(state.config, assetProxy);
}
return dispatch(mediaPersisted(mediaFile, { privateUpload }));
} catch (error) {
  console.error(error);
  dispatch(
    notifSend({
      message: `Failed to persist media: ${error}`,
      kind: 'danger',
    }),
  );
  return dispatch(mediaPersistFailed({ privateUpload }));
}
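
// Hypothetical call site for the thunk above: dispatch it with a browser
// File object (`store` and the `privateUpload` value are assumptions here).
store.dispatch(persistMedia(file, { privateUpload: false }));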
// Metadata writes are queued through a semaphore so concurrent saves cannot
// race on the shared `_netlify_cms` meta ref; the wrapping Promise lets
// callers await their turn in the queue.
return new Promise((resolve, reject) =>
  this._metadataSemaphore?.take(async () => {
    try {
      const branchData = await this.checkMetadataRef();
      const file = { path: `${key}.json`, raw: JSON.stringify(data) };
      await this.uploadBlob(file);
      const changeTree = await this.updateTree(branchData.sha, [file as File]);
      const { sha } = await this.commit(`Updating “${key}” metadata`, changeTree);
      await this.patchRef('meta', '_netlify_cms', sha);
      localForage.setItem(`gh.meta.${key}`, {
        expires: Date.now() + 300000, // In 5 minutes
        data,
      });
      this._metadataSemaphore?.leave();
      resolve();
    } catch (err) {
      // Release the semaphore on failure too, so one failed write doesn't
      // deadlock every subsequent metadata update.
      this._metadataSemaphore?.leave();
      reject(err);
    }
  }),
);
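
// The semaphore above is assumed to be created lazily with a single slot
// (e.g. via the `semaphore` npm package), so writes are fully serialized:
if (!this._metadataSemaphore) {
  this._metadataSemaphore = semaphore(1); // at most one metadata write in flight
}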
async getLocalDraftBackup(collection: Collection, slug: string) {
  const key = getEntryBackupKey(collection.get('name'), slug);
  const backup = await localForage.getItem(key);
  if (!backup || !backup.raw.trim()) {
    return {};
  }
  const { raw, path } = backup;
  let { mediaFiles = [] } = backup;
  mediaFiles = mediaFiles.map(file => {
    // de-serialize the file object
    if (file.file) {
      return { ...file, url: URL.createObjectURL(file.file) };
    }
    return file;
  });
  const label = selectFileEntryLabel(collection, slug);
  const entry: EntryValue = this.entryWithFormat(collection)(
    createEntry(collection.get('name'), slug, path, { raw, label, mediaFiles }),
  );
  return { entry };
}
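
// Hypothetical usage: try to restore a draft after a reload; an empty
// object means no usable backup was found.
const { entry } = await backend.getLocalDraftBackup(collection, slug);
if (entry) {
  // hydrate the editor with the recovered entry and its media files
}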
.then((response: { cursor: typeof Cursor }) => ({
  ...response,
  // The only existing backend that uses the pagination system is the
  // Algolia integration, which is also the only integration used to
  // list entries. Checking for an integration therefore tells us
  // whether we're on the old integer-based pagination API. Other
  // backends will simply store an empty cursor, which behaves
  // identically to no cursor at all.
  cursor: integration
    ? Cursor.create({
        actions: ['next'],
        meta: { usingOldPaginationAPI: true },
        data: { nextPage: page + 1 },
      })
    : Cursor.create(response.cursor),
}))
.then((response: { cursor: typeof Cursor; pagination: number; entries: EntryValue[] }) => ({
  ...response,
  entries: response.entries.map(this.entryWithFormat(collection)),
}));
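
// A sketch (an assumption, not verbatim library code) of how a later
// traversal can recognize the legacy cursor and read the page counter back;
// `meta` and `data` are Immutable maps on the cursor:
function nextPageFromCursor(cursor) {
  return cursor.meta.get('usingOldPaginationAPI')
    ? cursor.data.get('nextPage')
    : undefined;
}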
processFile = file => ({
  ...file,
  name: basename(file.path),
  // BitBucket does not return file SHAs, but it does give us the
  // commit SHA. Since the commit SHA will change if any files do,
  // we can construct an ID from the commit SHA and the file path
  // that will help with caching (though not as well as a per-file
  // SHA, since it changes even when the individual file itself
  // doesn't).
  ...(file.commit && file.commit.hash ? { id: `${file.commit.hash}/${file.path}` } : {}),
});
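
// Cache-key behavior of the derived id, with hypothetical values: a file at
// `posts/hello.md` under commit hash `3f2ab91` gets the id
// '3f2ab91/posts/hello.md', and that id changes on every new commit, even
// when posts/hello.md itself is untouched.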
processFiles = files => files.filter(this.isFile).map(this.processFile);
uploadBlob = (item, { commitMessage, branch = this.branch } = {}) => {
  const contentBlob = get(item, 'fileObj', new Blob([item.raw]));
  const formData = new FormData();
  // The third param sets the filename header, so that a path named
  // `message` or `branch` can't collide with the other form fields.
  formData.append(item.path, contentBlob, basename(item.path));
  formData.append('branch', branch);
  if (commitMessage) {
    formData.append('message', commitMessage);
  }
  if (this.commitAuthor) {
    const { name, email } = this.commitAuthor;
    formData.append('author', `${name} <${email}>`);
  }
  return flow([
    unsentRequest.withMethod('POST'),
    unsentRequest.withBody(formData),
    this.request,
    then(() => ({ ...item })),
  ])(`${this.repoURL}/src`);
};
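
// Hypothetical usage, sketched as an extra method on the same class: commit
// a new text file to the default branch. When `fileObj` is absent, the Blob
// is built from `raw` (see the `get` fallback above).
createHelloPost = () =>
  this.uploadBlob(
    { path: 'posts/hello.md', raw: '# Hello' },
    { commitMessage: 'Create posts/hello.md' },
  );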
// Back up the draft's media files: `blob:` URLs don't survive a reload, so
// fetch each one and store a real File object alongside the entry's raw
// content (`key` and `raw` are assumed to be computed earlier in the
// enclosing function).
const mediaFiles = await Promise.all(
  entry
    .get('mediaFiles')
    .toJS()
    .map(async (file: MediaFile) => {
      // make sure to serialize the file
      if (file.url?.startsWith('blob:')) {
        const blob = await fetch(file.url).then(res => res.blob());
        return { ...file, file: new File([blob], file.name) };
      }
      return file;
    }),
);
await localForage.setItem(key, {
  raw,
  path: entry.get('path'),
  mediaFiles,
});
// Also write the raw content under the argument-less backup key, a generic
// marker that a backup exists.
return localForage.setItem(getEntryBackupKey(), raw);
}
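
// The restore path is assumed to mirror this serialization step: the backup
// holds real File objects, and object URLs are re-created on load, as in
// getLocalDraftBackup above (`backup` here is a hypothetical value read
// back from localForage).
const restored = (backup.mediaFiles || []).map(file =>
  file.file ? { ...file, url: URL.createObjectURL(file.file) } : file,
);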