From f87b2cd4c25eedb86c99a65b676bc8c4b391c051 Mon Sep 17 00:00:00 2001 From: Kerem Gurkan Date: Mon, 23 Feb 2026 23:49:43 -0700 Subject: [PATCH 1/9] Updates file importing directly to uploads folder with basename --- frontend/src/commands.js | 164 +++++++++--------- .../activities/explorer/ImportFile.jsx | 151 +++++++++------- .../resources-editor/ResourcesWizard.jsx | 20 --- 3 files changed, 167 insertions(+), 168 deletions(-) diff --git a/frontend/src/commands.js b/frontend/src/commands.js index e87813c..cff0d36 100644 --- a/frontend/src/commands.js +++ b/frontend/src/commands.js @@ -1,7 +1,6 @@ import store from "./redux/store" import { isPanelOpen, panelsActions, serializePanel } from "./redux/hooks/panelsHooks" -import { workDirActions, writeToFileHandle } from "./redux/hooks/workingDirectoryHooks" -import { useOpenPanel } from "./redux/hooks/panelsHooks" +import { workDirActions, writeToFileHandle, readFileFromPath } from "./redux/hooks/workingDirectoryHooks" import { showErrorNotification } from "./modules/util" import { showNotification } from "@mantine/notifications" @@ -20,37 +19,38 @@ export default { } ], execute: async fileNameOrId => { - // try to find file by ID first, then by name const file = findFileByNameOrId(fileNameOrId) - // quit if this file doesn't exist if (!file) return "File doesn't exist." 
- // delete file from disk, try to see if in subdirectory first else delete from root + const dirHandle = store.getState().workingDirectory.directoryHandle const directory = file.id.split("/")[0] + try { - const tempDirectory = await store.getState().workingDirectory.directoryHandle.getDirectoryHandle(directory); + const tempDirectory = await dirHandle.getDirectoryHandle(directory); + + let uploadedFilePath = null; + try { + const jsonFH = await tempDirectory.getFileHandle(file.name); + const jsonText = await (await jsonFH.getFile()).text(); + const jsonData = JSON.parse(jsonText); + uploadedFilePath = jsonData.file || null; + } catch (e) {} + await tempDirectory.removeEntry(file.name); try { - const uploadsDir = await tempDirectory.getDirectoryHandle('uploads'); - const baseName = file.name.replace(/\.[^/.]+$/, ""); - - for await (const entry of uploadsDir.values()) { - if (entry.kind === 'file' && entry.name.startsWith(baseName + '.')) { - await uploadsDir.removeEntry(entry.name); - } + if (uploadedFilePath) { + const uploadsDir = await tempDirectory.getDirectoryHandle('uploads'); + const uploadFileName = uploadedFilePath.split('/').pop(); + await uploadsDir.removeEntry(uploadFileName); } - } catch (e) { - } + } catch (e) {} } catch { - await store.getState().workingDirectory.directoryHandle?.removeEntry(file.name); + await dirHandle?.removeEntry(file.name); } - // close panel if it's open store.dispatch(panelsActions.closePanel(file.id)) - - // remove file from store store.dispatch(workDirActions.removeFile(file.id)) } }, @@ -67,18 +67,13 @@ export default { } ], execute: async fileNameOrId => { - // try to find file by ID first, then by name const file = findFileByNameOrId(fileNameOrId) - - // quit if this file doesn't exist if (!file) return "File doesn't exist." - // make sure panel is open if(!isPanelOpen(file.id)) return "Panel isn't open." 
- // save await writeToFileHandle(file, serializePanel(file.id)) } }, @@ -108,44 +103,49 @@ export default { try { const parts = file.id.split('/'); let cur = dirHandle; - for (let i = 0; i < parts.length - 1; i++) { cur = await cur.getDirectoryHandle(parts[i]); } - - let uploadsDir; - - try { - uploadsDir = await cur.getDirectoryHandle('uploads'); - } catch (e) { - uploadsDir = null; + const jsonFH = await cur.getFileHandle(parts[parts.length - 1]); + const jsonText = await (await jsonFH.getFile()).text(); + const jsonData = JSON.parse(jsonText); + + if (jsonData.file) { + fileData = await readFileFromPath(dirHandle, jsonData.file); + downloadName = jsonData.file.split('/').pop(); + } else { + fileData = await jsonFH.getFile(); } - - if (uploadsDir) { - const baseName = file.name.replace(/\.[^/.]+$/, ""); - let foundUpload = null; - - for await (const entry of uploadsDir.values()) { - if (entry.kind === 'file' && entry.name.replace(/\.[^/.]+$/, "") === baseName) { - foundUpload = entry; - break; + } catch (err) { + try { + const parts = file.id.split('/'); + let cur = dirHandle; + for (let i = 0; i < parts.length - 1; i++) { + cur = await cur.getDirectoryHandle(parts[i]); + } + + let uploadsDir = null; + try { uploadsDir = await cur.getDirectoryHandle('uploads'); } catch (e) {} + + if (uploadsDir) { + const baseName = file.name.replace(/\.[^/.]+$/, ""); + for await (const entry of uploadsDir.values()) { + if (entry.kind === 'file' && entry.name.replace(/\.[^/.]+$/, "") === baseName) { + const fh = await uploadsDir.getFileHandle(entry.name); + fileData = await fh.getFile(); + downloadName = entry.name; + break; + } } } - - if (foundUpload) { - const fh = await uploadsDir.getFileHandle(foundUpload.name); + + if (!fileData) { + const fh = await cur.getFileHandle(parts[parts.length - 1]); fileData = await fh.getFile(); - downloadName = foundUpload.name; } + } catch (fallbackErr) { + console.warn('Failed to read file from directoryHandle', fallbackErr); } - - // If not 
found in uploads, fallback to original file - if (!fileData) { - const fh = await cur.getFileHandle(parts[parts.length - 1]); - fileData = await fh.getFile(); - } - } catch (err) { - console.warn('Failed to read file from directoryHandle', err); } } else { console.warn('No usable directoryHandle in store. directoryHandle:', dirHandle); @@ -183,13 +183,11 @@ export default { const file = findFileByNameOrId(fileNameOrId); if (!file) return "File doesn't exist."; - // Create a file input element const input = document.createElement('input'); input.type = 'file'; input.style.display = 'none'; document.body.appendChild(input); - // Return a promise that resolves when file is selected return new Promise((resolve, reject) => { input.onchange = async (e) => { const newFile = e.target.files[0]; @@ -199,27 +197,23 @@ export default { return resolve("No file selected."); } - // Get file extensions const getExtension = (filename) => { const match = filename.match(/\.[^/.]+$/); return match ? match[0] : ''; }; - const originalExt = getExtension(file.name); - const newFileExt = getExtension(newFile.name); - - // Verify extensions match - if (originalExt !== newFileExt) { - document.body.removeChild(input); - showErrorNotification("File type mismatch", `Expected ${originalExt} but got ${newFileExt}`); - return resolve(`File type mismatch. Expected ${originalExt} but got ${newFileExt}`); - } - const directory = file.id.split("/")[0]; try { const tempDirectory = await store.getState().workingDirectory.directoryHandle.getDirectoryHandle(directory); - - // Get or create uploads directory + + let existingFileName = null; + try { + const jsonFH = await tempDirectory.getFileHandle(file.name); + const jsonText = await (await jsonFH.getFile()).text(); + const jsonData = JSON.parse(jsonText); + existingFileName = jsonData.file ? 
jsonData.file.split('/').pop() : null; + } catch (e) {} + let uploadsDir; try { uploadsDir = await tempDirectory.getDirectoryHandle('uploads'); @@ -227,23 +221,28 @@ export default { uploadsDir = await tempDirectory.getDirectoryHandle('uploads', { create: true }); } - // Remove old upload file(s) for this base name - const baseName = file.name.replace(/\.[^/.]+$/, ""); - for await (const entry of uploadsDir.values()) { - if (entry.kind === 'file' && entry.name.startsWith(baseName + '.')) { - await uploadsDir.removeEntry(entry.name); + if (existingFileName) { + const getExtension = (n) => { const m = n.match(/\.[^/.]+$/); return m ? m[0] : ''; }; + const originalExt = getExtension(existingFileName); + const newFileExt = getExtension(newFile.name); + + if (originalExt !== newFileExt) { + document.body.removeChild(input); + showErrorNotification("File type mismatch", `Expected ${originalExt} but got ${newFileExt}`); + return resolve(`File type mismatch. Expected ${originalExt} but got ${newFileExt}`); } - } - // Write the new file with the original file's name - const targetFileName = baseName + originalExt; - const newFileHandle = await uploadsDir.getFileHandle(targetFileName, { create: true }); - const writable = await newFileHandle.createWritable(); - await writable.write(newFile); - await writable.close(); + try { await uploadsDir.removeEntry(existingFileName); } catch {} - document.body.removeChild(input); - resolve("File updated successfully."); + const newFileHandle = await uploadsDir.getFileHandle(existingFileName, { create: true }); + const writable = await newFileHandle.createWritable(); + await writable.write(newFile); + await writable.close(); + + document.body.removeChild(input); + resolve("File updated successfully."); + return; + } } catch (e) { document.body.removeChild(input); showErrorNotification("Failed to update file", e.message); @@ -260,7 +259,6 @@ export default { resolve("File update cancelled."); }; - // Trigger the file picker input.click(); 
}); } diff --git a/frontend/src/components/activities/explorer/ImportFile.jsx b/frontend/src/components/activities/explorer/ImportFile.jsx index 09940ba..7529b02 100644 --- a/frontend/src/components/activities/explorer/ImportFile.jsx +++ b/frontend/src/components/activities/explorer/ImportFile.jsx @@ -9,106 +9,90 @@ import { writeToFileHandle } from "../../../redux/hooks/workingDirectoryHooks"; import { useOpenPanel } from "../../../redux/hooks/panelsHooks"; import { workingDirectorySlice } from "../../../redux/store"; import { showErrorNotification } from "../../../modules/util"; +import { useUnifiedModal } from "../../../redux/hooks/useUnifiedModal"; +import { upload_resource } from "../../../API"; export const importedFile = createContext() +const WORKFLOW_SUBDIRS = ['resources', 'strains', 'sampleDesigns', 'experimentalSetups'] + +async function getAvailableBaseName(objectTypeDir, uploadsDir, baseName, ext) { + let candidate = baseName; + let counter = 1; + while (true) { + let jsonExists = false; + let fileExists = false; + try { await objectTypeDir.getFileHandle(`${candidate}.json`); jsonExists = true; } catch {} + try { await uploadsDir.getFileHandle(`${candidate}${ext}`); fileExists = true; } catch {} + if (!jsonExists && !fileExists) return candidate; + candidate = `${baseName} (${counter})`; + counter++; + } +} + export default function ImportFile({ onSelect, text, useSubdirectory = false }) { const [selectedFile, setSelectedFile] = useState(null) const dirName = useSelector(state => state.workingDirectory.directoryHandle) const dispatch = useDispatch() const openPanel = useOpenPanel() const { actions } = workingDirectorySlice - - + const { workflows } = useUnifiedModal() async function addFileMetadata(fileHandle) { - let directoryHandle = null; const file = await fileHandle.getFile(); - - if (useSubdirectory) { - directoryHandle = await dirName.getDirectoryHandle(useSubdirectory, { create: false }) - .catch(() => 
dirName.getDirectoryHandle(useSubdirectory, { create: true })); - - // TODO: Automatically generate this - if (useSubdirectory === 'resources' || useSubdirectory === 'strains' || useSubdirectory === 'sampleDesigns' || useSubdirectory === "experimentalSetups") { - directoryHandle = await directoryHandle.getDirectoryHandle("uploads", { create: false }) - .catch(() => directoryHandle.getDirectoryHandle("uploads", { create: true })); - } - } - return { fileobj: file, name: file.name, fileHandle: fileHandle, - directoryHandle: directoryHandle, + directoryHandle: null, objectType: await classifyFile(fileHandle) }; } - async function createWorkflowJSON(fileName, objectType) { + async function saveFileToUploads(fileObj, objectType, actualFileName) { + const subDir = await dirName.getDirectoryHandle(objectType, { create: true }); + const uploadsDir = await subDir.getDirectoryHandle('uploads', { create: true }); + const fileHandle = await uploadsDir.getFileHandle(actualFileName, { create: true }); + const writable = await fileHandle.createWritable(); + const arrayBuffer = await fileObj.arrayBuffer(); + await writable.write(arrayBuffer); + await writable.close(); + } + + async function createWorkflowJSON(availableBaseName, objectType, filePath, initialUpload) { try { const directory = await dirName.getDirectoryHandle(objectType, { create: true }); - const baseFileName = fileName.replace(/\.[^/.]+$/, ""); - const jsonFileName = `${baseFileName}.json`; - - let fileExists = false; - for await (const entry of directory.values()) { - if (entry.kind === 'file' && entry.name === jsonFileName) { - fileExists = true; - break; - } - } - - if (!fileExists) { - try { - const uploadsDir = await directory.getDirectoryHandle("uploads", { create: false }); - for await (const entry of uploadsDir.values()) { - if (entry.kind === 'file' && entry.name.replace(/\.[^/.]+$/, "") == baseFileName) { - fileExists = true; - break; - } - } - } catch (e) { - } - } - - if (fileExists) { - 
showErrorNotification('Same Filename', "SynbioSuite currently does not support uploading multiple files of same name."); - return; - } - + const jsonFileName = `${availableBaseName}.json`; const jsonFileHandle = await directory.getFileHandle(jsonFileName, { create: true }); - + const defaultWorkflow = { activeStep: 0, - file: `${objectType}/uploads/${fileName}`, + file: filePath, collection: {}, - uploads: [] + uploads: initialUpload ? [initialUpload] : [] }; - + await writeToFileHandle(jsonFileHandle, JSON.stringify(defaultWorkflow)); - + jsonFileHandle.id = `${objectType}/${jsonFileName}`; - // TODO: Assign programatically - if (useSubdirectory === 'resources') { + if (objectType === 'resources') { jsonFileHandle.objectType = ObjectTypes.Resources.id; - } else if (useSubdirectory === 'strains') { + } else if (objectType === 'strains') { jsonFileHandle.objectType = ObjectTypes.Strains.id; - } else if (useSubdirectory === 'sampleDesigns') { + } else if (objectType === 'sampleDesigns') { jsonFileHandle.objectType = ObjectTypes.SampleDesigns.id; - } else if (useSubdirectory === 'experimentalSetups') { + } else if (objectType === 'experimentalSetups') { jsonFileHandle.objectType = ObjectTypes.Metadata.id; } - + dispatch(actions.addFile(jsonFileHandle)); - openPanel(jsonFileHandle); } catch (err) { console.error("Error creating resource workflow JSON:", err); } } - + const handleClick = async () => { try { const [fileHandle] = await window.showOpenFilePicker({ @@ -119,13 +103,50 @@ export default function ImportFile({ onSelect, text, useSubdirectory = false }) const fileMetadata = await addFileMetadata(fileHandle) setSelectedFile(fileMetadata) - - // TODO: Automatically generate this list - if (useSubdirectory === 'resources' || useSubdirectory === 'strains' || useSubdirectory === 'sampleDesigns' || useSubdirectory === "experimentalSetups") { - await createWorkflowJSON(fileMetadata.name, useSubdirectory); - } - onSelect?.(fileMetadata) + if 
(WORKFLOW_SUBDIRS.includes(useSubdirectory)) { + workflows.browseCollections(async (result) => { + if (!result?.completed || !result?.collections?.length) return; + + const collection = result.collections[0]; + const baseName = fileMetadata.name.replace(/\.[^/.]+$/, ""); + const ext = fileMetadata.name.match(/\.[^/.]+$/)?.[0] ?? ''; + + try { + const objectTypeDir = await dirName.getDirectoryHandle(useSubdirectory, { create: true }); + const uploadsDir = await objectTypeDir.getDirectoryHandle('uploads', { create: true }); + const availableBaseName = await getAvailableBaseName(objectTypeDir, uploadsDir, baseName, ext); + const actualFileName = `${availableBaseName}${ext}`; + const filePath = `${useSubdirectory}/uploads/${actualFileName}`; + + await saveFileToUploads(fileMetadata.fileobj, useSubdirectory, actualFileName); + + const uploadEntry = { + collectionName: collection.name || collection.displayId, + uri: collection.uri, + file: filePath, + date: new Date().toLocaleString(undefined, { timeZoneName: 'short' }) + }; + + await createWorkflowJSON(availableBaseName, useSubdirectory, filePath, uploadEntry); + + upload_resource( + filePath, + result.sbh_credential_check?.selectedRepo, + result.authToken, + collection.displayId, + collection.description, + dirName, + result.sbh_overwrite + ); + } catch (err) { + console.error("Error saving file or creating workflow:", err); + showErrorNotification("Import Failed", err.message); + } + }, { multiSelect: false, rootOnly: true }); + } else { + onSelect?.(fileMetadata) + } } catch (err) { console.error("File selection canceled or failed", err) } diff --git a/frontend/src/components/panels/resources-editor/ResourcesWizard.jsx b/frontend/src/components/panels/resources-editor/ResourcesWizard.jsx index 672d0d4..9c66941 100644 --- a/frontend/src/components/panels/resources-editor/ResourcesWizard.jsx +++ b/frontend/src/components/panels/resources-editor/ResourcesWizard.jsx @@ -19,29 +19,9 @@ export default function 
ResourcesWizard() { const [file, setFile] = usePanelProperty(panelId, 'file', false) const [uploads, setUploads] = usePanelProperty(panelId, 'uploads', false, []) - - const handleValidateAndUpload = async () => { - workflows.browseCollections((result) => { - if (result?.completed && result?.collections && result.collections.length > 0) { - const collection = result.collections[0] - setUploads([ - ...uploads, - { - collectionName: collection.name || collection.displayId, - uri: collection.uri, - file: file, - date: new Date().toLocaleString(undefined, { timeZoneName: 'short' }) - } - ]) - - upload_resource(file, result.sbh_credential_check.selectedRepo, result.authToken, collection.displayId, collection.description, workingDirectory, result.sbh_overwrite) - } - }, { multiSelect: false, rootOnly: true }) - } return ( - {(uploads?.length ?? 0) === 0 ? ( From cc5d6b89ca4cc3b2bce473bd81224aa3f237ad22 Mon Sep 17 00:00:00 2001 From: Kerem Gurkan Date: Tue, 24 Feb 2026 00:49:30 -0700 Subject: [PATCH 2/9] Adds update functionality to resources workflow --- frontend/src/commands.js | 240 ++++++++++++++---- .../activities/explorer/ImportFile.jsx | 6 +- .../resources-editor/ResourcesWizard.jsx | 38 +-- 3 files changed, 212 insertions(+), 72 deletions(-) diff --git a/frontend/src/commands.js b/frontend/src/commands.js index cff0d36..bfff053 100644 --- a/frontend/src/commands.js +++ b/frontend/src/commands.js @@ -3,6 +3,9 @@ import { isPanelOpen, panelsActions, serializePanel } from "./redux/hooks/panels import { workDirActions, writeToFileHandle, readFileFromPath } from "./redux/hooks/workingDirectoryHooks" import { showErrorNotification } from "./modules/util" import { showNotification } from "@mantine/notifications" +import { openUnifiedModal } from "./redux/slices/modalSlice" +import { MODAL_TYPES } from "./modules/unified_modal/unifiedModal" +import { upload_resource, CheckLogin } from "./API" export default { @@ -183,83 +186,210 @@ export default { const file = 
findFileByNameOrId(fileNameOrId); if (!file) return "File doesn't exist."; - const input = document.createElement('input'); - input.type = 'file'; - input.style.display = 'none'; - document.body.appendChild(input); + const dirHandle = store.getState().workingDirectory.directoryHandle; + const directory = file.id.split("/")[0]; - return new Promise((resolve, reject) => { - input.onchange = async (e) => { - const newFile = e.target.files[0]; - if (!newFile) { - document.body.removeChild(input); - showErrorNotification("Unable to update", "No file selected"); - return resolve("No file selected."); - } + let jsonData = null; + let tempDirectory = null; - const getExtension = (filename) => { - const match = filename.match(/\.[^/.]+$/); - return match ? match[0] : ''; - }; + try { + tempDirectory = await dirHandle.getDirectoryHandle(directory); + const jsonFH = await tempDirectory.getFileHandle(file.name); + const jsonText = await (await jsonFH.getFile()).text(); + jsonData = JSON.parse(jsonText); + } catch (e) { + showErrorNotification("Failed to read workflow file", e.message); + return "Failed to read workflow file."; + } - const directory = file.id.split("/")[0]; - try { - const tempDirectory = await store.getState().workingDirectory.directoryHandle.getDirectoryHandle(directory); + const lastUpload = jsonData.uploads?.length + ? jsonData.uploads[jsonData.uploads.length - 1] + : null; - let existingFileName = null; - try { - const jsonFH = await tempDirectory.getFileHandle(file.name); - const jsonText = await (await jsonFH.getFile()).text(); - const jsonData = JSON.parse(jsonText); - existingFileName = jsonData.file ? 
jsonData.file.split('/').pop() : null; - } catch (e) {} + if (!lastUpload?.selectedRepo || !lastUpload?.uri) { + showErrorNotification("Cannot update", "No prior upload record with repository information found."); + return "No prior upload record found."; + } - let uploadsDir; - try { - uploadsDir = await tempDirectory.getDirectoryHandle('uploads'); - } catch { - uploadsDir = await tempDirectory.getDirectoryHandle('uploads', { create: true }); + const selectedRepo = lastUpload.selectedRepo; + const expectedEmail = lastUpload.userEmail || null; + const collectionDisplayId = lastUpload.uri.split('/').slice(-2, -1)[0] || lastUpload.collectionName; + const collectionName = lastUpload.collectionName; + const collectionUri = lastUpload.uri; + + function getStoredToken() { + try { + const stored = localStorage.getItem('SynbioHub'); + if (!stored) return null; + const repos = JSON.parse(stored); + const entry = repos.find(r => r.value === selectedRepo); + return entry?.authtoken || null; + } catch { return null; } + } + + async function performUpdate(authToken) { + return new Promise((resolve) => { + const input = document.createElement('input'); + input.type = 'file'; + input.style.display = 'none'; + document.body.appendChild(input); + + input.oncancel = () => { + document.body.removeChild(input); + showNotification({ title: "File update cancelled", message: "The file update was cancelled." }); + resolve("File update cancelled."); + }; + + input.onchange = async (e) => { + const newFile = e.target.files[0]; + document.body.removeChild(input); + + if (!newFile) { + showErrorNotification("Unable to update", "No file selected"); + return resolve("No file selected."); } + const getExtension = (n) => { const m = n.match(/\.[^/.]+$/); return m ? m[0] : ''; }; + const existingFileName = jsonData.file ? jsonData.file.split('/').pop() : null; + if (existingFileName) { - const getExtension = (n) => { const m = n.match(/\.[^/.]+$/); return m ? 
m[0] : ''; }; const originalExt = getExtension(existingFileName); const newFileExt = getExtension(newFile.name); - if (originalExt !== newFileExt) { - document.body.removeChild(input); showErrorNotification("File type mismatch", `Expected ${originalExt} but got ${newFileExt}`); return resolve(`File type mismatch. Expected ${originalExt} but got ${newFileExt}`); } + } + + try { + const uploadsDir = await tempDirectory.getDirectoryHandle('uploads', { create: true }); - try { await uploadsDir.removeEntry(existingFileName); } catch {} + const newFileName = newFile.name; + const sameFilename = existingFileName && existingFileName === newFileName; - const newFileHandle = await uploadsDir.getFileHandle(existingFileName, { create: true }); - const writable = await newFileHandle.createWritable(); + const stagingName = sameFilename ? `__tmp__${newFileName}` : newFileName; + const stagingFH = await uploadsDir.getFileHandle(stagingName, { create: true }); + const writable = await stagingFH.createWritable(); await writable.write(newFile); await writable.close(); - document.body.removeChild(input); + const newFilePath = `${directory}/uploads/${newFileName}`; + + // TODO: Remove once SBS Server implementation works correctly + try{ await upload_resource( + newFilePath, + selectedRepo, + authToken, + collectionDisplayId, + "", + dirHandle, + 3 + );} catch (e) { + } + + if (sameFilename) { + try { await uploadsDir.removeEntry(existingFileName); } catch {} + const finalFH = await uploadsDir.getFileHandle(newFileName, { create: true }); + const finalWritable = await finalFH.createWritable(); + await finalWritable.write(newFile); + await finalWritable.close(); + try { await uploadsDir.removeEntry(stagingName); } catch {} + } else if (existingFileName) { + try { await uploadsDir.removeEntry(existingFileName); } catch {} + } + + const updateEntry = { + collectionName, + uri: collectionUri, + file: newFilePath, + date: new Date().toLocaleString(undefined, { timeZoneName: 'short' }), + 
selectedRepo, + userEmail: expectedEmail, + type: 'update', + }; + + const updatedJson = { + ...jsonData, + file: newFilePath, + uploads: [...(jsonData.uploads ?? []), updateEntry], + }; + + const jsonFH = await tempDirectory.getFileHandle(file.name); + await writeToFileHandle(jsonFH, JSON.stringify(updatedJson)); + + // Sync Redux panel state so PanelSaver doesn't overwrite with stale data + if (isPanelOpen(file.id)) { + store.dispatch(panelsActions.updateOne({ + id: file.id, + changes: { + file: newFilePath, + uploads: updatedJson.uploads, + } + })) + } + + showNotification({ + title: "File updated", + message: `${newFileName} uploaded successfully to ${collectionName}.`, + color: "green", + }); + resolve("File updated successfully."); - return; + } catch (err) { + showErrorNotification("Failed to update file", err.message); + resolve("Failed to update file: " + err.message); + } + }; + + input.click(); + }); + } + + const storedToken = getStoredToken(); + if (storedToken) { + try { + const loginResult = await CheckLogin(selectedRepo, storedToken); + if (loginResult.valid) { + const actualEmail = loginResult.profile?.email || ''; + if (!expectedEmail || actualEmail.toLowerCase() === expectedEmail.toLowerCase()) { + return await performUpdate(storedToken); } - } catch (e) { - document.body.removeChild(input); - showErrorNotification("Failed to update file", e.message); - reject("Failed to update file in uploads subdirectory: " + e.message); } - }; - - input.oncancel = () => { - document.body.removeChild(input); - showNotification({ - title: "File update cancelled", - message: "The file update was cancelled.", - }); - resolve("File update cancelled."); - }; - - input.click(); + } catch {} + } + + return new Promise((resolve) => { + store.dispatch(openUnifiedModal({ + modalType: MODAL_TYPES.COLLECTION_BROWSER, + allowedModals: [ + MODAL_TYPES.SBH_CREDENTIAL_CHECK, + MODAL_TYPES.COLLECTION_BROWSER, + MODAL_TYPES.SBH_LOGIN, + MODAL_TYPES.CREATE_COLLECTION, + ], + 
props: { + selectedRepo, + expectedEmail, + skipRepositorySelection: true, + silentCredentialCheck: true, + multiSelect: false, + rootOnly: true, + }, + callback: async (result) => { + if (!result?.completed) { + showNotification({ title: "Update cancelled", message: "Authentication was cancelled." }); + return resolve("Update cancelled."); + } + + const authToken = result.authToken; + if (!authToken) { + showErrorNotification("Authentication failed", "Could not obtain a valid auth token."); + return resolve("Authentication failed."); + } + + resolve(await performUpdate(authToken)); + }, + })); }); } }, diff --git a/frontend/src/components/activities/explorer/ImportFile.jsx b/frontend/src/components/activities/explorer/ImportFile.jsx index 7529b02..fa536c3 100644 --- a/frontend/src/components/activities/explorer/ImportFile.jsx +++ b/frontend/src/components/activities/explorer/ImportFile.jsx @@ -125,7 +125,9 @@ export default function ImportFile({ onSelect, text, useSubdirectory = false }) collectionName: collection.name || collection.displayId, uri: collection.uri, file: filePath, - date: new Date().toLocaleString(undefined, { timeZoneName: 'short' }) + date: new Date().toLocaleString(undefined, { timeZoneName: 'short' }), + selectedRepo: result.sbh_credential_check?.selectedRepo, + userEmail: result.sbh_credential_check?.userInfo?.email }; await createWorkflowJSON(availableBaseName, useSubdirectory, filePath, uploadEntry); @@ -148,7 +150,7 @@ export default function ImportFile({ onSelect, text, useSubdirectory = false }) onSelect?.(fileMetadata) } } catch (err) { - console.error("File selection canceled or failed", err) + console.warn("File selection canceled or failed", err) } } diff --git a/frontend/src/components/panels/resources-editor/ResourcesWizard.jsx b/frontend/src/components/panels/resources-editor/ResourcesWizard.jsx index 9c66941..46932ea 100644 --- a/frontend/src/components/panels/resources-editor/ResourcesWizard.jsx +++ 
b/frontend/src/components/panels/resources-editor/ResourcesWizard.jsx @@ -27,22 +27,30 @@ export default function ResourcesWizard() { {(uploads?.length ?? 0) === 0 ? (
No uploads yet.
) : ( - uploads.map((upload, idx) => ( -
-
- Collection Name:{upload.collectionName} + [...uploads].reverse().map((upload, idx) => { + const isNewest = idx === 0; + return ( +
+
+ + {isNewest ? 'Uploaded' : 'Upload of Older Version'} + +
+
+ Collection Name: {upload.collectionName} +
+
+ Collection URL: {upload.uri} +
+
+ Date Uploaded: {upload.date} +
+
+ File: {upload.file} +
-
- Collection URL:{upload.uri} -
-
- Date Uploaded:{upload.date} -
-
- File:{upload.file} -
-
- )) + ); + }) )} From 492b1156ecc407d90971be6be81c55c58980acec Mon Sep 17 00:00:00 2001 From: Kerem Gurkan Date: Tue, 24 Feb 2026 01:46:51 -0700 Subject: [PATCH 3/9] updates API call --- backend/requirements.txt | 4 ++-- backend/sbs_server/app/views.py | 5 ++++- frontend/src/API.js | 6 ++---- .../src/modules/unified_modal/CollectionBrowserModal.jsx | 2 +- 4 files changed, 9 insertions(+), 8 deletions(-) diff --git a/backend/requirements.txt b/backend/requirements.txt index 7c4b70c..61e7e7a 100644 --- a/backend/requirements.txt +++ b/backend/requirements.txt @@ -5,8 +5,8 @@ numpy==1.26.1 pandas==1.3.3 pyflapjack @ git+https://github.com/flapjacksynbio/pyFlapjack@xdc_features2 excel2flapjack==1.0.11 -tricahue==0.0b6 -# tricahue @ git+https://github.com/MyersResearchGroup/Tricahue/@dev +# tricahue==0.0b6 +tricahue @ git+https://github.com/MyersResearchGroup/Tricahue/@dev sbol2build flask_swagger_ui xlsxwriter diff --git a/backend/sbs_server/app/views.py b/backend/sbs_server/app/views.py index 313d145..842adf3 100644 --- a/backend/sbs_server/app/views.py +++ b/backend/sbs_server/app/views.py @@ -65,6 +65,10 @@ def sbh_fj_upload(files): if params_file.filename == '': return 'No selected Params file', 400 params_from_request = json.loads(params_file.read()) + if not (params_from_request['sbh_url'].startswith('http://') or params_from_request['sbh_url'].startswith('https://')): + params_from_request['sbh_url'] = 'https://' + params_from_request['sbh_url'] + params_from_request['sbh_user'] = None + params_from_request['sbh_pass'] = None required_params = ['sbh_url', 'sbh_token', 'sbh_user', 'sbh_pass', 'fj_url', 'fj_token', 'fj_user', 'fj_pass', 'sbh_collec', 'sbh_collec_desc', @@ -114,7 +118,6 @@ def sbh_fj_upload(files): fj_token = params_from_request['fj_token'], sbh_token = params_from_request['sbh_token'], homespace = "https://example.org/", - attachments = attachments ) try: diff --git a/frontend/src/API.js b/frontend/src/API.js index 33add72..7126a9d 100644 --- 
a/frontend/src/API.js +++ b/frontend/src/API.js @@ -37,7 +37,7 @@ export async function upload_resource( sbh_collec, sbh_collec_desc, workingDirectory = null, - sbh_overwrite = false + sbh_overwrite = 0 ) { try { let data = new FormData(); @@ -57,15 +57,13 @@ export async function upload_resource( const paramsObj = { sbh_url: sbh_url, sbh_token: sbh_token, - sbh_user: null, - sbh_pass: null, fj_url: "charmmefj-api.synbiohub.org", fj_token: null, fj_user: null, fj_pass: null, sbh_collec: sbh_collec, sbh_collec_desc: sbh_collec_desc, - sbh_overwrite: sbh_overwrite ? 2 : 0, + sbh_overwrite: sbh_overwrite, fj_overwrite: 1, version: "", attachments: {} diff --git a/frontend/src/modules/unified_modal/CollectionBrowserModal.jsx b/frontend/src/modules/unified_modal/CollectionBrowserModal.jsx index d0e9b6e..16dc459 100644 --- a/frontend/src/modules/unified_modal/CollectionBrowserModal.jsx +++ b/frontend/src/modules/unified_modal/CollectionBrowserModal.jsx @@ -316,7 +316,7 @@ export default function CollectionBrowserModal({ completeWorkflow({ collections: Array.from(selectedCollections.values()), count: selectedCollections.size, - sbh_overwrite: overwrite, + sbh_overwrite: overwrite ? 
2 : 0, }); }, [selectedCollections, overwrite, completeWorkflow]); From bb8b0cef1f24cf5ed81238c91a11b0a448093ac9 Mon Sep 17 00:00:00 2001 From: supersonik12 Date: Tue, 24 Feb 2026 15:13:45 -0700 Subject: [PATCH 4/9] quick attachment changes --- backend/sbs_server/app/views.py | 25 +++++------------------- backend/sbs_server/files/sbs_params.json | 18 +++++++++++------ 2 files changed, 17 insertions(+), 26 deletions(-) diff --git a/backend/sbs_server/app/views.py b/backend/sbs_server/app/views.py index 842adf3..2068ebc 100644 --- a/backend/sbs_server/app/views.py +++ b/backend/sbs_server/app/views.py @@ -67,8 +67,7 @@ def sbh_fj_upload(files): params_from_request = json.loads(params_file.read()) if not (params_from_request['sbh_url'].startswith('http://') or params_from_request['sbh_url'].startswith('https://')): params_from_request['sbh_url'] = 'https://' + params_from_request['sbh_url'] - params_from_request['sbh_user'] = None - params_from_request['sbh_pass'] = None + required_params = ['sbh_url', 'sbh_token', 'sbh_user', 'sbh_pass', 'fj_url', 'fj_token', 'fj_user', 'fj_pass', 'sbh_collec', 'sbh_collec_desc', @@ -82,26 +81,12 @@ def sbh_fj_upload(files): return 'No SBH credentials provided', 400 # Attachment files to upload to SBH - attachments = None - if 'Attachments' in files: + if 'Attachments' in files and 'attachments' in params_from_request: attachment_files = files.getlist("Attachments") - attachment_metadata = params_from_request.get('attachments') - - if attachment_metadata is None: - return 'Attachment metadata not provided', 400 - if not isinstance(attachment_metadata, dict): - return 'Attachment metadata must be a JSON object keyed by filename', 400 - - attachments = {} - missing_metadata = [file.filename for file in attachment_files if file.filename not in attachment_metadata] - if missing_metadata: - missing_list = ', '.join(missing_metadata) - return f'Missing attachment metadata for files: {missing_list}', 400 - - for file in attachment_files: - 
attachments[attachment_metadata[file.filename]] = file - + attachments = {params_from_request['attachments'][file.filename] : file for file in attachment_files} print(attachments) + else: + attachments = None # instantiate the XDC class using the params_from_request dictionary xdc = tricahue.XDC(input_excel_path = files['Metadata'], diff --git a/backend/sbs_server/files/sbs_params.json b/backend/sbs_server/files/sbs_params.json index 2a99604..691734e 100644 --- a/backend/sbs_server/files/sbs_params.json +++ b/backend/sbs_server/files/sbs_params.json @@ -1,10 +1,16 @@ { + "attachments": {"ExpNOTs_GFP_trans.xlsx": "LB", "example.jpg": "ExampleMedia"}, + "fj_overwrite": 1, + "fj_pass": null, + "fj_token": null, "fj_url": "charmmefj-api.synbiohub.org", - "fj_token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ0b2tlbl90eXBlIjoicmVmcmVzaCIsImV4cCI6MTc1Mzk4MDI2NywianRpIjoiNTg5MTFmM2IxZTlhNGZlN2FjOTA2YzI2NmIwYzJmMjUiLCJ1c2VyX2lkIjoyfQ.hYVb65i4XinKOMsRgCG2EHrvFcYDx4vvzWg_wImDsDA", + "fj_user": null, + "sbh_collec": "sbs_backend_test", + "sbh_collec_desc": "Uploaded via SBS_server using XDC to SBH", + "sbh_overwrite": 1, + "sbh_pass": null, + "sbh_token": "6a5ef614-d6f2-46de-a551-94616944399f", "sbh_url": "https://synbiohub.org", - "sbh_token": "37ae0d50-ba67-4f66-bb06-79619d8ba7d3", - "sbh_collec": "xdc_sbs_test_uploader_file", - "sbh_collec_desc": "xdc_sbs_test_uploader_file_description", - "fj_overwrite": false, - "sbh_overwrite": false + "sbh_user": null, + "version": "" } \ No newline at end of file From 2a5a4c717d850adc048986a474c4e859ff38809f Mon Sep 17 00:00:00 2001 From: Kerem Gurkan Date: Thu, 5 Mar 2026 20:05:46 -0700 Subject: [PATCH 5/9] Code Quality Fixes --- frontend/src/API.js | 2 ++ frontend/src/commands.js | 8 +++----- .../src/components/activities/explorer/ImportFile.jsx | 7 ++++--- 3 files changed, 9 insertions(+), 8 deletions(-) diff --git a/frontend/src/API.js b/frontend/src/API.js index 7126a9d..e554721 100644 --- a/frontend/src/API.js +++ 
b/frontend/src/API.js @@ -58,6 +58,8 @@ export async function upload_resource( sbh_url: sbh_url, sbh_token: sbh_token, fj_url: "charmmefj-api.synbiohub.org", + sbh_user: null, + sbh_pass: null, fj_token: null, fj_user: null, fj_pass: null, diff --git a/frontend/src/commands.js b/frontend/src/commands.js index bfff053..609e5ce 100644 --- a/frontend/src/commands.js +++ b/frontend/src/commands.js @@ -275,8 +275,7 @@ export default { const newFilePath = `${directory}/uploads/${newFileName}`; - // TODO: Remove once SBS Server implementation works correctly - try{ await upload_resource( + await upload_resource( newFilePath, selectedRepo, authToken, @@ -284,15 +283,14 @@ export default { "", dirHandle, 3 - );} catch (e) { - } + ); if (sameFilename) { - try { await uploadsDir.removeEntry(existingFileName); } catch {} const finalFH = await uploadsDir.getFileHandle(newFileName, { create: true }); const finalWritable = await finalFH.createWritable(); await finalWritable.write(newFile); await finalWritable.close(); + try { await uploadsDir.removeEntry(existingFileName); } catch {} try { await uploadsDir.removeEntry(stagingName); } catch {} } else if (existingFileName) { try { await uploadsDir.removeEntry(existingFileName); } catch {} diff --git a/frontend/src/components/activities/explorer/ImportFile.jsx b/frontend/src/components/activities/explorer/ImportFile.jsx index fa536c3..4225ebb 100644 --- a/frontend/src/components/activities/explorer/ImportFile.jsx +++ b/frontend/src/components/activities/explorer/ImportFile.jsx @@ -16,10 +16,10 @@ export const importedFile = createContext() const WORKFLOW_SUBDIRS = ['resources', 'strains', 'sampleDesigns', 'experimentalSetups'] -async function getAvailableBaseName(objectTypeDir, uploadsDir, baseName, ext) { +async function getAvailableBaseName(objectTypeDir, uploadsDir, baseName, ext, maxAttempts = 1000) { let candidate = baseName; let counter = 1; - while (true) { + for (let attempts = 0; attempts < maxAttempts; attempts++) { let 
jsonExists = false; let fileExists = false; try { await objectTypeDir.getFileHandle(`${candidate}.json`); jsonExists = true; } catch {} @@ -28,6 +28,7 @@ async function getAvailableBaseName(objectTypeDir, uploadsDir, baseName, ext) { candidate = `${baseName} (${counter})`; counter++; } + throw new Error(`Unable to find available base name after ${maxAttempts} attempts.`); } export default function ImportFile({ onSelect, text, useSubdirectory = false }) { @@ -132,7 +133,7 @@ export default function ImportFile({ onSelect, text, useSubdirectory = false }) await createWorkflowJSON(availableBaseName, useSubdirectory, filePath, uploadEntry); - upload_resource( + await upload_resource( filePath, result.sbh_credential_check?.selectedRepo, result.authToken, From a8f62bbe07ed96695ce2df2b8a60607f53e9a168 Mon Sep 17 00:00:00 2001 From: Kerem Gurkan <157067336+Kerem-G@users.noreply.github.com> Date: Thu, 5 Mar 2026 20:11:25 -0700 Subject: [PATCH 6/9] Apply suggestions from code review Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- backend/sbs_server/app/views.py | 15 ++++++++++++--- backend/sbs_server/files/sbs_params.json | 2 +- 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/backend/sbs_server/app/views.py b/backend/sbs_server/app/views.py index 2068ebc..e61a119 100644 --- a/backend/sbs_server/app/views.py +++ b/backend/sbs_server/app/views.py @@ -65,8 +65,9 @@ def sbh_fj_upload(files): if params_file.filename == '': return 'No selected Params file', 400 params_from_request = json.loads(params_file.read()) - if not (params_from_request['sbh_url'].startswith('http://') or params_from_request['sbh_url'].startswith('https://')): - params_from_request['sbh_url'] = 'https://' + params_from_request['sbh_url'] + sbh_url = params_from_request.get('sbh_url') + if sbh_url and not (sbh_url.startswith('http://') or sbh_url.startswith('https://')): + params_from_request['sbh_url'] = 'https://' + sbh_url required_params = ['sbh_url', 'sbh_token', 
'sbh_user', 'sbh_pass', 'fj_url', 'fj_token', 'fj_user', 'fj_pass', @@ -83,7 +84,14 @@ def sbh_fj_upload(files): # Attachment files to upload to SBH if 'Attachments' in files and 'attachments' in params_from_request: attachment_files = files.getlist("Attachments") - attachments = {params_from_request['attachments'][file.filename] : file for file in attachment_files} + attachments = {} + for file in attachment_files: + if file.filename not in params_from_request['attachments']: + return ( + f"Attachment metadata for file '{file.filename}' not found in request", + 400, + ) + attachments[params_from_request['attachments'][file.filename]] = file print(attachments) else: attachments = None @@ -102,6 +110,7 @@ def sbh_fj_upload(files): fj_overwrite = params_from_request['fj_overwrite'], fj_token = params_from_request['fj_token'], sbh_token = params_from_request['sbh_token'], + attachments = attachments, homespace = "https://example.org/", ) diff --git a/backend/sbs_server/files/sbs_params.json b/backend/sbs_server/files/sbs_params.json index 691734e..b5dc328 100644 --- a/backend/sbs_server/files/sbs_params.json +++ b/backend/sbs_server/files/sbs_params.json @@ -9,7 +9,7 @@ "sbh_collec_desc": "Uploaded via SBS_server using XDC to SBH", "sbh_overwrite": 1, "sbh_pass": null, - "sbh_token": "6a5ef614-d6f2-46de-a551-94616944399f", + "sbh_token": "REPLACE_WITH_SYN_BIO_HUB_TOKEN", "sbh_url": "https://synbiohub.org", "sbh_user": null, "version": "" From 925c70316c187a4535fd5544fc9764c339d80f65 Mon Sep 17 00:00:00 2001 From: Kerem Gurkan Date: Thu, 5 Mar 2026 21:06:39 -0700 Subject: [PATCH 7/9] Frontend now uses backend results --- backend/sbs_server/app/views.py | 4 ---- frontend/src/API.js | 1 - frontend/src/commands.js | 1 + .../activities/explorer/ImportFile.jsx | 24 ++++++++++--------- .../resources-editor/ResourcesWizard.jsx | 2 +- 5 files changed, 15 insertions(+), 17 deletions(-) diff --git a/backend/sbs_server/app/views.py b/backend/sbs_server/app/views.py index 
e61a119..2bcdad7 100644 --- a/backend/sbs_server/app/views.py +++ b/backend/sbs_server/app/views.py @@ -7,7 +7,6 @@ import os import json import xml.etree.ElementTree as ET - import tricahue import sbol2 as sb2 import pudu @@ -39,9 +38,7 @@ def upload_experiment(): Helper function to upload to SynBioHub and Flapjack using XDC/XDE ''' def sbh_fj_upload(files): - if 'Metadata' not in files: - print(request) return 'No file part', 400 metadata_file = files['Metadata'] if metadata_file.filename == '': @@ -92,7 +89,6 @@ def sbh_fj_upload(files): 400, ) attachments[params_from_request['attachments'][file.filename]] = file - print(attachments) else: attachments = None diff --git a/frontend/src/API.js b/frontend/src/API.js index e554721..9dff254 100644 --- a/frontend/src/API.js +++ b/frontend/src/API.js @@ -88,7 +88,6 @@ export async function upload_resource( return response.data; } catch (error) { console.error("Upload Resource error:", error); - showErrorNotification('Error', error.message); throw error; } } diff --git a/frontend/src/commands.js b/frontend/src/commands.js index 609e5ce..8390267 100644 --- a/frontend/src/commands.js +++ b/frontend/src/commands.js @@ -334,6 +334,7 @@ export default { resolve("File updated successfully."); } catch (err) { + try { await uploadsDir.removeEntry(stagingName); } catch {} showErrorNotification("Failed to update file", err.message); resolve("Failed to update file: " + err.message); } diff --git a/frontend/src/components/activities/explorer/ImportFile.jsx b/frontend/src/components/activities/explorer/ImportFile.jsx index 4225ebb..f00bcb1 100644 --- a/frontend/src/components/activities/explorer/ImportFile.jsx +++ b/frontend/src/components/activities/explorer/ImportFile.jsx @@ -122,26 +122,28 @@ export default function ImportFile({ onSelect, text, useSubdirectory = false }) await saveFileToUploads(fileMetadata.fileobj, useSubdirectory, actualFileName); + const response = await upload_resource( + filePath, + 
result.sbh_credential_check?.selectedRepo, + result.authToken, + collection.displayId, + collection.description, + dirName, + result.sbh_overwrite + ); + const uploadEntry = { collectionName: collection.name || collection.displayId, - uri: collection.uri, + uri: response.sbh_url, file: filePath, date: new Date().toLocaleString(undefined, { timeZoneName: 'short' }), selectedRepo: result.sbh_credential_check?.selectedRepo, userEmail: result.sbh_credential_check?.userInfo?.email }; - await createWorkflowJSON(availableBaseName, useSubdirectory, filePath, uploadEntry); + console.log(response) - await upload_resource( - filePath, - result.sbh_credential_check?.selectedRepo, - result.authToken, - collection.displayId, - collection.description, - dirName, - result.sbh_overwrite - ); + await createWorkflowJSON(availableBaseName, useSubdirectory, filePath, uploadEntry); } catch (err) { console.error("Error saving file or creating workflow:", err); showErrorNotification("Import Failed", err.message); diff --git a/frontend/src/components/panels/resources-editor/ResourcesWizard.jsx b/frontend/src/components/panels/resources-editor/ResourcesWizard.jsx index 46932ea..14db475 100644 --- a/frontend/src/components/panels/resources-editor/ResourcesWizard.jsx +++ b/frontend/src/components/panels/resources-editor/ResourcesWizard.jsx @@ -40,7 +40,7 @@ export default function ResourcesWizard() { Collection Name: {upload.collectionName}
- Collection URL: {upload.uri} + Collection URL: {upload.uri}
Date Uploaded: {upload.date} From 753e8c1ede2e451b20ea2ca7f59ac09b246e90b0 Mon Sep 17 00:00:00 2001 From: Kerem Gurkan Date: Thu, 5 Mar 2026 21:14:19 -0700 Subject: [PATCH 8/9] Minor bug-fix: update used pseudo link --- frontend/src/commands.js | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/frontend/src/commands.js b/frontend/src/commands.js index 8390267..4f95d79 100644 --- a/frontend/src/commands.js +++ b/frontend/src/commands.js @@ -215,7 +215,6 @@ export default { const expectedEmail = lastUpload.userEmail || null; const collectionDisplayId = lastUpload.uri.split('/').slice(-2, -1)[0] || lastUpload.collectionName; const collectionName = lastUpload.collectionName; - const collectionUri = lastUpload.uri; function getStoredToken() { try { @@ -275,7 +274,7 @@ export default { const newFilePath = `${directory}/uploads/${newFileName}`; - await upload_resource( + const response = await upload_resource( newFilePath, selectedRepo, authToken, @@ -298,7 +297,7 @@ export default { const updateEntry = { collectionName, - uri: collectionUri, + uri: response.sbh_url, file: newFilePath, date: new Date().toLocaleString(undefined, { timeZoneName: 'short' }), selectedRepo, From 9ed3914e814e704d479749d436c97f38e5739e2e Mon Sep 17 00:00:00 2001 From: Kerem Gurkan Date: Mon, 9 Mar 2026 17:17:49 -0600 Subject: [PATCH 9/9] Codex changes --- frontend/src/commands.js | 4 ++-- frontend/src/components/activities/explorer/ImportFile.jsx | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/frontend/src/commands.js b/frontend/src/commands.js index 4f95d79..fbef62a 100644 --- a/frontend/src/commands.js +++ b/frontend/src/commands.js @@ -273,9 +273,10 @@ export default { await writable.close(); const newFilePath = `${directory}/uploads/${newFileName}`; + const uploadPath = sameFilename ? 
`${directory}/uploads/${stagingName}` : newFilePath; const response = await upload_resource( - newFilePath, + uploadPath, selectedRepo, authToken, collectionDisplayId, @@ -289,7 +290,6 @@ export default { const finalWritable = await finalFH.createWritable(); await finalWritable.write(newFile); await finalWritable.close(); - try { await uploadsDir.removeEntry(existingFileName); } catch {} try { await uploadsDir.removeEntry(stagingName); } catch {} } else if (existingFileName) { try { await uploadsDir.removeEntry(existingFileName); } catch {} diff --git a/frontend/src/components/activities/explorer/ImportFile.jsx b/frontend/src/components/activities/explorer/ImportFile.jsx index f00bcb1..369baaf 100644 --- a/frontend/src/components/activities/explorer/ImportFile.jsx +++ b/frontend/src/components/activities/explorer/ImportFile.jsx @@ -45,7 +45,7 @@ export default function ImportFile({ onSelect, text, useSubdirectory = false }) fileobj: file, name: file.name, fileHandle: fileHandle, - directoryHandle: null, + directoryHandle: useSubdirectory ? await dirName.getDirectoryHandle(useSubdirectory, { create: true }) : null, objectType: await classifyFile(fileHandle) }; }