diff --git a/README.md b/README.md index b9183f8..0da6e4a 100644 --- a/README.md +++ b/README.md @@ -29,7 +29,9 @@ npm run dev ### Backend -The backend consists of a primary Express and Socket.io server, and 3 Cloudflare Workers microservices for the D1 database, R2 storage, and Workers AI. The D1 database also contains a [service binding](https://developers.cloudflare.com/workers/runtime-apis/bindings/service-bindings/) to the R2 storage worker. +The backend consists of a primary Express and Socket.io server, and 3 Cloudflare Workers microservices for the D1 database, R2 storage, and Workers AI. The D1 database also contains a [service binding](https://developers.cloudflare.com/workers/runtime-apis/bindings/service-bindings/) to the R2 storage worker. Each open sandbox instantiates a secure Linux sandbox on E2B, which is used for the terminal and live preview. + +You will need to make an account on [E2B](https://e2b.dev/) to get an API key. #### Socket.io server @@ -181,3 +183,4 @@ It should be in the form `category(scope or module): message` in your commit mes - [Express](https://expressjs.com/) - [Socket.io](https://socket.io/) - [Drizzle ORM](https://orm.drizzle.team/) +- [E2B](https://e2b.dev/) diff --git a/backend/database/src/index.ts b/backend/database/src/index.ts index 0d2721f..4eb9180 100644 --- a/backend/database/src/index.ts +++ b/backend/database/src/index.ts @@ -110,8 +110,13 @@ export default { const body = await request.json() const { type, name, userId, visibility } = initSchema.parse(body) - const allSandboxes = await db.select().from(sandbox).all() - if (allSandboxes.length >= 8) { + const userSandboxes = await db + .select() + .from(sandbox) + .where(eq(sandbox.userId, userId)) + .all() + + if (userSandboxes.length >= 8) { return new Response("You reached the maximum # of sandboxes.", { status: 400, }) diff --git a/backend/server/.env.example b/backend/server/.env.example index 488783c..594efc5 100644 --- a/backend/server/.env.example +++ 
b/backend/server/.env.example @@ -5,3 +5,4 @@ PORT=4000 WORKERS_KEY= DATABASE_WORKER_URL= STORAGE_WORKER_URL= +E2B_API_KEY= \ No newline at end of file diff --git a/backend/server/package-lock.json b/backend/server/package-lock.json index e0262a2..8c7b2a0 100644 --- a/backend/server/package-lock.json +++ b/backend/server/package-lock.json @@ -12,8 +12,8 @@ "concurrently": "^8.2.2", "cors": "^2.8.5", "dotenv": "^16.4.5", + "e2b": "^0.16.1", "express": "^4.19.2", - "node-pty": "^1.0.0", "rate-limiter-flexible": "^5.0.3", "socket.io": "^4.7.5", "zod": "^3.22.4" @@ -369,6 +369,19 @@ "node": ">=8" } }, + "node_modules/bufferutil": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/bufferutil/-/bufferutil-4.0.8.tgz", + "integrity": "sha512-4T53u4PdgsXqKaIctwF8ifXlRTTmEPJ8iEPWFdGZvcf7sbwYo6FKFEX9eNNAnzFZ7EzJAQ3CJeOtCRA4rDp7Pw==", + "hasInstallScript": true, + "optional": true, + "dependencies": { + "node-gyp-build": "^4.3.0" + }, + "engines": { + "node": ">=6.14.2" + } + }, "node_modules/bytes": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", @@ -662,6 +675,59 @@ "url": "https://dotenvx.com" } }, + "node_modules/e2b": { + "version": "0.16.1", + "resolved": "https://registry.npmjs.org/e2b/-/e2b-0.16.1.tgz", + "integrity": "sha512-2L1R/REEB+EezD4Q4MmcXXNATjvCYov2lv/69+PY6V95+wl1PZblIMTYAe7USxX6P6sqANxNs+kXqZr6RvXkSw==", + "dependencies": { + "isomorphic-ws": "^5.0.0", + "normalize-path": "^3.0.0", + "openapi-typescript-fetch": "^1.1.3", + "path-browserify": "^1.0.1", + "platform": "^1.3.6", + "ws": "^8.15.1" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "bufferutil": "^4.0.8", + "utf-8-validate": "^6.0.3" + } + }, + "node_modules/e2b/node_modules/utf-8-validate": { + "version": "6.0.4", + "resolved": "https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-6.0.4.tgz", + "integrity": "sha512-xu9GQDeFp+eZ6LnCywXN/zBancWvOpUMzgjLPSjy4BRHSmTelvn2E0DG0o1sTiw5hkCKBHo8rwSKncfRfv2EEQ==", + 
"hasInstallScript": true, + "optional": true, + "dependencies": { + "node-gyp-build": "^4.3.0" + }, + "engines": { + "node": ">=6.14.2" + } + }, + "node_modules/e2b/node_modules/ws": { + "version": "8.17.0", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.17.0.tgz", + "integrity": "sha512-uJq6108EgZMAl20KagGkzCKfMEjxmKvZHG7Tlq0Z6nOky7YF7aq4mOx6xK8TJ/i1LeK4Qus7INktacctDgY8Ow==", + "engines": { + "node": ">=10.0.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": ">=5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, "node_modules/ee-first": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", @@ -1082,6 +1148,14 @@ "node": ">=0.12.0" } }, + "node_modules/isomorphic-ws": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/isomorphic-ws/-/isomorphic-ws-5.0.0.tgz", + "integrity": "sha512-muId7Zzn9ywDsyXgTIafTry2sV3nySZeUDe6YedVd1Hvuuep5AsIlqK+XefWpYTyJG5e503F2xIuT2lcU6rCSw==", + "peerDependencies": { + "ws": "*" + } + }, "node_modules/lodash": { "version": "4.17.21", "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", @@ -1173,11 +1247,6 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" }, - "node_modules/nan": { - "version": "2.19.0", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.19.0.tgz", - "integrity": "sha512-nO1xXxfh/RWNxfd/XPfbIfFk5vgLsAxUR9y5O0cHMJu/AW9U95JLXqthYHjEp+8gQ5p96K9jUp8nbVOxCdRbtw==" - }, "node_modules/negotiator": { "version": "0.6.3", "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", @@ -1186,13 +1255,15 @@ "node": ">= 0.6" } }, - "node_modules/node-pty": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/node-pty/-/node-pty-1.0.0.tgz", - "integrity": 
"sha512-wtBMWWS7dFZm/VgqElrTvtfMq4GzJ6+edFI0Y0zyzygUSZMgZdraDUMUhCIvkjhJjme15qWmbyJbtAx4ot4uZA==", - "hasInstallScript": true, - "dependencies": { - "nan": "^2.17.0" + "node_modules/node-gyp-build": { + "version": "4.8.1", + "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.8.1.tgz", + "integrity": "sha512-OSs33Z9yWr148JZcbZd5WiAXhh/n9z8TxQcdMhIOlpN9AhWpLfvVFO73+m77bBABQMaY9XSvIa+qk0jlI7Gcaw==", + "optional": true, + "bin": { + "node-gyp-build": "bin.js", + "node-gyp-build-optional": "optional.js", + "node-gyp-build-test": "build-test.js" } }, "node_modules/nodemon": { @@ -1265,7 +1336,6 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", - "dev": true, "engines": { "node": ">=0.10.0" } @@ -1297,6 +1367,15 @@ "node": ">= 0.8" } }, + "node_modules/openapi-typescript-fetch": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/openapi-typescript-fetch/-/openapi-typescript-fetch-1.1.3.tgz", + "integrity": "sha512-smLZPck4OkKMNExcw8jMgrMOGgVGx2N/s6DbKL2ftNl77g5HfoGpZGFy79RBzU/EkaO0OZpwBnslfdBfh7ZcWg==", + "engines": { + "node": ">= 12.0.0", + "npm": ">= 7.0.0" + } + }, "node_modules/parseurl": { "version": "1.3.3", "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", @@ -1305,6 +1384,11 @@ "node": ">= 0.8" } }, + "node_modules/path-browserify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-browserify/-/path-browserify-1.0.1.tgz", + "integrity": "sha512-b7uo2UCUOYZcnF/3ID0lulOJi/bafxa1xPe7ZPsammBSpjSWQkjNxlt635YGS2MiR9GjvuXCtz2emr3jbsz98g==" + }, "node_modules/path-to-regexp": { "version": "0.1.7", "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", @@ -1322,6 +1406,11 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, + "node_modules/platform": { + "version": "1.3.6", + "resolved": 
"https://registry.npmjs.org/platform/-/platform-1.3.6.tgz", + "integrity": "sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg==" + }, "node_modules/proxy-addr": { "version": "2.0.7", "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", @@ -1835,6 +1924,20 @@ "node": ">= 0.8" } }, + "node_modules/utf-8-validate": { + "version": "5.0.10", + "resolved": "https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-5.0.10.tgz", + "integrity": "sha512-Z6czzLq4u8fPOyx7TU6X3dvUZVvoJmxSQ+IcrlmagKhilxlhZgxPK6C5Jqbkw1IDUmFTM+cz9QDnnLTwDz/2gQ==", + "hasInstallScript": true, + "optional": true, + "peer": true, + "dependencies": { + "node-gyp-build": "^4.3.0" + }, + "engines": { + "node": ">=6.14.2" + } + }, "node_modules/utils-merge": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", diff --git a/backend/server/package.json b/backend/server/package.json index e0ac9c5..3b89d67 100644 --- a/backend/server/package.json +++ b/backend/server/package.json @@ -14,8 +14,8 @@ "concurrently": "^8.2.2", "cors": "^2.8.5", "dotenv": "^16.4.5", + "e2b": "^0.16.1", "express": "^4.19.2", - "node-pty": "^1.0.0", "rate-limiter-flexible": "^5.0.3", "socket.io": "^4.7.5", "zod": "^3.22.4" diff --git a/backend/server/src/fileoperations.ts b/backend/server/src/fileoperations.ts new file mode 100644 index 0000000..141363d --- /dev/null +++ b/backend/server/src/fileoperations.ts @@ -0,0 +1,177 @@ +import * as dotenv from "dotenv"; +import { + R2FileBody, + R2Files, + Sandbox, + TFile, + TFileData, + TFolder, +} from "./types"; + +dotenv.config(); + +export const getSandboxFiles = async (id: string) => { + const res = await fetch( + `${process.env.STORAGE_WORKER_URL}/api?sandboxId=${id}`, + { + headers: { + Authorization: `${process.env.WORKERS_KEY}`, + }, + } + ); + const data: R2Files = await res.json(); + + const paths = data.objects.map((obj) => obj.key); + const processedFiles = 
await processFiles(paths, id); + return processedFiles; +}; + +export const getFolder = async (folderId: string) => { + const res = await fetch( + `${process.env.STORAGE_WORKER_URL}/api?folderId=${folderId}`, + { + headers: { + Authorization: `${process.env.WORKERS_KEY}`, + }, + } + ); + const data: R2Files = await res.json(); + + return data.objects.map((obj) => obj.key); +}; + +const processFiles = async (paths: string[], id: string) => { + const root: TFolder = { id: "/", type: "folder", name: "/", children: [] }; + const fileData: TFileData[] = []; + + paths.forEach((path) => { + const allParts = path.split("/"); + if (allParts[1] !== id) { + return; + } + + const parts = allParts.slice(2); + let current: TFolder = root; + + for (let i = 0; i < parts.length; i++) { + const part = parts[i]; + const isFile = i === parts.length - 1 && part.includes("."); + const existing = current.children.find((child) => child.name === part); + + if (existing) { + if (!isFile) { + current = existing as TFolder; + } + } else { + if (isFile) { + const file: TFile = { id: path, type: "file", name: part }; + current.children.push(file); + fileData.push({ id: path, data: "" }); + } else { + const folder: TFolder = { + // id: path, // todo: wrong id. 
for example, folder "src" ID is: projects/a7vgttfqbgy403ratp7du3ln/src/App.css + id: `projects/${id}/${parts.slice(0, i + 1).join("/")}`, + type: "folder", + name: part, + children: [], + }; + current.children.push(folder); + current = folder; + } + } + } + }); + + await Promise.all( + fileData.map(async (file) => { + const data = await fetchFileContent(file.id); + file.data = data; + }) + ); + + return { + files: root.children, + fileData, + }; +}; + +const fetchFileContent = async (fileId: string): Promise => { + try { + const fileRes = await fetch( + `${process.env.STORAGE_WORKER_URL}/api?fileId=${fileId}`, + { + headers: { + Authorization: `${process.env.WORKERS_KEY}`, + }, + } + ); + return await fileRes.text(); + } catch (error) { + console.error("ERROR fetching file:", error); + return ""; + } +}; + +export const createFile = async (fileId: string) => { + const res = await fetch(`${process.env.STORAGE_WORKER_URL}/api`, { + method: "POST", + headers: { + "Content-Type": "application/json", + Authorization: `${process.env.WORKERS_KEY}`, + }, + body: JSON.stringify({ fileId }), + }); + return res.ok; +}; + +export const renameFile = async ( + fileId: string, + newFileId: string, + data: string +) => { + const res = await fetch(`${process.env.STORAGE_WORKER_URL}/api/rename`, { + method: "POST", + headers: { + "Content-Type": "application/json", + Authorization: `${process.env.WORKERS_KEY}`, + }, + body: JSON.stringify({ fileId, newFileId, data }), + }); + return res.ok; +}; + +export const saveFile = async (fileId: string, data: string) => { + const res = await fetch(`${process.env.STORAGE_WORKER_URL}/api/save`, { + method: "POST", + headers: { + "Content-Type": "application/json", + Authorization: `${process.env.WORKERS_KEY}`, + }, + body: JSON.stringify({ fileId, data }), + }); + return res.ok; +}; + +export const deleteFile = async (fileId: string) => { + const res = await fetch(`${process.env.STORAGE_WORKER_URL}/api`, { + method: "DELETE", + headers: { + 
"Content-Type": "application/json", + Authorization: `${process.env.WORKERS_KEY}`, + }, + body: JSON.stringify({ fileId }), + }); + return res.ok; +}; + +export const getProjectSize = async (id: string) => { + const res = await fetch( + `${process.env.STORAGE_WORKER_URL}/api/size?sandboxId=${id}`, + { + headers: { + Authorization: `${process.env.WORKERS_KEY}`, + }, + } + ); + return (await res.json()).size; +}; \ No newline at end of file diff --git a/backend/server/src/index.ts b/backend/server/src/index.ts index 92c45df..13ce955 100644 --- a/backend/server/src/index.ts +++ b/backend/server/src/index.ts @@ -1,4 +1,3 @@ -import fs from "fs"; import os from "os"; import path from "path"; import cors from "cors"; @@ -17,8 +16,9 @@ import { getSandboxFiles, renameFile, saveFile, -} from "./utils"; -import { IDisposable, IPty, spawn } from "node-pty"; +} from "./fileoperations"; +import { LockManager } from "./utils"; +import { Sandbox, Terminal, FilesystemManager } from "e2b"; import { MAX_BODY_SIZE, createFileRL, @@ -43,11 +43,21 @@ const io = new Server(httpServer, { let inactivityTimeout: NodeJS.Timeout | null = null; let isOwnerConnected = false; -const terminals: { - [id: string]: { terminal: IPty; onData: IDisposable; onExit: IDisposable }; -} = {}; +const containers: Record = {}; +const connections: Record = {}; +const terminals: Record = {}; -const dirName = path.join(__dirname, ".."); +const dirName = "/home/user"; + +const moveFile = async ( + filesystem: FilesystemManager, + filePath: string, + newFilePath: string +) => { + const fileContents = await filesystem.readBytes(filePath); + await filesystem.writeBytes(newFilePath, fileContents); + await filesystem.remove(filePath); +}; io.use(async (socket, next) => { const handshakeSchema = z.object({ @@ -100,364 +110,490 @@ io.use(async (socket, next) => { next(); }); +const lockManager = new LockManager(); + io.on("connection", async (socket) => { - if (inactivityTimeout) clearTimeout(inactivityTimeout); + try 
{ + if (inactivityTimeout) clearTimeout(inactivityTimeout); - const data = socket.data as { - userId: string; - sandboxId: string; - isOwner: boolean; - }; - - if (data.isOwner) { - isOwnerConnected = true; - } else { - if (!isOwnerConnected) { - socket.emit("disableAccess", "The sandbox owner is not connected."); - return; - } - } - - const sandboxFiles = await getSandboxFiles(data.sandboxId); - sandboxFiles.fileData.forEach((file) => { - const filePath = path.join(dirName, file.id); - fs.mkdirSync(path.dirname(filePath), { recursive: true }); - fs.writeFile(filePath, file.data, function (err) { - if (err) throw err; - }); - }); - - socket.emit("loaded", sandboxFiles.files); - - socket.on("getFile", (fileId: string, callback) => { - const file = sandboxFiles.fileData.find((f) => f.id === fileId); - if (!file) return; - - callback(file.data); - }); - - socket.on("getFolder", async (folderId: string, callback) => { - const files = await getFolder(folderId); - callback(files); - }); - - // todo: send diffs + debounce for efficiency - socket.on("saveFile", async (fileId: string, body: string) => { - try { - await saveFileRL.consume(data.userId, 1); - - if (Buffer.byteLength(body, "utf-8") > MAX_BODY_SIZE) { - socket.emit( - "rateLimit", - "Rate limited: file size too large. Please reduce the file size." - ); - return; - } - - const file = sandboxFiles.fileData.find((f) => f.id === fileId); - if (!file) return; - file.data = body; - - fs.writeFile(path.join(dirName, file.id), body, function (err) { - if (err) throw err; - }); - await saveFile(fileId, body); - } catch (e) { - io.emit("rateLimit", "Rate limited: file saving. 
Please slow down."); - } - }); - - socket.on("moveFile", async (fileId: string, folderId: string, callback) => { - const file = sandboxFiles.fileData.find((f) => f.id === fileId); - if (!file) return; - - const parts = fileId.split("/"); - const newFileId = folderId + "/" + parts.pop(); - - fs.rename( - path.join(dirName, fileId), - path.join(dirName, newFileId), - function (err) { - if (err) throw err; - } - ); - - file.id = newFileId; - - await renameFile(fileId, newFileId, file.data); - const newFiles = await getSandboxFiles(data.sandboxId); - - callback(newFiles.files); - }); - - socket.on("createFile", async (name: string, callback) => { - try { - const size: number = await getProjectSize(data.sandboxId); - // limit is 200mb - if (size > 200 * 1024 * 1024) { - io.emit( - "rateLimit", - "Rate limited: project size exceeded. Please delete some files." - ); - callback({ success: false }); - } - - await createFileRL.consume(data.userId, 1); - - const id = `projects/${data.sandboxId}/${name}`; - - fs.writeFile(path.join(dirName, id), "", function (err) { - if (err) throw err; - }); - - sandboxFiles.files.push({ - id, - name, - type: "file", - }); - - sandboxFiles.fileData.push({ - id, - data: "", - }); - - await createFile(id); - - callback({ success: true }); - } catch (e) { - io.emit("rateLimit", "Rate limited: file creation. Please slow down."); - } - }); - - socket.on("createFolder", async (name: string, callback) => { - try { - await createFolderRL.consume(data.userId, 1); - - const id = `projects/${data.sandboxId}/${name}`; - - fs.mkdir(path.join(dirName, id), { recursive: true }, function (err) { - if (err) throw err; - }); - - callback(); - } catch (e) { - io.emit("rateLimit", "Rate limited: folder creation. 
Please slow down."); - } - }); - - socket.on("renameFile", async (fileId: string, newName: string) => { - try { - await renameFileRL.consume(data.userId, 1); - - const file = sandboxFiles.fileData.find((f) => f.id === fileId); - if (!file) return; - file.id = newName; - - const parts = fileId.split("/"); - const newFileId = - parts.slice(0, parts.length - 1).join("/") + "/" + newName; - - fs.rename( - path.join(dirName, fileId), - path.join(dirName, newFileId), - function (err) { - if (err) throw err; - } - ); - await renameFile(fileId, newFileId, file.data); - } catch (e) { - io.emit("rateLimit", "Rate limited: file renaming. Please slow down."); - return; - } - }); - - socket.on("deleteFile", async (fileId: string, callback) => { - try { - await deleteFileRL.consume(data.userId, 1); - const file = sandboxFiles.fileData.find((f) => f.id === fileId); - if (!file) return; - - fs.unlink(path.join(dirName, fileId), function (err) { - if (err) throw err; - }); - sandboxFiles.fileData = sandboxFiles.fileData.filter( - (f) => f.id !== fileId - ); - - await deleteFile(fileId); - - const newFiles = await getSandboxFiles(data.sandboxId); - callback(newFiles.files); - } catch (e) { - io.emit("rateLimit", "Rate limited: file deletion. 
Please slow down."); - } - }); - - // todo - // socket.on("renameFolder", async (folderId: string, newName: string) => { - // }); - - socket.on("deleteFolder", async (folderId: string, callback) => { - const files = await getFolder(folderId); - - await Promise.all( - files.map(async (file) => { - fs.unlink(path.join(dirName, file), function (err) { - if (err) throw err; - }); - - sandboxFiles.fileData = sandboxFiles.fileData.filter( - (f) => f.id !== file - ); - - await deleteFile(file); - }) - ); - - const newFiles = await getSandboxFiles(data.sandboxId); - - callback(newFiles.files); - }); - - socket.on("createTerminal", (id: string, callback) => { - if (terminals[id] || Object.keys(terminals).length >= 4) { - return; - } - - const pty = spawn(os.platform() === "win32" ? "cmd.exe" : "bash", [], { - name: "xterm", - cols: 100, - cwd: path.join(dirName, "projects", data.sandboxId), - }); - - const onData = pty.onData((data) => { - io.emit("terminalResponse", { - id, - data, - }); - }); - - const onExit = pty.onExit((code) => console.log("exit :(", code)); - - pty.write("export PS1='\\u > '\r"); - pty.write("clear\r"); - - terminals[id] = { - terminal: pty, - onData, - onExit, + const data = socket.data as { + userId: string; + sandboxId: string; + isOwner: boolean; }; - callback(); - }); - - socket.on("resizeTerminal", (dimensions: { cols: number; rows: number }) => { - Object.values(terminals).forEach((t) => { - t.terminal.resize(dimensions.cols, dimensions.rows); - }); - }); - - socket.on("terminalData", (id: string, data: string) => { - if (!terminals[id]) { - return; - } - - try { - terminals[id].terminal.write(data); - } catch (e) { - console.log("Error writing to terminal", e); - } - }); - - socket.on("closeTerminal", (id: string, callback) => { - if (!terminals[id]) { - return; - } - - terminals[id].onData.dispose(); - terminals[id].onExit.dispose(); - delete terminals[id]; - - callback(); - }); - - socket.on( - "generateCode", - async ( - fileName: string, 
- code: string, - line: number, - instructions: string, - callback - ) => { - const fetchPromise = fetch( - `${process.env.DATABASE_WORKER_URL}/api/sandbox/generate`, - { - method: "POST", - headers: { - "Content-Type": "application/json", - Authorization: `${process.env.WORKERS_KEY}`, - }, - body: JSON.stringify({ - userId: data.userId, - }), - } - ); - - // Generate code from cloudflare workers AI - const generateCodePromise = fetch( - `${process.env.AI_WORKER_URL}/api?fileName=${fileName}&code=${code}&line=${line}&instructions=${instructions}`, - { - headers: { - "Content-Type": "application/json", - Authorization: `${process.env.CF_AI_KEY}`, - }, - } - ); - - const [fetchResponse, generateCodeResponse] = await Promise.all([ - fetchPromise, - generateCodePromise, - ]); - - const json = await generateCodeResponse.json(); - - callback({ response: json.response, success: true }); - } - ); - - socket.on("disconnect", async () => { if (data.isOwner) { - Object.entries(terminals).forEach((t) => { - const { terminal, onData, onExit } = t[1]; - onData.dispose(); - onExit.dispose(); - delete terminals[t[0]]; - }); - - socket.broadcast.emit( - "disableAccess", - "The sandbox owner has disconnected." - ); + isOwnerConnected = true; + connections[data.sandboxId] = (connections[data.sandboxId] ?? 
0) + 1; + } else { + if (!isOwnerConnected) { + socket.emit("disableAccess", "The sandbox owner is not connected."); + return; + } } - // const sockets = await io.fetchSockets(); - // if (inactivityTimeout) { - // clearTimeout(inactivityTimeout); - // } - // if (sockets.length === 0) { - // console.log("STARTING TIMER"); - // inactivityTimeout = setTimeout(() => { - // io.fetchSockets().then(async (sockets) => { - // if (sockets.length === 0) { - // console.log("Server stopped", res); - // } - // }); - // }, 20000); - // } else { - // console.log("number of sockets", sockets.length); - // } - }); + await lockManager.acquireLock(data.sandboxId, async () => { + try { + if (!containers[data.sandboxId]) { + containers[data.sandboxId] = await Sandbox.create(); + console.log("Created container ", data.sandboxId); + io.emit( + "previewURL", + "https://" + containers[data.sandboxId].getHostname(5173) + ); + } + } catch (e: any) { + console.error(`Error creating container ${data.sandboxId}:`, e); + io.emit("error", `Error: container creation. ${e.message ?? 
e}`); + } + }); + + // Change the owner of the project directory to user + const fixPermissions = async () => { + await containers[data.sandboxId].process.startAndWait( + `sudo chown -R user "${path.join(dirName, "projects", data.sandboxId)}"` + ); + }; + + const sandboxFiles = await getSandboxFiles(data.sandboxId); + sandboxFiles.fileData.forEach(async (file) => { + const filePath = path.join(dirName, file.id); + await containers[data.sandboxId].filesystem.makeDir( + path.dirname(filePath) + ); + await containers[data.sandboxId].filesystem.write(filePath, file.data); + }); + fixPermissions(); + + socket.emit("loaded", sandboxFiles.files); + + socket.on("getFile", (fileId: string, callback) => { + console.log(fileId); + try { + const file = sandboxFiles.fileData.find((f) => f.id === fileId); + if (!file) return; + + callback(file.data); + } catch (e: any) { + console.error("Error getting file:", e); + io.emit("error", `Error: get file. ${e.message ?? e}`); + } + }); + + socket.on("getFolder", async (folderId: string, callback) => { + try { + const files = await getFolder(folderId); + callback(files); + } catch (e: any) { + console.error("Error getting folder:", e); + io.emit("error", `Error: get folder. ${e.message ?? e}`); + } + }); + + // todo: send diffs + debounce for efficiency + socket.on("saveFile", async (fileId: string, body: string) => { + try { + if (Buffer.byteLength(body, "utf-8") > MAX_BODY_SIZE) { + socket.emit( + "error", + "Error: file size too large. Please reduce the file size." + ); + return; + } + try { + await saveFileRL.consume(data.userId, 1); + await saveFile(fileId, body); + } catch (e) { + io.emit("error", "Rate limited: file saving. 
Please slow down."); + return; + } + + const file = sandboxFiles.fileData.find((f) => f.id === fileId); + if (!file) return; + file.data = body; + + await containers[data.sandboxId].filesystem.write( + path.join(dirName, file.id), + body + ); + fixPermissions(); + } catch (e: any) { + console.error("Error saving file:", e); + io.emit("error", `Error: file saving. ${e.message ?? e}`); + } + }); + + socket.on( + "moveFile", + async (fileId: string, folderId: string, callback) => { + try { + const file = sandboxFiles.fileData.find((f) => f.id === fileId); + if (!file) return; + + const parts = fileId.split("/"); + const newFileId = folderId + "/" + parts.pop(); + + await moveFile( + containers[data.sandboxId].filesystem, + path.join(dirName, fileId), + path.join(dirName, newFileId) + ); + fixPermissions(); + + file.id = newFileId; + + await renameFile(fileId, newFileId, file.data); + const newFiles = await getSandboxFiles(data.sandboxId); + callback(newFiles.files); + } catch (e: any) { + console.error("Error moving file:", e); + io.emit("error", `Error: file moving. ${e.message ?? e}`); + } + } + ); + + socket.on("createFile", async (name: string, callback) => { + try { + const size: number = await getProjectSize(data.sandboxId); + // limit is 200mb + if (size > 200 * 1024 * 1024) { + io.emit( + "error", + "Rate limited: project size exceeded. Please delete some files." + ); + callback({ success: false }); + return; + } + + try { + await createFileRL.consume(data.userId, 1); + } catch (e) { + io.emit("error", "Rate limited: file creation. 
Please slow down."); + return; + } + + const id = `projects/${data.sandboxId}/${name}`; + + await containers[data.sandboxId].filesystem.write( + path.join(dirName, id), + "" + ); + fixPermissions(); + + sandboxFiles.files.push({ + id, + name, + type: "file", + }); + + sandboxFiles.fileData.push({ + id, + data: "", + }); + + await createFile(id); + + callback({ success: true }); + } catch (e: any) { + console.error("Error creating file:", e); + io.emit("error", `Error: file creation. ${e.message ?? e}`); + } + }); + + socket.on("createFolder", async (name: string, callback) => { + try { + try { + await createFolderRL.consume(data.userId, 1); + } catch (e) { + io.emit("error", "Rate limited: folder creation. Please slow down."); + return; + } + + const id = `projects/${data.sandboxId}/${name}`; + + await containers[data.sandboxId].filesystem.makeDir( + path.join(dirName, id) + ); + + callback(); + } catch (e: any) { + console.error("Error creating folder:", e); + io.emit("error", `Error: folder creation. ${e.message ?? e}`); + } + }); + + socket.on("renameFile", async (fileId: string, newName: string) => { + try { + try { + await renameFileRL.consume(data.userId, 1); + } catch (e) { + io.emit("error", "Rate limited: file renaming. Please slow down."); + return; + } + + const file = sandboxFiles.fileData.find((f) => f.id === fileId); + if (!file) return; + file.id = newName; + + const parts = fileId.split("/"); + const newFileId = + parts.slice(0, parts.length - 1).join("/") + "/" + newName; + + await moveFile( + containers[data.sandboxId].filesystem, + path.join(dirName, fileId), + path.join(dirName, newFileId) + ); + fixPermissions(); + await renameFile(fileId, newFileId, file.data); + } catch (e: any) { + console.error("Error renaming folder:", e); + io.emit("error", `Error: folder renaming. ${e.message ?? 
e}`); + } + }); + + socket.on("deleteFile", async (fileId: string, callback) => { + try { + try { + await deleteFileRL.consume(data.userId, 1); + } catch (e) { + io.emit("error", "Rate limited: file deletion. Please slow down."); + } + + const file = sandboxFiles.fileData.find((f) => f.id === fileId); + if (!file) return; + + await containers[data.sandboxId].filesystem.remove( + path.join(dirName, fileId) + ); + sandboxFiles.fileData = sandboxFiles.fileData.filter( + (f) => f.id !== fileId + ); + + await deleteFile(fileId); + + const newFiles = await getSandboxFiles(data.sandboxId); + callback(newFiles.files); + } catch (e: any) { + console.error("Error deleting file:", e); + io.emit("error", `Error: file deletion. ${e.message ?? e}`); + } + }); + + // todo + // socket.on("renameFolder", async (folderId: string, newName: string) => { + // }); + + socket.on("deleteFolder", async (folderId: string, callback) => { + try { + const files = await getFolder(folderId); + + await Promise.all( + files.map(async (file) => { + await containers[data.sandboxId].filesystem.remove( + path.join(dirName, file) + ); + + sandboxFiles.fileData = sandboxFiles.fileData.filter( + (f) => f.id !== file + ); + + await deleteFile(file); + }) + ); + + const newFiles = await getSandboxFiles(data.sandboxId); + + callback(newFiles.files); + } catch (e: any) { + console.error("Error deleting folder:", e); + io.emit("error", `Error: folder deletion. ${e.message ?? 
e}`); + } + }); + + socket.on("createTerminal", async (id: string, callback) => { + try { + if (terminals[id] || Object.keys(terminals).length >= 4) { + return; + } + + await lockManager.acquireLock(data.sandboxId, async () => { + try { + terminals[id] = await containers[data.sandboxId].terminal.start({ + onData: (data: string) => { + io.emit("terminalResponse", { id, data }); + }, + size: { cols: 80, rows: 20 }, + onExit: () => console.log("Terminal exited", id), + }); + await terminals[id].sendData( + `cd "${path.join(dirName, "projects", data.sandboxId)}"\r` + ); + await terminals[id].sendData("export PS1='user> '\rclear\r"); + console.log("Created terminal", id); + } catch (e: any) { + console.error(`Error creating terminal ${id}:`, e); + io.emit("error", `Error: terminal creation. ${e.message ?? e}`); + } + }); + + callback(); + } catch (e: any) { + console.error(`Error creating terminal ${id}:`, e); + io.emit("error", `Error: terminal creation. ${e.message ?? e}`); + } + }); + + socket.on( + "resizeTerminal", + (dimensions: { cols: number; rows: number }) => { + try { + Object.values(terminals).forEach((t) => { + t.resize(dimensions); + }); + } catch (e: any) { + console.error("Error resizing terminal:", e); + io.emit("error", `Error: terminal resizing. ${e.message ?? e}`); + } + } + ); + + socket.on("terminalData", (id: string, data: string) => { + try { + if (!terminals[id]) { + return; + } + + terminals[id].sendData(data); + } catch (e: any) { + console.error("Error writing to terminal:", e); + io.emit("error", `Error: writing to terminal. ${e.message ?? e}`); + } + }); + + socket.on("closeTerminal", async (id: string, callback) => { + try { + if (!terminals[id]) { + return; + } + + await terminals[id].kill(); + delete terminals[id]; + + callback(); + } catch (e: any) { + console.error("Error closing terminal:", e); + io.emit("error", `Error: closing terminal. ${e.message ?? 
e}`); + } + }); + + socket.on( + "generateCode", + async ( + fileName: string, + code: string, + line: number, + instructions: string, + callback + ) => { + try { + const fetchPromise = fetch( + `${process.env.DATABASE_WORKER_URL}/api/sandbox/generate`, + { + method: "POST", + headers: { + "Content-Type": "application/json", + Authorization: `${process.env.WORKERS_KEY}`, + }, + body: JSON.stringify({ + userId: data.userId, + }), + } + ); + + // Generate code from cloudflare workers AI + const generateCodePromise = fetch( + `${process.env.AI_WORKER_URL}/api?fileName=${fileName}&code=${code}&line=${line}&instructions=${instructions}`, + { + headers: { + "Content-Type": "application/json", + Authorization: `${process.env.CF_AI_KEY}`, + }, + } + ); + + const [fetchResponse, generateCodeResponse] = await Promise.all([ + fetchPromise, + generateCodePromise, + ]); + + const json = await generateCodeResponse.json(); + + callback({ response: json.response, success: true }); + } catch (e: any) { + console.error("Error generating code:", e); + io.emit("error", `Error: code generation. ${e.message ?? e}`); + } + } + ); + + socket.on("disconnect", async () => { + try { + if (data.isOwner) { + connections[data.sandboxId]--; + } + + if (data.isOwner && connections[data.sandboxId] <= 0) { + await Promise.all( + Object.entries(terminals).map(async ([key, terminal]) => { + await terminal.kill(); + delete terminals[key]; + }) + ); + + await lockManager.acquireLock(data.sandboxId, async () => { + try { + if (containers[data.sandboxId]) { + await containers[data.sandboxId].close(); + delete containers[data.sandboxId]; + console.log("Closed container", data.sandboxId); + } + } catch (error) { + console.error("Error closing container ", data.sandboxId, error); + } + }); + + socket.broadcast.emit( + "disableAccess", + "The sandbox owner has disconnected." 
+ ); + } + + // const sockets = await io.fetchSockets(); + // if (inactivityTimeout) { + // clearTimeout(inactivityTimeout); + // } + // if (sockets.length === 0) { + // console.log("STARTING TIMER"); + // inactivityTimeout = setTimeout(() => { + // io.fetchSockets().then(async (sockets) => { + // if (sockets.length === 0) { + // console.log("Server stopped", res); + // } + // }); + // }, 20000); + // } else { + // console.log("number of sockets", sockets.length); + // } + } catch (e: any) { + console.log("Error disconnecting:", e); + io.emit("error", `Error: disconnecting. ${e.message ?? e}`); + } + }); + } catch (e: any) { + console.error("Error connecting:", e); + io.emit("error", `Error: connection. ${e.message ?? e}`); + } }); httpServer.listen(port, () => { diff --git a/backend/server/src/utils.ts b/backend/server/src/utils.ts index 51e28f9..0aebb03 100644 --- a/backend/server/src/utils.ts +++ b/backend/server/src/utils.ts @@ -1,177 +1,23 @@ -import * as dotenv from "dotenv"; -import { - R2FileBody, - R2Files, - Sandbox, - TFile, - TFileData, - TFolder, -} from "./types"; +export class LockManager { + private locks: { [key: string]: Promise }; -dotenv.config(); - -export const getSandboxFiles = async (id: string) => { - const res = await fetch( - `${process.env.STORAGE_WORKER_URL}/api?sandboxId=${id}`, - { - headers: { - Authorization: `${process.env.WORKERS_KEY}`, - }, - } - ); - const data: R2Files = await res.json(); - - const paths = data.objects.map((obj) => obj.key); - const processedFiles = await processFiles(paths, id); - return processedFiles; -}; - -export const getFolder = async (folderId: string) => { - const res = await fetch( - `${process.env.STORAGE_WORKER_URL}/api?folderId=${folderId}`, - { - headers: { - Authorization: `${process.env.WORKERS_KEY}`, - }, - } - ); - const data: R2Files = await res.json(); - - return data.objects.map((obj) => obj.key); -}; - -const processFiles = async (paths: string[], id: string) => { - const root: TFolder = { 
id: "/", type: "folder", name: "/", children: [] }; - const fileData: TFileData[] = []; - - paths.forEach((path) => { - const allParts = path.split("/"); - if (allParts[1] !== id) { - return; - } - - const parts = allParts.slice(2); - let current: TFolder = root; - - for (let i = 0; i < parts.length; i++) { - const part = parts[i]; - const isFile = i === parts.length - 1 && part.includes("."); - const existing = current.children.find((child) => child.name === part); - - if (existing) { - if (!isFile) { - current = existing as TFolder; - } - } else { - if (isFile) { - const file: TFile = { id: path, type: "file", name: part }; - current.children.push(file); - fileData.push({ id: path, data: "" }); - } else { - const folder: TFolder = { - // id: path, // todo: wrong id. for example, folder "src" ID is: projects/a7vgttfqbgy403ratp7du3ln/src/App.css - id: `projects/${id}/${parts.slice(0, i + 1).join("/")}`, - type: "folder", - name: part, - children: [], - }; - current.children.push(folder); - current = folder; - } - } - } - }); - - await Promise.all( - fileData.map(async (file) => { - const data = await fetchFileContent(file.id); - file.data = data; - }) - ); - - return { - files: root.children, - fileData, - }; -}; - -const fetchFileContent = async (fileId: string): Promise => { - try { - const fileRes = await fetch( - `${process.env.STORAGE_WORKER_URL}/api?fileId=${fileId}`, - { - headers: { - Authorization: `${process.env.WORKERS_KEY}`, - }, - } - ); - return await fileRes.text(); - } catch (error) { - console.error("ERROR fetching file:", error); - return ""; + constructor() { + this.locks = {}; } -}; -export const createFile = async (fileId: string) => { - const res = await fetch(`${process.env.STORAGE_WORKER_URL}/api`, { - method: "POST", - headers: { - "Content-Type": "application/json", - Authorization: `${process.env.WORKERS_KEY}`, - }, - body: JSON.stringify({ fileId }), - }); - return res.ok; -}; - -export const renameFile = async ( - fileId: string, - 
newFileId: string, - data: string -) => { - const res = await fetch(`${process.env.STORAGE_WORKER_URL}/api/rename`, { - method: "POST", - headers: { - "Content-Type": "application/json", - Authorization: `${process.env.WORKERS_KEY}`, - }, - body: JSON.stringify({ fileId, newFileId, data }), - }); - return res.ok; -}; - -export const saveFile = async (fileId: string, data: string) => { - const res = await fetch(`${process.env.STORAGE_WORKER_URL}/api/save`, { - method: "POST", - headers: { - "Content-Type": "application/json", - Authorization: `${process.env.WORKERS_KEY}`, - }, - body: JSON.stringify({ fileId, data }), - }); - return res.ok; -}; - -export const deleteFile = async (fileId: string) => { - const res = await fetch(`${process.env.STORAGE_WORKER_URL}/api`, { - method: "DELETE", - headers: { - "Content-Type": "application/json", - Authorization: `${process.env.WORKERS_KEY}`, - }, - body: JSON.stringify({ fileId }), - }); - return res.ok; -}; - -export const getProjectSize = async (id: string) => { - const res = await fetch( - `${process.env.STORAGE_WORKER_URL}/api/size?sandboxId=${id}`, - { - headers: { - Authorization: `${process.env.WORKERS_KEY}`, - }, + async acquireLock(key: string, task: () => Promise): Promise { + if (!this.locks[key]) { + this.locks[key] = new Promise(async (resolve, reject) => { + try { + const result = await task(); + resolve(result); + } catch (error) { + reject(error); + } finally { + delete this.locks[key]; + } + }); } - ); - return (await res.json()).size; -}; + return await this.locks[key]; + } +} \ No newline at end of file diff --git a/frontend/components/editor/index.tsx b/frontend/components/editor/index.tsx index e3f3030..05699f1 100644 --- a/frontend/components/editor/index.tsx +++ b/frontend/components/editor/index.tsx @@ -1,9 +1,9 @@ "use client" -import { useEffect, useRef, useState } from "react" +import { SetStateAction, useCallback, useEffect, useRef, useState } from "react" import monaco from "monaco-editor" 
import Editor, { BeforeMount, OnMount } from "@monaco-editor/react" -import { io } from "socket.io-client" +import { Socket, io } from "socket.io-client" import { toast } from "sonner" import { useClerk } from "@clerk/nextjs" @@ -23,7 +23,7 @@ import Tab from "../ui/tab" import Sidebar from "./sidebar" import GenerateInput from "./generate" import { Sandbox, User, TFile, TFolder, TTab } from "@/lib/types" -import { addNew, processFileType, validateName } from "@/lib/utils" +import { addNew, processFileType, validateName, debounce } from "@/lib/utils" import { Cursors } from "./live/cursors" import { Terminal } from "@xterm/xterm" import DisableAccessModal from "./live/disableModal" @@ -41,12 +41,16 @@ export default function CodeEditor({ sandboxData: Sandbox reactDefinitionFile: string }) { - const socket = io( - `http://localhost:${process.env.NEXT_PUBLIC_SERVER_PORT}?userId=${userData.id}&sandboxId=${sandboxData.id}`, - { - timeout: 2000, - } - ) + const socketRef = useRef(null); + + // Initialize socket connection if it doesn't exist + if (!socketRef.current) { + socketRef.current = io( + `${window.location.protocol}//${window.location.hostname}:${process.env.NEXT_PUBLIC_SERVER_PORT}?userId=${userData.id}&sandboxId=${sandboxData.id}`, + { + timeout: 2000, + } + );} const [isPreviewCollapsed, setIsPreviewCollapsed] = useState(true) const [disableAccess, setDisableAccess] = useState({ @@ -90,6 +94,9 @@ export default function CodeEditor({ }[] >([]) + // Preview state + const [previewURL, setPreviewURL] = useState(""); + const isOwner = sandboxData.userId === userData.id const clerk = useClerk() @@ -290,26 +297,33 @@ export default function CodeEditor({ }, [decorations.options]) // Save file keybinding logic effect + const debouncedSaveData = useCallback( + debounce((value: string | undefined, activeFileId: string | undefined) => { + setTabs((prev) => + prev.map((tab) => + tab.id === activeFileId ? 
{ ...tab, saved: true } : tab + ) + ); + console.log(`Saving file...${activeFileId}`); + console.log(`Saving file...${value}`); + socketRef.current?.emit("saveFile", activeFileId, value); + }, Number(process.env.FILE_SAVE_DEBOUNCE_DELAY)||1000), + [socketRef] + ); + useEffect(() => { const down = (e: KeyboardEvent) => { if (e.key === "s" && (e.metaKey || e.ctrlKey)) { - e.preventDefault() - - setTabs((prev) => - prev.map((tab) => - tab.id === activeFileId ? { ...tab, saved: true } : tab - ) - ) - - socket.emit("saveFile", activeFileId, editorRef?.getValue()) + e.preventDefault(); + debouncedSaveData(editorRef?.getValue(), activeFileId); } - } - document.addEventListener("keydown", down) + }; + document.addEventListener("keydown", down); return () => { - document.removeEventListener("keydown", down) - } - }, [tabs, activeFileId]) + document.removeEventListener("keydown", down); + }; + }, [activeFileId, tabs, debouncedSaveData]); // Liveblocks live collaboration setup effect useEffect(() => { @@ -358,10 +372,10 @@ export default function CodeEditor({ // Connection/disconnection effect useEffect(() => { - socket.connect() - + socketRef.current?.connect() + return () => { - socket.disconnect() + socketRef.current?.disconnect() } }, []) @@ -377,7 +391,7 @@ export default function CodeEditor({ setFiles(files) } - const onRateLimit = (message: string) => { + const onError = (message: string) => { toast.error(message) } @@ -396,20 +410,22 @@ export default function CodeEditor({ }) } - socket.on("connect", onConnect) - socket.on("disconnect", onDisconnect) - socket.on("loaded", onLoadedEvent) - socket.on("rateLimit", onRateLimit) - socket.on("terminalResponse", onTerminalResponse) - socket.on("disableAccess", onDisableAccess) + socketRef.current?.on("connect", onConnect) + socketRef.current?.on("disconnect", onDisconnect) + socketRef.current?.on("loaded", onLoadedEvent) + socketRef.current?.on("error", onError) + socketRef.current?.on("terminalResponse", onTerminalResponse) 
+ socketRef.current?.on("disableAccess", onDisableAccess) + socketRef.current?.on("previewURL", setPreviewURL) return () => { - socket.off("connect", onConnect) - socket.off("disconnect", onDisconnect) - socket.off("loaded", onLoadedEvent) - socket.off("rateLimit", onRateLimit) - socket.off("terminalResponse", onTerminalResponse) - socket.off("disableAccess", onDisableAccess) + socketRef.current?.off("connect", onConnect) + socketRef.current?.off("disconnect", onDisconnect) + socketRef.current?.off("loaded", onLoadedEvent) + socketRef.current?.off("error", onError) + socketRef.current?.off("terminalResponse", onTerminalResponse) + socketRef.current?.off("disableAccess", onDisableAccess) + socketRef.current?.off("previewURL", setPreviewURL) } // }, []); }, [terminals]) @@ -417,31 +433,44 @@ export default function CodeEditor({ // Helper functions for tabs: // Select file and load content - const selectFile = (tab: TTab) => { - if (tab.id === activeFileId) return - setGenerate((prev) => { - return { - ...prev, - show: false, - } - }) - const exists = tabs.find((t) => t.id === tab.id) + // Initialize debounced function once + const fileCache = useRef(new Map()); + // Debounced function to get file content + const debouncedGetFile = useCallback( + debounce((tabId, callback) => { + socketRef.current?.emit('getFile', tabId, callback); + }, 300), // 300ms debounce delay, adjust as needed + [] + ); + + const selectFile = useCallback((tab: TTab) => { + if (tab.id === activeFileId) return; + + setGenerate((prev) => ({ ...prev, show: false })); + + const exists = tabs.find((t) => t.id === tab.id); setTabs((prev) => { if (exists) { - setActiveFileId(exists.id) - return prev + setActiveFileId(exists.id); + return prev; } - return [...prev, tab] - }) + return [...prev, tab]; + }); - socket.emit("getFile", tab.id, (response: string) => { - setActiveFileContent(response) - }) - setEditorLanguage(processFileType(tab.name)) - setActiveFileId(tab.id) - } + if 
(fileCache.current.has(tab.id)) { + setActiveFileContent(fileCache.current.get(tab.id)); + } else { + debouncedGetFile(tab.id, (response: SetStateAction) => { + fileCache.current.set(tab.id, response); + setActiveFileContent(response); + }); + } + + setEditorLanguage(processFileType(tab.name)); + setActiveFileId(tab.id); + }, [activeFileId, tabs, debouncedGetFile]); // Close tab and remove from tabs const closeTab = (id: string) => { @@ -515,7 +544,7 @@ export default function CodeEditor({ return false } - socket.emit("renameFile", id, newName) + socketRef.current?.emit("renameFile", id, newName) setTabs((prev) => prev.map((tab) => (tab.id === id ? { ...tab, name: newName } : tab)) ) @@ -524,7 +553,7 @@ export default function CodeEditor({ } const handleDeleteFile = (file: TFile) => { - socket.emit("deleteFile", file.id, (response: (TFolder | TFile)[]) => { + socketRef.current?.emit("deleteFile", file.id, (response: (TFolder | TFile)[]) => { setFiles(response) }) closeTab(file.id) @@ -534,11 +563,11 @@ export default function CodeEditor({ setDeletingFolderId(folder.id) console.log("deleting folder", folder.id) - socket.emit("getFolder", folder.id, (response: string[]) => + socketRef.current?.emit("getFolder", folder.id, (response: string[]) => closeTabs(response) ) - socket.emit("deleteFolder", folder.id, (response: (TFolder | TFile)[]) => { + socketRef.current?.emit("deleteFolder", folder.id, (response: (TFolder | TFile)[]) => { setFiles(response) setDeletingFolderId("") }) @@ -565,7 +594,7 @@ export default function CodeEditor({ {generate.show && ai ? ( t.id === activeFileId)?.name ?? 
"", @@ -625,7 +654,7 @@ export default function CodeEditor({ handleRename={handleRename} handleDeleteFile={handleDeleteFile} handleDeleteFolder={handleDeleteFolder} - socket={socket} + socket={socketRef.current} setFiles={setFiles} addNew={(name, type) => addNew(name, type, setFiles, sandboxData)} deletingFolderId={deletingFolderId} @@ -745,6 +774,7 @@ export default function CodeEditor({ previewPanelRef.current?.expand() setIsPreviewCollapsed(false) }} + src={previewURL} /> @@ -757,7 +787,7 @@ export default function CodeEditor({ ) : (
@@ -772,3 +802,4 @@ export default function CodeEditor({ ) } + diff --git a/frontend/components/editor/preview/index.tsx b/frontend/components/editor/preview/index.tsx index 0544a12..a400d14 100644 --- a/frontend/components/editor/preview/index.tsx +++ b/frontend/components/editor/preview/index.tsx @@ -15,9 +15,11 @@ import { toast } from "sonner" export default function PreviewWindow({ collapsed, open, + src }: { collapsed: boolean open: () => void + src: string }) { const ref = useRef(null) const [iframeKey, setIframeKey] = useState(0) @@ -45,7 +47,7 @@ export default function PreviewWindow({ { - navigator.clipboard.writeText(`http://localhost:5173`) + navigator.clipboard.writeText(src) toast.info("Copied preview link to clipboard") }} > @@ -73,7 +75,7 @@ export default function PreviewWindow({ ref={ref} width={"100%"} height={"100%"} - src={`http://localhost:5173`} + src={src} />
)} diff --git a/frontend/lib/utils.ts b/frontend/lib/utils.ts index c52d06b..85cc434 100644 --- a/frontend/lib/utils.ts +++ b/frontend/lib/utils.ts @@ -61,3 +61,13 @@ export function addNew( ]) } } + +export function debounce void>(func: T, wait: number): T { + let timeout: NodeJS.Timeout | null = null; + return function (...args: Parameters) { + if (timeout) { + clearTimeout(timeout); + } + timeout = setTimeout(() => func(...args), wait); + } as T; +} \ No newline at end of file