Compare commits


6 Commits

SHA1 Message Date
08d562ee54 chore: remove unused variable reactDefinitionFile 2024-07-17 10:49:58 -04:00
db1410f587 fix: remove editorRef from useEffect 2024-07-17 10:46:34 -04:00
7a80734c25 fix: remove extra state variables from useEffect 2024-07-17 10:46:29 -04:00
0a21cb2637 fix: store rooms in map 2024-07-17 10:46:21 -04:00
2fbabbd403 fix: handle file save bug (#36) 2024-06-27 23:43:18 -07:00
9f0b6a8fdc Implement secure cloud sandboxes with E2B (#35)
* chore: rename utils.ts to fileoperations.ts

* feat: replace node-pty with E2B sandboxes

* added debounced function in the editor

* fix: move socket connection to useRef

* fix: wait until terminals are killed to close the container

* fix: ensure container remains open until all owner connections are closed

* fix: sync files to container instead of local file system

* fix: set project file permissions so that they belong to the terminal user

* fix: use the container URL for the preview panel

* fix: count only the current user's sandboxes towards the limit

* fix: remove hardcoded reference to localhost

* fix: add error handling to the backend

* docs: add information about E2B

---------

Co-authored-by: Akhilesh Rangani <akhileshrangani4@gmail.com>
2024-06-27 23:39:03 -07:00
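
One bullet above mentions a debounced save function added to the editor; that helper is not visible in the files shown below. As a rough, hypothetical sketch of the pattern (the hook name, signature, and 1000 ms delay are assumptions, not taken from this compare view):

```ts
// Hypothetical debounce hook for editor saves; names and delay are assumptions.
import { useCallback, useRef } from "react";

export function useDebouncedSave(
  save: (fileId: string, body: string) => void,
  delay = 1000
) {
  const timeoutRef = useRef<ReturnType<typeof setTimeout>>();

  return useCallback(
    (fileId: string, body: string) => {
      // Reset the timer on every change so only the latest content is sent.
      if (timeoutRef.current) clearTimeout(timeoutRef.current);
      timeoutRef.current = setTimeout(() => save(fileId, body), delay);
    },
    [save, delay]
  );
}
```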
7 changed files with 557 additions and 410 deletions

View File

@@ -29,7 +29,9 @@ npm run dev
### Backend
The backend consists of a primary Express and Socket.io server, and 3 Cloudflare Workers microservices for the D1 database, R2 storage, and Workers AI. The D1 database also contains a [service binding](https://developers.cloudflare.com/workers/runtime-apis/bindings/service-bindings/) to the R2 storage worker.
The backend consists of a primary Express and Socket.io server, and 3 Cloudflare Workers microservices for the D1 database, R2 storage, and Workers AI. The D1 database also contains a [service binding](https://developers.cloudflare.com/workers/runtime-apis/bindings/service-bindings/) to the R2 storage worker. Each open sandbox instantiates a secure Linux sandbox on E2B, which is used for the terminal and live preview.
You will need to make an account on [E2B](https://e2b.dev/) to get an API key.
#### Socket.io server
@@ -181,3 +183,4 @@ It should be in the form `category(scope or module): message` in your commit message
- [Express](https://expressjs.com/)
- [Socket.io](https://socket.io/)
- [Drizzle ORM](https://orm.drizzle.team/)
- [E2B](https://e2b.dev/)
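
The README changes above describe the new E2B-backed architecture. As a condensed sketch of the pattern the server diff below follows, assuming the e2b package's `Sandbox` API that appears in that diff (the example file path and its contents are placeholders):

```ts
// Condensed sketch of the E2B flow used by the server (see the index.ts diff below).
// Assumes E2B_API_KEY is available in the environment; the file and its contents
// are placeholders, not values from this compare view.
import { Sandbox } from "e2b";
import path from "path";

const dirName = "/home/user";

async function openProject(sandboxId: string) {
  // One secure Linux sandbox per open project, replacing the local node-pty setup.
  const container = await Sandbox.create();

  // Project files are synced into the sandbox filesystem instead of local disk.
  const filePath = path.join(dirName, "projects", sandboxId, "index.js");
  await container.filesystem.makeDir(path.dirname(filePath));
  await container.filesystem.write(filePath, "console.log('hello')");

  // The live preview panel points at the sandbox's public hostname.
  const previewURL = "https://" + container.getHostname(5173);
  return { container, previewURL };
}
```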

View File

@@ -114,12 +114,12 @@ export default {
.select()
.from(sandbox)
.where(eq(sandbox.userId, userId))
.all();
.all()
if (userSandboxes.length >= 8) {
return new Response("You reached the maximum # of sandboxes.", {
status: 400,
});
})
}
const sb = await db

View File

@@ -31,7 +31,7 @@ export const sandbox = sqliteTable("sandbox", {
createdAt: integer("createdAt", { mode: "timestamp_ms" }),
userId: text("user_id")
.notNull()
.references(() => user.id, { onDelete: "cascade" }),
.references(() => user.id),
});
export type Sandbox = typeof sandbox.$inferSelect;

View File

@@ -5,3 +5,4 @@ PORT=4000
WORKERS_KEY=
DATABASE_WORKER_URL=
STORAGE_WORKER_URL=
E2B_API_KEY=
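
The .env template gains an E2B_API_KEY entry. As a hedged sketch, assuming the backend already loads .env with dotenv, the server could fail fast when the key is missing (the explicit check and error message are assumptions, not code from this diff; the SDK is typically expected to pick the key up from the environment):

```ts
// Sketch: fail fast at startup if the new E2B_API_KEY entry is missing.
// The explicit check is an assumption for clearer startup errors.
import dotenv from "dotenv";

dotenv.config();

if (!process.env.E2B_API_KEY) {
  throw new Error(
    "E2B_API_KEY is not set. Create an account at https://e2b.dev/ and add the key to .env."
  );
}
```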

View File

@@ -49,11 +49,15 @@ const terminals: Record<string, Terminal> = {};
const dirName = "/home/user";
const moveFile = async (filesystem: FilesystemManager, filePath: string, newFilePath: string) => {
const fileContents = await filesystem.readBytes(filePath)
const moveFile = async (
filesystem: FilesystemManager,
filePath: string,
newFilePath: string
) => {
const fileContents = await filesystem.readBytes(filePath);
await filesystem.writeBytes(newFilePath, fileContents);
await filesystem.remove(filePath);
}
};
io.use(async (socket, next) => {
const handshakeSchema = z.object({
@@ -109,6 +113,7 @@ io.use(async (socket, next) => {
const lockManager = new LockManager();
io.on("connection", async (socket) => {
try {
if (inactivityTimeout) clearTimeout(inactivityTimeout);
const data = socket.data as {
@@ -132,10 +137,14 @@ io.on("connection", async (socket) => {
if (!containers[data.sandboxId]) {
containers[data.sandboxId] = await Sandbox.create();
console.log("Created container ", data.sandboxId);
io.emit("previewURL", "https://" + containers[data.sandboxId].getHostname(5173));
io.emit(
"previewURL",
"https://" + containers[data.sandboxId].getHostname(5173)
);
}
} catch (error) {
console.error("Error creating container ", data.sandboxId, error);
} catch (e: any) {
console.error(`Error creating container ${data.sandboxId}:`, e);
io.emit("error", `Error: container creation. ${e.message ?? e}`);
}
});
@@ -144,12 +153,14 @@ io.on("connection", async (socket) => {
await containers[data.sandboxId].process.startAndWait(
`sudo chown -R user "${path.join(dirName, "projects", data.sandboxId)}"`
);
}
};
const sandboxFiles = await getSandboxFiles(data.sandboxId);
sandboxFiles.fileData.forEach(async (file) => {
const filePath = path.join(dirName, file.id);
await containers[data.sandboxId].filesystem.makeDir(path.dirname(filePath));
await containers[data.sandboxId].filesystem.makeDir(
path.dirname(filePath)
);
await containers[data.sandboxId].filesystem.write(filePath, file.data);
});
fixPermissions();
@@ -157,43 +168,67 @@ io.on("connection", async (socket) => {
socket.emit("loaded", sandboxFiles.files);
socket.on("getFile", (fileId: string, callback) => {
console.log(fileId);
try {
const file = sandboxFiles.fileData.find((f) => f.id === fileId);
if (!file) return;
callback(file.data);
} catch (e: any) {
console.error("Error getting file:", e);
io.emit("error", `Error: get file. ${e.message ?? e}`);
}
});
socket.on("getFolder", async (folderId: string, callback) => {
try {
const files = await getFolder(folderId);
callback(files);
} catch (e: any) {
console.error("Error getting folder:", e);
io.emit("error", `Error: get folder. ${e.message ?? e}`);
}
});
// todo: send diffs + debounce for efficiency
socket.on("saveFile", async (fileId: string, body: string) => {
try {
await saveFileRL.consume(data.userId, 1);
if (!fileId) return; // handles saving when no file is open
try {
if (Buffer.byteLength(body, "utf-8") > MAX_BODY_SIZE) {
socket.emit(
"rateLimit",
"Rate limited: file size too large. Please reduce the file size."
"error",
"Error: file size too large. Please reduce the file size."
);
return;
}
try {
await saveFileRL.consume(data.userId, 1);
await saveFile(fileId, body);
} catch (e) {
io.emit("error", "Rate limited: file saving. Please slow down.");
return;
}
const file = sandboxFiles.fileData.find((f) => f.id === fileId);
if (!file) return;
file.data = body;
await containers[data.sandboxId].filesystem.write(path.join(dirName, file.id), body);
await containers[data.sandboxId].filesystem.write(
path.join(dirName, file.id),
body
);
fixPermissions();
await saveFile(fileId, body);
} catch (e) {
io.emit("rateLimit", "Rate limited: file saving. Please slow down.");
} catch (e: any) {
console.error("Error saving file:", e);
io.emit("error", `Error: file saving. ${e.message ?? e}`);
}
});
socket.on("moveFile", async (fileId: string, folderId: string, callback) => {
socket.on(
"moveFile",
async (fileId: string, folderId: string, callback) => {
try {
const file = sandboxFiles.fileData.find((f) => f.id === fileId);
if (!file) return;
@@ -204,16 +239,20 @@ io.on("connection", async (socket) => {
containers[data.sandboxId].filesystem,
path.join(dirName, fileId),
path.join(dirName, newFileId)
)
);
fixPermissions();
file.id = newFileId;
await renameFile(fileId, newFileId, file.data);
const newFiles = await getSandboxFiles(data.sandboxId);
callback(newFiles.files);
});
} catch (e: any) {
console.error("Error moving file:", e);
io.emit("error", `Error: file moving. ${e.message ?? e}`);
}
}
);
socket.on("createFile", async (name: string, callback) => {
try {
@@ -221,17 +260,26 @@ io.on("connection", async (socket) => {
// limit is 200mb
if (size > 200 * 1024 * 1024) {
io.emit(
"rateLimit",
"error",
"Rate limited: project size exceeded. Please delete some files."
);
callback({ success: false });
return;
}
try {
await createFileRL.consume(data.userId, 1);
} catch (e) {
io.emit("error", "Rate limited: file creation. Please slow down.");
return;
}
const id = `projects/${data.sandboxId}/${name}`;
await containers[data.sandboxId].filesystem.write(path.join(dirName, id), "");
await containers[data.sandboxId].filesystem.write(
path.join(dirName, id),
""
);
fixPermissions();
sandboxFiles.files.push({
@@ -248,28 +296,42 @@ io.on("connection", async (socket) => {
await createFile(id);
callback({ success: true });
} catch (e) {
io.emit("rateLimit", "Rate limited: file creation. Please slow down.");
} catch (e: any) {
console.error("Error creating file:", e);
io.emit("error", `Error: file creation. ${e.message ?? e}`);
}
});
socket.on("createFolder", async (name: string, callback) => {
try {
try {
await createFolderRL.consume(data.userId, 1);
} catch (e) {
io.emit("error", "Rate limited: folder creation. Please slow down.");
return;
}
const id = `projects/${data.sandboxId}/${name}`;
await containers[data.sandboxId].filesystem.makeDir(path.join(dirName, id));
await containers[data.sandboxId].filesystem.makeDir(
path.join(dirName, id)
);
callback();
} catch (e) {
io.emit("rateLimit", "Rate limited: folder creation. Please slow down.");
} catch (e: any) {
console.error("Error creating folder:", e);
io.emit("error", `Error: folder creation. ${e.message ?? e}`);
}
});
socket.on("renameFile", async (fileId: string, newName: string) => {
try {
try {
await renameFileRL.consume(data.userId, 1);
} catch (e) {
io.emit("error", "Rate limited: file renaming. Please slow down.");
return;
}
const file = sandboxFiles.fileData.find((f) => f.id === fileId);
if (!file) return;
@@ -279,27 +341,33 @@ io.on("connection", async (socket) => {
const newFileId =
parts.slice(0, parts.length - 1).join("/") + "/" + newName;
await moveFile(
containers[data.sandboxId].filesystem,
path.join(dirName, fileId),
path.join(dirName, newFileId)
)
);
fixPermissions();
await renameFile(fileId, newFileId, file.data);
} catch (e) {
io.emit("rateLimit", "Rate limited: file renaming. Please slow down.");
return;
} catch (e: any) {
console.error("Error renaming folder:", e);
io.emit("error", `Error: folder renaming. ${e.message ?? e}`);
}
});
socket.on("deleteFile", async (fileId: string, callback) => {
try {
try {
await deleteFileRL.consume(data.userId, 1);
} catch (e) {
io.emit("error", "Rate limited: file deletion. Please slow down.");
}
const file = sandboxFiles.fileData.find((f) => f.id === fileId);
if (!file) return;
await containers[data.sandboxId].filesystem.remove(path.join(dirName, fileId));
await containers[data.sandboxId].filesystem.remove(
path.join(dirName, fileId)
);
sandboxFiles.fileData = sandboxFiles.fileData.filter(
(f) => f.id !== fileId
);
@@ -308,8 +376,9 @@ io.on("connection", async (socket) => {
const newFiles = await getSandboxFiles(data.sandboxId);
callback(newFiles.files);
} catch (e) {
io.emit("rateLimit", "Rate limited: file deletion. Please slow down.");
} catch (e: any) {
console.error("Error deleting file:", e);
io.emit("error", `Error: file deletion. ${e.message ?? e}`);
}
});
@@ -318,11 +387,14 @@ io.on("connection", async (socket) => {
// });
socket.on("deleteFolder", async (folderId: string, callback) => {
try {
const files = await getFolder(folderId);
await Promise.all(
files.map(async (file) => {
await containers[data.sandboxId].filesystem.remove(path.join(dirName, file));
await containers[data.sandboxId].filesystem.remove(
path.join(dirName, file)
);
sandboxFiles.fileData = sandboxFiles.fileData.filter(
(f) => f.id !== file
@@ -335,9 +407,14 @@ io.on("connection", async (socket) => {
const newFiles = await getSandboxFiles(data.sandboxId);
callback(newFiles.files);
} catch (e: any) {
console.error("Error deleting folder:", e);
io.emit("error", `Error: folder deletion. ${e.message ?? e}`);
}
});
socket.on("createTerminal", async (id: string, callback) => {
try {
if (terminals[id] || Object.keys(terminals).length >= 4) {
return;
}
@@ -351,36 +428,53 @@ io.on("connection", async (socket) => {
size: { cols: 80, rows: 20 },
onExit: () => console.log("Terminal exited", id),
});
await terminals[id].sendData(`cd "${path.join(dirName, "projects", data.sandboxId)}"\r`)
await terminals[id].sendData(
`cd "${path.join(dirName, "projects", data.sandboxId)}"\r`
);
await terminals[id].sendData("export PS1='user> '\rclear\r");
console.log("Created terminal", id);
} catch (error) {
console.error("Error creating terminal ", id, error);
} catch (e: any) {
console.error(`Error creating terminal ${id}:`, e);
io.emit("error", `Error: terminal creation. ${e.message ?? e}`);
}
});
callback();
} catch (e: any) {
console.error(`Error creating terminal ${id}:`, e);
io.emit("error", `Error: terminal creation. ${e.message ?? e}`);
}
});
socket.on("resizeTerminal", (dimensions: { cols: number; rows: number }) => {
socket.on(
"resizeTerminal",
(dimensions: { cols: number; rows: number }) => {
try {
Object.values(terminals).forEach((t) => {
t.resize(dimensions);
});
});
} catch (e: any) {
console.error("Error resizing terminal:", e);
io.emit("error", `Error: terminal resizing. ${e.message ?? e}`);
}
}
);
socket.on("terminalData", (id: string, data: string) => {
try {
if (!terminals[id]) {
return;
}
try {
terminals[id].sendData(data);
} catch (e) {
console.log("Error writing to terminal", e);
} catch (e: any) {
console.error("Error writing to terminal:", e);
io.emit("error", `Error: writing to terminal. ${e.message ?? e}`);
}
});
socket.on("closeTerminal", async (id: string, callback) => {
try {
if (!terminals[id]) {
return;
}
@@ -389,6 +483,10 @@ io.on("connection", async (socket) => {
delete terminals[id];
callback();
} catch (e: any) {
console.error("Error closing terminal:", e);
io.emit("error", `Error: closing terminal. ${e.message ?? e}`);
}
});
socket.on(
@@ -400,6 +498,7 @@ io.on("connection", async (socket) => {
instructions: string,
callback
) => {
try {
const fetchPromise = fetch(
`${process.env.DATABASE_WORKER_URL}/api/sandbox/generate`,
{
@@ -433,10 +532,15 @@ io.on("connection", async (socket) => {
const json = await generateCodeResponse.json();
callback({ response: json.response, success: true });
} catch (e: any) {
console.error("Error generating code:", e);
io.emit("error", `Error: code generation. ${e.message ?? e}`);
}
}
);
socket.on("disconnect", async () => {
try {
if (data.isOwner) {
connections[data.sandboxId]--;
}
@@ -483,7 +587,15 @@ io.on("connection", async (socket) => {
// } else {
// console.log("number of sockets", sockets.length);
// }
} catch (e: any) {
console.log("Error disconnecting:", e);
io.emit("error", `Error: disconnecting. ${e.message ?? e}`);
}
});
} catch (e: any) {
console.error("Error connecting:", e);
io.emit("error", `Error: connection. ${e.message ?? e}`);
}
});
httpServer.listen(port, () => {

View File

@@ -63,14 +63,6 @@ const CodeEditor = dynamic(() => import("@/components/editor"), {
loading: () => <Loading />,
})
function getReactDefinitionFile() {
const reactDefinitionFile = fs.readFileSync(
"node_modules/@types/react/index.d.ts",
"utf8"
)
return reactDefinitionFile
}
export default async function CodePage({ params }: { params: { id: string } }) {
const user = await currentUser()
const sandboxId = params.id
@@ -94,8 +86,6 @@ export default async function CodePage({ params }: { params: { id: string } }) {
return notFound()
}
const reactDefinitionFile = getReactDefinitionFile()
return (
<div className="overflow-hidden overscroll-none w-screen flex flex-col h-screen bg-background">
<Room id={sandboxId}>
@@ -104,7 +94,6 @@ export default async function CodePage({ params }: { params: { id: string } }) {
<CodeEditor
userData={userData}
sandboxData={sandboxData}
reactDefinitionFile={reactDefinitionFile}
/>
</div>
</Room>

View File

@@ -35,18 +35,16 @@ import { ImperativePanelHandle } from "react-resizable-panels"
export default function CodeEditor({
userData,
sandboxData,
reactDefinitionFile,
}: {
userData: User
sandboxData: Sandbox
reactDefinitionFile: string
}) {
const socketRef = useRef<Socket | null>(null);
// Initialize socket connection if it doesn't exist
if (!socketRef.current) {
socketRef.current = io(
`http://localhost:${process.env.NEXT_PUBLIC_SERVER_PORT}?userId=${userData.id}&sandboxId=${sandboxData.id}`,
`${window.location.protocol}//${window.location.hostname}:${process.env.NEXT_PUBLIC_SERVER_PORT}?userId=${userData.id}&sandboxId=${sandboxData.id}`,
{
timeout: 2000,
}
@@ -105,6 +103,16 @@ export default function CodeEditor({
const [provider, setProvider] = useState<TypedLiveblocksProvider>()
const userInfo = useSelf((me) => me.info)
// Liveblocks providers map to prevent reinitializing providers
type ProviderData = {
provider: LiveblocksProvider<never, never, never, never>;
yDoc: Y.Doc;
yText: Y.Text;
binding?: MonacoBinding;
onSync: (isSynced: boolean) => void;
};
const providersMap = useRef(new Map<string, ProviderData>());
// Refs for libraries / features
const editorContainerRef = useRef<HTMLDivElement>(null)
const monacoRef = useRef<typeof monaco | null>(null)
@@ -332,10 +340,15 @@ export default function CodeEditor({
if (!editorRef || !tab || !model) return
const yDoc = new Y.Doc()
const yText = yDoc.getText(tab.id)
const yProvider: any = new LiveblocksProvider(room, yDoc)
let providerData: ProviderData;
// When a file is opened for the first time, create a new provider and store in providersMap.
if (!providersMap.current.has(tab.id)) {
const yDoc = new Y.Doc();
const yText = yDoc.getText(tab.id);
const yProvider = new LiveblocksProvider(room, yDoc);
// Inserts the file content into the editor once when the tab is changed.
const onSync = (isSynced: boolean) => {
if (isSynced) {
const text = yText.toString()
@@ -353,22 +366,51 @@ export default function CodeEditor({
yProvider.on("sync", onSync)
setProvider(yProvider)
// Save the provider to the map.
providerData = { provider: yProvider, yDoc, yText, onSync };
providersMap.current.set(tab.id, providerData);
} else {
// When a tab is opened that has been open before, reuse the existing provider.
providerData = providersMap.current.get(tab.id)!;
}
const binding = new MonacoBinding(
yText,
providerData.yText,
model,
new Set([editorRef]),
yProvider.awareness as Awareness
)
providerData.provider.awareness as unknown as Awareness
);
providerData.binding = binding;
setProvider(providerData.provider);
return () => {
yDoc.destroy()
yProvider.destroy()
binding.destroy()
yProvider.off("sync", onSync)
// Cleanup logic
if (binding) {
binding.destroy();
}
}, [editorRef, room, activeFileContent])
if (providerData.binding) {
providerData.binding = undefined;
}
};
}, [room, activeFileContent]);
// Added this effect to clean up when the component unmounts
useEffect(() => {
return () => {
// Clean up all providers when the component unmounts
providersMap.current.forEach((data) => {
if (data.binding) {
data.binding.destroy();
}
data.provider.disconnect();
data.yDoc.destroy();
});
providersMap.current.clear();
};
}, []);
// Connection/disconnection effect
useEffect(() => {
@@ -391,7 +433,7 @@ export default function CodeEditor({
setFiles(files)
}
const onRateLimit = (message: string) => {
const onError = (message: string) => {
toast.error(message)
}
@@ -413,7 +455,7 @@ export default function CodeEditor({
socketRef.current?.on("connect", onConnect)
socketRef.current?.on("disconnect", onDisconnect)
socketRef.current?.on("loaded", onLoadedEvent)
socketRef.current?.on("rateLimit", onRateLimit)
socketRef.current?.on("error", onError)
socketRef.current?.on("terminalResponse", onTerminalResponse)
socketRef.current?.on("disableAccess", onDisableAccess)
socketRef.current?.on("previewURL", setPreviewURL)
@@ -422,7 +464,7 @@ export default function CodeEditor({
socketRef.current?.off("connect", onConnect)
socketRef.current?.off("disconnect", onDisconnect)
socketRef.current?.off("loaded", onLoadedEvent)
socketRef.current?.off("rateLimit", onRateLimit)
socketRef.current?.off("error", onError)
socketRef.current?.off("terminalResponse", onTerminalResponse)
socketRef.current?.off("disableAccess", onDisableAccess)
socketRef.current?.off("previewURL", setPreviewURL)