Compare commits
12 Commits
fix-editor ... collaborat
Author | SHA1 | Date
---|---|---
 | 0fc535c8e4 |
 | 186d765682 |
 | 2994a4d291 |
 | 85ebfe4161 |
 | cfed8a225e |
 | e7dd3238df |
 | 530aa2ff53 |
 | 3c4850ee72 |
 | e3b2d882dd |
 | 0df074924f |
 | e5b320d1c5 |
 | b561f1e962 |
@ -29,9 +29,7 @@ npm run dev

### Backend

The backend consists of a primary Express and Socket.io server, and three Cloudflare Workers microservices for the D1 database, R2 storage, and Workers AI. The D1 database worker also contains a [service binding](https://developers.cloudflare.com/workers/runtime-apis/bindings/service-bindings/) to the R2 storage worker. Each open sandbox instantiates a secure Linux sandbox on E2B, which is used for the terminal and live preview.
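
As a rough illustration of that service binding, the D1 worker can call the R2 storage worker directly through the binding exposed on its environment. This is only a sketch: the binding name `STORAGE` and the pass-through handler are placeholders, not the repository's actual names.

```ts
// Sketch only: forwarding a request from the D1 database worker to the
// R2 storage worker via a service binding. `STORAGE` is a hypothetical
// binding name configured in the D1 worker's wrangler.toml.
export interface Env {
  STORAGE: Fetcher; // service binding to the R2 storage worker
}

export default {
  async fetch(request: Request, env: Env): Promise<Response> {
    // The bound worker is invoked directly, without a public round trip.
    return env.STORAGE.fetch(request);
  },
};
```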

You will need to make an account on [E2B](https://e2b.dev/) to get an API key.

#### Socket.io server
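
As a rough sketch of the flow (simplified, not the exact code; the full connection handler appears in the server diff below), the Socket.io server lazily creates one E2B sandbox per project and emits the preview URL to connected clients, using the same `Sandbox.create()` and `getHostname()` calls shown later in this diff.

```ts
import { Sandbox } from "e2b";
import { Server } from "socket.io";

const io = new Server(4000);
const containers: Record<string, Sandbox> = {};

io.on("connection", async (socket) => {
  const { sandboxId } = socket.handshake.query as { sandboxId: string };

  // Create the E2B sandbox for this project on first connection.
  if (!containers[sandboxId]) {
    containers[sandboxId] = await Sandbox.create();
  }

  // Port 5173 is the Vite dev server running inside the sandbox.
  socket.emit("previewURL", "https://" + containers[sandboxId].getHostname(5173));
});
```
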
@ -183,4 +181,3 @@ It should be in the form `category(scope or module): message` in your commit mes

- [Express](https://expressjs.com/)
- [Socket.io](https://socket.io/)
- [Drizzle ORM](https://orm.drizzle.team/)
- [E2B](https://e2b.dev/)

@ -111,16 +111,16 @@ export default {

    const { type, name, userId, visibility } = initSchema.parse(body)

    const userSandboxes = await db
      .select()
      .from(sandbox)
      .where(eq(sandbox.userId, userId))
      .all();

    if (userSandboxes.length >= 8) {
      return new Response("You reached the maximum # of sandboxes.", {
        status: 400,
      });
    }

    const sb = await db
      .insert(sandbox)

@ -31,8 +31,8 @@ export const sandbox = sqliteTable("sandbox", {

  createdAt: integer("createdAt", { mode: "timestamp_ms" }),
  userId: text("user_id")
    .notNull()
    .references(() => user.id, { onDelete: "cascade" }),
});

export type Sandbox = typeof sandbox.$inferSelect;

@ -5,4 +5,3 @@ PORT=4000

WORKERS_KEY=
DATABASE_WORKER_URL=
STORAGE_WORKER_URL=
E2B_API_KEY=

@ -49,15 +49,11 @@ const terminals: Record<string, Terminal> = {};

const dirName = "/home/user";

const moveFile = async (filesystem: FilesystemManager, filePath: string, newFilePath: string) => {
  const fileContents = await filesystem.readBytes(filePath);
  await filesystem.writeBytes(newFilePath, fileContents);
  await filesystem.remove(filePath);
};

io.use(async (socket, next) => {
  const handshakeSchema = z.object({

@ -113,489 +109,381 @@ io.use(async (socket, next) => {

const lockManager = new LockManager();

io.on("connection", async (socket) => {
  try {
    if (inactivityTimeout) clearTimeout(inactivityTimeout);

    const data = socket.data as {
      userId: string;
      sandboxId: string;
      isOwner: boolean;
    };

    if (data.isOwner) {
      isOwnerConnected = true;
      connections[data.sandboxId] = (connections[data.sandboxId] ?? 0) + 1;
    } else {
      if (!isOwnerConnected) {
        socket.emit("disableAccess", "The sandbox owner is not connected.");
        return;
      }
    }

    await lockManager.acquireLock(data.sandboxId, async () => {
      try {
        if (!containers[data.sandboxId]) {
          containers[data.sandboxId] = await Sandbox.create();
          console.log("Created container ", data.sandboxId);
          io.emit(
            "previewURL",
            "https://" + containers[data.sandboxId].getHostname(5173)
          );
        }
      } catch (e: any) {
        console.error(`Error creating container ${data.sandboxId}:`, e);
        io.emit("error", `Error: container creation. ${e.message ?? e}`);
      }
    });

    // Change the owner of the project directory to user
    const fixPermissions = async () => {
      await containers[data.sandboxId].process.startAndWait(
        `sudo chown -R user "${path.join(dirName, "projects", data.sandboxId)}"`
      );
    };

    const sandboxFiles = await getSandboxFiles(data.sandboxId);
    sandboxFiles.fileData.forEach(async (file) => {
      const filePath = path.join(dirName, file.id);
      await containers[data.sandboxId].filesystem.makeDir(
        path.dirname(filePath)
      );
      await containers[data.sandboxId].filesystem.write(filePath, file.data);
    });
    fixPermissions();

    socket.emit("loaded", sandboxFiles.files);

    socket.on("getFile", (fileId: string, callback) => {
      console.log(fileId);
      try {
        const file = sandboxFiles.fileData.find((f) => f.id === fileId);
        if (!file) return;

        callback(file.data);
      } catch (e: any) {
        console.error("Error getting file:", e);
        io.emit("error", `Error: get file. ${e.message ?? e}`);
      }
    });

    socket.on("getFolder", async (folderId: string, callback) => {
      try {
        const files = await getFolder(folderId);
        callback(files);
      } catch (e: any) {
        console.error("Error getting folder:", e);
        io.emit("error", `Error: get folder. ${e.message ?? e}`);
      }
    });

    // todo: send diffs + debounce for efficiency
    socket.on("saveFile", async (fileId: string, body: string) => {
      if (!fileId) return; // handles saving when no file is open

      try {
        if (Buffer.byteLength(body, "utf-8") > MAX_BODY_SIZE) {
          socket.emit(
            "error",
            "Error: file size too large. Please reduce the file size."
          );
          return;
        }
        try {
          await saveFileRL.consume(data.userId, 1);
          await saveFile(fileId, body);
        } catch (e) {
          io.emit("error", "Rate limited: file saving. Please slow down.");
          return;
        }

        const file = sandboxFiles.fileData.find((f) => f.id === fileId);
        if (!file) return;
        file.data = body;

        await containers[data.sandboxId].filesystem.write(
          path.join(dirName, file.id),
          body
        );
        fixPermissions();
      } catch (e: any) {
        console.error("Error saving file:", e);
        io.emit("error", `Error: file saving. ${e.message ?? e}`);
      }
    });

    socket.on(
      "moveFile",
      async (fileId: string, folderId: string, callback) => {
        try {
          const file = sandboxFiles.fileData.find((f) => f.id === fileId);
          if (!file) return;

          const parts = fileId.split("/");
          const newFileId = folderId + "/" + parts.pop();

          await moveFile(
            containers[data.sandboxId].filesystem,
            path.join(dirName, fileId),
            path.join(dirName, newFileId)
          );
          fixPermissions();

          file.id = newFileId;

          await renameFile(fileId, newFileId, file.data);
          const newFiles = await getSandboxFiles(data.sandboxId);
          callback(newFiles.files);
        } catch (e: any) {
          console.error("Error moving file:", e);
          io.emit("error", `Error: file moving. ${e.message ?? e}`);
        }
      }
    );

    socket.on("createFile", async (name: string, callback) => {
      try {
        const size: number = await getProjectSize(data.sandboxId);
        // limit is 200mb
        if (size > 200 * 1024 * 1024) {
          io.emit(
            "error",
            "Rate limited: project size exceeded. Please delete some files."
          );
          callback({ success: false });
          return;
        }

        try {
          await createFileRL.consume(data.userId, 1);
        } catch (e) {
          io.emit("error", "Rate limited: file creation. Please slow down.");
          return;
        }

        const id = `projects/${data.sandboxId}/${name}`;

        await containers[data.sandboxId].filesystem.write(
          path.join(dirName, id),
          ""
        );
        fixPermissions();

        sandboxFiles.files.push({
          id,
          name,
          type: "file",
        });

        sandboxFiles.fileData.push({
          id,
          data: "",
        });

        await createFile(id);

        callback({ success: true });
      } catch (e: any) {
        console.error("Error creating file:", e);
        io.emit("error", `Error: file creation. ${e.message ?? e}`);
      }
    });

    socket.on("createFolder", async (name: string, callback) => {
      try {
        try {
          await createFolderRL.consume(data.userId, 1);
        } catch (e) {
          io.emit("error", "Rate limited: folder creation. Please slow down.");
          return;
        }

        const id = `projects/${data.sandboxId}/${name}`;

        await containers[data.sandboxId].filesystem.makeDir(
          path.join(dirName, id)
        );

        callback();
      } catch (e: any) {
        console.error("Error creating folder:", e);
        io.emit("error", `Error: folder creation. ${e.message ?? e}`);
      }
    });

    socket.on("renameFile", async (fileId: string, newName: string) => {
      try {
        try {
          await renameFileRL.consume(data.userId, 1);
        } catch (e) {
          io.emit("error", "Rate limited: file renaming. Please slow down.");
          return;
        }

        const file = sandboxFiles.fileData.find((f) => f.id === fileId);
        if (!file) return;
        file.id = newName;

        const parts = fileId.split("/");
        const newFileId =
          parts.slice(0, parts.length - 1).join("/") + "/" + newName;

        await moveFile(
          containers[data.sandboxId].filesystem,
          path.join(dirName, fileId),
          path.join(dirName, newFileId)
        );
        fixPermissions();
        await renameFile(fileId, newFileId, file.data);
      } catch (e: any) {
        console.error("Error renaming folder:", e);
        io.emit("error", `Error: folder renaming. ${e.message ?? e}`);
      }
    });

    socket.on("deleteFile", async (fileId: string, callback) => {
      try {
        try {
          await deleteFileRL.consume(data.userId, 1);
        } catch (e) {
          io.emit("error", "Rate limited: file deletion. Please slow down.");
        }

        const file = sandboxFiles.fileData.find((f) => f.id === fileId);
        if (!file) return;

        await containers[data.sandboxId].filesystem.remove(
          path.join(dirName, fileId)
        );
        sandboxFiles.fileData = sandboxFiles.fileData.filter(
          (f) => f.id !== fileId
        );

        await deleteFile(fileId);

        const newFiles = await getSandboxFiles(data.sandboxId);
        callback(newFiles.files);
      } catch (e: any) {
        console.error("Error deleting file:", e);
        io.emit("error", `Error: file deletion. ${e.message ?? e}`);
      }
    });

    // todo
    // socket.on("renameFolder", async (folderId: string, newName: string) => {
    // });

    socket.on("deleteFolder", async (folderId: string, callback) => {
      try {
        const files = await getFolder(folderId);

        await Promise.all(
          files.map(async (file) => {
            await containers[data.sandboxId].filesystem.remove(
              path.join(dirName, file)
            );

            sandboxFiles.fileData = sandboxFiles.fileData.filter(
              (f) => f.id !== file
            );

            await deleteFile(file);
          })
        );

        const newFiles = await getSandboxFiles(data.sandboxId);

        callback(newFiles.files);
      } catch (e: any) {
        console.error("Error deleting folder:", e);
        io.emit("error", `Error: folder deletion. ${e.message ?? e}`);
      }
    });

    socket.on("createTerminal", async (id: string, callback) => {
      try {
        if (terminals[id] || Object.keys(terminals).length >= 4) {
          return;
        }

        await lockManager.acquireLock(data.sandboxId, async () => {
          try {
            terminals[id] = await containers[data.sandboxId].terminal.start({
              onData: (data: string) => {
                io.emit("terminalResponse", { id, data });
              },
              size: { cols: 80, rows: 20 },
              onExit: () => console.log("Terminal exited", id),
            });
            await terminals[id].sendData(
              `cd "${path.join(dirName, "projects", data.sandboxId)}"\r`
            );
            await terminals[id].sendData("export PS1='user> '\rclear\r");
            console.log("Created terminal", id);
          } catch (e: any) {
            console.error(`Error creating terminal ${id}:`, e);
            io.emit("error", `Error: terminal creation. ${e.message ?? e}`);
          }
        });

        callback();
      } catch (e: any) {
        console.error(`Error creating terminal ${id}:`, e);
        io.emit("error", `Error: terminal creation. ${e.message ?? e}`);
      }
    });

    socket.on(
      "resizeTerminal",
      (dimensions: { cols: number; rows: number }) => {
        try {
          Object.values(terminals).forEach((t) => {
            t.resize(dimensions);
          });
        } catch (e: any) {
          console.error("Error resizing terminal:", e);
          io.emit("error", `Error: terminal resizing. ${e.message ?? e}`);
        }
      }
    );

    socket.on("terminalData", (id: string, data: string) => {
      try {
        if (!terminals[id]) {
          return;
        }

        terminals[id].sendData(data);
      } catch (e: any) {
        console.error("Error writing to terminal:", e);
        io.emit("error", `Error: writing to terminal. ${e.message ?? e}`);
      }
    });

    socket.on("closeTerminal", async (id: string, callback) => {
      try {
        if (!terminals[id]) {
          return;
        }

        await terminals[id].kill();
        delete terminals[id];

        callback();
      } catch (e: any) {
        console.error("Error closing terminal:", e);
        io.emit("error", `Error: closing terminal. ${e.message ?? e}`);
      }
    });

    socket.on(
      "generateCode",
      async (
        fileName: string,
        code: string,
        line: number,
        instructions: string,
        callback
      ) => {
        try {
          const fetchPromise = fetch(
            `${process.env.DATABASE_WORKER_URL}/api/sandbox/generate`,
            {
              method: "POST",
              headers: {
                "Content-Type": "application/json",
                Authorization: `${process.env.WORKERS_KEY}`,
              },
              body: JSON.stringify({
                userId: data.userId,
              }),
            }
          );

          // Generate code from cloudflare workers AI
          const generateCodePromise = fetch(
            `${process.env.AI_WORKER_URL}/api?fileName=${fileName}&code=${code}&line=${line}&instructions=${instructions}`,
            {
              headers: {
                "Content-Type": "application/json",
                Authorization: `${process.env.CF_AI_KEY}`,
              },
            }
          );

          const [fetchResponse, generateCodeResponse] = await Promise.all([
            fetchPromise,
            generateCodePromise,
          ]);

          const json = await generateCodeResponse.json();

          callback({ response: json.response, success: true });
        } catch (e: any) {
          console.error("Error generating code:", e);
          io.emit("error", `Error: code generation. ${e.message ?? e}`);
        }
      }
    );

    socket.on("disconnect", async () => {
      try {
        if (data.isOwner) {
          connections[data.sandboxId]--;
        }

        if (data.isOwner && connections[data.sandboxId] <= 0) {
          await Promise.all(
            Object.entries(terminals).map(async ([key, terminal]) => {
              await terminal.kill();
              delete terminals[key];
            })
          );

          await lockManager.acquireLock(data.sandboxId, async () => {
            try {
              if (containers[data.sandboxId]) {
                await containers[data.sandboxId].close();
                delete containers[data.sandboxId];
                console.log("Closed container", data.sandboxId);
              }
            } catch (error) {
              console.error("Error closing container ", data.sandboxId, error);
            }
          });

          socket.broadcast.emit(
            "disableAccess",
            "The sandbox owner has disconnected."
          );
        }

        // const sockets = await io.fetchSockets();
        // if (inactivityTimeout) {
        //   clearTimeout(inactivityTimeout);
        // }
        // if (sockets.length === 0) {
        //   console.log("STARTING TIMER");
        //   inactivityTimeout = setTimeout(() => {
        //     io.fetchSockets().then(async (sockets) => {
        //       if (sockets.length === 0) {
        //         console.log("Server stopped", res);
        //       }
        //     });
        //   }, 20000);
        // } else {
        //   console.log("number of sockets", sockets.length);
        // }
      } catch (e: any) {
        console.log("Error disconnecting:", e);
        io.emit("error", `Error: disconnecting. ${e.message ?? e}`);
      }
    });
  } catch (e: any) {
    console.error("Error connecting:", e);
    io.emit("error", `Error: connection. ${e.message ?? e}`);
  }
});

httpServer.listen(port, () => {

@ -63,6 +63,14 @@ const CodeEditor = dynamic(() => import("@/components/editor"), {

  loading: () => <Loading />,
})

function getReactDefinitionFile() {
  const reactDefinitionFile = fs.readFileSync(
    "node_modules/@types/react/index.d.ts",
    "utf8"
  )
  return reactDefinitionFile
}

export default async function CodePage({ params }: { params: { id: string } }) {
  const user = await currentUser()
  const sandboxId = params.id

@ -86,6 +94,8 @@ export default async function CodePage({ params }: { params: { id: string } }) {

    return notFound()
  }

  const reactDefinitionFile = getReactDefinitionFile()

  return (
    <div className="overflow-hidden overscroll-none w-screen flex flex-col h-screen bg-background">
      <Room id={sandboxId}>

@ -94,6 +104,7 @@ export default async function CodePage({ params }: { params: { id: string } }) {

        <CodeEditor
          userData={userData}
          sandboxData={sandboxData}
          reactDefinitionFile={reactDefinitionFile}
        />
      </div>
    </Room>

@ -35,16 +35,18 @@ import { ImperativePanelHandle } from "react-resizable-panels"

export default function CodeEditor({
  userData,
  sandboxData,
  reactDefinitionFile,
}: {
  userData: User
  sandboxData: Sandbox
  reactDefinitionFile: string
}) {
  const socketRef = useRef<Socket | null>(null);

  // Initialize socket connection if it doesn't exist
  if (!socketRef.current) {
    socketRef.current = io(
      `${window.location.protocol}//${window.location.hostname}:${process.env.NEXT_PUBLIC_SERVER_PORT}?userId=${userData.id}&sandboxId=${sandboxData.id}`,
      {
        timeout: 2000,
      }

@ -103,16 +105,6 @@ export default function CodeEditor({

  const [provider, setProvider] = useState<TypedLiveblocksProvider>()
  const userInfo = useSelf((me) => me.info)

  // Refs for libraries / features
  const editorContainerRef = useRef<HTMLDivElement>(null)
  const monacoRef = useRef<typeof monaco | null>(null)

@ -340,77 +332,43 @@ export default function CodeEditor({

    if (!editorRef || !tab || !model) return

    const yDoc = new Y.Doc()
    const yText = yDoc.getText(tab.id)
    const yProvider: any = new LiveblocksProvider(room, yDoc)

    // Inserts the file content into the editor once when the tab is changed.
    const onSync = (isSynced: boolean) => {
      if (isSynced) {
        const text = yText.toString()
        if (text === "") {
          if (activeFileContent) {
            yText.insert(0, activeFileContent)
          } else {
            setTimeout(() => {
              yText.insert(0, editorRef.getValue())
            }, 0)
          }
        }
      }
    }

    yProvider.on("sync", onSync)

    setProvider(yProvider)

    const binding = new MonacoBinding(
      yText,
      model,
      new Set([editorRef]),
      yProvider.awareness as Awareness
    )

    return () => {
      yDoc.destroy()
      yProvider.destroy()
      binding.destroy()
      yProvider.off("sync", onSync)
    }
  }, [editorRef, room, activeFileContent])

  // Connection/disconnection effect
  useEffect(() => {

@ -433,7 +391,7 @@ export default function CodeEditor({

    setFiles(files)
  }

  const onError = (message: string) => {
    toast.error(message)
  }

@ -455,7 +413,7 @@ export default function CodeEditor({

    socketRef.current?.on("connect", onConnect)
    socketRef.current?.on("disconnect", onDisconnect)
    socketRef.current?.on("loaded", onLoadedEvent)
    socketRef.current?.on("error", onError)
    socketRef.current?.on("terminalResponse", onTerminalResponse)
    socketRef.current?.on("disableAccess", onDisableAccess)
    socketRef.current?.on("previewURL", setPreviewURL)

@ -464,7 +422,7 @@ export default function CodeEditor({

      socketRef.current?.off("connect", onConnect)
      socketRef.current?.off("disconnect", onDisconnect)
      socketRef.current?.off("loaded", onLoadedEvent)
      socketRef.current?.off("error", onError)
      socketRef.current?.off("terminalResponse", onTerminalResponse)
      socketRef.current?.off("disableAccess", onDisableAccess)
      socketRef.current?.off("previewURL", setPreviewURL)
