diff --git a/LICENSE b/LICENSE index be6bf64..fc2617a 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,7 @@ MIT License Copyright (c) 2024 Ishaan Dey +Copyright (c) 2024 GitWit, Inc. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/backend/server/.prettierrc b/backend/server/.prettierrc new file mode 100644 index 0000000..c2e595e --- /dev/null +++ b/backend/server/.prettierrc @@ -0,0 +1,6 @@ +{ + "tabWidth": 2, + "semi": false, + "singleQuote": false, + "insertFinalNewline": true +} \ No newline at end of file diff --git a/backend/server/nodemon.json b/backend/server/nodemon.json index 5554d0f..c71b99b 100644 --- a/backend/server/nodemon.json +++ b/backend/server/nodemon.json @@ -1,5 +1,7 @@ { - "watch": ["src"], + "watch": [ + "src" + ], "ext": "ts", "exec": "concurrently \"npx tsc --watch\" \"ts-node src/index.ts\"" } \ No newline at end of file diff --git a/backend/server/package.json b/backend/server/package.json index 40c9c18..435cd1b 100644 --- a/backend/server/package.json +++ b/backend/server/package.json @@ -31,4 +31,4 @@ "ts-node": "^10.9.2", "typescript": "^5.4.5" } -} +} \ No newline at end of file diff --git a/backend/server/src/AIWorker.ts b/backend/server/src/AIWorker.ts new file mode 100644 index 0000000..f5c06e9 --- /dev/null +++ b/backend/server/src/AIWorker.ts @@ -0,0 +1,90 @@ +// AIWorker class for handling AI-related operations +export class AIWorker { + private aiWorkerUrl: string + private cfAiKey: string + private databaseWorkerUrl: string + private workersKey: string + + // Constructor to initialize AIWorker with necessary URLs and keys + constructor( + aiWorkerUrl: string, + cfAiKey: string, + databaseWorkerUrl: string, + workersKey: string + ) { + this.aiWorkerUrl = aiWorkerUrl + this.cfAiKey = cfAiKey + this.databaseWorkerUrl = databaseWorkerUrl + this.workersKey = workersKey + } + + // Method to generate code based on user input + async generateCode( + userId: string, + fileName: string, + code: string, + line: number, + instructions: string + ): Promise<{ response: string; success: boolean }> { + try { + const fetchPromise = fetch( + `${process.env.DATABASE_WORKER_URL}/api/sandbox/generate`, + { + method: "POST", + headers: { + "Content-Type": "application/json", + Authorization: `${process.env.WORKERS_KEY}`, + }, + body: JSON.stringify({ + userId: userId, + }), + } + ) + + // Generate code from cloudflare workers AI + const generateCodePromise = fetch( + `${process.env.AI_WORKER_URL}/api?fileName=${encodeURIComponent( + fileName + )}&code=${encodeURIComponent(code)}&line=${encodeURIComponent( + line + )}&instructions=${encodeURIComponent(instructions)}`, + { + headers: { + "Content-Type": "application/json", + Authorization: `${process.env.CF_AI_KEY}`, + }, + } + ) + + const [fetchResponse, generateCodeResponse] = await Promise.all([ + fetchPromise, + generateCodePromise, + ]) + + if (!generateCodeResponse.ok) { + throw new Error(`HTTP error! 
status: ${generateCodeResponse.status}`) + } + + const reader = generateCodeResponse.body?.getReader() + const decoder = new TextDecoder() + let result = "" + + if (reader) { + while (true) { + const { done, value } = await reader.read() + if (done) break + result += decoder.decode(value, { stream: true }) + } + } + + // The result should now contain only the modified code + return { response: result.trim(), success: true } + } catch (e: any) { + console.error("Error generating code:", e) + return { + response: "Error generating code. Please try again.", + success: false, + } + } + } +} diff --git a/backend/server/src/DokkuClient.ts b/backend/server/src/DokkuClient.ts index fd0adcd..38c559d 100644 --- a/backend/server/src/DokkuClient.ts +++ b/backend/server/src/DokkuClient.ts @@ -1,37 +1,40 @@ -import { SSHSocketClient, SSHConfig } from "./SSHSocketClient" +import { SSHConfig, SSHSocketClient } from "./SSHSocketClient" +// Interface for the response structure from Dokku commands export interface DokkuResponse { - ok: boolean; - output: string; + ok: boolean + output: string } +// DokkuClient class extends SSHSocketClient to interact with Dokku via SSH export class DokkuClient extends SSHSocketClient { - constructor(config: SSHConfig) { - super( - config, - "/var/run/dokku-daemon/dokku-daemon.sock" - ) + // Initialize with Dokku daemon socket path + super(config, "/var/run/dokku-daemon/dokku-daemon.sock") } + // Send a command to Dokku and parse the response async sendCommand(command: string): Promise { try { - const response = await this.sendData(command); + const response = await this.sendData(command) if (typeof response !== "string") { - throw new Error("Received data is not a string"); + throw new Error("Received data is not a string") } - return JSON.parse(response); + // Parse the JSON response from Dokku + return JSON.parse(response) } catch (error: any) { - throw new Error(`Failed to send command: ${error.message}`); + throw new Error(`Failed to send command: ${error.message}`) } } + // List all deployed Dokku apps async listApps(): Promise { - const response = await this.sendCommand("apps:list"); - return response.output.split("\n").slice(1); // Split by newline and ignore the first line (header) + const response = await this.sendCommand("apps:list") + // Split the output by newline and remove the header + return response.output.split("\n").slice(1) } } -export { SSHConfig }; \ No newline at end of file +export { SSHConfig } diff --git a/backend/server/src/FileManager.ts b/backend/server/src/FileManager.ts new file mode 100644 index 0000000..278d060 --- /dev/null +++ b/backend/server/src/FileManager.ts @@ -0,0 +1,508 @@ +import { FilesystemEvent, Sandbox, WatchHandle } from "e2b" +import path from "path" +import RemoteFileStorage from "./RemoteFileStorage" +import { MAX_BODY_SIZE } from "./ratelimit" +import { TFile, TFileData, TFolder } from "./types" + +// Define the structure for sandbox files +export type SandboxFiles = { + files: (TFolder | TFile)[] + fileData: TFileData[] +} + +// Convert list of paths to the hierarchical file structure used by the editor +function generateFileStructure(paths: string[]): (TFolder | TFile)[] { + const root: TFolder = { id: "/", type: "folder", name: "/", children: [] } + + paths.forEach((path) => { + const parts = path.split("/") + let current: TFolder = root + + for (let i = 0; i < parts.length; i++) { + const part = parts[i] + const isFile = i === parts.length - 1 && part.length + const existing = current.children.find((child) => child.name 
=== part) + + if (existing) { + if (!isFile) { + current = existing as TFolder + } + } else { + if (isFile) { + const file: TFile = { id: `/${parts.join("/")}`, type: "file", name: part } + current.children.push(file) + } else { + const folder: TFolder = { + id: `/${parts.slice(0, i + 1).join("/")}`, + type: "folder", + name: part, + children: [], + } + current.children.push(folder) + current = folder + } + } + } + }) + + return root.children +} + +// FileManager class to handle file operations in a sandbox +export class FileManager { + private sandboxId: string + private sandbox: Sandbox + public sandboxFiles: SandboxFiles + private fileWatchers: WatchHandle[] = [] + private dirName = "/home/user/project" + private refreshFileList: (files: SandboxFiles) => void + + // Constructor to initialize the FileManager + constructor( + sandboxId: string, + sandbox: Sandbox, + refreshFileList: (files: SandboxFiles) => void + ) { + this.sandboxId = sandboxId + this.sandbox = sandbox + this.sandboxFiles = { files: [], fileData: [] } + this.refreshFileList = refreshFileList + } + + // Fetch file data from list of paths + private async generateFileData(paths: string[]): Promise { + const fileData: TFileData[] = [] + + for (const path of paths) { + const parts = path.split("/") + const isFile = parts.length > 0 && parts[parts.length - 1].length > 0 + + if (isFile) { + const fileId = `/${parts.join("/")}` + const data = await RemoteFileStorage.fetchFileContent(`projects/${this.sandboxId}${fileId}`) + fileData.push({ id: fileId, data }) + } + } + + return fileData + } + + // Convert local file path to remote path + private getRemoteFileId(localId: string): string { + return `projects/${this.sandboxId}${localId}` + } + + // Convert remote file path to local file path + private getLocalFileId(remoteId: string): string | undefined { + const allParts = remoteId.split("/") + if (allParts[1] !== this.sandboxId) return undefined; + return allParts.slice(2).join("/") + } + + // Convert remote file paths to local file paths + private getLocalFileIds(remoteIds: string[]): string[] { + return remoteIds + .map(this.getLocalFileId.bind(this)) + .filter((id) => id !== undefined); + } + + // Download files from remote storage + private async updateFileData(): Promise { + const remotePaths = await RemoteFileStorage.getSandboxPaths(this.sandboxId) + const localPaths = this.getLocalFileIds(remotePaths) + this.sandboxFiles.fileData = await this.generateFileData(localPaths) + return this.sandboxFiles.fileData + } + + // Update file structure + private async updateFileStructure(): Promise<(TFolder | TFile)[]> { + const remotePaths = await RemoteFileStorage.getSandboxPaths(this.sandboxId) + const localPaths = this.getLocalFileIds(remotePaths) + this.sandboxFiles.files = generateFileStructure(localPaths) + return this.sandboxFiles.files + } + + // Initialize the FileManager + async initialize() { + + // Download files from remote file storage + await this.updateFileStructure() + await this.updateFileData() + + // Copy all files from the project to the container + const promises = this.sandboxFiles.fileData.map(async (file) => { + try { + const filePath = path.join(this.dirName, file.id) + const parentDirectory = path.dirname(filePath) + if (!(await this.sandbox.files.exists(parentDirectory))) { + await this.sandbox.files.makeDir(parentDirectory) + } + await this.sandbox.files.write(filePath, file.data) + } catch (e: any) { + console.log("Failed to create file: " + e) + } + }) + await Promise.all(promises) + + // Make the logged in user 
the owner of all project files + this.fixPermissions() + + await this.watchDirectory(this.dirName) + await this.watchSubdirectories(this.dirName) + } + + // Check if the given path is a directory + private async isDirectory(directoryPath: string): Promise { + try { + const result = await this.sandbox.commands.run( + `[ -d "${directoryPath}" ] && echo "true" || echo "false"` + ) + return result.stdout.trim() === "true" + } catch (e: any) { + console.log("Failed to check if directory: " + e) + return false + } + } + + // Change the owner of the project directory to user + private async fixPermissions() { + try { + await this.sandbox.commands.run( + `sudo chown -R user "${this.dirName}"` + ) + } catch (e: any) { + console.log("Failed to fix permissions: " + e) + } + } + + // Watch a directory for changes + async watchDirectory(directory: string): Promise { + try { + const handle = await this.sandbox.files.watch( + directory, + async (event: FilesystemEvent) => { + try { + function removeDirName(path: string, dirName: string) { + return path.startsWith(dirName) + ? path.slice(dirName.length) + : path + } + + // This is the absolute file path in the container + const containerFilePath = path.posix.join(directory, event.name) + // This is the file path relative to the project directory + const sandboxFilePath = removeDirName(containerFilePath, this.dirName) + // This is the directory being watched relative to the project directory + const sandboxDirectory = removeDirName(directory, this.dirName) + + // Helper function to find a folder by id + function findFolderById( + files: (TFolder | TFile)[], + folderId: string + ) { + return files.find( + (file: TFolder | TFile) => + file.type === "folder" && file.id === folderId + ) + } + + // Handle file/directory creation event + if (event.type === "create") { + const folder = findFolderById( + this.sandboxFiles.files, + sandboxDirectory + ) as TFolder + const isDir = await this.isDirectory(containerFilePath) + + const newItem = isDir + ? ({ + id: sandboxFilePath, + name: event.name, + type: "folder", + children: [], + } as TFolder) + : ({ + id: sandboxFilePath, + name: event.name, + type: "file", + } as TFile) + + if (folder) { + // If the folder exists, add the new item (file/folder) as a child + folder.children.push(newItem) + } else { + // If folder doesn't exist, add the new item to the root + this.sandboxFiles.files.push(newItem) + } + + if (!isDir) { + const fileData = await this.sandbox.files.read( + containerFilePath + ) + const fileContents = + typeof fileData === "string" ? 
fileData : "" + this.sandboxFiles.fileData.push({ + id: sandboxFilePath, + data: fileContents, + }) + } + + console.log(`Create ${sandboxFilePath}`) + } + + // Handle file/directory removal or rename event + else if (event.type === "remove" || event.type == "rename") { + const folder = findFolderById( + this.sandboxFiles.files, + sandboxDirectory + ) as TFolder + const isDir = await this.isDirectory(containerFilePath) + + const isFileMatch = (file: TFolder | TFile | TFileData) => + file.id === sandboxFilePath || + file.id.startsWith(containerFilePath + "/") + + if (folder) { + // Remove item from its parent folder + folder.children = folder.children.filter( + (file: TFolder | TFile) => !isFileMatch(file) + ) + } else { + // Remove from the root if it's not inside a folder + this.sandboxFiles.files = this.sandboxFiles.files.filter( + (file: TFolder | TFile) => !isFileMatch(file) + ) + } + + // Also remove any corresponding file data + this.sandboxFiles.fileData = this.sandboxFiles.fileData.filter( + (file: TFileData) => !isFileMatch(file) + ) + + console.log(`Removed: ${sandboxFilePath}`) + } + + // Handle file write event + else if (event.type === "write") { + const folder = findFolderById( + this.sandboxFiles.files, + sandboxDirectory + ) as TFolder + const fileToWrite = this.sandboxFiles.fileData.find( + (file) => file.id === sandboxFilePath + ) + + if (fileToWrite) { + fileToWrite.data = await this.sandbox.files.read( + containerFilePath + ) + console.log(`Write to ${sandboxFilePath}`) + } else { + // If the file is part of a folder structure, locate it and update its data + const fileInFolder = folder?.children.find( + (file) => file.id === sandboxFilePath + ) + if (fileInFolder) { + const fileData = await this.sandbox.files.read( + containerFilePath + ) + const fileContents = + typeof fileData === "string" ? fileData : "" + this.sandboxFiles.fileData.push({ + id: sandboxFilePath, + data: fileContents, + }) + console.log(`Write to ${sandboxFilePath}`) + } + } + } + + // Tell the client to reload the file list + this.refreshFileList(this.sandboxFiles) + } catch (error) { + console.error( + `Error handling ${event.type} event for ${event.name}:`, + error + ) + } + }, + { timeout: 0 } + ) + this.fileWatchers.push(handle) + return handle + } catch (error) { + console.error(`Error watching filesystem:`, error) + } + } + + // Watch subdirectories recursively + async watchSubdirectories(directory: string) { + const dirContent = await this.sandbox.files.list(directory) + await Promise.all( + dirContent.map(async (item) => { + if (item.type === "dir") { + console.log("Watching " + item.path) + await this.watchDirectory(item.path) + } + }) + ) + } + + // Get file content + async getFile(fileId: string): Promise { + const file = this.sandboxFiles.fileData.find((f) => f.id === fileId) + return file?.data + } + + // Get folder content + async getFolder(folderId: string): Promise { + const remotePaths = await RemoteFileStorage.getFolder(this.getRemoteFileId(folderId)) + return this.getLocalFileIds(remotePaths) + } + + // Save file content + async saveFile(fileId: string, body: string): Promise { + if (!fileId) return // handles saving when no file is open + + if (Buffer.byteLength(body, "utf-8") > MAX_BODY_SIZE) { + throw new Error("File size too large. 
Please reduce the file size.") + } + await RemoteFileStorage.saveFile(this.getRemoteFileId(fileId), body) + const file = this.sandboxFiles.fileData.find((f) => f.id === fileId) + if (!file) return + file.data = body + + await this.sandbox.files.write(path.posix.join(this.dirName, file.id), body) + this.fixPermissions() + } + + // Move a file to a different folder + async moveFile( + fileId: string, + folderId: string + ): Promise<(TFolder | TFile)[]> { + const fileData = this.sandboxFiles.fileData.find((f) => f.id === fileId) + const file = this.sandboxFiles.files.find((f) => f.id === fileId) + if (!fileData || !file) return this.sandboxFiles.files + + const parts = fileId.split("/") + const newFileId = folderId + "/" + parts.pop() + + await this.moveFileInContainer(fileId, newFileId) + + await this.fixPermissions() + + fileData.id = newFileId + file.id = newFileId + + await RemoteFileStorage.renameFile(this.getRemoteFileId(fileId), this.getRemoteFileId(newFileId), fileData.data) + return this.updateFileStructure() + } + + // Move a file within the container + private async moveFileInContainer(oldPath: string, newPath: string) { + try { + const fileContents = await this.sandbox.files.read( + path.posix.join(this.dirName, oldPath) + ) + await this.sandbox.files.write( + path.posix.join(this.dirName, newPath), + fileContents + ) + await this.sandbox.files.remove(path.posix.join(this.dirName, oldPath)) + } catch (e) { + console.error(`Error moving file from ${oldPath} to ${newPath}:`, e) + } + } + + // Create a new file + async createFile(name: string): Promise { + const size: number = await RemoteFileStorage.getProjectSize(this.sandboxId) + if (size > 200 * 1024 * 1024) { + throw new Error("Project size exceeded. Please delete some files.") + } + + const id = `/${name}` + + await this.sandbox.files.write(path.posix.join(this.dirName, id), "") + await this.fixPermissions() + + this.sandboxFiles.files.push({ + id, + name, + type: "file", + }) + + this.sandboxFiles.fileData.push({ + id, + data: "", + }) + + await RemoteFileStorage.createFile(this.getRemoteFileId(id)) + + return true + } + + // Create a new folder + async createFolder(name: string): Promise { + const id = `/${name}` + await this.sandbox.files.makeDir(path.posix.join(this.dirName, id)) + } + + // Rename a file + async renameFile(fileId: string, newName: string): Promise { + const fileData = this.sandboxFiles.fileData.find((f) => f.id === fileId) + const file = this.sandboxFiles.files.find((f) => f.id === fileId) + if (!fileData || !file) return + + const parts = fileId.split("/") + const newFileId = parts.slice(0, parts.length - 1).join("/") + "/" + newName + + await this.moveFileInContainer(fileId, newFileId) + await this.fixPermissions() + await RemoteFileStorage.renameFile(this.getRemoteFileId(fileId), this.getRemoteFileId(newFileId), fileData.data) + + fileData.id = newFileId + file.id = newFileId + } + + // Delete a file + async deleteFile(fileId: string): Promise<(TFolder | TFile)[]> { + const file = this.sandboxFiles.fileData.find((f) => f.id === fileId) + if (!file) return this.sandboxFiles.files + + await this.sandbox.files.remove(path.posix.join(this.dirName, fileId)) + this.sandboxFiles.fileData = this.sandboxFiles.fileData.filter( + (f) => f.id !== fileId + ) + + await RemoteFileStorage.deleteFile(this.getRemoteFileId(fileId)) + return this.updateFileStructure() + } + + // Delete a folder + async deleteFolder(folderId: string): Promise<(TFolder | TFile)[]> { + const files = await 
RemoteFileStorage.getFolder(this.getRemoteFileId(folderId)) + + await Promise.all( + files.map(async (file) => { + await this.sandbox.files.remove(path.posix.join(this.dirName, file)) + this.sandboxFiles.fileData = this.sandboxFiles.fileData.filter( + (f) => f.id !== file + ) + await RemoteFileStorage.deleteFile(this.getRemoteFileId(file)) + }) + ) + + return this.updateFileStructure() + } + + // Close all file watchers + async closeWatchers() { + await Promise.all( + this.fileWatchers.map(async (handle: WatchHandle) => { + await handle.close() + }) + ) + } +} diff --git a/backend/server/src/RemoteFileStorage.ts b/backend/server/src/RemoteFileStorage.ts new file mode 100644 index 0000000..e5ed4b2 --- /dev/null +++ b/backend/server/src/RemoteFileStorage.ts @@ -0,0 +1,117 @@ +import * as dotenv from "dotenv" +import { R2Files } from "./types" + +dotenv.config() + +export const RemoteFileStorage = { + getSandboxPaths: async (id: string) => { + const res = await fetch( + `${process.env.STORAGE_WORKER_URL}/api?sandboxId=${id}`, + { + headers: { + Authorization: `${process.env.WORKERS_KEY}`, + }, + } + ) + const data: R2Files = await res.json() + + return data.objects.map((obj) => obj.key) + }, + + getFolder: async (folderId: string) => { + const res = await fetch( + `${process.env.STORAGE_WORKER_URL}/api?folderId=${folderId}`, + { + headers: { + Authorization: `${process.env.WORKERS_KEY}`, + }, + } + ) + const data: R2Files = await res.json() + + return data.objects.map((obj) => obj.key) + }, + + fetchFileContent: async (fileId: string): Promise => { + try { + const fileRes = await fetch( + `${process.env.STORAGE_WORKER_URL}/api?fileId=${fileId}`, + { + headers: { + Authorization: `${process.env.WORKERS_KEY}`, + }, + } + ) + return await fileRes.text() + } catch (error) { + console.error("ERROR fetching file:", error) + return "" + } + }, + + createFile: async (fileId: string) => { + const res = await fetch(`${process.env.STORAGE_WORKER_URL}/api`, { + method: "POST", + headers: { + "Content-Type": "application/json", + Authorization: `${process.env.WORKERS_KEY}`, + }, + body: JSON.stringify({ fileId }), + }) + return res.ok + }, + + renameFile: async ( + fileId: string, + newFileId: string, + data: string + ) => { + const res = await fetch(`${process.env.STORAGE_WORKER_URL}/api/rename`, { + method: "POST", + headers: { + "Content-Type": "application/json", + Authorization: `${process.env.WORKERS_KEY}`, + }, + body: JSON.stringify({ fileId, newFileId, data }), + }) + return res.ok + }, + + saveFile: async (fileId: string, data: string) => { + const res = await fetch(`${process.env.STORAGE_WORKER_URL}/api/save`, { + method: "POST", + headers: { + "Content-Type": "application/json", + Authorization: `${process.env.WORKERS_KEY}`, + }, + body: JSON.stringify({ fileId, data }), + }) + return res.ok + }, + + deleteFile: async (fileId: string) => { + const res = await fetch(`${process.env.STORAGE_WORKER_URL}/api`, { + method: "DELETE", + headers: { + "Content-Type": "application/json", + Authorization: `${process.env.WORKERS_KEY}`, + }, + body: JSON.stringify({ fileId }), + }) + return res.ok + }, + + getProjectSize: async (id: string) => { + const res = await fetch( + `${process.env.STORAGE_WORKER_URL}/api/size?sandboxId=${id}`, + { + headers: { + Authorization: `${process.env.WORKERS_KEY}`, + }, + } + ) + return (await res.json()).size + } +} + +export default RemoteFileStorage \ No newline at end of file diff --git a/backend/server/src/SSHSocketClient.ts b/backend/server/src/SSHSocketClient.ts index 
e0dc043..0fe4152 100644 --- a/backend/server/src/SSHSocketClient.ts +++ b/backend/server/src/SSHSocketClient.ts @@ -1,90 +1,98 @@ -import { Client } from "ssh2"; +import { Client } from "ssh2" +// Interface defining the configuration for SSH connection export interface SSHConfig { - host: string; - port?: number; - username: string; - privateKey: Buffer; + host: string + port?: number + username: string + privateKey: Buffer } +// Class to handle SSH connections and communicate with a Unix socket export class SSHSocketClient { - private conn: Client; - private config: SSHConfig; - private socketPath: string; - private isConnected: boolean = false; - - constructor(config: SSHConfig, socketPath: string) { - this.conn = new Client(); - this.config = { ...config, port: 22}; - this.socketPath = socketPath; - - this.setupTerminationHandlers(); - } - - private setupTerminationHandlers() { - process.on("SIGINT", this.closeConnection.bind(this)); - process.on("SIGTERM", this.closeConnection.bind(this)); - } - - private closeConnection() { - console.log("Closing SSH connection..."); - this.conn.end(); - this.isConnected = false; - process.exit(0); - } - - connect(): Promise { - return new Promise((resolve, reject) => { - this.conn - .on("ready", () => { - console.log("SSH connection established"); - this.isConnected = true; - resolve(); - }) - .on("error", (err) => { - console.error("SSH connection error:", err); - this.isConnected = false; - reject(err); - }) - .on("close", () => { - console.log("SSH connection closed"); - this.isConnected = false; - }) - .connect(this.config); - }); - } - - sendData(data: string): Promise { - return new Promise((resolve, reject) => { - if (!this.isConnected) { - reject(new Error("SSH connection is not established")); - return; - } - - this.conn.exec( - `echo "${data}" | nc -U ${this.socketPath}`, - (err, stream) => { - if (err) { - reject(err); - return; - } - - stream - .on("close", (code: number, signal: string) => { - reject( - new Error( - `Stream closed with code ${code} and signal ${signal}` - ) - ); - }) - .on("data", (data: Buffer) => { - resolve(data.toString()); - }) - .stderr.on("data", (data: Buffer) => { - reject(new Error(data.toString())); - }); + private conn: Client + private config: SSHConfig + private socketPath: string + private isConnected: boolean = false + + // Constructor initializes the SSH client and sets up configuration + constructor(config: SSHConfig, socketPath: string) { + this.conn = new Client() + this.config = { ...config, port: 22 } // Default port to 22 if not provided + this.socketPath = socketPath + + this.setupTerminationHandlers() + } + + // Set up handlers for graceful termination + private setupTerminationHandlers() { + process.on("SIGINT", this.closeConnection.bind(this)) + process.on("SIGTERM", this.closeConnection.bind(this)) + } + + // Method to close the SSH connection + private closeConnection() { + console.log("Closing SSH connection...") + this.conn.end() + this.isConnected = false + process.exit(0) + } + + // Method to establish the SSH connection + connect(): Promise { + return new Promise((resolve, reject) => { + this.conn + .on("ready", () => { + console.log("SSH connection established") + this.isConnected = true + resolve() + }) + .on("error", (err) => { + console.error("SSH connection error:", err) + this.isConnected = false + reject(err) + }) + .on("close", () => { + console.log("SSH connection closed") + this.isConnected = false + }) + .connect(this.config) + }) + } + + // Method to send data through the SSH 
connection to the Unix socket + sendData(data: string): Promise { + return new Promise((resolve, reject) => { + if (!this.isConnected) { + reject(new Error("SSH connection is not established")) + return + } + + // Use netcat to send data to the Unix socket + this.conn.exec( + `echo "${data}" | nc -U ${this.socketPath}`, + (err, stream) => { + if (err) { + reject(err) + return } - ); - }); - } - } \ No newline at end of file + + stream + .on("close", (code: number, signal: string) => { + reject( + new Error( + `Stream closed with code ${code} and signal ${signal}` + ) + ) + }) + .on("data", (data: Buffer) => { + resolve(data.toString()) + }) + .stderr.on("data", (data: Buffer) => { + reject(new Error(data.toString())) + }) + } + ) + }) + } +} diff --git a/backend/server/src/SecureGitClient.ts b/backend/server/src/SecureGitClient.ts index 6fabce6..34f5322 100644 --- a/backend/server/src/SecureGitClient.ts +++ b/backend/server/src/SecureGitClient.ts @@ -1,82 +1,84 @@ -import simpleGit, { SimpleGit } from "simple-git"; -import path from "path"; -import fs from "fs"; -import os from "os"; +import fs from "fs" +import os from "os" +import path from "path" +import simpleGit, { SimpleGit } from "simple-git" export type FileData = { - id: string; - data: string; -}; + id: string + data: string +} export class SecureGitClient { - private gitUrl: string; - private sshKeyPath: string; + private gitUrl: string + private sshKeyPath: string constructor(gitUrl: string, sshKeyPath: string) { - this.gitUrl = gitUrl; - this.sshKeyPath = sshKeyPath; + this.gitUrl = gitUrl + this.sshKeyPath = sshKeyPath } async pushFiles(fileData: FileData[], repository: string): Promise { - let tempDir: string | undefined; + let tempDir: string | undefined try { // Create a temporary directory - tempDir = fs.mkdtempSync(path.posix.join(os.tmpdir(), 'git-push-')); - console.log(`Temporary directory created: ${tempDir}`); + tempDir = fs.mkdtempSync(path.posix.join(os.tmpdir(), "git-push-")) + console.log(`Temporary directory created: ${tempDir}`) // Write files to the temporary directory - console.log(`Writing ${fileData.length} files.`); + console.log(`Writing ${fileData.length} files.`) for (const { id, data } of fileData) { - const filePath = path.posix.join(tempDir, id); - const dirPath = path.dirname(filePath); - + const filePath = path.posix.join(tempDir, id) + const dirPath = path.dirname(filePath) + if (!fs.existsSync(dirPath)) { - fs.mkdirSync(dirPath, { recursive: true }); + fs.mkdirSync(dirPath, { recursive: true }) } - fs.writeFileSync(filePath, data); + fs.writeFileSync(filePath, data) } // Initialize the simple-git instance with the temporary directory and custom SSH command const git: SimpleGit = simpleGit(tempDir, { config: [ - 'core.sshCommand=ssh -i ' + this.sshKeyPath + ' -o IdentitiesOnly=yes' - ] + "core.sshCommand=ssh -i " + + this.sshKeyPath + + " -o IdentitiesOnly=yes", + ], }).outputHandler((_command, stdout, stderr) => { - stdout.pipe(process.stdout); - stderr.pipe(process.stderr); - });; + stdout.pipe(process.stdout) + stderr.pipe(process.stderr) + }) // Initialize a new Git repository - await git.init(); + await git.init() // Add remote repository - await git.addRemote("origin", `${this.gitUrl}:${repository}`); + await git.addRemote("origin", `${this.gitUrl}:${repository}`) // Add files to the repository - for (const {id, data} of fileData) { - await git.add(id); + for (const { id, data } of fileData) { + await git.add(id) } // Commit the changes - await git.commit("Add files."); + await 
git.commit("Add files.") // Push the changes to the remote repository - await git.push("origin", "master", {'--force': null}); + await git.push("origin", "master", { "--force": null }) - console.log("Files successfully pushed to the repository"); + console.log("Files successfully pushed to the repository") if (tempDir) { - fs.rmSync(tempDir, { recursive: true, force: true }); - console.log(`Temporary directory removed: ${tempDir}`); + fs.rmSync(tempDir, { recursive: true, force: true }) + console.log(`Temporary directory removed: ${tempDir}`) } } catch (error) { if (tempDir) { - fs.rmSync(tempDir, { recursive: true, force: true }); - console.log(`Temporary directory removed: ${tempDir}`); + fs.rmSync(tempDir, { recursive: true, force: true }) + console.log(`Temporary directory removed: ${tempDir}`) } - console.error("Error pushing files to the repository:", error); - throw error; + console.error("Error pushing files to the repository:", error) + throw error } } -} \ No newline at end of file +} diff --git a/backend/server/src/Terminal.ts b/backend/server/src/Terminal.ts index e30f022..482b8a4 100644 --- a/backend/server/src/Terminal.ts +++ b/backend/server/src/Terminal.ts @@ -1,13 +1,13 @@ -import { Sandbox, ProcessHandle } from "e2b"; +import { ProcessHandle, Sandbox } from "e2b" // Terminal class to manage a pseudo-terminal (PTY) in a sandbox environment export class Terminal { - private pty: ProcessHandle | undefined; // Holds the PTY process handle - private sandbox: Sandbox; // Reference to the sandbox environment + private pty: ProcessHandle | undefined // Holds the PTY process handle + private sandbox: Sandbox // Reference to the sandbox environment // Constructor initializes the Terminal with a sandbox constructor(sandbox: Sandbox) { - this.sandbox = sandbox; + this.sandbox = sandbox } // Initialize the terminal with specified rows, columns, and data handler @@ -16,9 +16,9 @@ export class Terminal { cols = 80, onData, }: { - rows?: number; - cols?: number; - onData: (responseData: string) => void; + rows?: number + cols?: number + onData: (responseData: string) => void }): Promise { // Create a new PTY process this.pty = await this.sandbox.pty.create({ @@ -26,35 +26,38 @@ export class Terminal { cols, timeout: 0, onData: (data: Uint8Array) => { - onData(new TextDecoder().decode(data)); // Convert received data to string and pass to handler + onData(new TextDecoder().decode(data)) // Convert received data to string and pass to handler }, - }); + }) } // Send data to the terminal async sendData(data: string) { if (this.pty) { - await this.sandbox.pty.sendInput(this.pty.pid, new TextEncoder().encode(data)); + await this.sandbox.pty.sendInput( + this.pty.pid, + new TextEncoder().encode(data) + ) } else { - console.log("Cannot send data because pty is not initialized."); + console.log("Cannot send data because pty is not initialized.") } } // Resize the terminal async resize(size: { cols: number; rows: number }): Promise { if (this.pty) { - await this.sandbox.pty.resize(this.pty.pid, size); + await this.sandbox.pty.resize(this.pty.pid, size) } else { - console.log("Cannot resize terminal because pty is not initialized."); + console.log("Cannot resize terminal because pty is not initialized.") } } // Close the terminal, killing the PTY process and stopping the input stream async close(): Promise { if (this.pty) { - await this.pty.kill(); + await this.pty.kill() } else { - console.log("Cannot kill pty because it is not initialized."); + console.log("Cannot kill pty because it is not 
initialized.") } } } @@ -64,4 +67,4 @@ export class Terminal { // await terminal.init(); // terminal.sendData('ls -la'); // await terminal.resize({ cols: 100, rows: 30 }); -// await terminal.close(); \ No newline at end of file +// await terminal.close(); diff --git a/backend/server/src/TerminalManager.ts b/backend/server/src/TerminalManager.ts new file mode 100644 index 0000000..b97aa6c --- /dev/null +++ b/backend/server/src/TerminalManager.ts @@ -0,0 +1,74 @@ +import { Sandbox } from "e2b" +import { Terminal } from "./Terminal" + +export class TerminalManager { + private sandbox: Sandbox + private terminals: Record = {} + + constructor(sandbox: Sandbox) { + this.sandbox = sandbox + } + + async createTerminal( + id: string, + onData: (responseString: string) => void + ): Promise { + if (this.terminals[id]) { + return + } + + this.terminals[id] = new Terminal(this.sandbox) + await this.terminals[id].init({ + onData, + cols: 80, + rows: 20, + }) + + const defaultDirectory = "/home/user/project" + const defaultCommands = [ + `cd "${defaultDirectory}"`, + "export PS1='user> '", + "clear", + ] + for (const command of defaultCommands) { + await this.terminals[id].sendData(command + "\r") + } + + console.log("Created terminal", id) + } + + async resizeTerminal(dimensions: { + cols: number + rows: number + }): Promise { + Object.values(this.terminals).forEach((t) => { + t.resize(dimensions) + }) + } + + async sendTerminalData(id: string, data: string): Promise { + if (!this.terminals[id]) { + return + } + + await this.terminals[id].sendData(data) + } + + async closeTerminal(id: string): Promise { + if (!this.terminals[id]) { + return + } + + await this.terminals[id].close() + delete this.terminals[id] + } + + async closeAllTerminals(): Promise { + await Promise.all( + Object.entries(this.terminals).map(async ([key, terminal]) => { + await terminal.close() + delete this.terminals[key] + }) + ) + } +} diff --git a/backend/server/src/fileoperations.ts b/backend/server/src/fileoperations.ts deleted file mode 100644 index 5e0e249..0000000 --- a/backend/server/src/fileoperations.ts +++ /dev/null @@ -1,177 +0,0 @@ -import * as dotenv from "dotenv"; -import { - R2FileBody, - R2Files, - Sandbox, - TFile, - TFileData, - TFolder, -} from "./types"; - -dotenv.config(); - -export const getSandboxFiles = async (id: string) => { - const res = await fetch( - `${process.env.STORAGE_WORKER_URL}/api?sandboxId=${id}`, - { - headers: { - Authorization: `${process.env.WORKERS_KEY}`, - }, - } - ); - const data: R2Files = await res.json(); - - const paths = data.objects.map((obj) => obj.key); - const processedFiles = await processFiles(paths, id); - return processedFiles; -}; - -export const getFolder = async (folderId: string) => { - const res = await fetch( - `${process.env.STORAGE_WORKER_URL}/api?folderId=${folderId}`, - { - headers: { - Authorization: `${process.env.WORKERS_KEY}`, - }, - } - ); - const data: R2Files = await res.json(); - - return data.objects.map((obj) => obj.key); -}; - -const processFiles = async (paths: string[], id: string) => { - const root: TFolder = { id: "/", type: "folder", name: "/", children: [] }; - const fileData: TFileData[] = []; - - paths.forEach((path) => { - const allParts = path.split("/"); - if (allParts[1] !== id) { - return; - } - - const parts = allParts.slice(2); - let current: TFolder = root; - - for (let i = 0; i < parts.length; i++) { - const part = parts[i]; - const isFile = i === parts.length - 1 && part.length; - const existing = current.children.find((child) => 
child.name === part); - - if (existing) { - if (!isFile) { - current = existing as TFolder; - } - } else { - if (isFile) { - const file: TFile = { id: path, type: "file", name: part }; - current.children.push(file); - fileData.push({ id: path, data: "" }); - } else { - const folder: TFolder = { - // id: path, // todo: wrong id. for example, folder "src" ID is: projects/a7vgttfqbgy403ratp7du3ln/src/App.css - id: `projects/${id}/${parts.slice(0, i + 1).join("/")}`, - type: "folder", - name: part, - children: [], - }; - current.children.push(folder); - current = folder; - } - } - } - }); - - await Promise.all( - fileData.map(async (file) => { - const data = await fetchFileContent(file.id); - file.data = data; - }) - ); - - return { - files: root.children, - fileData, - }; -}; - -const fetchFileContent = async (fileId: string): Promise => { - try { - const fileRes = await fetch( - `${process.env.STORAGE_WORKER_URL}/api?fileId=${fileId}`, - { - headers: { - Authorization: `${process.env.WORKERS_KEY}`, - }, - } - ); - return await fileRes.text(); - } catch (error) { - console.error("ERROR fetching file:", error); - return ""; - } -}; - -export const createFile = async (fileId: string) => { - const res = await fetch(`${process.env.STORAGE_WORKER_URL}/api`, { - method: "POST", - headers: { - "Content-Type": "application/json", - Authorization: `${process.env.WORKERS_KEY}`, - }, - body: JSON.stringify({ fileId }), - }); - return res.ok; -}; - -export const renameFile = async ( - fileId: string, - newFileId: string, - data: string -) => { - const res = await fetch(`${process.env.STORAGE_WORKER_URL}/api/rename`, { - method: "POST", - headers: { - "Content-Type": "application/json", - Authorization: `${process.env.WORKERS_KEY}`, - }, - body: JSON.stringify({ fileId, newFileId, data }), - }); - return res.ok; -}; - -export const saveFile = async (fileId: string, data: string) => { - const res = await fetch(`${process.env.STORAGE_WORKER_URL}/api/save`, { - method: "POST", - headers: { - "Content-Type": "application/json", - Authorization: `${process.env.WORKERS_KEY}`, - }, - body: JSON.stringify({ fileId, data }), - }); - return res.ok; -}; - -export const deleteFile = async (fileId: string) => { - const res = await fetch(`${process.env.STORAGE_WORKER_URL}/api`, { - method: "DELETE", - headers: { - "Content-Type": "application/json", - Authorization: `${process.env.WORKERS_KEY}`, - }, - body: JSON.stringify({ fileId }), - }); - return res.ok; -}; - -export const getProjectSize = async (id: string) => { - const res = await fetch( - `${process.env.STORAGE_WORKER_URL}/api/size?sandboxId=${id}`, - { - headers: { - Authorization: `${process.env.WORKERS_KEY}`, - }, - } - ); - return (await res.json()).size; -}; \ No newline at end of file diff --git a/backend/server/src/index.ts b/backend/server/src/index.ts index 5c0ba89..f69a303 100644 --- a/backend/server/src/index.ts +++ b/backend/server/src/index.ts @@ -1,107 +1,97 @@ -import path from "path"; -import cors from "cors"; -import express, { Express } from "express"; -import dotenv from "dotenv"; -import { createServer } from "http"; -import { Server } from "socket.io"; -import { DokkuClient } from "./DokkuClient"; -import { SecureGitClient, FileData } from "./SecureGitClient"; -import fs, { readFile } from "fs"; - -import { z } from "zod"; +import cors from "cors" +import dotenv from "dotenv" +import { Sandbox } from "e2b" +import express, { Express } from "express" +import fs from "fs" +import { createServer } from "http" +import { Server } from 
"socket.io" +import { z } from "zod" +import { AIWorker } from "./AIWorker" +import { DokkuClient } from "./DokkuClient" +import { FileManager, SandboxFiles } from "./FileManager" import { - TFile, - TFileData, - TFolder, - User -} from "./types"; -import { - createFile, - deleteFile, - getFolder, - getProjectSize, - getSandboxFiles, - renameFile, - saveFile, -} from "./fileoperations"; -import { LockManager } from "./utils"; - -import { Sandbox, Filesystem, FilesystemEvent, EntryInfo, WatchHandle } from "e2b"; - -import { Terminal } from "./Terminal" - -import { - MAX_BODY_SIZE, createFileRL, createFolderRL, deleteFileRL, renameFileRL, saveFileRL, -} from "./ratelimit"; +} from "./ratelimit" +import { SecureGitClient } from "./SecureGitClient" +import { TerminalManager } from "./TerminalManager" +import { User } from "./types" +import { LockManager } from "./utils" -process.on('uncaughtException', (error) => { - console.error('Uncaught Exception:', error); +// Handle uncaught exceptions +process.on("uncaughtException", (error) => { + console.error("Uncaught Exception:", error) // Do not exit the process // You can add additional logging or recovery logic here -}); +}) -process.on('unhandledRejection', (reason, promise) => { - console.error('Unhandled Rejection at:', promise, 'reason:', reason); +// Handle unhandled promise rejections +process.on("unhandledRejection", (reason, promise) => { + console.error("Unhandled Rejection at:", promise, "reason:", reason) // Do not exit the process // You can also handle the rejected promise here if needed -}); +}) // The amount of time in ms that a container will stay alive without a hearbeat. -const CONTAINER_TIMEOUT = 60_000; +const CONTAINER_TIMEOUT = 120_000 -dotenv.config(); +// Load environment variables +dotenv.config() -const app: Express = express(); -const port = process.env.PORT || 4000; -app.use(cors()); -const httpServer = createServer(app); +// Initialize Express app and create HTTP server +const app: Express = express() +const port = process.env.PORT || 4000 +app.use(cors()) +const httpServer = createServer(app) const io = new Server(httpServer, { cors: { origin: "*", }, -}); +}) -let inactivityTimeout: NodeJS.Timeout | null = null; -let isOwnerConnected = false; +// Check if the sandbox owner is connected +function isOwnerConnected(sandboxId: string): boolean { + return (connections[sandboxId] ?? 0) > 0 +} -const containers: Record = {}; -const connections: Record = {}; -const terminals: Record = {}; +// Extract port number from a string +function extractPortNumber(inputString: string): number | null { + const cleanedString = inputString.replace(/\x1B\[[0-9;]*m/g, "") + const regex = /http:\/\/localhost:(\d+)/ + const match = cleanedString.match(regex) + return match ? 
parseInt(match[1]) : null +} -const dirName = "/home/user"; - -const moveFile = async (filesystem: Filesystem, filePath: string, newFilePath: string) => { - try { - const fileContents = await filesystem.read(filePath); - await filesystem.write(newFilePath, fileContents); - await filesystem.remove(filePath); - } catch (e) { - console.error(`Error moving file from ${filePath} to ${newFilePath}:`, e); - } -}; +// Initialize containers and managers +const containers: Record = {} +const connections: Record = {} +const fileManagers: Record = {} +const terminalManagers: Record = {} +// Middleware for socket authentication io.use(async (socket, next) => { + // Define the schema for handshake query validation const handshakeSchema = z.object({ userId: z.string(), sandboxId: z.string(), EIO: z.string(), transport: z.string(), - }); + }) - const q = socket.handshake.query; - const parseQuery = handshakeSchema.safeParse(q); + const q = socket.handshake.query + const parseQuery = handshakeSchema.safeParse(q) + // Check if the query is valid according to the schema if (!parseQuery.success) { - next(new Error("Invalid request.")); - return; + next(new Error("Invalid request.")) + return } - const { sandboxId, userId } = parseQuery.data; + const { sandboxId, userId } = parseQuery.data + // Fetch user data from the database const dbUser = await fetch( `${process.env.DATABASE_WORKER_URL}/api/user?id=${userId}`, { @@ -109,39 +99,50 @@ io.use(async (socket, next) => { Authorization: `${process.env.WORKERS_KEY}`, }, } - ); - const dbUserJSON = (await dbUser.json()) as User; + ) + const dbUserJSON = (await dbUser.json()) as User + // Check if user data was retrieved successfully if (!dbUserJSON) { - next(new Error("DB error.")); - return; + next(new Error("DB error.")) + return } - const sandbox = dbUserJSON.sandbox.find((s) => s.id === sandboxId); + // Check if the user owns the sandbox or has shared access + const sandbox = dbUserJSON.sandbox.find((s) => s.id === sandboxId) const sharedSandboxes = dbUserJSON.usersToSandboxes.find( (uts) => uts.sandboxId === sandboxId - ); + ) + // If user doesn't own or have shared access to the sandbox, deny access if (!sandbox && !sharedSandboxes) { - next(new Error("Invalid credentials.")); - return; + next(new Error("Invalid credentials.")) + return } + // Set socket data with user information socket.data = { userId, sandboxId: sandboxId, isOwner: sandbox !== undefined, - }; + } - next(); -}); + // Allow the connection + next() +}) -const lockManager = new LockManager(); +// Initialize lock manager +const lockManager = new LockManager() -if (!process.env.DOKKU_HOST) console.error('Environment variable DOKKU_HOST is not defined'); -if (!process.env.DOKKU_USERNAME) console.error('Environment variable DOKKU_USERNAME is not defined'); -if (!process.env.DOKKU_KEY) console.error('Environment variable DOKKU_KEY is not defined'); +// Check for required environment variables +if (!process.env.DOKKU_HOST) + console.error("Environment variable DOKKU_HOST is not defined") +if (!process.env.DOKKU_USERNAME) + console.error("Environment variable DOKKU_USERNAME is not defined") +if (!process.env.DOKKU_KEY) + console.error("Environment variable DOKKU_KEY is not defined") +// Initialize Dokku client const client = process.env.DOKKU_HOST && process.env.DOKKU_KEY && process.env.DOKKU_USERNAME ? 
new DokkuClient({ @@ -149,632 +150,324 @@ const client = username: process.env.DOKKU_USERNAME, privateKey: fs.readFileSync(process.env.DOKKU_KEY), }) - : null; -client?.connect(); + : null +client?.connect() -const git = process.env.DOKKU_HOST && process.env.DOKKU_KEY ? new SecureGitClient( - `dokku@${process.env.DOKKU_HOST}`, - process.env.DOKKU_KEY -) : null; +// Initialize Git client used to deploy Dokku apps +const git = + process.env.DOKKU_HOST && process.env.DOKKU_KEY + ? new SecureGitClient( + `dokku@${process.env.DOKKU_HOST}`, + process.env.DOKKU_KEY + ) + : null +// Add this near the top of the file, after other initializations +const aiWorker = new AIWorker( + process.env.AI_WORKER_URL!, + process.env.CF_AI_KEY!, + process.env.DATABASE_WORKER_URL!, + process.env.WORKERS_KEY! +) + +// Handle socket connections io.on("connection", async (socket) => { try { - if (inactivityTimeout) clearTimeout(inactivityTimeout); - const data = socket.data as { - userId: string; - sandboxId: string; - isOwner: boolean; - }; + userId: string + sandboxId: string + isOwner: boolean + } + // Handle connection based on user type (owner or not) if (data.isOwner) { - isOwnerConnected = true; - connections[data.sandboxId] = (connections[data.sandboxId] ?? 0) + 1; + connections[data.sandboxId] = (connections[data.sandboxId] ?? 0) + 1 } else { - if (!isOwnerConnected) { - socket.emit("disableAccess", "The sandbox owner is not connected."); - return; + if (!isOwnerConnected(data.sandboxId)) { + socket.emit("disableAccess", "The sandbox owner is not connected.") + return } } - const createdContainer = await lockManager.acquireLock(data.sandboxId, async () => { - try { - // Start a new container if the container doesn't exist or it timed out. - if (!containers[data.sandboxId] || !(await containers[data.sandboxId].isRunning())) { - containers[data.sandboxId] = await Sandbox.create({ timeoutMs: CONTAINER_TIMEOUT }); - console.log("Created container ", data.sandboxId); - return true; - } - } catch (e: any) { - console.error(`Error creating container ${data.sandboxId}:`, e); - io.emit("error", `Error: container creation. ${e.message ?? 
e}`); - } - }); - - const sandboxFiles = await getSandboxFiles(data.sandboxId); - const projectDirectory = path.posix.join(dirName, "projects", data.sandboxId); - const containerFiles = containers[data.sandboxId].files; - const fileWatchers: WatchHandle[] = []; - - // Change the owner of the project directory to user - const fixPermissions = async (projectDirectory: string) => { - try { - await containers[data.sandboxId].commands.run( - `sudo chown -R user "${projectDirectory}"` - ); - } catch (e: any) { - console.log("Failed to fix permissions: " + e); - } - }; - - // Check if the given path is a directory - const isDirectory = async (projectDirectory: string): Promise => { - try { - const result = await containers[data.sandboxId].commands.run( - `[ -d "${projectDirectory}" ] && echo "true" || echo "false"` - ); - return result.stdout.trim() === "true"; - } catch (e: any) { - console.log("Failed to check if directory: " + e); - return false; - } - }; - - // Only continue to container setup if a new container was created - if (createdContainer) { - - // Copy all files from the project to the container - const promises = sandboxFiles.fileData.map(async (file) => { + // Create or retrieve container + const createdContainer = await lockManager.acquireLock( + data.sandboxId, + async () => { try { - const filePath = path.posix.join(dirName, file.id); - const parentDirectory = path.dirname(filePath); - if (!containerFiles.exists(parentDirectory)) { - await containerFiles.makeDir(parentDirectory); + // Start a new container if the container doesn't exist or it timed out. + if ( + !containers[data.sandboxId] || + !(await containers[data.sandboxId].isRunning()) + ) { + containers[data.sandboxId] = await Sandbox.create({ + timeoutMs: CONTAINER_TIMEOUT, + }) + console.log("Created container ", data.sandboxId) + return true } - await containerFiles.write(filePath, file.data); } catch (e: any) { - console.log("Failed to create file: " + e); + console.error(`Error creating container ${data.sandboxId}:`, e) + io.emit("error", `Error: container creation. ${e.message ?? e}`) } - }); - await Promise.all(promises); - - // Make the logged in user the owner of all project files - fixPermissions(projectDirectory); + } + ) + // Function to send loaded event + const sendLoadedEvent = (files: SandboxFiles) => { + socket.emit("loaded", files.files) } - // Start filesystem watcher for the project directory - const watchDirectory = async (directory: string): Promise => { - try { - return await containerFiles.watch(directory, async (event: FilesystemEvent) => { - try { + // Initialize file and terminal managers if container was created + if (createdContainer) { + fileManagers[data.sandboxId] = new FileManager( + data.sandboxId, + containers[data.sandboxId], + sendLoadedEvent + ) + await fileManagers[data.sandboxId].initialize() + terminalManagers[data.sandboxId] = new TerminalManager( + containers[data.sandboxId] + ) + } - function removeDirName(path : string, dirName : string) { - return path.startsWith(dirName) ? 
path.slice(dirName.length) : path; - } + const fileManager = fileManagers[data.sandboxId] + const terminalManager = terminalManagers[data.sandboxId] - // This is the absolute file path in the container - const containerFilePath = path.posix.join(directory, event.name); - // This is the file path relative to the home directory - const sandboxFilePath = removeDirName(containerFilePath, dirName + "/"); - // This is the directory being watched relative to the home directory - const sandboxDirectory = removeDirName(directory, dirName + "/"); - - // Helper function to find a folder by id - function findFolderById(files: (TFolder | TFile)[], folderId : string) { - return files.find((file : TFolder | TFile) => file.type === "folder" && file.id === folderId); - } - - // A new file or directory was created. - if (event.type === "create") { - const folder = findFolderById(sandboxFiles.files, sandboxDirectory) as TFolder; - const isDir = await isDirectory(containerFilePath); - - const newItem = isDir - ? { id: sandboxFilePath, name: event.name, type: "folder", children: [] } as TFolder - : { id: sandboxFilePath, name: event.name, type: "file" } as TFile; - - if (folder) { - // If the folder exists, add the new item (file/folder) as a child - folder.children.push(newItem); - } else { - // If folder doesn't exist, add the new item to the root - sandboxFiles.files.push(newItem); - } - - if (!isDir) { - const fileData = await containers[data.sandboxId].files.read(containerFilePath); - const fileContents = typeof fileData === "string" ? fileData : ""; - sandboxFiles.fileData.push({ id: sandboxFilePath, data: fileContents }); - } - - console.log(`Create ${sandboxFilePath}`); - } - - // A file or directory was removed or renamed. - else if (event.type === "remove" || event.type == "rename") { - const folder = findFolderById(sandboxFiles.files, sandboxDirectory) as TFolder; - const isDir = await isDirectory(containerFilePath); - - const isFileMatch = (file: TFolder | TFile | TFileData) => file.id === sandboxFilePath || file.id.startsWith(containerFilePath + '/'); - - if (folder) { - // Remove item from its parent folder - folder.children = folder.children.filter((file: TFolder | TFile) => !isFileMatch(file)); - } else { - // Remove from the root if it's not inside a folder - sandboxFiles.files = sandboxFiles.files.filter((file: TFolder | TFile) => !isFileMatch(file)); - } - - // Also remove any corresponding file data - sandboxFiles.fileData = sandboxFiles.fileData.filter((file: TFileData) => !isFileMatch(file)); - - console.log(`Removed: ${sandboxFilePath}`); - } - - // The contents of a file were changed. - else if (event.type === "write") { - const folder = findFolderById(sandboxFiles.files, sandboxDirectory) as TFolder; - const fileToWrite = sandboxFiles.fileData.find(file => file.id === sandboxFilePath); - - if (fileToWrite) { - fileToWrite.data = await containers[data.sandboxId].files.read(containerFilePath); - console.log(`Write to ${sandboxFilePath}`); - } else { - // If the file is part of a folder structure, locate it and update its data - const fileInFolder = folder?.children.find(file => file.id === sandboxFilePath); - if (fileInFolder) { - const fileData = await containers[data.sandboxId].files.read(containerFilePath); - const fileContents = typeof fileData === "string" ? 
fileData : ""; - sandboxFiles.fileData.push({ id: sandboxFilePath, data: fileContents }); - console.log(`Write to ${sandboxFilePath}`); - } - } - } - - // Tell the client to reload the file list - socket.emit("loaded", sandboxFiles.files); - - } catch (error) { - console.error(`Error handling ${event.type} event for ${event.name}:`, error); - } - }, { "timeout": 0 } ) - } catch (error) { - console.error(`Error watching filesystem:`, error); - } - }; - - // Watch the project directory - const handle = await watchDirectory(projectDirectory); - // Keep track of watch handlers to close later - if (handle) fileWatchers.push(handle); - - // Watch all subdirectories of the project directory, but not deeper - // This also means directories created after the container is created won't be watched - const dirContent = await containerFiles.list(projectDirectory); - await Promise.all(dirContent.map(async (item : EntryInfo) => { - if (item.type === "dir") { - console.log("Watching " + item.path); - // Keep track of watch handlers to close later - const handle = await watchDirectory(item.path); - if (handle) fileWatchers.push(handle); - } - })) - - socket.emit("loaded", sandboxFiles.files); + // Load file list from the file manager into the editor + sendLoadedEvent(fileManager.sandboxFiles) + // Handle various socket events (heartbeat, file operations, terminal operations, etc.) socket.on("heartbeat", async () => { try { // This keeps the container alive for another CONTAINER_TIMEOUT seconds. - // The E2B docs are unclear, but the timeout is relative to the time of this method call. - await containers[data.sandboxId].setTimeout(CONTAINER_TIMEOUT); + // The E2B docs are unclear, but the timeout is relative to the time of this method call. + await containers[data.sandboxId].setTimeout(CONTAINER_TIMEOUT) } catch (e: any) { - console.error("Error setting timeout:", e); - io.emit("error", `Error: set timeout. ${e.message ?? e}`); + console.error("Error setting timeout:", e) + io.emit("error", `Error: set timeout. ${e.message ?? e}`) } - }); + }) - socket.on("getFile", (fileId: string, callback) => { - console.log(fileId); + // Handle request to get file content + socket.on("getFile", async (fileId: string, callback) => { try { - const file = sandboxFiles.fileData.find((f) => f.id === fileId); - if (!file) return; - - callback(file.data); + const fileContent = await fileManager.getFile(fileId) + callback(fileContent) } catch (e: any) { - console.error("Error getting file:", e); - io.emit("error", `Error: get file. ${e.message ?? e}`); + console.error("Error getting file:", e) + io.emit("error", `Error: get file. ${e.message ?? e}`) } - }); + }) + // Handle request to get folder contents socket.on("getFolder", async (folderId: string, callback) => { try { - const files = await getFolder(folderId); - callback(files); + const files = await fileManager.getFolder(folderId) + callback(files) } catch (e: any) { - console.error("Error getting folder:", e); - io.emit("error", `Error: get folder. ${e.message ?? e}`); + console.error("Error getting folder:", e) + io.emit("error", `Error: get folder. ${e.message ?? e}`) } - }); + }) - // todo: send diffs + debounce for efficiency + // Handle request to save file socket.on("saveFile", async (fileId: string, body: string) => { - if (!fileId) return; // handles saving when no file is open - try { - if (Buffer.byteLength(body, "utf-8") > MAX_BODY_SIZE) { - socket.emit( - "error", - "Error: file size too large. Please reduce the file size." 
- ); - return; - } - try { - await saveFileRL.consume(data.userId, 1); - await saveFile(fileId, body); - } catch (e) { - io.emit("error", "Rate limited: file saving. Please slow down."); - return; - } - - const file = sandboxFiles.fileData.find((f) => f.id === fileId); - if (!file) return; - file.data = body; - - await containers[data.sandboxId].files.write( - path.posix.join(dirName, file.id), - body - ); - fixPermissions(projectDirectory); + await saveFileRL.consume(data.userId, 1) + await fileManager.saveFile(fileId, body) } catch (e: any) { - console.error("Error saving file:", e); - io.emit("error", `Error: file saving. ${e.message ?? e}`); + console.error("Error saving file:", e) + io.emit("error", `Error: file saving. ${e.message ?? e}`) } - }); + }) + // Handle request to move file socket.on( "moveFile", async (fileId: string, folderId: string, callback) => { try { - const file = sandboxFiles.fileData.find((f) => f.id === fileId); - if (!file) return; - - const parts = fileId.split("/"); - const newFileId = folderId + "/" + parts.pop(); - - await moveFile( - containers[data.sandboxId].files, - path.posix.join(dirName, fileId), - path.posix.join(dirName, newFileId) - ); - fixPermissions(projectDirectory); - - file.id = newFileId; - - await renameFile(fileId, newFileId, file.data); - const newFiles = await getSandboxFiles(data.sandboxId); - callback(newFiles.files); + const newFiles = await fileManager.moveFile(fileId, folderId) + callback(newFiles) } catch (e: any) { - console.error("Error moving file:", e); - io.emit("error", `Error: file moving. ${e.message ?? e}`); + console.error("Error moving file:", e) + io.emit("error", `Error: file moving. ${e.message ?? e}`) } } - ); + ) interface CallbackResponse { - success: boolean; - apps?: string[]; - message?: string; + success: boolean + apps?: string[] + message?: string } + // Handle request to list apps socket.on( "list", async (callback: (response: CallbackResponse) => void) => { - console.log("Retrieving apps list..."); + console.log("Retrieving apps list...") try { - if (!client) throw Error("Failed to retrieve apps list: No Dokku client") + if (!client) + throw Error("Failed to retrieve apps list: No Dokku client") callback({ success: true, - apps: await client.listApps() - }); + apps: await client.listApps(), + }) } catch (error) { callback({ success: false, message: "Failed to retrieve apps list", - }); + }) } } - ); + ) + // Handle request to deploy project socket.on( "deploy", async (callback: (response: CallbackResponse) => void) => { try { // Push the project files to the Dokku server - console.log("Deploying project ${data.sandboxId}..."); + console.log("Deploying project ${data.sandboxId}...") if (!git) throw Error("Failed to retrieve apps list: No git client") // Remove the /project/[id]/ component of each file path: - const fixedFilePaths = sandboxFiles.fileData.map((file) => { - return { - ...file, - id: file.id.split("/").slice(2).join("/"), - }; - }); + const fixedFilePaths = fileManager.sandboxFiles.fileData.map( + (file) => { + return { + ...file, + id: file.id.split("/").slice(2).join("/"), + } + } + ) // Push all files to Dokku. 
- await git.pushFiles(fixedFilePaths, data.sandboxId); + await git.pushFiles(fixedFilePaths, data.sandboxId) callback({ success: true, - }); + }) } catch (error) { callback({ success: false, message: "Failed to deploy project: " + error, - }); + }) } } - ); + ) + // Handle request to create a new file socket.on("createFile", async (name: string, callback) => { try { - const size: number = await getProjectSize(data.sandboxId); - // limit is 200mb - if (size > 200 * 1024 * 1024) { - io.emit( - "error", - "Rate limited: project size exceeded. Please delete some files." - ); - callback({ success: false }); - return; - } - - try { - await createFileRL.consume(data.userId, 1); - } catch (e) { - io.emit("error", "Rate limited: file creation. Please slow down."); - return; - } - - const id = `projects/${data.sandboxId}/${name}`; - - await containers[data.sandboxId].files.write( - path.posix.join(dirName, id), - "" - ); - fixPermissions(projectDirectory); - - sandboxFiles.files.push({ - id, - name, - type: "file", - }); - - sandboxFiles.fileData.push({ - id, - data: "", - }); - - await createFile(id); - - callback({ success: true }); + await createFileRL.consume(data.userId, 1) + const success = await fileManager.createFile(name) + callback({ success }) } catch (e: any) { - console.error("Error creating file:", e); - io.emit("error", `Error: file creation. ${e.message ?? e}`); + console.error("Error creating file:", e) + io.emit("error", `Error: file creation. ${e.message ?? e}`) } - }); + }) + // Handle request to create a new folder socket.on("createFolder", async (name: string, callback) => { try { - try { - await createFolderRL.consume(data.userId, 1); - } catch (e) { - io.emit("error", "Rate limited: folder creation. Please slow down."); - return; - } - - const id = `projects/${data.sandboxId}/${name}`; - - await containers[data.sandboxId].files.makeDir( - path.posix.join(dirName, id) - ); - - callback(); + await createFolderRL.consume(data.userId, 1) + await fileManager.createFolder(name) + callback() } catch (e: any) { - console.error("Error creating folder:", e); - io.emit("error", `Error: folder creation. ${e.message ?? e}`); + console.error("Error creating folder:", e) + io.emit("error", `Error: folder creation. ${e.message ?? e}`) } - }); + }) + // Handle request to rename a file socket.on("renameFile", async (fileId: string, newName: string) => { try { - try { - await renameFileRL.consume(data.userId, 1); - } catch (e) { - io.emit("error", "Rate limited: file renaming. Please slow down."); - return; - } - - const file = sandboxFiles.fileData.find((f) => f.id === fileId); - if (!file) return; - file.id = newName; - - const parts = fileId.split("/"); - const newFileId = - parts.slice(0, parts.length - 1).join("/") + "/" + newName; - - await moveFile( - containers[data.sandboxId].files, - path.posix.join(dirName, fileId), - path.posix.join(dirName, newFileId) - ); - fixPermissions(projectDirectory); - await renameFile(fileId, newFileId, file.data); + await renameFileRL.consume(data.userId, 1) + await fileManager.renameFile(fileId, newName) } catch (e: any) { - console.error("Error renaming folder:", e); - io.emit("error", `Error: folder renaming. ${e.message ?? e}`); + console.error("Error renaming file:", e) + io.emit("error", `Error: file renaming. ${e.message ?? 
e}`) } - }); + }) + // Handle request to delete a file socket.on("deleteFile", async (fileId: string, callback) => { try { - try { - await deleteFileRL.consume(data.userId, 1); - } catch (e) { - io.emit("error", "Rate limited: file deletion. Please slow down."); - } - - const file = sandboxFiles.fileData.find((f) => f.id === fileId); - if (!file) return; - - await containers[data.sandboxId].files.remove( - path.posix.join(dirName, fileId) - ); - sandboxFiles.fileData = sandboxFiles.fileData.filter( - (f) => f.id !== fileId - ); - - await deleteFile(fileId); - - const newFiles = await getSandboxFiles(data.sandboxId); - callback(newFiles.files); + await deleteFileRL.consume(data.userId, 1) + const newFiles = await fileManager.deleteFile(fileId) + callback(newFiles) } catch (e: any) { - console.error("Error deleting file:", e); - io.emit("error", `Error: file deletion. ${e.message ?? e}`); + console.error("Error deleting file:", e) + io.emit("error", `Error: file deletion. ${e.message ?? e}`) } - }); - - // todo - // socket.on("renameFolder", async (folderId: string, newName: string) => { - // }); + }) + // Handle request to delete a folder socket.on("deleteFolder", async (folderId: string, callback) => { try { - const files = await getFolder(folderId); - - await Promise.all( - files.map(async (file) => { - await containers[data.sandboxId].files.remove( - path.posix.join(dirName, file) - ); - - sandboxFiles.fileData = sandboxFiles.fileData.filter( - (f) => f.id !== file - ); - - await deleteFile(file); - }) - ); - - const newFiles = await getSandboxFiles(data.sandboxId); - - callback(newFiles.files); + const newFiles = await fileManager.deleteFolder(folderId) + callback(newFiles) } catch (e: any) { - console.error("Error deleting folder:", e); - io.emit("error", `Error: folder deletion. ${e.message ?? e}`); + console.error("Error deleting folder:", e) + io.emit("error", `Error: folder deletion. ${e.message ?? e}`) } - }); + }) + // Handle request to create a new terminal socket.on("createTerminal", async (id: string, callback) => { try { - // Note: The number of terminals per window is limited on the frontend, but not backend - if (terminals[id]) { - return; - } - await lockManager.acquireLock(data.sandboxId, async () => { - try { - terminals[id] = new Terminal(containers[data.sandboxId]) - await terminals[id].init({ - onData: (responseString: string) => { - io.emit("terminalResponse", { id, data: responseString }); - - function extractPortNumber(inputString: string) { - // Remove ANSI escape codes - const cleanedString = inputString.replace(/\x1B\[[0-9;]*m/g, ''); - - // Regular expression to match port number - const regex = /http:\/\/localhost:(\d+)/; - // If a match is found, return the port number - const match = cleanedString.match(regex); - return match ? match[1] : null; - } - const port = parseInt(extractPortNumber(responseString) ?? ""); - if (port) { - io.emit( - "previewURL", - "https://" + containers[data.sandboxId].getHost(port) - ); - } - }, - cols: 80, - rows: 20, - //onExit: () => console.log("Terminal exited", id), - }); - - const defaultDirectory = path.posix.join(dirName, "projects", data.sandboxId); - const defaultCommands = [ - `cd "${defaultDirectory}"`, - "export PS1='user> '", - "clear" - ] - for (const command of defaultCommands) await terminals[id].sendData(command + "\r"); - - console.log("Created terminal", id); - } catch (e: any) { - console.error(`Error creating terminal ${id}:`, e); - io.emit("error", `Error: terminal creation. ${e.message ?? 
e}`); - } - }); - - callback(); + await terminalManager.createTerminal(id, (responseString: string) => { + io.emit("terminalResponse", { id, data: responseString }) + const port = extractPortNumber(responseString) + if (port) { + io.emit( + "previewURL", + "https://" + containers[data.sandboxId].getHost(port) + ) + } + }) + }) + callback() } catch (e: any) { - console.error(`Error creating terminal ${id}:`, e); - io.emit("error", `Error: terminal creation. ${e.message ?? e}`); + console.error(`Error creating terminal ${id}:`, e) + io.emit("error", `Error: terminal creation. ${e.message ?? e}`) } - }); + }) + // Handle request to resize terminal socket.on( "resizeTerminal", (dimensions: { cols: number; rows: number }) => { try { - Object.values(terminals).forEach((t) => { - t.resize(dimensions); - }); + terminalManager.resizeTerminal(dimensions) } catch (e: any) { - console.error("Error resizing terminal:", e); - io.emit("error", `Error: terminal resizing. ${e.message ?? e}`); + console.error("Error resizing terminal:", e) + io.emit("error", `Error: terminal resizing. ${e.message ?? e}`) } } - ); + ) + // Handle terminal input data socket.on("terminalData", async (id: string, data: string) => { try { - if (!terminals[id]) { - return; - } - - await terminals[id].sendData(data); + await terminalManager.sendTerminalData(id, data) } catch (e: any) { - console.error("Error writing to terminal:", e); - io.emit("error", `Error: writing to terminal. ${e.message ?? e}`); + console.error("Error writing to terminal:", e) + io.emit("error", `Error: writing to terminal. ${e.message ?? e}`) } - }); + }) + // Handle request to close terminal socket.on("closeTerminal", async (id: string, callback) => { try { - if (!terminals[id]) { - return; - } - - await terminals[id].close(); - delete terminals[id]; - - callback(); + await terminalManager.closeTerminal(id) + callback() } catch (e: any) { - console.error("Error closing terminal:", e); - io.emit("error", `Error: closing terminal. ${e.message ?? e}`); + console.error("Error closing terminal:", e) + io.emit("error", `Error: closing terminal. ${e.message ?? e}`) } - }); + }) + // Handle request to generate code socket.on( "generateCode", async ( @@ -785,107 +478,49 @@ io.on("connection", async (socket) => { callback ) => { try { - const fetchPromise = fetch( - `${process.env.DATABASE_WORKER_URL}/api/sandbox/generate`, - { - method: "POST", - headers: { - "Content-Type": "application/json", - Authorization: `${process.env.WORKERS_KEY}`, - }, - body: JSON.stringify({ - userId: data.userId, - }), - } - ); - - // Generate code from cloudflare workers AI - const generateCodePromise = fetch( - `${process.env.AI_WORKER_URL}/api?fileName=${encodeURIComponent(fileName)}&code=${encodeURIComponent(code)}&line=${encodeURIComponent(line)}&instructions=${encodeURIComponent(instructions)}`, - { - headers: { - "Content-Type": "application/json", - Authorization: `${process.env.CF_AI_KEY}`, - }, - } - ); - - const [fetchResponse, generateCodeResponse] = await Promise.all([ - fetchPromise, - generateCodePromise, - ]); - - if (!generateCodeResponse.ok) { - throw new Error(`HTTP error! 
status: ${generateCodeResponse.status}`); - } - - const reader = generateCodeResponse.body?.getReader(); - const decoder = new TextDecoder(); - let result = ''; - - if (reader) { - while (true) { - const { done, value } = await reader.read(); - if (done) break; - result += decoder.decode(value, { stream: true }); - } - } - - // The result should now contain only the modified code - callback({ response: result.trim(), success: true }); + const result = await aiWorker.generateCode( + data.userId, + fileName, + code, + line, + instructions + ) + callback(result) } catch (e: any) { - console.error("Error generating code:", e); - io.emit("error", `Error: code generation. ${e.message ?? e}`); - callback({ response: "Error generating code. Please try again.", success: false }); + console.error("Error generating code:", e) + io.emit("error", `Error: code generation. ${e.message ?? e}`) } } - ); + ) + // Handle socket disconnection socket.on("disconnect", async () => { try { if (data.isOwner) { - connections[data.sandboxId]--; + connections[data.sandboxId]-- } - // Stop watching file changes in the container - Promise.all(fileWatchers.map(async (handle : WatchHandle) => { - await handle.close(); - })); + await terminalManager.closeAllTerminals() + await fileManager.closeWatchers() if (data.isOwner && connections[data.sandboxId] <= 0) { socket.broadcast.emit( "disableAccess", "The sandbox owner has disconnected." - ); + ) } - - // const sockets = await io.fetchSockets(); - // if (inactivityTimeout) { - // clearTimeout(inactivityTimeout); - // } - // if (sockets.length === 0) { - // console.log("STARTING TIMER"); - // inactivityTimeout = setTimeout(() => { - // io.fetchSockets().then(async (sockets) => { - // if (sockets.length === 0) { - // console.log("Server stopped", res); - // } - // }); - // }, 20000); - // } else { - // console.log("number of sockets", sockets.length); - // } } catch (e: any) { - console.log("Error disconnecting:", e); - io.emit("error", `Error: disconnecting. ${e.message ?? e}`); + console.log("Error disconnecting:", e) + io.emit("error", `Error: disconnecting. ${e.message ?? e}`) } - }); + }) } catch (e: any) { - console.error("Error connecting:", e); - io.emit("error", `Error: connection. ${e.message ?? e}`); + console.error("Error connecting:", e) + io.emit("error", `Error: connection. ${e.message ?? 
e}`) } -}); +}) +// Start the server httpServer.listen(port, () => { - console.log(`Server running on port ${port}`); -}); + console.log(`Server running on port ${port}`) +}) diff --git a/backend/server/src/ratelimit.ts b/backend/server/src/ratelimit.ts index f0d99fa..f40ab1e 100644 --- a/backend/server/src/ratelimit.ts +++ b/backend/server/src/ratelimit.ts @@ -30,4 +30,4 @@ export const deleteFileRL = new RateLimiterMemory({ export const deleteFolderRL = new RateLimiterMemory({ points: 1, duration: 2, -}) \ No newline at end of file +}) diff --git a/backend/server/src/types.ts b/backend/server/src/types.ts index b71592a..42ad6d0 100644 --- a/backend/server/src/types.ts +++ b/backend/server/src/types.ts @@ -1,70 +1,70 @@ // DB Types export type User = { - id: string; - name: string; - email: string; - generations: number; - sandbox: Sandbox[]; - usersToSandboxes: UsersToSandboxes[]; -}; + id: string + name: string + email: string + generations: number + sandbox: Sandbox[] + usersToSandboxes: UsersToSandboxes[] +} export type Sandbox = { - id: string; - name: string; - type: "react" | "node"; - visibility: "public" | "private"; - createdAt: Date; - userId: string; - usersToSandboxes: UsersToSandboxes[]; -}; + id: string + name: string + type: "react" | "node" + visibility: "public" | "private" + createdAt: Date + userId: string + usersToSandboxes: UsersToSandboxes[] +} export type UsersToSandboxes = { - userId: string; - sandboxId: string; - sharedOn: Date; -}; + userId: string + sandboxId: string + sharedOn: Date +} export type TFolder = { - id: string; - type: "folder"; - name: string; - children: (TFile | TFolder)[]; -}; + id: string + type: "folder" + name: string + children: (TFile | TFolder)[] +} export type TFile = { - id: string; - type: "file"; - name: string; -}; + id: string + type: "file" + name: string +} export type TFileData = { - id: string; - data: string; -}; + id: string + data: string +} export type R2Files = { - objects: R2FileData[]; - truncated: boolean; - delimitedPrefixes: any[]; -}; + objects: R2FileData[] + truncated: boolean + delimitedPrefixes: any[] +} export type R2FileData = { - storageClass: string; - uploaded: string; - checksums: any; - httpEtag: string; - etag: string; - size: number; - version: string; - key: string; -}; + storageClass: string + uploaded: string + checksums: any + httpEtag: string + etag: string + size: number + version: string + key: string +} export type R2FileBody = R2FileData & { - body: ReadableStream; - bodyUsed: boolean; - arrayBuffer: Promise; - text: Promise; - json: Promise; - blob: Promise; -}; + body: ReadableStream + bodyUsed: boolean + arrayBuffer: Promise + text: Promise + json: Promise + blob: Promise +} diff --git a/backend/server/src/utils.ts b/backend/server/src/utils.ts index 0aebb03..5ae1377 100644 --- a/backend/server/src/utils.ts +++ b/backend/server/src/utils.ts @@ -1,23 +1,23 @@ export class LockManager { - private locks: { [key: string]: Promise }; + private locks: { [key: string]: Promise } constructor() { - this.locks = {}; + this.locks = {} } async acquireLock(key: string, task: () => Promise): Promise { if (!this.locks[key]) { this.locks[key] = new Promise(async (resolve, reject) => { try { - const result = await task(); - resolve(result); + const result = await task() + resolve(result) } catch (error) { - reject(error); + reject(error) } finally { - delete this.locks[key]; + delete this.locks[key] } - }); + }) } - return await this.locks[key]; + return await this.locks[key] } -} \ No newline at end of file 
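For reference, the port-detection logic that this refactor strips out of the inline `createTerminal` handler above is now called through an `extractPortNumber` helper; the helper's new home is not shown in these hunks. A minimal standalone sketch, assuming it keeps the same ANSI-stripping and `http://localhost:<port>` regex as the removed inline code:

```ts
// Minimal sketch: extract a localhost port from raw terminal output.
// Mirrors the inline logic removed from the createTerminal handler in this diff.
export function extractPortNumber(inputString: string): string | null {
  // Strip ANSI escape codes so colorized output does not break the match
  const cleanedString = inputString.replace(/\x1B\[[0-9;]*m/g, "")
  // Look for a URL of the form http://localhost:<port>
  const match = cleanedString.match(/http:\/\/localhost:(\d+)/)
  return match ? match[1] : null
}

// Example usage, following the shape of the new createTerminal handler
// (sandboxId, containers, and io are assumed to exist in the caller's scope):
const port = parseInt(extractPortNumber("Local: http://localhost:3000") ?? "")
if (port) {
  // e.g. io.emit("previewURL", "https://" + containers[sandboxId].getHost(port))
}
```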
+} diff --git a/frontend/components/dashboard/newProject.tsx b/frontend/components/dashboard/newProject.tsx index 0248058..0f1a5d2 100644 --- a/frontend/components/dashboard/newProject.tsx +++ b/frontend/components/dashboard/newProject.tsx @@ -8,7 +8,7 @@ import { } from "@/components/ui/dialog" import { zodResolver } from "@hookform/resolvers/zod" import Image from "next/image" -import { useState } from "react" +import { useCallback, useEffect, useMemo, useState } from "react" import { useForm } from "react-hook-form" import { z } from "zod" @@ -32,10 +32,14 @@ import { import { createSandbox } from "@/lib/actions" import { projectTemplates } from "@/lib/data" import { useUser } from "@clerk/nextjs" -import { Loader2 } from "lucide-react" +import { ChevronLeft, ChevronRight, Loader2, Search } from "lucide-react" import { useRouter } from "next/navigation" import { Button } from "../ui/button" +import { cn } from "@/lib/utils" +import type { EmblaCarouselType } from "embla-carousel" +import useEmblaCarousel from "embla-carousel-react" +import { WheelGesturesPlugin } from "embla-carousel-wheel-gestures" const formSchema = z.object({ name: z .string() @@ -55,11 +59,20 @@ export default function NewProjectModal({ open: boolean setOpen: (open: boolean) => void }) { + const router = useRouter() + const user = useUser() const [selected, setSelected] = useState("reactjs") const [loading, setLoading] = useState(false) - const router = useRouter() - - const user = useUser() + const [emblaRef, emblaApi] = useEmblaCarousel({ loop: false }, [ + WheelGesturesPlugin(), + ]) + const { + prevBtnDisabled, + nextBtnDisabled, + onPrevButtonClick, + onNextButtonClick, + } = usePrevNextButtons(emblaApi) + const [search, setSearch] = useState("") const form = useForm>({ resolver: zodResolver(formSchema), @@ -69,6 +82,26 @@ export default function NewProjectModal({ }, }) + const handleTemplateClick = useCallback( + ({ id, index }: { id: string; index: number }) => { + setSelected(id) + emblaApi?.scrollTo(index) + }, + [emblaApi] + ) + const filteredTemplates = useMemo( + () => + projectTemplates.filter( + (item) => + item.name.toLowerCase().includes(search.toLowerCase()) || + item.description.toLowerCase().includes(search.toLowerCase()) + ), + [search, projectTemplates] + ) + const emptyTemplates = useMemo( + () => filteredTemplates.length === 0, + [filteredTemplates] + ) async function onSubmit(values: z.infer) { if (!user.isSignedIn) return @@ -78,7 +111,6 @@ export default function NewProjectModal({ const id = await createSandbox(sandboxData) router.push(`/code/${id}`) } - return ( Create A Sandbox -
-            {projectTemplates.map((item) => (
-            ))}
+            {filteredTemplates.map((item, i) => (
+            ))}
+            {emptyTemplates && (
+                No templates found
+            )}
[JSX markup stripped during extraction: the template cards, the Embla carousel viewport they render into, the "No templates found" empty-state wrapper, the previous/next carousel buttons, and the search input.]
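The search box added in this hunk filters the template list on name and description via the `filteredTemplates` memo shown earlier in this file's diff. A minimal standalone sketch of that filter, using a hypothetical `Template` shape limited to the fields the dialog reads (the real `projectTemplates` entries may carry more fields):

```ts
// Hypothetical minimal shape used only for this sketch
type Template = { id: string; name: string; description: string }

// Case-insensitive match on name or description, as in the filteredTemplates memo
function filterTemplates(templates: Template[], search: string): Template[] {
  const q = search.toLowerCase()
  return templates.filter(
    (item) =>
      item.name.toLowerCase().includes(q) ||
      item.description.toLowerCase().includes(q)
  )
}

// An empty result is what drives the "No templates found" empty state
const filtered = filterTemplates(
  [{ id: "reactjs", name: "React", description: "Example description" }],
  "vue"
)
const emptyTemplates = filtered.length === 0 // true here
```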
@@ -176,3 +272,68 @@ export default function NewProjectModal({
) } + +function SearchInput({ + value, + onValueChange, +}: { + value?: string + onValueChange?: (value: string) => void +}) { + const onSubmit = useCallback((e: React.FormEvent) => { + e.preventDefault() + console.log("searching") + }, []) + return ( + + + + ) +} +const usePrevNextButtons = (emblaApi: EmblaCarouselType | undefined) => { + const [prevBtnDisabled, setPrevBtnDisabled] = useState(true) + const [nextBtnDisabled, setNextBtnDisabled] = useState(true) + + const onPrevButtonClick = useCallback(() => { + if (!emblaApi) return + emblaApi.scrollPrev() + }, [emblaApi]) + + const onNextButtonClick = useCallback(() => { + if (!emblaApi) return + emblaApi.scrollNext() + }, [emblaApi]) + + const onSelect = useCallback((emblaApi: EmblaCarouselType) => { + setPrevBtnDisabled(!emblaApi.canScrollPrev()) + setNextBtnDisabled(!emblaApi.canScrollNext()) + }, []) + + useEffect(() => { + if (!emblaApi) return + + onSelect(emblaApi) + emblaApi.on("reInit", onSelect).on("select", onSelect) + }, [emblaApi, onSelect]) + + return { + prevBtnDisabled, + nextBtnDisabled, + onPrevButtonClick, + onNextButtonClick, + } +} diff --git a/frontend/components/editor/AIChat/ChatMessage.tsx b/frontend/components/editor/AIChat/ChatMessage.tsx index 7fd665f..6b0fa72 100644 --- a/frontend/components/editor/AIChat/ChatMessage.tsx +++ b/frontend/components/editor/AIChat/ChatMessage.tsx @@ -1,25 +1,31 @@ -import { Check, ChevronDown, ChevronUp, Copy, CornerUpLeft } from 'lucide-react'; -import React, { useState } from 'react'; -import ReactMarkdown from 'react-markdown'; -import { Prism as SyntaxHighlighter } from 'react-syntax-highlighter'; -import { vscDarkPlus } from 'react-syntax-highlighter/dist/esm/styles/prism'; -import remarkGfm from 'remark-gfm'; -import { Button } from '../../ui/button'; -import { copyToClipboard, stringifyContent } from './lib/chatUtils'; +import { Check, ChevronDown, ChevronUp, Copy, CornerUpLeft } from "lucide-react" +import React, { useState } from "react" +import ReactMarkdown from "react-markdown" +import { Prism as SyntaxHighlighter } from "react-syntax-highlighter" +import { vscDarkPlus } from "react-syntax-highlighter/dist/esm/styles/prism" +import remarkGfm from "remark-gfm" +import { Button } from "../../ui/button" +import { copyToClipboard, stringifyContent } from "./lib/chatUtils" interface MessageProps { message: { - role: 'user' | 'assistant'; - content: string; - context?: string; - }; - setContext: (context: string | null) => void; - setIsContextExpanded: (isExpanded: boolean) => void; + role: "user" | "assistant" + content: string + context?: string + } + setContext: (context: string | null) => void + setIsContextExpanded: (isExpanded: boolean) => void } -export default function ChatMessage({ message, setContext, setIsContextExpanded }: MessageProps) { - const [expandedMessageIndex, setExpandedMessageIndex] = useState(null); - const [copiedText, setCopiedText] = useState(null); +export default function ChatMessage({ + message, + setContext, + setIsContextExpanded, +}: MessageProps) { + const [expandedMessageIndex, setExpandedMessageIndex] = useState< + number | null + >(null) + const [copiedText, setCopiedText] = useState(null) const renderCopyButton = (text: any) => ( - ); + ) const askAboutCode = (code: any) => { - const contextString = stringifyContent(code); - setContext(`Regarding this code:\n${contextString}`); - setIsContextExpanded(false); - }; + const contextString = stringifyContent(code) + setContext(`Regarding this code:\n${contextString}`) + 
setIsContextExpanded(false) + } const renderMarkdownElement = (props: any) => { - const { node, children } = props; - const content = stringifyContent(children); + const { node, children } = props + const content = stringifyContent(children) return (
@@ -59,22 +65,30 @@ export default function ChatMessage({ message, setContext, setIsContextExpanded
- {React.createElement(node.tagName, { - ...props, - className: `${props.className || ''} hover:bg-transparent rounded p-1 transition-colors` - }, children)} + {React.createElement( + node.tagName, + { + ...props, + className: `${ + props.className || "" + } hover:bg-transparent rounded p-1 transition-colors`, + }, + children + )} - ); - }; + ) + } return (
-      {message.role === 'user' && (
+      {message.role === "user" && (
[wrapper markup stripped during extraction]
          {renderCopyButton(message.content)}