Merge branch 'main' into feat/profile-page

Hamzat Victor Oluwabori
2024-11-25 23:10:21 +01:00
committed by GitHub
79 changed files with 2716 additions and 5837 deletions

View File

@@ -1,12 +0,0 @@
# http://editorconfig.org
root = true
[*]
indent_style = tab
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
[*.yml]
indent_style = space

backend/ai/.gitignore vendored
View File

@@ -1,172 +0,0 @@
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# Snowpack dependency directory (https://snowpack.dev/)
web_modules/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional stylelint cache
.stylelintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
# parcel-bundler cache (https://parceljs.org/)
.cache
.parcel-cache
# Next.js build output
.next
out
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# vuepress v2.x temp and cache directory
.temp
.cache
# Docusaurus cache and generated files
.docusaurus
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*
# wrangler project
.dev.vars
.wrangler/

File diff suppressed because it is too large

View File

@@ -1,22 +0,0 @@
{
"name": "ai",
"version": "0.0.0",
"private": true,
"scripts": {
"deploy": "wrangler deploy",
"dev": "wrangler dev",
"start": "wrangler dev",
"test": "vitest",
"cf-typegen": "wrangler types"
},
"devDependencies": {
"@cloudflare/vitest-pool-workers": "^0.1.0",
"@cloudflare/workers-types": "^4.20240512.0",
"typescript": "^5.0.4",
"vitest": "1.3.0",
"wrangler": "^3.0.0"
},
"dependencies": {
"@anthropic-ai/sdk": "^0.27.2"
}
}

View File

@@ -1,128 +0,0 @@
import { Anthropic } from "@anthropic-ai/sdk"
import { MessageParam } from "@anthropic-ai/sdk/src/resources/messages.js"
export interface Env {
ANTHROPIC_API_KEY: string
}
export default {
async fetch(request: Request, env: Env): Promise<Response> {
// Handle CORS preflight requests
if (request.method === "OPTIONS") {
return new Response(null, {
headers: {
"Access-Control-Allow-Origin": "*",
"Access-Control-Allow-Methods": "GET, POST, OPTIONS",
"Access-Control-Allow-Headers": "Content-Type",
},
})
}
if (request.method !== "GET" && request.method !== "POST") {
return new Response("Method Not Allowed", { status: 405 })
}
let body
let isEditCodeWidget = false
if (request.method === "POST") {
body = (await request.json()) as {
messages: unknown
context: unknown
activeFileContent: string
}
} else {
const url = new URL(request.url)
const fileName = url.searchParams.get("fileName") || ""
const code = url.searchParams.get("code") || ""
const line = url.searchParams.get("line") || ""
const instructions = url.searchParams.get("instructions") || ""
body = {
messages: [{ role: "human", content: instructions }],
context: `File: ${fileName}\nLine: ${line}\nCode:\n${code}`,
activeFileContent: code,
}
isEditCodeWidget = true
}
const messages = body.messages
const context = body.context
const activeFileContent = body.activeFileContent
if (!Array.isArray(messages) || messages.length === 0) {
return new Response("Invalid or empty messages", { status: 400 })
}
let systemMessage
if (isEditCodeWidget) {
systemMessage = `You are an AI code editor. Your task is to modify the given code based on the user's instructions. Only output the modified code, without any explanations or markdown formatting. The code should be a direct replacement for the existing code.
Context:
${context}
Active File Content:
${activeFileContent}
Instructions: ${messages[0].content}
Respond only with the modified code that can directly replace the existing code.`
} else {
systemMessage = `You are an intelligent programming assistant. Please respond to the following request concisely. If your response includes code, please format it using triple backticks (\`\`\`) with the appropriate language identifier. For example:
\`\`\`python
print("Hello, World!")
\`\`\`
Provide a clear and concise explanation along with any code snippets. Keep your response brief and to the point.
${context ? `Context:\n${context}\n` : ""}
${activeFileContent ? `Active File Content:\n${activeFileContent}\n` : ""}`
}
const anthropicMessages = messages.map((msg) => ({
role: msg.role === "human" ? "user" : "assistant",
content: msg.content,
})) as MessageParam[]
try {
const anthropic = new Anthropic({ apiKey: env.ANTHROPIC_API_KEY })
const stream = await anthropic.messages.create({
model: "claude-3-5-sonnet-20240620",
max_tokens: 1024,
system: systemMessage,
messages: anthropicMessages,
stream: true,
})
const encoder = new TextEncoder()
const streamResponse = new ReadableStream({
async start(controller) {
for await (const chunk of stream) {
if (
chunk.type === "content_block_delta" &&
chunk.delta.type === "text_delta"
) {
const bytes = encoder.encode(chunk.delta.text)
controller.enqueue(bytes)
}
}
controller.close()
},
})
return new Response(streamResponse, {
headers: {
"Content-Type": "text/plain; charset=utf-8",
"Access-Control-Allow-Origin": "*",
"Cache-Control": "no-cache",
Connection: "keep-alive",
},
})
} catch (error) {
console.error("Error:", error)
return new Response("Internal Server Error", { status: 500 })
}
},
}
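
For reference, a minimal sketch of a client for the streaming endpoint above. The worker URL is a placeholder assumption; the request body mirrors the shape the POST handler parses, and the loop drains the plain-text stream the worker returns.

// Hypothetical client for the streaming worker above (URL is an assumption).
async function streamCompletion(workerUrl: string): Promise<string> {
  const response = await fetch(workerUrl, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      messages: [{ role: "human", content: "Explain this file." }],
      context: "",
      activeFileContent: "",
    }),
  })
  if (!response.ok || !response.body) throw new Error(`HTTP ${response.status}`)
  const reader = response.body.getReader()
  const decoder = new TextDecoder()
  let result = ""
  while (true) {
    const { done, value } = await reader.read()
    if (done) break
    result += decoder.decode(value, { stream: true })
  }
  return result
}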

View File

@@ -1,30 +0,0 @@
// test/index.spec.ts
import {
createExecutionContext,
env,
SELF,
waitOnExecutionContext,
} from "cloudflare:test"
import { describe, expect, it } from "vitest"
import worker from "../src/index"
// For now, you'll need to do something like this to get a correctly-typed
// `Request` to pass to `worker.fetch()`.
const IncomingRequest = Request<unknown, IncomingRequestCfProperties>
describe("Hello World worker", () => {
it("responds with Hello World! (unit style)", async () => {
const request = new IncomingRequest("http://example.com")
// Create an empty context to pass to `worker.fetch()`.
const ctx = createExecutionContext()
const response = await worker.fetch(request, env, ctx)
// Wait for all `Promise`s passed to `ctx.waitUntil()` to settle before running test assertions
await waitOnExecutionContext(ctx)
expect(await response.text()).toMatchInlineSnapshot(`"Hello World!"`)
})
it("responds with Hello World! (integration style)", async () => {
const response = await SELF.fetch("https://example.com")
expect(await response.text()).toMatchInlineSnapshot(`"Hello World!"`)
})
})

View File

@@ -1,11 +0,0 @@
{
"extends": "../tsconfig.json",
"compilerOptions": {
"types": [
"@cloudflare/workers-types/experimental",
"@cloudflare/vitest-pool-workers"
]
},
"include": ["./**/*.ts", "../src/env.d.ts"],
"exclude": []
}

View File

@@ -1,106 +0,0 @@
{
"compilerOptions": {
/* Visit https://aka.ms/tsconfig.json to read more about this file */
/* Projects */
// "incremental": true, /* Enable incremental compilation */
// "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */
// "tsBuildInfoFile": "./", /* Specify the folder for .tsbuildinfo incremental compilation files. */
// "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects */
// "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */
// "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */
/* Language and Environment */
"target": "es2021" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */,
"lib": [
"es2021"
] /* Specify a set of bundled library declaration files that describe the target runtime environment. */,
"jsx": "react" /* Specify what JSX code is generated. */,
// "experimentalDecorators": true, /* Enable experimental support for TC39 stage 2 draft decorators. */
// "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */
// "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h' */
// "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */
// "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using `jsx: react-jsx*`.` */
// "reactNamespace": "", /* Specify the object invoked for `createElement`. This only applies when targeting `react` JSX emit. */
// "noLib": true, /* Disable including any library files, including the default lib.d.ts. */
// "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */
/* Modules */
"module": "es2022" /* Specify what module code is generated. */,
// "rootDir": "./", /* Specify the root folder within your source files. */
"moduleResolution": "Bundler" /* Specify how TypeScript looks up a file from a given module specifier. */,
// "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */
// "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */
// "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */
// "typeRoots": [], /* Specify multiple folders that act like `./node_modules/@types`. */
"types": [
"@cloudflare/workers-types/2023-07-01"
] /* Specify type package names to be included without being referenced in a source file. */,
// "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
"resolveJsonModule": true /* Enable importing .json files */,
// "noResolve": true, /* Disallow `import`s, `require`s or `<reference>`s from expanding the number of files TypeScript should add to a project. */
/* JavaScript Support */
"allowJs": true /* Allow JavaScript files to be a part of your program. Use the `checkJS` option to get errors from these files. */,
"checkJs": false /* Enable error reporting in type-checked JavaScript files. */,
// "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from `node_modules`. Only applicable with `allowJs`. */
/* Emit */
// "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */
// "declarationMap": true, /* Create sourcemaps for d.ts files. */
// "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */
// "sourceMap": true, /* Create source map files for emitted JavaScript files. */
// "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If `declaration` is true, also designates a file that bundles all .d.ts output. */
// "outDir": "./", /* Specify an output folder for all emitted files. */
// "removeComments": true, /* Disable emitting comments. */
"noEmit": true /* Disable emitting files from a compilation. */,
// "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */
// "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types */
// "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */
// "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */
// "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
// "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */
// "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */
// "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */
// "newLine": "crlf", /* Set the newline character for emitting files. */
// "stripInternal": true, /* Disable emitting declarations that have `@internal` in their JSDoc comments. */
// "noEmitHelpers": true, /* Disable generating custom helper functions like `__extends` in compiled output. */
// "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */
// "preserveConstEnums": true, /* Disable erasing `const enum` declarations in generated code. */
// "declarationDir": "./", /* Specify the output directory for generated declaration files. */
// "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */
/* Interop Constraints */
"isolatedModules": true /* Ensure that each file can be safely transpiled without relying on other imports. */,
"allowSyntheticDefaultImports": true /* Allow 'import x from y' when a module doesn't have a default export. */,
// "esModuleInterop": true /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables `allowSyntheticDefaultImports` for type compatibility. */,
// "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */
"forceConsistentCasingInFileNames": true /* Ensure that casing is correct in imports. */,
/* Type Checking */
"strict": true /* Enable all strict type-checking options. */,
// "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied `any` type.. */
// "strictNullChecks": true, /* When type checking, take into account `null` and `undefined`. */
// "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */
// "strictBindCallApply": true, /* Check that the arguments for `bind`, `call`, and `apply` methods match the original function. */
// "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */
// "noImplicitThis": true, /* Enable error reporting when `this` is given the type `any`. */
// "useUnknownInCatchVariables": true, /* Type catch clause variables as 'unknown' instead of 'any'. */
// "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */
// "noUnusedLocals": true, /* Enable error reporting when a local variables aren't read. */
// "noUnusedParameters": true, /* Raise an error when a function parameter isn't read */
// "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */
// "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */
// "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */
// "noUncheckedIndexedAccess": true, /* Include 'undefined' in index signature results */
// "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */
// "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type */
// "allowUnusedLabels": true, /* Disable error reporting for unused labels. */
// "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */
/* Completeness */
// "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */
"skipLibCheck": true /* Skip type checking all .d.ts files. */
},
"exclude": ["test"]
}

View File

@@ -1,11 +0,0 @@
import { defineWorkersConfig } from "@cloudflare/vitest-pool-workers/config"
export default defineWorkersConfig({
test: {
poolOptions: {
workers: {
wrangler: { configPath: "./wrangler.toml" },
},
},
},
})

View File

@@ -1,3 +0,0 @@
// Generated by Wrangler
// After adding bindings to `wrangler.toml`, regenerate this interface via `npm run cf-typegen`
interface Env {}

View File

@@ -1,10 +0,0 @@
name = "ai"
main = "src/index.ts"
compatibility_date = "2024-05-12"
compatibility_flags = ["nodejs_compat"]
[ai]
binding = "AI"
[vars]
ANTHROPIC_API_KEY = ""

View File

@@ -1,7 +1,7 @@
{
"version": "5",
"dialect": "sqlite",
"id": "afe10bff-362b-402c-bdb5-038341692f35",
"id": "1288b006-6410-4b1c-8c96-d9797878a116",
"prevId": "00000000-0000-0000-0000-000000000000",
"tables": {
"sandbox": {
@@ -140,13 +140,6 @@
"autoincrement": false,
"default": "CURRENT_TIMESTAMP"
},
"image": {
"name": "image",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"generations": {
"name": "generations",
"type": "integer",
@@ -154,6 +147,28 @@
"notNull": false,
"autoincrement": false,
"default": 0
},
"tier": {
"name": "tier",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": "'FREE'"
},
"tierExpiresAt": {
"name": "tierExpiresAt",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"lastResetDate": {
"name": "lastResetDate",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {

View File

@@ -1,236 +0,0 @@
{
"version": "5",
"dialect": "sqlite",
"id": "e570d5ac-700d-4e62-8a46-482b21ae1fe1",
"prevId": "afe10bff-362b-402c-bdb5-038341692f35",
"tables": {
"sandbox": {
"name": "sandbox",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"name": {
"name": "name",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"type": {
"name": "type",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"visibility": {
"name": "visibility",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"createdAt": {
"name": "createdAt",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": "CURRENT_TIMESTAMP"
},
"user_id": {
"name": "user_id",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"likeCount": {
"name": "likeCount",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": 0
},
"viewCount": {
"name": "viewCount",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": 0
}
},
"indexes": {
"sandbox_id_unique": {
"name": "sandbox_id_unique",
"columns": [
"id"
],
"isUnique": true
}
},
"foreignKeys": {
"sandbox_user_id_user_id_fk": {
"name": "sandbox_user_id_user_id_fk",
"tableFrom": "sandbox",
"tableTo": "user",
"columnsFrom": [
"user_id"
],
"columnsTo": [
"id"
],
"onDelete": "no action",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {}
},
"user": {
"name": "user",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"name": {
"name": "name",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"email": {
"name": "email",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"username": {
"name": "username",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"avatarUrl": {
"name": "avatarUrl",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"createdAt": {
"name": "createdAt",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": "CURRENT_TIMESTAMP"
},
"generations": {
"name": "generations",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": 0
}
},
"indexes": {
"user_id_unique": {
"name": "user_id_unique",
"columns": [
"id"
],
"isUnique": true
},
"user_username_unique": {
"name": "user_username_unique",
"columns": [
"username"
],
"isUnique": true
}
},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {}
},
"users_to_sandboxes": {
"name": "users_to_sandboxes",
"columns": {
"userId": {
"name": "userId",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"sandboxId": {
"name": "sandboxId",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"sharedOn": {
"name": "sharedOn",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {
"users_to_sandboxes_userId_user_id_fk": {
"name": "users_to_sandboxes_userId_user_id_fk",
"tableFrom": "users_to_sandboxes",
"tableTo": "user",
"columnsFrom": [
"userId"
],
"columnsTo": [
"id"
],
"onDelete": "no action",
"onUpdate": "no action"
},
"users_to_sandboxes_sandboxId_sandbox_id_fk": {
"name": "users_to_sandboxes_sandboxId_sandbox_id_fk",
"tableFrom": "users_to_sandboxes",
"tableTo": "sandbox",
"columnsFrom": [
"sandboxId"
],
"columnsTo": [
"id"
],
"onDelete": "no action",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {}
}
},
"enums": {},
"_meta": {
"schemas": {},
"tables": {},
"columns": {}
}
}

View File

@@ -5,29 +5,8 @@
{
"idx": 0,
"version": "5",
"when": 1731288423588,
"tag": "0000_cuddly_patriot",
"breakpoints": true
},
{
"idx": 1,
"version": "5",
"when": 1731290863632,
"tag": "0001_opposite_newton_destine",
"breakpoints": true
},
{
"idx": 2,
"version": "5",
"when": 1731296235880,
"tag": "0002_rainy_fantastic_four",
"breakpoints": true
},
{
"idx": 3,
"version": "5",
"when": 1731297339306,
"tag": "0003_lying_snowbird",
"when": 1732568535771,
"tag": "0000_rapid_korath",
"breakpoints": true
}
]

View File

@@ -245,114 +245,6 @@ export default {
return success
} else return methodNotAllowed
} else if (path === "/api/sandbox/generate" && method === "POST") {
const generateSchema = z.object({
userId: z.string(),
})
const body = await request.json()
const { userId } = generateSchema.parse(body)
const dbUser = await db.query.user.findFirst({
where: (user, { eq }) => eq(user.id, userId),
})
if (!dbUser) {
return new Response("User not found.", { status: 400 })
}
if (dbUser.generations !== null && dbUser.generations >= 10) {
return new Response("You reached the maximum # of generations.", {
status: 400,
})
}
await db
.update(user)
.set({ generations: sql`${user.generations} + 1` })
.where(eq(user.id, userId))
.get()
return success
} else if (path === "/api/sandbox/like") {
if (method === "POST") {
const likeSchema = z.object({
sandboxId: z.string(),
userId: z.string(),
})
try {
const body = await request.json()
const { sandboxId, userId } = likeSchema.parse(body)
// Check if user has already liked
const existingLike = await db.query.sandboxLikes.findFirst({
where: (likes, { and, eq }) =>
and(eq(likes.sandboxId, sandboxId), eq(likes.userId, userId)),
})
if (existingLike) {
// Unlike
await db
.delete(sandboxLikes)
.where(
and(
eq(sandboxLikes.sandboxId, sandboxId),
eq(sandboxLikes.userId, userId)
)
)
await db
.update(sandbox)
.set({
likeCount: sql`${sandbox.likeCount} - 1`,
})
.where(eq(sandbox.id, sandboxId))
return json({
message: "Unlike successful",
liked: false,
})
} else {
// Like
await db.insert(sandboxLikes).values({
sandboxId,
userId,
createdAt: new Date(),
})
await db
.update(sandbox)
.set({
likeCount: sql`${sandbox.likeCount} + 1`,
})
.where(eq(sandbox.id, sandboxId))
return json({
message: "Like successful",
liked: true,
})
}
} catch (error) {
return new Response("Invalid request format", { status: 400 })
}
} else if (method === "GET") {
const params = url.searchParams
const sandboxId = params.get("sandboxId")
const userId = params.get("userId")
if (!sandboxId || !userId) {
return invalidRequest
}
const like = await db.query.sandboxLikes.findFirst({
where: (likes, { and, eq }) =>
and(eq(likes.sandboxId, sandboxId), eq(likes.userId, userId)),
})
return json({
liked: !!like,
})
} else {
return methodNotAllowed
}
} else if (path === "/api/user") {
if (method === "GET") {
const params = url.searchParams
@@ -426,12 +318,14 @@ export default {
avatarUrl: z.string().optional(),
createdAt: z.string().optional(),
generations: z.number().optional(),
tier: z.enum(["FREE", "PRO", "ENTERPRISE"]).optional(),
tierExpiresAt: z.number().optional(),
lastResetDate: z.number().optional(),
})
const body = await request.json()
const { id, name, email, username, avatarUrl, createdAt, generations } =
userSchema.parse(body)
const { id, name, email, username, avatarUrl, createdAt, generations, tier, tierExpiresAt, lastResetDate } = userSchema.parse(body)
const res = await db
.insert(user)
.values({
@@ -442,6 +336,9 @@ export default {
avatarUrl,
createdAt: createdAt ? new Date(createdAt) : new Date(),
generations,
tier,
tierExpiresAt,
lastResetDate,
})
.returning()
.get()
@@ -521,6 +418,76 @@ export default {
return json({ exists: !!exists })
}
return methodNotAllowed
} else if (path === "/api/user/increment-generations" && method === "POST") {
const schema = z.object({
userId: z.string(),
})
const body = await request.json()
const { userId } = schema.parse(body)
await db
.update(user)
.set({ generations: sql`${user.generations} + 1` })
.where(eq(user.id, userId))
.get()
return success
} else if (path === "/api/user/update-tier" && method === "POST") {
const schema = z.object({
userId: z.string(),
tier: z.enum(["FREE", "PRO", "ENTERPRISE"]),
tierExpiresAt: z.date(),
})
const body = await request.json()
const { userId, tier, tierExpiresAt } = schema.parse(body)
await db
.update(user)
.set({
tier,
tierExpiresAt: tierExpiresAt.getTime(),
// Reset generations when upgrading tier
generations: 0
})
.where(eq(user.id, userId))
.get()
return success
} else if (path === "/api/user/check-reset" && method === "POST") {
const schema = z.object({
userId: z.string(),
})
const body = await request.json()
const { userId } = schema.parse(body)
const dbUser = await db.query.user.findFirst({
where: (user, { eq }) => eq(user.id, userId),
})
if (!dbUser) {
return new Response("User not found", { status: 404 })
}
const now = new Date()
const lastReset = dbUser.lastResetDate ? new Date(dbUser.lastResetDate) : new Date(0)
if (now.getMonth() !== lastReset.getMonth() || now.getFullYear() !== lastReset.getFullYear()) {
await db
.update(user)
.set({
generations: 0,
lastResetDate: now.getTime()
})
.where(eq(user.id, userId))
.get()
return new Response("Reset successful", { status: 200 })
}
return new Response("No reset needed", { status: 200 })
} else return notFound
},
}
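
A brief sketch of how a caller might exercise the new monthly-reset endpoint; the base URL and Authorization header are assumptions modeled on the other worker-to-worker calls in this PR.

// Hypothetical caller for /api/user/check-reset (env variable names assumed).
async function checkGenerationsReset(userId: string): Promise<void> {
  const res = await fetch(
    `${process.env.DATABASE_WORKER_URL}/api/user/check-reset`,
    {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        Authorization: `${process.env.WORKERS_KEY}`,
      },
      body: JSON.stringify({ userId }),
    }
  )
  if (!res.ok) throw new Error(`check-reset failed: ${res.status}`)
}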

View File

@@ -16,6 +16,9 @@ export const user = sqliteTable("user", {
sql`CURRENT_TIMESTAMP`
),
generations: integer("generations").default(0),
tier: text("tier", { enum: ["FREE", "PRO", "ENTERPRISE"] }).default("FREE"),
tierExpiresAt: integer("tierExpiresAt"),
lastResetDate: integer("lastResetDate"),
})
export type User = typeof user.$inferSelect
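
For illustration, a hedged sketch of reading the new tier columns; `db` is assumed to be the same Drizzle instance the database worker uses, and `tierExpiresAt` holds a millisecond timestamp (the update-tier endpoint stores `tierExpiresAt.getTime()`).

// Hypothetical helper built on the new columns (not part of this diff).
async function hasActivePaidTier(userId: string): Promise<boolean> {
  const dbUser = await db.query.user.findFirst({
    where: (user, { eq }) => eq(user.id, userId),
  })
  if (!dbUser || !dbUser.tier || dbUser.tier === "FREE") return false
  // Treat a null expiry as non-expiring; otherwise compare to now.
  return dbUser.tierExpiresAt === null || dbUser.tierExpiresAt > Date.now()
}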

File diff suppressed because it is too large

View File

@@ -14,8 +14,9 @@
"concurrently": "^8.2.2",
"cors": "^2.8.5",
"dotenv": "^16.4.5",
"e2b": "^0.16.2-beta.47",
"e2b": "^1.0.5",
"express": "^4.19.2",
"jzip": "^1.0.0",
"rate-limiter-flexible": "^5.0.3",
"simple-git": "^3.25.0",
"socket.io": "^4.7.5",
@@ -25,10 +26,11 @@
"devDependencies": {
"@types/cors": "^2.8.17",
"@types/express": "^4.17.21",
"@types/jszip": "^3.4.1",
"@types/node": "^20.12.7",
"@types/ssh2": "^1.15.0",
"nodemon": "^3.1.0",
"ts-node": "^10.9.2",
"typescript": "^5.4.5"
}
}
}

View File

@@ -1,90 +0,0 @@
// AIWorker class for handling AI-related operations
export class AIWorker {
private aiWorkerUrl: string
private cfAiKey: string
private databaseWorkerUrl: string
private workersKey: string
// Constructor to initialize AIWorker with necessary URLs and keys
constructor(
aiWorkerUrl: string,
cfAiKey: string,
databaseWorkerUrl: string,
workersKey: string
) {
this.aiWorkerUrl = aiWorkerUrl
this.cfAiKey = cfAiKey
this.databaseWorkerUrl = databaseWorkerUrl
this.workersKey = workersKey
}
// Method to generate code based on user input
async generateCode(
userId: string,
fileName: string,
code: string,
line: number,
instructions: string
): Promise<{ response: string; success: boolean }> {
try {
const fetchPromise = fetch(
`${process.env.DATABASE_WORKER_URL}/api/sandbox/generate`,
{
method: "POST",
headers: {
"Content-Type": "application/json",
Authorization: `${process.env.WORKERS_KEY}`,
},
body: JSON.stringify({
userId: userId,
}),
}
)
// Generate code from cloudflare workers AI
const generateCodePromise = fetch(
`${process.env.AI_WORKER_URL}/api?fileName=${encodeURIComponent(
fileName
)}&code=${encodeURIComponent(code)}&line=${encodeURIComponent(
line
)}&instructions=${encodeURIComponent(instructions)}`,
{
headers: {
"Content-Type": "application/json",
Authorization: `${process.env.CF_AI_KEY}`,
},
}
)
const [fetchResponse, generateCodeResponse] = await Promise.all([
fetchPromise,
generateCodePromise,
])
if (!generateCodeResponse.ok) {
throw new Error(`HTTP error! status: ${generateCodeResponse.status}`)
}
const reader = generateCodeResponse.body?.getReader()
const decoder = new TextDecoder()
let result = ""
if (reader) {
while (true) {
const { done, value } = await reader.read()
if (done) break
result += decoder.decode(value, { stream: true })
}
}
// The result should now contain only the modified code
return { response: result.trim(), success: true }
} catch (e: any) {
console.error("Error generating code:", e)
return {
response: "Error generating code. Please try again.",
success: false,
}
}
}
}

View File

@@ -1,58 +1,61 @@
import { Socket } from "socket.io"
class Counter {
private count: number = 0
private count: number = 0
increment() {
this.count++
}
increment() {
this.count++
}
decrement() {
this.count = Math.max(0, this.count - 1)
}
decrement() {
this.count = Math.max(0, this.count - 1)
}
getValue(): number {
return this.count
}
getValue(): number {
return this.count
}
}
// Owner Connection Management
export class ConnectionManager {
// Counts how many times the owner is connected to a sandbox
private ownerConnections: Record<string, Counter> = {}
// Stores all sockets connected to a given sandbox
private sockets: Record<string, Set<Socket>> = {}
// Counts how many times the owner is connected to a sandbox
private ownerConnections: Record<string, Counter> = {}
// Stores all sockets connected to a given sandbox
private sockets: Record<string, Set<Socket>> = {}
// Checks if the owner of a sandbox is connected
ownerIsConnected(sandboxId: string): boolean {
return this.ownerConnections[sandboxId]?.getValue() > 0
// Checks if the owner of a sandbox is connected
ownerIsConnected(sandboxId: string): boolean {
return this.ownerConnections[sandboxId]?.getValue() > 0
}
// Adds a connection for a sandbox
addConnectionForSandbox(socket: Socket, sandboxId: string, isOwner: boolean) {
this.sockets[sandboxId] ??= new Set()
this.sockets[sandboxId].add(socket)
// If the connection is for the owner, increments the owner connection counter
if (isOwner) {
this.ownerConnections[sandboxId] ??= new Counter()
this.ownerConnections[sandboxId].increment()
}
}
// Adds a connection for a sandbox
addConnectionForSandbox(socket: Socket, sandboxId: string, isOwner: boolean) {
this.sockets[sandboxId] ??= new Set()
this.sockets[sandboxId].add(socket)
// Removes a connection for a sandbox
removeConnectionForSandbox(
socket: Socket,
sandboxId: string,
isOwner: boolean
) {
this.sockets[sandboxId]?.delete(socket)
// If the connection is for the owner, increments the owner connection counter
if (isOwner) {
this.ownerConnections[sandboxId] ??= new Counter()
this.ownerConnections[sandboxId].increment()
}
// If the connection being removed is for the owner, decrements the owner connection counter
if (isOwner) {
this.ownerConnections[sandboxId]?.decrement()
}
}
// Removes a connection for a sandbox
removeConnectionForSandbox(socket: Socket, sandboxId: string, isOwner: boolean) {
this.sockets[sandboxId]?.delete(socket)
// If the connection being removed is for the owner, decrements the owner connection counter
if (isOwner) {
this.ownerConnections[sandboxId]?.decrement()
}
}
// Returns the set of sockets connected to a given sandbox
connectionsForSandbox(sandboxId: string): Set<Socket> {
return this.sockets[sandboxId] ?? new Set();
}
}
// Returns the set of sockets connected to a given sandbox
connectionsForSandbox(sandboxId: string): Set<Socket> {
return this.sockets[sandboxId] ?? new Set()
}
}
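
A short usage sketch (the wiring and event name are assumptions, not part of the diff): the manager tracks per-sandbox sockets plus an owner connection counter, so callers can gate visitor access on owner presence.

// Hypothetical wiring of ConnectionManager in a connection handler.
const connections = new ConnectionManager()

function onConnect(socket: Socket, sandboxId: string, isOwner: boolean) {
  connections.addConnectionForSandbox(socket, sandboxId, isOwner)
  if (!isOwner && !connections.ownerIsConnected(sandboxId)) {
    // Presumably visitors are turned away until the owner connects.
    socket.emit("disableAccess", "The sandbox owner is not connected.")
  }
}

function onDisconnect(socket: Socket, sandboxId: string, isOwner: boolean) {
  connections.removeConnectionForSandbox(socket, sandboxId, isOwner)
}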

View File

@@ -1,4 +1,5 @@
import { FilesystemEvent, Sandbox, WatchHandle } from "e2b"
import JSZip from "jszip"
import path from "path"
import RemoteFileStorage from "./RemoteFileStorage"
import { MAX_BODY_SIZE } from "./ratelimit"
@@ -23,7 +24,11 @@ function generateFileStructure(paths: string[]): (TFolder | TFile)[] {
}
} else {
if (isFile) {
const file: TFile = { id: `/${parts.join("/")}`, type: "file", name: part }
const file: TFile = {
id: `/${parts.join("/")}`,
type: "file",
name: part,
}
current.children.push(file)
} else {
const folder: TFolder = {
@@ -75,7 +80,9 @@ export class FileManager {
if (isFile) {
const fileId = `/${parts.join("/")}`
const data = await RemoteFileStorage.fetchFileContent(`projects/${this.sandboxId}${fileId}`)
const data = await RemoteFileStorage.fetchFileContent(
`projects/${this.sandboxId}${fileId}`
)
fileData.push({ id: fileId, data })
}
}
@@ -91,7 +98,7 @@
// Convert remote file path to local file path
private getLocalFileId(remoteId: string): string | undefined {
const allParts = remoteId.split("/")
if (allParts[1] !== this.sandboxId) return undefined;
if (allParts[1] !== this.sandboxId) return undefined
return allParts.slice(2).join("/")
}
@@ -99,7 +106,7 @@
private getLocalFileIds(remoteIds: string[]): string[] {
return remoteIds
.map(this.getLocalFileId.bind(this))
.filter((id) => id !== undefined);
.filter((id) => id !== undefined)
}
// Download files from remote storage
@@ -118,9 +125,21 @@
return this.files
}
private async loadLocalFiles() {
// Reload file list from the container to include template files
const result = await this.sandbox.commands.run(
`find "${this.dirName}" -type f`
) // List all files recursively
const localPaths = result.stdout.split("\n").filter((path) => path) // Split the output into an array and filter out empty strings
const relativePaths = localPaths.map((filePath) =>
path.posix.relative(this.dirName, filePath)
) // Convert absolute paths to relative paths
this.files = generateFileStructure(relativePaths)
}
// Initialize the FileManager
async initialize() {
// Download files from remote file storage
await this.updateFileStructure()
await this.updateFileData()
@@ -140,11 +159,7 @@
})
await Promise.all(promises)
// Reload file list from the container to include template files
const result = await this.sandbox.commands.run(`find "${this.dirName}" -type f`); // List all files recursively
const localPaths = result.stdout.split('\n').filter(path => path); // Split the output into an array and filter out empty strings
const relativePaths = localPaths.map(filePath => path.posix.relative(this.dirName, filePath)); // Convert absolute paths to relative paths
this.files = generateFileStructure(relativePaths);
await this.loadLocalFiles()
// Make the logged in user the owner of all project files
this.fixPermissions()
@@ -169,9 +184,7 @@
// Change the owner of the project directory to user
private async fixPermissions() {
try {
await this.sandbox.commands.run(
`sudo chown -R user "${this.dirName}"`
)
await this.sandbox.commands.run(`sudo chown -R user "${this.dirName}"`)
} catch (e: any) {
console.log("Failed to fix permissions: " + e)
}
@@ -180,7 +193,7 @@
// Watch a directory for changes
async watchDirectory(directory: string): Promise<WatchHandle | undefined> {
try {
const handle = await this.sandbox.files.watch(
const handle = await this.sandbox.files.watchDir(
directory,
async (event: FilesystemEvent) => {
try {
@@ -193,7 +206,10 @@
// This is the absolute file path in the container
const containerFilePath = path.posix.join(directory, event.name)
// This is the file path relative to the project directory
const sandboxFilePath = removeDirName(containerFilePath, this.dirName)
const sandboxFilePath = removeDirName(
containerFilePath,
this.dirName
)
// This is the directory being watched relative to the project directory
const sandboxDirectory = removeDirName(directory, this.dirName)
@@ -210,77 +226,13 @@
// Handle file/directory creation event
if (event.type === "create") {
const folder = findFolderById(
this.files,
sandboxDirectory
) as TFolder
const isDir = await this.isDirectory(containerFilePath)
const newItem = isDir
? ({
id: sandboxFilePath,
name: event.name,
type: "folder",
children: [],
} as TFolder)
: ({
id: sandboxFilePath,
name: event.name,
type: "file",
} as TFile)
if (folder) {
// If the folder exists, add the new item (file/folder) as a child
folder.children.push(newItem)
} else {
// If folder doesn't exist, add the new item to the root
this.files.push(newItem)
}
if (!isDir) {
const fileData = await this.sandbox.files.read(
containerFilePath
)
const fileContents =
typeof fileData === "string" ? fileData : ""
this.fileData.push({
id: sandboxFilePath,
data: fileContents,
})
}
await this.loadLocalFiles()
console.log(`Create ${sandboxFilePath}`)
}
// Handle file/directory removal or rename event
else if (event.type === "remove" || event.type == "rename") {
const folder = findFolderById(
this.files,
sandboxDirectory
) as TFolder
const isDir = await this.isDirectory(containerFilePath)
const isFileMatch = (file: TFolder | TFile | TFileData) =>
file.id === sandboxFilePath ||
file.id.startsWith(containerFilePath + "/")
if (folder) {
// Remove item from its parent folder
folder.children = folder.children.filter(
(file: TFolder | TFile) => !isFileMatch(file)
)
} else {
// Remove from the root if it's not inside a folder
this.files = this.files.filter(
(file: TFolder | TFile) => !isFileMatch(file)
)
}
// Also remove any corresponding file data
this.fileData = this.fileData.filter(
(file: TFileData) => !isFileMatch(file)
)
await this.loadLocalFiles()
console.log(`Removed: ${sandboxFilePath}`)
}
@@ -330,7 +282,7 @@
)
}
},
{ timeout: 0 }
{ timeoutMs: 0 }
)
this.fileWatchers.push(handle)
return handle
@@ -361,7 +313,9 @@
// Get folder content
async getFolder(folderId: string): Promise<string[]> {
const remotePaths = await RemoteFileStorage.getFolder(this.getRemoteFileId(folderId))
const remotePaths = await RemoteFileStorage.getFolder(
this.getRemoteFileId(folderId)
)
return this.getLocalFileIds(remotePaths)
}
@@ -373,11 +327,20 @@
throw new Error("File size too large. Please reduce the file size.")
}
await RemoteFileStorage.saveFile(this.getRemoteFileId(fileId), body)
const file = this.fileData.find((f) => f.id === fileId)
if (!file) return
file.data = body
await this.sandbox.files.write(path.posix.join(this.dirName, file.id), body)
let file = this.fileData.find((f) => f.id === fileId)
if (file) {
file.data = body
} else {
// If the file wasn't in our cache, add it
file = {
id: fileId,
data: body,
}
this.fileData.push(file)
}
await this.sandbox.files.write(path.posix.join(this.dirName, fileId), body)
this.fixPermissions()
}
@@ -400,7 +363,11 @@
fileData.id = newFileId
file.id = newFileId
await RemoteFileStorage.renameFile(this.getRemoteFileId(fileId), this.getRemoteFileId(newFileId), fileData.data)
await RemoteFileStorage.renameFile(
this.getRemoteFileId(fileId),
this.getRemoteFileId(newFileId),
fileData.data
)
return this.updateFileStructure()
}
@@ -432,22 +399,88 @@
await this.sandbox.files.write(path.posix.join(this.dirName, id), "")
await this.fixPermissions()
this.files.push({
id,
name,
type: "file",
})
this.fileData.push({
id,
data: "",
})
await RemoteFileStorage.createFile(this.getRemoteFileId(id))
return true
}
public async loadFileContent(): Promise<TFileData[]> {
// Get all file paths, excluding node_modules
const result = await this.sandbox.commands.run(
`find "${this.dirName}" -path "${this.dirName}/node_modules" -prune -o -type f -print`
)
const filePaths = result.stdout.split("\n").filter((path) => path) ?? []
console.log("Paths found for download (excluding node_modules):", filePaths)
// Add files to zip with synchronized content
for (const filePath of filePaths) {
const relativePath = filePath.replace(this.dirName, "") // Remove base directory from path
try {
// Read the file content from the sandbox
const content = await this.sandbox.files.read(filePath)
// Find the existing file data entry or create a new one
const fileDataEntry = this.fileData.find(
(f) => f.id === relativePath
) || {
id: relativePath,
data: typeof content === "string" ? content : "",
}
// Update the file data entry if it already exists, otherwise add it to the list
if (!this.fileData.includes(fileDataEntry)) {
this.fileData.push(fileDataEntry)
} else {
fileDataEntry.data = typeof content === "string" ? content : ""
}
} catch (error) {
console.error(`Failed to read content for ${relativePath}:`, error)
}
}
return this.fileData
}
public async getFilesForDownload(): Promise<string> {
// Create new JSZip instance
const zip = new JSZip()
await this.loadFileContent()
if (this.fileData.length === 0) {
console.error(
"No files found in the sandbox project directory for download."
)
return ""
}
// Add files to zip with synchronized content
for (const fileDataEntry of this.fileData) {
const relativePath = fileDataEntry.id
const content = fileDataEntry.data
zip.file(relativePath, content)
console.log(`Added file to ZIP: ${relativePath}`)
}
// Generate zip file
const zipBlob = await zip.generateAsync({
type: "blob",
compression: "DEFLATE",
compressionOptions: {
level: 6,
},
})
// Convert Blob to Base64
const zipBlobArrayBuffer = await zipBlob.arrayBuffer()
const zipBlobBase64 = btoa(
String.fromCharCode(...new Uint8Array(zipBlobArrayBuffer))
)
return zipBlobBase64
}
// Create a new folder
async createFolder(name: string): Promise<void> {
const id = `/${name}`
@@ -465,7 +498,11 @@
await this.moveFileInContainer(fileId, newFileId)
await this.fixPermissions()
await RemoteFileStorage.renameFile(this.getRemoteFileId(fileId), this.getRemoteFileId(newFileId), fileData.data)
await RemoteFileStorage.renameFile(
this.getRemoteFileId(fileId),
this.getRemoteFileId(newFileId),
fileData.data
)
fileData.id = newFileId
file.id = newFileId
@@ -477,9 +514,6 @@
if (!file) return this.files
await this.sandbox.files.remove(path.posix.join(this.dirName, fileId))
this.fileData = this.fileData.filter(
(f) => f.id !== fileId
)
await RemoteFileStorage.deleteFile(this.getRemoteFileId(fileId))
return this.updateFileStructure()
@@ -487,14 +521,13 @@
// Delete a folder
async deleteFolder(folderId: string): Promise<(TFolder | TFile)[]> {
const files = await RemoteFileStorage.getFolder(this.getRemoteFileId(folderId))
const files = await RemoteFileStorage.getFolder(
this.getRemoteFileId(folderId)
)
await Promise.all(
files.map(async (file) => {
await this.sandbox.files.remove(path.posix.join(this.dirName, file))
this.fileData = this.fileData.filter(
(f) => f.id !== file
)
await RemoteFileStorage.deleteFile(this.getRemoteFileId(file))
})
)
@@ -506,7 +539,7 @@
async closeWatchers() {
await Promise.all(
this.fileWatchers.map(async (handle: WatchHandle) => {
await handle.close()
await handle.stop()
})
)
}
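
On the receiving end, a browser would reverse the Base64 step from getFilesForDownload. A minimal sketch under that assumption, using only standard DOM APIs:

// Hypothetical browser-side counterpart: decode the Base64 ZIP and save it.
function saveZip(zipBase64: string, fileName = "project.zip"): void {
  const binary = atob(zipBase64)
  const bytes = new Uint8Array(binary.length)
  for (let i = 0; i < binary.length; i++) bytes[i] = binary.charCodeAt(i)
  const blob = new Blob([bytes], { type: "application/zip" })
  const url = URL.createObjectURL(blob)
  const anchor = document.createElement("a")
  anchor.href = url
  anchor.download = fileName
  anchor.click()
  URL.revokeObjectURL(url)
}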

View File

@@ -61,11 +61,7 @@ export const RemoteFileStorage = {
return res.ok
},
renameFile: async (
fileId: string,
newFileId: string,
data: string
) => {
renameFile: async (fileId: string, newFileId: string, data: string) => {
const res = await fetch(`${process.env.STORAGE_WORKER_URL}/api/rename`, {
method: "POST",
headers: {
@@ -111,7 +107,7 @@
}
)
return (await res.json()).size
}
},
}
export default RemoteFileStorage
export default RemoteFileStorage

View File

@@ -1,262 +1,269 @@
import { Sandbox as E2BSandbox } from "e2b"
import { Socket } from "socket.io"
import { AIWorker } from "./AIWorker"
import { CONTAINER_TIMEOUT } from "./constants"
import { DokkuClient } from "./DokkuClient"
import { FileManager } from "./FileManager"
import {
createFileRL,
createFolderRL,
deleteFileRL,
renameFileRL,
saveFileRL,
createFileRL,
createFolderRL,
deleteFileRL,
renameFileRL,
saveFileRL,
} from "./ratelimit"
import { SecureGitClient } from "./SecureGitClient"
import { TerminalManager } from "./TerminalManager"
import { TFile, TFileData, TFolder } from "./types"
import { TFile, TFolder } from "./types"
import { LockManager } from "./utils"
const lockManager = new LockManager()
// Define a type for SocketHandler functions
type SocketHandler<T = Record<string, any>> = (args: T) => any;
type SocketHandler<T = Record<string, any>> = (args: T) => any
// Extract port number from a string
function extractPortNumber(inputString: string): number | null {
const cleanedString = inputString.replace(/\x1B\[[0-9;]*m/g, "")
const regex = /http:\/\/localhost:(\d+)/
const match = cleanedString.match(regex)
return match ? parseInt(match[1]) : null
const cleanedString = inputString.replace(/\x1B\[[0-9;]*m/g, "")
const regex = /http:\/\/localhost:(\d+)/
const match = cleanedString.match(regex)
return match ? parseInt(match[1]) : null
}
type ServerContext = {
aiWorker: AIWorker;
dokkuClient: DokkuClient | null;
gitClient: SecureGitClient | null;
};
dokkuClient: DokkuClient | null
gitClient: SecureGitClient | null
}
export class Sandbox {
// Sandbox properties:
sandboxId: string
type: string
fileManager: FileManager | null
terminalManager: TerminalManager | null
container: E2BSandbox | null
// Server context:
dokkuClient: DokkuClient | null
gitClient: SecureGitClient | null
constructor(
sandboxId: string,
type: string,
{ dokkuClient, gitClient }: ServerContext
) {
// Sandbox properties:
sandboxId: string;
type: string;
fileManager: FileManager | null;
terminalManager: TerminalManager | null;
container: E2BSandbox | null;
this.sandboxId = sandboxId
this.type = type
this.fileManager = null
this.terminalManager = null
this.container = null
// Server context:
dokkuClient: DokkuClient | null;
gitClient: SecureGitClient | null;
aiWorker: AIWorker;
this.dokkuClient = dokkuClient
this.gitClient = gitClient
}
constructor(sandboxId: string, type: string, { aiWorker, dokkuClient, gitClient }: ServerContext) {
// Sandbox properties:
this.sandboxId = sandboxId;
this.type = type;
this.fileManager = null;
this.terminalManager = null;
this.container = null;
// Server context:
this.aiWorker = aiWorker;
this.dokkuClient = dokkuClient;
this.gitClient = gitClient;
}
// Initializes the container for the sandbox environment
async initialize(
fileWatchCallback: ((files: (TFolder | TFile)[]) => void) | undefined
) {
// Acquire a lock to ensure exclusive access to the sandbox environment
await lockManager.acquireLock(this.sandboxId, async () => {
// Check if a container already exists and is running
if (this.container && await this.container.isRunning()) {
console.log(`Found existing container ${this.sandboxId}`)
} else {
console.log("Creating container", this.sandboxId)
// Create a new container with a specified template and timeout
const templateTypes = ["vanillajs", "reactjs", "nextjs", "streamlit"];
const template = templateTypes.includes(this.type)
? `gitwit-${this.type}`
: `base`;
this.container = await E2BSandbox.create(template, {
timeoutMs: CONTAINER_TIMEOUT,
})
}
// Initializes the container for the sandbox environment
async initialize(
fileWatchCallback: ((files: (TFolder | TFile)[]) => void) | undefined
) {
// Acquire a lock to ensure exclusive access to the sandbox environment
await lockManager.acquireLock(this.sandboxId, async () => {
// Check if a container already exists and is running
if (this.container && (await this.container.isRunning())) {
console.log(`Found existing container ${this.sandboxId}`)
} else {
console.log("Creating container", this.sandboxId)
// Create a new container with a specified template and timeout
const templateTypes = [
"vanillajs",
"reactjs",
"nextjs",
"streamlit",
"php",
]
const template = templateTypes.includes(this.type)
? `gitwit-${this.type}`
: `base`
this.container = await E2BSandbox.create(template, {
timeoutMs: CONTAINER_TIMEOUT,
})
// Ensure a container was successfully created
if (!this.container) throw new Error("Failed to create container")
}
})
// Ensure a container was successfully created
if (!this.container) throw new Error("Failed to create container")
// Initialize the terminal manager if it hasn't been set up yet
if (!this.terminalManager) {
this.terminalManager = new TerminalManager(this.container)
console.log(`Terminal manager set up for ${this.sandboxId}`)
}
// Initialize the file manager if it hasn't been set up yet
if (!this.fileManager) {
this.fileManager = new FileManager(
this.sandboxId,
this.container,
fileWatchCallback ?? null
)
// Initialize the file manager and emit the initial files
await this.fileManager.initialize()
}
// Initialize the terminal manager if it hasn't been set up yet
if (!this.terminalManager) {
this.terminalManager = new TerminalManager(this.container)
console.log(`Terminal manager set up for ${this.sandboxId}`)
}
// Called when the client disconnects from the Sandbox
async disconnect() {
// Close all terminals managed by the terminal manager
await this.terminalManager?.closeAllTerminals()
// This way the terminal manager will be set up again if we reconnect
this.terminalManager = null;
// Close all file watchers managed by the file manager
await this.fileManager?.closeWatchers()
// This way the file manager will be set up again if we reconnect
this.fileManager = null;
// Initialize the file manager if it hasn't been set up yet
if (!this.fileManager) {
this.fileManager = new FileManager(
this.sandboxId,
this.container,
fileWatchCallback ?? null
)
// Initialize the file manager and emit the initial files
await this.fileManager.initialize()
}
}
// Called when the client disconnects from the Sandbox
async disconnect() {
// Close all terminals managed by the terminal manager
await this.terminalManager?.closeAllTerminals()
// This way the terminal manager will be set up again if we reconnect
this.terminalManager = null
// Close all file watchers managed by the file manager
await this.fileManager?.closeWatchers()
// This way the file manager will be set up again if we reconnect
this.fileManager = null
}
handlers(connection: { userId: string; isOwner: boolean; socket: Socket }) {
// Handle heartbeat from a socket connection
const handleHeartbeat: SocketHandler = (_: any) => {
// Only keep the sandbox alive if the owner is still connected
if (connection.isOwner) {
this.container?.setTimeout(CONTAINER_TIMEOUT)
}
}
handlers(connection: { userId: string, isOwner: boolean, socket: Socket }) {
// Handle getting a file
const handleGetFile: SocketHandler = ({ fileId }: any) => {
return this.fileManager?.getFile(fileId)
}
// Handle heartbeat from a socket connection
const handleHeartbeat: SocketHandler = (_: any) => {
// Only keep the sandbox alive if the owner is still connected
if (connection.isOwner) {
this.container?.setTimeout(CONTAINER_TIMEOUT)
}
}
// Handle getting a folder
const handleGetFolder: SocketHandler = ({ folderId }: any) => {
return this.fileManager?.getFolder(folderId)
}
// Handle getting a file
const handleGetFile: SocketHandler = ({ fileId }: any) => {
return this.fileManager?.getFile(fileId)
}
// Handle saving a file
const handleSaveFile: SocketHandler = async ({ fileId, body }: any) => {
await saveFileRL.consume(connection.userId, 1)
return this.fileManager?.saveFile(fileId, body)
}
// Handle getting a folder
const handleGetFolder: SocketHandler = ({ folderId }: any) => {
return this.fileManager?.getFolder(folderId)
}
// Handle moving a file
const handleMoveFile: SocketHandler = ({ fileId, folderId }: any) => {
return this.fileManager?.moveFile(fileId, folderId)
}
// Handle saving a file
const handleSaveFile: SocketHandler = async ({ fileId, body }: any) => {
await saveFileRL.consume(connection.userId, 1);
return this.fileManager?.saveFile(fileId, body)
}
// Handle listing apps
const handleListApps: SocketHandler = async (_: any) => {
if (!this.dokkuClient)
throw Error("Failed to retrieve apps list: No Dokku client")
return { success: true, apps: await this.dokkuClient.listApps() }
}
    // Handle moving a file
    const handleMoveFile: SocketHandler = ({ fileId, folderId }: any) => {
      return this.fileManager?.moveFile(fileId, folderId)
    }

    // Handle deploying code
    const handleDeploy: SocketHandler = async (_: any) => {
      if (!this.gitClient) throw Error("No git client")
      if (!this.fileManager) throw Error("No file manager")
      await this.gitClient.pushFiles(
        await this.fileManager?.loadFileContent(),
        this.sandboxId
      )
      return { success: true }
    }

    // Handle creating a file
    const handleCreateFile: SocketHandler = async ({ name }: any) => {
      await createFileRL.consume(connection.userId, 1)
      return { success: await this.fileManager?.createFile(name) }
    }

    // Handle creating a folder
    const handleCreateFolder: SocketHandler = async ({ name }: any) => {
      await createFolderRL.consume(connection.userId, 1)
      return { success: await this.fileManager?.createFolder(name) }
    }

    // Handle renaming a file
    const handleRenameFile: SocketHandler = async ({ fileId, newName }: any) => {
      await renameFileRL.consume(connection.userId, 1)
      return this.fileManager?.renameFile(fileId, newName)
    }

    // Handle deleting a file
    const handleDeleteFile: SocketHandler = async ({ fileId }: any) => {
      await deleteFileRL.consume(connection.userId, 1)
      return this.fileManager?.deleteFile(fileId)
    }

    // Handle deleting a folder
    const handleDeleteFolder: SocketHandler = ({ folderId }: any) => {
      return this.fileManager?.deleteFolder(folderId)
    }

    // Handle creating a terminal session
    const handleCreateTerminal: SocketHandler = async ({ id }: any) => {
      await lockManager.acquireLock(this.sandboxId, async () => {
        await this.terminalManager?.createTerminal(
          id,
          (responseString: string) => {
            connection.socket.emit("terminalResponse", {
              id,
              data: responseString,
            })
            const port = extractPortNumber(responseString)
            if (port) {
              connection.socket.emit(
                "previewURL",
                "https://" + this.container?.getHost(port)
              )
            }
          }
        )
      })
    }

    // Handle resizing a terminal
    const handleResizeTerminal: SocketHandler = ({ dimensions }: any) => {
      this.terminalManager?.resizeTerminal(dimensions)
    }

    // Handle sending data to a terminal
    const handleTerminalData: SocketHandler = ({ id, data }: any) => {
      return this.terminalManager?.sendTerminalData(id, data)
    }

    // Handle closing a terminal
    const handleCloseTerminal: SocketHandler = ({ id }: any) => {
      return this.terminalManager?.closeTerminal(id)
    }

    // Handle downloading files by download button
    const handleDownloadFiles: SocketHandler = async () => {
      if (!this.fileManager) throw Error("No file manager")
      // Get the Base64 encoded ZIP string
      const zipBase64 = await this.fileManager.getFilesForDownload()
      return { zipBlob: zipBase64 }
    }

    return {
      heartbeat: handleHeartbeat,
      getFile: handleGetFile,
      downloadFiles: handleDownloadFiles,
      getFolder: handleGetFolder,
      saveFile: handleSaveFile,
      moveFile: handleMoveFile,
      list: handleListApps,
      deploy: handleDeploy,
      createFile: handleCreateFile,
      createFolder: handleCreateFolder,
      renameFile: handleRenameFile,
      deleteFile: handleDeleteFile,
      deleteFolder: handleDeleteFolder,
      createTerminal: handleCreateTerminal,
      resizeTerminal: handleResizeTerminal,
      terminalData: handleTerminalData,
      closeTerminal: handleCloseTerminal,
    }
}
}
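For orientation: a connected client drives these handlers by emitting the matching event name with an acknowledgement callback, which is exactly how index.ts registers them below. A minimal sketch using socket.io-client — the URL, IDs, and payloads here are illustrative, not values from the repository:

import { io } from "socket.io-client"

// The query fields mirror the handshake schema enforced by socketAuth.
const socket = io("http://localhost:4000", {
  query: { userId: "user_123", sandboxId: "sandbox_456" },
})

// Each handler is reachable as an event; the server replies through the ack callback.
socket.emit("createFile", { name: "index.ts" }, (res: { success: boolean }) => {
  console.log("createFile succeeded:", res.success)
})

// Terminal output is pushed on "terminalResponse" rather than returned through an ack.
socket.on("terminalResponse", ({ id, data }: { id: string; data: string }) => {
  process.stdout.write(data)
})
socket.emit("createTerminal", { id: "term_1" }, () => {
  socket.emit("terminalData", { id: "term_1", data: "ls\n" })
})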

View File

@ -57,7 +57,7 @@ export class SecureGitClient {
// Add files to the repository
for (const { id, data } of fileData) {
await git.add(id.startsWith("/") ? id.slice(1) : id)
}
// Commit the changes

View File

@ -1,8 +1,8 @@
import { CommandHandle, Sandbox } from "e2b"
// Terminal class to manage a pseudo-terminal (PTY) in a sandbox environment
export class Terminal {
private pty: CommandHandle | undefined // Holds the PTY process handle
private sandbox: Sandbox // Reference to the sandbox environment
// Constructor initializes the Terminal with a sandbox
@ -24,7 +24,7 @@ export class Terminal {
this.pty = await this.sandbox.pty.create({
rows,
cols,
timeoutMs: 0,
onData: (data: Uint8Array) => {
onData(new TextDecoder().decode(data)) // Convert received data to string and pass to handler
},
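These renames track the e2b SDK: the PTY handle type is now CommandHandle and the timeout option is timeoutMs. A minimal sketch of the updated call, assuming an e2b API key is configured and using an illustrative template name:

import { Sandbox } from "e2b"

const sandbox = await Sandbox.create("base")

// timeoutMs: 0 keeps the PTY alive indefinitely instead of hitting the default command timeout.
const pty = await sandbox.pty.create({
  rows: 24,
  cols: 80,
  timeoutMs: 0,
  onData: (data: Uint8Array) => {
    process.stdout.write(new TextDecoder().decode(data))
  },
})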

View File

@ -1,2 +1,2 @@
// The amount of time in ms that a container will stay alive without a heartbeat.
export const CONTAINER_TIMEOUT = 120_000
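In other words, a container that misses heartbeats for two minutes is considered dead. A minimal sketch of how a consumer of this constant might enforce that — the function and import path are illustrative, not the repository's exact wiring:

import { CONTAINER_TIMEOUT } from "./constants"

let killTimer: NodeJS.Timeout | undefined

// Call on every heartbeat; if none arrives within CONTAINER_TIMEOUT, shut the container down.
function onHeartbeat(shutdown: () => void) {
  if (killTimer) clearTimeout(killTimer)
  killTimer = setTimeout(shutdown, CONTAINER_TIMEOUT)
}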

View File

@ -4,20 +4,19 @@ import express, { Express } from "express"
import fs from "fs"
import { createServer } from "http"
import { Server, Socket } from "socket.io"
import { ConnectionManager } from "./ConnectionManager"
import { DokkuClient } from "./DokkuClient"
import { Sandbox } from "./Sandbox"
import { SecureGitClient } from "./SecureGitClient"
import { socketAuth } from "./socketAuth"; // Import the new socketAuth middleware
import { socketAuth } from "./socketAuth" // Import the new socketAuth middleware
import { TFile, TFolder } from "./types"
// Log errors and send a notification to the client
export const handleErrors = (message: string, error: any, socket: Socket) => {
console.error(message, error)
socket.emit("error", `${message} ${error.message ?? error}`)
}
// Handle uncaught exceptions
process.on("uncaughtException", (error) => {
@ -64,10 +63,10 @@ if (!process.env.DOKKU_KEY)
const dokkuClient =
process.env.DOKKU_HOST && process.env.DOKKU_KEY && process.env.DOKKU_USERNAME
? new DokkuClient({
host: process.env.DOKKU_HOST,
username: process.env.DOKKU_USERNAME,
privateKey: fs.readFileSync(process.env.DOKKU_KEY),
})
: null
dokkuClient?.connect()
@ -75,19 +74,11 @@ dokkuClient?.connect()
const gitClient =
process.env.DOKKU_HOST && process.env.DOKKU_KEY
? new SecureGitClient(
`dokku@${process.env.DOKKU_HOST}`,
process.env.DOKKU_KEY
)
: null
// Handle a client connecting to the server
io.on("connection", async (socket) => {
try {
@ -110,21 +101,22 @@ io.on("connection", async (socket) => {
try {
// Create or retrieve the sandbox manager for the given sandbox ID
const sandbox =
sandboxes[data.sandboxId] ??
new Sandbox(data.sandboxId, data.type, {
dokkuClient,
gitClient,
})
sandboxes[data.sandboxId] = sandbox
// This callback receives an update when the file list changes, and notifies all relevant connections.
const sendFileNotifications = (files: (TFolder | TFile)[]) => {
connections
.connectionsForSandbox(data.sandboxId)
.forEach((socket: Socket) => {
socket.emit("loaded", files)
})
}
// Initialize the sandbox container
// The file manager and terminal managers will be set up if they have been closed
@ -134,26 +126,35 @@ io.on("connection", async (socket) => {
// Register event handlers for the sandbox
// For each event handler, listen on the socket for that event
// Pass connection-specific information to the handlers
Object.entries(
sandbox.handlers({
userId: data.userId,
isOwner: data.isOwner,
socket,
})
).forEach(([event, handler]) => {
socket.on(
event,
async (options: any, callback?: (response: any) => void) => {
try {
const result = await handler(options)
callback?.(result)
} catch (e: any) {
handleErrors(`Error processing event "${event}":`, e, socket)
}
}
)
})
// Handle disconnection event
socket.on("disconnect", async () => {
try {
// Deregister the connection
connections.removeConnectionForSandbox(
socket,
data.sandboxId,
data.isOwner
)
// If the owner has disconnected from all sockets, close open terminals and file watchers.
// The sandbox itself will timeout after the heartbeat stops.
@ -165,20 +166,18 @@ io.on("connection", async (socket) => {
)
}
} catch (e: any) {
handleErrors("Error disconnecting:", e, socket);
handleErrors("Error disconnecting:", e, socket)
}
})
} catch (e: any) {
handleErrors(`Error initializing sandbox ${data.sandboxId}:`, e, socket)
}
} catch (e: any) {
handleErrors("Error connecting:", e, socket);
handleErrors("Error connecting:", e, socket)
}
})
// Start the server
httpServer.listen(port, () => {
console.log(`Server running on port ${port}`)
})
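The Object.entries loop above is effectively a small RPC layer: every sandbox handler becomes a socket.io event whose return value travels back through the acknowledgement callback. Stripped to its core, the pattern looks like this (function name illustrative):

type Handler = (options: any) => Promise<any> | any

function registerHandlers(socket: Socket, handlers: Record<string, Handler>) {
  for (const [event, handler] of Object.entries(handlers)) {
    socket.on(event, async (options: any, callback?: (response: any) => void) => {
      try {
        // Await the handler and hand its result to the client's ack callback.
        callback?.(await handler(options))
      } catch (e: any) {
        handleErrors(`Error processing event "${event}":`, e, socket)
      }
    })
  }
}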

View File

@ -4,72 +4,72 @@ import { Sandbox, User } from "./types"
// Middleware for socket authentication
export const socketAuth = async (socket: Socket, next: Function) => {
  // Define the schema for handshake query validation
  const handshakeSchema = z.object({
    userId: z.string(),
    sandboxId: z.string(),
    EIO: z.string(),
    transport: z.string(),
  })

  const q = socket.handshake.query
  const parseQuery = handshakeSchema.safeParse(q)

  // Check if the query is valid according to the schema
  if (!parseQuery.success) {
    next(new Error("Invalid request."))
    return
  }

  const { sandboxId, userId } = parseQuery.data
  // Fetch user data from the database
  const dbUser = await fetch(
    `${process.env.DATABASE_WORKER_URL}/api/user?id=${userId}`,
    {
      headers: {
        Authorization: `${process.env.WORKERS_KEY}`,
      },
    }
  )
  const dbUserJSON = (await dbUser.json()) as User

  // Fetch sandbox data from the database
  const dbSandbox = await fetch(
    `${process.env.DATABASE_WORKER_URL}/api/sandbox?id=${sandboxId}`,
    {
      headers: {
        Authorization: `${process.env.WORKERS_KEY}`,
      },
    }
  )
  const dbSandboxJSON = (await dbSandbox.json()) as Sandbox

  // Check if user data was retrieved successfully
  if (!dbUserJSON) {
    next(new Error("DB error."))
    return
  }

  // Check if the user owns the sandbox or has shared access
  const sandbox = dbUserJSON.sandbox.find((s) => s.id === sandboxId)
  const sharedSandboxes = dbUserJSON.usersToSandboxes.find(
    (uts) => uts.sandboxId === sandboxId
  )

  // If user doesn't own or have shared access to the sandbox, deny access
  if (!sandbox && !sharedSandboxes) {
    next(new Error("Invalid credentials."))
    return
  }

  // Set socket data with user information
  socket.data = {
    userId,
    sandboxId: sandboxId,
    isOwner: sandbox !== undefined,
    type: dbSandboxJSON.type,
  }

  // Allow the connection
  next()
}
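The middleware's wiring is not shown in this diff; with socket.io it would presumably be attached via io.use before any connection handler runs. A minimal sketch, assuming a Server instance named io:

io.use(socketAuth)

io.on("connection", (socket) => {
  // socket.data was populated by the middleware above.
  const { userId, sandboxId, isOwner } = socket.data
  console.log(`user ${userId} joined sandbox ${sandboxId} (owner: ${isOwner})`)
})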

View File

@ -20,4 +20,4 @@ export class LockManager {
}
return await this.locks[key]
}
}
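Only the tail of LockManager is visible in this hunk. An implementation consistent with it — a promise map keyed by resource, so concurrent callers share a single in-flight operation — might look like the following sketch (not necessarily the repository's exact code):

export class LockManager {
  private locks: Record<string, Promise<any>> = {}

  // Run `callback` under the lock for `key`; concurrent callers await the same promise.
  async acquireLock<T>(key: string, callback: () => Promise<T>): Promise<T> {
    if (!this.locks[key]) {
      this.locks[key] = callback().finally(() => {
        delete this.locks[key]
      })
    }
    return await this.locks[key]
  }
}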