start ws server file fetching logic

Ishaan Dey 2024-04-26 02:10:37 -04:00
parent a49de2294d
commit 4e7d6d1a97
6 changed files with 164 additions and 39 deletions

backend/server/dist/getSandboxFiles.js (vendored, new file)

@@ -0,0 +1,57 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
const getSandboxFiles = (id) => __awaiter(void 0, void 0, void 0, function* () {
    const sandboxRes = yield fetch(`https://storage.ishaan1013.workers.dev/api?sandboxId=${id}`);
    const sandboxData = yield sandboxRes.json();
    const paths = sandboxData.objects.map((obj) => obj.key);
    return processFiles(paths, id);
});
const processFiles = (paths, id) => {
    const root = { id: "/", type: "folder", name: "/", children: [] };
    paths.forEach((path) => {
        const allParts = path.split("/");
        if (allParts[1] !== id) {
            console.log("invalid path!!!!");
            return;
        }
        const parts = allParts.slice(2);
        let current = root;
        for (let i = 0; i < parts.length; i++) {
            const part = parts[i];
            const isFile = i === parts.length - 1 && part.includes(".");
            const existing = current.children.find((child) => child.name === part);
            if (existing) {
                if (!isFile) {
                    current = existing;
                }
            }
            else {
                if (isFile) {
                    const file = { id: path, type: "file", name: part };
                    current.children.push(file);
                }
                else {
                    const folder = {
                        id: path,
                        type: "folder",
                        name: part,
                        children: [],
                    };
                    current.children.push(folder);
                    current = folder;
                }
            }
        }
    });
    return root.children;
};
exports.default = getSandboxFiles;


@@ -17,6 +17,7 @@ const dotenv_1 = __importDefault(require("dotenv"));
 const http_1 = require("http");
 const socket_io_1 = require("socket.io");
 const zod_1 = require("zod");
+const getSandboxFiles_1 = __importDefault(require("./getSandboxFiles"));
 dotenv_1.default.config();
 const app = (0, express_1.default)();
 const port = process.env.PORT || 4000;
@@ -43,8 +44,8 @@ io.use((socket, next) => __awaiter(void 0, void 0, void 0, function* () {
         next(new Error("Invalid request."));
         return;
     }
-    const query = parseQuery.data;
-    const dbUser = yield fetch(`http://localhost:8787/api/user?id=${query.userId}`);
+    const { sandboxId, userId, type } = parseQuery.data;
+    const dbUser = yield fetch(`http://localhost:8787/api/user?id=${userId}`);
     const dbUserJSON = (yield dbUser.json());
     console.log("dbUserJSON:", dbUserJSON);
     if (!dbUserJSON) {
@@ -52,29 +53,23 @@ io.use((socket, next) => __awaiter(void 0, void 0, void 0, function* () {
         next(new Error("DB error."));
         return;
     }
-    const sandbox = dbUserJSON.sandbox.find((s) => s.id === query.sandboxId);
+    const sandbox = dbUserJSON.sandbox.find((s) => s.id === sandboxId);
     if (!sandbox) {
         console.log("Invalid credentials.");
         next(new Error("Invalid credentials."));
         return;
     }
-    const data = {
-        userId: query.userId,
-        sandboxId: query.sandboxId,
-        type: query.type,
-        init: sandbox.init,
-    };
-    socket.data = data;
+    socket.data = {
+        id: sandboxId,
+        type,
+        userId,
+    };
     next();
 }));
 io.on("connection", (socket) => __awaiter(void 0, void 0, void 0, function* () {
     const data = socket.data;
-    console.log("init:", data.init);
-    if (!data.init) {
-        // const dbUser = await fetch(
-        //   `http://localhost:8787/sandbox/${data.sandboxId}/init`
-        // )
-    }
+    const sandboxFiles = yield (0, getSandboxFiles_1.default)(data.id);
+    // fetch all file data TODO
     // socket.emit("loaded", {
     //   rootContent: await fetchDir("/workspace", "")
     // });


@@ -0,0 +1,56 @@
import { R2Files, Sandbox, TFile, TFolder, User } from "./types"

const getSandboxFiles = async (id: string) => {
  const sandboxRes = await fetch(
    `https://storage.ishaan1013.workers.dev/api?sandboxId=${id}`
  )
  const sandboxData: R2Files = await sandboxRes.json()

  const paths = sandboxData.objects.map((obj) => obj.key)
  return processFiles(paths, id)
}

const processFiles = (paths: string[], id: string): (TFile | TFolder)[] => {
  const root: TFolder = { id: "/", type: "folder", name: "/", children: [] }

  paths.forEach((path) => {
    const allParts = path.split("/")
    if (allParts[1] !== id) {
      console.log("invalid path!!!!")
      return
    }

    const parts = allParts.slice(2)
    let current: TFolder = root

    for (let i = 0; i < parts.length; i++) {
      const part = parts[i]
      const isFile = i === parts.length - 1 && part.includes(".")
      const existing = current.children.find((child) => child.name === part)

      if (existing) {
        if (!isFile) {
          current = existing as TFolder
        }
      } else {
        if (isFile) {
          const file: TFile = { id: path, type: "file", name: part }
          current.children.push(file)
        } else {
          const folder: TFolder = {
            id: path,
            type: "folder",
            name: part,
            children: [],
          }
          current.children.push(folder)
          current = folder
        }
      }
    }
  })

  return root.children
}

export default getSandboxFiles

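The helper above turns the flat list of R2 object keys into a nested file tree: it skips any key whose second path segment is not the sandbox id, drops the first two segments, and walks the rest, creating TFolder nodes for intermediate parts and a TFile node for a final part that contains a dot. A minimal usage sketch follows; the "projects/" key prefix and the "abc123" sandbox id are hypothetical (the commit does not state the key layout), and a runtime with global fetch (e.g. Node 18+) is assumed.

import getSandboxFiles from "./getSandboxFiles"

const preview = async () => {
  // "abc123" is a made-up sandbox id for illustration only.
  const files = await getSandboxFiles("abc123")
  // For keys like "projects/abc123/index.ts" and "projects/abc123/src/app.ts",
  // this logs a file node for index.ts plus a "src" folder containing app.ts.
  // Note that, as written, a folder node's id is the full key of the first
  // object that created it rather than the folder prefix itself.
  console.log(JSON.stringify(files, null, 2))
}

preview()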

@@ -5,6 +5,7 @@ import { Server } from "socket.io"
 import { z } from "zod"
 import { User } from "./types"
+import getSandboxFiles from "./getSandboxFiles"

 dotenv.config()
@@ -39,11 +40,9 @@ io.use(async (socket, next) => {
     return
   }

-  const query = parseQuery.data
-  const dbUser = await fetch(
-    `http://localhost:8787/api/user?id=${query.userId}`
-  )
+  const { sandboxId, userId, type } = parseQuery.data
+  const dbUser = await fetch(`http://localhost:8787/api/user?id=${userId}`)

   const dbUserJSON = (await dbUser.json()) as User
   console.log("dbUserJSON:", dbUserJSON)
@@ -54,7 +53,7 @@ io.use(async (socket, next) => {
     return
   }

-  const sandbox = dbUserJSON.sandbox.find((s) => s.id === query.sandboxId)
+  const sandbox = dbUserJSON.sandbox.find((s) => s.id === sandboxId)

   if (!sandbox) {
     console.log("Invalid credentials.")
@@ -62,33 +61,25 @@ io.use(async (socket, next) => {
     return
   }

-  const data = {
-    userId: query.userId,
-    sandboxId: query.sandboxId,
-    type: query.type,
-    init: sandbox.init,
-  }
-  socket.data = data
+  socket.data = {
+    id: sandboxId,
+    type,
+    userId,
+  }

   next()
 })

 io.on("connection", async (socket) => {
   const data = socket.data as {
     userId: string
-    sandboxId: string
+    id: string
     type: "node" | "react"
-    init: boolean
   }

-  console.log("init:", data.init)
-  if (!data.init) {
-    // const dbUser = await fetch(
-    //   `http://localhost:8787/sandbox/${data.sandboxId}/init`
-    // )
-  }
+  const sandboxFiles = await getSandboxFiles(data.id)
+  // fetch all file data TODO

   // socket.emit("loaded", {
   //   rootContent: await fetchDir("/workspace", "")

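After this change the auth middleware stores only { id, type, userId } on socket.data, and the connection handler fetches the sandbox's file tree with getSandboxFiles. The emit to the client is still commented out; a possible next step, not part of this commit, could reuse the io server and getSandboxFiles import from the file above. The "loaded" event name is only suggested by the commented-out code and is an assumption here.

io.on("connection", async (socket) => {
  const data = socket.data as { userId: string; id: string; type: "node" | "react" }
  const sandboxFiles = await getSandboxFiles(data.id)
  // Hypothetical: push the (TFile | TFolder)[] tree to the client on connect.
  socket.emit("loaded", sandboxFiles)
})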

@@ -11,7 +11,35 @@ export type Sandbox = {
   id: string
   name: string
   type: "react" | "node"
-  init: boolean
-  bucket: string | null
   userId: string
 }
+
+export type TFolder = {
+  id: string
+  type: "folder"
+  name: string
+  children: (TFile | TFolder)[]
+}
+
+export type TFile = {
+  id: string
+  type: "file"
+  name: string
+}
+
+export type R2Files = {
+  objects: R2FileData[]
+  truncated: boolean
+  delimitedPrefixes: any[]
+}
+
+export type R2FileData = {
+  storageClass: string
+  uploaded: string
+  checksums: any
+  httpEtag: string
+  etag: string
+  size: number
+  version: string
+  key: string
+}

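The new R2Files and R2FileData types describe the JSON returned by the storage worker's list endpoint, and their field names appear to mirror Cloudflare R2 object listings. An illustrative value that satisfies the types (all data made up, including the hypothetical "projects/" key layout):

import { R2Files } from "./types"

const example: R2Files = {
  objects: [
    {
      storageClass: "Standard",
      uploaded: "2024-04-26T06:10:37.000Z",
      checksums: {},
      httpEtag: '"d41d8cd98f00b204e9800998ecf8427e"',
      etag: "d41d8cd98f00b204e9800998ecf8427e",
      size: 0,
      version: "0000000000000000",
      key: "projects/abc123/index.ts", // hypothetical key layout
    },
  ],
  truncated: false,
  delimitedPrefixes: [],
}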

@@ -11,8 +11,6 @@ export type Sandbox = {
   id: string
   name: string
   type: "react" | "node"
-  init: boolean
-  bucket: string | null
   userId: string
 }