start ws server file fetching logic

commit 4e7d6d1a97
parent a49de2294d
backend/server/dist/getSandboxFiles.js (vendored, new file, 57 lines)
@@ -0,0 +1,57 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
const getSandboxFiles = (id) => __awaiter(void 0, void 0, void 0, function* () {
    const sandboxRes = yield fetch(`https://storage.ishaan1013.workers.dev/api?sandboxId=${id}`);
    const sandboxData = yield sandboxRes.json();
    const paths = sandboxData.objects.map((obj) => obj.key);
    return processFiles(paths, id);
});
const processFiles = (paths, id) => {
    const root = { id: "/", type: "folder", name: "/", children: [] };
    paths.forEach((path) => {
        const allParts = path.split("/");
        if (allParts[1] !== id) {
            console.log("invalid path!!!!");
            return;
        }
        const parts = allParts.slice(2);
        let current = root;
        for (let i = 0; i < parts.length; i++) {
            const part = parts[i];
            const isFile = i === parts.length - 1 && part.includes(".");
            const existing = current.children.find((child) => child.name === part);
            if (existing) {
                if (!isFile) {
                    current = existing;
                }
            }
            else {
                if (isFile) {
                    const file = { id: path, type: "file", name: part };
                    current.children.push(file);
                }
                else {
                    const folder = {
                        id: path,
                        type: "folder",
                        name: part,
                        children: [],
                    };
                    current.children.push(folder);
                    current = folder;
                }
            }
        }
    });
    return root.children;
};
exports.default = getSandboxFiles;
backend/server/dist/index.js (vendored, 25 lines changed)

@@ -17,6 +17,7 @@ const dotenv_1 = __importDefault(require("dotenv"));
const http_1 = require("http");
const socket_io_1 = require("socket.io");
const zod_1 = require("zod");
const getSandboxFiles_1 = __importDefault(require("./getSandboxFiles"));
dotenv_1.default.config();
const app = (0, express_1.default)();
const port = process.env.PORT || 4000;
@@ -43,8 +44,8 @@ io.use((socket, next) => __awaiter(void 0, void 0, void 0, function* () {
        next(new Error("Invalid request."));
        return;
    }
    const query = parseQuery.data;
    const dbUser = yield fetch(`http://localhost:8787/api/user?id=${query.userId}`);
    const { sandboxId, userId, type } = parseQuery.data;
    const dbUser = yield fetch(`http://localhost:8787/api/user?id=${userId}`);
    const dbUserJSON = (yield dbUser.json());
    console.log("dbUserJSON:", dbUserJSON);
    if (!dbUserJSON) {
@@ -52,29 +53,23 @@ io.use((socket, next) => __awaiter(void 0, void 0, void 0, function* () {
        next(new Error("DB error."));
        return;
    }
    const sandbox = dbUserJSON.sandbox.find((s) => s.id === query.sandboxId);
    const sandbox = dbUserJSON.sandbox.find((s) => s.id === sandboxId);
    if (!sandbox) {
        console.log("Invalid credentials.");
        next(new Error("Invalid credentials."));
        return;
    }
    const data = {
        userId: query.userId,
        sandboxId: query.sandboxId,
        type: query.type,
        init: sandbox.init,
    socket.data = {
        id: sandboxId,
        type,
        userId,
    };
    socket.data = data;
    next();
}));
io.on("connection", (socket) => __awaiter(void 0, void 0, void 0, function* () {
    const data = socket.data;
    console.log("init:", data.init);
    if (!data.init) {
        // const dbUser = await fetch(
        // `http://localhost:8787/sandbox/${data.sandboxId}/init`
        // )
    }
    const sandboxFiles = yield (0, getSandboxFiles_1.default)(data.id);
    // fetch all file data TODO
    // socket.emit("loaded", {
    // rootContent: await fetchDir("/workspace", "")
    // });
backend/server/src/getSandboxFiles.ts (new file, 56 lines)

@@ -0,0 +1,56 @@
import { R2Files, Sandbox, TFile, TFolder, User } from "./types"

const getSandboxFiles = async (id: string) => {
  const sandboxRes = await fetch(
    `https://storage.ishaan1013.workers.dev/api?sandboxId=${id}`
  )
  const sandboxData: R2Files = await sandboxRes.json()

  const paths = sandboxData.objects.map((obj) => obj.key)
  return processFiles(paths, id)
}

const processFiles = (paths: string[], id: string): (TFile | TFolder)[] => {
  const root: TFolder = { id: "/", type: "folder", name: "/", children: [] }

  paths.forEach((path) => {
    const allParts = path.split("/")
    if (allParts[1] !== id) {
      console.log("invalid path!!!!")
      return
    }

    const parts = allParts.slice(2)
    let current: TFolder = root

    for (let i = 0; i < parts.length; i++) {
      const part = parts[i]
      const isFile = i === parts.length - 1 && part.includes(".")
      const existing = current.children.find((child) => child.name === part)

      if (existing) {
        if (!isFile) {
          current = existing as TFolder
        }
      } else {
        if (isFile) {
          const file: TFile = { id: path, type: "file", name: part }
          current.children.push(file)
        } else {
          const folder: TFolder = {
            id: path,
            type: "folder",
            name: part,
            children: [],
          }
          current.children.push(folder)
          current = folder
        }
      }
    }
  })

  return root.children
}

export default getSandboxFiles
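For reference, a rough sketch of what processFiles produces. The object keys below are hypothetical; only the prefix/sandboxId/... shape is implied by the allParts[1] !== id check and the slice(2). Note that a folder node's id ends up being the full key of the first object that introduced that folder, since the code sets id: path from the whole key.

// Hypothetical input keys (the real prefix segment is not shown in this commit):
const examplePaths = [
  "projects/abc123/package.json",
  "projects/abc123/src/index.ts",
  "projects/abc123/src/App.tsx",
]

// processFiles(examplePaths, "abc123") would return roughly:
// [
//   { id: "projects/abc123/package.json", type: "file", name: "package.json" },
//   {
//     id: "projects/abc123/src/index.ts", // full key of the object that first created this folder
//     type: "folder",
//     name: "src",
//     children: [
//       { id: "projects/abc123/src/index.ts", type: "file", name: "index.ts" },
//       { id: "projects/abc123/src/App.tsx", type: "file", name: "App.tsx" },
//     ],
//   },
// ]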
backend/server/src/index.ts

@@ -5,6 +5,7 @@ import { Server } from "socket.io"

import { z } from "zod"
import { User } from "./types"
import getSandboxFiles from "./getSandboxFiles"

dotenv.config()

@@ -39,11 +40,9 @@ io.use(async (socket, next) => {
    return
  }

  const query = parseQuery.data
  const { sandboxId, userId, type } = parseQuery.data

  const dbUser = await fetch(
    `http://localhost:8787/api/user?id=${query.userId}`
  )
  const dbUser = await fetch(`http://localhost:8787/api/user?id=${userId}`)
  const dbUserJSON = (await dbUser.json()) as User

  console.log("dbUserJSON:", dbUserJSON)
@@ -54,7 +53,7 @@ io.use(async (socket, next) => {
    return
  }

  const sandbox = dbUserJSON.sandbox.find((s) => s.id === query.sandboxId)
  const sandbox = dbUserJSON.sandbox.find((s) => s.id === sandboxId)

  if (!sandbox) {
    console.log("Invalid credentials.")
@@ -62,33 +61,25 @@ io.use(async (socket, next) => {
    return
  }

  const data = {
    userId: query.userId,
    sandboxId: query.sandboxId,
    type: query.type,
    init: sandbox.init,
  socket.data = {
    id: sandboxId,
    type,
    userId,
  }

  socket.data = data

  next()
})

io.on("connection", async (socket) => {
  const data = socket.data as {
    userId: string
    sandboxId: string
    id: string
    type: "node" | "react"
    init: boolean
  }

  console.log("init:", data.init)
  const sandboxFiles = await getSandboxFiles(data.id)

  if (!data.init) {
    // const dbUser = await fetch(
    // `http://localhost:8787/sandbox/${data.sandboxId}/init`
    // )
  }
  // fetch all file data TODO

  // socket.emit("loaded", {
  // rootContent: await fetchDir("/workspace", "")
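The middleware above destructures sandboxId, userId, and type from the zod-parsed handshake data and stashes them on socket.data. A minimal client-side sketch of how those values might be supplied, assuming they travel in the handshake query and the server listens on port 4000 as configured above; the exact schema and event names are not part of this commit:

import { io } from "socket.io-client"

// Hypothetical client: the three fields mirror what io.use() destructures.
const socket = io("http://localhost:4000", {
  query: {
    userId: "user_abc",   // placeholder id
    sandboxId: "abc123",  // placeholder id
    type: "react",        // "react" | "node", matching the Sandbox type
  },
})

socket.on("connect", () => {
  console.log("connected:", socket.id)
})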
backend/server/src/types.ts

@@ -11,7 +11,35 @@ export type Sandbox = {
  id: string
  name: string
  type: "react" | "node"
  init: boolean
  bucket: string | null
  userId: string
}

export type TFolder = {
  id: string
  type: "folder"
  name: string
  children: (TFile | TFolder)[]
}

export type TFile = {
  id: string
  type: "file"
  name: string
}

export type R2Files = {
  objects: R2FileData[]
  truncated: boolean
  delimitedPrefixes: any[]
}

export type R2FileData = {
  storageClass: string
  uploaded: string
  checksums: any
  httpEtag: string
  etag: string
  size: number
  version: string
  key: string
}
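The R2Files and R2FileData shapes above describe the listing returned by the storage worker that getSandboxFiles fetches. A sketch of a response body matching those types; the field values and key layout are illustrative, not taken from a real bucket:

import { R2Files } from "./types"

// Illustrative payload shaped like the worker's list response;
// only objects[].key is consumed by getSandboxFiles.
const exampleListing: R2Files = {
  objects: [
    {
      storageClass: "Standard",
      uploaded: "2024-01-01T00:00:00.000Z",
      checksums: {},
      httpEtag: '"abc"',
      etag: "abc",
      size: 128,
      version: "0001",
      key: "projects/abc123/src/index.ts", // hypothetical key layout
    },
  ],
  truncated: false,
  delimitedPrefixes: [],
}

const keys = exampleListing.objects.map((obj) => obj.key)
// keys -> ["projects/abc123/src/index.ts"]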
@@ -11,8 +11,6 @@ export type Sandbox = {
  id: string
  name: string
  type: "react" | "node"
  init: boolean
  bucket: string | null
  userId: string
}
