chore: format Cloudflare Worker code
This commit is contained in:
parent 63aa2b1e09
commit 52aa9d92fd
@@ -1,22 +1,22 @@
{
  "name": "ai",
  "version": "0.0.0",
  "private": true,
  "scripts": {
    "deploy": "wrangler deploy",
    "dev": "wrangler dev",
    "start": "wrangler dev",
    "test": "vitest",
    "cf-typegen": "wrangler types"
  },
  "devDependencies": {
    "@cloudflare/vitest-pool-workers": "^0.1.0",
    "@cloudflare/workers-types": "^4.20240512.0",
    "typescript": "^5.0.4",
    "vitest": "1.3.0",
    "wrangler": "^3.0.0"
  },
  "dependencies": {
    "@anthropic-ai/sdk": "^0.27.2"
  }
}
@@ -1,57 +1,61 @@
-import { Anthropic } from "@anthropic-ai/sdk";
-import { MessageParam } from "@anthropic-ai/sdk/src/resources/messages.js";
+import { Anthropic } from "@anthropic-ai/sdk"
+import { MessageParam } from "@anthropic-ai/sdk/src/resources/messages.js"

 export interface Env {
-  ANTHROPIC_API_KEY: string;
+  ANTHROPIC_API_KEY: string
 }

 export default {
-  async fetch(request: Request, env: Env): Promise<Response> {
-    // Handle CORS preflight requests
-    if (request.method === "OPTIONS") {
-      return new Response(null, {
-        headers: {
-          "Access-Control-Allow-Origin": "*",
-          "Access-Control-Allow-Methods": "GET, POST, OPTIONS",
-          "Access-Control-Allow-Headers": "Content-Type",
-        },
-      });
-    }
+  async fetch(request: Request, env: Env): Promise<Response> {
+    // Handle CORS preflight requests
+    if (request.method === "OPTIONS") {
+      return new Response(null, {
+        headers: {
+          "Access-Control-Allow-Origin": "*",
+          "Access-Control-Allow-Methods": "GET, POST, OPTIONS",
+          "Access-Control-Allow-Headers": "Content-Type",
+        },
+      })
+    }

-    if (request.method !== "GET" && request.method !== "POST") {
-      return new Response("Method Not Allowed", { status: 405 });
-    }
+    if (request.method !== "GET" && request.method !== "POST") {
+      return new Response("Method Not Allowed", { status: 405 })
+    }

-    let body;
-    let isEditCodeWidget = false;
-    if (request.method === "POST") {
-      body = await request.json() as { messages: unknown; context: unknown; activeFileContent: string };
-    } else {
-      const url = new URL(request.url);
-      const fileName = url.searchParams.get("fileName") || "";
-      const code = url.searchParams.get("code") || "";
-      const line = url.searchParams.get("line") || "";
-      const instructions = url.searchParams.get("instructions") || "";
+    let body
+    let isEditCodeWidget = false
+    if (request.method === "POST") {
+      body = (await request.json()) as {
+        messages: unknown
+        context: unknown
+        activeFileContent: string
+      }
+    } else {
+      const url = new URL(request.url)
+      const fileName = url.searchParams.get("fileName") || ""
+      const code = url.searchParams.get("code") || ""
+      const line = url.searchParams.get("line") || ""
+      const instructions = url.searchParams.get("instructions") || ""

-      body = {
-        messages: [{ role: "human", content: instructions }],
-        context: `File: ${fileName}\nLine: ${line}\nCode:\n${code}`,
-        activeFileContent: code,
-      };
-      isEditCodeWidget = true;
-    }
+      body = {
+        messages: [{ role: "human", content: instructions }],
+        context: `File: ${fileName}\nLine: ${line}\nCode:\n${code}`,
+        activeFileContent: code,
+      }
+      isEditCodeWidget = true
+    }

-    const messages = body.messages;
-    const context = body.context;
-    const activeFileContent = body.activeFileContent;
+    const messages = body.messages
+    const context = body.context
+    const activeFileContent = body.activeFileContent

-    if (!Array.isArray(messages) || messages.length === 0) {
-      return new Response("Invalid or empty messages", { status: 400 });
-    }
+    if (!Array.isArray(messages) || messages.length === 0) {
+      return new Response("Invalid or empty messages", { status: 400 })
+    }

-    let systemMessage;
-    if (isEditCodeWidget) {
-      systemMessage = `You are an AI code editor. Your task is to modify the given code based on the user's instructions. Only output the modified code, without any explanations or markdown formatting. The code should be a direct replacement for the existing code.
+    let systemMessage
+    if (isEditCodeWidget) {
+      systemMessage = `You are an AI code editor. Your task is to modify the given code based on the user's instructions. Only output the modified code, without any explanations or markdown formatting. The code should be a direct replacement for the existing code.

 Context:
 ${context}
@@ -61,9 +65,9 @@ ${activeFileContent}

 Instructions: ${messages[0].content}

-Respond only with the modified code that can directly replace the existing code.`;
-    } else {
-      systemMessage = `You are an intelligent programming assistant. Please respond to the following request concisely. If your response includes code, please format it using triple backticks (\`\`\`) with the appropriate language identifier. For example:
+Respond only with the modified code that can directly replace the existing code.`
+    } else {
+      systemMessage = `You are an intelligent programming assistant. Please respond to the following request concisely. If your response includes code, please format it using triple backticks (\`\`\`) with the appropriate language identifier. For example:

 \`\`\`python
 print("Hello, World!")
@@ -71,51 +75,54 @@ print("Hello, World!")

 Provide a clear and concise explanation along with any code snippets. Keep your response brief and to the point.

-${context ? `Context:\n${context}\n` : ''}
-${activeFileContent ? `Active File Content:\n${activeFileContent}\n` : ''}`;
-    }
+${context ? `Context:\n${context}\n` : ""}
+${activeFileContent ? `Active File Content:\n${activeFileContent}\n` : ""}`
+    }

-    const anthropicMessages = messages.map(msg => ({
-      role: msg.role === 'human' ? 'user' : 'assistant',
-      content: msg.content
-    })) as MessageParam[];
+    const anthropicMessages = messages.map((msg) => ({
+      role: msg.role === "human" ? "user" : "assistant",
+      content: msg.content,
+    })) as MessageParam[]

-    try {
-      const anthropic = new Anthropic({ apiKey: env.ANTHROPIC_API_KEY });
+    try {
+      const anthropic = new Anthropic({ apiKey: env.ANTHROPIC_API_KEY })

-      const stream = await anthropic.messages.create({
-        model: "claude-3-5-sonnet-20240620",
-        max_tokens: 1024,
-        system: systemMessage,
-        messages: anthropicMessages,
-        stream: true,
-      });
+      const stream = await anthropic.messages.create({
+        model: "claude-3-5-sonnet-20240620",
+        max_tokens: 1024,
+        system: systemMessage,
+        messages: anthropicMessages,
+        stream: true,
+      })

-      const encoder = new TextEncoder();
+      const encoder = new TextEncoder()

-      const streamResponse = new ReadableStream({
-        async start(controller) {
-          for await (const chunk of stream) {
-            if (chunk.type === 'content_block_delta' && chunk.delta.type === 'text_delta') {
-              const bytes = encoder.encode(chunk.delta.text);
-              controller.enqueue(bytes);
-            }
-          }
-          controller.close();
-        },
-      });
+      const streamResponse = new ReadableStream({
+        async start(controller) {
+          for await (const chunk of stream) {
+            if (
+              chunk.type === "content_block_delta" &&
+              chunk.delta.type === "text_delta"
+            ) {
+              const bytes = encoder.encode(chunk.delta.text)
+              controller.enqueue(bytes)
+            }
+          }
+          controller.close()
+        },
+      })

-      return new Response(streamResponse, {
-        headers: {
-          "Content-Type": "text/plain; charset=utf-8",
-          "Access-Control-Allow-Origin": "*",
-          "Cache-Control": "no-cache",
-          "Connection": "keep-alive",
-        },
-      });
-    } catch (error) {
-      console.error("Error:", error);
-      return new Response("Internal Server Error", { status: 500 });
-    }
-  },
-};
+      return new Response(streamResponse, {
+        headers: {
+          "Content-Type": "text/plain; charset=utf-8",
+          "Access-Control-Allow-Origin": "*",
+          "Cache-Control": "no-cache",
+          Connection: "keep-alive",
+        },
+      })
+    } catch (error) {
+      console.error("Error:", error)
+      return new Response("Internal Server Error", { status: 500 })
+    }
+  },
+}
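For orientation, a minimal client-side sketch of calling this worker's POST endpoint; it is not part of the commit, the deployment URL is a placeholder, and only the request body fields and the streamed plain-text response are taken from the handler above.

```ts
// Hypothetical client for the AI worker above (URL is a placeholder).
const WORKER_URL = "https://ai.example.workers.dev"

export async function askWorker(
  instructions: string,
  activeFileContent: string
): Promise<string> {
  const res = await fetch(WORKER_URL, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    // Field names mirror the body parsed by the worker's fetch handler.
    body: JSON.stringify({
      messages: [{ role: "human", content: instructions }],
      context: "",
      activeFileContent,
    }),
  })
  if (!res.ok || !res.body) throw new Error(`Request failed: ${res.status}`)

  // The worker streams plain text, so read the body chunk by chunk.
  const reader = res.body.getReader()
  const decoder = new TextDecoder()
  let text = ""
  for (;;) {
    const { value, done } = await reader.read()
    if (done) break
    text += decoder.decode(value, { stream: true })
  }
  return text
}
```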
@@ -1,25 +1,30 @@
 // test/index.spec.ts
-import { env, createExecutionContext, waitOnExecutionContext, SELF } from 'cloudflare:test';
-import { describe, it, expect } from 'vitest';
-import worker from '../src/index';
+import {
+  createExecutionContext,
+  env,
+  SELF,
+  waitOnExecutionContext,
+} from "cloudflare:test"
+import { describe, expect, it } from "vitest"
+import worker from "../src/index"

 // For now, you'll need to do something like this to get a correctly-typed
 // `Request` to pass to `worker.fetch()`.
-const IncomingRequest = Request<unknown, IncomingRequestCfProperties>;
+const IncomingRequest = Request<unknown, IncomingRequestCfProperties>

-describe('Hello World worker', () => {
-  it('responds with Hello World! (unit style)', async () => {
-    const request = new IncomingRequest('http://example.com');
+describe("Hello World worker", () => {
+  it("responds with Hello World! (unit style)", async () => {
+    const request = new IncomingRequest("http://example.com")
     // Create an empty context to pass to `worker.fetch()`.
-    const ctx = createExecutionContext();
-    const response = await worker.fetch(request, env, ctx);
+    const ctx = createExecutionContext()
+    const response = await worker.fetch(request, env, ctx)
     // Wait for all `Promise`s passed to `ctx.waitUntil()` to settle before running test assertions
-    await waitOnExecutionContext(ctx);
-    expect(await response.text()).toMatchInlineSnapshot(`"Hello World!"`);
-  });
+    await waitOnExecutionContext(ctx)
+    expect(await response.text()).toMatchInlineSnapshot(`"Hello World!"`)
+  })

-  it('responds with Hello World! (integration style)', async () => {
-    const response = await SELF.fetch('https://example.com');
-    expect(await response.text()).toMatchInlineSnapshot(`"Hello World!"`);
-  });
-});
+  it("responds with Hello World! (integration style)", async () => {
+    const response = await SELF.fetch("https://example.com")
+    expect(await response.text()).toMatchInlineSnapshot(`"Hello World!"`)
+  })
+})
@@ -1,11 +1,11 @@
{
  "extends": "../tsconfig.json",
  "compilerOptions": {
    "types": [
      "@cloudflare/workers-types/experimental",
      "@cloudflare/vitest-pool-workers"
    ]
  },
  "include": ["./**/*.ts", "../src/env.d.ts"],
  "exclude": []
}
@@ -12,7 +12,9 @@

     /* Language and Environment */
     "target": "es2021" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */,
-    "lib": ["es2021"] /* Specify a set of bundled library declaration files that describe the target runtime environment. */,
+    "lib": [
+      "es2021"
+    ] /* Specify a set of bundled library declaration files that describe the target runtime environment. */,
     "jsx": "react" /* Specify what JSX code is generated. */,
     // "experimentalDecorators": true, /* Enable experimental support for TC39 stage 2 draft decorators. */
     // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */
@@ -1,11 +1,11 @@
-import { defineWorkersConfig } from "@cloudflare/vitest-pool-workers/config";
+import { defineWorkersConfig } from "@cloudflare/vitest-pool-workers/config"

 export default defineWorkersConfig({
-  test: {
-    poolOptions: {
-      workers: {
-        wrangler: { configPath: "./wrangler.toml" },
-      },
-    },
-  },
-});
+  test: {
+    poolOptions: {
+      workers: {
+        wrangler: { configPath: "./wrangler.toml" },
+      },
+    },
+  },
+})
backend/ai/worker-configuration.d.ts (vendored)
@@ -1,4 +1,3 @@
 // Generated by Wrangler
 // After adding bindings to `wrangler.toml`, regenerate this interface via `npm run cf-typegen`
-interface Env {
-}
+interface Env {}
@@ -1,4 +1,4 @@
-import type { Config } from "drizzle-kit";
+import type { Config } from "drizzle-kit"

 export default process.env.LOCAL_DB_PATH
   ? ({
@@ -16,4 +16,4 @@ export default process.env.LOCAL_DB_PATH
       wranglerConfigPath: "wrangler.toml",
      dbName: "d1-sandbox",
    },
-  } satisfies Config);
+  } satisfies Config)
@@ -1,32 +1,32 @@
{
  "name": "database",
  "version": "0.0.0",
  "private": true,
  "scripts": {
    "deploy": "wrangler deploy",
    "dev": "wrangler dev",
    "start": "wrangler dev",
    "test": "vitest",
    "generate": "drizzle-kit generate:sqlite --schema=src/schema.ts",
    "up": "drizzle-kit up:sqlite --schema=src/schema.ts",
    "db:studio": "cross-env LOCAL_DB_PATH=$(find .wrangler/state/v3/d1/miniflare-D1DatabaseObject -type f -name '*.sqlite' -print -quit) drizzle-kit studio"
  },
  "devDependencies": {
    "@cloudflare/vitest-pool-workers": "^0.1.0",
    "@cloudflare/workers-types": "^4.20240405.0",
    "@types/itty-router-extras": "^0.4.3",
    "drizzle-kit": "^0.20.17",
    "typescript": "^5.0.4",
    "vitest": "1.3.0",
    "wrangler": "^3.0.0"
  },
  "dependencies": {
    "@paralleldrive/cuid2": "^2.2.2",
    "better-sqlite3": "^9.5.0",
    "cross-env": "^7.0.3",
    "drizzle-orm": "^0.30.8",
    "itty-router": "^5.0.16",
    "itty-router-extras": "^0.4.6",
    "zod": "^3.22.4"
  }
}
@@ -1,11 +1,11 @@
-// import type { DrizzleD1Database } from "drizzle-orm/d1";
 import { drizzle } from "drizzle-orm/d1"
 import { json } from "itty-router-extras"
-import { ZodError, z } from "zod"
+import { z } from "zod"

-import { user, sandbox, usersToSandboxes } from "./schema"
-import * as schema from "./schema"
 import { and, eq, sql } from "drizzle-orm"
+import * as schema from "./schema"
+import { sandbox, user, usersToSandboxes } from "./schema"

 export interface Env {
   DB: D1Database
@@ -1,6 +1,6 @@
-import { integer, sqliteTable, text } from "drizzle-orm/sqlite-core";
-import { createId } from "@paralleldrive/cuid2";
-import { relations, sql } from "drizzle-orm";
+import { createId } from "@paralleldrive/cuid2"
+import { relations } from "drizzle-orm"
+import { integer, sqliteTable, text } from "drizzle-orm/sqlite-core"

 export const user = sqliteTable("user", {
   id: text("id")
@@ -11,14 +11,14 @@ export const user = sqliteTable("user", {
   email: text("email").notNull(),
   image: text("image"),
   generations: integer("generations").default(0),
-});
+})

-export type User = typeof user.$inferSelect;
+export type User = typeof user.$inferSelect

 export const userRelations = relations(user, ({ many }) => ({
   sandbox: many(sandbox),
   usersToSandboxes: many(usersToSandboxes),
-}));
+}))

 export const sandbox = sqliteTable("sandbox", {
   id: text("id")
@@ -32,9 +32,9 @@ export const sandbox = sqliteTable("sandbox", {
   userId: text("user_id")
     .notNull()
     .references(() => user.id),
-});
+})

-export type Sandbox = typeof sandbox.$inferSelect;
+export type Sandbox = typeof sandbox.$inferSelect

 export const sandboxRelations = relations(sandbox, ({ one, many }) => ({
   author: one(user, {
@@ -42,7 +42,7 @@ export const sandboxRelations = relations(sandbox, ({ one, many }) => ({
     references: [user.id],
   }),
   usersToSandboxes: many(usersToSandboxes),
-}));
+}))

 export const usersToSandboxes = sqliteTable("users_to_sandboxes", {
   userId: text("userId")
@@ -52,15 +52,18 @@ export const usersToSandboxes = sqliteTable("users_to_sandboxes", {
     .notNull()
     .references(() => sandbox.id),
   sharedOn: integer("sharedOn", { mode: "timestamp_ms" }),
-});
+})

-export const usersToSandboxesRelations = relations(usersToSandboxes, ({ one }) => ({
-  group: one(sandbox, {
-    fields: [usersToSandboxes.sandboxId],
-    references: [sandbox.id],
-  }),
-  user: one(user, {
-    fields: [usersToSandboxes.userId],
-    references: [user.id],
-  }),
-}));
+export const usersToSandboxesRelations = relations(
+  usersToSandboxes,
+  ({ one }) => ({
+    group: one(sandbox, {
+      fields: [usersToSandboxes.sandboxId],
+      references: [sandbox.id],
+    }),
+    user: one(user, {
+      fields: [usersToSandboxes.userId],
+      references: [user.id],
+    }),
+  })
+)
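As a usage sketch only (not part of the commit): the tables and relations above can be combined with the worker's D1 binding through drizzle-orm's relational query API. The helper name and the way the binding is passed in are illustrative.

```ts
import { eq } from "drizzle-orm"
import { drizzle } from "drizzle-orm/d1"

import * as schema from "./schema"
import { user } from "./schema"

// Illustrative helper: load a user plus the sandboxes wired up via userRelations.
export async function getUserWithSandboxes(d1: D1Database, id: string) {
  const db = drizzle(d1, { schema })
  return db.query.user.findFirst({
    where: eq(user.id, id),
    with: { sandbox: true, usersToSandboxes: true },
  })
}
```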
@@ -1,25 +1,30 @@
 // test/index.spec.ts
-import { env, createExecutionContext, waitOnExecutionContext, SELF } from "cloudflare:test";
-import { describe, it, expect } from "vitest";
-import worker from "../src/index";
+import {
+  createExecutionContext,
+  env,
+  SELF,
+  waitOnExecutionContext,
+} from "cloudflare:test"
+import { describe, expect, it } from "vitest"
+import worker from "../src/index"

 // For now, you'll need to do something like this to get a correctly-typed
 // `Request` to pass to `worker.fetch()`.
-const IncomingRequest = Request<unknown, IncomingRequestCfProperties>;
+const IncomingRequest = Request<unknown, IncomingRequestCfProperties>

 describe("Hello World worker", () => {
-  it("responds with Hello World! (unit style)", async () => {
-    const request = new IncomingRequest("http://example.com");
-    // Create an empty context to pass to `worker.fetch()`.
-    const ctx = createExecutionContext();
-    const response = await worker.fetch(request, env, ctx);
-    // Wait for all `Promise`s passed to `ctx.waitUntil()` to settle before running test assertions
-    await waitOnExecutionContext(ctx);
-    expect(await response.text()).toMatchInlineSnapshot(`"Hello World!"`);
-  });
+  it("responds with Hello World! (unit style)", async () => {
+    const request = new IncomingRequest("http://example.com")
+    // Create an empty context to pass to `worker.fetch()`.
+    const ctx = createExecutionContext()
+    const response = await worker.fetch(request, env, ctx)
+    // Wait for all `Promise`s passed to `ctx.waitUntil()` to settle before running test assertions
+    await waitOnExecutionContext(ctx)
+    expect(await response.text()).toMatchInlineSnapshot(`"Hello World!"`)
+  })

-  it("responds with Hello World! (integration style)", async () => {
-    const response = await SELF.fetch("https://example.com");
-    expect(await response.text()).toMatchInlineSnapshot(`"Hello World!"`);
-  });
-});
+  it("responds with Hello World! (integration style)", async () => {
+    const response = await SELF.fetch("https://example.com")
+    expect(await response.text()).toMatchInlineSnapshot(`"Hello World!"`)
+  })
+})
@@ -1,11 +1,11 @@
{
  "extends": "../tsconfig.json",
  "compilerOptions": {
    "types": [
      "@cloudflare/workers-types/experimental",
      "@cloudflare/vitest-pool-workers"
    ]
  },
  "include": ["./**/*.ts", "../src/env.d.ts"],
  "exclude": []
}
@@ -12,7 +12,9 @@

     /* Language and Environment */
     "target": "es2021" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */,
-    "lib": ["es2021"] /* Specify a set of bundled library declaration files that describe the target runtime environment. */,
+    "lib": [
+      "es2021"
+    ] /* Specify a set of bundled library declaration files that describe the target runtime environment. */,
     "jsx": "react" /* Specify what JSX code is generated. */,
     // "experimentalDecorators": true, /* Enable experimental support for TC39 stage 2 draft decorators. */
     // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */
@@ -1,11 +1,11 @@
-import { defineWorkersConfig } from "@cloudflare/vitest-pool-workers/config";
+import { defineWorkersConfig } from "@cloudflare/vitest-pool-workers/config"

 export default defineWorkersConfig({
-  test: {
-    poolOptions: {
-      workers: {
-        wrangler: { configPath: "./wrangler.toml" },
-      },
-    },
-  },
-});
+  test: {
+    poolOptions: {
+      workers: {
+        wrangler: { configPath: "./wrangler.toml" },
+      },
+    },
+  },
+})
@@ -1,23 +1,23 @@
{
  "name": "storage",
  "version": "0.0.0",
  "private": true,
  "scripts": {
    "deploy": "wrangler deploy",
    "dev": "wrangler dev --remote",
    "start": "wrangler dev",
    "test": "vitest",
    "cf-typegen": "wrangler types"
  },
  "devDependencies": {
    "@cloudflare/vitest-pool-workers": "^0.1.0",
    "@cloudflare/workers-types": "^4.20240419.0",
    "typescript": "^5.0.4",
    "vitest": "1.3.0",
    "wrangler": "^3.0.0"
  },
  "dependencies": {
    "p-limit": "^6.1.0",
    "zod": "^3.23.4"
  }
}
@@ -1,5 +1,5 @@
+import pLimit from "p-limit"
 import { z } from "zod"
-import pLimit from 'p-limit';

 export interface Env {
   R2: R2Bucket
@@ -144,20 +144,24 @@ export default {
     const body = await request.json()
     const { sandboxId, type } = initSchema.parse(body)

-    console.log(`Copying template: ${type}`);
+    console.log(`Copying template: ${type}`)

     // List all objects under the directory
-    const { objects } = await env.Templates.list({ prefix: type });
+    const { objects } = await env.Templates.list({ prefix: type })

     // Copy each object to the new directory with a 5 concurrency limit
-    const limit = pLimit(5);
-    await Promise.all(objects.map(({ key }) =>
-      limit(async () => {
-        const destinationKey = key.replace(type, `projects/${sandboxId}`);
-        const fileBody = await env.Templates.get(key).then(res => res?.body ?? "");
-        await env.R2.put(destinationKey, fileBody);
-      })
-    ));
+    const limit = pLimit(5)
+    await Promise.all(
+      objects.map(({ key }) =>
+        limit(async () => {
+          const destinationKey = key.replace(type, `projects/${sandboxId}`)
+          const fileBody = await env.Templates.get(key).then(
+            (res) => res?.body ?? ""
+          )
+          await env.R2.put(destinationKey, fileBody)
+        })
+      )
+    )

     return success
   } else {
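The hunk above bounds R2 copies with p-limit; here is a standalone sketch of that concurrency pattern. The helper names are illustrative, and only the pLimit(5) usage comes from the code above.

```ts
import pLimit from "p-limit"

// Run `copyOne` over every key with at most five copies in flight at a time.
export async function copyAll(
  keys: string[],
  copyOne: (key: string) => Promise<void>
): Promise<void> {
  const limit = pLimit(5)
  await Promise.all(keys.map((key) => limit(() => copyOne(key))))
}
```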
@@ -1,25 +1,30 @@
 // test/index.spec.ts
-import { env, createExecutionContext, waitOnExecutionContext, SELF } from 'cloudflare:test';
-import { describe, it, expect } from 'vitest';
-import worker from '../src/index';
+import {
+  createExecutionContext,
+  env,
+  SELF,
+  waitOnExecutionContext,
+} from "cloudflare:test"
+import { describe, expect, it } from "vitest"
+import worker from "../src/index"

 // For now, you'll need to do something like this to get a correctly-typed
 // `Request` to pass to `worker.fetch()`.
-const IncomingRequest = Request<unknown, IncomingRequestCfProperties>;
+const IncomingRequest = Request<unknown, IncomingRequestCfProperties>

-describe('Hello World worker', () => {
-  it('responds with Hello World! (unit style)', async () => {
-    const request = new IncomingRequest('http://example.com');
+describe("Hello World worker", () => {
+  it("responds with Hello World! (unit style)", async () => {
+    const request = new IncomingRequest("http://example.com")
     // Create an empty context to pass to `worker.fetch()`.
-    const ctx = createExecutionContext();
-    const response = await worker.fetch(request, env, ctx);
+    const ctx = createExecutionContext()
+    const response = await worker.fetch(request, env, ctx)
     // Wait for all `Promise`s passed to `ctx.waitUntil()` to settle before running test assertions
-    await waitOnExecutionContext(ctx);
-    expect(await response.text()).toMatchInlineSnapshot(`"Hello World!"`);
-  });
+    await waitOnExecutionContext(ctx)
+    expect(await response.text()).toMatchInlineSnapshot(`"Hello World!"`)
+  })

-  it('responds with Hello World! (integration style)', async () => {
-    const response = await SELF.fetch('https://example.com');
-    expect(await response.text()).toMatchInlineSnapshot(`"Hello World!"`);
-  });
-});
+  it("responds with Hello World! (integration style)", async () => {
+    const response = await SELF.fetch("https://example.com")
+    expect(await response.text()).toMatchInlineSnapshot(`"Hello World!"`)
+  })
+})
@@ -1,11 +1,11 @@
{
  "extends": "../tsconfig.json",
  "compilerOptions": {
    "types": [
      "@cloudflare/workers-types/experimental",
      "@cloudflare/vitest-pool-workers"
    ]
  },
  "include": ["./**/*.ts", "../src/env.d.ts"],
  "exclude": []
}
@@ -12,7 +12,9 @@

     /* Language and Environment */
     "target": "es2021" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */,
-    "lib": ["es2021"] /* Specify a set of bundled library declaration files that describe the target runtime environment. */,
+    "lib": [
+      "es2021"
+    ] /* Specify a set of bundled library declaration files that describe the target runtime environment. */,
     "jsx": "react" /* Specify what JSX code is generated. */,
     // "experimentalDecorators": true, /* Enable experimental support for TC39 stage 2 draft decorators. */
     // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */
@@ -1,11 +1,11 @@
-import { defineWorkersConfig } from "@cloudflare/vitest-pool-workers/config";
+import { defineWorkersConfig } from "@cloudflare/vitest-pool-workers/config"

 export default defineWorkersConfig({
-  test: {
-    poolOptions: {
-      workers: {
-        wrangler: { configPath: "./wrangler.toml" },
-      },
-    },
-  },
-});
+  test: {
+    poolOptions: {
+      workers: {
+        wrangler: { configPath: "./wrangler.toml" },
+      },
+    },
+  },
+})
backend/storage/worker-configuration.d.ts (vendored)
@@ -1,4 +1,3 @@
 // Generated by Wrangler
 // After adding bindings to `wrangler.toml`, regenerate this interface via `npm run cf-typegen`
-interface Env {
-}
+interface Env {}