init.ts (forked from ianmacartney/streaming-chat-gpt)
import { api } from "./_generated/api";
import { internalMutation } from "./_generated/server";

// Scripted demo conversation: [author, body, delay in ms added before this
// message is scheduled, relative to the previous one].
const seedMessages = [
  ["Ian", "Hey, glad you're here.", 0],
  ["Abhi", "What's up?", 1000],
  ["Ian", "I'm hoping to show how reactive Convex is.", 1500],
  ["Abhi", "Could you show streaming a ChatGPT response?", 1700],
  ["Ian", "By updating the DB and having the query reflow?", 3000],
  ["Abhi", "Yeah. @gpt do you think that's a good idea?", 2000],
  ["Ian", "Very clever! Let's see what it thinks.", 600],
  ["Ian", "Thanks @gpt!", 5000],
] as const;

// Fail fast with a link to the right dashboard page if the OpenAI API key
// hasn't been configured for this deployment yet.
if (!process.env.OPENAI_API_KEY) {
  const deploymentName = process.env.CONVEX_CLOUD_URL?.slice(8).replace(
    ".convex.cloud",
    ""
  );
  throw new Error(
    "\n Missing OPENAI_API_KEY in environment variables.\n\n" +
      " Get one at https://openai.com/ and paste it on the Convex dashboard:\n" +
      ` https://dashboard.convex.dev/d/${deploymentName}/settings?var=OPENAI_API_KEY`
  );
}

// Schedules each seed message via api.messages.send after its cumulative
// delay, so the conversation plays back over time rather than all at once.
export const seed = internalMutation({
  handler: async (ctx) => {
    let totalDelay = 0;
    for (const [author, body, delay] of seedMessages) {
      totalDelay += delay;
      await ctx.scheduler.runAfter(totalDelay, api.messages.send, {
        author,
        body,
      });
    }
  },
});

// Default export: seed only when the messages table is empty, so running
// init against an already-populated deployment is a no-op.
export default internalMutation({
  handler: async (ctx) => {
    const anyMessage = await ctx.db.query("messages").first();
    if (anyMessage) return;
    await seed(ctx, {});
  },
});
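The api.messages.send mutation that this file schedules lives in messages.ts, which isn't shown here. Below is a minimal sketch of what such a mutation could look like, assuming a "messages" table with author and body fields; the upstream repo's version presumably also detects "@gpt" mentions and starts streaming the ChatGPT reply, but that part is only indicated in comments.

// messages.ts (hypothetical sketch, not the upstream implementation)
import { mutation } from "./_generated/server";
import { v } from "convex/values";

export const send = mutation({
  args: { author: v.string(), body: v.string() },
  handler: async (ctx, { author, body }) => {
    // Inserting a row is enough for the reactivity demo: every client
    // subscribed to a messages query re-renders when the table changes.
    await ctx.db.insert("messages", { author, body });
    // The streaming-chat-gpt repo additionally reacts to "@gpt" in the body
    // by kicking off work that streams the ChatGPT answer into a placeholder
    // message, which is what the seed conversation above exercises.
  },
});

To populate a fresh deployment, the default export above can be run once from the Convex CLI (for example, npx convex run init); it skips seeding if any messages already exist.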