// augment.js
import utils from "@/utils/index.js";
import yaml from "yaml";
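/*
augment.js — swarm augmentation pass for backend external APIs.
Flow, as implemented below :
1. extract the `external-api` / `external-apis` decorators from the task's server code
2. query the "apis" index (RAG) for candidate external APIs for each decorated function
3. run an analysis LLM pass (promptAnalysis) over the functions and their search results
4. run an implement & merge LLM pass (promptImplementMerge) to produce the updated server.mjs
5. parse the markdown / mjs / yaml sections out of the reply and return { mjs, dependencies, env, timestamp }
*/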
async function promptAnalysis({ context, data }) {
const { task } = data;
const { code, decorators, apis } = task;
return [
{
role: "system",
content: `you are an expert backend and Node.js dev
- your role is to generate an analysis for functions to implement that may require the use of external APIs, either through direct API calls or npm SDKs
- you are provided with descriptions and contextual code snippets of the desired functions, from a node server module
these functions are tagged as needing the implementation of external APIs/SDKs for the tasks they are meant to accomplish
- you are also provided with search results for each function, from external APIs that were indexed
> you are to determine whether the external API search result(s) are relevant or not for the described use cases
> if so, which ones to use and how ? what do they need to run ? how to use them ? how to format their expected response ?
> if no search result is relevant, do you know about fitting Node.js/npm SDKs/packages or other APIs you are familiar with that fit the task ?
and if so, how to use them ?
> note : if a fitting external API is identified and also has SDKs you know about,
it is preferable to call the API using the provided openapi / docs instead of the SDK you already know about
( because SDKs might have been updated since your knowledge cutoff ) ;
use SDKs when no API search results make sense for the analyzed implementation case
> note : if you are using references from the provided docs, extract and include snippets from them inside your analysis to document it properly
conduct a detailed analysis for each of the ${apis.length} provided functions to implement
your reply should start with : "\`\`\`markdown" and end with "\`\`\`"
you will be tipped $999
you're a genius
`,
},
// each api entry in its own message
...apis.map(({ id, description, snippet, rag }) => {
return {
role: "user",
content: `\`\`\`task:${id}
${yaml.stringify({
functionDescription: description,
contextCodeSnippet: snippet,
})}
\`\`\`
\`\`\`apis-search-results:${id}
${rag.length ? yaml.stringify(rag) : ""}
\`\`\`
`,
};
}),
{
role: "user",
content: `Conduct your analysis of each of the ${apis.length} provided functions to implement, with each function in its own separate and very detailed section, and make sure you do not miss any useful detail !
be extremely detailed and include every single used reference and detail in your analysis for it to be fully comprehensive and complete
you are a genius`,
},
];
}
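/*
illustrative sketch only — roughly what one of the per-function user messages built above
renders to, for a hypothetical decorator (the id, description, snippet and rag values are made up) :

  ```task:fn:1/2
  functionDescription: fetch the current weather for a given city
  contextCodeSnippet: |-
    async function get_current_weather(city) { ... }
  ```
  ```apis-search-results:fn:1/2
  - name: example-weather-api
    ...
  ```
*/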
async function promptImplementMerge({ context, data }) {
const { pm, db, backend, task } = data;
const { prd, frd, drd, brd } = pm;
const { code, apis, analysis } = task;
const { openapi, asyncapi } = backend.specifications;
/*
should also get BRD here ! important ! i.e. so it doesn't do stupid placeholders ?
should provide full code too ? maybe 2 pass implement directly ; implement and revise ?
no need for rag here
*/
return [
{
role: "system",
content: `Your task, as the genius backend dev expert you are, is to generate the full Node.js script for a module, based on the provided specifications and details of the backend in development
- the current code of the server script is provided
the desired updates are provided
> your main task is to add the provided functions, and return a fully functional script that has both the original features and the newly added updates, with everything working perfectly and as expected
---
your role is to implement the full express server for the provided task for the \`server.mjs\` (type: module script)
you will answer in 3 parts :
- analysis , in a \`\`\`markdown\`\`\` section
- code , in a \`\`\`mjs\`\`\` section
- dependencies and env variables , in a \`\`\`yaml\`\`\` section ; where any packages to install and env variables to set up will be mentioned ; the yaml should have objects : { dependencies : {"package":"version"} , env : {"key" : "temp_value"} } ("dependencies" for packages and "env" for env variables (and their temporary values))
use double quotes for every string inside the yaml to make sure formatting is good
---
in your analysis, ask yourself :
> what are the added functions ?
> how do i merge all updates perfectly with the working code ?
> is the full flow covered ?
> are all the expected functions fulfilled ?
> am i covering all the parts for all the required updates ?
including imports, functions, db operations, ... ?
are all the new updates congruent with the original code structure, flow, db operations and all that is expected ?
---
for any db requirements, use postgres from \`@electric-sql/pglite\`
- to use postgres, include this snippet in your script :
\`\`\`
import { PGlite } from "@electric-sql/pglite";
const postgres = new PGlite("./db");
/* then, can be used like this :
await postgres.query("SELECT * FROM exampletable;")
*/
// note : the postgres tables + seed were already created before , you can use the postgres directly without configuring it
\`\`\`
postgres is used exactly as provided in the snippet, do not change anything about loading it / configuring it, else it breaks ;
postgres is imported, initialized and queried EXACTLY AS SHOWN IN THE SNIPPET ! NO OTHER WAY !
---
note : the postgres tables + seed were already created before , you can use the postgres directly without configuring it ; do not create tables in script !
extremely important :
- the DB R/W need to be 100% compatible with the tables and schemas of the provided DB specifications !!
- if it makes use of .env , make sure you import \`dotenv\` and call \`dotenv.config()\` to read .env first !
---
extremely important :
- you are to implement the entire server as specified in the provided docs , with a focus on DB R/W operations
- you are to implement every single thing needed by the backend server and output one single big working perfect \`server.mjs\` script
- do not assume anything is implemented yet ! you will do 100% of everything needed and output one single big working perfect \`server.mjs\` script
- no placeholders, no hallucinated imports
- again, do not assume anything is implemented yet ! you will do 100% of everything needed and output one single big working perfect \`server.mjs\` script
---
note:
> if e.g. some mock data is meant to store an image url, use a https://picsum.photos/ url with a random seed
super important :
> use snake_case for any new naming you do
> ensure full perfect coherence with DB fields names and all provided specs names
---
extremely important :
- the DB R/W need to be 100% compatible with the tables and schemas of the provided DB specifications !!
- the app flow must be 100% working perfect everywhere
you are a genius + you get tipped $9999999
`,
},
{
role: "user",
content: `\`\`\`PRD:product-requirements-document
${prd}
\`\`\`
\`\`\`FRD:features-requirements-document
${frd}
\`\`\`
`,
},
{
role: "user",
content: `
\`\`\`DB:postgres:sql
${db.postgres}
\`\`\`
---
extremely turbo important :
> pay extreme attention to DB details :
> the things that you are expected to provide with inserts :
> should you make a uuid before inserting with postgres query ?
> are there key constraints ?
> is the db querying code using the exact names as in db fields ?
> are you providing everything needed to db every single time ?
`,
},
{
role: "user",
content: `\`\`\`BRD:backend-requirements-document
${brd}
\`\`\``,
},
data.backend?.requirements?.restApi?.required && {
role: "user",
content: `\`\`\`BACKEND:specifications:openAPI
${yaml.stringify(openapi)}
\`\`\``,
},
data.backend?.requirements?.realtimeWebsockets?.required && {
role: "user",
content: `\`\`\`BACKEND:specifications:asyncAPI
${yaml.stringify(asyncapi)}
\`\`\``,
},
{
role: "user",
content: `The function updates to the original code are the following :
\`\`\`functions:update:tasks
${yaml.stringify({
toUpdate: apis.map(({ description, snippet }) => {
return {
functionDescription: description,
contextCodeSnippet: snippet,
};
}),
})}
\`\`\``,
},
{
role: "user",
content: `The original full script code to update is :
\`\`\`mjs
${code}
\`\`\`
`,
},
{
role: "user",
content: `The analysis of the new updates to make to the server code is the following :
\`\`\`functions:update:analysis
${analysis}
\`\`\``,
},
{
role: "user",
content: `extremely important :
- you are to implement the entire \`server.mjs\` as specified in the backend specifications , with a focus on DB R/W operations
- you are to implement every single thing needed by the server and output one single big working perfect \`server.mjs\` script
- do not assume anything is implemented yet ! you will do 100% of everything needed and output one single big working perfect \`server.mjs\` script
- no placeholders, no hallucinated imports
---
extremely turbo important :
> pay extreme attention to DB details :
> the things that you are expected to provide with inserts :
> should you make a uuid before inserting with a postgres query ?
> are there key constraints ? should you create something before inserting something else because of constraints ?
> is the db querying code using the exact names as in db fields ?
> are you providing everything needed to db every single time ?
---
- again, do not assume anything is implemented yet ! you will do 100% of everything needed and output one single big working perfect \`server.mjs\` script
- again , you are to implement every single thing needed by the server and output one single big working perfect \`server.mjs\` script
- no placeholders, no hallucinated imports ; one 100% perfect complete working server script
extremely important :
- the DB R/W need to be 100% compatible with the tables and schemas of the provided DB specifications !!
now do the analysis , write the full working script and specify the dependencies+env`,
},
].filter((e) => e);
}
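/*
illustrative sketch only — the three-part reply format promptImplementMerge asks the model for ;
the package names, versions and env keys below are placeholders, not part of this module :

  ```markdown
  ...analysis of how the new functions merge into the existing server...
  ```
  ```mjs
  import express from "express";
  ...one full merged server.mjs...
  ```
  ```yaml
  dependencies:
    "express": "4.19.2"
  env:
    "EXAMPLE_API_KEY": "temp_value"
  ```
*/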
async function promptImplementReview({ context, data }) {
const { task, pm } = data;
const { brd } = pm;
const { code, decorators, apis, analysis, implementations } = task;
/*
maybe double check verify instead ?
*/
return [];
}
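// promptImplementReview is currently a stub : it builds no messages and is not wired into the flow below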
async function swarmAugmentBackendExternalapis({ context, data }) {
/*
*/
const { task } = data;
const { code } = task;
const decorators = (await utils.parsers.extract.decorators({ code })).filter(
(item) => item.type === "external-api" || item.type === "external-apis",
);
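// each decorator is expected to expose at least { type, description, snippet }
// (shape inferred from the filter above and the destructuring below ; the extractor itself lives in utils.parsers.extract)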
if (!decorators.length) return {};
// apis RAG
const apis = await Promise.all(
decorators.map(async (item, idx) => {
const { description, snippet } = item;
const ragText = `Description : ${description}\n\nCode Snippet :\n\`\`\`\n${snippet}\n\`\`\``;
return {
id: `fn:${idx + 1}/${decorators.length}`,
description,
snippet,
rag: (
await context.run({
id: `op:INDEXDB::QUERY`,
context,
data: {
index: "apis",
text: ragText,
amount: 5,
},
})
).results,
};
}),
);
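// each `apis` entry now looks roughly like :
// { id: "fn:1/2", description, snippet, rag: [ ...up to 5 indexed API matches... ] }
// (the exact fields of the rag results depend on the "apis" index schema, which is not defined in this module)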
data.task.decorators = decorators;
data.task.apis = apis;
const messagesAnalysis = await promptAnalysis({ context, data });
const analysisPass = (
await context.run({
id: "op:LLM::GEN",
context: {
...context, // {streams , project}
operation: {
key: "swarm.augment.analysis",
meta: {
name: "Swarm Post-Gen Check",
desc: "analysis pass",
},
},
},
data: {
model: process.env.LOCAL_INFERENCE_MODEL, //`gpt-4o`,
messages: messagesAnalysis,
preparser: `backticks`,
parser: false,
},
})
).generated;
data.task.analysis = analysisPass;
const messagesImplementMerge = await promptImplementMerge({ context, data });
const { generated } = await context.run({
id: "op:LLM::GEN",
context: {
...context, // {streams , project}
operation: {
key: "swarm.augment.implement",
meta: {
name: "Swarm Code Update",
desc: "implement changes & merge",
},
},
},
data: {
model: process.env.LOCAL_INFERENCE_MODEL, //`gpt-4o`,
messages: messagesImplementMerge,
preparser: false,
parser: false,
},
});
const extraction = await utils.parsers.extract.backticksMultiple({
text: generated,
delimiters: [`markdown`, `mjs`, `yaml`],
});
const { mjs } = extraction;
if (!mjs.length || !extraction.yaml) {
throw new Error(
"swarm:augment:backend:externalApis:generate error - generated code or dependencies yaml is empty",
);
}
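// per the prompt, the yaml section is expected to parse to :
// { dependencies: { "<package>": "<version>" }, env: { "<KEY>": "<temp_value>" } }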
const parsedYaml = extraction.yaml ? yaml.parse(extraction.yaml) : {};
const generatedServer = {
mjs,
dependencies: parsedYaml.dependencies
? Object.fromEntries(
// pin every generated dependency to "*" instead of trusting the model's version strings
Object.keys(parsedYaml.dependencies).map((key) => [key, "*"]),
)
: {},
env: parsedYaml.env ? parsedYaml.env : {},
timestamp: Date.now(),
};
return generatedServer;
}
export default {
"SWARM:AUGMENT::BACKEND:EXTERNALAPIS": swarmAugmentBackendExternalapis,
};