Use compact prompt when generating requests via Claude 3.5
actualwitch committed Sep 24, 2024
1 parent fe3af11 commit c268d09
Showing 4 changed files with 99 additions and 17 deletions.
79 changes: 64 additions & 15 deletions Tiltfile
@@ -1,10 +1,10 @@
# Automagically install & update npm dependencies when package.json changes
# Automagically install & update pnpm dependencies when package.json changes
local_resource(
"node_modules",
labels=["api", "frontend"],
deps=["package.json", "api/package.json", "frontend/package.json"],
labels=["api", "studio"],
deps=["package.json", "api/package.json", "studio/package.json"],
dir=".",
cmd="npm install",
cmd="pnpm install",
)

# Ensure the api/dist directory exists
@@ -14,22 +14,31 @@ local_resource(
cmd="mkdir api/dist || true",
)

# Build & serve the frontend
local_resource(
"frontend-build",
labels=["frontend"],
cmd="npm run clean:frontend && npm run build:frontend",
deps=["frontend/src"],
"packages-build",
labels=["studio"],
cmd="pnpm --filter @fiberplane/fpx-types build && pnpm --filter @fiberplane/fpx-utils build && pnpm --filter @fiberplane/hono-otel build",
deps=["packages"],
ignore=["packages/*/dist"],
)

# Build & serve the studio
local_resource(
"studio-build",
labels=["studio"],
cmd="pnpm clean:frontend && pnpm build:frontend",
deps=["studio/src"],
resource_deps=["node_modules", "api-dist"],
)

local_resource(
"frontend-serve",
labels=["frontend"],
"studio-serve",
labels=["studio"],
deps=["studio/src"],
resource_deps=["node_modules", "api-dist"],
serve_cmd="npm run dev",
serve_cmd="pnpm dev",
serve_dir="studio",
auto_init=False,
trigger_mode=TRIGGER_MODE_MANUAL,
)

@@ -38,15 +47,15 @@ local_resource(
"db-generate",
labels=["api"],
dir="api",
cmd="npm run db:generate",
cmd="pnpm db:generate",
deps=["api/drizzle.config.ts"],
)

local_resource(
"db-migrate",
labels=["api"],
dir="api",
cmd="npm run db:migrate",
cmd="pnpm db:migrate",
deps=["api/migrate.ts"],
)

@@ -55,6 +64,46 @@ local_resource(
"api",
labels=["api"],
resource_deps=["node_modules", "db-generate", "db-migrate"],
serve_cmd="npm run dev",
serve_cmd="pnpm dev",
serve_dir="api",
)

local_resource(
"reset-db",
labels=["api"],
cmd="rm fpx.db",
dir="api",
auto_init=False,
trigger_mode=TRIGGER_MODE_MANUAL,
)

local_resource(
"format",
labels=["api", "studio"],
cmd="pnpm format",
auto_init=False,
trigger_mode=TRIGGER_MODE_MANUAL,
)


# Examples

local_resource(
"examples-node-api",
dir="examples/node-api",
labels=["examples"],
serve_dir="examples/node-api",
serve_cmd="pnpm dev",
auto_init=False,
trigger_mode=TRIGGER_MODE_MANUAL,
)

local_resource(
"examples-goose-quotes",
dir="examples/goose-quotes",
labels=["examples"],
serve_dir="examples/goose-quotes",
serve_cmd="pnpm db:generate && pnpm db:migrate && pnpm dev",
auto_init=False,
trigger_mode=TRIGGER_MODE_MANUAL,
)
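Note that several of the newly added resources (studio-serve, reset-db, format, and the two example apps) are declared with auto_init=False and TRIGGER_MODE_MANUAL, so Tilt will not run them on startup; they only execute when triggered from the Tilt UI or via the CLI (for example `tilt trigger format`, assuming the Tilt CLI is available).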
5 changes: 4 additions & 1 deletion api/src/lib/ai/anthropic.ts
@@ -1,4 +1,5 @@
import Anthropic from "@anthropic-ai/sdk";
import { CLAUDE_3_5_SONNET } from "@fiberplane/fpx-types";
import logger from "../../logger.js";
import { getSystemPrompt, invokeRequestGenerationPrompt } from "./prompts.js";
import { makeRequestTool as makeRequestToolBase } from "./tools.js";
@@ -73,11 +74,13 @@ export async function generateRequestWithAnthropic({
name: makeRequestTool.name,
};

const systemPrompt = getSystemPrompt(persona, model === CLAUDE_3_5_SONNET);

const response = await anthropicClient.messages.create({
model,
tool_choice: toolChoice,
tools: [makeRequestTool],
system: getSystemPrompt(persona),
system: systemPrompt,
messages: [
{
role: "user",
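The call-site change above is the core of the commit: the compact ("slim") system prompt is only requested when the selected model is Claude 3.5 Sonnet. A minimal TypeScript sketch of that selection, reusing the identifiers visible in the diff (the wrapper function itself is illustrative, not part of the commit):

import { CLAUDE_3_5_SONNET } from "@fiberplane/fpx-types";
import { getSystemPrompt } from "./prompts.js";

// Illustrative helper: opt into the compact prompt only for Claude 3.5 Sonnet.
function pickSystemPrompt(persona: string, model: string): string {
  return getSystemPrompt(persona, model === CLAUDE_3_5_SONNET);
}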
30 changes: 29 additions & 1 deletion api/src/lib/ai/prompts.ts
@@ -1,6 +1,11 @@
import { PromptTemplate } from "@langchain/core/prompts";

export const getSystemPrompt = (persona: string) => {
export const getSystemPrompt = (persona: string, slimPrompt = false) => {
if (slimPrompt) {
return persona === "QA"
? HOSTILE_SLIM_SYSTEM_PROMPT
: FRIENDLY_SLIM_SYSTEM_PROMPT;
}
return persona === "QA"
? QA_PARAMETER_GENERATION_SYSTEM_PROMPT
: FRIENDLY_PARAMETER_GENERATION_SYSTEM_PROMPT;
@@ -284,6 +289,29 @@ Use the tool "make_request". Always respond in valid JSON.
***Don't make your responses too long, otherwise we cannot parse your JSON response.***
`);

const PATH_INSTRUCTION = `
For example, if you get a route like \`/users/:id\`, you should return a filled-in "path" field,
like \`/users/1234567890\` and a "pathParams" field like:
{ "path": "/users/1234567890", "pathParams": { "key": ":id", "value": "1234567890" } }
*Remember to keep the colon in the pathParam key!*
`;

export const FRIENDLY_SLIM_SYSTEM_PROMPT = cleanPrompt(`
You are a friendly, expert full-stack engineer and an API testing assistant. Please help the user craft requests to route handlers.
${PATH_INSTRUCTION}
`);

export const HOSTILE_SLIM_SYSTEM_PROMPT = cleanPrompt(`
You are an expert QA Engineer, a thorough API tester with a generally hostile disposition. Please help the user craft requests to route handlers.
${PATH_INSTRUCTION}
You should focus on trying to break things. Be clever and creative with test data.
`);

/**
* Clean a prompt by trimming whitespace for each line and joining the lines.
*/
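The hunk ends at the cleanPrompt docstring; its body is not shown in this diff. A plausible implementation consistent with that docstring (an assumption, not the actual source) would be:

// Assumed implementation: trim each line and rejoin — inferred from the docstring only.
export const cleanPrompt = (prompt: string): string =>
  prompt
    .split("\n")
    .map((line) => line.trim())
    .join("\n")
    .trim();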
2 changes: 2 additions & 0 deletions www/src/content/changelog/!canary.mdx
@@ -6,4 +6,6 @@ draft: true

### Features

- **Compact AI query generation prompts**: Using the latest model from Anthropic, we were able to achieve a significant token reduction in the query generation prompts. This only affects Claude 3.5 Sonnet for now.

### Bug fixes
