Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: better prompt examples (#1523) #1526

Draft
wants to merge 4 commits into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 3 additions & 1 deletion .env
Original file line number Diff line number Diff line change
Expand Up @@ -164,4 +164,6 @@ HF_ORG_EARLY_ACCESS=
PUBLIC_SMOOTH_UPDATES=false
COMMUNITY_TOOLS=false

PUBLIC_COMMIT_SHA=
PUBLIC_COMMIT_SHA=

PROMPT_EXAMPLES=`[]`
121 changes: 32 additions & 89 deletions chart/env/prod.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -57,21 +57,7 @@ envVars:
"temperature": 0.6,
"max_new_tokens": 1024,
"truncate": 7167
},
"promptExamples": [
{
"title": "Write an email from bullet list",
"prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
},
{
"title": "Code a snake game",
"prompt": "Code a basic snake game in python, give explanations for each step."
},
{
"title": "Assist in a task",
"prompt": "How do I make a delicious lemon cheesecake?"
}
]
}
},
{
"name": "CohereForAI/c4ai-command-r-plus-08-2024",
Expand All @@ -86,21 +72,7 @@ envVars:
"truncate": 28672,
"max_new_tokens": 2048,
"temperature": 0.3
},
"promptExamples": [
{
"title": "Generate a mouse portrait",
"prompt": "Generate the portrait of a scientific mouse in its laboratory."
},
{
"title": "Review a pull request",
"prompt": "Review this pull request: https://github.com/huggingface/chat-ui/pull/1131/files"
},
{
"title": "Code a snake game",
"prompt": "Code a basic snake game in python, give explanations for each step."
}
]
}
},
{
"name": "Qwen/Qwen2.5-72B-Instruct",
Expand All @@ -115,21 +87,7 @@ envVars:
"temperature": 0.6,
"truncate": 28672,
"max_new_tokens": 3072
},
"promptExamples": [
{
"title": "Write an email from bullet list",
"prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
},
{
"title": "Code a snake game",
"prompt": "Code a basic snake game in python, give explanations for each step."
},
{
"title": "Assist in a task",
"prompt": "How do I make a delicious lemon cheesecake?"
}
]
}
},
{
"name": "nvidia/Llama-3.1-Nemotron-70B-Instruct-HF",
Expand Down Expand Up @@ -228,20 +186,6 @@ envVars:
"websiteUrl": "https://nousresearch.com/",
"modelUrl": "https://huggingface.co/NousResearch/Hermes-3-Llama-3.1-8B",
"tokenizer": "NousResearch/Hermes-3-Llama-3.1-8B",
"promptExamples": [
{
"title": "Write an email from bullet list",
"prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
},
{
"title": "Code a snake game",
"prompt": "Code a basic snake game in python, give explanations for each step."
},
{
"title": "Assist in a task",
"prompt": "How do I make a delicious lemon cheesecake?"
}
],
"parameters": {
"stop": ["<|im_end|>"],
"temperature": 0.6,
Expand All @@ -263,21 +207,7 @@ envVars:
"temperature": 0.6,
"truncate": 14336,
"max_new_tokens": 1536
},
"promptExamples": [
{
"title": "Write an email from bullet list",
"prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
},
{
"title": "Code a snake game",
"prompt": "Code a basic snake game in python, give explanations for each step."
},
{
"title": "Assist in a task",
"prompt": "How do I make a delicious lemon cheesecake?"
}
]
}
},
{
"name": "microsoft/Phi-3.5-mini-instruct",
Expand All @@ -292,21 +222,7 @@ envVars:
"temperature": 0.6,
"truncate": 28672,
"max_new_tokens": 3072
},
"promptExamples": [
{
"title": "Write an email from bullet list",
"prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
},
{
"title": "Code a snake game",
"prompt": "Code a basic snake game in python, give explanations for each step."
},
{
"title": "Assist in a task",
"prompt": "How do I make a delicious lemon cheesecake?"
}
]
}
},
{
"name": "llhf/Meta-Llama-3.1-8B-Instruct",
Expand Down Expand Up @@ -364,6 +280,33 @@ envVars:
"transferTo": "microsoft/Phi-3.5-mini-instruct"
}
]
PROMPT_EXAMPLES: >
[
{
"title": "Write an email from bullet list",
"prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
},
{
"title": "Code a snake game",
"prompt": "Code a basic snake game in python, give explanations for each step."
},
{
"title": "Assist in a task",
"prompt": "How do I make a delicious lemon cheesecake?"
},
{
"type": "multimodal",
"title": "Identify a flower",
"prompt": "What kind of flower is this?",
"fileUrl": "https://huggingface.co/datasets/huggingchat/prompt-examples/resolve/main/flower.jpg"
},
{
"type": "tool",
"title": "Generate a painting",
"prompt": "Generate a painting of a forest, oil painting style.",
"toolId": "000000000000000000000001"
}
]
PUBLIC_ORIGIN: "https://huggingface.co"
PUBLIC_SHARE_PREFIX: "https://hf.co/chat"
PUBLIC_ANNOUNCEMENT_BANNERS: >
Expand Down
57 changes: 49 additions & 8 deletions src/lib/components/chat/ChatIntroduction.svelte
Original file line number Diff line number Diff line change
Expand Up @@ -8,14 +8,31 @@
import ModelCardMetadata from "../ModelCardMetadata.svelte";
import { base } from "$app/paths";
import JSON5 from "json5";
import type { PromptExample } from "$lib/server/promptExamples";

import CarbonImage from "~icons/carbon/image";
import CarbonTools from "~icons/carbon/tools";

export let currentModel: Model;
export let promptExamples: PromptExample[];

const announcementBanners = envPublic.PUBLIC_ANNOUNCEMENT_BANNERS
? JSON5.parse(envPublic.PUBLIC_ANNOUNCEMENT_BANNERS)
: [];

const dispatch = createEventDispatcher<{ message: string }>();
const dispatch = createEventDispatcher<{
message: {
prompt: string;
file?: File | string;
tool?: string;
};
}>();

const prompts = promptExamples
.filter((prompt: PromptExample) => prompt?.models?.includes(currentModel.id) ?? true)
.filter(Boolean)
.sort(() => Math.random() - 0.5)
.slice(0, 3) as PromptExample[];
</script>

<div class="my-auto grid gap-8 lg:grid-cols-3">
Expand Down Expand Up @@ -73,20 +90,44 @@
<ModelCardMetadata variant="dark" model={currentModel} />
</div>
</div>
{#if currentModel.promptExamples}
{#if prompts && prompts.length > 0}
<div class="lg:col-span-3 lg:mt-6">
<p class="mb-3 text-gray-600 dark:text-gray-300">Examples</p>
<p class="mb-3 text-sm text-gray-500 dark:text-gray-400">Examples</p>
<div class="grid gap-3 lg:grid-cols-3 lg:gap-5">
{#each currentModel.promptExamples as example}
{#each prompts as example}
<button
type="button"
class="rounded-xl border bg-gray-50 p-3 text-gray-600 hover:bg-gray-100 dark:border-gray-800 dark:bg-gray-800 dark:text-gray-300 dark:hover:bg-gray-700 max-xl:text-sm xl:p-3.5"
on:click={() => dispatch("message", example.prompt)}
class="flex w-full max-w-full items-center gap-2 rounded-xl border bg-gray-50 p-3 text-gray-600 hover:bg-gray-100 dark:border-gray-800 dark:bg-gray-800 dark:text-gray-300 dark:hover:bg-gray-700 max-xl:text-sm xl:p-3.5"
class:multimodal={example.type === "multimodal"}
class:tool={example.type === "tool"}
on:click={() =>
dispatch("message", {
prompt: example.prompt,
file: example?.fileUrl ?? undefined,
tool: example?.toolId ?? undefined,
})}
>
{example.title}
{#if example.type === "multimodal"}
<CarbonImage class="min-w-6 text-lg text-blue-700 dark:text-blue-500" />
{:else if example.type === "tool"}
<CarbonTools class="min-w-6 text-lg text-purple-700 dark:text-purple-500" />
{/if}
<span class="ml-2 flex w-full flex-col items-start">
<span class="text-md text-left">{example.title}</span>
</span>
</button>
{/each}
</div>
</div>{/if}
</div>
{/if}
<div class="h-40 sm:h-24" />
</div>

<style lang="postcss">
.multimodal {
@apply border-blue-500/20 bg-blue-500/20 hover:bg-blue-500/30;
}
.tool {
@apply border-purple-500/20 bg-purple-500/20 hover:bg-purple-500/30;
}
</style>
4 changes: 3 additions & 1 deletion src/lib/components/chat/ChatWindow.svelte
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,7 @@
export let assistant: Assistant | undefined = undefined;
export let preprompt: string | undefined = undefined;
export let files: File[] = [];
export let promptExamples: PromptExample[] = [];

$: isReadOnly = !models.some((model) => model.id === currentModel.id);

Expand Down Expand Up @@ -324,6 +325,7 @@
{:else if !assistant}
<ChatIntroduction
{currentModel}
{promptExamples}
on:message={(ev) => {
if ($page.data.loginRequired) {
ev.preventDefault();
Expand Down Expand Up @@ -526,7 +528,7 @@
<CarbonCheckmark class="text-[.6rem] sm:mr-1.5 sm:text-green-600" />
<div class="text-green-600 max-sm:hidden">Link copied to clipboard</div>
{:else}
<CarbonExport class="sm:text-primary-500 text-[.6rem] sm:mr-1.5" />
<CarbonExport class="text-[.6rem] sm:mr-1.5 sm:text-primary-500" />
<div class="max-sm:hidden">Share this conversation</div>
{/if}
</button>
Expand Down
55 changes: 55 additions & 0 deletions src/lib/server/promptExamples.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
import { z } from "zod";
import { validModelIdSchema, models } from "./models";
import { validToolIdSchema } from "./tools";
import JSON5 from "json5";
import { env } from "$env/dynamic/private";

// Fields shared by every prompt-example variant: a display title, the prompt
// text sent to the model, and an optional allow-list of model ids the example
// is restricted to (omitted = the example is shown for all models).
const basePromptSchema = z.object({
	title: z.string(),
	prompt: z.string(),
	models: z.array(validModelIdSchema).optional(),
});

// Multimodal example: additionally ships a file (e.g. an image) that the UI
// downloads from fileUrl and attaches to the message.
const multimodalPromptSchema = basePromptSchema.extend({
	type: z.literal("multimodal"),
	fileUrl: z.string().url(),
});

// Tool example: triggers the tool identified by toolId (validated against the
// configured tools); may optionally attach a file as well.
const toolPromptSchema = basePromptSchema.extend({
	type: z.literal("tool"),
	toolId: validToolIdSchema,
	fileUrl: z.string().url().optional(),
});

// Simple (text-only) prompt example. `type` is optional in the config; the
// transform normalizes it so every parsed example carries an explicit
// discriminant and consumers can switch on `example.type` safely.
const simplePromptSchema = basePromptSchema
	.extend({
		type: z.literal("simple").optional(),
	})
	.transform((data) => ({
		...data,
		// BUG FIX: was `type: data`, which assigned the whole parsed object to
		// the `type` field. Default the missing discriminant to "simple".
		type: data.type ?? ("simple" as const),
	}));

// Schema for the full PROMPT_EXAMPLES list. z.union tries each variant in
// order, so tagged entries ("multimodal", "tool") are matched before falling
// back to the untagged simple form.
const promptExamplesSchema = z.array(
	z.union([multimodalPromptSchema, toolPromptSchema, simplePromptSchema])
);

export type PromptExample = z.infer<typeof promptExamplesSchema>[number];

// Parse the prompt examples from the environment variable. Default to an
// empty list when PROMPT_EXAMPLES is unset so deployments that predate this
// variable do not crash at startup (JSON5.parse(undefined) throws).
const promptExamples = promptExamplesSchema.parse(JSON5.parse(env.PROMPT_EXAMPLES ?? "[]"));

// Legacy support: models may still declare `promptExamples` inline in their
// own config. Lift those into the shared list, scoped to the declaring model.
// flatMap with a `?? []` fallback keeps the element type free of `undefined`
// (the previous .map(...).flat() produced `(PromptExample | undefined)[]`,
// forcing consumers to filter out holes).
const modelSpecificPromptExamples = models
	.filter((model) => !!model.promptExamples)
	.flatMap(
		(model) =>
			model.promptExamples?.map((example) => ({
				...example,
				models: [model.id],
			})) ?? []
	);

// Environment-level examples first, then per-model legacy ones.
const combinedPromptExamples = [...promptExamples, ...modelSpecificPromptExamples];

export { combinedPromptExamples as promptExamples };
3 changes: 3 additions & 0 deletions src/lib/server/tools/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -308,3 +308,6 @@ export function getCallMethod(tool: Omit<BaseTool, "call">): BackendCall {
}

export const toolFromConfigs = configTools.parse(JSON5.parse(env.TOOLS)) satisfies ConfigTool[];
// Enum of every configured tool id (stringified ObjectIds), used to validate
// tool references such as prompt-example toolIds. The tuple assertion
// satisfies z.enum's non-empty requirement at the type level only;
// NOTE(review): if TOOLS parses to an empty list this enum accepts no values
// at runtime — confirm that is the intended behavior.
export const validToolIdSchema = z.enum(
	toolFromConfigs.map((t) => t._id.toString()) as [string, ...string[]]
);
3 changes: 2 additions & 1 deletion src/routes/+layout.server.ts
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ import { toolFromConfigs } from "$lib/server/tools";
import { MetricsServer } from "$lib/server/metrics";
import type { ToolFront, ToolInputFile } from "$lib/types/Tool";
import { ReviewStatus } from "$lib/types/Review";
import { promptExamples } from "$lib/server/promptExamples";

export const load: LayoutServerLoad = async ({ locals, depends }) => {
depends(UrlDependency.ConversationList);
Expand Down Expand Up @@ -220,7 +221,6 @@ export const load: LayoutServerLoad = async ({ locals, depends }) => {
displayName: model.displayName,
description: model.description,
logoUrl: model.logoUrl,
promptExamples: model.promptExamples,
parameters: model.parameters,
preprompt: model.preprompt,
multimodal: model.multimodal,
Expand Down Expand Up @@ -276,6 +276,7 @@ export const load: LayoutServerLoad = async ({ locals, depends }) => {
isAdmin: locals.user.isAdmin ?? false,
isEarlyAccess: locals.user.isEarlyAccess ?? false,
},
promptExamples,
assistant: assistant ? JSON.parse(JSON.stringify(assistant)) : null,
enableAssistants,
enableAssistantsRAG: env.ENABLE_ASSISTANTS_RAG === "true",
Expand Down
1 change: 1 addition & 0 deletions src/routes/+page.svelte
Original file line number Diff line number Diff line change
Expand Up @@ -97,5 +97,6 @@
assistant={data.assistant}
{currentModel}
models={data.models}
promptExamples={data.promptExamples}
bind:files
/>
1 change: 1 addition & 0 deletions src/routes/conversation/[id]/+page.svelte
Original file line number Diff line number Diff line change
Expand Up @@ -405,6 +405,7 @@
{messages}
shared={data.shared}
preprompt={data.preprompt}
promptExamples={data.promptExamples}
bind:files
on:message={onMessage}
on:retry={onRetry}
Expand Down
1 change: 1 addition & 0 deletions src/routes/models/[...model]/+page.svelte
Original file line number Diff line number Diff line change
Expand Up @@ -82,5 +82,6 @@
{loading}
currentModel={findCurrentModel([...data.models, ...data.oldModels], modelId)}
models={data.models}
promptExamples={data.promptExamples}
bind:files
/>
Loading