mishig (HF staff) committed on
Commit
5efc2f0
1 Parent(s): 2fde811

[system prompts] Support default system prompts

src/lib/components/InferencePlayground/InferencePlayground.svelte CHANGED

@@ -13,7 +13,7 @@
 	} from "./inferencePlaygroundUtils";
 
 	import { onDestroy, onMount } from "svelte";
-	import GenerationConfig from "./InferencePlaygroundGenerationConfig.svelte";
+	import GenerationConfig, { defaultSystemMessage } from "./InferencePlaygroundGenerationConfig.svelte";
 	import HFTokenModal from "./InferencePlaygroundHFTokenModal.svelte";
 	import ModelSelector from "./InferencePlaygroundModelSelector.svelte";
 	import PlaygroundConversation from "./InferencePlaygroundConversation.svelte";
@@ -29,7 +29,7 @@
 	export let models: ModelEntryWithTokenizer[];
 
 	const startMessageUser: ChatCompletionInputMessage = { role: "user", content: "" };
-	const startMessageSystem: ChatCompletionInputMessage = { role: "system", content: "" };
+	let systemMessage: ChatCompletionInputMessage = { role: "system", content: "" };
 
 	const modelIdsFromQueryParam = $page.url.searchParams.get("modelId")?.split(",");
 	const modelsFromQueryParam = modelIdsFromQueryParam?.map(id => models.find(model => model.id === id));
@@ -40,7 +40,7 @@
 			model: models.find(m => FEATURED_MODELS_IDS.includes(m.id)) ?? models[0],
 			config: { ...defaultGenerationConfig },
 			messages: [{ ...startMessageUser }],
-			systemMessage: startMessageSystem,
+			systemMessage,
 			streaming: true,
 		},
 	],
@@ -52,7 +52,7 @@
 			model,
 			config: { ...defaultGenerationConfig },
 			messages: [{ ...startMessageUser }],
-			systemMessage: startMessageSystem,
+			systemMessage,
 			streaming: true,
 		};
 	}) as [Conversation] | [Conversation, Conversation];
@@ -81,6 +81,9 @@
 	const hfTokenLocalStorageKey = "hf_token";
 
 	$: systemPromptSupported = session.conversations.some(conversation => isSystemPromptSupported(conversation.model));
+	$: if (session.conversations[0].model.id) {
+		session.conversations[0].systemMessage.content = defaultSystemMessage?.[session.conversations[0].model.id] ?? "";
+	}
 	$: compareActive = session.conversations.length === 2;
 
 	function addMessage(conversationIdx: number) {
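
The new reactive statement is essentially a per-model lookup with an empty-string fallback: whenever the first conversation's model id is set, its system message is overwritten with that model's default prompt, or cleared if no default exists. A minimal standalone sketch of that behavior, with the playground's conversation and message types replaced by simplified illustrative stand-ins:

// Per-model default prompts, keyed by model id (mirrors the map exported
// from InferencePlaygroundGenerationConfig.svelte below).
const defaultSystemMessage: { [key: string]: string } = {
	"Qwen/QwQ-32B-Preview":
		"You are a helpful and harmless assistant. You are Qwen developed by Alibaba. You should think step-by-step.",
};

// Simplified stand-ins for the playground's types (illustrative only).
interface MessageLike {
	role: "system" | "user" | "assistant";
	content: string;
}
interface ConversationLike {
	model: { id: string };
	systemMessage: MessageLike;
}

// Same effect as the `$:` block above: apply the model's default system
// prompt, or reset the content to an empty string when there is none.
function applyDefaultSystemPrompt(conversation: ConversationLike): void {
	conversation.systemMessage.content = defaultSystemMessage[conversation.model.id] ?? "";
}

One consequence of wiring this as a reactive block is that switching models replaces whatever system prompt was previously set for the first conversation.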
src/lib/components/InferencePlayground/InferencePlaygroundGenerationConfig.svelte CHANGED

@@ -1,3 +1,10 @@
+<script context="module" lang="ts">
+	export const defaultSystemMessage: { [key: string]: string } = {
+		"Qwen/QwQ-32B-Preview":
+			"You are a helpful and harmless assistant. You are Qwen developed by Alibaba. You should think step-by-step.",
+	} as const;
+</script>
+
 <script lang="ts">
 	import type { Conversation } from "$lib/components/InferencePlayground/types";
 
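
Because `defaultSystemMessage` is declared in a `<script context="module">` block, it becomes a named export of the component module and can be imported alongside the component itself, which is how InferencePlayground.svelte consumes it above. A usage sketch, assuming it runs in another component compiled by the same Svelte toolchain (the model id is just an example value):

import GenerationConfig, { defaultSystemMessage } from "./InferencePlaygroundGenerationConfig.svelte";

// Look up the default prompt for a model id, falling back to an empty string
// when no default has been registered for that model.
const modelId = "Qwen/QwQ-32B-Preview"; // example id; any model id works here
const prompt = defaultSystemMessage[modelId] ?? "";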