feat(prompts): load system prompts and defaults from Docker volume; set runtime system prompt; add BACKBEAT standards

Author: anthonyrawlins
Date: 2025-09-06 15:42:41 +10:00
parent 1ccb84093e
commit 1806a4fe09
6 changed files with 901 additions and 20 deletions
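The diff below covers only the provider-side changes: a package-level defaultSystemPrompt variable, a SetDefaultSystemPrompt setter, and a fallback helper that feeds the system message into ResetData requests. The Docker-volume loading itself lives in the other changed files, which are not shown here. As a rough sketch of how the setter might be wired up at container startup (the package name, import path, env var, and mount path are assumptions, not part of this commit):

// Hypothetical startup wiring; the import path, env var, and default path are placeholders.
package main

import (
	"log"
	"os"

	ai "example.com/project/reasoning" // placeholder import path for the package changed below
)

func configureSystemPrompt() {
	// The prompt file is expected on a volume mounted into the container,
	// e.g. docker run -v ./prompts:/app/prompts (path illustrative).
	path := os.Getenv("SYSTEM_PROMPT_PATH")
	if path == "" {
		path = "/app/prompts/system.md"
	}

	data, err := os.ReadFile(path)
	if err != nil {
		// Leave the prompt unset; defaultSystemPromptOrFallback() then supplies a generic one.
		log.Printf("system prompt not loaded from %s: %v", path, err)
		return
	}
	ai.SetDefaultSystemPrompt(string(data))
}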

@@ -16,12 +16,13 @@ const (
 )
 
 var (
-	availableModels []string
-	modelWebhookURL string
-	defaultModel    string
-	ollamaEndpoint  string = "http://localhost:11434" // Default fallback
-	aiProvider      string = "resetdata"               // Default provider
-	resetdataConfig ResetDataConfig
+	availableModels     []string
+	modelWebhookURL     string
+	defaultModel        string
+	ollamaEndpoint      string = "http://localhost:11434" // Default fallback
+	aiProvider          string = "resetdata"               // Default provider
+	resetdataConfig     ResetDataConfig
+	defaultSystemPrompt string
 )
 
 // AIProvider represents the AI service provider
@@ -118,17 +119,17 @@ func generateResetDataResponse(ctx context.Context, model, prompt string) (strin
 	}
 
 	// Create the request payload in OpenAI format
-	requestPayload := OpenAIRequest{
-		Model: modelToUse,
-		Messages: []OpenAIMessage{
-			{Role: "system", Content: "You are a helpful assistant."},
-			{Role: "user", Content: prompt},
-		},
-		Temperature: 0.2,
-		TopP:        0.7,
-		MaxTokens:   1024,
-		Stream:      false,
-	}
+	requestPayload := OpenAIRequest{
+		Model: modelToUse,
+		Messages: []OpenAIMessage{
+			{Role: "system", Content: defaultSystemPromptOrFallback()},
+			{Role: "user", Content: prompt},
+		},
+		Temperature: 0.2,
+		TopP:        0.7,
+		MaxTokens:   1024,
+		Stream:      false,
+	}
 
 	payloadBytes, err := json.Marshal(requestPayload)
 	if err != nil {
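The OpenAIRequest and OpenAIMessage types used above are defined elsewhere in this file and are unchanged by this commit; a plausible shape, assuming the standard OpenAI chat-completions field names (the json tags are an assumption, not visible in this diff):

// Assumed request/message shapes; json tags follow the OpenAI chat-completions API
// and are not shown in this diff.
type OpenAIMessage struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

type OpenAIRequest struct {
	Model       string          `json:"model"`
	Messages    []OpenAIMessage `json:"messages"`
	Temperature float64         `json:"temperature"`
	TopP        float64         `json:"top_p"`
	MaxTokens   int             `json:"max_tokens"`
	Stream      bool            `json:"stream"`
}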
@@ -233,7 +234,12 @@ func SetResetDataConfig(config ResetDataConfig) {
 
 // SetOllamaEndpoint configures the Ollama API endpoint
 func SetOllamaEndpoint(endpoint string) {
-	ollamaEndpoint = endpoint
+	ollamaEndpoint = endpoint
 }
 
+// SetDefaultSystemPrompt configures the default system message used when building prompts.
+func SetDefaultSystemPrompt(systemPrompt string) {
+	defaultSystemPrompt = systemPrompt
+}
+
 // selectBestModel calls the model selection webhook to choose the best model for a prompt
@@ -291,6 +297,13 @@ func selectBestModel(availableModels []string, prompt string) string {
 
 // GenerateResponseSmart automatically selects the best model for the prompt
 func GenerateResponseSmart(ctx context.Context, prompt string) (string, error) {
-	selectedModel := selectBestModel(availableModels, prompt)
-	return GenerateResponse(ctx, selectedModel, prompt)
+	selectedModel := selectBestModel(availableModels, prompt)
+	return GenerateResponse(ctx, selectedModel, prompt)
 }
+
+func defaultSystemPromptOrFallback() string {
+	if strings.TrimSpace(defaultSystemPrompt) != "" {
+		return defaultSystemPrompt
+	}
+	return "You are a helpful assistant."
+}
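Taken together: SetDefaultSystemPrompt stores whatever prompt text the loader read from the mounted volume, and defaultSystemPromptOrFallback() injects it into every ResetData request, reverting to the previously hard-coded "You are a helpful assistant." whenever the stored value is empty or whitespace-only. A minimal in-package usage sketch (the prompt and question text are illustrative):

// In-package sketch; prompt text is illustrative, not taken from the commit.
func exampleSmartCall(ctx context.Context) (string, error) {
	SetDefaultSystemPrompt("You are a concise infrastructure assistant.")
	// With a non-blank prompt set, generateResetDataResponse sends it as the system
	// message; if it were empty or whitespace-only, the request would fall back to
	// "You are a helpful assistant." instead.
	return GenerateResponseSmart(ctx, "Summarise the deployment steps.")
}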