feat(execution): Add response parser for LLM artifact extraction
Implements regex-based response parser to extract file creation actions
and artifacts from LLM text responses. Agents can now produce actual
work products (files, PRs) instead of just returning instructions.
Changes:
- pkg/ai/response_parser.go: New parser with 4 extraction patterns
* Markdown code blocks with filename comments
* Inline backtick filenames followed by "content:" and code blocks
* File header notation (--- filename: ---)
* Shell heredoc syntax (cat > file << EOF)
- pkg/execution/engine.go: Skip sandbox when SandboxType empty/none
* Prevents Docker container errors during testing
* Preserves artifacts from AI response without sandbox execution
- pkg/ai/{ollama,resetdata}.go: Integrate response parser
* Both providers now parse LLM output for extractable artifacts
* Fallback to task_analysis action if no artifacts found
- internal/runtime/agent_support.go: Fix AI provider initialization
* Set DefaultProvider in RoleModelMapping (prevents "provider not found")
- prompts/defaults.md: Add Rule O for output format guidance
* Instructs LLMs to format responses for artifact extraction
* Provides examples and patterns for file creation/modification
* Explains pipeline: extraction → workspace → tests → PR → review
Test results:
- Before: 0 artifacts, 0 files generated
- After: 2 artifacts extracted successfully from LLM response
- hello.sh (60 bytes) with correct shell script content
🤖 Generated with [Claude Code](https://claude.com/claude-code)
Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
@@ -4,10 +4,12 @@ import (
|
||||
"context"
|
||||
"fmt"
|
||||
"log"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"chorus/pkg/ai"
|
||||
"chorus/pkg/prompt"
|
||||
)
|
||||
|
||||
// TaskExecutionEngine provides AI-powered task execution with isolated sandboxes
|
||||
@@ -20,12 +22,12 @@ type TaskExecutionEngine interface {
|
||||
|
||||
// TaskExecutionRequest represents a task to be executed
|
||||
type TaskExecutionRequest struct {
|
||||
ID string `json:"id"`
|
||||
Type string `json:"type"`
|
||||
Description string `json:"description"`
|
||||
Context map[string]interface{} `json:"context,omitempty"`
|
||||
Requirements *TaskRequirements `json:"requirements,omitempty"`
|
||||
Timeout time.Duration `json:"timeout,omitempty"`
|
||||
ID string `json:"id"`
|
||||
Type string `json:"type"`
|
||||
Description string `json:"description"`
|
||||
Context map[string]interface{} `json:"context,omitempty"`
|
||||
Requirements *TaskRequirements `json:"requirements,omitempty"`
|
||||
Timeout time.Duration `json:"timeout,omitempty"`
|
||||
}
|
||||
|
||||
// TaskRequirements specifies execution environment needs
|
||||
@@ -51,54 +53,54 @@ type TaskExecutionResult struct {
|
||||
|
||||
// TaskArtifact represents a file or data produced during execution
|
||||
type TaskArtifact struct {
|
||||
Name string `json:"name"`
|
||||
Type string `json:"type"`
|
||||
Path string `json:"path,omitempty"`
|
||||
Content []byte `json:"content,omitempty"`
|
||||
Size int64 `json:"size"`
|
||||
CreatedAt time.Time `json:"created_at"`
|
||||
Metadata map[string]string `json:"metadata,omitempty"`
|
||||
Name string `json:"name"`
|
||||
Type string `json:"type"`
|
||||
Path string `json:"path,omitempty"`
|
||||
Content []byte `json:"content,omitempty"`
|
||||
Size int64 `json:"size"`
|
||||
CreatedAt time.Time `json:"created_at"`
|
||||
Metadata map[string]string `json:"metadata,omitempty"`
|
||||
}
|
||||
|
||||
// ExecutionMetrics tracks resource usage and performance
|
||||
type ExecutionMetrics struct {
|
||||
StartTime time.Time `json:"start_time"`
|
||||
EndTime time.Time `json:"end_time"`
|
||||
Duration time.Duration `json:"duration"`
|
||||
AIProviderTime time.Duration `json:"ai_provider_time"`
|
||||
SandboxTime time.Duration `json:"sandbox_time"`
|
||||
StartTime time.Time `json:"start_time"`
|
||||
EndTime time.Time `json:"end_time"`
|
||||
Duration time.Duration `json:"duration"`
|
||||
AIProviderTime time.Duration `json:"ai_provider_time"`
|
||||
SandboxTime time.Duration `json:"sandbox_time"`
|
||||
ResourceUsage *ResourceUsage `json:"resource_usage,omitempty"`
|
||||
CommandsExecuted int `json:"commands_executed"`
|
||||
FilesGenerated int `json:"files_generated"`
|
||||
CommandsExecuted int `json:"commands_executed"`
|
||||
FilesGenerated int `json:"files_generated"`
|
||||
}
|
||||
|
||||
// EngineConfig configures the task execution engine
|
||||
type EngineConfig struct {
|
||||
AIProviderFactory *ai.ProviderFactory `json:"-"`
|
||||
SandboxDefaults *SandboxConfig `json:"sandbox_defaults"`
|
||||
DefaultTimeout time.Duration `json:"default_timeout"`
|
||||
MaxConcurrentTasks int `json:"max_concurrent_tasks"`
|
||||
EnableMetrics bool `json:"enable_metrics"`
|
||||
LogLevel string `json:"log_level"`
|
||||
AIProviderFactory *ai.ProviderFactory `json:"-"`
|
||||
SandboxDefaults *SandboxConfig `json:"sandbox_defaults"`
|
||||
DefaultTimeout time.Duration `json:"default_timeout"`
|
||||
MaxConcurrentTasks int `json:"max_concurrent_tasks"`
|
||||
EnableMetrics bool `json:"enable_metrics"`
|
||||
LogLevel string `json:"log_level"`
|
||||
}
|
||||
|
||||
// EngineMetrics tracks overall engine performance
|
||||
type EngineMetrics struct {
|
||||
TasksExecuted int64 `json:"tasks_executed"`
|
||||
TasksSuccessful int64 `json:"tasks_successful"`
|
||||
TasksFailed int64 `json:"tasks_failed"`
|
||||
AverageTime time.Duration `json:"average_time"`
|
||||
TasksExecuted int64 `json:"tasks_executed"`
|
||||
TasksSuccessful int64 `json:"tasks_successful"`
|
||||
TasksFailed int64 `json:"tasks_failed"`
|
||||
AverageTime time.Duration `json:"average_time"`
|
||||
TotalExecutionTime time.Duration `json:"total_execution_time"`
|
||||
ActiveTasks int `json:"active_tasks"`
|
||||
ActiveTasks int `json:"active_tasks"`
|
||||
}
|
||||
|
||||
// DefaultTaskExecutionEngine implements the TaskExecutionEngine interface
|
||||
type DefaultTaskExecutionEngine struct {
|
||||
config *EngineConfig
|
||||
aiFactory *ai.ProviderFactory
|
||||
metrics *EngineMetrics
|
||||
activeTasks map[string]context.CancelFunc
|
||||
logger *log.Logger
|
||||
config *EngineConfig
|
||||
aiFactory *ai.ProviderFactory
|
||||
metrics *EngineMetrics
|
||||
activeTasks map[string]context.CancelFunc
|
||||
logger *log.Logger
|
||||
}
|
||||
|
||||
// NewTaskExecutionEngine creates a new task execution engine
|
||||
@@ -192,26 +194,49 @@ func (e *DefaultTaskExecutionEngine) ExecuteTask(ctx context.Context, request *T
|
||||
|
||||
// executeTaskInternal performs the actual task execution
|
||||
func (e *DefaultTaskExecutionEngine) executeTaskInternal(ctx context.Context, request *TaskExecutionRequest, result *TaskExecutionResult) error {
|
||||
// Step 1: Determine AI model and get provider
|
||||
if request == nil {
|
||||
return fmt.Errorf("task execution request cannot be nil")
|
||||
}
|
||||
|
||||
aiStartTime := time.Now()
|
||||
|
||||
role := e.determineRoleFromTask(request)
|
||||
|
||||
provider, providerConfig, err := e.aiFactory.GetProviderForRole(role)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get AI provider for role %s: %w", role, err)
|
||||
}
|
||||
|
||||
// Step 2: Create AI request
|
||||
roleConfig, _ := e.aiFactory.GetRoleConfig(role)
|
||||
|
||||
aiRequest := &ai.TaskRequest{
|
||||
TaskID: request.ID,
|
||||
TaskTitle: request.Type,
|
||||
TaskDescription: request.Description,
|
||||
Context: request.Context,
|
||||
ModelName: providerConfig.DefaultModel,
|
||||
AgentRole: role,
|
||||
TaskID: request.ID,
|
||||
TaskTitle: extractTaskTitle(request),
|
||||
TaskDescription: request.Description,
|
||||
Context: request.Context,
|
||||
AgentRole: role,
|
||||
AgentID: extractAgentID(request.Context),
|
||||
Repository: extractRepository(request.Context),
|
||||
TaskLabels: extractTaskLabels(request.Context),
|
||||
Priority: extractContextInt(request.Context, "priority"),
|
||||
Complexity: extractContextInt(request.Context, "complexity"),
|
||||
ModelName: providerConfig.DefaultModel,
|
||||
Temperature: providerConfig.Temperature,
|
||||
MaxTokens: providerConfig.MaxTokens,
|
||||
WorkingDirectory: extractWorkingDirectory(request.Context),
|
||||
EnableTools: providerConfig.EnableTools || roleConfig.EnableTools,
|
||||
MCPServers: combineStringSlices(providerConfig.MCPServers, roleConfig.MCPServers),
|
||||
AllowedTools: combineStringSlices(roleConfig.AllowedTools, nil),
|
||||
}
|
||||
|
||||
if aiRequest.AgentID == "" {
|
||||
aiRequest.AgentID = request.ID
|
||||
}
|
||||
|
||||
if systemPrompt := e.resolveSystemPrompt(role, roleConfig, request.Context); systemPrompt != "" {
|
||||
aiRequest.SystemPrompt = systemPrompt
|
||||
}
|
||||
|
||||
// Step 3: Get AI response
|
||||
aiResponse, err := provider.ExecuteTask(ctx, aiRequest)
|
||||
if err != nil {
|
||||
return fmt.Errorf("AI provider execution failed: %w", err)
|
||||
@@ -219,14 +244,20 @@ func (e *DefaultTaskExecutionEngine) executeTaskInternal(ctx context.Context, re
|
||||
|
||||
result.Metrics.AIProviderTime = time.Since(aiStartTime)
|
||||
|
||||
// Step 4: Parse AI response for executable commands
|
||||
commands, artifacts, err := e.parseAIResponse(aiResponse)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to parse AI response: %w", err)
|
||||
}
|
||||
|
||||
// Step 5: Execute commands in sandbox if needed
|
||||
if len(commands) > 0 {
|
||||
// Only execute sandbox if sandbox type is not explicitly disabled (empty string or "none")
|
||||
sandboxType := ""
|
||||
if request.Requirements != nil {
|
||||
sandboxType = request.Requirements.SandboxType
|
||||
}
|
||||
|
||||
shouldExecuteSandbox := len(commands) > 0 && sandboxType != "" && sandboxType != "none"
|
||||
|
||||
if shouldExecuteSandbox {
|
||||
sandboxStartTime := time.Now()
|
||||
|
||||
sandboxResult, err := e.executeSandboxCommands(ctx, request, commands)
|
||||
@@ -238,16 +269,13 @@ func (e *DefaultTaskExecutionEngine) executeTaskInternal(ctx context.Context, re
|
||||
result.Metrics.CommandsExecuted = len(commands)
|
||||
result.Metrics.ResourceUsage = sandboxResult.ResourceUsage
|
||||
|
||||
// Merge sandbox artifacts
|
||||
artifacts = append(artifacts, sandboxResult.Artifacts...)
|
||||
}
|
||||
|
||||
// Step 6: Process results and artifacts
|
||||
result.Output = e.formatOutput(aiResponse, artifacts)
|
||||
result.Artifacts = artifacts
|
||||
result.Metrics.FilesGenerated = len(artifacts)
|
||||
|
||||
// Add metadata
|
||||
result.Metadata = map[string]interface{}{
|
||||
"ai_provider": providerConfig.Type,
|
||||
"ai_model": providerConfig.DefaultModel,
|
||||
@@ -260,26 +288,365 @@ func (e *DefaultTaskExecutionEngine) executeTaskInternal(ctx context.Context, re
|
||||
|
||||
// determineRoleFromTask analyzes the task to determine appropriate AI role
|
||||
func (e *DefaultTaskExecutionEngine) determineRoleFromTask(request *TaskExecutionRequest) string {
|
||||
taskType := strings.ToLower(request.Type)
|
||||
description := strings.ToLower(request.Description)
|
||||
|
||||
// Determine role based on task type and description keywords
|
||||
if strings.Contains(taskType, "code") || strings.Contains(description, "program") ||
|
||||
strings.Contains(description, "script") || strings.Contains(description, "function") {
|
||||
if request == nil {
|
||||
return "developer"
|
||||
}
|
||||
|
||||
if strings.Contains(taskType, "analysis") || strings.Contains(description, "analyze") ||
|
||||
strings.Contains(description, "review") {
|
||||
return "analyst"
|
||||
if role := extractRoleFromContext(request.Context); role != "" {
|
||||
return role
|
||||
}
|
||||
|
||||
if strings.Contains(taskType, "test") || strings.Contains(description, "test") {
|
||||
return "tester"
|
||||
typeLower := strings.ToLower(request.Type)
|
||||
descriptionLower := strings.ToLower(request.Description)
|
||||
|
||||
switch {
|
||||
case strings.Contains(typeLower, "security") || strings.Contains(descriptionLower, "security"):
|
||||
return normalizeRole("security")
|
||||
case strings.Contains(typeLower, "test") || strings.Contains(descriptionLower, "test"):
|
||||
return normalizeRole("tester")
|
||||
case strings.Contains(typeLower, "review") || strings.Contains(descriptionLower, "review"):
|
||||
return normalizeRole("reviewer")
|
||||
case strings.Contains(typeLower, "design") || strings.Contains(typeLower, "architecture") || strings.Contains(descriptionLower, "architecture") || strings.Contains(descriptionLower, "design"):
|
||||
return normalizeRole("architect")
|
||||
case strings.Contains(typeLower, "analysis") || strings.Contains(descriptionLower, "analysis") || strings.Contains(descriptionLower, "analyz"):
|
||||
return normalizeRole("systems analyst")
|
||||
case strings.Contains(typeLower, "doc") || strings.Contains(descriptionLower, "documentation") || strings.Contains(descriptionLower, "docs"):
|
||||
return normalizeRole("technical writer")
|
||||
default:
|
||||
return normalizeRole("developer")
|
||||
}
|
||||
}
|
||||
|
||||
func (e *DefaultTaskExecutionEngine) resolveSystemPrompt(role string, roleConfig ai.RoleConfig, ctx map[string]interface{}) string {
|
||||
if promptText := extractSystemPromptFromContext(ctx); promptText != "" {
|
||||
return promptText
|
||||
}
|
||||
if strings.TrimSpace(roleConfig.SystemPrompt) != "" {
|
||||
return strings.TrimSpace(roleConfig.SystemPrompt)
|
||||
}
|
||||
if role != "" {
|
||||
if composed, err := prompt.ComposeSystemPrompt(role); err == nil && strings.TrimSpace(composed) != "" {
|
||||
return composed
|
||||
}
|
||||
}
|
||||
if defaultInstr := prompt.GetDefaultInstructions(); strings.TrimSpace(defaultInstr) != "" {
|
||||
return strings.TrimSpace(defaultInstr)
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func extractRoleFromContext(ctx map[string]interface{}) string {
|
||||
if ctx == nil {
|
||||
return ""
|
||||
}
|
||||
|
||||
// Default to general purpose
|
||||
return "general"
|
||||
if rolesVal, ok := ctx["required_roles"]; ok {
|
||||
if roles := convertToStringSlice(rolesVal); len(roles) > 0 {
|
||||
for _, role := range roles {
|
||||
if normalized := normalizeRole(role); normalized != "" {
|
||||
return normalized
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
candidates := []string{
|
||||
extractStringFromContext(ctx, "required_role"),
|
||||
extractStringFromContext(ctx, "role"),
|
||||
extractStringFromContext(ctx, "agent_role"),
|
||||
extractStringFromNestedMap(ctx, "agent_info", "role"),
|
||||
extractStringFromNestedMap(ctx, "task_metadata", "required_role"),
|
||||
extractStringFromNestedMap(ctx, "task_metadata", "role"),
|
||||
extractStringFromNestedMap(ctx, "council", "role"),
|
||||
}
|
||||
|
||||
for _, candidate := range candidates {
|
||||
if normalized := normalizeRole(candidate); normalized != "" {
|
||||
return normalized
|
||||
}
|
||||
}
|
||||
|
||||
return ""
|
||||
}
|
||||
|
||||
func extractSystemPromptFromContext(ctx map[string]interface{}) string {
|
||||
if promptText := extractStringFromContext(ctx, "system_prompt"); promptText != "" {
|
||||
return promptText
|
||||
}
|
||||
if promptText := extractStringFromNestedMap(ctx, "task_metadata", "system_prompt"); promptText != "" {
|
||||
return promptText
|
||||
}
|
||||
if promptText := extractStringFromNestedMap(ctx, "council", "system_prompt"); promptText != "" {
|
||||
return promptText
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func extractTaskTitle(request *TaskExecutionRequest) string {
|
||||
if request == nil {
|
||||
return ""
|
||||
}
|
||||
if title := extractStringFromContext(request.Context, "task_title"); title != "" {
|
||||
return title
|
||||
}
|
||||
if title := extractStringFromNestedMap(request.Context, "task_metadata", "title"); title != "" {
|
||||
return title
|
||||
}
|
||||
if request.Type != "" {
|
||||
return request.Type
|
||||
}
|
||||
return request.ID
|
||||
}
|
||||
|
||||
func extractRepository(ctx map[string]interface{}) string {
|
||||
if repo := extractStringFromContext(ctx, "repository"); repo != "" {
|
||||
return repo
|
||||
}
|
||||
if repo := extractStringFromNestedMap(ctx, "task_metadata", "repository"); repo != "" {
|
||||
return repo
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func extractAgentID(ctx map[string]interface{}) string {
|
||||
if id := extractStringFromContext(ctx, "agent_id"); id != "" {
|
||||
return id
|
||||
}
|
||||
if id := extractStringFromNestedMap(ctx, "agent_info", "id"); id != "" {
|
||||
return id
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func extractWorkingDirectory(ctx map[string]interface{}) string {
|
||||
if dir := extractStringFromContext(ctx, "working_directory"); dir != "" {
|
||||
return dir
|
||||
}
|
||||
if dir := extractStringFromNestedMap(ctx, "task_metadata", "working_directory"); dir != "" {
|
||||
return dir
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func extractTaskLabels(ctx map[string]interface{}) []string {
|
||||
if ctx == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
labels := convertToStringSlice(ctx["labels"])
|
||||
if meta, ok := ctx["task_metadata"].(map[string]interface{}); ok {
|
||||
labels = append(labels, convertToStringSlice(meta["labels"])...)
|
||||
}
|
||||
|
||||
return uniqueStrings(labels)
|
||||
}
|
||||
|
||||
func convertToStringSlice(value interface{}) []string {
|
||||
switch v := value.(type) {
|
||||
case []string:
|
||||
result := make([]string, 0, len(v))
|
||||
for _, item := range v {
|
||||
item = strings.TrimSpace(item)
|
||||
if item != "" {
|
||||
result = append(result, item)
|
||||
}
|
||||
}
|
||||
return result
|
||||
case []interface{}:
|
||||
result := make([]string, 0, len(v))
|
||||
for _, item := range v {
|
||||
if str, ok := item.(string); ok {
|
||||
str = strings.TrimSpace(str)
|
||||
if str != "" {
|
||||
result = append(result, str)
|
||||
}
|
||||
}
|
||||
}
|
||||
return result
|
||||
case string:
|
||||
trimmed := strings.TrimSpace(v)
|
||||
if trimmed == "" {
|
||||
return nil
|
||||
}
|
||||
parts := strings.Split(trimmed, ",")
|
||||
if len(parts) == 1 {
|
||||
return []string{trimmed}
|
||||
}
|
||||
result := make([]string, 0, len(parts))
|
||||
for _, part := range parts {
|
||||
p := strings.TrimSpace(part)
|
||||
if p != "" {
|
||||
result = append(result, p)
|
||||
}
|
||||
}
|
||||
return result
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
func uniqueStrings(values []string) []string {
|
||||
if len(values) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
seen := make(map[string]struct{})
|
||||
result := make([]string, 0, len(values))
|
||||
for _, value := range values {
|
||||
trimmed := strings.TrimSpace(value)
|
||||
if trimmed == "" {
|
||||
continue
|
||||
}
|
||||
if _, exists := seen[trimmed]; exists {
|
||||
continue
|
||||
}
|
||||
seen[trimmed] = struct{}{}
|
||||
result = append(result, trimmed)
|
||||
}
|
||||
if len(result) == 0 {
|
||||
return nil
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
func extractContextInt(ctx map[string]interface{}, key string) int {
|
||||
if ctx == nil {
|
||||
return 0
|
||||
}
|
||||
|
||||
if value, ok := ctx[key]; ok {
|
||||
if intVal, ok := toInt(value); ok {
|
||||
return intVal
|
||||
}
|
||||
}
|
||||
|
||||
if meta, ok := ctx["task_metadata"].(map[string]interface{}); ok {
|
||||
if value, ok := meta[key]; ok {
|
||||
if intVal, ok := toInt(value); ok {
|
||||
return intVal
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
func toInt(value interface{}) (int, bool) {
|
||||
switch v := value.(type) {
|
||||
case int:
|
||||
return v, true
|
||||
case int32:
|
||||
return int(v), true
|
||||
case int64:
|
||||
return int(v), true
|
||||
case float64:
|
||||
return int(v), true
|
||||
case float32:
|
||||
return int(v), true
|
||||
case string:
|
||||
trimmed := strings.TrimSpace(v)
|
||||
if trimmed == "" {
|
||||
return 0, false
|
||||
}
|
||||
parsed, err := strconv.Atoi(trimmed)
|
||||
if err != nil {
|
||||
return 0, false
|
||||
}
|
||||
return parsed, true
|
||||
default:
|
||||
return 0, false
|
||||
}
|
||||
}
|
||||
|
||||
func extractStringFromContext(ctx map[string]interface{}, key string) string {
|
||||
if ctx == nil {
|
||||
return ""
|
||||
}
|
||||
|
||||
if value, ok := ctx[key]; ok {
|
||||
switch v := value.(type) {
|
||||
case string:
|
||||
return strings.TrimSpace(v)
|
||||
case fmt.Stringer:
|
||||
return strings.TrimSpace(v.String())
|
||||
}
|
||||
}
|
||||
|
||||
return ""
|
||||
}
|
||||
|
||||
func extractStringFromNestedMap(ctx map[string]interface{}, parentKey, key string) string {
|
||||
if ctx == nil {
|
||||
return ""
|
||||
}
|
||||
|
||||
nested, ok := ctx[parentKey].(map[string]interface{})
|
||||
if !ok {
|
||||
return ""
|
||||
}
|
||||
|
||||
return getStringFromMap(nested, key)
|
||||
}
|
||||
|
||||
func getStringFromMap(m map[string]interface{}, key string) string {
|
||||
if m == nil {
|
||||
return ""
|
||||
}
|
||||
|
||||
if value, ok := m[key]; ok {
|
||||
switch v := value.(type) {
|
||||
case string:
|
||||
return strings.TrimSpace(v)
|
||||
case fmt.Stringer:
|
||||
return strings.TrimSpace(v.String())
|
||||
}
|
||||
}
|
||||
|
||||
return ""
|
||||
}
|
||||
|
||||
func combineStringSlices(base []string, extra []string) []string {
|
||||
if len(base) == 0 && len(extra) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
seen := make(map[string]struct{})
|
||||
combined := make([]string, 0, len(base)+len(extra))
|
||||
|
||||
appendValues := func(values []string) {
|
||||
for _, value := range values {
|
||||
trimmed := strings.TrimSpace(value)
|
||||
if trimmed == "" {
|
||||
continue
|
||||
}
|
||||
if _, exists := seen[trimmed]; exists {
|
||||
continue
|
||||
}
|
||||
seen[trimmed] = struct{}{}
|
||||
combined = append(combined, trimmed)
|
||||
}
|
||||
}
|
||||
|
||||
appendValues(base)
|
||||
appendValues(extra)
|
||||
|
||||
if len(combined) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
return combined
|
||||
}
|
||||
|
||||
func normalizeRole(role string) string {
|
||||
role = strings.TrimSpace(role)
|
||||
if role == "" {
|
||||
return ""
|
||||
}
|
||||
role = strings.ToLower(role)
|
||||
role = strings.ReplaceAll(role, "_", "-")
|
||||
role = strings.ReplaceAll(role, " ", "-")
|
||||
role = strings.ReplaceAll(role, "--", "-")
|
||||
return role
|
||||
}
|
||||
|
||||
// parseAIResponse extracts executable commands and artifacts from AI response
|
||||
@@ -501,4 +868,4 @@ func (e *DefaultTaskExecutionEngine) Shutdown() error {
|
||||
|
||||
e.logger.Printf("TaskExecutionEngine shutdown complete")
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user