From 8e4446d56486bacc9299db3f3f46c4a50a620bc5 Mon Sep 17 00:00:00 2001
From: Rounak Bhatia
Date: Wed, 17 Jul 2024 19:11:32 +0530
Subject: [PATCH] Make API key optional and Show activity logs of In Review
 Stories (#75)

---
 app/controllers/llm_api_key.go                |  6 +++-
 app/services/llm_api_key.go                   |  2 +-
 app/types/request/llm_api_key_request.go      |  4 +--
 .../impl/next_js_server_test_executor.go      | 11 +++++++
 .../impl/open_ai_code_generation_executor.go  |  7 ++--
 ...pen_ai_next_js_code_generation_executor.go | 32 +++++++++++++++++--
 ...en_ai_next_js_update_code_file_executor.go |  4 ++-
 .../impl/open_ai_update_code_file_executor.go |  2 +-
 .../(programmer)/design_workbench/page.tsx    |  4 +--
 gui/src/app/(programmer)/workbench/page.tsx   |  4 +--
 .../WorkBenchComponents/ActiveWorkbench.tsx   | 25 ++++++++++++++-
 server.go                                     |  1 -
 12 files changed, 85 insertions(+), 17 deletions(-)

diff --git a/app/controllers/llm_api_key.go b/app/controllers/llm_api_key.go
index d87109af..6713f86d 100644
--- a/app/controllers/llm_api_key.go
+++ b/app/controllers/llm_api_key.go
@@ -33,7 +33,11 @@ func (c *LLMAPIKeyController) CreateLLMAPIKey(context *gin.Context) {
 	}
 
 	for _, apiKey := range createLLMAPIKey.APIKeys {
-		err = c.llmAPIKeyService.CreateOrUpdateLLMAPIKey(orgId, apiKey.LLMModel, apiKey.LLMAPIKey)
+		if apiKey.LLMAPIKey == nil {
+			err = c.llmAPIKeyService.CreateOrUpdateLLMAPIKey(orgId, apiKey.LLMModel, "")
+		} else {
+			err = c.llmAPIKeyService.CreateOrUpdateLLMAPIKey(orgId, apiKey.LLMModel, *apiKey.LLMAPIKey)
+		}
 		if err != nil {
 			context.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
 			return
diff --git a/app/services/llm_api_key.go b/app/services/llm_api_key.go
index a1a1183f..8c26b1fa 100644
--- a/app/services/llm_api_key.go
+++ b/app/services/llm_api_key.go
@@ -14,7 +14,7 @@ type LLMAPIKeyService struct {
 }
 
 func (s *LLMAPIKeyService) CreateOrUpdateLLMAPIKey(organisationID uint, llmModel string, llmAPIKey string) error {
-	if llmModel == "" || llmAPIKey == "" {
+	if llmModel == "" {
 		return errors.New("missing required fields")
 	}
 	err := s.llm_api_key_repo.CreateOrUpdateLLMAPIKey(organisationID, llmModel, llmAPIKey)
diff --git a/app/types/request/llm_api_key_request.go b/app/types/request/llm_api_key_request.go
index c58ad875..4f0edd4c 100644
--- a/app/types/request/llm_api_key_request.go
+++ b/app/types/request/llm_api_key_request.go
@@ -5,6 +5,6 @@ type CreateLLMAPIKeyRequest struct {
 }
 
 type LLMAPIKey struct {
-	LLMModel  string `json:"llm_model" binding:"required"`
-	LLMAPIKey string `json:"llm_api_key" binding:"required"`
+	LLMModel  string  `json:"llm_model" binding:"required"`
+	LLMAPIKey *string `json:"llm_api_key"`
 }
diff --git a/app/workflow_executors/step_executors/impl/next_js_server_test_executor.go b/app/workflow_executors/step_executors/impl/next_js_server_test_executor.go
index 35b2bcbd..62e0e448 100644
--- a/app/workflow_executors/step_executors/impl/next_js_server_test_executor.go
+++ b/app/workflow_executors/step_executors/impl/next_js_server_test_executor.go
@@ -152,6 +152,17 @@ func (e NextJsServerStartTestExecutor) Execute(step steps.ServerStartTestStep) e
 			fmt.Printf("Error updating story status: %s\n", err.Error())
 			return err
 		}
+		//creating activity log
+		err := e.activityLogService.CreateActivityLog(
+			step.Execution.ID,
+			step.ExecutionStep.ID,
+			"INFO",
+			"Design story completed successfully!",
+		)
+		if err != nil {
+			fmt.Printf("Error creating activity log: %s\n", err.Error())
+			return err
+		}
 		fmt.Println("Story Status Updated to DONE")
 		return nil
 	}
diff --git a/app/workflow_executors/step_executors/impl/open_ai_code_generation_executor.go b/app/workflow_executors/step_executors/impl/open_ai_code_generation_executor.go
index 3e61f7b2..b707b48a 100644
--- a/app/workflow_executors/step_executors/impl/open_ai_code_generation_executor.go
+++ b/app/workflow_executors/step_executors/impl/open_ai_code_generation_executor.go
@@ -164,7 +164,7 @@ func (openAICodeGenerator OpenAICodeGenerator) Execute(step steps.GenerateCodeSt
 			step.Execution.ID,
 			step.ExecutionStep.ID,
 			"INFO",
-			fmt.Sprintf("Action required: There's an issue with your LLM API Key. Ensure your API Key is correct. Settings", settingsUrl, "blue", "underline"),
+			fmt.Sprintf("Action required: There's an issue with your LLM API Key. Ensure your API Key for %s is correct. Settings", constants.GPT_4O, settingsUrl, "blue", "underline"),
 		)
 		if err != nil {
 			fmt.Printf("Error creating activity log: %s\n", err.Error())
@@ -198,7 +198,8 @@ func (openAICodeGenerator OpenAICodeGenerator) Execute(step steps.GenerateCodeSt
 			fmt.Printf("Error commiting code: %s\n", err.Error())
 			return err
 		}
-		return errors.New("LLM API Key not found in database")
+		errorString := fmt.Sprintf("LLM API Key for model %s not found in database", constants.GPT_4O)
+		return errors.New(errorString)
 	}
 	apiKey := llmAPIKey.LLMAPIKey
 	fmt.Println("_________API_KEY_________", apiKey)
@@ -257,7 +258,7 @@ func (openAICodeGenerator *OpenAICodeGenerator) GenerateCode(apiKey string, fram
 		step.Execution.ID,
 		step.ExecutionStep.ID,
 		"INFO",
-		fmt.Sprintf("Action required: There's an issue with your LLM API Key. Ensure your API Key is correct. Settings", settingsUrl, "blue", "underline"),
+		fmt.Sprintf("Action required: There's an issue with your LLM API Key. Ensure your API Key for %s is correct. Settings", constants.GPT_4O, settingsUrl, "blue", "underline"),
 	)
 	if err != nil {
 		fmt.Printf("Error creating activity log: %s\n", err.Error())
diff --git a/app/workflow_executors/step_executors/impl/open_ai_next_js_code_generation_executor.go b/app/workflow_executors/step_executors/impl/open_ai_next_js_code_generation_executor.go
index 73720eb6..819d2dfb 100644
--- a/app/workflow_executors/step_executors/impl/open_ai_next_js_code_generation_executor.go
+++ b/app/workflow_executors/step_executors/impl/open_ai_next_js_code_generation_executor.go
@@ -9,11 +9,13 @@ import (
 	"ai-developer/app/services/s3_providers"
 	"ai-developer/app/utils"
 	"ai-developer/app/workflow_executors/step_executors/steps"
+	"errors"
 	"fmt"
 	"os"
 	"os/exec"
 	"path/filepath"
 	"strings"
+
 	"go.uber.org/zap"
 )
@@ -124,10 +126,36 @@ func (openAiCodeGenerator OpenAiNextJsCodeGenerator) Execute(step steps.Generate
 	if openAiCodeGenerator.llmAPIKeyService == nil {
 		fmt.Println("_____NULL_____")
 	}
-	llmAPIKey, err := openAiCodeGenerator.llmAPIKeyService.GetLLMAPIKeyByModelName("claude-3", organisationId)
+	llmAPIKey, err := openAiCodeGenerator.llmAPIKeyService.GetLLMAPIKeyByModelName(constants.CLAUDE_3, organisationId)
 	if err != nil {
 		fmt.Println("Error getting claude api key: ", err)
 	}
+	if llmAPIKey == nil || llmAPIKey.LLMAPIKey == "" {
+		openAiCodeGenerator.logger.Info("_____claude API Key not found_____")
+		settingsUrl := config.Get("app.url").(string) + "/settings"
+		err := openAiCodeGenerator.activityLogService.CreateActivityLog(
+			step.Execution.ID,
+			step.ExecutionStep.ID,
+			"INFO",
+			fmt.Sprintf("Action required: There's an issue with your LLM API Key. Ensure your API Key for %s is correct. Settings", constants.CLAUDE_3, settingsUrl, "blue", "underline"),
+		)
+		if err != nil {
+			fmt.Printf("Error creating activity log: %s\n", err.Error())
+			return err
+		}
+		//Update Execution Status and Story Status
+		if err := openAiCodeGenerator.storyService.UpdateStoryStatus(int(step.Story.ID), constants.InReviewLLMKeyNotFound); err != nil {
+			fmt.Printf("Error updating story status: %s\n", err.Error())
+			return err
+		}
+		//Update execution status to IN REVIEW
+		if err := openAiCodeGenerator.executionService.UpdateExecutionStatus(step.Execution.ID, constants.InReviewLLMKeyNotFound); err != nil {
+			fmt.Printf("Error updating execution step: %s\n", err.Error())
+			return err
+		}
+		errorString := fmt.Sprintf("LLM API Key for model %s not found in database", constants.CLAUDE_3)
+		return errors.New(errorString)
+	}
 	apiKey := llmAPIKey.LLMAPIKey
 
 	fmt.Println("_________API KEY_________", apiKey)
@@ -139,7 +167,7 @@ func (openAiCodeGenerator OpenAiNextJsCodeGenerator) Execute(step steps.Generate
 			step.Execution.ID,
 			step.ExecutionStep.ID,
 			"INFO",
-			fmt.Sprintf("Action required: There's an issue with your LLM API Key. Ensure your API Key is correct. Settings", settingsUrl, "blue", "underline"),
+			fmt.Sprintf("Action required: There's an issue with your LLM API Key. Ensure your API Key for %s is correct. Settings", constants.CLAUDE_3, settingsUrl, "blue", "underline"),
 		)
 		if err != nil {
 			fmt.Printf("Error creating activity log: %s\n", err.Error())
diff --git a/app/workflow_executors/step_executors/impl/open_ai_next_js_update_code_file_executor.go b/app/workflow_executors/step_executors/impl/open_ai_next_js_update_code_file_executor.go
index 124836f8..cf32d54a 100644
--- a/app/workflow_executors/step_executors/impl/open_ai_next_js_update_code_file_executor.go
+++ b/app/workflow_executors/step_executors/impl/open_ai_next_js_update_code_file_executor.go
@@ -86,7 +86,9 @@ func (e NextJsUpdateCodeFileExecutor) Execute(step steps.UpdateCodeFileStep) err
 func (e *NextJsUpdateCodeFileExecutor) UpdateReGeneratedCodeFile(response Response, step steps.UpdateCodeFileStep) error {
 	var llmResponse map[string]interface{}
 	var filePath string
-	if strings.Contains(response.FileName, "app/") {
+	if response.FileName == "package.json" {
+		filePath = config.FrontendWorkspacePath(step.Project.HashID, step.Story.HashID) + "/" + response.FileName
+	} else if strings.Contains(response.FileName, "app/") {
 		filePath = config.FrontendWorkspacePath(step.Project.HashID, step.Story.HashID) + "/" + response.FileName
 	} else {
 		filePath = config.FrontendWorkspacePath(step.Project.HashID, step.Story.HashID) + "/app/" + response.FileName
diff --git a/app/workflow_executors/step_executors/impl/open_ai_update_code_file_executor.go b/app/workflow_executors/step_executors/impl/open_ai_update_code_file_executor.go
index a33b16c7..6d1399b0 100644
--- a/app/workflow_executors/step_executors/impl/open_ai_update_code_file_executor.go
+++ b/app/workflow_executors/step_executors/impl/open_ai_update_code_file_executor.go
@@ -82,7 +82,7 @@ func (e UpdateCodeFileExecutor) Execute(step steps.UpdateCodeFileStep) error {
 		} else if strings.HasPrefix(line, "|code|") || strings.HasPrefix(line, "|terminal|") {
 			isCode = true
 		} else if isCode {
-			if strings.TrimSpace(line) == "```" || strings.TrimSpace(line) == "```plaintext" || strings.TrimSpace(line) == "```bash" || strings.TrimSpace(line) == "```terminal" || strings.TrimSpace(line) == "```python" || strings.TrimSpace(line) == "```css" || strings.TrimSpace(line) == "```html" || strings.TrimSpace(line) == "```javascript" || strings.TrimSpace(line) == "```ini" {
+			if strings.TrimSpace(line) == "```" || strings.TrimSpace(line) == "```shell" || strings.TrimSpace(line) == "```plaintext" || strings.TrimSpace(line) == "```bash" || strings.TrimSpace(line) == "```terminal" || strings.TrimSpace(line) == "```python" || strings.TrimSpace(line) == "```css" || strings.TrimSpace(line) == "```html" || strings.TrimSpace(line) == "```javascript" || strings.TrimSpace(line) == "```ini" {
 				continue
 			}
 			currentContent = append(currentContent, line)
diff --git a/gui/src/app/(programmer)/design_workbench/page.tsx b/gui/src/app/(programmer)/design_workbench/page.tsx
index 657d5c52..4350ab15 100644
--- a/gui/src/app/(programmer)/design_workbench/page.tsx
+++ b/gui/src/app/(programmer)/design_workbench/page.tsx
@@ -16,8 +16,8 @@ const DesignWorkBenchPage: React.FC = () => {
   const activeDesignWorkbenchCondition = () => {
     return (
       storiesList &&
-      (storiesList.IN_PROGRESS || storiesList.DONE) &&
-      (storiesList.IN_PROGRESS.length > 0 || storiesList.DONE.length > 0)
+      (storiesList.IN_PROGRESS || storiesList.DONE || storiesList.IN_REVIEW) &&
+      (storiesList.IN_PROGRESS.length > 0 || storiesList.DONE.length > 0 || storiesList.IN_REVIEW.length > 0)
     );
   };
diff --git a/gui/src/app/(programmer)/workbench/page.tsx b/gui/src/app/(programmer)/workbench/page.tsx
index 8340978a..80d8269e 100644
--- a/gui/src/app/(programmer)/workbench/page.tsx
+++ b/gui/src/app/(programmer)/workbench/page.tsx
@@ -18,8 +18,8 @@ export default function WorkBench() {
   const activeWorkbenchCondition = () => {
     return (
       storiesList &&
-      (storiesList.IN_PROGRESS || storiesList.DONE) &&
-      (storiesList.IN_PROGRESS.length > 0 || storiesList.DONE.length > 0)
+      (storiesList.IN_PROGRESS || storiesList.DONE || storiesList.IN_REVIEW) &&
+      (storiesList.IN_PROGRESS.length > 0 || storiesList.DONE.length > 0 || storiesList.IN_REVIEW.length > 0)
     );
   };
diff --git a/gui/src/components/WorkBenchComponents/ActiveWorkbench.tsx b/gui/src/components/WorkBenchComponents/ActiveWorkbench.tsx
index fac28d4b..71abb375 100644
--- a/gui/src/components/WorkBenchComponents/ActiveWorkbench.tsx
+++ b/gui/src/components/WorkBenchComponents/ActiveWorkbench.tsx
@@ -43,6 +43,7 @@ const ActiveWorkbench: React.FC = ({
     const completeStoriesList = [
       ...storiesList.IN_PROGRESS,
       ...storiesList.DONE,
+      ...storiesList.IN_REVIEW
     ];
 
     const story = completeStoriesList.find(
@@ -84,6 +85,10 @@ const ActiveWorkbench: React.FC = ({
     return storiesList && storiesList.DONE && storiesList.DONE.length > 0;
   };
 
+  const handleInReviewCheck = () => {
+    return storiesList && storiesList.IN_REVIEW && storiesList.IN_REVIEW.length > 0;
+  };
+
   useEffect(() => {
     let id = null;
     if (typeof window !== 'undefined') {
@@ -107,7 +112,7 @@ const ActiveWorkbench: React.FC = ({
   useEffect(() => {
     if (
       storiesList &&
-      (storiesList.IN_PROGRESS.length > 0 || storiesList.DONE.length > 0)
+      (storiesList.IN_PROGRESS.length > 0 || storiesList.DONE.length > 0 || storiesList.IN_REVIEW.length > 0)
     )
       handleSelectedStory();
   }, [storiesList, selectedStoryId]);
@@ -173,6 +178,24 @@ const ActiveWorkbench: React.FC = ({
           )}
 
+          {handleInReviewCheck() && (
+            
+              {storiesList.IN_REVIEW.map((story) => (
+                
+                    handleItemSelect(story.story_id.toString())
+                  }
+                >
+                  {story.story_name}
+                
+              ))}
+            
+          )}
+
           {handleDoneCheck() && (
             
               {storiesList.DONE.map((story) => (
diff --git a/server.go b/server.go
index 623c96ba..8bbf653b 100644
--- a/server.go
+++ b/server.go
@@ -506,7 +506,6 @@ func main() {
 	llmApiKeys := api.Group("/llm_api_key", middleware.AuthenticateJWT())
 	llmApiKeys.POST("", llm_api_key.CreateLLMAPIKey)
 	llmApiKeys.POST("/", llm_api_key.CreateLLMAPIKey)
-	llmApiKeys.GET("/:organisation_id", orgAuthMiddleware.Authorize(), llm_api_key.FetchAllLLMAPIKeyByOrganisationID)
 
 	// Wrap the socket.io server as Gin handlers for specific routes