From 8ede94c14b1e9914a17d53e780abb33fe8f53fbf Mon Sep 17 00:00:00 2001
From: Aseem Saxena
Date: Fri, 4 Apr 2025 15:00:41 -0700
Subject: [PATCH 1/3] wip

---
 codeflash/optimization/function_optimizer.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/codeflash/optimization/function_optimizer.py b/codeflash/optimization/function_optimizer.py
index 93def83c0..d92164d37 100644
--- a/codeflash/optimization/function_optimizer.py
+++ b/codeflash/optimization/function_optimizer.py
@@ -261,6 +261,7 @@ def optimize_function(self) -> Result[BestOptimization, str]:
                     best_optimization.candidate.explanation, title="Best Candidate Explanation", border_style="blue"
                 )
             )
+            get_new_explanation = "abc"
             explanation = Explanation(
                 raw_explanation_message=best_optimization.candidate.explanation,
                 winning_behavioral_test_results=best_optimization.winning_behavioral_test_results,

From 522e47410584f6e42d66afbb81c13fe83636ab0e Mon Sep 17 00:00:00 2001
From: Aseem Saxena
Date: Fri, 4 Apr 2025 18:10:13 -0700
Subject: [PATCH 2/3] quick todos, modify prompts and function arguments

---
 codeflash/api/aiservice.py                   | 59 ++++++++++++++++++++
 codeflash/optimization/function_optimizer.py |  9 ++-
 tests/test_explain_api.py                    | 16 ++++++
 3 files changed, 82 insertions(+), 2 deletions(-)
 create mode 100644 tests/test_explain_api.py

diff --git a/codeflash/api/aiservice.py b/codeflash/api/aiservice.py
index fddc5c18a..f3ecae218 100644
--- a/codeflash/api/aiservice.py
+++ b/codeflash/api/aiservice.py
@@ -204,5 +204,64 @@ def optimize_python_code_line_profiler(
         console.rule()
         return []
 
+    def get_new_explanation(
+        self,
+        source_code: str,
+        dependency_code: str,
+        trace_id: str,
+        num_candidates: int = 10,
+        experiment_metadata: ExperimentMetadata | None = None,
+        existing_explanation: str = "",
+    ) -> str:
+        """Generate a new explanation for an optimized code candidate by making a request to the AI service /explain endpoint.
+
+        Parameters
+        ----------
+        - source_code (str): The python code to optimize.
+        - dependency_code (str): The dependency code used as read-only context for the optimization
+        - trace_id (str): Trace id of optimization run
+        - num_candidates (int): Number of optimization variants to generate. Default is 10.
+        - experiment_metadata (Optional[ExperimentalMetadata, None]): Any available experiment metadata for this optimization
+        - existing_explanation (str): Existing explanation from AIservice call
+
+        Returns
+        -------
+        - str: The new explanation, or an empty string if the request failed.
+
+        """
+        payload = {
+            "source_code": source_code,
+            "dependency_code": dependency_code,
+            "num_variants": num_candidates,
+            "trace_id": trace_id,
+            "python_version": platform.python_version(),
+            "experiment_metadata": experiment_metadata,
+            "codeflash_version": codeflash_version,
+            "existing_explanation": existing_explanation,
+        }
+
+        logger.info("Generating new explanation…")
+        console.rule()
+        try:
+            response = self.make_ai_service_request("/explain", payload=payload, timeout=600)
+        except requests.exceptions.RequestException as e:
+            logger.exception(f"Error generating explanation: {e}")
+            ph("cli-optimize-error-caught", {"error": str(e)})
+            return ""
+
+        if response.status_code == 200:
+            explanation = response.json()["explanation"]
+            logger.info(f"New Explanation: {explanation}")
+            console.rule()
+            return explanation
+        try:
+            error = response.json()["error"]
+        except Exception:
+            error = response.text
+        logger.error(f"Error generating explanation: {response.status_code} - {error}")
+        ph("cli-optimize-error-response", {"response_status_code": response.status_code, "error": error})
+        console.rule()
+        return ""
+
     def log_results(
         self,
diff --git a/codeflash/optimization/function_optimizer.py b/codeflash/optimization/function_optimizer.py
index d92164d37..10c0eaafe 100644
--- a/codeflash/optimization/function_optimizer.py
+++ b/codeflash/optimization/function_optimizer.py
@@ -261,9 +261,14 @@ def optimize_function(self) -> Result[BestOptimization, str]:
                     best_optimization.candidate.explanation, title="Best Candidate Explanation", border_style="blue"
                 )
             )
-            get_new_explanation = "abc"
+            #could possibly have it in the best optimization dataclass
+            new_explanation = self.aiservice_client.get_new_explanation(source_code=code_context.read_writable_code,
+                dependency_code=code_context.read_only_context_code,
+                trace_id=self.function_trace_id,
+                num_candidates=1,
+                experiment_metadata=None, existing_explanation=best_optimization.candidate.explanation)
             explanation = Explanation(
-                raw_explanation_message=best_optimization.candidate.explanation,
+                raw_explanation_message=new_explanation if new_explanation!="" else best_optimization.candidate.explanation,
                 winning_behavioral_test_results=best_optimization.winning_behavioral_test_results,
                 winning_benchmarking_test_results=best_optimization.winning_benchmarking_test_results,
                 original_runtime_ns=original_code_baseline.runtime,
diff --git a/tests/test_explain_api.py b/tests/test_explain_api.py
new file mode 100644
index 000000000..bfc161237
--- /dev/null
+++ b/tests/test_explain_api.py
@@ -0,0 +1,16 @@
+from codeflash.api.aiservice import AiServiceClient
+from codeflash.models.ExperimentMetadata import ExperimentMetadata
+def test_explain_api():
+    aiservice = AiServiceClient()
+    source_code: str = "a"
+    dependency_code: str = "b"
+    trace_id: str = "d5822364-7617-4389-a4fc-64602a00b714"
+    num_candidates: int = 1
+    experiment_metadata: ExperimentMetadata | None = None
+    existing_explanation: str = "some explanation"
+    new_explanation = aiservice.get_new_explanation(source_code=source_code,
+        dependency_code=dependency_code,
+        trace_id=trace_id,
+        num_candidates=num_candidates,
+        experiment_metadata=experiment_metadata, existing_explanation=existing_explanation)
+    assert len(new_explanation) > 0
\ No newline at end of file

From b83bd97eb088ffe8be58691fc2dbd2610a749cd4 Mon Sep 17 00:00:00 2001
From: Aseem Saxena
Date: Mon, 7 Apr 2025 16:27:22 -0700
Subject: [PATCH 3/3] relevant context for new explanations

---
 codeflash/api/aiservice.py                   | 18 ++++++------------
 codeflash/optimization/function_optimizer.py | 30 +++++++++++++-------------
 tests/test_explain_api.py                    | 28 ++++++++++++++--------
 3 files changed, 38 insertions(+), 38 deletions(-)

diff --git a/codeflash/api/aiservice.py b/codeflash/api/aiservice.py
index f3ecae218..b22fbc361 100644
--- a/codeflash/api/aiservice.py
+++ b/codeflash/api/aiservice.py
@@ -207,19 +207,17 @@ def optimize_python_code_line_profiler(
     def get_new_explanation(
         self,
         source_code: str,
+        optimized_code: str,
         dependency_code: str,
         trace_id: str,
-        num_candidates: int = 10,
-        experiment_metadata: ExperimentMetadata | None = None,
-        existing_explanation: str = "",
+        existing_explanation: str,
     ) -> str:
         """Generate a new explanation for an optimized code candidate by making a request to the AI service /explain endpoint.
 
         Parameters
         ----------
         - source_code (str): The python code to optimize.
+        - optimized_code (str): The optimized python code that the new explanation should describe.
         - dependency_code (str): The dependency code used as read-only context for the optimization
         - trace_id (str): Trace id of optimization run
-        - num_candidates (int): Number of optimization variants to generate. Default is 10.
-        - experiment_metadata (Optional[ExperimentalMetadata, None]): Any available experiment metadata for this optimization
         - existing_explanation (str): Existing explanation from AIservice call
@@ -230,16 +228,12 @@ def get_new_explanation(
 
         """
         payload = {
-            "source_code": source_code,
-            "dependency_code": dependency_code,
-            "num_variants": num_candidates,
             "trace_id": trace_id,
-            "python_version": platform.python_version(),
-            "experiment_metadata": experiment_metadata,
-            "codeflash_version": codeflash_version,
+            "source_code": source_code,
+            "optimized_code": optimized_code,
             "existing_explanation": existing_explanation,
+            "dependency_code": dependency_code,
         }
-
         logger.info("Generating new explanation…")
         console.rule()
         try:
diff --git a/codeflash/optimization/function_optimizer.py b/codeflash/optimization/function_optimizer.py
index 10c0eaafe..faf0d762a 100644
--- a/codeflash/optimization/function_optimizer.py
+++ b/codeflash/optimization/function_optimizer.py
@@ -8,6 +8,7 @@
 import time
 import uuid
 from collections import defaultdict, deque
+from dataclasses import replace
 from pathlib import Path
 from typing import TYPE_CHECKING
 
@@ -254,21 +255,14 @@ def optimize_function(self) -> Result[BestOptimization, str]:
         )
 
         if best_optimization:
-            logger.info("Best candidate:")
-            code_print(best_optimization.candidate.source_code)
-            console.print(
-                Panel(
-                    best_optimization.candidate.explanation, title="Best Candidate Explanation", border_style="blue"
-                )
-            )
-            #could possibly have it in the best optimization dataclass
-            new_explanation = self.aiservice_client.get_new_explanation(source_code=code_context.read_writable_code,
-                dependency_code=code_context.read_only_context_code,
-                trace_id=self.function_trace_id,
-                num_candidates=1,
-                experiment_metadata=None, existing_explanation=best_optimization.candidate.explanation)
+            new_explanation = self.aiservice_client.get_new_explanation(source_code=code_context.read_writable_code,
+                optimized_code=best_optimization.candidate.source_code,
+                dependency_code=code_context.read_only_context_code,
+                trace_id=self.function_trace_id,
+                existing_explanation=best_optimization.candidate.explanation)
+            best_optimization.candidate = replace(best_optimization.candidate, explanation=new_explanation if new_explanation!="" else best_optimization.candidate.explanation)
             explanation = Explanation(
-                raw_explanation_message=new_explanation if new_explanation!="" else best_optimization.candidate.explanation,
+                raw_explanation_message=best_optimization.candidate.explanation,
                 winning_behavioral_test_results=best_optimization.winning_behavioral_test_results,
                 winning_benchmarking_test_results=best_optimization.winning_benchmarking_test_results,
                 original_runtime_ns=original_code_baseline.runtime,
@@ -276,7 +270,13 @@ def optimize_function(self) -> Result[BestOptimization, str]:
                 function_name=function_to_optimize_qualified_name,
                 file_path=self.function_to_optimize.file_path,
             )
-
+            logger.info("Best candidate:")
+            code_print(best_optimization.candidate.source_code)
+            console.print(
+                Panel(
+                    best_optimization.candidate.explanation, title="Best Candidate Explanation", border_style="blue"
+                )
+            )
             self.log_successful_optimization(explanation, generated_tests)
 
             self.replace_function_and_helpers_with_optimized_code(
diff --git a/tests/test_explain_api.py b/tests/test_explain_api.py
index bfc161237..390b532b4 100644
--- a/tests/test_explain_api.py
+++ b/tests/test_explain_api.py
@@ -1,16 +1,22 @@
 from codeflash.api.aiservice import AiServiceClient
-from codeflash.models.ExperimentMetadata import ExperimentMetadata
 def test_explain_api():
     aiservice = AiServiceClient()
-    source_code: str = "a"
-    dependency_code: str = "b"
+    source_code: str = """def bubble_sort(arr):
+    n = len(arr)
+    for i in range(n):
+        for j in range(0, n-i-1):
+            if arr[j] > arr[j+1]:
+                arr[j], arr[j+1] = arr[j+1], arr[j]
+    return arr
+"""
+    dependency_code: str = "def helper(): return 1"
     trace_id: str = "d5822364-7617-4389-a4fc-64602a00b714"
-    num_candidates: int = 1
-    experiment_metadata: ExperimentMetadata | None = None
-    existing_explanation: str = "some explanation"
-    new_explanation = aiservice.get_new_explanation(source_code=source_code,
-        dependency_code=dependency_code,
-        trace_id=trace_id,
-        num_candidates=num_candidates,
-        experiment_metadata=experiment_metadata, existing_explanation=existing_explanation)
+    existing_explanation: str = "I used numpy to optimize it"
+    optimized_code: str = """def bubble_sort(arr):
+    return arr.sort()
+"""
+    new_explanation = aiservice.get_new_explanation(source_code=source_code, optimized_code=optimized_code,
+        existing_explanation=existing_explanation, dependency_code=dependency_code,
+        trace_id=trace_id)
+    print("\nNew explanation: \n", new_explanation)
     assert len(new_explanation) > 0
\ No newline at end of file