From 6322ec7f0e3bf3d494a164cd4d30f578b8248d37 Mon Sep 17 00:00:00 2001
From: AashrithC
Date: Wed, 4 Jun 2025 16:44:44 +0530
Subject: [PATCH 1/3] fixed utils error

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 9fd9250..e4ed9fa 100644
--- a/README.md
+++ b/README.md
@@ -59,7 +59,7 @@ cd ../../
 
 # Prepare Polyglot
 # Make sure git is properly configured in your environment with username and email
-python polyglot/prepare_polyglot_dataset.py
+python -m polyglot.prepare_polyglot_dataset
 ```
 
 ## Running the DGM

From 8970391d82be1c87395d47bff67152770edd0969 Mon Sep 17 00:00:00 2001
From: AashrithC
Date: Sat, 7 Jun 2025 22:30:58 +0530
Subject: [PATCH 2/3] fixed eval bug

---
 DGM_outer.py                       | 2 +-
 llm.py                             | 9 ++++++---
 prompts/self_improvement_prompt.py | 4 ++--
 3 files changed, 9 insertions(+), 6 deletions(-)

diff --git a/DGM_outer.py b/DGM_outer.py
index 2c87f4b..2848af6 100644
--- a/DGM_outer.py
+++ b/DGM_outer.py
@@ -220,7 +220,7 @@ def get_full_eval_threshold(output_dir, archive):
 
 def main():
     parser = argparse.ArgumentParser(description="Darwin Godel Machine!")
-    parser.add_argument("--max_generation", type=int, default=80, help="Maximum number of evolution iterations.")
+    parser.add_argument("--max_generation", type=int, default=4, help="Maximum number of evolution iterations.")
     parser.add_argument("--selfimprove_size", type=int, default=2, help="Number of self-improvements attempts per DGM generation.")
     parser.add_argument("--selfimprove_workers", type=int, default=2, help="Number of parallel workers for self-improvement attempts.")
     parser.add_argument(
diff --git a/llm.py b/llm.py
index d6b4da1..23f467f 100644
--- a/llm.py
+++ b/llm.py
@@ -16,10 +16,11 @@
     "gpt-4o-mini-2024-07-18",
     "gpt-4o-2024-05-13",
     "gpt-4o-2024-08-06",
-    "o1-preview-2024-09-12",
+    "o4-mini-2025-04-16",
+    #"o1-preview-2024-09-12",
     "o1-mini-2024-09-12",
-    "o1-2024-12-17",
-    "o3-mini-2025-01-31",
+    #"o1-2024-12-17",
+    #"o3-mini-2025-01-31",
     # OpenRouter models
     "llama3.1-405b",
     # Anthropic Claude models via Amazon Bedrock
@@ -49,6 +50,8 @@ def create_client(model: str):
     Returns:
         Tuple[Any, str]: A tuple containing the client instance and the client model name.
     """
+    return openai.OpenAI(), "o1-mini-2024-09-12"
+
     if model.startswith("claude-"):
         print(f"Using Anthropic API with model {model}.")
         return anthropic.Anthropic(), model
diff --git a/prompts/self_improvement_prompt.py b/prompts/self_improvement_prompt.py
index eb96087..0a95827 100644
--- a/prompts/self_improvement_prompt.py
+++ b/prompts/self_improvement_prompt.py
@@ -314,8 +314,8 @@ def get_diagnose_prompt_swe(entry_id, commit, root_dir, out_dir, dataset, patch_
         diagnose_prompt_out = diagnose_prompt_contextlength
     else:
         # Get user prompt for the entry
-        md_logs, eval_logs, predicted_patches = find_selfimprove_eval_logs(entry_id, out_dir, commit_id=commit)
-        md_log, eval_log, predicted_patch = process_selfimprove_eval_logs(md_logs, eval_logs, predicted_patches)
+        md_logs, eval_logs, predicted_patches, eval_results = find_selfimprove_eval_logs(entry_id, out_dir, commit_id=commit)
+        md_log, eval_log, predicted_patch, eval_result = process_selfimprove_eval_logs(md_logs, eval_logs, predicted_patches, eval_results)
         entry = next((e for e in dataset if e['instance_id'] == entry_id), None)
         answer_patch = entry['patch']
         test_patch = entry['test_patch']

From ea5e87ea57da7a79c5139e1a61639711b3cff02e Mon Sep 17 00:00:00 2001
From: Aashrith Chejerla <94641198+AashrithC@users.noreply.github.com>
Date: Sun, 8 Jun 2025 13:10:38 +0530
Subject: [PATCH 3/3] Update README.md

---
 README.md | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/README.md b/README.md
index e4ed9fa..a3b2d34 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,6 @@
+***In-progress adaptation of DGM research. Original repo/paper found below.***
+
+

 Darwin Gödel Machine:
 Open-Ended Evolution of Self-Improving Agents
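
Side note on the second patch, with a small sketch below: the `return openai.OpenAI(), "o1-mini-2024-09-12"` added in `llm.py` lands immediately after the `create_client` docstring, so every call now yields an OpenAI client pinned to `o1-mini-2024-09-12` and the provider-specific branches underneath become unreachable. A minimal, hypothetical smoke check of that behavior (not part of the patch series; it assumes the patched repo is on the Python path, `OPENAI_API_KEY` is set, and the model names passed in are only illustrative):

```python
# Hypothetical smoke check: after PATCH 2/3, create_client() returns an OpenAI
# client pinned to "o1-mini-2024-09-12" regardless of the requested model, so
# the Anthropic/Bedrock branches below the new return statement never run.
# Assumes the patched repo is importable and OPENAI_API_KEY is set.
from llm import create_client

for requested in ["gpt-4o-2024-08-06", "llama3.1-405b", "claude-example"]:
    client, client_model = create_client(requested)
    # Every call yields the same pinned client/model pair.
    print(f"{requested} -> {type(client).__name__} / {client_model}")
    assert client_model == "o1-mini-2024-09-12"
```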