2 changes: 1 addition & 1 deletion DGM_outer.py
@@ -220,7 +220,7 @@ def get_full_eval_threshold(output_dir, archive):
 
 def main():
     parser = argparse.ArgumentParser(description="Darwin Godel Machine!")
-    parser.add_argument("--max_generation", type=int, default=80, help="Maximum number of evolution iterations.")
+    parser.add_argument("--max_generation", type=int, default=4, help="Maximum number of evolution iterations.")
     parser.add_argument("--selfimprove_size", type=int, default=2, help="Number of self-improvements attempts per DGM generation.")
     parser.add_argument("--selfimprove_workers", type=int, default=2, help="Number of parallel workers for self-improvement attempts.")
     parser.add_argument(
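For reference, a minimal sketch of the CLI surface this hunk touches, reconstructed only from the flags visible in the diff (the rest of `main()` is omitted). It illustrates that the lowered default of 4 generations still yields to an explicit `--max_generation` override.

```python
import argparse

# Illustrative reconstruction of the parser fragment shown in the hunk above.
parser = argparse.ArgumentParser(description="Darwin Godel Machine!")
parser.add_argument("--max_generation", type=int, default=4,
                    help="Maximum number of evolution iterations.")
parser.add_argument("--selfimprove_size", type=int, default=2,
                    help="Number of self-improvement attempts per DGM generation.")
parser.add_argument("--selfimprove_workers", type=int, default=2,
                    help="Number of parallel workers for self-improvement attempts.")

# With no flags, the new default applies ...
print(parser.parse_args([]).max_generation)   # 4
# ... and an explicit flag still overrides it.
print(parser.parse_args(["--max_generation", "80"]).max_generation)  # 80
```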
5 changes: 4 additions & 1 deletion README.md
@@ -1,3 +1,6 @@
+***In-progress adaptation of DGM research. Original repo/paper found below.***
+
+
 <h1 align="center">
 Darwin Gödel Machine:<br/>Open-Ended Evolution of Self-Improving Agents
 </h1>
@@ -59,7 +62,7 @@ cd ../../
 
 # Prepare Polyglot
 # Make sure git is properly configured in your environment with username and email
-python polyglot/prepare_polyglot_dataset.py
+python -m polyglot.prepare_polyglot_dataset
 ```
 
 ## Running the DGM
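A hedged note on the README change above: invoking the dataset prep with `python -m` imports `polyglot` as a package from the repo root before running the script, which typically lets package-level imports inside it resolve without path tweaks (whether the script relies on such imports is an assumption here). The sketch below shows the equivalent module-style invocation from within Python, using only the standard library.

```python
# Equivalent of `python -m polyglot.prepare_polyglot_dataset`, run from the repo root.
# run_module imports the target as part of the `polyglot` package before executing it,
# which is the behavioural difference from `python polyglot/prepare_polyglot_dataset.py`.
import runpy

runpy.run_module("polyglot.prepare_polyglot_dataset", run_name="__main__")
```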
9 changes: 6 additions & 3 deletions llm.py
@@ -16,10 +16,11 @@
     "gpt-4o-mini-2024-07-18",
     "gpt-4o-2024-05-13",
     "gpt-4o-2024-08-06",
-    "o1-preview-2024-09-12",
+    "o4-mini-2025-04-16",
+    #"o1-preview-2024-09-12",
     "o1-mini-2024-09-12",
-    "o1-2024-12-17",
-    "o3-mini-2025-01-31",
+    #"o1-2024-12-17",
+    #"o3-mini-2025-01-31",
     # OpenRouter models
     "llama3.1-405b",
     # Anthropic Claude models via Amazon Bedrock
@@ -49,6 +50,8 @@ def create_client(model: str):
     Returns:
         Tuple[Any, str]: A tuple containing the client instance and the client model name.
     """
+    return openai.OpenAI(), "o1-mini-2024-09-12"
+
     if model.startswith("claude-"):
         print(f"Using Anthropic API with model {model}.")
         return anthropic.Anthropic(), model
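For context, a sketch of the call pattern implied by the hunk above, assuming `create_client` is importable from `llm` and that `OPENAI_API_KEY` is set in the environment (the model string passed in is illustrative). With the unconditional early return in place, every model name resolves to the OpenAI client pinned to `o1-mini-2024-09-12`, and the `claude-` branch below it is not reached.

```python
# Illustrative only; openai.OpenAI() expects OPENAI_API_KEY to be set.
from llm import create_client

client, client_model = create_client("claude-3-5-sonnet")  # argument is effectively ignored
print(client_model)  # "o1-mini-2024-09-12"
```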
4 changes: 2 additions & 2 deletions prompts/self_improvement_prompt.py
@@ -314,8 +314,8 @@ def get_diagnose_prompt_swe(entry_id, commit, root_dir, out_dir, dataset, patch_
         diagnose_prompt_out = diagnose_prompt_contextlength
     else:
         # Get user prompt for the entry
-        md_logs, eval_logs, predicted_patches = find_selfimprove_eval_logs(entry_id, out_dir, commit_id=commit)
-        md_log, eval_log, predicted_patch = process_selfimprove_eval_logs(md_logs, eval_logs, predicted_patches)
+        md_logs, eval_logs, predicted_patches, eval_results = find_selfimprove_eval_logs(entry_id, out_dir, commit_id=commit)
+        md_log, eval_log, predicted_patch, eval_result = process_selfimprove_eval_logs(md_logs, eval_logs, predicted_patches, eval_results)
         entry = next((e for e in dataset if e['instance_id'] == entry_id), None)
         answer_patch = entry['patch']
         test_patch = entry['test_patch']