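"""LLM collaboration script: Claude and Gemini iteratively improve a solution.

Claude produces an initial answer to the task prompt, then the two models
take turns reviewing and refining each other's work for a configurable
number of iterations. The full transcript and a final summary are saved
to Markdown files.
"""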
import os
import anthropic
import google.generativeai as genai
import time
import argparse
from datetime import datetime

# Initialize the Anthropic client
claude_client = anthropic.Anthropic(
    api_key=os.environ.get("ANTHROPIC_API_KEY"),
)

# Configure the Gemini API
genai.configure(api_key=os.environ.get("GOOGLE_API_KEY"))
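# Both keys are read from the environment, so export them before running,
# for example:
#   export ANTHROPIC_API_KEY="..."
#   export GOOGLE_API_KEY="..."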
# Configuration
MAX_ITERATIONS = 3
CLAUDE_MODEL = "claude-3-7-sonnet-20250219"
GEMINI_MODEL = "gemini-2.5-pro-exp-03-25"
DEFAULT_PROMPT = """
Task: Please iterate and improve on this procedural generation script. Potential things to add: multi-room layouts, non-rectangular rooms, hallways, etc.
"""
def load_from_file(filepath):
    """Load content from a file with enhanced error handling."""
    if not os.path.exists(filepath):
        print(f"Error: File {filepath} does not exist")
        return None
    try:
        # First try with UTF-8 encoding
        with open(filepath, 'r', encoding='utf-8') as file:
            return file.read()
    except UnicodeDecodeError:
        # If UTF-8 fails, fall back to latin-1
        try:
            with open(filepath, 'r', encoding='latin-1') as file:
                content = file.read()
            print("Note: File loaded using latin-1 encoding instead of UTF-8")
            return content
        except Exception as e:
            print(f"Error reading file with latin-1 encoding: {e}")
            return None
    except Exception as e:
        print(f"Detailed error loading file {filepath}: {e}")
        # Print additional debugging information
        print(f"File path: {os.path.abspath(filepath)}")
        print(f"File exists: {os.path.exists(filepath)}")
        print(f"File size: {os.path.getsize(filepath) if os.path.exists(filepath) else 'N/A'} bytes")
        print(f"File permissions: {oct(os.stat(filepath).st_mode & 0o777) if os.path.exists(filepath) else 'N/A'}")
        return None
def ask_claude(prompt, conversation_history):
    """Send the prompt to Claude along with prior turns. Does not mutate
    conversation_history; the caller records the exchange afterwards."""
    messages = conversation_history.copy()
    messages.append({"role": "user", "content": prompt})
    response = claude_client.messages.create(
        model=CLAUDE_MODEL,
        max_tokens=60000,
        temperature=0.7,
        messages=messages
    )
    return response.content[0].text
def ask_gemini(prompt):
    """Send a single-turn prompt to Gemini. Each call starts a fresh chat,
    so any context Gemini needs must be included in the prompt itself."""
    model = genai.GenerativeModel(GEMINI_MODEL)
    chat = model.start_chat()
    response = chat.send_message(
        prompt,
        generation_config=genai.GenerationConfig(
            temperature=0.7
        )
    )
    return response.text
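# Neither wrapper above handles transient failures (rate limits, network
# errors). A minimal retry sketch; the backoff schedule and the blanket
# Exception catch are illustrative assumptions, not part of either API:
def with_retries(call, attempts=3, base_delay=2.0):
    """Invoke call() up to `attempts` times with exponential backoff."""
    for attempt in range(attempts):
        try:
            return call()
        except Exception as e:
            if attempt == attempts - 1:
                raise
            delay = base_delay * (2 ** attempt)
            print(f"Call failed ({e}); retrying in {delay:.0f}s...")
            time.sleep(delay)

# Hypothetical usage: with_retries(lambda: ask_gemini(gemini_prompt))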
def save_to_file(content, filename=None):
    """Save content to a file, using a timestamped filename if none is given."""
    if filename is None:
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        filename = f"llm_collaboration_{timestamp}.md"
    with open(filename, 'w', encoding='utf-8') as file:
        file.write(content)
    print(f"\nOutput saved to: {filename}")
    return filename
def run_collaboration(initial_prompt, max_iterations=MAX_ITERATIONS):
    conversation_history = []
    full_output = f"# LLM Collaboration\nDate: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n\n"
    full_output += f"## Initial Task\n\n```\n{initial_prompt}\n```\n\n"

    # Start with Claude; record both sides of the exchange so the history
    # sent on later turns begins with a user message, as the API requires
    print("Starting collaboration...")
    claude_response = ask_claude(initial_prompt, conversation_history)
    conversation_history.append({"role": "user", "content": initial_prompt})
    conversation_history.append({"role": "assistant", "content": claude_response})
    full_output += f"## Claude (Iteration 1)\n\n{claude_response}\n\n"
    print("\n=== CLAUDE (Iteration 1) ===\n")
    print(claude_response)
    for i in range(2, max_iterations + 1):
        # Gemini's turn
        gemini_prompt = f"""
You are collaborating with another AI (Claude) in coding a solution for the user's task.

Here is Claude's latest contribution:

{claude_response}

Review the concepts, then improve upon it. You can:
1. Suggest architectural improvements
2. Enhance the implementation
3. Add missing features
4. Fix any issues you see
5. Build upon the existing design

Provide your improved version of the design and explain your changes.
"""
        gemini_response = ask_gemini(gemini_prompt)
        full_output += f"## Gemini (Iteration {i})\n\n{gemini_response}\n\n"
        print(f"\n=== GEMINI (Iteration {i}) ===\n")
        print(gemini_response)
        # Claude's turn
        claude_prompt = f"""
You are collaborating with another AI (Gemini) in coding a solution for the user's task.

Here is Gemini's latest contribution based on your previous work:

{gemini_response}

Review the design, then improve upon it. You can:
1. Suggest architectural improvements
2. Enhance the implementation
3. Add missing features
4. Fix any issues you see
5. Build upon the existing design

Provide your improved version of the design and explain your changes.
"""
        claude_response = ask_claude(claude_prompt, conversation_history)
        # Record the exchange after the call; ask_claude already adds the
        # prompt to its own copy, so appending beforehand would duplicate it
        conversation_history.append({"role": "user", "content": claude_prompt})
        conversation_history.append({"role": "assistant", "content": claude_response})
        full_output += f"## Claude (Iteration {i})\n\n{claude_response}\n\n"
        print(f"\n=== CLAUDE (Iteration {i}) ===\n")
        print(claude_response)

        # Optional: a small delay to avoid hitting rate limits
        time.sleep(1)
    # Final assessment: ask Claude to summarize the collaboration
    final_prompt = """
Now that we've gone through several iterations of collaboration, please provide:
1. A summary of how the design evolved
2. The final architecture and implementation
3. Any areas that could be further improved with additional iterations
"""
    final_summary = ask_claude(final_prompt, conversation_history)
    full_output += f"## Final Collaboration Summary\n\n{final_summary}\n\n"
    print("\n=== FINAL COLLABORATION SUMMARY ===\n")
    print(final_summary)

    # Save the complete transcript to a timestamped file
    filename = save_to_file(full_output)

    # Also save just the final summary to a separate file
    save_to_file(final_summary, "final_solution.md")
    return filename
def parse_arguments():
    parser = argparse.ArgumentParser(description='LLM Collaboration Tool')
    parser.add_argument('--file', '-f', type=str,
                        help='Path to input file containing task and/or code')
    parser.add_argument('--iterations', '-i', type=int, default=MAX_ITERATIONS,
                        help=f'Number of iteration rounds (default: {MAX_ITERATIONS})')
    return parser.parse_args()
if __name__ == "__main__":
    args = parse_arguments()

    # Load the initial prompt from a file if specified, otherwise use the default
    initial_prompt = DEFAULT_PROMPT
    if args.file:
        file_content = load_from_file(args.file)
        if file_content:
            initial_prompt = file_content
        else:
            print(f"Could not load file {args.file}, using default prompt")

    output_file = run_collaboration(initial_prompt, args.iterations)
    print(f"Complete collaboration log saved to {output_file}")
    print("Final solution saved to final_solution.md")