caching for claude

a guest
Nov 28th, 2024
61
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 12.01 KB | None | 0 0
import tkinter as tk
from tkinter import ttk, scrolledtext
from tkinter import filedialog
import threading
import anthropic
from datetime import datetime

class ClaudeGUI:
    def __init__(self, root):
        self.root = root
        self.root.title("Claude Prompt Caching Interface")
        self.root.geometry("1000x800")

        # Available models configuration
        self.models = {
            "Claude 3.5 Sonnet": {
                "id": "claude-3-5-sonnet-20241022",
                "description": "Best balance of intelligence and speed",
                "max_tokens": 1024,
                "icon": "🚀"  # Unicode icon for visual distinction
            }
            # Add more models here as needed, for example:
            # "Claude 3 Opus": {
            #     "id": "claude-3-opus-20240229",
            #     "description": "Most capable model",
            #     "max_tokens": 4096,
            #     "icon": "⭐"
            # }
        }

        # Placeholder text shown in the cached-prompt box until the user types
        self.placeholder = (
            "Enter system prompt to cache here (optional)\n"
            "Example: You are an AI assistant trained to help with coding tasks..."
        )

        # Configure style
        style = ttk.Style()
        style.configure("Custom.TFrame", background="#f0f0f0")
        style.configure("Stats.TLabel", font=("Arial", 10))

        # Create main container
        main_container = ttk.Frame(root, style="Custom.TFrame", padding="10")
        main_container.pack(fill=tk.BOTH, expand=True)

        # Top section: API key, history length and model selection
        top_frame = ttk.Frame(main_container)
        top_frame.pack(fill=tk.X, pady=(0, 10))

        # API key
        api_frame = ttk.Frame(top_frame)
        api_frame.pack(fill=tk.X, pady=(0, 5))
        ttk.Label(api_frame, text="API Key:").pack(side=tk.LEFT)
        self.api_key_var = tk.StringVar()
        self.api_key_entry = ttk.Entry(api_frame, textvariable=self.api_key_var, show="*")
        self.api_key_entry.pack(side=tk.LEFT, fill=tk.X, expand=True, padx=(5, 0))

        # History control
        history_frame = ttk.Frame(top_frame)
        history_frame.pack(fill=tk.X, pady=(0, 5))
        ttk.Label(history_frame, text="Conversation Memory (turns):").pack(side=tk.LEFT)
        self.history_turns = tk.IntVar(value=10)
        self.history_spinbox = ttk.Spinbox(
            history_frame,
            from_=1,
            to=100,
            width=5,
            textvariable=self.history_turns
        )
        self.history_spinbox.pack(side=tk.LEFT, padx=(5, 0))

        # Model selection
        model_frame = ttk.Frame(top_frame)
        model_frame.pack(fill=tk.X, pady=(0, 5))
        ttk.Label(model_frame, text="Model:").pack(side=tk.LEFT)

        # Create model selection dropdown
        self.selected_model = tk.StringVar()
        self.model_dropdown = ttk.Combobox(
            model_frame,
            textvariable=self.selected_model,
            values=[f"{k} {v['icon']}" for k, v in self.models.items()],
            state="readonly"
        )
        self.model_dropdown.pack(side=tk.LEFT, padx=(5, 0))
        default_model = list(self.models.keys())[0]
        self.model_dropdown.set(f"{default_model} {self.models[default_model]['icon']}")  # Set default

        # Model description label
        self.model_description = ttk.Label(model_frame, style="Stats.TLabel")
        self.model_description.pack(side=tk.LEFT, padx=(10, 0))
        self.update_model_description()

        # Bind model change event
        self.model_dropdown.bind('<<ComboboxSelected>>', self.update_model_description)

        # Cached prompt section
        ttk.Label(main_container, text="Cached Prompt (Optional):").pack(anchor=tk.W)
        self.cached_prompt = scrolledtext.ScrolledText(main_container, height=5)
        self.cached_prompt.pack(fill=tk.X, pady=(0, 10))

        # Add placeholder text and greyed-out styling until the box gains focus
        self.cached_prompt.insert("1.0", self.placeholder)
        self.cached_prompt.bind("<FocusIn>", self.on_cached_prompt_focus_in)
        self.cached_prompt.bind("<FocusOut>", self.on_cached_prompt_focus_out)
        self.cached_prompt.config(fg='grey')

        # Stats display
        self.stats_label = ttk.Label(
            main_container,
            text="Tokens - Input: 0 | Output: 0 | Cached: 0",
            style="Stats.TLabel"
        )
        self.stats_label.pack(anchor=tk.W, pady=(0, 5))

        # Chat display
        self.chat_display = scrolledtext.ScrolledText(main_container, height=20)
        self.chat_display.pack(fill=tk.BOTH, expand=True, pady=(0, 10))
        self.chat_display.tag_configure("user", foreground="blue")
        self.chat_display.tag_configure("assistant", foreground="green")
        self.chat_display.tag_configure("system", foreground="red")

        # Progress bar
        self.progress = ttk.Progressbar(main_container, mode='indeterminate')
        self.progress.pack(fill=tk.X, pady=(0, 10))

        # User input
        self.user_input = scrolledtext.ScrolledText(main_container, height=4)
        self.user_input.pack(fill=tk.X, pady=(0, 10))
        self.user_input.bind("<Command-Return>", self.send_message)  # Mac
        self.user_input.bind("<Control-Return>", self.send_message)  # Windows/Linux

        # Buttons
        button_frame = ttk.Frame(main_container)
        button_frame.pack(fill=tk.X)

        ttk.Button(button_frame, text="Send", command=self.send_message).pack(side=tk.LEFT, padx=5)
        ttk.Button(button_frame, text="Clear Chat", command=self.clear_chat).pack(side=tk.LEFT, padx=5)
        ttk.Button(button_frame, text="Save Chat", command=self.save_chat).pack(side=tk.LEFT, padx=5)

        # Initialize variables
        self.total_tokens = {"input": 0, "output": 0, "cached": 0}
        self.client = None
        self.conversation_history = []  # Store conversation history
        self.api_key = "your api key"  # Put your API key here

        self.api_key_var.set(self.api_key)  # Set the API key in the UI
        self.api_key_entry.config(state='disabled')  # Disable the entry since the key is hardcoded

    def update_model_description(self, event=None):
        selected = self.model_dropdown.get().rsplit(' ', 1)[0]  # Remove icon
        description = self.models[selected]["description"]
        self.model_description.config(text=description)

    def get_selected_model_id(self):
        selected = self.model_dropdown.get().rsplit(' ', 1)[0]  # Remove icon
        return self.models[selected]["id"]

    def append_message(self, sender, message):
        timestamp = datetime.now().strftime("%H:%M:%S")
        self.chat_display.insert(tk.END, f"[{timestamp}] {sender}: ", sender.lower())
        self.chat_display.insert(tk.END, f"{message}\n\n")
        self.chat_display.see(tk.END)

    def send_message(self, event=None):
        if not self.api_key_var.get():
            self.append_message("System", "Please enter your API key.")
            return "break"

        message = self.user_input.get("1.0", tk.END).strip()
        if not message:
            return "break"

        self.append_message("User", message)
        self.user_input.delete("1.0", tk.END)

        # Start progress bar
        self.progress.start()

        # Run the API call in a background thread so the UI stays responsive
        thread = threading.Thread(target=self.process_message, args=(message,))
        thread.daemon = True
        thread.start()

        # Returning "break" stops the Return key binding from also inserting a newline
        return "break"

    def process_message(self, message):
        try:
            # Note: Tk widgets are read here from a worker thread; this usually
            # works under CPython, but strictly all Tk access belongs on the main thread.
            client = anthropic.Anthropic(api_key=self.api_key_var.get())

            # Get the cached prompt if the user entered one
            cached_prompt = self.cached_prompt.get("1.0", tk.END).strip()

            # Prepare conversation history, honouring the "Conversation Memory"
            # spinbox (each turn is one user message plus one assistant reply)
            turns = self.history_turns.get()
            messages = []
            messages.extend(self.conversation_history[-turns * 2:])
            # Add the current message
            messages.append({
                "role": "user",
                "content": message
            })

            # Prepare API call parameters
            params = {
                "model": self.get_selected_model_id(),
                "max_tokens": self.models[self.model_dropdown.get().rsplit(' ', 1)[0]]["max_tokens"],
                "messages": messages,
                # Beta header that enables prompt caching on the API
                "extra_headers": {"anthropic-beta": "prompt-caching-2024-07-31"}
            }

            # Only add a system message if the cached prompt box holds real text
            # (i.e. it is neither empty nor still showing the placeholder)
            if cached_prompt and cached_prompt != self.placeholder:
                params["system"] = [
                    {
                        "type": "text",
                        "text": cached_prompt,
                        # cache_control marks this block for prompt caching
                        "cache_control": {"type": "ephemeral"}
                    }
                ]

            response = client.messages.create(**params)

            # Add the exchange to the conversation history
            self.conversation_history.append({
                "role": "user",
                "content": message
            })
            self.conversation_history.append({
                "role": "assistant",
                "content": response.content[0].text
            })

            # Update statistics
            self.total_tokens["input"] += response.usage.input_tokens
            self.total_tokens["output"] += response.usage.output_tokens
            cache_read = getattr(response.usage, 'cache_read_input_tokens', 0)
            self.total_tokens["cached"] += cache_read

            # Update the UI in the main thread
            self.root.after(0, self.update_ui, response.content[0].text)

        except Exception as e:
            self.root.after(0, self.append_message, "System", f"Error: {str(e)}")
        finally:
            self.root.after(0, self.progress.stop)

    def update_ui(self, response_text):
        self.append_message("Assistant", response_text)
        self.stats_label.config(
            text=f"Tokens - Input: {self.total_tokens['input']} | "
                 f"Output: {self.total_tokens['output']} | "
                 f"Cached: {self.total_tokens['cached']}"
        )

    def clear_chat(self):
        self.chat_display.delete("1.0", tk.END)
        self.total_tokens = {"input": 0, "output": 0, "cached": 0}
        self.stats_label.config(text="Tokens - Input: 0 | Output: 0 | Cached: 0")
        self.conversation_history = []  # Clear conversation history
        self.append_message("System", "Chat cleared")

    def on_cached_prompt_focus_in(self, event):
        if self.cached_prompt.get("1.0", tk.END).strip() == self.placeholder:
            self.cached_prompt.delete("1.0", tk.END)
            self.cached_prompt.config(fg='black')

    def on_cached_prompt_focus_out(self, event):
        if not self.cached_prompt.get("1.0", tk.END).strip():
            self.cached_prompt.insert("1.0", self.placeholder)
            self.cached_prompt.config(fg='grey')

    def save_chat(self):
        file_path = filedialog.asksaveasfilename(
            defaultextension=".txt",
            filetypes=[("Text files", "*.txt"), ("All files", "*.*")]
        )
        if file_path:
            try:
                with open(file_path, 'w', encoding='utf-8') as file:
                    file.write(self.chat_display.get("1.0", tk.END))
                self.append_message("System", f"Chat saved to {file_path}")
            except Exception as e:
                self.append_message("System", f"Error saving chat: {str(e)}")


def main():
    root = tk.Tk()
    app = ClaudeGUI(root)
    root.mainloop()


if __name__ == "__main__":
    main()
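
For reference, the caching call at the heart of process_message, stripped of the GUI, looks roughly like the sketch below. The API key and system prompt text are placeholders, and it assumes the anthropic Python SDK with the same prompt-caching beta header used above.

import anthropic

client = anthropic.Anthropic(api_key="your api key")  # placeholder key

# Placeholder system prompt; in practice this would be a long, reusable prompt.
long_system_prompt = "You are an AI assistant trained to help with coding tasks..."

response = client.messages.create(
    model="claude-3-5-sonnet-20241022",
    max_tokens=1024,
    # cache_control marks the system block so repeat requests can read it from
    # the prompt cache instead of reprocessing it.
    system=[
        {
            "type": "text",
            "text": long_system_prompt,
            "cache_control": {"type": "ephemeral"},
        }
    ],
    messages=[{"role": "user", "content": "Hello"}],
    extra_headers={"anthropic-beta": "prompt-caching-2024-07-31"},
)

print(response.content[0].text)
# Cache activity is reported on the usage object; getattr guards against
# SDK versions that do not expose these fields.
print("cache written:", getattr(response.usage, "cache_creation_input_tokens", 0))
print("cache read:", getattr(response.usage, "cache_read_input_tokens", 0))

Note that system prompts shorter than the model's minimum cacheable length (1024 tokens for Claude 3.5 Sonnet) are not cached, so for small prompts the cache counters stay at zero.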