llm-python-file.py

a guest
Jan 24th, 2025
Python | 1.18 KB
#!/usr/bin/env python3

import sys

# Example: reuse your existing OpenAI setup
from openai import OpenAI
import httpx

# Positional arguments: document path, system prompt, pre-prompt,
# post-prompt, and sampling temperature.
if len(sys.argv) != 6:
    print(f"Usage: {sys.argv[0]} <document-file> <system> <preprompt> <postprompt> <temperature>")
    sys.exit(1)

document_file_path = sys.argv[1]
system = sys.argv[2]
preprompt = sys.argv[3]
postprompt = sys.argv[4]
temp = float(sys.argv[5])

# Read the content of the document file
try:
    with open(document_file_path, 'r') as file:
        document = file.read()
except FileNotFoundError:
    print(f"Error: The file '{document_file_path}' does not exist.")
    sys.exit(1)
except Exception as e:
    print(f"Error: {e}")
    sys.exit(1)

# Point to the local OpenAI-compatible server; the API key is unused locally
# but the client requires a value. Allow up to an hour for long completions.
client = OpenAI(
    base_url="http://127.0.0.1:9090/v1",
    api_key="none",
    timeout=httpx.Timeout(3600),
)

# Send the system prompt, pre-prompt, document, and post-prompt as a single
# chat request and stream the response back token by token.
completion = client.chat.completions.create(
    model="llama-3_2-3b",
    messages=[
        {"role": "system", "content": system},
        {"role": "user", "content": preprompt},
        {"role": "user", "content": document},
        {"role": "user", "content": postprompt},
    ],
    temperature=temp,
    stream=True,
)

# Non-streaming alternative:
# print(completion.choices[0].message.content.strip())

# Print each streamed chunk as it arrives.
for chunk in completion:
    if chunk.choices[0].delta.content:
        print(chunk.choices[0].delta.content, end="", flush=True)
print('\n')  # finish the streamed output with a blank line
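For reference, a minimal sketch of how the script might be called from another Python program; the file name, prompts, and temperature below are illustrative, not part of the paste:

# Hypothetical invocation (assumes the paste is saved as llm-python-file.py
# and that notes.txt exists; all prompt strings are placeholders).
import subprocess

subprocess.run([
    "python3", "llm-python-file.py",
    "notes.txt",                          # document to send to the model
    "You are a concise assistant.",       # system prompt
    "Summarize the following document:",  # pre-prompt
    "Limit the summary to 100 words.",    # post-prompt
    "0.7",                                # sampling temperature
])

The same argument order applies when running the script directly from a shell.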
Tags: python llm