Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
#!/usr/bin/env python3
import requests

import cfg

# Gemini API key, supplied by the local cfg module.
API_KEY = cfg.gemini_key

# Do not accept requests larger than this many characters.
# NOTE(review): this constant is defined but never checked anywhere in the
# visible code — presumably ai()/chat() were meant to enforce it; confirm.
MAX_REQUEST = 14000

# Per-conversation dialog storage: {chat_id: list of message dicts}.
CHATS = {}
def ai(q: str, mem: list) -> str:
    """
    Send *q* plus the accumulated conversation *mem* to the Gemini API
    and return the model's text reply.

    Parameters:
    - q (str): The question to be passed to the AI model.
    - mem (list): Conversation history, mutated in place. Each entry is a
      dict in the Gemini "contents" shape:
      {"role": "user"|"model", "parts": [{"text": ...}]}.

    Returns:
    - str: The model's reply, or the fixed fallback string
      'Gemini didnt respond' when the response cannot be parsed.
    """
    url = ("https://generativelanguage.googleapis.com/v1beta/models/"
           "gemini-pro:generateContent?key=" + API_KEY)
    payload = {"contents": mem + [{"role": "user", "parts": [{"text": q}]}]}
    response = requests.post(url, json=payload, timeout=60)
    try:
        resp = response.json()['candidates'][0]['content']['parts'][0]['text']
    except Exception:
        # Covers malformed JSON, API error payloads (no "candidates"),
        # and safety blocks alike; the caller only ever sees a string.
        resp = 'Gemini didnt respond'
    if resp:
        mem.append({"role": "user", "parts": [{"text": q}]})
        mem.append({"role": "model", "parts": [{"text": resp}]})
        # Trim the oldest user/model pair until the history fits the budget.
        # BUGFIX: the original computed the total size once and then looped
        # on `mem = mem[2:]` — the size never shrank (infinite loop whenever
        # it exceeded 25000), and rebinding the local `mem` never trimmed the
        # caller's list stored in CHATS. Recompute per iteration and delete
        # in place instead.
        while sum(len(x['parts'][0]['text']) for x in mem) > 25000:
            del mem[:2]
    return resp
def chat(query: str, id: str) -> str:
    """
    Route *query* to the conversation identified by *id*.

    Args:
        query (str): The user's message.
        id (str): Conversation key into the module-level CHATS store;
            a fresh empty history is created on first use.

    Returns:
        str: The AI's reply for this conversation.
    """
    history = CHATS.setdefault(id, [])
    return ai(query, history)
def reset(id: str):
    """
    Drop the stored conversation history for *id*.

    Parameters:
        id (str): Conversation key in the module-level CHATS store.

    Returns:
        None
    """
    # Replace (rather than clear) so any external reference to the old
    # history list is detached, matching the original assignment semantics.
    CHATS[id] = []
if __name__ == '__main__':
    # Simple interactive REPL for manual testing: every line typed is sent
    # to the 'test' conversation and the model's reply is printed.
    # Removed the unused local `mem = []` — the real history lives in the
    # module-level CHATS store, managed by chat().
    while True:
        q = input('>')
        r = chat(q, 'test')
        print(r)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement