# ollaiClient.py — minimal chat client for a local Ollama server (499 B original)
  1. import ollama
  2. host = "127.0.0.1"
  3. port = "12434"
  4. client = ollama.Client(host=f"http://{host}:{port}")
  5. model1 = "deepseek-r1:8b"
  6. model2 = "deepseek-r1:1.5b"
  7. model3 = "qwen2:0.5b"
  8. def getAiResponse(question):
  9. res = client.chat(model=model3,
  10. messages=[
  11. {"role": "user", "content": question}],
  12. options={"temperature": 0})
  13. print(res.message.content)
  14. return res
  15. if __name__ == "__main__":
  16. getAiResponse("11111")