import ollama

# Local Ollama server endpoint (note: 12434 is non-default; Ollama's
# standard port is 11434 — confirm the server really listens here).
host = "127.0.0.1"
port = "12434"

client = ollama.Client(host=f"http://{host}:{port}")

# Candidate models; the smallest (model3) is used by default.
model1 = "deepseek-r1:8b"
model2 = "deepseek-r1:1.5b"
model3 = "qwen2:0.5b"


def getAiResponse(question, model=None):
    """Send *question* to the local Ollama chat endpoint and return the reply.

    Parameters:
        question: The user prompt to send as a single chat message.
        model: Optional model name; defaults to ``model3`` when omitted,
            preserving the original hard-coded behavior.

    Returns:
        The full chat response object from ``client.chat``; the reply text
        is also printed to stdout as a side effect.

    Raises:
        Whatever ``ollama.Client.chat`` raises on connection or model
        errors (e.g. the server at ``host:port`` is unreachable).
    """
    res = client.chat(
        model=model if model is not None else model3,
        messages=[{"role": "user", "content": question}],
        # temperature 0 makes the generation deterministic.
        options={"temperature": 0},
    )
    print(res.message.content)
    return res


if __name__ == "__main__":
    getAiResponse("11111")