# test2/05-LLM/04_ollama_api_qwen.py
# Examples of calling the ollama API from Python: direct module call,
# explicit Client, streaming, raw REST via requests, and LangChain.
import ollama
import requests
# Ways to use the ollama API from Python
# Method 1: call the ollama module directly
def dm01():
    """Send one chat message to the local ollama server and print the reply.

    For a local server the module-level ``ollama`` functions can be used
    directly. For a remote server, create a client bound to that host:
    ``new_ollama = ollama.Client(url)``.
    """
    # The question to ask the model
    pro = '给我讲一个笑话'
    # Query the local server and collect the full (non-streaming) response
    res = ollama.chat(
        model="qwen2.5:0.5b",
        messages=[{"role": "user", "content": pro}]
    )
    print('res-->', res)  # the whole response object
    # Two equivalent ways to extract just the reply text:
    print(res.message.content)
    # print(res['message']['content'])
# Method 2: use an explicit Client bound to a (possibly remote) host
def dm02():
    """Chat via an explicit ollama Client and print the reply text."""
    from ollama import Client

    # Point the client at the ollama server (default local port 11434)
    client = Client(host='http://127.0.0.1:11434')
    response = client.chat(model='qwen2.5:0.5b', messages=[
        {
            'role': 'user',
            # Fixed mis-encoded character: the original used the KANGXI
            # radical ⾊ (U+2F8B) instead of the CJK ideograph 色
            'content': '为什么天空是蓝色的?',
        },
    ])
    # Responses support dict-style access as well as attribute access
    print(response['message']['content'])
# Method 3: streaming — print the reply incrementally as chunks arrive
def dm03():
    """Stream a chat completion from the local ollama server chunk by chunk."""
    stream = ollama.chat(
        model='qwen2.5:0.5b',
        messages=[{'role': 'user', 'content': '讲一个不少于200字的笑话'}],
        stream=True,  # yields an iterator of partial-message chunks
    )
    # print("stream-->", stream)  # just a generator object, not the text
    for chunk in stream:
        # print("chunk-->", chunk)
        # flush so partial output appears immediately, with no newlines between chunks
        print(chunk['message']['content'], end='', flush=True)
# Method 4: call the ollama REST API directly with requests
def dm04():
    """POST a chat request to the ollama HTTP API and print the reply text."""
    headers = {"Content-Type": "application/json"}
    data = {
        "model": "deepseek-r1:1.5b",  # which model to run
        "options": {
            # 0 -> near-deterministic, repeatable output;
            # higher values make the output more random/creative
            "temperature": 0.
        },
        "stream": False,  # non-streaming: server returns the full reply in one JSON body
        "messages": [{
            "role": "system",
            "content": "你是谁?"
        }]  # conversation history list
    }
    response = requests.post(url='http://127.0.0.1:11434/api/chat', json=data, headers=headers, timeout=60)
    # Fail loudly on HTTP errors instead of crashing on a malformed JSON body
    response.raise_for_status()
    print(response.json()['message']['content'])
# Method 5: call ollama through the LangChain framework
def dm05():
    """Invoke an ollama-served model via LangChain and print the result."""
    # NOTE(review): langchain_community.llms.Ollama is deprecated in newer
    # LangChain releases in favor of the langchain-ollama package — confirm
    # against the installed version before upgrading.
    from langchain_community.llms import Ollama

    # base_url can be omitted when ollama runs on this machine.
    # temperature: near 0 -> deterministic output; larger -> more open-ended.
    llm = Ollama(base_url="http://127.0.0.1:11434",
                 model="deepseek-r1:1.5b", temperature=0)
    res = llm.invoke("给我讲一个笑话")
    print(res)
if __name__ == '__main__':
    # Uncomment one of the demos below to try a different calling style.
    # dm01()
    # dm02()
    # dm03()
    # dm04()
    dm05()