Note
In the sample code, replace <YOUR_ENDPOINT_ID> with the ID of the inference endpoint you created on the platform.
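All of the examples also read the API key from the ARK_API_KEY environment variable. The snippet below is a minimal, purely illustrative sanity check you can run before any of the samples; it is not part of the SDK.

import os

# Fail fast if the API key has not been exported into the environment.
if not os.environ.get("ARK_API_KEY"):
    raise RuntimeError("Set the ARK_API_KEY environment variable before running the examples.")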
import os

from volcenginesdkarkruntime import Ark

client = Ark(api_key=os.environ.get("ARK_API_KEY"))

print("----- standard request -----")
completion = client.chat.completions.create(
    model="<YOUR_ENDPOINT_ID>",
    messages=[
        {"role": "system", "content": "你是豆包,是由字节跳动开发的 AI 人工智能助手"},
        {"role": "user", "content": "常见的十字花科植物有哪些?"},
    ],
)
print(completion.choices[0].message.content)
import os

from volcenginesdkarkruntime import Ark

client = Ark(api_key=os.environ.get("ARK_API_KEY"))

print("----- multiple rounds request -----")
completion = client.chat.completions.create(
    model="<YOUR_ENDPOINT_ID>",
    messages=[
        {"role": "system", "content": "你是豆包,是由字节跳动开发的 AI 人工智能助手"},
        {"role": "user", "content": "花椰菜是什么?"},
        {"role": "assistant", "content": "花椰菜又称菜花、花菜,是一种常见的蔬菜。"},
        {"role": "user", "content": "再详细点"},
    ],
)
print(completion.choices[0].message.content)
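The multi-round pattern above generalizes to an interactive loop: keep a single message list and append both the user turn and the assistant reply before each new request. The sketch below is an illustration built only on the calls already shown; the history variable and the hard-coded questions are examples, not part of the SDK.

import os

from volcenginesdkarkruntime import Ark

client = Ark(api_key=os.environ.get("ARK_API_KEY"))

# Keep the full dialog in one list so every request carries the prior context.
history = [
    {"role": "system", "content": "你是豆包,是由字节跳动开发的 AI 人工智能助手"},
]
for question in ["花椰菜是什么?", "再详细点"]:
    history.append({"role": "user", "content": question})
    completion = client.chat.completions.create(
        model="<YOUR_ENDPOINT_ID>",
        messages=history,
    )
    reply = completion.choices[0].message.content
    history.append({"role": "assistant", "content": reply})
    print(reply)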
import os

from volcenginesdkarkruntime import Ark

client = Ark(api_key=os.environ.get("ARK_API_KEY"))

print("----- streaming request -----")
stream = client.chat.completions.create(
    model="<YOUR_ENDPOINT_ID>",
    messages=[
        {"role": "system", "content": "你是豆包,是由字节跳动开发的 AI 人工智能助手"},
        {"role": "user", "content": "常见的十字花科植物有哪些?"},
    ],
    stream=True,
)
for chunk in stream:
    if not chunk.choices:
        continue
    print(chunk.choices[0].delta.content, end="")
print()
import asyncio
import os

from volcenginesdkarkruntime import AsyncArk

client = AsyncArk(api_key=os.environ.get("ARK_API_KEY"))


async def main() -> None:
    stream = await client.chat.completions.create(
        model="<YOUR_ENDPOINT_ID>",
        messages=[
            {"role": "system", "content": "你是豆包,是由字节跳动开发的 AI 人工智能助手"},
            {"role": "user", "content": "常见的十字花科植物有哪些?"},
        ],
        stream=True,
    )
    async for completion in stream:
        print(completion.choices[0].delta.content, end="")
    print()


asyncio.run(main())
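A common reason to prefer AsyncArk is issuing several independent requests concurrently. The sketch below assumes the same endpoint and ARK_API_KEY setup as above and simply fans a non-streaming call out with asyncio.gather; the ask helper is illustrative, not an SDK function.

import asyncio
import os

from volcenginesdkarkruntime import AsyncArk

client = AsyncArk(api_key=os.environ.get("ARK_API_KEY"))


async def ask(question: str) -> str:
    # Same chat.completions.create call as the synchronous examples, awaited.
    completion = await client.chat.completions.create(
        model="<YOUR_ENDPOINT_ID>",
        messages=[{"role": "user", "content": question}],
    )
    return completion.choices[0].message.content


async def main() -> None:
    # Run the requests concurrently and print the answers in order.
    answers = await asyncio.gather(
        ask("常见的十字花科植物有哪些?"),
        ask("花椰菜是什么?"),
    )
    for answer in answers:
        print(answer)


asyncio.run(main())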
import os

from volcenginesdkarkruntime import Ark

client = Ark(api_key=os.environ.get("ARK_API_KEY"))

print("----- function call request -----")
completion = client.chat.completions.create(
    model="<YOUR_ENDPOINT_ID>",
    messages=[
        {"role": "user", "content": "北京今天天气如何?"},
    ],
    tools=[
        {
            "type": "function",
            "function": {
                "name": "get_current_weather",
                "description": "获取给定地点的天气",
                "parameters": {
                    "type": "object",
                    "properties": {
                        "location": {
                            "type": "string",
                            "description": "地点的位置信息,比如北京",
                        },
                        "unit": {"type": "string", "enum": ["摄氏度", "华氏度"]},
                    },
                    "required": ["location"],
                },
            },
        }
    ],
)
print(completion.choices[0])
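When the model chooses to call the function, the returned assistant message carries tool_calls instead of final text, and a second request is needed to hand the function result back. The sketch below assumes the API follows the common chat-completions tool-calling convention (tool_call.id, tool_call.function.arguments, and a "tool" role message); get_current_weather is simulated with a local stub, and reusing the returned message object in the history is an assumption, not documented SDK behavior.

import json

# Hypothetical continuation of the function call example above,
# reusing the client and completion variables from that snippet.
message = completion.choices[0].message
if message.tool_calls:
    tool_call = message.tool_calls[0]
    args = json.loads(tool_call.function.arguments)

    # Local stub standing in for a real weather lookup.
    tool_result = {"location": args.get("location"), "weather": "晴, 25摄氏度"}

    follow_up = client.chat.completions.create(
        model="<YOUR_ENDPOINT_ID>",
        messages=[
            {"role": "user", "content": "北京今天天气如何?"},
            message,  # assistant message containing the tool call (assumed accepted as-is)
            {
                "role": "tool",
                "tool_call_id": tool_call.id,
                "content": json.dumps(tool_result, ensure_ascii=False),
            },
        ],
    )
    print(follow_up.choices[0].message.content)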
import os

from volcenginesdkarkruntime import Ark
from volcenginesdkarkruntime._constants import CLIENT_REQUEST_HEADER

client = Ark(api_key=os.environ.get("ARK_API_KEY"))

print("----- standard request -----")
completion = client.chat.completions.create(
    model="<YOUR_ENDPOINT_ID>",
    messages=[
        {"role": "system", "content": "你是豆包,是由字节跳动开发的 AI 人工智能助手"},
        {"role": "user", "content": "常见的十字花科植物有哪些?"},
    ],
    # Custom request ID, useful for correlating a call with server-side logs
    extra_headers={CLIENT_REQUEST_HEADER: "202406251728190000B7EA7A9648AC08D9"},
)
print(completion.choices[0].message.content)
import os

from volcenginesdkarkruntime import Ark
from volcenginesdkarkruntime._exceptions import ArkAPIError

client = Ark(api_key=os.environ.get("ARK_API_KEY"))

# Streaming:
print("----- streaming request -----")
try:
    stream = client.chat.completions.create(
        model="<YOUR_ENDPOINT_ID>",
        messages=[
            {"role": "system", "content": "你是豆包,是由字节跳动开发的 AI 人工智能助手"},
            {"role": "user", "content": "常见的十字花科植物有哪些?"},
        ],
        stream=True,
    )
    for chunk in stream:
        if not chunk.choices:
            continue
        print(chunk.choices[0].delta.content, end="")
    print()
except ArkAPIError as e:
    print(e)