Multi-language Community SDKs
Last updated: 2024.09.24 21:11:04 · First published: 2024.09.24 20:15:17

The 火山方舟 model/agent v3 API is compatible with the OpenAI API protocol, so you can call the 火山方舟 model/agent API with any community SDK, in any language, that supports the OpenAI API protocol. To call the API with an OpenAI-compatible SDK, start from the community SDK's own code samples and complete the following steps:

  • Obtain a 火山方舟 API Key and configure it as the API Key used by the SDK

  • Change base_url to:

    • Model calls: https://<ARK_DOMAIN>/api/v3/
    • Agent calls: https://<ARK_DOMAIN>/api/v3/bots/ (see the sketch after this list)
  • Set the model parameter to the ID of the corresponding resource

  • For the value of ARK_DOMAIN, see 地域和访问域名 (Regions and Access Domains)
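
For example, an agent call differs from a model call only in the base_url and the model value. The following is a minimal sketch using openai-python; <ARK_DOMAIN> and <YOUR_BOT_ID> are placeholders for your own access domain and bot ID.

import os

from openai import OpenAI

# Agent (bot) calls go through the /api/v3/bots path; model is the bot ID
client = OpenAI(
    base_url="https://<ARK_DOMAIN>/api/v3/bots",
    api_key=os.environ.get("ARK_API_KEY")
)

completion = client.chat.completions.create(
    model="<YOUR_BOT_ID>",
    messages=[
        {"role": "user", "content": "常见的十字花科植物有哪些?"},
    ],
)
print(completion.choices[0].message.content)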

Common community SDKs and example calls using them are shown below.

Note

Third-party community SDKs are not maintained by the 火山引擎 team; the examples in this document are for reference only.

Python SDK

openai-python

Python>=3.7
pip install --upgrade "openai>=1.0"

Single-turn

import os

from openai import OpenAI

# Reads the API Key from the ARK_API_KEY environment variable
client = OpenAI(
    base_url="https://<ARK_DOMAIN>/api/v3",
    api_key=os.environ.get("ARK_API_KEY")
)

print("----- standard request -----")
completion = client.chat.completions.create(
    model="<YOUR_ENDPOINT_ID>",
    messages=[
        {"role": "system", "content": "你是豆包,是由字节跳动开发的 AI 人工智能助手"},
        {"role": "user", "content": "常见的十字花科植物有哪些?"},
    ],
)
print(completion.choices[0].message.content)

Multi-turn

import os

from openai import OpenAI

client = OpenAI(
    base_url="https://<ARK_DOMAIN>/api/v3",
    api_key=os.environ.get("ARK_API_KEY")
)

print("----- standard request -----")
completion = client.chat.completions.create(
    model="<YOUR_ENDPOINT_ID>",
    messages=[
        {"role": "system", "content": "你是豆包,是由字节跳动开发的 AI 人工智能助手"},
        {"role": "user", "content": "花椰菜是什么?"},
        {"role": "assistant", "content": "花椰菜又称菜花、花菜,是一种常见的蔬菜。"},
        {"role": "user", "content": "再详细点"},
    ],
)
print(completion.choices[0].message.content)

Streaming

import os

from openai import OpenAI

client = OpenAI(
    base_url="https://<ARK_DOMAIN>/api/v3",
    api_key=os.environ.get("ARK_API_KEY")
)

print("----- streaming request -----")
stream = client.chat.completions.create(
    model="<YOUR_ENDPOINT_ID>",
    messages=[
        {"role": "system", "content": "你是豆包,是由字节跳动开发的 AI 人工智能助手"},
        {"role": "user", "content": "常见的十字花科植物有哪些?"},
    ],
    stream=True,
)
for chunk in stream:
    if not chunk.choices:
        continue

    print(chunk.choices[0].delta.content or "", end="")
print()

Async

import asyncio
import os

from openai import AsyncOpenAI

# Use the async client so requests can be awaited
client = AsyncOpenAI(
    base_url="https://<ARK_DOMAIN>/api/v3",
    api_key=os.environ.get("ARK_API_KEY")
)

async def main() -> None:
    stream = await client.chat.completions.create(
        model="<YOUR_ENDPOINT_ID>",
        messages=[
            {"role": "system", "content": "你是豆包,是由字节跳动开发的 AI 人工智能助手"},
            {"role": "user", "content": "常见的十字花科植物有哪些?"},
        ],
        stream=True,
    )
    async for chunk in stream:
        if not chunk.choices:
            continue
        print(chunk.choices[0].delta.content or "", end="")
    print()


asyncio.run(main())

Function call

import os

from openai import OpenAI

client = OpenAI(
    base_url="https://<ARK_DOMAIN>/api/v3",
    api_key=os.environ.get("ARK_API_KEY")
)

print("----- function call request -----")
completion = client.chat.completions.create(
    model="<YOUR_ENDPOINT_ID>",
    messages=[
        {"role": "user", "content": "北京今天天气如何?"},
    ],
    tools=[
        {
            "type": "function",
            "function": {
                "name": "get_current_weather",
                "description": "获取给定地点的天气",
                "parameters": {
                    "type": "object",
                    "properties": {
                        "location": {
                            "type": "string",
                            "description": "地点的位置信息,比如北京"
                        },
                        "unit": {
                            "type": "string",
                            "enum": [
                                "摄氏度",
                                "华氏度"
                            ]
                        }
                    },
                    "required": [
                        "location"
                    ]
                }
            }
        }
    ]
)
print(completion.choices[0])

Setting a custom header

import os

from openai import OpenAI

client = OpenAI(
    base_url="https://<ARK_DOMAIN>/api/v3",
    api_key=os.environ.get("ARK_API_KEY")
)

print("----- standard request -----")
completion = client.chat.completions.create(
    model="<YOUR_ENDPOINT_ID>",
    messages=[
        {"role": "system", "content": "你是豆包,是由字节跳动开发的 AI 人工智能助手"},
        {"role": "user", "content": "常见的十字花科植物有哪些?"},
    ],
    # Custom request id
    extra_headers={"X-Client-Request-Id": "202406251728190000B7EA7A9648AC08D9"}
)
print(completion.choices[0].message.content)

Embeddings

import os

from openai import OpenAI

client = OpenAI(
    base_url="https://<ARK_DOMAIN>/api/v3",
    api_key=os.environ.get("ARK_API_KEY")
)


print("----- embeddings request -----")
resp = client.embeddings.create(
    model="<YOUR_ENDPOINT_ID>",
    input=["花椰菜又称菜花、花菜,是一种常见的蔬菜。"]
)
print(resp)

langchain

pip install "langchain-openai>=0.1.1"

import os

from langchain_openai import ChatOpenAI
from langchain_core.prompts import PromptTemplate


llm = ChatOpenAI(
    openai_api_key=os.environ.get("ARK_API_KEY"), 
    openai_api_base="https://<ARK_DOMAIN>/api/v3",
    model_name="<YOUR_ENDPOINT_ID>"
)

template = """Question: {question}

Answer: Let's think step by step."""

prompt = PromptTemplate.from_template(template)

question = "What NFL team won the Super Bowl in the year Justin Bieber was born?"

llm_chain = prompt | llm

print(llm_chain.invoke(question))