a. OpenAI Python 0.28.1
import os
import openai

openai.api_type = "azure"
openai.api_base = os.getenv("AZURE_OPENAI_ENDPOINT")
openai.api_key = os.getenv("AZURE_OPENAI_KEY")
openai.api_version = "2023-05-15"

response = openai.ChatCompletion.create(
    engine="gpt-35-turbo",  # engine = "deployment_name"
    messages=[
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Does Azure OpenAI support customer managed keys?"},
        {"role": "assistant", "content": "Yes, customer managed keys are supported by Azure OpenAI."},
        {"role": "user", "content": "Do other Azure AI services support this too?"}
    ]
)

print(response)
print(response['choices'][0]['message']['content'])
b. OpenAI Python 1.*
import os
from openai import AzureOpenAI

client = AzureOpenAI(
    azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"),
    api_key=os.getenv("AZURE_OPENAI_KEY"),
    api_version="2023-05-15"
)

response = client.chat.completions.create(
    model="gpt-35-turbo",  # model = "deployment_name"
    messages=[
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Does Azure OpenAI support customer managed keys?"},
        {"role": "assistant", "content": "Yes, customer managed keys are supported by Azure OpenAI."},
        {"role": "user", "content": "Do other Azure AI services support this too?"}
    ]
)

print(response.choices[0].message.content)
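Besides the message content, the 1.x response object also reports token usage, which is handy for sanity-checking prompt size and cost. This is a small follow-on sketch, not part of the original sample; it simply continues from the `response` created above:

# Continuing from the chat completion above: inspect token accounting on the response
usage = response.usage
print(f"prompt tokens: {usage.prompt_tokens}")
print(f"completion tokens: {usage.completion_tokens}")
print(f"total tokens: {usage.total_tokens}")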
a. OpenAI Python 0.28.1
import os
import openai

openai.api_key = os.getenv("AZURE_OPENAI_KEY")
openai.api_base = os.getenv("AZURE_OPENAI_ENDPOINT")  # your endpoint should look like https://YOUR_RESOURCE_NAME.openai.azure.com/
openai.api_type = 'azure'
openai.api_version = '2023-05-15'  # this might change in the future

deployment_name = 'REPLACE_WITH_YOUR_DEPLOYMENT_NAME'  # the custom name you chose for your deployment when you deployed a model

# Send a completion call to generate an answer
print('Sending a test completion job')
start_phrase = 'Write a tagline for an ice cream shop. '
response = openai.Completion.create(engine=deployment_name, prompt=start_phrase, max_tokens=10)
text = response['choices'][0]['text'].replace('\n', '').replace(' .', '.').strip()
print(start_phrase + text)
b. OpenAI Python 1.*
import os
from openai import AzureOpenAI

client = AzureOpenAI(
    api_key=os.getenv("AZURE_OPENAI_KEY"),
    api_version="2023-12-01-preview",
    azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT")
)

deployment_name = 'REPLACE_WITH_YOUR_DEPLOYMENT_NAME'  # the custom name you chose for your deployment when you deployed a model

# Send a completion call to generate an answer
print('Sending a test completion job')
start_phrase = 'Write a tagline for an ice cream shop. '
response = client.completions.create(model=deployment_name, prompt=start_phrase, max_tokens=10)
print(response.choices[0].text)
a. OpenAI Python 0.28.1
import openai

openai.api_type = "azure"
openai.api_key = YOUR_API_KEY
openai.api_base = "https://YOUR_RESOURCE_NAME.openai.azure.com"
openai.api_version = "2023-05-15"

response = openai.Embedding.create(
    input="Your text string goes here",
    engine="YOUR_DEPLOYMENT_NAME"
)

embeddings = response['data'][0]['embedding']
print(embeddings)
b. OpenAI Python 1.*
import os
from openai import AzureOpenAI
client = AzureOpenAI(
    api_key=os.getenv("AZURE_OPENAI_KEY"),
    api_version="2023-05-15",
    azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT")
)

response = client.embeddings.create(
    input="Your text string goes here",
    model="text-embedding-ada-002"  # model = "deployment_name"
)
print(response.model_dump_json(indent=2))
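If you want the vector itself rather than the full JSON dump, the 1.x response exposes it as an attribute, mirroring the `response['data'][0]['embedding']` access in the 0.28.1 example above. A minimal follow-on sketch, continuing from the `response` created above:

# Continuing from the embeddings call above: pull out the raw embedding vector
embedding = response.data[0].embedding
print(len(embedding))   # 1536 dimensions for text-embedding-ada-002
print(embedding[:5])    # first few values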
import os
import asyncio
from openai import AsyncAzureOpenAI
async def main():
    client = AsyncAzureOpenAI(
        api_key=os.getenv("AZURE_OPENAI_KEY"),
        api_version="2023-12-01-preview",
        azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT")
    )
    response = await client.chat.completions.create(
        model="gpt-35-turbo",
        messages=[{"role": "user", "content": "Hello world"}]
    )
    print(response.model_dump_json(indent=2))

asyncio.run(main())
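The async client is also the natural fit for streaming tokens as they arrive instead of waiting for the complete response. The sketch below is a hedged variation on the example above, not part of the original sample; it assumes the same gpt-35-turbo deployment name and environment variables:

import os
import asyncio
from openai import AsyncAzureOpenAI

async def stream_demo():
    client = AsyncAzureOpenAI(
        api_key=os.getenv("AZURE_OPENAI_KEY"),
        api_version="2023-12-01-preview",
        azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT")
    )
    # stream=True returns an async iterator of chunks instead of a single response object
    stream = await client.chat.completions.create(
        model="gpt-35-turbo",  # model = "deployment_name"
        messages=[{"role": "user", "content": "Hello world"}],
        stream=True
    )
    async for chunk in stream:
        # Some chunks (for example content-filter results) can arrive with empty choices, so guard before indexing
        if chunk.choices and chunk.choices[0].delta.content:
            print(chunk.choices[0].delta.content, end="")
    print()

asyncio.run(stream_demo())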
a. OpenAI Python 0.28.1
import os
import openai
import dotenv
import requests

dotenv.load_dotenv()

openai.api_base = os.environ.get("AOAIEndpoint")
openai.api_version = "2023-08-01-preview"
openai.api_type = 'azure'
openai.api_key = os.environ.get("AOAIKey")

def setup_byod(deployment_id: str) -> None:
    """Sets up the OpenAI Python SDK to use your own data for the chat endpoint.

    :param deployment_id: The deployment ID for the model to use with your own data.

    To remove this configuration, simply set openai.requestssession to None.
    """

    class BringYourOwnDataAdapter(requests.adapters.HTTPAdapter):

        def send(self, request, **kwargs):
            request.url = f"{openai.api_base}/openai/deployments/{deployment_id}/extensions/chat/completions?api-version={openai.api_version}"
            return super().send(request, **kwargs)

    session = requests.Session()

    # Mount a custom adapter which will use the extensions endpoint for any call using the given `deployment_id`
    session.mount(
        prefix=f"{openai.api_base}/openai/deployments/{deployment_id}",
        adapter=BringYourOwnDataAdapter()
    )

    openai.requestssession = session

aoai_deployment_id = os.environ.get("AOAIDeploymentId")
setup_byod(aoai_deployment_id)

completion = openai.ChatCompletion.create(
    messages=[{"role": "user", "content": "What are the differences between Azure Machine Learning and Azure AI services?"}],
    deployment_id=os.environ.get("AOAIDeploymentId"),
    dataSources=[  # camelCase is intentional, as this is the format the API expects
        {
            "type": "AzureCognitiveSearch",
            "parameters": {
                "endpoint": os.environ.get("SearchEndpoint"),
                "key": os.environ.get("SearchKey"),
                "indexName": os.environ.get("SearchIndex"),
            }
        }
    ]
)
print(completion)
b. OpenAI Python 1.*
import os
import openai
import dotenv

dotenv.load_dotenv()

endpoint = os.environ.get("AOAIEndpoint")
api_key = os.environ.get("AOAIKey")
deployment = os.environ.get("AOAIDeploymentId")

client = openai.AzureOpenAI(
    base_url=f"{endpoint}/openai/deployments/{deployment}/extensions",
    api_key=api_key,
    api_version="2023-08-01-preview",
)

completion = client.chat.completions.create(
    model=deployment,
    messages=[
        {
            "role": "user",
            "content": "How is Azure machine learning different than Azure OpenAI?",
        },
    ],
    extra_body={
        "dataSources": [  # camelCase is intentional, as this is the format the API expects
            {
                "type": "AzureCognitiveSearch",
                "parameters": {
                    "endpoint": os.environ["SearchEndpoint"],
                    "key": os.environ["SearchKey"],
                    "indexName": os.environ["SearchIndex"]
                }
            }
        ]
    }
)
print(completion.model_dump_json(indent=2))