diff --git a/openapi.yaml b/openapi.yaml index 585fdf99..bc8682ce 100644 --- a/openapi.yaml +++ b/openapi.yaml @@ -71,19 +71,21 @@ paths: ) print(completion.choices[0].message) - node.js: | - const { Configuration, OpenAIApi } = require("openai"); + node.js: |- + import OpenAI from 'openai'; - const configuration = new Configuration({ - apiKey: process.env.OPENAI_API_KEY, - }); - const openai = new OpenAIApi(configuration); + const openai = new OpenAI(); - const completion = await openai.createChatCompletion({ - model: "VAR_model_id", - messages: [{"role": "system", "content": "You are a helpful assistant."}, {role: "user", content: "Hello world"}], - }); - console.log(completion.data.choices[0].message); + async function main() { + const completion = await openai.chat.completions.create({ + messages: [{ role: 'system', content: 'string' }], + model: 'gpt-3.5-turbo', + }); + + console.log(completion.id); + } + + main(); response: &chat_completion_example | { "id": "chatcmpl-123", @@ -130,7 +132,7 @@ paths: openai.api_key = os.getenv("OPENAI_API_KEY") completion = openai.ChatCompletion.create( - model="VAR_model_id", + model="gpt-3.5-turbo", messages=[ {"role": "system", "content": "You are a helpful assistant."}, {"role": "user", "content": "Hello!"} @@ -141,25 +143,27 @@ paths: for chunk in completion: print(chunk.choices[0].delta) - node.js: | - const { Configuration, OpenAIApi } = require("openai"); + node.js: |- + import OpenAI from 'openai'; - const configuration = new Configuration({ - apiKey: process.env.OPENAI_API_KEY, - }); - const openai = new OpenAIApi(configuration); + const openai = new OpenAI(); - const completion = await openai.createChatCompletion({ - model: "VAR_model_id", - messages: [{"role": "system", "content": "You are a helpful assistant."}, {role: "user", content: "Hello world"}], - stream: true - }); + async function main() { + const completion = await openai.chat.completions.create({ + model: 'gpt-3.5-turbo', + messages: [ + {"role": 
"system", "content": "You are a helpful assistant."}, + {"role": "user", "content": "Hello!"} + ], + stream: true, + }); - for await (const chunk of response) { - // https://github.com/openai/openai-node/issues/18 - const parsed = parseChunk(chunk); - console.log(parsed.choices[0].delta); + for await (const chunk of completion) { + console.log(chunk.choices[0].delta.content); + } } + + main(); response: &chat_completion_chunk_example | { "id": "chatcmpl-123", @@ -221,18 +225,22 @@ paths: max_tokens=7, temperature=0 ) - node.js: | - const { Configuration, OpenAIApi } = require("openai"); - const configuration = new Configuration({ - apiKey: process.env.OPENAI_API_KEY, - }); - const openai = new OpenAIApi(configuration); - const response = await openai.createCompletion({ - model: "VAR_model_id", - prompt: "Say this is a test", - max_tokens: 7, - temperature: 0, - }); + node.js: |- + import OpenAI from 'openai'; + + const openai = new OpenAI(); + + async function main() { + const completion = await openai.completions.create({ + model: 'VAR_model_id', + prompt: 'Say this is a test.',
+ max_tokens: 7, + temperature: 0, + }); + + console.log(completion.id); + } + main(); response: | { "id": "cmpl-uqkvlQyYK7bGYrRHQ0eXlWi7", @@ -278,25 +286,23 @@ paths: stream=True ): print(chunk['choices'][0]['text']) - node.js: | - const { Configuration, OpenAIApi } = require("openai"); - const configuration = new Configuration({ - apiKey: process.env.OPENAI_API_KEY, - }); - const openai = new OpenAIApi(configuration); - const response = await openai.createCompletion({ - model: "VAR_model_id", - prompt: "Say this is a test", - max_tokens: 7, - temperature: 0, - stream: true - }); + node.js: |- + import OpenAI from 'openai'; + + const openai = new OpenAI(); - for await (const chunk of response) { - // https://github.com/openai/openai-node/issues/18 - const parsed = parseChunk(chunk); - console.log(parsed.choices[0].text); + async function main() { + const stream = await openai.completions.create({ + model: 'VAR_model_id', + prompt: 'Say this is a test.', + stream: true, + }); + + for await (const chunk of stream) { + console.log(chunk.choices[0].text) + } } + main(); response: | { "id": "cmpl-7iA7iJjj8V2zOkCGvWF2hAkDWBQZe", @@ -357,17 +363,22 @@ paths: input="What day of the wek is it?", instruction="Fix the spelling mistakes" ) - node.js: | - const { Configuration, OpenAIApi } = require("openai"); - const configuration = new Configuration({ - apiKey: process.env.OPENAI_API_KEY, - }); - const openai = new OpenAIApi(configuration); - const response = await openai.createEdit({ - model: "VAR_model_id", - input: "What day of the wek is it?", - instruction: "Fix the spelling mistakes", - }); + node.js: |- + import OpenAI from 'openai'; + + const openai = new OpenAI(); + + async function main() { + const edit = await openai.edits.create({ + model: 'VAR_model_id', + input: 'What day of the wek is it?', + instruction: 'Fix the spelling mistakes.', + }); + + console.log(edit.object); + } + + main(); response: &edit_example | { "object": "edit", @@ -427,17 +438,17 @@ paths: 
n=2, size="1024x1024" ) - node.js: | - const { Configuration, OpenAIApi } = require("openai"); - const configuration = new Configuration({ - apiKey: process.env.OPENAI_API_KEY, - }); - const openai = new OpenAIApi(configuration); - const response = await openai.createImage({ - prompt: "A cute baby sea otter", - n: 2, - size: "1024x1024", - }); + node.js: |- + import OpenAI from 'openai'; + + const openai = new OpenAI(); + + async function main() { + const image = await openai.images.generate({ prompt: 'A cute baby sea otter' }); + + console.log(image.created); + } + main(); response: | { "created": 1589478378, @@ -494,19 +505,22 @@ paths: n=2, size="1024x1024" ) - node.js: | - const { Configuration, OpenAIApi } = require("openai"); - const configuration = new Configuration({ - apiKey: process.env.OPENAI_API_KEY, - }); - const openai = new OpenAIApi(configuration); - const response = await openai.createImageEdit( - fs.createReadStream("otter.png"), - "A cute baby sea otter wearing a beret", - fs.createReadStream("mask.png"), - 2, - "1024x1024" - ); + node.js: |- + import fs from 'fs'; + import OpenAI from 'openai'; + + const openai = new OpenAI(); + + async function main() { + const image = await openai.images.edit({ + image: fs.createReadStream('otter.png'), + mask: fs.createReadStream('mask.png'), + prompt: 'A cute baby sea otter wearing a beret', + }); + + console.log(image.created); + } + main(); response: | { "created": 1589478378, @@ -559,17 +573,18 @@ paths: n=2, size="1024x1024" ) - node.js: | - const { Configuration, OpenAIApi } = require("openai"); - const configuration = new Configuration({ - apiKey: process.env.OPENAI_API_KEY, - }); - const openai = new OpenAIApi(configuration); - const response = await openai.createImageVariation( - fs.createReadStream("otter.png"), - 2, - "1024x1024" - ); + node.js: |- + import OpenAI, { toFile } from 'openai'; + const openai = new OpenAI(); + + async function main() { + const image = await 
openai.images.createVariation({ + image: await toFile(Buffer.from('# my file contents'), 'README.md'), + }); + + console.log(image.created); + } + main(); response: | { "created": 1589478378, @@ -623,16 +638,21 @@ paths: model="text-embedding-ada-002", input="The food was delicious and the waiter..." ) - node.js: | - const { Configuration, OpenAIApi } = require("openai"); - const configuration = new Configuration({ - apiKey: process.env.OPENAI_API_KEY, - }); - const openai = new OpenAIApi(configuration); - const response = await openai.createEmbedding({ - model: "text-embedding-ada-002", - input: "The food was delicious and the waiter...", - }); + node.js: |- + import OpenAI from 'openai'; + + const openai = new OpenAI(); + + async function main() { + const embedding = await openai.embeddings.create({ + model: 'text-embedding-ada-002', + input: 'The quick brown fox jumped over the lazy dog', + }); + + console.log(embedding.object); + } + + main(); response: | { "object": "list", @@ -691,16 +711,20 @@ paths: openai.api_key = os.getenv("OPENAI_API_KEY") audio_file = open("audio.mp3", "rb") transcript = openai.Audio.transcribe("whisper-1", audio_file) - node: | - const { Configuration, OpenAIApi } = require("openai"); - const configuration = new Configuration({ - apiKey: process.env.OPENAI_API_KEY, - }); - const openai = new OpenAIApi(configuration); - const resp = await openai.createTranscription( - fs.createReadStream("audio.mp3"), - "whisper-1" - ); + node: |- + import fs from 'fs'; + import OpenAI from 'openai'; + const openai = new OpenAI(); + + async function main() { + const transcription = await openai.audio.transcriptions.create({ + file: fs.createReadStream('audio.mp3'), + model: 'whisper-1', + }); + + console.log(transcription.text); + } + main(); response: | { "text": "Imagine the wildest idea that you've ever had, and you're curious about how it might scale to something that's a 100, a 1,000 times bigger. This is a place where you can get to do that." 
@@ -783,13 +807,20 @@ paths: import openai openai.api_key = os.getenv("OPENAI_API_KEY") openai.File.list() - node.js: | - const { Configuration, OpenAIApi } = require("openai"); - const configuration = new Configuration({ - apiKey: process.env.OPENAI_API_KEY, - }); - const openai = new OpenAIApi(configuration); - const response = await openai.listFiles(); + node.js: |- + import OpenAI from 'openai'; + + const openai = new OpenAI(); + + async function main() { + const list = await openai.files.list(); + + for await (const file of list) { + console.log(file.id); + } + } + + main(); response: | { "data": [ @@ -850,17 +881,22 @@ paths: file=open("mydata.jsonl", "rb"), purpose='fine-tune' ) - node.js: | - const fs = require("fs"); - const { Configuration, OpenAIApi } = require("openai"); - const configuration = new Configuration({ - apiKey: process.env.OPENAI_API_KEY, - }); - const openai = new OpenAIApi(configuration); - const response = await openai.createFile( - fs.createReadStream("mydata.jsonl"), - "fine-tune" - ); + node.js: |- + import fs from 'fs'; + import OpenAI, { toFile } from 'openai'; + + const openai = new OpenAI(); + + async function main() { + const file = await openai.files.create({ + file: fs.createReadStream('mydata.jsonl'), + purpose: 'fine-tune', + }); + + console.log(file.id); + } + + main(); response: | { "id": "file-XjGxS3KTG0uNmNOK362iJua3", @@ -905,13 +941,18 @@ paths: import openai openai.api_key = os.getenv("OPENAI_API_KEY") openai.File.delete("file-XjGxS3KTG0uNmNOK362iJua3") - node.js: | - const { Configuration, OpenAIApi } = require("openai"); - const configuration = new Configuration({ - apiKey: process.env.OPENAI_API_KEY, - }); - const openai = new OpenAIApi(configuration); - const response = await openai.deleteFile("file-XjGxS3KTG0uNmNOK362iJua3"); + node.js: |- + import OpenAI from 'openai'; + + const openai = new OpenAI(); + + async function main() { + const file = await openai.files.del('string'); + + console.log(file.id); + } + + 
main(); response: | { "id": "file-XjGxS3KTG0uNmNOK362iJua3", @@ -950,13 +991,18 @@ paths: import openai openai.api_key = os.getenv("OPENAI_API_KEY") openai.File.retrieve("file-XjGxS3KTG0uNmNOK362iJua3") - node.js: | - const { Configuration, OpenAIApi } = require("openai"); - const configuration = new Configuration({ - apiKey: process.env.OPENAI_API_KEY, - }); - const openai = new OpenAIApi(configuration); - const response = await openai.retrieveFile("file-XjGxS3KTG0uNmNOK362iJua3"); + node.js: |- + import OpenAI from 'openai'; + + const openai = new OpenAI(); + + async function main() { + const file = await openai.files.retrieve('string'); + + console.log(file.id); + } + + main(); response: | { "id": "file-XjGxS3KTG0uNmNOK362iJua3", @@ -1001,12 +1047,17 @@ paths: openai.api_key = os.getenv("OPENAI_API_KEY") content = openai.File.download("file-XjGxS3KTG0uNmNOK362iJua3") node.js: | - const { Configuration, OpenAIApi } = require("openai"); - const configuration = new Configuration({ - apiKey: process.env.OPENAI_API_KEY, - }); - const openai = new OpenAIApi(configuration); - const response = await openai.downloadFile("file-XjGxS3KTG0uNmNOK362iJua3"); + import OpenAI from 'openai'; + + const openai = new OpenAI(); + + async function main() { + const file = await openai.files.retrieveFileContent('file-XjGxS3KTG0uNmNOK362iJua3'); + + console.log(file); + } + + main(); /fine-tunes: post: @@ -1050,14 +1101,18 @@ paths: openai.api_key = os.getenv("OPENAI_API_KEY") openai.FineTune.create(training_file="file-XGinujblHPwGLSztz8cPS8XY") node.js: | - const { Configuration, OpenAIApi } = require("openai"); - const configuration = new Configuration({ - apiKey: process.env.OPENAI_API_KEY, - }); - const openai = new OpenAIApi(configuration); - const response = await openai.createFineTune({ - training_file: "file-XGinujblHPwGLSztz8cPS8XY", - }); + import OpenAI from 'openai'; + const openai = new OpenAI(); + + async function main() { + const fineTune = await openai.fineTunes.create({ 
+ training_file: 'file-ajSREls59WBbvgSzJSVWxMCB' + }); + + console.log(fineTune.id); + } + + main(); response: | { "id": "ft-AF1WoRqd3aJAHsqc9NY7iL8F", @@ -1121,13 +1176,20 @@ paths: import openai openai.api_key = os.getenv("OPENAI_API_KEY") openai.FineTune.list() - node.js: | - const { Configuration, OpenAIApi } = require("openai"); - const configuration = new Configuration({ - apiKey: process.env.OPENAI_API_KEY, - }); - const openai = new OpenAIApi(configuration); - const response = await openai.listFineTunes(); + node.js: |- + import OpenAI from 'openai'; + + const openai = new OpenAI(); + + async function main() { + const list = await openai.fineTunes.list(); + + for await (const fineTune of list) { + console.log(fineTune.id); + } + } + + main(); response: | { "object": "list", @@ -1189,13 +1251,18 @@ paths: import openai openai.api_key = os.getenv("OPENAI_API_KEY") openai.FineTune.retrieve(id="ft-AF1WoRqd3aJAHsqc9NY7iL8F") - node.js: | - const { Configuration, OpenAIApi } = require("openai"); - const configuration = new Configuration({ - apiKey: process.env.OPENAI_API_KEY, - }); - const openai = new OpenAIApi(configuration); - const response = await openai.retrieveFineTune("ft-AF1WoRqd3aJAHsqc9NY7iL8F"); + node.js: |- + import OpenAI from 'openai'; + + const openai = new OpenAI(); + + async function main() { + const fineTune = await openai.fineTunes.retrieve('ft-AF1WoRqd3aJAHsqc9NY7iL8F'); + + console.log(fineTune.id); + } + + main(); response: &fine_tune_example | { "id": "ft-AF1WoRqd3aJAHsqc9NY7iL8F", @@ -1303,13 +1370,16 @@ paths: import openai openai.api_key = os.getenv("OPENAI_API_KEY") openai.FineTune.cancel(id="ft-AF1WoRqd3aJAHsqc9NY7iL8F") - node.js: | - const { Configuration, OpenAIApi } = require("openai"); - const configuration = new Configuration({ - apiKey: process.env.OPENAI_API_KEY, - }); - const openai = new OpenAIApi(configuration); - const response = await openai.cancelFineTune("ft-AF1WoRqd3aJAHsqc9NY7iL8F"); + node.js: |- + import OpenAI 
from 'openai'; + const openai = new OpenAI(); + + async function main() { + const fineTune = await openai.fineTunes.cancel('ft-AF1WoRqd3aJAHsqc9NY7iL8F'); + + console.log(fineTune.id); + } + main(); response: | { "id": "ft-xhrpBbvVUzYGo8oUO1FY4nI7", @@ -1387,13 +1457,16 @@ paths: import openai openai.api_key = os.getenv("OPENAI_API_KEY") openai.FineTune.list_events(id="ft-AF1WoRqd3aJAHsqc9NY7iL8F") - node.js: | - const { Configuration, OpenAIApi } = require("openai"); - const configuration = new Configuration({ - apiKey: process.env.OPENAI_API_KEY, - }); - const openai = new OpenAIApi(configuration); - const response = await openai.listFineTuneEvents("ft-AF1WoRqd3aJAHsqc9NY7iL8F"); + node.js: |- + import OpenAI from 'openai'; + const openai = new OpenAI(); + + async function main() { + const fineTune = await openai.fineTunes.listEvents('ft-AF1WoRqd3aJAHsqc9NY7iL8F'); + + console.log(fineTune.object); + } + main(); response: | { "object": "list", @@ -1457,13 +1530,19 @@ paths: import openai openai.api_key = os.getenv("OPENAI_API_KEY") openai.Model.list() - node.js: | - const { Configuration, OpenAIApi } = require("openai"); - const configuration = new Configuration({ - apiKey: process.env.OPENAI_API_KEY, - }); - const openai = new OpenAIApi(configuration); - const response = await openai.listModels(); + node.js: |- + import OpenAI from 'openai'; + + const openai = new OpenAI(); + + async function main() { + const list = await openai.models.list(); + + for await (const model of list) { + console.log(model.id); + } + } + main(); response: | { "object": "list", @@ -1525,13 +1604,17 @@ paths: import openai openai.api_key = os.getenv("OPENAI_API_KEY") openai.Model.retrieve("VAR_model_id") - node.js: | - const { Configuration, OpenAIApi } = require("openai"); - const configuration = new Configuration({ - apiKey: process.env.OPENAI_API_KEY, - }); - const openai = new OpenAIApi(configuration); - const response = await openai.retrieveModel("VAR_model_id"); + node.js: |- + 
import OpenAI from 'openai'; + const openai = new OpenAI(); + + async function main() { + const model = await openai.models.retrieve('text-davinci-001'); + + console.log(model.id); + } + + main(); response: &retrieve_model_response | { "id": "VAR_model_id", @@ -1573,13 +1656,17 @@ paths: import openai openai.api_key = os.getenv("OPENAI_API_KEY") openai.Model.delete("curie:ft-acmeco-2021-03-03-21-44-20") - node.js: | - const { Configuration, OpenAIApi } = require("openai"); - const configuration = new Configuration({ - apiKey: process.env.OPENAI_API_KEY, - }); - const openai = new OpenAIApi(configuration); - const response = await openai.deleteModel('curie:ft-acmeco-2021-03-03-21-44-20'); + node.js: |- + import OpenAI from 'openai'; + + const openai = new OpenAI(); + + async function main() { + const model = await openai.models.del('curie:ft-acmeco-2021-03-03-21-44-20'); + + console.log(model.id); + } + main(); response: | { "id": "curie:ft-acmeco-2021-03-03-21-44-20", @@ -1626,14 +1713,16 @@ paths: input="I want to kill them.", ) node.js: | - const { Configuration, OpenAIApi } = require("openai"); - const configuration = new Configuration({ - apiKey: process.env.OPENAI_API_KEY, - }); - const openai = new OpenAIApi(configuration); - const response = await openai.createModeration({ - input: "I want to kill them.", - }); + import OpenAI from 'openai'; + + const openai = new OpenAI(); + + async function main() { + const moderation = await openai.moderations.create({ input: 'I want to kill them.' }); + + console.log(moderation.id); + } + main(); response: &moderation_example | { "id": "modr-XXXXX",