From c81ac58f17a1f1b4fe247b79d77e52c9d31438c1 Mon Sep 17 00:00:00 2001 From: Roger Date: Wed, 8 Mar 2023 16:25:08 -0800 Subject: [PATCH 1/2] Chat endpoint improvements, tests, readme updates, and an alternate Conversation class --- OpenAI_API/Chat/ChatEndpoint.cs | 45 +++++-- OpenAI_API/Chat/ChatMessageRole.cs | 119 +++++++++++++++++ OpenAI_API/Chat/ChatRequest.cs | 17 +-- OpenAI_API/Chat/ChatResult.cs | 206 ++++++++++++++++------------- OpenAI_API/Chat/Conversation.cs | 126 ++++++++++++++++++ OpenAI_Tests/ChatEndpointTests.cs | 143 ++++++++++++++++++++ README.md | 64 +++++++++ 7 files changed, 607 insertions(+), 113 deletions(-) create mode 100644 OpenAI_API/Chat/ChatMessageRole.cs create mode 100644 OpenAI_API/Chat/Conversation.cs create mode 100644 OpenAI_Tests/ChatEndpointTests.cs diff --git a/OpenAI_API/Chat/ChatEndpoint.cs b/OpenAI_API/Chat/ChatEndpoint.cs index 60e3d68..fca675a 100644 --- a/OpenAI_API/Chat/ChatEndpoint.cs +++ b/OpenAI_API/Chat/ChatEndpoint.cs @@ -1,6 +1,7 @@ using OpenAI_API.Models; using System; using System.Collections.Generic; +using System.Linq; using System.Net.Http; using System.Text; using System.Threading.Tasks; @@ -10,8 +11,8 @@ namespace OpenAI_API.Chat /// /// ChatGPT API endpoint. Use this endpoint to send multiple messages and carry on a conversation. /// - public class ChatEndpoint : EndpointBase - { + public class ChatEndpoint : EndpointBase + { /// /// This allows you to set default parameters for every request, for example to set a default temperature or max tokens. For every request, if you do not have a parameter set on the request but do have it set here as a default, the request will automatically pick up the default value. /// @@ -28,6 +29,15 @@ public class ChatEndpoint : EndpointBase /// internal ChatEndpoint(OpenAIAPI api) : base(api) { } + /// + /// Creates an ongoing chat which can easily encapsulate the conversation. This is the simplest way to use the Chat endpoint. + /// + /// + public Conversation CreateConversation() + { + return new Conversation(this, defaultChatRequestArgs: DefaultChatRequestArgs); + } + #region Non-streaming /// @@ -35,7 +45,7 @@ public class ChatEndpoint : EndpointBase /// /// The request to send to the API. /// Asynchronously returns the completion result. Look in its property for the results. - public async Task CreateChatAsync(ChatRequest request) + public async Task CreateChatCompletionAsync(ChatRequest request) { return await HttpPost(postData: request); } @@ -46,10 +56,10 @@ public async Task CreateChatAsync(ChatRequest request) /// The request to send to the API. /// Overrides as a convenience. /// Asynchronously returns the completion result. Look in its property for the results. - public Task CreateChatAsync(ChatRequest request, int numOutputs = 5) + public Task CreateChatCompletionAsync(ChatRequest request, int numOutputs = 5) { request.NumChoicesPerMessage = numOutputs; - return CreateChatAsync(request); + return CreateChatCompletionAsync(request); } /// @@ -66,7 +76,7 @@ public Task CreateChatAsync(ChatRequest request, int numOutputs = 5) /// Maps tokens (specified by their token ID in the tokenizer) to an associated bias value from -100 to 100. Mathematically, the bias is added to the logits generated by the model prior to sampling. The exact effect will vary per model, but values between -1 and 1 should decrease or increase likelihood of selection; values like -100 or 100 should result in a ban or exclusive selection of the relevant token. 
/// One or more sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence. /// Asynchronously returns the completion result. Look in its property for the results. - public Task CreateChatAsync(IEnumerable messages, + public Task CreateChatCompletionAsync(IList messages, Model model = null, double? temperature = null, double? top_p = null, @@ -74,7 +84,7 @@ public Task CreateChatAsync(ChatRequest request, int numOutputs = 5) int? max_tokens = null, double? frequencyPenalty = null, double? presencePenalty = null, - IReadOnlyDictionary logitBias = null, + IReadOnlyDictionary logitBias = null, params string[] stopSequences) { ChatRequest request = new ChatRequest(DefaultChatRequestArgs) @@ -90,23 +100,30 @@ public Task CreateChatAsync(ChatRequest request, int numOutputs = 5) PresencePenalty = presencePenalty ?? DefaultChatRequestArgs.PresencePenalty, LogitBias = logitBias ?? DefaultChatRequestArgs.LogitBias }; - return CreateChatAsync(request); + return CreateChatCompletionAsync(request); } /// - /// Ask the API to complete the request using the specified parameters. This is non-streaming, so it will wait until the API returns the full result. Any non-specified parameters will fall back to default values specified in if present. + /// Ask the API to complete the request using the specified message(s). Any parameters will fall back to default values specified in if present. /// /// The messages to use in the generation. - /// - public Task CreateChatAsync(IEnumerable messages) + /// The with the API response. + public Task CreateChatCompletionAsync(params ChatMessage[] messages) { ChatRequest request = new ChatRequest(DefaultChatRequestArgs) { Messages = messages }; - return CreateChatAsync(request); + return CreateChatCompletionAsync(request); } + /// + /// Ask the API to complete the request using the specified message(s). Any parameters will fall back to default values specified in if present. + /// + /// The user message or messages to use in the generation. All strings are assumed to be of Role + /// The with the API response. + public Task CreateChatCompletionAsync(params string[] userMessages) => CreateChatCompletionAsync(userMessages.Select(m => new ChatMessage(ChatMessageRole.User, m)).ToArray()); + #endregion #region Streaming @@ -168,7 +185,7 @@ public IAsyncEnumerable StreamChatEnumerableAsync(ChatRequest reques /// Maps tokens (specified by their token ID in the tokenizer) to an associated bias value from -100 to 100. Mathematically, the bias is added to the logits generated by the model prior to sampling. The exact effect will vary per model, but values between -1 and 1 should decrease or increase likelihood of selection; values like -100 or 100 should result in a ban or exclusive selection of the relevant token. /// One or more sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence. /// An async enumerable with each of the results as they come in. See the C# docs for more details on how to consume an async enumerable. - public IAsyncEnumerable StreamChatEnumerableAsync(IEnumerable messages, + public IAsyncEnumerable StreamChatEnumerableAsync(IList messages, Model model = null, double? temperature = null, double? top_p = null, @@ -176,7 +193,7 @@ public IAsyncEnumerable StreamChatEnumerableAsync(ChatRequest reques int? max_tokens = null, double? frequencyPenalty = null, double? 
presencePenalty = null, - IReadOnlyDictionary logitBias = null, + IReadOnlyDictionary logitBias = null, params string[] stopSequences) { ChatRequest request = new ChatRequest(DefaultChatRequestArgs) diff --git a/OpenAI_API/Chat/ChatMessageRole.cs b/OpenAI_API/Chat/ChatMessageRole.cs new file mode 100644 index 0000000..055d094 --- /dev/null +++ b/OpenAI_API/Chat/ChatMessageRole.cs @@ -0,0 +1,119 @@ +using Newtonsoft.Json; +using System; +using System.Collections.Generic; +using System.ComponentModel.Design; +using System.Text; + +namespace OpenAI_API.Chat +{ + /// + /// Represents the Role of a . Typically, a conversation is formatted with a system message first, followed by alternating user and assistant messages. See the OpenAI docs for more details about usage. + /// + public class ChatMessageRole : IEquatable + { + /// + /// Contructor is prvate to force usage of strongly typed values + /// + /// + private ChatMessageRole(string value) { Value = value; } + + /// + /// Gets the singleton instance of based on the string value. + /// + /// Must be one of "system", "user", or "assistant" + /// + public static ChatMessageRole FromString(string roleName) + { + switch (roleName) + { + case "system": + return ChatMessageRole.System; + case "user": + return ChatMessageRole.User; + case "assistant": + return ChatMessageRole.Assistant; + default: + return null; + } + } + + private string Value { get; set; } + + /// + /// The system message helps set the behavior of the assistant. + /// + public static ChatMessageRole System { get { return new ChatMessageRole("system"); } } + /// + /// The user messages help instruct the assistant. They can be generated by the end users of an application, or set by a developer as an instruction. + /// + public static ChatMessageRole User { get { return new ChatMessageRole("user"); } } + /// + /// The assistant messages help store prior responses. They can also be written by a developer to help give examples of desired behavior. + /// + public static ChatMessageRole Assistant { get { return new ChatMessageRole("assistant"); } } + + /// + /// Gets the string value for this role to pass to the API + /// + /// The role as a string + public override string ToString() + { + return Value; + } + + /// + /// Determines whether this instance and a specified object have the same value. + /// + /// The ChatMessageRole to compare to this instance + /// true if obj is a ChatMessageRole and its value is the same as this instance; otherwise, false. If obj is null, the method returns false + public override bool Equals(object obj) + { + return Value.Equals((obj as ChatMessageRole).Value); + } + + /// + /// Returns the hash code for this object + /// + /// A 32-bit signed integer hash code + public override int GetHashCode() + { + return Value.GetHashCode(); + } + + /// + /// Determines whether this instance and a specified object have the same value. + /// + /// The ChatMessageRole to compare to this instance + /// true if other's value is the same as this instance; otherwise, false.
If other is null, the method returns false + public bool Equals(ChatMessageRole other) + { + return Value.Equals(other.Value); + } + + /// + /// Gets the string value for this role to pass to the API + /// + /// The ChatMessageRole to convert + public static implicit operator String(ChatMessageRole value) { return value; } + + ///// + ///// Used during the Json serialization process + ///// + //internal class ChatMessageRoleJsonConverter : JsonConverter + //{ + // public override void WriteJson(JsonWriter writer, ChatMessageRole value, JsonSerializer serializer) + // { + // writer.WriteValue(value.ToString()); + // } + + // public override ChatMessageRole ReadJson(JsonReader reader, Type objectType, ChatMessageRole existingValue, bool hasExistingValue, JsonSerializer serializer) + // { + // if (reader.TokenType != JsonToken.String) + // { + // throw new JsonSerializationException(); + // } + // return new ChatMessageRole(reader.ReadAsString()); + // } + //} + } +} diff --git a/OpenAI_API/Chat/ChatRequest.cs b/OpenAI_API/Chat/ChatRequest.cs index 4b9fb37..166afb2 100644 --- a/OpenAI_API/Chat/ChatRequest.cs +++ b/OpenAI_API/Chat/ChatRequest.cs @@ -22,7 +22,7 @@ public class ChatRequest /// The messages to send with this Chat Request /// [JsonProperty("messages")] - public IEnumerable Messages { get; set; } + public IList Messages { get; set; } /// /// What sampling temperature to use. Higher values means the model will take more risks. Try 0.9 for more creative applications, and 0 (argmax sampling) for ones with a well-defined answer. It is generally recommend to use this or but not both. @@ -52,7 +52,7 @@ public class ChatRequest /// This is only used for serializing the request into JSON, do not use it directly. /// [JsonProperty("stop")] - public object CompiledStop + internal object CompiledStop { get { @@ -109,9 +109,9 @@ public string StopSequence /// Accepts a json object that maps tokens(specified by their token ID in the tokenizer) to an associated bias value from -100 to 100. /// Mathematically, the bias is added to the logits generated by the model prior to sampling. /// The exact effect will vary per model, but values between -1 and 1 should decrease or increase likelihood of selection; values like -100 or 100 should result in a ban or exclusive selection of the relevant token. - /// + /// [JsonProperty("logit_bias")] - public IReadOnlyDictionary LogitBias { get; set; } + public IReadOnlyDictionary LogitBias { get; set; } /// /// A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. @@ -123,16 +123,17 @@ public string StopSequence /// Creates a new, empty /// public ChatRequest() - { - this.Model = OpenAI_API.Models.Model.ChatGPTTurbo; - } + { } /// /// Create a new chat request using the data from the input chat request. 
/// /// public ChatRequest(ChatRequest basedOn) - { + { + if (basedOn == null) + return; + this.Model = basedOn.Model; this.Messages = basedOn.Messages; this.Temperature = basedOn.Temperature; diff --git a/OpenAI_API/Chat/ChatResult.cs b/OpenAI_API/Chat/ChatResult.cs index cfe4137..0deb91c 100644 --- a/OpenAI_API/Chat/ChatResult.cs +++ b/OpenAI_API/Chat/ChatResult.cs @@ -9,107 +9,131 @@ namespace OpenAI_API.Chat /// Represents a result from calling the Chat API /// public class ChatResult : ApiResultBase - { - /// - /// The identifier of the result, which may be used during troubleshooting - /// - [JsonProperty("id")] - public string Id { get; set; } + { + /// + /// The identifier of the result, which may be used during troubleshooting + /// + [JsonProperty("id")] + public string Id { get; set; } - /// - /// The list of choices that the user was presented with during the chat interaction - /// - [JsonProperty("choices")] - public IReadOnlyList Choices { get; set; } + /// + /// The list of choices that the user was presented with during the chat interaction + /// + [JsonProperty("choices")] + public IReadOnlyList Choices { get; set; } - /// - /// The usage statistics for the chat interaction - /// - [JsonProperty("usage")] - public ChatUsage Usage { get; set; } - } + /// + /// The usage statistics for the chat interaction + /// + [JsonProperty("usage")] + public ChatUsage Usage { get; set; } - /// - /// A message received from the API, including the message text, index, and reason why the message finished. - /// - public class ChatChoice - { - /// - /// The index of the choice in the list of choices - /// - [JsonProperty("index")] - public int Index { get; set; } + /// + /// A convenience method to return the content of the message in the first choice of this response + /// + /// The content of the message, not including . + public override string ToString() + { + if (Choices != null && Choices.Count > 0) + return Choices[0].ToString(); + else + return null; + } + } - /// - /// The message that was presented to the user as the choice - /// - [JsonProperty("message")] - public ChatMessage Message { get; set; } + /// + /// A message received from the API, including the message text, index, and reason why the message finished. + /// + public class ChatChoice + { + /// + /// The index of the choice in the list of choices + /// + [JsonProperty("index")] + public int Index { get; set; } + + /// + /// The message that was presented to the user as the choice + /// + [JsonProperty("message")] + public ChatMessage Message { get; set; } + + /// + /// The reason why the chat interaction ended after this choice was presented to the user + /// + [JsonProperty("finish_reason")] + public string FinishReason { get; set; } - /// - /// The reason why the chat interaction ended after this choice was presented to the user - /// - [JsonProperty("finish_reason")] - public string FinishReason { get; set; } + /// + /// Partial message "delta" from a stream. For example, the result from StreamChatEnumerableAsync. + /// If this result object is not from a stream, this will be null + /// + [JsonProperty("delta")] + public ChatMessage Delta { get; set; } - /// - /// Partial message "delta" from a stream. For example, the result from StreamChatEnumerableAsync. 
- /// If this result object is not from a stream, this will be null - /// - [JsonProperty("delta")] - public ChatMessage Delta { get; set; } - } + /// + /// A convenience method to return the content of the message in this response + /// + /// The content of the message in this response, not including . + public override string ToString() + { + return Message.Content; + } + } - /// - /// Chat message sent or received from the API. Includes who is speaking in the "role" and the message text in the "content" - /// - public class ChatMessage - { - /// - /// Constructor for a new Chat Message - /// - /// The role of the message, which can be "system", "assistant" or "user" - /// The text to send in the message - public ChatMessage(string role, string content) - { - this.Role = role; - this.Content = content; - } + /// + /// Chat message sent or received from the API. Includes who is speaking in the "role" and the message text in the "content" + /// + public class ChatMessage + { + public ChatMessage() { } - /// - /// The role of the message, which can be "system", "assistant" or "user" - /// - [JsonProperty("role")] - public string Role { get; set; } + /// + /// Constructor for a new Chat Message + /// + /// The role of the message, which can be "system", "assistant" or "user" + /// The text to send in the message + public ChatMessage(ChatMessageRole role, string content) + { + this.Role = role; + this.Content = content; + } - /// - /// The content of the message - /// - [JsonProperty("content")] - public string Content { get; set; } - } + [JsonProperty("role")] + internal string rawRole { get; set; } - /// - /// How many tokens were used in this chat message. - /// - public class ChatUsage - { - /// - /// The number of prompt tokens used during the chat interaction - /// - [JsonProperty("prompt_tokens")] - public int PromptTokens { get; set; } + /// + /// The role of the message, which can be "system", "assistant" or "user" + /// + [JsonIgnore] + public ChatMessageRole Role + { + get + { + return ChatMessageRole.FromString(rawRole); + } + set + { + rawRole = value.ToString(); + } + } - /// - /// The number of completion tokens used during the chat interaction - /// - [JsonProperty("completion_tokens")] - public int CompletionTokens { get; set; } + /// + /// The content of the message + /// + [JsonProperty("content")] + public string Content { get; set; } + } - /// - /// The total number of tokens used during the chat interaction - /// - [JsonProperty("total_tokens")] - public int TotalTokens { get; set; } - } + /// + /// How many tokens were used in this chat message. + /// + public class ChatUsage : Usage + { + /// + /// The number of completion tokens used during the chat interaction + /// + [JsonProperty("completion_tokens")] + public int CompletionTokens { get; set; } + } } diff --git a/OpenAI_API/Chat/Conversation.cs b/OpenAI_API/Chat/Conversation.cs new file mode 100644 index 0000000..6bd1ac7 --- /dev/null +++ b/OpenAI_API/Chat/Conversation.cs @@ -0,0 +1,126 @@ +using System; +using System.Collections.Generic; +using System.Data; +using System.Linq; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using static System.Net.WebRequestMethods; + +namespace OpenAI_API.Chat +{ + /// + /// Represents on ongoing chat with back-and-forth interactions between the user and the chatbot. This is the simplest way to interact with the ChatGPT API, rather than manually using the ChatEnpoint methods. You do lose some flexibility though. 
+ /// + public class Conversation + { + /// + /// An internal reference to the API endpoint, needed for API requests + /// + private ChatEndpoint _endpoint; + + /// + /// Allows setting the parameters to use when calling the ChatGPT API. Can be useful for setting temperature, presence_penalty, and more. See OpenAI documentation for a list of possible parameters to tweak. + /// + public ChatRequest RequestParameters { get; private set; } + + /// + /// Specifies the model to use for ChatGPT requests. This is just a shorthand to access .Model + /// + public OpenAI_API.Models.Model Model + { + get + { + return RequestParameters.Model; + } + set + { + RequestParameters.Model = value; + } + } + + /// + /// After calling , this contains the full response object which can contain useful metadata like token usages, , etc. This is overwritten with every call to and only contains the most recent result. + /// + public ChatResult MostResentAPIResult { get; private set; } + + /// + /// Creates a new conversation with ChatGPT + /// + /// A reference to the API endpoint, needed for API requests. Generally should be . + /// Optionally specify the model to use for ChatGPT requests. If not specified, uses .Model or falls back to + /// Allows setting the parameters to use when calling the ChatGPT API. Can be useful for setting temperature, presence_penalty, and more. See OpenAI documentation for a list of possible parameters to tweak. + public Conversation(ChatEndpoint endpoint, OpenAI_API.Models.Model model = null, ChatRequest defaultChatRequestArgs = null) + { + RequestParameters = new ChatRequest(defaultChatRequestArgs); + if (model != null) + RequestParameters.Model = model; + if (RequestParameters.Model == null) + RequestParameters.Model = Models.Model.ChatGPTTurbo; + + _Messages = new List(); + _endpoint = endpoint; + RequestParameters.NumChoicesPerMessage = 1; + RequestParameters.Stream = false; + } + + /// + /// A list of messages exchanged so far. Do not modify this list directly. Instead, use , , , or . + /// + public IReadOnlyList Messages { get => _Messages; } + private List _Messages; + + /// + /// Appends a to the chat history + /// + /// The to append to the chat history + public void AppendMessage(ChatMessage message) + { + _Messages.Add(message); + } + + /// + /// Creates and appends a to the chat history + /// + /// The for the message. Typically, a conversation is formatted with a system message first, followed by alternating user and assistant messages. See the OpenAI docs for more details about usage. + /// The content of the message + public void AppendMessage(ChatMessageRole role, string content) => this.AppendMessage(new ChatMessage(role, content)); + + /// + /// Creates and appends a to the chat history with the Role of . The user messages help instruct the assistant. They can be generated by the end users of an application, or set by a developer as an instruction. + /// + /// Text content generated by the end users of an application, or set by a developer as an instruction + public void AppendUserInput(string content) => this.AppendMessage(new ChatMessage(ChatMessageRole.User, content)); + /// + /// Creates and appends a to the chat history with the Role of . The system message helps set the behavior of the assistant. + /// + /// Text content that helps set the behavior of the assistant + public void AppendSystemMessage(string content) => this.AppendMessage(new ChatMessage(ChatMessageRole.System, content)); + /// + /// Creates and appends a to the chat history with the Role of .
Assistant messages can be written by a developer to help give examples of desired behavior. + /// + /// Text content written by a developer to help give examples of desired behavior + public void AppendExampleChatbotOutput(string content) => this.AppendMessage(new ChatMessage(ChatMessageRole.Assistant, content)); + + /// + /// Calls the API to get a response, which is appended to the current chat's as an . + /// + /// The string of the response from the chatbot API + public async Task GetResponseFromChatbot() + { + ChatRequest req = new ChatRequest(RequestParameters); + req.Messages = _Messages.ToList(); + + var res = await _endpoint.CreateChatCompletionAsync(req); + MostResentAPIResult = res; + + if (res.Choices.Count > 0) + { + var newMsg = res.Choices[0].Message; + AppendMessage(res.Choices[0].Message); + return res.Choices[0].Message.Content; + } + return null; + } + } +} diff --git a/OpenAI_Tests/ChatEndpointTests.cs b/OpenAI_Tests/ChatEndpointTests.cs new file mode 100644 index 0000000..60714fa --- /dev/null +++ b/OpenAI_Tests/ChatEndpointTests.cs @@ -0,0 +1,143 @@ +using NUnit.Framework; +using OpenAI_API.Chat; +using OpenAI_API.Completions; +using OpenAI_API.Models; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; + +namespace OpenAI_Tests +{ + public class ChatEndpointTests + { + [SetUp] + public void Setup() + { + OpenAI_API.APIAuthentication.Default = new OpenAI_API.APIAuthentication(Environment.GetEnvironmentVariable("TEST_OPENAI_SECRET_KEY")); + } + + [Test] + public void BasicCompletion() + { + var api = new OpenAI_API.OpenAIAPI(); + + Assert.IsNotNull(api.Chat); + + var results = api.Chat.CreateChatCompletionAsync(new ChatRequest() + { + Model = Model.ChatGPTTurbo, + Temperature = 0.1, + MaxTokens = 5, + Messages = new ChatMessage[] { + new ChatMessage(ChatMessageRole.User, "Hello!") + } + }).Result; + Assert.IsNotNull(results); + if (results.CreatedUnixTime.HasValue) + { + Assert.NotZero(results.CreatedUnixTime.Value); + Assert.NotNull(results.Created); + Assert.Greater(results.Created.Value, new DateTime(2018, 1, 1)); + Assert.Less(results.Created.Value, DateTime.Now.AddDays(1)); + } + else + { + Assert.Null(results.Created); + } + Assert.NotNull(results.Object); + Assert.NotNull(results.Choices); + Assert.NotZero(results.Choices.Count); + Assert.AreEqual(ChatMessageRole.Assistant, results.Choices[0].Message.Role); + Assert.That(results.Choices.All(c => c.Message.Role.Equals(ChatMessageRole.Assistant))); + Assert.That(results.Choices.All(c => c.Message.Content.Length > 1)); + } + + [Test] + public void SimpleCompletion() + { + var api = new OpenAI_API.OpenAIAPI(); + + Assert.IsNotNull(api.Chat); + + var results = api.Chat.CreateChatCompletionAsync("Hello!").Result; + Assert.IsNotNull(results); + if (results.CreatedUnixTime.HasValue) + { + Assert.NotZero(results.CreatedUnixTime.Value); + Assert.NotNull(results.Created); + Assert.Greater(results.Created.Value, new DateTime(2018, 1, 1)); + Assert.Less(results.Created.Value, DateTime.Now.AddDays(1)); + } + else + { + Assert.Null(results.Created); + } + Assert.NotNull(results.Object); + Assert.NotNull(results.Choices); + Assert.NotZero(results.Choices.Count); + Assert.AreEqual(ChatMessageRole.Assistant, results.Choices[0].Message.Role); + Assert.That(results.Choices.All(c => c.Message.Role.Equals(ChatMessageRole.Assistant))); + Assert.That(results.Choices.All(c => c.Message.Content.Length > 1)); + Assert.IsNotEmpty(results.ToString()); + } + + [Test] + 
public void ChatBackAndForth() + { + var api = new OpenAI_API.OpenAIAPI(); + + var chat = api.Chat.CreateConversation(); + + chat.AppendSystemMessage("You are a teacher who helps children understand if things are animals or not. If the user tells you an animal, you say \"yes\". If the user tells you something that is not an animal, you say \"no\". You only ever respond with \"yes\" or \"no\". You do not say anything else."); + chat.AppendUserInput("Is this an animal? Cat"); + chat.AppendExampleChatbotOutput("Yes"); + chat.AppendUserInput("Is this an animal? House"); + chat.AppendExampleChatbotOutput("No"); + chat.AppendUserInput("Is this an animal? Dog"); + string res = chat.GetResponseFromChatbot().Result; + Assert.NotNull(res); + Assert.IsNotEmpty(res); + Assert.AreEqual("Yes", res.Trim()); + chat.AppendUserInput("Is this an animal? Chair"); + res = chat.GetResponseFromChatbot().Result; + Assert.NotNull(res); + Assert.IsNotEmpty(res); + Assert.AreEqual("No", res.Trim()); + + foreach (ChatMessage msg in chat.Messages) + { + Console.WriteLine($"{msg.Role}: {msg.Content}"); + } + } + + [Test] + public async Task StreamCompletionEnumerableAsync_ShouldStreamData() + { + var api = new OpenAI_API.OpenAIAPI(); + + Assert.IsNotNull(api.Chat); + + var req = new ChatRequest() + { + Model = Model.ChatGPTTurbo, + Temperature = 0.2, + MaxTokens = 500, + Messages = new ChatMessage[] { + new ChatMessage(ChatMessageRole.User, "Please explain how mountains are formed in great detail.") + } + }; + + var chatResults = new List(); + await foreach (var res in api.Chat.StreamChatEnumerableAsync(req)) + { + chatResults.Add(res); + } + + Assert.Greater(chatResults.Count, 100); + Assert.That(chatResults.Select(cr => cr.Choices[0].Delta.Content).Count(c => !string.IsNullOrEmpty(c)) > 50); + } + + } +} diff --git a/README.md b/README.md index d4e0725..79ce4f0 100644 --- a/README.md +++ b/README.md @@ -17,6 +17,7 @@ Console.WriteLine(result); * [Requirements](#requirements) * [Installation](#install-from-nuget) * [Authentication](#authentication) + * [ChatGPT API](#chatgpt) * [Completions API](#completions) * [Streaming completion results](#streaming) * [Embeddings API](#embeddings) @@ -73,7 +74,70 @@ You may optionally include an openAIOrganization (OPENAI_ORGANIZATION in env or OpenAIAPI api = new OpenAIAPI(new APIAuthentication("YOUR_API_KEY","org-yourOrgHere")); ``` +### ChatGPT +The Chat API is accessed via `OpenAIAPI.Chat`. There are two ways to use the Chat Endpoint, either via simplified conversations or with the full Request/Response methods. +#### Chat Conversations +The Conversation Class allows you to easily interact with ChatGPT by adding messages to a chat and asking ChatGPT to reply. +```csharp +var chat = api.Chat.CreateConversation(); + +/// give instruction as System +chat.AppendSystemMessage("You are a teacher who helps children understand if things are animals or not. If the user tells you an animal, you say \"yes\". If the user tells you something that is not an animal, you say \"no\". You only ever respond with \"yes\" or \"no\". You do not say anything else."); + +// give a few examples as user and assistant +chat.AppendUserInput("Is this an animal? Cat"); +chat.AppendExampleChatbotOutput("Yes"); +chat.AppendUserInput("Is this an animal? House"); +chat.AppendExampleChatbotOutput("No"); + +// now let's ask it a question' +chat.AppendUserInput("Is this an animal? 
Dog"); +// and get the response +string response = await chat.GetResponseFromChatbot(); +Console.WriteLine(response); // "Yes" + +// and continue the conversation by asking another +chat.AppendUserInput("Is this an animal? Chair"); +// and get another response +response = await chat.GetResponseFromChatbot(); +Console.WriteLine(response); // "No" + +// the entire chat history is available in chat.Messages +foreach (ChatMessage msg in chat.Messages) +{ + Console.WriteLine($"{msg.Role}: {msg.Content}"); +} +``` + +#### Chat Endpoint Requests +You can access full control of the Chat API by using the `OpenAIAPI.Chat.CreateChatCompletionAsync()` and related methods. + +```csharp +async Task CreateChatCompletionAsync(ChatRequest request); + +// for example +var results = await api.Chat.CreateChatCompletionAsync(new ChatRequest() + { + Model = Model.ChatGPTTurbo, + Temperature = 0.1, + MaxTokens = 50, + Messages = new ChatMessage[] { + new ChatMessage(ChatMessageRole.User, "Hello!") + } + }); +// or +results = await api.Chat.CreateChatCompletionAsync("Hello!"); + +var reply = results.Choices[0].Message; +Console.WriteLine($"{reply.Role}: {reply.Content.Trim()}"); +// or +Console.WriteLine(results); +``` + +It returns a `ChatResult` which is mostly metadata, so use its `.ToString()` method to get the text if all you want is the assistant's reply text. + +There's also an async streaming API which works similarly to the [Completions endpoint streaming results](#streaming). ### Completions The Completion API is accessed via `OpenAIAPI.Completions`: From 6e0ca11395bceb030cd84a4ae5acba66990c7254 Mon Sep 17 00:00:00 2001 From: Roger Date: Wed, 8 Mar 2023 16:33:57 -0800 Subject: [PATCH 2/2] Fix ChatMessageRole to be singletons, pull ChatMessage to separate file --- OpenAI_API/Chat/ChatMessage.cs | 57 ++++++++++++++++++++++++++++++ OpenAI_API/Chat/ChatMessageRole.cs | 10 +++--- OpenAI_API/Chat/ChatResult.cs | 44 ----------------------- OpenAI_Tests/ChatEndpointTests.cs | 1 + 4 files changed, 63 insertions(+), 49 deletions(-) create mode 100644 OpenAI_API/Chat/ChatMessage.cs diff --git a/OpenAI_API/Chat/ChatMessage.cs b/OpenAI_API/Chat/ChatMessage.cs new file mode 100644 index 0000000..0a68243 --- /dev/null +++ b/OpenAI_API/Chat/ChatMessage.cs @@ -0,0 +1,57 @@ +using Newtonsoft.Json; +using System; +using System.Collections.Generic; +using System.Text; + +namespace OpenAI_API.Chat +{ + /// + /// Chat message sent or received from the API.
Includes who is speaking in the "role" and the message text in the "content" + /// + public class ChatMessage + { + /// + /// Creates an empty , with defaulting to + /// + public ChatMessage() + { + this.Role = ChatMessageRole.User; + } + + /// + /// Constructor for a new Chat Message + /// + /// The role of the message, which can be "system", "assistant" or "user" + /// The text to send in the message + public ChatMessage(ChatMessageRole role, string content) + { + this.Role = role; + this.Content = content; + } + + [JsonProperty("role")] + internal string rawRole { get; set; } + + /// + /// The role of the message, which can be "system", "assistant" or "user" + /// + [JsonIgnore] + public ChatMessageRole Role + { + get + { + return ChatMessageRole.FromString(rawRole); + } + set + { + rawRole = value.ToString(); + } + } + + /// + /// The content of the message + /// + [JsonProperty("content")] + public string Content { get; set; } + } +} diff --git a/OpenAI_API/Chat/ChatMessageRole.cs b/OpenAI_API/Chat/ChatMessageRole.cs index 055d094..3d4ed99 100644 --- a/OpenAI_API/Chat/ChatMessageRole.cs +++ b/OpenAI_API/Chat/ChatMessageRole.cs @@ -12,7 +12,7 @@ namespace OpenAI_API.Chat public class ChatMessageRole : IEquatable { /// - /// Contructor is prvate to force usage of strongly typed values + /// Contructor is private to force usage of strongly typed values /// /// private ChatMessageRole(string value) { Value = value; } @@ -37,20 +37,20 @@ public static ChatMessageRole FromString(string roleName) } } - private string Value { get; set; } + private string Value { get; } /// /// The system message helps set the behavior of the assistant. /// - public static ChatMessageRole System { get { return new ChatMessageRole("system"); } } + public static ChatMessageRole System { get; } = new ChatMessageRole("system"); /// /// The user messages help instruct the assistant. They can be generated by the end users of an application, or set by a developer as an instruction. /// - public static ChatMessageRole User { get { return new ChatMessageRole("user"); } } + public static ChatMessageRole User { get; } = new ChatMessageRole("user"); /// /// The assistant messages help store prior responses. They can also be written by a developer to help give examples of desired behavior. /// - public static ChatMessageRole Assistant { get { return new ChatMessageRole("assistant"); } } + public static ChatMessageRole Assistant { get; } = new ChatMessageRole("assistant"); /// /// Gets the string value for this role to pass to the API diff --git a/OpenAI_API/Chat/ChatResult.cs b/OpenAI_API/Chat/ChatResult.cs index 0deb91c..9138d19 100644 --- a/OpenAI_API/Chat/ChatResult.cs +++ b/OpenAI_API/Chat/ChatResult.cs @@ -81,50 +81,6 @@ public override string ToString() } } - /// - /// Chat message sent or received from the API. 
Includes who is speaking in the "role" and the message text in the "content" - /// - public class ChatMessage - { - public ChatMessage() { } - - /// - /// Constructor for a new Chat Message - /// - /// The role of the message, which can be "system", "assistant" or "user" - /// The text to send in the message - public ChatMessage(ChatMessageRole role, string content) - { - this.Role = role; - this.Content = content; - } - - [JsonProperty("role")] - internal string rawRole { get; set; } - - /// - /// The role of the message, which can be "system", "assistant" or "user" - /// - [JsonIgnore] - public ChatMessageRole Role - { - get - { - return ChatMessageRole.FromString(rawRole); - } - set - { - rawRole = value.ToString(); - } - } - - /// - /// The content of the message - /// - [JsonProperty("content")] - public string Content { get; set; } - } - /// /// How many tokens were used in this chat message. /// diff --git a/OpenAI_Tests/ChatEndpointTests.cs b/OpenAI_Tests/ChatEndpointTests.cs index 60714fa..174a7f9 100644 --- a/OpenAI_Tests/ChatEndpointTests.cs +++ b/OpenAI_Tests/ChatEndpointTests.cs @@ -79,6 +79,7 @@ public void SimpleCompletion() Assert.NotZero(results.Choices.Count); Assert.AreEqual(ChatMessageRole.Assistant, results.Choices[0].Message.Role); Assert.That(results.Choices.All(c => c.Message.Role.Equals(ChatMessageRole.Assistant))); + Assert.That(results.Choices.All(c => c.Message.Role == ChatMessageRole.Assistant)); Assert.That(results.Choices.All(c => c.Message.Content.Length > 1)); Assert.IsNotEmpty(results.ToString()); }
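The README section above notes that the Chat endpoint also has an async streaming API that works like the Completions streaming results, but the patches only exercise it in the unit test. Below is a minimal, illustrative sketch (not part of either patch) of consuming `StreamChatEnumerableAsync`, assuming only the types and signatures introduced above (`OpenAIAPI`, `ChatRequest`, `ChatMessage`, `ChatResult.Choices[0].Delta`) and that authentication has already been configured, for example via `APIAuthentication.Default` as in the tests:

```csharp
// Illustrative sketch only -- not part of either patch. Assumes the OpenAIAPI entry point,
// ChatRequest, ChatMessage, and StreamChatEnumerableAsync(ChatRequest) shown above, and
// that an API key has been configured (e.g. APIAuthentication.Default, as in the tests).
using System;
using System.Threading.Tasks;
using OpenAI_API;
using OpenAI_API.Chat;
using OpenAI_API.Models;

public static class StreamingChatSketch
{
    public static async Task Main()
    {
        var api = new OpenAIAPI();

        var request = new ChatRequest()
        {
            Model = Model.ChatGPTTurbo,
            Temperature = 0.2,
            MaxTokens = 500,
            Messages = new ChatMessage[]
            {
                new ChatMessage(ChatMessageRole.User, "Please explain how mountains are formed.")
            }
        };

        // Each streamed ChatResult carries a partial message in Choices[0].Delta,
        // mirroring the StreamCompletionEnumerableAsync_ShouldStreamData test above.
        await foreach (ChatResult partial in api.Chat.StreamChatEnumerableAsync(request))
        {
            string token = partial.Choices[0].Delta?.Content;
            if (!string.IsNullOrEmpty(token))
                Console.Write(token);
        }
    }
}
```

As in the `StreamCompletionEnumerableAsync_ShouldStreamData` test, each streamed `ChatResult` carries an incremental `Delta` rather than a full `Message`, so the content pieces are concatenated as they arrive.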