unit OpenAI.Chat;
interface
uses
System.SysUtils, OpenAI.API.Params, OpenAI.API, OpenAI.Chat.Functions,
System.Classes, REST.JsonReflect, System.JSON, OpenAI.Types;
{$SCOPEDENUMS ON}
type
/// <summary>
/// Type of message role
/// </summary>
TMessageRole = (
/// <summary>
/// System message
/// </summary>
System,
/// <summary>
/// User message
/// </summary>
User,
/// <summary>
/// Assistant message
/// </summary>
Assistant,
/// <summary>
/// Function message, for models with available functions (deprecated in favor of tools)
/// </summary>
Func,
/// <summary>
/// Tool message
/// </summary>
Tool);
/// <summary>
/// Converts TMessageRole to and from its lowercase API string form
/// (e.g. 'system', 'user', 'assistant').
/// </summary>
TMessageRoleHelper = record helper for TMessageRole
function ToString: string;
class function FromString(const Value: string): TMessageRole; static;
end;
/// <summary>
/// Finish reason
/// </summary>
TFinishReason = (
/// <summary>
/// API returned complete model output
/// </summary>
Stop,
/// <summary>
/// Incomplete model output due to max_tokens parameter or token limit
/// </summary>
Length,
/// <summary>
/// The model decided to call a function
/// </summary>
FunctionCall,
/// <summary>
/// Omitted content due to a flag from our content filters
/// </summary>
ContentFilter,
/// <summary>
/// API response still in progress or incomplete
/// </summary>
Null,
/// <summary>
/// The model called a tool
/// </summary>
ToolCalls);
/// <summary>
/// Converts TFinishReason to and from its API string form.
/// </summary>
TFinishReasonHelper = record helper for TFinishReason
function ToString: string;
class function Create(const Value: string): TFinishReason; static;
end;
/// <summary>
/// REST.JsonReflect interceptor that (de)serializes TFinishReason
/// fields as their API string values during JSON marshalling.
/// </summary>
TFinishReasonInterceptor = class(TJSONInterceptorStringToString)
public
function StringConverter(Data: TObject; Field: string): string; override;
procedure StringReverter(Data: TObject; Field: string; Arg: string); override;
end;
// Discriminator for how a function/tool choice is serialized: none, auto,
// or a specific named function.
TFunctionCallType = (None, Auto, Func);
/// <summary>
/// Value for the deprecated "function_call" request field, controlling
/// how the model responds to function calls.
/// </summary>
TFunctionCall = record
private
FFuncName: string;
FType: TFunctionCallType;
public
/// <summary>
/// The model does not call a function, and responds to the end-user
/// </summary>
class function None: TFunctionCall; static;
/// <summary>
/// The model can pick between an end-user or calling a function
/// </summary>
class function Auto: TFunctionCall; static;
/// <summary>
/// Forces the model to call that function
/// </summary>
class function Func(const Name: string): TFunctionCall; static;
function ToString: string;
end;
/// <summary>
/// Name and JSON-encoded arguments of a function call, as used when
/// building request messages.
/// </summary>
TFunctionCallBuild = record
Name: string;
/// <summary>
/// JSON, example '{ "location": "Boston, MA"}'
/// </summary>
Arguments: string;
// helpers
class function Create(const Name, Arguments: string): TFunctionCallBuild; static;
end;
/// <summary>
/// A tool-call entry attached to an assistant message when building a request.
/// </summary>
TChatToolCallBuild = record
/// <summary>
/// The ID of the tool call.
/// </summary>
Id: string;
/// <summary>
/// The type of the tool. Currently, only "function" is supported.
/// </summary>
&Type: string;
/// <summary>
/// The function that the model called.
/// </summary>
&Function: TFunctionCallBuild;
// helpers
class function Create(const Id, &Type: string; &Function: TFunctionCallBuild): TChatToolCallBuild; static;
end;
// Kind of a content part in a multi-part message: plain text or an image URL.
TMessageContentType = (Text, ImageUrl);
/// <summary>
/// Detail level for image inputs (vision models).
/// </summary>
TImageDetail = (
/// <summary>
/// By default, the model will use the auto setting which will look at the image input size
/// and decide if it should use the low or high setting
/// </summary>
Auto,
/// <summary>
/// Will disable the "high res" model. The model will receive a low-res 512px x 512px version of the image,
/// and represent the image with a budget of 65 tokens. This allows the API to return faster responses and
/// consume fewer input tokens for use cases that do not require high detail.
/// </summary>
Low,
/// <summary>
/// Will enable "high res" mode, which first allows the model to see the low res image and then
/// creates detailed crops of input images as 512px squares based on the input image size.
/// Each of the detailed crops uses twice the token budget (65 tokens) for a total of 129 tokens.
/// </summary>
High);
/// <summary>
/// String conversion helper for TImageDetail.
/// </summary>
TImageDetailHelper = record helper for TImageDetail
function ToString: string; inline;
end;
/// <summary>
/// One part of a multi-part message content: either text or an image.
/// </summary>
TMessageContent = record
/// <summary>
/// The type of the content part.
/// </summary>
ContentType: TMessageContentType;
/// <summary>
/// The text content.
/// </summary>
Text: string;
/// <summary>
/// Either a URL of the image or the base64 encoded image data.
/// </summary>
Url: string;
/// <summary>
/// Specifies the detail level of the image. Learn more in the Vision guide.
/// </summary>
/// <seealso>https://platform.openai.com/docs/guides/vision/low-or-high-fidelity-image-understanding</seealso>
Detail: TImageDetail;
//helpers
class function CreateText(const Text: string): TMessageContent; static;
/// <summary>
/// The Chat Completions API, unlike the Assistants API, is not stateful.
/// That means you have to manage the messages (including images) you pass to the model yourself.
/// If you want to pass the same image to the model multiple times, you will have to pass the image each time
/// you make a request to the API.
///
/// For long running conversations, we suggest passing images via URL's instead of base64.
/// The latency of the model can also be improved by downsizing your images ahead of time to be less than
/// the maximum size they are expected to be. For low res mode, we expect a 512px x 512px image.
/// For high res mode, the short side of the image should be less than 768px and the long side should be less
/// than 2,000px.
/// </summary>
class function CreateImage(const Url: string; const Detail: TImageDetail = TImageDetail.Auto): TMessageContent; overload; static;
/// <summary>
/// The Chat Completions API, unlike the Assistants API, is not stateful.
/// That means you have to manage the messages (including images) you pass to the model yourself.
/// If you want to pass the same image to the model multiple times, you will have to pass the image each time
/// you make a request to the API.
///
/// For long running conversations, we suggest passing images via URL's instead of base64.
/// The latency of the model can also be improved by downsizing your images ahead of time to be less than
/// the maximum size they are expected to be. For low res mode, we expect a 512px x 512px image.
/// For high res mode, the short side of the image should be less than 768px and the long side should be less
/// than 2,000px.
/// </summary>
class function CreateImage(const Data: TBase64Data; const Detail: TImageDetail = TImageDetail.Auto): TMessageContent; overload; static;
/// <summary>
/// The Chat Completions API, unlike the Assistants API, is not stateful.
/// That means you have to manage the messages (including images) you pass to the model yourself.
/// If you want to pass the same image to the model multiple times, you will have to pass the image each time
/// you make a request to the API.
///
/// For long running conversations, we suggest passing images via URL's instead of base64.
/// The latency of the model can also be improved by downsizing your images ahead of time to be less than
/// the maximum size they are expected to be. For low res mode, we expect a 512px x 512px image.
/// For high res mode, the short side of the image should be less than 768px and the long side should be less
/// than 2,000px.
/// </summary>
class function CreateImage(const Data: TStream; const FileContentType: string; const Detail: TImageDetail = TImageDetail.Auto): TMessageContent; overload; static;
end;
/// <summary>
/// A single conversation message as prepared for a request; serialized
/// by TChatParams.Messages.
/// </summary>
TChatMessageBuild = record
private
FRole: TMessageRole;
FContent: string;
FTool_call_id: string;
FFunction_call: TFunctionCallBuild;
FTool_calls: TArray;
FTag: string;
FName: string;
FContents: TArray;
public
/// <summary>
/// The role of the messages author. One of "system", "user", "assistant", "tool".
/// </summary>
property Role: TMessageRole read FRole write FRole;
/// <summary>
/// The contents of the message. content is required for all messages, and may be null
/// for assistant messages with function calls.
/// </summary>
property Content: string read FContent write FContent;
/// <summary>
/// An array of content parts with a defined type, each can be of type "text" or "image_url"
/// when passing in images.
/// You can pass multiple images by adding multiple "image_url" content parts.
/// Image input is only supported when using the "gpt-4-vision-preview" model.
/// </summary>
property Contents: TArray read FContents write FContents;
/// <summary>
/// The name of the author of this message. "name" is required if role is "function",
/// and it should be the name of the function whose response is in the content.
/// May contain a-z, A-Z, 0-9, and underscores, with a maximum length of 64 characters.
/// </summary>
property Name: string read FName write FName;
/// <summary>
/// The name and arguments of a function that should be called, as generated by the model.
/// </summary>
property FunctionCall: TFunctionCallBuild read FFunction_call write FFunction_call;
/// <summary>
/// Tag - custom field for convenience. Not used in requests!
/// </summary>
property Tag: string read FTag write FTag;
/// <summary>
/// Tool call that this message is responding to.
/// </summary>
property ToolCallId: string read FTool_call_id write FTool_call_id;
/// <summary>
/// The tool calls generated by the model, such as function calls.
/// </summary>
property ToolCalls: TArray read FTool_calls write FTool_calls;
// helpers
class function Create(Role: TMessageRole; const Content: string; const Name: string = ''): TChatMessageBuild; static;
//Help functions
/// <summary>
/// From user
/// </summary>
class function User(const Content: string; const Name: string = ''): TChatMessageBuild; overload; static;
/// <summary>
/// From user, with multi-part content (text and/or images)
/// </summary>
class function User(const Content: TArray; const Name: string = ''): TChatMessageBuild; overload; static;
/// <summary>
/// From system
/// </summary>
class function System(const Content: string; const Name: string = ''): TChatMessageBuild; static;
/// <summary>
/// From assistant
/// </summary>
class function Assistant(const Content: string; const Name: string = ''): TChatMessageBuild; static;
/// <summary>
/// Function result
/// </summary>
class function Func(const Content: string; const Name: string = ''): TChatMessageBuild; static;
/// <summary>
/// Tool result
/// </summary>
class function Tool(const Content, ToolCallId: string; const Name: string = ''): TChatMessageBuild; static;
/// <summary>
/// Assistant want call function
/// </summary>
class function AssistantFunc(const Name, Arguments: string): TChatMessageBuild; static;
/// <summary>
/// Assistant want call tool
/// </summary>
class function AssistantTool(const Content: string; const ToolCalls: TArray): TChatMessageBuild; static;
end;
/// <summary>
/// A function definition offered to the model (name, description, JSON Schema).
/// </summary>
TChatFunctionBuild = record
private
FName: string;
FDescription: string;
FParameters: string;
public
/// <summary>
/// The name of the function to be called. Must be a-z, A-Z, 0-9, or contain underscores and dashes,
/// with a maximum length of 64.
/// </summary>
property Name: string read FName write FName;
/// <summary>
/// The description of what the function does.
/// </summary>
property Description: string read FDescription write FDescription;
/// <summary>
/// The parameters the functions accepts, described as a JSON Schema object
/// </summary>
property Parameters: string read FParameters write FParameters;
class function Create(const Name, Description: string; const ParametersJSON: string): TChatFunctionBuild; static;
end;
// Output format requested from the model: plain text or a JSON object.
TChatResponseFormat = (Text, JSONObject);
/// <summary>
/// String conversion helper for TChatResponseFormat.
/// </summary>
TChatResponseFormatHelper = record helper for TChatResponseFormat
function ToString: string; inline;
end;
/// <summary>
/// Base JSON parameter object for a tool definition.
/// </summary>
TChatToolParam = class(TJSONParam)
protected
/// <summary>
/// The type of the tool. Currently, only function is supported.
/// </summary>
function &Type(const Value: string): TChatToolParam;
end;
/// <summary>
/// Tool definition of type "function", wrapping an IChatFunction.
/// </summary>
TChatToolFunctionParam = class(TChatToolParam)
/// <summary>
/// The function definition attached to this tool.
/// </summary>
function &Function(const Value: IChatFunction): TChatToolFunctionParam;
constructor Create; reintroduce; overload;
constructor Create(const Value: IChatFunction); reintroduce; overload;
end;
/// <summary>
/// Value for the "tool_choice" request field; serialized by
/// TChatParams.ToolChoice.
/// </summary>
TChatToolChoiceParam = record
private
FFuncName: string;
FType: TFunctionCallType;
public
/// <summary>
/// The model does not call a function, and responds to the end-user
/// </summary>
class function None: TChatToolChoiceParam; static;
/// <summary>
/// The model can pick between an end-user or calling a function
/// </summary>
class function Auto: TChatToolChoiceParam; static;
/// <summary>
/// Forces the model to call that function
/// </summary>
class function Func(const Name: string): TChatToolChoiceParam; static;
end;
/// <summary>
/// Builder for the request body of a chat completion call. Each method
/// adds the corresponding JSON field and returns TChatParams so calls
/// can be chained.
/// </summary>
TChatParams = class(TJSONParam)
/// <summary>
/// ID of the model to use. See the model endpoint compatibility table for details on which models
/// work with the Chat API.
/// </summary>
/// <seealso>https://platform.openai.com/docs/models/model-endpoint-compatibility</seealso>
function Model(const Value: string): TChatParams;
/// <summary>
/// A list of messages comprising the conversation so far.
/// </summary>
function Messages(const Value: TArray): TChatParams; overload;
/// <summary>
/// A list of functions the model may generate JSON inputs for.
/// </summary>
function Functions(const Value: TArray): TChatParams; deprecated 'Use Tools';
/// <summary>
/// Controls how the model responds to function calls. none means the model does not call a function,
/// and responds to the end-user. auto means the model can pick between an end-user or calling a function.
/// Specifying a particular function via {"name": "my_function"} forces the model to call that function.
/// none is the default when no functions are present. auto is the default if functions are present.
/// </summary>
function FunctionCall(const Value: TFunctionCall): TChatParams; deprecated 'Use ToolChoice';
/// <summary>
/// What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random,
/// while lower values like 0.2 will make it more focused and deterministic.
/// We generally recommend altering this or top_p but not both.
/// </summary>
function Temperature(const Value: Single): TChatParams;
/// <summary>
/// A list of tools the model may call. Currently, only functions are supported as a tool.
/// Use this to provide a list of functions the model may generate JSON inputs for.
/// A max of 128 functions are supported.
/// </summary>
function Tools(const Value: TArray): TChatParams;
/// <summary>
/// Controls which (if any) function is called by the model.
/// "none" means the model will not call a function and instead generates a message.
/// "auto" means the model can pick between generating a message or calling a function.
/// Specifying a particular function via {"type: "function", "function": {"name": "my_function"}}
/// forces the model to call that function.
/// "none" is the default when no functions are present. "auto" is the default if functions are present.
/// </summary>
function ToolChoice(const Value: TChatToolChoiceParam): TChatParams;
/// <summary>
/// An alternative to sampling with temperature, called nucleus sampling, where the model considers the
/// results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10%
/// probability mass are considered.
/// We generally recommend altering this or temperature but not both.
/// </summary>
function TopP(const Value: Single): TChatParams;
/// <summary>
/// How many chat completion choices to generate for each input message.
/// Note that you will be charged based on the number of generated tokens across all of the choices.
/// Keep n as 1 to minimize costs.
/// </summary>
function N(const Value: Integer): TChatParams;
/// <summary>
/// If set, partial message deltas will be sent, like in ChatGPT. Tokens will be sent as
/// data-only server-sent events as they become available, with the stream terminated by a data: [DONE] message.
/// </summary>
function Stream(const Value: Boolean = True): TChatParams;
/// <summary>
/// An object specifying the format that the model must output.
/// Compatible with GPT-4 Turbo and all GPT-3.5 Turbo models newer than gpt-3.5-turbo-1106.
/// Setting to { "type": "json_object" } enables JSON mode, which guarantees the message the model
/// generates is valid JSON.
/// Important: when using JSON mode, you must also instruct the model to produce JSON yourself via a
/// system or user message. Without this, the model may generate an unending stream of whitespace until
/// the generation reaches the token limit, resulting in a long-running and seemingly "stuck" request.
/// Also note that the message content may be partially cut off if finish_reason="length",
/// which indicates the generation exceeded max_tokens or the conversation exceeded the max context length.
/// </summary>
function ResponseFormat(const Value: TChatResponseFormat): TChatParams;
/// <summary>
/// This feature is in Beta. If specified, our system will make a best effort to sample
/// deterministically, such that repeated requests with the same seed and parameters
/// should return the same result. Determinism is not guaranteed, and you should refer
/// to the system_fingerprint response parameter to monitor changes in the backend.
/// </summary>
function Seed(const Value: Integer): TChatParams;
/// <summary>
/// Up to 4 sequences where the API will stop generating further tokens.
/// </summary>
function Stop(const Value: string): TChatParams; overload;
/// <summary>
/// Up to 4 sequences where the API will stop generating further tokens.
/// </summary>
function Stop(const Value: TArray): TChatParams; overload;
/// <summary>
/// The maximum number of tokens that can be generated in the chat completion.
/// The total length of input tokens and generated tokens is limited by the model's context length.
/// </summary>
function MaxTokens(const Value: Integer): TChatParams;
/// <summary>
/// Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear
/// in the text so far, increasing the model's likelihood to talk about new topics.
/// </summary>
function PresencePenalty(const Value: Single = 0): TChatParams;
/// <summary>
/// Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency
/// in the text so far,
/// decreasing the model's likelihood to repeat the same line verbatim.
/// </summary>
function FrequencyPenalty(const Value: Single = 0): TChatParams;
/// <summary>
/// Modify the likelihood of specified tokens appearing in the completion.
///
/// Accepts a json object that maps tokens (specified by their token ID in the tokenizer) to an associated bias
/// value from -100 to 100. Mathematically, the bias is added to the logits generated by the model prior
/// to sampling.
/// The exact effect will vary per model, but values between -1 and 1 should decrease or increase likelihood
/// of selection;
/// values like -100 or 100 should result in a ban or exclusive selection of the relevant token.
/// </summary>
function LogitBias(const Value: TJSONObject): TChatParams;
/// <summary>
/// A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.
/// </summary>
function User(const Value: string): TChatParams;
/// <summary>
/// Whether to return log probabilities of the output tokens or not.
/// If true, returns the log probabilities of each output token returned in the content of message.
/// This option is currently not available on the gpt-4-vision-preview model.
/// </summary>
function Logprobs(const Value: Boolean = True): TChatParams;
/// <summary>
/// An integer between 0 and 20 specifying the number of most likely tokens to return at each token position,
/// each with an associated log probability. logprobs must be set to true if this parameter is used.
/// </summary>
function TopLogprobs(const Value: Integer): TChatParams;
constructor Create; override;
end;
/// <summary>
/// Token usage statistics reported with a completion response.
/// </summary>
TChatUsage = class
private
FCompletion_tokens: Int64;
FPrompt_tokens: Int64;
FTotal_tokens: Int64;
public
/// <summary>
/// Number of tokens in the prompt.
/// </summary>
property PromptTokens: Int64 read FPrompt_tokens write FPrompt_tokens;
/// <summary>
/// Number of tokens in the generated completion.
/// </summary>
property CompletionTokens: Int64 read FCompletion_tokens write FCompletion_tokens;
/// <summary>
/// Total number of tokens used in the request (prompt + completion).
/// </summary>
property TotalTokens: Int64 read FTotal_tokens write FTotal_tokens;
end;
/// <summary>
/// A function call generated by the model, as parsed from a response.
/// </summary>
TChatFunctionCall = class
private
FName: string;
FArguments: string;
public
/// <summary>
/// The name of the function to call.
/// </summary>
property Name: string read FName write FName;
/// <summary>
/// The arguments to call the function with, as generated by the model in JSON format.
/// Note that the model does not always generate valid JSON, and may hallucinate parameters not defined by your
/// function schema. Validate the arguments in your code before calling your function.
/// JSON, example '{ "location": "Boston, MA"}'
/// </summary>
property Arguments: string read FArguments write FArguments;
end;
/// <summary>
/// A tool call generated by the model, as parsed from a response.
/// Owns its FFunction instance (freed in Destroy).
/// </summary>
TChatToolCall = class
private
FId: string;
FType: string;
FFunction: TChatFunctionCall;
public
/// <summary>
/// The ID of the tool call.
/// </summary>
property Id: string read FId write FId;
/// <summary>
/// The type of the tool. Currently, only function is supported.
/// </summary>
property &Type: string read FType write FType;
/// <summary>
/// The function that the model called.
/// </summary>
property &Function: TChatFunctionCall read FFunction write FFunction;
destructor Destroy; override;
end;
/// <summary>
/// A message in a completion response (or a delta in a streamed response).
/// Owns FFunction_call and the FTool_calls items (freed in Destroy).
/// </summary>
TChatMessage = class
private
FRole: string;
FContent: string;
FFunction_call: TChatFunctionCall;
FTool_calls: TArray;
public
/// <summary>
/// The role of the author of this message.
/// </summary>
property Role: string read FRole write FRole;
/// <summary>
/// The contents of the message.
/// </summary>
property Content: string read FContent write FContent;
/// <summary>
/// Deprecated and replaced by ToolCalls.
/// The name and arguments of a function that should be called, as generated by the model.
/// </summary>
property FunctionCall: TChatFunctionCall read FFunction_call write FFunction_call;
/// <summary>
/// The tool calls generated by the model, such as function calls.
/// </summary>
property ToolCalls: TArray read FTool_calls write FTool_calls;
destructor Destroy; override;
end;
/// <summary>
/// Log probability information for a single output token.
/// </summary>
TLogprobContent = class
private
FToken: string;
FLogprob: Extended;
FBytes: TArray;
FTop_logprobs: TArray;
public
/// <summary>
/// The token.
/// </summary>
property Token: string read FToken write FToken;
/// <summary>
/// The log probability of this token, if it is within the top 20 most likely tokens.
/// Otherwise, the value -9999.0 is used to signify that the token is very unlikely.
/// </summary>
property Logprob: Extended read FLogprob write FLogprob;
/// <summary>
/// A list of integers representing the UTF-8 bytes representation of the token.
/// Useful in instances where characters are represented by multiple tokens and their byte
/// representations must be combined to generate the correct text representation.
/// Can be null if there is no bytes representation for the token.
/// </summary>
property Bytes: TArray read FBytes write FBytes;
/// <summary>
/// List of the most likely tokens and their log probability, at this token position.
/// In rare cases, there may be fewer than the number of requested top_logprobs returned.
/// </summary>
property TopLogprobs: TArray read FTop_logprobs write FTop_logprobs;
destructor Destroy; override;
end;
/// <summary>
/// Log probability information for a choice. Owns the FContent items
/// (freed in Destroy).
/// </summary>
TLogprobs = class
private
FContent: TArray;
public
/// <summary>
/// A list of message content tokens with log probability information.
/// </summary>
property Content: TArray read FContent write FContent;
destructor Destroy; override;
end;
/// <summary>
/// One completion choice from a response. Owns FMessage, FDelta and
/// FLogprobs (freed in Destroy).
/// </summary>
TChatChoices = class
private
FIndex: Int64;
FMessage: TChatMessage;
// Serialized as a string in JSON via the interceptor.
[JsonReflectAttribute(ctString, rtString, TFinishReasonInterceptor)]
FFinish_reason: TFinishReason;
FDelta: TChatMessage;
FLogprobs: TLogprobs;
public
/// <summary>
/// The index of the choice in the list of choices.
/// </summary>
property Index: Int64 read FIndex write FIndex;
/// <summary>
/// A chat completion message generated by the model.
/// </summary>
property Message: TChatMessage read FMessage write FMessage;
/// <summary>
/// A chat completion delta generated by streamed model responses.
/// </summary>
property Delta: TChatMessage read FDelta write FDelta;
/// <summary>
/// The reason the model stopped generating tokens.
/// This will be stop if the model hit a natural stop point or a provided stop sequence,
/// length if the maximum number of tokens specified in the request was reached,
/// content_filter if content was omitted due to a flag from our content filters,
/// tool_calls if the model called a tool, or function_call (deprecated) if the model called a function.
/// </summary>
property FinishReason: TFinishReason read FFinish_reason write FFinish_reason;
/// <summary>
/// Log probability information for the choice.
/// </summary>
property Logprobs: TLogprobs read FLogprobs write FLogprobs;
destructor Destroy; override;
end;
/// <summary>
/// A chat completion response. Owns FUsage and the FChoices items
/// (freed in Destroy).
/// </summary>
TChat = class
private
FChoices: TArray;
FCreated: Int64;
FId: string;
FObject: string;
FUsage: TChatUsage;
FModel: string;
FSystem_fingerprint: string;
public
/// <summary>
/// A unique identifier for the chat completion.
/// </summary>
property Id: string read FId write FId;
/// <summary>
/// The object type, which is always chat.completion.
/// </summary>
property &Object: string read FObject write FObject;
/// <summary>
/// The Unix timestamp (in seconds) of when the chat completion was created.
/// </summary>
property Created: Int64 read FCreated write FCreated;
/// <summary>
/// The model used for the chat completion.
/// </summary>
property Model: string read FModel write FModel;
/// <summary>
/// A list of chat completion choices. Can be more than one if N is greater than 1.
/// </summary>
property Choices: TArray read FChoices write FChoices;
/// <summary>
/// Usage statistics for the completion request.
/// </summary>
property Usage: TChatUsage read FUsage write FUsage;
/// <summary>
/// This fingerprint represents the backend configuration that the model runs with.
/// Can be used in conjunction with the seed request parameter to understand when backend
/// changes have been made that might impact determinism.
/// </summary>
property SystemFingerprint: string read FSystem_fingerprint write FSystem_fingerprint;
destructor Destroy; override;
end;
/// <summary>
/// Callback invoked once per chunk of a streamed completion.
/// Chat is nil when IsDone is True (and when a chunk could not be parsed);
/// set Cancel to True to abort the stream.
/// </summary>
TChatEvent = reference to procedure(var Chat: TChat; IsDone: Boolean; var Cancel: Boolean);
/// <summary>
/// Given a chat conversation, the model will return a chat completion response.
/// </summary>
TChatRoute = class(TOpenAIAPIRoute)
public
/// <summary>
/// Creates a completion for the chat message
/// </summary>
/// <param name="ParamProc">Procedure that configures the request parameters.</param>
/// <returns>The parsed chat completion response.</returns>
function Create(ParamProc: TProc): TChat;
/// <summary>
/// Creates a completion for the chat message
/// </summary>
/// <remarks>
/// The Chat object will be nil if all data is received!
/// </remarks>
function CreateStream(ParamProc: TProc; Event: TChatEvent): Boolean;
end;
implementation
uses
Rest.Json, System.Rtti, System.Net.HttpClient, OpenAI.Utils.Base64;
{ TChatRoute }
// Synchronous (non-streamed) chat completion request against the
// "chat/completions" endpoint.
function TChatRoute.Create(ParamProc: TProc): TChat;
begin
Result := API.Post('chat/completions', ParamProc);
end;
// Streamed chat completion: posts to "chat/completions" and parses the
// server-sent-events body ("data: {json}" lines) incrementally from the
// receive callback. The whole response accumulates in Response; RetPos
// tracks how far into it parsing has progressed across callbacks.
function TChatRoute.CreateStream(ParamProc: TProc; Event: TChatEvent): Boolean;
var
Response: TStringStream;
RetPos: Integer;
begin
Response := TStringStream.Create('', TEncoding.UTF8);
try
RetPos := 0;
Result := API.Post('chat/completions', ParamProc, Response,
TReceiveDataCallback(
procedure(const Sender: TObject; AContentLength: Int64; AReadCount: Int64; var AAbort: Boolean)
var
IsDone: Boolean;
Data: string;
Chat: TChat;
TextBuffer: string;
Line: string;
Ret: Integer;
begin
try
TextBuffer := Response.DataString;
except
// If there is an encoding error, then the data is definitely not all.
// This is necessary because the data from the server may not be complete for successful encoding
on E: EEncodingError do
Exit;
end;
repeat
Ret := TextBuffer.IndexOf(#10, RetPos);
if Ret < 0 then
// No complete line buffered yet; Continue re-evaluates the
// until-condition (Ret < 0), which exits the loop.
Continue;
Line := TextBuffer.Substring(RetPos, Ret - RetPos);
RetPos := Ret + 1;
// Skip empty separator lines between events.
// NOTE(review): a CRLF-terminated blank line yields Line = #13, which
// is not skipped here — confirm the server always uses bare LF.
if Line.IsEmpty or Line.StartsWith(#10) then
Continue;
Chat := nil;
Data := Line.Replace('data: ', '').Trim([' ', #13, #10]);
IsDone := Data = '[DONE]';
if not IsDone then
try
Chat := TJson.JsonToObject(Data);
except
// Malformed or partial JSON: deliver nil rather than raising.
Chat := nil;
end;
try
// Chat is nil when IsDone or when parsing failed; the handler may
// set AAbort to cancel the download.
Event(Chat, IsDone, AAbort);
finally
Chat.Free;
end;
until Ret < 0;
end));
finally
Response.Free;
end;
end;
{ TChat }
// Frees the owned usage object and every choice in the array.
// TObject.Free is nil-safe, so the previous "if Assigned(FUsage)" guard
// was redundant and has been removed.
destructor TChat.Destroy;
var
  Item: TChatChoices;
begin
  FUsage.Free;
  for Item in FChoices do
    Item.Free;
  inherited;
end;
{ TChatParams }
// Initializes the params with a default model; callers typically override
// it via Model(...).
constructor TChatParams.Create;
begin
inherited;
Model('gpt-3.5-turbo');
// Model('gpt-3.5-turbo-0613');
// Model('gpt-3.5-turbo-16k');
end;
// Serializes the deprecated "functions" request field. The array is
// guarded with try/except so it is not leaked if serializing an item
// raises — the same pattern Messages uses. Once Add succeeds, ownership
// of Items passes to the params object.
function TChatParams.Functions(const Value: TArray): TChatParams;
var
  Items: TJSONArray;
  Item: IChatFunction;
begin
  Items := TJSONArray.Create;
  try
    for Item in Value do
      Items.Add(TChatFunction.ToJson(Item));
  except
    Items.Free;
    raise;
  end;
  Result := TChatParams(Add('functions', Items));
end;
// Sets the "logit_bias" request field (token-id -> bias map).
function TChatParams.LogitBias(const Value: TJSONObject): TChatParams;
begin
Result := TChatParams(Add('logit_bias', Value));
end;
// Sets the "logprobs" request field.
function TChatParams.Logprobs(const Value: Boolean): TChatParams;
begin
Result := TChatParams(Add('logprobs', Value));
end;
// Sets the deprecated "function_call" request field from its string form.
function TChatParams.FunctionCall(const Value: TFunctionCall): TChatParams;
begin
Result := TChatParams(Add('function_call', Value.ToString));
end;
// Sets the "frequency_penalty" request field (-2.0 .. 2.0).
function TChatParams.FrequencyPenalty(const Value: Single): TChatParams;
begin
Result := TChatParams(Add('frequency_penalty', Value));
end;
// Sets the "max_tokens" request field.
function TChatParams.MaxTokens(const Value: Integer): TChatParams;
begin
Result := TChatParams(Add('max_tokens', Value));
end;
// Sets the "model" request field.
function TChatParams.Model(const Value: string): TChatParams;
begin
Result := TChatParams(Add('model', Value));
end;
// Sets the "n" request field (number of choices to generate).
function TChatParams.N(const Value: Integer): TChatParams;
begin
Result := TChatParams(Add('n', Value));
end;
// Sets the "presence_penalty" request field (-2.0 .. 2.0).
function TChatParams.PresencePenalty(const Value: Single): TChatParams;
begin
Result := TChatParams(Add('presence_penalty', Value));
end;
// Sets the "response_format" request field as {"type": "<text|json_object>"}.
// VJO ownership passes to the params object via Add.
function TChatParams.ResponseFormat(const Value: TChatResponseFormat): TChatParams;
var
VJO: TJSONParam;
begin
VJO := TJSONParam.Create;
VJO.Add('type', Value.ToString);
Result := TChatParams(Add('response_format', VJO));
end;
// Serializes the "messages" request field. Each TChatMessageBuild becomes a
// JSON object with role, content (plain string or multi-part array),
// optional name, optional legacy function_call, and optional tool_calls.
// Items is freed on failure; on success its ownership passes to the params
// object via Add.
function TChatParams.Messages(const Value: TArray): TChatParams;
var
Item: TChatMessageBuild;
ToolItem: TChatToolCallBuild;
JSON, ContentItem, ImageUrl: TJSONObject;
Content: TMessageContent;
Items, Tools, Contents: TJSONArray;
FuncData, ToolData: TJSONObject;
begin
Items := TJSONArray.Create;
try
for Item in Value do
begin
JSON := TJSONObject.Create;
//role
JSON.AddPair('role', Item.Role.ToString);
//content: a plain string takes precedence over content parts
if not Item.Content.IsEmpty then
JSON.AddPair('content', Item.Content)
else if Length(Item.Contents) > 0 then
begin
Contents := TJSONArray.Create;
JSON.AddPair('content', Contents);
for Content in Item.Contents do
begin
ContentItem := TJSONObject.Create;
Contents.Add(ContentItem);
case Content.ContentType of
TMessageContentType.Text:
begin
ContentItem.AddPair('type', 'text');
ContentItem.AddPair('text', Content.Text);
end;
TMessageContentType.ImageUrl:
begin
ContentItem.AddPair('type', 'image_url');
ImageUrl := TJSONObject.Create;
ContentItem.AddPair('image_url', ImageUrl);
ImageUrl.AddPair('url', Content.Url);
// "detail" is omitted for Auto (the API default)
if Content.Detail <> TImageDetail.Auto then
ImageUrl.AddPair('detail', Content.Detail.ToString);
end;
end;
end;
end;
//name
if not Item.Name.IsEmpty then
JSON.AddPair('name', Item.Name);
//function call (deprecated request form)
if not Item.FunctionCall.Name.IsEmpty then
begin
FuncData := TJSONObject.Create;
JSON.AddPair('function_call', FuncData);
FuncData.AddPair('name', Item.FunctionCall.Name);
FuncData.AddPair('arguments', Item.FunctionCall.Arguments);
end;
//tool calls
if Length(Item.ToolCalls) > 0 then
begin
Tools := TJSONArray.Create;
JSON.AddPair('tool_calls', Tools);
for ToolItem in Item.ToolCalls do
begin
ToolData := TJSONObject.Create;
Tools.Add(ToolData);
ToolData.AddPair('id', ToolItem.Id);
ToolData.AddPair('type', ToolItem.&Type);
if not ToolItem.&Function.Name.IsEmpty then
begin
FuncData := TJSONObject.Create;
ToolData.AddPair('function', FuncData);
FuncData.AddPair('name', ToolItem.&Function.Name);
FuncData.AddPair('arguments', ToolItem.&Function.Arguments);
end;
end;
end;
Items.Add(JSON);
end;
except
// JSON objects already added to Items are freed along with it.
Items.Free;
raise;
end;
Result := TChatParams(Add('messages', Items));
end;
// Sets the "seed" request field (best-effort deterministic sampling).
function TChatParams.Seed(const Value: Integer): TChatParams;
begin
Result := TChatParams(Add('seed', Value));
end;
// Sets the "stop" request field from an array of stop sequences (up to 4).
function TChatParams.Stop(const Value: TArray): TChatParams;
begin
Result := TChatParams(Add('stop', Value));
end;
// Sets the "stop" request field from a single stop sequence.
function TChatParams.Stop(const Value: string): TChatParams;
begin
Result := TChatParams(Add('stop', Value));
end;
// Sets the "stream" request field (server-sent-events mode).
function TChatParams.Stream(const Value: Boolean): TChatParams;
begin
Result := TChatParams(Add('stream', Value));
end;
// Sets the "temperature" request field (0 .. 2).
function TChatParams.Temperature(const Value: Single): TChatParams;
begin
Result := TChatParams(Add('temperature', Value));
end;
// Sets the "tool_choice" request field: the strings 'none' / 'auto', or a
// {"type": "function", "function": {"name": ...}} object for a forced call.
function TChatParams.ToolChoice(const Value: TChatToolChoiceParam): TChatParams;
var
VJO, VJF: TJSONParam;
begin
case Value.FType of
TFunctionCallType.None:
Result := TChatParams(Add('tool_choice', 'none'));
TFunctionCallType.Auto:
Result := TChatParams(Add('tool_choice', 'auto'));
TFunctionCallType.Func:
begin
VJO := TJSONParam.Create;
VJO.Add('type', 'function');
VJF := TJSONParam.Create;
VJO.Add('function', VJF);
VJF.Add('name', Value.FFuncName);
Result := TChatParams(Add('tool_choice', VJO));
end;
else
// Unreachable for current enum values; keeps the compiler satisfied
// that Result is always assigned.
Result := Self;
end;
end;
// Sets the "tools" request field from an array of tool definitions.
function TChatParams.Tools(const Value: TArray): TChatParams;
begin
Result := TChatParams(Add('tools', TArray(Value)));
end;
// Sets the "top_logprobs" request field (0 .. 20; requires logprobs=true).
function TChatParams.TopLogprobs(const Value: Integer): TChatParams;
begin
Result := TChatParams(Add('top_logprobs', Value));
end;
// Sets the "top_p" request field (nucleus sampling).
function TChatParams.TopP(const Value: Single): TChatParams;
begin
Result := TChatParams(Add('top_p', Value));
end;
// Sets the "user" request field (end-user identifier for abuse monitoring).
function TChatParams.User(const Value: string): TChatParams;
begin
Result := TChatParams(Add('user', Value));
end;
{ TChatMessageBuild }
// Builds an assistant-role message with plain text content.
class function TChatMessageBuild.Assistant(const Content: string; const Name: string): TChatMessageBuild;
begin
  Result.Role := TMessageRole.Assistant;
  Result.Name := Name;
  Result.Content := Content;
end;
// Builds an assistant-role message that records a function call (legacy
// function-calling API) instead of text content.
class function TChatMessageBuild.AssistantFunc(const Name, Arguments: string): TChatMessageBuild;
begin
  Result.FRole := TMessageRole.Assistant;
  // The literal string 'null' stands in for absent content — presumably
  // serialized as JSON null downstream; verify against the serializer.
  Result.FContent := 'null';
  Result.FFunction_call.Name := Name;
  Result.FFunction_call.Arguments := Arguments;
end;
// Builds an assistant-role message carrying tool calls; when Content is
// empty the literal 'null' placeholder is stored (presumably serialized as
// JSON null downstream — verify against the serializer).
// NOTE(fix): the generic type argument of TArray was missing (stripped
// angle brackets); a bare TArray is not a valid Delphi parameter type.
class function TChatMessageBuild.AssistantTool(const Content: string; const ToolCalls: TArray<TChatToolCallBuild>): TChatMessageBuild;
begin
  Result.FRole := TMessageRole.Assistant;
  if Content.IsEmpty then
    Result.FContent := 'null'
  else
    Result.FContent := Content;
  Result.FTool_calls := ToolCalls;
end;
// General-purpose builder: creates a message with an explicit role,
// content and optional participant name.
class function TChatMessageBuild.Create(Role: TMessageRole; const Content: string; const Name: string): TChatMessageBuild;
begin
  Result.FRole := Role;
  Result.FContent := Content;
  Result.FName := Name;
end;
// Builds a function-role message (legacy function-calling API): Content is
// the function's result, Name identifies which function produced it.
class function TChatMessageBuild.Func(const Content, Name: string): TChatMessageBuild;
begin
  Result.FRole := TMessageRole.Func;
  Result.FContent := Content;
  Result.FName := Name;
end;
// Builds a system-role message with the given instructions and an
// optional participant name.
class function TChatMessageBuild.System(const Content: string; const Name: string): TChatMessageBuild;
begin
  Result.FRole := TMessageRole.System;
  Result.FContent := Content;
  Result.FName := Name;
end;
// Builds a tool-role message: the result of a tool invocation, linked back
// to the originating call via ToolCallId.
class function TChatMessageBuild.Tool(const Content, ToolCallId, Name: string): TChatMessageBuild;
begin
  Result.FRole := TMessageRole.Tool;
  Result.FContent := Content;
  Result.FName := Name;
  Result.FTool_call_id := ToolCallId;
end;
// Builds a user-role message with multi-part content (text and/or image
// parts), stored in FContents rather than the plain FContent string.
// NOTE(fix): the generic type argument of TArray was missing (stripped
// angle brackets); a bare TArray is not a valid Delphi parameter type.
class function TChatMessageBuild.User(const Content: TArray<TMessageContent>; const Name: string): TChatMessageBuild;
begin
  Result.FRole := TMessageRole.User;
  Result.FContents := Content;
  Result.FName := Name;
end;
// Builds a user-role message with plain text content and an optional
// participant name.
class function TChatMessageBuild.User(const Content: string; const Name: string): TChatMessageBuild;
begin
  Result.FRole := TMessageRole.User;
  Result.FContent := Content;
  Result.FName := Name;
end;
{ TMessageRoleHelper }
// Maps an API role string onto the enum. Unrecognized strings (and 'user'
// itself) resolve to TMessageRole.User, matching the wire default.
class function TMessageRoleHelper.FromString(const Value: string): TMessageRole;
begin
  // Default first; only override for the explicitly named roles.
  Result := TMessageRole.User;
  if Value = 'system' then
    Result := TMessageRole.System
  else if Value = 'assistant' then
    Result := TMessageRole.Assistant
  else if Value = 'tool' then
    Result := TMessageRole.Tool
  else if Value = 'function' then
    Result := TMessageRole.Func;
end;
// Returns the wire name of the role, via a lookup table indexed by the
// enum's declaration order (System, User, Assistant, Func, Tool).
function TMessageRoleHelper.ToString: string;
const
  RoleNames: array[TMessageRole] of string = (
    'system', 'user', 'assistant', 'function', 'tool');
begin
  Result := RoleNames[Self];
end;
{ TChatChoices }
// Frees the owned response sub-objects (Free is nil-safe, so any of these
// may be unassigned, e.g. FDelta outside streaming mode).
destructor TChatChoices.Destroy;
begin
  FMessage.Free;
  FDelta.Free;
  FLogprobs.Free;
  inherited;
end;
{ TChatFunctionBuild }
// Builds a function definition record; ParametersJSON is the function's
// parameter schema as a raw JSON string (not validated here).
class function TChatFunctionBuild.Create(const Name, Description: string; const ParametersJSON: string): TChatFunctionBuild;
begin
  Result.FName := Name;
  Result.FDescription := Description;
  Result.FParameters := ParametersJSON;
end;
{ TChatMessage }
// Frees the owned function-call object and every owned tool-call entry.
destructor TChatMessage.Destroy;
var
  Item: TChatToolCall;
begin
  // TObject.Free is nil-safe, so the previous `if Assigned(...)` guard
  // was redundant.
  FFunction_call.Free;
  // Iterating a nil dynamic array is a no-op, so no guard is needed here.
  for Item in FTool_calls do
    Item.Free;
  inherited;
end;
{ TFunctionCall }
// 'auto' choice: the model may answer directly or call a function.
class function TFunctionCall.Auto: TFunctionCall;
begin
  Result.FType := TFunctionCallType.Auto;
end;
// Forced choice: the model must call the function named Name.
class function TFunctionCall.Func(const Name: string): TFunctionCall;
begin
  Result.FType := TFunctionCallType.Func;
  Result.FFuncName := Name;
end;
// 'none' choice: the model never calls a function and answers directly.
class function TFunctionCall.None: TFunctionCall;
begin
  Result.FType := TFunctionCallType.None;
end;
// Renders the function-call choice as the API expects: the bare strings
// 'none'/'auto', or a {"name": <FFuncName>} JSON object for a forced call.
function TFunctionCall.ToString: string;
var
  JSON: TJSONObject;
begin
  if FType = TFunctionCallType.None then
    Exit('none');
  if FType = TFunctionCallType.Auto then
    Exit('auto');
  // TFunctionCallType.Func: serialize the forced-call object.
  JSON := TJSONObject.Create;
  try
    JSON.AddPair('name', FFuncName);
    Result := JSON.ToJSON;
  finally
    JSON.Free;
  end;
end;
{ TFinishReasonInterceptor }
// REST.JsonReflect hook: reads the TFinishReason field via RTTI and
// converts it to its wire string for serialization.
// NOTE(fix): the generic type argument of AsType was missing (stripped
// angle brackets); AsType is a generic method and needs it to compile.
function TFinishReasonInterceptor.StringConverter(Data: TObject; Field: string): string;
begin
  Result := RTTI.GetType(Data.ClassType).GetField(Field).GetValue(Data).AsType<TFinishReason>.ToString;
end;
// REST.JsonReflect hook: parses the wire string Arg back into the enum and
// writes it to the field via RTTI.
// NOTE(fix): the generic type argument is made explicit; the stripped
// source relied on inference that is unavailable in older compilers.
procedure TFinishReasonInterceptor.StringReverter(Data: TObject; Field, Arg: string);
begin
  RTTI.GetType(Data.ClassType).GetField(Field).SetValue(Data,
    TValue.From<TFinishReason>(TFinishReason.Create(Arg)));
end;
{ TFinishReasonHelper }
// Maps a wire finish-reason string onto the enum. Both 'stop' and any
// unrecognized value resolve to TFinishReason.Stop, as before.
class function TFinishReasonHelper.Create(const Value: string): TFinishReason;
begin
  // Default first; only override for the explicitly named reasons.
  Result := TFinishReason.Stop;
  if Value = 'length' then
    Result := TFinishReason.Length
  else if Value = 'function_call' then
    Result := TFinishReason.FunctionCall
  else if Value = 'content_filter' then
    Result := TFinishReason.ContentFilter
  else if Value = 'tool_calls' then
    Result := TFinishReason.ToolCalls
  else if Value = 'null' then
    Result := TFinishReason.Null;
end;
// Returns the wire name of the finish reason, via a lookup table indexed
// by the enum's declaration order (Stop, Length, FunctionCall,
// ContentFilter, Null, ToolCalls).
function TFinishReasonHelper.ToString: string;
const
  ReasonNames: array[TFinishReason] of string = (
    'stop', 'length', 'function_call', 'content_filter', 'null', 'tool_calls');
begin
  Result := ReasonNames[Self];
end;
{ TChatResponseFormatHelper }
// Returns the wire name of the response format ('text' or 'json_object').
function TChatResponseFormatHelper.ToString: string;
begin
  if Self = TChatResponseFormat.Text then
    Exit('text');
  if Self = TChatResponseFormat.JSONObject then
    Exit('json_object');
  // Like the original case-without-else, any other value leaves Result
  // untouched; unreachable for the current enum values.
end;
{ TChatToolParam }
// Sets the tool's 'type' discriminator field (escaped name: Type is a
// reserved word in Delphi).
function TChatToolParam.&Type(const Value: string): TChatToolParam;
begin
  Result := TChatToolParam(Add('type', Value));
end;
{ TChatToolFunctionParam }
// Default constructor: a tool param whose type is always 'function'.
constructor TChatToolFunctionParam.Create;
begin
  inherited;
  &Type('function');
end;
// Convenience constructor: initializes the tool param and immediately
// attaches the given function definition.
constructor TChatToolFunctionParam.Create(const Value: IChatFunction);
begin
  Create;
  &Function(Value);
end;
// Sets the 'function' field from the IChatFunction's JSON representation
// (escaped name: Function is a reserved word in Delphi).
function TChatToolFunctionParam.&Function(const Value: IChatFunction): TChatToolFunctionParam;
begin
  Result := TChatToolFunctionParam(Add('function', TChatFunction.ToJson(Value)));
end;
{ TChatToolChoiceParam }
// 'auto' tool choice: the model may answer directly or call a tool.
class function TChatToolChoiceParam.Auto: TChatToolChoiceParam;
begin
  Result.FType := TFunctionCallType.Auto;
end;
// Forced tool choice: the model must call the function named Name.
class function TChatToolChoiceParam.Func(const Name: string): TChatToolChoiceParam;
begin
  Result.FType := TFunctionCallType.Func;
  Result.FFuncName := Name;
end;
// 'none' tool choice: the model never calls a tool and answers directly.
class function TChatToolChoiceParam.None: TChatToolChoiceParam;
begin
  Result.FType := TFunctionCallType.None;
end;
{ TChatToolCall }
// Frees the owned function-call payload (Free is nil-safe).
destructor TChatToolCall.Destroy;
begin
  FFunction.Free;
  inherited;
end;
{ TChatToolCallBuild }
// Builds a tool-call record from its id, type discriminator and function
// payload (& escapes the reserved words Type and Function).
class function TChatToolCallBuild.Create(const Id, &Type: string; &Function: TFunctionCallBuild): TChatToolCallBuild;
begin
  Result.Id := Id;
  Result.&Type := &Type;
  Result.&Function := &Function;
end;
{ TFunctionCallBuild }
// Builds a function-call record; Arguments is a raw JSON string
// (e.g. '{"location": "Boston, MA"}'), not validated here.
class function TFunctionCallBuild.Create(const Name, Arguments: string): TFunctionCallBuild;
begin
  Result.Name := Name;
  Result.Arguments := Arguments;
end;
{ TMessageContent }
// Builds an image content part from a URL (or data URI) with the given
// detail level.
class function TMessageContent.CreateImage(const Url: string; const Detail: TImageDetail): TMessageContent;
begin
  Result.ContentType := TMessageContentType.ImageUrl;
  Result.Url := Url;
  Result.Detail := Detail;
end;
// Builds an image content part from raw stream data: the stream is
// base64-encoded (via the unit's StreamToBase64 helper) into a data URI
// and delegated to the URL overload.
class function TMessageContent.CreateImage(const Data: TStream; const FileContentType: string; const Detail: TImageDetail): TMessageContent;
begin
  Result := CreateImage(StreamToBase64(Data, FileContentType), Detail);
end;
// Builds an image content part from pre-encoded base64 data by delegating
// to the URL overload with the data's string form (a data URI).
class function TMessageContent.CreateImage(const Data: TBase64Data; const Detail: TImageDetail): TMessageContent;
begin
  Result := CreateImage(Data.ToString, Detail);
end;
// Builds a plain-text content part.
class function TMessageContent.CreateText(const Text: string): TMessageContent;
begin
  Result.ContentType := TMessageContentType.Text;
  Result.Text := Text;
end;
{ TImageDetailHelper }
// Returns the wire name of the image detail level ('auto'/'low'/'high').
function TImageDetailHelper.ToString: string;
begin
  if Self = TImageDetail.Auto then
    Exit('auto');
  if Self = TImageDetail.Low then
    Exit('low');
  if Self = TImageDetail.High then
    Exit('high');
  // Like the original case-without-else, any other value leaves Result
  // untouched; unreachable for the current enum values.
end;
{ TLogprobs }
// Frees every owned per-token logprob entry (iterating a nil dynamic
// array is a no-op, so an unassigned FContent is safe).
destructor TLogprobs.Destroy;
var
  Item: TLogprobContent;
begin
  for Item in FContent do
    Item.Free;
  inherited;
end;
{ TLogprobContent }
// Frees every owned nested top-logprob entry (iterating a nil dynamic
// array is a no-op, so an unassigned FTop_logprobs is safe).
destructor TLogprobContent.Destroy;
var
  Item: TLogprobContent;
begin
  for Item in FTop_logprobs do
    Item.Free;
  inherited;
end;
end.