Class OpenAIModel
Inheritance
object
OpenAIModel
Inherited Members
object.Equals(object)
object.Equals(object, object)
object.GetHashCode()
object.GetType()
object.MemberwiseClone()
object.ReferenceEquals(object, object)
object.ToString()
Namespace: DotnetPrompt.LLM.OpenAI
Assembly: DotnetPrompt.LLM.OpenAI.dll
Syntax
public class OpenAIModel : BaseModel, ILargeLanguageModel
Constructors
|
Improve this Doc
View Source
OpenAIModel(IConfiguration, ILogger<OpenAIModel>, IDistributedCache)
Declaration
public OpenAIModel(IConfiguration configuration, ILogger<OpenAIModel> logger, IDistributedCache cache)
Parameters
Type |
Name |
Description |
IConfiguration |
configuration |
|
ILogger<OpenAIModel> |
logger |
|
IDistributedCache |
cache |
|
Exceptions
Type |
Condition |
System.InvalidOperationException |
|
|
Improve this Doc
View Source
OpenAIModel(string, OpenAIModelConfiguration, ILogger, IDistributedCache)
Declaration
public OpenAIModel(string openAIApiKey, OpenAIModelConfiguration defaultModelConfiguration, ILogger logger, IDistributedCache cache = null)
Parameters
Type |
Name |
Description |
string |
openAIApiKey |
|
OpenAIModelConfiguration |
defaultModelConfiguration |
|
ILogger |
logger |
|
IDistributedCache |
cache |
|
|
Improve this Doc
View Source
OpenAIModel(string, OpenAIModelConfiguration)
Default constructor without logging
Declaration
public OpenAIModel(string openAIApiKey, OpenAIModelConfiguration defaultModelConfiguration)
Parameters
Fields
|
Improve this Doc
View Source
BatchSize
Declaration
Field Value
|
Improve this Doc
View Source
OpenAiApiKey
Declaration
public string OpenAiApiKey
Field Value
Properties
|
Improve this Doc
View Source
DefaultModelConfiguration
Declaration
public OpenAIModelConfiguration DefaultModelConfiguration { get; init; }
Property Value
|
Improve this Doc
View Source
LLMType
Declaration
public override string LLMType { get; }
Property Value
Overrides
|
Improve this Doc
View Source
MaxRequestTokens
Declaration
public override int MaxRequestTokens { get; }
Property Value
Overrides
|
Improve this Doc
View Source
ModelExtraArguments
Holds any model parameters valid for a create call that are not explicitly specified.
Declaration
public Dictionary<string, object> ModelExtraArguments { get; set; }
Property Value
Type |
Description |
Dictionary<string, object> |
|
|
Improve this Doc
View Source
Streaming
Declaration
public bool Streaming { get; init; }
Property Value
Methods
|
Improve this Doc
View Source
AsUniqueString()
Declaration
protected override string AsUniqueString()
Returns
Overrides
|
Improve this Doc
View Source
CompletionsResponseValue(OpenAIModelConfiguration)
Declaration
protected virtual Task<Completions> CompletionsResponseValue(OpenAIModelConfiguration options)
Parameters
Returns
|
Improve this Doc
View Source
CompletionWithRetry(OpenAIModelConfiguration)
Retries the completion call on transient failures (the .NET equivalent of the Python tenacity-based retry in LangChain).
Declaration
public Task<Completions> CompletionWithRetry(OpenAIModelConfiguration modelConfiguration)
Parameters
Returns
|
Improve this Doc
View Source
GenerateInternalAsync(IList<string>, IList<string>)
Declaration
protected override Task<ModelResult> GenerateInternalAsync(IList<string> prompts, IList<string> stop = null)
Parameters
Type |
Name |
Description |
IList<string> |
prompts |
|
IList<string> |
stop |
|
Returns
Overrides
|
Improve this Doc
View Source
GetNumTokens(string)
Declaration
public override int GetNumTokens(string text)
Parameters
Type |
Name |
Description |
string |
text |
|
Returns
Overrides
|
Improve this Doc
View Source
GetSubPrompts(OpenAIModelConfiguration, IList<string>)
Declaration
public List<List<string>> GetSubPrompts(OpenAIModelConfiguration completionsOptions, IList<string> prompts)
Parameters
Returns
Type |
Description |
List<List<string>> |
|
|
Improve this Doc
View Source
MaxTokensForPrompt(string)
Calculate the maximum number of tokens possible to generate for a prompt.
Declaration
public int MaxTokensForPrompt(string prompt)
Parameters
Type |
Name |
Description |
string |
prompt |
The prompt to pass into the model.
|
Returns
Type |
Description |
int |
The maximum number of tokens to generate for a prompt.
|
Examples
var maxTokens = openai.MaxTokensForPrompt("Tell me a joke.")
|
Improve this Doc
View Source
ModelNameToContextSize(string)
Calculate the maximum context size, in tokens, for a given model.
text-davinci-003: 4,097 tokens
text-curie-001: 2,048 tokens
text-babbage-001: 2,048 tokens
text-ada-001: 2,048 tokens
code-davinci-002: 8,000 tokens
code-cushman-001: 2,048 tokens
Declaration
public int ModelNameToContextSize(string modelName)
Parameters
Type |
Name |
Description |
string |
modelName |
The model name we want to know the context size for.
|
Returns
Type |
Description |
int |
The maximum context size
|
Implements