Add gpt-4-1106-preview (gpt-4-turbo) to model list
parent f29358379d
commit ea87e6b3d4
1 changed file with 9 additions and 3 deletions
@@ -4,6 +4,7 @@
 import functools
 import json
+import warnings
 from dataclasses import dataclass
 
 import httpx
 
@@ -34,16 +35,20 @@ class EncodingMeta:
     @functools.cache
     def from_model(cls, model):
         if model == "gpt-3.5-turbo":
-            # print("Warning: gpt-3.5-turbo may update over time. Returning num tokens assuming gpt-3.5-turbo-0613.")
+            warnings.warn(
+                "Warning: gpt-3.5-turbo may update over time. Returning num tokens assuming gpt-3.5-turbo-0613."
+            )
             model = "gpt-3.5-turbo-0613"
         if model == "gpt-4":
-            # print("Warning: gpt-4 may update over time. Returning num tokens assuming gpt-4-0613.")
+            warnings.warn(
+                "Warning: gpt-4 may update over time. Returning num tokens assuming gpt-4-0613."
+            )
             model = "gpt-4-0613"
 
         try:
             encoding = tiktoken.encoding_for_model(model)
         except KeyError:
-            print("Warning: model not found. Using cl100k_base encoding.")
+            warnings.warn("Warning: model not found. Using cl100k_base encoding.")
             encoding = tiktoken.get_encoding("cl100k_base")
 
         if model in {
@@ -53,6 +58,7 @@ class EncodingMeta:
             "gpt-4-32k-0314",
             "gpt-4-0613",
             "gpt-4-32k-0613",
+            "gpt-4-1106-preview",
         }:
             tokens_per_message = 3
             tokens_per_name = 1
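
For reference, here is a minimal sketch of how the tokens_per_message / tokens_per_name values in the last hunk are typically consumed when counting chat-prompt tokens. It follows the OpenAI cookbook pattern; num_tokens_from_messages is an illustrative helper, not a function defined in this repository.

# Illustrative sketch (cookbook-style); not code from this repository.
import tiktoken

def num_tokens_from_messages(messages, model="gpt-4-1106-preview"):
    try:
        encoding = tiktoken.encoding_for_model(model)
    except KeyError:
        encoding = tiktoken.get_encoding("cl100k_base")
    tokens_per_message = 3  # same overhead the diff assigns to gpt-4-1106-preview
    tokens_per_name = 1
    num_tokens = 0
    for message in messages:
        num_tokens += tokens_per_message
        for key, value in message.items():
            num_tokens += len(encoding.encode(value))
            if key == "name":
                num_tokens += tokens_per_name
    num_tokens += 3  # every reply is primed with <|start|>assistant<|message|>
    return num_tokens

print(num_tokens_from_messages([{"role": "user", "content": "Hello!"}]))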
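
Because the print() calls are replaced with warnings.warn() (a UserWarning by default), downstream code can now manage these notices with the standard library's warnings filters; a small sketch:

# Sketch: silencing or escalating the new warnings from calling code.
import warnings

# Ignore the "may update over time" notices:
warnings.filterwarnings("ignore", message="Warning: gpt-4 may update over time")

# Or, e.g. in tests, turn them into exceptions instead:
# warnings.simplefilter("error", UserWarning)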