Commit 62f8d40f
Changed files (42)
bin
examples
openai
api_resources
bin/openai
@@ -4,8 +4,7 @@ import logging
import sys
import openai
-from openai.cli import display_error
-from openai.cli import api_register, tools_register
+from openai.cli import api_register, display_error, tools_register
logger = logging.getLogger()
formatter = logging.Formatter("[%(asctime)s] %(message)s")
@@ -62,8 +61,10 @@ def main():
args.func(args)
except openai.error.OpenAIError as e:
display_error(e)
+ return 1
except KeyboardInterrupt:
sys.stderr.write("\n")
+ return 1
return 0
examples/codex/backtranslation.py
@@ -1,7 +1,9 @@
-import openai
-from smokey import Smokey
from typing import List, Union
+from smokey import Smokey
+
+import openai
+
def get_candidates(
prompt: str,
examples/finetuning/answers-with-ft.py
@@ -1,6 +1,7 @@
-import openai
import argparse
+import openai
+
def create_context(
question, search_file_id, max_len=1800, search_model="ada", max_rerank=10
examples/semanticsearch/semanticsearch.py
@@ -1,10 +1,11 @@
#!/usr/bin/env python
-import openai
import argparse
import logging
import sys
from typing import List
+import openai
+
logger = logging.getLogger()
formatter = logging.Formatter("[%(asctime)s] [%(process)d] %(message)s")
handler = logging.StreamHandler(sys.stderr)
openai/api_resources/abstract/__init__.py
@@ -1,12 +1,10 @@
# flake8: noqa
from openai.api_resources.abstract.api_resource import APIResource
-from openai.api_resources.abstract.singleton_api_resource import SingletonAPIResource
from openai.api_resources.abstract.createable_api_resource import CreateableAPIResource
-from openai.api_resources.abstract.updateable_api_resource import UpdateableAPIResource
from openai.api_resources.abstract.deletable_api_resource import DeletableAPIResource
from openai.api_resources.abstract.listable_api_resource import ListableAPIResource
-from openai.api_resources.abstract.custom_method import custom_method
from openai.api_resources.abstract.nested_resource_class_methods import (
nested_resource_class_methods,
)
+from openai.api_resources.abstract.updateable_api_resource import UpdateableAPIResource
openai/api_resources/abstract/api_resource.py
@@ -14,16 +14,16 @@ class APIResource(OpenAIObject):
return instance
def refresh(self, request_id=None):
- headers = util.populate_headers(request_id=request_id)
- self.refresh_from(self.request("get", self.instance_url(), headers=headers))
+ self.refresh_from(
+ self.request("get", self.instance_url(), request_id=request_id)
+ )
return self
@classmethod
def class_url(cls):
if cls == APIResource:
raise NotImplementedError(
- "APIResource is an abstract class. You should perform "
- "actions on its subclasses (e.g. Charge, Customer)"
+ "APIResource is an abstract class. You should perform actions on its subclasses."
)
# Namespaces are separated in object names with periods (.) and in URLs
# with forward slashes (/), so replace the former with the latter.
@@ -54,7 +54,6 @@ class APIResource(OpenAIObject):
url_,
api_key=None,
api_base=None,
- idempotency_key=None,
request_id=None,
api_version=None,
organization=None,
@@ -66,8 +65,9 @@ class APIResource(OpenAIObject):
organization=organization,
api_base=api_base,
)
- headers = util.populate_headers(idempotency_key, request_id)
- response, _, api_key = requestor.request(method_, url_, params, headers)
+ response, _, api_key = requestor.request(
+ method_, url_, params, request_id=request_id
+ )
return util.convert_to_openai_object(
response, api_key, api_version, organization
)
openai/api_resources/abstract/createable_api_resource.py
@@ -1,7 +1,5 @@
-from __future__ import absolute_import, division, print_function
-
-from openai.api_resources.abstract.api_resource import APIResource
from openai import api_requestor, util
+from openai.api_resources.abstract.api_resource import APIResource
class CreateableAPIResource(APIResource):
@@ -12,7 +10,6 @@ class CreateableAPIResource(APIResource):
cls,
api_key=None,
api_base=None,
- idempotency_key=None,
request_id=None,
api_version=None,
organization=None,
@@ -25,8 +22,9 @@ class CreateableAPIResource(APIResource):
organization=organization,
)
url = cls.class_url()
- headers = util.populate_headers(idempotency_key, request_id)
- response, _, api_key = requestor.request("post", url, params, headers)
+ response, _, api_key = requestor.request(
+ "post", url, params, request_id=request_id
+ )
return util.convert_to_openai_object(
response,
openai/api_resources/abstract/custom_method.py
@@ -1,40 +0,0 @@
-from urllib.parse import quote_plus
-
-from openai import util
-
-
-def custom_method(name, http_verb, http_path=None):
- if http_verb not in ["get", "post", "delete"]:
- raise ValueError(
- "Invalid http_verb: %s. Must be one of 'get', 'post' or 'delete'"
- % http_verb
- )
- if http_path is None:
- http_path = name
-
- def wrapper(cls):
- def custom_method_request(cls, sid, **params):
- url = "%s/%s/%s" % (
- cls.class_url(),
- quote_plus(sid),
- http_path,
- )
- return cls._static_request(http_verb, url, **params)
-
- existing_method = getattr(cls, name, None)
- if existing_method is None:
- setattr(cls, name, classmethod(custom_method_request))
- else:
- # If a method with the same name we want to use already exists on
- # the class, we assume it's an instance method. In this case, the
- # new class method is prefixed with `_cls_`, and the original
- # instance method is decorated with `util.class_method_variant` so
- # that the new class method is called when the original method is
- # called as a class method.
- setattr(cls, "_cls_" + name, classmethod(custom_method_request))
- instance_method = util.class_method_variant("_cls_" + name)(existing_method)
- setattr(cls, name, instance_method)
-
- return cls
-
- return wrapper
openai/api_resources/abstract/deletable_api_resource.py
@@ -1,16 +1,12 @@
from urllib.parse import quote_plus
-from openai import util
from openai.api_resources.abstract.api_resource import APIResource
class DeletableAPIResource(APIResource):
@classmethod
- def _cls_delete(cls, sid, **params):
+ def delete(cls, sid, **params):
+ if isinstance(cls, APIResource):
+ raise ValueError(".delete may only be called as a class method now.")
url = "%s/%s" % (cls.class_url(), quote_plus(sid))
return cls._static_request("delete", url, **params)
-
- @util.class_method_variant("_cls_delete")
- def delete(self, **params):
- self.refresh_from(self.request("delete", self.instance_url(), params))
- return self
openai/api_resources/abstract/engine_api_resource.py
@@ -4,6 +4,7 @@ from urllib.parse import quote_plus
from openai import api_requestor, error, util
from openai.api_resources.abstract.api_resource import APIResource
+from openai.openai_response import OpenAIResponse
MAX_TIMEOUT = 20
@@ -31,7 +32,6 @@ class EngineAPIResource(APIResource):
cls,
api_key=None,
api_base=None,
- idempotency_key=None,
request_id=None,
api_version=None,
organization=None,
@@ -62,12 +62,12 @@ class EngineAPIResource(APIResource):
organization=organization,
)
url = cls.class_url(engine)
- headers = util.populate_headers(idempotency_key, request_id)
response, _, api_key = requestor.request(
- "post", url, params, headers, stream=stream
+ "post", url, params, stream=stream, request_id=request_id
)
if stream:
+ assert not isinstance(response, OpenAIResponse) # must be an iterator
return (
util.convert_to_openai_object(
line,
@@ -99,9 +99,7 @@ class EngineAPIResource(APIResource):
if not isinstance(id, str):
raise error.InvalidRequestError(
- "Could not determine which URL to request: %s instance "
- "has invalid ID: %r, %s. ID should be of type `str` (or"
- " `unicode`)" % (type(self).__name__, id, type(id)),
+ f"Could not determine which URL to request: {type(self).__name__} instance has invalid ID: {id}, {type(id)}. ID should be of type str.",
"id",
)
openai/api_resources/abstract/listable_api_resource.py
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, division, print_function
-
from openai import api_requestor, util
from openai.api_resources.abstract.api_resource import APIResource
@@ -19,7 +17,6 @@ class ListableAPIResource(APIResource):
api_base=None,
**params,
):
- headers = util.populate_headers(request_id=request_id)
requestor = api_requestor.APIRequestor(
api_key,
api_base=api_base or cls.api_base(),
@@ -27,7 +24,9 @@ class ListableAPIResource(APIResource):
organization=organization,
)
url = cls.class_url()
- response, _, api_key = requestor.request("get", url, params, headers)
+ response, _, api_key = requestor.request(
+ "get", url, params, request_id=request_id
+ )
openai_object = util.convert_to_openai_object(
response, api_key, api_version, organization
)
openai/api_resources/abstract/nested_resource_class_methods.py
@@ -28,7 +28,6 @@ def nested_resource_class_methods(
method,
url,
api_key=None,
- idempotency_key=None,
request_id=None,
api_version=None,
organization=None,
@@ -37,8 +36,9 @@ def nested_resource_class_methods(
requestor = api_requestor.APIRequestor(
api_key, api_version=api_version, organization=organization
)
- headers = util.populate_headers(idempotency_key, request_id)
- response, _, api_key = requestor.request(method, url, params, headers)
+ response, _, api_key = requestor.request(
+ method, url, params, request_id=request_id
+ )
return util.convert_to_openai_object(
response, api_key, api_version, organization
)
openai/api_resources/abstract/singleton_api_resource.py
@@ -1,24 +0,0 @@
-from __future__ import absolute_import, division, print_function
-
-from openai.api_resources.abstract.api_resource import APIResource
-
-
-class SingletonAPIResource(APIResource):
- @classmethod
- def retrieve(cls, **params):
- return super(SingletonAPIResource, cls).retrieve(None, **params)
-
- @classmethod
- def class_url(cls):
- if cls == SingletonAPIResource:
- raise NotImplementedError(
- "SingletonAPIResource is an abstract class. You should "
- "perform actions on its subclasses (e.g. Balance)"
- )
- # Namespaces are separated in object names with periods (.) and in URLs
- # with forward slashes (/), so replace the former with the latter.
- base = cls.OBJECT_NAME.replace(".", "/") # type: ignore
- return "/v1/%s" % (base,)
-
- def instance_url(self):
- return self.class_url()
openai/api_resources/abstract/updateable_api_resource.py
@@ -1,6 +1,5 @@
from urllib.parse import quote_plus
-from openai import util
from openai.api_resources.abstract.api_resource import APIResource
@@ -9,15 +8,3 @@ class UpdateableAPIResource(APIResource):
def modify(cls, sid, **params):
url = "%s/%s" % (cls.class_url(), quote_plus(sid))
return cls._static_request("post", url, **params)
-
- def save(self, idempotency_key=None, request_id=None):
- updated_params = self.serialize(None)
- headers = util.populate_headers(idempotency_key, request_id)
-
- if updated_params:
- self.refresh_from(
- self.request("post", self.instance_url(), updated_params, headers)
- )
- else:
- util.logger.debug("Trying to save already saved object %r", self)
- return self
openai/api_resources/completion.py
@@ -3,7 +3,7 @@ import time
from openai import util
from openai.api_resources.abstract import DeletableAPIResource, ListableAPIResource
from openai.api_resources.abstract.engine_api_resource import EngineAPIResource
-from openai.error import TryAgain, InvalidRequestError
+from openai.error import InvalidRequestError, TryAgain
class Completion(EngineAPIResource, ListableAPIResource, DeletableAPIResource):
openai/api_resources/engine.py
@@ -1,10 +1,7 @@
import time
from openai import util
-from openai.api_resources.abstract import (
- ListableAPIResource,
- UpdateableAPIResource,
-)
+from openai.api_resources.abstract import ListableAPIResource, UpdateableAPIResource
from openai.error import TryAgain
openai/api_resources/error_object.py
@@ -1,7 +1,7 @@
-from __future__ import absolute_import, division, print_function
+from typing import Optional
-from openai.util import merge_dicts
from openai.openai_object import OpenAIObject
+from openai.util import merge_dicts
class ErrorObject(OpenAIObject):
@@ -9,15 +9,14 @@ class ErrorObject(OpenAIObject):
self,
values,
api_key=None,
- partial=False,
api_version=None,
organization=None,
- last_response=None,
+ response_ms: Optional[int] = None,
):
# Unlike most other API resources, the API will omit attributes in
# error objects when they have a null value. We manually set default
# values here to facilitate generic error handling.
values = merge_dicts({"message": None, "type": None}, values)
return super(ErrorObject, self).refresh_from(
- values, api_key, partial, api_version, organization, last_response
+ values, api_key, api_version, organization, response_ms
)
openai/api_resources/file.py
@@ -1,14 +1,10 @@
-from __future__ import absolute_import, division, print_function
-
import json
import os
+from typing import cast
import openai
from openai import api_requestor, util
-from openai.api_resources.abstract import (
- DeletableAPIResource,
- ListableAPIResource,
-)
+from openai.api_resources.abstract import DeletableAPIResource, ListableAPIResource
class File(ListableAPIResource, DeletableAPIResource):
@@ -16,19 +12,30 @@ class File(ListableAPIResource, DeletableAPIResource):
@classmethod
def create(
- cls, api_key=None, api_base=None, api_version=None, organization=None, **params
+ cls,
+ file,
+ purpose,
+ model=None,
+ api_key=None,
+ api_base=None,
+ api_version=None,
+ organization=None,
):
+ if purpose != "search" and model is not None:
+ raise ValueError("'model' is only meaningful if 'purpose' is 'search'")
requestor = api_requestor.APIRequestor(
api_key,
- api_base=api_base or openai.file_api_base or openai.api_base,
+ api_base=api_base or openai.api_base,
api_version=api_version,
organization=organization,
)
url = cls.class_url()
- supplied_headers = {"Content-Type": "multipart/form-data"}
- response, _, api_key = requestor.request(
- "post", url, params=params, headers=supplied_headers
- )
+ # Set the filename on 'purpose' and 'model' to None so they are
+ # interpreted as form data.
+ files = [("file", file), ("purpose", (None, purpose))]
+ if model is not None:
+ files.append(("model", (None, model)))
+ response, _, api_key = requestor.request("post", url, files=files)
return util.convert_to_openai_object(
response, api_key, api_version, organization
)
@@ -39,17 +46,21 @@ class File(ListableAPIResource, DeletableAPIResource):
):
requestor = api_requestor.APIRequestor(
api_key,
- api_base=api_base or openai.file_api_base or openai.api_base,
+ api_base=api_base or openai.api_base,
api_version=api_version,
organization=organization,
)
url = f"{cls.class_url()}/{id}/content"
- rbody, rcode, rheaders, _, _ = requestor.request_raw("get", url)
- if not 200 <= rcode < 300:
+ result = requestor.request_raw("get", url)
+ if not 200 <= result.status_code < 300:
raise requestor.handle_error_response(
- rbody, rcode, json.loads(rbody), rheaders, stream_error=False
+ result.content,
+ result.status_code,
+ json.loads(cast(bytes, result.content)),
+ result.headers,
+ stream_error=False,
)
- return rbody
+ return result.content
@classmethod
def find_matching_files(
@@ -71,7 +82,7 @@ class File(ListableAPIResource, DeletableAPIResource):
)
all_files = cls.list(
api_key=api_key,
- api_base=api_base or openai.file_api_base or openai.api_base,
+ api_base=api_base or openai.api_base,
api_version=api_version,
organization=organization,
).get("data", [])
openai/api_resources/fine_tune.py
@@ -1,11 +1,12 @@
from urllib.parse import quote_plus
+from openai import api_requestor, util
from openai.api_resources.abstract import (
- ListableAPIResource,
CreateableAPIResource,
+ ListableAPIResource,
nested_resource_class_methods,
)
-from openai import api_requestor, util
+from openai.openai_response import OpenAIResponse
@nested_resource_class_methods("event", operations=["list"])
@@ -18,8 +19,7 @@ class FineTune(ListableAPIResource, CreateableAPIResource):
extn = quote_plus(id)
url = "%s/%s/cancel" % (base, extn)
instance = cls(id, api_key, **params)
- headers = util.populate_headers(request_id=request_id)
- return instance.request("post", url, headers=headers)
+ return instance.request("post", url, request_id=request_id)
@classmethod
def stream_events(
@@ -42,11 +42,11 @@ class FineTune(ListableAPIResource, CreateableAPIResource):
organization=organization,
)
url = "%s/%s/events?stream=true" % (base, extn)
- headers = util.populate_headers(request_id=request_id)
response, _, api_key = requestor.request(
- "get", url, params, headers=headers, stream=True
+ "get", url, params, stream=True, request_id=request_id
)
+ assert not isinstance(response, OpenAIResponse) # must be an iterator
return (
util.convert_to_openai_object(
line,
openai/api_resources/model.py
@@ -1,7 +1,4 @@
-from openai.api_resources.abstract import (
- ListableAPIResource,
- DeletableAPIResource,
-)
+from openai.api_resources.abstract import DeletableAPIResource, ListableAPIResource
class Model(ListableAPIResource, DeletableAPIResource):
openai/tests/test_api_requestor.py
@@ -0,0 +1,27 @@
+import json
+
+import requests
+from pytest_mock import MockerFixture
+
+from openai import Model
+
+
+def test_requestor_sets_request_id(mocker: MockerFixture) -> None:
+ # Fake out 'requests' and confirm that the X-Request-Id header is set.
+
+ got_headers = {}
+
+ def fake_request(self, *args, **kwargs):
+ nonlocal got_headers
+ got_headers = kwargs["headers"]
+ r = requests.Response()
+ r.status_code = 200
+ r.headers["content-type"] = "application/json"
+ r._content = json.dumps({}).encode("utf-8")
+ return r
+
+ mocker.patch("requests.sessions.Session.request", fake_request)
+ fake_request_id = "1234"
+ Model.retrieve("xxx", request_id=fake_request_id) # arbitrary API resource
+ got_request_id = got_headers.get("X-Request-Id")
+ assert got_request_id == fake_request_id
openai/tests/test_endpoints.py
@@ -1,7 +1,8 @@
-import openai
import io
import json
+import openai
+
# FILE TESTS
def test_file_upload():
@@ -12,15 +13,18 @@ def test_file_upload():
assert result.purpose == "search"
assert "id" in result
+ result = openai.File.retrieve(id=result.id)
+ assert result.status == "uploaded"
+
# COMPLETION TESTS
def test_completions():
- result = openai.Completion.create(prompt="This was a test", n=5, engine="davinci")
+ result = openai.Completion.create(prompt="This was a test", n=5, engine="ada")
assert len(result.choices) == 5
def test_completions_multiple_prompts():
result = openai.Completion.create(
- prompt=["This was a test", "This was another test"], n=5, engine="davinci"
+ prompt=["This was a test", "This was another test"], n=5, engine="ada"
)
assert len(result.choices) == 10
openai/tests/test_file_cli.py
@@ -0,0 +1,39 @@
+import json
+import subprocess
+import time
+from tempfile import NamedTemporaryFile
+
+STILL_PROCESSING = "File is still processing. Check back later."
+
+
+def test_file_cli() -> None:
+ contents = json.dumps({"prompt": "1 + 3 =", "completion": "4"}) + "\n"
+ with NamedTemporaryFile(suffix=".jsonl", mode="wb") as train_file:
+ train_file.write(contents.encode("utf-8"))
+ train_file.flush()
+ create_output = subprocess.check_output(
+ ["openai", "api", "files.create", "-f", train_file.name, "-p", "fine-tune"]
+ )
+ file_obj = json.loads(create_output)
+ assert file_obj["bytes"] == len(contents)
+ file_id: str = file_obj["id"]
+ assert file_id.startswith("file-")
+ start_time = time.time()
+ while True:
+ delete_result = subprocess.run(
+ ["openai", "api", "files.delete", "-i", file_id],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ encoding="utf-8",
+ )
+ if delete_result.returncode == 0:
+ break
+ elif STILL_PROCESSING in delete_result.stderr:
+ time.sleep(0.5)
+ if start_time + 60 < time.time():
+ raise RuntimeError("timed out waiting for file to become available")
+ continue
+ else:
+ raise RuntimeError(
+ f"delete failed: stdout={delete_result.stdout} stderr={delete_result.stderr}"
+ )
openai/tests/test_util.py
@@ -0,0 +1,30 @@
+from tempfile import NamedTemporaryFile
+
+import pytest
+
+import openai
+from openai import util
+
+
+@pytest.fixture(scope="function")
+def api_key_file():
+ saved_path = openai.api_key_path
+ try:
+ with NamedTemporaryFile(prefix="openai-api-key", mode="wt") as tmp:
+ openai.api_key_path = tmp.name
+ yield tmp
+ finally:
+ openai.api_key_path = saved_path
+
+
+def test_openai_api_key_path(api_key_file) -> None:
+ print("sk-foo", file=api_key_file)
+ api_key_file.flush()
+ assert util.default_api_key() == "sk-foo"
+
+
+def test_openai_api_key_path_with_malformed_key(api_key_file) -> None:
+ print("malformed-api-key", file=api_key_file)
+ api_key_file.flush()
+ with pytest.raises(ValueError, match="Malformed API key"):
+ util.default_api_key()
openai/__init__.py
@@ -1,31 +1,11 @@
-import os
-
# OpenAI Python bindings.
#
# Originally forked from the MIT-licensed Stripe Python bindings.
-# Configuration variables
-
-api_key = os.environ.get("OPENAI_API_KEY")
-organization = os.environ.get("OPENAI_ORGANIZATION")
-client_id = None
-api_base = os.environ.get("OPENAI_API_BASE", "https://api.openai.com")
-file_api_base = None
-api_version = None
-verify_ssl_certs = True
-proxy = None
-default_http_client = None
-app_info = None
-enable_telemetry = True
-max_network_retries = 0
-ca_bundle_path = os.path.join(os.path.dirname(__file__), "data/ca-certificates.crt")
-debug = False
-
-# Set to either 'debug' or 'info', controls console logging
-log = None
+import os
+from typing import Optional
-# API resources
-from openai.api_resources import ( # noqa: E402,F401
+from openai.api_resources import (
Answer,
Classification,
Completion,
@@ -36,4 +16,48 @@ from openai.api_resources import ( # noqa: E402,F401
Model,
Search,
)
-from openai.error import APIError, InvalidRequestError, OpenAIError # noqa: E402,F401
+from openai.error import APIError, InvalidRequestError, OpenAIError
+
+api_key = os.environ.get("OPENAI_API_KEY")
+# Path of a file with an API key, whose contents can change. Supersedes
+# `api_key` if set. The main use case is volume-mounted Kubernetes secrets,
+# which are updated automatically.
+api_key_path: Optional[str] = os.environ.get("OPENAI_API_KEY_PATH")
+
+organization = os.environ.get("OPENAI_ORGANIZATION")
+api_base = os.environ.get("OPENAI_API_BASE", "https://api.openai.com")
+api_version = None
+verify_ssl_certs = True # No effect. Certificates are always verified.
+proxy = None
+app_info = None
+enable_telemetry = False # Ignored; the telemetry feature was removed.
+ca_bundle_path = os.path.join(os.path.dirname(__file__), "data/ca-certificates.crt")
+debug = False
+log = None # Set to either 'debug' or 'info', controls console logging
+
+__all__ = [
+ "APIError",
+ "Answer",
+ "Classification",
+ "Completion",
+ "Engine",
+ "ErrorObject",
+ "File",
+ "FineTune",
+ "InvalidRequestError",
+ "Model",
+ "OpenAIError",
+ "Search",
+ "api_base",
+ "api_key",
+ "api_key_path",
+ "api_version",
+ "app_info",
+ "ca_bundle_path",
+ "debug",
+    "enable_telemetry",
+ "log",
+ "organization",
+ "proxy",
+ "verify_ssl_certs",
+]
openai/api_requestor.py
@@ -1,60 +1,22 @@
-import calendar
-import datetime
import json
import platform
-import time
-import uuid
+import threading
import warnings
-from io import BytesIO
-from collections import OrderedDict
+from json import JSONDecodeError
+from typing import Dict, Iterator, Optional, Tuple, Union
from urllib.parse import urlencode, urlsplit, urlunsplit
+import requests
+
import openai
-from openai import error, http_client, version, util
-from openai.multipart_data_generator import MultipartDataGenerator
+from openai import error, util, version
from openai.openai_response import OpenAIResponse
-from openai.upload_progress import BufferReader
+TIMEOUT_SECS = 600
+MAX_CONNECTION_RETRIES = 2
-def _encode_datetime(dttime) -> int:
- utc_timestamp: float
- if dttime.tzinfo and dttime.tzinfo.utcoffset(dttime) is not None:
- utc_timestamp = calendar.timegm(dttime.utctimetuple())
- else:
- utc_timestamp = time.mktime(dttime.timetuple())
-
- return int(utc_timestamp)
-
-
-def _encode_nested_dict(key, data, fmt="%s[%s]"):
- d = OrderedDict()
- for subkey, subvalue in data.items():
- d[fmt % (key, subkey)] = subvalue
- return d
-
-
-def _api_encode(data):
- for key, value in data.items():
- if value is None:
- continue
- elif hasattr(value, "openai_id"):
- yield (key, value.openai_id)
- elif isinstance(value, list) or isinstance(value, tuple):
- for i, sv in enumerate(value):
- if isinstance(sv, dict):
- subdict = _encode_nested_dict("%s[%d]" % (key, i), sv)
- for k, v in _api_encode(subdict):
- yield (k, v)
- else:
- yield ("%s[%d]" % (key, i), sv)
- elif isinstance(value, dict):
- subdict = _encode_nested_dict(key, value)
- for subkey, subvalue in _api_encode(subdict):
- yield (subkey, subvalue)
- elif isinstance(value, datetime.datetime):
- yield (key, _encode_datetime(value))
- else:
- yield (key, value)
+# Has one attribute per thread, 'session'.
+_thread_context = threading.local()
def _build_api_url(url, query):
@@ -66,6 +28,35 @@ def _build_api_url(url, query):
return urlunsplit((scheme, netloc, path, query, fragment))
+def _requests_proxies_arg(proxy) -> Optional[Dict[str, str]]:
+    """Returns a value suitable for the 'proxies' argument to 'requests.request'."""
+ if proxy is None:
+ return None
+ elif isinstance(proxy, str):
+ return {"http": proxy, "https": proxy}
+ elif isinstance(proxy, dict):
+ return proxy.copy()
+ else:
+ raise ValueError(
+ "'openai.proxy' must be specified as either a string URL or a dict with string URL under the https and/or http keys."
+ )
+
+
+def _make_session() -> requests.Session:
+ if not openai.verify_ssl_certs:
+ warnings.warn("verify_ssl_certs is ignored; openai always verifies.")
+ s = requests.Session()
+ proxies = _requests_proxies_arg(openai.proxy)
+ if proxies:
+ s.proxies = proxies
+ s.verify = openai.ca_bundle_path
+ s.mount(
+ "https://",
+ requests.adapters.HTTPAdapter(max_retries=MAX_CONNECTION_RETRIES),
+ )
+ return s
+
+
def parse_stream(rbody):
for line in rbody:
if line:
@@ -79,40 +70,12 @@ def parse_stream(rbody):
class APIRequestor:
- def __init__(
- self, key=None, client=None, api_base=None, api_version=None, organization=None
- ):
+ def __init__(self, key=None, api_base=None, api_version=None, organization=None):
self.api_base = api_base or openai.api_base
- self.api_key = key
+ self.api_key = key or util.default_api_key()
self.api_version = api_version or openai.api_version
self.organization = organization or openai.organization
- self._default_proxy = None
-
- from openai import verify_ssl_certs as verify
- from openai import proxy
-
- if client:
- self._client = client
- elif openai.default_http_client:
- self._client = openai.default_http_client
- if proxy != self._default_proxy:
- warnings.warn(
- "openai.proxy was updated after sending a "
- "request - this is a no-op. To use a different proxy, "
- "set openai.default_http_client to a new client "
- "configured with the proxy."
- )
- else:
- # If the openai.default_http_client has not been set by the user
- # yet, we'll set it here. This way, we aren't creating a new
- # HttpClient for every request.
- openai.default_http_client = http_client.new_default_http_client(
- verify_ssl_certs=verify, proxy=proxy
- )
- self._client = openai.default_http_client
- self._default_proxy = proxy
-
@classmethod
def format_app_info(cls, info):
str = info["name"]
@@ -122,12 +85,27 @@ class APIRequestor:
str += " (%s)" % (info["url"],)
return str
- def request(self, method, url, params=None, headers=None, stream=False):
- rbody, rcode, rheaders, stream, my_api_key = self.request_raw(
- method.lower(), url, params, headers, stream=stream
+ def request(
+ self,
+ method,
+ url,
+ params=None,
+ headers=None,
+ files=None,
+ stream=False,
+ request_id: Optional[str] = None,
+ ) -> Tuple[Union[OpenAIResponse, Iterator[OpenAIResponse]], bool, str]:
+ result = self.request_raw(
+ method.lower(),
+ url,
+ params,
+ headers,
+ files=files,
+ stream=stream,
+ request_id=request_id,
)
- resp = self.interpret_response(rbody, rcode, rheaders, stream=stream)
- return resp, stream, my_api_key
+ resp, got_stream = self._interpret_response(result, stream)
+ return resp, got_stream, self.api_key
def handle_error_response(self, rbody, rcode, resp, rheaders, stream_error=False):
try:
@@ -159,20 +137,15 @@ class APIRequestor:
error_data.get("message"), rbody, rcode, resp, rheaders
)
elif rcode in [400, 404, 415]:
- if error_data.get("type") == "idempotency_error":
- return error.IdempotencyError(
- error_data.get("message"), rbody, rcode, resp, rheaders
- )
- else:
- return error.InvalidRequestError(
- error_data.get("message"),
- error_data.get("param"),
- error_data.get("code"),
- rbody,
- rcode,
- resp,
- rheaders,
- )
+ return error.InvalidRequestError(
+ error_data.get("message"),
+ error_data.get("param"),
+ error_data.get("code"),
+ rbody,
+ rcode,
+ resp,
+ rheaders,
+ )
elif rcode == 401:
return error.AuthenticationError(
error_data.get("message"), rbody, rcode, resp, rheaders
@@ -195,19 +168,24 @@ class APIRequestor:
error_data.get("message"), rbody, rcode, resp, rheaders
)
- def request_headers(self, api_key, method, extra):
+ def request_headers(
+ self, method: str, extra, request_id: Optional[str]
+ ) -> Dict[str, str]:
user_agent = "OpenAI/v1 PythonBindings/%s" % (version.VERSION,)
if openai.app_info:
user_agent += " " + self.format_app_info(openai.app_info)
+ uname_without_node = " ".join(
+ v for k, v in platform.uname()._asdict().items() if k != "node"
+ )
ua = {
"bindings_version": version.VERSION,
- "httplib": self._client.name,
+ "httplib": "requests",
"lang": "python",
"lang_version": platform.python_version(),
"platform": platform.platform(),
"publisher": "openai",
- "uname": " ".join(platform.uname()),
+ "uname": uname_without_node,
}
if openai.app_info:
ua["application"] = openai.app_info
@@ -215,92 +193,48 @@ class APIRequestor:
headers = {
"X-OpenAI-Client-User-Agent": json.dumps(ua),
"User-Agent": user_agent,
- "Authorization": "Bearer %s" % (api_key,),
+ "Authorization": "Bearer %s" % (self.api_key,),
}
if self.organization:
headers["OpenAI-Organization"] = self.organization
- if method in {"post", "put"}:
- headers.setdefault("Idempotency-Key", str(uuid.uuid4()))
-
if self.api_version is not None:
headers["OpenAI-Version"] = self.api_version
-
+ if request_id is not None:
+ headers["X-Request-Id"] = request_id
+ if openai.debug:
+ headers["OpenAI-Debug"] = "true"
headers.update(extra)
return headers
def request_raw(
- self, method, url, params=None, supplied_headers=None, stream=False
- ):
- """
- Mechanism for issuing an API call
- """
-
- if self.api_key:
- my_api_key = self.api_key
- else:
- from openai import api_key
-
- my_api_key = api_key
-
- if my_api_key is None:
- raise error.AuthenticationError(
- "No API key provided. (HINT: set your API key in code using "
- '"openai.api_key = <API-KEY>", or you can set the environment variable OPENAI_API_KEY=<API-KEY>). You can generate API keys '
- "in the OpenAI web interface. See https://onboard.openai.com "
- "for details, or email support@openai.com if you have any "
- "questions."
- )
-
+ self,
+ method,
+ url,
+ params=None,
+ supplied_headers=None,
+ files=None,
+ stream=False,
+ request_id: Optional[str] = None,
+ ) -> requests.Response:
abs_url = "%s%s" % (self.api_base, url)
headers = {}
- compress = None
- progress_meter = False
+ data = None
if method == "get" or method == "delete":
if params:
- encoded_params = url_encode_params(params)
+ encoded_params = urlencode(
+ [(k, v) for k, v in params.items() if v is not None]
+ )
abs_url = _build_api_url(abs_url, encoded_params)
- else:
- encoded_params = None
- post_data = None
elif method in {"post", "put"}:
- if (
- supplied_headers is not None
- and supplied_headers.get("Content-Type") == "multipart/form-data"
- ):
- generator = MultipartDataGenerator()
- generator.add_params(params or {})
- post_data = generator.get_post_data()
- content_type = "multipart/form-data; boundary=%s" % (
- generator.boundary,
- )
- # We will overrite Content-Type
- supplied_headers.pop("Content-Type")
- progress_meter = True
- # compress = "gzip"
- compress = None
- else:
- post_data = json.dumps(params).encode()
- content_type = "application/json"
-
- headers["Content-Type"] = content_type
-
- encoded_params = post_data
-
- if progress_meter:
- post_data = BufferReader(post_data, desc="Upload progress")
-
- if compress == "gzip":
- if not hasattr(post_data, "read"):
- post_data = BytesIO(post_data)
- headers["Content-Encoding"] = "gzip"
-
- from openai.gzip_stream import GZIPCompressedStream
-
- post_data = GZIPCompressedStream(post_data, compression_level=9)
+ if params and files:
+ raise ValueError("At most one of params and files may be specified.")
+ if params:
+ data = json.dumps(params).encode()
+ headers["Content-Type"] = "application/json"
else:
raise error.APIConnectionError(
"Unrecognized HTTP method %r. This may indicate a bug in the "
@@ -308,58 +242,75 @@ class APIRequestor:
"assistance." % (method,)
)
- headers = self.request_headers(my_api_key, method, headers)
+ headers = self.request_headers(method, headers, request_id)
if supplied_headers is not None:
- for key, value in supplied_headers.items():
- headers[key] = value
+ headers.update(supplied_headers)
util.log_info("Request to OpenAI API", method=method, path=abs_url)
- util.log_debug(
- "Post details", post_data=encoded_params, api_version=self.api_version
- )
-
- rbody, rcode, rheaders, stream = self._client.request_with_retries(
- method, abs_url, headers, post_data, stream=stream
- )
+ util.log_debug("Post details", data=data, api_version=self.api_version)
+ if not hasattr(_thread_context, "session"):
+ _thread_context.session = _make_session()
+ try:
+ result = _thread_context.session.request(
+ method,
+ abs_url,
+ headers=headers,
+ data=data,
+ files=files,
+ stream=stream,
+ timeout=TIMEOUT_SECS,
+ )
+ except requests.exceptions.RequestException as e:
+ raise error.APIConnectionError("Error communicating with OpenAI") from e
util.log_info(
"OpenAI API response",
path=abs_url,
- response_code=rcode,
- processing_ms=rheaders.get("OpenAI-Processing-Ms"),
+ response_code=result.status_code,
+ processing_ms=result.headers.get("OpenAI-Processing-Ms"),
)
- util.log_debug("API response body", body=rbody, headers=rheaders)
-
- if "Request-Id" in rheaders:
- request_id = rheaders["Request-Id"]
+ # Don't read the whole stream for debug logging unless necessary.
+ if openai.log == "debug":
util.log_debug(
- "Dashboard link for request", link=util.dashboard_link(request_id)
+ "API response body", body=result.content, headers=result.headers
)
+ return result
- return rbody, rcode, rheaders, stream, my_api_key
-
- def interpret_response(self, rbody, rcode, rheaders, stream=False):
- if stream:
+ def _interpret_response(
+ self, result: requests.Response, stream: bool
+ ) -> Tuple[Union[OpenAIResponse, Iterator[OpenAIResponse]], bool]:
+ """Returns the response(s) and a bool indicating whether it is a stream."""
+ if stream and "text/event-stream" in result.headers.get("Content-Type", ""):
return (
- self.interpret_response_line(line, rcode, rheaders, stream)
- for line in parse_stream(rbody)
- )
+ self._interpret_response_line(
+ line, result.status_code, result.headers, stream=True
+ )
+ for line in parse_stream(result.iter_lines())
+ ), True
else:
- return self.interpret_response_line(rbody, rcode, rheaders, stream)
+ return (
+ self._interpret_response_line(
+ result.content, result.status_code, result.headers, stream=False
+ ),
+ False,
+ )
- def interpret_response_line(self, rbody, rcode, rheaders, stream=False):
+ def _interpret_response_line(
+ self, rbody, rcode, rheaders, stream: bool
+ ) -> OpenAIResponse:
+ if rcode == 503:
+ raise error.ServiceUnavailableError(
+ "The server is overloaded or not ready yet.", rbody, rcode, rheaders
+ )
try:
if hasattr(rbody, "decode"):
rbody = rbody.decode("utf-8")
- resp = OpenAIResponse(rbody, rcode, rheaders)
- except Exception:
+ data = json.loads(rbody)
+ except (JSONDecodeError, UnicodeDecodeError):
raise error.APIError(
- "Invalid response body from API: %s "
- "(HTTP response code was %d)" % (rbody, rcode),
- rbody,
- rcode,
- rheaders,
+ f"HTTP code {rcode} from API ({rbody})", rbody, rcode, rheaders
)
+ resp = OpenAIResponse(data, rheaders)
# In the future, we might add a "status" parameter to errors
# to better handle the "error while streaming" case.
stream_error = stream and "error" in resp.data
@@ -367,15 +318,4 @@ class APIRequestor:
raise self.handle_error_response(
rbody, rcode, resp.data, rheaders, stream_error=stream_error
)
-
return resp
-
-
-def url_encode_params(params):
- encoded_params = urlencode(list(_api_encode(params or {})))
-
- # Don't use strict form encoding by changing the square bracket control
- # characters back to their literals. This is fine by the server, and
- # makes these parameter strings easier to read.
- encoded_params = encoded_params.replace("%5B", "[").replace("%5D", "]")
- return encoded_params
openai/cli.py
@@ -5,6 +5,7 @@ import sys
import warnings
import openai
+from openai.upload_progress import BufferReader
from openai.validators import (
apply_necessary_remediation,
apply_optional_remediation,
@@ -60,9 +61,7 @@ class Engine:
@classmethod
def update(cls, args):
- engine = openai.Engine(id=args.id)
- engine.replicas = args.replicas
- engine.save()
+ engine = openai.Engine.modify(args.id, replicas=args.replicas)
display(engine)
@classmethod
@@ -181,14 +180,12 @@ class Completion:
class Model:
@classmethod
def get(cls, args):
- resp = openai.Model.retrieve(
- id=args.id,
- )
+ resp = openai.Model.retrieve(id=args.id)
print(resp)
@classmethod
def delete(cls, args):
- model = openai.Model(id=args.id).delete()
+ model = openai.Model.delete(args.id)
print(model)
@classmethod
@@ -200,10 +197,10 @@ class Model:
class File:
@classmethod
def create(cls, args):
+ with open(args.file, "rb") as file_reader:
+ buffer_reader = BufferReader(file_reader.read(), desc="Upload progress")
resp = openai.File.create(
- file=open(args.file),
- purpose=args.purpose,
- model=args.model,
+ file=buffer_reader, purpose=args.purpose, model=args.model
)
print(resp)
@@ -214,7 +211,7 @@ class File:
@classmethod
def delete(cls, args):
- file = openai.File(id=args.id).delete()
+ file = openai.File.delete(args.id)
print(file)
@classmethod
openai/error.py
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, division, print_function
-
import openai
@@ -66,7 +64,7 @@ class OpenAIError(Exception):
return None
return openai.api_resources.error_object.ErrorObject.construct_from(
- self.json_body["error"], openai.api_key
+ self.json_body["error"], key=None
)
@@ -95,10 +93,6 @@ class APIConnectionError(OpenAIError):
self.should_retry = should_retry
-class IdempotencyError(OpenAIError):
- pass
-
-
class InvalidRequestError(OpenAIError):
def __init__(
self,
@@ -138,6 +132,10 @@ class RateLimitError(OpenAIError):
pass
+class ServiceUnavailableError(OpenAIError):
+ pass
+
+
class SignatureVerificationError(OpenAIError):
def __init__(self, message, sig_header, http_body=None):
super(SignatureVerificationError, self).__init__(message, http_body)
openai/gzip_stream.py
@@ -1,83 +0,0 @@
-# Vendored from https://github.com/leenr/gzip-stream
-import gzip
-import io
-
-
-class GZIPCompressedStream(io.RawIOBase):
- def __init__(self, stream, compression_level):
- assert 1 <= compression_level <= 9
-
- self._compression_level = compression_level
- self._stream = stream
-
- self._compressed_stream = io.BytesIO()
- self._compressor = gzip.GzipFile(
- mode="wb", fileobj=self._compressed_stream, compresslevel=compression_level
- )
-
- # because of the GZIP header written by `GzipFile.__init__`:
- self._compressed_stream.seek(0)
-
- @property
- def compression_level(self):
- return self._compression_level
-
- @property
- def stream(self):
- return self._stream
-
- def readable(self):
- return True
-
- def _read_compressed_into(self, b):
- buf = self._compressed_stream.read(len(b))
- b[: len(buf)] = buf
- return len(buf)
-
- def readinto(self, b):
- b = memoryview(b)
-
- offset = 0
- size = len(b)
- while offset < size:
- offset += self._read_compressed_into(b[offset:])
- if offset < size:
- # self._compressed_buffer now empty
- if self._compressor.closed:
- # nothing to compress anymore
- break
- # compress next bytes
- self._read_n_compress(size)
-
- return offset
-
- def _read_n_compress(self, size):
- assert size > 0
-
- data = self._stream.read(size)
-
- # rewind buffer to the start to free up memory
- # (because anything currently in the buffer should be already
- # streamed off the object)
- self._compressed_stream.seek(0)
- self._compressed_stream.truncate(0)
-
- if data:
- self._compressor.write(data)
- else:
- # this will write final data (will flush zlib with Z_FINISH)
- self._compressor.close()
-
- # rewind to the buffer start
- self._compressed_stream.seek(0)
-
- def __repr__(self):
- return (
- "{self.__class__.__name__}("
- "{self.stream!r}, "
- "compression_level={self.compression_level!r}"
- ")"
- ).format(self=self)
-
-
-__all__ = ("GZIPCompressedStream",)
openai/http_client.py
@@ -1,321 +0,0 @@
-import abc
-import json
-import random
-import textwrap
-import threading
-import time
-from typing import Any, Dict
-from urllib.parse import urlparse
-
-import requests
-
-import openai
-from openai import error, util
-from openai.request_metrics import RequestMetrics
-
-
-def _now_ms():
- return int(round(time.time() * 1000))
-
-
-def new_default_http_client(*args, **kwargs):
- return RequestsClient(*args, **kwargs)
-
-
-class HTTPClient(abc.ABC):
- MAX_DELAY = 2
- INITIAL_DELAY = 0.5
- MAX_RETRY_AFTER = 60
-
- def __init__(self, verify_ssl_certs=True, proxy=None):
- self._verify_ssl_certs = verify_ssl_certs
- if proxy:
- if isinstance(proxy, str):
- proxy = {"http": proxy, "https": proxy}
- if not isinstance(proxy, dict):
- raise ValueError(
- "Proxy(ies) must be specified as either a string "
- "URL or a dict() with string URL under the"
- " "
- "https"
- " and/or "
- "http"
- " keys."
- )
- self._proxy = proxy.copy() if proxy else None
-
- self._thread_local = threading.local()
-
- def request_with_retries(self, method, url, headers, post_data=None, stream=False):
- self._add_telemetry_header(headers)
-
- num_retries = 0
-
- while True:
- request_start = _now_ms()
-
- try:
- response = self.request(method, url, headers, post_data, stream=stream)
- connection_error = None
- except error.APIConnectionError as e:
- connection_error = e
- response = None
-
- if self._should_retry(response, connection_error, num_retries):
- if connection_error:
- util.log_warn(
- "Encountered a retryable error %s"
- % connection_error.user_message
- )
- num_retries += 1
- sleep_time = self._sleep_time_seconds(num_retries, response)
- util.log_info(
- (
- "Initiating retry %i for request %s %s after "
- "sleeping %.2f seconds."
- % (num_retries, method, url, sleep_time)
- )
- )
- time.sleep(sleep_time)
- else:
- if response is not None:
- self._record_request_metrics(response, request_start)
-
- return response
- else:
- assert connection_error is not None
- raise connection_error
-
- def request(self, method, url, headers, post_data=None, stream=False):
- raise NotImplementedError("HTTPClient subclasses must implement `request`")
-
- def _should_retry(self, response, api_connection_error, num_retries):
- if num_retries >= self._max_network_retries():
- return False
-
- if response is None:
- # We generally want to retry on timeout and connection
- # exceptions, but defer this decision to underlying subclass
- # implementations. They should evaluate the driver-specific
- # errors worthy of retries, and set flag on the error returned.
- return api_connection_error.should_retry
-
- _, status_code, rheaders, _ = response
-
- # The API may ask us not to retry (eg; if doing so would be a no-op)
- # or advise us to retry (eg; in cases of lock timeouts); we defer to that.
- #
- # Note that we expect the headers object to be a CaseInsensitiveDict, as is the case with the requests library.
- if rheaders is not None and "openai-should-retry" in rheaders:
- if rheaders["openai-should-retry"] == "false":
- return False
- if rheaders["openai-should-retry"] == "true":
- return True
-
- # Retry on conflict errors.
- if status_code == 409:
- return True
-
- # Retry on 500, 503, and other internal errors.
- #
- # Note that we expect the openai-should-retry header to be false
- # in most cases when a 500 is returned, since our idempotency framework
- # would typically replay it anyway.
- if status_code >= 500:
- return True
-
- return False
-
- def _max_network_retries(self):
- from openai import max_network_retries
-
- # Configured retries, isolated here for tests
- return max_network_retries
-
- def _retry_after_header(self, response=None):
- if response is None:
- return None
- _, _, rheaders, _ = response
-
- try:
- return int(rheaders["retry-after"])
- except (KeyError, ValueError):
- return None
-
- def _sleep_time_seconds(self, num_retries, response=None):
- # Apply exponential backoff with initial_network_retry_delay on the
- # number of num_retries so far as inputs.
- # Do not allow the number to exceed max_network_retry_delay.
- sleep_seconds = min(
- HTTPClient.INITIAL_DELAY * (2 ** (num_retries - 1)), HTTPClient.MAX_DELAY
- )
-
- sleep_seconds = self._add_jitter_time(sleep_seconds)
-
- # But never sleep less than the base sleep seconds.
- sleep_seconds = max(HTTPClient.INITIAL_DELAY, sleep_seconds)
-
- # And never sleep less than the time the API asks us to wait, assuming it's a reasonable ask.
- retry_after = self._retry_after_header(response) or 0
- if retry_after <= HTTPClient.MAX_RETRY_AFTER:
- sleep_seconds = max(retry_after, sleep_seconds)
-
- return sleep_seconds
-
- def _add_jitter_time(self, sleep_seconds):
- # Randomize the value in [(sleep_seconds/ 2) to (sleep_seconds)]
- # Also separated method here to isolate randomness for tests
- sleep_seconds *= 0.5 * (1 + random.uniform(0, 1))
- return sleep_seconds
-
- def _add_telemetry_header(self, headers):
- last_request_metrics = getattr(self._thread_local, "last_request_metrics", None)
- if openai.enable_telemetry and last_request_metrics:
- telemetry = {"last_request_metrics": last_request_metrics.payload()}
- headers["X-OpenAI-Client-Telemetry"] = json.dumps(telemetry)
-
- def _record_request_metrics(self, response, request_start):
- _, _, rheaders, _ = response
- if "Request-Id" in rheaders and openai.enable_telemetry:
- request_id = rheaders["Request-Id"]
- request_duration_ms = _now_ms() - request_start
- self._thread_local.last_request_metrics = RequestMetrics(
- request_id, request_duration_ms
- )
-
- @abc.abstractmethod
- def close(self):
- ...
-
-
-class RequestsClient(HTTPClient):
- name = "requests"
-
- def __init__(self, timeout=600, session=None, **kwargs):
- super(RequestsClient, self).__init__(**kwargs)
- self._session = session
- self._timeout = timeout
-
- def request(self, method, url, headers, post_data=None, stream=False):
- kwargs: Dict[str, Any] = {}
- if self._verify_ssl_certs:
- kwargs["verify"] = openai.ca_bundle_path
- else:
- kwargs["verify"] = False
-
- if self._proxy:
- kwargs["proxies"] = self._proxy
-
- if getattr(self._thread_local, "session", None) is None:
- self._thread_local.session = self._session or requests.Session()
-
- try:
- try:
- result = self._thread_local.session.request(
- method,
- url,
- headers=headers,
- data=post_data,
- timeout=self._timeout,
- stream=stream,
- **kwargs,
- )
- except TypeError as e:
- raise TypeError(
- "Warning: It looks like your installed version of the "
- '"requests" library is not compatible with OpenAI\'s '
- "usage thereof. (HINT: The most likely cause is that "
- 'your "requests" library is out of date. You can fix '
- 'that by running "pip install -U requests".) The '
- "underlying error was: %s" % (e,)
- )
-
- # This causes the content to actually be read, which could cause
- # e.g. a socket timeout. TODO: The other fetch methods probably
- # are susceptible to the same and should be updated.
- if stream and "text/event-stream" in result.headers.get("Content-Type", ""):
- content = result.iter_lines()
- stream = True
- else:
- content = result.content
- stream = False
- status_code = result.status_code
- except Exception as e:
- # Would catch just requests.exceptions.RequestException, but can
- # also raise ValueError, RuntimeError, etc.
- self._handle_request_error(e)
- return content, status_code, result.headers, stream
-
- def _handle_request_error(self, e):
- # Catch SSL error first as it belongs to ConnectionError,
- # but we don't want to retry, unless it is caused by dropped
- # SSL connection
- if isinstance(e, requests.exceptions.SSLError):
- if "ECONNRESET" not in repr(e):
- msg = (
- "Could not verify OpenAI's SSL certificate. Please make "
- "sure that your network is not intercepting certificates. "
- "If this problem persists, let us know at "
- "support@openai.com."
- )
- should_retry = False
- else:
- msg = "Detected ECONNRESET, indicates a dropped SSL connection."
- should_retry = True
- err = "%s: %s" % (type(e).__name__, str(e))
- # Retry only timeout and connect errors; similar to urllib3 Retry
- elif isinstance(
- e,
- (
- requests.exceptions.Timeout,
- requests.exceptions.ConnectionError,
- requests.exceptions.ChunkedEncodingError,
- ),
- ):
- msg = (
- "Unexpected error communicating with OpenAI. "
- "If this problem persists, let us know at "
- "support@openai.com."
- )
- err = "%s: %s" % (type(e).__name__, str(e))
- should_retry = True
- # Catch remaining request exceptions
- elif isinstance(e, requests.exceptions.RequestException):
- msg = (
- "Unexpected error communicating with OpenAI. "
- "If this problem persists, let us know at "
- "support@openai.com."
- )
- err = "%s: %s" % (type(e).__name__, str(e))
- should_retry = False
- else:
- msg = (
- "Unexpected error communicating with OpenAI. "
- "It looks like there's probably a configuration "
- "issue locally. If this problem persists, let us "
- "know at support@openai.com."
- )
- err = "A %s was raised" % (type(e).__name__,)
- if str(e):
- err += " with error message %s" % (str(e),)
- else:
- err += " with no error message"
- should_retry = False
-
- if isinstance(e, requests.RequestException):
- request = e.request # type: requests.Request
- if request is not None:
- err += " (url=" + self._sanitized_url(request.url) + ")"
-
- msg = textwrap.fill(msg) + "\n\n(Network error: %s)" % (err,)
- raise error.APIConnectionError(msg, should_retry=should_retry)
-
- @staticmethod
- def _sanitized_url(url):
- """for now just strip all query params from the url for privacy"""
- url = urlparse(url)
- return url.scheme + "://" + url.netloc + url.path
-
- def close(self):
- if getattr(self._thread_local, "session", None) is not None:
- self._thread_local.session.close()
openai/multipart_data_generator.py
@@ -1,92 +0,0 @@
-from __future__ import absolute_import, division, print_function
-
-import random
-import io
-
-import openai
-import re
-
-
-class MultipartDataGenerator(object):
- def __init__(self, chunk_size=1028):
- self.data = io.BytesIO()
- self.line_break = "\r\n"
- self.boundary = self._initialize_boundary()
- self.chunk_size = chunk_size
-
- def _remove_array_element(self, input_string):
- match = re.match(r"^(.*)\[.*\]$", input_string)
- return match[1] if match else input_string
-
- def add_params(self, params):
- # Flatten parameters first
- params = dict(openai.api_requestor._api_encode(params))
-
- for key, value in params.items():
-
- # strip array elements if present from key
- key = self._remove_array_element(key)
-
- if value is None:
- continue
-
- self._write(self.param_header())
- self._write(self.line_break)
- if hasattr(value, "read"):
- filename = "blob"
- if hasattr(value, "name"):
- # Convert the filename to string, just in case it's not
- # already one. E.g. `tempfile.TemporaryFile` has a `name`
- # attribute but it's an `int`.
- filename = str(value.name)
-
- self._write('Content-Disposition: form-data; name="')
- self._write(key)
- self._write('"; filename="')
- self._write(filename)
- self._write('"')
- self._write(self.line_break)
- self._write("Content-Type: application/octet-stream")
- self._write(self.line_break)
- self._write(self.line_break)
-
- self._write_file(value)
- else:
- self._write('Content-Disposition: form-data; name="')
- self._write(key)
- self._write('"')
- self._write(self.line_break)
- self._write(self.line_break)
- self._write(str(value))
-
- self._write(self.line_break)
-
- def param_header(self):
- return "--%s" % self.boundary
-
- def get_post_data(self):
- self._write("--%s--" % (self.boundary,))
- self._write(self.line_break)
- return self.data.getvalue()
-
- def _write(self, value):
- if isinstance(value, bytes):
- array = bytearray(value)
- elif isinstance(value, str):
- array = bytearray(value, encoding="utf-8")
- else:
- raise TypeError(
- "unexpected type: {value_type}".format(value_type=type(value))
- )
-
- self.data.write(array)
-
- def _write_file(self, f):
- while True:
- file_contents = f.read(self.chunk_size)
- if not file_contents:
- break
- self._write(file_contents)
-
- def _initialize_boundary(self):
- return random.randint(0, 2 ** 63)
openai/object_classes.py
@@ -1,5 +1,3 @@
-from __future__ import absolute_import, division, print_function
-
from openai import api_resources
from openai.api_resources.experimental.completion_config import CompletionConfig
openai/openai_object.py
@@ -1,66 +1,32 @@
-from __future__ import absolute_import, division, print_function
-
-import datetime
import json
from copy import deepcopy
+from typing import Optional
-import openai
from openai import api_requestor, util
-
-
-def _compute_diff(current, previous):
- if isinstance(current, dict):
- previous = previous or {}
- diff = current.copy()
- for key in set(previous.keys()) - set(diff.keys()):
- diff[key] = ""
- return diff
- return current if current is not None else ""
-
-
-def _serialize_list(array, previous):
- array = array or []
- previous = previous or []
- params = {}
-
- for i, v in enumerate(array):
- previous_item = previous[i] if len(previous) > i else None
- if hasattr(v, "serialize"):
- params[str(i)] = v.serialize(previous_item)
- else:
- params[str(i)] = _compute_diff(v, previous_item)
-
- return params
+from openai.openai_response import OpenAIResponse
class OpenAIObject(dict):
api_base_override = None
- class ReprJSONEncoder(json.JSONEncoder):
- def default(self, obj):
- if isinstance(obj, datetime.datetime):
- return api_requestor._encode_datetime(obj)
- return super(OpenAIObject.ReprJSONEncoder, self).default(obj)
-
def __init__(
self,
id=None,
api_key=None,
api_version=None,
organization=None,
- last_response=None,
+ response_ms: Optional[int] = None,
api_base=None,
engine=None,
**params,
):
super(OpenAIObject, self).__init__()
- self._unsaved_values = set()
- self._transient_values = set()
- self._last_response = last_response
+ if response_ms is not None and not isinstance(response_ms, int):
+ raise TypeError(f"response_ms is a {type(response_ms).__name__}.")
+ self._response_ms = response_ms
self._retrieve_params = params
- self._previous = None
object.__setattr__(self, "api_key", api_key)
object.__setattr__(self, "api_version", api_version)
@@ -72,14 +38,8 @@ class OpenAIObject(dict):
self["id"] = id
@property
- def last_response(self):
- return self._last_response
-
- def update(self, update_dict):
- for k in update_dict:
- self._unsaved_values.add(k)
-
- return super(OpenAIObject, self).update(update_dict)
+ def response_ms(self) -> Optional[int]:
+ return self._response_ms
def __setattr__(self, k, v):
if k[0] == "_" or k in self.__dict__:
@@ -91,7 +51,6 @@ class OpenAIObject(dict):
def __getattr__(self, k):
if k[0] == "_":
raise AttributeError(k)
-
try:
return self[k]
except KeyError as err:
@@ -110,37 +69,10 @@ class OpenAIObject(dict):
"We interpret empty strings as None in requests."
"You may set %s.%s = None to delete the property" % (k, str(self), k)
)
-
- # Allows for unpickling in Python 3.x
- if not hasattr(self, "_unsaved_values"):
- self._unsaved_values = set()
-
- self._unsaved_values.add(k)
-
super(OpenAIObject, self).__setitem__(k, v)
- def __getitem__(self, k):
- try:
- return super(OpenAIObject, self).__getitem__(k)
- except KeyError as err:
- if k in self._transient_values:
- raise KeyError(
- "%r. HINT: The %r attribute was set in the past. "
- "It was then wiped when refreshing the object with "
- "the result returned by OpenAI's API, probably as a "
- "result of a save(). The attributes currently "
- "available on this object are: %s"
- % (k, k, ", ".join(list(self.keys())))
- )
- else:
- raise err
-
def __delitem__(self, k):
- super(OpenAIObject, self).__delitem__(k)
-
- # Allows for unpickling in Python 3.x
- if hasattr(self, "_unsaved_values") and k in self._unsaved_values:
- self._unsaved_values.remove(k)
+ raise NotImplementedError("del is not supported")
# Custom unpickling method that uses `update` to update the dictionary
# without calling __setitem__, which would fail if any value is an empty
@@ -172,7 +104,7 @@ class OpenAIObject(dict):
api_version=None,
organization=None,
engine=None,
- last_response=None,
+ response_ms: Optional[int] = None,
):
instance = cls(
values.get("id"),
@@ -180,14 +112,14 @@ class OpenAIObject(dict):
api_version=api_version,
organization=organization,
engine=engine,
- last_response=last_response,
+ response_ms=response_ms,
)
instance.refresh_from(
values,
api_key=key,
api_version=api_version,
organization=organization,
- last_response=last_response,
+ response_ms=response_ms,
)
return instance
@@ -195,29 +127,17 @@ class OpenAIObject(dict):
self,
values,
api_key=None,
- partial=False,
api_version=None,
organization=None,
- last_response=None,
+ response_ms: Optional[int] = None,
):
self.api_key = api_key or getattr(values, "api_key", None)
self.api_version = api_version or getattr(values, "api_version", None)
self.organization = organization or getattr(values, "organization", None)
- self._last_response = last_response or getattr(values, "_last_response", None)
-
- # Wipe old state before setting new. This is useful for e.g.
- # updating a customer, where there is no persistent card
- # parameter. Mark those values which don't persist as transient
- if partial:
- self._unsaved_values = self._unsaved_values - set(values)
- else:
- removed = set(self.keys()) - set(values)
- self._transient_values = self._transient_values | removed
- self._unsaved_values = set()
- self.clear()
-
- self._transient_values = self._transient_values - set(values)
+ self._response_ms = response_ms or getattr(values, "_response_ms", None)
+ # Wipe old state before setting new.
+ self.clear()
for k, v in values.items():
super(OpenAIObject, self).__setitem__(
k, util.convert_to_openai_object(v, api_key, api_version, organization)
@@ -230,7 +150,14 @@ class OpenAIObject(dict):
return None
def request(
- self, method, url, params=None, headers=None, stream=False, plain_old_data=False
+ self,
+ method,
+ url,
+ params=None,
+ headers=None,
+ stream=False,
+ plain_old_data=False,
+ request_id: Optional[str] = None,
):
if params is None:
params = self._retrieve_params
@@ -241,10 +168,11 @@ class OpenAIObject(dict):
organization=self.organization,
)
response, stream, api_key = requestor.request(
- method, url, params, headers, stream=stream
+ method, url, params, stream=stream, headers=headers, request_id=request_id
)
if stream:
+ assert not isinstance(response, OpenAIResponse) # must be an iterator
return (
util.convert_to_openai_object(
line,
@@ -284,7 +212,7 @@ class OpenAIObject(dict):
def __str__(self):
obj = self.to_dict_recursive()
- return json.dumps(obj, sort_keys=True, indent=2, cls=self.ReprJSONEncoder)
+ return json.dumps(obj, sort_keys=True, indent=2)
def to_dict(self):
return dict(self)
@@ -305,27 +233,6 @@ class OpenAIObject(dict):
def openai_id(self):
return self.id
- def serialize(self, previous):
- params = {}
- unsaved_keys = self._unsaved_values or set()
- previous = previous or self._previous or {}
-
- for k, v in self.items():
- if k == "id" or (isinstance(k, str) and k.startswith("_")):
- continue
- elif isinstance(v, openai.api_resources.abstract.APIResource):
- continue
- elif hasattr(v, "serialize"):
- child = v.serialize(previous.get(k, None))
- if child != {}:
- params[k] = child
- elif k in unsaved_keys:
- params[k] = _compute_diff(v, previous.get(k, None))
- elif k == "additional_owners" and v is not None:
- params[k] = _serialize_list(v, previous.get(k, None))
-
- return params
-
# This class overrides __setitem__ to throw exceptions on inputs that it
# doesn't like. This can cause problems when we try to copy an object
# wholesale because some data that's returned from the API may not be valid
openai/openai_response.py
@@ -1,25 +1,20 @@
-from __future__ import absolute_import, division, print_function
+from typing import Optional
-import json
+class OpenAIResponse:
+ def __init__(self, data, headers):
+ self._headers = headers
+ self.data = data
-class OpenAIResponse(object):
- def __init__(self, body, code, headers):
- self.body = body
- self.code = code
- self.headers = headers
- self.data = json.loads(body)
+ @property
+ def request_id(self) -> Optional[str]:
+ return self._headers.get("request-id")
@property
- def idempotency_key(self):
- try:
- return self.headers["idempotency-key"]
- except KeyError:
- return None
+ def organization(self) -> Optional[str]:
+ return self._headers.get("OpenAI-Organization")
@property
- def request_id(self):
- try:
- return self.headers["request-id"]
- except KeyError:
- return None
+ def response_ms(self) -> Optional[int]:
+ h = self._headers.get("Openai-Processing-Ms")
+ return None if h is None else int(h)
openai/request_metrics.py
@@ -1,13 +0,0 @@
-from __future__ import absolute_import, division, print_function
-
-
-class RequestMetrics(object):
- def __init__(self, request_id, request_duration_ms):
- self.request_id = request_id
- self.request_duration_ms = request_duration_ms
-
- def payload(self):
- return {
- "request_id": self.request_id,
- "request_duration_ms": self.request_duration_ms,
- }
openai/util.py
@@ -1,36 +1,23 @@
-import functools
-import hmac
-import io
import logging
import os
import re
import sys
-from urllib.parse import parse_qsl
+from typing import Optional
import openai
-
OPENAI_LOG = os.environ.get("OPENAI_LOG")
logger = logging.getLogger("openai")
__all__ = [
- "io",
- "parse_qsl",
"log_info",
"log_debug",
"log_warn",
- "dashboard_link",
"logfmt",
]
-def is_appengine_dev():
- return "APPENGINE_RUNTIME" in os.environ and "Dev" in os.environ.get(
- "SERVER_SOFTWARE", ""
- )
-
-
def _console_log_level():
if openai.log in ["debug", "info"]:
return openai.log
@@ -60,21 +47,6 @@ def log_warn(message, **params):
logger.warn(msg)
-def _test_or_live_environment():
- if openai.api_key is None:
- return
- match = re.match(r"sk_(live|test)_", openai.api_key)
- if match is None:
- return
- return match.groups()[0]
-
-
-def dashboard_link(request_id):
- return "https://dashboard.openai.com/{env}/logs/{reqid}".format(
- env=_test_or_live_environment() or "test", reqid=request_id
- )
-
-
def logfmt(props):
def fmt(key, val):
# Handle case where val is a bytes or bytesarray
@@ -93,10 +65,6 @@ def logfmt(props):
return " ".join([fmt(key, val) for key, val in sorted(props.items())])
-def secure_compare(val1, val2):
- return hmac.compare_digest(val1, val2)
-
-
def get_object_classes():
# This is here to avoid a circular dependency
from openai.object_classes import OBJECT_CLASSES
@@ -112,18 +80,13 @@ def convert_to_openai_object(
engine=None,
plain_old_data=False,
):
- # If we get a OpenAIResponse, we'll want to return a
- # OpenAIObject with the last_response field filled out with
- # the raw API response information
- openai_response = None
+ # If we get a OpenAIResponse, we'll want to return a OpenAIObject.
+ response_ms: Optional[int] = None
if isinstance(resp, openai.openai_response.OpenAIResponse):
- # TODO: move this elsewhere
- openai_response = resp
- resp = openai_response.data
- organization = (
- openai_response.headers.get("OpenAI-Organization") or organization
- )
+ organization = resp.organization
+ response_ms = resp.response_ms
+ resp = resp.data
if plain_old_data:
return resp
@@ -151,7 +114,7 @@ def convert_to_openai_object(
api_key,
api_version=api_version,
organization=organization,
- last_response=openai_response,
+ response_ms=response_ms,
engine=engine,
)
else:
@@ -178,47 +141,22 @@ def convert_to_dict(obj):
return obj
-def populate_headers(idempotency_key=None, request_id=None):
- headers = {}
- if idempotency_key is not None:
- headers["Idempotency-Key"] = idempotency_key
- if request_id is not None:
- headers["X-Request-Id"] = request_id
- if openai.debug:
- headers["OpenAI-Debug"] = "true"
-
- return headers
-
-
def merge_dicts(x, y):
z = x.copy()
z.update(y)
return z
-class class_method_variant(object):
- def __init__(self, class_method_name):
- self.class_method_name = class_method_name
-
- def __call__(self, method):
- self.method = method
- return self
-
- def __get__(self, obj, objtype=None):
- @functools.wraps(self.method)
- def _wrapper(*args, **kwargs):
- if obj is not None:
- # Method was called as an instance method, e.g.
- # instance.method(...)
- return self.method(obj, *args, **kwargs)
- elif len(args) > 0 and isinstance(args[0], objtype):
- # Method was called as a class method with the instance as the
- # first argument, e.g. Class.method(instance, ...) which in
- # Python is the same thing as calling an instance method
- return self.method(args[0], *args[1:], **kwargs)
- else:
- # Method was called as a class method, e.g. Class.method(...)
- class_method = getattr(objtype, self.class_method_name)
- return class_method(*args, **kwargs)
-
- return _wrapper
+def default_api_key() -> str:
+ if openai.api_key_path:
+ with open(openai.api_key_path, "rt") as k:
+ api_key = k.read().strip()
+ if not api_key.startswith("sk-"):
+ raise ValueError(f"Malformed API key in {openai.api_key_path}.")
+ return api_key
+ elif openai.api_key is not None:
+ return openai.api_key
+ else:
+ raise openai.error.AuthenticationError(
+            "No API key provided. You can set your API key in code using 'openai.api_key = <API-KEY>', or you can set the environment variable OPENAI_API_KEY=<API-KEY>. If your API key is stored in a file, you can point the openai module at it with 'openai.api_key_path = <PATH>'. You can generate API keys in the OpenAI web interface. See https://onboard.openai.com for details, or email support@openai.com if you have any questions."
+ )
openai/validators.py
@@ -1,9 +1,9 @@
import os
import sys
-import pandas as pd
-import numpy as np
+from typing import Any, Callable, NamedTuple, Optional
-from typing import NamedTuple, Optional, Callable, Any
+import numpy as np
+import pandas as pd
class Remediation(NamedTuple):
openai/version.py
@@ -1,1 +1,1 @@
-VERSION = "0.10.5"
+VERSION = "0.11.0"
.gitignore
@@ -1,5 +1,6 @@
*.egg-info
-__pycache__
-/public/dist
.idea
-.python-version
\ No newline at end of file
+.python-version
+/public/dist
+__pycache__
+build
.isort.cfg
@@ -0,0 +1,6 @@
+[settings]
+include_trailing_comma=True
+line_length=88
+known_first_party=
+multi_line_output=3
+py_version=36
README.md
@@ -68,7 +68,7 @@ openai api completions.create -e ada -p "Hello world"
## Requirements
-- Python 3.7+
+- Python 3.7.1+
In general we want to support the versions of Python that our
customers are using, so if you run into issues with any version
setup.py
@@ -20,8 +20,8 @@ setup(
"pandas-stubs>=1.1.0.11", # Needed for type hints for mypy
"openpyxl>=3.0.7", # Needed for CLI fine-tuning data preparation tool xlsx format
],
- extras_require={"dev": ["black==20.8b1", "pytest==6.*"]},
- python_requires=">=3.6",
+ extras_require={"dev": ["black~=21.6b0", "pytest==6.*"]},
+ python_requires=">=3.7.1",
scripts=["bin/openai"],
packages=find_packages(exclude=["tests", "tests.*"]),
package_data={