Skip to content

Commit 802e95c

Browse files
committed
Apply formatting only to changed files
1 parent 15ccd0b commit 802e95c

File tree

2 files changed

+13
-15
lines changed

2 files changed

+13
-15
lines changed

databricks/sdk/mixins/open_ai_client.py

Lines changed: 3 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -4,9 +4,7 @@
44

55
from requests import Response
66

7-
from databricks.sdk.service.serving import (ExternalFunctionRequestHttpMethod,
8-
HttpRequestResponse,
9-
ServingEndpointsAPI)
7+
from databricks.sdk.service.serving import ExternalFunctionRequestHttpMethod, HttpRequestResponse, ServingEndpointsAPI
108

119

1210
class ServingEndpointsExt(ServingEndpointsAPI):
@@ -77,7 +75,7 @@ def get_open_ai_client(self, **kwargs):
7775
"and use 'from databricks_openai import DatabricksOpenAI' instead. "
7876
"See https://pypi.org/project/databricks-openai/ for more information.",
7977
DeprecationWarning,
80-
stacklevel=2
78+
stacklevel=2,
8179
)
8280
try:
8381
from openai import OpenAI
@@ -120,7 +118,7 @@ def get_langchain_chat_open_ai_client(self, model):
120118
"and use 'from databricks_langchain import ChatDatabricks' instead. "
121119
"See https://pypi.org/project/databricks-langchain/ for more information.",
122120
DeprecationWarning,
123-
stacklevel=2
121+
stacklevel=2,
124122
)
125123
try:
126124
from langchain_openai import ChatOpenAI

tests/test_open_ai_mixin.py

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -91,15 +91,15 @@ def test_langchain_open_ai_client(monkeypatch):
9191
# Mock the langchain_openai import
9292
mock_chat_openai = Mock()
9393
mock_chat_openai.return_value = MagicMock(
94-
openai_api_base="https://test_host/serving-endpoints",
95-
model_name="databricks-meta-llama-3-1-70b-instruct"
94+
openai_api_base="https://test_host/serving-endpoints", model_name="databricks-meta-llama-3-1-70b-instruct"
9695
)
9796

9897
# Mock the module import
9998
import sys
99+
100100
mock_module = MagicMock()
101101
mock_module.ChatOpenAI = mock_chat_openai
102-
sys.modules['langchain_openai'] = mock_module
102+
sys.modules["langchain_openai"] = mock_module
103103

104104
try:
105105
w = WorkspaceClient(config=Config())
@@ -109,8 +109,8 @@ def test_langchain_open_ai_client(monkeypatch):
109109
assert client.model_name == "databricks-meta-llama-3-1-70b-instruct"
110110
finally:
111111
# Clean up the mock module
112-
if 'langchain_openai' in sys.modules:
113-
del sys.modules['langchain_openai']
112+
if "langchain_openai" in sys.modules:
113+
del sys.modules["langchain_openai"]
114114

115115

116116
def test_http_request(w, requests_mock):
@@ -174,15 +174,15 @@ def test_get_langchain_chat_open_ai_client_deprecation_warning(monkeypatch):
174174
# Mock the langchain_openai import
175175
mock_chat_openai = Mock()
176176
mock_chat_openai.return_value = MagicMock(
177-
openai_api_base="https://test_host/serving-endpoints",
178-
model_name="databricks-meta-llama-3-1-70b-instruct"
177+
openai_api_base="https://test_host/serving-endpoints", model_name="databricks-meta-llama-3-1-70b-instruct"
179178
)
180179

181180
# Mock the module import
182181
import sys
182+
183183
mock_module = MagicMock()
184184
mock_module.ChatOpenAI = mock_chat_openai
185-
sys.modules['langchain_openai'] = mock_module
185+
sys.modules["langchain_openai"] = mock_module
186186

187187
try:
188188
w = WorkspaceClient(config=Config())
@@ -203,5 +203,5 @@ def test_get_langchain_chat_open_ai_client_deprecation_warning(monkeypatch):
203203
assert client.model_name == "databricks-meta-llama-3-1-70b-instruct"
204204
finally:
205205
# Clean up the mock module
206-
if 'langchain_openai' in sys.modules:
207-
del sys.modules['langchain_openai']
206+
if "langchain_openai" in sys.modules:
207+
del sys.modules["langchain_openai"]

0 commit comments

Comments (0)