Commit 9867549

Removing reference to proxy for cleanup
1 parent 009603c commit 9867549

9 files changed

Lines changed: 27 additions & 84 deletions
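
For library users, the visible effect of this commit is that Client is now constructed without a proxy argument and no longer exposes call_llm_gateway. A minimal sketch of the call-site change, assuming reader and writer are already-built instances (placeholder names, not taken from this diff):

    # Before: client = Client(reader=reader, writer=writer, proxy=LocalProxyClientProvider())
    # Before: client.call_llm_gateway("model-id", "prompt", 256)

    # After: the proxy parameter and the LLM gateway entry point are gone.
    client = Client(reader=reader, writer=writer)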

src/datacustomcode/__init__.py

Lines changed: 5 additions & 4 deletions
@@ -17,15 +17,16 @@
 from datacustomcode.credentials import AuthType, Credentials
 from datacustomcode.io.reader.query_api import QueryAPIDataCloudReader
 from datacustomcode.io.writer.print import PrintDataCloudWriter
-from datacustomcode.proxy.client.LocalProxyClientProvider import (
-    LocalProxyClientProvider,
-)
+# TODO: Restore proxy/LLM gateway integration
+# from datacustomcode.proxy.client.LocalProxyClientProvider import (
+#     LocalProxyClientProvider,
+# )

 __all__ = [
     "AuthType",
     "Client",
     "Credentials",
-    "LocalProxyClientProvider",
+    # "LocalProxyClientProvider",  # TODO: Restore
     "PrintDataCloudWriter",
     "QueryAPIDataCloudReader",
 ]

src/datacustomcode/client.py

Lines changed: 0 additions & 13 deletions
@@ -33,7 +33,6 @@

 from datacustomcode.io.reader.base import BaseDataCloudReader
 from datacustomcode.io.writer.base import BaseDataCloudWriter, WriteMode
-from datacustomcode.proxy.client.base import BaseProxyClient
 from datacustomcode.spark.base import BaseSparkSessionProvider


@@ -107,15 +106,13 @@ class Client:
     _reader: BaseDataCloudReader
     _writer: BaseDataCloudWriter
     _file: DefaultFindFilePath
-    _proxy: Optional[BaseProxyClient]
     _data_layer_history: dict[DataCloudObjectType, set[str]]
     _code_type: str

     def __new__(
         cls,
         reader: Optional[BaseDataCloudReader] = None,
         writer: Optional["BaseDataCloudWriter"] = None,
-        proxy: Optional[BaseProxyClient] = None,
         spark_provider: Optional["BaseSparkSessionProvider"] = None,
         code_type: str = "script",
     ) -> Client:
@@ -178,11 +175,6 @@ def __new__(
     @classmethod
     def _new_function_client(cls) -> Client:
         cls._instance = super().__new__(cls)
-        cls._instance._proxy = (
-            config.proxy_config.to_object()  # type: ignore
-            if config.proxy_config is not None
-            else None
-        )
         return cls._instance

     def read_dlo(self, name: str, row_limit: int = 1000) -> PySparkDataFrame:
@@ -237,11 +229,6 @@ def write_to_dmo(
         self._validate_data_layer_history_does_not_contain(DataCloudObjectType.DLO)
         return self._writer.write_to_dmo(name, dataframe, write_mode, **kwargs)

-    def call_llm_gateway(self, LLM_MODEL_ID: str, prompt: str, maxTokens: int) -> str:
-        if self._proxy is None:
-            raise ValueError("No proxy configured; set proxy or proxy_config")
-        return self._proxy.call_llm_gateway(LLM_MODEL_ID, prompt, maxTokens)
-
     def find_file_path(self, file_name: str) -> Path:
         """Return a file path"""

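
What remains of the Client surface after these removals, sketched under the assumption that my_reader and my_writer are concrete reader/writer instances (placeholder names, not from this diff):

    from datacustomcode import Client
    from datacustomcode.io.writer.base import WriteMode

    client = Client(reader=my_reader, writer=my_writer)  # no proxy= parameter

    df = client.read_dlo("source_dlo", row_limit=1000)   # records a DLO access
    client.write_to_dlo("target_dlo", df, WriteMode.APPEND)

    # Mixing object types in one run still fails fast: a write_to_dmo() call
    # after a DLO access raises DataCloudAccessLayerException.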

src/datacustomcode/config.py

Lines changed: 0 additions & 21 deletions
@@ -38,8 +38,6 @@
 from datacustomcode.io.base import BaseDataAccessLayer
 from datacustomcode.io.reader.base import BaseDataCloudReader  # noqa: TCH001
 from datacustomcode.io.writer.base import BaseDataCloudWriter  # noqa: TCH001
-from datacustomcode.proxy.base import BaseProxyAccessLayer
-from datacustomcode.proxy.client.base import BaseProxyClient  # noqa: TCH001
 from datacustomcode.spark.base import BaseSparkSessionProvider

 DEFAULT_CONFIG_NAME = "config.yaml"
@@ -94,23 +92,6 @@ class SparkConfig(ForceableConfig):

 _P = TypeVar("_P", bound=BaseSparkSessionProvider)

-_PX = TypeVar("_PX", bound=BaseProxyAccessLayer)
-
-
-class ProxyAccessLayerObjectConfig(ForceableConfig, Generic[_PX]):
-    """Config for proxy clients that take no constructor args (e.g. no spark)."""
-
-    model_config = ConfigDict(validate_default=True, extra="forbid")
-    type_base: ClassVar[Type[BaseProxyAccessLayer]] = BaseProxyAccessLayer
-    type_config_name: str = Field(
-        description="CONFIG_NAME of the proxy client (e.g. 'LocalProxyClient').",
-    )
-    options: dict[str, Any] = Field(default_factory=dict)
-
-    def to_object(self) -> _PX:
-        type_ = self.type_base.subclass_from_config_name(self.type_config_name)
-        return cast(_PX, type_(**self.options))
-

 class SparkProviderConfig(ForceableConfig, Generic[_P]):
     model_config = ConfigDict(validate_default=True, extra="forbid")
@@ -128,7 +109,6 @@ def to_object(self) -> _P:
 class ClientConfig(BaseModel):
     reader_config: Union[AccessLayerObjectConfig[BaseDataCloudReader], None] = None
     writer_config: Union[AccessLayerObjectConfig[BaseDataCloudWriter], None] = None
-    proxy_config: Union[ProxyAccessLayerObjectConfig[BaseProxyClient], None] = None
     spark_config: Union[SparkConfig, None] = None
     spark_provider_config: Union[
         SparkProviderConfig[BaseSparkSessionProvider], None
@@ -156,7 +136,6 @@ def merge(

         self.reader_config = merge(self.reader_config, other.reader_config)
         self.writer_config = merge(self.writer_config, other.writer_config)
-        self.proxy_config = merge(self.proxy_config, other.proxy_config)
         self.spark_config = merge(self.spark_config, other.spark_config)
         self.spark_provider_config = merge(
             self.spark_provider_config, other.spark_provider_config
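
With proxy_config removed from ClientConfig, only the reader, writer, and Spark sections participate in merging. A short sketch, assuming merge() takes another ClientConfig as the hunk above implies (all fields default to None):

    from datacustomcode.config import ClientConfig

    base = ClientConfig()      # e.g. defaults loaded from config.yaml
    override = ClientConfig()  # e.g. user-supplied overrides
    base.merge(override)       # merges reader_config, writer_config, spark_config,
                               # and spark_provider_config; proxy_config no longer exists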

src/datacustomcode/llm_gateway/base.py

Lines changed: 0 additions & 3 deletions
@@ -16,9 +16,6 @@

 from abc import abstractmethod

-from datacustomcode.proxy.base import BaseProxyAccessLayer
-
-
 class LLMGateway:
     def __init__(self):
         pass

src/datacustomcode/llm_gateway/types/generate_text_request.py

Lines changed: 0 additions & 5 deletions
@@ -1,11 +1,6 @@
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# sources: llm_gateway.proto
-# plugin: python-betterproto
 from dataclasses import dataclass
-from typing import Optional

 import betterproto
-import grpclib

 from .google import protobuf

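
For context, python-betterproto represents generated messages as plain dataclasses, which is why the unused Optional and grpclib imports can be dropped without touching the message definition. An illustrative shape of such a generated class (field names and numbers are invented here, not read from llm_gateway.proto):

    from dataclasses import dataclass

    import betterproto


    @dataclass
    class GenerateTextRequest(betterproto.Message):
        prompt: str = betterproto.string_field(1)     # illustrative field
        model: str = betterproto.string_field(2)      # illustrative field
        max_tokens: int = betterproto.int32_field(3)  # illustrative field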

src/datacustomcode/llm_gateway/types/generate_text_response.py

Lines changed: 0 additions & 2 deletions
@@ -1,9 +1,7 @@

 from dataclasses import dataclass
-from typing import Optional

 import betterproto
-import grpclib

 from .google import protobuf


src/datacustomcode/llm_gateway/types/generate_text_response_builder.py

Lines changed: 0 additions & 7 deletions
@@ -1,10 +1,3 @@
-
-from dataclasses import dataclass
-from typing import Optional
-
-import betterproto
-import grpclib
-
 from .google import protobuf


src/datacustomcode/templates/function/payload/entrypoint.py

Lines changed: 1 addition & 0 deletions
@@ -50,6 +50,7 @@ def function(request: dict, runTime: Runtime) -> dict:
     builder = GenerateTextRequestBuilder()
     request = builder.set_prompt("Hello").set_model("gpt-4").build()
     response = runTime.llm_gateway.generate_text(request)
+
     if response.is_success:
         print(response.text)
     else:

tests/test_client.py

Lines changed: 21 additions & 29 deletions
@@ -17,7 +17,6 @@
 )
 from datacustomcode.io.reader.base import BaseDataCloudReader
 from datacustomcode.io.writer.base import BaseDataCloudWriter, WriteMode
-from datacustomcode.proxy.client.base import BaseProxyClient


 class MockDataCloudReader(BaseDataCloudReader):
@@ -76,13 +75,6 @@ def mock_config(mock_spark):
     )


-@pytest.fixture
-def mock_proxy():
-    """Mock proxy client to avoid starting Spark when reader/writer are provided."""
-    proxy = MagicMock(spec=BaseProxyClient)
-    return proxy
-
-
 @pytest.fixture
 def reset_client():
     """Reset the Client singleton between tests."""
@@ -93,12 +85,12 @@ def reset_client():


 class TestClient:
-    def test_singleton_pattern(self, reset_client, mock_spark, mock_proxy):
+    def test_singleton_pattern(self, reset_client, mock_spark):
         """Test that Client behaves as a singleton."""
         reader = MockDataCloudReader(mock_spark)
         writer = MockDataCloudWriter(mock_spark)

-        client1 = Client(reader=reader, writer=writer, proxy=mock_proxy)
+        client1 = Client(reader=reader, writer=writer)
         client2 = Client()

         assert client1 is client2
@@ -144,38 +136,38 @@ def test_initialization_with_config(self, mock_config, reset_client, mock_spark)
         assert client._reader is mock_reader
         assert client._writer is mock_writer

-    def test_read_dlo(self, reset_client, mock_spark, mock_proxy):
+    def test_read_dlo(self, reset_client, mock_spark):
         reader = MagicMock(spec=BaseDataCloudReader)
         writer = MagicMock(spec=BaseDataCloudWriter)
         mock_df = MagicMock(spec=DataFrame)
         reader.read_dlo.return_value = mock_df

-        client = Client(reader=reader, writer=writer, proxy=mock_proxy)
+        client = Client(reader=reader, writer=writer)
         result = client.read_dlo("test_dlo")

         reader.read_dlo.assert_called_once_with("test_dlo", row_limit=1000)
         assert result is mock_df
         assert "test_dlo" in client._data_layer_history[DataCloudObjectType.DLO]

-    def test_read_dmo(self, reset_client, mock_spark, mock_proxy):
+    def test_read_dmo(self, reset_client, mock_spark):
         reader = MagicMock(spec=BaseDataCloudReader)
         writer = MagicMock(spec=BaseDataCloudWriter)
         mock_df = MagicMock(spec=DataFrame)
         reader.read_dmo.return_value = mock_df

-        client = Client(reader=reader, writer=writer, proxy=mock_proxy)
+        client = Client(reader=reader, writer=writer)
         result = client.read_dmo("test_dmo")

         reader.read_dmo.assert_called_once_with("test_dmo", row_limit=1000)
         assert result is mock_df
         assert "test_dmo" in client._data_layer_history[DataCloudObjectType.DMO]

-    def test_write_to_dlo(self, reset_client, mock_spark, mock_proxy):
+    def test_write_to_dlo(self, reset_client, mock_spark):
         reader = MagicMock(spec=BaseDataCloudReader)
         writer = MagicMock(spec=BaseDataCloudWriter)
         mock_df = MagicMock(spec=DataFrame)

-        client = Client(reader=reader, writer=writer, proxy=mock_proxy)
+        client = Client(reader=reader, writer=writer)
         client._record_dlo_access("some_dlo")

         client.write_to_dlo("test_dlo", mock_df, WriteMode.APPEND, extra_param=True)
@@ -184,12 +176,12 @@ def test_write_to_dlo(self, reset_client, mock_spark, mock_proxy):
             "test_dlo", mock_df, WriteMode.APPEND, extra_param=True
         )

-    def test_write_to_dmo(self, reset_client, mock_spark, mock_proxy):
+    def test_write_to_dmo(self, reset_client, mock_spark):
         reader = MagicMock(spec=BaseDataCloudReader)
         writer = MagicMock(spec=BaseDataCloudWriter)
         mock_df = MagicMock(spec=DataFrame)

-        client = Client(reader=reader, writer=writer, proxy=mock_proxy)
+        client = Client(reader=reader, writer=writer)
         client._record_dmo_access("some_dmo")

         client.write_to_dmo("test_dmo", mock_df, WriteMode.OVERWRITE, extra_param=True)
@@ -198,42 +190,42 @@ def test_write_to_dmo(self, reset_client, mock_spark, mock_proxy):
             "test_dmo", mock_df, WriteMode.OVERWRITE, extra_param=True
         )

-    def test_mixed_dlo_dmo_raises_exception(self, reset_client, mock_spark, mock_proxy):
+    def test_mixed_dlo_dmo_raises_exception(self, reset_client, mock_spark):
         """Test that mixing DLOs and DMOs raises an exception."""
         reader = MagicMock(spec=BaseDataCloudReader)
         writer = MagicMock(spec=BaseDataCloudWriter)
         mock_df = MagicMock(spec=DataFrame)

-        client = Client(reader=reader, writer=writer, proxy=mock_proxy)
+        client = Client(reader=reader, writer=writer)
         client._record_dlo_access("test_dlo")

         with pytest.raises(DataCloudAccessLayerException) as exc_info:
             client.write_to_dmo("test_dmo", mock_df, WriteMode.APPEND)

         assert "test_dlo" in str(exc_info.value)

-    def test_mixed_dmo_dlo_raises_exception(self, reset_client, mock_spark, mock_proxy):
+    def test_mixed_dmo_dlo_raises_exception(self, reset_client, mock_spark):
         """Test that mixing DMOs and DLOs raises an exception (converse case)."""
         reader = MagicMock(spec=BaseDataCloudReader)
         writer = MagicMock(spec=BaseDataCloudWriter)
         mock_df = MagicMock(spec=DataFrame)

-        client = Client(reader=reader, writer=writer, proxy=mock_proxy)
+        client = Client(reader=reader, writer=writer)
         client._record_dmo_access("test_dmo")

         with pytest.raises(DataCloudAccessLayerException) as exc_info:
             client.write_to_dlo("test_dlo", mock_df, WriteMode.APPEND)

         assert "test_dmo" in str(exc_info.value)

-    def test_read_pattern_flow(self, reset_client, mock_spark, mock_proxy):
+    def test_read_pattern_flow(self, reset_client, mock_spark):
         """Test a complete flow of reading and writing within the same object type."""
         reader = MagicMock(spec=BaseDataCloudReader)
         writer = MagicMock(spec=BaseDataCloudWriter)
         mock_df = MagicMock(spec=DataFrame)
         reader.read_dlo.return_value = mock_df

-        client = Client(reader=reader, writer=writer, proxy=mock_proxy)
+        client = Client(reader=reader, writer=writer)

         df = client.read_dlo("source_dlo")
         client.write_to_dlo("target_dlo", df, WriteMode.APPEND)
@@ -247,7 +239,7 @@ def test_read_pattern_flow(self, reset_client, mock_spark, mock_proxy):

         # Reset for DMO test
         Client._instance = None
-        client = Client(reader=reader, writer=writer, proxy=mock_proxy)
+        client = Client(reader=reader, writer=writer)
         reader.read_dmo.return_value = mock_df

         df = client.read_dmo("source_dmo")
@@ -260,28 +252,28 @@ def test_read_pattern_flow(self, reset_client, mock_spark, mock_proxy):

         assert "source_dmo" in client._data_layer_history[DataCloudObjectType.DMO]

-    def test_read_dlo_with_row_limit(self, reset_client, mock_spark, mock_proxy):
+    def test_read_dlo_with_row_limit(self, reset_client, mock_spark):
         """Test that row_limit parameter is passed through to reader."""
         reader = MagicMock(spec=BaseDataCloudReader)
         writer = MagicMock(spec=BaseDataCloudWriter)
         mock_df = MagicMock(spec=DataFrame)
         reader.read_dlo.return_value = mock_df

-        client = Client(reader=reader, writer=writer, proxy=mock_proxy)
+        client = Client(reader=reader, writer=writer)
         result = client.read_dlo("test_dlo", row_limit=500)

         reader.read_dlo.assert_called_once_with("test_dlo", row_limit=500)
         assert result is mock_df
         assert "test_dlo" in client._data_layer_history[DataCloudObjectType.DLO]

-    def test_read_dmo_with_row_limit(self, reset_client, mock_spark, mock_proxy):
+    def test_read_dmo_with_row_limit(self, reset_client, mock_spark):
         """Test that row_limit parameter is passed through to reader."""
         reader = MagicMock(spec=BaseDataCloudReader)
         writer = MagicMock(spec=BaseDataCloudWriter)
         mock_df = MagicMock(spec=DataFrame)
         reader.read_dmo.return_value = mock_df

-        client = Client(reader=reader, writer=writer, proxy=mock_proxy)
+        client = Client(reader=reader, writer=writer)
         result = client.read_dmo("test_dmo", row_limit=100)

         reader.read_dmo.assert_called_once_with("test_dmo", row_limit=100)
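
Because Client is a singleton, every test above depends on the reset_client fixture (plus one inline Client._instance = None) to start from a fresh instance. A plausible fixture body consistent with that inline reset, shown as a sketch rather than the repository's actual code:

    import pytest


    @pytest.fixture
    def reset_client():
        """Reset the Client singleton between tests."""
        Client._instance = None  # clear any instance left by a previous test
        yield
        Client._instance = None  # leave a clean slate for the next test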
