Skip to content

Commit 10adca3

Browse files
Remote proxy phase 1
1 parent afa1200 commit 10adca3

7 files changed

Lines changed: 28 additions & 101 deletions

File tree

src/datacustomcode/__init__.py

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -17,15 +17,11 @@
1717
from datacustomcode.credentials import AuthType, Credentials
1818
from datacustomcode.io.reader.query_api import QueryAPIDataCloudReader
1919
from datacustomcode.io.writer.print import PrintDataCloudWriter
20-
from datacustomcode.proxy.client.LocalProxyClientProvider import (
21-
LocalProxyClientProvider,
22-
)
2320

2421
__all__ = [
2522
"AuthType",
2623
"Client",
2724
"Credentials",
28-
"LocalProxyClientProvider",
2925
"PrintDataCloudWriter",
3026
"QueryAPIDataCloudReader",
3127
]

src/datacustomcode/client.py

Lines changed: 0 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,6 @@
3333

3434
from datacustomcode.io.reader.base import BaseDataCloudReader
3535
from datacustomcode.io.writer.base import BaseDataCloudWriter, WriteMode
36-
from datacustomcode.proxy.client.base import BaseProxyClient
3736
from datacustomcode.spark.base import BaseSparkSessionProvider
3837

3938

@@ -107,15 +106,13 @@ class Client:
107106
_reader: BaseDataCloudReader
108107
_writer: BaseDataCloudWriter
109108
_file: DefaultFindFilePath
110-
_proxy: Optional[BaseProxyClient]
111109
_data_layer_history: dict[DataCloudObjectType, set[str]]
112110
_code_type: str
113111

114112
def __new__(
115113
cls,
116114
reader: Optional[BaseDataCloudReader] = None,
117115
writer: Optional["BaseDataCloudWriter"] = None,
118-
proxy: Optional[BaseProxyClient] = None,
119116
spark_provider: Optional["BaseSparkSessionProvider"] = None,
120117
code_type: str = "script",
121118
) -> Client:
@@ -223,11 +220,6 @@ def write_to_dmo(
223220
self._validate_data_layer_history_does_not_contain(DataCloudObjectType.DLO)
224221
return self._writer.write_to_dmo(name, dataframe, write_mode, **kwargs) # type: ignore[no-any-return]
225222

226-
def call_llm_gateway(self, LLM_MODEL_ID: str, prompt: str, maxTokens: int) -> str:
227-
if self._proxy is None:
228-
raise ValueError("No proxy configured; set proxy or proxy_config")
229-
return self._proxy.call_llm_gateway(LLM_MODEL_ID, prompt, maxTokens) # type: ignore[no-any-return]
230-
231223
def find_file_path(self, file_name: str) -> Path:
232224
"""Return a file path"""
233225

src/datacustomcode/config.py

Lines changed: 0 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -39,8 +39,6 @@
3939
from datacustomcode.io.base import BaseDataAccessLayer
4040
from datacustomcode.io.reader.base import BaseDataCloudReader # noqa: TCH002
4141
from datacustomcode.io.writer.base import BaseDataCloudWriter # noqa: TCH002
42-
from datacustomcode.proxy.base import BaseProxyAccessLayer
43-
from datacustomcode.proxy.client.base import BaseProxyClient # noqa: TCH002
4442
from datacustomcode.spark.base import BaseSparkSessionProvider
4543

4644
if TYPE_CHECKING:
@@ -74,18 +72,6 @@ class SparkConfig(ForceableConfig):
7472

7573
_P = TypeVar("_P", bound=BaseSparkSessionProvider)
7674

77-
_PX = TypeVar("_PX", bound=BaseProxyAccessLayer)
78-
79-
80-
class ProxyAccessLayerObjectConfig(BaseObjectConfig, Generic[_PX]):
81-
"""Config for proxy clients that take no constructor args (e.g. no spark)."""
82-
83-
type_base: ClassVar[Type[BaseProxyAccessLayer]] = BaseProxyAccessLayer
84-
85-
def to_object(self) -> _PX:
86-
type_ = self.type_base.subclass_from_config_name(self.type_config_name)
87-
return cast(_PX, type_(**self.options))
88-
8975

9076
class SparkProviderConfig(BaseObjectConfig, Generic[_P]):
9177
type_base: ClassVar[Type[BaseSparkSessionProvider]] = BaseSparkSessionProvider
@@ -98,7 +84,6 @@ def to_object(self) -> _P:
9884
class ClientConfig(BaseConfig):
9985
reader_config: Union[AccessLayerObjectConfig[BaseDataCloudReader], None] = None
10086
writer_config: Union[AccessLayerObjectConfig[BaseDataCloudWriter], None] = None
101-
proxy_config: Union[ProxyAccessLayerObjectConfig[BaseProxyClient], None] = None
10287
spark_config: Union[SparkConfig, None] = None
10388
spark_provider_config: Union[
10489
SparkProviderConfig[BaseSparkSessionProvider], None
@@ -126,7 +111,6 @@ def merge(
126111

127112
self.reader_config = merge(self.reader_config, other.reader_config)
128113
self.writer_config = merge(self.writer_config, other.writer_config)
129-
self.proxy_config = merge(self.proxy_config, other.proxy_config)
130114
self.spark_config = merge(self.spark_config, other.spark_config)
131115
self.spark_provider_config = merge(
132116
self.spark_provider_config, other.spark_provider_config

src/datacustomcode/config.yaml

Lines changed: 0 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -19,11 +19,6 @@ spark_config:
1919
spark.sql.execution.arrow.pyspark.enabled: 'true'
2020
spark.driver.extraJavaOptions: -Djava.security.manager=allow
2121

22-
proxy_config:
23-
type_config_name: LocalProxyClientProvider
24-
options:
25-
credentials_profile: default
26-
2722
einstein_predictions_config:
2823
type_config_name: DefaultEinsteinPredictions
2924
options:

src/datacustomcode/proxy/client/LocalProxyClientProvider.py

Lines changed: 0 additions & 34 deletions
This file was deleted.

src/datacustomcode/templates/function/payload/entrypoint.py

Lines changed: 11 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -65,6 +65,16 @@ def make_einstein_prediction(runtime: Runtime) -> None:
6565
)
6666

6767

68+
def generate_text(runtime: Runtime):
69+
builder = GenerateTextRequestBuilder()
70+
llm_request = builder.set_prompt("Hello").set_model("modelName").build()
71+
llm_response = runtime.llm_gateway.generate_text(llm_request)
72+
73+
if llm_response.is_success:
74+
print(llm_response.text)
75+
else:
76+
print(llm_response.error_code)
77+
6878
def function(request: dict, runtime: Runtime) -> dict:
6979
logger.info("Inside Function")
7080
logger.info(request)
@@ -73,17 +83,9 @@ def function(request: dict, runtime: Runtime) -> dict:
7383
output_chunks = []
7484
current_seq_no = 1 # Start sequence number from 1
7585

86+
generate_text(runtime)
7687
make_einstein_prediction(runtime)
7788

78-
builder = GenerateTextRequestBuilder()
79-
llm_request = builder.set_prompt("Hello").set_model("modelName").build()
80-
llm_response = runtime.llm_gateway.generate_text(llm_request)
81-
82-
if llm_response.is_success:
83-
print(llm_response.text)
84-
else:
85-
print(llm_response.error_code)
86-
8789
for item in items:
8890
# Item is DocElement as dict
8991
logger.info(f"Processing item: {item}")

tests/test_client.py

Lines changed: 17 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,6 @@
1717
)
1818
from datacustomcode.io.reader.base import BaseDataCloudReader
1919
from datacustomcode.io.writer.base import BaseDataCloudWriter, WriteMode
20-
from datacustomcode.proxy.client.base import BaseProxyClient
2120

2221

2322
class MockDataCloudReader(BaseDataCloudReader):
@@ -76,13 +75,6 @@ def mock_config(mock_spark):
7675
)
7776

7877

79-
@pytest.fixture
80-
def mock_proxy():
81-
"""Mock proxy client to avoid starting Spark when reader/writer are provided."""
82-
proxy = MagicMock(spec=BaseProxyClient)
83-
return proxy
84-
85-
8678
@pytest.fixture
8779
def reset_client():
8880
"""Reset the Client singleton between tests."""
@@ -93,12 +85,12 @@ def reset_client():
9385

9486
class TestClient:
9587

96-
def test_singleton_pattern(self, reset_client, mock_spark, mock_proxy):
88+
def test_singleton_pattern(self, reset_client, mock_spark):
9789
"""Test that Client behaves as a singleton."""
9890
reader = MockDataCloudReader(mock_spark)
9991
writer = MockDataCloudWriter(mock_spark)
10092

101-
client1 = Client(reader=reader, writer=writer, proxy=mock_proxy)
93+
client1 = Client(reader=reader, writer=writer)
10294
client2 = Client()
10395

10496
assert client1 is client2
@@ -144,38 +136,38 @@ def test_initialization_with_config(self, mock_config, reset_client, mock_spark)
144136
assert client._reader is mock_reader
145137
assert client._writer is mock_writer
146138

147-
def test_read_dlo(self, reset_client, mock_spark, mock_proxy):
139+
def test_read_dlo(self, reset_client, mock_spark):
148140
reader = MagicMock(spec=BaseDataCloudReader)
149141
writer = MagicMock(spec=BaseDataCloudWriter)
150142
mock_df = MagicMock(spec=DataFrame)
151143
reader.read_dlo.return_value = mock_df
152144

153-
client = Client(reader=reader, writer=writer, proxy=mock_proxy)
145+
client = Client(reader=reader, writer=writer)
154146
result = client.read_dlo("test_dlo")
155147

156148
reader.read_dlo.assert_called_once_with("test_dlo")
157149
assert result is mock_df
158150
assert "test_dlo" in client._data_layer_history[DataCloudObjectType.DLO]
159151

160-
def test_read_dmo(self, reset_client, mock_spark, mock_proxy):
152+
def test_read_dmo(self, reset_client, mock_spark):
161153
reader = MagicMock(spec=BaseDataCloudReader)
162154
writer = MagicMock(spec=BaseDataCloudWriter)
163155
mock_df = MagicMock(spec=DataFrame)
164156
reader.read_dmo.return_value = mock_df
165157

166-
client = Client(reader=reader, writer=writer, proxy=mock_proxy)
158+
client = Client(reader=reader, writer=writer)
167159
result = client.read_dmo("test_dmo")
168160

169161
reader.read_dmo.assert_called_once_with("test_dmo")
170162
assert result is mock_df
171163
assert "test_dmo" in client._data_layer_history[DataCloudObjectType.DMO]
172164

173-
def test_write_to_dlo(self, reset_client, mock_spark, mock_proxy):
165+
def test_write_to_dlo(self, reset_client, mock_spark):
174166
reader = MagicMock(spec=BaseDataCloudReader)
175167
writer = MagicMock(spec=BaseDataCloudWriter)
176168
mock_df = MagicMock(spec=DataFrame)
177169

178-
client = Client(reader=reader, writer=writer, proxy=mock_proxy)
170+
client = Client(reader=reader, writer=writer)
179171
client._record_dlo_access("some_dlo")
180172

181173
client.write_to_dlo("test_dlo", mock_df, WriteMode.APPEND, extra_param=True)
@@ -184,12 +176,12 @@ def test_write_to_dlo(self, reset_client, mock_spark, mock_proxy):
184176
"test_dlo", mock_df, WriteMode.APPEND, extra_param=True
185177
)
186178

187-
def test_write_to_dmo(self, reset_client, mock_spark, mock_proxy):
179+
def test_write_to_dmo(self, reset_client, mock_spark):
188180
reader = MagicMock(spec=BaseDataCloudReader)
189181
writer = MagicMock(spec=BaseDataCloudWriter)
190182
mock_df = MagicMock(spec=DataFrame)
191183

192-
client = Client(reader=reader, writer=writer, proxy=mock_proxy)
184+
client = Client(reader=reader, writer=writer)
193185
client._record_dmo_access("some_dmo")
194186

195187
client.write_to_dmo("test_dmo", mock_df, WriteMode.OVERWRITE, extra_param=True)
@@ -198,42 +190,42 @@ def test_write_to_dmo(self, reset_client, mock_spark, mock_proxy):
198190
"test_dmo", mock_df, WriteMode.OVERWRITE, extra_param=True
199191
)
200192

201-
def test_mixed_dlo_dmo_raises_exception(self, reset_client, mock_spark, mock_proxy):
193+
def test_mixed_dlo_dmo_raises_exception(self, reset_client, mock_spark):
202194
"""Test that mixing DLOs and DMOs raises an exception."""
203195
reader = MagicMock(spec=BaseDataCloudReader)
204196
writer = MagicMock(spec=BaseDataCloudWriter)
205197
mock_df = MagicMock(spec=DataFrame)
206198

207-
client = Client(reader=reader, writer=writer, proxy=mock_proxy)
199+
client = Client(reader=reader, writer=writer)
208200
client._record_dlo_access("test_dlo")
209201

210202
with pytest.raises(DataCloudAccessLayerException) as exc_info:
211203
client.write_to_dmo("test_dmo", mock_df, WriteMode.APPEND)
212204

213205
assert "test_dlo" in str(exc_info.value)
214206

215-
def test_mixed_dmo_dlo_raises_exception(self, reset_client, mock_spark, mock_proxy):
207+
def test_mixed_dmo_dlo_raises_exception(self, reset_client, mock_spark):
216208
"""Test that mixing DMOs and DLOs raises an exception (converse case)."""
217209
reader = MagicMock(spec=BaseDataCloudReader)
218210
writer = MagicMock(spec=BaseDataCloudWriter)
219211
mock_df = MagicMock(spec=DataFrame)
220212

221-
client = Client(reader=reader, writer=writer, proxy=mock_proxy)
213+
client = Client(reader=reader, writer=writer)
222214
client._record_dmo_access("test_dmo")
223215

224216
with pytest.raises(DataCloudAccessLayerException) as exc_info:
225217
client.write_to_dlo("test_dlo", mock_df, WriteMode.APPEND)
226218

227219
assert "test_dmo" in str(exc_info.value)
228220

229-
def test_read_pattern_flow(self, reset_client, mock_spark, mock_proxy):
221+
def test_read_pattern_flow(self, reset_client, mock_spark):
230222
"""Test a complete flow of reading and writing within the same object type."""
231223
reader = MagicMock(spec=BaseDataCloudReader)
232224
writer = MagicMock(spec=BaseDataCloudWriter)
233225
mock_df = MagicMock(spec=DataFrame)
234226
reader.read_dlo.return_value = mock_df
235227

236-
client = Client(reader=reader, writer=writer, proxy=mock_proxy)
228+
client = Client(reader=reader, writer=writer)
237229

238230
df = client.read_dlo("source_dlo")
239231
client.write_to_dlo("target_dlo", df, WriteMode.APPEND)
@@ -247,7 +239,7 @@ def test_read_pattern_flow(self, reset_client, mock_spark, mock_proxy):
247239

248240
# Reset for DMO test
249241
Client._instance = None
250-
client = Client(reader=reader, writer=writer, proxy=mock_proxy)
242+
client = Client(reader=reader, writer=writer)
251243
reader.read_dmo.return_value = mock_df
252244

253245
df = client.read_dmo("source_dmo")

0 commit comments

Comments (0)