@@ -107,20 +107,23 @@ class Client:
107107 _reader : BaseDataCloudReader
108108 _writer : BaseDataCloudWriter
109109 _file : DefaultFindFilePath
110- _proxy : BaseProxyClient
110+ _proxy : Optional [ BaseProxyClient ]
111111 _data_layer_history : dict [DataCloudObjectType , set [str ]]
112+ _code_type : str
112113
113114 def __new__ (
114115 cls ,
115116 reader : Optional [BaseDataCloudReader ] = None ,
116117 writer : Optional ["BaseDataCloudWriter" ] = None ,
117118 proxy : Optional [BaseProxyClient ] = None ,
118119 spark_provider : Optional ["BaseSparkSessionProvider" ] = None ,
120+ code_type : str = "script" ,
119121 ) -> Client :
122+ if "function" in code_type :
123+ return cls ._new_function_client ()
124+
120125 if cls ._instance is None :
121126 cls ._instance = super ().__new__ (cls )
122-
123- spark = None
124127 # Initialize Readers and Writers from config
125128 # and/or provided reader and writer
126129 if reader is None or writer is None :
@@ -139,22 +142,6 @@ def __new__(
139142 provider = DefaultSparkSessionProvider ()
140143
141144 spark = provider .get_session (config .spark_config )
142- elif (
143- proxy is None
144- and config .proxy_config is not None
145- and config .spark_config is not None
146- ):
147- # Both reader and writer provided; we still need spark for proxy init
148- provider = (
149- spark_provider
150- if spark_provider is not None
151- else (
152- config .spark_provider_config .to_object ()
153- if config .spark_provider_config is not None
154- else DefaultSparkSessionProvider ()
155- )
156- )
157- spark = provider .get_session (config .spark_config )
158145
159146 if config .reader_config is None and reader is None :
160147 raise ValueError (
@@ -163,44 +150,23 @@ def __new__(
163150 elif reader is None or (
164151 config .reader_config is not None and config .reader_config .force
165152 ):
166- if config .proxy_config is None :
167- raise ValueError (
168- "Proxy config is required when reader is built from config"
169- )
170- assert (
171- spark is not None
172- ) # set in "reader is None or writer is None" branch
173- assert config .reader_config is not None # ensured by branch condition
174- proxy_init = config .proxy_config .to_object (spark )
175-
176- reader_init = config .reader_config .to_object (spark )
153+ reader_init = config .reader_config .to_object (spark ) # type: ignore
177154 else :
178155 reader_init = reader
179- if proxy is not None :
180- proxy_init = proxy
181- elif config .proxy_config is None :
182- raise ValueError ("Proxy config is required when reader is provided" )
183- else :
184- assert (
185- spark is not None
186- ) # set in "both provided; proxy from config" branch
187- proxy_init = config .proxy_config .to_object (spark )
188156 if config .writer_config is None and writer is None :
189157 raise ValueError (
190158 "Writer config is required when writer is not provided"
191159 )
192160 elif writer is None or (
193161 config .writer_config is not None and config .writer_config .force
194162 ):
195- assert spark is not None # set when reader or writer from config
196- assert config .writer_config is not None # ensured by branch condition
197- writer_init = config .writer_config .to_object (spark )
163+ writer_init = config .writer_config .to_object (spark ) # type: ignore
198164 else :
199165 writer_init = writer
166+
200167 cls ._instance ._reader = reader_init
201168 cls ._instance ._writer = writer_init
202169 cls ._instance ._file = DefaultFindFilePath ()
203- cls ._instance ._proxy = proxy_init
204170 cls ._instance ._data_layer_history = {
205171 DataCloudObjectType .DLO : set (),
206172 DataCloudObjectType .DMO : set (),
@@ -209,6 +175,16 @@ def __new__(
209175 raise ValueError ("Cannot set reader or writer after client is initialized" )
210176 return cls ._instance
211177
178+ @classmethod
179+ def _new_function_client (cls ) -> Client :
180+ cls ._instance = super ().__new__ (cls )
181+ cls ._instance ._proxy = (
182+ config .proxy_config .to_object () # type: ignore
183+ if config .proxy_config is not None
184+ else None
185+ )
186+ return cls ._instance
187+
212188 def read_dlo (self , name : str ) -> PySparkDataFrame :
213189 """Read a DLO from Data Cloud.
214190
@@ -260,6 +236,8 @@ def write_to_dmo(
260236 return self ._writer .write_to_dmo (name , dataframe , write_mode , ** kwargs )
261237
262238 def call_llm_gateway (self , LLM_MODEL_ID : str , prompt : str , maxTokens : int ) -> str :
239+ if self ._proxy is None :
240+ raise ValueError ("No proxy configured; set proxy or proxy_config" )
263241 return self ._proxy .call_llm_gateway (LLM_MODEL_ID , prompt , maxTokens )
264242
265243 def find_file_path (self , file_name : str ) -> Path :