@@ -133,7 +133,10 @@ def test_invalid_schema_deserialization(
     ["tmp_path", pytest.param("s3_tmp_path", marks=pytest.mark.s3)],
     indirect=True,
 )
-def test_write_parquet_custom_metadata(any_tmp_path: str) -> None:
+@pytest.mark.parametrize("check_non_existent_directory", [True, False])
+def test_write_parquet_custom_metadata(
+    any_tmp_path: str, check_non_existent_directory: bool
+) -> None:
     # Arrange
     df = pl.DataFrame(
         {
@@ -144,10 +147,35 @@ def test_write_parquet_custom_metadata(any_tmp_path: str) -> None:
     _, failure = MySchema.filter(df)
     assert failure._df.height == 4
 
-    # Act
     fs: AbstractFileSystem = url_to_fs(any_tmp_path)[0]
-    p = fs.sep.join([any_tmp_path, "failure.parquet"])
-    failure.write_parquet(p, metadata={"custom": "test"})
+    path_components = (
+        [any_tmp_path]
+        + (["non_existent_dir"] if check_non_existent_directory else [])
+        + ["failure.parquet"]
+    )
+    p = fs.sep.join(path_components)
+
+    # Act
+    if check_non_existent_directory:
+        failure.write_parquet(p, metadata={"custom": "test"}, mkdir=True)
+    else:
+        failure.write_parquet(p, metadata={"custom": "test"})
 
     # Assert
     assert pl.read_parquet_metadata(p)["custom"] == "test"
+
+
+def test_write_parquet_fails_without_mkdir(tmp_path: str) -> None:
+    # Arrange
+    df = pl.DataFrame(
+        {
+            "a": [4, 5, 6, 6, 7, 8],
+            "b": [1, 2, 3, 4, 5, 6],
+        }
+    )
+    _, failure = MySchema.filter(df)
+    p = f"{tmp_path}/non_existent_dir/failure.parquet"
+
+    # Act / Assert
+    with pytest.raises(FileNotFoundError):
+        failure.write_parquet(p)