1111
# DayTimeIntervalType introduced in Spark 3.2 (at least) but didn't show up in
# PySpark until version 3.3
# Feature flag: TimestampNTZType and other 3.5-only APIs.
# The former PYSPARK_33 flag was removed — this commit assumes PySpark >= 3.3,
# so DayTimeIntervalType no longer needs a runtime version check.
PYSPARK_35 = vparse(pyspark.__version__) >= vparse("3.5")
1615
1716
# UUID values are represented as plain strings on the PySpark side.
_to_pyspark_dtypes[dt.UUID] = pt.StringType
3635
3736
# Map PySpark DayTimeIntervalType field codes to interval unit strings.
# Defined unconditionally: the PYSPARK_33 guard was dropped because
# DayTimeIntervalType is always present on the supported PySpark versions.
_pyspark_interval_units = {
    pt.DayTimeIntervalType.SECOND: "s",
    pt.DayTimeIntervalType.MINUTE: "m",
    pt.DayTimeIntervalType.HOUR: "h",
    pt.DayTimeIntervalType.DAY: "D",
}
4543
4644
# NOTE(review): the lines below are non-contiguous diff hunks of
# PySparkType.to_ibis, not complete source — the full method is not visible
# here. The visible change removes the `PYSPARK_33 and` guard from the
# DayTimeIntervalType branch and the `PYSPARK_35 and` guard from the
# TimestampNTZType branch; the branches' bodies are otherwise unchanged.
4745class PySparkType (TypeMapper ):
@@ -62,7 +60,7 @@ def to_ibis(cls, typ, nullable=True):
6260 fields = {f .name : cls .to_ibis (f .dataType ) for f in typ .fields }
6361
6462 return dt .Struct (fields , nullable = nullable )
65- elif PYSPARK_33 and isinstance (typ , pt .DayTimeIntervalType ):
# after: interval branch no longer version-guarded
63+ elif isinstance (typ , pt .DayTimeIntervalType ):
6664 if (
6765 typ .startField == typ .endField
6866 and typ .startField in _pyspark_interval_units
@@ -71,7 +69,7 @@ def to_ibis(cls, typ, nullable=True):
7169 return dt .Interval (unit , nullable = nullable )
7270 else :
7371 raise com .IbisTypeError (f"{ typ !r} couldn't be converted to Interval" )
74- elif PYSPARK_35 and isinstance (typ , pt .TimestampNTZType ):
# after: TimestampNTZ branch no longer version-guarded
# NOTE(review): dropping the PYSPARK_35 guard here presumably means
# TimestampNTZType exists on all supported versions — confirm against the
# project's minimum PySpark requirement.
72+ elif isinstance (typ , pt .TimestampNTZType ):
7573 return dt .Timestamp (nullable = nullable )
7674 elif isinstance (typ , pt .UserDefinedType ):
7775 return cls .to_ibis (typ .sqlType (), nullable = nullable )
0 commit comments