@@ -1703,10 +1703,10 @@ def __getattr__(self, name: str) -> "Column":
                 errorClass="JVM_ATTRIBUTE_NOT_SUPPORTED", messageParameters={"attr_name": name}
             )

-        # if name not in self.columns:
-        #     raise PySparkAttributeError(
-        #         errorClass="ATTRIBUTE_NOT_SUPPORTED", messageParameters={"attr_name": name}
-        #     )
+        if name not in self.columns:
+            raise PySparkAttributeError(
+                errorClass="ATTRIBUTE_NOT_SUPPORTED", messageParameters={"attr_name": name}
+            )

         return self._col(name)

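A minimal usage sketch of the first hunk's effect (the DataFrame and column names here are illustrative, not from the diff), assuming an active Spark Connect session `spark`: with the column check re-enabled, attribute access on an unknown column raises on the client instead of deferring the failure to the server.

    df = spark.range(3)   # single column "id"
    df.id                 # ok: resolved via self._col("id")
    df.foo                # raises PySparkAttributeError with errorClass="ATTRIBUTE_NOT_SUPPORTED"
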
@@ -1739,14 +1739,14 @@ def __getitem__(
                 #   ConnectColumn(addDataFrameIdToCol(resolve(colName)))
                 # }

-                # # validate the column name
-                # if not hasattr(self._session, "is_mock_session"):
-                #     from pyspark.sql.connect.types import verify_col_name
-                #
-                #     # Try best to verify the column name with cached schema
-                #     # If fails, fall back to the server side validation
-                #     if not verify_col_name(item, self._schema):
-                #         self.select(item).isLocal()
+                # validate the column name
+                if not hasattr(self._session, "is_mock_session"):
+                    from pyspark.sql.connect.types import verify_col_name
+
+                    # Try best to verify the column name with cached schema
+                    # If fails, fall back to the server side validation
+                    if not verify_col_name(item, self._schema):
+                        self.select(item).isLocal()

                 return self._col(item)
         elif isinstance(item, Column):
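A similar sketch for the second hunk (assuming `spark` is a Spark Connect session rather than a mock session): string indexing now attempts a client-side check of the name against the cached schema via verify_col_name, and only falls back to a server round trip (select(item).isLocal()) when the cached schema cannot confirm the name.

    df = spark.range(3)   # single column "id"
    df["id"]              # verified against the cached schema, returns the Column
    df["foo"]             # fails client-side verification, so select("foo").isLocal() runs
                          # server-side validation, which is expected to raise an analysis error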