from pyspark.sql import DataFrame
from pyspark.sql import functions as F

# Map source column names to the names used downstream; keys are selected
# from the source table and values become the aliased output names.
column_mapping: dict[str, str] = {
    "SomeColumn": "SomeColumn",
    "SomeOther__c": "SomeOtherColumn",
}

# `spark`, `generate_lakehouse_path`, `DEBUG`, and `display` are assumed to be
# provided by the surrounding notebook environment.
df: DataFrame = (
    spark.read.format("delta")
    .load(generate_lakehouse_path("table_name"))
    .select(*column_mapping.keys())
    .select(
        *[
            F.col(old_name).alias(new_name)
            for old_name, new_name in column_mapping.items()
        ]
    )
)

# An empty result is treated as a table mid-update: fail fast so the caller
# can retry rather than proceed with no rows.
if df.isEmpty():
    raise Exception("The table is being updated. Please try again later")

if DEBUG:
    display(df)