@@ -1171,13 +1171,17 @@ class DataFrame private[sql](
    */
   def withColumn(colName: String, col: Column): DataFrame = {
     val resolver = sqlContext.analyzer.resolver
-    val replaced = schema.exists(f => resolver(f.name, colName))
-    if (replaced) {
-      val colNames = schema.map { field =>
-        val name = field.name
-        if (resolver(name, colName)) col.as(colName) else Column(name)
+    val output = queryExecution.analyzed.output
+    val shouldReplace = output.exists(f => resolver(f.name, colName))
+    if (shouldReplace) {
+      val columns = output.map { field =>
+        if (resolver(field.name, colName)) {
+          col.as(colName)
+        } else {
+          Column(field)
+        }
       }
-      select(colNames : _*)
+      select(columns : _*)
     } else {
       select(Column("*"), col.as(colName))
     }
@@ -1188,13 +1192,17 @@ class DataFrame private[sql](
    */
   private[spark] def withColumn(colName: String, col: Column, metadata: Metadata): DataFrame = {
     val resolver = sqlContext.analyzer.resolver
-    val replaced = schema.exists(f => resolver(f.name, colName))
-    if (replaced) {
-      val colNames = schema.map { field =>
-        val name = field.name
-        if (resolver(name, colName)) col.as(colName, metadata) else Column(name)
+    val output = queryExecution.analyzed.output
+    val shouldReplace = output.exists(f => resolver(f.name, colName))
+    if (shouldReplace) {
+      val columns = output.map { field =>
+        if (resolver(field.name, colName)) {
+          col.as(colName, metadata)
+        } else {
+          Column(field)
+        }
       }
-      select(colNames : _*)
+      select(columns : _*)
     } else {
       select(Column("*"), col.as(colName, metadata))
     }
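
For context, a minimal usage sketch of the replace-versus-append behavior that both branches implement; it is not part of the patch and assumes a Spark 1.x setup with a SQLContext named sqlContext and its implicits. The patch itself only changes how the kept columns are rebuilt: from the analyzed plan's output attributes (Column(field)) instead of by name from the schema (Column(name)).

// Usage sketch (assumed setup, not from the patch).
import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLContext

val sc = new SparkContext("local", "withColumn-sketch")  // assumed local context
val sqlContext = new SQLContext(sc)
import sqlContext.implicits._

val df = sc.parallelize(Seq((1, "a"), (2, "b"))).toDF("id", "name")

// "id" already exists, so withColumn replaces that column in place (no duplicate "id").
val bumped = df.withColumn("id", df("id") + 100)

// "score" does not exist, so withColumn falls back to select("*", expr) and appends it.
val scored = df.withColumn("score", df("id") * 2)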