Commit cbb5793

Code review feedback
1 parent 32229c7 commit cbb5793

4 files changed (+13, -13 lines)

sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetConverter.scala

Lines changed: 10 additions & 10 deletions
@@ -449,9 +449,9 @@ private[parquet] class CatalystNativeArrayConverter(
     protected[parquet] var capacity: Int = CatalystArrayConverter.INITIAL_ARRAY_SIZE)
   extends GroupConverter with CatalystConverter {
 
-  type nativeType = elementType.JvmType
+  type NativeType = elementType.JvmType
 
-  private var buffer: CatalystConverter.ArrayScalaType[nativeType] =
+  private var buffer: CatalystConverter.ArrayScalaType[NativeType] =
     elementType.classTag.newArray(capacity)
 
   private var elements: Int = 0
@@ -475,43 +475,43 @@ private[parquet] class CatalystNativeArrayConverter(
   // Overriden here to avoid auto-boxing for primitive types
   override protected[parquet] def updateBoolean(fieldIndex: Int, value: Boolean): Unit = {
     checkGrowBuffer()
-    buffer(elements) = value.asInstanceOf[nativeType]
+    buffer(elements) = value.asInstanceOf[NativeType]
     elements += 1
   }
 
   override protected[parquet] def updateInt(fieldIndex: Int, value: Int): Unit = {
     checkGrowBuffer()
-    buffer(elements) = value.asInstanceOf[nativeType]
+    buffer(elements) = value.asInstanceOf[NativeType]
     elements += 1
   }
 
   override protected[parquet] def updateLong(fieldIndex: Int, value: Long): Unit = {
     checkGrowBuffer()
-    buffer(elements) = value.asInstanceOf[nativeType]
+    buffer(elements) = value.asInstanceOf[NativeType]
     elements += 1
   }
 
   override protected[parquet] def updateDouble(fieldIndex: Int, value: Double): Unit = {
     checkGrowBuffer()
-    buffer(elements) = value.asInstanceOf[nativeType]
+    buffer(elements) = value.asInstanceOf[NativeType]
     elements += 1
   }
 
   override protected[parquet] def updateFloat(fieldIndex: Int, value: Float): Unit = {
     checkGrowBuffer()
-    buffer(elements) = value.asInstanceOf[nativeType]
+    buffer(elements) = value.asInstanceOf[NativeType]
     elements += 1
   }
 
   override protected[parquet] def updateBinary(fieldIndex: Int, value: Binary): Unit = {
     checkGrowBuffer()
-    buffer(elements) = value.getBytes.asInstanceOf[nativeType]
+    buffer(elements) = value.getBytes.asInstanceOf[NativeType]
     elements += 1
   }
 
   override protected[parquet] def updateString(fieldIndex: Int, value: Binary): Unit = {
     checkGrowBuffer()
-    buffer(elements) = value.toStringUsingUTF8.asInstanceOf[nativeType]
+    buffer(elements) = value.toStringUsingUTF8.asInstanceOf[NativeType]
     elements += 1
   }
 

@@ -533,7 +533,7 @@ private[parquet] class CatalystNativeArrayConverter(
   private def checkGrowBuffer(): Unit = {
     if (elements >= capacity) {
       val newCapacity = 2 * capacity
-      val tmp: CatalystConverter.ArrayScalaType[nativeType] =
+      val tmp: CatalystConverter.ArrayScalaType[NativeType] =
         elementType.classTag.newArray(newCapacity)
       Array.copy(buffer, 0, tmp, 0, capacity)
       buffer = tmp
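These hunks rename the type member nativeType to NativeType, matching the Scala convention that type members, like classes and traits, use UpperCamelCase. The converter behind them keeps a flat array that doubles in capacity whenever it fills, and avoids auto-boxing by giving each primitive its own update overload. A minimal standalone sketch of the same doubling-growth pattern (GrowableBuffer and its API are a hypothetical illustration, not Spark code):

    import scala.reflect.ClassTag

    // Sketch of the doubling-buffer pattern used by CatalystNativeArrayConverter.
    // GrowableBuffer is hypothetical, not Spark API.
    class GrowableBuffer[T: ClassTag](initialCapacity: Int = 16) {
      private var buffer: Array[T] = new Array[T](initialCapacity)
      private var elements: Int = 0

      def append(value: T): Unit = {
        checkGrowBuffer()
        buffer(elements) = value
        elements += 1
      }

      // Copy out only the elements actually written.
      def toArray: Array[T] = buffer.take(elements)

      // Double the capacity once the buffer is full, mirroring
      // checkGrowBuffer in the converter above.
      private def checkGrowBuffer(): Unit = {
        if (elements >= buffer.length) {
          val tmp = new Array[T](2 * buffer.length)
          Array.copy(buffer, 0, tmp, 0, buffer.length)
          buffer = tmp
        }
      }
    }

Note that, unlike the converter, a generic [T: ClassTag] class still boxes primitives at the call site; the real code sidesteps that with the per-primitive update methods shown in the diff.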

sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetRelation.scala

Lines changed: 1 addition & 1 deletion
@@ -151,7 +151,7 @@ private[sql] object ParquetRelation {
   }
 
   if (fs.exists(path) &&
-    !fs.getFileStatus(path)
+      !fs.getFileStatus(path)
       .getPermission
       .getUserAction
       .implies(FsAction.READ_WRITE)) {
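This hunk only re-indents the continuation line of the permission check. Functionally, the surrounding code asks the Hadoop FileSystem whether the table path exists and whether the owner's permission bits grant read-write access. A self-contained sketch of the same check (the path is a placeholder):

    import org.apache.hadoop.conf.Configuration
    import org.apache.hadoop.fs.{FileSystem, Path}
    import org.apache.hadoop.fs.permission.FsAction

    object PermissionCheck {
      def main(args: Array[String]): Unit = {
        val conf = new Configuration()
        val path = new Path("/tmp/parquet-table") // placeholder path
        val fs: FileSystem = path.getFileSystem(conf)

        // Same condition as in ParquetRelation: the path exists, but the
        // owner's permission bits do not imply both read and write.
        val ownerLacksReadWrite =
          fs.exists(path) &&
            !fs.getFileStatus(path)
              .getPermission
              .getUserAction
              .implies(FsAction.READ_WRITE)

        println(s"owner lacks read-write on $path: $ownerLacksReadWrite")
      }
    }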

sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetTableOperations.scala

Lines changed: 1 addition & 1 deletion
@@ -168,7 +168,7 @@ case class InsertIntoParquetTable(
 
   val writeSupport =
     if (child.output.map(_.dataType).forall(_.isPrimitive())) {
-      logger.info("Initializing MutableRowWriteSupport")
+      logger.debug("Initializing MutableRowWriteSupport")
       classOf[org.apache.spark.sql.parquet.MutableRowWriteSupport]
     } else {
       classOf[org.apache.spark.sql.parquet.RowWriteSupport]
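Demoting the message from info to debug keeps this routine per-job note out of the default log output. The surrounding code picks a write-support class from the output schema: the mutable-row fast path applies only when every output column has a primitive type. A distilled sketch of that selection pattern (Column and the writer classes are placeholders, not Spark API):

    // Placeholder types standing in for Catalyst attributes and the two
    // Spark WriteSupport classes; only the selection pattern is real.
    case class Column(name: String, isPrimitive: Boolean)
    class RowWriter
    class MutableRowWriter extends RowWriter

    def chooseWriteSupport(output: Seq[Column]): Class[_ <: RowWriter] =
      if (output.forall(_.isPrimitive)) classOf[MutableRowWriter]
      else classOf[RowWriter]

    // e.g. chooseWriteSupport(Seq(Column("id", isPrimitive = true)))
    //      returns classOf[MutableRowWriter]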

sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetTypes.scala

Lines changed: 1 addition & 1 deletion
@@ -176,7 +176,7 @@ private[parquet] object ParquetTypesConverter {
 
   /**
    * Converts a given Catalyst [[org.apache.spark.sql.catalyst.types.DataType]] into
-   * the corrponsing Parquet `Type`.
+   * the corresponding Parquet `Type`.
    *
    * The conversion follows the rules below:
    * <ul>
