Commit 982ef2b8 authored by Davies Liu, committed by Michael Armbrust

[SPARK-13750][SQL] fix sizeInBytes of HadoopFsRelation

## What changes were proposed in this pull request?

This PR fixes the sizeInBytes of HadoopFsRelation so that it reflects the total size of the relation's files.

## How was this patch tested?

Added a regression test.

Author: Davies Liu <davies@databricks.com>

Closes #11590 from davies/fix_sizeInBytes.
Parent d8813fa0
@@ -419,6 +419,8 @@ case class HadoopFsRelation(
   /** Returns the list of files that will be read when scanning this relation. */
   override def inputFiles: Array[String] =
     location.allFiles().map(_.getPath.toUri.toString).toArray
+
+  override def sizeInBytes: Long = location.allFiles().map(_.getLen).sum
 }
 
 /**
......
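For context, the override added above computes the relation's size as the sum of the lengths of the `FileStatus` entries returned by `location.allFiles()`. A rough standalone sketch of the same idea, written directly against Hadoop's `FileSystem` API (the helper name and the hidden-file filtering are illustrative assumptions, not part of the patch):

```scala
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}

// Hypothetical helper (not part of the patch): estimate a relation's size by
// summing the lengths of its non-hidden data files, mirroring how the new
// sizeInBytes override sums the file lengths known to the relation's catalog.
def estimateSizeInBytes(dir: String): Long = {
  val path = new Path(dir)
  val fs = FileSystem.get(path.toUri, new Configuration())
  fs.listStatus(path)
    .filterNot { status =>
      val name = status.getPath.getName
      name.startsWith("_") || name.startsWith(".")
    }
    .map(_.getLen)
    .sum
}
```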
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources

import java.io.{File, FilenameFilter}

import org.apache.spark.sql.QueryTest
import org.apache.spark.sql.test.SharedSQLContext

class HadoopFsRelationSuite extends QueryTest with SharedSQLContext {

  test("sizeInBytes should be the total size of all files") {
    withTempDir { dir =>
      dir.delete()
      sqlContext.range(1000).write.parquet(dir.toString)
      // ignore hidden files
      val allFiles = dir.listFiles(new FilenameFilter {
        override def accept(dir: File, name: String): Boolean = {
          !name.startsWith(".")
        }
      })
      val totalSize = allFiles.map(_.length()).sum
      val df = sqlContext.read.parquet(dir.toString)
      assert(df.queryExecution.logical.statistics.sizeInBytes === BigInt(totalSize))
    }
  }
}
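For reference, the statistic asserted in the test is the same estimate the planner consults, for example when comparing a join side against `spark.sql.autoBroadcastJoinThreshold`. A minimal way to observe it interactively, assuming an existing `sqlContext` and a placeholder path:

```scala
// Placeholder path; any file-based (e.g. Parquet) dataset works.
val df = sqlContext.read.parquet("/path/to/parquet/dir")

// With this patch, the estimate is the total size of the relation's data files.
val estimated = df.queryExecution.logical.statistics.sizeInBytes
println(s"sizeInBytes estimate: $estimated")
```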