Unverified commit ffdf15f8, authored by Jialin Qiao, committed by GitHub

fix spark package name in doc (#1607)

Parent: 794ad5fd
@@ -140,7 +140,7 @@ NOTE: Remember to assign necessary read and write permissions in advance.
### Example 1: read from the local file system
```scala
-import org.apache.iotdb.tsfile._
+import org.apache.iotdb.spark.tsfile._
val wide_df = spark.read.tsfile("test.tsfile")
wide_df.show
@@ -151,7 +151,7 @@ narrow_df.show
### Example 2: read from the hadoop file system
```scala
-import org.apache.iotdb.tsfile._
+import org.apache.iotdb.spark.tsfile._
val wide_df = spark.read.tsfile("hdfs://localhost:9000/test.tsfile")
wide_df.show
@@ -162,7 +162,7 @@ narrow_df.show
### Example 3: read from a specific directory
```scala
-import org.apache.iotdb.tsfile._
+import org.apache.iotdb.spark.tsfile._
val df = spark.read.tsfile("hdfs://localhost:9000/usr/hadoop")
df.show
```
@@ -174,7 +174,7 @@ Note 2: Measurements of the same name should have the same schema.
### Example 4: query in wide form
```scala
-import org.apache.iotdb.tsfile._
+import org.apache.iotdb.spark.tsfile._
val df = spark.read.tsfile("hdfs://localhost:9000/test.tsfile")
df.createOrReplaceTempView("tsfile_table")
val newDf = spark.sql("select * from tsfile_table where `device_1.sensor_1`>0 and `device_1.sensor_2` < 22")
@@ -182,7 +182,7 @@ newDf.show
```
```scala
-import org.apache.iotdb.tsfile._
+import org.apache.iotdb.spark.tsfile._
val df = spark.read.tsfile("hdfs://localhost:9000/test.tsfile")
df.createOrReplaceTempView("tsfile_table")
val newDf = spark.sql("select count(*) from tsfile_table")
@@ -191,7 +191,7 @@ newDf.show
### Example 5: query in narrow form
```scala
-import org.apache.iotdb.tsfile._
+import org.apache.iotdb.spark.tsfile._
val df = spark.read.tsfile("hdfs://localhost:9000/test.tsfile", true)
df.createOrReplaceTempView("tsfile_table")
val newDf = spark.sql("select * from tsfile_table where device_name = 'root.ln.wf02.wt02' and temperature > 5")
@@ -199,7 +199,7 @@ newDf.show
```
```scala
-import org.apache.iotdb.tsfile._
+import org.apache.iotdb.spark.tsfile._
val df = spark.read.tsfile("hdfs://localhost:9000/test.tsfile", true)
df.createOrReplaceTempView("tsfile_table")
val newDf = spark.sql("select count(*) from tsfile_table")
@@ -210,7 +210,7 @@ newDf.show
```scala
// we only support wide_form table to write
-import org.apache.iotdb.tsfile._
+import org.apache.iotdb.spark.tsfile._
val df = spark.read.tsfile("hdfs://localhost:9000/test.tsfile")
df.show
@@ -224,7 +224,7 @@ newDf.show
```scala
// we only support wide_form table to write
-import org.apache.iotdb.tsfile._
+import org.apache.iotdb.spark.tsfile._
val df = spark.read.tsfile("hdfs://localhost:9000/test.tsfile", true)
df.show
......
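To see the effect of the package rename in one place, here is a minimal sketch of the read path using the corrected import. The SparkSession setup and the HDFS path are illustrative assumptions; only `org.apache.iotdb.spark.tsfile._`, `spark.read.tsfile(...)`, and the narrow-form `true` flag come from the documentation snippets above.

```scala
// Minimal sketch of the read path with the corrected package name.
// SparkSession setup and the HDFS path are illustrative assumptions.
import org.apache.spark.sql.SparkSession
import org.apache.iotdb.spark.tsfile._

val spark = SparkSession.builder()
  .master("local[*]")
  .appName("tsfile-read-sketch")
  .getOrCreate()

// Wide form: one column per device.sensor series.
val wide_df = spark.read.tsfile("hdfs://localhost:9000/test.tsfile")
wide_df.show

// Narrow form: the extra `true` argument yields (time, device_name, sensor, ...) rows.
val narrow_df = spark.read.tsfile("hdfs://localhost:9000/test.tsfile", true)
narrow_df.createOrReplaceTempView("tsfile_table")
spark.sql("select count(*) from tsfile_table").show
```

In a spark-shell session, as in the documentation, the `spark` object is already available and only the corrected import is needed.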
@@ -137,7 +137,7 @@ The existing data in the TsFile is as follows:
### Example 1: read from the local file system
```scala
-import org.apache.iotdb.tsfile._
+import org.apache.iotdb.spark.tsfile._
val wide_df = spark.read.tsfile("test.tsfile")
wide_df.show
@@ -148,7 +148,7 @@ narrow_df.show
### Example 2: read from the hadoop file system
```scala
-import org.apache.iotdb.tsfile._
+import org.apache.iotdb.spark.tsfile._
val wide_df = spark.read.tsfile("hdfs://localhost:9000/test.tsfile")
wide_df.show
@@ -159,7 +159,7 @@ narrow_df.show
### Example 3: read from a specific directory
```scala
-import org.apache.iotdb.tsfile._
+import org.apache.iotdb.spark.tsfile._
val df = spark.read.tsfile("hdfs://localhost:9000/usr/hadoop")
df.show
```
@@ -171,7 +171,7 @@ df.show
### Example 4: query in wide form
```scala
-import org.apache.iotdb.tsfile._
+import org.apache.iotdb.spark.tsfile._
val df = spark.read.tsfile("hdfs://localhost:9000/test.tsfile")
df.createOrReplaceTempView("tsfile_table")
val newDf = spark.sql("select * from tsfile_table where `device_1.sensor_1`>0 and `device_1.sensor_2` < 22")
@@ -179,7 +179,7 @@ newDf.show
```
```scala
-import org.apache.iotdb.tsfile._
+import org.apache.iotdb.spark.tsfile._
val df = spark.read.tsfile("hdfs://localhost:9000/test.tsfile")
df.createOrReplaceTempView("tsfile_table")
val newDf = spark.sql("select count(*) from tsfile_table")
@@ -189,7 +189,7 @@ newDf.show
### Example 5: query in narrow form
```scala
-import org.apache.iotdb.tsfile._
+import org.apache.iotdb.spark.tsfile._
val df = spark.read.tsfile("hdfs://localhost:9000/test.tsfile", true)
df.createOrReplaceTempView("tsfile_table")
val newDf = spark.sql("select * from tsfile_table where device_name = 'root.ln.wf02.wt02' and temperature > 5")
@@ -197,7 +197,7 @@ newDf.show
```
```scala
-import org.apache.iotdb.tsfile._
+import org.apache.iotdb.spark.tsfile._
val df = spark.read.tsfile("hdfs://localhost:9000/test.tsfile", true)
df.createOrReplaceTempView("tsfile_table")
val newDf = spark.sql("select count(*) from tsfile_table")
@@ -208,7 +208,7 @@ newDf.show
```scala
// we only support wide_form table to write
-import org.apache.iotdb.tsfile._
+import org.apache.iotdb.spark.tsfile._
val df = spark.read.tsfile("hdfs://localhost:9000/test.tsfile")
df.show
@@ -222,7 +222,7 @@ newDf.show
```scala
// we only support wide_form table to write
-import org.apache.iotdb.tsfile._
+import org.apache.iotdb.spark.tsfile._
val df = spark.read.tsfile("hdfs://localhost:9000/test.tsfile", true)
df.show
......
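The write examples in both files are cut off by the collapsed diff view right after the `// we only support wide_form table to write` comment. Assuming the connector also exposes a matching `write.tsfile(...)` implicit (an assumption, since the call itself is not visible in the hunks above), the wide-form write path might be sketched as follows:

```scala
// Sketch of the write path; per the comment above, only wide-form tables are supported for writing.
// The output path and the write.tsfile(...) call are assumptions not shown in the truncated hunks.
import org.apache.iotdb.spark.tsfile._

val df = spark.read.tsfile("hdfs://localhost:9000/test.tsfile") // wide form by default
df.show
df.write.tsfile("hdfs://localhost:9000/output.tsfile")          // assumed implicit writer
```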