Commit 841b395e authored by 梦境迷离

optimize

Parent d00cadc6
......@@ -31,20 +31,12 @@ package org.bitlap.csv.core
trait Csvable[T] {
/**
* Internal API for processing a specific field of case class object.
* API for processing a specific field of case class object.
*
* @param t case class object
* @return
*/
@InternalApi
def _toCsvString(t: T): String = ""
/**
* Public API, finally get CSV line string.
*
* @return
*/
def toCsvString: String = ""
def _toCsvString(t: T): String
}
......
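With this change Csvable is reduced to the single abstract method `_toCsvString`. For orientation only (not part of the commit), a hand-written instance under the new contract might look like the sketch below; it assumes `org.bitlap.csv.core.Csvable` is in scope and uses a key/value class shaped like the tests' `Dimension3`.

// Minimal hand-written Csvable sketch; Dimension3 mirrors the key/value case class used in the tests.
final case class Dimension3(key: String, value: String)

object Dimension3Csvable extends Csvable[Dimension3] {
  // Join the two fields with the default ',' separator.
  override def _toCsvString(t: Dimension3): String = s"${t.key},${t.value}"
}

// Dimension3Csvable._toCsvString(Dimension3("a", "b")) yields "a,b"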
......@@ -41,7 +41,7 @@ class CsvableBuilder[T] {
* @return
*/
def setField[SF](scalaField: T => SF, value: SF => String): CsvableBuilder[T] =
macro DeriveCsvableBuilder.setFieldImpl[T, SF]
macro DeriveCsvableBuilder.setFieldImpl[T, SF]
/**
* Create a custom builder for converting this scala value to CSV line string.
......@@ -50,19 +50,19 @@ class CsvableBuilder[T] {
* @param columnSeparator The separator for CSV column value.
* @return
*/
def build(t: T, columnSeparator: Char): Csvable[T] = macro DeriveCsvableBuilder.buildImpl[T]
def convert(t: T, columnSeparator: Char): String = macro DeriveCsvableBuilder.convertOneImpl[T]
/**
* Use `,` as the default value for columnSeparator.
*/
def build(t: T): Csvable[T] = macro DeriveCsvableBuilder.buildDefaultImpl[T]
def convert(t: T): String = macro DeriveCsvableBuilder.convertOneDefaultImpl[T]
/**
* Convert the sequence of Scala case class to CSV string.
*
* @param ts The sequence of Scala case class.
* @param ts The sequence of Scala case class.
* @param columnSeparator The separator for CSV column value.
* @return
* @return The CSV lines joined by '\n'.
*/
def convert(ts: List[T], columnSeparator: Char): String = macro DeriveCsvableBuilder.convertImpl[T]
......@@ -74,11 +74,11 @@ class CsvableBuilder[T] {
/**
* Convert the sequence of Scala case class to CSV string and write to file.
*
* @param ts The sequence of Scala case class.
* @param ts The sequence of Scala case class.
* @param file File to save CSV string.
* @return
*/
def writeTo(ts: List[T], file: File): Boolean = macro DeriveCsvableBuilder.writeToFileImpl[T]
def convertTo(ts: List[T], file: File): Boolean = macro DeriveCsvableBuilder.convertToFileImpl[T]
}
......
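In short, the builder no longer returns a `Csvable[T]` to call `.toCsvString` on: `convert` yields the CSV string directly and `convertTo` writes it to a file. A usage sketch, assuming `import org.bitlap.csv.core.CsvableBuilder` and the `Dimension2` case class from `CustomConverterBuilderTest` further down:

val e = Dimension2("1", Some("hello"), 'c', 1L, 1, false, 0.1f, 0.2)

// Default separator ',' via convert(t):
val csvDefault = CsvableBuilder[Dimension2].convert(e)
// csvDefault == "1,hello,c,1,1,false,0.1,0.2"

// Explicit separator plus a per-field override registered with setField:
val csvStar = CsvableBuilder[Dimension2]
  .setField[Option[String]](_.value, _ => "hello world")
  .convert(e, '*')
// csvStar == "1*hello world*c*1*1*false*0.1*0.2"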
/*
* Copyright (c) 2022 bitlap
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.bitlap.csv.core
import scala.annotation.StaticAnnotation
/**
* Marks a method that should only be used inside the library.
*
* @author 梦境迷离
* @version 1.0,2022/5/1
*/
case class InternalApi() extends StaticAnnotation
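For context, the marker is attached directly to library-internal members; the pre-change Csvable trait above used it like this:

trait Csvable[T] {
  @InternalApi
  def _toCsvString(t: T): String = ""

  def toCsvString: String = ""
}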
......@@ -31,20 +31,12 @@ package org.bitlap.csv.core
trait Scalable[T] {
/**
* Internal API for processing a specific column value of CSV line data.
* API for processing a specific column value of CSV line data.
*
* @param column The column value of CSV line data.
* @return
*/
@InternalApi
def _toScala(column: String): Option[T] = None
/**
* Public API, finally get scala case class object.
*
* @return
*/
def toScala: Option[T] = None
def _toScala(column: String): Option[T]
}
object Scalable extends ScalableImplicits {
......
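Like Csvable, Scalable now exposes only `_toScala`. The generated code (see DeriveScalableBuilder below) decodes each column through the `Scalable[...]` summoner and falls back to a zero value when a column yields None; a per-column sketch, assuming the built-in instances from ScalableImplicits:

import org.bitlap.csv.core.Scalable

// Decode individual column strings; getOrElse supplies the per-type defaults the macros use.
val id: Long   = Scalable[Long]._toScala("100").getOrElse(0L)   // Some(100L) -> 100L
val count: Int = Scalable[Int]._toScala("3").getOrElse(0)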
......@@ -50,12 +50,12 @@ class ScalableBuilder[T] {
* @param columnSeparator The separator for CSV column value.
* @return
*/
def build(line: String, columnSeparator: Char): Scalable[T] = macro DeriveScalableBuilder.buildImpl[T]
def convert(line: String, columnSeparator: Char): Option[T] = macro DeriveScalableBuilder.convertOneImpl[T]
/**
* Use `,` as the default value for columnSeparator.
*/
def build(line: String): Scalable[T] = macro DeriveScalableBuilder.buildDefaultImpl[T]
def convert(line: String): Option[T] = macro DeriveScalableBuilder.convertOneDefaultImpl[T]
/**
* Convert all CSV lines to the sequence of Scala case class.
......@@ -78,7 +78,7 @@ class ScalableBuilder[T] {
* @param charset String charset of the CSV file content.
* @return
*/
def readFrom(file: InputStream, charset: String): List[Option[T]] = macro DeriveScalableBuilder.readFromFileImpl[T]
def convertFrom(file: InputStream, charset: String): List[Option[T]] = macro DeriveScalableBuilder.convertFromFileImpl[T]
}
......
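A usage sketch of the renamed read-side API, mirroring the tests below; `Dimension2`, `Metric2`, `Dimension3`, `StringUtils.extractJsonValues` and `simple_data.csv` are the existing test fixtures and helpers:

// Single line -> Option[T] (previously build(line, ',').toScala):
val line = "abc,cdf,d,12,2,false,0.1,0.23333"
val dimension: Option[Dimension2] = ScalableBuilder[Dimension2].convert(line, ',')
// dimension == Some(Dimension2(abc,Some(cdf),d,12,2,false,0.1,0.23333))

// Whole file -> List[Option[T]] (previously readFrom, now convertFrom):
val metrics = ScalableBuilder[Metric2]
  .setField(
    _.dimensions,
    dims => StringUtils.extractJsonValues[Dimension3](dims)((k, v) => Dimension3(k, v))
  )
  .convertFrom(ClassLoader.getSystemResourceAsStream("simple_data.csv"), "utf-8")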
......@@ -21,7 +21,7 @@
package org.bitlap.csv.core.macros
import org.bitlap.csv.core.{ Csvable, CsvableBuilder }
import org.bitlap.csv.core.CsvableBuilder
import scala.collection.mutable
import scala.reflect.macros.whitebox
......@@ -57,10 +57,10 @@ class DeriveCsvableBuilder(override val c: whitebox.Context) extends AbstractMac
def applyImpl[T: WeakTypeTag]: Expr[CsvableBuilder[T]] =
deriveBuilderApplyImpl[T]
def buildDefaultImpl[T: WeakTypeTag](t: Expr[T]): Expr[Csvable[T]] =
def convertOneDefaultImpl[T: WeakTypeTag](t: Expr[T]): Expr[String] =
deriveCsvableImpl[T](t, c.Expr[Char](q"','"))
def buildImpl[T: WeakTypeTag](t: Expr[T], columnSeparator: Expr[Char]): Expr[Csvable[T]] =
def convertOneImpl[T: WeakTypeTag](t: Expr[T], columnSeparator: Expr[Char]): Expr[String] =
deriveCsvableImpl[T](t, columnSeparator)
def convertImpl[T: WeakTypeTag](ts: Expr[List[T]], columnSeparator: Expr[Char]): Expr[String] =
......@@ -69,7 +69,7 @@ class DeriveCsvableBuilder(override val c: whitebox.Context) extends AbstractMac
def convertDefaultImpl[T: WeakTypeTag](ts: Expr[List[T]]): Expr[String] =
deriveFullCsvableImpl[T](ts, c.Expr[Char](q"','"))
def writeToFileImpl[T: WeakTypeTag](ts: Expr[List[T]], file: Expr[File]): Expr[Boolean] =
def convertToFileImpl[T: WeakTypeTag](ts: Expr[List[T]], file: Expr[File]): Expr[Boolean] =
deriveFullIntoFileCsvableImpl[T](ts, file, c.Expr[Char](q"','"))
private def deriveBuilderApplyImpl[T: WeakTypeTag]: Expr[CsvableBuilder[T]] = {
......@@ -104,7 +104,7 @@ class DeriveCsvableBuilder(override val c: whitebox.Context) extends AbstractMac
..${getAnnoClassObject[T](customTrees, columnSeparator)}
$packageName.FileUtils.writer($file, $ts.map { ($innerTName: $clazzName) =>
$csvableInstanceTermName.$innerTmpTermName = $innerTName
$csvableInstanceTermName.toCsvString
$csvableInstanceTermName._toCsvString($innerTName)
}
)
"""
......@@ -121,7 +121,7 @@ class DeriveCsvableBuilder(override val c: whitebox.Context) extends AbstractMac
..${getAnnoClassObject[T](customTrees, columnSeparator)}
$ts.map { ($innerTName: $clazzName) =>
$csvableInstanceTermName.$innerTmpTermName = $innerTName
$csvableInstanceTermName.toCsvString
$csvableInstanceTermName._toCsvString($innerTName)
}.mkString("\n")
"""
exprPrintTree[String](force = false, tree)
......@@ -139,14 +139,14 @@ class DeriveCsvableBuilder(override val c: whitebox.Context) extends AbstractMac
val fields = ${clazzName.toTermName}.unapply($funcArgsTempTermName).orNull
if (null == fields) "" else ${fieldsToString[T](funcArgsTempTermName, customTrees)}.mkString($separator.toString)
}
override def toCsvString: String = toCsv($annoClassName.$innerTmpTermName)
override def _toCsvString(t: $clazzName): String = toCsv($annoClassName.$innerTmpTermName)
}
final lazy private val $csvableInstanceTermName = $annoClassName
"""
}
private def deriveCsvableImpl[T: WeakTypeTag](t: Expr[T], columnSeparator: Expr[Char]): Expr[Csvable[T]] = {
private def deriveCsvableImpl[T: WeakTypeTag](t: Expr[T], columnSeparator: Expr[Char]): Expr[String] = {
val clazzName = resolveClazzTypeName[T]
val (customTrees, preTrees) = getCustomPreTress
val annoClassName = TermName(csvableImplClassNamePrefix + MacroCache.getIdentityId)
......@@ -158,14 +158,14 @@ class DeriveCsvableBuilder(override val c: whitebox.Context) extends AbstractMac
object $annoClassName extends $packageName.Csvable[$clazzName] {
final private val $innerTmpTermName = $t
override def toCsvString: String = {
override def _toCsvString(t: $clazzName): String = {
val fields = ${clazzName.toTermName}.unapply($innerTmpTermName).orNull
if (null == fields) "" else ${fieldsToString[T](innerTmpTermName, customTrees)}.mkString($separator.toString)
}
}
$annoClassName
$annoClassName._toCsvString($t)
"""
exprPrintTree[Csvable[T]](force = false, tree)
exprPrintTree[String](force = false, tree)
}
// scalafmt: { maxColumn = 400 }
......
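For orientation, convertOneImpl now ends by calling `_toCsvString` on the generated object instead of returning the object itself. A simplified sketch of the shape it produces for a `Dimension2` value `t` (names such as `_CsvAnno$1`/`_tt` follow the prefix-plus-identity-id scheme above; the per-field conversion list is elided as `fieldStrings`):

object _CsvAnno$1 extends org.bitlap.csv.core.Csvable[Dimension2] {
  final private val _tt = t
  override def _toCsvString(t: Dimension2): String = {
    val fields = Dimension2.unapply(_tt).orNull
    // fieldStrings stands in for the generated per-field renderings (setField overrides or Csvable instances)
    if (null == fields) "" else fieldStrings.mkString(','.toString)
  }
}
_CsvAnno$1._toCsvString(t)   // the macro's final expression: the CSV line itself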
......@@ -21,7 +21,7 @@
package org.bitlap.csv.core.macros
import org.bitlap.csv.core.{ Scalable, ScalableBuilder }
import org.bitlap.csv.core.ScalableBuilder
import java.io.InputStream
import scala.collection.mutable
......@@ -57,7 +57,7 @@ class DeriveScalableBuilder(override val c: whitebox.Context) extends AbstractMa
def applyImpl[T: WeakTypeTag]: Expr[ScalableBuilder[T]] =
deriveBuilderApplyImpl[T]
def buildImpl[T: WeakTypeTag](line: Expr[String], columnSeparator: Expr[Char]): Expr[Scalable[T]] = {
def convertOneImpl[T: WeakTypeTag](line: Expr[String], columnSeparator: Expr[Char]): Expr[Option[T]] = {
val clazzName = resolveClazzTypeName[T]
deriveScalableImpl[T](clazzName, line, columnSeparator)
}
......@@ -72,12 +72,12 @@ class DeriveScalableBuilder(override val c: whitebox.Context) extends AbstractMa
deriveFullScalableImpl[T](clazzName, lines, c.Expr[Char](q"','"))
}
def buildDefaultImpl[T: WeakTypeTag](line: Expr[String]): Expr[Scalable[T]] = {
def convertOneDefaultImpl[T: WeakTypeTag](line: Expr[String]): Expr[Option[T]] = {
val clazzName = resolveClazzTypeName[T]
deriveScalableImpl[T](clazzName, line, c.Expr[Char](q"','"))
}
def readFromFileImpl[T: WeakTypeTag](file: Expr[InputStream], charset: Expr[String]): Expr[List[Option[T]]] = {
def convertFromFileImpl[T: WeakTypeTag](file: Expr[InputStream], charset: Expr[String]): Expr[List[Option[T]]] = {
val clazzName = resolveClazzTypeName[T]
deriveFullFromFileScalableImpl[T](clazzName, file, charset, c.Expr[Char](q"','"))
}
......@@ -114,7 +114,7 @@ class DeriveScalableBuilder(override val c: whitebox.Context) extends AbstractMa
..${getAnnoClassObject[T](clazzName, columnSeparator)}
$packageName.FileUtils.reader($file, $charset).map { ($innerLName: String) =>
$scalableInstanceTermName.$innerTempTermName = ${TermName(innerLName.toString())}
$scalableInstanceTermName.toScala
$scalableInstanceTermName._toScala($innerLName)
}
"""
exprPrintTree[List[Option[T]]](force = false, tree)
......@@ -129,7 +129,7 @@ class DeriveScalableBuilder(override val c: whitebox.Context) extends AbstractMa
..${getAnnoClassObject[T](clazzName, columnSeparator)}
$lines.map { ($innerLName: String) =>
$scalableInstanceTermName.$innerTempTermName = ${TermName(innerLName.toString())}
$scalableInstanceTermName.toScala
$scalableInstanceTermName._toScala($innerLName)
}
"""
exprPrintTree[List[Option[T]]](force = false, tree)
......@@ -148,7 +148,7 @@ class DeriveScalableBuilder(override val c: whitebox.Context) extends AbstractMa
}
// scalafmt: { maxColumn = 400 }
private def deriveScalableImpl[T: WeakTypeTag](clazzName: TypeName, line: Expr[String], columnSeparator: Expr[Char]): Expr[Scalable[T]] = {
private def deriveScalableImpl[T: WeakTypeTag](clazzName: TypeName, line: Expr[String], columnSeparator: Expr[Char]): Expr[Option[T]] = {
val annoClassName = TermName(scalableImplClassNamePrefix + MacroCache.getIdentityId)
// NOTE: preTrees must be at the same level as Scalable
val tree =
......@@ -158,9 +158,9 @@ class DeriveScalableBuilder(override val c: whitebox.Context) extends AbstractMa
final lazy private val $innerColumnFuncTermName = () => $packageName.StringUtils.splitColumns($line, $columnSeparator)
..${scalableBody[T](clazzName, innerColumnFuncTermName)}
}
$annoClassName
$annoClassName._toScala($line)
"""
exprPrintTree[Scalable[T]](force = false, tree)
exprPrintTree[Option[T]](force = false, tree)
}
// scalafmt: { maxColumn = 400 }
......@@ -226,11 +226,14 @@ class DeriveScalableBuilder(override val c: whitebox.Context) extends AbstractMa
q"$packageName.Scalable[$fieldTypeName]._toScala($columnValues).getOrElse(false)"
case t if t =:= typeOf[Long] =>
q"$packageName.Scalable[$fieldTypeName]._toScala($columnValues).getOrElse(0L)"
case _ =>
q"$packageName.Scalable[$fieldTypeName]._toScala($columnValues).getOrElse(null)"
}
}
}
}
q"override def toScala: Option[$clazzName] = Option(${TermName(clazzName.decodedName.toString)}(..$fields))"
// the input argument is not used; fields are read from the pre-split columns
q"override def _toScala(column: String): Option[$clazzName] = Option(${TermName(clazzName.decodedName.toString)}(..$fields))"
}
}
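On the read side the pattern is symmetric: convertOneImpl expands to an anonymous Scalable object and immediately calls `_toScala` on it. A simplified sketch for one `Metric2` line (compare the expanded tree printed in CsvableAndScalableTest below; `line` is the input expression, trailing fields elided, names illustrative):

object _ScalaAnno$1 extends org.bitlap.csv.core.Scalable[Metric2] {
  private val _columns = () => org.bitlap.csv.core.StringUtils.splitColumns(line, ',')
  override def _toScala(column: String): Option[Metric2] = Option(
    Metric2(
      org.bitlap.csv.core.Scalable[Long]._toScala(_columns()(0)).getOrElse(0L),
      org.bitlap.csv.core.Scalable[Int]._toScala(_columns()(1)).getOrElse(0)
      // ...remaining fields decoded the same way...
    )
  )
}
_ScalaAnno$1._toScala(line)   // the macro's final expression: Option[Metric2]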
......@@ -74,8 +74,7 @@ class CsvableAndScalableTest extends AnyFlatSpec with Matchers {
kvs.map(kv => Dimension3(kv._1, kv._2)).toList
}
)
.build(csv, ',')
.toScala
.convert(csv, ',')
)
println(metrics)
......@@ -90,8 +89,7 @@ class CsvableAndScalableTest extends AnyFlatSpec with Matchers {
(ds: List[Dimension3]) =>
s"""\"{${ds.map(kv => s"""\"\"${kv.key}\"\":\"\"${kv.value}\"\"""").mkString(",")}}\""""
)
.build(metric.get, ',')
.toCsvString
.convert(metric.get, ',')
)
println(csv)
......@@ -118,8 +116,7 @@ class CsvableAndScalableTest extends AnyFlatSpec with Matchers {
kvs.map(kv => Dimension3(kv._1, kv._2)).toSeq
}
)
.build(csv, ',')
.toScala
.convert(csv, ',')
)
println(metrics)
......@@ -137,8 +134,7 @@ class CsvableAndScalableTest extends AnyFlatSpec with Matchers {
_.dimensions,
dims => StringUtils.extractJsonValues[Dimension3](dims)((k, v) => Dimension3(k, v))
)
.build(csv)
.toScala
.convert(csv)
)
println(metrics.toList)
......@@ -154,8 +150,7 @@ class CsvableAndScalableTest extends AnyFlatSpec with Matchers {
_.dimensions,
dims => StringUtils.extractJsonValues[Dimension3](dims)((k, v) => Dimension3(k, v))
)
.build(line)
.toScala
.convert(line)
}
println(metrics)
......@@ -214,7 +209,7 @@ class CsvableAndScalableTest extends AnyFlatSpec with Matchers {
var _line: String = _;
private val _columns = (() => _root_.org.bitlap.csv.core.StringUtils.splitColumns(_ScalaAnno$1._line, ','));
override def toScala: Option[Metric2] = Option(
override def _toScala(column:String): Option[Metric2] = Option(
Metric2(
_root_.org.bitlap.csv.core.Scalable[Long]._toScala(_columns()(0)).getOrElse(0L),
_root_.org.bitlap.csv.core.Scalable[Int]._toScala(_columns()(1)).getOrElse(0),
......@@ -232,7 +227,7 @@ class CsvableAndScalableTest extends AnyFlatSpec with Matchers {
.toList
.map(((_l: String) => {
_scalableInstance._line = _l;
_scalableInstance.toScala
_scalableInstance._toScala(_l)
}))
metrics.foreach(println)
......@@ -269,7 +264,7 @@ class CsvableAndScalableTest extends AnyFlatSpec with Matchers {
.mkString(','.toString)
});
override def toCsvString: String = toCsv(_CsvAnno$2._tt)
override def _toCsvString(t:Metric2): String = toCsv(_CsvAnno$2._tt)
};
lazy val _csvableInstance = _CsvAnno$2;
val csv = metrics
......@@ -277,7 +272,7 @@ class CsvableAndScalableTest extends AnyFlatSpec with Matchers {
.map[org.bitlap.csv.core.test.Metric2](((x$4: Option[org.bitlap.csv.core.test.Metric2]) => x$4.get))
.map(((_t: Metric2) => {
_csvableInstance._tt = _t;
_csvableInstance.toCsvString
_csvableInstance._toCsvString(_t)
}))
.mkString("\n")
......@@ -291,7 +286,7 @@ class CsvableAndScalableTest extends AnyFlatSpec with Matchers {
_.dimensions,
dims => StringUtils.extractJsonValues[Dimension3](dims)((k, v) => Dimension3(k, v))
)
.readFrom(ClassLoader.getSystemResourceAsStream("simple_data.csv"), "utf-8")
.convertFrom(ClassLoader.getSystemResourceAsStream("simple_data.csv"), "utf-8")
println(metrics)
assert(metrics.nonEmpty)
......@@ -302,7 +297,7 @@ class CsvableAndScalableTest extends AnyFlatSpec with Matchers {
_.dimensions,
ds => s"""\"{${ds.map(kv => s"""\"\"${kv.key}\"\":\"\"${kv.value}\"\"""").mkString(",")}}\""""
)
.writeTo(metrics.filter(_.isDefined).map(_.get), file)
.convertTo(metrics.filter(_.isDefined).map(_.get), file)
file.delete()
}
......
......@@ -33,9 +33,9 @@ class CustomConverterBuilderTest extends AnyFlatSpec with Matchers {
"CustomConverterBuilder1" should "ok" in {
val line = "abc,cdf,d,12,2,false,0.1,0.23333"
val dimension = ScalableBuilder[Dimension2].build(line, ',').toScala
val dimension = ScalableBuilder[Dimension2].convert(line, ',')
assert(dimension.toString == "Some(Dimension2(abc,Some(cdf),d,12,2,false,0.1,0.23333))")
val csv = CsvableBuilder[Dimension2].build(dimension.get, ',').toCsvString
val csv = CsvableBuilder[Dimension2].convert(dimension.get, ',')
println(csv)
assert(csv == line)
}
......@@ -44,8 +44,7 @@ class CustomConverterBuilderTest extends AnyFlatSpec with Matchers {
val line = """abc,"{""a"":""b"",""c"":""d""}",d,12,2,false,0.1,0.23333"""
val dimension1 = ScalableBuilder[Dimension2]
.setField(_.c, _ => 12L)
.build(line, ',')
.toScala
.convert(line, ',')
println(dimension1)
assert(dimension1.toString == "Some(Dimension2(abc,Some({\"a\":\"b\",\"c\":\"d\"}),d,12,2,false,0.1,0.23333))")
......@@ -53,8 +52,7 @@ class CustomConverterBuilderTest extends AnyFlatSpec with Matchers {
val csv = CsvableBuilder[Dimension2]
.setField[Char](_.d, _ => "????????")
.setField[Option[String]](_.value, js => s"""\"${js.get.replace("\"", "\"\"")}\"""")
.build(dimension1.get, ',')
.toCsvString
.convert(dimension1.get, ',')
println(csv)
assert(csv == "abc,\"{\"\"a\"\":\"\"b\"\",\"\"c\"\":\"\"d\"\"}\",????????,12,2,false,0.1,0.23333")
......@@ -64,19 +62,16 @@ class CustomConverterBuilderTest extends AnyFlatSpec with Matchers {
val line = """abc,"{""a"":""b"",""c"":""d""}",d,12,2,false,0.1,0.23333"""
val d = ScalableBuilder[Dimension2]
.setField(_.value, _ => None)
.build(line, ',')
.toScala
.convert(line, ',')
assert(d.toString == "Some(Dimension2(abc,None,d,12,2,false,0.1,0.23333))")
val d2 = ScalableBuilder[Dimension2]
.setField(_.value, _ => None)
.build("""abc,"{""a"":""b"",""c"":""d""}",d,12,2,false,0.1,0.23333""", ',')
.toScala
.convert("""abc,"{""a"":""b"",""c"":""d""}",d,12,2,false,0.1,0.23333""", ',')
assert(d2.toString == "Some(Dimension2(abc,None,d,12,2,false,0.1,0.23333))")
val e = ScalableBuilder[Dimension2]
.build(line, ',')
.toScala
.convert(line, ',')
println(e)
assert(e.toString == "Some(Dimension2(abc,Some({\"a\":\"b\",\"c\":\"d\"}),d,12,2,false,0.1,0.23333))")
......@@ -85,20 +80,17 @@ class CustomConverterBuilderTest extends AnyFlatSpec with Matchers {
"CustomConverterBuilder4" should "ok when using toCsvString" in {
val e = Dimension2("1", Some("hello"), 'c', 1L, 1, false, 0.1f, 0.2)
val dimension1 = CsvableBuilder[Dimension2]
.build(e, ',')
.toCsvString
.convert(e, ',')
assert(dimension1 == "1,hello,c,1,1,false,0.1,0.2")
val dimension2 = CsvableBuilder[Dimension2]
.setField[Option[String]](_.value, _ => "hello world")
.build(e, '*')
.toCsvString
.convert(e, '*')
assert(dimension2 == "1*hello world*c*1*1*false*0.1*0.2")
val dimension3 = CsvableBuilder[Dimension2]
.setField[Option[String]](_.value, _ => "hello world")
.build(Dimension2("1", Some("hello"), 'c', 1L, 1, false, 0.1f, 0.2), ',')
.toCsvString
.convert(Dimension2("1", Some("hello"), 'c', 1L, 1, false, 0.1f, 0.2), ',')
assert(dimension3 == "1,hello world,c,1,1,false,0.1,0.2")
}
......@@ -108,11 +100,11 @@ class CustomConverterBuilderTest extends AnyFlatSpec with Matchers {
Dimension2("2", Some("hello bitlap"), 'c', 1L, 1, false, 0.1f, 0.2)
)
val dimension1 = es.map(e => CsvableBuilder[Dimension2].build(e, ',').toCsvString)
val dimension1 = es.map(e => CsvableBuilder[Dimension2].convert(e, ','))
assert(dimension1 == List("1,hello,c,1,1,true,0.1,0.2", "2,hello bitlap,c,1,1,false,0.1,0.2"))
val csv = List("1,hello,c,1,1,true,0.1,0.2", "2,hello bitlap,c,1,1,false,0.1,0.2")
val scala = csv.map(f => ScalableBuilder[Dimension2].build(f, ',').toScala)
val scala = csv.map(f => ScalableBuilder[Dimension2].convert(f, ','))
assert(
scala.toString() == "List(Some(Dimension2(1,Some(hello),c,1,1,true,0.1,0.2)), Some(Dimension2(2,Some(hello bitlap),c,1,1,false,0.1,0.2)))"
)
......@@ -125,32 +117,32 @@ class CustomConverterBuilderTest extends AnyFlatSpec with Matchers {
"CustomConverterBuilder6" should "fail when find List or Seq but without using setFiled" in {
"""
|ScalableBuilder[Metric2].build(csv, ',').toScala
|ScalableBuilder[Metric2].convert(csv, ',')
|""".stripMargin shouldNot compile
"""
|CsvableBuilder[Metric2].build(metric, ',').toCsvString
|CsvableBuilder[Metric2].convert(metric, ',')
|""".stripMargin shouldNot compile
}
"CustomConverterBuilder7" should "fail when find List or Seq but without using setFiled" in {
"""
|ScalableBuilder[Metric2].build(csv, ',').toScala
|ScalableBuilder[Metric2].convert(csv, ',')
|""".stripMargin shouldNot compile
"""
|CsvableBuilder[Metric2].build(metric2, ',').toCsvString
|CsvableBuilder[Metric2].convert(metric2, ',')
|""".stripMargin shouldNot compile
}
"CustomConverterBuilder8" should "ok when not pass columnSeparator" in {
val e = Dimension2("1", Some("hello"), 'c', 1L, 1, false, 0.1f, 0.2)
val csv = CsvableBuilder[Dimension2].build(e).toCsvString
val csv = CsvableBuilder[Dimension2].convert(e)
println(csv)
assert(csv == "1,hello,c,1,1,false,0.1,0.2")
val scala = ScalableBuilder[Dimension2].build(csv).toScala
val scala = ScalableBuilder[Dimension2].convert(csv)
println(scala)
assert(scala.get == e)
}
......@@ -159,7 +151,7 @@ class CustomConverterBuilderTest extends AnyFlatSpec with Matchers {
"""
|case class Test(i:Int)(j:String)
| val t = Test(1)("hello")
| CsvableBuilder[Test].build(t).toCsvString
| CsvableBuilder[Test].convert(t)
|""".stripMargin shouldNot compile
}
}