Unverified commit ab74b018, authored by 梦境迷离, committed by GitHub

Fix some minor issues (#256)

Parent 049effd2
......@@ -34,7 +34,7 @@
- Encapsulation of common macro-operation APIs.
- Object converter (zero dependencies, type-safe).
- Convert JDBC `ResultSet` objects to case classes
- JDBC `ResultSet` converter
```scala
"org.bitlap" %% "smt-common" % "<VERSION>"
......@@ -42,7 +42,7 @@
## csv
- CSV/TSV file parser (zero dependencies, type-safe).
- CSV/TSV file read/write utilities (zero dependencies, type-safe).
```scala
"org.bitlap" %% "smt-csv" % "<VERSION>"
......@@ -50,7 +50,7 @@
## csv-derive
- Automatically derive `Converter` instances for Scala `case class`es
- Automatically derive CSV/TSV file read/write utilities
```scala
"org.bitlap" %% "smt-csv-derive" % "<VERSION>"
......
/*
* Copyright (c) 2022 bitlap
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.bitlap.tools
/** Log level for the `elapsed` annotation.
*
* @author
* 梦境迷离
* @since 2021/8/7
* @version 1.0
*/
object LogLevel extends Enumeration {
type LogLevel = Value
val INFO, WARN, DEBUG = Value
private[bitlap] def getLogLevel(shortType: String): LogLevel = {
// TODO: find a better way to do this lookup
val tpe1 = s"$PACKAGE.$shortType" // LogLevel.INFO
val tpe2 = s"$PACKAGE.LogLevel.$shortType" // INFO
val v = LogLevel.values.find { p =>
s"$PACKAGE.LogLevel.${p.toString}" == tpe1 ||
s"$PACKAGE.LogLevel.${p.toString}" == tpe2 || s"$PACKAGE.LogLevel.${p.toString}" == shortType
}.get.toString
LogLevel.withName(v)
}
}
......@@ -21,10 +21,9 @@
package org.bitlap.tools
import org.bitlap.tools.LogLevel.LogLevel
import org.bitlap.tools.internal.elapsedMacro.ElapsedProcessor
import scala.annotation.{ compileTimeOnly, StaticAnnotation }
import scala.annotation._
/** Annotation to record a method's elapsed time.
*
......@@ -38,6 +37,6 @@ import scala.annotation.{ compileTimeOnly, StaticAnnotation }
* @version 1.0
*/
@compileTimeOnly("enable macro to expand macro annotations")
final class elapsed(limit: Int, logLevel: LogLevel) extends StaticAnnotation {
final class elapsed(limit: Int, logLevel: String) extends StaticAnnotation {
def macroTransform(annottees: Any*): Any = macro ElapsedProcessor.impl
}
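For reference, usage with the new string-based parameter looks like the sketch below. The class, method, and values are illustrative; per the macro above, only `info`, `debug`, and `warn` are accepted, and any other level aborts compilation.

```scala
import org.bitlap.tools.elapsed

class OrderService {
  // Records the method's elapsed time; logs through an slf4j `log` member when one
  // exists in the enclosing class, otherwise falls back to println.
  @elapsed(limit = 1, logLevel = "warn")
  def placeOrder(id: String): String = {
    Thread.sleep(5)
    s"order-$id accepted"
  }
}
```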
......@@ -21,9 +21,6 @@
package org.bitlap.tools.internal
import org.bitlap.tools.LogLevel.LogLevel
import org.bitlap.tools.{ LogLevel, PACKAGE }
import scala.reflect.macros.whitebox
/**
......@@ -47,16 +44,9 @@ object elapsedMacro {
private lazy val start: c.universe.TermName = TermName("$elapsedBegin")
private lazy val valDef: c.universe.TermName = TermName("$elapsed")
private def getLogLevel(logLevel: Tree): LogLevel =
if (logLevel.children.exists(t => t.toString().contains(PACKAGE))) {
evalTree(logLevel)
} else {
LogLevel.getLogLevel(logLevel.toString())
}
private val extractOptions: (Int, LogLevel) = c.prefix.tree match {
private val extractOptions: (Int, String) = c.prefix.tree match {
case q"new elapsed(limit=$limit, logLevel=$logLevel)" =>
(evalTree(limit.asInstanceOf[Tree]), getLogLevel(logLevel.asInstanceOf[Tree]))
(evalTree(limit.asInstanceOf[Tree]), evalTree[String](logLevel.asInstanceOf[Tree]))
case _ => c.abort(c.enclosingPosition, ErrorMessage.UNEXPECTED_PATTERN)
}
......@@ -74,10 +64,15 @@ object elapsedMacro {
if (log.isEmpty) { // if there is no slf4j log, print it to the console
getLog(classNameAndMethodName, q"_root_.scala.Predef.println")
} else {
extractOptions._2 match {
case LogLevel.INFO => getLog(classNameAndMethodName, q"${log.get}.info")
case LogLevel.DEBUG => getLog(classNameAndMethodName, q"${log.get}.debug")
case LogLevel.WARN => getLog(classNameAndMethodName, q"${log.get}.warn")
extractOptions._2.toLowerCase match {
case "info" => getLog(classNameAndMethodName, q"${log.get}.info")
case "debug" => getLog(classNameAndMethodName, q"${log.get}.debug")
case "warn" => getLog(classNameAndMethodName, q"${log.get}.warn")
case _ =>
c.abort(
c.enclosingPosition,
s"${extractOptions._2.toLowerCase} is not in the supported list: info,debug,warn"
)
}
}
}
......
......@@ -53,7 +53,7 @@ object equalsAndHashCodeMacro {
*/
private def getInternalFieldsTermNameExcludeLocal(annotteeClassDefinitions: Seq[Tree]): Seq[TermName] = {
if (annotteeClassDefinitions.exists(f => isNotLocalClassMember(f))) {
c.info(c.enclosingPosition, s"There is a non private class definition inside the class", true)
c.info(c.enclosingPosition, s"There is a non private class definition inside the class", force = true)
}
getClassMemberValDefs(annotteeClassDefinitions)
.filter(p =>
......
......@@ -21,9 +21,8 @@
package org.bitlap.tools.internal
import org.bitlap.tools.logs._
import org.bitlap.tools.logs.LogType._
import org.bitlap.tools.logs.{ LogArgument, LogType }
import org.bitlap.tools.{ logs, PACKAGE }
import scala.reflect.macros.whitebox
......@@ -38,24 +37,16 @@ object logMacro {
import c.universe._
private val extractOptions: logs.LogType.Value = c.prefix.tree match {
private val extractOptions: String = c.prefix.tree match {
case q"new log(logType=$logType)" =>
val tpe = getLogType(logType.asInstanceOf[Tree])
tpe
evalTree(logType.asInstanceOf[Tree])
case q"new log($logType)" =>
val tpe = getLogType(logType.asInstanceOf[Tree])
tpe
evalTree(logType.asInstanceOf[Tree])
case q"new log()" => LogType.JLog
case _ => c.abort(c.enclosingPosition, ErrorMessage.UNEXPECTED_PATTERN)
case _ =>
c.abort(c.enclosingPosition, s"${ErrorMessage.UNEXPECTED_PATTERN}")
}
private def getLogType(logType: Tree): LogType =
if (logType.children.exists(t => t.toString().contains(PACKAGE))) {
evalTree(logType)
} else {
LogType.getLogType(logType.toString())
}
private def logTree(annottees: Seq[c.universe.Expr[Any]]): c.universe.Tree = {
val buildArg = (name: Name) => LogArgument(name.toTermName.decodedName.toString, isClass = true)
(annottees.map(_.tree) match {
......
......@@ -30,8 +30,6 @@ import scala.reflect.macros.whitebox
*/
object toStringMacro {
private final case class Argument(includeInternalFields: Boolean, includeFieldNames: Boolean, callSuper: Boolean)
class ToStringProcessor(override val c: whitebox.Context) extends AbstractMacroProcessor(c) {
import c.universe._
......
......@@ -21,10 +21,9 @@
package org.bitlap.tools
import org.bitlap.tools.logs.LogType
import org.bitlap.tools.internal.logMacro
import scala.annotation.{ compileTimeOnly, StaticAnnotation }
import scala.annotation._
/** Annotation to generate a logger.
*
......@@ -36,6 +35,6 @@ import scala.annotation.{ compileTimeOnly, StaticAnnotation }
* @version 1.0
*/
@compileTimeOnly("enable macro to expand macro annotations")
final class log(logType: LogType.LogType = LogType.JLog) extends StaticAnnotation {
final class log(logType: String = "JLog") extends StaticAnnotation {
def macroTransform(annottees: Any*): Any = macro logMacro.LogProcessor.impl
}
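A minimal usage sketch of the string-based `logType` (the class and message are illustrative; supported names are the ones defined in `LogType`, matched case-insensitively):

```scala
import org.bitlap.tools.log

// The annotation injects a `log` member backed by the chosen implementation.
@log(logType = "Slf4j")
class UserService(val name: String) {
  log.info(s"user service created for $name")
}
```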
......@@ -21,40 +21,35 @@
package org.bitlap.tools.logs
import org.bitlap.tools.PACKAGE
import org.bitlap.tools.logs.extension.{ ScalaLoggingLazyImpl, ScalaLoggingStrictImpl }
import org.bitlap.tools.logs.impl.{ JLogImpl, Log4J2Impl, Slf4jImpl }
import org.bitlap.tools.logs.extension._
import org.bitlap.tools.logs.impl._
/** @author
* 梦境迷离
* @version 1.0,2022/3/29
*/
object LogType extends Enumeration {
object LogType {
type LogType = Value
val JLog, Log4j2, Slf4j, ScalaLoggingLazy, ScalaLoggingStrict = Value
val JLog = "JLog"
val Log4j2 = "Log4j2"
val Slf4j = "Slf4j"
val ScalaLoggingLazy = "ScalaLoggingLazy"
val ScalaLoggingStrict = "ScalaLoggingStrict"
private lazy val types: Map[LogType, BaseLog] = Map(
private lazy val types: Map[String, BaseLog] = Map(
JLogImpl.`type` -> JLogImpl,
Log4J2Impl.`type` -> Log4J2Impl,
Slf4jImpl.`type` -> Slf4jImpl,
ScalaLoggingStrictImpl.`type` -> ScalaLoggingStrictImpl,
ScalaLoggingLazyImpl.`type` -> ScalaLoggingLazyImpl
)
def getLogImpl(logType: LogType): BaseLog =
types.getOrElse(logType, default = throw new Exception(s"Not support log type: $logType"))
// TODO not use Enumeration
def getLogType(shortType: String): LogType = {
val tpe1 = s"$PACKAGE.logs.$shortType" // LogType.JLog
val tpe2 = s"$PACKAGE.logs.LogType.$shortType" // JLog
val v = LogType.values.find { p =>
s"$PACKAGE.logs.LogType.${p.toString}" == tpe1 ||
s"$PACKAGE.logs.LogType.${p.toString}" == tpe2 || s"$PACKAGE.logs.LogType.${p.toString}" == shortType
}
.getOrElse(throw new Exception(s"Not support log type: $shortType"))
.toString
LogType.withName(v)
}
).map(kv => kv._1.toLowerCase -> kv._2)
val values = types.keySet
def getLogImpl(logType: String): BaseLog =
types.getOrElse(
logType.toLowerCase,
default = throw new Exception(s"$logType is not in the supported list: ${values.mkString(",")}")
)
}
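Since `BaseLog` is `private[tools]`, `getLogImpl` is called from inside the package; a small sketch of the new lookup semantics (the object and key names are illustrative):

```scala
package org.bitlap.tools.internal // BaseLog is private[tools], so callers live inside this package

import org.bitlap.tools.logs.LogType

private[tools] object LogTypeLookupSketch {
  def demo(): Unit = {
    // Keys are lowercased on both sides, so the lookup is case-insensitive.
    val impl = LogType.getLogImpl("SLF4J") // same instance as getLogImpl("Slf4j")
    println(impl.`type`)                   // prints "Slf4j"

    // Unknown names fail fast and report the supported set:
    // LogType.getLogImpl("logback")       // Exception: "logback is not in the supported list: ..."
  }
}
```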
......@@ -21,8 +21,7 @@
package org.bitlap.tools.logs.extension
import org.bitlap.tools.logs.LogType.LogType
import org.bitlap.tools.logs.{ BaseLog, LogArgument, LogType }
import org.bitlap.tools.logs._
import scala.reflect.macros.whitebox
......@@ -35,7 +34,7 @@ import scala.reflect.macros.whitebox
*/
object ScalaLoggingLazyImpl extends BaseLog {
override val `type`: LogType = LogType.ScalaLoggingLazy
override val `type`: String = LogType.ScalaLoggingLazy
override def getTemplate(c: whitebox.Context)(logArgument: LogArgument): c.Tree = {
import c.universe._
......
......@@ -21,8 +21,7 @@
package org.bitlap.tools.logs.extension
import org.bitlap.tools.logs.LogType.LogType
import org.bitlap.tools.logs.{ BaseLog, LogArgument, LogType }
import org.bitlap.tools.logs._
import scala.reflect.macros.whitebox
......@@ -35,7 +34,7 @@ import scala.reflect.macros.whitebox
*/
object ScalaLoggingStrictImpl extends BaseLog {
override val `type`: LogType = LogType.ScalaLoggingStrict
override val `type`: String = LogType.ScalaLoggingStrict
override def getTemplate(c: whitebox.Context)(logArgument: LogArgument): c.Tree = {
import c.universe._
......
......@@ -21,8 +21,7 @@
package org.bitlap.tools.logs.impl
import org.bitlap.tools.logs.{ BaseLog, LogArgument, LogType }
import org.bitlap.tools.logs.LogType.LogType
import org.bitlap.tools.logs._
import scala.reflect.macros.whitebox
......@@ -32,17 +31,17 @@ import scala.reflect.macros.whitebox
*/
object JLogImpl extends BaseLog {
override val `type`: LogType = LogType.JLog
override val `type`: String = LogType.JLog
override def getTemplate(c: whitebox.Context)(logArgument: LogArgument): c.Tree = {
import c.universe._
if (logArgument.isClass) {
q"""@transient private final val log: java.util.logging.Logger = java.util.logging.Logger.getLogger(classOf[${TypeName(
logArgument.classNameStr
logArgument.className
)}].getName)"""
} else {
q"""@transient private final val log: java.util.logging.Logger = java.util.logging.Logger.getLogger(${TermName(
logArgument.classNameStr
logArgument.className
)}.getClass.getName)"""
}
}
......
......@@ -21,8 +21,7 @@
package org.bitlap.tools.logs.impl
import org.bitlap.tools.logs.{ BaseLog, LogArgument, LogType }
import org.bitlap.tools.logs.LogType.LogType
import org.bitlap.tools.logs._
import scala.reflect.macros.whitebox
......@@ -32,17 +31,17 @@ import scala.reflect.macros.whitebox
*/
object Log4J2Impl extends BaseLog {
override val `type`: LogType = LogType.Log4j2
override val `type`: String = LogType.Log4j2
override def getTemplate(c: whitebox.Context)(logArgument: LogArgument): c.Tree = {
import c.universe._
if (logArgument.isClass) {
q"""@transient private final val log: org.apache.logging.log4j.Logger = org.apache.logging.log4j.LogManager.getLogger(classOf[${TypeName(
logArgument.classNameStr
logArgument.className
)}].getName)"""
} else {
q"""@transient private final val log: org.apache.logging.log4j.Logger = org.apache.logging.log4j.LogManager.getLogger(${TermName(
logArgument.classNameStr
logArgument.className
)}.getClass.getName)"""
}
}
......
......@@ -21,8 +21,7 @@
package org.bitlap.tools.logs.impl
import org.bitlap.tools.logs.{ BaseLog, LogArgument, LogType }
import org.bitlap.tools.logs.LogType.LogType
import org.bitlap.tools.logs._
import scala.reflect.macros.whitebox
......@@ -32,17 +31,17 @@ import scala.reflect.macros.whitebox
*/
object Slf4jImpl extends BaseLog {
override val `type`: LogType = LogType.Slf4j
override val `type`: String = LogType.Slf4j
override def getTemplate(c: whitebox.Context)(logArgument: LogArgument): c.Tree = {
import c.universe._
if (logArgument.isClass) {
q"""@transient private final val log: org.slf4j.Logger = org.slf4j.LoggerFactory.getLogger(classOf[${TypeName(
logArgument.classNameStr
logArgument.className
)}])"""
} else {
q"""@transient private final val log: org.slf4j.Logger = org.slf4j.LoggerFactory.getLogger(${TermName(
logArgument.classNameStr
logArgument.className
)}.getClass)"""
}
}
......
......@@ -21,8 +21,6 @@
package org.bitlap.tools
import org.bitlap.tools.logs.LogType.LogType
import scala.reflect.macros.whitebox
/** @author
......@@ -37,18 +35,18 @@ package object logs {
*/
private[tools] trait BaseLog {
val `type`: LogType
val `type`: String
def getTemplate(c: whitebox.Context)(logArgument: LogArgument): c.Tree
}
/** @author
* 梦境迷离
* @param classNameStr
* @param className
* The class name.
* @param isClass
* Whether the annottee is a class (rather than an object).
*/
private[tools] case class LogArgument(classNameStr: String, isClass: Boolean)
private[tools] final case class LogArgument(className: String, isClass: Boolean)
}
......@@ -40,22 +40,22 @@ class ElapsedTest extends AnyFlatSpec with Matchers {
// Duration and TimeUnit must use fully qualified class names
"""
| class A {
| @elapsed(limit = 1, logLevel = org.bitlap.tools.LogLevel.INFO)
| @elapsed(limit = 1, logLevel = "info")
| def i = ???
| }
|
| class B {
| @elapsed(limit = 1, logLevel = org.bitlap.tools.LogLevel.WARN)
| @elapsed(limit = 1, logLevel = "warn")
| def j = ???
| }
|
| class C {
| @elapsed(limit = 1, logLevel = org.bitlap.tools.LogLevel.DEBUG)
| @elapsed(limit = 1, logLevel = "debug")
| def j = ???
| }
|
| class D {
| @elapsed(limit = 1, logLevel = org.bitlap.tools.LogLevel.INFO)
| @elapsed(limit = 1, logLevel = "info")
| def i:String = ???
| }
| val a = new A()
......@@ -69,13 +69,13 @@ class ElapsedTest extends AnyFlatSpec with Matchers {
// Duration and TimeUnit must use fully qualified class names
"""
|class A {
| @elapsed(limit = 1, logLevel = org.bitlap.tools.LogLevel.INFO)
| @elapsed(limit = 1, logLevel = "info")
| def helloWorld: String = {
| println("hello world")
| "hello"
| }
|
| @elapsed(limit = 1, logLevel = org.bitlap.tools.LogLevel.INFO)
| @elapsed(limit = 1, logLevel = "info")
| def helloScala: String = {
| Thread.sleep(2000)
| println("hello world")
......@@ -92,18 +92,18 @@ class ElapsedTest extends AnyFlatSpec with Matchers {
// Duration and TimeUnit must use fully qualified class names
"""
| class A {
| @elapsed(limit = 1, logLevel = org.bitlap.tools.LogLevel.INFO)
| @elapsed(limit = 1, logLevel = "info")
| def helloWorld: String = {
| println("") ; println(""); ""
| }
|
| @elapsed(limit = 1, logLevel = org.bitlap.tools.LogLevel.INFO)
| @elapsed(limit = 1, logLevel = "info")
| def helloScala1: String = { println("") ; println(""); ""}
|
| @elapsed(limit = 1, logLevel = org.bitlap.tools.LogLevel.INFO)
| @elapsed(limit = 1, logLevel = "info")
| def helloScala2: String = { println("") ; println(""); "" }
|
| @elapsed(limit = 1, logLevel = org.bitlap.tools.LogLevel.INFO)
| @elapsed(limit = 1, logLevel = "info")
| def helloScala3: String = {
| val s = "hello"
| val x = "world"
......@@ -118,7 +118,7 @@ class ElapsedTest extends AnyFlatSpec with Matchers {
// Duration and TimeUnit must use fully qualified class names
"""
| class A {
| @elapsed(limit = 1, logLevel = org.bitlap.tools.LogLevel.INFO)
| @elapsed(limit = 1, logLevel = "info")
| def helloScala1: String = {
| val s = "hello"
| if (s == "hello") {
......@@ -133,7 +133,7 @@ class ElapsedTest extends AnyFlatSpec with Matchers {
| a.helloScala1
|
| class B {
| @elapsed(limit = 1, logLevel = org.bitlap.tools.LogLevel.INFO)
| @elapsed(limit = 1, logLevel = "info")
| def helloScala11: String = {
| val s = "hello"
| if (s == "hello") {
......@@ -155,7 +155,7 @@ class ElapsedTest extends AnyFlatSpec with Matchers {
@elapsed(
limit = 1,
logLevel = org.bitlap.tools.LogLevel.INFO
logLevel = "info"
)
def helloScala1: Future[String] = {
Thread.sleep(1000)
......@@ -164,7 +164,7 @@ class ElapsedTest extends AnyFlatSpec with Matchers {
@elapsed(
limit = 1,
logLevel = org.bitlap.tools.LogLevel.DEBUG
logLevel = "info"
)
def helloScala2: Future[String] = {
Thread.sleep(2000)
......@@ -175,7 +175,7 @@ class ElapsedTest extends AnyFlatSpec with Matchers {
@elapsed(
limit = 1,
logLevel = org.bitlap.tools.LogLevel.WARN
logLevel = "info"
)
def helloScala3: Future[String] = Future {
"hello world"
......@@ -187,20 +187,20 @@ class ElapsedTest extends AnyFlatSpec with Matchers {
class B {
@elapsed(
limit = 1,
logLevel = org.bitlap.tools.LogLevel.WARN
logLevel = "info"
)
def helloScala(t: String): Future[String] =
Future(t)(scala.concurrent.ExecutionContext.Implicits.global)
@elapsed(
limit = 1,
logLevel = org.bitlap.tools.LogLevel.WARN
logLevel = "warn"
)
def helloScala11(t: String): Future[String] = Future(t)(scala.concurrent.ExecutionContext.Implicits.global)
@elapsed(
limit = 1,
logLevel = org.bitlap.tools.LogLevel.INFO
logLevel = "info"
)
def helloScala2: String = {
val s = Future("")(scala.concurrent.ExecutionContext.Implicits.global)
......@@ -214,13 +214,13 @@ class ElapsedTest extends AnyFlatSpec with Matchers {
| object A {
| private final val log1: org.slf4j.Logger = org.slf4j.LoggerFactory.getLogger(A.getClass)
|
| @elapsed(limit = 1, logLevel = org.bitlap.tools.LogLevel.INFO)
| @elapsed(limit = 1, logLevel = "info")
| def helloScala1: Future[String] = {
| Thread.sleep(1000)
| Future.successful("hello world")
| }
|
| @elapsed(limit = 1, logLevel = org.bitlap.tools.LogLevel.DEBUG)
| @elapsed(limit = 1, logLevel = "debug")
| def helloScala2: Future[String] = {
| Thread.sleep(2000)
| Future {
......@@ -228,7 +228,7 @@ class ElapsedTest extends AnyFlatSpec with Matchers {
| }(scala.concurrent.ExecutionContext.Implicits.global)
| }
|
| @elapsed(limit = 1, logLevel = org.bitlap.tools.LogLevel.WARN)
| @elapsed(limit = 1, logLevel = "warn")
| def helloScala3: Future[String] = Future {
| "hello world"
| }(scala.concurrent.ExecutionContext.Implicits.global)
......@@ -239,14 +239,14 @@ class ElapsedTest extends AnyFlatSpec with Matchers {
"elapsed8" should "ok at input args" in {
@elapsed(
limit = 1,
logLevel = LogLevel.WARN
logLevel = "warn"
)
def helloScala1: String = {
println("")
println("")
"hello"
}
@elapsed(limit = 1, logLevel = LogLevel.INFO)
@elapsed(limit = 1, logLevel = "info")
def helloScala2: String = {
println("")
println("")
......@@ -255,7 +255,7 @@ class ElapsedTest extends AnyFlatSpec with Matchers {
@elapsed(
limit = 1,
logLevel = org.bitlap.tools.LogLevel.WARN
logLevel = "warn"
)
def helloScala3: String = {
println("")
......@@ -269,7 +269,7 @@ class ElapsedTest extends AnyFlatSpec with Matchers {
"elapsed9" should "failed at input args" in {
"""
|@elapsed(logLevel = org.bitlap.tools.LogLevel.WARN, limit = 1)
|@elapsed(logLevel = "warn", limit = 1)
| def helloScala1: String = {
| println("")
| println("")
......@@ -282,7 +282,7 @@ class ElapsedTest extends AnyFlatSpec with Matchers {
@elapsed(
limit = 1,
logLevel = org.bitlap.tools.LogLevel.INFO
logLevel = "info"
)
def j: Int = {
var i = 1
......@@ -305,7 +305,7 @@ class ElapsedTest extends AnyFlatSpec with Matchers {
@elapsed(
limit = 1,
logLevel = org.bitlap.tools.LogLevel.INFO
logLevel = "info"
)
def k: Unit = {
var i = 1
......@@ -340,7 +340,7 @@ class ElapsedTest extends AnyFlatSpec with Matchers {
@elapsed(
limit = 1,
logLevel = org.bitlap.tools.LogLevel.INFO
logLevel = "info"
)
def l: Int = {
val i = 0
......@@ -353,7 +353,7 @@ class ElapsedTest extends AnyFlatSpec with Matchers {
@elapsed(
limit = 1,
logLevel = org.bitlap.tools.LogLevel.INFO
logLevel = "info"
)
def m: Int = {
var i = 1
......@@ -377,7 +377,7 @@ class ElapsedTest extends AnyFlatSpec with Matchers {
"elapsed11" should "failed at abstract method" in {
"""
|abstract class A {
| @elapsed(limit = 1, logLevel = org.bitlap.tools.LogLevel.WARN)
| @elapsed(limit = 1, logLevel = "warn")
| def hello:String
| }
|""".stripMargin shouldNot compile
......
......@@ -21,7 +21,6 @@
package org.bitlap.tools
import org.bitlap.tools.logs.LogType
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
......@@ -39,8 +38,8 @@ class LogTest extends AnyFlatSpec with Matchers {
"""@log class TestClass2(val i: Int = 0, var j: Int)""" should compile
"""@log() class TestClass3(val i: Int = 0, var j: Int)""" should compile
"""@log(logType=org.bitlap.tools.logs.LogType.JLog) class TestClass5(val i: Int = 0, var j: Int)""" should compile
"""@log(logType=org.bitlap.tools.logs.LogType.JLog) class TestClass6(val i: Int = 0, var j: Int)""" should compile
"""@log(logType="JLog") class TestClass5(val i: Int = 0, var j: Int)""" should compile
"""@log(logType="JLog") class TestClass6(val i: Int = 0, var j: Int)""" should compile
}
"log3" should "ok on object" in {
......@@ -51,8 +50,8 @@ class LogTest extends AnyFlatSpec with Matchers {
"""@log object TestClass2""" should compile
"""@log() object TestClass3""" should compile
"""@log object TestClass4""" should compile
"""@log(logType=org.bitlap.tools.logs.LogType.JLog) object TestClass5""" should compile
"""@log(logType=org.bitlap.tools.logs.LogType.JLog) object TestClass6""" should compile
"""@log(logType="JLog") object TestClass5""" should compile
"""@log(logType="JLog") object TestClass6""" should compile
}
"log4 log4j2" should "ok on object" in {
......@@ -63,8 +62,8 @@ class LogTest extends AnyFlatSpec with Matchers {
"""@log object TestClass2""" should compile
"""@log() object TestClass3""" should compile
"""@log object TestClass4""" should compile
"""@log(logType=org.bitlap.tools.logs.LogType.Log4j2) object TestClass5""" should compile
"""@log(logType=org.bitlap.tools.logs.LogType.Log4j2) object TestClass6""" should compile
"""@log(logType="Log4j2") object TestClass5""" should compile
"""@log(logType="Log4j2") object TestClass6""" should compile
}
"log5 slf4j" should "ok on object" in {
......@@ -75,8 +74,8 @@ class LogTest extends AnyFlatSpec with Matchers {
"""@log object TestClass2""" should compile
"""@log() object TestClass3""" should compile
"""@log object TestClass4""" should compile
"""@log(logType=org.bitlap.tools.logs.LogType.Slf4j) object TestClass5""" should compile
"""@log(logType=org.bitlap.tools.logs.LogType.Slf4j) object TestClass6""" should compile
"""@log(logType="Slf4j") object TestClass5""" should compile
"""@log(logType="Slf4j") object TestClass6""" should compile
}
"log6 log4j2" should "ok on class" in {
......@@ -87,8 +86,8 @@ class LogTest extends AnyFlatSpec with Matchers {
"""@log class TestClass2(val i: Int = 0, var j: Int)""" should compile
"""@log() class TestClass3(val i: Int = 0, var j: Int)""" should compile
"""@log class TestClass4(val i: Int = 0, var j: Int)""" should compile
"""@log(logType=org.bitlap.tools.logs.LogType.Log4j2) class TestClass5(val i: Int = 0, var j: Int)""" should compile
"""@log(logType=org.bitlap.tools.logs.LogType.Log4j2) class TestClass6(val i: Int = 0, var j: Int)""" should compile
"""@log(logType="Log4j2") class TestClass5(val i: Int = 0, var j: Int)""" should compile
"""@log(logType="Log4j2") class TestClass6(val i: Int = 0, var j: Int)""" should compile
}
"log7 slf4j" should "ok on class" in {
......@@ -99,10 +98,10 @@ class LogTest extends AnyFlatSpec with Matchers {
"""@toString @builder @log class TestClass2(val i: Int = 0, var j: Int)""" should compile // Use with multiple annotations
"""@log() class TestClass3(val i: Int = 0, var j: Int)""" should compile
"""@log class TestClass4(val i: Int = 0, var j: Int)""" should compile
"""@log(logType=org.bitlap.tools.logs.LogType.Slf4j) class TestClass5(val i: Int = 0, var j: Int)""" should compile
"""@log(logType=org.bitlap.tools.logs.LogType.Slf4j) class TestClass6(val i: Int = 0, var j: Int)""" should compile
"""@log(logType=org.bitlap.tools.logs.LogType.Slf4j) class TestClass6(val i: Int = 0, var j: Int){ log.info("hello world") }""" should compile
"""@log(logType = org.bitlap.tools.logs.LogType.Slf4j) class TestClass6(val i: Int = 0, var j: Int){ log.info("hello world") }""" should compile
"""@log(logType="Slf4j") class TestClass5(val i: Int = 0, var j: Int)""" should compile
"""@log(logType="Slf4j") class TestClass6(val i: Int = 0, var j: Int)""" should compile
"""@log(logType="Slf4j") class TestClass6(val i: Int = 0, var j: Int){ log.info("hello world") }""" should compile
"""@log(logType = "Slf4j") class TestClass6(val i: Int = 0, var j: Int){ log.info("hello world") }""" should compile
}
"log8 slf4j" should "ok on class and has object" in {
......@@ -113,13 +112,13 @@ class LogTest extends AnyFlatSpec with Matchers {
"""@toString @builder @log class TestClass2(val i: Int = 0, var j: Int)""" should compile // Use with multiple annotations
"""@log() class TestClass3(val i: Int = 0, var j: Int)""" should compile
"""@log class TestClass4(val i: Int = 0, var j: Int)""" should compile
"""@log(logType=org.bitlap.tools.logs.LogType.Slf4j) class TestClass5(val i: Int = 0, var j: Int)""" should compile
"""@log(logType=org.bitlap.tools.logs.LogType.Slf4j) class TestClass6(val i: Int = 0, var j: Int)""" should compile
"""@log(logType=org.bitlap.tools.logs.LogType.Slf4j) class TestClass6(val i: Int = 0, var j: Int){ log.info("hello world") }""" should compile
"""@log(logType = org.bitlap.tools.logs.LogType.Slf4j) @builder class TestClass6(val i: Int = 0, var j: Int){ log.info("hello world") }
| @log(logType = org.bitlap.tools.logs.LogType.Slf4j) object TestClass6 { log.info("hello world");builder() }""".stripMargin should compile
"""@log(logType="Slf4j") class TestClass5(val i: Int = 0, var j: Int)""" should compile
"""@log(logType="Slf4j") class TestClass6(val i: Int = 0, var j: Int)""" should compile
"""@log(logType="Slf4j") class TestClass6(val i: Int = 0, var j: Int){ log.info("hello world") }""" should compile
"""@log(logType = "Slf4j") @builder class TestClass6(val i: Int = 0, var j: Int){ log.info("hello world") }
| @log(logType = "Slf4j") object TestClass6 { log.info("hello world");builder() }""".stripMargin should compile
@log(logType = org.bitlap.tools.logs.LogType.Slf4j)
@log(logType = "Slf4j")
@builder class TestClass8(val i: Int = 0, var j: Int) {
log.info("hello world")
}
......@@ -130,18 +129,18 @@ class LogTest extends AnyFlatSpec with Matchers {
"log9 slf4j" should "ok on class and it object" in {
"""
|@log(logType = org.bitlap.tools.logs.LogType.Slf4j) @builder class TestClass6(val i: Int = 0, var j: Int){ log.info("hello world") }
|@log(logType = org.bitlap.tools.logs.LogType.Slf4j) object TestClass6 { log.info("hello world"); builder()}
|@log(logType = "Slf4j") @builder class TestClass6(val i: Int = 0, var j: Int){ log.info("hello world") }
|@log(logType = "Slf4j") object TestClass6 { log.info("hello world"); builder()}
|""".stripMargin should compile
}
"log10 slf4j" should "failed on case class" in {
"""
| @log(logType = LogType.JLog)
| @log(logType = "JLog")
| @builder case class TestClass6_2(val i: Int = 0, var j: Int) {
| log.info("hello world")
| }
| @log(logType = org.bitlap.tools.logs.LogType.Slf4j) object TestClass6_2 {
| @log(logType = "Slf4j") object TestClass6_2 {
| log.info("hello world"); builder()
| }
|""".stripMargin shouldNot compile
......@@ -149,22 +148,22 @@ class LogTest extends AnyFlatSpec with Matchers {
"log11 slf4j" should "ok on class and it object" in {
"""
| @log(logType = org.bitlap.tools.logs.LogType.Slf4j)
| @log(logType = "Slf4j")
| @builder class TestClass6(val i: Int = 0, var j: Int) {
| log.info("hello world")
| }
|@log(logType = org.bitlap.tools.logs.LogType.Slf4j) object TestClass6 {
|@log(logType = "Slf4j") object TestClass6 {
| log.info("hello world"); builder()
| }
|""".stripMargin should compile
"""
| @builder
| @log(logType = org.bitlap.tools.logs.LogType.Slf4j)
| @log(logType = "Slf4j")
| class TestClass6(val i: Int = 0, var j: Int) {
| log.info("hello world")
| }
|@log(logType = org.bitlap.tools.logs.LogType.Slf4j) object TestClass6 {
|@log(logType = "Slf4j") object TestClass6 {
| log.info("hello world"); builder()
| }
|""".stripMargin should compile
......@@ -177,13 +176,13 @@ class LogTest extends AnyFlatSpec with Matchers {
@log object TestLog1 {
log.info("")
}
@log(logType = org.bitlap.tools.logs.LogType.Slf4j) class TestLog2() {
@log(logType = "Slf4j") class TestLog2() {
log.info("")
}
@log(logType = JLog) class TestLog3() {
@log(logType = "JLog") class TestLog3() {
log.info("")
}
@log(logType = LogType.Slf4j) class TestLog4() {
@log(logType = "Slf4j") class TestLog4() {
log.info("")
}
}
......@@ -191,19 +190,19 @@ class LogTest extends AnyFlatSpec with Matchers {
"log13 scala loggging lazy" should "ok when does not exists super class" in {
"""
| import org.bitlap.tools.logs.LogType
| @log(logType = LogType.ScalaLoggingLazy)
| @log(logType = "ScalaLoggingLazy")
| class TestClass1(val i: Int = 0, var j: Int) {
| log.info("hello world")
| }
|""".stripMargin should compile
@log(logType = LogType.ScalaLoggingLazy)
@log(logType = "ScalaLoggingLazy")
class TestClass2(val i: Int = 0, var j: Int) {
log.info("hello world")
}
"""
| import org.bitlap.tools.logs.LogType
| @log(logType = LogType.ScalaLoggingLazy)
| @log(logType = "ScalaLoggingLazy")
| class TestClass3(val i: Int = 0, var j: Int) {
| log.info("hello world")
| }
......@@ -211,7 +210,7 @@ class LogTest extends AnyFlatSpec with Matchers {
"""
| import org.bitlap.tools.logs.LogType
| @log(logType = LogType.ScalaLoggingLazy)
| @log(logType = "ScalaLoggingLazy")
| object TestClass4 {
| log.info("hello world")
| }
......@@ -221,19 +220,19 @@ class LogTest extends AnyFlatSpec with Matchers {
"log14 scala loggging strict" should "ok when exists super class" in {
"""
| import org.bitlap.tools.logs.LogType
| @log(logType = LogType.ScalaLoggingStrict)
| @log(logType = "ScalaLoggingStrict")
| class TestClass1(val i: Int = 0, var j: Int) extends Serializable {
| log.info("hello world")
| }
|""".stripMargin should compile
@log(logType = LogType.ScalaLoggingStrict)
@log(logType = "ScalaLoggingStrict")
class TestClass2(val i: Int = 0, var j: Int) extends Serializable {
log.info("hello world")
}
"""
| import org.bitlap.tools.logs.LogType
| @log(logType = LogType.ScalaLoggingStrict)
| @log(logType = "ScalaLoggingStrict")
| class TestClass3(val i: Int = 0, var j: Int) extends Serializable {
| log.info("hello world")
| }
......@@ -241,7 +240,7 @@ class LogTest extends AnyFlatSpec with Matchers {
"""
| import org.bitlap.tools.logs.LogType
| @log(logType = LogType.ScalaLoggingStrict)
| @log(logType = "ScalaLoggingStrict")
| object TestClass4 extends Serializable {
| log.info("hello world")
| }
......
......@@ -29,7 +29,7 @@ trait CacheStrategy
object CacheStrategy {
case class Lru(maxSize: Int = 1000) extends CacheStrategy
case object Normal extends CacheStrategy
case class CustomCacheStrategy[V](cacheAdapter: CacheAdapter[V]) extends CacheStrategy
final case class Lru(maxSize: Int = 1000) extends CacheStrategy
final case object Normal extends CacheStrategy
final case class CustomCacheStrategy[V](cacheAdapter: CacheAdapter[V]) extends CacheStrategy
}
......@@ -29,12 +29,12 @@ sealed trait Options
object Options {
case object enableOptionDefaultsToNone extends Options
final case object enableOptionDefaultsToNone extends Options
case object enableCollectionDefaultsToEmpty extends Options
final case object enableCollectionDefaultsToEmpty extends Options
case object disableCollectionDefaultsToEmpty extends Options
final case object disableCollectionDefaultsToEmpty extends Options
case object disableOptionDefaultsToNone extends Options
final case object disableOptionDefaultsToNone extends Options
}
......@@ -40,6 +40,9 @@ trait ResultSetTransformer[T <: GenericRow] {
case Types.INTEGER => resultSet.getInt(name)
case Types.DOUBLE => resultSet.getDouble(name)
case Types.TIMESTAMP => resultSet.getTimestamp(name)
case Types.TIME => resultSet.getTime(name)
case Types.FLOAT => resultSet.getFloat(name)
case Types.DATE => resultSet.getDate(name)
case _ => resultSet.getObject(name)
}
}
......
......@@ -25,7 +25,7 @@ package org.bitlap.common
* 梦境迷离
* @version 1.0,6/8/22
*/
case class TestEntity(
final case class TestEntity(
name: String,
id: String,
key: String,
......
......@@ -47,7 +47,7 @@ object DeriveCsvConverter {
val typeName = clazzName.toTypeName
val tree =
q"""
new Converter[$typeName] {
new $packageName.Converter[$typeName] {
override def toScala($lineTermName: String): _root_.scala.Option[$typeName] = $packageName.internal.ToCaseClassMacro[$typeName]($lineTermName)($csvFormat)
override def toCsvString($tTermName: $typeName): String = $packageName.internal.ToStringMacro[$typeName]($tTermName)($csvFormat)
}
......
......@@ -103,4 +103,14 @@ object FileUtils {
}
ts.result()
}
def readCsvFromClassPath[T <: Product](fileName: String)(func: String => Option[T]): List[Option[T]] = {
val reader = new InputStreamReader(ClassLoader.getSystemResourceAsStream(fileName))
FileUtils.readFileFunc[T](new BufferedReader(reader), func)
}
def readCsvFromFile[T <: Product](file: File)(func: String => Option[T]): List[Option[T]] = {
val reader = new BufferedReader(new FileReader(file))
FileUtils.readFileFunc[T](reader, func)
}
}
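The two helpers previously exposed on `ScalableHelper` now live on `FileUtils`. A usage sketch under assumptions not in this diff (the `Metric` case class, file names, and hand-rolled parse function are hypothetical; a `ReaderBuilder` or derived converter could supply the function instead):

```scala
import java.io.File
import org.bitlap.csv.FileUtils
import scala.util.Try

object FileUtilsSketch {
  final case class Metric(name: String, value: Int)

  // A deliberately simple line parser used as the `String => Option[T]` argument.
  def parse(line: String): Option[Metric] =
    line.split(',') match {
      case Array(name, value) => Try(Metric(name, value.toInt)).toOption
      case _                  => None
    }

  val fromClasspath: List[Option[Metric]] =
    FileUtils.readCsvFromClassPath[Metric]("metrics.csv")(parse)

  val fromFile: List[Option[Metric]] =
    FileUtils.readCsvFromFile[Metric](new File("/tmp/metrics.csv"))(parse)
}
```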
......@@ -28,7 +28,7 @@ package org.bitlap.csv
* @since 2022/04/30
* @version 1.0
*/
trait Scalable[T] {
trait Reader[T] {
/** API for processing a specific column value of CSV line data.
*
......@@ -39,8 +39,8 @@ trait Scalable[T] {
def transform(column: String): Option[T]
}
object Scalable extends ScalableImplicits {
object Reader extends ReaderImplicits {
def apply[T](implicit st: Scalable[T]): Scalable[T] = st
def apply[T](implicit st: Reader[T]): Reader[T] = st
}
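A custom `Reader` only has to implement `transform`; a sketch for `java.time.LocalDate`, which is not among the built-ins in `ReaderImplicits` (the instance name is illustrative):

```scala
import java.time.LocalDate
import org.bitlap.csv.Reader
import scala.util.Try

object CustomReaders {
  // Empty or malformed columns become None instead of throwing.
  implicit val localDateReader: Reader[LocalDate] = new Reader[LocalDate] {
    override def transform(column: String): Option[LocalDate] =
      if (column.isEmpty) None else Try(LocalDate.parse(column)).toOption
  }
}
```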
......@@ -21,7 +21,7 @@
package org.bitlap.csv
import org.bitlap.csv.internal.ScalableBuilderMacro
import org.bitlap.csv.internal.ReaderBuilderMacro
import java.io.InputStream
/** Builder to create a custom CSV decoder.
......@@ -30,7 +30,7 @@ import java.io.InputStream
* 梦境迷离
* @version 1.0,2022/4/30
*/
class ScalableBuilder[T] {
class ReaderBuilder[T] {
/** Convert this CSV column string to a Scala field value.
*
......@@ -42,8 +42,8 @@ class ScalableBuilder[T] {
* The field type. Generally it is not necessary to specify it, but doing so is safer.
* @return
*/
def setField[SF](scalaField: T => SF, value: String => SF): ScalableBuilder[T] =
macro ScalableBuilderMacro.setFieldImpl[T, SF]
def setField[SF](scalaField: T => SF, value: String => SF): ReaderBuilder[T] =
macro ReaderBuilderMacro.setFieldImpl[T, SF]
/** Create a custom builder for converting this CSV line to Scala values.
*
......@@ -53,7 +53,7 @@ class ScalableBuilder[T] {
* For processing CSV in the specified format.
* @return
*/
def convert(line: String)(implicit format: CsvFormat): Option[T] = macro ScalableBuilderMacro.convertOneImpl[T]
def convert(line: String)(implicit format: CsvFormat): Option[T] = macro ReaderBuilderMacro.convertOneImpl[T]
/** Convert all CSV lines to a sequence of Scala case classes.
*
......@@ -64,7 +64,7 @@ class ScalableBuilder[T] {
* @return
*/
def convert(lines: List[String])(implicit format: CsvFormat): List[Option[T]] =
macro ScalableBuilderMacro.convertAllImpl[T]
macro ReaderBuilderMacro.convertAllImpl[T]
/** Read all CSV lines from the file and convert them to a sequence of Scala case classes.
*
......@@ -75,12 +75,12 @@ class ScalableBuilder[T] {
* @return
*/
def convertFrom(file: InputStream)(implicit format: CsvFormat): List[Option[T]] =
macro ScalableBuilderMacro.convertFromFileImpl[T]
macro ReaderBuilderMacro.convertFromFileImpl[T]
}
object ScalableBuilder {
object ReaderBuilder {
def apply[T <: Product]: ScalableBuilder[T] = macro ScalableBuilderMacro.applyImpl[T]
def apply[T <: Product]: ReaderBuilder[T] = macro ReaderBuilderMacro.applyImpl[T]
}
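A usage sketch under the new names; the `Dimension` case class, the per-field rule, and the implicit `CsvFormat` are assumptions for illustration, not part of this change:

```scala
import org.bitlap.csv.{ CsvFormat, ReaderBuilder }

object ReaderBuilderSketch {
  final case class Dimension(key: String, value: Option[String])

  // Override only the `value` field: an empty column becomes None; the remaining
  // fields are handled by the default Reader instances.
  def parse(line: String)(implicit format: CsvFormat): Option[Dimension] =
    ReaderBuilder[Dimension]
      .setField[Option[String]](_.value, column => if (column.isEmpty) None else Some(column))
      .convert(line)
}
```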
......@@ -27,37 +27,37 @@ import scala.util.Try
* 梦境迷离
* @version 1.0,2022/5/1
*/
trait ScalableImplicits {
trait ReaderImplicits {
implicit final val stringScalable: Scalable[String] = new Scalable[String] {
implicit final val stringReader: Reader[String] = new Reader[String] {
override def transform(column: String): Option[String] = if (column.isEmpty) None else Some(column)
}
implicit final val intScalable: Scalable[Int] = new Scalable[Int] {
implicit final val intReader: Reader[Int] = new Reader[Int] {
override def transform(column: String): Option[Int] = Try(column.toInt).toOption
}
implicit final val charScalable: Scalable[Char] = new Scalable[Char] {
implicit final val charReader: Reader[Char] = new Reader[Char] {
override def transform(column: String): Option[Char] = if (column.isEmpty) None else Try(column.charAt(0)).toOption
}
implicit final val longScalable: Scalable[Long] = new Scalable[Long] {
implicit final val longReader: Reader[Long] = new Reader[Long] {
override def transform(column: String): Option[Long] = Try(column.toLong).toOption
}
implicit final val shortScalable: Scalable[Short] = new Scalable[Short] {
implicit final val shortReader: Reader[Short] = new Reader[Short] {
override def transform(column: String): Option[Short] = Try(column.toShort).toOption
}
implicit final val doubleScalable: Scalable[Double] = new Scalable[Double] {
implicit final val doubleReader: Reader[Double] = new Reader[Double] {
override def transform(column: String): Option[Double] = Try(column.toDouble).toOption
}
implicit final val floatScalable: Scalable[Float] = new Scalable[Float] {
implicit final val floatReader: Reader[Float] = new Reader[Float] {
override def transform(column: String): Option[Float] = Try(column.toFloat).toOption
}
implicit final val booleanScalable: Scalable[Boolean] = new Scalable[Boolean] {
implicit final val booleanReader: Reader[Boolean] = new Reader[Boolean] {
override def transform(column: String): Option[Boolean] = Try(column.toBoolean).toOption
}
}
/*
* Copyright (c) 2022 bitlap
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.bitlap.csv
import java.io.{ BufferedReader, File, FileReader, InputStreamReader }
/** Tool class for parsing CSV files.
*
* @author
* 梦境迷离
* @version 1.0,2022/5/13
*/
object ScalableHelper {
def readCsvFromClassPath[T <: Product](fileName: String)(func: String => Option[T]): List[Option[T]] = {
val reader = new InputStreamReader(ClassLoader.getSystemResourceAsStream(fileName))
FileUtils.readFileFunc[T](new BufferedReader(reader), func)
}
def readCsvFromFile[T <: Product](file: File)(func: String => Option[T]): List[Option[T]] = {
val reader = new BufferedReader(new FileReader(file))
FileUtils.readFileFunc[T](reader, func)
}
}
......@@ -28,7 +28,7 @@ package org.bitlap.csv
* @since 2022/04/27
* @version 1.0
*/
trait Csvable[T] {
trait Writer[T] {
/** API for processing a specific field of case class object.
*
......@@ -40,8 +40,8 @@ trait Csvable[T] {
}
object Csvable extends CsvableImplicits {
object Writer extends WriterImplicits {
def apply[T](implicit st: Csvable[T]): Csvable[T] = st
def apply[T](implicit st: Writer[T]): Writer[T] = st
}
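Symmetrically to `Reader`, a custom `Writer` implements `transform` from the field type to its CSV string form; a sketch for `java.time.LocalDate` (the instance name is illustrative):

```scala
import java.time.LocalDate
import org.bitlap.csv.Writer

object CustomWriters {
  // Render LocalDate as ISO-8601 text, mirroring the LocalDate Reader sketched earlier.
  implicit val localDateWriter: Writer[LocalDate] = new Writer[LocalDate] {
    override def transform(t: LocalDate): String = t.toString
  }
}
```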
......@@ -21,7 +21,7 @@
package org.bitlap.csv
import org.bitlap.csv.internal.CsvableBuilderMacro
import org.bitlap.csv.internal.WriterBuilderMacro
import java.io.File
/** Builder to create a custom CSV encoder.
......@@ -30,7 +30,7 @@ import java.io.File
* 梦境迷离
* @version 1.0,2022/4/30
*/
class CsvableBuilder[T] {
class WriterBuilder[T] {
/** Convert a Scala field value to this CSV column string.
*
......@@ -42,8 +42,8 @@ class CsvableBuilder[T] {
* The field type. Generally it is not necessary to specify it, but doing so is safer.
* @return
*/
def setField[SF](scalaField: T => SF, value: SF => String): CsvableBuilder[T] =
macro CsvableBuilderMacro.setFieldImpl[T, SF]
def setField[SF](scalaField: T => SF, value: SF => String): WriterBuilder[T] =
macro WriterBuilderMacro.setFieldImpl[T, SF]
/** Create a custom builder for converting this Scala value to a CSV line string.
*
......@@ -54,7 +54,7 @@ class CsvableBuilder[T] {
* @return
* The string of one CSV line.
*/
def convert(t: T)(implicit format: CsvFormat): String = macro CsvableBuilderMacro.convertOneImpl[T]
def convert(t: T)(implicit format: CsvFormat): String = macro WriterBuilderMacro.convertOneImpl[T]
/** Convert a sequence of Scala case classes to a CSV string.
*
......@@ -65,7 +65,7 @@ class CsvableBuilder[T] {
* @return
* The string of all CSV lines.
*/
def convert(ts: List[T])(implicit format: CsvFormat): String = macro CsvableBuilderMacro.convertAllImpl[T]
def convert(ts: List[T])(implicit format: CsvFormat): String = macro WriterBuilderMacro.convertAllImpl[T]
/** Convert a sequence of Scala case classes to a CSV string and write it to a file.
*
......@@ -79,12 +79,12 @@ class CsvableBuilder[T] {
* The string of all CSV lines.
*/
def convertTo(ts: List[T], file: File)(implicit format: CsvFormat): Boolean =
macro CsvableBuilderMacro.convertToFileImpl[T]
macro WriterBuilderMacro.convertToFileImpl[T]
}
object CsvableBuilder {
object WriterBuilder {
def apply[T <: Product]: CsvableBuilder[T] = macro CsvableBuilderMacro.applyImpl[T]
def apply[T <: Product]: WriterBuilder[T] = macro WriterBuilderMacro.applyImpl[T]
}
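A usage sketch mirroring the reader side; again, `Dimension` and the implicit `CsvFormat` are assumptions for illustration:

```scala
import org.bitlap.csv.{ CsvFormat, WriterBuilder }

object WriterBuilderSketch {
  final case class Dimension(key: String, value: Option[String])

  // Render the optional field as an empty column when it is absent.
  def render(rows: List[Dimension])(implicit format: CsvFormat): String =
    WriterBuilder[Dimension]
      .setField[Option[String]](_.value, _.getOrElse(""))
      .convert(rows)
}
```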
......@@ -25,37 +25,37 @@ package org.bitlap.csv
* 梦境迷离
* @version 1.0,2022/5/1
*/
trait CsvableImplicits {
trait WriterImplicits {
implicit final val stringCsvable: Csvable[String] = new Csvable[String] {
implicit final val stringWriter: Writer[String] = new Writer[String] {
override def transform(s: String): String = s
}
implicit final val intCsvable: Csvable[Int] = new Csvable[Int] {
implicit final val intWriter: Writer[Int] = new Writer[Int] {
override def transform(column: Int): String = column.toString
}
implicit final val charCsvable: Csvable[Char] = new Csvable[Char] {
implicit final val charWriter: Writer[Char] = new Writer[Char] {
override def transform(t: Char): String = t.toString
}
implicit final val longCsvable: Csvable[Long] = new Csvable[Long] {
implicit final val longWriter: Writer[Long] = new Writer[Long] {
override def transform(column: Long): String = column.toString
}
implicit final val shortCsvable: Csvable[Short] = new Csvable[Short] {
implicit final val shortWriter: Writer[Short] = new Writer[Short] {
override def transform(column: Short): String = column.toString
}
implicit final val doubleCsvable: Csvable[Double] = new Csvable[Double] {
implicit final val doubleWriter: Writer[Double] = new Writer[Double] {
override def transform(column: Double): String = column.toString
}
implicit final val floatCsvable: Csvable[Float] = new Csvable[Float] {
implicit final val floatWriter: Writer[Float] = new Writer[Float] {
override def transform(column: Float): String = column.toString
}
implicit final val booleanCsvable: Csvable[Boolean] = new Csvable[Boolean] {
implicit final val booleanWriter: Writer[Boolean] = new Writer[Boolean] {
override def transform(column: Boolean): String = column.toString
}
}
......@@ -23,7 +23,7 @@ package org.bitlap.csv.internal
import org.bitlap.common.MacroCache
import org.bitlap.common.internal.AbstractMacroProcessor
import org.bitlap.csv.{ CsvFormat, ScalableBuilder }
import org.bitlap.csv.{ CsvFormat, ReaderBuilder }
import java.io.InputStream
import scala.collection.mutable
......@@ -33,59 +33,59 @@ import scala.reflect.macros.whitebox
* 梦境迷离
* @version 1.0,2022/4/29
*/
class ScalableBuilderMacro(override val c: whitebox.Context) extends AbstractMacroProcessor(c) {
class ReaderBuilderMacro(override val c: whitebox.Context) extends AbstractMacroProcessor(c) {
import c.universe._
protected val packageName = q"_root_.org.bitlap.csv"
private val annoBuilderPrefix = "_AnonScalableBuilder$"
private val annoBuilderPrefix = "_AnonReaderBuilder$"
private val builderFunctionPrefix = "_ScalableBuilderFunction$"
private val builderFunctionPrefix = "_ReaderBuilderFunction$"
private val innerColumnFuncTermName = TermName("_columns")
private val innerLName = q"_l"
private val innerTempTermName = TermName("_line")
private val scalableInstanceTermName = TermName("_scalableInstance")
private val scalableImplClassNamePrefix = "_ScalaAnno$"
private val innerColumnFuncTermName = TermName("_columns")
private val innerLName = q"_l"
private val innerTempTermName = TermName("_line")
private val readerInstanceTermName = TermName("_ReaderInstance")
private val readerImplClassNamePrefix = "_ScalaAnno$"
// scalafmt: { maxColumn = 400 }
@unchecked
def setFieldImpl[T, SF](scalaField: Expr[T => SF], value: Expr[String => SF]): Expr[ScalableBuilder[T]] = {
def setFieldImpl[T, SF](scalaField: Expr[T => SF], value: Expr[String => SF]): Expr[ReaderBuilder[T]] = {
val Function(_, Select(_, termName)) = scalaField.tree
val builderId = getBuilderId(annoBuilderPrefix)
MacroCache.builderFunctionTrees.getOrElseUpdate(builderId, mutable.Map.empty).update(termName.toString, value)
val tree = q"new ${c.prefix.actualType}"
exprPrintTree[ScalableBuilder[T]](force = false, tree)
exprPrintTree[ReaderBuilder[T]](force = false, tree)
}
def applyImpl[T: WeakTypeTag]: Expr[ScalableBuilder[T]] =
def applyImpl[T: WeakTypeTag]: Expr[ReaderBuilder[T]] =
deriveBuilderApplyImpl[T]
def convertOneImpl[T: WeakTypeTag](line: Expr[String])(format: c.Expr[CsvFormat]): Expr[Option[T]] = {
val clazzName = resolveClassTypeName[T]
deriveScalableImpl[T](clazzName, line, format)
deriveReaderImpl[T](clazzName, line, format)
}
def convertAllImpl[T: WeakTypeTag](lines: Expr[List[String]])(format: c.Expr[CsvFormat]): Expr[List[Option[T]]] = {
val clazzName = resolveClassTypeName[T]
deriveFullScalableImpl[T](clazzName, lines, format)
deriveFullReaderImpl[T](clazzName, lines, format)
}
def convertFromFileImpl[T: WeakTypeTag](file: Expr[InputStream])(format: c.Expr[CsvFormat]): Expr[List[Option[T]]] = {
val clazzName = resolveClassTypeName[T]
deriveFullFromFileScalableImpl[T](clazzName, file, format)
deriveFullFromFileReaderImpl[T](clazzName, file, format)
}
private def deriveBuilderApplyImpl[T: WeakTypeTag]: Expr[ScalableBuilder[T]] = {
private def deriveBuilderApplyImpl[T: WeakTypeTag]: Expr[ReaderBuilder[T]] = {
val className = TypeName(annoBuilderPrefix + MacroCache.getBuilderId)
val caseClazzName = weakTypeOf[T].typeSymbol.name.toTypeName
val tree =
q"""
class $className extends $packageName.ScalableBuilder[$caseClazzName]
class $className extends $packageName.ReaderBuilder[$caseClazzName]
new $className
"""
exprPrintTree[ScalableBuilder[T]](force = false, tree)
exprPrintTree[ReaderBuilder[T]](force = false, tree)
}
private def getPreTree: Iterable[Tree] = {
......@@ -101,57 +101,57 @@ class ScalableBuilderMacro(override val c: whitebox.Context) extends AbstractMac
}
// scalafmt: { maxColumn = 400 }
private def deriveFullFromFileScalableImpl[T: WeakTypeTag](clazzName: TypeName, file: Expr[InputStream], format: c.Expr[CsvFormat]): Expr[List[Option[T]]] = {
// NOTE: preTrees must be at the same level as Scalable
private def deriveFullFromFileReaderImpl[T: WeakTypeTag](clazzName: TypeName, file: Expr[InputStream], format: c.Expr[CsvFormat]): Expr[List[Option[T]]] = {
// NOTE: preTrees must be at the same level as Reader
val tree =
q"""
..$getPreTree
..${getAnnoClassObject[T](clazzName, format)}
$packageName.FileUtils.reader($file, $format).map { ($innerLName: String) =>
$scalableInstanceTermName.$innerTempTermName = ${TermName(innerLName.toString())}
$scalableInstanceTermName.transform($innerLName)
$readerInstanceTermName.$innerTempTermName = ${TermName(innerLName.toString())}
$readerInstanceTermName.transform($innerLName)
}
"""
exprPrintTree[List[Option[T]]](force = false, tree)
}
// scalafmt: { maxColumn = 400 }
private def deriveFullScalableImpl[T: WeakTypeTag](clazzName: TypeName, lines: Expr[List[String]], format: c.Expr[CsvFormat]): Expr[List[Option[T]]] = {
// NOTE: preTrees must be at the same level as Scalable
private def deriveFullReaderImpl[T: WeakTypeTag](clazzName: TypeName, lines: Expr[List[String]], format: c.Expr[CsvFormat]): Expr[List[Option[T]]] = {
// NOTE: preTrees must be at the same level as Reader
val tree =
q"""
..$getPreTree
..${getAnnoClassObject[T](clazzName, format)}
$lines.map { ($innerLName: String) =>
$scalableInstanceTermName.$innerTempTermName = ${TermName(innerLName.toString())}
$scalableInstanceTermName.transform($innerLName)
$readerInstanceTermName.$innerTempTermName = ${TermName(innerLName.toString())}
$readerInstanceTermName.transform($innerLName)
}
"""
exprPrintTree[List[Option[T]]](force = false, tree)
}
private def getAnnoClassObject[T: WeakTypeTag](clazzName: TypeName, format: c.Expr[CsvFormat]): Tree = {
val annoClassName = TermName(scalableImplClassNamePrefix + MacroCache.getIdentityId)
val annoClassName = TermName(readerImplClassNamePrefix + MacroCache.getIdentityId)
q"""
object $annoClassName extends $packageName.Scalable[$clazzName] {
object $annoClassName extends $packageName.Reader[$clazzName] {
var $innerTempTermName: String = _
private val $innerColumnFuncTermName = () => $packageName.StringUtils.splitColumns(${annoClassName.toTermName}.$innerTempTermName, $format)
..${scalableBody[T](clazzName, innerColumnFuncTermName)}
..${readerBody[T](clazzName, innerColumnFuncTermName)}
}
private final lazy val $scalableInstanceTermName = $annoClassName
private final lazy val $readerInstanceTermName = $annoClassName
"""
}
// scalafmt: { maxColumn = 400 }
private def deriveScalableImpl[T: WeakTypeTag](clazzName: TypeName, line: Expr[String], format: c.Expr[CsvFormat]): Expr[Option[T]] = {
val annoClassName = TermName(scalableImplClassNamePrefix + MacroCache.getIdentityId)
// NOTE: preTrees must be at the same level as Scalable
private def deriveReaderImpl[T: WeakTypeTag](clazzName: TypeName, line: Expr[String], format: c.Expr[CsvFormat]): Expr[Option[T]] = {
val annoClassName = TermName(readerImplClassNamePrefix + MacroCache.getIdentityId)
// NOTE: preTrees must be at the same level as Reader
val tree =
q"""
..$getPreTree
object $annoClassName extends $packageName.Scalable[$clazzName] {
object $annoClassName extends $packageName.Reader[$clazzName] {
final lazy private val $innerColumnFuncTermName = () => $packageName.StringUtils.splitColumns($line, $format)
..${scalableBody[T](clazzName, innerColumnFuncTermName)}
..${readerBody[T](clazzName, innerColumnFuncTermName)}
}
$annoClassName.transform($line)
"""
......@@ -159,7 +159,7 @@ class ScalableBuilderMacro(override val c: whitebox.Context) extends AbstractMac
}
// scalafmt: { maxColumn = 400 }
private def scalableBody[T: WeakTypeTag](clazzName: TypeName, innerFuncTermName: TermName): Tree = {
private def readerBody[T: WeakTypeTag](clazzName: TypeName, innerFuncTermName: TermName): Tree = {
val customTrees = MacroCache.builderFunctionTrees.getOrElse(getBuilderId(annoBuilderPrefix), mutable.Map.empty)
val params = getCaseClassFieldInfoList[T]()
val fieldNames = params.map(_.fieldName)
......@@ -175,25 +175,25 @@ class ScalableBuilderMacro(override val c: whitebox.Context) extends AbstractMac
case Nil if !customTrees.contains(fieldNames(idx)) =>
fieldType match {
case t if t =:= typeOf[Int] =>
q"$packageName.Scalable[$fieldTypeName].transform($columnValues).getOrElse(${fieldTreeInformation.zeroValue})"
q"$packageName.Reader[$fieldTypeName].transform($columnValues).getOrElse(${fieldTreeInformation.zeroValue})"
case t if t =:= typeOf[String] =>
q"""$packageName.Scalable[$fieldTypeName].transform($columnValues).getOrElse(${fieldTreeInformation.zeroValue})"""
q"""$packageName.Reader[$fieldTypeName].transform($columnValues).getOrElse(${fieldTreeInformation.zeroValue})"""
case t if t =:= typeOf[Float] =>
q"$packageName.Scalable[$fieldTypeName].transform($columnValues).getOrElse[Float](${fieldTreeInformation.zeroValue})"
q"$packageName.Reader[$fieldTypeName].transform($columnValues).getOrElse[Float](${fieldTreeInformation.zeroValue})"
case t if t =:= typeOf[Double] =>
q"$packageName.Scalable[$fieldTypeName].transform($columnValues).getOrElse[Double](${fieldTreeInformation.zeroValue})"
q"$packageName.Reader[$fieldTypeName].transform($columnValues).getOrElse[Double](${fieldTreeInformation.zeroValue})"
case t if t =:= typeOf[Char] =>
q"$packageName.Scalable[$fieldTypeName].transform($columnValues).getOrElse(${fieldTreeInformation.zeroValue})"
q"$packageName.Reader[$fieldTypeName].transform($columnValues).getOrElse(${fieldTreeInformation.zeroValue})"
case t if t =:= typeOf[Byte] =>
q"$packageName.Scalable[$fieldTypeName].transform($columnValues).getOrElse(${fieldTreeInformation.zeroValue})"
q"$packageName.Reader[$fieldTypeName].transform($columnValues).getOrElse(${fieldTreeInformation.zeroValue})"
case t if t =:= typeOf[Short] =>
q"$packageName.Scalable[$fieldTypeName].transform($columnValues).getOrElse(${fieldTreeInformation.zeroValue})"
q"$packageName.Reader[$fieldTypeName].transform($columnValues).getOrElse(${fieldTreeInformation.zeroValue})"
case t if t =:= typeOf[Boolean] =>
q"$packageName.Scalable[$fieldTypeName].transform($columnValues).getOrElse(${fieldTreeInformation.zeroValue})"
q"$packageName.Reader[$fieldTypeName].transform($columnValues).getOrElse(${fieldTreeInformation.zeroValue})"
case t if t =:= typeOf[Long] =>
q"$packageName.Scalable[$fieldTypeName].transform($columnValues).getOrElse(${fieldTreeInformation.zeroValue})"
q"$packageName.Reader[$fieldTypeName].transform($columnValues).getOrElse(${fieldTreeInformation.zeroValue})"
case _ =>
tryOptionGetOrElse(q"$packageName.Scalable[$fieldTypeName].transform($columnValues)", fieldTreeInformation.zeroValue)
tryOptionGetOrElse(q"$packageName.Reader[$fieldTypeName].transform($columnValues)", fieldTreeInformation.zeroValue)
}
case generic :: Nil if customTrees.contains(fieldNames(idx)) && fieldTreeInformation.collectionsFlags.isList =>
tryGetOrElse(q"${customFunction()}.asInstanceOf[_root_.scala.List[$generic]]", fieldTreeInformation.zeroValue)
......@@ -206,7 +206,7 @@ class ScalableBuilderMacro(override val c: whitebox.Context) extends AbstractMac
case generic :: Nil if customTrees.contains(fieldNames(idx)) && fieldTreeInformation.collectionsFlags.isSeq =>
tryGetOrElse(q"${customFunction()}.asInstanceOf[_root_.scala.Seq[$generic]]", fieldTreeInformation.zeroValue)
case generic :: Nil if fieldTreeInformation.collectionsFlags.isOption =>
tryOption(q"$packageName.Scalable[$generic].transform($columnValues)")
tryOption(q"$packageName.Reader[$generic].transform($columnValues)")
case generic =>
c.abort(
c.enclosingPosition,
......
......@@ -23,7 +23,7 @@ package org.bitlap.csv.internal
import org.bitlap.common.MacroCache
import org.bitlap.common.internal.AbstractMacroProcessor
import org.bitlap.csv.{ CsvFormat, CsvableBuilder }
import org.bitlap.csv.{ CsvFormat, WriterBuilder }
import java.io.File
import scala.collection.mutable
......@@ -33,52 +33,52 @@ import scala.reflect.macros.whitebox
* 梦境迷离
* @version 1.0,2022/4/29
*/
class CsvableBuilderMacro(override val c: whitebox.Context) extends AbstractMacroProcessor(c) {
class WriterBuilderMacro(override val c: whitebox.Context) extends AbstractMacroProcessor(c) {
import c.universe._
protected val packageName = q"_root_.org.bitlap.csv"
private val annoBuilderPrefix = "_AnonCsvableBuilder$"
private val annoBuilderPrefix = "_AnonWriterBuilder$"
private val builderFunctionPrefix = "_CsvableBuilderFunction$"
private val builderFunctionPrefix = "_WriterBuilderFunction$"
private val innerTName = q"_t"
private val innerTmpTermName = TermName("_tt")
private val csvableInstanceTermName = TermName("_csvableInstance")
private val csvableImplClassNamePrefix = "_CsvAnno$"
private val funcArgsTempTermName = TermName("temp")
private val innerTName = q"_t"
private val innerTmpTermName = TermName("_tt")
private val writerInstanceTermName = TermName("_WriterInstance")
private val writerImplClassNamePrefix = "_CSVAnno$"
private val funcArgsTempTermName = TermName("temp")
// scalafmt: { maxColumn = 400 }
def setFieldImpl[T, SF](scalaField: Expr[T => SF], value: Expr[SF => String]): Expr[CsvableBuilder[T]] = {
def setFieldImpl[T, SF](scalaField: Expr[T => SF], value: Expr[SF => String]): Expr[WriterBuilder[T]] = {
val Function(_, Select(_, termName)) = scalaField.tree
val builderId = getBuilderId(annoBuilderPrefix)
MacroCache.builderFunctionTrees.getOrElseUpdate(builderId, mutable.Map.empty).update(termName.toString, value)
val tree = q"new ${c.prefix.actualType}"
exprPrintTree[CsvableBuilder[T]](force = false, tree)
exprPrintTree[WriterBuilder[T]](force = false, tree)
}
def applyImpl[T: WeakTypeTag]: Expr[CsvableBuilder[T]] =
def applyImpl[T: WeakTypeTag]: Expr[WriterBuilder[T]] =
deriveBuilderApplyImpl[T]
def convertOneImpl[T: WeakTypeTag](t: Expr[T])(format: c.Expr[CsvFormat]): Expr[String] =
deriveCsvableImpl[T](t, format)
deriveWriterImpl[T](t, format)
def convertAllImpl[T: WeakTypeTag](ts: Expr[List[T]])(format: c.Expr[CsvFormat]): Expr[String] =
deriveFullCsvableImpl[T](ts, format)
deriveFullWriterImpl[T](ts, format)
def convertToFileImpl[T: WeakTypeTag](ts: Expr[List[T]], file: Expr[File])(format: c.Expr[CsvFormat]): Expr[Boolean] =
deriveFullIntoFileCsvableImpl[T](ts, file, format)
deriveFullIntoFileWriterImpl[T](ts, file, format)
private def deriveBuilderApplyImpl[T: WeakTypeTag]: Expr[CsvableBuilder[T]] = {
private def deriveBuilderApplyImpl[T: WeakTypeTag]: Expr[WriterBuilder[T]] = {
val className = TypeName(annoBuilderPrefix + MacroCache.getBuilderId)
val caseClazzName = TypeName(weakTypeOf[T].typeSymbol.name.decodedName.toString)
val tree =
q"""
class $className extends $packageName.CsvableBuilder[$caseClazzName]
class $className extends $packageName.WriterBuilder[$caseClazzName]
new $className
"""
exprPrintTree[CsvableBuilder[T]](force = false, tree)
exprPrintTree[WriterBuilder[T]](force = false, tree)
}
private def getCustomPreTress: (mutable.Map[String, Any], Iterable[Tree]) = {
......@@ -94,7 +94,7 @@ class CsvableBuilderMacro(override val c: whitebox.Context) extends AbstractMacr
}
// scalafmt: { maxColumn = 400 }
private def deriveFullIntoFileCsvableImpl[T: WeakTypeTag](ts: Expr[List[T]], file: Expr[File], format: c.Expr[CsvFormat]): Expr[Boolean] = {
private def deriveFullIntoFileWriterImpl[T: WeakTypeTag](ts: Expr[List[T]], file: Expr[File], format: c.Expr[CsvFormat]): Expr[Boolean] = {
val clazzName = resolveClassTypeName[T]
val (customTrees, preTrees) = getCustomPreTress
val tree =
......@@ -102,8 +102,8 @@ class CsvableBuilderMacro(override val c: whitebox.Context) extends AbstractMacr
..$preTrees
..${getAnnoClassObject[T](customTrees, format)}
$packageName.FileUtils.writer($file, $ts.map { ($innerTName: $clazzName) =>
$csvableInstanceTermName.$innerTmpTermName = $innerTName
$csvableInstanceTermName.transform($innerTName)
$writerInstanceTermName.$innerTmpTermName = $innerTName
$writerInstanceTermName.transform($innerTName)
}, $format
)
"""
......@@ -111,7 +111,7 @@ class CsvableBuilderMacro(override val c: whitebox.Context) extends AbstractMacr
}
// scalafmt: { maxColumn = 400 }
private def deriveFullCsvableImpl[T: WeakTypeTag](ts: Expr[List[T]], format: c.Expr[CsvFormat]): Expr[String] = {
private def deriveFullWriterImpl[T: WeakTypeTag](ts: Expr[List[T]], format: c.Expr[CsvFormat]): Expr[String] = {
val clazzName = resolveClassTypeName[T]
val (customTrees, preTrees) = getCustomPreTress
val tree =
......@@ -119,8 +119,8 @@ class CsvableBuilderMacro(override val c: whitebox.Context) extends AbstractMacr
..$preTrees
..${getAnnoClassObject[T](customTrees, format)}
lazy val lines = $ts.map { ($innerTName: $clazzName) =>
$csvableInstanceTermName.$innerTmpTermName = $innerTName
$csvableInstanceTermName.transform($innerTName)
$writerInstanceTermName.$innerTmpTermName = $innerTName
$writerInstanceTermName.transform($innerTName)
}
$packageName.StringUtils.combineRows(lines, $format)
"""
......@@ -129,9 +129,9 @@ class CsvableBuilderMacro(override val c: whitebox.Context) extends AbstractMacr
private def getAnnoClassObject[T: WeakTypeTag](customTrees: mutable.Map[String, Any], format: c.Expr[CsvFormat]): Tree = {
val clazzName = resolveClassTypeName[T]
val annoClassName = TermName(csvableImplClassNamePrefix + MacroCache.getIdentityId)
val annoClassName = TermName(writerImplClassNamePrefix + MacroCache.getIdentityId)
q"""
object $annoClassName extends $packageName.Csvable[$clazzName] {
object $annoClassName extends $packageName.Writer[$clazzName] {
var $innerTmpTermName: $clazzName = _
lazy private val _toCsv = ($funcArgsTempTermName: $clazzName) => {
......@@ -142,18 +142,18 @@ class CsvableBuilderMacro(override val c: whitebox.Context) extends AbstractMacr
override def transform(t: $clazzName): String = _toCsv($annoClassName.$innerTmpTermName)
}
final lazy private val $csvableInstanceTermName = $annoClassName
final lazy private val $writerInstanceTermName = $annoClassName
"""
}
private def deriveCsvableImpl[T: WeakTypeTag](t: Expr[T], format: c.Expr[CsvFormat]): Expr[String] = {
private def deriveWriterImpl[T: WeakTypeTag](t: Expr[T], format: c.Expr[CsvFormat]): Expr[String] = {
val clazzName = resolveClassTypeName[T]
val (customTrees, preTrees) = getCustomPreTress
val annoClassName = TermName(csvableImplClassNamePrefix + MacroCache.getIdentityId)
val annoClassName = TermName(writerImplClassNamePrefix + MacroCache.getIdentityId)
val tree =
q"""
..$preTrees
object $annoClassName extends $packageName.Csvable[$clazzName] {
object $annoClassName extends $packageName.Writer[$clazzName] {
final private val $innerTmpTermName = $t
override def transform(t: $clazzName): String = {
......@@ -210,7 +210,7 @@ class CsvableBuilderMacro(override val c: whitebox.Context) extends AbstractMacr
case t if t <:< typeOf[Option[_]] && !customTrees.contains(fieldNames(indexType._1)) =>
val genericType = c.typecheck(q"${indexType._2}", c.TYPEmode).tpe.dealias.typeArgs.head
q"""
$packageName.Csvable[$genericType].transform {
$packageName.Writer[$genericType].transform {
if ($innerVarTermName.${indexByName(indexType._1)}.isEmpty) ""
else $innerVarTermName.${indexByName(indexType._1)}.get
}
......@@ -218,7 +218,7 @@ class CsvableBuilderMacro(override val c: whitebox.Context) extends AbstractMacr
case _ if customTrees.contains(fieldNames(indexType._1)) =>
customFunction()
case _ =>
q"$packageName.Csvable[${indexType._2}].transform($innerVarTermName.${indexByName(indexType._1)})"
q"$packageName.Writer[${indexType._2}].transform($innerVarTermName.${indexByName(indexType._1)})"
}
}
}
......
......@@ -29,9 +29,9 @@ import org.scalatest.matchers.should.Matchers
* 梦境迷离
* @version 1.0,2022/4/29
*/
class CsvConverterTest extends AnyFlatSpec with Matchers {
class ConverterTest extends AnyFlatSpec with Matchers {
"CsvConverter1" should "ok" in {
"Converter1" should "ok" in {
val line = "abc,cdf,d,12,2,false,0.1,0.23333"
val dimension = Converter[Dimension].toScala(line)
assert(dimension.toString == "Some(Dimension(abc,Some(cdf),d,12,2,false,0.1,0.23333))")
......@@ -40,7 +40,7 @@ class CsvConverterTest extends AnyFlatSpec with Matchers {
assert(csv == line)
}
"CsvConverter2" should "ok when csv column empty" in {
"Converter2" should "ok when csv column empty" in {
val line =
"abc,,d,12,2,false,0.1,0.23333"
val dimension = Converter[Dimension].toScala(line)
......@@ -52,7 +52,7 @@ class CsvConverterTest extends AnyFlatSpec with Matchers {
}
"CsvConverter3" should "failed when case class currying" in {
"Converter3" should "failed when case class currying" in {
"""
| case class Dimension(key: String, value: Option[String], d: Char, c: Long, e: Short, f: Boolean, g: Float)(h: Double)
| object Dimension {
......@@ -64,7 +64,7 @@ class CsvConverterTest extends AnyFlatSpec with Matchers {
|""".stripMargin shouldNot compile
}
"CsvConverter4" should "ok when using list" in {
"Converter4" should "ok when using list" in {
val line =
"""1,cdf,d,12,2,false,0.1,0.2
|2,cdf,d,12,2,false,0.1,0.1""".stripMargin
......@@ -78,7 +78,7 @@ class CsvConverterTest extends AnyFlatSpec with Matchers {
}
"CsvConverter5" should "ok when input empty" in {
"Converter5" should "ok when input empty" in {
val empty1 = Converter[List[Dimension]].toCsvString(Nil)
println(empty1)
assert(empty1 == "")
......@@ -88,14 +88,14 @@ class CsvConverterTest extends AnyFlatSpec with Matchers {
assert(empty2 == "")
}
"CsvConverter6" should "ok when using json value" in {
"Converter6" should "ok when using json value" in {
val line = """abc,"{""a"":""b"",""c"":""d""}",d,12,2,false,0.1,0.23333"""
val dimension = Converter[Dimension].toScala(line)
println(dimension)
assert(dimension.toString == "Some(Dimension(abc,Some({\"a\":\"b\",\"c\":\"d\"}),d,12,2,false,0.1,0.23333))")
}
"CsvConverter7" should "get None when error" in {
"Converter7" should "get None when error" in {
    // xxx should be a Boolean; parsing fails, so the zero value false is returned
val line = """abc,"{""a"":""b"",""c"":""d""}",d,12,2,xxx,0.1,0.23333"""
val dimension = Converter[Dimension].toScala(line)
......
......@@ -21,7 +21,7 @@
package org.bitlap.csv.test
import org.bitlap.csv.{ CsvableBuilder, DefaultCsvFormat, ScalableBuilder }
import org.bitlap.csv.{ DefaultCsvFormat, ReaderBuilder, WriterBuilder }
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
......@@ -29,27 +29,27 @@ import org.scalatest.matchers.should.Matchers
* 梦境迷离
* @version 1.0,2022/4/29
*/
class CustomConverterBuilderTest extends AnyFlatSpec with Matchers {
class CustomBuilderTest extends AnyFlatSpec with Matchers {
"CustomConverterBuilder1" should "ok" in {
"CustomBuilderTest1" should "ok" in {
val line = "abc,cdf,d,12,2,false,0.1,0.23333"
val dimension = ScalableBuilder[Dimension2].convert(line)
val dimension = ReaderBuilder[Dimension2].convert(line)
assert(dimension.toString == "Some(Dimension2(abc,Some(cdf),d,12,2,false,0.1,0.23333))")
val csv = CsvableBuilder[Dimension2].convert(dimension.get)
val csv = WriterBuilder[Dimension2].convert(dimension.get)
println(csv)
assert(csv == line)
}
"CustomConverterBuilder2" should "ok when using json value" in {
"CustomBuilderTest2" should "ok when using json value" in {
val line = """abc,"{""a"":""b"",""c"":""d""}",d,12,2,false,0.1,0.23333"""
val dimension1 = ScalableBuilder[Dimension2]
val dimension1 = ReaderBuilder[Dimension2]
.setField(_.c, _ => 12L)
.convert(line)
println(dimension1)
assert(dimension1.toString == "Some(Dimension2(abc,Some({\"a\":\"b\",\"c\":\"d\"}),d,12,2,false,0.1,0.23333))")
val csv = CsvableBuilder[Dimension2]
val csv = WriterBuilder[Dimension2]
.setField[Char](_.d, _ => "????????")
.setField[Option[String]](_.value, js => s"""\"${js.get.replace("\"", "\"\"")}\"""")
.convert(dimension1.get)
......@@ -58,111 +58,111 @@ class CustomConverterBuilderTest extends AnyFlatSpec with Matchers {
assert(csv == "abc,\"{\"\"a\"\":\"\"b\"\",\"\"c\"\":\"\"d\"\"}\",????????,12,2,false,0.1,0.23333")
}
"CustomConverterBuilder3" should "ok when using json value" in {
"CustomBuilderTest3" should "ok when using json value" in {
val line = """abc,"{""a"":""b"",""c"":""d""}",d,12,2,false,0.1,0.23333"""
val d = ScalableBuilder[Dimension2]
val d = ReaderBuilder[Dimension2]
.setField(_.value, _ => None)
.convert(line)
assert(d.toString == "Some(Dimension2(abc,None,d,12,2,false,0.1,0.23333))")
val d2 = ScalableBuilder[Dimension2]
val d2 = ReaderBuilder[Dimension2]
.setField(_.value, _ => None)
.convert("""abc,"{""a"":""b"",""c"":""d""}",d,12,2,false,0.1,0.23333""")
assert(d2.toString == "Some(Dimension2(abc,None,d,12,2,false,0.1,0.23333))")
val e = ScalableBuilder[Dimension2]
val e = ReaderBuilder[Dimension2]
.convert(line)
println(e)
assert(e.toString == "Some(Dimension2(abc,Some({\"a\":\"b\",\"c\":\"d\"}),d,12,2,false,0.1,0.23333))")
}
"CustomConverterBuilder4" should "ok when using toCsvString" in {
"CustomBuilderTest4" should "ok when using toCsvString" in {
val e = Dimension2("1", Some("hello"), 'c', 1L, 1, false, 0.1f, 0.2)
val dimension1 = CsvableBuilder[Dimension2]
val dimension1 = WriterBuilder[Dimension2]
.convert(e)
assert(dimension1 == "1,hello,c,1,1,false,0.1,0.2")
val dimension2 = CsvableBuilder[Dimension2]
val dimension2 = WriterBuilder[Dimension2]
.setField[Option[String]](_.value, _ => "hello world")
.convert(e)(new DefaultCsvFormat {
override val delimiter: Char = '*'
})
assert(dimension2 == "1*hello world*c*1*1*false*0.1*0.2")
val dimension3 = CsvableBuilder[Dimension2]
val dimension3 = WriterBuilder[Dimension2]
.setField[Option[String]](_.value, _ => "hello world")
.convert(Dimension2("1", Some("hello"), 'c', 1L, 1, false, 0.1f, 0.2))
assert(dimension3 == "1,hello world,c,1,1,false,0.1,0.2")
}
"CustomConverterBuilder5" should "ok when using list" in {
"CustomBuilderTest5" should "ok when using list" in {
val es = List(
Dimension2("1", Some("hello"), 'c', 1L, 1, true, 0.1f, 0.2),
Dimension2("2", Some("hello bitlap"), 'c', 1L, 1, false, 0.1f, 0.2)
)
val dimension1 = es.map(e => CsvableBuilder[Dimension2].convert(e))
val dimension1 = es.map(e => WriterBuilder[Dimension2].convert(e))
assert(dimension1 == List("1,hello,c,1,1,true,0.1,0.2", "2,hello bitlap,c,1,1,false,0.1,0.2"))
val csv = List("1,hello,c,1,1,true,0.1,0.2", "2,hello bitlap,c,1,1,false,0.1,0.2")
val scala = csv.map(f => ScalableBuilder[Dimension2].convert(f))
val scala = csv.map(f => ReaderBuilder[Dimension2].convert(f))
assert(
scala.toString() == "List(Some(Dimension2(1,Some(hello),c,1,1,true,0.1,0.2)), Some(Dimension2(2,Some(hello bitlap),c,1,1,false,0.1,0.2)))"
)
}
"CustomConverterBuilder6" should "fail when find List or Seq but without using setFiled" in {
"CustomBuilderTest6" should "fail when find List or Seq but without using setFiled" in {
"""
|ScalableBuilder[Metric2].convert(csv)
|ReaderBuilder[Metric2].convert(csv)
|""".stripMargin shouldNot compile
"""
|CsvableBuilder[Metric2].convert(metric)
|WriterBuilder[Metric2].convert(metric)
|""".stripMargin shouldNot compile
}
"CustomConverterBuilder7" should "fail when find List or Seq but without using setFiled" in {
"CustomBuilderTest7" should "fail when find List or Seq but without using setFiled" in {
"""
|ScalableBuilder[Metric2].convert(csv)
|ReaderBuilder[Metric2].convert(csv)
|""".stripMargin shouldNot compile
"""
|CsvableBuilder[Metric2].convert(metric2)
|WriterBuilder[Metric2].convert(metric2)
|""".stripMargin shouldNot compile
}
"CustomConverterBuilder8" should "ok when not pass columnSeparator" in {
"CustomBuilderTest8" should "ok when not pass columnSeparator" in {
val e = Dimension2("1", Some("hello"), 'c', 1L, 1, false, 0.1f, 0.2)
val csv = CsvableBuilder[Dimension2].convert(e)
val csv = WriterBuilder[Dimension2].convert(e)
println(csv)
assert(csv == "1,hello,c,1,1,false,0.1,0.2")
val scala = ScalableBuilder[Dimension2].convert(csv)
val scala = ReaderBuilder[Dimension2].convert(csv)
println(scala)
assert(scala.get == e)
}
"CustomConverterBuilder9" should "fail if case class has currying" in {
"CustomBuilderTest9" should "fail if case class has currying" in {
"""
|case class Test(i:Int)(j:String)
| val t = Test(1)("hello")
| CsvableBuilder[Test].convert(t)
| WriterBuilder[Test].convert(t)
|""".stripMargin shouldNot compile
}
"CustomConverterBuilder10" should "get None when error" in {
"CustomBuilderTest10" should "get None when error" in {
val e = Dimension2("1", Some("hello"), 'c', 1L, 1, false, 0.1f, 0.0)
    // aaa should be a Double; parsing fails, so the zero value 0.0D is returned
val csv = "1,hello,c,1,1,false,0.1,aaa"
val scala = ScalableBuilder[Dimension2].convert(csv)
val scala = ReaderBuilder[Dimension2].convert(csv)
println(scala)
assert(scala.get == e)
val scala2 = ScalableBuilder[Dimension2].setField(_.h, _ => throw new Exception).convert(csv)
val scala2 = ReaderBuilder[Dimension2].setField(_.h, _ => throw new Exception).convert(csv)
assert(scala2.get == e)
val scala3 = ScalableBuilder[Dimension2].setField(_.value, _ => throw new Exception).convert(csv)
val scala3 = ReaderBuilder[Dimension2].setField(_.value, _ => throw new Exception).convert(csv)
assert(scala3.get == Dimension2("1", None, 'c', 1L, 1, false, 0.1f, 0.0))
}
}
......@@ -21,7 +21,7 @@
package org.bitlap.csv.test
import org.bitlap.csv.{ CsvableBuilder, ScalableBuilder, TsvFormat }
import org.bitlap.csv.{ ReaderBuilder, TsvFormat, WriterBuilder }
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
......@@ -31,9 +31,9 @@ import java.io.File
* 梦境迷离
* @version 1.0,6/4/22
*/
class ScalableTsvTest extends AnyFlatSpec with Matchers {
class ReaderTsvTest extends AnyFlatSpec with Matchers {
"ScalableTsvTest1" should "ok when file is tsv" in {
"ReaderTsvTest1" should "ok when file is tsv" in {
implicit val format = new TsvFormat {
override val delimiter: Char = ' '
override val ignoreEmptyLines: Boolean = true
......@@ -41,7 +41,7 @@ class ScalableTsvTest extends AnyFlatSpec with Matchers {
override val prependHeader: List[String] = List("time", "entity", "dimensions", "metricName", "metricValue")
}
val metrics =
ScalableBuilder[Metric3]
ReaderBuilder[Metric3]
.convertFrom(ClassLoader.getSystemResourceAsStream("simple_data_header.tsv"))
println(metrics)
assert(metrics.nonEmpty)
......@@ -51,7 +51,7 @@ class ScalableTsvTest extends AnyFlatSpec with Matchers {
)
val file = new File("./simple_data_header.tsv")
CsvableBuilder[Metric3]
WriterBuilder[Metric3]
      // NOTE: passing an anonymous object to the convertTo method is not supported.
.convertTo(metrics.filter(_.isDefined).map(_.get), file)
file.delete()
......
......@@ -23,8 +23,9 @@ package org.bitlap.csv.test
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import org.bitlap.csv.{ CsvableBuilder, DefaultCsvFormat, ScalableBuilder, StringUtils }
import org.bitlap.csv._
import java.io.File
import org.bitlap.csv.FileUtils
/** Complex use of common tests
*
......@@ -32,7 +33,7 @@ import java.io.File
* 梦境迷离
* @version 1.0,2022/5/1
*/
class CsvableAndScalableTest extends AnyFlatSpec with Matchers {
class WriterAndReaderTest extends AnyFlatSpec with Matchers {
val csvData =
"""100,1,"{""city"":""北京"",""os"":""Mac""}",vv,1
......@@ -52,12 +53,12 @@ class CsvableAndScalableTest extends AnyFlatSpec with Matchers {
|200,3,"{""city"":""北京"",""os"":""Mac""}",vv,1
|200,3,"{""city"":""北京"",""os"":""Mac""}",pv,2""".stripMargin
"CsvableAndScalable1" should "ok" in {
"WriterAndReaderTest1" should "ok" in {
val metrics = csvData
.split("\n")
.toList
.map(csv =>
ScalableBuilder[Metric]
ReaderBuilder[Metric]
.setField(
_.dimensions,
dims => {
......@@ -79,7 +80,7 @@ class CsvableAndScalableTest extends AnyFlatSpec with Matchers {
assert(metrics.head.get.dimensions.head.value == "北京")
val csv = metrics.map(metric =>
CsvableBuilder[Metric]
WriterBuilder[Metric]
.setField(
_.dimensions,
(ds: List[Dimension3]) =>
......@@ -94,12 +95,12 @@ class CsvableAndScalableTest extends AnyFlatSpec with Matchers {
)
}
"CsvableAndScalable2" should "ok" in {
"WriterAndReaderTest2" should "ok" in {
val metrics = csvData
.split("\n")
.toList
.map(csv =>
ScalableBuilder[Metric2]
ReaderBuilder[Metric2]
.setField[Seq[Dimension3]](
_.dimensions,
dims => {
......@@ -121,11 +122,11 @@ class CsvableAndScalableTest extends AnyFlatSpec with Matchers {
assert(metrics.head.get.dimensions.head.value == "北京")
}
"CsvableAndScalable3" should "ok when using StringUtils" in {
"WriterAndReaderTest3" should "ok when using StringUtils" in {
val metrics = csvData
.split("\n")
.map(csv =>
ScalableBuilder[Metric2]
ReaderBuilder[Metric2]
.setField[Seq[Dimension3]](
_.dimensions,
dims => StringUtils.extractJsonValues[Dimension3](dims)((k, v) => Dimension3(k, v))
......@@ -139,10 +140,9 @@ class CsvableAndScalableTest extends AnyFlatSpec with Matchers {
assert(metrics.head.get.dimensions.head.value == "北京")
}
"CsvableAndScalable4" should "ok when reading from file" in {
import org.bitlap.csv.ScalableHelper
val metrics = ScalableHelper.readCsvFromClassPath[Metric2]("simple_data.csv") { line =>
ScalableBuilder[Metric2]
"WriterAndReaderTest4" should "ok when reading from file" in {
val metrics = FileUtils.readCsvFromClassPath[Metric2]("simple_data.csv") { line =>
ReaderBuilder[Metric2]
.setField[Seq[Dimension3]](
_.dimensions,
dims => StringUtils.extractJsonValues[Dimension3](dims)((k, v) => Dimension3(k, v))
......@@ -155,14 +155,14 @@ class CsvableAndScalableTest extends AnyFlatSpec with Matchers {
assert(metrics.head.get.dimensions.head.value == "北京")
}
"CsvableAndScalable5" should "ok when using convert method" in {
"WriterAndReaderTest5" should "ok when using convert method" in {
val csvLines = csvData.split("\n").toList
val metrics = ScalableBuilder[Metric3].convert(csvLines)
val metrics = ReaderBuilder[Metric3].convert(csvLines)
    // no custom function is needed here to convert `Metric3#dimension`
val csv = CsvableBuilder[Metric3].convert(metrics.filter(_.isDefined).map(_.get))
val csv = WriterBuilder[Metric3].convert(metrics.filter(_.isDefined).map(_.get))
println(metrics)
println(csv)
......@@ -170,8 +170,8 @@ class CsvableAndScalableTest extends AnyFlatSpec with Matchers {
assert(csvData.replace("\"", "") == csv.replace("\"", ""))
}
"CsvableAndScalable6" should "ok when using convert and StringUtils" in {
val metrics = ScalableBuilder[Metric2]
"WriterAndReaderTest6" should "ok when using convert and StringUtils" in {
val metrics = ReaderBuilder[Metric2]
.setField[Seq[Dimension3]](
_.dimensions,
dims => StringUtils.extractJsonValues[Dimension3](dims)((k, v) => Dimension3(k, v))
......@@ -183,7 +183,7 @@ class CsvableAndScalableTest extends AnyFlatSpec with Matchers {
assert(metrics.head.get.dimensions.head.key == "city")
assert(metrics.head.get.dimensions.head.value == "北京")
val csv = CsvableBuilder[Metric2]
val csv = WriterBuilder[Metric2]
.setField(
_.dimensions,
(ds: Seq[Dimension3]) =>
......@@ -194,9 +194,9 @@ class CsvableAndScalableTest extends AnyFlatSpec with Matchers {
println(csv)
}
"CsvableAndScalable8" should "ok when reading from file" in {
"WriterAndReaderTest7" should "ok when reading from file" in {
val metrics =
ScalableBuilder[Metric2]
ReaderBuilder[Metric2]
.setField[Seq[Dimension3]](
_.dimensions,
dims => StringUtils.extractJsonValues[Dimension3](dims)((k, v) => Dimension3(k, v))
......@@ -207,7 +207,7 @@ class CsvableAndScalableTest extends AnyFlatSpec with Matchers {
assert(metrics.nonEmpty)
val file = new File("./simple_data.csv")
CsvableBuilder[Metric2]
WriterBuilder[Metric2]
.setField[Seq[Dimension3]](
_.dimensions,
ds => s"""\"{${ds.map(kv => s"""\"\"${kv.key}\"\":\"\"${kv.value}\"\"""").mkString(",")}}\""""
......@@ -217,14 +217,14 @@ class CsvableAndScalableTest extends AnyFlatSpec with Matchers {
file.delete()
}
"CsvableAndScalable9" should "ok when use custom format" in {
"WriterAndReaderTest8" should "ok when use custom format" in {
implicit val format = new DefaultCsvFormat {
override val ignoreEmptyLines: Boolean = true
override val ignoreHeader: Boolean = true
override val prependHeader: List[String] = List("time", "entity", "dimensions", "metricName", "metricValue")
}
val metrics =
ScalableBuilder[Metric2]
ReaderBuilder[Metric2]
.setField[Seq[Dimension3]](
_.dimensions,
dims => StringUtils.extractJsonValues[Dimension3](dims)((k, v) => Dimension3(k, v))
......@@ -235,7 +235,7 @@ class CsvableAndScalableTest extends AnyFlatSpec with Matchers {
assert(metrics.nonEmpty)
val file = new File("./simple_data_header.csv")
CsvableBuilder[Metric2]
WriterBuilder[Metric2]
.setField[Seq[Dimension3]](
_.dimensions,
ds => s"""\"{${ds.map(kv => s"""\"\"${kv.key}\"\":\"\"${kv.value}\"\"""").mkString(",")}}\""""
......@@ -244,20 +244,20 @@ class CsvableAndScalableTest extends AnyFlatSpec with Matchers {
file.delete()
}
"CsvableAndScalable10" should "failure if not setField" in {
"WriterAndReaderTest9" should "failure if not setField" in {
"""
|val metrics = ScalableBuilder[Metric].convert(csvData.split("\n").toList)
|val csv = CsvableBuilder[Metric].convert(metrics.filter(_.isDefined).map(_.get))
|val metrics = ReaderBuilder[Metric].convert(csvData.split("\n").toList)
|val csv = WriterBuilder[Metric].convert(metrics.filter(_.isDefined).map(_.get))
|
|val metrics2 = ScalableBuilder[Metric2].convert(csvData.split("\n").toList)
|val csv2 = CsvableBuilder[Metric2].convert(metrics2.filter(_.isDefined).map(_.get))
|val metrics2 = ReaderBuilder[Metric2].convert(csvData.split("\n").toList)
|val csv2 = WriterBuilder[Metric2].convert(metrics2.filter(_.isDefined).map(_.get))
|
|
|val metrics3 = ScalableBuilder[Metric4].convert(csvData.split("\n").toList)
|val csv3 = CsvableBuilder[Metric4].convert(metrics3.filter(_.isDefined).map(_.get))
|val metrics3 = ReaderBuilder[Metric4].convert(csvData.split("\n").toList)
|val csv3 = WriterBuilder[Metric4].convert(metrics3.filter(_.isDefined).map(_.get))
|
|val metrics4 = ScalableBuilder[Metric5].convert(csvData.split("\n").toList)
|val csv4 = CsvableBuilder[Metric5].convert(metrics4.filter(_.isDefined).map(_.get))
|val metrics4 = ReaderBuilder[Metric5].convert(csvData.split("\n").toList)
|val csv4 = WriterBuilder[Metric5].convert(metrics4.filter(_.isDefined).map(_.get))
|""".stripMargin shouldNot compile
}
......
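For readers following the renames above, here is a minimal round-trip sketch of the new `ReaderBuilder` / `WriterBuilder` API as exercised by the tests; the `Dimension2` case class below is only a stand-in for the test fixture, and its exact field names are an assumption:

```scala
import org.bitlap.csv._

// Hypothetical mirror of the Dimension2 fixture used in the tests above;
// field names other than value, d, c and h are inferred and may differ.
final case class Dimension2(
  key: String,
  value: Option[String],
  d: Char,
  c: Long,
  e: Short,
  f: Boolean,
  g: Float,
  h: Double
)

object RoundTripSketch extends App {
  val line = "abc,cdf,d,12,2,false,0.1,0.23333"

  // CSV line -> case class: the macro-derived reader yields None on failure.
  val dimension: Option[Dimension2] = ReaderBuilder[Dimension2].convert(line)
  println(dimension) // Some(Dimension2(abc,Some(cdf),d,12,2,false,0.1,0.23333))

  // case class -> CSV line, using the library's default CsvFormat.
  val csv: String = dimension.map(d => WriterBuilder[Dimension2].convert(d)).getOrElse("")
  assert(csv == line)
}
```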