Unverified commit 992ae863 authored by 梦境迷离, committed by GitHub

fix and support Transformer (#202)

* fix and support Transformer
* refactor match case
* fix and update `@elapsed` log
Parent 2df4ffb2
......@@ -42,6 +42,11 @@ Learn Scala macro and abstract syntax tree.
## common
- Some very general tool classes.
- `Transformer` Transforms an object of case class `From` into an object of case class `To`.
- `Transformable` Automatically generates instances of `Transformer`.
- Two ways to map fields (see the usage sketch below):
- 1. Use `Transformer` and define a `Transformer` implicit value in the companion object of the case class.
- 2. Use `Transformable`'s `mapField` method directly.
```scala
"org.bitlap" %% "smt-common" % "<VERSION>" // since 0.6.0
```
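A minimal usage sketch of both approaches, adapted from the test cases added in this commit (the object name `TransformerExample` is only for illustration):
```scala
import org.bitlap.common.{ Transformable, Transformer }

object TransformerExample extends App {
  case class A1(a: String, b: Int, cc: Long, d: Option[String])
  case class A2(a: String, b: Int, c: Int, d: Option[String])

  // Way 2: call `mapField` directly to rename `cc` -> `c` and convert Long -> Int.
  val a2: A2 = Transformable[A1, A2]
    .mapField[Long, Int](_.cc, _.c, (cc: Long) => cc.toInt)
    .instance
    .transform(A1("hello", 1, 2, None))

  println(a2) // A2(hello,1,2,None)
}
```
Way 1 instead defines the generated `Transformer[A1, A2]` as an implicit value in the companion object of the case class and summons it with `Transformer[A1, A2].transform(...)`, as shown in the `TransformableTest` cases later in this diff.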
......
......@@ -46,6 +46,13 @@
## common
- Some very general utility classes.
- `Transformer` Transforms an object of case class `From` into an object of case class `To`.
- `Transformable` Automatically generates instances of `Transformer`.
- Two ways to map fields:
- 1. Use `Transformer` and define a `Transformer` implicit value in the companion object of the case class.
- 2. Use `Transformable`'s `mapField` method directly.
```scala
"org.bitlap" %% "smt-common" % "<VERSION>" // 从0.6.0开始
```
......
......@@ -116,6 +116,7 @@ lazy val `smt-csv` = (project in file("smt-csv"))
name := "smt-csv",
crossScalaVersions := List(scala213, scala212, scala211)
)
.dependsOn(`smt-common` % "compile->compile;test->test")
.settings(Publishing.publishSettings)
.settings(paradise())
.enablePlugins(HeaderPlugin)
......
......@@ -300,7 +300,7 @@ abstract class AbstractMacroProcessor(val c: whitebox.Context) {
def existsSuperClassExcludeSdkClass(superClasses: Seq[Tree]): Boolean =
superClasses.nonEmpty && !superClasses.forall(sc => SDKClasses.contains(sc.toString()))
private[macros] case class ValDefAccessor(
private[macros] final case class ValDefAccessor(
mods: Modifiers,
name: TermName,
tpt: Tree,
......@@ -312,6 +312,19 @@ abstract class AbstractMacroProcessor(val c: whitebox.Context) {
def symbol: Symbol = paramType.typeSymbol
def paramType: Type = c.typecheck(tq"$tpt", c.TYPEmode).tpe
def zeroValue: Tree =
paramType match {
case t if t =:= typeOf[Int] => q"0"
case t if t =:= typeOf[Byte] => q"0"
case t if t =:= typeOf[Double] => q"0D"
case t if t =:= typeOf[Float] => q"0F"
case t if t =:= typeOf[Short] => q"0"
case t if t =:= typeOf[Long] => q"0L"
case t if t =:= typeOf[Char] => q"63.toChar" // default char is ?
case t if t =:= typeOf[Boolean] => q"false"
case _ => q"null"
}
}
/** Retrieves the accessor fields on a class and returns a Seq of
......
......@@ -34,7 +34,7 @@ object constructorMacro {
import c.universe._
private val extractArgs: Seq[String] =
private val extractOptions: Seq[String] =
c.prefix.tree match {
case q"new constructor(excludeFields=$excludeFields)" => evalTree(excludeFields.asInstanceOf[Tree])
case q"new constructor($excludeFields)" => evalTree(excludeFields.asInstanceOf[Tree])
......@@ -45,7 +45,7 @@ object constructorMacro {
private def getMutableValDefAndExcludeFields(annotteeClassDefinitions: Seq[Tree]): Seq[c.universe.ValDef] =
getClassMemberValDefs(annotteeClassDefinitions).filter(v =>
v.mods.hasFlag(Flag.MUTABLE) &&
!extractArgs.contains(v.name.decodedName.toString)
!extractOptions.contains(v.name.decodedName.toString)
)
/** Extract the internal fields of the class members that are not in the primary constructor and are declared as `var`.
......
......@@ -53,7 +53,7 @@ object elapsedMacro {
LogLevel.getLogLevel(logLevel.toString())
}
private val extractArgumentsDetail: (Duration, LogLevel) = c.prefix.tree match {
private val extractOptions: (Duration, LogLevel) = c.prefix.tree match {
case q"new elapsed(limit=$limit, logLevel=$logLevel)" =>
(evalTree(limit.asInstanceOf[Tree]), getLogLevel(logLevel.asInstanceOf[Tree]))
case _ => c.abort(c.enclosingPosition, ErrorMessage.UNEXPECTED_PATTERN)
......@@ -62,26 +62,26 @@ object elapsedMacro {
private def getStartExpr: c.universe.Tree =
q"""val $start = _root_.scala.concurrent.duration.Duration.fromNanos(System.nanoTime())"""
private def getLog(methodName: TermName, logBy: Tree): c.universe.Tree = {
private def getLog(classNameAndMethodName: String, logBy: Tree): c.universe.Tree = {
// CI will fail when a lambda is used here.
implicit val durationApply: c.universe.Liftable[Duration] = new Liftable[Duration] {
override def apply(value: Duration): c.universe.Tree = q"${value._1}"
}
q"""
val $valDef = _root_.scala.concurrent.duration.Duration.fromNanos(System.nanoTime()) - $start
if ($valDef._1 >= ${extractArgumentsDetail._1}) $logBy(StringContext("slow invoked method: [", "] elapsed [", " ms]").s(${methodName.toString}, $valDef.toMillis))
if ($valDef._1 >= ${extractOptions._1}) $logBy(StringContext("slow invoked method: [", "] elapsed [", " ms]").s($classNameAndMethodName, $valDef.toMillis))
"""
}
private def getPrintlnLog(methodName: TermName): c.universe.Tree = {
private def getPrintlnLog(classNameAndMethodName: String): c.universe.Tree = {
val log = findValDefInEnclosingClass(TypeName("org.slf4j.Logger"))
if (log.isEmpty) { // if there is no slf4j log, print it to the console
getLog(methodName, q"_root_.scala.Predef.println")
getLog(classNameAndMethodName, q"_root_.scala.Predef.println")
} else {
extractArgumentsDetail._2 match {
case LogLevel.INFO => getLog(methodName, q"${log.get}.info")
case LogLevel.DEBUG => getLog(methodName, q"${log.get}.debug")
case LogLevel.WARN => getLog(methodName, q"${log.get}.warn")
extractOptions._2 match {
case LogLevel.INFO => getLog(classNameAndMethodName, q"${log.get}.info")
case LogLevel.DEBUG => getLog(classNameAndMethodName, q"${log.get}.debug")
case LogLevel.WARN => getLog(classNameAndMethodName, q"${log.get}.warn")
}
}
}
......@@ -104,7 +104,7 @@ object elapsedMacro {
defDef => q"""
$getStartExpr
val resFuture = ${defDef.rhs}
resFuture.onComplete { case _ => ..${getPrintlnLog(defDef.name)} }(_root_.scala.concurrent.ExecutionContext.Implicits.global)
resFuture.onComplete { case _ => ..${getPrintlnLog(getIdentNam(defDef.name))} }(_root_.scala.concurrent.ExecutionContext.Implicits.global)
resFuture
"""
)
......@@ -151,12 +151,18 @@ object elapsedMacro {
// }.toList
// }
private def getIdentNam(method: Name): String =
s"${c.enclosingClass match {
case ClassDef(_, name, _, Template(_, _, _)) => name
case ModuleDef(_, name, Template(_, _, _)) => name
}}#${method.decodedName.toString}"
private def getNewMethod(defDef: DefDef): DefDef =
mapToNewMethod(
defDef,
defDef => q"""
$getStartExpr
${Try(defDef.rhs, Nil, getPrintlnLog(defDef.name))}
${Try(defDef.rhs, Nil, getPrintlnLog(getIdentNam(defDef.name)))}
"""
)
......
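For context on the `@elapsed` change above (the slow-call log is now keyed by `ClassName#methodName` instead of the bare method name), here is a minimal usage sketch; the `org.bitlap.tools` import path and the `UserService` class are assumptions, while `limit` and `logLevel` match the annotation pattern the macro handles:
```scala
import org.bitlap.tools.{ elapsed, LogLevel } // assumed import path for the annotation
import scala.concurrent.duration._

class UserService {
  // If the call takes longer than `limit`, the macro emits:
  //   slow invoked method: [UserService#findUser] elapsed [<n> ms]
  @elapsed(limit = 500.millis, logLevel = LogLevel.WARN)
  def findUser(id: Long): String = {
    Thread.sleep(600) // exceeds the limit, so the elapsed time is logged
    s"user-$id"
  }
}
```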
......@@ -34,7 +34,7 @@ object equalsAndHashCodeMacro {
import c.universe._
private val extractArgs: Seq[String] = c.prefix.tree match {
private val extractOptions: Seq[String] = c.prefix.tree match {
case q"new equalsAndHashCode(excludeFields=$excludeFields)" => evalTree(excludeFields.asInstanceOf[Tree])
case q"new equalsAndHashCode($excludeFields)" => evalTree(excludeFields.asInstanceOf[Tree])
case q"new equalsAndHashCode()" => Nil
......@@ -58,7 +58,7 @@ object equalsAndHashCodeMacro {
getClassMemberValDefs(annotteeClassDefinitions)
.filter(p =>
isNotLocalClassMember(p) &&
!extractArgs.contains(p.name.decodedName.toString)
!extractOptions.contains(p.name.decodedName.toString)
)
.map(_.name.toTermName)
}
......
......@@ -29,7 +29,7 @@ object jacksonEnumMacro {
import c.universe._
private val extractArgs: Seq[String] =
private val extractOptions: Seq[String] =
c.prefix.tree match {
case q"new jacksonEnum(nonTypeRefers=$nonTypeRefers)" => evalTree(nonTypeRefers.asInstanceOf[Tree])
case q"new jacksonEnum($nonTypeRefers)" => evalTree(nonTypeRefers.asInstanceOf[Tree])
......@@ -43,7 +43,7 @@ object jacksonEnumMacro {
safeValDefs
.filter(_.symbol.name.toTermName.toString == "Value")
.map(getTypeTermName)
.filter(v => !extractArgs.contains(v.decodedName.toString))
.filter(v => !extractOptions.contains(v.decodedName.toString))
.distinct
.map(c =>
q"""class ${TypeName(
......@@ -72,7 +72,7 @@ object jacksonEnumMacro {
val mods = safeValDef.mods.mapAnnotations { f =>
if (
!f.toString().contains("JsonScalaEnumeration") &&
!extractArgs.contains(getTypeTermName(safeValDef).decodedName.toString)
!extractOptions.contains(getTypeTermName(safeValDef).decodedName.toString)
) f ++ List(getAnnotation(valDefTree))
else f
}
......
......@@ -58,21 +58,7 @@ object javaCompatibleMacro {
}
)
)
val defaultParameters = acsVals.map(params =>
params.map { param =>
param.paramType match {
case t if t <:< typeOf[Int] => q"0"
case t if t <:< typeOf[Byte] => q"0"
case t if t <:< typeOf[Double] => q"0D"
case t if t <:< typeOf[Float] => q"0F"
case t if t <:< typeOf[Short] => q"0"
case t if t <:< typeOf[Long] => q"0L"
case t if t <:< typeOf[Char] => q"63.toChar" // default char is ?
case t if t <:< typeOf[Boolean] => q"false"
case _ => q"null"
}
}
)
val defaultParameters = acsVals.map(params => params.map(_.zeroValue))
if (annotteeClassParams.isEmpty || annotteeClassParams.size == 1) {
q"""
def this() = {
......
......@@ -38,7 +38,7 @@ object logMacro {
import c.universe._
private val extractArgs: logs.LogType.Value = c.prefix.tree match {
private val extractOptions: logs.LogType.Value = c.prefix.tree match {
case q"new log(logType=$logType)" =>
val tpe = getLogType(logType.asInstanceOf[Tree])
tpe
......@@ -60,11 +60,11 @@ object logMacro {
val buildArg = (name: Name) => LogArgument(name.toTermName.decodedName.toString, isClass = true)
(annottees.map(_.tree) match {
case (classDef: ClassDef) :: Nil =>
LogType.getLogImpl(extractArgs).getTemplate(c)(buildArg(classDef.name))
LogType.getLogImpl(extractOptions).getTemplate(c)(buildArg(classDef.name))
case (moduleDef: ModuleDef) :: Nil =>
LogType.getLogImpl(extractArgs).getTemplate(c)(buildArg(moduleDef.name).copy(isClass = false))
LogType.getLogImpl(extractOptions).getTemplate(c)(buildArg(moduleDef.name).copy(isClass = false))
case (classDef: ClassDef) :: (_: ModuleDef) :: Nil =>
LogType.getLogImpl(extractArgs).getTemplate(c)(buildArg(classDef.name))
LogType.getLogImpl(extractOptions).getTemplate(c)(buildArg(classDef.name))
case _ => c.abort(c.enclosingPosition, ErrorMessage.ONLY_OBJECT_CLASS)
}).asInstanceOf[Tree]
}
......@@ -75,7 +75,7 @@ object logMacro {
if (classDef.mods.hasFlag(Flag.CASE)) {
c.abort(c.enclosingPosition, ErrorMessage.ONLY_OBJECT_CLASS)
}
val newClass = extractArgs match {
val newClass = extractOptions match {
case ScalaLoggingLazy | ScalaLoggingStrict =>
appendImplDefSuper(checkGetClassDef(annottees), _ => List(logTree(annottees)))
case _ =>
......@@ -87,7 +87,7 @@ object logMacro {
$newClass
"""
case (_: ModuleDef) :: _ =>
extractArgs match {
extractOptions match {
case ScalaLoggingLazy | ScalaLoggingStrict =>
appendImplDefSuper(getModuleDefOption(annottees).get, _ => List(logTree(annottees)))
case _ => prependImplDefBody(getModuleDefOption(annottees).get, _ => List(logTree(annottees)))
......
......@@ -36,14 +36,20 @@ object toStringMacro {
import c.universe._
private def extractTree(aa: Tree, bb: Tree, cc: Tree): (Boolean, Boolean, Boolean) =
(
private case class ToStringOptions(
includeInternalFields: Boolean,
includeFieldNames: Boolean,
callSuper: Boolean
)
private def extractTree(aa: Tree, bb: Tree, cc: Tree): ToStringOptions =
ToStringOptions(
evalTree[Boolean](aa),
evalTree[Boolean](bb),
evalTree[Boolean](cc)
)
private val extractArgumentsDetail: (Boolean, Boolean, Boolean) = c.prefix.tree match {
private val extractOptions: ToStringOptions = c.prefix.tree match {
case q"new toString(includeInternalFields=$aa, includeFieldNames=$bb, callSuper=$cc)" =>
extractTree(aa.asInstanceOf[Tree], bb.asInstanceOf[Tree], cc.asInstanceOf[Tree])
case q"new toString(includeInternalFields=$aa, includeFieldNames=$bb)" =>
......@@ -54,27 +60,23 @@ object toStringMacro {
extractTree(q"true", aa.asInstanceOf[Tree], q"false")
case q"new toString(callSuper=$aa)" =>
extractTree(q"true", q"true", aa.asInstanceOf[Tree])
case q"new toString()" => (true, true, false)
case _ => c.abort(c.enclosingPosition, ErrorMessage.UNEXPECTED_PATTERN)
case q"new toString()" =>
ToStringOptions(includeInternalFields = true, includeFieldNames = true, callSuper = false)
case _ => c.abort(c.enclosingPosition, ErrorMessage.UNEXPECTED_PATTERN)
}
override def createCustomExpr(classDecl: c.universe.ClassDef, compDeclOpt: Option[c.universe.ModuleDef]): Any = {
// extract the annotation parameters; they must be in order
val argument = Argument(
extractArgumentsDetail._1,
extractArgumentsDetail._2,
extractArgumentsDetail._3
)
val resTree = appendClassBody(classDecl, _ => List(getToStringTemplate(argument, classDecl)))
val resTree = appendClassBody(classDecl, _ => List(getToStringTemplate(extractOptions, classDecl)))
c.Expr(q"""
${compDeclOpt.fold(EmptyTree)(x => x)}
$resTree
""")
}
private def printField(argument: Argument, lastParam: Option[String], field: Tree): Tree =
private def printField(options: ToStringOptions, lastParam: Option[String], field: Tree): Tree =
// Print one field as <name of the field> + "=" + <value of the field>
if (argument.includeFieldNames) {
if (options.includeFieldNames) {
lastParam.fold(q"$field") { lp =>
field match {
case v: ValDef =>
......@@ -94,7 +96,7 @@ object toStringMacro {
}
}
private def getToStringTemplate(argument: Argument, classDecl: ClassDef): Tree = {
private def getToStringTemplate(options: ToStringOptions, classDecl: ClassDef): Tree = {
// For a given class definition, separate the components of the class
val classDefinition = mapToClassDeclInfo(classDecl)
// Check the type of the class, whether it already contains its own toString
......@@ -109,19 +111,19 @@ object toStringMacro {
})
val ctorParams = classDefinition.classParamss.flatten
val member = if (argument.includeInternalFields) ctorParams ++ annotteeClassFieldDefinitions else ctorParams
val member = if (options.includeInternalFields) ctorParams ++ annotteeClassFieldDefinitions else ctorParams
val lastParam = member.lastOption.map {
case v: ValDef => v.name.toTermName.decodedName.toString
case c => c.toString
}
val paramsWithName = member.foldLeft(q"${""}")((res, acc) => q"$res + ${printField(argument, lastParam, acc)}")
val paramsWithName = member.foldLeft(q"${""}")((res, acc) => q"$res + ${printField(options, lastParam, acc)}")
// scala/bug https://github.com/scala/bug/issues/3967: the standard library toString is not 'Foo(i=1,j=2)'
val toString =
q"""override def toString: String = ${classDefinition.className.toTermName.decodedName.toString} + ${"("} + $paramsWithName + ${")"}"""
// Has a super class?
if (argument.callSuper && classDefinition.superClasses.nonEmpty) {
if (options.callSuper && classDefinition.superClasses.nonEmpty) {
val superClassDef = classDefinition.superClasses.head match {
case tree: Tree => Some(tree) // TODO type check better
case _ => None
......
......@@ -69,7 +69,7 @@ object CacheableMacro {
val newBody =
q"""
val $resultValName = ${defDef.rhs}
val $keyValName = List($getEnclosingClassName, ${name.decodedName.toString})
val $keyValName = _root_.scala.List($getEnclosingClassName, ${name.decodedName.toString})
$importExpr
org.bitlap.cacheable.core.Cache($resultValName)($keyValName, ..${getParamsName(vparamss)})
"""
......
......@@ -19,7 +19,7 @@
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.bitlap.csv.core.macros
package org.bitlap.common
import java.time.ZonedDateTime
import java.time.format.DateTimeFormatter
......@@ -36,43 +36,39 @@ abstract class AbstractMacroProcessor(val c: blackbox.Context) {
import c.universe._
protected val packageName = q"_root_.org.bitlap.csv.core"
private[macros] def tryGetOrElse(tree: Tree, default: Tree): Tree =
final case class FieldZipInformation(fieldNames: List[String], fieldIndexTypeMapping: List[(Int, Type)])
final case class FieldTreeInformation(
index: Int,
fieldTerm: Tree,
fieldType: Type,
zeroValue: Tree,
isSeq: Boolean = false,
isList: Boolean = false,
isOption: Boolean = false,
genericType: Option[Type] = None
)
final case class FieldInformation(
fieldName: String,
fieldType: Type,
isSeq: Boolean = false,
isList: Boolean = false,
isOption: Boolean = false,
genericType: Option[Type] = None
)
def tryGetOrElse(tree: Tree, default: Tree): Tree =
q"_root_.scala.util.Try($tree).getOrElse($default)"
private[macros] def tryOptionGetOrElse(optionTree: Tree, default: Tree): Tree =
def tryOptionGetOrElse(optionTree: Tree, default: Tree): Tree =
q"_root_.scala.util.Try($optionTree.getOrElse($default)).getOrElse($default)"
private[macros] def tryOption(optionTree: Tree): Tree =
q"_root_.scala.util.Try($optionTree).getOrElse(None)"
private[macros] def getDefaultValue(typ: Type): Tree =
typ match {
case t if t =:= typeOf[Int] =>
q"0"
case t if t =:= typeOf[String] =>
val empty = ""
q"$empty"
case t if t =:= typeOf[Float] =>
q"0.asInstanceOf[Float]"
case t if t =:= typeOf[Double] =>
q"0D"
case t if t =:= typeOf[Char] =>
q"'?'"
case t if t =:= typeOf[Byte] =>
q"0"
case t if t =:= typeOf[Short] =>
q"0"
case t if t =:= typeOf[Boolean] =>
q"false"
case t if t =:= typeOf[Long] =>
q"0L"
case _ => q"null"
}
def tryOption(optionTree: Tree): Tree =
q"_root_.scala.util.Try($optionTree).getOrElse(_root_.scala.None)"
/** Get the list of case class constructor parameters and return the column index, column name, and parameter type
* that zip as a `List[((Int, Tree), Type)]`.
* that zip as a `List[FieldTreeInformation]`.
*
* @param columnsFunc
* The function to get CSV row data temporary identifier, also known as a line.
......@@ -80,19 +76,34 @@ abstract class AbstractMacroProcessor(val c: blackbox.Context) {
* Type of the case class.
* @return
*/
private[macros] def checkCaseClassZipAll[T: c.WeakTypeTag](
columnsFunc: TermName
): List[((Int, Tree), Type)] = {
def checkGetFieldTreeInformationList[T: WeakTypeTag](columnsFunc: TermName): List[FieldTreeInformation] = {
val idxColumn = (i: Int) => q"$columnsFunc()($i)"
val params = getCaseClassParams[T]()
val params = getCaseClassFieldInfo[T]()
val paramsSize = params.size
val types = params.map(f => c.typecheck(tq"$f", c.TYPEmode).tpe)
val types = params.map(_.fieldType)
val indexColumns = (0 until paramsSize).toList.map(i => i -> idxColumn(i))
if (indexColumns.size != types.size) {
c.abort(c.enclosingPosition, "The column num of CSV file is different from that in case class constructor!")
}
indexColumns zip types
indexColumns zip types map { kv =>
val (isOption, isSeq, isList) = isWrapType(kv._2)
val typed = c.typecheck(tq"${kv._2}", c.TYPEmode).tpe
var genericType: Option[Type] = None
if (isList || isSeq || isOption) {
genericType = Option(typed.typeArgs.head)
}
FieldTreeInformation(
kv._1._1,
kv._1._2,
kv._2,
getDefaultValue(kv._2),
isSeq,
isList,
isOption,
genericType
)
}
}
/** Get only the symbol of the case class constructor parameters.
......@@ -101,12 +112,27 @@ abstract class AbstractMacroProcessor(val c: blackbox.Context) {
* Type of the case class.
* @return
*/
private[macros] def getCaseClassParams[T: c.WeakTypeTag](): List[Symbol] = {
def getCaseClassFieldInfo[T: WeakTypeTag](): List[FieldInformation] = {
val parameters = resolveParameters[T]
if (parameters.size > 1) {
c.abort(c.enclosingPosition, "The constructor of case class has currying!")
}
parameters.flatten
parameters.flatten.map { p =>
val typed = c.typecheck(tq"$p", c.TYPEmode).tpe
var genericType: Option[Type] = None
val (isOption, isSeq, isList) = isWrapType(typed)
if (isList || isSeq || isOption) {
genericType = Option(typed.typeArgs.head)
}
FieldInformation(
p.name.decodedName.toString,
typed,
isSeq,
isList,
isOption,
genericType
)
}
}
/** Print the expanded code of macro.
......@@ -116,7 +142,7 @@ abstract class AbstractMacroProcessor(val c: blackbox.Context) {
* @tparam T
* @return
*/
def exprPrintTree[T: c.WeakTypeTag](force: Boolean, resTree: c.Tree): c.Expr[T] = {
def exprPrintTree[T: WeakTypeTag](force: Boolean, resTree: Tree): Expr[T] = {
c.info(
c.enclosingPosition,
s"\n###### Time: ${ZonedDateTime.now().format(DateTimeFormatter.ISO_ZONED_DATE_TIME)} Expanded macro start ######\n" + resTree
......@@ -133,7 +159,7 @@ abstract class AbstractMacroProcessor(val c: blackbox.Context) {
* @return
* The parameters may be currying, so it's a two-level list.
*/
private[macros] def resolveParameters[T: c.WeakTypeTag]: List[List[Symbol]] =
def resolveParameters[T: WeakTypeTag]: List[List[Symbol]] =
c.weakTypeOf[T].resultType.member(TermName("<init>")).typeSignature.paramLists
/** Get the `TypeName` of the class.
......@@ -142,21 +168,24 @@ abstract class AbstractMacroProcessor(val c: blackbox.Context) {
* Type of the case class.
* @return
*/
private[macros] def resolveClazzTypeName[T: c.WeakTypeTag]: c.universe.TypeName =
TypeName(c.weakTypeOf[T].typeSymbol.name.decodedName.toString)
def resolveClassTypeName[T: WeakTypeTag]: TypeName =
c.weakTypeOf[T].typeSymbol.name.toTypeName
/** Get the list of case class constructor parameters and return the column index and parameter type that zip as a
* `List[(Int, Type)])`.
* `FieldZipInformation`.
*
* @tparam T
* Type of the case class.
* @return
*/
private[macros] def checkCaseClassZip[T: c.WeakTypeTag]: (List[String], List[(Int, Type)]) = {
val params = getCaseClassParams[T]()
def checkGetFieldZipInformation[T: WeakTypeTag]: FieldZipInformation = {
val params = getCaseClassFieldInfo[T]()
val paramsSize = params.size
val names = params.map(p => p.name.decodedName.toString)
names -> params.zip(0 until paramsSize).map(f => f._2 -> c.typecheck(tq"${f._1}", c.TYPEmode).tpe)
val names = params.map(_.fieldName)
FieldZipInformation(
names,
params.zip(0 until paramsSize).map(f => f._2 -> f._1.fieldType)
)
}
/** Get the builderId of the current class, which is generated by the *Builder.apply macro.
......@@ -164,6 +193,53 @@ abstract class AbstractMacroProcessor(val c: blackbox.Context) {
* @param annoBuilderPrefix
* @return
*/
private[macros] def getBuilderId(annoBuilderPrefix: String): Int =
def getBuilderId(annoBuilderPrefix: String): Int =
c.prefix.actualType.toString.replace(annoBuilderPrefix, "").toInt
private def getDefaultValue(typ: Type): Tree =
typ match {
case t if t =:= typeOf[Int] =>
q"0"
case t if t =:= typeOf[String] =>
val empty = ""
q"$empty"
case t if t =:= typeOf[Float] =>
q"0.asInstanceOf[Float]"
case t if t =:= typeOf[Double] =>
q"0D"
case t if t =:= typeOf[Char] =>
q"'?'"
case t if t =:= typeOf[Byte] =>
q"0"
case t if t =:= typeOf[Short] =>
q"0"
case t if t =:= typeOf[Boolean] =>
q"false"
case t if t =:= typeOf[Long] =>
q"0L"
case t if t weak_<:< typeOf[List[_]] => q"_root_.scala.Nil"
case t if t weak_<:< typeOf[Seq[_]] => q"_root_.scala.Nil"
case t if t weak_<:< typeOf[Option[_]] => q"_root_.scala.None"
case _ =>
q"null"
}
private type OptionSeqList = (Boolean, Boolean, Boolean)
private def isWrapType(typed: Type): OptionSeqList = {
var isList: Boolean = false
var isSeq: Boolean = false
var isOption: Boolean = false
typed match {
case t if t weak_<:< weakTypeOf[List[_]] =>
isList = true
case t if t weak_<:< weakTypeOf[Option[_]] =>
isOption = true
case t if !isList && (t weak_<:< weakTypeOf[Seq[_]]) =>
isSeq = true
case _ =>
}
Tuple3(isOption, isSeq, isList)
}
}
......@@ -68,13 +68,13 @@ object CaseClassField {
val genericType = fieldType.get match {
case t if t <:< typeOf[Option[_]] =>
val genericType = t.typeArgs.head
tq"Option[$genericType]"
tq"_root_.scala.Option[$genericType]"
case t if t <:< typeOf[Seq[_]] =>
val genericType = t.typeArgs.head
tq"Seq[$genericType]"
tq"_root_.scala.Seq[$genericType]"
case t if t <:< typeOf[List[_]] =>
val genericType = t.typeArgs.head
tq"List[$genericType]"
tq"_root_.scala.List[$genericType]"
case t => tq"$t"
}
......
......@@ -19,7 +19,7 @@
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.bitlap.csv.core.macros
package org.bitlap.common
import scala.collection.mutable
/** @author
......@@ -40,4 +40,8 @@ object MacroCache {
}
lazy val builderFunctionTrees: mutable.Map[Int, mutable.Map[String, Any]] = mutable.Map.empty
lazy val classFieldNameMapping: mutable.Map[Int, mutable.Map[String, String]] = mutable.Map.empty
lazy val classFieldValueMapping: mutable.Map[Int, mutable.Map[String, Any]] = mutable.Map.empty
}
/*
* Copyright (c) 2022 bitlap
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.bitlap.common
/** @author
* 梦境迷离
* @version 1.0,6/15/22
*/
class Transformable[From, To] {
/** @param selectFromField
* Select the name of the field to be mapped in the `From` class.
* @param selectToField
Selects the field of `To` that the `From` field will eventually be mapped to.
* @param map
Specify the type mapping of the field. It must be provided when the types are incompatible; otherwise the macro
attempts to search for an implicit `Transformer[FromField, ToField]` (a failed search results in a compile failure).
*
1. If the field names are the same and the field types are compatible (`FromField` can be assigned to
`ToField`), then you need neither the `mapField` method nor an implicit
`Transformer[FromField, ToField]`. For example:
* {{{
* F => F
* Option[F] => Option[F]
* Seq[F] => Seq[F]
* List[F] => List[F]
* Int => Long
* List[F] => Seq[F]
* ...
* }}}
*
2. If the field type is one of the following, you can map it in a more convenient way as long as you define an
implicit `Transformer[F, T]`. If `F` or `T` is not a case class, you still need to use the `mapField` method
with the `map` input argument.
* {{{
* F => T
* Option[F] => Option[T]
* Seq[F] => Seq[T]
* List[F] => List[T]
* List[F] => Seq[T]
* ...
* }}}
*
3. If the field type is one of the following, you must use the `mapField` method with the `map` argument:
* {{{
* Seq[_] => List[_]
* List[List[F]] => List[List[T]] and more nesting.
* List[Seq[F]] => List[Seq[T]] and more nesting.
* Seq[Seq[F]] => Seq[Seq[T]] and more nesting.
* Seq[List[F]] => Seq[List[T]] and more nesting.
* Option[Option[F]] => Option[Option[T]] and more nesting.
* List[Option[F]] => List[Option[T]] and more nesting.
* More structures like this not supported at this time.
* }}}
*
Note: these still need the `mapField` method to map the type:
* {{{
* Seq[String] => List[String]
* Seq[Long] => Seq[Int]
* ...
* }}}
*
* @tparam FromField
* field type
* @tparam ToField
* field type
* @return
* Transformable
*/
@unchecked
def mapField[FromField, ToField](
selectFromField: From => FromField,
selectToField: To => ToField,
map: FromField => ToField
): Transformable[From, To] =
macro TransformerMacro.mapFieldWithValueImpl[From, To, FromField, ToField]
/** Same method as above, but without the map parameter. That is, no type mapping needs to be configured.
*/
@unchecked
def mapField[FromField, ToField](
selectFromField: From => FromField,
selectToField: To => ToField
): Transformable[From, To] =
macro TransformerMacro.mapFieldImpl[From, To, FromField, ToField]
def instance: Transformer[From, To] = macro TransformerMacro.instanceImpl[From, To]
}
object Transformable {
/** Automatically derive `Transformable[From, To]` for case classes only; for non-case classes you should use the
* `mapField` method to configure the mapping relationship.
* @tparam From
* @tparam To
* @return
*/
def apply[From <: Product, To <: Product]: Transformable[From, To] =
macro TransformerMacro.applyImpl[From, To]
}
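As a small illustration of case 2 above (a field whose type is itself a case class, handled through an implicit `Transformer`), mirroring the nested-field test case later in this commit; the object name `NestedFieldExample` is only for illustration:
```scala
import org.bitlap.common.{ Transformable, Transformer }

object NestedFieldExample extends App {
  case class C1(j: Int)
  case class C2(j: Int)
  case class D1(c1: C1)
  case class D2(c2: C2)

  // The implicit Transformer[C1, C2] lets the derived Transformer[D1, D2] convert the nested field.
  implicit val cTransformer: Transformer[C1, C2] = Transformable[C1, C2].instance
  implicit val dTransformer: Transformer[D1, D2] =
    Transformable[D1, D2].mapField(_.c1, _.c2).instance // only renames the field; the type goes through cTransformer

  println(Transformer[D1, D2].transform(D1(C1(1)))) // D2(C2(1))
}
```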
/*
* Copyright (c) 2022 bitlap
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.bitlap.common
/** @author
* 梦境迷离
* @version 1.0,6/14/22
*/
trait Transformer[-From, +To] {
def transform(from: From): To
}
object Transformer {
def apply[From <: Product, To <: Product](implicit st: Transformer[From, To]): Transformer[From, To] = st
}
/*
* Copyright (c) 2022 bitlap
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.bitlap.common
import org.bitlap.common.{ Transformer => BitlapTransformer }
import scala.collection.mutable
import scala.reflect.macros.whitebox
/** @author
* 梦境迷离
* @version 1.0,6/15/22
*/
class TransformerMacro(override val c: whitebox.Context) extends AbstractMacroProcessor(c) {
import c.universe._
protected val packageName = q"_root_.org.bitlap.common"
private val builderFunctionPrefix = "_TransformableFunction$"
private val annoBuilderPrefix = "_AnonObjectTransformable$"
private val fromTermName = TermName("from")
def mapFieldWithValueImpl[From, To, FromField, ToField](
selectFromField: Expr[From => FromField],
selectToField: Expr[To => ToField],
map: Expr[FromField => ToField]
): Expr[Transformable[From, To]] = {
val Function(_, Select(_, fromName)) = selectFromField.tree
val Function(_, Select(_, toName)) = selectToField.tree
val builderId = getBuilderId(annoBuilderPrefix)
MacroCache.classFieldNameMapping
.getOrElseUpdate(builderId, mutable.Map.empty)
.update(toName.decodedName.toString, fromName.decodedName.toString)
MacroCache.classFieldValueMapping
.getOrElseUpdate(builderId, mutable.Map.empty)
.update(toName.decodedName.toString, map)
val tree = q"new ${c.prefix.actualType}"
exprPrintTree[Transformable[From, To]](force = false, tree)
}
def mapFieldImpl[From, To, FromField, ToField](
selectFromField: Expr[From => FromField],
selectToField: Expr[To => ToField]
): Expr[Transformable[From, To]] = {
val Function(_, Select(_, fromName)) = selectFromField.tree
val Function(_, Select(_, toName)) = selectToField.tree
val builderId = getBuilderId(annoBuilderPrefix)
MacroCache.classFieldNameMapping
.getOrElseUpdate(builderId, mutable.Map.empty)
.update(toName.decodedName.toString, fromName.decodedName.toString)
val tree = q"new ${c.prefix.actualType}"
exprPrintTree[Transformable[From, To]](force = false, tree)
}
def instanceImpl[From: WeakTypeTag, To: WeakTypeTag]: Expr[BitlapTransformer[From, To]] = {
val fromClassName = resolveClassTypeName[From]
val toClassName = resolveClassTypeName[To]
val tree = q"""
..$getPreTree
new $packageName.Transformer[$fromClassName, $toClassName] {
override def transform($fromTermName: $fromClassName): $toClassName = {
${getTransformBody[From, To]}
}
}
"""
exprPrintTree[BitlapTransformer[From, To]](force = false, tree)
}
def applyImpl[From: WeakTypeTag, To: WeakTypeTag]: Expr[Transformable[From, To]] =
deriveTransformableApplyImpl[From, To]
private def deriveTransformableApplyImpl[From: WeakTypeTag, To: WeakTypeTag]: Expr[Transformable[From, To]] = {
val builderClassName = TypeName(annoBuilderPrefix + MacroCache.getBuilderId)
val fromClassName = resolveClassTypeName[From]
val toClassName = resolveClassTypeName[To]
val tree =
q"""
class $builderClassName extends $packageName.Transformable[$fromClassName, $toClassName]
new $builderClassName
"""
exprPrintTree[Transformable[From, To]](force = false, tree)
}
private def getPreTree: Iterable[Tree] = {
val customTrees = MacroCache.classFieldValueMapping.getOrElse(getBuilderId(annoBuilderPrefix), mutable.Map.empty)
val (_, preTrees) = customTrees.collect { case (key, expr: Expr[Tree] @unchecked) =>
expr.tree match {
case buildFunction: Function =>
val functionName = TermName(builderFunctionPrefix + key)
key -> q"lazy val $functionName: ${buildFunction.tpe} = $buildFunction"
}
}.unzip
preTrees
}
private def getTransformBody[From: WeakTypeTag, To: WeakTypeTag]: Tree = {
val toClassName = resolveClassTypeName[To]
val toClassInfo = getCaseClassFieldInfo[To]()
val fromClassInfo = getCaseClassFieldInfo[From]()
val customFieldNameMapping =
MacroCache.classFieldNameMapping.getOrElse(getBuilderId(annoBuilderPrefix), mutable.Map.empty)
val customFieldValueMapping =
MacroCache.classFieldValueMapping.getOrElse(getBuilderId(annoBuilderPrefix), mutable.Map.empty)
c.info(c.enclosingPosition, s"Field Name Mapping:$customFieldNameMapping", force = true)
c.info(c.enclosingPosition, s"Field Value Mapping:$customFieldValueMapping", force = true)
val fields = toClassInfo.map { field =>
val fromFieldName = customFieldNameMapping.get(field.fieldName)
val realFromFieldName = fromFieldName.fold(field.fieldName)(x => x)
if (customFieldValueMapping.contains(field.fieldName)) {
q"""${TermName(builderFunctionPrefix + field.fieldName)}.apply(${q"$fromTermName.${TermName(realFromFieldName)}"})"""
} else {
checkFieldGetFieldTerm[From](
realFromFieldName,
fromClassInfo.find(_.fieldName == realFromFieldName),
field
)
}
}
q"""
${toClassName.toTermName}.apply(
..$fields
)
"""
}
private def checkFieldGetFieldTerm[From: WeakTypeTag](
realFromFieldName: String,
fromFieldOpt: Option[FieldInformation],
toField: FieldInformation
): Tree = {
val fromFieldTerm = q"$fromTermName.${TermName(realFromFieldName)}"
val fromClassName = resolveClassTypeName[From]
if (fromFieldOpt.isEmpty) {
c.abort(
c.enclosingPosition,
s"value `$realFromFieldName` is not a member of `$fromClassName`, Please consider using `mapField` method!"
)
return fromFieldTerm
}
val fromField = fromFieldOpt.get
if (!(fromField.fieldType weak_<:< toField.fieldType)) {
tryForWrapType(fromFieldTerm, fromField, toField)
} else {
fromFieldTerm
}
}
private def tryForWrapType(fromFieldTerm: Tree, fromField: FieldInformation, toField: FieldInformation): Tree =
(fromField, toField) match {
case (
FieldInformation(_, _, isSeq1, isList1, isOption1, genericType1),
FieldInformation(_, _, isSeq2, isList2, isOption2, genericType2)
)
if ((isSeq1 && isSeq2) || (isList1 && isList2) || (isOption1 && isOption2)) && genericType1.isDefined && genericType2.isDefined =>
q"""
$fromFieldTerm.map($packageName.Transformer[${genericType1.get}, ${genericType2.get}].transform(_))
"""
case (information1, information2) =>
c.warning(
c.enclosingPosition,
s"No implicit `Transformer` is defined for ${information1.fieldType} => ${information2.fieldType}, which may cause compilation errors!!!"
)
q"""$packageName.Transformer[${information1.fieldType}, ${information2.fieldType}].transform($fromFieldTerm)"""
}
}
/*
* Copyright (c) 2022 bitlap
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.bitlap.common
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
/** @author
* 梦境迷离
* @version 1.0,6/15/22
*/
class TransformableTest extends AnyFlatSpec with Matchers {
"TransformableTest simple case" should "ok for Transformable" in {
case class A1(a: String, b: Int, cc: Long, d: Option[String])
case class A2(a: String, b: Int, c: Int, d: Option[String])
val a = A1("hello", 1, 2, None)
val b: A2 = Transformable[A1, A2] // todo `fromField: Long` type Long cannot be ignored.
.mapField[Long, Int](_.cc, _.c, (fromField: Long) => if (fromField > 0) fromField.toInt else 0)
.instance
.transform(a)
b.toString shouldEqual "A2(hello,1,2,None)"
case class B1(d: List[String])
case class B2(d: Seq[String])
val b1 = B1(List("hello"))
// List => Seq does not need a field mapping
val b2: B2 = Transformable[B1, B2].instance.transform(b1)
b2.toString shouldEqual "B2(List(hello))"
}
"TransformableTest simple case" should "ok for implicit Transformable" in {
case class A1(a: String, b: Int, cc: Long, d: Option[String])
case class A2(a: String, b: Int, c: Int, d: Option[String])
val a = A1("hello", 1, 2, None)
implicit val transformer = Transformable[A1, A2]
.mapField(_.b, _.c)
.mapField(_.a, _.a)
.mapField[Option[String], Option[String]](_.d, _.d, (map: Option[String]) => map)
.instance
Transformer[A1, A2].transform(a).toString shouldEqual "A2(hello,1,1,None)"
}
"TransformableTest type not match" should "error if field type is incompatible" in {
"""
|
| case class A1(a: String, b: Int, cc: Long, d: Option[String])
| case class A2(a: String, b: Int, c: Int, d: Option[String])
| val a = A1("hello", 1, 2, None)
| val b: A2 = Transformable[A1, A2]
| .mapField(_.cc, _.c)
| .instance
| .transform(a)
|""".stripMargin shouldNot compile
}
"TransformableTest simple case for nest field" should "ok when field is case class" in {
case class C1(j: Int)
case class D1(c1: C1)
case class C2(j: Int)
case class D2(c2: C2)
implicit val cTransformer: Transformer[C1, C2] = Transformable[C1, C2].instance
implicit val dTransformer: Transformer[D1, D2] = Transformable[D1, D2].mapField(_.c1, _.c2).instance
val d1 = D1(C1(1))
val d2: D2 = Transformer[D1, D2].transform(d1)
println(d2)
}
"TransformableTest more complex case for nest field" should "ok when field is list with case class" in {
case class C1(j: Int)
case class D1(c1: List[C1])
case class C2(j: Int)
case class D2(c2: List[C2])
implicit val cTransformer: Transformer[C1, C2] = Transformable[C1, C2].instance
implicit val dTransformer: Transformer[D1, D2] = Transformable[D1, D2].mapField(_.c1, _.c2).instance
val d1 = D1(List(C1(1), C1(2)))
val d2: D2 = Transformer[D1, D2].transform(d1)
println(d2)
}
"TransformableTest more complex case for two-layer nest field" should "ok for implicit and non-implicit(mapField)" in {
case class C1(j: Int)
case class D1(c1: List[List[C1]])
case class C2(j: Int)
case class D2(c2: List[List[C2]]) // Nesting of the second layer
object C1 {
implicit val cTransformer: Transformer[C1, C2] = Transformable[C1, C2].instance
}
object D1 {
implicit val dTransformer: Transformer[D1, D2] = Transformable[D1, D2]
.mapField[List[List[C1]], List[List[C2]]](
_.c1,
_.c2,
// implicit values of nested dependencies cannot be at the same level, so move them to their companion object
(c1: List[List[C1]]) => c1.map(_.map(Transformer[C1, C2].transform(_)))
)
.instance
}
val d1 = D1(List(List(C1(1), C1(2))))
val d2: D2 = Transformer[D1, D2].transform(d1)
d2.toString shouldBe "D2(List(List(C2(1), C2(2))))"
}
"TransformableTest different type" should "compile ok if can use weak conformance" in {
case class A1(a: String, b: Int, cc: Int, d: Option[String]) // weak conformance
case class A2(a: String, b: Int, c: Long, d: Option[String])
object A1 {
implicit val aTransformer: Transformer[A1, A2] = Transformable[A1, A2].mapField(_.cc, _.c).instance
}
val a1 = A1("hello", 1, 2, None)
val a2 = Transformer[A1, A2].transform(a1)
a2.toString shouldBe "A2(hello,1,2,None)"
}
"TransformableTest type cannot match" should "compile failed if can't use weak conformance" in {
"""
| case class A1(a: String, b: Int, cc: Long, d: Option[String]) // Can't use weak conformance; must use the `mapField(?,?,?)` method for it.
| case class A2(a: String, b: Int, c: Int, d: Option[String])
| object A1 {
|
| implicit val aTransformer: Transformer[A1, A2] = Transformable[A1, A2].mapField(_.cc,_.c).instance
| }
| val a1 = A1("hello", 1, 2, None)
| val a2 = Transformer[A1, A2].transform(a1)
| a2.toString shouldBe "A2(hello,1,2,None)"
|
|
|""".stripMargin shouldNot compile
}
"TransformableTest more complex case to use implicit Transformer" should "compile ok" in {
import org.bitlap.common.models.from._
import org.bitlap.common.models.to._
val fromRow =
List(FRow(List("this is row data1", "this is row data2")))
val fromRowSet = FRowSet.apply(fromRow, 100000)
val fromColumnDesc = List(FColumnDesc("this is column name1"), FColumnDesc("this is column name2"))
val fromTableSchema = FTableSchema(fromColumnDesc)
val fromQueryResult = FQueryResult(tableSchema = fromTableSchema, rows = fromRowSet)
val toRow =
List(TRow(List("this is row data1", "this is row data2")))
val toRowSet = TRowSet(100000, toRow)
val toColumnDesc = List(TColumnDesc("this is column name1"), TColumnDesc("this is column name2"))
val toTableSchema = TTableSchema(toColumnDesc)
val expectToQueryResult = TQueryResult(ttableSchema = toTableSchema, trows = toRowSet)
val actualToQueryResult = Transformer[FQueryResult, TQueryResult].transform(fromQueryResult)
actualToQueryResult shouldBe expectToQueryResult
}
}
/*
* Copyright (c) 2022 bitlap
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.bitlap.common
/** @author
* 梦境迷离
* @since 2021/11/20
* @version 1.0
*/
object models {
object from {
import org.bitlap.common.models.to._
sealed trait Model
final case class FQueryResult(tableSchema: FTableSchema, rows: FRowSet) extends Model
object FQueryResult {
// mapping name
implicit val queryResultTransform: Transformer[FQueryResult, TQueryResult] =
Transformable[FQueryResult, TQueryResult]
.mapField(_.rows, _.trows)
.mapField(_.tableSchema, _.ttableSchema)
.instance
}
final case class FRowSet(rows: List[FRow] = Nil, startOffset: Long = 0) extends Model
object FRowSet {
// no mapping needed
implicit val rowSetTransform: Transformer[FRowSet, TRowSet] = Transformable[FRowSet, TRowSet].instance
}
final case class FRow(values: List[String] = Nil) extends Model
object FRow {
implicit val rowTransform: Transformer[FRow, TRow] = Transformable[FRow, TRow].instance // no mapping needed
}
final case class FTableSchema(columns: List[FColumnDesc] = Nil) extends Model
object FTableSchema {
implicit val tableSchemaTransform: Transformer[FTableSchema, TTableSchema] =
Transformable[FTableSchema, TTableSchema].instance
}
final case class FColumnDesc(columnName: String) extends Model
object FColumnDesc {
implicit val columnDescTransform: Transformer[FColumnDesc, TColumnDesc] = Transformable[FColumnDesc, TColumnDesc]
.mapField(_.columnName, _.tcolumnName) // mapping name
.instance
}
}
object to {
sealed trait TModel
final case class TQueryResult(trows: TRowSet, ttableSchema: TTableSchema) extends TModel
final case class TRowSet(startOffset: Long = 0, rows: List[TRow]) extends TModel
final case class TRow(values: List[String] = Nil) extends TModel
final case class TTableSchema(columns: List[TColumnDesc] = Nil) extends TModel
final case class TColumnDesc(tcolumnName: String) extends TModel
}
}
......@@ -21,11 +21,9 @@
package org.bitlap.csv.derive
import org.bitlap.csv.core.Converter
import org.bitlap.csv.core.macros.AbstractMacroProcessor
import scala.reflect.macros.blackbox
import org.bitlap.csv.core.CsvFormat
import org.bitlap.common.AbstractMacroProcessor
import org.bitlap.csv.{ Converter, CsvFormat }
/** This is a tool macro for automatic derivation of the base CSV converter.
*
......@@ -39,17 +37,18 @@ object DeriveCsvConverter {
class Macro(override val c: blackbox.Context) extends AbstractMacroProcessor(c) {
import c.universe._
protected val packageName = q"_root_.org.bitlap.csv"
private val lineTermName = TermName("line")
private val tTermName = TermName("t")
def macroImpl[CC: c.WeakTypeTag](csvFormat: c.Expr[CsvFormat]): c.Expr[CC] = {
val clazzName = c.weakTypeOf[CC].typeSymbol.name
val typeName = TypeName(clazzName.decodedName.toString)
val typeName = clazzName.toTypeName
val tree =
q"""
new Converter[$typeName] {
override def toScala($lineTermName: String): Option[$typeName] = $packageName.macros.DeriveToCaseClass[$typeName]($lineTermName)($csvFormat)
override def toScala($lineTermName: String): _root_.scala.Option[$typeName] = $packageName.macros.DeriveToCaseClass[$typeName]($lineTermName)($csvFormat)
override def toCsvString($tTermName: $typeName): String = $packageName.macros.DeriveToString[$typeName]($tTermName)($csvFormat)
}
"""
......
......@@ -21,8 +21,8 @@
package org.bitlap.csv.derive.test
import org.bitlap.csv.core.Converter
import org.bitlap.csv.derive.DeriveCsvConverter
import org.bitlap.csv.Converter
/** @author
* 梦境迷离
......
......@@ -21,9 +21,8 @@
package org.bitlap.csv.derive.test
import org.bitlap.csv.core.Converter
import org.bitlap.csv.derive.DeriveCsvConverter
import org.bitlap.csv.core.DefaultCsvFormat
import org.bitlap.csv.{ Converter, DefaultCsvFormat }
/** @author
* 梦境迷离
......
......@@ -21,8 +21,8 @@
package org.bitlap.csv.derive.test
import org.bitlap.csv.core.Converter
import org.bitlap.csv.derive.DeriveCsvConverter
import org.bitlap.csv.Converter
import java.time.LocalDateTime
import java.time.format.DateTimeFormatter
......
......@@ -21,8 +21,8 @@
package org.bitlap.csv.derive.test
import org.bitlap.csv.core.{ Converter, StringUtils }
import org.bitlap.csv.derive.DeriveCsvConverter
import org.bitlap.csv.{ Converter, StringUtils }
/** @author
* 梦境迷离
......
......@@ -21,7 +21,7 @@
package org.bitlap.csv.derive.test
import org.bitlap.csv.core.Converter
import org.bitlap.csv.Converter
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
......
......@@ -19,7 +19,7 @@
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.bitlap.csv.core
package org.bitlap.csv
import scala.collection.immutable.{ :: => Cons }
import scala.util.Try
......
......@@ -19,7 +19,7 @@
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.bitlap.csv.core
package org.bitlap.csv
trait CsvFormat extends Serializable {
def delimiter: Char
......
......@@ -19,7 +19,7 @@
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.bitlap.csv.core
package org.bitlap.csv
/** a Custom Csv encoder.
*
......
......@@ -19,9 +19,9 @@
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.bitlap.csv.core
package org.bitlap.csv
import org.bitlap.csv.core.macros.DeriveCsvableBuilder
import org.bitlap.csv.macros.DeriveCsvableBuilder
import java.io.File
/** Builder to create a custom Csv Encoder.
......
......@@ -19,7 +19,7 @@
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.bitlap.csv.core
package org.bitlap.csv
/** @author
* 梦境迷离
......
......@@ -19,7 +19,7 @@
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.bitlap.csv.core
package org.bitlap.csv
import java.io._
import scala.io.Source
......
......@@ -19,7 +19,7 @@
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.bitlap.csv.core
package org.bitlap.csv
/** a Custom Csv decoder.
*
......
......@@ -19,9 +19,9 @@
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.bitlap.csv.core
package org.bitlap.csv
import org.bitlap.csv.core.macros.DeriveScalableBuilder
import org.bitlap.csv.macros.DeriveScalableBuilder
import java.io.InputStream
/** Builder to create a custom Csv Decoder.
......
......@@ -19,7 +19,7 @@
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.bitlap.csv.core
package org.bitlap.csv
import java.io.{ BufferedReader, File, FileReader, InputStreamReader }
......
......@@ -19,7 +19,7 @@
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.bitlap.csv.core
package org.bitlap.csv
import scala.util.Try
......
......@@ -19,7 +19,7 @@
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.bitlap.csv.core
package org.bitlap.csv
import java.util.regex.Pattern
import scala.collection.mutable.ListBuffer
......
......@@ -19,14 +19,14 @@
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.bitlap.csv.core.macros
package org.bitlap.csv.macros
import org.bitlap.csv.core.CsvableBuilder
import org.bitlap.common.{ AbstractMacroProcessor, MacroCache }
import org.bitlap.csv.{ CsvFormat, CsvableBuilder }
import java.io.File
import scala.collection.mutable
import scala.reflect.macros.whitebox
import java.io.File
import org.bitlap.csv.core.CsvFormat
/** @author
* 梦境迷离
......@@ -36,6 +36,8 @@ class DeriveCsvableBuilder(override val c: whitebox.Context) extends AbstractMac
import c.universe._
protected val packageName = q"_root_.org.bitlap.csv"
private val annoBuilderPrefix = "_AnonCsvableBuilder$"
private val builderFunctionPrefix = "_CsvableBuilderFunction$"
......@@ -47,7 +49,7 @@ class DeriveCsvableBuilder(override val c: whitebox.Context) extends AbstractMac
private val funcArgsTempTermName = TermName("temp")
// scalafmt: { maxColumn = 400 }
def setFieldImpl[T: WeakTypeTag, SF: WeakTypeTag](scalaField: Expr[T => SF], value: Expr[SF => String]): Expr[CsvableBuilder[T]] = {
def setFieldImpl[T, SF](scalaField: Expr[T => SF], value: Expr[SF => String]): Expr[CsvableBuilder[T]] = {
val Function(_, Select(_, termName)) = scalaField.tree
val builderId = getBuilderId(annoBuilderPrefix)
MacroCache.builderFunctionTrees.getOrElseUpdate(builderId, mutable.Map.empty).update(termName.toString, value)
......@@ -92,7 +94,7 @@ class DeriveCsvableBuilder(override val c: whitebox.Context) extends AbstractMac
// scalafmt: { maxColumn = 400 }
private def deriveFullIntoFileCsvableImpl[T: WeakTypeTag](ts: Expr[List[T]], file: Expr[File], format: c.Expr[CsvFormat]): Expr[Boolean] = {
val clazzName = resolveClazzTypeName[T]
val clazzName = resolveClassTypeName[T]
val (customTrees, preTrees) = getCustomPreTress
val tree =
q"""
......@@ -109,7 +111,7 @@ class DeriveCsvableBuilder(override val c: whitebox.Context) extends AbstractMac
// scalafmt: { maxColumn = 400 }
private def deriveFullCsvableImpl[T: WeakTypeTag](ts: Expr[List[T]], format: c.Expr[CsvFormat]): Expr[String] = {
val clazzName = resolveClazzTypeName[T]
val clazzName = resolveClassTypeName[T]
val (customTrees, preTrees) = getCustomPreTress
val tree =
q"""
......@@ -125,18 +127,18 @@ class DeriveCsvableBuilder(override val c: whitebox.Context) extends AbstractMac
}
private def getAnnoClassObject[T: WeakTypeTag](customTrees: mutable.Map[String, Any], format: c.Expr[CsvFormat]): Tree = {
val clazzName = resolveClazzTypeName[T]
val clazzName = resolveClassTypeName[T]
val annoClassName = TermName(csvableImplClassNamePrefix + MacroCache.getIdentityId)
q"""
object $annoClassName extends $packageName.Csvable[$clazzName] {
var $innerTmpTermName: $clazzName = _
lazy private val toCsv = ($funcArgsTempTermName: $clazzName) => {
lazy private val _toCsv = ($funcArgsTempTermName: $clazzName) => {
val fields = ${clazzName.toTermName}.unapply($funcArgsTempTermName).orNull
val values = if (null == fields) List.empty else ${fieldsToString[T](funcArgsTempTermName, customTrees)}
val values = if (null == fields) _root_.scala.List.empty else ${fieldsToString[T](funcArgsTempTermName, customTrees)}
$packageName.StringUtils.combineColumns(values, $format)
}
override def _toCsvString(t: $clazzName): String = toCsv($annoClassName.$innerTmpTermName)
override def _toCsvString(t: $clazzName): String = _toCsv($annoClassName.$innerTmpTermName)
}
final lazy private val $csvableInstanceTermName = $annoClassName
......@@ -144,7 +146,7 @@ class DeriveCsvableBuilder(override val c: whitebox.Context) extends AbstractMac
}
private def deriveCsvableImpl[T: WeakTypeTag](t: Expr[T], format: c.Expr[CsvFormat]): Expr[String] = {
val clazzName = resolveClazzTypeName[T]
val clazzName = resolveClassTypeName[T]
val (customTrees, preTrees) = getCustomPreTress
val annoClassName = TermName(csvableImplClassNamePrefix + MacroCache.getIdentityId)
val tree =
......@@ -155,7 +157,7 @@ class DeriveCsvableBuilder(override val c: whitebox.Context) extends AbstractMac
override def _toCsvString(t: $clazzName): String = {
val fields = ${clazzName.toTermName}.unapply($innerTmpTermName).orNull
val values = if (null == fields) List.empty else ${fieldsToString[T](innerTmpTermName, customTrees)}
val values = if (null == fields) _root_.scala.List.empty else ${fieldsToString[T](innerTmpTermName, customTrees)}
$packageName.StringUtils.combineColumns(values, $format)
}
}
......@@ -166,49 +168,42 @@ class DeriveCsvableBuilder(override val c: whitebox.Context) extends AbstractMac
// scalafmt: { maxColumn = 400 }
private def fieldsToString[T: WeakTypeTag](innerVarTermName: TermName, customTrees: mutable.Map[String, Any]): List[Tree] = {
val clazzName = resolveClazzTypeName[T]
val (fieldNames, indexTypes) = checkCaseClassZip
val indexByName = (i: Int) => TermName(fieldNames(i))
indexTypes.map { idxType =>
val customFunction = () => q"${TermName(builderFunctionPrefix + fieldNames(idxType._1))}.apply($innerVarTermName.${indexByName(idxType._1)})"
idxType._2 match {
case t if t <:< typeOf[List[_]] =>
if (customTrees.contains(fieldNames(idxType._1))) {
q"${customFunction()}"
} else {
c.abort(
c.enclosingPosition,
s"Missing usage `setField` for converting `$clazzName.${fieldNames(idxType._1)}` as a `String` , you have to define a custom way by using `setField` method!"
)
}
case t if t <:< typeOf[Seq[_]] =>
if (customTrees.contains(fieldNames(idxType._1))) {
q"${customFunction()}"
} else {
c.abort(
c.enclosingPosition,
s"Missing usage `setField` for converting `$clazzName.${fieldNames(idxType._1)}` as a `String` , you have to define a custom way by using `setField` method!"
)
}
case t if t <:< typeOf[Option[_]] =>
val genericType = c.typecheck(q"${idxType._2}", c.TYPEmode).tpe.typeArgs.head
if (customTrees.contains(fieldNames(idxType._1))) {
customFunction()
} else {
// scalafmt: { maxColumn = 400 }
q"""
$packageName.Csvable[${genericType.typeSymbol.name.toTypeName}]._toCsvString {
if ($innerVarTermName.${indexByName(idxType._1)}.isEmpty) ""
else $innerVarTermName.${indexByName(idxType._1)}.get
val clazzName = resolveClassTypeName[T]
val fieldZipInformation = checkGetFieldZipInformation
val fieldNames = fieldZipInformation.fieldNames
val indexTypes = fieldZipInformation.fieldIndexTypeMapping
val indexByName = (i: Int) => TermName(fieldNames(i))
indexTypes.map { indexType =>
val customFunction = () => q"${TermName(builderFunctionPrefix + fieldNames(indexType._1))}.apply($innerVarTermName.${indexByName(indexType._1)})"
indexType._2 match {
case t if t <:< typeOf[List[_]] && customTrees.contains(fieldNames(indexType._1)) =>
q"${customFunction()}"
case t if t <:< typeOf[List[_]] && !customTrees.contains(fieldNames(indexType._1)) =>
c.abort(
c.enclosingPosition,
s"Missing usage `setField` for converting `$clazzName.${fieldNames(indexType._1)}` as a `String` , you have to define a custom way by using `setField` method!"
)
case t if t <:< typeOf[Seq[_]] && customTrees.contains(fieldNames(indexType._1)) =>
q"${customFunction()}"
case t if t <:< typeOf[Seq[_]] && !customTrees.contains(fieldNames(indexType._1)) =>
c.abort(
c.enclosingPosition,
s"Missing usage `setField` for converting `$clazzName.${fieldNames(indexType._1)}` as a `String` , you have to define a custom way by using `setField` method!"
)
case t if t <:< typeOf[Option[_]] && customTrees.contains(fieldNames(indexType._1)) =>
customFunction()
case t if t <:< typeOf[Option[_]] && !customTrees.contains(fieldNames(indexType._1)) =>
val genericType = c.typecheck(q"${indexType._2}", c.TYPEmode).tpe.typeArgs.head
q"""
$packageName.Csvable[$genericType]._toCsvString {
if ($innerVarTermName.${indexByName(indexType._1)}.isEmpty) ""
else $innerVarTermName.${indexByName(indexType._1)}.get
}
"""
}
case _ if customTrees.contains(fieldNames(indexType._1)) =>
customFunction()
case _ =>
if (customTrees.contains(fieldNames(idxType._1))) {
customFunction()
} else {
q"$packageName.Csvable[${TypeName(idxType._2.typeSymbol.name.decodedName.toString)}]._toCsvString($innerVarTermName.${indexByName(idxType._1)})"
}
q"$packageName.Csvable[${indexType._2}]._toCsvString($innerVarTermName.${indexByName(indexType._1)})"
}
}
}
......
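The abort branches in `fieldsToString` mean that `List`/`Seq` fields are never serialized automatically: the caller has to supply the column encoding through `setField`. A minimal sketch of such a call, assuming made-up `Metric`/`Dimension` case classes and that `CsvableBuilder` exposes `setField`/`convert` in the same shape as the `ScalableBuilder` calls in the tests further down:

```scala
import org.bitlap.csv.{ CsvableBuilder, defaultCsvFormat }

// Made-up case classes, for illustration only.
final case class Dimension(key: String, value: String)
final case class Metric(time: Long, dimensions: Seq[Dimension])

// Without this `setField`, the macro aborts with the "Missing usage of `setField`" error above.
val csvLine: String =
  CsvableBuilder[Metric]
    .setField[Seq[Dimension]](
      _.dimensions,
      (ds: Seq[Dimension]) => ds.map(d => s"${d.key}:${d.value}").mkString(";") // assumed encoding
    )
    .convert(Metric(100L, Seq(Dimension("city", "beijing"))))
```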
......@@ -19,14 +19,14 @@
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.bitlap.csv.core.macros
package org.bitlap.csv.macros
import org.bitlap.csv.core.ScalableBuilder
import org.bitlap.common.{ AbstractMacroProcessor, MacroCache }
import org.bitlap.csv.{ CsvFormat, ScalableBuilder }
import java.io.InputStream
import scala.collection.mutable
import scala.reflect.macros.whitebox
import org.bitlap.csv.core.CsvFormat
/** @author
* 梦境迷离
......@@ -36,6 +36,8 @@ class DeriveScalableBuilder(override val c: whitebox.Context) extends AbstractMa
import c.universe._
protected val packageName = q"_root_.org.bitlap.csv"
private val annoBuilderPrefix = "_AnonScalableBuilder$"
private val builderFunctionPrefix = "_ScalableBuilderFunction$"
......@@ -47,7 +49,8 @@ class DeriveScalableBuilder(override val c: whitebox.Context) extends AbstractMa
private val scalableImplClassNamePrefix = "_ScalaAnno$"
// scalafmt: { maxColumn = 400 }
def setFieldImpl[T: WeakTypeTag, SF: WeakTypeTag](scalaField: Expr[T => SF], value: Expr[String => SF]): Expr[ScalableBuilder[T]] = {
@unchecked
def setFieldImpl[T, SF](scalaField: Expr[T => SF], value: Expr[String => SF]): Expr[ScalableBuilder[T]] = {
val Function(_, Select(_, termName)) = scalaField.tree
val builderId = getBuilderId(annoBuilderPrefix)
MacroCache.builderFunctionTrees.getOrElseUpdate(builderId, mutable.Map.empty).update(termName.toString, value)
......@@ -59,23 +62,23 @@ class DeriveScalableBuilder(override val c: whitebox.Context) extends AbstractMa
deriveBuilderApplyImpl[T]
def convertOneImpl[T: WeakTypeTag](line: Expr[String])(format: c.Expr[CsvFormat]): Expr[Option[T]] = {
val clazzName = resolveClazzTypeName[T]
val clazzName = resolveClassTypeName[T]
deriveScalableImpl[T](clazzName, line, format)
}
def convertAllImpl[T: WeakTypeTag](lines: Expr[List[String]])(format: c.Expr[CsvFormat]): Expr[List[Option[T]]] = {
val clazzName = resolveClazzTypeName[T]
val clazzName = resolveClassTypeName[T]
deriveFullScalableImpl[T](clazzName, lines, format)
}
def convertFromFileImpl[T: WeakTypeTag](file: Expr[InputStream])(format: c.Expr[CsvFormat]): Expr[List[Option[T]]] = {
val clazzName = resolveClazzTypeName[T]
val clazzName = resolveClassTypeName[T]
deriveFullFromFileScalableImpl[T](clazzName, file, format)
}
private def deriveBuilderApplyImpl[T: WeakTypeTag]: Expr[ScalableBuilder[T]] = {
val className = TypeName(annoBuilderPrefix + MacroCache.getBuilderId)
val caseClazzName = TypeName(weakTypeOf[T].typeSymbol.name.decodedName.toString)
val caseClazzName = weakTypeOf[T].typeSymbol.name.toTypeName
val tree =
q"""
class $className extends $packageName.ScalableBuilder[$caseClazzName]
......@@ -157,75 +160,58 @@ class DeriveScalableBuilder(override val c: whitebox.Context) extends AbstractMa
// scalafmt: { maxColumn = 400 }
private def scalableBody[T: WeakTypeTag](clazzName: TypeName, innerFuncTermName: TermName): Tree = {
val customTrees = MacroCache.builderFunctionTrees.getOrElse(getBuilderId(annoBuilderPrefix), mutable.Map.empty)
val params = getCaseClassParams[T]()
val fieldNames = params.map(_.name.decodedName.toString)
val fields = checkCaseClassZipAll[T](innerFuncTermName).map { idxType =>
val idx = idxType._1._1
val columnValues = idxType._1._2
val fieldTypeName = TypeName(idxType._2.typeSymbol.name.decodedName.toString)
val params = getCaseClassFieldInfo[T]()
val fieldNames = params.map(_.fieldName)
val fields = checkGetFieldTreeInformationList[T](innerFuncTermName).map { fieldTreeInformation =>
val idx = fieldTreeInformation.index
val columnValues = fieldTreeInformation.fieldTerm
val fieldType = fieldTreeInformation.fieldType
val fieldTypeName = fieldType.typeSymbol.name.toTypeName
val customFunction = () => q"${TermName(builderFunctionPrefix + fieldNames(idx))}.apply($columnValues)"
idxType._2 match {
case tp if tp <:< typeOf[List[_]] =>
val genericType = c.typecheck(q"${idxType._2}", c.TYPEmode).tpe.typeArgs.head
if (customTrees.contains(fieldNames(idx))) {
tryGetOrElse(q"${customFunction()}.asInstanceOf[List[$genericType]]", q"Nil")
} else {
c.abort(
c.enclosingPosition,
s"Missing usage `setField` for parsing `$clazzName.${fieldNames(idx)}` as a `List` , you have to define a custom way by using `setField` method!"
)
// q"$packageName.Scalable[${genericType.typeSymbol.name.toTypeName}]._toScala($columnValues)"
}
case tp if tp <:< typeOf[Seq[_]] =>
val genericType = c.typecheck(q"${idxType._2}", c.TYPEmode).tpe.typeArgs.head
if (customTrees.contains(fieldNames(idx))) {
tryGetOrElse(q"${customFunction()}.asInstanceOf[Seq[$genericType]]", q"Nil")
} else {
c.abort(
c.enclosingPosition,
s"Missing usage `setField` for parsing `$clazzName.${fieldNames(idx)}` as a `Seq` , you have to define a custom way by using `setField` method!"
)
// q"$packageName.Scalable[${genericType.typeSymbol.name.toTypeName}]._toScala($columnValues)"
}
case tp if tp <:< typeOf[Option[_]] =>
val genericType = c.typecheck(q"${idxType._2}", c.TYPEmode).tpe.typeArgs.head
if (customTrees.contains(fieldNames(idx))) {
tryOption(q"${customFunction()}.asInstanceOf[Option[$genericType]]")
} else {
tryOption(q"$packageName.Scalable[${genericType.typeSymbol.name.toTypeName}]._toScala($columnValues)")
}
case _ =>
if (customTrees.contains(fieldNames(idx))) {
tryGetOrElse(q"${customFunction()}.asInstanceOf[$fieldTypeName]", getDefaultValue(idxType._2))
} else {
idxType._2 match {
case t if t =:= typeOf[Int] =>
q"$packageName.Scalable[$fieldTypeName]._toScala($columnValues).getOrElse(0)"
case t if t =:= typeOf[String] =>
q"""$packageName.Scalable[$fieldTypeName]._toScala($columnValues).getOrElse("")"""
case t if t =:= typeOf[Float] =>
q"$packageName.Scalable[$fieldTypeName]._toScala($columnValues).getOrElse[Float](0.asInstanceOf[Float])"
case t if t =:= typeOf[Double] =>
q"$packageName.Scalable[$fieldTypeName]._toScala($columnValues).getOrElse[Double](0D)"
case t if t =:= typeOf[Char] =>
q"$packageName.Scalable[$fieldTypeName]._toScala($columnValues).getOrElse('?')"
case t if t =:= typeOf[Byte] =>
q"$packageName.Scalable[$fieldTypeName]._toScala($columnValues).getOrElse(0)"
case t if t =:= typeOf[Short] =>
q"$packageName.Scalable[$fieldTypeName]._toScala($columnValues).getOrElse(0)"
case t if t =:= typeOf[Boolean] =>
q"$packageName.Scalable[$fieldTypeName]._toScala($columnValues).getOrElse(false)"
case t if t =:= typeOf[Long] =>
q"$packageName.Scalable[$fieldTypeName]._toScala($columnValues).getOrElse(0L)"
case _ =>
tryOptionGetOrElse(q"$packageName.Scalable[$fieldTypeName]._toScala($columnValues)", q"null")
}
fieldTreeInformation.genericType match {
case None if customTrees.contains(fieldNames(idx)) =>
tryGetOrElse(q"${customFunction()}.asInstanceOf[$fieldTypeName]", fieldTreeInformation.zeroValue)
case None if !customTrees.contains(fieldNames(idx)) =>
fieldType match {
case t if t =:= typeOf[Int] =>
q"$packageName.Scalable[$fieldTypeName]._toScala($columnValues).getOrElse(${fieldTreeInformation.zeroValue})"
case t if t =:= typeOf[String] =>
q"""$packageName.Scalable[$fieldTypeName]._toScala($columnValues).getOrElse(${fieldTreeInformation.zeroValue})"""
case t if t =:= typeOf[Float] =>
q"$packageName.Scalable[$fieldTypeName]._toScala($columnValues).getOrElse[Float](${fieldTreeInformation.zeroValue})"
case t if t =:= typeOf[Double] =>
q"$packageName.Scalable[$fieldTypeName]._toScala($columnValues).getOrElse[Double](${fieldTreeInformation.zeroValue})"
case t if t =:= typeOf[Char] =>
q"$packageName.Scalable[$fieldTypeName]._toScala($columnValues).getOrElse(${fieldTreeInformation.zeroValue})"
case t if t =:= typeOf[Byte] =>
q"$packageName.Scalable[$fieldTypeName]._toScala($columnValues).getOrElse(${fieldTreeInformation.zeroValue})"
case t if t =:= typeOf[Short] =>
q"$packageName.Scalable[$fieldTypeName]._toScala($columnValues).getOrElse(${fieldTreeInformation.zeroValue})"
case t if t =:= typeOf[Boolean] =>
q"$packageName.Scalable[$fieldTypeName]._toScala($columnValues).getOrElse(${fieldTreeInformation.zeroValue})"
case t if t =:= typeOf[Long] =>
q"$packageName.Scalable[$fieldTypeName]._toScala($columnValues).getOrElse(${fieldTreeInformation.zeroValue})"
case _ =>
tryOptionGetOrElse(q"$packageName.Scalable[$fieldTypeName]._toScala($columnValues)", fieldTreeInformation.zeroValue)
}
case Some(generic) if customTrees.contains(fieldNames(idx)) && fieldTreeInformation.isSeq =>
tryGetOrElse(q"${customFunction()}.asInstanceOf[_root_.scala.Seq[$generic]]", fieldTreeInformation.zeroValue)
case Some(generic) if customTrees.contains(fieldNames(idx)) && fieldTreeInformation.isList =>
tryGetOrElse(q"${customFunction()}.asInstanceOf[_root_.scala.List[$generic]]", fieldTreeInformation.zeroValue)
case Some(generic) if customTrees.contains(fieldNames(idx)) && fieldTreeInformation.isOption =>
tryGetOrElse(q"${customFunction()}.asInstanceOf[_root_.scala.Option[$generic]]", fieldTreeInformation.zeroValue)
case Some(generic) if fieldTreeInformation.isOption =>
tryOption(q"$packageName.Scalable[$generic]._toScala($columnValues)")
case generic =>
c.abort(
c.enclosingPosition,
s"Missing usage `setField` for parsing `$clazzName.${fieldNames(idx)}` as a `$fieldType` with genericType: `$generic`, you have to define a custom way by using `setField` method!"
)
}
}
// the input argument `column` is not used here
q"""override def _toScala(column: String): Option[$clazzName] =
${tryOption(q"Option(${TermName(clazzName.decodedName.toString)}(..$fields))")}"""
q"""override def _toScala(column: String): _root_.scala.Option[$clazzName] =
${tryOption(q"_root_.scala.Option(${clazzName.toTermName}(..$fields))")}"""
}
}
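The generated `_toScala` wraps both the custom `setField` functions and the typeclass lookups in `tryGetOrElse`/`tryOption`, so a column that fails to parse, or a custom function that throws, degrades to the field's zero value instead of failing the whole row. A small sketch of that behaviour with a made-up case class and input line (the real `Dimension2` definition and test data are elided in this diff):

```scala
import org.bitlap.csv.{ ScalableBuilder, defaultCsvFormat }

// Made-up case class that only mirrors the field shape implied by the
// `Dimension2` assertion in the test file below.
final case class ExampleDim(key: String, tag: Option[String], flag: Char, time: Long, count: Int, ok: Boolean, ratio: Float, value: Double)

val line = "1,,c,1,1,false,0.1,23.3" // made-up input

// The throwing function is swallowed by the generated wrapper, so `value`
// falls back to its zero value (0.0 for Double).
val parsed: Option[ExampleDim] =
  ScalableBuilder[ExampleDim]
    .setField(_.value, (_: String) => throw new Exception)
    .convert(line)
// expected: Some(ExampleDim("1", None, 'c', 1L, 1, false, 0.1f, 0.0))
```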
......@@ -19,10 +19,12 @@
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.bitlap.csv.core.macros
package org.bitlap.csv.macros
import org.bitlap.common.AbstractMacroProcessor
import org.bitlap.csv.CsvFormat
import scala.reflect.macros.blackbox
import org.bitlap.csv.core.CsvFormat
/** @author
* 梦境迷离
......@@ -36,53 +38,58 @@ object DeriveToCaseClass {
import c.universe._
protected val packageName = q"_root_.org.bitlap.csv"
// scalafmt: { maxColumn = 400 }
def macroImpl[T <: Product: c.WeakTypeTag](line: c.Expr[String])(format: c.Expr[CsvFormat]): c.Expr[Option[T]] = {
val clazzName = c.weakTypeOf[T].typeSymbol.name
val innerFuncTermName = TermName("_columns")
val fields = (columnsFunc: TermName) =>
checkCaseClassZipAll[T](columnsFunc).map { idxType =>
val columnValues = idxType._1._2
idxType._2 match {
case t if t <:< typeOf[Option[_]] =>
val genericType = c.typecheck(q"${idxType._2}", c.TYPEmode).tpe.typeArgs.head
tryOption(q"$packageName.Converter[${genericType.typeSymbol.name.toTypeName}].toScala($columnValues)")
case t if t <:< typeOf[List[_]] =>
val genericType = c.typecheck(q"${idxType._2}", c.TYPEmode).tpe.typeArgs.head
tryOptionGetOrElse(q"$packageName.Converter[List[${genericType.typeSymbol.name.toTypeName}]].toScala($columnValues)", q"Nil")
case t if t <:< typeOf[Seq[_]] =>
val genericType = c.typecheck(q"${idxType._2}", c.TYPEmode).tpe.typeArgs.head
tryOptionGetOrElse(q"$packageName.Converter[Seq[${genericType.typeSymbol.name.toTypeName}]].toScala($columnValues)", q"Nil")
case t =>
val caseClassFieldTypeName = TypeName(idxType._2.typeSymbol.name.decodedName.toString)
t match {
checkGetFieldTreeInformationList[T](columnsFunc).map { fieldTreeInformation =>
val columnValues = fieldTreeInformation.fieldTerm
val fieldType = fieldTreeInformation.fieldType
fieldTreeInformation.genericType match {
case Some(generic) if fieldTreeInformation.isList =>
tryOptionGetOrElse(q"$packageName.Converter[_root_.scala.List[$generic]].toScala($columnValues)", q"Nil")
case Some(generic) if fieldTreeInformation.isSeq =>
tryOptionGetOrElse(q"$packageName.Converter[_root_.scala.Seq[$generic]].toScala($columnValues)", q"Nil")
case Some(generic) if fieldTreeInformation.isOption =>
tryOption(q"$packageName.Converter[$generic].toScala($columnValues)")
case Some(generic) =>
c.abort(
c.enclosingPosition,
s"Not support `$fieldType` with genericType: `$generic`!!!"
)
case _ =>
val caseClassFieldTypeName = fieldType.typeSymbol.name.toTypeName
fieldType match {
case tt if tt =:= typeOf[Int] =>
q"$packageName.Converter[$caseClassFieldTypeName].toScala($columnValues).getOrElse(0)"
q"$packageName.Converter[$caseClassFieldTypeName].toScala($columnValues).getOrElse(${fieldTreeInformation.zeroValue})"
case tt if tt =:= typeOf[String] =>
q"""$packageName.Converter[$caseClassFieldTypeName].toScala($columnValues).getOrElse("")"""
q"""$packageName.Converter[$caseClassFieldTypeName].toScala($columnValues).getOrElse(${fieldTreeInformation.zeroValue})"""
case tt if tt =:= typeOf[Float] =>
q"$packageName.Converter[$caseClassFieldTypeName].toScala($columnValues).getOrElse(0F)"
q"$packageName.Converter[$caseClassFieldTypeName].toScala($columnValues).getOrElse(${fieldTreeInformation.zeroValue})"
case tt if tt =:= typeOf[Double] =>
q"$packageName.Converter[$caseClassFieldTypeName].toScala($columnValues).getOrElse(0D)"
q"$packageName.Converter[$caseClassFieldTypeName].toScala($columnValues).getOrElse(${fieldTreeInformation.zeroValue})"
case tt if tt =:= typeOf[Char] =>
q"$packageName.Converter[$caseClassFieldTypeName].toScala($columnValues).getOrElse('?')"
q"$packageName.Converter[$caseClassFieldTypeName].toScala($columnValues).getOrElse(${fieldTreeInformation.zeroValue})"
case tt if tt =:= typeOf[Byte] =>
q"$packageName.Converter[$caseClassFieldTypeName].toScala($columnValues).getOrElse(0)"
q"$packageName.Converter[$caseClassFieldTypeName].toScala($columnValues).getOrElse(${fieldTreeInformation.zeroValue})"
case tt if tt =:= typeOf[Short] =>
q"$packageName.Converter[$caseClassFieldTypeName].toScala($columnValues).getOrElse(0)"
q"$packageName.Converter[$caseClassFieldTypeName].toScala($columnValues).getOrElse(${fieldTreeInformation.zeroValue})"
case tt if tt =:= typeOf[Boolean] =>
q"$packageName.Converter[$caseClassFieldTypeName].toScala($columnValues).getOrElse(false)"
q"$packageName.Converter[$caseClassFieldTypeName].toScala($columnValues).getOrElse(${fieldTreeInformation.zeroValue})"
case tt if tt =:= typeOf[Long] =>
q"$packageName.Converter[$caseClassFieldTypeName].toScala($columnValues).getOrElse(0L)"
q"$packageName.Converter[$caseClassFieldTypeName].toScala($columnValues).getOrElse(${fieldTreeInformation.zeroValue})"
case _ =>
tryOptionGetOrElse(q"$packageName.Converter[$caseClassFieldTypeName].toScala($columnValues)", q"null")
tryOptionGetOrElse(q"$packageName.Converter[$caseClassFieldTypeName].toScala($columnValues)", fieldTreeInformation.zeroValue)
}
}
}
val tree =
q"""
lazy val $innerFuncTermName = () => $packageName.StringUtils.splitColumns($line, $format)
Option(${TermName(clazzName.decodedName.toString)}(..${fields(innerFuncTermName)}))
_root_.scala.Option(${clazzName.toTermName}(..${fields(innerFuncTermName)}))
"""
exprPrintTree[T](force = false, tree)
......
......@@ -19,10 +19,12 @@
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.bitlap.csv.core.macros
package org.bitlap.csv.macros
import org.bitlap.common.AbstractMacroProcessor
import org.bitlap.csv.CsvFormat
import scala.reflect.macros.blackbox
import org.bitlap.csv.core.CsvFormat
/** @author
* 梦境迷离
......@@ -36,37 +38,40 @@ object DeriveToString {
import c.universe._
protected val packageName = q"_root_.org.bitlap.csv"
def macroImpl[T: c.WeakTypeTag](t: c.Expr[T])(csvFormat: c.Expr[CsvFormat]): c.Expr[String] = {
val (names, indexTypes) = super.checkCaseClassZip[T]
val fieldZipInformation = super.checkGetFieldZipInformation[T]
val names = fieldZipInformation.fieldNames
val indexTypes = fieldZipInformation.fieldIndexTypeMapping
val clazzName = c.weakTypeOf[T].typeSymbol.name
val innerVarTermName = TermName("_t")
val indexByName = (i: Int) => TermName(names(i))
val fieldsToString = indexTypes.map { idxType =>
if (idxType._2 <:< typeOf[Option[_]]) {
val genericType = c.typecheck(q"${idxType._2}", c.TYPEmode).tpe.typeArgs.head
// scalafmt: { maxColumn = 400 }
q"""$packageName.Converter[${genericType.typeSymbol.name.toTypeName}].toCsvString {
if ($innerVarTermName.${indexByName(idxType._1)}.isEmpty) "" else $innerVarTermName.${indexByName(idxType._1)}.get
val fieldsToString = indexTypes.map { indexType =>
indexType._2 match {
case t if t <:< typeOf[List[_]] =>
val genericType = c.typecheck(q"${indexType._2}", c.TYPEmode).tpe.typeArgs.head
q"$packageName.Converter[_root_.scala.List[$genericType]].toCsvString($innerVarTermName.${indexByName(indexType._1)})"
case t if t <:< typeOf[Seq[_]] =>
val genericType = c.typecheck(q"${indexType._2}", c.TYPEmode).tpe.typeArgs.head
q"$packageName.Converter[_root_.scala.Seq[$genericType]].toCsvString($innerVarTermName.${indexByName(indexType._1)})"
case t if t <:< typeOf[Option[_]] =>
val genericType = c.typecheck(q"${indexType._2}", c.TYPEmode).tpe.typeArgs.head
// scalafmt: { maxColumn = 400 }
q"""$packageName.Converter[$genericType].toCsvString {
if ($innerVarTermName.${indexByName(indexType._1)}.isEmpty) "" else $innerVarTermName.${indexByName(indexType._1)}.get
}
"""
} else {
idxType._2 match {
case t if t <:< typeOf[List[_]] =>
val genericType = c.typecheck(q"${idxType._2}", c.TYPEmode).tpe.typeArgs.head
q"$packageName.Converter[List[${TypeName(genericType.typeSymbol.name.decodedName.toString)}]].toCsvString($innerVarTermName.${indexByName(idxType._1)})"
case t if t <:< typeOf[Seq[_]] =>
val genericType = c.typecheck(q"${idxType._2}", c.TYPEmode).tpe.typeArgs.head
q"$packageName.Converter[Seq[${TypeName(genericType.typeSymbol.name.decodedName.toString)}]].toCsvString($innerVarTermName.${indexByName(idxType._1)})"
case _ =>
q"$packageName.Converter[${TypeName(idxType._2.typeSymbol.name.decodedName.toString)}].toCsvString($innerVarTermName.${indexByName(idxType._1)})"
}
case _ =>
q"$packageName.Converter[${indexType._2}].toCsvString($innerVarTermName.${indexByName(indexType._1)})"
}
}
val tree =
q"""
val $innerVarTermName = $t
val fields = ${TermName(clazzName.decodedName.toString)}.unapply($innerVarTermName).orNull
val values = if (null == fields) List.empty else $fieldsToString
val fields = ${clazzName.toTermName}.unapply($innerVarTermName).orNull
val values = if (null == fields) _root_.scala.List.empty else $fieldsToString
$packageName.StringUtils.combineColumns(values, $csvFormat)
"""
exprPrintTree[String](force = false, tree)
......
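`DeriveToCaseClass` and `DeriveToString` are the low-level entry points that a hand-written `Converter` can delegate to; their `macroImpl` signatures above take a CSV line (returning `Option[T]`) and a value (returning `String`). A hedged sketch of wiring a custom instance on a made-up case class, assuming the two objects expose `apply` methods matching those signatures and that `Converter` declares `toScala`/`toCsvString` as the generated trees suggest:

```scala
import org.bitlap.csv.{ Converter, defaultCsvFormat }
import org.bitlap.csv.macros.{ DeriveToCaseClass, DeriveToString }

// Made-up case class for illustration.
final case class Person(name: String, age: Int)

implicit val personConverter: Converter[Person] = new Converter[Person] {
  // Assumed `apply` shapes: a CSV line in -> Option[Person], a value in -> String.
  override def toScala(line: String): Option[Person] = DeriveToCaseClass[Person](line)
  override def toCsvString(t: Person): String        = DeriveToString[Person](t)
}

// Converter[Person].toScala("jack,20")              // Some(Person("jack", 20))
// Converter[Person].toCsvString(Person("jack", 20)) // "jack,20"
```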
......@@ -19,13 +19,13 @@
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.bitlap.csv
package org.bitlap
/** @author
* 梦境迷离
* @version 1.0,6/2/22
*/
package object core {
package object csv {
implicit val defaultCsvFormat: CsvFormat = new DefaultCsvFormat {}
......
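Because the package object moves from `org.bitlap.csv.core` to `org.bitlap.csv`, the implicit `defaultCsvFormat` now resolves from the new package, so downstream code only has to adjust the import, as the updated tests below do. A tiny sketch with a made-up case class:

```scala
// old: import org.bitlap.csv.core.defaultCsvFormat
import org.bitlap.csv.{ ScalableBuilder, defaultCsvFormat }

final case class Row(id: Int, name: String) // made up
val row: Option[Row] = ScalableBuilder[Row].convert("1,jack")
```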
......@@ -19,11 +19,11 @@
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.bitlap.csv.core.test
package org.bitlap.csv.test
import org.bitlap.csv.Converter
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import org.bitlap.csv.core.Converter
/** @author
* 梦境迷离
......
......@@ -19,16 +19,11 @@
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.bitlap.csv.core.test
import org.bitlap.csv.core.StringUtils
import org.bitlap.csv.core.DefaultCsvFormat
package org.bitlap.csv.test
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import org.bitlap.csv.core.ScalableBuilder
import org.bitlap.csv.core.CsvableBuilder
import org.bitlap.csv.core.ScalableHelper
import org.bitlap.csv.{ CsvableBuilder, DefaultCsvFormat, ScalableBuilder, ScalableHelper, StringUtils }
import java.io.File
/** Complex use of common tests
......@@ -145,6 +140,7 @@ class CsvableAndScalableTest extends AnyFlatSpec with Matchers {
}
"CsvableAndScalable4" should "ok when reading from file" in {
import org.bitlap.csv.ScalableHelper
val metrics = ScalableHelper.readCsvFromClassPath[Metric2]("simple_data.csv") { line =>
ScalableBuilder[Metric2]
.setField[Seq[Dimension3]](
......
......@@ -19,12 +19,11 @@
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.bitlap.csv.core.test
package org.bitlap.csv.test
import org.bitlap.csv.core.{ CsvableBuilder, ScalableBuilder }
import org.bitlap.csv.{ CsvableBuilder, DefaultCsvFormat, ScalableBuilder }
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import org.bitlap.csv.core.DefaultCsvFormat
/** @author
* 梦境迷离
......@@ -161,12 +160,9 @@ class CustomConverterBuilderTest extends AnyFlatSpec with Matchers {
val scala = ScalableBuilder[Dimension2].convert(csv)
println(scala)
assert(scala.get == e)
val scala2 = ScalableBuilder[Dimension2].setField(_.h, _ => throw new Exception).convert(csv)
assert(scala2.get == e)
val scala3 = ScalableBuilder[Dimension2].setField(_.value, _ => throw new Exception).convert(csv)
assert(scala3.get == Dimension2("1", None, 'c', 1L, 1, false, 0.1f, 0.0))
}
}
......@@ -19,10 +19,10 @@
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.bitlap.csv.core.test
package org.bitlap.csv.test
import org.bitlap.csv.core.Converter
import org.bitlap.csv.core.macros.{ DeriveToCaseClass, DeriveToString }
import org.bitlap.csv.Converter
import org.bitlap.csv.macros.{ DeriveToCaseClass, DeriveToString }
/** @author
* 梦境迷离
......
......@@ -19,9 +19,9 @@
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.bitlap.csv.core.test
package org.bitlap.csv.test
import org.bitlap.csv.core.{ CsvableBuilder, ScalableBuilder, TsvFormat }
import org.bitlap.csv.{ CsvableBuilder, ScalableBuilder, TsvFormat }
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
......
......@@ -19,15 +19,13 @@
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.bitlap.csv.core.test
package org.bitlap.csv.test
import org.bitlap.csv.core.FileUtils
import org.bitlap.csv.core.StringUtils
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import java.io.{ BufferedReader, InputStreamReader }
import org.bitlap.csv.core.defaultCsvFormat
import org.bitlap.csv.defaultCsvFormat
import org.bitlap.csv.{ FileUtils, StringUtils }
/** @author
* 梦境迷离
......