diff --git a/.credentials.enc b/.credentials.enc deleted file mode 100644 index eede78e57d..0000000000 --- a/.credentials.enc +++ /dev/null @@ -1 +0,0 @@ -|y#DNNb"* 6?0'EWNX0$+O bR#oxqVcN@&KhF=#5@4ZYMtj:,Zϡx \ No newline at end of file diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml new file mode 100644 index 0000000000..1dc41ba10f --- /dev/null +++ b/.github/workflows/ci.yaml @@ -0,0 +1,30 @@ +name: CI +on: + pull_request: + branches: + - main + - lift-4.0 + - lift-5.0 + push: + branches: + - main + - lift-4.0 + - lift-5.0 +jobs: + test: + strategy: + fail-fast: false + matrix: + java: [11, 17, 21] + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Setup JDK + uses: actions/setup-java@v3 + with: + distribution: temurin + java-version: ${{ matrix.java }} + - uses: sbt/setup-sbt@v1 + - name: Build and Test + run: sbt test diff --git a/.gitignore b/.gitignore index c9825835f3..1cf79724f3 100644 --- a/.gitignore +++ b/.gitignore @@ -72,3 +72,10 @@ node_modules # VSCode .vscode + +# metals +.bsp +.bloop +project/.bloop +project/metals.sbt +project/project/ diff --git a/.java-version b/.java-version index 6259340971..b4de394767 100644 --- a/.java-version +++ b/.java-version @@ -1 +1 @@ -1.8 +11 diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 65d7d556cc..0000000000 --- a/.travis.yml +++ /dev/null @@ -1,52 +0,0 @@ -language: scala -dist: bionic -sudo: false -scala: - - 2.11.12 - - 2.12.12 - - 2.13.2 -cache: - directories: - - '$HOME/node_modules' - - $HOME/.ivy2 -services: - - mongodb -jdk: - - openjdk8 -matrix: - include: - - scala: 2.12.12 - jdk: openjdk11 - env: DISABLE_PUBLISH=true - - scala: 2.12.12 - jdk: openjdk12 - env: DISABLE_PUBLISH=true - - scala: 2.13.2 - jdk: openjdk11 - env: DISABLE_PUBLISH=true - - scala: 2.13.2 - jdk: openjdk12 - env: DISABLE_PUBLISH=true -script: ./travis.sh -branches: - only: - - master - - /^release-branch-.*$/ -node_js: - - "4.1" -before_script: - - "cd web/webkit" - - "npm install" - - "cd -" -notifications: - webhooks: - urls: - - https://webhooks.gitter.im/e/85e4d8ae84e569f9c501 - on_success: always - on_failure: always - on_start: never - email: - recipients: - - lift-committers@googlegroups.com - on_success: change - on_failure: always diff --git a/README.md b/README.md index 3540cff710..884f06c7f2 100644 --- a/README.md +++ b/README.md @@ -15,16 +15,25 @@ Because Lift applications are written in [Scala](http://www.scala-lang.org), an ## Getting Started -**Compatibility note:** -As of Lift 3.0, you'll need to be running Java 8 to use Lift. For those using Java 6 or Java 7, -you'll need to use Lift 2.6 until you can upgrade your Java installation. - You can create a new Lift project using your favorite build system by adding Lift as a dependency. Below we walk through setting up Lift in sbt and Maven. +### A note on Java Compatibility + +It's worth noting that the version of Java you're running may change what version of Lift you're +able to run. The following represents our Java compatibility for the recent major releases. + +| Lift Version | Java Version | +|--------------|--------------| +|2.6 |Java >= 6 | +|3.0+ |Java >= 8 | +|4.0+ |Java >= 11 | + +If you do not have a compatible Java version you will get errors when you try to use the Framework. + ### With sbt (new project) -If you're using a recent version of sbt (e.g. 0.13.16), you can create a new Lift application using +If you're using a recent version of sbt, you can create a new Lift application using our Giter8. 
To create a new, basic Lift application that has some example code, simply execute: ``` @@ -109,7 +118,7 @@ Add Lift to your `pom.xml` like so: 3.3.0 -Where `${scala.version}` is `2.11` or `2.12`. Individual patch releases of the Scala compiler +Where `${scala.version}` is `2.13` for the 4.x series. Individual patch releases of the Scala compiler (e.g. 2.12.2) are binary compatible with everything in their release series, so you only need the first two version parts. @@ -213,4 +222,3 @@ Lift is open source software released under the **Apache 2.0 license**. ## Continuous Integration -SNAPSHOTs are built by [Travis CI](https://travis-ci.org/lift/framework) diff --git a/build.sbt b/build.sbt index 31a23d690e..f96b113ab5 100644 --- a/build.sbt +++ b/build.sbt @@ -2,35 +2,37 @@ import Dependencies._ import LiftSbtHelpers._ ThisBuild / organization := "net.liftweb" -ThisBuild / version := "3.6.0-SNAPSHOT" +ThisBuild / version := "4.0.0-SNAPSHOT" ThisBuild / homepage := Some(url("https://www.liftweb.net")) ThisBuild / licenses += ("Apache License, Version 2.0", url("https://www.apache.org/licenses/LICENSE-2.0.txt")) ThisBuild / startYear := Some(2006) ThisBuild / organizationName := "WorldWide Conferencing, LLC" -val scala211Version = "2.11.12" -val scala212Version = "2.12.15" -val scala213Version = "2.13.8" +val scala213Version = "2.13.18" +val scala3LTSVersion = "3.3.7" -val crossUpTo212 = Seq(scala212Version, scala211Version) -val crossUpTo213 = scala213Version +: crossUpTo212 +ThisBuild / scalaVersion := scala213Version +ThisBuild / crossScalaVersions := Seq(scala213Version, scala3LTSVersion) -ThisBuild / scalaVersion := scala212Version -ThisBuild / crossScalaVersions := crossUpTo212 // default everyone to 2.12 for now - -ThisBuild / libraryDependencies ++= Seq(specs2, specs2Matchers, specs2Mock, scalacheck, scalactic, scalatest) +ThisBuild / libraryDependencies ++= Seq( + specs2(scalaVersion.value), + specs2Matchers(scalaVersion.value), + scalacheck(scalaVersion.value), + scalactic, + scalatest +) ThisBuild / scalacOptions ++= Seq("-deprecation") // Settings for Sonatype compliance ThisBuild / pomIncludeRepository := { _ => false } ThisBuild / publishTo := { - if (isSnapshot.value) { - Some(Opts.resolver.sonatypeSnapshots) - } else { - Some(Opts.resolver.sonatypeStaging) - } + val centralSnapshots = "https://central.sonatype.com/repository/maven-snapshots/" + if (isSnapshot.value) Some("central-snapshots" at centralSnapshots) + else localStaging.value } +ThisBuild / publishMavenStyle := true + ThisBuild / scmInfo := Some(ScmInfo(url("https://github.com/lift/framework"), "scm:git:https://github.com/lift/framework.git")) ThisBuild / pomExtra := Developers.toXml @@ -45,7 +47,7 @@ ThisBuild / resolvers ++= Seq( "releases" at "https://oss.sonatype.org/content/repositories/releases" ) -lazy val liftProjects = core ++ web ++ persistence +lazy val liftProjects = core ++ web lazy val framework = liftProject("lift-framework", file(".")) @@ -55,15 +57,14 @@ lazy val framework = // Core Projects // ------------- lazy val core: Seq[ProjectReference] = - Seq(common, actor, markdown, json, json_scalaz7, json_ext, util) + Seq(common, actor, markdown, util) lazy val common = coreProject("common") .settings( - description := "Common Libraties and Utilities", - libraryDependencies ++= Seq(slf4j_api, logback, slf4j_log4j12, scala_xml, scala_parser) + description := "Common Libraries and Utilities", + libraryDependencies ++= Seq(slf4j_api, logback, slf4j_log4j12, scala_xml, scala_parser, scalamock) ) - 
.settings(crossScalaVersions := crossUpTo213) lazy val actor = coreProject("actor") @@ -72,53 +73,23 @@ lazy val actor = description := "Simple Actor", Test / parallelExecution := false ) - .settings(crossScalaVersions := crossUpTo213) lazy val markdown = coreProject("markdown") .settings( description := "Markdown Parser", Test / parallelExecution := false, - libraryDependencies ++= Seq(scalatest, scalatest_junit, scala_xml, scala_parser) + libraryDependencies ++= Seq(scalatest, scala_xml, scala_parser) ) - .settings(crossScalaVersions := crossUpTo213) - -lazy val json = - coreProject("json") - .settings( - description := "JSON Library", - Test / parallelExecution := false, - libraryDependencies ++= Seq(scalap(scalaVersion.value), paranamer, scala_xml) - ) - .settings(crossScalaVersions := crossUpTo213) lazy val documentationHelpers = coreProject("documentation-helpers") .settings(description := "Documentation Helpers") .dependsOn(util) - .settings(crossScalaVersions := crossUpTo213) - -lazy val json_scalaz7 = - coreProject("json-scalaz7") - .dependsOn(json) - .settings( - description := "JSON Library based on Scalaz 7", - libraryDependencies ++= Seq(scalaz7) - ) - .settings(crossScalaVersions := crossUpTo213) - -lazy val json_ext = - coreProject("json-ext") - .dependsOn(common, json) - .settings( - description := "Extentions to JSON Library", - libraryDependencies ++= Seq(commons_codec, joda_time, joda_convert) - ) - .settings(crossScalaVersions := crossUpTo213) lazy val util = coreProject("util") - .dependsOn(actor, json, markdown) + .dependsOn(actor, markdown) .settings( description := "Utilities Library", Test / parallelExecution := false, @@ -127,14 +98,12 @@ lazy val util = joda_time, joda_convert, commons_codec, - javamail, log4j, htmlparser, xerces, - jbcrypt + json4s_native, ) ) - .settings(crossScalaVersions := crossUpTo213) // Web Projects // ------------ @@ -146,9 +115,8 @@ lazy val testkit = .dependsOn(util) .settings( description := "Testkit for Webkit Library", - libraryDependencies ++= Seq(commons_httpclient, servlet_api) + libraryDependencies ++= Seq(commons_httpclient, servlet_api, json4s_native, json4s_xml) ) - .settings(crossScalaVersions := crossUpTo213) lazy val webkit = webProject("webkit") @@ -160,14 +128,16 @@ lazy val webkit = commons_fileupload, rhino, servlet_api, - specs2Prov, - specs2MatchersProv, - jetty6, + specs2Prov(scalaVersion.value), + specs2MatchersProv(scalaVersion.value), + jetty11, + jettywebapp, jwebunit, - mockito_scalatest, + mockito_scalatest(scalaVersion.value), jquery, jasmineCore, - jasmineAjax + jasmineAjax, + specs2Mock(scalaVersion.value) ), libraryDependencies ++= { CrossVersion.partialVersion(scalaVersion.value) match { @@ -210,70 +180,3 @@ lazy val webkit = ) .enablePlugins(SbtWeb) - .settings(crossScalaVersions := crossUpTo213) - -// Persistence Projects -// -------------------- -lazy val persistence: Seq[ProjectReference] = - Seq(db, proto, mapper, record, squeryl_record, mongodb, mongodb_record) - -lazy val db = - persistenceProject("db") - .dependsOn(util, webkit) - .settings(libraryDependencies += mockito_scalatest) - .settings(crossScalaVersions := crossUpTo213) - -lazy val proto = - persistenceProject("proto") - .dependsOn(webkit) - .settings(crossScalaVersions := crossUpTo213) - -lazy val mapper = - persistenceProject("mapper") - .dependsOn(db, proto) - .settings( - description := "Mapper Library", - Test / parallelExecution := false, - libraryDependencies ++= Seq(h2, derby, jbcrypt), - Test / initialize := { - 
System.setProperty( - "derby.stream.error.file", - ((Test / crossTarget).value / "derby.log").absolutePath - ) - } - ) - .settings(crossScalaVersions := crossUpTo213) - -lazy val record = - persistenceProject("record") - .dependsOn(proto) - .settings(libraryDependencies ++= Seq(jbcrypt)) - .settings(crossScalaVersions := crossUpTo213) - -lazy val squeryl_record = - persistenceProject("squeryl-record") - .dependsOn(record, db) - .settings(libraryDependencies ++= Seq(h2, squeryl)) - -lazy val mongodb = - persistenceProject("mongodb") - .dependsOn(json_ext, util) - .settings( - crossScalaVersions := crossUpTo213, - Test / parallelExecution := false, - libraryDependencies ++= Seq(mongo_java_driver, mongo_java_driver_async), - Test / initialize := { - System.setProperty( - "java.util.logging.config.file", - ((Test / resourceDirectory).value / "logging.properties").absolutePath - ) - } - ) - -lazy val mongodb_record = - persistenceProject("mongodb-record") - .dependsOn(record, mongodb) - .settings( - crossScalaVersions := crossUpTo213, - Test / parallelExecution := false - ) diff --git a/core/actor/src/main/java/net/liftweb/actor/JavaActorBase.java b/core/actor/src/main/java/net/liftweb/actor/JavaActorBase.java deleted file mode 100644 index 56836b0d21..0000000000 --- a/core/actor/src/main/java/net/liftweb/actor/JavaActorBase.java +++ /dev/null @@ -1,18 +0,0 @@ -package net.liftweb.actor; - -import java.lang.annotation.*; - -/** - * The internal Java class where the annotations for - * JavaActor are stored - */ -public class JavaActorBase { - /** - * A method annotated with Receive will - * receive a message of the type of its parameter. - */ - @Retention(RetentionPolicy.RUNTIME) - @Target(ElementType.METHOD) - public @interface Receive { - } -} diff --git a/core/actor/src/main/scala/net/liftweb/actor/LAFuture.scala b/core/actor/src/main/scala/net/liftweb/actor/LAFuture.scala index 44221bcaac..851d6038cd 100644 --- a/core/actor/src/main/scala/net/liftweb/actor/LAFuture.scala +++ b/core/actor/src/main/scala/net/liftweb/actor/LAFuture.scala @@ -76,15 +76,23 @@ class LAFuture[T](val scheduler: LAScheduler = LAScheduler, context: Box[LAFutur * Get the future value */ @scala.annotation.tailrec - final def get: T = synchronized { - if (satisfied) item - else if (aborted) throw new AbortedFutureException(failure) - else { - this.wait() - if (satisfied) item - else if (aborted) throw new AbortedFutureException(failure) - else get + final def get: T = { + synchronized { + if (satisfied) { + return item + } else if (aborted) { + throw new AbortedFutureException(failure) + } else { + this.wait() + if (satisfied) { + return item + } else if (aborted) { + throw new AbortedFutureException(failure) + } + } } + + get } /** @@ -92,7 +100,7 @@ class LAFuture[T](val scheduler: LAScheduler = LAScheduler, context: Box[LAFutur * value has not been satisfied, execute the function * when the value is satified */ - def foreach(f: T => Unit) { + def foreach(f: T => Unit):Unit = { onSuccess(f) } @@ -188,7 +196,7 @@ class LAFuture[T](val scheduler: LAScheduler = LAScheduler, context: Box[LAFutur /** * Abort the future. It can never be satified */ - def abort() { + def abort(): Unit = { fail(Empty) } @@ -197,7 +205,7 @@ class LAFuture[T](val scheduler: LAScheduler = LAScheduler, context: Box[LAFutur * * @param f the function to execute on success. 
*/ - def onSuccess(f: T => Unit) { + def onSuccess(f: T => Unit): Unit = { val contextFn = LAFuture.inContext(f, context) synchronized { if (satisfied) {LAFuture.executeWithObservers(scheduler, () => contextFn(item))} else @@ -212,7 +220,7 @@ class LAFuture[T](val scheduler: LAScheduler = LAScheduler, context: Box[LAFutur * * @param f the function to execute. Will receive a Box[Nothing] which may be a Failure if there's exception data */ - def onFail(f: Box[Nothing] => Unit) { + def onFail(f: Box[Nothing] => Unit): Unit = { val contextFn = LAFuture.inContext(f, context) synchronized { if (aborted) LAFuture.executeWithObservers(scheduler, () => contextFn(failure)) else @@ -227,7 +235,7 @@ class LAFuture[T](val scheduler: LAScheduler = LAScheduler, context: Box[LAFutur * * @param f the function to execute on completion of the Future */ - def onComplete(f: Box[T] => Unit) { + def onComplete(f: Box[T] => Unit): Unit = { val contextFn = LAFuture.inContext(f, context) synchronized { if (satisfied) {LAFuture.executeWithObservers(scheduler, () => contextFn(Full(item)))} else @@ -240,7 +248,7 @@ class LAFuture[T](val scheduler: LAScheduler = LAScheduler, context: Box[LAFutur * If the execution fails, do this * @param e */ - def fail(e: Exception) { + def fail(e: Exception): Unit = { fail(Failure(e.getMessage, Full(e), Empty)) } @@ -248,7 +256,7 @@ class LAFuture[T](val scheduler: LAScheduler = LAScheduler, context: Box[LAFutur * If the execution fails as a Box[Nothing], do this * @param e */ - def fail(e: Box[Nothing]) { + def fail(e: Box[Nothing]): Unit = { synchronized { if (!satisfied && !aborted) { aborted = true @@ -311,14 +319,14 @@ object LAFuture { * * @param future */ - private def notifyObservers(future: LAFuture[_]) { + private def notifyObservers(future: LAFuture[_]): Unit = { val observers = threadInfo.get() if (null eq observers) {} else { observers.foreach(_(future)) } } - private def executeWithObservers(scheduler: LAScheduler, f: () => Unit) { + private def executeWithObservers(scheduler: LAScheduler, f: () => Unit): Unit = { val cur = threadInfo.get() scheduler.execute(() => { val old = threadInfo.get() diff --git a/core/actor/src/main/scala/net/liftweb/actor/LAPinger.scala b/core/actor/src/main/scala/net/liftweb/actor/LAPinger.scala index a444ff242a..2c18793b8a 100644 --- a/core/actor/src/main/scala/net/liftweb/actor/LAPinger.scala +++ b/core/actor/src/main/scala/net/liftweb/actor/LAPinger.scala @@ -40,14 +40,16 @@ object ThreadPoolRules { object LAPinger { /**The underlying java.util.concurrent.ScheduledExecutor */ - private var service = Executors.newSingleThreadScheduledExecutor(TF) + @volatile var buildService: () => ScheduledExecutorService = () => Executors.newSingleThreadScheduledExecutor(TF) + + private var service: ScheduledExecutorService = buildService() /** * Re-create the underlying SingleThreadScheduledExecutor */ def restart: Unit = synchronized { if ((service eq null) || service.isShutdown) - service = Executors.newSingleThreadScheduledExecutor(TF) + service = buildService() } /** @@ -75,7 +77,7 @@ object LAPinger { try { service.schedule(r, delay, TimeUnit.MILLISECONDS) } catch { - case e: RejectedExecutionException => throw PingerException(msg + " could not be scheduled on " + to, e) + case e: RejectedExecutionException => throw PingerException(msg.toString + " could not be scheduled on " + to, e) } } diff --git a/core/actor/src/main/scala/net/liftweb/actor/LiftActor.scala b/core/actor/src/main/scala/net/liftweb/actor/LiftActor.scala index b39d5ad2aa..cad0f8c0be 
100644 --- a/core/actor/src/main/scala/net/liftweb/actor/LiftActor.scala +++ b/core/actor/src/main/scala/net/liftweb/actor/LiftActor.scala @@ -60,18 +60,18 @@ object LAScheduler extends LAScheduler with Loggable { import java.util.concurrent._ private val es = // Executors.newFixedThreadPool(threadPoolSize) - new ThreadPoolExecutor(threadPoolSize, + new ThreadPoolExecutor(threadPoolSize, maxThreadPoolSize, 60, TimeUnit.SECONDS, blockingQueueSize match { - case Full(x) => + case Full(x) => new ArrayBlockingQueue(x) case _ => new LinkedBlockingQueue }) def execute(f: () => Unit): Unit = - es.execute(new Runnable{def run() { + es.execute(new Runnable{def run(): Unit = { try { f() } catch { @@ -93,7 +93,7 @@ object LAScheduler extends LAScheduler with Loggable { * * @param f the function to execute on another thread */ - def execute(f: () => Unit) { + def execute(f: () => Unit) : Unit = { synchronized { if (exec eq null) { exec = createExecutor() @@ -102,12 +102,12 @@ object LAScheduler extends LAScheduler with Loggable { } } - def shutdown() { + def shutdown(): Unit = { synchronized { if (exec ne null) { exec.shutdown() } - + exec = null } } @@ -129,7 +129,7 @@ trait SpecializedLiftActor[T] extends SimpleActor[T] { if (f(this)) Full(this) else next.find(f) */ - def remove() { + def remove(): Unit = { prev.next = next next.prev = prev } @@ -163,7 +163,7 @@ trait SpecializedLiftActor[T] extends SimpleActor[T] { case x if f(x) => Full(x) case x => findMailboxItem(x.next, f) } - + /** * Send a message to the Actor. This call will always succeed * and return almost immediately. The message will be processed @@ -221,7 +221,7 @@ trait SpecializedLiftActor[T] extends SimpleActor[T] { toDo() } - private def processMailbox(ignoreProcessing: Boolean) { + private def processMailbox(ignoreProcessing: Boolean): Unit = { around { proc2(ignoreProcessing) } @@ -240,7 +240,7 @@ trait SpecializedLiftActor[T] extends SimpleActor[T] { case Nil => f case xs => CommonLoanWrapper(xs)(f) } - private def proc2(ignoreProcessing: Boolean) { + private def proc2(ignoreProcessing: Boolean): Unit = { var clearProcessing = true baseMailbox.synchronized { if (!ignoreProcessing && processing) return @@ -404,7 +404,7 @@ with ForwardableActor[Any, Any] { - protected final def forwardMessageTo(msg: Any, forwardTo: TypedActor[Any, Any]) { + protected final def forwardMessageTo(msg: Any, forwardTo: TypedActor[Any, Any]): Unit = { if (null ne responseFuture) { forwardTo match { case la: LiftActor => la ! MsgWithResp(msg, responseFuture) @@ -437,7 +437,7 @@ with ForwardableActor[Any, Any] { * This method is the Java callable version of !?. */ def sendAndGetReply(msg: Any): Any = this !? msg - + /** * Send a message to the Actor and wait for * the actor to process the message and reply. @@ -507,7 +507,7 @@ with ForwardableActor[Any, Any] { * The Actor should call this method with a reply * to the message */ - protected def reply(v: Any) { + protected def reply(v: Any): Unit = { if (null ne responseFuture) { responseFuture.satisfy(v) } @@ -524,95 +524,3 @@ with ForwardableActor[Any, Any] { * have been received by the actor. 
**/ class MockLiftActor extends MockSpecializedLiftActor[Any] with LiftActor - -import java.lang.reflect._ - -object LiftActorJ { - private var methods: Map[Class[_], DispatchVendor] = Map() - - def calculateHandler(what: LiftActorJ): PartialFunction[Any, Unit] = - synchronized { - val clz = what.getClass - methods.get(clz) match { - case Some(pf) => pf.vend(what) - case _ => { - val pf = buildPF(clz) - methods += clz -> pf - pf.vend(what) - } - } - } - - private def getBaseClasses(clz: Class[_]): List[Class[_]] = clz match { - case null => Nil - case clz => clz :: getBaseClasses(clz.getSuperclass) - } - - private def receiver(in: Method): Boolean = { - in.getParameterTypes().length == 1 && - (in.getAnnotation(classOf[JavaActorBase.Receive]) != null) - } - - private def buildPF(clz: Class[_]): DispatchVendor = { - val methods = getBaseClasses(clz). - flatMap(_.getDeclaredMethods.toList.filter(receiver)) - - val clzMap: Map[Class[_], Method] = - Map(methods.map{m => - m.setAccessible(true) // access private and protected methods - m.getParameterTypes().apply(0) -> m} :_*) - - new DispatchVendor(clzMap) - } -} - -private final class DispatchVendor(map: Map[Class[_], Method]) { - private val baseMap: Map[Class[_], Option[Method]] = - Map(map.map{case (k,v) => (k, Some(v))}.toList :_*) - - def vend(actor: LiftActorJ): PartialFunction[Any, Unit] = - new PartialFunction[Any, Unit] { - var theMap: Map[Class[_], Option[Method]] = baseMap - - def findClass(clz: Class[_]): Option[Method] = - theMap.find(_._1.isAssignableFrom(clz)).flatMap(_._2) - - def isDefinedAt(v: Any): Boolean = { - val clz = v.asInstanceOf[Object].getClass - theMap.get(clz) match { - case Some(Some(_)) => true - case None => { - val answer = findClass(clz) - theMap += clz -> answer - answer.isDefined - } - case _ => false - } - } - - def apply(v: Any): Unit = { - val o: Object = v.asInstanceOf[Object] - val meth = theMap(o.getClass).get - meth.invoke(actor, o) match { - case null => - case x => actor.internalReply(x) - } - } - } -} - -/** - * Java versions of Actors should subclass this method. - * Methods decorated with the @Receive annotation - * will receive messages of that type. - */ -class LiftActorJ extends JavaActorBase with LiftActor { - protected lazy val _messageHandler: PartialFunction[Any, Unit] = - calculateJavaMessageHandler - - protected def calculateJavaMessageHandler = LiftActorJ.calculateHandler(this) - - protected def messageHandler = _messageHandler - - private[actor] def internalReply(v: Any) = reply(v) -} diff --git a/core/actor/src/test/java/net/liftweb/actor/MyJavaActor.java b/core/actor/src/test/java/net/liftweb/actor/MyJavaActor.java deleted file mode 100644 index 15671417af..0000000000 --- a/core/actor/src/test/java/net/liftweb/actor/MyJavaActor.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright 2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb.actor; - - -/** - * Java implementation of LiftActor for test. 
- */ -public class MyJavaActor extends LiftActorJ { - private int myValue = 0; - - @Receive protected void set(Set what) { - myValue = what.num(); - } - - @Receive public void get(Get get) { - reply(new Answer(myValue)); - } - - @Receive protected Answer add(Add toAdd) { - myValue += toAdd.num(); - return new Answer(myValue); - } - - @Receive public Answer sub(Sub toSub) { - myValue -= toSub.num(); - return new Answer(myValue); - } -} diff --git a/core/actor/src/test/scala/net/liftweb/actor/ActorSpec.scala b/core/actor/src/test/scala/net/liftweb/actor/ActorSpec.scala index 9eb7fe6fa2..a5a0bc4ac5 100644 --- a/core/actor/src/test/scala/net/liftweb/actor/ActorSpec.scala +++ b/core/actor/src/test/scala/net/liftweb/actor/ActorSpec.scala @@ -33,10 +33,6 @@ class ActorSpec extends Specification { "support common features" in commonFeatures(new MyScalaActor) } - "A Java Actor" should { - "support common features" in commonFeatures(new MyJavaActor) - } - private def commonFeatures(actor: LiftActor) = { sequential diff --git a/core/common/src/main/scala/net/liftweb/common/Box.scala b/core/common/src/main/scala/net/liftweb/common/Box.scala index 971ba43949..ea6d073a75 100644 --- a/core/common/src/main/scala/net/liftweb/common/Box.scala +++ b/core/common/src/main/scala/net/liftweb/common/Box.scala @@ -275,7 +275,7 @@ sealed trait BoxTrait extends OptionImplicits { * res1: net.liftweb.common.Box[Int] = Full(5) * }}} */ - def asA[B](in: T forSome { type T })(implicit m: Manifest[B]): Box[B] = { + def asA[B](in: Any)(implicit m: Manifest[B]): Box[B] = { (Box !! in).asA[B] } } @@ -1117,12 +1117,12 @@ object BoxOrRaw { implicit def rawToBoxOrRaw[T, Q <: T](r: Q): BoxOrRaw[T] = RawBoxOrRaw(r: T) - implicit def boxToBoxOrRaw[T, Q <% T](r: Box[Q]): BoxOrRaw[T] = { - BoxedBoxOrRaw(r.map(v => v: T)) + implicit def boxToBoxOrRaw[T, Q](r: Box[Q])(implicit ev: Q => T): BoxOrRaw[T] = { + BoxedBoxOrRaw(r.map(v => ev(v))) } - implicit def optionToBoxOrRaw[T, Q <% T](r: Option[Q]): BoxOrRaw[T] = { - BoxedBoxOrRaw(r.map(v => v: T)) + implicit def optionToBoxOrRaw[T, Q](r: Option[Q])(implicit ev: Q => T): BoxOrRaw[T] = { + BoxedBoxOrRaw(r.map(v => ev(v))) } implicit def borToBox[T](in: BoxOrRaw[T]): Box[T] = in.box diff --git a/core/common/src/main/scala/net/liftweb/common/BoxLogging.scala b/core/common/src/main/scala/net/liftweb/common/BoxLogging.scala index 6aae0a13c7..da8e7aa3ac 100644 --- a/core/common/src/main/scala/net/liftweb/common/BoxLogging.scala +++ b/core/common/src/main/scala/net/liftweb/common/BoxLogging.scala @@ -143,7 +143,7 @@ trait BoxLogging { } } - private def logFailure(message: String, logFn: (String, Option[Throwable])=>Unit): Unit = { + def logFailure(message: String, logFn: (String, Option[Throwable])=>Unit): Unit = { doLog(message, logFn, ()=>logFn(s"$message: Box was Empty.", None)) } diff --git a/core/common/src/main/scala/net/liftweb/common/Conversions.scala b/core/common/src/main/scala/net/liftweb/common/Conversions.scala index 56240d7e0c..0a03a4bd0c 100644 --- a/core/common/src/main/scala/net/liftweb/common/Conversions.scala +++ b/core/common/src/main/scala/net/liftweb/common/Conversions.scala @@ -48,7 +48,7 @@ sealed trait StringOrNodeSeq { * their needs dictate. 
*/ object StringOrNodeSeq { - implicit def strTo[T <% String](str: T): StringOrNodeSeq = + implicit def strTo(str: String): StringOrNodeSeq = new StringOrNodeSeq { def nodeSeq: NodeSeq = Text(str) } diff --git a/core/common/src/main/scala/net/liftweb/common/HList.scala b/core/common/src/main/scala/net/liftweb/common/HList.scala index 90a094264f..f447fcde09 100644 --- a/core/common/src/main/scala/net/liftweb/common/HList.scala +++ b/core/common/src/main/scala/net/liftweb/common/HList.scala @@ -109,7 +109,7 @@ object HLists { * }}} */ final case class :+:[+H, +T <: HList](head: H, tail: T) extends HList { - override def toString = head + " :+: " + tail + override def toString = s"$head :+: $tail" } /** diff --git a/core/common/src/main/scala/net/liftweb/common/LRU.scala b/core/common/src/main/scala/net/liftweb/common/LRU.scala index b619c5d369..f70b21ef6c 100644 --- a/core/common/src/main/scala/net/liftweb/common/LRU.scala +++ b/core/common/src/main/scala/net/liftweb/common/LRU.scala @@ -24,19 +24,19 @@ private[common] trait LinkedListElem[T1, T2] { private[common] var value2: T2 = _ - private[common] def remove { + private[common] def remove: Unit = { _prev._next = _next _next._prev = _prev } - private[common] def addAtHead(what: LinkedListElem[T1, T2]) { + private[common] def addAtHead(what: LinkedListElem[T1, T2]): Unit = { what._next = _next what._prev = this _next._prev = what this._next = what } - private[common] def addAtTail(what: LinkedListElem[T1, T2]) { + private[common] def addAtTail(what: LinkedListElem[T1, T2]): Unit = { what._prev = _prev what._next = this _prev._next = what @@ -73,7 +73,7 @@ class LRUMap[K, V](initMaxSize: Int, loadFactor: Box[Float], expiredFunc: ((K, V * Updates the `LRUMap`'s current max size to `newMaxSize`, evicting the * oldest entries if the size has shrunk. */ - def updateMaxSize(newMaxSize: Int) { + def updateMaxSize(newMaxSize: Int): Unit = { val oldMaxSize = _maxSize _maxSize = newMaxSize @@ -128,12 +128,12 @@ class LRUMap[K, V](initMaxSize: Int, loadFactor: Box[Float], expiredFunc: ((K, V /** * Alias for `[[-]]`. */ - def remove(key: K) { + def remove(key: K): Unit = { localMap.get(key) match { case null => - case v => - v.remove - localMap.remove(key) + case v => + v.remove + localMap.remove(key) } } @@ -144,13 +144,13 @@ class LRUMap[K, V](initMaxSize: Int, loadFactor: Box[Float], expiredFunc: ((K, V * new in the map and the map has grown beyond the specifiex `[[maxSize]]`, * evicts the least-recently-used entries. */ - def update(key: K, value: V) { + def update(key: K, value: V): Unit = { localMap.get(key) match { case null => val what = new LinkedListElem[K, V] {def value1 = key} - what.value2 = value - addAtHead(what) - localMap.put(key, what) + what.value2 = value + addAtHead(what) + localMap.put(key, what) doRemoveIfTooMany() @@ -173,7 +173,7 @@ class LRUMap[K, V](initMaxSize: Int, loadFactor: Box[Float], expiredFunc: ((K, V * A mechanism for expiring elements from cache. This method can devolve into * O(n ^ 2) if lots of elements can't be expired. */ - private def doRemoveIfTooMany() { + private def doRemoveIfTooMany(): Unit = { while (localMap.size > maxSize) { var toRemove = _prev while (!canExpire(toRemove.value1, toRemove.value2)) { @@ -192,7 +192,7 @@ class LRUMap[K, V](initMaxSize: Int, loadFactor: Box[Float], expiredFunc: ((K, V * * Does nothing by default, override for custom functionality. 
*/ - protected def expired(key: K, value: V) { + protected def expired(key: K, value: V): Unit = { } @@ -202,7 +202,7 @@ class LRUMap[K, V](initMaxSize: Int, loadFactor: Box[Float], expiredFunc: ((K, V val set = localMap.entrySet.iterator new Iterator[(K, V)] { def hasNext = set.hasNext - def next: (K, V) = { + def next() : (K, V) = { val k = set.next (k.getKey, k.getValue.value2) } diff --git a/core/common/src/main/scala/net/liftweb/common/Logging.scala b/core/common/src/main/scala/net/liftweb/common/Logging.scala index 5f60b7f405..7069ffbb30 100644 --- a/core/common/src/main/scala/net/liftweb/common/Logging.scala +++ b/core/common/src/main/scala/net/liftweb/common/Logging.scala @@ -16,7 +16,7 @@ package net.liftweb package common - + import org.slf4j.{MDC => SLF4JMDC, Marker, Logger => SLF4JLogger, LoggerFactory} /** @@ -56,38 +56,38 @@ object Logger { setup.foreach { _() } true } - + /** * This function, if set, will be called before any loggers are created. - * + * * Useful for initializing the logging backend with a non-default configuration. - * + * * Helpers exists for [[Log4j log4j]] and [[Logback logback]]: - * + * * {{{ * Logger.setup = Full(Log4j.withFile(url) * }}} - * + * * or * * {{{ * Logger.setup = Full(Logback.withFile(url)) * }}} - * + * */ var setup: Box[() => Unit] = Empty - + def loggerNameFor(cls: Class[_]) = { val className = cls.getName - if (className endsWith "$") + if (className endsWith "$") className.substring(0, className.length - 1) - else + else className } def apply(cls: Class[_]): Logger = if (ranSetup) new WrappedLogger(LoggerFactory.getLogger(loggerNameFor(cls))) else null def apply(name: String): Logger = if (ranSetup) new WrappedLogger(LoggerFactory.getLogger(name)) else null - + /** * Set the [[http://www.slf4j.org/manual.html#mdc Mapped Diagnostic Context]] * for the thread and execute the block `f`. @@ -101,9 +101,9 @@ object Logger { try { f } finally { - if (old eq null) - MDC.clear - else + if (old eq null) + MDC.clear() + else SLF4JMDC.setContextMap(old) } } @@ -140,12 +140,12 @@ object MDC { * `Logger` is a thin wrapper on top of an SLF4J Logger. * * The main purpose is to utilize Scala features for logging. - * + * * Note that the dynamic type of "this" is used when this trait is mixed in. - * + * * This may not always be what you want. If you need the static type, you have * to declare your own `Logger`: - * + * * {{{ * class MyClass { * val logger = Logger(classOf[MyClass]) @@ -156,7 +156,7 @@ trait Logger { private lazy val logger: SLF4JLogger = _logger protected def _logger = if (Logger.ranSetup) LoggerFactory.getLogger(Logger.loggerNameFor(this.getClass)) else null - + def assertLog(assertion: Boolean, msg: => String) = if (assertion) info(msg) /** @@ -166,7 +166,7 @@ trait Logger { logger.trace(msg+": "+v.toString) v } - + /** * Trace a `[[Failure]]`. If the log level is trace and the `[[Box]]` is a * `Failure`, trace the message concatenated with the `Failure`'s message. If @@ -205,14 +205,14 @@ trait Logger { } } } - - + + def debug(msg: => AnyRef) = if (logger.isDebugEnabled) logger.debug(String.valueOf(msg)) def debug(msg: => AnyRef, t: Throwable) = if (logger.isDebugEnabled) logger.debug(String.valueOf(msg), t) def debug(msg: => AnyRef, marker: Marker) = if (logger.isDebugEnabled) logger.debug(marker, String.valueOf(msg)) def debug(msg: => AnyRef, t: Throwable, marker: Marker) = if (logger.isDebugEnabled) logger.debug(marker, String.valueOf(msg), t) def isDebugEnabled = logger.isDebugEnabled - + /** * Info a `Failure`. 
If the log level is info and the `Box` is a `Failure`, * info the message concatenated with the `Failure`'s message. If the @@ -233,7 +233,7 @@ trait Logger { def info(msg: => AnyRef, marker: Marker) = if (logger.isInfoEnabled) logger.info(marker,String.valueOf(msg)) def info(msg: => AnyRef, t: Throwable, marker: Marker) = if (logger.isInfoEnabled) logger.info(marker,String.valueOf(msg), t) def isInfoEnabled = logger.isInfoEnabled - + /** * Warn a `Failure`. If the log level is warn and the `Box` is a `Failure`, * warn the message concatenated with the `Failure`'s message. If the @@ -254,7 +254,7 @@ trait Logger { def warn(msg: => AnyRef, marker: Marker) = if (logger.isWarnEnabled) logger.warn(marker,String.valueOf(msg)) def warn(msg: => AnyRef, t: Throwable, marker: Marker) = if (logger.isWarnEnabled) logger.warn(marker,String.valueOf(msg), t) def isWarnEnabled = logger.isWarnEnabled - + /** * Error a `Failure`. If the log level is error and the `Box` is a `Failure`, * error the message concatenated with the `Failure`'s message. If the @@ -276,7 +276,7 @@ trait Logger { def error(msg: => AnyRef, marker: Marker) = if (logger.isErrorEnabled) logger.error(marker,String.valueOf(msg)) def error(msg: => AnyRef, t: Throwable, marker: Marker) = if (logger.isErrorEnabled) logger.error(marker,String.valueOf(msg), t) def isErrorEnabled = logger.isErrorEnabled - + } /** @@ -297,7 +297,7 @@ trait Loggable { /** * If you mix this into your class, you will get a protected `logger` instance * `lazy val` that will be a `[[Logger]]` instance. - * + * * Useful for mixing into objects that are created before Lift has booted (and * thus Logging is not yet configured). */ @@ -311,7 +311,7 @@ trait LazyLoggable { object Log4j { import org.apache.log4j.{LogManager,PropertyConfigurator} import org.apache.log4j.xml.DOMConfigurator - + /** * Default configuration for log4j backend. Appends to the console with a * simple layout at `INFO` level. @@ -329,7 +329,7 @@ object Log4j { """ - + /** * Configure with the contents of the file at the specified `url` (either * `.xml` or `.properties`). @@ -338,23 +338,23 @@ object Log4j { if (url.getPath.endsWith(".xml")) { val domConf = new DOMConfigurator domConf.doConfigure(url, LogManager.getLoggerRepository()) - } else + } else PropertyConfigurator.configure(url) } /** * Configure with the specified configuration. `config` must contain a valid * XML document. */ - def withConfig(config: String)() = { + def withConfig(config: String)(): Unit = { val domConf = new DOMConfigurator val is = new java.io.ByteArrayInputStream(config.getBytes("UTF-8")) domConf.doConfigure(is, LogManager.getLoggerRepository()) } - + /** * Configure with simple defaults. See [[defaultProps]]. */ - def withDefault() = withConfig(defaultProps) + def withDefault(): Unit = withConfig(defaultProps)() } /** @@ -368,12 +368,12 @@ object Logback { /** * Configure with the contents of the XML file at the specified `url`. 
*/ - def withFile(url: java.net.URL)() = { + def withFile(url: java.net.URL)(): Unit = { val lc = LoggerFactory.getILoggerFactory().asInstanceOf[LoggerContext]; val configurator = new JoranConfigurator(); configurator.setContext(lc); // the context was probably already configured by default configuration rules - lc.reset(); + lc.reset(); configurator.doConfigure(url); } } diff --git a/core/common/src/test/scala/net/liftweb/common/BoxLoggingSpec.scala b/core/common/src/test/scala/net/liftweb/common/BoxLoggingSpec.scala index c5d099c2d5..285b812b7d 100644 --- a/core/common/src/test/scala/net/liftweb/common/BoxLoggingSpec.scala +++ b/core/common/src/test/scala/net/liftweb/common/BoxLoggingSpec.scala @@ -3,10 +3,10 @@ package common import org.slf4j.{Logger=>SLF4JLogger} -import org.specs2.mock.Mockito +import org.scalamock.specs2.MockContext import org.specs2.mutable.Specification -class BoxLoggingSpec extends Specification with Mockito { +class BoxLoggingSpec extends Specification { class MockBoxLoggingClass extends BoxLogging { var loggedErrors = List[(String, Option[Throwable])]() var loggedWarns = List[(String, Option[Throwable])]() @@ -410,44 +410,45 @@ class BoxLoggingSpec extends Specification with Mockito { import net.liftweb.common.Logger import org.slf4j.{Logger => SLF4JLogger} - val mockLogger = mock[SLF4JLogger] - mockLogger.isErrorEnabled() returns true + "log to the Lift logger" in new MockContext { + val mockLogger = mock[SLF4JLogger] + (mockLogger.isErrorEnabled: () => Boolean).expects().returning(true).anyNumberOfTimes() - class MyLoggable extends LoggableBoxLogging { - override val logger = new Logger { - override protected def _logger = mockLogger + class MyLoggable extends LoggableBoxLogging { + override val logger = new Logger { + override protected def _logger = mockLogger + } } - } - "log to the Lift logger" in { + (mockLogger.error(_: String)).expects(*).once() + (mockLogger.error(_: String, _: Throwable)).expects(*, *).once() + val result = new MyLoggable { Failure("Failed").logFailure("Second") Failure("Excepted", Full(new Exception("uh-oh")), Empty).logFailure("Third") } - - (there was one(mockLogger).error(any[String])) and - (there was one(mockLogger).error(any[String], any[Exception])) } } "when logging with in SLF4J context" in { import org.slf4j.{Logger => SLF4JLogger} - val mockLogger = mock[SLF4JLogger] + "log to the SLF4J logger" in new MockContext { + val mockLogger = mock[SLF4JLogger] + (mockLogger.isErrorEnabled: () => Boolean).expects().returning(true).anyNumberOfTimes() - class TestClass extends SLF4JBoxLogging { - val logger = mockLogger - } + class TestClass extends SLF4JBoxLogging { + val logger = mockLogger + } + + (mockLogger.error(_: String)).expects(*).once() + (mockLogger.error(_: String, _: Throwable)).expects(*, *).once() - "log to the SLF4J logger" in { new TestClass { Failure("Failed").logFailure("Second") Failure("Excepted", Full(new Exception("uh-oh")), Empty).logFailure("Third") } - - there was one(mockLogger).error(any[String]) - there was one(mockLogger).error(any[String], any[Exception]) } } } diff --git a/core/common/src/test/scala/net/liftweb/common/BoxSpec.scala b/core/common/src/test/scala/net/liftweb/common/BoxSpec.scala index 9e23e7d25c..1977aa4c65 100644 --- a/core/common/src/test/scala/net/liftweb/common/BoxSpec.scala +++ b/core/common/src/test/scala/net/liftweb/common/BoxSpec.scala @@ -49,13 +49,13 @@ class BoxSpec extends Specification with ScalaCheck with BoxGenerator { "A Box" can { "be created from a Option. 
It is Empty if the option is None" in { - Box(None) must beEmpty + Box(None) must be_==(Empty) } "be created from a Option. It is Full(x) if the option is Some(x)" in { Box(Some(1)) must_== Full(1) } "be created from a List containing one element. It is Empty if the list is empty" in { - Box(Nil) must beEmpty + Box(Nil) must be_==(Empty) } "be created from a List containing one element. It is Full(x) if the list is List(x)" in { Box(List(1)) must_== Full(1) @@ -122,7 +122,7 @@ class BoxSpec extends Specification with ScalaCheck with BoxGenerator { Full(1) filter {_ > 0} must_== Full(1) } "define a 'filter' method, returning Empty if the filter is not satisfied" in { - Full(1) filter {_ == 0} must beEmpty + Full(1) filter {_ == 0} must be_==(Empty) } "define a 'filterMsg' method, returning a Failure if the filter predicate is not satisfied" in { Full(1).filterMsg("not equal to 0")(_ == 0) must_== Failure("not equal to 0", Empty, Empty) @@ -136,10 +136,10 @@ class BoxSpec extends Specification with ScalaCheck with BoxGenerator { Full(1) map { _.toString } must_== Full("1") } "define a 'flatMap' method transforming its value in another Box. If the value is transformed in a Full box, the total result is a Full box" in { - Full(1) flatMap { x: Int => if (x > 0) Full("full") else Empty } must_== Full("full") + Full(1) flatMap { (x: Int) => if (x > 0) Full("full") else Empty } must_== Full("full") } "define a 'flatMap' method transforming its value in another Box. If the value is transformed in an Empty box, the total result is an Empty box" in { - Full(0) flatMap { x: Int => if (x > 0) Full("full") else Empty } must beEmpty + Full(0) flatMap { (x: Int) => if (x > 0) Full("full") else Empty } must be_==(Empty) } "define a 'flatten' method if it contains another Box." 
in { "If the inner box is a Full box, the final result is identical to that box" in { @@ -157,7 +157,7 @@ class BoxSpec extends Specification with ScalaCheck with BoxGenerator { Full("Albus") collect { case "Albus" => "Dumbledore"} must_== Full("Dumbledore") } "If the partial-function is not defined for the contents of this box, returns Empty" in { - Full("Hermione") collect { case "Albus" => "Dumbledore"} must beEmpty + Full("Hermione") collect { case "Albus" => "Dumbledore"} must be_==(Empty) } } "define a 'transform' method that takes a PartialFunction to transform this box into another box" in { @@ -177,7 +177,7 @@ class BoxSpec extends Specification with ScalaCheck with BoxGenerator { Full(1) flip { _ => "No data found" } mustEqual Empty } "define an 'elements' method returning an iterator containing its value" in { - Full(1).elements.next must_== 1 + Full(1).elements.next() must_== 1 } "define a 'toList' method returning a List containing its value" in { Full(1).toList must_== List(1) @@ -193,7 +193,7 @@ class BoxSpec extends Specification with ScalaCheck with BoxGenerator { } "define a 'pass' method passing the can to a function and returning itself (alias: $)" in { var empty = false - def emptyString(s: Box[String]) = s foreach {c: String => empty = c.isEmpty} + def emptyString(s: Box[String]) = s foreach {(c: String) => empty = c.isEmpty} Full("") $ emptyString _ empty must beTrue } @@ -290,7 +290,7 @@ class BoxSpec extends Specification with ScalaCheck with BoxGenerator { } "return itself if filtered with a predicate" in { val empty: Box[Int] = Empty - empty.filter {_ > 0} must beEmpty + empty.filter {_ > 0} must be_==(Empty) } "define an 'exists' method returning false" in { val empty: Box[Int] = Empty @@ -302,7 +302,7 @@ class BoxSpec extends Specification with ScalaCheck with BoxGenerator { } "define a 'filter' method, returning Empty" in { val empty: Box[Int] = Empty - empty filter {_ > 0} must beEmpty + empty filter {_ > 0} must be_==(Empty) } "define a 'filterMsg' method, returning a Failure" in { Empty.filterMsg("not equal to 0")(_ == 0) must_== Failure("not equal to 0", Empty, Empty) @@ -314,16 +314,16 @@ class BoxSpec extends Specification with ScalaCheck with BoxGenerator { total must_== 0 } "define a 'map' method returning Empty" in { - Empty map {_.toString} must beEmpty + Empty map {_.toString} must be_==(Empty) } "define a 'flatMap' method returning Empty" in { - Empty flatMap {x: Int => Full("full")} must beEmpty + Empty flatMap {(x: Int) => Full("full")} must be_==(Empty) } "define a 'flatten' method returning Empty" in { - Empty.flatten must beEmpty + Empty.flatten must be_==(Empty) } "define a 'collect' method returning Empty" in { - Empty collect { case _ => "Some Value" } must beEmpty + Empty collect { case _ => "Some Value" } must be_==(Empty) } "define a 'transform' method that takes a PartialFunction to transform this Empty box into another box" in { "If the partial-function is defined for Empty, returns the result of applying the partial function to it" in { @@ -399,7 +399,7 @@ class BoxSpec extends Specification with ScalaCheck with BoxGenerator { "A Failure is an Empty Box which" should { "return itself if mapped, flatMapped or flattened" in { Failure("error", Empty, Empty) map {_.toString} must_== Failure("error", Empty, Empty) - Failure("error", Empty, Empty) flatMap {x: String => Full(x.toString)} must_== Failure("error", Empty, Empty) + Failure("error", Empty, Empty) flatMap {(x: String) => Full(x.toString)} must_== Failure("error", Empty, Empty) 
Failure("error", Empty, Empty).flatten must_== Failure("error", Empty, Empty) } "define a 'collect' method returning itself" in { diff --git a/core/common/src/test/scala/net/liftweb/common/ConversionsSpec.scala b/core/common/src/test/scala/net/liftweb/common/ConversionsSpec.scala index b7a5999432..ff0fe09e82 100644 --- a/core/common/src/test/scala/net/liftweb/common/ConversionsSpec.scala +++ b/core/common/src/test/scala/net/liftweb/common/ConversionsSpec.scala @@ -18,14 +18,16 @@ package net.liftweb package common import scala.xml.{NodeSeq, Text} - import org.specs2.matcher.XmlMatchers import org.specs2.mutable.Specification +import scala.annotation.nowarn + /** * System under specification for Conversions. */ +@nowarn("msg=.* NodeSeqFunc in package common.* is deprecate.*") class ConversionsSpec extends Specification with XmlMatchers { "A StringOrNodeSeq" should { diff --git a/core/common/src/test/scala/net/liftweb/common/LoggingSpec.scala b/core/common/src/test/scala/net/liftweb/common/LoggingSpec.scala index fb28a5d2e8..b1b037f9c9 100644 --- a/core/common/src/test/scala/net/liftweb/common/LoggingSpec.scala +++ b/core/common/src/test/scala/net/liftweb/common/LoggingSpec.scala @@ -86,7 +86,7 @@ class LoggingSpec extends Specification { logger.info("Logged with mdc1=(1,2), mdc2=xx") } logger.info("Logged with mdc1=(1,2), mdc2=yy") - MDC.clear + MDC.clear() logger.info("No MDC values") success } diff --git a/core/json-ext/src/main/scala/net/liftweb/json/ext/EnumSerializer.scala b/core/json-ext/src/main/scala/net/liftweb/json/ext/EnumSerializer.scala deleted file mode 100644 index 83cf4a9e7e..0000000000 --- a/core/json-ext/src/main/scala/net/liftweb/json/ext/EnumSerializer.scala +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright 2006-2010 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package json -package ext - -import scala.reflect.ClassTag - -class EnumSerializer[E <: Enumeration: ClassTag](enum: E) - extends json.Serializer[E#Value] { - import JsonDSL._ - - val EnumerationClass = classOf[E#Value] - - def deserialize(implicit format: Formats): - PartialFunction[(TypeInfo, JValue), E#Value] = { - case (TypeInfo(EnumerationClass, _), json) => json match { - case JInt(value) if (value <= enum.maxId) => enum(value.toInt) - case value => throw new MappingException("Can't convert " + - value + " to "+ EnumerationClass) - } - } - - def serialize(implicit format: Formats): PartialFunction[Any, JValue] = { - case i: E#Value => i.id - } -} - -class EnumNameSerializer[E <: Enumeration: ClassTag](enum: E) - extends json.Serializer[E#Value] { - import JsonDSL._ - - val EnumerationClass = classOf[E#Value] - - def deserialize(implicit format: Formats): - PartialFunction[(TypeInfo, JValue), E#Value] = { - case (TypeInfo(EnumerationClass, _), json) => json match { - case JString(value) if (enum.values.exists(_.toString == value)) => - enum.withName(value) - case value => throw new MappingException("Can't convert " + - value + " to "+ EnumerationClass) - } - } - - def serialize(implicit format: Formats): PartialFunction[Any, JValue] = { - case i: E#Value => i.toString - } -} diff --git a/core/json-ext/src/main/scala/net/liftweb/json/ext/JodaTimeSerializer.scala b/core/json-ext/src/main/scala/net/liftweb/json/ext/JodaTimeSerializer.scala deleted file mode 100644 index 97920f2d7a..0000000000 --- a/core/json-ext/src/main/scala/net/liftweb/json/ext/JodaTimeSerializer.scala +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Copyright 2006-2010 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package json -package ext - -import org.joda.time._ -import JsonDSL._ - -object JodaTimeSerializers { - def all = List(DurationSerializer, InstantSerializer, DateTimeSerializer, - DateMidnightSerializer, IntervalSerializer(), LocalDateSerializer(), - LocalTimeSerializer(), PeriodSerializer) -} - -case object PeriodSerializer extends CustomSerializer[Period](format => ( - { - case JString(p) => new Period(p) - case JNull => null - }, - { - case p: Period => JString(p.toString) - } -)) - -case object DurationSerializer extends CustomSerializer[Duration](format => ( - { - case JInt(d) => new Duration(d.longValue) - case JNull => null - }, - { - case d: Duration => JInt(d.getMillis) - } -)) - -case object InstantSerializer extends CustomSerializer[Instant](format => ( - { - case JInt(i) => new Instant(i.longValue) - case JNull => null - }, - { - case i: Instant => JInt(i.getMillis) - } -)) - -object DateParser { - def parse(s: String, format: Formats) = - format.dateFormat.parse(s).map(_.getTime).getOrElse(throw new MappingException("Invalid date format " + s)) -} - -case object DateTimeSerializer extends CustomSerializer[DateTime](format => ( - { - case JString(s) => new DateTime(DateParser.parse(s, format)) - case JNull => null - }, - { - case d: DateTime => JString(format.dateFormat.format(d.toDate)) - } -)) - -case object DateMidnightSerializer extends CustomSerializer[DateMidnight](format => ( - { - case JString(s) => new DateMidnight(DateParser.parse(s, format)) - case JNull => null - }, - { - case d: DateMidnight => JString(format.dateFormat.format(d.toDate)) - } -)) - -private[ext] case class _Interval(start: Long, end: Long) -object IntervalSerializer { - def apply() = new ClassSerializer(new ClassType[Interval, _Interval]() { - def unwrap(i: _Interval)(implicit format: Formats) = new Interval(i.start, i.end) - def wrap(i: Interval)(implicit format: Formats) = _Interval(i.getStartMillis, i.getEndMillis) - }) -} - -private[ext] case class _LocalDate(year: Int, month: Int, day: Int) -object LocalDateSerializer { - def apply() = new ClassSerializer(new ClassType[LocalDate, _LocalDate]() { - def unwrap(d: _LocalDate)(implicit format: Formats) = new LocalDate(d.year, d.month, d.day) - def wrap(d: LocalDate)(implicit format: Formats) = - _LocalDate(d.getYear(), d.getMonthOfYear, d.getDayOfMonth) - }) -} - -private[ext] case class _LocalTime(hour: Int, minute: Int, second: Int, millis: Int) -object LocalTimeSerializer { - def apply() = new ClassSerializer(new ClassType[LocalTime, _LocalTime]() { - def unwrap(t: _LocalTime)(implicit format: Formats) = - new LocalTime(t.hour, t.minute, t.second, t.millis) - def wrap(t: LocalTime)(implicit format: Formats) = - _LocalTime(t.getHourOfDay, t.getMinuteOfHour, t.getSecondOfMinute, t.getMillisOfSecond) - }) -} - -private[ext] trait ClassType[A, B] { - def unwrap(b: B)(implicit format: Formats): A - def wrap(a: A)(implicit format: Formats): B -} - -case class ClassSerializer[A : Manifest, B : Manifest](t: ClassType[A, B]) extends Serializer[A] { - private val Class = implicitly[Manifest[A]].runtimeClass - - def deserialize(implicit format: Formats): PartialFunction[(TypeInfo, JValue), A] = { - case (TypeInfo(Class, _), json) => json match { - case JNull => null.asInstanceOf[A] - case xs: JObject if (xs.extractOpt[B].isDefined) => t.unwrap(xs.extract[B]) - case value => throw new MappingException("Can't convert " + value + " to " + Class) - } - } - - def serialize(implicit format: Formats): PartialFunction[Any, JValue] = { 
- case a: A if a.asInstanceOf[AnyRef].getClass == Class => Extraction.decompose(t.wrap(a)) - } -} diff --git a/core/json-ext/src/main/scala/net/liftweb/json/ext/JsonBoxSerializer.scala b/core/json-ext/src/main/scala/net/liftweb/json/ext/JsonBoxSerializer.scala deleted file mode 100644 index 566846fcc5..0000000000 --- a/core/json-ext/src/main/scala/net/liftweb/json/ext/JsonBoxSerializer.scala +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package json -package ext - -import java.io._ -import java.lang.reflect.ParameterizedType -import scala.reflect.Manifest -import common._ -import Extraction.{decompose, extract} -import org.apache.commons.codec.binary.Base64 - -class JsonBoxSerializer extends Serializer[Box[_]] { - private val BoxClass = classOf[Box[_]] - - def deserialize(implicit format: Formats): PartialFunction[(TypeInfo, JValue), Box[_]] = { - case (TypeInfo(BoxClass, ptype), json) => json match { - case JNull | JNothing => Empty - case JObject(JField("box_failure", JString("Failure")) :: - JField("msg", JString(msg)) :: - JField("exception", exception) :: - JField("chain", chain) :: Nil) => - Failure(msg, deserializeException(exception), - extract(chain, TypeInfo(BoxClass, Some(typeHoldingFailure))).asInstanceOf[Box[Failure]]) - case JObject(JField("box_failure", JString("ParamFailure")) :: - JField("msg", JString(msg)) :: - JField("exception", exception) :: - JField("chain", chain) :: - JField("paramType", JString(paramType)) :: - JField("param", param) :: Nil) => - val clazz = Thread.currentThread.getContextClassLoader.loadClass(paramType) - ParamFailure(msg, deserializeException(exception), - extract(chain, TypeInfo(BoxClass, Some(typeHoldingFailure))).asInstanceOf[Box[Failure]], - extract(param, TypeInfo(clazz, None))) - case x => - val t = ptype.getOrElse(throw new MappingException("parameterized type not known for Box")) - Full(extract(x, TypeInfo(t.getActualTypeArguments()(0).asInstanceOf[Class[_]], None))) - } - } - - def serialize(implicit format: Formats): PartialFunction[Any, JValue] = { - case Full(x) => decompose(x) - case Empty => JNull - case ParamFailure(msg, exception, chain, param) => - JObject(JField("box_failure", JString("ParamFailure")) :: - JField("msg", JString(msg)) :: - JField("exception", serializeException(exception)) :: - JField("chain", decompose(chain)) :: - JField("paramType", JString(param.asInstanceOf[AnyRef].getClass.getName)) :: - JField("param", decompose(param)) :: Nil) - case Failure(msg, exception, chain) => - JObject(JField("box_failure", JString("Failure")) :: - JField("msg", JString(msg)) :: - JField("exception", serializeException(exception)) :: - JField("chain", decompose(chain)) :: Nil) - } - - private val typeHoldingFailure = new ParameterizedType { - def getActualTypeArguments = Array(classOf[Failure]) - def getOwnerType = classOf[Box[Failure]] - def getRawType = classOf[Box[Failure]] - } - - private def 
serializeException(exception: Box[Throwable]) = exception match { - case Full(x) => JString(javaSerialize(x)) - case _ => JNull - } - - private def deserializeException(json: JValue) = json match { - case JString(s) => Full(javaDeserialize(s).asInstanceOf[Throwable]) - case _ => Empty - } - - private def javaSerialize(obj: AnyRef): String = { - val bytes = new ByteArrayOutputStream - val out = new ObjectOutputStream(bytes) - out.writeObject(obj) - new String((new Base64).encode(bytes.toByteArray)) - } - - private def javaDeserialize(s: String): Any = { - val bytes = new ByteArrayInputStream((new Base64).decode(s.getBytes("UTF-8"))) - val in = new ObjectInputStream(bytes) - in.readObject - } -} - diff --git a/core/json-ext/src/test/scala/net/liftweb/json/ext/JodaTimeSerializerSpec.scala b/core/json-ext/src/test/scala/net/liftweb/json/ext/JodaTimeSerializerSpec.scala deleted file mode 100644 index 6d2fa86d77..0000000000 --- a/core/json-ext/src/test/scala/net/liftweb/json/ext/JodaTimeSerializerSpec.scala +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright 2007-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package json -package ext - -import org.joda.time._ - -import org.specs2.mutable.Specification - -import common._ -import json.Serialization.{read, write => swrite} - - -/** - * System under specification for JodaTimeSerializer. 
- */ -class JodaTimeSerializerSpec extends Specification { - "JodaTimeSerializer Specification".title - - implicit val formats = Serialization.formats(NoTypeHints) ++ JodaTimeSerializers.all - - "Serialize joda time types" in { - val x = JodaTypes(new Duration(10*1000), new Instant(System.currentTimeMillis), - new DateTime, new DateMidnight, new Interval(1000, 50000), - new LocalDate(2011, 1, 16), new LocalTime(16, 52, 10), Period.weeks(3)) - val ser = swrite(x) - read[JodaTypes](ser) mustEqual x - } - - "DateTime and DateMidnight use configured date format" in { - implicit val formats = new net.liftweb.json.DefaultFormats { - override def dateFormatter = new java.text.SimpleDateFormat("yyyy-MM-dd HH:mm:ss'Z'") - } ++ JodaTimeSerializers.all - - val x = Dates(new DateTime(2011, 1, 16, 10, 32, 0, 0, DateTimeZone.UTC), new DateMidnight(2011, 1, 16, DateTimeZone.UTC)) - val ser = swrite(x) - ser mustEqual """{"dt":"2011-01-16 10:32:00Z","dm":"2011-01-16 00:00:00Z"}""" - } - - "null is serialized as JSON null" in { - val x = JodaTypes(null, null, null, null, null, null, null, null) - val ser = swrite(x) - read[JodaTypes](ser) mustEqual x - } -} - -case class JodaTypes(duration: Duration, instant: Instant, dateTime: DateTime, - dateMidnight: DateMidnight, interval: Interval, localDate: LocalDate, - localTime: LocalTime, period: Period) - -case class Dates(dt: DateTime, dm: DateMidnight) diff --git a/core/json-ext/src/test/scala/net/liftweb/json/ext/JsonBoxSerializerSpec.scala b/core/json-ext/src/test/scala/net/liftweb/json/ext/JsonBoxSerializerSpec.scala deleted file mode 100644 index 5701ed8913..0000000000 --- a/core/json-ext/src/test/scala/net/liftweb/json/ext/JsonBoxSerializerSpec.scala +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright 2007-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package json -package ext - -import org.specs2.mutable.Specification - -import common._ -import json.Serialization.{read, write => swrite} - - -/** - * System under specification for JsonBoxSerializer. 
- */ -class JsonBoxSerializerSpec extends Specification { - "JsonBoxSerializer Specification".title - - implicit val formats = net.liftweb.json.DefaultFormats + new JsonBoxSerializer - - "Extract empty age" in { - parse("""{"name":"joe"}""").extract[Person] mustEqual Person("joe", Empty, Empty) - } - - "Extract boxed thing" in { - parse("""{"name":"joe", "thing": "rog", "age":12}""").extract[Person] mustEqual Person("joe", Full(12), Empty, Full("rog")) - } - - "Extract boxed mother" in { - val json = """{"name":"joe", "age":12, "mother": {"name":"ann", "age":53}}""" - val p = parse(json).extract[Person] - p mustEqual Person("joe", Full(12), Full(Person("ann", Full(53), Empty))) - (for { a1 <- p.age; m <-p.mother; a2 <- m.age } yield a1+a2) mustEqual Full(65) - } - - "Render with age" in { - swrite(Person("joe", Full(12), Empty)) mustEqual """{"name":"joe","age":12,"mother":null,"thing":null}""" - } - - "Serialize failure" in { - val exn1 = SomeException("e1") - val exn2 = SomeException("e2") - val p = Person("joe", Full(12), Failure("f", Full(exn1), Failure("f2", Full(exn2), Empty))) - val ser = swrite(p) - read[Person](ser) mustEqual p - } - - "Serialize param failure" in { - val exn = SomeException("e1") - val p = Person("joe", Full(12), ParamFailure("f", Full(exn), Empty, "param value")) - val ser = swrite(p) - read[Person](ser) mustEqual p - } -} - -case class SomeException(msg: String) extends Exception - -case class Person(name: String, age: Box[Int], mother: Box[Person], thing: Box[String] = Empty) - diff --git a/core/json-scalaz7/README.md b/core/json-scalaz7/README.md deleted file mode 100644 index 47eaa878a5..0000000000 --- a/core/json-scalaz7/README.md +++ /dev/null @@ -1,88 +0,0 @@ -Scalaz support for Lift JSON -============================ - -This project adds a type class to parse JSON: - - trait JSON[A] { - def read(json: JValue): Result[A] - def write(value: A): JValue - } - - type Result[+A] = ValidationNel[Error, A] - -Function 'read' returns an Applicative Functor, enabling parsing in an applicative style. - -Simple example --------------- - - scala> import scalaz._ - scala> import Scalaz._ - scala> import net.liftweb.json.scalaz.JsonScalaz._ - scala> import net.liftweb.json._ - - scala> case class Address(street: String, zipCode: String) - scala> case class Person(name: String, age: Int, address: Address) - - scala> val json = parse(""" {"street": "Manhattan 2", "zip": "00223" } """) - scala> (field[String]("street")(json) |@| field[String]("zip")(json)) { Address } - res0: Success(Address(Manhattan 2,00223)) - - scala> (field[String]("streets")(json) |@| field[String]("zip")(json)) { Address } - res1: Failure("no such field 'streets'") - -Notice the required explicit types when reading fields from JSON. The library comes with helpers which -can lift functions with pure values into "parsing context". 
This works well with Scala's type inferencer: - - scala> Address.applyJSON(field[String]("street"), field[String]("zip"))(json) - res2: Success(Address(Manhattan 2,00223)) - -Function 'applyJSON' above lifts function - - (String, String) => Address - -to - - (JValue => Result[String], JValue => Result[String]) => (JValue => Result[Address]) - -Example which adds a new type class instance --------------------------------------------- - - scala> implicit def addrJSONR: JSONR[Address] = Address.applyJSON(field[String]("street"), field[String]("zip")) - - scala> val p = JsonParser.parse(""" {"name":"joe","age":34,"address":{"street": "Manhattan 2", "zip": "00223" }} """) - scala> Person.applyJSON(field[String]("name"), field[Int]("age"), field[Address]("address"))(p) - res0: Success(Person(joe,34,Address(Manhattan 2,00223))) - -Validation ----------- - -Applicative style parsing works nicely with validation and data conversion. It is easy to compose -validations using a for comprehension. - - def min(x: Int): Int => Result[Int] = (y: Int) => - if (y < x) Fail("min", y + " < " + x) else y.success - - def max(x: Int): Int => Result[Int] = (y: Int) => - if (y > x) Fail("max", y + " > " + x) else y.success - - val ageResult = (jValue: JValue) => for { - age <- field[Int]("age")(jValue) - _ <- min(16)(age) - _ <- max(60)(age) - } yield age - - // Creates a function JValue => Result[Person] - Person.applyJSON(field[String]("name"), ageResult, field[Address]("address")) - -Installation ------------- - -Add dependency to your SBT project description: - - val lift_json_scalaz = "net.liftweb" %% "lift-json-scalaz" % "XXX" - -Links ------ - -* [More examples](https://github.com/lift/framework/tree/master/core/json-scalaz7/src/test/scala/net/liftweb/json/scalaz) -* [Scalaz](http://code.google.com/p/scalaz/) diff --git a/core/json-scalaz7/src/main/scala-2.11/net/liftweb/json/scalaz/Base.scala b/core/json-scalaz7/src/main/scala-2.11/net/liftweb/json/scalaz/Base.scala deleted file mode 100644 index 5441c92c64..0000000000 --- a/core/json-scalaz7/src/main/scala-2.11/net/liftweb/json/scalaz/Base.scala +++ /dev/null @@ -1,120 +0,0 @@ -/* - * Copyright 2009-2010 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb.json.scalaz - -import scalaz.ValidationNel -import scalaz.Validation._ -import scalaz.std.option._ -import scalaz.std.list._ -import scalaz.syntax.traverse._ -import net.liftweb.json._ -import scala.collection.breakOut - -trait Base { this: Types => - implicit def boolJSON: JSON[Boolean] = new JSON[Boolean] { - def read(json: JValue) = json match { - case JBool(b) => success(b) - case x => failure(UnexpectedJSONError(x, classOf[JBool])).toValidationNel - } - - def write(value: Boolean) = JBool(value) - } - - implicit def intJSON: JSON[Int] = new JSON[Int] { - def read(json: JValue) = json match { - case JInt(x) => success(x.intValue) - case x => failure(UnexpectedJSONError(x, classOf[JInt])).toValidationNel - } - - def write(value: Int) = JInt(BigInt(value)) - } - - implicit def longJSON: JSON[Long] = new JSON[Long] { - def read(json: JValue) = json match { - case JInt(x) => success(x.longValue) - case x => failure(UnexpectedJSONError(x, classOf[JInt])).toValidationNel - } - - def write(value: Long) = JInt(BigInt(value)) - } - - implicit def doubleJSON: JSON[Double] = new JSON[Double] { - def read(json: JValue) = json match { - case JDouble(x) => success(x) - case x => failure(UnexpectedJSONError(x, classOf[JDouble])).toValidationNel - } - - def write(value: Double) = JDouble(value) - } - - implicit def stringJSON: JSON[String] = new JSON[String] { - def read(json: JValue) = json match { - case JString(x) => success(x) - case x => failure(UnexpectedJSONError(x, classOf[JString])).toValidationNel - } - - def write(value: String) = JString(value) - } - - implicit def bigintJSON: JSON[BigInt] = new JSON[BigInt] { - def read(json: JValue) = json match { - case JInt(x) => success(x) - case x => failure(UnexpectedJSONError(x, classOf[JInt])).toValidationNel - } - - def write(value: BigInt) = JInt(value) - } - - implicit def jvalueJSON: JSON[JValue] = new JSON[JValue] { - def read(json: JValue) = success(json) - def write(value: JValue) = value - } - - implicit def listJSONR[A: JSONR]: JSONR[List[A]] = new JSONR[List[A]] { - def read(json: JValue) = json match { - case JArray(xs) => { - xs.map(fromJSON[A]).sequence[({type λ[α]=ValidationNel[Error, α]})#λ, A] - } - case x => failure(UnexpectedJSONError(x, classOf[JArray])).toValidationNel - } - } - implicit def listJSONW[A: JSONW]: JSONW[List[A]] = new JSONW[List[A]] { - def write(values: List[A]) = JArray(values.map(x => toJSON(x))) - } - - implicit def optionJSONR[A: JSONR]: JSONR[Option[A]] = new JSONR[Option[A]] { - def read(json: JValue) = json match { - case JNothing | JNull => success(None) - case x => fromJSON[A](x).map(some) - } - } - implicit def optionJSONW[A: JSONW]: JSONW[Option[A]] = new JSONW[Option[A]] { - def write(value: Option[A]) = value.map(x => toJSON(x)).getOrElse(JNothing) - } - - implicit def mapJSONR[A: JSONR]: JSONR[Map[String, A]] = new JSONR[Map[String, A]] { - def read(json: JValue) = json match { - case JObject(fs) => - val r = fs.map(f => fromJSON[A](f.value).map(v => (f.name, v))).sequence[({type λ[α]=ValidationNel[Error, α]})#λ, (String, A)] - r.map(_.toMap) - case x => failure(UnexpectedJSONError(x, classOf[JObject])).toValidationNel - } - } - implicit def mapJSONW[A: JSONW]: JSONW[Map[String, A]] = new JSONW[Map[String, A]] { - def write(values: Map[String, A]) = JObject(values.map { case (k, v) => JField(k, toJSON(v)) }(breakOut): _*) - } -} diff --git a/core/json-scalaz7/src/main/scala-2.12/net/liftweb/json/scalaz/Base.scala 
b/core/json-scalaz7/src/main/scala-2.12/net/liftweb/json/scalaz/Base.scala deleted file mode 100644 index 5441c92c64..0000000000 --- a/core/json-scalaz7/src/main/scala-2.12/net/liftweb/json/scalaz/Base.scala +++ /dev/null @@ -1,120 +0,0 @@ -/* - * Copyright 2009-2010 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb.json.scalaz - -import scalaz.ValidationNel -import scalaz.Validation._ -import scalaz.std.option._ -import scalaz.std.list._ -import scalaz.syntax.traverse._ -import net.liftweb.json._ -import scala.collection.breakOut - -trait Base { this: Types => - implicit def boolJSON: JSON[Boolean] = new JSON[Boolean] { - def read(json: JValue) = json match { - case JBool(b) => success(b) - case x => failure(UnexpectedJSONError(x, classOf[JBool])).toValidationNel - } - - def write(value: Boolean) = JBool(value) - } - - implicit def intJSON: JSON[Int] = new JSON[Int] { - def read(json: JValue) = json match { - case JInt(x) => success(x.intValue) - case x => failure(UnexpectedJSONError(x, classOf[JInt])).toValidationNel - } - - def write(value: Int) = JInt(BigInt(value)) - } - - implicit def longJSON: JSON[Long] = new JSON[Long] { - def read(json: JValue) = json match { - case JInt(x) => success(x.longValue) - case x => failure(UnexpectedJSONError(x, classOf[JInt])).toValidationNel - } - - def write(value: Long) = JInt(BigInt(value)) - } - - implicit def doubleJSON: JSON[Double] = new JSON[Double] { - def read(json: JValue) = json match { - case JDouble(x) => success(x) - case x => failure(UnexpectedJSONError(x, classOf[JDouble])).toValidationNel - } - - def write(value: Double) = JDouble(value) - } - - implicit def stringJSON: JSON[String] = new JSON[String] { - def read(json: JValue) = json match { - case JString(x) => success(x) - case x => failure(UnexpectedJSONError(x, classOf[JString])).toValidationNel - } - - def write(value: String) = JString(value) - } - - implicit def bigintJSON: JSON[BigInt] = new JSON[BigInt] { - def read(json: JValue) = json match { - case JInt(x) => success(x) - case x => failure(UnexpectedJSONError(x, classOf[JInt])).toValidationNel - } - - def write(value: BigInt) = JInt(value) - } - - implicit def jvalueJSON: JSON[JValue] = new JSON[JValue] { - def read(json: JValue) = success(json) - def write(value: JValue) = value - } - - implicit def listJSONR[A: JSONR]: JSONR[List[A]] = new JSONR[List[A]] { - def read(json: JValue) = json match { - case JArray(xs) => { - xs.map(fromJSON[A]).sequence[({type λ[α]=ValidationNel[Error, α]})#λ, A] - } - case x => failure(UnexpectedJSONError(x, classOf[JArray])).toValidationNel - } - } - implicit def listJSONW[A: JSONW]: JSONW[List[A]] = new JSONW[List[A]] { - def write(values: List[A]) = JArray(values.map(x => toJSON(x))) - } - - implicit def optionJSONR[A: JSONR]: JSONR[Option[A]] = new JSONR[Option[A]] { - def read(json: JValue) = json match { - case JNothing | JNull => success(None) - case x => fromJSON[A](x).map(some) - } - } - implicit def optionJSONW[A: 
JSONW]: JSONW[Option[A]] = new JSONW[Option[A]] { - def write(value: Option[A]) = value.map(x => toJSON(x)).getOrElse(JNothing) - } - - implicit def mapJSONR[A: JSONR]: JSONR[Map[String, A]] = new JSONR[Map[String, A]] { - def read(json: JValue) = json match { - case JObject(fs) => - val r = fs.map(f => fromJSON[A](f.value).map(v => (f.name, v))).sequence[({type λ[α]=ValidationNel[Error, α]})#λ, (String, A)] - r.map(_.toMap) - case x => failure(UnexpectedJSONError(x, classOf[JObject])).toValidationNel - } - } - implicit def mapJSONW[A: JSONW]: JSONW[Map[String, A]] = new JSONW[Map[String, A]] { - def write(values: Map[String, A]) = JObject(values.map { case (k, v) => JField(k, toJSON(v)) }(breakOut): _*) - } -} diff --git a/core/json-scalaz7/src/main/scala-2.13/net/liftweb/json/scalaz/Base.scala b/core/json-scalaz7/src/main/scala-2.13/net/liftweb/json/scalaz/Base.scala deleted file mode 100644 index ae9d8d8375..0000000000 --- a/core/json-scalaz7/src/main/scala-2.13/net/liftweb/json/scalaz/Base.scala +++ /dev/null @@ -1,125 +0,0 @@ -/* - * Copyright 2009-2010 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb.json.scalaz - -import scalaz.ValidationNel -import scalaz.Validation._ -import scalaz.std.option._ -import scalaz.std.list._ -import scalaz.syntax.traverse._ -import net.liftweb.json._ - -trait Base { this: Types => - implicit def boolJSON: JSON[Boolean] = new JSON[Boolean] { - def read(json: JValue) = json match { - case JBool(b) => success(b) - case x => failure(UnexpectedJSONError(x, classOf[JBool])).toValidationNel - } - - def write(value: Boolean) = JBool(value) - } - - implicit def intJSON: JSON[Int] = new JSON[Int] { - def read(json: JValue) = json match { - case JInt(x) => success(x.intValue) - case x => failure(UnexpectedJSONError(x, classOf[JInt])).toValidationNel - } - - def write(value: Int) = JInt(BigInt(value)) - } - - implicit def longJSON: JSON[Long] = new JSON[Long] { - def read(json: JValue) = json match { - case JInt(x) => success(x.longValue) - case x => failure(UnexpectedJSONError(x, classOf[JInt])).toValidationNel - } - - def write(value: Long) = JInt(BigInt(value)) - } - - implicit def doubleJSON: JSON[Double] = new JSON[Double] { - def read(json: JValue) = json match { - case JDouble(x) => success(x) - case x => failure(UnexpectedJSONError(x, classOf[JDouble])).toValidationNel - } - - def write(value: Double) = JDouble(value) - } - - implicit def stringJSON: JSON[String] = new JSON[String] { - def read(json: JValue) = json match { - case JString(x) => success(x) - case x => failure(UnexpectedJSONError(x, classOf[JString])).toValidationNel - } - - def write(value: String) = JString(value) - } - - implicit def bigintJSON: JSON[BigInt] = new JSON[BigInt] { - def read(json: JValue) = json match { - case JInt(x) => success(x) - case x => failure(UnexpectedJSONError(x, classOf[JInt])).toValidationNel - } - - def write(value: BigInt) = JInt(value) - } - - implicit def jvalueJSON: JSON[JValue] = new 
JSON[JValue] { - def read(json: JValue) = success(json) - def write(value: JValue) = value - } - - implicit def listJSONR[A: JSONR]: JSONR[List[A]] = new JSONR[List[A]] { - def read(json: JValue) = json match { - case JArray(xs) => { - xs.map(fromJSON[A]).sequence[({type λ[α]=ValidationNel[Error, α]})#λ, A] - } - case x => failure(UnexpectedJSONError(x, classOf[JArray])).toValidationNel - } - } - implicit def listJSONW[A: JSONW]: JSONW[List[A]] = new JSONW[List[A]] { - def write(values: List[A]) = JArray(values.map(x => toJSON(x))) - } - - implicit def optionJSONR[A: JSONR]: JSONR[Option[A]] = new JSONR[Option[A]] { - def read(json: JValue) = json match { - case JNothing | JNull => success(None) - case x => fromJSON[A](x).map(some) - } - } - implicit def optionJSONW[A: JSONW]: JSONW[Option[A]] = new JSONW[Option[A]] { - def write(value: Option[A]) = value.map(x => toJSON(x)).getOrElse(JNothing) - } - - implicit def mapJSONR[A: JSONR]: JSONR[Map[String, A]] = new JSONR[Map[String, A]] { - def read(json: JValue) = json match { - case JObject(fs) => - val r = fs.map(f => fromJSON[A](f.value).map(v => (f.name, v))).sequence[({type λ[α]=ValidationNel[Error, α]})#λ, (String, A)] - r.map(_.toMap) - case x => failure(UnexpectedJSONError(x, classOf[JObject])).toValidationNel - } - } - implicit def mapJSONW[A: JSONW]: JSONW[Map[String, A]] = new JSONW[Map[String, A]] { - def write(values: Map[String, A]) = { - JObject( - values.map { - case (k, v) => JField(k, toJSON(v)) - }.to(List): _* - ) - } - } -} diff --git a/core/json-scalaz7/src/main/scala/net/liftweb/json/scalaz/JsonScalaz.scala b/core/json-scalaz7/src/main/scala/net/liftweb/json/scalaz/JsonScalaz.scala deleted file mode 100644 index eb9debe640..0000000000 --- a/core/json-scalaz7/src/main/scala/net/liftweb/json/scalaz/JsonScalaz.scala +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Copyright 2009-2010 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb.json.scalaz - -// FIXME Needed to due to https://issues.scala-lang.org/browse/SI-6541, -// which causes existential types to be inferred for the generated -// unapply of a case class with a wildcard parameterized type. -// Ostensibly should be fixed in 2.12, which means we're a ways away -// from being able to remove this, though. 
-import scala.language.existentials - -import scalaz.{Equal, Kleisli, Monoid, Semigroup, Show, ValidationNel} -import scalaz.Validation._ -import scalaz.std.option._ -import net.liftweb.json._ - -trait Types { - type Result[+A] = ValidationNel[Error, A] - - sealed trait Error - case class UnexpectedJSONError(was: JValue, expected: Class[_ <: JValue]) extends Error - case class NoSuchFieldError(name: String, json: JValue) extends Error - case class UncategorizedError(key: String, desc: String, args: List[Any]) extends Error - - case object Fail { - def apply[A](key: String, desc: String, args: List[Any]): Result[A] = - failure(UncategorizedError(key, desc, args)).toValidationNel - - def apply[A](key: String, desc: String): Result[A] = - failure(UncategorizedError(key, desc, Nil)).toValidationNel - } - - implicit def JValueShow[A <: JValue]: Show[A] = new Show[A] { - override def shows(json: A): String = compactRender(json) - } - - implicit def JValueMonoid: Monoid[JValue] = Monoid.instance(_ ++ _, JNothing) - implicit def JValueSemigroup: Semigroup[JValue] = Semigroup.instance(_ ++ _) - implicit def JValueEqual: Equal[JValue] = Equal.equalA - - trait JSONR[A] { - def read(json: JValue): Result[A] - } - - trait JSONW[A] { - def write(value: A): JValue - } - - trait JSON[A] extends JSONR[A] with JSONW[A] - - implicit def Result2JSONR[A](f: JValue => Result[A]): JSONR[A] = new JSONR[A] { - def read(json: JValue) = f(json) - } - - def fromJSON[A: JSONR](json: JValue): Result[A] = implicitly[JSONR[A]].read(json) - def toJSON[A: JSONW](value: A): JValue = implicitly[JSONW[A]].write(value) - - def field[A: JSONR](name: String)(json: JValue): Result[A] = json match { - case JObject(fs) => - fs.find(_.name == name) - .map(f => implicitly[JSONR[A]].read(f.value)) - .orElse(implicitly[JSONR[A]].read(JNothing).fold(_ => none, x => some(success(x)))) - .getOrElse(failure(NoSuchFieldError(name, json)).toValidationNel) - case x => failure(UnexpectedJSONError(x, classOf[JObject])).toValidationNel - } - - def validate[A: JSONR](name: String): Kleisli[Result, JValue, A] = Kleisli(field[A](name)) - - def makeObj(fields: Traversable[(String, JValue)]): JObject = - JObject(fields.toList.map { case (n, v) => JField(n, v) }) -} - -object JsonScalaz extends Types with Lifting with Base with Tuples diff --git a/core/json-scalaz7/src/main/scala/net/liftweb/json/scalaz/Lifting.scala b/core/json-scalaz7/src/main/scala/net/liftweb/json/scalaz/Lifting.scala deleted file mode 100644 index c82501b81e..0000000000 --- a/core/json-scalaz7/src/main/scala/net/liftweb/json/scalaz/Lifting.scala +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright 2009-2010 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb.json.scalaz - -import scalaz.syntax.apply._ -import net.liftweb.json._ - -trait Lifting { this: Types => - implicit def Func2ToJSON[A: JSONR, B: JSONR, R](z: (A, B) => R) = new { - def applyJSON(a: JValue => Result[A], b: JValue => Result[B]): JValue => Result[R] = - (json: JValue) => (a(json) |@| b(json))(z) - } - - implicit def Func3ToJSON[A: JSONR, B: JSONR, C: JSONR, R](z: (A, B, C) => R) = new { - def applyJSON(a: JValue => Result[A], b: JValue => Result[B], c: JValue => Result[C]): JValue => Result[R] = - (json: JValue) => (a(json) |@| b(json) |@| c(json))(z) - } - - implicit def Func4ToJSON[A: JSONR, B: JSONR, C: JSONR, D: JSONR, R](z: (A, B, C, D) => R) = new { - def applyJSON(a: JValue => Result[A], b: JValue => Result[B], c: JValue => Result[C], d: JValue => Result[D]): JValue => Result[R] = - (json: JValue) => (a(json) |@| b(json) |@| c(json) |@| d(json))(z) - } - - implicit def Func5ToJSON[A: JSONR, B: JSONR, C: JSONR, D: JSONR, E: JSONR, R](z: (A, B, C, D, E) => R) = new { - def applyJSON(a: JValue => Result[A], b: JValue => Result[B], c: JValue => Result[C], d: JValue => Result[D], e: JValue => Result[E]): JValue => Result[R] = - (json: JValue) => (a(json) |@| b(json) |@| c(json) |@| d(json) |@| e(json))(z) - } - - implicit def Func6ToJSON[A: JSONR, B: JSONR, C: JSONR, D: JSONR, E: JSONR, F: JSONR, R](z: (A, B, C, D, E, F) => R) = new { - def applyJSON(a: JValue => Result[A], b: JValue => Result[B], c: JValue => Result[C], d: JValue => Result[D], e: JValue => Result[E], f: JValue => Result[F]): JValue => Result[R] = - (json: JValue) => (a(json) |@| b(json) |@| c(json) |@| d(json) |@| e(json) |@| f(json))(z) - } - - implicit def Func7ToJSON[A: JSONR, B: JSONR, C: JSONR, D: JSONR, E: JSONR, F: JSONR, G: JSONR, R](z: (A, B, C, D, E, F, G) => R) = new { - def applyJSON(a: JValue => Result[A], b: JValue => Result[B], c: JValue => Result[C], d: JValue => Result[D], e: JValue => Result[E], f: JValue => Result[F], g: JValue => Result[G]): JValue => Result[R] = - (json: JValue) => (a(json) |@| b(json) |@| c(json) |@| d(json) |@| e(json) |@| f(json) |@| g(json))(z) - } - - implicit def Func8ToJSON[A: JSONR, B: JSONR, C: JSONR, D: JSONR, E: JSONR, F: JSONR, G: JSONR, H: JSONR, R](z: (A, B, C, D, E, F, G, H) => R) = new { - def applyJSON(a: JValue => Result[A], b: JValue => Result[B], c: JValue => Result[C], d: JValue => Result[D], e: JValue => Result[E], f: JValue => Result[F], g: JValue => Result[G], h: JValue => Result[H]): JValue => Result[R] = - (json: JValue) => (a(json) |@| b(json) |@| c(json) |@| d(json) |@| e(json) |@| f(json) |@| g(json) |@| h(json))(z) - } -} diff --git a/core/json-scalaz7/src/main/scala/net/liftweb/json/scalaz/Tuples.scala b/core/json-scalaz7/src/main/scala/net/liftweb/json/scalaz/Tuples.scala deleted file mode 100644 index 6353e4c334..0000000000 --- a/core/json-scalaz7/src/main/scala/net/liftweb/json/scalaz/Tuples.scala +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright 2009-2010 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb.json.scalaz - -import scalaz.Validation._ -import scalaz.syntax.apply._ -import net.liftweb.json._ - -trait Tuples { this: Types => - implicit def Tuple2JSON[A: JSON, B: JSON]: JSON[(A, B)] = new JSON[(A, B)] { - def read(json: JValue) = json match { - case JArray(a :: b :: _) => - (fromJSON[A](a) |@| fromJSON[B](b)) { (a, b) => (a, b) } - case x => failure(UnexpectedJSONError(x, classOf[JArray])).toValidationNel - } - - def write(value: (A, B)) = JArray(toJSON(value._1) :: toJSON(value._2) :: Nil) - } - - implicit def Tuple3JSON[A: JSON, B: JSON, C: JSON]: JSON[(A, B, C)] = new JSON[(A, B, C)] { - def read(json: JValue) = json match { - case JArray(a :: b :: c :: _) => - (fromJSON[A](a) |@| fromJSON[B](b) |@| fromJSON[C](c)) { (a, b, c) => (a, b, c) } - case x => failure(UnexpectedJSONError(x, classOf[JArray])).toValidationNel - } - - def write(value: (A, B, C)) = JArray(toJSON(value._1) :: toJSON(value._2) :: toJSON(value._3) :: Nil) - } - - implicit def Tuple4JSON[A: JSON, B: JSON, C: JSON, D: JSON]: JSON[(A, B, C, D)] = new JSON[(A, B, C, D)] { - def read(json: JValue) = json match { - case JArray(a :: b :: c :: d :: _) => - (fromJSON[A](a) |@| fromJSON[B](b) |@| fromJSON[C](c) |@| fromJSON[D](d)) { (a, b, c, d) => (a, b, c, d) } - case x => failure(UnexpectedJSONError(x, classOf[JArray])).toValidationNel - } - - def write(value: (A, B, C, D)) = JArray(toJSON(value._1) :: toJSON(value._2) :: toJSON(value._3) :: toJSON(value._4) :: Nil) - } - - implicit def Tuple5JSON[A: JSON, B: JSON, C: JSON, D: JSON, E: JSON]: JSON[(A, B, C, D, E)] = new JSON[(A, B, C, D, E)] { - def read(json: JValue) = json match { - case JArray(a :: b :: c :: d :: e :: _) => - (fromJSON[A](a) |@| fromJSON[B](b) |@| fromJSON[C](c) |@| fromJSON[D](d) |@| fromJSON[E](e)) { (a, b, c, d, e) => (a, b, c, d, e) } - case x => failure(UnexpectedJSONError(x, classOf[JArray])).toValidationNel - } - - def write(value: (A, B, C, D, E)) = JArray(toJSON(value._1) :: toJSON(value._2) :: toJSON(value._3) :: toJSON(value._4) :: toJSON(value._5) :: Nil) - } - - implicit def Tuple6JSON[A: JSON, B: JSON, C: JSON, D: JSON, E: JSON, F: JSON]: JSON[(A, B, C, D, E, F)] = new JSON[(A, B, C, D, E, F)] { - def read(json: JValue) = json match { - case JArray(a :: b :: c :: d :: e :: f :: _) => - (fromJSON[A](a) |@| fromJSON[B](b) |@| fromJSON[C](c) |@| fromJSON[D](d) |@| fromJSON[E](e) |@| fromJSON[F](f)) { (a, b, c, d, e, f) => (a, b, c, d, e, f) } - case x => failure(UnexpectedJSONError(x, classOf[JArray])).toValidationNel - } - - def write(value: (A, B, C, D, E, F)) = JArray(toJSON(value._1) :: toJSON(value._2) :: toJSON(value._3) :: toJSON(value._4) :: toJSON(value._5) :: toJSON(value._6) :: Nil) - } -} diff --git a/core/json-scalaz7/src/test/scala/net/liftweb/json/scalaz/Example.scala b/core/json-scalaz7/src/test/scala/net/liftweb/json/scalaz/Example.scala deleted file mode 100644 index 88a9f04b87..0000000000 --- a/core/json-scalaz7/src/test/scala/net/liftweb/json/scalaz/Example.scala +++ /dev/null @@ -1,68 +0,0 @@ -package net.liftweb.json.scalaz - -import scala.language.reflectiveCalls - -import scalaz._ -import scalaz.syntax.applicative._ -import scalaz.syntax.show._ -import JsonScalaz._ -import net.liftweb.json._ - -import org.specs2.mutable.Specification - -object Example extends Specification { - - case class Address(street: String, zipCode: String) - case class Person(name: String, age: Int, 
address: Address) - - "Parse address in an Applicative style" in { - val json = parse(""" {"street": "Manhattan 2", "zip": "00223" } """) - val a1 = field[String]("zip")(json) <*> (field[String]("street")(json) map Address.curried) - val a2 = (field[String]("street")(json) |@| field[String]("zip")(json)) { Address } - val a3 = Address.applyJSON(field[String]("street"), field[String]("zip"))(json) - a1 mustEqual Success(Address("Manhattan 2", "00223")) - a2 mustEqual a1 - a3 mustEqual a1 - } - - "Failed address parsing" in { - val json = parse(""" {"street": "Manhattan 2", "zip": "00223" } """) - val a = (field[String]("streets")(json) |@| field[String]("zip")(json)) { Address } - a mustEqual Failure(NonEmptyList(NoSuchFieldError("streets", json))) - } - - "Parse Person with Address" in { - implicit def addrJSON: JSONR[Address] = new JSONR[Address] { - def read(json: JValue) = Address.applyJSON(field[String]("street"), field[String]("zip"))(json) - } - - val p = parse(""" {"name":"joe","age":34,"address":{"street": "Manhattan 2", "zip": "00223" }} """) - val person = Person.applyJSON(field[String]("name"), field[Int]("age"), field[Address]("address"))(p) - person mustEqual Success(Person("joe", 34, Address("Manhattan 2", "00223"))) - } - - "Format Person with Address" in { - implicit def addrJSON: JSONW[Address] = new JSONW[Address] { - def write(a: Address) = - makeObj(("street" -> toJSON(a.street)) :: ("zip" -> toJSON(a.zipCode)) :: Nil) - } - - val p = Person("joe", 34, Address("Manhattan 2", "00223")) - val json = makeObj(("name" -> toJSON(p.name)) :: - ("age" -> toJSON(p.age)) :: - ("address" -> toJSON(p.address)) :: Nil) - json.shows mustEqual - """{"name":"joe","age":34,"address":{"street":"Manhattan 2","zip":"00223"}}""" - } - - "Parse Map" in { - val json = parse(""" {"street": "Manhattan 2", "zip": "00223" } """) - fromJSON[Map[String, String]](json) mustEqual Success(Map("street" -> "Manhattan 2", "zip" -> "00223")) - } - - "Format Map" in { - toJSON(Map("street" -> "Manhattan 2", "zip" -> "00223")).shows mustEqual - """{"street":"Manhattan 2","zip":"00223"}""" - } - -} diff --git a/core/json-scalaz7/src/test/scala/net/liftweb/json/scalaz/LottoExample.scala b/core/json-scalaz7/src/test/scala/net/liftweb/json/scalaz/LottoExample.scala deleted file mode 100644 index d7e15a9ebf..0000000000 --- a/core/json-scalaz7/src/test/scala/net/liftweb/json/scalaz/LottoExample.scala +++ /dev/null @@ -1,49 +0,0 @@ -package net.liftweb.json.scalaz - -import scala.language.reflectiveCalls - -import scalaz._ -import scalaz.syntax.validation._ -import JsonScalaz._ -import net.liftweb.json._ - -import org.specs2.mutable.Specification - -object LottoExample extends Specification { - - case class Winner(winnerId: Long, numbers: List[Int]) - case class Lotto(id: Long, winningNumbers: List[Int], winners: List[Winner], drawDate: Option[String]) - - val json = parse("""{"id":5,"winning-numbers":[2,45,34,23,7,5],"winners":[{"winner-id":23,"numbers":[2,45,34,23,3,5]},{"winner-id":54,"numbers":[52,3,12,11,18,22]}]}""") - - // Lotto line must have exactly 6 numbers - def len(x: Int) = (xs: List[Int]) => - if (xs.length != x) Fail("len", xs.length + " != " + x) else xs.success - - implicit def winnerJSON: JSONR[Winner] = { - val numbersResult = (jValue: JValue) => (for { - numbers <- field[List[Int]]("numbers")(jValue).disjunction - _ <- len(6)(numbers).disjunction - } yield numbers).validation - Winner.applyJSON(field[Long]("winner-id"), numbersResult) - } - - implicit def lottoJSON: JSONR[Lotto] = { - val 
winningNumbersResult = (jValue: JValue) => (for { - winningNumbers <- field[List[Int]]("winning-numbers")(jValue).disjunction - _ <- len(6)(winningNumbers).disjunction - } yield winningNumbers).validation - Lotto.applyJSON(field[Long]("id") - , winningNumbersResult - , field[List[Winner]]("winners") - , field[Option[String]]("draw-date")) - } - - val winners = List(Winner(23, List(2, 45, 34, 23, 3, 5)), Winner(54, List(52, 3, 12, 11, 18, 22))) - val lotto = Lotto(5, List(2, 45, 34, 23, 7, 5), winners, None) - - "Parse Lotto" in { - fromJSON[Lotto](json) mustEqual Success(lotto) - } - -} diff --git a/core/json-scalaz7/src/test/scala/net/liftweb/json/scalaz/TupleExample.scala b/core/json-scalaz7/src/test/scala/net/liftweb/json/scalaz/TupleExample.scala deleted file mode 100644 index 4007f6ce99..0000000000 --- a/core/json-scalaz7/src/test/scala/net/liftweb/json/scalaz/TupleExample.scala +++ /dev/null @@ -1,14 +0,0 @@ -package net.liftweb.json.scalaz - -import scalaz._ -import JsonScalaz._ -import net.liftweb.json._ - -import org.specs2.mutable.Specification - -object TupleExample extends Specification { - "Parse tuple from List" in { - val json = JsonParser.parse(""" [1,2,3] """) - fromJSON[Tuple3[Int, Int, Int]](json) mustEqual Success(1, 2, 3) - } -} diff --git a/core/json-scalaz7/src/test/scala/net/liftweb/json/scalaz/ValidationExample.scala b/core/json-scalaz7/src/test/scala/net/liftweb/json/scalaz/ValidationExample.scala deleted file mode 100644 index 492d759ded..0000000000 --- a/core/json-scalaz7/src/test/scala/net/liftweb/json/scalaz/ValidationExample.scala +++ /dev/null @@ -1,84 +0,0 @@ -package net.liftweb.json.scalaz - -import scala.language.reflectiveCalls - -import scalaz._ -import scalaz.std.list._ -import scalaz.syntax.traverse._ -import scalaz.syntax.validation._ -import JsonScalaz._ -import net.liftweb.json._ - -import org.specs2.mutable.Specification - -object ValidationExample extends Specification { - - case class Person(name: String, age: Int) - - "Validation" should { - def min(x: Int): Int => Result[Int] = (y: Int) => - if (y < x) Fail("min", y + " < " + x) else y.success - - def max(x: Int): Int => Result[Int] = (y: Int) => - if (y > x) Fail("max", y + " > " + x) else y.success - - val json = JsonParser.parse(""" {"name":"joe","age":17} """) - - "fail when age is less than min age" in { - // Age must be between 18 an 60 - val ageResult = (jValue: JValue) => (for { - age <- field[Int]("age")(jValue).disjunction - _ <- min(18)(age).disjunction - _ <- max(60)(age).disjunction - } yield age).validation - val person = Person.applyJSON(field[String]("name"), ageResult) - person(json) mustEqual Failure(NonEmptyList(UncategorizedError("min", "17 < 18", Nil))) - } - - "pass when age within limits" in { - // Age must be between 16 an 60 - val ageResult = (jValue: JValue) => (for { - age <- field[Int]("age")(jValue).disjunction - _ <- min(16)(age).disjunction - _ <- max(60)(age).disjunction - } yield age).validation - val person = Person.applyJSON(field[String]("name"), ageResult) - person(json) mustEqual Success(Person("joe", 17)) - } - } - - case class Range(start: Int, end: Int) - - // This example shows: - // * a validation where result depends on more than one value - // * parse a List with invalid values - - "Range filtering" should { - val json = JsonParser.parse(""" [{"s":10,"e":17},{"s":12,"e":13},{"s":11,"e":8}] """) - - def ascending: (Int, Int) => Result[(Int, Int)] = (x1: Int, x2: Int) => - if (x1 > x2) Fail("asc", x1 + " > " + x2) else (x1, x2).success - - // Valid 
range is a range having start <= end - implicit def rangeJSON: JSONR[Range] = new JSONR[Range] { - def read(json: JValue) = { - (for { - s <- field[Int]("s")(json).disjunction - e <- field[Int]("e")(json).disjunction - r <- ascending(s, e).disjunction - } yield Range.tupled(r)).validation - } - } - - "fail if lists contains invalid ranges" in { - val r = fromJSON[List[Range]](json) - r mustEqual Failure(NonEmptyList(UncategorizedError("asc", "11 > 8", Nil))) - } - - "optionally return only valid ranges" in { - val ranges = json.children.map(fromJSON[Range]).filter(_.isSuccess).sequence[({type λ[α]=ValidationNel[Error, α]})#λ, Range] - ranges mustEqual Success(List(Range(10, 17), Range(12, 13))) - } - } - -} diff --git a/core/json/.gitignore b/core/json/.gitignore deleted file mode 100644 index 93f62278b3..0000000000 --- a/core/json/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -project/boot -lib_managed -.ensime -!project/build \ No newline at end of file diff --git a/core/json/README.md b/core/json/README.md deleted file mode 100644 index 6311e29b20..0000000000 --- a/core/json/README.md +++ /dev/null @@ -1,779 +0,0 @@ -Parsing and formatting utilities for JSON. - -A central concept in lift-json library is Json AST which models the structure of -a JSON document as a syntax tree. - - sealed abstract class JValue - case object JNothing extends JValue // 'zero' for JValue - case object JNull extends JValue - case class JString(s: String) extends JValue - case class JDouble(num: Double) extends JValue - case class JInt(num: BigInt) extends JValue - case class JBool(value: Boolean) extends JValue - case class JObject(obj: List[JField]) extends JValue - case class JArray(arr: List[JValue]) extends JValue - - case class JField(String, JValue) - -All features are implemented in terms of above AST. Functions are used to transform -the AST itself, or to transform the AST between different formats. Common transformations -are summarized in a following picture. - -![Json AST](https://github.com/lift/lift/raw/master/framework/lift-base/lift-json/json.png) - -Summary of the features: - -* Fast JSON parser -* LINQ style queries -* Case classes can be used to extract values from parsed JSON -* Diff & merge -* DSL to produce valid JSON -* XPath like expressions and HOFs to manipulate JSON -* Pretty and compact printing -* XML conversions -* Serialization -* Low level pull parser API - -Installation -============ - -It comes with Lift, but non-Lift users can add lift-json as a dependency in following ways. -Note, replace XXX with correct Lift version. - -### SBT users - -Add dependency to your project description: - - val lift_json = "net.liftweb" %% "lift-json" % "XXX" - -### Maven users - -Add dependency to your pom: - - - net.liftweb - lift-json - XXX - - -### Others - -Download following jars: - -* https://oss.sonatype.org/content/groups/scala-tools/net/liftweb/lift-json_2.11/2.6.2/lift-json_2.11-2.6.2.jar -* http://mirrors.ibiblio.org/pub/mirrors/maven2/com/thoughtworks/paranamer/paranamer/2.1/paranamer-2.1.jar -* scalap (Only for Scala-2.9 compatible versions) - -Extras ------- - -* [lift-json-ext](https://github.com/lift/framework/tree/master/core/json-ext) - -Support for Box, Enum, Joda-Time, ... - -* [lift-json-scalaz](https://github.com/lift/framework/tree/master/core/json-scalaz) - -Applicative style parsing with Scalaz - -Migration from older versions -============================= - -2.6 -> -------------------------------------------------- - -JField is no longer a JValue. 
This means more type safety since it is no longer possible -to create invalid JSON where JFields are added directly into JArrays for instance. Most -noticeable consequence of this change is that map, transform, find and filter come in -two versions: - - def map(f: JValue => JValue): JValue - def mapField(f: JField => JField): JValue - def transform(f: PartialFunction[JValue, JValue]): JValue - def transformField(f: PartialFunction[JField, JField]): JValue - def find(p: JValue => Boolean): Option[JValue] - def findField(p: JField => Boolean): Option[JField] - ... - -Use *Field functions to traverse fields in the JSON, and use the functions without 'Field' -in the name to traverse values in the JSON. - -2.2 -> ------- - -Path expressions were changed after 2.2 version. Previous versions returned JField which -unnecessarily complicated the use of the expressions. If you have used path expressions -with pattern matching like: - - val JField("bar", JInt(x)) = json \ "foo" \ "bar" - -It is now required to change that to: - - val JInt(x) = json \ "foo" \ "bar" - -Parsing JSON -============ - -Any valid json can be parsed into internal AST format. - - scala> import net.liftweb.json._ - scala> parse(""" { "numbers" : [1, 2, 3, 4] } """) - res0: net.liftweb.json.JsonAST.JValue = - JObject(List(JField(numbers,JArray(List(JInt(1), JInt(2), JInt(3), JInt(4)))))) - -Producing JSON -============== - -DSL rules ---------- - -* Primitive types map to JSON primitives. -* Any seq produces JSON array. - - scala> val json = List(1, 2, 3) - - scala> compact(render(json)) - - res0: String = [1,2,3] - -* Tuple2[String, A] produces field. - - scala> val json = ("name" -> "joe") - - scala> compact(render(json)) - - res1: String = {"name":"joe"} - -* ~ operator produces object by combining fields. - - scala> val json = ("name" -> "joe") ~ ("age" -> 35) - - scala> compact(render(json)) - - res2: String = {"name":"joe","age":35} - -* Any value can be optional. Field and value is completely removed when it doesn't have a value. - - scala> val json = ("name" -> "joe") ~ ("age" -> Some(35)) - - scala> compact(render(json)) - - res3: String = {"name":"joe","age":35} - - scala> val json = ("name" -> "joe") ~ ("age" -> (None: Option[Int])) - - scala> compact(render(json)) - - res4: String = {"name":"joe"} - -Example -------- - - object JsonExample extends Application { - import net.liftweb.json._ - import net.liftweb.json.JsonDSL._ - - case class Winner(id: Long, numbers: List[Int]) - case class Lotto(id: Long, winningNumbers: List[Int], winners: List[Winner], drawDate: Option[java.util.Date]) - - val winners = List(Winner(23, List(2, 45, 34, 23, 3, 5)), Winner(54, List(52, 3, 12, 11, 18, 22))) - val lotto = Lotto(5, List(2, 45, 34, 23, 7, 5, 3), winners, None) - - val json = - ("lotto" -> - ("lotto-id" -> lotto.id) ~ - ("winning-numbers" -> lotto.winningNumbers) ~ - ("draw-date" -> lotto.drawDate.map(_.toString)) ~ - ("winners" -> - lotto.winners.map { w => - (("winner-id" -> w.id) ~ - ("numbers" -> w.numbers))})) - - println(compact(render(json))) - } - - scala> JsonExample - {"lotto":{"lotto-id":5,"winning-numbers":[2,45,34,23,7,5,3],"winners": - [{"winner-id":23,"numbers":[2,45,34,23,3,5]},{"winner-id":54,"numbers":[52,3,12,11,18,22]}]}} - -Example produces following pretty printed JSON. 
Notice that draw-date field is not rendered since its value is None: - - scala> pretty(render(JsonExample.json)) - - { - "lotto":{ - "lotto-id":5, - "winning-numbers":[2,45,34,23,7,5,3], - "winners":[{ - "winner-id":23, - "numbers":[2,45,34,23,3,5] - },{ - "winner-id":54, - "numbers":[52,3,12,11,18,22] - }] - } - } - -Merging & Diffing ------------------ - -Two JSONs can be merged and diffed with each other. -Please see more examples in [MergeExamples.scala](./src/test/scala/net/liftweb/json/MergeExamples.scala) and [DiffExamples.scala](src/test/scala/net/liftweb/json/DiffExamples.scala) - - scala> import net.liftweb.json._ - - scala> val lotto1 = parse("""{ - "lotto":{ - "lotto-id":5, - "winning-numbers":[2,45,34,23,7,5,3] - "winners":[{ - "winner-id":23, - "numbers":[2,45,34,23,3,5] - }] - } - }""") - - scala> val lotto2 = parse("""{ - "lotto":{ - "winners":[{ - "winner-id":54, - "numbers":[52,3,12,11,18,22] - }] - } - }""") - - scala> val mergedLotto = lotto1 merge lotto2 - scala> pretty(render(mergedLotto)) - res0: String = - { - "lotto":{ - "lotto-id":5, - "winning-numbers":[2,45,34,23,7,5,3], - "winners":[{ - "winner-id":23, - "numbers":[2,45,34,23,3,5] - },{ - "winner-id":54, - "numbers":[52,3,12,11,18,22] - }] - } - } - - scala> val Diff(changed, added, deleted) = mergedLotto diff lotto1 - changed: net.liftweb.json.JsonAST.JValue = JNothing - added: net.liftweb.json.JsonAST.JValue = JNothing - deleted: net.liftweb.json.JsonAST.JValue = JObject(List((lotto,JObject(List((winners, - JArray(List(JObject(List(JField(winner-id,JInt(54)), JField(numbers,JArray( - List(JInt(52), JInt(3), JInt(12), JInt(11), JInt(18), JInt(22)))))))))))))) - - -Querying JSON -============= - -"LINQ" style ------------- - -JSON values can be extracted using for-comprehensions. -Please see more examples in [JsonQueryExamples.scala](./src/test/scala/net/liftweb/json/JsonQueryExamples.scala) - - scala> import net.liftweb.json._ - scala> val json = parse(""" - { "name": "joe", - "children": [ - { - "name": "Mary", - "age": 5 - }, - { - "name": "Mazy", - "age": 3 - } - ] - } - """) - - scala> for { JObject(o) <- json; JField("age", JInt(age)) <- o } yield age - res0: List[BigInt] = List(5, 3) - - scala> for { - JObject(child) <- json - JField("name", JString(name)) <- child - JField("age", JInt(age)) <- child - if age > 4 - } yield (name, age) - res1: List[(String, BigInt)] = List((Mary,5)) - -XPath + HOFs ------------- - -Json AST can be queried using XPath like functions. Following REPL session shows the usage of -'\\', '\\\\', 'find', 'filter', 'transform', 'remove' and 'values' functions. 
- - The example json is: - - { - "person": { - "name": "Joe", - "age": 35, - "spouse": { - "person": { - "name": "Marilyn" - "age": 33 - } - } - } - } - - Translated to DSL syntax: - - scala> import net.liftweb.json._ - scala> import net.liftweb.json.JsonDSL._ - - scala> val json = - ("person" -> - ("name" -> "Joe") ~ - ("age" -> 35) ~ - ("spouse" -> - ("person" -> - ("name" -> "Marilyn") ~ - ("age" -> 33) - ) - ) - ) - - scala> json \\ "spouse" - res0: net.liftweb.json.JsonAST.JValue = JObject(List(JField(person,JObject(List((name,JString(Marilyn)), (age,JInt(33))))))) - - scala> compact(render(res0)) - res1: String = {"person":{"name":"Marilyn","age":33}} - - scala> compact(render(json \\ "name")) - res2: String = {"name":"Joe","name":"Marilyn"} - - scala> compact(render((json removeField { _ == JField("name", JString("Marilyn")) }) \\ "name")) - res3: String = {"name":"Joe"} - - scala> compact(render(json \ "person" \ "name")) - res4: String = "Joe" - - scala> compact(render(json \ "person" \ "spouse" \ "person" \ "name")) - res5: String = "Marilyn" - - scala> json findField { - case JField("name", _) => true - case _ => false - } - res6: Option[net.liftweb.json.JsonAST.JField] = Some(JField(name,JString(Joe))) - - scala> json filterField { - case JField("name", _) => true - case _ => false - } - res7: List[net.liftweb.json.JsonAST.JField] = List(JField(name,JString(Joe)), JField(name,JString(Marilyn))) - - scala> json transformField { - case ("name", JString(s)) => ("NAME", JString(s.toUpperCase)) - } - res8: net.liftweb.json.JsonAST.JValue = JObject(List(JField(person,JObject(List( - JField(NAME,JString(JOE)), JField(age,JInt(35)), JField(spouse,JObject(List( - JField(person,JObject(List(JField(NAME,JString(MARILYN)), JField(age,JInt(33))))))))))))) - - scala> json.values - res8: scala.collection.immutable.Map[String,Any] = Map(person -> Map(name -> Joe, age -> 35, spouse -> Map(person -> Map(name -> Marilyn, age -> 33)))) - -Indexed path expressions work too and values can be unboxed using type expressions. - - scala> val json = parse(""" - { "name": "joe", - "children": [ - { - "name": "Mary", - "age": 5 - }, - { - "name": "Mazy", - "age": 3 - } - ] - } - """) - - scala> (json \ "children")(0) - res0: net.liftweb.json.JsonAST.JValue = JObject(List(JField(name,JString(Mary)), JField(age,JInt(5)))) - - scala> (json \ "children")(1) \ "name" - res1: net.liftweb.json.JsonAST.JValue = JString(Mazy) - - scala> json \\ classOf[JInt] - res2: List[net.liftweb.json.JsonAST.JInt#Values] = List(5, 3) - - scala> json \ "children" \\ classOf[JString] - res3: List[net.liftweb.json.JsonAST.JString#Values] = List(Mary, Mazy) - - -Extracting values -================= - -Case classes can be used to extract values from parsed JSON. Non-existing values -can be extracted into scala.Option and strings can be automatically converted into -java.util.Dates. -Please see more examples in [ExtractionExamplesSpec.scala](./src/test/scala/net/liftweb/json/ExtractionExamplesSpec.scala) - - scala> import net.liftweb.json._ - scala> implicit val formats = DefaultFormats // Brings in default date formats etc. 
- scala> case class Child(name: String, age: Int, birthdate: Option[java.util.Date]) - scala> case class Address(street: String, city: String) - scala> case class Person(name: String, address: Address, children: List[Child]) - scala> val json = parse(""" - { "name": "joe", - "address": { - "street": "Bulevard", - "city": "Helsinki" - }, - "children": [ - { - "name": "Mary", - "age": 5, - "birthdate": "2004-09-04T18:06:22Z" - }, - { - "name": "Mazy", - "age": 3 - } - ] - } - """) - - scala> json.extract[Person] - res0: Person = Person(joe,Address(Bulevard,Helsinki),List(Child(Mary,5,Some(Sat Sep 04 18:06:22 EEST 2004)), Child(Mazy,3,None))) - -By default the constructor parameter names must match JSON field names. However, sometimes JSON -field names contain characters which are not allowed in Scala identifiers. There are two -solutions for this (see src/test/scala/net/liftweb/json/LottoExample.scala for a bigger example). - -Use backticks. - - scala> case class Person(`first-name`: String) - -Use a transform function to postprocess the AST. - - scala> case class Person(firstname: String) - scala> json transformField { - case JField("first-name", x) => ("firstname", x) - } - -The extraction function tries to find the best matching constructor when a case class has auxiliary -constructors. For instance extracting from JSON {"price":350} into the following case class -will use the auxiliary constructor instead of the primary constructor. - - scala> case class Bike(make: String, price: Int) { - def this(price: Int) = this("Trek", price) - } - scala> parse(""" {"price":350} """).extract[Bike] - res0: Bike = Bike(Trek,350) - -Primitive values can be extracted from JSON primitives or fields. - - scala> (json \ "name").extract[String] - res0: String = "joe" - - scala> ((json \ "children")(0) \ "birthdate").extract[Date] - res1: java.util.Date = Sat Sep 04 21:06:22 EEST 2004 - -The DateFormat can be changed by overriding 'DefaultFormats' (or by implementing trait 'Formats'). - - scala> implicit val formats = new DefaultFormats { - override def dateFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'") - } - -A JSON object can be extracted to a Map[String, _] too. Each field becomes a key-value pair -in the resulting Map. - - scala> val json = parse(""" - { - "name": "joe", - "addresses": { - "address1": { - "street": "Bulevard", - "city": "Helsinki" - }, - "address2": { - "street": "Soho", - "city": "London" - } - } - }""") - - scala> case class PersonWithAddresses(name: String, addresses: Map[String, Address]) - scala> json.extract[PersonWithAddresses] - res0: PersonWithAddresses("joe", Map("address1" -> Address("Bulevard", "Helsinki"), - "address2" -> Address("Soho", "London"))) - - -Serialization -============= - -Case classes can be serialized and deserialized.
-Please see other examples in [SerializationExamples.scala](./src/test/scala/net/liftweb/json/SerializationExamples.scala) - - scala> import net.liftweb.json._ - scala> import net.liftweb.json.Serialization.{read, write} - scala> implicit val formats = Serialization.formats(NoTypeHints) - scala> val ser = write(Child("Mary", 5, None)) - scala> read[Child](ser) - res1: Child = Child(Mary,5,None) - -Serialization supports: - -* Arbitrarily deep case class graphs -* All primitive types, including BigInt and Symbol -* List, Seq, Array, Set and Map (note, keys of the Map must be strings: Map[String, _]) -* scala.Option -* java.util.Date -* Polymorphic Lists (see below) -* Tuples (see below) -* Recursive types -* Serialization of fields of a class (see below) -* Custom serializer functions for types which are not supported (see below) - -Serializing polymorphic Lists ------------------------------ - -Type hints are required when serializing polymorphic (or heterogeneous) Lists. Serialized JSON objects -will get an extra field named 'jsonClass' (the name can be changed by overriding 'typeHintFieldName' from Formats). - - scala> trait Animal - scala> case class Dog(name: String) extends Animal - scala> case class Fish(weight: Double) extends Animal - scala> case class Animals(animals: List[Animal]) - scala> implicit val formats = Serialization.formats(ShortTypeHints(List(classOf[Dog], classOf[Fish]))) - scala> val ser = write(Animals(Dog("pluto") :: Fish(1.2) :: Nil)) - ser: String = {"animals":[{"jsonClass":"Dog","name":"pluto"},{"jsonClass":"Fish","weight":1.2}]} - - scala> read[Animals](ser) - res0: Animals = Animals(List(Dog(pluto), Fish(1.2))) - -ShortTypeHints outputs the short classname for all instances of configured objects. FullTypeHints outputs the full -classname. Other strategies can be implemented by extending the TypeHints trait. - -Serializing Tuples ------------------------------ - -If you need to have a heterogeneous collection of completely unrelated types, you might find tuples -useful for representing that data structure. By default, when you serialize a tuple you'll get an -object based on the constructor of the appropriate tuple class. So, for example, if you serialize -a `("bacon", 2)`, you'll get: - - {"_1": "bacon", "_2": 2} - -However, as of Lift 3.1, you can enable `tuplesAsArrays` to represent these tuples as heterogeneous -JSON arrays. To enable this feature you just need to provide a formats object that turns on the -feature. - - scala> import net.liftweb.json._ - scala> import Serialization._ - scala> implicit val formats = new DefaultFormats { override val tuplesAsArrays = true } - scala> write(("bacon", 2)) - res1: String = ["bacon",2] - -When this feature is enabled: - -* lift-json will write tuples as arrays. -* lift-json will correctly extract tuples from arrays. -* lift-json will continue to deserialize tuples that were serialized in the old manner, as an object. - -The major limitation of this feature is that it doesn't reliably support Scala primitives, so it is -currently disabled by default. If you're using this feature, you should use the Java boxed types -in your class signatures instead of the Scala primitive types. (So, `java.lang.Integer` instead -of `scala.Int`.)
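For the reverse direction, here is a minimal sketch of reading the array form back into a tuple. It assumes the same `formats` with `tuplesAsArrays = true` shown above; the tuple type ascription and the REPL output are illustrative, not taken from a recorded session:

    scala> import net.liftweb.json._
    scala> import net.liftweb.json.Serialization.{read, write}
    scala> implicit val formats = new DefaultFormats { override val tuplesAsArrays = true }
    scala> val ser = write(("bacon", 2))           // written as a heterogeneous JSON array
    ser: String = ["bacon",2]
    scala> read[(String, java.lang.Integer)](ser)  // boxed Integer, per the note above
    res2: (String, java.lang.Integer) = (bacon,2)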
- -Serializing fields of a class ------------------------------ - -To enable serialization of fields, a FieldSerializer can be added for some type: - - implicit val formats = DefaultFormats + FieldSerializer[WildDog]() - -Now the type WildDog (and all subtypes) gets serialized with all its fields (+ constructor parameters). -FieldSerializer takes two optional parameters which can be used to intercept the field serialization: - - case class FieldSerializer[A: Manifest]( - serializer: PartialFunction[(String, Any), Option[(String, Any)]] = Map(), - deserializer: PartialFunction[JField, JField] = Map() - ) - -Those PartialFunctions are called just before a field is serialized or deserialized. Some useful PFs to -rename and ignore fields are provided: - - val dogSerializer = FieldSerializer[WildDog]( - renameTo("name", "animalname") orElse ignore("owner"), - renameFrom("animalname", "name")) - - implicit val formats = DefaultFormats + dogSerializer - -Serializing non-supported types -------------------------------- - -It is possible to plug in custom serializer + deserializer functions for any type. -Now, if we have a non case class Interval (thus, not supported by default), we can still serialize it -by providing following serializer. - - scala> class Interval(start: Long, end: Long) { - val startTime = start - val endTime = end - } - - scala> class IntervalSerializer extends CustomSerializer[Interval](format => ( - { - case JObject(JField("start", JInt(s)) :: JField("end", JInt(e)) :: Nil) => - new Interval(s.longValue, e.longValue) - }, - { - case x: Interval => - JObject(JField("start", JInt(BigInt(x.startTime))) :: - JField("end", JInt(BigInt(x.endTime))) :: Nil) - } - )) - - scala> implicit val formats = Serialization.formats(NoTypeHints) + new IntervalSerializer - -Custom serializer is created by providing two partial functions. The first evaluates to a value -if it can unpack the data from JSON. The second creates the desired JSON if the type matches. - -Extensions ----------- - -Module lift-json-ext contains extensions to extraction and serialization. Following types are supported. - - // Lift's box - implicit val formats = net.liftweb.json.DefaultFormats + new JsonBoxSerializer - - // Scala enums - implicit val formats = net.liftweb.json.DefaultFormats + new EnumSerializer(MyEnum) - // or - implicit val formats = net.liftweb.json.DefaultFormats + new EnumNameSerializer(MyEnum) - - // Joda Time - implicit val formats = net.liftweb.json.DefaultFormats ++ JodaTimeSerializers.all - -XML support -=========== - -JSON structure can be converted to XML node and vice versa. -Please see more examples in [XmlExamples.scala](./src/test/scala/net/liftweb/json/XmlExamples.scala) - - scala> import net.liftweb.json.Xml.{toJson, toXml} - scala> val xml = - - - 1 - Harry - - - 2 - David - - - - scala> val json = toJson(xml) - scala> pretty(render(json)) - res3: { - "users":{ - "user":[{ - "id":"1", - "name":"Harry" - },{ - "id":"2", - "name":"David" - }] - } - } - -Now, the above example has two problems. First, the id is converted to String while we might want it as an Int. This -is easy to fix by mapping JString(s) to JInt(s.toInt). The second problem is more subtle. The conversion function -decides to use JSON array because there's more than one user-element in XML. Therefore a structurally equivalent -XML document which happens to have just one user-element will generate a JSON document without JSON array. This -is rarely a desired outcome. 
These both problems can be fixed by following transformation function. - - scala> json transformField { - case JField("id", JString(s)) => ("id", JInt(s.toInt)) - case JField("user", x: JObject) => ("user", JArray(x :: Nil)) - } - -Other direction is supported too. Converting JSON to XML: - - scala> toXml(json) - res5: scala.xml.NodeSeq = 1Harry2David - -Low level pull parser API -========================= - -Pull parser API is provided for cases requiring extreme performance. It improves parsing -performance by two ways. First, no intermediate AST is generated. Second, you can stop -parsing at any time, skipping rest of the stream. Note, this parsing style is recommended -only as an optimization. Above mentioned functional APIs are easier to use. - -Consider following example which shows how to parse one field value from a big JSON. - - scala> val json = """ - { - ... - "firstName": "John", - "lastName": "Smith", - "address": { - "streetAddress": "21 2nd Street", - "city": "New York", - "state": "NY", - "postalCode": 10021 - }, - "phoneNumbers": [ - { "type": "home", "number": "212 555-1234" }, - { "type": "fax", "number": "646 555-4567" } - ], - ... - }""" - - scala> val parser = (p: Parser) => { - def parse: BigInt = p.nextToken match { - case FieldStart("postalCode") => p.nextToken match { - case IntVal(code) => code - case _ => p.fail("expected int") - } - case End => p.fail("no field named 'postalCode'") - case _ => parse - } - - parse - } - - scala> val postalCode = parse(json, parser) - postalCode: BigInt = 10021 - -Pull parser is a function `Parser => A`, in this example it is concretely `Parser => BigInt`. -Constructed parser recursively reads tokens until it finds `FieldStart("postalCode")` -token. After that the next token must be `IntVal`, otherwise parsing fails. It returns parsed -integer and stops parsing immediately. - -FAQ -=== - -Q1: I have a JSON object and I want to extract it to a case class: - - scala> case class Person(name: String, age: Int) - scala> val json = """{"name":"joe","age":15}""" - -But extraction fails: - - scala> parse(json).extract[Person] - net.liftweb.json.MappingException: Parsed JSON values do not match with class constructor - -A1: - -Extraction does not work for classes defined in REPL. Compile the case class definitions -with scalac and import those to REPL. - -Kudos -===== - -* The original idea for DSL syntax was taken from Lift mailing list ([by Marius](http://markmail.org/message/lniven2hn22vhupu)). - -* The idea for AST and rendering was taken from [Real World Haskell book](http://book.realworldhaskell.org/read/writing-a-library-working-with-json-data.html). diff --git a/core/json/benchmark/Benchmark.scala b/core/json/benchmark/Benchmark.scala deleted file mode 100644 index 92c72c1768..0000000000 --- a/core/json/benchmark/Benchmark.scala +++ /dev/null @@ -1,25 +0,0 @@ -trait Benchmark { - def run(name: String, warmup: Int, count: Int)(f: => Any) = { - print("warmup... 
") - repeat(warmup)(f) - System.gc - println("done") - val t = time { - repeat(count)(f) - } - println(name + "\t" + t + "ms") - } - - def repeat(count: Int)(f: => Any) = { - var i = 0; while (i < count) { - f - i += 1 - } - } - - def time(f: => Any): Long = { - val start = System.currentTimeMillis - f - System.currentTimeMillis - start - } -} diff --git a/core/json/benchmark/Jsonbench.scala b/core/json/benchmark/Jsonbench.scala deleted file mode 100644 index 4ec146b122..0000000000 --- a/core/json/benchmark/Jsonbench.scala +++ /dev/null @@ -1,46 +0,0 @@ -/** - * Following libs are required to compile and run the benchmark: - * - jackson-core-asl-1.4.1.jar - * - jackson-mapper-asl-1.4.1.jar - * - lift-json-???.jar - */ -object Jsonbench extends Benchmark { - import scala.util.parsing.json.JSON - import org.codehaus.jackson._ - import org.codehaus.jackson.map._ - import net.liftweb.json.JsonParser - - def main(args: Array[String]) = { - benchmark("Scala std") { JSON.parse(json) } - val mapper = new ObjectMapper - benchmark("Jackson") { mapper.readValue(json, classOf[JsonNode]) } - benchmark("lift-json") { JsonParser.parse(json) } - } - - def benchmark(name: String)(f: => Any) = run(name, 50000, 50000)(f) - - val json = """ -{ - "glossary": { - "title": "example glossary", - "GlossDiv": { - "title": "S", - "GlossList": { - "GlossEntry": { - "ID": "SGML", - "SortAs": "SGML", - "GlossTerm": "Standard Generalized Markup Language", - "Acronym": "SGML", - "Abbrev": "ISO 8879:1986", - "GlossDef": { - "para": "A meta-markup language, used to create markup languages such as DocBook.", - "GlossSeeAlso": ["GML", "XML"] - }, - "GlossSee": "markup" - } - } - } - } -} -""" -} diff --git a/core/json/benchmark/README.md b/core/json/benchmark/README.md deleted file mode 100644 index c1bed3755a..0000000000 --- a/core/json/benchmark/README.md +++ /dev/null @@ -1,62 +0,0 @@ -Benchmarking standard Scala Json parser, Jackson parser and lift-json parser ----------------------------------------------------------------------------- - -Benchmark measures how long it takes to parse 50 000 times the first JSON document -from http://www.json.org/example.html. - -Facts: - -* Ubuntu 8.10 -* Lenovo T60p -* Scala 2.7.4 -* java version "1.6.0_10" - Java(TM) SE Runtime Environment (build 1.6.0_10-b33) - Java HotSpot(TM) Server VM (build 11.0-b15, mixed mode) -* Exec: scala Jsonbench - -Parsing 50 000 json documents: - - Scala std 167127 ms - Jackson 370 ms - lift-json 465 ms - -Summary: - -* Jackson was fastest. -* lift-json was about 350 times faster than standard Scala parser. - -Serialization benchmark, Java serialization and lift-json ---------------------------------------------------------- - -See Serbench.scala - -Facts: - -* Ubuntu 8.10 -* Lenovo T60p -* Scala 2.7.4 -* java version "1.6.0_10" - Java(TM) SE Runtime Environment (build 1.6.0_10-b33) - Java HotSpot(TM) Server VM (build 11.0-b15, mixed mode) -* Exec: scala Serbench - -Serializing 20 000 instances (No type hints): - - Java serialization (full) 1889 ms - lift-json (full) 1542 ms - Java serialization (ser) 373 ms - lift-json (ser) 833 ms - Java serialization (deser) 1396 ms - lift-json (deser) 615 ms - -Serializing 20 000 instances (Using type hints, both short and full gives similar results): - - Java serialization (full) 1912 ms - lift-json (full) 2268 ms - -Summary: - -* Total time about same (serialization + deserialization). -* Java serializes faster. -* lift-json deserializes faster. -* Using type hints comes with a performance penalty. 
diff --git a/core/json/benchmark/Serbench.scala b/core/json/benchmark/Serbench.scala deleted file mode 100644 index b1d0632dec..0000000000 --- a/core/json/benchmark/Serbench.scala +++ /dev/null @@ -1,87 +0,0 @@ -import net.liftweb.json._ -import net.liftweb.json.Serialization.{read, write} -import net.liftweb.json.JsonDSL._ - -import java.io._ -import java.util.Date - -object Serbench extends Benchmark { - val classes = List(classOf[Project], classOf[Team], classOf[Employee], classOf[Language]) - val project = Project("test", - new Date, - Some(Language("Scala", 2.75)), - List( - Team("QA", List(Employee("John Doe", 5), Employee("Mike", 3))), - Team("Impl", List(Employee("Mark", 4), Employee("Mary", 5), Employee("Nick Noob", 1))))) - - val jvalueProject = { - ("name" -> "test") ~ - ("startDate" -> new Date().getTime) ~ - ("lang" -> - (("name" -> "Scala") ~ - ("version" -> 2.75))) ~ - ("teams" -> List( - ("role" -> "QA") ~ - ("members" -> List(("name" -> "John Doe") ~ ("experience" -> 5), - ("name" -> "Mike") ~ ("experience" -> 3))), - ("role" -> "Impl") ~ - ("members" -> List(("name" -> "Mark") ~ ("experience" -> 4), - ("name" -> "Mary") ~ ("experience" -> 5), - ("name" -> "Nick Noob") ~ ("experience" -> 1))) - )) - - } - - lazy val bigJValue = { - def appendN(json: JObject, count: Int): JObject = { - if (count == 0) json else json ~ appendN(json, count - 1) - } - - appendN(jvalueProject, 100) - } - - def main(args: Array[String]) = { - println("** No type hints") - new Bench()(Serialization.formats(NoTypeHints)) - println("** Short type hints") - new Bench()(Serialization.formats(ShortTypeHints(classes))) - println("** Full type hints") - new Bench()(Serialization.formats(FullTypeHints(classes))) - println("** JValue Serialization") - new JValueBench() - } - - class Bench(implicit formats: Formats) { - benchmark("Java serialization (full)") { deserialize(serialize(project)) } - benchmark("lift-json (full)") { read[Project](write(project)) } - benchmark("Java serialization (ser)") { serialize(project) } - benchmark("lift-json (ser)") { write(project) } - val ser1 = serialize(project) - val ser2 = write(project) - benchmark("Java serialization (deser)") { deserialize(ser1) } - benchmark("lift-json (deser)") { read[Project](ser2) } - } - - class JValueBench { - benchmark("lift-json (ser compact(render(jvalue))") { compact(render(bigJValue)) } - benchmark("lift-json (ser compactRender(jvalue)") { compactRender(bigJValue) } - } - - def benchmark(name: String)(f: => Any) = run(name, 20000, 20000)(f) - - def deserialize(array: Array[Byte]) = - new ObjectInputStream(new ByteArrayInputStream(array)).readObject.asInstanceOf[Project] - - def serialize(project: Project) = { - val baos = new ByteArrayOutputStream() - val oos = new ObjectOutputStream(baos) - oos.writeObject(project) - baos.toByteArray - } - - case class Project(name: String, startDate: Date, lang: Option[Language], teams: List[Team]) extends Serializable - case class Language(name: String, version: Double) extends Serializable - case class Team(role: String, members: List[Employee]) extends Serializable - case class Employee(name: String, experience: Int) extends Serializable - -} diff --git a/core/json/json.dot b/core/json/json.dot deleted file mode 100644 index 7bd09a1cee..0000000000 --- a/core/json/json.dot +++ /dev/null @@ -1,25 +0,0 @@ -digraph json { - s -> ast [label="parse"] - dsl -> ast [label="'implicit'"] - ast -> ser [label="serialize"] - ser -> ast [label="deserialize"] - - ast -> ast [label="map, \\, merge, diff, remove, 
..."] - - ast -> class [label="extract"] - class -> ast [label="decompose"] - - ast -> xml [label="toXml"] - xml -> ast [label="toJson"] - - ast -> doc [label="render"] - doc -> String [label="compact, pretty"] - - ast [label="Json AST"] - dsl [label="Json DSL"] - ser [label="Serialized\nstring"] - doc [label="<>\nDocument"] - s [label="String"] - class [label="Case class"] - xml [label="XML"] -} diff --git a/core/json/json.png b/core/json/json.png deleted file mode 100644 index 0dc928b0d8..0000000000 Binary files a/core/json/json.png and /dev/null differ diff --git a/core/json/src/main/scala/net/liftweb/json/Diff.scala b/core/json/src/main/scala/net/liftweb/json/Diff.scala deleted file mode 100644 index 91ebabcf88..0000000000 --- a/core/json/src/main/scala/net/liftweb/json/Diff.scala +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright 2009-2010 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package json - -/** A difference between two JSONs (j1 diff j2). - * @param changed what has changed from j1 to j2 - * @param added what has been added to j2 - * @param deleted what has been deleted from j1 - */ -case class Diff(changed: JValue, added: JValue, deleted: JValue) { - def map(f: JValue => JValue): Diff = { - def applyTo(x: JValue) = x match { - case JNothing => JNothing - case _ => f(x) - } - Diff(applyTo(changed), applyTo(added), applyTo(deleted)) - } - - private[json] def toField(name: String): Diff = { - def applyTo(x: JValue) = x match { - case JNothing => JNothing - case _ => JObject(JField(name, x)) - } - Diff(applyTo(changed), applyTo(added), applyTo(deleted)) - } -} - -/** Computes a diff between two JSONs. - */ -object Diff { - /** Return a diff. - *

- * Example:

-   * val Diff(c, a, d) = ("name", "joe") ~ ("age", 10) diff ("fname", "joe") ~ ("age", 11)
-   * c = JObject(("age",JInt(11)) :: Nil)
-   * a = JObject(("fname",JString("joe")) :: Nil)
-   * d = JObject(("name",JString("joe")) :: Nil)
-   * 
- */ - def diff(val1: JValue, val2: JValue): Diff = (val1, val2) match { - case (x, y) if x == y => Diff(JNothing, JNothing, JNothing) - case (JObject(xs), JObject(ys)) => diffFields(xs, ys) - case (JArray(xs), JArray(ys)) => diffVals(xs, ys) - case (JInt(x), JInt(y)) if (x != y) => Diff(JInt(y), JNothing, JNothing) - case (JDouble(x), JDouble(y)) if (x != y) => Diff(JDouble(y), JNothing, JNothing) - case (JString(x), JString(y)) if (x != y) => Diff(JString(y), JNothing, JNothing) - case (JBool(x), JBool(y)) if (x != y) => Diff(JBool(y), JNothing, JNothing) - case (x, y) => Diff(JNothing, y, x) - } - - private def diffFields(vs1: List[JField], vs2: List[JField]) = { - def diffRec(xleft: List[JField], yleft: List[JField]): Diff = xleft match { - case Nil => Diff(JNothing, if (yleft.isEmpty) JNothing else JObject(yleft), JNothing) - case x :: xs => yleft find (_.name == x.name) match { - case Some(y) => - val Diff(c1, a1, d1) = diff(x.value, y.value).toField(y.name) - val Diff(c2, a2, d2) = diffRec(xs, yleft filterNot (_ == y)) - Diff(c1 merge c2, a1 merge a2, d1 merge d2) - case None => - val Diff(c, a, d) = diffRec(xs, yleft) - Diff(c, a, JObject(x :: Nil) merge d) - } - } - - diffRec(vs1, vs2) - } - - private def diffVals(vs1: List[JValue], vs2: List[JValue]) = { - def diffRec(xleft: List[JValue], yleft: List[JValue]): Diff = (xleft, yleft) match { - case (xs, Nil) => Diff(JNothing, JNothing, if (xs.isEmpty) JNothing else JArray(xs)) - case (Nil, ys) => Diff(JNothing, if (ys.isEmpty) JNothing else JArray(ys), JNothing) - case (x :: xs, y :: ys) => - val Diff(c1, a1, d1) = diff(x, y) - val Diff(c2, a2, d2) = diffRec(xs, ys) - Diff(c1 ++ c2, a1 ++ a2, d1 ++ d2) - } - - diffRec(vs1, vs2) - } - - private[json] trait Diffable { this: JValue => - /** Return a diff. - * @see net.liftweb.json.Diff#diff - */ - def diff(other: JValue) = Diff.diff(this, other) - } -} diff --git a/core/json/src/main/scala/net/liftweb/json/Extraction.scala b/core/json/src/main/scala/net/liftweb/json/Extraction.scala deleted file mode 100644 index e429a87619..0000000000 --- a/core/json/src/main/scala/net/liftweb/json/Extraction.scala +++ /dev/null @@ -1,493 +0,0 @@ -/* - * Copyright 2009-2010 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package json - -import java.lang.reflect.{Constructor => JConstructor, Type, InvocationTargetException} -import java.lang.{Integer => JavaInteger, Long => JavaLong, Short => JavaShort, Byte => JavaByte, Boolean => JavaBoolean, Double => JavaDouble, Float => JavaFloat} -import java.util.Date -import java.sql.Timestamp -import scala.reflect.Manifest - -/** Function to extract values from JSON AST using case classes. - * - * See: ExtractionExamples.scala - */ -object Extraction { - import Meta._ - import Meta.Reflection._ - - /** Extract a case class from JSON. 
- * @see net.liftweb.json.JsonAST.JValue#extract - * @throws MappingException is thrown if extraction fails - */ - def extract[A](json: JValue)(implicit formats: Formats, mf: Manifest[A]): A = { - def allTypes(mf: Manifest[_]): List[Class[_]] = mf.runtimeClass :: (mf.typeArguments flatMap allTypes) - - try { - val types = allTypes(mf) - extract0(json, types.head, types.tail).asInstanceOf[A] - } catch { - case e: MappingException => throw e - case e: Exception => throw new MappingException("unknown error", e) - } - } - - /** Extract a case class from JSON. - * @see net.liftweb.json.JsonAST.JValue#extract - */ - def extractOpt[A](json: JValue)(implicit formats: Formats, mf: Manifest[A]): Option[A] = - try { Some(extract(json)(formats, mf)) } catch { case _: MappingException => None } - - /** Decompose a case class into JSON. - *

- * Example:

-   * case class Person(name: String, age: Int)
-   * implicit val formats = net.liftweb.json.DefaultFormats
-   * Extraction.decompose(Person("joe", 25)) == JObject(JField("age",JInt(25)) :: JField("name",JString("joe")) :: Nil)
-   * 
- */ - def decompose(a: Any)(implicit formats: Formats): JValue = { - def prependTypeHint(clazz: Class[_], o: JObject) = - JObject(JField(formats.typeHintFieldName, JString(formats.typeHints.hintFor(clazz))) :: o.obj) - - def mkObject(clazz: Class[_], fields: List[JField]) = formats.typeHints.containsHint_?(clazz) match { - case true => prependTypeHint(clazz, JObject(fields)) - case false => JObject(fields) - } - - val serializer = formats.typeHints.serialize - val any = a.asInstanceOf[AnyRef] - if (formats.customSerializer(formats).isDefinedAt(a)) { - formats.customSerializer(formats)(a) - } else if (!serializer.isDefinedAt(a)) { - any match { - case null => JNull - case x: JValue => x - case x if primitive_?(x.getClass) => primitive2jvalue(x)(formats) - case x: Map[_, _] => JObject((x map { case (k: String, v) => JField(k, decompose(v)) }).toList) - case x: Iterable[_] => JArray(x.toList map decompose) - case x if (x.getClass.isArray) => JArray(x.asInstanceOf[Array[_]].toList map decompose) - case x: Option[_] => x.flatMap[JValue] { y => Some(decompose(y)) }.getOrElse(JNothing) - case x: Product if formats.tuplesAsArrays && tuple_?(x.getClass) => - JArray(x.productIterator.toList.map(decompose)) - case x => - val fields = getDeclaredFields(x.getClass) - val constructorArgs = primaryConstructorArgs(x.getClass).map{ case (name, _) => (name,fields.get(name)) } - constructorArgs.collect { case (name, Some(f)) => - f.setAccessible(true) - JField(unmangleName(name), decompose(f get x)) - } match { - case args => - val fields = formats.fieldSerializer(x.getClass).map { serializer => - Reflection.fields(x.getClass).map { - case (mangledName, _) => - val n = Meta.unmangleName(mangledName) - val fieldVal = Reflection.getField(x, mangledName) - val s = serializer.serializer orElse Map((n, fieldVal) -> Some(n, fieldVal)) - s((n, fieldVal)).map { case (name, value) => JField(name, decompose(value)) } - .getOrElse(JField(n, JNothing)) - } - } getOrElse Nil - val uniqueFields = fields filterNot (f => args.find(_.name == f.name).isDefined) - mkObject(x.getClass, uniqueFields ++ args) - } - } - } else prependTypeHint(any.getClass, serializer(any)) - } - - /** Flattens the JSON to a key/value map. - */ - def flatten(json: JValue): Map[String, String] = { - def escapePath(str: String) = str - - def flatten0(path: String, json: JValue): Map[String, String] = { - json match { - case JNothing | JNull => Map() - case JString(s) => Map(path -> ("\"" + JsonAST.quote(s) + "\"")) - case JDouble(num) => Map(path -> num.toString) - case JInt(num) => Map(path -> num.toString) - case JBool(value) => Map(path -> value.toString) - case JObject(obj) => obj.foldLeft(Map[String, String]()) { case (map, JField(name, value)) => - map ++ flatten0(path + "." + escapePath(name), value) - } - case JArray(arr) => arr.length match { - case 0 => Map(path -> "[]") - case _ => arr.foldLeft((Map[String, String](), 0)) { - (tuple, value) => (tuple._1 ++ flatten0(path + "[" + tuple._2 + "]", value), tuple._2 + 1) - }._1 - } - } - } - - flatten0("", json) - } - - /** Unflattens a key/value map to a JSON object. 
- */ - def unflatten(map: Map[String, String]): JValue = { - import scala.util.matching.Regex - - def extractValue(value: String): JValue = value.toLowerCase match { - case "" => JNothing - case "null" => JNull - case "true" => JBool(true) - case "false" => JBool(false) - case "[]" => JArray(Nil) - case x @ _ => - if (value.charAt(0).isDigit) { - if (value.indexOf('.') == -1) JInt(BigInt(value)) - else JDouble(JsonParser.parseDouble(value)) - } - else JString(JsonParser.unquote(value.substring(1))) - } - - def submap(prefix: String): Map[String, String] = - Map( - map.filter(t => t._1 == prefix || t._1.startsWith(prefix + ".") || t._1.startsWith(prefix + "[")).map( - t => (t._1.substring(prefix.length), t._2) - ).toList.toArray: _* - ) - - val ArrayProp = new Regex("""^(\.([^\.\[]+))\[(\d+)\].*$""") - val ArrayElem = new Regex("""^(\[(\d+)\]).*$""") - val OtherProp = new Regex("""^(\.([^\.\[]+)).*$""") - - val uniquePaths = map.keys.foldLeft[Set[String]](Set()) { - (set, key) => - key match { - case ArrayProp(p, f, i) => set + p - case OtherProp(p, f) => set + p - case ArrayElem(p, i) => set + p - case x @ _ => set + x - } - }.toList.sortWith(_ < _) // Sort is necessary to get array order right - - uniquePaths.foldLeft[JValue](JNothing) { (jvalue, key) => - jvalue.merge(key match { - case ArrayProp(p, f, i) => JObject(List(JField(f, unflatten(submap(key))))) - case ArrayElem(p, i) => JArray(List(unflatten(submap(key)))) - case OtherProp(p, f) => JObject(List(JField(f, unflatten(submap(key))))) - case "" => extractValue(map(key)) - }) - } - } - - private[this] def mkMapping(clazz: Class[_], typeArgs: Seq[Class[_]])(implicit formats: Formats): Meta.Mapping = { - if (clazz == classOf[Option[_]] || clazz == classOf[List[_]] || clazz == classOf[Set[_]] || clazz.isArray) { - Col(TypeInfo(clazz, None), mkMapping(typeArgs.head, typeArgs.tail)) - } else if (clazz == classOf[Map[_, _]]) { - Dict(mkMapping(typeArgs.tail.head, typeArgs.tail.tail)) - } else if (formats.tuplesAsArrays && tuple_?(clazz)) { - val childMappings = typeArgs.map(c => mkMapping(c, Nil)).toList - HCol(TypeInfo(clazz, None), childMappings) - } else { - mappingOf(clazz, typeArgs) - } - } - - private def extract0(json: JValue, clazz: Class[_], typeArgs: Seq[Class[_]]) - (implicit formats: Formats): Any = { - val mapping = mkMapping(clazz, typeArgs) - extract0(json, mapping) - } - - def extract(json: JValue, target: TypeInfo)(implicit formats: Formats): Any = - extract0(json, mappingOf(target.clazz)) - - private def extract0(json: JValue, mapping: Mapping)(implicit formats: Formats): Any = { - def newInstance(constructor: Constructor, json: JValue) = { - def findBestConstructor = { - if (constructor.choices.size == 1) { - constructor.choices.head // optimized common case - } else { - val argNames = json match { - case JObject(fs) => fs.map(_.name) - case x => Nil - } - constructor.bestMatching(argNames) - .getOrElse(fail("No constructor for type " + constructor.targetType.clazz + ", " + json)) - } - } - - def setFields(a: AnyRef, json: JValue, constructor: JConstructor[_]) = json match { - case o: JObject => - formats.fieldSerializer(a.getClass).map { serializer => - val constructorArgNames = - Reflection.constructorArgs(a.getClass, constructor, formats.parameterNameReader, None).map(_._1).toSet - val jsonFields = o.obj.map { f => - val JField(n, v) = (serializer.deserializer orElse Map(f -> f))(f) - (n, (n, v)) - }.toMap - - val fieldsToSet = - Reflection.fields(a.getClass).filterNot(f => constructorArgNames.contains(f._1)) - - 
fieldsToSet.foreach { case (name, typeInfo) => - jsonFields.get(name).foreach { case (n, v) => - val typeArgs = typeInfo.parameterizedType - .map(_.getActualTypeArguments.map(_.asInstanceOf[Class[_]]).toList.zipWithIndex - .map { case (t, idx) => - if (t == classOf[java.lang.Object]) ScalaSigReader.readField(name, a.getClass, idx) - else t - }) - val value = extract0(v, typeInfo.clazz, typeArgs.getOrElse(Nil)) - Reflection.setField(a, n, value) - } - } - } - a - case _ => a - } - - def instantiate = { - val c = findBestConstructor - val jconstructor = c.constructor - val args = c.args.map(a => build(json \ a.path, a)) - try { - if (jconstructor.getDeclaringClass == classOf[java.lang.Object]) - fail("No information known about type") - - val instance = jconstructor.newInstance(args.map(_.asInstanceOf[AnyRef]).toArray: _*) - setFields(instance.asInstanceOf[AnyRef], json, jconstructor) - } catch { - case exception: Exception => - exception match { - case matchedException @ (_:IllegalArgumentException | _:InstantiationException) => - fail("Parsed JSON values do not match with class constructor\nargs=" + - args.mkString(",") + "\narg types=" + args.map(a => if (a != null) - a.asInstanceOf[AnyRef].getClass.getName else "null").mkString(",") + - "\nconstructor=" + jconstructor, matchedException) - - case exceptionThrownInConstructor: InvocationTargetException => - fail("An exception was thrown in the class constructor during extraction", exceptionThrownInConstructor) - - case unmatchedException => - throw unmatchedException - } - } - } - - def mkWithTypeHint(typeHint: String, fields: List[JField], typeInfo: TypeInfo) = { - val obj = JObject(fields filterNot (_.name == formats.typeHintFieldName)) - val deserializer = formats.typeHints.deserialize - if (!deserializer.isDefinedAt(typeHint, obj)) { - val concreteClass = formats.typeHints.classFor(typeHint) getOrElse fail("Do not know how to deserialize '" + typeHint + "'") - val typeArgs = typeInfo.parameterizedType - .map(_.getActualTypeArguments.toList.map(Meta.rawClassOf)).getOrElse(Nil) - build(obj, mappingOf(concreteClass, typeArgs)) - } else deserializer(typeHint, obj) - } - - val custom = formats.customDeserializer(formats) - if (custom.isDefinedAt(constructor.targetType, json)) { - custom(constructor.targetType, json) - } else { - json match { - case JNull => - null - - case JObject(TypeHint(t, fs)) => - mkWithTypeHint(t, fs, constructor.targetType) - - case _ => - instantiate - } - } - } - - object TypeHint { - def unapply(fs: List[JField]): Option[(String, List[JField])] = - if (formats.typeHints == NoTypeHints) None - else { - val grouped = fs groupBy (_.name == formats.typeHintFieldName) - if (grouped.isDefinedAt(true)) - Some((grouped(true).head.value.values.toString, grouped.get(false).getOrElse(Nil))) - else None - } - } - - def newPrimitive(elementType: Class[_], elem: JValue) = convert(elem, elementType, formats) - - def newCollection(root: JValue, m: Mapping, constructor: Array[_] => Any) = { - val array: Array[_] = root match { - case JArray(arr) => arr.map(build(_, m)).toArray - case JNothing | JNull => Array[AnyRef]() - case x => fail("Expected collection but got " + x + " for root " + root + " and mapping " + m) - } - - constructor(array) - } - - def newOption(root: JValue, m: Mapping) = { - root match { - case JNothing | JNull => None - case x => Option(build(x, m)) - } - } - - def newTuple(root: JValue, mappings: List[Mapping]): Any = { - root match { - case JArray(items) if items.nonEmpty && items.length <= tuples.length => - 
val builtItems: Seq[Object] = items.zip(mappings).map({ - case (item, mapping) => - build(item, mapping).asInstanceOf[Object] - }) - val tupleIndex = items.length - 1 - - val typedTupleConstructor = tupleConstructors.get(tupleIndex).getOrElse { - throw new IllegalArgumentException(s"Cannot instantiate a tuple of length ${items.length} even though that should be a valid tuple length.") - } - typedTupleConstructor.newInstance(builtItems: _*) - - case JArray(items) => - throw new IllegalArgumentException("Cannot create a tuple of length " + items.length) - - case JObject(items) if items.forall(_.name.startsWith("_")) => - val sortedItems = items.sortWith { (i1, i2) => - val numerialName1 = i1.name.drop(1).toInt - val numerialName2 = i2.name.drop(1).toInt - - numerialName1 < numerialName2 - } - newTuple(JArray(sortedItems.map(_.value)), mappings) - - case x => - throw new IllegalArgumentException("Got unexpected while attempting to create tuples: " + x) - } - } - - def build(root: JValue, mapping: Mapping): Any = mapping match { - case Value(targetType) => - convert(root, targetType, formats) - - case c: Constructor => - newInstance(c, root) - - case Cycle(targetType) => - build(root, mappingOf(targetType)) - - case Arg(path, m, optional) => - mkValue(root, m, path, optional) - - case HCol(targetType, mappings) if formats.tuplesAsArrays => - val c = targetType.clazz - if (tuples.find(_.isAssignableFrom(c)).isDefined) { - newTuple(root, mappings) - } else { - fail("Expected tuple but found " + mappings) - } - - case Col(targetType, m) => - val custom = formats.customDeserializer(formats) - val c = targetType.clazz - - if (custom.isDefinedAt(targetType, root)) custom(targetType, root) - else if (c == classOf[List[_]]) newCollection(root, m, a => List(a: _*)) - else if (c == classOf[Set[_]]) newCollection(root, m, a => Set(a: _*)) - else if (c.isArray) newCollection(root, m, mkTypedArray(c)) - else if (classOf[Seq[_]].isAssignableFrom(c)) newCollection(root, m, a => List(a: _*)) - else if (c == classOf[Option[_]]) newOption(root, m) - else fail("Expected collection but got " + m + " for class " + c) - case Dict(m) => root match { - case JObject(xs) => Map(xs.map(x => (x.name, build(x.value, m))): _*) - case x => fail("Expected object but got " + x) - } - } - - def mkTypedArray(c: Class[_])(a: Array[_]) = { - import java.lang.reflect.Array.{newInstance => newArray} - - a.foldLeft((newArray(c.getComponentType, a.length), 0)) { (tuple, e) => { - java.lang.reflect.Array.set(tuple._1, tuple._2, e); (tuple._1, tuple._2 + 1) - }}._1 - } - - def mkList(root: JValue, m: Mapping) = root match { - case JArray(arr) => arr.map(build(_, m)) - case JNothing | JNull => Nil - case x => fail("Expected array but got " + x) - } - - def mkValue(root: JValue, mapping: Mapping, path: String, optional: Boolean) = { - if (optional && root == JNothing) { - None - } else { - try { - val x = build(root, mapping) - if (optional) Option(x) else x - } catch { - case e @ MappingException(msg, _) => - if (optional && (root == JNothing || root == JNull)) { - None - } else { - fail("No usable value for " + path + "\n" + msg, e) - } - } - } - } - - build(json, mapping) - } - - private def convert(json: JValue, targetType: Class[_], formats: Formats): Any = json match { - case JInt(x) if (targetType == classOf[Int]) => x.intValue - case JInt(x) if (targetType == classOf[JavaInteger]) => new JavaInteger(x.intValue) - case JInt(x) if (targetType == classOf[BigInt]) => x - case JInt(x) if (targetType == classOf[Long]) => x.longValue - 
case JInt(x) if (targetType == classOf[JavaLong]) => new JavaLong(x.longValue) - case JInt(x) if (targetType == classOf[Double]) => x.doubleValue - case JInt(x) if (targetType == classOf[JavaDouble]) => new JavaDouble(x.doubleValue) - case JInt(x) if (targetType == classOf[Float]) => x.floatValue - case JInt(x) if (targetType == classOf[JavaFloat]) => new JavaFloat(x.floatValue) - case JInt(x) if (targetType == classOf[Short]) => x.shortValue - case JInt(x) if (targetType == classOf[JavaShort]) => new JavaShort(x.shortValue) - case JInt(x) if (targetType == classOf[Byte]) => x.byteValue - case JInt(x) if (targetType == classOf[JavaByte]) => new JavaByte(x.byteValue) - case JInt(x) if (targetType == classOf[String]) => x.toString - case JInt(x) if (targetType == classOf[Number]) => x.longValue - case JDouble(x) if (targetType == classOf[Double]) => x - case JDouble(x) if (targetType == classOf[JavaDouble]) => new JavaDouble(x) - case JDouble(x) if (targetType == classOf[Float]) => x.floatValue - case JDouble(x) if (targetType == classOf[JavaFloat]) => new JavaFloat(x.floatValue) - case JDouble(x) if (targetType == classOf[String]) => x.toString - case JDouble(x) if (targetType == classOf[Int]) => x.intValue - case JDouble(x) if (targetType == classOf[Long]) => x.longValue - case JDouble(x) if (targetType == classOf[Number]) => x - case JString(s) if (targetType == classOf[String]) => s - case JString(s) if (targetType == classOf[Symbol]) => Symbol(s) - case JString(s) if (targetType == classOf[Date]) => formats.dateFormat.parse(s).getOrElse(fail("Invalid date '" + s + "'")) - case JString(s) if (targetType == classOf[Timestamp]) => new Timestamp(formats.dateFormat.parse(s).getOrElse(fail("Invalid date '" + s + "'")).getTime) - case JBool(x) if (targetType == classOf[Boolean]) => x - case JBool(x) if (targetType == classOf[JavaBoolean]) => new JavaBoolean(x) - case j: JValue if (targetType == classOf[JValue]) => j - case j: JObject if (targetType == classOf[JObject]) => j - case j: JArray if (targetType == classOf[JArray]) => j - case JNull => null - case JNothing => - fail("Did not find value which can be converted into " + targetType.getName) - case _ => - val custom = formats.customDeserializer(formats) - val typeInfo = TypeInfo(targetType, None) - - if (custom.isDefinedAt(typeInfo, json)) { - custom(typeInfo, json) - } else { - fail("Do not know how to convert " + json + " into " + targetType) - } - } -} diff --git a/core/json/src/main/scala/net/liftweb/json/FieldSerializer.scala b/core/json/src/main/scala/net/liftweb/json/FieldSerializer.scala deleted file mode 100644 index a42777671e..0000000000 --- a/core/json/src/main/scala/net/liftweb/json/FieldSerializer.scala +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright 2009-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package json - -/** - * Serializer which serializes all fields of a class too. 
- *
- * Serialization can be intercepted by giving two optional PartialFunctions as
- * constructor parameters:
- *
- * FieldSerializer[WildDog](
- *   renameTo("name", "animalname") orElse ignore("owner"),
- *   renameFrom("animalname", "name")
- * )
- */ -case class FieldSerializer[A: Manifest]( - serializer: PartialFunction[(String, Any), Option[(String, Any)]] = Map(), - deserializer: PartialFunction[JField, JField] = Map() -) - -object FieldSerializer { - def renameFrom(name: String, newName: String): PartialFunction[JField, JField] = { - case JField(`name`, x) => JField(newName, x) - } - - def ignore(name: String): PartialFunction[(String, Any), Option[(String, Any)]] = { - case (`name`, _) => None - } - - def renameTo(name: String, newName: String): PartialFunction[(String, Any), Option[(String, Any)]] = { - case (`name`, x) => Some(newName, x) - } -} diff --git a/core/json/src/main/scala/net/liftweb/json/Formats.scala b/core/json/src/main/scala/net/liftweb/json/Formats.scala deleted file mode 100644 index 61be1871a5..0000000000 --- a/core/json/src/main/scala/net/liftweb/json/Formats.scala +++ /dev/null @@ -1,308 +0,0 @@ -/* - * Copyright 2009-2010 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package json - -import java.util.{Date, TimeZone} -import java.util.concurrent.ConcurrentHashMap - -import scala.collection.concurrent.{Map=>ConcurrentScalaMap} -import scala.collection.JavaConverters._ - -/** Formats to use when converting JSON. - * Formats are usually configured by using an implicit parameter: - *
- * implicit val formats = net.liftweb.json.DefaultFormats
- */ -trait Formats { self: Formats => - val dateFormat: DateFormat - val typeHints: TypeHints = NoTypeHints - val customSerializers: List[Serializer[_]] = Nil - val fieldSerializers: List[(Class[_], FieldSerializer[_])] = Nil - - /** - * Support for the tuple decomposition/extraction that represents tuples as JSON - * arrays. This provides better support for heterogenous arrays in JSON, but enable it at your - * own risk as it does change the behavior of serialization/deserialization and comes - * with some caveats (such as Scala primitives not being recognized reliably during extraction). - */ - val tuplesAsArrays = false - - /** - * The name of the field in JSON where type hints are added (jsonClass by default) - */ - val typeHintFieldName = "jsonClass" - - /** - * Parameter name reading strategy. By deafult 'paranamer' is used. - */ - val parameterNameReader: ParameterNameReader = Meta.ParanamerReader - - /** - * Adds the specified type hints to this formats. - */ - def + (extraHints: TypeHints): Formats = new Formats { - val dateFormat = Formats.this.dateFormat - override val typeHintFieldName = self.typeHintFieldName - override val parameterNameReader = self.parameterNameReader - override val typeHints = self.typeHints + extraHints - override val customSerializers = self.customSerializers - override val fieldSerializers = self.fieldSerializers - } - - /** - * Adds the specified custom serializer to this formats. - */ - def + (newSerializer: Serializer[_]): Formats = new Formats { - val dateFormat = Formats.this.dateFormat - override val typeHintFieldName = self.typeHintFieldName - override val parameterNameReader = self.parameterNameReader - override val typeHints = self.typeHints - override val customSerializers = newSerializer :: self.customSerializers - override val fieldSerializers = self.fieldSerializers - } - - /** - * Adds the specified custom serializers to this formats. - */ - def ++ (newSerializers: Traversable[Serializer[_]]): Formats = - newSerializers.foldLeft(this)(_ + _) - - /** - * Adds a field serializer for a given type to this formats. - */ - def + [A](newSerializer: FieldSerializer[A])(implicit mf: Manifest[A]): Formats = new Formats { - val dateFormat = Formats.this.dateFormat - override val typeHintFieldName = self.typeHintFieldName - override val parameterNameReader = self.parameterNameReader - override val typeHints = self.typeHints - override val customSerializers = self.customSerializers - // The type inferencer infers an existential type below if we use - // value :: list instead of list.::(value), and we get a feature - // warning. - override val fieldSerializers: List[(Class[_], FieldSerializer[_])] = self.fieldSerializers.::((mf.runtimeClass: Class[_], newSerializer)) - } - - private[json] def fieldSerializer(clazz: Class[_]): Option[FieldSerializer[_]] = { - import ClassDelta._ - - val ord = Ordering[Int].on[(Class[_], FieldSerializer[_])](x => delta(x._1, clazz)) - fieldSerializers filter (_._1.isAssignableFrom(clazz)) match { - case Nil => None - case xs => Some((xs min ord)._2) - } - } - - def customSerializer(implicit format: Formats) = - customSerializers.foldLeft(Map(): PartialFunction[Any, JValue]) { (acc, x) => - acc.orElse(x.serialize) - } - - def customDeserializer(implicit format: Formats) = - customSerializers.foldLeft(Map(): PartialFunction[(TypeInfo, JValue), Any]) { (acc, x) => - acc.orElse(x.deserialize) - } -} - -/** Conversions between String and Date. 
- */ -trait DateFormat { - def parse(s: String): Option[Date] - def format(d: Date): String -} - -trait Serializer[A] { - def deserialize(implicit format: Formats): PartialFunction[(TypeInfo, JValue), A] - def serialize(implicit format: Formats): PartialFunction[Any, JValue] -} - -/** Type hints can be used to alter the default conversion rules when converting - * Scala instances into JSON and vice versa. Type hints must be used when converting - * class which is not supported by default (for instance when class is not a case class). - *

- * Example:

- * class DateTime(val time: Long)
- *
- * val hints = new ShortTypeHints(classOf[DateTime] :: Nil) {
- *   override def serialize: PartialFunction[Any, JObject] = {
- *     case t: DateTime => JObject(JField("t", JInt(t.time)) :: Nil)
- *   }
- *
- *   override def deserialize: PartialFunction[(String, JObject), Any] = {
- *     case ("DateTime", JObject(JField("t", JInt(t)) :: Nil)) => new DateTime(t.longValue)
- *   }
- * }
- * implicit val formats = DefaultFormats.withHints(hints)
- * 
- */ -trait TypeHints { - import ClassDelta._ - - val hints: List[Class[_]] - - /** Return hint for given type. - */ - def hintFor(clazz: Class[_]): String - - /** Return type for given hint. - */ - def classFor(hint: String): Option[Class[_]] - - def containsHint_?(clazz: Class[_]) = hints exists (_ isAssignableFrom clazz) - def deserialize: PartialFunction[(String, JObject), Any] = Map() - def serialize: PartialFunction[Any, JObject] = Map() - - def components: List[TypeHints] = List(this) - - /** - * Adds the specified type hints to this type hints. - */ - def + (hints: TypeHints): TypeHints = CompositeTypeHints(components ::: hints.components) - - private[TypeHints] case class CompositeTypeHints(override val components: List[TypeHints]) extends TypeHints { - val hints: List[Class[_]] = components.flatMap(_.hints) - - /** - * Chooses most specific class. - */ - def hintFor(clazz: Class[_]): String = components.filter(_.containsHint_?(clazz)) - .map(th => (th.hintFor(clazz), th.classFor(th.hintFor(clazz)).getOrElse(sys.error("hintFor/classFor not invertible for " + th)))) - .sortWith((x, y) => (delta(x._2, clazz) - delta(y._2, clazz)) < 0).head._1 - - def classFor(hint: String): Option[Class[_]] = { - def hasClass(h: TypeHints) = - scala.util.control.Exception.allCatch opt (h.classFor(hint)) map (_.isDefined) getOrElse(false) - - components find (hasClass) flatMap (_.classFor(hint)) - } - - override def deserialize: PartialFunction[(String, JObject), Any] = components.foldLeft[PartialFunction[(String, JObject),Any]](Map()) { - (result, cur) => result.orElse(cur.deserialize) - } - - override def serialize: PartialFunction[Any, JObject] = components.foldLeft[PartialFunction[Any, JObject]](Map()) { - (result, cur) => result.orElse(cur.serialize) - } - } -} - -private[json] object ClassDelta { - def delta(class1: Class[_], class2: Class[_]): Int = { - if (class1 == class2) 0 - else if (class1.getInterfaces.contains(class2)) 0 - else if (class2.getInterfaces.contains(class1)) 0 - else if (class1.isAssignableFrom(class2)) { - 1 + delta(class1, class2.getSuperclass) - } - else if (class2.isAssignableFrom(class1)) { - 1 + delta(class1.getSuperclass, class2) - } - else sys.error("Don't call delta unless one class is assignable from the other") - } -} - -/** Do not use any type hints. - */ -case object NoTypeHints extends TypeHints { - val hints = Nil - def hintFor(clazz: Class[_]) = sys.error("NoTypeHints does not provide any type hints.") - def classFor(hint: String) = None -} - -/** Use short class name as a type hint. - */ -case class ShortTypeHints(hints: List[Class[_]]) extends TypeHints { - def hintFor(clazz: Class[_]) = clazz.getName.substring(clazz.getName.lastIndexOf(".")+1) - def classFor(hint: String) = hints find (hintFor(_) == hint) -} - -/** Use full class name as a type hint. - */ -case class FullTypeHints(hints: List[Class[_]]) extends TypeHints { - private val hintsToClass: ConcurrentScalaMap[String, Class[_]] = - new ConcurrentHashMap[String, Class[_]]().asScala ++= hints.map(clazz => hintFor(clazz) -> clazz) - - def hintFor(clazz: Class[_]) = clazz.getName - - def classFor(hint: String): Option[Class[_]] = { - hintsToClass.get(hint).orElse { - val clazz = Thread.currentThread.getContextClassLoader.loadClass(hint) - hintsToClass.putIfAbsent(hint, clazz).orElse(Some(clazz)) - } - } -} - -/** Default date format is UTC time. 
- */ -object DefaultFormats extends DefaultFormats { - val losslessDate = new ThreadLocal(new java.text.SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'")) - val UTC = TimeZone.getTimeZone("UTC") -} - -trait DefaultFormats extends Formats { - import java.text.{ParseException, SimpleDateFormat} - - val dateFormat = new DateFormat { - def parse(s: String) = try { - Some(formatter.parse(s)) - } catch { - case e: ParseException => None - } - - def format(d: Date) = formatter.format(d) - - private def formatter = { - val f = dateFormatter - f.setTimeZone(DefaultFormats.UTC) - f - } - } - - protected def dateFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'") - - /** Lossless date format includes milliseconds too. - */ - def lossless = new DefaultFormats { - override def dateFormatter = DefaultFormats.losslessDate() - } - - /** Default formats with given TypeHints. - */ - def withHints(hints: TypeHints) = new DefaultFormats { - override val typeHints = hints - } -} - -private[json] class ThreadLocal[A](init: => A) extends java.lang.ThreadLocal[A] with (() => A) { - override def initialValue = init - def apply = get -} - -class CustomSerializer[A: Manifest]( - ser: Formats => (PartialFunction[JValue, A], PartialFunction[Any, JValue])) extends Serializer[A] { - - val Class = implicitly[Manifest[A]].runtimeClass - - def deserialize(implicit format: Formats) = { - case (TypeInfo(Class, _), json) => - if (ser(format)._1.isDefinedAt(json)) ser(format)._1(json) - else throw new MappingException("Can't convert " + json + " to " + Class) - } - - def serialize(implicit format: Formats) = ser(format)._2 -} diff --git a/core/json/src/main/scala/net/liftweb/json/JsonAST.scala b/core/json/src/main/scala/net/liftweb/json/JsonAST.scala deleted file mode 100644 index 05290b4398..0000000000 --- a/core/json/src/main/scala/net/liftweb/json/JsonAST.scala +++ /dev/null @@ -1,1172 +0,0 @@ -/* - * Copyright 2009-2015 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package json - -import scala.language.implicitConversions -import java.io.Writer -import java.lang.StringBuilder - -/** - * This object contains the abstract syntax tree (or AST) for working with JSON objects in - * lift-json. - * - * The purpose of the JSON AST is to represent and manipulate JSON by leveraging Scala language - * features like types, case classes, etc. The AST should allow you to represent anything you - * could imagine from JSON land using the Scala type system. - * - * Everything in the AST has a single root: JValue. A JValue could, quite literally, be anything. - * It could be an an object (represented by `[[JObject]]`), a string (`[[JString]]`), a null - * (`[[JNull]]`), and so on. 
So, when constructing a JSON object with the AST directly you might - * construct something like the following: - * - * {{{ - * JObject(JField("bacon", JBool(true)) :: JField("spinach", JBool(false))) - * }}} - * - * Once serialized to the string representation of JSON you would end up with the following: - * - * {{{ - * { - * "bacon":true, - * "spinach":false - * } - * }}} - */ -object JsonAST { - /** - * Concatenate a sequence of `[[JValue]]`s together. - * - * This would be useful in the event that you have a handful of `JValue` instances that need to be - * smacked together into one unit. - * - * For example: - * - * {{{ - * concat(JInt(1), JInt(2)) == JArray(List(JInt(1), JInt(2))) - * }}} - * - */ - def concat(values: JValue*) = values.foldLeft(JNothing: JValue)(_ ++ _) - - object JValue extends Merge.Mergeable - - /** - * The base type for all things that represent distinct JSON entities in the AST. - * - * Most members of the AST will extend this class. The one exception is `[[JField]]` which does - * not extend this class because it really ''can't'' properly exist as a - * first-class citizen of JSON. - */ - sealed abstract class JValue extends Diff.Diffable { - type Values - - /** - * An XPath-like expression to find a child of a `[[JObject]]` or a `[[JArray]]` of `JObject` - * by name. If you call this method on anything other than a `JObject` or `JArray` of `JObject`s - * you'll get a `[[JNothing]]`. - * - * This method is most useful if you have an object that you need to dig into in order to - * retrieve a specific value. So, let's say that you had a JSON object that looked - * something like this: - * - * {{{ - * { - * "name": "Joe", - * "profession": "Software Engineer", - * "catchphrase": { - * "name": "Alabama Cheer", - * "value": "Roll tide" - * } - * } - * }}} - * - * If for some reason you're interested in taking a look at Joe's catchphrase, you can - * query it using the `\` method to find it like so: - * - * Example: - * - * {{{ - * scala> json \ "catchphrase" - * res0: JValue = JObject(List(JField("name", JString("Alabama Cheer")), JField("value", JString("Roll tide")))) - * }}} - * - * Likewise, if you wanted to find Joe's name you could do the following: - * - * {{{ - * scala> json \ "name" - * res0: JValue = JString("Joe") - * }}} - * - * The result could be any subclass of `JValue`. - * In the event that the `JValue` you're operating on is actually an array of objects, you'll - * get back a `JArray` of the result of executing `\` on each object in the array. In the event - * nothing is found, you'll get a `JNothing`. - */ - def \(nameToFind: String): JValue = { - // Use :: instead of List() to avoid the extra array allocation for the variable arguments - findDirectByName(this :: Nil, nameToFind) match { - case Nil => JNothing - case x :: Nil => x - case x => JArray(x) - } - } - - private def findDirectByName(xs: List[JValue], name: String): List[JValue] = xs.flatMap { - case JObject(l) => - l.collect { - case JField(n, value) if n == name => value - } - case JArray(l) => findDirectByName(l, name) - case _ => Nil - } - - private def findDirect(xs: List[JValue], p: JValue => Boolean): List[JValue] = xs.flatMap { - case JObject(l) => - l.collect { - case JField(n, x) if p(x) => x - } - case JArray(l) => findDirect(l, p) - case x if p(x) => x :: Nil - case _ => Nil - } - - /** - * Find all children of a `[[JObject]]` with the matching name, returning an empty `JObject` if - * no matches are found. 
- * - * For example given this example JSON: - * - * {{{ - * { - * "name": "Joe", - * "profession": "Software Engineer", - * "catchphrase": { - * "name": "Alabama Cheer", - * "value": "Roll tide" - * } - * } - * }}} - * - * We might do the following: - * - * {{{ - * scala> json \\ "name" - * res2: JValue = JObject(List(JField(name,JString(Joe)), JField(name,JString(Alabama Cheer)))) - * }}} - */ - def \\(nameToFind: String): JObject = { - def find(json: JValue): List[JField] = json match { - case JObject(fields) => - fields.foldLeft(List[JField]()) { - case (matchingFields, JField(name, value)) => - matchingFields ::: - List(JField(name, value)).filter(_.name == nameToFind) ::: - find(value) - } - - case JArray(fields) => - fields.foldLeft(List[JField]()) { (matchingFields, children) => - matchingFields ::: find(children) - } - - case _ => - Nil - } - - JObject(find(this)) - } - - /** - * Find immediate children of this `[[JValue]]` that match a specific `JValue` subclass. - * - * This methid will search a `[[JObject]]` or `[[JArray]]` for values of a specific type and - * return a `List` of those values if they any are found. - * - * So given some JSON like so: - * - * {{{ - * [ - * { - * "thinga":1, - * "thingb":"bacon" - * },{ - * "thingc":3, - * "thingd":"Wakka" - * },{ - * "thinge":{ - * "thingf":4 - * }, - * "thingg":true - * } - * ] - * }}} - * - * You would use this method like so: - * - * {{{ - * scala> json \ classOf[JInt] - * res0: List[net.liftweb.json.JInt#Values] = List(1, 3) - * }}} - * - * This method does require that whatever type you're searching for is subtype of `JValue`. - */ - def \[A <: JValue](clazz: Class[A]): List[A#Values] = - findDirect(children, typePredicate(clazz) _).asInstanceOf[List[A]] map { _.values } - - /** - * Find all descendants of this `JValue` that match a specific `JValue` subclass. - * - * Unlike its cousin `\`, this method will recurse down into all children looking for - * type matches searching a `[[JObject]]` or `[[JArray]]` for values of a specific type and - * return a `List` of those values if they are found. - * - * So given some JSON like so: - * - * {{{ - * [ - * { - * "thinga":1, - * "thingb":"bacon" - * },{ - * "thingc":3, - * "thingd":"Wakka" - * },{ - * "thinge":{ - * "thingf":4 - * }, - * "thingg":true - * } - * ] - * }}} - * - * You would use this method like so: - * - * {{{ - * scala> json \\ classOf[JInt] - * res0: List[net.liftweb.json.JInt#Values] = List(1, 3, 4) - * }}} - */ - def \\[A <: JValue](clazz: Class[A]): List[A#Values] = - (this filter typePredicate(clazz) _).asInstanceOf[List[A]] map { _.values } - - private def typePredicate[A <: JValue](clazz: Class[A])(json: JValue) = json match { - case x if x.getClass == clazz => true - case _ => false - } - - /** - * Return the element in the `i`-th position from a `[[JArray]]`. - * Will return `JNothing` when invoked on any other kind of `JValue`. - * - * For example: - * - * {{{ - * scala> val array = JArray(JInt(1) :: JInt(2) :: Nil) - * array: net.liftweb.json.JsonAST.JArray = JArray(List(JInt(1), JInt(2))) - * - * scala> array(1) - * res0: net.liftweb.json.JsonAST.JValue = JInt(2) - * }}} - */ - def apply(i: Int): JValue = JNothing - - /** - * Return a representation of the values in this `[[JValue]]` in a native Scala structure. - * - * For example, you might invoke this on a `[[JObject]]` to have its fields returned - * as a `Map`. 
- * - * {{{ - * scala> JObject(JField("name", JString("joe")) :: Nil).values - * res0: scala.collection.immutable.Map[String,Any] = Map(name -> joe) - * }}} - */ - def values: Values - - /** - * Return direct child elements of this `JValue`, if this `JValue` is a `[[JObject]]` or `[[JArray]]`. - * - * This method is useful for getting all the values of a `JObject` or `JArray` and will return them as a - * `List[JValue]`. If the `JValue` you invoke this method on is not a `JObject` or `JArray` you will instead - * get `Nil`. - * - * Example: - * - * {{{ - * > JArray(JInt(1) :: JInt(2) :: Nil).children - * List(JInt(1), JInt(2)) - * }}} - * - * @return Direct children of this `JValue` if it is a `[[JObject]]` or - * `[[JArray]]`, or `[[JNothing]]` otherwise. - */ - def children: List[JValue] = this match { - case JObject(l) => l map (_.value) - case JArray(l) => l - case _ => Nil - } - - /** - * Fold over `JValue`s by applying a function to each element. - * - * @param f The function to apply, which takes an accumulator and the next item as paramaters. - * @param z The initial value for the fold. - */ - def fold[A](z: A)(f: (A, JValue) => A): A = { - def rec(acc: A, v: JValue) = { - val newAcc = f(acc, v) - v match { - case JObject(l) => - l.foldLeft(newAcc) { - case (a, JField(name, value)) => value.fold(a)(f) - } - case JArray(l) => - l.foldLeft(newAcc) { (a, e) => - e.fold(a)(f) - } - case _ => newAcc - } - } - rec(z, this) - } - - /** - * Fold over a series of `JField`s applying a function to each one. - * - * @param z The initial value for the fold. - * @param f The function to apply, which takes an accumulator as its first parameter - * and the next field as its second. - */ - def foldField[A](z: A)(f: (A, JField) => A): A = { - def rec(acc: A, v: JValue) = { - v match { - case JObject(l) => l.foldLeft(acc) { - case (a, field@JField(name, value)) => value.foldField(f(a, field))(f) - } - case JArray(l) => l.foldLeft(acc)((a, e) => e.foldField(a)(f)) - case _ => acc - } - } - rec(z, this) - } - - /** - * Return a new `JValue` resulting from applying the given function to each value, recursively. - * - * If this function is invoked on a `[[JObject]]`, it will iterate over the field values of that `JObject`. - * If this function is invoked on a `[[JArray]]`, it will iterate over the values of that `JArray`. - * If this function is invoked on any other kind of `JValue` it will simply pass that instance into the - * function you have provided. - * - * Example: - * - * {{{ - * JArray(JInt(1) :: JInt(2) :: Nil) map { - * case JInt(x) => JInt(x+1) - * case x => x - * } - * }}} - */ - def map(f: JValue => JValue): JValue = { - def rec(v: JValue): JValue = v match { - case JObject(l) => f(JObject(l.map { field => field.copy(value = rec(field.value)) })) - case JArray(l) => f(JArray(l.map(rec))) - case x => f(x) - } - rec(this) - } - - /** - * Return a new `JValue` resulting from applying the given function to each `[[JField]]` in a `[[JObject]]` or a - * `[[JArray]]` of `JObject`, recursively. 
- * - * Example: - * - * {{{ - * JObject(("age", JInt(10)) :: Nil) map { - * case ("age", JInt(x)) => ("age", JInt(x+1)) - * case x => x - * } - * }}} - * - * @see transformField - */ - def mapField(f: JField => JField): JValue = { - def rec(v: JValue): JValue = v match { - case JObject(l) => JObject(l.map { field => f(field.copy(value = rec(field.value))) }) - case JArray(l) => JArray(l.map(rec)) - case x => x - } - rec(this) - } - - /** Return a new `JValue` resulting from applying the given partial function `f`` - * to each field in JSON. - * - * Example: - * {{{ - * JObject(("age", JInt(10)) :: Nil) transformField { - * case ("age", JInt(x)) => ("age", JInt(x+1)) - * } - * }}} - */ - def transformField(f: PartialFunction[JField, JField]): JValue = mapField { x => - if (f.isDefinedAt(x)) f(x) else x - } - - /** - * Return a new `JValue` resulting from applying the given partial function - * to each value within this `JValue`. - * - * If this is a `JArray`, this means we will transform each value in the - * array and return an updated array. - * - * If this is a `JObject`, this means we will transform the value of each - * field of the object and the object in turn and return an updated object. - * - * If this is another type of `JValue`, the value is transformed directly. - * - * Note that this happens recursively, so you will receive both each value - * in an array ''and'' the array itself, or each field value in an object - * ''and'' the object itself. If an array contains arrays, we will recurse - * into them in turn. - * - * Examples: - * - * {{{ - * > JArray(JInt(1) :: JInt(2) :: Nil) transform { - * case JInt(x) => - * JInt(x+1) - * } - * res0: net.liftweb.json.JsonAST.JValue = JArray(List(JInt(2), JInt(3))) - * }}} - * - * Without type matching, notice that we get the result of the transform - * replacing the array: - * - * {{{ - * > JArray(JInt(1) :: JInt(2) :: Nil) transform { - * case _ => - * JString("hello") - * } - * res0: net.liftweb.json.JsonAST.JValue = JString("hello") - * }}} - * - * @return This `JValue` with its child values recursively transformed by - * the given `PartialFunction`, when defined. If the - * `PartialFunction` is undefined, leaves the child values - * untouched. - */ - def transform(f: PartialFunction[JValue, JValue]): JValue = map { x => - if (f.isDefinedAt(x)) f(x) else x - } - - /** - * Return a new `JValue` resulting from replacing the value at the specified field - * path with the replacement value provided. This has no effect if the path - * is empty or if the value is not a `[[JObject]]` instance. - * - * Example: - * - * {{{ - * > JObject(List(JField("foo", JObject(List(JField("bar", JInt(1))))))).replace("foo" :: "bar" :: Nil, JString("baz")) - * JObject(List(JField("foo", JObject(List(JField("bar", JString("baz"))))))) - * }}} - */ - def replace(l: List[String], replacement: JValue): JValue = { - def rep(l: List[String], in: JValue): JValue = { - l match { - case x :: xs => in match { - case JObject(fields) => JObject( - fields.map { - case JField(`x`, value) => JField(x, if (xs == Nil) replacement else rep(xs, value)) - case field => field - } - ) - case other => other - } - - case Nil => in - } - } - - rep(l, this) - } - - /** - * Return the first field from this `JValue` which matches the given predicate. - * - * When invoked on a `[[JObject]]` it will first attempt to see if the `JObject` has the field defined on it. 
- * Not finding the field defined, this method will recurse into the fields of that object and search for the - * value there. When invoked on or encountering a `[[JArray]]` during recursion this method will run its search - * on each member of the `JArray`. - * - * Example: - * - * {{{ - * > JObject(JField("age", JInt(2))) findField { - * case JField(n, v) => - * n == "age" - * } - * res0: Option[net.liftweb.json.JsonAST.JField] = Some(JField(age,JInt(2)) - * }}} - */ - def findField(p: JField => Boolean): Option[JField] = { - def find(json: JValue): Option[JField] = json match { - case JObject(fs) if (fs find p).isDefined => return fs find p - case JObject(fs) => fs.flatMap { case JField(n, v) => find(v) }.headOption - case JArray(l) => l.flatMap(find _).headOption - case _ => None - } - find(this) - } - - /** - * Return the first element from a `JValue` which matches the given predicate. - * - * Example: - * - * {{{ - * > JArray(JInt(1) :: JInt(2) :: Nil) find { _ == JInt(2) } - * res0: Option[net.liftweb.json.JsonAST.JValue] = Some(JInt(2)) - * }}} - */ - def find(p: JValue => Boolean): Option[JValue] = { - def find(json: JValue): Option[JValue] = { - json match { - case _ if p(json) => Some(json) - case JObject(fs) => fs.flatMap { case JField(n, v) => find(v) }.headOption - case JArray(l) => l.flatMap(find _).headOption - case _ => None - } - } - - find(this) - } - - /** - * Return a `List` of all fields that match the given predicate. Does not - * recurse into child elements, so this will only check a `JObject`'s field - * values. - * - * Example: - * - * {{{ - * > JObject(JField("age", JInt(10))) filterField { - * case JField("age", JInt(x)) if x > 18 => - * true - * - * case _ => - * false - * } - * res0: List[net.liftweb.json.JsonAST.JField] = List() - * > JObject(JField("age", JInt(10))) filterField { - * case JField("age", JInt(x)) if x < 18 => - * true - * - * case _ => - * false - * } - * res1: List[net.liftweb.json.JsonAST.JField] = List(JField(age,JInt(10))) - * }}} - * - * @return A `List` of `JField`s that match the given predicate `p`, or `Nil` - * if this `JValue` is not a `JObject`. - */ - def filterField(p: JField => Boolean): List[JField] = - foldField(List[JField]())((acc, e) => if (p(e)) e :: acc else acc).reverse - - /** - * Return a List of all values which matches the given predicate, recursively. - * - * Example: - * - * {{{ - * > JArray(JInt(1) :: JInt(2) :: Nil) filter { - * case JInt(x) => x > 1 - * case _ => false - * } - * res0: List[net.liftweb.json.JsonAST.JValue] = List(JInt(2)) - * }}} - * - * This operates recursively, so nested objects work too: - * {{{ - * > ((("boom" -> ("slam" -> "hello")) ~ ("shap" -> 3)): JObject) filter { - * case JString("hello") => true - * case _ => false - * } - * res0: List[net.liftweb.json.JsonAST.JValue] = List(JString(hello)) - * }}} - */ - def filter(p: JValue => Boolean): List[JValue] = - fold(List[JValue]())((acc, e) => if (p(e)) e :: acc else acc).reverse - - /** - * Create a new instance of `[[WithFilter]]` for Scala to use when using - * this `JValue` in a for comprehension. 
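- *
- * A small illustrative sketch of what this enables: the guard in the `for`
- * expression below desugars to a `withFilter` call, and the `yield` to `map`
- * on the resulting `WithFilter`.
- *
- * {{{
- * > for (value <- JArray(JInt(1) :: JInt(2) :: Nil) if value == JInt(2)) yield value
- * res0: List[net.liftweb.json.JsonAST.JValue] = List(JInt(2))
- * }}}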
- */ - def withFilter(p: JValue => Boolean) = new WithFilter(this, p) - - final class WithFilter(self: JValue, p: JValue => Boolean) { - def map[A](f: JValue => A): List[A] = self filter p map f - def flatMap[A](f: JValue => List[A]) = self filter p flatMap f - def withFilter(q: JValue => Boolean): WithFilter = new WithFilter(self, x => p(x) && q(x)) - def foreach[U](f: JValue => U): Unit = self filter p foreach f - } - - /** - * Concatenate this `JValue` with another `JValue`. - * - * Example: - * - * {{{ - * > JArray(JInt(1) :: JInt(2) :: Nil) ++ JArray(JInt(3) :: Nil) - * res0: JArray(List(JInt(1), JInt(2), JInt(3))) - * }}} - */ - def ++(other: JValue) = { - def append(value1: JValue, value2: JValue): JValue = (value1, value2) match { - case (JNothing, x) => x - case (x, JNothing) => x - case (JArray(xs), JArray(ys)) => JArray(xs ::: ys) - case (JArray(xs), v: JValue) => JArray(xs ::: List(v)) - case (v: JValue, JArray(xs)) => JArray(v :: xs) - case (x, y) => JArray(x :: y :: Nil) - } - append(this, other) - } - - /** - * Return a `JValue` where all fields matching the given predicate are removed. - * - * Example: - * - * {{{ - * > JObject(JField("age", JInt(10))) removeField { - * case JField("age", _) => true - * case _ => false - * } - * }}} - */ - def removeField(p: JField => Boolean): JValue = this mapField { - case x if p(x) => JField(x.name, JNothing) - case x => x - } - - /** - * Return a JSON where all values matching the given predicate are removed. - * - * Example: - * - * {{{ - * > JArray(JInt(1) :: JInt(2) :: JNull :: Nil).remove(_ == JNull) - * res0: net.liftweb.json.JsonAST.JValue = JArray(List(JInt(1), JInt(2), JNothing)) - * }}} - */ - def remove(p: JValue => Boolean): JValue = this map { - case x if p(x) => JNothing - case x => x - } - - /** - * Extract a value into a concrete Scala instance from its `JValue` representation. - * - * Value can be: - * - a case class - * - a primitive (String, Boolean, Date, etc.)> - * - any type which has a configured [[TypeHints custom deserializer]] - * - a supported collection type of any of the above (List, Seq, Map[String, _], Set) - * - * Example: - * - * {{{ - * > case class Person(name: String) - * > JObject(JField("name", JString("joe")) :: Nil).extract[Person] - * res0: Person("joe") - * }}} - */ - def extract[A](implicit formats: Formats, mf: scala.reflect.Manifest[A]): A = - Extraction.extract(this)(formats, mf) - - /** - * Optionally extract a value into a concrete Scala instance from its `JValue` representation. - * - * This method will attempt to extract a concrete Scala instance of type `A`, but if it fails - * it will return a `[[scala.None]]` instead of throwing an exception as `[[extract]]` would. 
- * - * Value can be: - * - a case class - * - a primitive (String, Boolean, Date, etc.)> - * - any type which has a configured [[TypeHints custom deserializer]] - * - a supported collection type of any of the above (List, Seq, Map[String, _], Set) - * - * Example: - * - * {{{ - * scala> case class Person(name: String) - * defined class Person - * - * scala> implicit val formats = DefaultFormats - * formats: net.liftweb.json.DefaultFormats.type = net.liftweb.json.DefaultFormats$@39afbb7c - * - * scala> JObject(JField("name", JString("joe")) :: Nil).extractOpt[Person] - * res1: Option[Person] = Some(Person(joe)) - * }}} - */ - def extractOpt[A](implicit formats: Formats, mf: scala.reflect.Manifest[A]): Option[A] = - Extraction.extractOpt(this)(formats, mf) - - /** - * Attempt to extract a concrete Scala instance of type `A` from this `JValue` and, on failing to do so, return - * the default value instead. - * - * Value can be: - * - a case class - * - a primitive (String, Boolean, Date, etc.)> - * - any type which has a configured [[TypeHints custom deserializer]] - * - a supported collection type of any of the above (List, Seq, Map[String, _], Set) - * - * Example: - * - * {{{ - * > case class Person(name: String) - * > JNothing.extractOrElse(Person("joe")) - * res0: Person("joe") - * }}} - */ - def extractOrElse[A](default: => A)(implicit formats: Formats, mf: scala.reflect.Manifest[A]): A = - Extraction.extractOpt(this)(formats, mf).getOrElse(default) - - def toOpt: Option[JValue] = this match { - case JNothing => None - case json => Some(json) - } - } - - case object JNothing extends JValue { - type Values = None.type - def values = None - } - case object JNull extends JValue { - type Values = Null - def values = null - } - case class JString(s: String) extends JValue { - type Values = String - def values = s - } - case class JDouble(num: Double) extends JValue { - type Values = Double - def values = num - } - case class JInt(num: BigInt) extends JValue { - type Values = BigInt - def values = num - } - case class JBool(value: Boolean) extends JValue { - type Values = Boolean - def values = value - } - - case class JObject(obj: List[JField]) extends JValue { - type Values = Map[String, Any] - def values = { - obj.map { - case JField(name, value) => - (name, value.values): (String, Any) - }.toMap - } - - override def equals(that: Any): Boolean = that match { - case o: JObject => obj.toSet == o.obj.toSet - case _ => false - } - - override def hashCode = obj.toSet[JField].hashCode - } - case object JObject { - def apply(fs: JField*): JObject = JObject(fs.toList) - } - - case class JArray(arr: List[JValue]) extends JValue { - type Values = List[Any] - def values = arr.map(_.values) - override def apply(i: Int): JValue = arr(i) - } - - case class JField(name: String, value: JValue) - - private[json] def quote(s: String): String = { - val buf = new StringBuilder - appendEscapedString(buf, s, RenderSettings.compact) - buf.toString - } - - private def appendEscapedString(buf: Appendable, s: String, settings: RenderSettings) { - s.foreach { c => - val strReplacement = c match { - case '"' => "\\\"" - case '\\' => "\\\\" - case '\b' => "\\b" - case '\f' => "\\f" - case '\n' => "\\n" - case '\r' => "\\r" - case '\t' => "\\t" - // Set.contains will cause boxing of c to Character, try and avoid this - case c if ((c >= '\u0000' && c < '\u0020')) || (settings.escapeChars.nonEmpty && settings.escapeChars.contains(c)) => - "\\u%04x".format(c: Int) - - case _ => "" - } - - // Use Char version of append if 
we can, as it's cheaper. - if (strReplacement.isEmpty) { - buf.append(c) - } else { - buf.append(strReplacement) - } - } - } - - object RenderSettings { - /** - * Pretty-print JSON with 2-space indentation. - */ - val pretty = RenderSettings(2) - /** - * Compact print JSON on one line. - */ - val compact = RenderSettings(0) - - /** - * Ranges of chars that should be escaped if this JSON is to be evaluated - * directly as JavaScript (rather than by a valid JSON parser). - */ - val jsEscapeChars = - List(('\u00ad', '\u00ad'), - ('\u0600', '\u0604'), - ('\u070f', '\u070f'), - ('\u17b4', '\u17b5'), - ('\u200c', '\u200f'), - ('\u2028', '\u202f'), - ('\u2060', '\u206f'), - ('\ufeff', '\ufeff'), - ('\ufff0', '\uffff')) - .foldLeft(Set[Char]()) { - case (set, (start, end)) => - set ++ (start to end).toSet - } - - /** - * Pretty-print JSON with 2-space indentation and escape all JS-sensitive - * characters. - */ - val prettyJs = RenderSettings(2, jsEscapeChars) - /** - * Compact print JSON on one line and escape all JS-sensitive characters. - */ - val compactJs = RenderSettings(0, jsEscapeChars) - } - - /** - * Parent trait for double renderers, which decide how doubles contained in - * a JDouble are rendered to JSON string. - */ - sealed trait DoubleRenderer extends Function1[Double,String] { - def apply(double: Double): String - } - /** - * A `DoubleRenderer` that renders special values `NaN`, `-Infinity`, and - * `Infinity` as-is using `toString`. This is not valid JSON, meaning JSON - * libraries generally won't be able to parse it (including lift-json!), but - * JavaScript can eval it. Other double values are also rendered the same - * way. - * - * Usage is not recommended. - */ - case object RenderSpecialDoubleValuesAsIs extends DoubleRenderer { - def apply(double: Double): String = { - double.toString - } - } - /** - * A `DoubleRenderer` that renders special values `NaN`, `-Infinity`, and - * `Infinity` as `null`. Other doubles are rendered normally using - * `toString`. - */ - case object RenderSpecialDoubleValuesAsNull extends DoubleRenderer { - def apply(double: Double): String = { - if (double.isNaN || double.isInfinity) { - "null" - } else { - double.toString - } - } - } - /** - * A `DoubleRenderer` that throws an `IllegalArgumentException` when the - * special values `NaN`, `-Infinity`, and `Infinity` are encountered. Other - * doubles are rendered normally using `toString`. - */ - case object FailToRenderSpecialDoubleValues extends DoubleRenderer { - def apply(double: Double): String = { - if (double.isNaN || double.isInfinity) { - throw new IllegalArgumentException(s"Double value $double cannot be rendered to JSON with the current DoubleRenderer.") - } else { - double.toString - } - } - } - /** - * RenderSettings allows for customizing how JSON is rendered to a String. - * At the moment, you can customize the indentation (if 0, all the JSON is - * printed on one line), the characters that should be escaped (in addition - * to a base set that will always be escaped for valid JSON), and whether or - * not a space should be included after a field name. - * - * @param doubleRendering Before Lift 3.1.0, the three special double values - * NaN, Infinity, and -Infinity were serialized as-is. This is invalid - * JSON, but valid JavaScript. We now default special double values to - * serialize as null, but provide both the old behavior and a new behavior - * that throws an exception upon finding these values. See - * `[[DoubleRenderer]]` and its subclasses for more. 
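- *
- * As an illustrative sketch (using the `render` method and the double
- * renderers defined in this object), opting back into the strict behavior
- * looks like:
- *
- * {{{
- * render(JDouble(Double.NaN), RenderSettings(0, doubleRenderer = FailToRenderSpecialDoubleValues))
- * // throws IllegalArgumentException instead of emitting invalid JSON
- * }}}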
- */ - case class RenderSettings( - indent: Int, - escapeChars: Set[Char] = Set.empty, - spaceAfterFieldName: Boolean = false, - doubleRenderer: DoubleRenderer = RenderSpecialDoubleValuesAsNull - ) { - val lineBreaks_? = indent > 0 - } - - /** - * Render `value` using `[[RenderSettings.pretty]]`. - */ - def prettyRender(value: JValue): String = { - render(value, RenderSettings.pretty) - } - - /** - * Render `value` to the given `appendable` using `[[RenderSettings.pretty]]`. - */ - def prettyRender(value: JValue, appendable: Appendable): String = { - render(value, RenderSettings.pretty, appendable) - } - - /** Renders JSON directly to string in compact format. - * This is an optimized version of compact(render(value)) - * when the intermediate Document is not needed. - */ - def compactRender(value: JValue): String = { - render(value, RenderSettings.compact) - } - - /** - * Render `value` to the given `appendable` using `[[RenderSettings.compact]]`. - */ - def compactRender(value: JValue, appendable: Appendable): String = { - render(value, RenderSettings.compact, appendable) - } - - /** - * Render `value` to the given `appendable` (a `StringBuilder`, by default) - * using the given `settings`. The appendable's `toString` will be called and - * the result will be returned. - */ - def render(value: JValue, settings: RenderSettings, appendable: Appendable = new StringBuilder()): String = { - bufRender(value, appendable, settings).toString() - } - - case class RenderIntermediaryDocument(value: JValue) - def render(value: JValue) = RenderIntermediaryDocument(value) - - /** - * - * @param value the JSON to render - * @param buf the buffer to render the JSON into. may not be empty - */ - private def bufRender(value: JValue, buf: Appendable, settings: RenderSettings, indentLevel: Int = 0): Appendable = value match { - case null => buf.append("null") - case JBool(true) => buf.append("true") - case JBool(false) => buf.append("false") - case JDouble(n) => buf.append(settings.doubleRenderer(n)) - case JInt(n) => buf.append(n.toString) - case JNull => buf.append("null") - case JString(null) => buf.append("null") - case JString(s) => bufQuote(s, buf, settings) - case JArray(arr) => bufRenderArr(arr, buf, settings, indentLevel) - case JObject(obj) => bufRenderObj(obj, buf, settings, indentLevel) - case JNothing => sys.error("can't render 'nothing'") //TODO: this should not throw an exception - } - - private def bufRenderArr(values: List[JValue], buf: Appendable, settings: RenderSettings, indentLevel: Int): Appendable = { - var firstEntry = true - val currentIndent = indentLevel + settings.indent - - buf.append('[') //open array - - if (! values.isEmpty) { - if (settings.lineBreaks_?) { - buf.append('\n') - } - - values.foreach { elem => - if (elem != JNothing) { - if (firstEntry) { - firstEntry = false - } else { - buf.append(',') - - if (settings.lineBreaks_?) { - buf.append('\n') - } - } - - (0 until currentIndent).foreach(_ => buf.append(' ')) - bufRender(elem, buf, settings, currentIndent) - } - } - - if (settings.lineBreaks_?) { - buf.append('\n') - } - - (0 until indentLevel).foreach(_ => buf.append(' ')) - } - - buf.append(']') - buf - } - - private def bufRenderObj(fields: List[JField], buf: Appendable, settings: RenderSettings, indentLevel: Int): Appendable = { - var firstEntry = true - val currentIndent = indentLevel + settings.indent - - buf.append('{') //open bracket - - if (! fields.isEmpty) { - if (settings.lineBreaks_?) 
{ - buf.append('\n') - } - - fields.foreach { - case JField(name, value) if value != JNothing => - if (firstEntry) { - firstEntry = false - } else { - buf.append(',') - - if (settings.lineBreaks_?) { - buf.append('\n') - } - } - - (0 until currentIndent).foreach(_ => buf.append(' ')) - - bufQuote(name, buf, settings) - buf.append(':') - if (settings.spaceAfterFieldName) { - buf.append(' ') - } - bufRender(value, buf, settings, currentIndent) - - case _ => // omit fields with value of JNothing - } - - if (settings.lineBreaks_?) { - buf.append('\n') - } - - (0 until indentLevel).foreach(_ => buf.append(' ')) - } - - buf.append('}') //close bracket - buf - } - - private def bufQuote(s: String, buf: Appendable, settings: RenderSettings): Appendable = { - buf.append('"') //open quote - appendEscapedString(buf, s, settings) - buf.append('"') //close quote - buf - } - -} - -/** Basic implicit conversions from primitive types into JSON. - * Example:
-  * {{{
-  * import net.liftweb.json.Implicits._
-  * JObject(JField("name", "joe") :: Nil) == JObject(JField("name", JString("joe")) :: Nil)
-  * }}}
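-  *
-  * With these conversions in scope, plain literals can be used anywhere a
-  * `JValue` is expected; an illustrative sketch:
-  *
-  * {{{
-  * JArray(List[JValue](1, 2, 3)) == JArray(JInt(1) :: JInt(2) :: JInt(3) :: Nil)
-  * }}}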
- */ -object Implicits extends Implicits -trait Implicits { - implicit def int2jvalue(x: Int) = JInt(x) - implicit def long2jvalue(x: Long) = JInt(x) - implicit def bigint2jvalue(x: BigInt) = JInt(x) - implicit def double2jvalue(x: Double) = JDouble(x) - implicit def float2jvalue(x: Float) = JDouble(x) - implicit def bigdecimal2jvalue(x: BigDecimal) = JDouble(x.doubleValue) - implicit def boolean2jvalue(x: Boolean) = JBool(x) - implicit def string2jvalue(x: String) = JString(x) -} - -/** A DSL to produce valid JSON. - * Example:
-  * {{{
-  * import net.liftweb.json.JsonDSL._
-  * ("name", "joe") ~ ("age", 15) == JObject(JField("name",JString("joe")) :: JField("age",JInt(15)) :: Nil)
-  * }}}
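-  *
-  * Objects nest and combine the same way; an illustrative sketch that yields a
-  * `JObject` with a nested `JObject` under `"person"`:
-  *
-  * {{{
-  * ("person" -> ("name" -> "joe") ~ ("age" -> 15)) ~ ("active" -> true)
-  * }}}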
- */ -object JsonDSL extends JsonDSL -trait JsonDSL extends Implicits { - implicit def seq2jvalue[A <% JValue](s: Traversable[A]) = - JArray(s.toList.map { a => val v: JValue = a; v }) - - implicit def map2jvalue[A <% JValue](m: Map[String, A]) = - JObject(m.toList.map { case (k, v) => JField(k, v) }) - - implicit def option2jvalue[A <% JValue](opt: Option[A]): JValue = opt match { - case Some(x) => x - case None => JNothing - } - - implicit def symbol2jvalue(x: Symbol) = JString(x.name) - implicit def pair2jvalue[A <% JValue](t: (String, A)) = JObject(List(JField(t._1, t._2))) - implicit def list2jvalue(l: List[JField]) = JObject(l) - implicit def jobject2assoc(o: JObject) = new JsonListAssoc(o.obj) - implicit def pair2Assoc[A <% JValue](t: (String, A)) = new JsonAssoc(t) - - class JsonAssoc[A <% JValue](left: (String, A)) { - def ~[B <% JValue](right: (String, B)) = { - val l: JValue = left._2 - val r: JValue = right._2 - JObject(JField(left._1, l) :: JField(right._1, r) :: Nil) - } - - def ~(right: JObject) = { - val l: JValue = left._2 - JObject(JField(left._1, l) :: right.obj) - } - } - - class JsonListAssoc(left: List[JField]) { - def ~(right: (String, JValue)) = JObject(left ::: List(JField(right._1, right._2))) - def ~(right: JObject) = JObject(left ::: right.obj) - } -} diff --git a/core/json/src/main/scala/net/liftweb/json/JsonParser.scala b/core/json/src/main/scala/net/liftweb/json/JsonParser.scala deleted file mode 100644 index 8a13f44fb5..0000000000 --- a/core/json/src/main/scala/net/liftweb/json/JsonParser.scala +++ /dev/null @@ -1,582 +0,0 @@ -/* - * Copyright 2009-2010 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package json - -import scala.annotation.switch - -/** JSON parser. - */ -object JsonParser { - import java.io._ - - class ParseException(message: String, cause: Exception) extends Exception(message, cause) - - /** Parsed tokens from low level pull parser. - */ - sealed abstract class Token - case object OpenObj extends Token - case object CloseObj extends Token - case class FieldStart(name: String) extends Token - case object End extends Token - case class StringVal(value: String) extends Token - case class IntVal(value: BigInt) extends Token - case class DoubleVal(value: Double) extends Token - case class BoolVal(value: Boolean) extends Token - case object NullVal extends Token - case object OpenArr extends Token - case object CloseArr extends Token - - /** Return parsed JSON. - * @throws ParseException is thrown if parsing fails - */ - def parse(s: String): JValue = parse(new Buffer(new StringReader(s), false)) - - /** Return parsed JSON. - * @param closeAutomatically true (default) if the Reader is automatically closed on EOF - * @throws ParseException is thrown if parsing fails - */ - def parse(s: Reader, closeAutomatically: Boolean = true): JValue = - parse(new Buffer(s, closeAutomatically)) - - /** Return parsed JSON. 
- */ - def parseOpt(s: String): Option[JValue] = - try { parse(s).toOpt } catch { case e: Exception => None } - - /** Return parsed JSON. - * @param closeAutomatically true (default) if the Reader is automatically closed on EOF - */ - def parseOpt(s: Reader, closeAutomatically: Boolean = true): Option[JValue] = - try { parse(s, closeAutomatically).toOpt } catch { case e: Exception => None } - - /** Parse in pull parsing style. - * Use p.nextToken to parse tokens one by one from a string. - * @see net.liftweb.json.JsonParser.Token - */ - def parse[A](s: String, p: Parser => A): A = parse(new StringReader(s), p) - - /** Parse in pull parsing style. - * Use p.nextToken to parse tokens one by one from a stream. - * The Reader must be closed when parsing is stopped. - * @see net.liftweb.json.JsonParser.Token - */ - def parse[A](s: Reader, p: Parser => A): A = p(new Parser(new Buffer(s, false))) - - private[json] def parse(buf: Buffer): JValue = { - try { - astParser(new Parser(buf)) - } catch { - case e: ParseException => throw e - case e: Exception => throw new ParseException("parsing failed", e) - } finally { buf.release } - } - - // JSON hex unicode strings (\u12AF) are translated into characters through - // this array. Each number in the array corresponds to the 4-bit value that - // one number in the hex string will represent. These are combined when - // reading the unicode string. - private[this] final val HexChars: Array[Int] = { - val chars = new Array[Int](128) - var i = 0 - while (i < 10) { - chars(i + '0') = i - i += 1 - } - i = 0 - while (i < 16) { - chars(i + 'a') = 10 + i - chars(i + 'A') = 10 + i - i += 1 - } - chars - } - // The size of one hex character in bits. - private[this] final val hexCharSize = 4 // in bits - - private[json] def unquote(string: String): String = - unquote(new JsonParser.Buffer(new java.io.StringReader(string), false)) - - private[this] def unquote(buf: JsonParser.Buffer): String = { - def unquote0(buf: JsonParser.Buffer): String = { - val builder = buf.builder - builder.delete(0, builder.length()) - var c = '\\' - while (c != '"') { - if (c == '\\') { - buf.substring(intoBuilder = true) - (buf.next: @switch) match { - case '"' => builder.append('"') - case '\\' => builder.append('\\') - case '/' => builder.append('/') - case 'b' => builder.append('\b') - case 'f' => builder.append('\f') - case 'n' => builder.append('\n') - case 'r' => builder.append('\r') - case 't' => builder.append('\t') - case 'u' => - var byte = 0 - var finalChar = 0 - val chars = Array(buf.next, buf.next, buf.next, buf.next) - while (byte < chars.length) { - finalChar = (finalChar << hexCharSize) | HexChars(chars(byte).toInt) - byte += 1 - } - builder.appendCodePoint(finalChar.toChar) - case _ => - builder.append('\\') - } - buf.mark - } - c = buf.next - } - buf.substring(intoBuilder = true) - builder.toString - } - - buf.eofIsFailure = true - buf.mark - var c = buf.next - var forcedReturn: String = null - while (c != '"') { - (c: @switch) match { - case '\\' => - forcedReturn = unquote0(buf) - c = '"' - case _ => - c = buf.next - } - } - buf.eofIsFailure = false - - if (forcedReturn == null) { - new String(buf.substring()) - } else { - forcedReturn - } - } - - private[json] def parseDouble(s: String) = { - s.toDouble - } - - // Intermediate objects and arrays which can be grown mutably for performance. - // These are finalized into immutable JObject and JArray. 
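- // closeBlock below performs that finalization when the corresponding CloseObj
- // or CloseArr token is seen.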
- private[this] case class IntermediateJObject(fields: scala.collection.mutable.ListBuffer[JField]) - private[this] case class IntermediateJArray(bits: scala.collection.mutable.ListBuffer[JValue]) - - private val astParser = (p: Parser) => { - val vals = new ValStack(p) - var token: Token = null - var root: Option[JValue] = None - - // At the end of an object, if we're looking at an intermediate form of an - // object or array, gather up all their component parts and create the final - // object or array. - def closeBlock(v: Any) { - def toJValue(x: Any) = x match { - case json: JValue => json - case other: IntermediateJObject => JObject(other.fields.result) - case other: IntermediateJArray => JArray(other.bits.result) - case _ => p.fail("unexpected field " + x) - } - - vals.peekOption match { - case Some(JField(name: String, value)) => - vals.pop(classOf[JField]) - val obj = vals.peek(classOf[IntermediateJObject]) - obj.fields.append(JField(name, toJValue(v))) - case Some(o: IntermediateJObject) => - o.fields.append(vals.peek(classOf[JField])) - case Some(a: IntermediateJArray) => a.bits.append(toJValue(v)) - case Some(x) => p.fail("expected field, array or object but got " + x) - case None => root = Some(toJValue(v)) - } - } - - def newValue(v: JValue) { - if (!vals.isEmpty) - vals.peekAny match { - case JField(name, value) => - vals.pop(classOf[JField]) - val obj = vals.peek(classOf[IntermediateJObject]) - obj.fields += (JField(name,v)) - case a: IntermediateJArray => a.bits += v - case other => p.fail("expected field or array but got " + other) - } else { - vals.push(v) - root = Some(v) - } - } - - do { - token = p.nextToken - token match { - case OpenObj => vals.push(IntermediateJObject(scala.collection.mutable.ListBuffer())) - case FieldStart(name) => vals.push(JField(name, null)) - case StringVal(x) => newValue(JString(x)) - case IntVal(x) => newValue(JInt(x)) - case DoubleVal(x) => newValue(JDouble(x)) - case BoolVal(x) => newValue(JBool(x)) - case NullVal => newValue(JNull) - case CloseObj => closeBlock(vals.popAny) - case OpenArr => vals.push(IntermediateJArray(scala.collection.mutable.ListBuffer())) - case CloseArr => closeBlock(vals.popAny) - case End => - } - } while (token != End) - - root getOrElse JNothing - } - - private[this] final val EOF: Char = (-1).asInstanceOf[Char] - - private class ValStack(parser: Parser) { - import java.util.ArrayDeque - private[this] val stack = new ArrayDeque[Any](32) - - def popAny = stack.poll - def pop[A](expectedType: Class[A]) = convert(stack.poll, expectedType) - def push(v: Any) = stack.addFirst(v) - def peekAny = stack.peek - def peek[A](expectedType: Class[A]) = convert(stack.peek, expectedType) - def replace[A](newTop: Any) = { - stack.pop - stack.push(newTop) - } - - private def convert[A](x: Any, expectedType: Class[A]): A = { - if (x == null) parser.fail("expected object or array") - - try { - x.asInstanceOf[A] - } catch { - case cce: ClassCastException => - parser.fail(s"failure during class conversion. I got $x but needed a type of $expectedType", cce) - } - } - - def peekOption = if (stack.isEmpty) None else Some(stack.peek) - def isEmpty = stack.isEmpty - } - - class Parser(buf: Buffer) { - import java.util.ArrayDeque - - // Maintains our current nesting context in the form of BlockMode, which - // indicates if each context is an array or object. 
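- // The head of the deque is the innermost open block; nextToken consults it
- // (together with fieldNameMode) to decide whether a quoted string is a field
- // name or a plain string value.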
- private[this] val blocks = new ArrayDeque[BlockMode](32) - private[this] var fieldNameMode = true - - def fail(msg: String, cause: Exception = null) = throw new ParseException(msg + "\nNear: " + buf.near, cause) - - /** Parse next Token from stream. - */ - def nextToken: Token = { - def parseString: String = - try { - unquote(buf) - } catch { - case p: ParseException => throw p - case cause: Exception => fail("unexpected string end", cause) - } - - def parseValue(first: Char) = { - var wasInt = true - var doubleVal = false - val buf = this.buf - - // Back up and mark the buffer so that we can extract a substring after - // that contains the whole value. - buf.back - buf.mark - while (wasInt) { - val c = buf.next - (c: @switch) match { - case '.' | 'e' | 'E' => - doubleVal = true - case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' | '-' | '+' => - // continue - case _ => - wasInt = false - if (c != EOF) { - buf.back // don't include the last character - } - } - } - buf.forward // substring is exclusive to the last index - val value = buf.substring() - buf.back // back up so our current pointer is in the right place - (doubleVal: @switch) match { - case true => - DoubleVal(parseDouble(new String(value))) - case false => - IntVal(BigInt(new String(value))) - } - } - - while (true) { - (buf.next: @switch) match { - case '{' => - blocks.addFirst(OBJECT) - fieldNameMode = true - return OpenObj - case '}' => - blocks.poll - return CloseObj - case '"' => - if (fieldNameMode && blocks.peek == OBJECT) return FieldStart(parseString) - else { - fieldNameMode = true - return StringVal(parseString) - } - case 't' => - fieldNameMode = true - if (buf.next == 'r' && buf.next == 'u' && buf.next == 'e') { - return BoolVal(true) - } - fail("expected boolean") - case 'f' => - fieldNameMode = true - if (buf.next == 'a' && buf.next == 'l' && buf.next == 's' && buf.next == 'e') { - return BoolVal(false) - } - fail("expected boolean") - case 'n' => - fieldNameMode = true - if (buf.next == 'u' && buf.next == 'l' && buf.next == 'l') { - return NullVal - } - fail("expected null") - case ':' => - if (blocks.peek == ARRAY) fail("Colon in an invalid position") - fieldNameMode = false - case '[' => - blocks.addFirst(ARRAY) - return OpenArr - case ']' => - fieldNameMode = true - blocks.poll - return CloseArr - case c @ ('0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' | '-') => - fieldNameMode = true - return parseValue(c) - case ' ' | '\n' | ',' | '\r' | '\t' => - // ignore - case c => - c match { - case `EOF` => - buf.automaticClose - return End - case _ => - fail("unknown token " + c) - } - } - } - buf.automaticClose - End - } - - sealed abstract class BlockMode - case object ARRAY extends BlockMode - case object OBJECT extends BlockMode - } - - /* Buffer used to parse JSON. - * Buffer is divided to one or more segments (preallocated in segmentPool). - */ - private[json] final class Buffer(in: Reader, closeAutomatically: Boolean, segmentPool: SegmentPool = Segments) { - // Reused by the parser when appropriate, allows for a single builder to be - // used throughout the parse process, and to be written to directly from the - // substring method, so as to avoid allocating new builders when avoidable. 
- private[json] final val builder = new java.lang.StringBuilder(32) - - var offset = 0 // how far into the current segment we've read data - var curMark = -1 - var curMarkSegment = -1 - var eofIsFailure = false - private[this] var segments = scala.collection.mutable.ArrayBuffer(segmentPool.apply()) - private[this] var segment: Array[Char] = segments.head.seg - private[this] var cur = 0 // Pointer which points current parsing location - private[this] var curSegmentIdx = 0 // Pointer which points current segment - - // Mark the current point so that future substring calls will extract the - // value from this point to whatever point the buffer has advanced to. - def mark = { - if (curSegmentIdx > 0) { - segments(0) = segments.remove(curSegmentIdx) - curSegmentIdx = 0 - } - - curMark = cur - curMarkSegment = curSegmentIdx - } - def back = cur = cur-1 - def forward = cur = cur+1 - - // Read the next character; reads new data from the reader if necessary. - def next: Char = { - if (cur >= offset && read < 0) { - if (eofIsFailure) throw new ParseException("unexpected eof", null) else EOF - } else { - val c = segment(cur) - cur += 1 - c - } - } - - private[this] final val emptyArray = new Array[Char](0) - // Slices from the last marked point to the current index. If intoBuilder is - // true, appends it to the buffer's builder and returns an empty array. If - // false, slices it into a new array and returns that array. - final def substring(intoBuilder: Boolean = false) = { - if (curSegmentIdx == curMarkSegment) { - val substringLength = cur - curMark - 1 - if (intoBuilder) { - builder.append(segment, curMark, substringLength) - emptyArray - } else if (substringLength == 0) { - emptyArray - } else { - val array = new Array[Char](substringLength) - System.arraycopy(segment, curMark, array, 0, substringLength) - array - } - } else { // slower path for case when string is in two or more segments - val segmentCount = curSegmentIdx - curMarkSegment + 1 - val substringLength = segmentCount * segmentPool.segmentSize - curMark - (segmentPool.segmentSize - cur) - 1 - val chars = - if (intoBuilder) { - emptyArray - } else { - new Array[Char](substringLength) - } - - var i = curMarkSegment - var offset = 0 - while (i <= curSegmentIdx) { - val s = segments(i).seg - val start = if (i == curMarkSegment) curMark else 0 - val end = if (i == curSegmentIdx) cur else s.length+1 - val partLen = end-start-1 - if (intoBuilder) { - builder.append(s, start, partLen) - } else { - System.arraycopy(s, start, chars, offset, partLen) - } - offset += partLen - i = i+1 - } - - curMarkSegment = -1 - curMark = -1 - - chars - } - } - - def near = { - val start = (cur - 20) max 0 - val len = ((cur + 1) min segmentPool.segmentSize) - start - new String(segment, start, len) - } - - def release = segments.foreach(segmentPool.release) - - private[JsonParser] def automaticClose = if (closeAutomatically) in.close - - // Reads the next available block from the reader. Returns -1 if there's - // nothing more to read. 
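- // If a refill happens while a mark is active, additional segments are appended
- // (reusing already-allocated ones or pulling new ones from the pool) so the
- // marked region stays addressable; with no active mark the first segment is
- // simply refilled in place.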
- private[this] def read = { - if (offset >= segment.length) { - offset = 0 - val segmentToUse = - (curMarkSegment: @scala.annotation.switch) match { - case -1 => - curSegmentIdx = 0 - segments(0) - case _ => - curSegmentIdx += 1 - if (curSegmentIdx < segments.length) { - segments(curSegmentIdx) - } else { - val segment = segmentPool.apply() - segments.append(segment) - segment - } - } - - segment = segmentToUse.seg - } - - val length = in.read(segment, offset, segment.length-offset) - if (length != -1) { - cur = offset - offset += length - length - } else -1 - } - } - - private[json] trait SegmentPool { - def apply(): Segment - def release(segment: Segment): Unit - def segmentSize: Int - } - - private[json] class ArrayBlockingSegmentPool(override val segmentSize: Int) extends SegmentPool { - import java.util.concurrent.ArrayBlockingQueue - import java.util.concurrent.atomic.AtomicInteger - - private[this] val maxNumOfSegments = 10000 - private[this] var segmentCount = new AtomicInteger(0) - private[this] val segments = new ArrayBlockingQueue[Segment](maxNumOfSegments) - private[json] def clear = segments.clear - - def apply(): Segment = { - val s = acquire - // Give back a disposable segment if pool is exhausted. - if (s != null) s else DisposableSegment(new Array(segmentSize)) - } - - private[this] def acquire: Segment = { - val curCount = segmentCount.get - val createNew = - if (segments.size == 0 && curCount < maxNumOfSegments) - segmentCount.compareAndSet(curCount, curCount + 1) - else false - - if (createNew) RecycledSegment(new Array(segmentSize)) else segments.poll - } - - def release(s: Segment) = s match { - case _: RecycledSegment => segments.offer(s) - case _ => - } - } - - /* - * A pool of preallocated char arrays. - */ - private object Segments extends ArrayBlockingSegmentPool(1000) - - sealed trait Segment { - val seg: Array[Char] - } - case class RecycledSegment(seg: Array[Char]) extends Segment - case class DisposableSegment(seg: Array[Char]) extends Segment -} diff --git a/core/json/src/main/scala/net/liftweb/json/Merge.scala b/core/json/src/main/scala/net/liftweb/json/Merge.scala deleted file mode 100644 index f1fb6edc65..0000000000 --- a/core/json/src/main/scala/net/liftweb/json/Merge.scala +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright 2009-2010 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package json - -/** Use fundep encoding to improve return type of merge function - * (see: http://www.chuusai.com/2011/07/16/fundeps-in-scala/) - * - * JObject merge JObject = JObject - * JArray merge JArray = JArray - * _ merge _ = JValue - */ -private [json] trait MergeDep[A <: JValue, B <: JValue, R <: JValue] { - def apply(val1: A, val2: B): R -} - -private [json] trait LowPriorityMergeDep { - implicit def jjj[A <: JValue, B <: JValue] = new MergeDep[A, B, JValue] { - def apply(val1: A, val2: B): JValue = merge(val1, val2) - - private def merge(val1: JValue, val2: JValue): JValue = (val1, val2) match { - case (JObject(xs), JObject(ys)) => JObject(Merge.mergeFields(xs, ys)) - case (JArray(xs), JArray(ys)) => JArray(Merge.mergeVals(xs, ys)) - case (JNothing, x) => x - case (x, JNothing) => x - case (_, y) => y - } - } -} - -private [json] trait MergeDeps extends LowPriorityMergeDep { - implicit object ooo extends MergeDep[JObject, JObject, JObject] { - def apply(val1: JObject, val2: JObject): JObject = JObject(Merge.mergeFields(val1.obj, val2.obj)) - } - - implicit object aaa extends MergeDep[JArray, JArray, JArray] { - def apply(val1: JArray, val2: JArray): JArray = JArray(Merge.mergeVals(val1.arr, val2.arr)) - } -} - -/** Function to merge two JSONs. - */ -object Merge { - /** Return merged JSON. - *
-   * Example:
-   * {{{
-   * val m = ("name", "joe") ~ ("age", 10) merge ("name", "joe") ~ ("iq", 105)
-   * m: JObject(List((name,JString(joe)), (age,JInt(10)), (iq,JInt(105))))
-   * }}}
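-   *
-   * Arrays are merged by matching equal elements and appending the rest, and
-   * the fundep encoding keeps the precise return type (`JArray merge JArray`
-   * is a `JArray`); an illustrative sketch:
-   * {{{
-   * val a = JArray(JInt(1) :: JInt(2) :: Nil) merge JArray(JInt(2) :: JInt(3) :: Nil)
-   * a: JArray(List(JInt(1), JInt(2), JInt(3)))
-   * }}}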
- */ - def merge[A <: JValue, B <: JValue, R <: JValue] - (val1: A, val2: B)(implicit instance: MergeDep[A, B, R]): R = instance(val1, val2) - - private[json] def mergeFields(vs1: List[JField], vs2: List[JField]): List[JField] = { - def mergeRec(xleft: List[JField], yleft: List[JField]): List[JField] = xleft match { - case Nil => yleft - case JField(xn, xv) :: xs => yleft find (_.name == xn) match { - case Some(y @ JField(yn, yv)) => - JField(xn, merge(xv, yv)) :: mergeRec(xs, yleft filterNot (_ == y)) - case None => JField(xn, xv) :: mergeRec(xs, yleft) - } - } - - mergeRec(vs1, vs2) - } - - private[json] def mergeVals(vs1: List[JValue], vs2: List[JValue]): List[JValue] = { - def mergeRec(xleft: List[JValue], yleft: List[JValue]): List[JValue] = xleft match { - case Nil => yleft - case x :: xs => yleft find (_ == x) match { - case Some(y) => merge(x, y) :: mergeRec(xs, yleft filterNot (_ == y)) - case None => x :: mergeRec(xs, yleft) - } - } - - mergeRec(vs1, vs2) - } - - private[json] trait Mergeable extends MergeDeps { - implicit class MergeSyntax[A <: JValue](val json: A) { - /** Return merged JSON. - * @see net.liftweb.json.Merge#merge - */ - def merge[B <: JValue, R <: JValue](other: B)(implicit instance: MergeDep[A, B, R]): R = - Merge.merge(json, other)(instance) - } - } -} diff --git a/core/json/src/main/scala/net/liftweb/json/Meta.scala b/core/json/src/main/scala/net/liftweb/json/Meta.scala deleted file mode 100644 index 57f23aca67..0000000000 --- a/core/json/src/main/scala/net/liftweb/json/Meta.scala +++ /dev/null @@ -1,464 +0,0 @@ -/* - * Copyright 2009-2010 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package json - -// FIXME Needed to due to https://issues.scala-lang.org/browse/SI-6541, -// which causes existential types to be inferred for the generated -// unapply of a case class with a wildcard parameterized type. -// Ostensibly should be fixed in 2.12, which means we're a ways away -// from being able to remove this, though. -import scala.language.existentials - -import java.lang.reflect.{Constructor => JConstructor, Field, Type, ParameterizedType, GenericArrayType} -import java.util.Date -import java.sql.Timestamp - -case class TypeInfo(clazz: Class[_], parameterizedType: Option[ParameterizedType]) - -trait ParameterNameReader { - def lookupParameterNames(constructor: JConstructor[_]): Traversable[String] -} - -private[json] object Meta { - import com.thoughtworks.paranamer._ - - /** Intermediate metadata format for case classes. - * This ADT is constructed (and then memoized) from given case class using reflection. - * - * Example mapping. 
- * - * package xx - * case class Person(name: String, address: Address, children: List[Child]) - * case class Address(street: String, city: String) - * case class Child(name: String, age: BigInt) - * - * will produce following Mapping: - * - * Constructor("xx.Person", List( - * Arg("name", Value(classOf[String])), - * Arg("address", Constructor("xx.Address", List(Value("street"), Value("city")))), - * Arg("children", Col(classOf[List[_]], Constructor("xx.Child", List(Value("name"), Value("age"))))))) - */ - sealed abstract class Mapping - case class Arg(path: String, mapping: Mapping, optional: Boolean) extends Mapping - case class Value(targetType: Class[_]) extends Mapping - case class Cycle(targetType: Type) extends Mapping - case class Dict(mapping: Mapping) extends Mapping - case class Col(targetType: TypeInfo, mapping: Mapping) extends Mapping - case class HCol(targetType: TypeInfo, mappings: List[Mapping]) extends Mapping - case class Constructor(targetType: TypeInfo, choices: List[DeclaredConstructor]) extends Mapping { - def bestMatching(argNames: List[String]): Option[DeclaredConstructor] = { - val names = Set(argNames: _*) - def countOptionals(args: List[Arg]) = - args.foldLeft(0)((n, x) => if (x.optional) n+1 else n) - def score(args: List[Arg]) = - args.foldLeft(0)((s, arg) => if (names.contains(arg.path)) s+1 else -100) - - if (choices.isEmpty) None - else { - val best = choices.tail.foldLeft((choices.head, score(choices.head.args))) { (best, c) => - val newScore = score(c.args) - if (newScore == best._2) { - if (countOptionals(c.args) < countOptionals(best._1.args)) - (c, newScore) else best - } else if (newScore > best._2) (c, newScore) else best - } - Some(best._1) - } - } - } - - case class DeclaredConstructor(constructor: JConstructor[_], args: List[Arg]) - - // Current constructor parsing context. 
(containingClass + allArgs could be replaced with Constructor) - case class Context(argName: String, containingClass: Class[_], allArgs: List[(String, Type)]) - - private val mappings = new Memo[(Type, Seq[Class[_]]), Mapping] - private val unmangledNames = new Memo[String, String] - private val paranamer = new CachingParanamer(new BytecodeReadingParanamer) - - object ParanamerReader extends ParameterNameReader { - def lookupParameterNames(constructor: JConstructor[_]): Traversable[String] = - paranamer.lookupParameterNames(constructor) - } - - private[json] def mappingOf(clazz: Type, typeArgs: Seq[Class[_]] = Seq()) - (implicit formats: Formats): Mapping = { - import Reflection._ - - def constructors(t: Type, visited: Set[Type], context: Option[Context]): List[DeclaredConstructor] = { - Reflection.constructors(t, formats.parameterNameReader, context).map { case (c, args) => - DeclaredConstructor(c, args.map { case (name, t) => - toArg(unmangleName(name), t, visited, Context(name, c.getDeclaringClass, args)) }) - } - } - - def toArg(name: String, genericType: Type, visited: Set[Type], context: Context): Arg = { - def mkContainer(t: Type, k: Kind, valueTypeIndex: Int, factory: Mapping => Mapping) = { - if (typeConstructor_?(t)) { - val typeArgs = typeConstructors(t, k)(valueTypeIndex) - factory(fieldMapping(typeArgs)._1) - } else { - factory(fieldMapping(typeParameters(t, k, context)(valueTypeIndex))._1) - } - } - - def mkHeteroContainer(baseType: Type): Mapping = { - val heteroContainerTypes = baseType match { - case ptype: ParameterizedType => - ptype.getActualTypeArguments().map { - case c: Class[_] => - c - case p: ParameterizedType => - p.getRawType.asInstanceOf[Class[_]] - case x => - fail("do not know how to get type parameter from " + x) - } - } - - val parameters = heteroContainerTypes.map(fieldMapping(_)._1) - HCol(TypeInfo(rawClassOf(baseType), parameterizedTypeOpt(baseType)), parameters.toList) - } - - def parameterizedTypeOpt(t: Type) = t match { - case x: ParameterizedType => - val typeArgs = x.getActualTypeArguments.toList.zipWithIndex - .map { case (t, idx) => - if (t == classOf[java.lang.Object]) - ScalaSigReader.readConstructor(context.argName, context.containingClass, idx, context.allArgs.map(_._1)) - else t - } - Some(mkParameterizedType(x.getRawType, typeArgs)) - case _ => None - } - - def mkConstructor(t: Type) = - if (visited.contains(t)) (Cycle(t), false) - else (Constructor(TypeInfo(rawClassOf(t), parameterizedTypeOpt(t)), constructors(t, visited + t, Some(context))), false) - - def fieldMapping(t: Type): (Mapping, Boolean) = { - t match { - case pType: ParameterizedType => - val raw = rawClassOf(pType) - val info = TypeInfo(raw, Some(pType)) - - if (classOf[Set[_]].isAssignableFrom(raw)) - (mkContainer(t, `* -> *`, 0, Col.apply(info, _)), false) - else if (raw.isArray) - (mkContainer(t, `* -> *`, 0, Col.apply(info, _)), false) - else if (classOf[Option[_]].isAssignableFrom(raw)) - (mkContainer(t, `* -> *`, 0, identity _), true) - else if (classOf[Map[_, _]].isAssignableFrom(raw)) - (mkContainer(t, `(*,*) -> *`, 1, Dict.apply _), false) - else if (classOf[Seq[_]].isAssignableFrom(raw)) - (mkContainer(t, `* -> *`, 0, Col.apply(info, _)), false) - else if (tuples.find(_.isAssignableFrom(raw)).isDefined && formats.tuplesAsArrays) - (mkHeteroContainer(t), false) - else - mkConstructor(t) - case aType: GenericArrayType => - // Couldn't find better way to reconstruct proper array type: - val raw = 
java.lang.reflect.Array.newInstance(rawClassOf(aType.getGenericComponentType), 0: Int).getClass - (Col(TypeInfo(raw, None), fieldMapping(aType.getGenericComponentType)._1), false) - case raw: Class[_] => - if (primitive_?(raw)) (Value(raw), false) - else if (raw.isArray) - (mkContainer(t, `* -> *`, 0, Col.apply(TypeInfo(raw, None), _)), false) - else - mkConstructor(t) - case x => (Constructor(TypeInfo(classOf[AnyRef], None), Nil), false) - } - } - - val (mapping, optional) = fieldMapping(genericType) - Arg(name, mapping, optional) - } - - if (primitive_?(clazz)) { - Value(rawClassOf(clazz)) - } else { - mappings.memoize((clazz, typeArgs), { case (t, _) => - val c = rawClassOf(t) - val (pt, typeInfo) = - if (typeArgs.isEmpty) { - (t, TypeInfo(c, None)) - } else { - val t = mkParameterizedType(c, typeArgs) - (t, TypeInfo(c, Some(t))) - } - - Constructor(typeInfo, constructors(pt, Set(), None)) - }) - } - } - - private[json] def rawClassOf(t: Type): Class[_] = t match { - case c: Class[_] => c - case p: ParameterizedType => rawClassOf(p.getRawType) - case x => fail("Raw type of " + x + " not known") - } - - private[json] def mkParameterizedType(owner: Type, typeArgs: Seq[Type]) = - new ParameterizedType { - def getActualTypeArguments = typeArgs.toArray - def getOwnerType = owner - def getRawType = owner - override def toString = getOwnerType + "[" + getActualTypeArguments.mkString(",") + "]" - } - - private[json] def unmangleName(name: String) = - unmangledNames.memoize(name, scala.reflect.NameTransformer.decode) - - private[json] def fail(msg: String, cause: Exception = null) = throw new MappingException(msg, cause) - - private class Memo[A, R] { - private val cache = new java.util.concurrent.atomic.AtomicReference(Map[A, R]()) - - def memoize(x: A, f: A => R): R = { - val c = cache.get - def addToCache() = { - val ret = f(x) - cache.set(c + (x -> ret)) - ret - } - c.getOrElse(x, addToCache) - } - } - - object Reflection { - import java.lang.reflect._ - - sealed abstract class Kind - case object `* -> *` extends Kind - case object `(*,*) -> *` extends Kind - - val primitives = Map[Class[_], Unit]() ++ (List[Class[_]]( - classOf[String], classOf[Int], classOf[Long], classOf[Double], - classOf[Float], classOf[Byte], classOf[BigInt], classOf[Boolean], - classOf[Short], classOf[java.lang.Integer], classOf[java.lang.Long], - classOf[java.lang.Double], classOf[java.lang.Float], - classOf[java.lang.Byte], classOf[java.lang.Boolean], classOf[Number], - classOf[java.lang.Short], classOf[Date], classOf[Timestamp], classOf[Symbol], classOf[JValue], - classOf[JObject], classOf[JArray]).map((_, ()))) - - val tuples = Seq( - classOf[Tuple1[_]], classOf[Tuple2[_,_]], classOf[Tuple3[_,_,_]], classOf[Tuple4[_,_,_,_]], - classOf[Tuple5[_,_,_,_,_]], classOf[Tuple6[_,_,_,_,_,_]], - classOf[Tuple7[_,_,_,_,_,_,_]], classOf[Tuple8[_,_,_,_,_,_,_,_]], - classOf[Tuple9[_,_,_,_,_,_,_,_,_]], classOf[Tuple10[_,_,_,_,_,_,_,_,_,_]], - classOf[Tuple11[_,_,_,_,_,_,_,_,_,_,_]], classOf[Tuple12[_,_,_,_,_,_,_,_,_,_,_,_]], - classOf[Tuple13[_,_,_,_,_,_,_,_,_,_,_,_,_]], classOf[Tuple14[_,_,_,_,_,_,_,_,_,_,_,_,_,_]], - classOf[Tuple15[_,_,_,_,_,_,_,_,_,_,_,_,_,_,_]], classOf[Tuple16[_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_]], - classOf[Tuple17[_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_]], - classOf[Tuple18[_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_]], - classOf[Tuple19[_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_]], - classOf[Tuple20[_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_]], - classOf[Tuple21[_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_]], - 
classOf[Tuple22[_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_]] - ) - - val tupleConstructors: Map[Int, JConstructor[_]] = tuples.zipWithIndex.map({ - case (tupleClass, index) => - index -> tupleClass.getConstructors()(0) - }).toMap - - private val primaryConstructorArgumentsMemo = new Memo[Class[_], List[(String, Type)]] - private val declaredFieldsMemo = new Memo[Class[_], Map[String,Field]] - - def constructors(t: Type, names: ParameterNameReader, context: Option[Context]): List[(JConstructor[_], List[(String, Type)])] = - rawClassOf(t).getDeclaredConstructors.map(c => (c, constructorArgs(t, c, names, context))).toList - - def constructorArgs(t: Type, constructor: JConstructor[_], - nameReader: ParameterNameReader, context: Option[Context]): List[(String, Type)] = { - def argsInfo(c: JConstructor[_], typeArgs: Map[TypeVariable[_], Type]) = { - val Name = """^((?:[^$]|[$][^0-9]+)+)([$][0-9]+)?$""".r - def clean(name: String) = name match { - case Name(text, junk) => text - } - try { - val names = nameReader.lookupParameterNames(c).map(clean) - val types = c.getGenericParameterTypes.toList.zipWithIndex map { - case (v: TypeVariable[_], idx) => - val arg = typeArgs.getOrElse(v, v) - if (arg == classOf[java.lang.Object]) - context.map(ctx => ScalaSigReader.readConstructor(ctx.argName, ctx.containingClass, idx, ctx.allArgs.map(_._1))).getOrElse(arg) - else arg - case (x, _) => x - } - names.toList.zip(types) - } catch { - case e: ParameterNamesNotFoundException => Nil - } - } - - t match { - case c: Class[_] => argsInfo(constructor, Map()) - case p: ParameterizedType => - val vars = - Map() ++ rawClassOf(p).getTypeParameters.toList.map(_.asInstanceOf[TypeVariable[_]]).zip(p.getActualTypeArguments.toList) // FIXME this cast should not be needed - argsInfo(constructor, vars) - case x => fail("Do not know how query constructor info for " + x) - } - } - - def primaryConstructorArgs(c: Class[_])(implicit formats: Formats) = { - def findMostComprehensive(c: Class[_]): List[(String, Type)] = { - val ord = Ordering[Int].on[JConstructor[_]](_.getParameterTypes.size) - val primary = c.getDeclaredConstructors.max(ord) - constructorArgs(c, primary, formats.parameterNameReader, None) - } - - primaryConstructorArgumentsMemo.memoize(c, findMostComprehensive(_)) - } - - def typeParameters(t: Type, k: Kind, context: Context): List[Class[_]] = { - def term(i: Int) = t match { - case ptype: ParameterizedType => ptype.getActualTypeArguments()(i) match { - case c: Class[_] => - if (c == classOf[java.lang.Object]) - ScalaSigReader.readConstructor(context.argName, context.containingClass, i, context.allArgs.map(_._1)) - else c - case p: ParameterizedType => p.getRawType.asInstanceOf[Class[_]] - case x => fail("do not know how to get type parameter from " + x) - } - case clazz: Class[_] if (clazz.isArray) => i match { - case 0 => clazz.getComponentType.asInstanceOf[Class[_]] - case _ => fail("Arrays only have one type parameter") - } - case clazz: GenericArrayType => i match { - case 0 => clazz.getGenericComponentType.asInstanceOf[Class[_]] - case _ => fail("Arrays only have one type parameter") - } - case _ => fail("Unsupported Type: " + t + " (" + t.getClass + ")") - } - - k match { - case `* -> *` => List(term(0)) - case `(*,*) -> *` => List(term(0), term(1)) - } - } - - def typeConstructors(t: Type, k: Kind): List[Type] = { - def types(i: Int): Type = { - val ptype = t.asInstanceOf[ParameterizedType] - ptype.getActualTypeArguments()(i) match { - case p: ParameterizedType => p - case c: Class[_] => c - } - } - - k 
match { - case `* -> *` => List(types(0)) - case `(*,*) -> *` => List(types(0), types(1)) - } - } - - def primitive_?(t: Type) = t match { - case clazz: Class[_] => primitives contains clazz - case _ => false - } - - def tuple_?(t: Type) = t match { - case clazz: Class[_] => - tuples contains clazz - case _ => - false - } - - def static_?(f: Field) = Modifier.isStatic(f.getModifiers) - def typeConstructor_?(t: Type) = t match { - case p: ParameterizedType => - p.getActualTypeArguments.exists(_.isInstanceOf[ParameterizedType]) - case _ => false - } - - def array_?(x: Any) = x != null && classOf[scala.Array[_]].isAssignableFrom(x.asInstanceOf[AnyRef].getClass) - - def fields(clazz: Class[_]): List[(String, TypeInfo)] = { - val fs = clazz.getDeclaredFields.toList - .filterNot(f => Modifier.isStatic(f.getModifiers) || Modifier.isTransient(f.getModifiers)) - .map(f => (f.getName, TypeInfo(f.getType, f.getGenericType match { - case p: ParameterizedType => Some(p) - case _ => None - }))) - fs ::: (if (clazz.getSuperclass == null) Nil else fields(clazz.getSuperclass)) - } - - def setField(a: AnyRef, name: String, value: Any) = { - val f = findField(a.getClass, name) - f.setAccessible(true) - f.set(a, value) - } - - def getField(a: AnyRef, name: String) = { - val f = findField(a.getClass, name) - f.setAccessible(true) - f.get(a) - } - - def findField(clazz: Class[_], name: String): Field = try { - clazz.getDeclaredField(name) - } catch { - case e: NoSuchFieldException => - if (clazz.getSuperclass == null) throw e - else findField(clazz.getSuperclass, name) - } - - def getDeclaredFields(clazz: Class[_]) : Map[String,Field] = { - def extractDeclaredFields = clazz.getDeclaredFields.map(field => (field.getName, field)).toMap - declaredFieldsMemo.memoize(clazz, _ => extractDeclaredFields) - } - - def mkJavaArray(x: Any, componentType: Class[_]) = { - val arr = x.asInstanceOf[scala.Array[_]] - val a = java.lang.reflect.Array.newInstance(componentType, arr.size) - var i = 0 - while (i < arr.size) { - java.lang.reflect.Array.set(a, i, arr(i)) - i += 1 - } - a - } - - def primitive2jvalue(a: Any)(implicit formats: Formats) = a match { - case x: String => JString(x) - case x: Int => JInt(x) - case x: Long => JInt(x) - case x: Double => JDouble(x) - case x: Float => JDouble(x) - case x: Byte => JInt(BigInt(x)) - case x: BigInt => JInt(x) - case x: Boolean => JBool(x) - case x: Short => JInt(BigInt(x)) - case x: java.lang.Integer => JInt(BigInt(x.asInstanceOf[Int])) - case x: java.lang.Long => JInt(BigInt(x.asInstanceOf[Long])) - case x: java.lang.Double => JDouble(x.asInstanceOf[Double]) - case x: java.lang.Float => JDouble(x.asInstanceOf[Float]) - case x: java.lang.Byte => JInt(BigInt(x.asInstanceOf[Byte])) - case x: java.lang.Boolean => JBool(x.asInstanceOf[Boolean]) - case x: java.lang.Short => JInt(BigInt(x.asInstanceOf[Short])) - case x: Date => JString(formats.dateFormat.format(x)) - case x: Symbol => JString(x.name) - case _ => sys.error("not a primitive " + a.asInstanceOf[AnyRef].getClass) - } - } -} - -case class MappingException(msg: String, cause: Exception) extends Exception(msg, cause) { - def this(msg: String) = this(msg, null) -} diff --git a/core/json/src/main/scala/net/liftweb/json/ScalaSig.scala b/core/json/src/main/scala/net/liftweb/json/ScalaSig.scala deleted file mode 100644 index 4b640e17e8..0000000000 --- a/core/json/src/main/scala/net/liftweb/json/ScalaSig.scala +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Copyright 2009-2010 WorldWide Conferencing, LLC - * - * Licensed under the Apache 
License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package json - -import scala.tools.scalap.scalax.rules.scalasig._ - -import java.util.concurrent.ConcurrentHashMap - -import scala.collection.JavaConverters._ -import scala.collection.concurrent.{Map=>ConcurrentScalaMap} - -private[json] object ScalaSigReader { - // Originally, we used `method.children` and expected all children of a - // MethodSymbol to be parameters. In Scala 2.13, a change was made that never - // returns parameters in `children`. To get around this, we look up parameter - // symbols separately here. - // - // This works across Scala versions, so we don't scope it to 2.13 - // specifically. See Scala bug 11747, currently at - // https://github.com/scala/bug/issues/11747 , for more. - private def paramSymbolsFor(method: MethodSymbol): Seq[Symbol] = { - method - .applyScalaSigRule(ScalaSigParsers.symbols) - .filter(symbol => symbol.parent == Some(method) && symbol.isParam) - } - - - def readConstructor(argName: String, clazz: Class[_], typeArgIndex: Int, argNames: List[String]): Class[_] = { - val cl = findClass(clazz) - val cstr = findConstructor(cl, argNames).getOrElse(Meta.fail("Can't find constructor for " + clazz)) - findArgType(cstr, argNames.indexOf(argName), typeArgIndex) - } - - def readField(name: String, clazz: Class[_], typeArgIndex: Int): Class[_] = { - def read(current: Class[_]): MethodSymbol = { - if (current == null) - Meta.fail("Can't find field " + name + " from " + clazz) - else - findField(findClass(current), name).getOrElse(read(current.getSuperclass)) - } - findArgTypeForField(read(clazz), typeArgIndex) - } - - private def findClass(clazz: Class[_]): ClassSymbol = { - val sig = findScalaSig(clazz).getOrElse(Meta.fail("Can't find ScalaSig for " + clazz)) - findClass(sig, clazz).getOrElse(Meta.fail("Can't find " + clazz + " from parsed ScalaSig")) - } - - private def findClass(sig: ScalaSig, clazz: Class[_]): Option[ClassSymbol] = { - sig.symbols.collect { case c: ClassSymbol if !c.isModule => c }.find(_.name == clazz.getSimpleName).orElse { - sig.topLevelClasses.find(_.symbolInfo.name == clazz.getSimpleName).orElse { - sig.topLevelObjects.map { obj => - val t = obj.infoType.asInstanceOf[TypeRefType] - t.symbol.children collect { case c: ClassSymbol => c } find(_.symbolInfo.name == clazz.getSimpleName) - }.head - } - } - } - - private def findConstructor(c: ClassSymbol, argNames: List[String]): Option[MethodSymbol] = { - val ms = c.children collect { case m: MethodSymbol if m.name == "" => m } - ms.find(m => paramSymbolsFor(m).map(_.name) == argNames) - } - - private def findField(c: ClassSymbol, name: String): Option[MethodSymbol] = - (c.children collect { case m: MethodSymbol if m.name == name => m }).headOption - - private def findArgType(s: MethodSymbol, argIdx: Int, typeArgIndex: Int): Class[_] = { - def findPrimitive(t: Type): Symbol = t match { - case TypeRefType(ThisType(_), symbol, _) if isPrimitive(symbol) => symbol - case TypeRefType(_, _, TypeRefType(ThisType(_), symbol, _) :: xs) => 
symbol - case TypeRefType(_, symbol, Nil) => symbol - case TypeRefType(_, _, args) if typeArgIndex >= args.length => findPrimitive(args(0)) - case TypeRefType(_, _, args) => - args(typeArgIndex) match { - case ref @ TypeRefType(_, _, _) => findPrimitive(ref) - case x => Meta.fail("Unexpected type info " + x) - } - case x => Meta.fail("Unexpected type info " + x) - } - toClass(findPrimitive(paramSymbolsFor(s)(argIdx).asInstanceOf[SymbolInfoSymbol].infoType)) - } - - private def findArgTypeForField(s: MethodSymbol, typeArgIdx: Int): Class[_] = { - val t = s.infoType match { - case NullaryMethodType(TypeRefType(_, _, args)) => args(typeArgIdx) - } - - @scala.annotation.tailrec - def findPrimitive(t: Type): Symbol = t match { - case TypeRefType(ThisType(_), symbol, _) => symbol - case ref @ TypeRefType(_, _, _) => findPrimitive(ref) - case x => Meta.fail("Unexpected type info " + x) - } - toClass(findPrimitive(t)) - } - - private def toClass(s: Symbol) = s.path match { - case "scala.Short" => classOf[Short] - case "scala.Int" => classOf[Int] - case "scala.Long" => classOf[Long] - case "scala.Boolean" => classOf[Boolean] - case "scala.Float" => classOf[Float] - case "scala.Double" => classOf[Double] - case _ => classOf[AnyRef] - } - - private def isPrimitive(s: Symbol) = toClass(s) != classOf[AnyRef] - - private def findScalaSig(clazz: Class[_]): Option[ScalaSig] = - ScalaSigParser.parse(clazz).orElse(findScalaSig(clazz.getDeclaringClass)) -} diff --git a/core/json/src/main/scala/net/liftweb/json/Serialization.scala b/core/json/src/main/scala/net/liftweb/json/Serialization.scala deleted file mode 100644 index 15e26189c6..0000000000 --- a/core/json/src/main/scala/net/liftweb/json/Serialization.scala +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright 2009-2010 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package json - -import scala.reflect.Manifest - -/** Functions to serialize and deserialize a case class. - * Custom serializer can be inserted if a class is not a case class. - *

- * Example:

- * val hints = new ShortTypeHints( ... )
- * implicit val formats = Serialization.formats(hints)
- * 
- * - * @see net.liftweb.json.TypeHints - */ -object Serialization { - import java.io.{Reader, StringWriter, Writer} - - /** Serialize to String. - */ - def write[A <: Any](a: A)(implicit formats: Formats): String = - compactRender(Extraction.decompose(a)(formats)) - - /** Serialize to Writer. - */ - def write[A <: Any, W <: Writer](a: A, out: W)(implicit formats: Formats): W = { - JsonAST.compactRender(Extraction.decompose(a)(formats), out) - out - } - - /** Serialize to String (pretty format). - */ - def writePretty[A <: Any](a: A)(implicit formats: Formats): String = - (writePretty(a, new StringWriter)(formats)).toString - - /** Serialize to Writer (pretty format). - */ - def writePretty[A <: Any, W <: Writer](a: A, out: W)(implicit formats: Formats): W = { - JsonAST.prettyRender(Extraction.decompose(a)(formats), out) - out - } - - /** Deserialize from a String. - */ - def read[A](json: String)(implicit formats: Formats, mf: Manifest[A]): A = - parse(json).extract(formats, mf) - - /** Deserialize from a Reader. - */ - def read[A](in: Reader)(implicit formats: Formats, mf: Manifest[A]): A = - JsonParser.parse(in).extract(formats, mf) - - /** Create Serialization formats with given type hints. - *

- * Example:

-   * val hints = new ShortTypeHints( ... )
-   * implicit val formats = Serialization.formats(hints)
-   * 
- */ - def formats(hints: TypeHints) = new Formats { - val dateFormat = DefaultFormats.lossless.dateFormat - override val typeHints = hints - } -} diff --git a/core/json/src/main/scala/net/liftweb/json/Xml.scala b/core/json/src/main/scala/net/liftweb/json/Xml.scala deleted file mode 100644 index c7f58d1a0e..0000000000 --- a/core/json/src/main/scala/net/liftweb/json/Xml.scala +++ /dev/null @@ -1,195 +0,0 @@ -/* - * Copyright 2009-2010 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package json - -/** Functions to convert between JSON and XML. - */ -object Xml { - import scala.xml._ - - /** - * Converts the given XML to JSON. - * - * The following rules are used in the conversion: - * - * - an XML leaf element is converted to a JSON string - * - an XML parent element is converted to a JSON object and its children to JSON fields - * - XML elements with the same name at the same level are converted to a JSON array - * - XML attributes are converted to JSON fields - * - * For example: - * {{{ - * scala> val xml = - * - * - * 1 - * Harry - * - * - * 2 - * David - * - * - * - * scala> val json = toJson(xml) - * scala> prettyRender(json) - * - * { - * "users":{ - * "user":[{ - * "id":"1", - * "name":"Harry" - * },{ - * "id":"2", - * "name":"David" - * }] - * } - * } - * }}} - * - * Now, the above example has two problems. First, the id is converted to a - * `String` while we might want it as an `Int`. This is easy to fix by mapping - * `JString(s)` to `JInt(s.toInt)`. The second problem is more subtle: the - * conversion function decides to use a JSON array because there's more than - * one `user` element in the XML. Therefore a structurally equivalent XML - * document which happens to have just one `user` element will generate a JSON - * document without a JSON array. This is rarely a desired outcome. Both of - * these problems can be fixed by the following `map` invocation: - * - * {{{ - * json mapField { - * case JField("id", JString(s)) => JField("id", JInt(s.toInt)) - * case JField("user", x: JObject) => JField("user", JArray(x :: Nil)) - * case x => x - * } - * }}} - */ - def toJson(xml: NodeSeq): JValue = { - def empty_?(node: Node) = node.child.isEmpty - - /* Checks if given node is leaf element. For instance these are considered leafs: - * bar, { doSomething() }, etc. 
- */ - def leaf_?(node: Node) = { - def descendant(n: Node): List[Node] = n match { - case g: Group => g.nodes.toList.flatMap(x => x :: descendant(x)) - case _ => n.child.toList.flatMap { x => x :: descendant(x) } - } - - !descendant(node).find(_.isInstanceOf[Elem]).isDefined - } - - def array_?(nodeNames: Seq[String]) = nodeNames.size != 1 && nodeNames.toList.distinct.size == 1 - def directChildren(n: Node): NodeSeq = n.child.filter(c => c.isInstanceOf[Elem]) - def nameOf(n: Node) = (if (n.prefix ne null) n.prefix + ":" else "") + n.label - def buildAttrs(n: Node) = n.attributes.map((a: MetaData) => (a.key, XValue(a.value.text))).toList - - sealed trait XElem - case class XValue(value: String) extends XElem - case class XLeaf(value: (String, XElem), attrs: List[(String, XValue)]) extends XElem - case class XNode(fields: List[(String, XElem)]) extends XElem - case class XArray(elems: List[XElem]) extends XElem - - def toJValue(x: XElem): JValue = x match { - case XValue(s) => JString(s) - case XLeaf((name, value), attrs) => (value, attrs) match { - case (_, Nil) => toJValue(value) - case (XValue(""), xs) => JObject(mkFields(xs)) - case (_, xs) => JObject(JField(name, toJValue(value)) :: mkFields(xs)) - } - case XNode(xs) => JObject(mkFields(xs)) - case XArray(elems) => JArray(elems.map(toJValue)) - } - - def mkFields(xs: List[(String, XElem)]) = - xs.flatMap { case (name, value) => (value, toJValue(value)) match { - // This special case is needed to flatten nested objects which resulted from - // XML attributes. Flattening keeps transformation more predicatable. - // x -> {"a":{"foo":{"foo":"x","id":"1"}}} vs - // x -> {"a":{"foo":"x","id":"1"}} - case (XLeaf(v, x :: xs), o: JObject) => o.obj - case (_, json) => JField(name, json) :: Nil }} - - def buildNodes(xml: NodeSeq): List[XElem] = xml match { - case n: Node => - if (empty_?(n)) XLeaf((nameOf(n), XValue("")), buildAttrs(n)) :: Nil - else if (leaf_?(n)) XLeaf((nameOf(n), XValue(n.text)), buildAttrs(n)) :: Nil - else { - val children = directChildren(n) - XNode(buildAttrs(n) ::: children.map(nameOf).toList.zip(buildNodes(children))) :: Nil - } - case nodes: NodeSeq => - val allLabels = nodes.map(_.label) - if (array_?(allLabels)) { - val arr = XArray(nodes.toList.flatMap { n => - if (leaf_?(n) && n.attributes.length == 0) XValue(n.text) :: Nil - else buildNodes(n) - }) - XLeaf((allLabels(0), arr), Nil) :: Nil - } else nodes.toList.flatMap(buildNodes) - } - - buildNodes(xml) match { - case List(x @ XLeaf(_, _ :: _)) => toJValue(x) - case List(x) => JObject(JField(nameOf(xml.head), toJValue(x)) :: Nil) - case x => JArray(x.map(toJValue)) - } - } - - /** - * Converts the given JSON to XML. - * - * The following rules are used in conversion: - * - * - JSON primitives are converted to XML leaf elements - * - JSON objects are converted to XML elements - * - JSON arrays are recursively converted to XML elements - * - * Use the `map` function to preprocess JSON before conversion to adjust - * the end result. 
For instance a common conversion is to encode arrays as - * comma separated Strings since XML does not have an array type: - * - * {{{ - * toXml(json map { - * case JField("nums",JArray(ns)) => JField("nums",JString(ns.map(_.values).mkString(","))) - * case x => x - * }) - * }}} - */ - def toXml(json: JValue): NodeSeq = { - def toXml(name: String, json: JValue): NodeSeq = json match { - case JObject(fields) => new XmlNode(name, fields flatMap { case JField(n, v) => toXml(n, v) }) - case JArray(xs) => xs flatMap { v => toXml(name, v) } - case JInt(x) => new XmlElem(name, x.toString) - case JDouble(x) => new XmlElem(name, x.toString) - case JString(x) => new XmlElem(name, x) - case JBool(x) => new XmlElem(name, x.toString) - case JNull => new XmlElem(name, "null") - case JNothing => Text("") - } - - json match { - case JObject(fields) => fields flatMap { case JField(name, value) => toXml(name, value) } - case x => toXml("root", x) - } - } - - private[json] class XmlNode(name: String, children: Seq[Node]) extends Elem(null, name, scala.xml.Null, TopScope, true, children :_*) - - private[json] class XmlElem(name: String, value: String) extends Elem(null, name, scala.xml.Null, TopScope, true, Text(value)) -} diff --git a/core/json/src/main/scala/net/liftweb/json/package.scala b/core/json/src/main/scala/net/liftweb/json/package.scala deleted file mode 100644 index cbb905d072..0000000000 --- a/core/json/src/main/scala/net/liftweb/json/package.scala +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright 2009-2010 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb - -package object json { - import java.io.Reader - - type JValue = JsonAST.JValue - val JNothing = JsonAST.JNothing - val JNull = JsonAST.JNull - type JString = JsonAST.JString - val JString = JsonAST.JString - type JDouble = JsonAST.JDouble - val JDouble = JsonAST.JDouble - type JInt = JsonAST.JInt - val JInt = JsonAST.JInt - type JBool = JsonAST.JBool - val JBool = JsonAST.JBool - type JField = JsonAST.JField - val JField = JsonAST.JField - type JObject = JsonAST.JObject - val JObject = JsonAST.JObject - type JArray = JsonAST.JArray - val JArray = JsonAST.JArray - - def parse(s: String): JValue = JsonParser.parse(s) - def parseOpt(s: String): Option[JValue] = JsonParser.parseOpt(s) - - def prettyRender(value: JValue): String = JsonAST.prettyRender(value) - def compactRender(value: JValue): String = JsonAST.compactRender(value) -} diff --git a/core/json/src/test/resources/diff-example-expected-additions.json b/core/json/src/test/resources/diff-example-expected-additions.json deleted file mode 100644 index 8f97905884..0000000000 --- a/core/json/src/test/resources/diff-example-expected-additions.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "content": { - "content": [ - { - "style": { - "name": "Arial" - } - }, - { - "style": { - "name": "Arial" - } - }, - { - "style": { - "name": "Arial" - } - }, - { - "style": { - "name": "Arial" - } - }, - { - "content": " button up above.", - "style": { - "size": 20, - "color": "FFFFFF", - "name": "Arial", - "bold": 2, - "italic": 2, - "underline": 2 - } - } - ] - } -} \ No newline at end of file diff --git a/core/json/src/test/resources/diff-example-expected-changes.json b/core/json/src/test/resources/diff-example-expected-changes.json deleted file mode 100644 index d68a4242d6..0000000000 --- a/core/json/src/test/resources/diff-example-expected-changes.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "content": { - "content": [ - { - "style": { - "underline": 2 - } - }, - { - "style": { - "underline": 2, - "color": "4DC2FF" - }, - "content": "foo" - }, - { - "style": { - "underline": 2 - } - }, - { - "style": { - "underline": 2, - "color": "4DC2FF" - } - } - ] - }, - "version": 4.0 -} diff --git a/core/json/src/test/resources/diff-example-expected-deletions.json b/core/json/src/test/resources/diff-example-expected-deletions.json deleted file mode 100644 index f077a8203e..0000000000 --- a/core/json/src/test/resources/diff-example-expected-deletions.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "content": { - "content": [ - { - "style": { - "fontFamily": "Arial" - } - }, - { - "style": { - "fontFamily": "Arial" - } - }, - { - "style": { - "fontFamily": "Arial" - } - }, - { - "style": { - "fontFamily": "Arial" - } - } - ] - } -} \ No newline at end of file diff --git a/core/json/src/test/resources/diff-example-json1.json b/core/json/src/test/resources/diff-example-json1.json deleted file mode 100644 index f0f04bedc4..0000000000 --- a/core/json/src/test/resources/diff-example-json1.json +++ /dev/null @@ -1,58 +0,0 @@ -{ - "format": "example", - "content": [ - { - "align": "center", - "depth": 0, - "list": false, - "content": [ - { - "style": { - "underline": 1, - "bold": 2, - "size": 20, - "italic": 2, - "color": "FFFFFF", - "fontFamily": "Arial" - }, - "content": "And please, feel free to send us your feedback and comments to " - }, - { - "style": { - "underline": 1, - "bold": 2, - "size": 20, - "italic": 2, - "color": "4DC3FF", - "fontFamily": "Arial" - }, - "content": "hello world" - }, - { - "style": { - "underline": 1, - "bold": 2, - "size": 
20, - "italic": 2, - "color": "FFFFFF", - "fontFamily": "Arial" - }, - "content": ", or just by clicking on the " - }, - { - "style": { - "underline": 1, - "bold": 2, - "size": 20, - "italic": 2, - "color": "4DC3FF", - "fontFamily": "Arial" - }, - "content": "feedback" - } - ], - "ordered": false - } - ], - "version": 3.0 -} \ No newline at end of file diff --git a/core/json/src/test/resources/diff-example-json2.json b/core/json/src/test/resources/diff-example-json2.json deleted file mode 100644 index 4c6e5d5865..0000000000 --- a/core/json/src/test/resources/diff-example-json2.json +++ /dev/null @@ -1,69 +0,0 @@ -{ - "format": "example", - "version": 4.0, - "content": [ - { - "list": false, - "depth": 0, - "ordered": false, - "content": [ - { - "content": "And please, feel free to send us your feedback and comments to ", - "style": { - "size": 20, - "color": "FFFFFF", - "name": "Arial", - "bold": 2, - "italic": 2, - "underline": 2 - } - }, - { - "content": "foo", - "style": { - "size": 20, - "color": "4DC2FF", - "name": "Arial", - "bold": 2, - "italic": 2, - "underline": 2 - } - }, - { - "content": ", or just by clicking on the ", - "style": { - "size": 20, - "color": "FFFFFF", - "name": "Arial", - "bold": 2, - "italic": 2, - "underline": 2 - } - }, - { - "content": "feedback", - "style": { - "size": 20, - "color": "4DC2FF", - "name": "Arial", - "bold": 2, - "italic": 2, - "underline": 2 - } - }, - { - "content": " button up above.", - "style": { - "size": 20, - "color": "FFFFFF", - "name": "Arial", - "bold": 2, - "italic": 2, - "underline": 2 - } - } - ], - "align": "center" - } - ] -} \ No newline at end of file diff --git a/core/json/src/test/scala/net/liftweb/json/DiffExamples.scala b/core/json/src/test/scala/net/liftweb/json/DiffExamples.scala deleted file mode 100644 index cf202f7269..0000000000 --- a/core/json/src/test/scala/net/liftweb/json/DiffExamples.scala +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright 2009-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package json - -import org.specs2.mutable.Specification - - -object DiffExamples extends Specification { - import MergeExamples.{scala1, scala2, lotto1, lotto2, mergedLottoResult} - - "Diff example" in { - val Diff(changed, added, deleted) = scala1 diff scala2 - (changed mustEqual expectedChanges) and - (added mustEqual expectedAdditions) and - (deleted mustEqual expectedDeletions) - } - - val expectedChanges = parse(""" - { - "tags": ["static-typing","fp"], - "features": { - "key2":"newval2" - } - }""") - - val expectedAdditions = parse(""" - { - "features": { - "key3":"val3" - }, - "compiled": true - }""") - - val expectedDeletions = parse(""" - { - "year":2006, - "features":{ "key1":"val1" } - }""") - - "Lotto example" in { - val Diff(changed, added, deleted) = mergedLottoResult diff lotto1 - (changed mustEqual JNothing) and - (added mustEqual JNothing) and - (deleted mustEqual lotto2) - } - - "Example from http://tlrobinson.net/projects/js/jsondiff/" in { - val json1 = read("/diff-example-json1.json") - val json2 = read("/diff-example-json2.json") - val expectedChanges = read("/diff-example-expected-changes.json") - val expectedAdditions = read("/diff-example-expected-additions.json") - val expectedDeletions = read("/diff-example-expected-deletions.json") - - json1 diff json2 mustEqual Diff(expectedChanges, expectedAdditions, expectedDeletions) - } - - private def read(resource: String) = - parse(scala.io.Source.fromInputStream(getClass.getResourceAsStream(resource)).getLines().mkString) -} diff --git a/core/json/src/test/scala/net/liftweb/json/Examples.scala b/core/json/src/test/scala/net/liftweb/json/Examples.scala deleted file mode 100644 index 14b2875ed8..0000000000 --- a/core/json/src/test/scala/net/liftweb/json/Examples.scala +++ /dev/null @@ -1,220 +0,0 @@ -/* - * Copyright 2009-2013 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package json - -import org.specs2.mutable.Specification - -class Examples extends AbstractExamples { - override def print(value: JValue): String = compactRender(value) -} - - -trait AbstractExamples extends Specification { - import Examples._ - import JsonAST.concat - import JsonDSL._ - - def print(value: JValue): String - - "Lotto example" in { - val json = parse(lotto) - val renderedLotto = print(json) - json mustEqual parse(renderedLotto) - } - - "Person example" in { - val json = parse(person) - val renderedPerson = prettyRender(json) - (json mustEqual parse(renderedPerson)) and - (print(json \\ "name") mustEqual """{"name":"Joe","name":"Marilyn"}""") and - (print(json \ "person" \ "name") mustEqual "\"Joe\"") - } - - "Transformation example" in { - val uppercased = parse(person).transformField { case JField(n, v) => JField(n.toUpperCase, v) } - val rendered = compactRender(uppercased) - rendered mustEqual - """{"PERSON":{"NAME":"Joe","AGE":35,"SPOUSE":{"PERSON":{"NAME":"Marilyn","AGE":33}}}}""" - } - - "Remove example" in { - val json = parse(person) removeField { _ == JField("name", "Marilyn") } - compactRender(json \\ "name") mustEqual """{"name":"Joe"}""" - } - - "Queries on person example" in { - val json = parse(person) - val filtered = json filterField { - case JField("name", _) => true - case _ => false - } - filtered mustEqual List(JField("name", JString("Joe")), JField("name", JString("Marilyn"))) - - val found = json findField { - case JField("name", _) => true - case _ => false - } - found mustEqual Some(JField("name", JString("Joe"))) - } - - "Object array example" in { - val json = parse(objArray) - (print(json \ "children" \ "name") mustEqual """["Mary","Mazy"]""") and - (print((json \ "children")(0) \ "name") mustEqual "\"Mary\"") and - (print((json \ "children")(1) \ "name") mustEqual "\"Mazy\"") and - ((for { JObject(o) <- json; JField("name", JString(y)) <- o } yield y) mustEqual List("joe", "Mary", "Mazy")) - } - - "Unbox values using XPath-like type expression" in { - (parse(objArray) \ "children" \\ classOf[JInt] mustEqual List(5, 3)) and - (parse(lotto) \ "lotto" \ "winning-numbers" \ classOf[JInt] mustEqual List(2, 45, 34, 23, 7, 5, 3)) and - (parse(lotto) \\ "winning-numbers" \ classOf[JInt] mustEqual List(2, 45, 34, 23, 7, 5, 3)) - } - - "Quoted example" in { - val json = parse(quoted) - List("foo \" \n \t \r bar") mustEqual json.values - } - - "Null example" in { - print(parse(""" {"name": null} """)) mustEqual """{"name":null}""" - } - - "Null rendering example" in { - print(nulls) mustEqual """{"f1":null,"f2":[null,"s"]}""" - } - - "Symbol example" in { - print(symbols) mustEqual """{"f1":"foo","f2":"bar"}""" - } - - "Unicode example" in { - parse("[\" \\u00e4\\u00e4li\\u00f6t\"]") mustEqual JArray(List(JString(" \u00e4\u00e4li\u00f6t"))) - } - - "Exponent example" in { - (parse("""{"num": 2e5 }""") mustEqual JObject(List(JField("num", JDouble(200000.0))))) and - (parse("""{"num": -2E5 }""") mustEqual JObject(List(JField("num", JDouble(-200000.0))))) and - (parse("""{"num": 2.5e5 }""") mustEqual JObject(List(JField("num", JDouble(250000.0))))) and - (parse("""{"num": 2.5e-5 }""") mustEqual JObject(List(JField("num", JDouble(2.5e-5))))) - } - - "JSON building example" in { - val json = JObject(JField("name", JString("joe")), JField("age", JInt(34))) ++ JObject(JField("name", ("mazy")), JField("age", JInt(31))) - print(json) mustEqual """[{"name":"joe","age":34},{"name":"mazy","age":31}]""" - } - - "JSON building with implicit 
primitive conversions example" in { - import Implicits._ - val json = JObject(JField("name", "joe"), JField("age", 34)) ++ JObject(JField("name", "mazy"), JField("age", 31)) - print(json) mustEqual """[{"name":"joe","age":34},{"name":"mazy","age":31}]""" - } - - "Example which collects all integers and forms a new JSON" in { - val json = parse(person) - val ints = json.fold(JNothing: JValue) { (a, v) => v match { - case x: JInt => a ++ x - case _ => a - }} - print(ints) mustEqual """[35,33]""" - } - - "Generate JSON with DSL example" in { - val json: JValue = - ("id" -> 5) ~ - ("tags" -> Map("a" -> 5, "b" -> 7)) - print(json) mustEqual """{"id":5,"tags":{"a":5,"b":7}}""" - } - - "Naked JArray with null values" in { - val json = JArray(List(null)) - print(json) mustEqual """[null]""" - } - -} - -object Examples { - import JsonDSL._ - - val lotto = """ -{ - "lotto":{ - "lotto-id":5, - "winning-numbers":[2,45,34,23,7,5,3], - "winners":[ { - "winner-id":23, - "numbers":[2,45,34,23,3, 5] - },{ - "winner-id" : 54 , - "numbers":[ 52,3, 12,11,18,22 ] - }] - } -} -""" - - val person = """ -{ - "person": { - "name": "Joe", - "age": 35, - "spouse": { - "person": { - "name": "Marilyn", - "age": 33 - } - } - } -} -""" - - val personDSL = - ("person" -> - ("name" -> "Joe") ~ - ("age" -> 35) ~ - ("spouse" -> - ("person" -> - ("name" -> "Marilyn") ~ - ("age" -> 33) - ) - ) - ) - - val objArray = -""" -{ "name": "joe", - "address": { - "street": "Bulevard", - "city": "Helsinki" - }, - "children": [ - { - "name": "Mary", - "age": 5 - }, - { - "name": "Mazy", - "age": 3 - } - ] -} -""" - - val nulls = ("f1" -> (null: String)) ~ ("f2" -> List(null, "s")) - val quoted = """["foo \" \n \t \r bar"]""" - val symbols = ("f1" -> 'foo) ~ ("f2" -> 'bar) -} diff --git a/core/json/src/test/scala/net/liftweb/json/ExtractionBugs.scala b/core/json/src/test/scala/net/liftweb/json/ExtractionBugs.scala deleted file mode 100644 index 9830da067d..0000000000 --- a/core/json/src/test/scala/net/liftweb/json/ExtractionBugs.scala +++ /dev/null @@ -1,189 +0,0 @@ -/* - * Copyright 2009-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package json - -import org.specs2.mutable.Specification - -object ExtractionBugs extends Specification { - "Extraction bugs Specification".title - - implicit val formats = DefaultFormats - - case class Response(data: List[Map[String, Int]]) - - case class OptionOfInt(opt: Option[Int]) - - case class PMap(m: Map[String, List[String]]) - - case class ManyConstructors(id: Long, name: String, lastName: String, email: String) { - def this() = this(0, "John", "Doe", "") - def this(name: String) = this(0, name, "Doe", "") - def this(name: String, email: String) = this(0, name, "Doe", email) - } - - case class ExtractWithAnyRef() - - case class UnicodeFieldNames(`foo.bar,baz`: String) - - object HasCompanion { - def hello = "hello" - } - case class HasCompanion(nums: List[Int]) - - "ClassCastException (BigInt) regression 2 must pass" in { - val opt = OptionOfInt(Some(39)) - Extraction.decompose(opt).extract[OptionOfInt].opt.get mustEqual 39 - } - - "Extraction should not fail when Maps values are Lists" in { - val m = PMap(Map("a" -> List("b"), "c" -> List("d"))) - Extraction.decompose(m).extract[PMap] mustEqual m - } - - "Extraction should always choose constructor with the most arguments if more than one constructor exists" in { - val args = Meta.Reflection.primaryConstructorArgs(classOf[ManyConstructors]) - args.size mustEqual 4 - } - - "Extraction should handle AnyRef" in { - implicit val formats = DefaultFormats.withHints(FullTypeHints(classOf[ExtractWithAnyRef] :: Nil)) - val json = JObject(JField("jsonClass", JString(classOf[ExtractWithAnyRef].getName)) :: Nil) - val extracted = Extraction.extract[AnyRef](json) - extracted mustEqual ExtractWithAnyRef() - } - - "Extraction should work with unicode encoded field names (issue 1075)" in { - parse("""{"foo.bar,baz":"x"}""").extract[UnicodeFieldNames] mustEqual UnicodeFieldNames("x") - } - - "Extraction should not fail if case class has a companion object" in { - parse("""{"nums":[10]}""").extract[HasCompanion] mustEqual HasCompanion(List(10)) - } - - "Issue 1169" in { - val json = JsonParser.parse("""{"data":[{"one":1, "two":2}]}""") - json.extract[Response] mustEqual Response(List(Map("one" -> 1, "two" -> 2))) - } - - "Extraction should handle List[Option[String]]" in { - val json = JsonParser.parse("""["one", "two", null]""") - json.extract[List[Option[String]]] mustEqual List(Some("one"), Some("two"), None) - } - - "Extraction should fail if you're attempting to extract an option and you're given data of the wrong type" in { - val json = JsonParser.parse("""{"opt": "hi"}""") - json.extract[OptionOfInt] must throwA[MappingException].like { - case e => e.getMessage mustEqual "No usable value for opt\nDo not know how to convert JString(hi) into int" - } - - val json2 = JString("hi") - json2.extract[Option[Int]] must throwA[MappingException].like { - case e => e.getMessage mustEqual "Do not know how to convert JString(hi) into int" - } - } - - "deserialize list of homogonous tuples w/ array tuples disabled" in { - implicit val formats = DefaultFormats - - case class Holder(items: List[(String, String)]) - - val holder = Holder(List(("string", "string"))) - val serialized = compactRender(Extraction.decompose(holder)) - - val deserialized = parse(serialized).extract[Holder] - deserialized must_== holder - } - - "deserialize a list of heterogenous tuples w/ array tuples disabled" in { - implicit val formats = DefaultFormats - - // MSF: This currently doesn't work with scala primitives?! 
The type arguments appear as - // java.lang.Object instead of scala.Int. :/ - case class Holder2(items: List[(String, Integer)]) - - val holder = Holder2(List(("string", 10))) - val serialized = compactRender(Extraction.decompose(holder)) - - val deserialized = parse(serialized).extract[Holder2] - deserialized must_== holder - } - - "deserialize list of homogonous tuples w/ array tuples enabled" in { - implicit val formats = new DefaultFormats { - override val tuplesAsArrays = true - } - - case class Holder(items: List[(String, String)]) - - val holder = Holder(List(("string", "string"))) - val serialized = compactRender(Extraction.decompose(holder)) - - val deserialized = parse(serialized).extract[Holder] - deserialized must_== holder - } - - "deserialize a list of heterogenous tuples w/ array tuples enabled" in { - implicit val formats = new DefaultFormats { - override val tuplesAsArrays = true - } - - // MSF: This currently doesn't work with scala primitives?! The type arguments appear as - // java.lang.Object instead of scala.Int. :/ - case class Holder2(items: List[(String, Integer)]) - - val holder = Holder2(List(("string", 10))) - val serialized = compactRender(Extraction.decompose(holder)) - - val deserialized = parse(serialized).extract[Holder2] - deserialized must_== holder - } - - "deserialize an out of order old-style tuple w/ array tuples enabled" in { - implicit val formats = new DefaultFormats { - override val tuplesAsArrays = true - } - - val outOfOrderTuple: JObject = JObject(List( - JField("_1", JString("apple")), - JField("_3", JString("bacon")), - JField("_2", JString("sammich")) - )) - - val extracted = outOfOrderTuple.extract[(String, String, String)] - - extracted must_== ("apple", "sammich", "bacon") - } - - "throw the correct exceptions when things go wrong during extraction" in { - implicit val formats = DefaultFormats - - class Holder(bacon: String) { - throw new Exception("I'm an exception!") - } - - val correctArgsAstRepresentation: JObject = JObject(List( - JField("bacon", JString("apple")) - )) - - correctArgsAstRepresentation.extract[Holder] must throwA[MappingException].like { - case e => - e.getMessage mustEqual "An exception was thrown in the class constructor during extraction" - e.getCause.getCause.getMessage mustEqual "I'm an exception!" - } - } -} diff --git a/core/json/src/test/scala/net/liftweb/json/ExtractionExamplesSpec.scala b/core/json/src/test/scala/net/liftweb/json/ExtractionExamplesSpec.scala deleted file mode 100644 index 1433a7117c..0000000000 --- a/core/json/src/test/scala/net/liftweb/json/ExtractionExamplesSpec.scala +++ /dev/null @@ -1,354 +0,0 @@ -/* - * Copyright 2009-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package json - -import java.util.Date -import org.specs2.mutable.Specification - -class ExtractionExamples extends Specification { - "Extraction Examples Specification".title - - implicit val formats = DefaultFormats - - "Extraction example" in { - val json = parse(testJson) - json.extract[Person] mustEqual Person("joe", Address("Bulevard", "Helsinki"), List(Child("Mary", 5, Some(date("2004-09-04T18:06:22Z"))), Child("Mazy", 3, None))) - } - - "Extraction with path expression example" in { - val json = parse(testJson) - (json \ "address").extract[Address] mustEqual Address("Bulevard", "Helsinki") - } - - "Partial extraction example" in { - val json = parse(testJson) - json.extract[SimplePerson] mustEqual SimplePerson("joe", Address("Bulevard", "Helsinki")) - } - - "Extract with a default value" in { - val json = parse(testJson) - (json \ "address2").extractOrElse(Address("Tie", "Helsinki")) mustEqual Address("Tie", "Helsinki") - } - - "Map with primitive values extraction example" in { - val json = parse(testJson) - json.extract[PersonWithMap] mustEqual - PersonWithMap("joe", Map("street" -> "Bulevard", "city" -> "Helsinki")) - } - - "Map with object values extraction example" in { - val json = parse(twoAddresses) - json.extract[PersonWithAddresses] mustEqual - PersonWithAddresses("joe", Map("address1" -> Address("Bulevard", "Helsinki"), - "address2" -> Address("Soho", "London"))) - } - - "Simple value extraction example" in { - val json = parse(testJson) - json.extract[Name] mustEqual Name("joe") - (json \ "children")(0).extract[Name] mustEqual Name("Mary") - (json \ "children")(1).extract[Name] mustEqual Name("Mazy") - } - - "Primitive value extraction example" in { - val json = parse(testJson) - (json \ "name").extract[String] mustEqual "joe" - (json \ "name").extractOpt[String] mustEqual Some("joe") - (json \ "name").extractOpt[Int] mustEqual None - ((json \ "children")(0) \ "birthdate").extract[Date] mustEqual date("2004-09-04T18:06:22Z") - - JInt(1).extract[Int] mustEqual 1 - JInt(1).extract[String] mustEqual "1" - } - - "Primitive extraction example" in { - val json = parse(primitives) - json.extract[Primitives] mustEqual Primitives(124, 123L, 126.5, 127.5.floatValue, "128", 'symb, 125, 129.byteValue, true) - } - - "Null extraction example" in { - val json = parse("""{ "name": null, "age": 5, "birthdate": null }""") - json.extract[Child] mustEqual Child(null, 5, None) - } - - "Date extraction example" in { - val json = parse("""{"name":"e1","timestamp":"2009-09-04T18:06:22Z"}""") - json.extract[Event] mustEqual Event("e1", date("2009-09-04T18:06:22Z")) - } - - "Timestamp extraction example" in { - val json = parse("""{"timestamp":"2009-09-04T18:06:22Z"}""") - new Date((json \ "timestamp").extract[java.sql.Timestamp].getTime) mustEqual date("2009-09-04T18:06:22Z") - } - - "Option extraction example" in { - val json = parse("""{ "name": null, "age": 5, "mother":{"name":"Marilyn"}}""") - json.extract[OChild] mustEqual OChild(None, 5, Some(Parent("Marilyn")), None) - } - - "Missing JSON array can be extracted as an empty List" in { - parse(missingChildren).extract[Person] mustEqual Person("joe", Address("Bulevard", "Helsinki"), Nil) - } - - "Multidimensional array extraction example" in { - parse(multiDimensionalArrays).extract[MultiDim] mustEqual MultiDim( - List(List(List(1, 2), List(3)), List(List(4), List(5, 6))), - List(List(Name("joe"), Name("mary")), List(Name("mazy")))) - } - - "Flatten example with simple case class" in { - val f = 
Extraction.flatten(Extraction.decompose(SimplePerson("joe", Address("Bulevard", "Helsinki")))) - val e = Map(".name" -> "\"joe\"", ".address.street" -> "\"Bulevard\"", ".address.city" -> "\"Helsinki\"") - - f mustEqual e - } - - "Unflatten example with top level string and int" in { - val m = Map(".name" -> "\"joe\"", ".age" -> "32") - - Extraction.unflatten(m) mustEqual JObject(List(JField("name",JString("joe")), JField("age",JInt(32)))) - } - - "Unflatten example with top level string and double" in { - val m = Map(".name" -> "\"joe\"", ".age" -> "32.2") - - Extraction.unflatten(m) mustEqual JObject(List(JField("name",JString("joe")), JField("age",JDouble(32.2)))) - } - - "Unflatten example with two-level string properties" in { - val m = Map(".name" -> "\"joe\"", ".address.street" -> "\"Bulevard\"", ".address.city" -> "\"Helsinki\"") - - Extraction.unflatten(m) mustEqual JObject(List(JField("name", JString("joe")), JField("address", JObject(List(JField("street", JString("Bulevard")), JField("city", JString("Helsinki"))))))) - } - - "Unflatten example with top level array" in { - val m = Map(".foo[2]" -> "2", ".foo[0]" -> "0", ".foo[1]" -> "1") - - Extraction.unflatten(m) mustEqual JObject(List(JField("foo", JArray(List(JInt(0), JInt(1), JInt(2)))))) - } - - "Unflatten example with common prefixes" in { - val m = Map(".photo" -> "\"photo string\"", ".photographer" -> "\"photographer string\"", ".other" -> "\"other string\"") - - Extraction.unflatten(m) mustEqual JObject(List(JField("photo", JString("photo string")), JField("photographer", JString("photographer string")), JField("other", JString("other string")))) - } - - "Flatten and unflatten are symmetric" in { - val parsed = parse(testJson) - - Extraction.unflatten(Extraction.flatten(parsed)) mustEqual parsed - } - - "Flatten preserves empty sets" in { - val s = SetWrapper(Set()) - - Extraction.flatten(Extraction.decompose(s)).get(".set") mustEqual Some("[]") - } - - "Flatten and unflatten are symmetric with empty sets" in { - val s = SetWrapper(Set()) - - Extraction.unflatten(Extraction.flatten(Extraction.decompose(s))).extract[SetWrapper] mustEqual s - } - - "List extraction example" in { - val json = parse(testJson) \ "children" - json.extract[List[Name]] mustEqual List(Name("Mary"), Name("Mazy")) - } - - "Map extraction example" in { - val json = parse(testJson) \ "address" - json.extract[Map[String, String]] mustEqual Map("street" -> "Bulevard", "city" -> "Helsinki") - } - - "Extraction and decomposition are symmetric" in { - val person = parse(testJson).extract[Person] - Extraction.decompose(person).extract[Person] mustEqual person - } - - "Extraction failure message example" in { - val json = parse("""{"city":"San Francisco"}""") - json.extract[Address] must throwA(MappingException("No usable value for street\nDid not find value which can be converted into java.lang.String", null)) - } - - "Best matching constructor selection example" in { - parse("""{"name":"john","age":32,"size":"M"}""").extract[MultipleConstructors] mustEqual - MultipleConstructors("john", 32, Some("M")) - - parse("""{"name":"john","age":32}""").extract[MultipleConstructors] mustEqual - MultipleConstructors("john", 32, Some("S")) - - parse("""{"name":"john","foo":"xxx"}""").extract[MultipleConstructors] mustEqual - MultipleConstructors("john", 30, None) - - parse("""{"name":"john","age":32,"size":null}""").extract[MultipleConstructors] mustEqual - MultipleConstructors("john", 32, None) - - 
parse("""{"birthYear":1990,"name":"john","foo":2}""").extract[MultipleConstructors] mustEqual - MultipleConstructors("john", 20, None) - - parse("""{"foo":2,"age":12,"size":"XS"}""").extract[MultipleConstructors] mustEqual - MultipleConstructors("unknown", 12, Some("XS")) - } - - "Partial JSON extraction" in { - parse(stringField).extract[ClassWithJSON] mustEqual ClassWithJSON("one", JString("msg")) - parse(objField).extract[ClassWithJSON] mustEqual ClassWithJSON("one", JObject(List(JField("yes", JString("woo"))))) - } - - "Double can be coerced to Int or Long" in { - JDouble(2.1).extract[Int] mustEqual 2 - JDouble(2.1).extract[Long] mustEqual 2L - } - - "Map with nested non-polymorphic list extraction example" in { - parse("""{"a":["b"]}""").extract[Map[String, List[String]]] mustEqual Map("a" -> List("b")) - } - - "List with nested non-polymorphic list extraction example" in { - parse("""[["a"]]""").extract[List[List[String]]] mustEqual List(List("a")) - } - - "Complex nested non-polymorphic collections extraction example" in { - parse("""{"a":[{"b":"c"}]}""").extract[Map[String, List[Map[String, String]]]] mustEqual Map("a" -> List(Map("b" -> "c"))) - } - - val testJson = -""" -{ "name": "joe", - "address": { - "street": "Bulevard", - "city": "Helsinki" - }, - "children": [ - { - "name": "Mary", - "age": 5 - "birthdate": "2004-09-04T18:06:22Z" - }, - { - "name": "Mazy", - "age": 3 - } - ] -} -""" - - val missingChildren = -""" -{ - "name": "joe", - "address": { - "street": "Bulevard", - "city": "Helsinki" - } -} -""" - - val twoAddresses = -""" -{ - "name": "joe", - "addresses": { - "address1": { - "street": "Bulevard", - "city": "Helsinki" - }, - "address2": { - "street": "Soho", - "city": "London" - } - } -} -""" - - val primitives = -""" -{ - "l": 123, - "i": 124, - "sh": 125, - "d": 126.5, - "f": 127.5, - "s": "128", - "b": 129, - "bool": true, - "sym":"symb" -} -""" - - val multiDimensionalArrays = -""" -{ - "ints": [[[1, 2], [3]], [[4], [5, 6]]], - "names": [[{"name": "joe"}, {"name": "mary"}], [[{"name": "mazy"}]]] -} -""" - - val stringField = -""" -{ - "name": "one", - "message": "msg" -} -""" - - val objField = -""" -{ - "name": "one", - "message": { - "yes": "woo" - } -} -""" - - def date(s: String) = DefaultFormats.dateFormat.parse(s).get -} - -case class SetWrapper(set: Set[String]) - -case class Person(name: String, address: Address, children: List[Child]) -case class Address(street: String, city: String) -case class Child(name: String, age: Int, birthdate: Option[java.util.Date]) - -case class SimplePerson(name: String, address: Address) - -case class PersonWithMap(name: String, address: Map[String, String]) -case class PersonWithAddresses(name: String, addresses: Map[String, Address]) - -case class Name(name: String) - -case class Primitives(i: Int, l: Long, d: Double, f: Float, s: String, sym: Symbol, sh: Short, b: Byte, bool: Boolean) - -case class OChild(name: Option[String], age: Int, mother: Option[Parent], father: Option[Parent]) -case class Parent(name: String) - -case class Event(name: String, timestamp: Date) - -case class MultiDim(ints: List[List[List[Int]]], names: List[List[Name]]) - -case class MultipleConstructors(name: String, age: Int, size: Option[String]) { - def this(name: String) = this(name, 30, None) - def this(age: Int, name: String) = this(name, age, Some("S")) - def this(name: String, birthYear: Int) = this(name, 2010 - birthYear, None) - def this(size: Option[String], age: Int) = this("unknown", age, size) -} - -case class ClassWithJSON(name: 
String, message: JValue) - diff --git a/core/json/src/test/scala/net/liftweb/json/FieldSerializerBugs.scala b/core/json/src/test/scala/net/liftweb/json/FieldSerializerBugs.scala deleted file mode 100644 index e597e9179f..0000000000 --- a/core/json/src/test/scala/net/liftweb/json/FieldSerializerBugs.scala +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright 2009-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package json - -import org.specs2.mutable.Specification - -object FieldSerializerBugs extends Specification { - import Serialization.{read, write => swrite} - - implicit val formats = DefaultFormats + FieldSerializer[AnyRef]() - -/* FIXME: For some reason this fails on CI - "AtomicInteger should not cause stack overflow" in { - import java.util.concurrent.atomic.AtomicInteger - - val ser = swrite(new AtomicInteger(1)) - val atomic = read[AtomicInteger](ser) - atomic.get mustEqual 1 - } - */ - - "Name with symbols is correctly serialized" in { - implicit val formats = DefaultFormats + FieldSerializer[AnyRef]() - - val s = WithSymbol(5) - val str = Serialization.write(s) - (str mustEqual """{"a-b*c":5}""") and - (read[WithSymbol](str) mustEqual s) - } - - "FieldSerialization should work with Options" in { - implicit val formats = DefaultFormats + FieldSerializer[ClassWithOption]() - - val t = new ClassWithOption - t.field = Some(5) - read[ClassWithOption](Serialization.write(t)).field mustEqual Some(5) - } - - case class WithSymbol(`a-b*c`: Int) - - class ClassWithOption { - var field: Option[Int] = None - } -} - - diff --git a/core/json/src/test/scala/net/liftweb/json/FieldSerializerExamples.scala b/core/json/src/test/scala/net/liftweb/json/FieldSerializerExamples.scala deleted file mode 100644 index e603d02759..0000000000 --- a/core/json/src/test/scala/net/liftweb/json/FieldSerializerExamples.scala +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright 2009-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package json - -import org.specs2.mutable.Specification - -object FieldSerializerExamples extends Specification { - import Serialization.{read, write => swrite} - import FieldSerializer._ - - val dog = new WildDog("black") - dog.name = "pluto" - dog.owner = Owner("joe", 35) - - val cat = new WildCat(100) - cat.name = "tommy" - - "All fields are serialized by default" in { - implicit val formats = DefaultFormats + FieldSerializer[WildDog]() - val ser = swrite(dog) - val dog2 = read[WildDog](ser) - (dog2.name mustEqual dog.name) and - (dog2.color mustEqual dog.color) and - (dog2.owner mustEqual dog.owner) and - (dog2.size mustEqual dog.size) - } - - "Fields can be ignored and renamed" in { - val dogSerializer = FieldSerializer[WildDog]( - renameTo("name", "animalname") orElse ignore("owner"), - renameFrom("animalname", "name") - ) - - implicit val formats = DefaultFormats + dogSerializer - - val ser = swrite(dog) - val dog2 = read[WildDog](ser) - (dog2.name mustEqual dog.name) - (dog2.color mustEqual dog.color) - (dog2.owner must beNull) - (dog2.size mustEqual dog.size) - ((parse(ser) \ "animalname") mustEqual JString("pluto")) - } - - "Selects best matching serializer" in { - val dogSerializer = FieldSerializer[WildDog](ignore("name")) - implicit val formats = DefaultFormats + FieldSerializer[AnyRef]() + dogSerializer - - val dog2 = read[WildDog](swrite(dog)) - val cat2 = read[WildCat](swrite(cat)) - - (dog2.name mustEqual "") and - (cat2.name mustEqual "tommy") - } -} - -abstract class Mammal { - var name: String = "" - var owner: Owner = null - val size = List(10, 15) -} - -class WildDog(val color: String) extends Mammal -class WildCat(val cuteness: Int) extends Mammal - -case class Owner(name: String, age: Int) diff --git a/core/json/src/test/scala/net/liftweb/json/JValueGen.scala b/core/json/src/test/scala/net/liftweb/json/JValueGen.scala deleted file mode 100644 index 873b8f204a..0000000000 --- a/core/json/src/test/scala/net/liftweb/json/JValueGen.scala +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright 2009-2010 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package json - -import org.scalacheck._ - import rng.Seed -import Gen._ -import Arbitrary.arbitrary - -trait JValueGen { - def genJValue: Gen[JValue] = frequency((5, genSimple), (1, delay(genArray)), (1, delay(genObject))) - def genSimple: Gen[JValue] = oneOf( - const(JNull), - arbitrary[Int].map(JInt(_)), - arbitrary[Double].map(JDouble(_)), - arbitrary[Boolean].map(JBool(_)), - arbitrary[String].map(JString(_))) - - def genArray: Gen[JValue] = for (l <- genList) yield JArray(l) - def genObject: Gen[JObject] = for (l <- genFieldList) yield JObject(l) - - implicit def cogenJValue: Cogen[JValue] = - Cogen[JValue]({ (seed: Seed, jvalue: JValue) => - jvalue match { - case JNothing => implicitly[Cogen[Unit]].perturb(seed, ()) - case JNull => implicitly[Cogen[Unit]].perturb(seed, ()) - case JInt(value) => implicitly[Cogen[BigInt]].perturb(seed, value) - case JDouble(value) => implicitly[Cogen[Double]].perturb(seed, value) - case JString(value) => implicitly[Cogen[String]].perturb(seed, value) - case JBool(value) => implicitly[Cogen[Boolean]].perturb(seed, value) - case JArray(value) => implicitly[Cogen[List[JValue]]].perturb(seed, value) - case JObject(value) => implicitly[Cogen[List[JField]]].perturb(seed, value) - } - }) - implicit def cogenJField: Cogen[JField] = - Cogen[JField]({ (seed: Seed, field: JField) => - Cogen.perturbPair(seed, (field.name -> field.value)) - }) - - val genJValueFn: Gen[JValue=>JValue] = function1(genJValue) - - def genList = Gen.containerOfN[List, JValue](listSize, genJValue) - def genFieldList = Gen.containerOfN[List, JField](listSize, genField) - def genField = for (name <- identifier; value <- genJValue; id <- choose(0, 1000000)) yield JField(name+id, value) - - def genJValueClass: Gen[Class[_ <: JValue]] = oneOf( - JNull.getClass.asInstanceOf[Class[JValue]], JNothing.getClass.asInstanceOf[Class[JValue]], classOf[JInt], - classOf[JDouble], classOf[JBool], classOf[JString], classOf[JArray], classOf[JObject]) - - def listSize = choose(0, 5).sample.get -} - -trait NodeGen { - import Xml.{XmlNode, XmlElem} - import scala.xml.{Node, NodeSeq, Text} - - def genXml: Gen[Node] = frequency((2, delay(genNode)), (3, genElem)) - - def genNode = for { - name <- genName - node <- Gen.containerOfN[List, Node](children, genXml) map { seq => new XmlNode(name, seq) } - } yield node - - def genElem = for { - name <- genName - value <- arbitrary[String] - } yield new XmlElem(name, value) - - def genName = frequency((2, identifier), (1, const("const"))) - private def children = choose(1, 3).sample.get -} diff --git a/core/json/src/test/scala/net/liftweb/json/JsonAstSpec.scala b/core/json/src/test/scala/net/liftweb/json/JsonAstSpec.scala deleted file mode 100644 index e67276c271..0000000000 --- a/core/json/src/test/scala/net/liftweb/json/JsonAstSpec.scala +++ /dev/null @@ -1,225 +0,0 @@ -/* - * Copyright 2009-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package json - -import org.specs2.mutable.Specification -import org.specs2.ScalaCheck -import org.scalacheck._ - import Arbitrary._ - import Prop.{forAll, forAllNoShrink} - -class JsonAstSpec extends Specification with JValueGen with ScalaCheck { - "Functor identity" in { - val identityProp = (json: JValue) => json == (json map identity) - forAll(identityProp) - } - - "Functor composition" in { - val compositionProp = (json: JValue, fa: JValue => JValue, fb: JValue => JValue) => { - json.map(fb).map(fa) == json.map(fa compose fb) - } - - forAll(compositionProp) - }.pendingUntilFixed("Requires a fundamental change to map; see https://github.com/lift/framework/issues/1816 .") - - "Monoid identity" in { - val identityProp = (json: JValue) => (json ++ JNothing == json) && (JNothing ++ json == json) - forAll(identityProp) - } - - "Monoid associativity" in { - val assocProp = (x: JValue, y: JValue, z: JValue) => x ++ (y ++ z) == (x ++ y) ++ z - forAll(assocProp) - } - - "Merge identity" in { - val identityProp = (json: JValue) => (json merge JNothing) == json && (JNothing merge json) == json - forAll(identityProp) - } - - "Merge idempotency" in { - val idempotencyProp = (x: JValue) => (x merge x) == x - forAll(idempotencyProp) - } - - "Diff identity" in { - val identityProp = (json: JValue) => - (json diff JNothing) == Diff(JNothing, JNothing, json) && - (JNothing diff json) == Diff(JNothing, json, JNothing) - - forAll(identityProp) - } - - "Diff with self is empty" in { - val emptyProp = (x: JValue) => (x diff x) == Diff(JNothing, JNothing, JNothing) - forAll(emptyProp) - } - - "Diff is subset of originals" in { - val subsetProp = (x: JObject, y: JObject) => { - val Diff(c, a, d) = x diff y - y == (y merge (c merge a)) - } - forAll(subsetProp) - } - - "Diff result is same when fields are reordered" in { - val reorderProp = (x: JObject) => (x diff reorderFields(x)) == Diff(JNothing, JNothing, JNothing) - forAll(reorderProp) - } - - "Remove all" in { - val removeAllProp = (x: JValue) => (x remove { _ => true }) == JNothing - forAll(removeAllProp) - } - - "Remove nothing" in { - val removeNothingProp = (x: JValue) => (x remove { _ => false }) == x - forAll(removeNothingProp) - } - - "Remove removes only matching elements" in { - forAllNoShrink(genJValue, genJValueClass) { (json: JValue, x: Class[_ <: JValue]) => { - val removed = json remove typePredicate(x) - val Diff(c, a, d) = json diff removed - val elemsLeft = removed filter { - case _ => true - } - c == JNothing && a == JNothing && elemsLeft.forall(_.getClass != x) - }} - } - - "Replace one" in { - val anyReplacement = (x: JValue, replacement: JObject) => { - def findOnePath(jv: JValue, l: List[String]): List[String] = jv match { - case JObject(fl) => fl match { - case field :: xs => findOnePath(field.value, l) - case Nil => l - } - case _ => l - } - - val path = findOnePath(x, Nil).reverse - val result = x.replace(path, replacement) - - def replaced(path: List[String], in: JValue): Boolean = { - path match { - case Nil => x == in - - case name :: Nil => (in \ name) match { - case `replacement` => true - case _ => false - } - - case name :: xs => (in \ name) match { - case JNothing => false - case value => replaced(xs, value) - } - } - } - - replaced(path, result) - } - - // ensure that we test some JObject instances - val fieldReplacement = (x: JObject, replacement: JObject) => anyReplacement(x, replacement) - - forAll(fieldReplacement) - forAll(anyReplacement) - } - - "allow escaping arbitrary characters when 
serializing" in { - JsonAST.render( - JString("aaabbb"), - JsonAST.RenderSettings(0, Set('c')) - ) must not be matching("a".r) - } - - "escape bad JSON characters by default" in { - val allCharacters: String = - ('\u0000' to '\uffff').mkString("") - - val rendered = - JsonAST.render( - JString(allCharacters), - JsonAST.RenderSettings.compact - ) - - "[\u0000-\u0019]".r - .pattern - .matcher(rendered) - .find() must beFalse - } - - "allow escaping bad JavaScript characters when serializing" in { - val allCharacters = - ('\u0000' to '\uffff').mkString("") - - val rendered = - JsonAST.render( - JString(allCharacters), - JsonAST.RenderSettings.compactJs - ) - - "[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]".r - .pattern - .matcher(rendered) - .find() must beFalse - } - - "equals hashCode" in prop({ x: JObject => - val y = JObject(scala.util.Random.shuffle(x.obj)) - - x must_== y - x.## must_== y.## - }) - - "find all children" in { - val subject = JObject( - JField("alpha", JString("apple")) :: - JField("beta", JObject( - JField("alpha", JString("bacon")) :: - JField("charlie", JString("i'm a masseuse")) :: - Nil - )) :: - Nil - ) - - subject \\ "alpha" must_== - JObject( - JField("alpha", JString("apple")) :: - JField("alpha", JString("bacon")) :: - Nil - ) - subject \\ "charlie" must_== JObject(List(JField("charlie", JString("i'm a masseuse")))) - } - - private def reorderFields(json: JValue) = json map { - case JObject(xs) => JObject(xs.reverse) - case x => x - } - - private def typePredicate(clazz: Class[_])(json: JValue) = json match { - case x if x.getClass == clazz => true - case _ => false - } - - implicit def arbJValue: Arbitrary[JValue] = Arbitrary(genJValue) - implicit def arbJObject: Arbitrary[JObject] = Arbitrary(genObject) - implicit val arbJValueFn: Arbitrary[JValue=>JValue] = Arbitrary(genJValueFn) -} diff --git a/core/json/src/test/scala/net/liftweb/json/JsonFormatsSpec.scala b/core/json/src/test/scala/net/liftweb/json/JsonFormatsSpec.scala deleted file mode 100644 index 4256e85b53..0000000000 --- a/core/json/src/test/scala/net/liftweb/json/JsonFormatsSpec.scala +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright 2009-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package json - -import org.specs2.mutable.Specification - -/** - * System under specification for JSON Formats. 
- */ -class JsonFormatsSpec extends Specification with TypeHintExamples { - "JsonFormats Specification".title - - implicit val formats = ShortTypeHintExamples.formats + FullTypeHintExamples.formats.typeHints - - val hintsForFish = ShortTypeHintExamples.formats.typeHints.hintFor(classOf[Fish]) - val hintsForDog = ShortTypeHintExamples.formats.typeHints.hintFor(classOf[Dog]) - val hintsForAnimal = FullTypeHintExamples.formats.typeHints.hintFor(classOf[Animal]) - - "hintsFor across composite formats" in { - (formats.typeHints.hintFor(classOf[Fish]) mustEqual (hintsForFish)) and - (formats.typeHints.hintFor(classOf[Dog]) mustEqual (hintsForDog)) and - (formats.typeHints.hintFor(classOf[Animal]) mustEqual (hintsForAnimal)) - } - - "classFor across composite formats" in { - (formats.typeHints.classFor(hintsForFish) mustEqual (ShortTypeHintExamples.formats.typeHints.classFor(hintsForFish))) and - (formats.typeHints.classFor(hintsForDog) mustEqual (ShortTypeHintExamples.formats.typeHints.classFor(hintsForDog))) and - (formats.typeHints.classFor(hintsForAnimal) mustEqual (FullTypeHintExamples.formats.typeHints.classFor(hintsForAnimal))) - } - - "parameter name reading strategy can be changed" in { - object TestReader extends ParameterNameReader { - def lookupParameterNames(constructor: java.lang.reflect.Constructor[_]) = List("name", "age") - } - implicit val formats = new DefaultFormats { override val parameterNameReader = TestReader } - val json = parse("""{"name":"joe","age":35}""") - json.extract[NamesNotSameAsInJson] mustEqual NamesNotSameAsInJson("joe", 35) - } -} - -case class NamesNotSameAsInJson(n: String, a: Int) diff --git a/core/json/src/test/scala/net/liftweb/json/JsonParserSpec.scala b/core/json/src/test/scala/net/liftweb/json/JsonParserSpec.scala deleted file mode 100644 index 1703c45399..0000000000 --- a/core/json/src/test/scala/net/liftweb/json/JsonParserSpec.scala +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Copyright 2009-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package json - -import java.io.StringReader - -import org.specs2.mutable.Specification -import org.specs2.ScalaCheck -import org.scalacheck.{Arbitrary, Gen} -import org.scalacheck.Prop._ - -/** - * System under specification for JSON Parser. 
- */ -class JsonParserSpec extends Specification with JValueGen with ScalaCheck { - - private def parseBadThing(): String = try { - parse("""{"user":"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"<}""") - "x" * 1000 - } catch { - case e: Throwable => e.getMessage - } - - "JSON Parser Specification".title - - "Any valid json can be parsed" in { - val parsing = (json: JValue) => { parse(prettyRender(json)); true } - forAll(genJValue)(parsing) - } - - "Parsing is thread safe" in { - import java.util.concurrent._ - - val json = Examples.person - val executor = Executors.newFixedThreadPool(100) - val results = (0 to 100).map(_ => - executor.submit(new Callable[JValue] { def call = parse(json) })).toList.map(_.get) - results.zip(results.tail).forall(pair => pair._1 == pair._2) mustEqual true - } - - "All valid string escape characters can be parsed" in { - parse("[\"abc\\\"\\\\\\/\\b\\f\\n\\r\\t\\u00a0\"]") must_== JArray(JString("abc\"\\/\b\f\n\r\t\u00a0")::Nil) - } - - - "Parser does not bleed prior results" in { - parse("""{"a": "now is the time for all good men to come to the aid of their dog and eat dog food with other dogs and bark and woof and do dog things. now is the time for all good men to come to the aid of their dog and eat dog food with other dogs and bark and woof and do dog things. now is the time for all good men to come to the aid of their dog and eat dog food with other dogs and bark and woof and do dog things. now is the time for all good men to come to the aid of their dog and eat dog food with other dogs and bark and woof and do dog things. now is the time for all good men to come to the aid of their dog and eat dog food with other dogs and bark and woof and do dog things. now is the time for all good men to come to the aid of their dog and eat dog food with other dogs and bark and woof and do dog things. 
now is the time for all good men to come to the aid of their dog and eat dog food with other dogs and bark and woof and do dog things.now is the time for all good men to come to the aid of their dog and eat dog food with other dogs and bark and woof and do dog things"}""") - - val msg = parseBadThing() - - msg.length must be_<=(50) - } - - "Unclosed string literal fails parsing" in { - parseOpt("{\"foo\":\"sd") mustEqual None - parseOpt("{\"foo\":\"sd}") mustEqual None - } - - "The EOF has reached when the Reader returns EOF" in { - class StingyReader(s: String) extends java.io.StringReader(s) { - override def read(cbuf: Array[Char], off: Int, len: Int): Int = { - val c = read() - if (c == -1) -1 - else { - cbuf(off) = c.toChar - 1 - } - } - } - - val json = JsonParser.parse(new StingyReader(""" ["hello"] """)) - json mustEqual JArray(JString("hello") :: Nil) - } - - "Segment size does not change parsing result" in { - val bufSize = Gen.choose(2, 64) - val parsing = (x: JValue, s1: Int, s2: Int) => { parseVal(x, s1) == parseVal(x, s2) } - forAll(genObject, bufSize, bufSize)(parsing) - } - - implicit def arbJValue: Arbitrary[JValue] = Arbitrary(genObject) - - private def parseVal(json: JValue, bufSize: Int) = { - val segmentPool = new JsonParser.ArrayBlockingSegmentPool(bufSize) - JsonParser.parse(new JsonParser.Buffer( - new StringReader(compactRender(json)), - false, - segmentPool - )) - } -} diff --git a/core/json/src/test/scala/net/liftweb/json/JsonPrintingSpec.scala b/core/json/src/test/scala/net/liftweb/json/JsonPrintingSpec.scala deleted file mode 100644 index 350dec7f26..0000000000 --- a/core/json/src/test/scala/net/liftweb/json/JsonPrintingSpec.scala +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Copyright 2009-2010 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package json - -import scala.util.Random - -import org.specs2.mutable.Specification -import org.specs2.ScalaCheck -import org.scalacheck.Arbitrary -import org.scalacheck.Prop.forAll - - -/** - * System under specification for JSON Printing. 
- */ -class JsonPrintingSpec extends Specification with JValueGen with ScalaCheck { - "JSON Printing Specification".title - - "rendering does not change semantics" in { - val rendering = (json: JValue) => parse(JsonAST.prettyRender(json)) == parse(JsonAST.compactRender(json)) - forAll(rendering) - } - - "rendering special double values by default" should { - "render a standard double as is" in { - val double = Random.nextDouble - JsonAST.compactRender(JDouble(double)) must_== double.toString - } - - "render positive infinity as null" in { - JsonAST.compactRender(JDouble(Double.PositiveInfinity)) must_== "null" - } - - "render negative infinity as null" in { - JsonAST.compactRender(JDouble(Double.NegativeInfinity)) must_== "null" - } - - "render NaN as null" in { - JsonAST.compactRender(JDouble(Double.NaN)) must_== "null" - } - } - - "rendering special double values with as-is handling" should { - def render(json: JValue) = { - JsonAST.render( - json, - JsonAST.RenderSettings(0, doubleRenderer = JsonAST.RenderSpecialDoubleValuesAsIs) - ) - } - - "render a standard double as is" in { - val double = Random.nextDouble - render(JDouble(double)) must_== double.toString - } - - "render positive infinity as null" in { - render(JDouble(Double.PositiveInfinity)) must_== "Infinity" - } - - "render negative infinity as null" in { - render(JDouble(Double.NegativeInfinity)) must_== "-Infinity" - } - - "render NaN as null" in { - render(JDouble(Double.NaN)) must_== "NaN" - } - } - - "rendering special double values with special value exceptions enabled" should { - def render(json: JValue) = { - JsonAST.render( - json, - JsonAST.RenderSettings(0, doubleRenderer = JsonAST.FailToRenderSpecialDoubleValues) - ) - } - - "render a standard double as is" in { - val double = Random.nextDouble - render(JDouble(double)) must_== double.toString - } - - "throw an exception when attempting to render positive infinity" in { - render(JDouble(Double.PositiveInfinity)) must throwAn[IllegalArgumentException] - } - - "throw an exception when attempting to render negative infinity" in { - render(JDouble(Double.NegativeInfinity)) must throwAn[IllegalArgumentException] - } - - "throw an exception when attempting to render NaN" in { - render(JDouble(Double.NaN)) must throwAn[IllegalArgumentException] - } - } - - private def parse(json: String) = scala.util.parsing.json.JSON.parseRaw(json) - - implicit def arbDoc: Arbitrary[JValue] = Arbitrary(genJValue) -} diff --git a/core/json/src/test/scala/net/liftweb/json/JsonQueryExamples.scala b/core/json/src/test/scala/net/liftweb/json/JsonQueryExamples.scala deleted file mode 100644 index ff2fc8af90..0000000000 --- a/core/json/src/test/scala/net/liftweb/json/JsonQueryExamples.scala +++ /dev/null @@ -1,129 +0,0 @@ -/* - * Copyright 2009-2010 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package json - -import org.specs2.mutable.Specification - - -/** - * System under specification for JSON Query Examples. 
- */ -object JsonQueryExamples extends Specification { - "JSON Query Examples".title - - "List of IPs" in { - val ips = for { - JField("ip", JString(ip)) <- (json \\ "ip").obj - } yield { - ip - } - - ips mustEqual List("192.168.1.125", "192.168.1.126", "192.168.1.127", "192.168.2.125", "192.168.2.126") - } - - "List of IPs converted to XML" in { - val ipsList = (json \\ "ip").obj - - val ips = { - for { - field <-ipsList - JString(ip) <- field.value - } yield { ip } - } - - ips mustEqual 192.168.1.125192.168.1.126192.168.1.127192.168.2.125192.168.2.126 - } - - "List of IPs in cluster2" in { - val ips = for { - cluster @ JObject(x) <- json \ "data_center" - if (x contains JField("name", JString("cluster2"))) - JField("ip", JString(ip)) <- (cluster \\ "ip").obj - } yield { - ip - } - - ips mustEqual List("192.168.2.125", "192.168.2.126") - } - - "Total cpus in data center" in { - val computerCpuCount = for { - JField("cpus", JInt(x)) <- (json \\ "cpus").obj - } yield { - x - } - - computerCpuCount reduceLeft (_ + _) mustEqual 40 - } - - "Servers sorted by uptime" in { - case class Server(ip: String, uptime: Long) - - val servers = for { - JField("servers", JArray(servers)) <- (json \\ "servers").obj - JObject(server) <- servers - JField("ip", JString(ip)) <- server - JField("uptime", JInt(uptime)) <- server - } yield { - Server(ip, uptime.longValue) - } - - servers sortWith (_.uptime > _.uptime) mustEqual List(Server("192.168.1.127", 901214), Server("192.168.2.125", 453423), Server("192.168.2.126", 214312), Server("192.168.1.126", 189822), Server("192.168.1.125", 150123)) - } - - "Clusters administered by liza" in { - val clusters = for { - JObject(cluster) <- json - JField("admins", JArray(admins)) <- cluster - if admins contains JString("liza") - JField("name", JString(name)) <- cluster - } yield name - - clusters mustEqual List("cluster2") - } - - val json = parse(""" - { "data_center": [ - { - "name": "cluster1", - "servers": [ - {"ip": "192.168.1.125", "uptime": 150123, "specs": {"cpus": 8, "ram": 2048}}, - {"ip": "192.168.1.126", "uptime": 189822, "specs": {"cpus": 16, "ram": 4096}}, - {"ip": "192.168.1.127", "uptime": 901214, "specs": {"cpus": 8, "ram": 4096}} - ], - "links": [ - {"href": "http://www.example.com/admin", "name": "admin"}, - {"href": "http://www,example.com/home", "name": "home"} - ], - "admins": ["jim12", "joe", "maddog"] - }, - { - "name": "cluster2", - "servers": [ - {"ip": "192.168.2.125", "uptime": 453423, "specs": {"cpus": 4, "ram": 2048}}, - {"ip": "192.168.2.126", "uptime": 214312, "specs": {"cpus": 4, "ram": 2048}}, - ], - "links": [ - {"href": "http://www.example2.com/admin", "name": "admin"}, - {"href": "http://www,example2.com/home", "name": "home"} - ], - "admins": ["joe", "liza"] - } - ]} - """) -} diff --git a/core/json/src/test/scala/net/liftweb/json/JsonXmlSpec.scala b/core/json/src/test/scala/net/liftweb/json/JsonXmlSpec.scala deleted file mode 100644 index 17bccd3e64..0000000000 --- a/core/json/src/test/scala/net/liftweb/json/JsonXmlSpec.scala +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright 2009-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package json - -import org.specs2.mutable.Specification -import org.specs2.ScalaCheck -import org.scalacheck.Arbitrary -import org.scalacheck.Prop.forAll - - -/** - * System under specification for JSON XML. - */ -class JsonXmlSpec extends Specification with NodeGen with JValueGen with ScalaCheck { - "JSON XML Specification".title - - import Xml._ - import scala.xml.Node - - "Valid XML can be converted to JSON and back (symmetric op)" in { - val conversion = (xml: Node) => { toXml(toJson(xml)).head == xml } - forAll(conversion) - } - - "JSON can be converted to XML, and back to valid JSON (non symmetric op)" in { - val conversion = (json: JValue) => { parse(compactRender(toJson(toXml(json)))); true } - forAll(conversion) - } - - implicit def arbXml: Arbitrary[Node] = Arbitrary(genXml) - implicit def arbJValue: Arbitrary[JValue] = Arbitrary(genObject) -} diff --git a/core/json/src/test/scala/net/liftweb/json/LottoExample.scala b/core/json/src/test/scala/net/liftweb/json/LottoExample.scala deleted file mode 100644 index 885af08a77..0000000000 --- a/core/json/src/test/scala/net/liftweb/json/LottoExample.scala +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright 2009-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package json - -import org.specs2.mutable.Specification - - -object LottoExample extends Specification { - import JsonDSL._ - - implicit val formats = DefaultFormats - - case class Winner(`winner-id`: Long, numbers: List[Int]) - case class Lotto(id: Long, `winning-numbers`: List[Int], winners: List[Winner], - `draw-date`: Option[java.util.Date]) - - val winners = List(Winner(23, List(2, 45, 34, 23, 3, 5)), Winner(54, List(52, 3, 12, 11, 18, 22))) - val lotto = Lotto(5, List(2, 45, 34, 23, 7, 5, 3), winners, None) - - val json = - ("lotto" -> - ("id" -> lotto.id) ~ - ("winning-numbers" -> lotto.`winning-numbers`) ~ - ("draw-date" -> lotto.`draw-date`.map(_.toString)) ~ - ("winners" -> - lotto.winners.map { w => - (("winner-id" -> w.`winner-id`) ~ - ("numbers" -> w.numbers))})) - - "Parse Lotto" in { - - (compactRender(json) mustEqual """{"lotto":{"id":5,"winning-numbers":[2,45,34,23,7,5,3],"winners":[{"winner-id":23,"numbers":[2,45,34,23,3,5]},{"winner-id":54,"numbers":[52,3,12,11,18,22]}]}}""") and - ((json \ "lotto" \ "winners")(0).extract[Winner] mustEqual Winner(23, List(2, 45, 34, 23, 3, 5))) and - ((json \ "lotto").extract[Lotto] mustEqual lotto) and - (json.values mustEqual Map("lotto" -> Map("id" -> 5, - "winning-numbers" -> List(2, 45, 34, 23, 7, 5, 3), - "draw-date" -> None, - "winners" -> List(Map("winner-id" -> 23, - "numbers" -> List(2, 45, 34, 23, 3, 5)), - Map("winner-id" -> 54, "numbers" -> List(52, 3, 12, 11, 18, 22)))))) - } -} diff --git a/core/json/src/test/scala/net/liftweb/json/MergeExamples.scala b/core/json/src/test/scala/net/liftweb/json/MergeExamples.scala deleted file mode 100644 index 42308b3fa1..0000000000 --- a/core/json/src/test/scala/net/liftweb/json/MergeExamples.scala +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Copyright 2009-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package json - -import org.specs2.mutable.Specification - - - -object MergeExamples extends Specification { - "Merge Examples".title - - "Merge example" in { - (scala1 merge scala2) mustEqual expectedMergeResult - } - - val scala1 = parse(""" - { - "lang": "scala", - "year": 2006, - "tags": ["fp", "oo"], - "features": { - "key1":"val1", - "key2":"val2" - } - }""") - - val scala2 = parse(""" - { - "tags": ["static-typing","fp"], - "compiled": true, - "lang": "scala", - "features": { - "key2":"newval2", - "key3":"val3" - } - }""") - - val expectedMergeResult = parse(""" - { - "lang": "scala", - "year": 2006, - "tags": ["fp", "oo", "static-typing"], - "features": { - "key1":"val1", - "key2":"newval2", - "key3":"val3" - }, - "compiled": true, - }""") - - "Lotto example" in { - (lotto1 merge lotto2) mustEqual mergedLottoResult - } - - val lotto1 = parse(""" - { - "lotto":{ - "lotto-id":5, - "winning-numbers":[2,45,34,23,7,5,3] - "winners":[{ - "winner-id":23, - "numbers":[2,45,34,23,3,5] - }] - } - }""") - - val lotto2 = parse(""" - { - "lotto":{ - "winners":[{ - "winner-id":54, - "numbers":[52,3,12,11,18,22] - }] - } - }""") - - val mergedLottoResult = parse(""" - { - "lotto":{ - "lotto-id":5, - "winning-numbers":[2,45,34,23,7,5,3], - "winners":[{ - "winner-id":23, - "numbers":[2,45,34,23,3,5] - },{ - "winner-id":54, - "numbers":[52,3,12,11,18,22] - }] - } - }""") -} diff --git a/core/json/src/test/scala/net/liftweb/json/ParserBugs.scala b/core/json/src/test/scala/net/liftweb/json/ParserBugs.scala deleted file mode 100644 index e2389dfa05..0000000000 --- a/core/json/src/test/scala/net/liftweb/json/ParserBugs.scala +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright 2009-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package json - -import util.control.Exception._ - -import org.specs2.mutable.Specification - -object ParserBugs extends Specification { - "Unicode ffff is a valid char in string literal" in { - parseOpt(""" {"x":"\uffff"} """).isDefined mustEqual true - } - - "Does not allow colon at start of array (1039)" in { - parseOpt("""[:"foo", "bar"]""") mustEqual None - } - - "Does not allow colon instead of comma in array (1039)" in { - parseOpt("""["foo" : "bar"]""") mustEqual None - } - - "Solo quote mark should fail cleanly (not StringIndexOutOfBoundsException) (1041)" in { - JsonParser.parse("\"", discardParser) must throwA[JsonParser.ParseException].like { - case e => e.getMessage must startWith("unexpected eof") - } - } - - "Field names must be quoted" in { - val json = JObject(List(JField("foo\nbar", JInt(1)))) - val s = compactRender(json) - (s mustEqual """{"foo\nbar":1}""") and - (parse(s) mustEqual json) - } - - "Double in scientific notation with + can be parsed" in { - val json = JObject(List(JField("t", JDouble(12.3)))) - val s = """{"t" : 1.23e+1}""" - parse(s) mustEqual json - } - - private val discardParser = (p : JsonParser.Parser) => { - var token: JsonParser.Token = null - do { - token = p.nextToken - } while (token != JsonParser.End) - } -} diff --git a/core/json/src/test/scala/net/liftweb/json/PullParserExamples.scala b/core/json/src/test/scala/net/liftweb/json/PullParserExamples.scala deleted file mode 100644 index 69f66f4329..0000000000 --- a/core/json/src/test/scala/net/liftweb/json/PullParserExamples.scala +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright 2010-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package json - -import org.specs2.mutable.Specification - - -/** - * System under specification for JSON Pull Parser. 
- */ -object PullParserExamples extends Specification { - "JSON Pull Parser Examples".title - import JsonParser._ - - "Pull parsing example" in { - val parser = (p: Parser) => { - def parse: BigInt = p.nextToken match { - case FieldStart("postalCode") => p.nextToken match { - case IntVal(code) => code - case _ => p.fail("expected int") - } - case End => p.fail("no field named 'postalCode'") - case _ => parse - } - - parse - } - - val postalCode = parse(json, parser) - postalCode mustEqual 10021 - } - - val json = """ - { - "firstName": "John", - "lastName": "Smith", - "address": { - "streetAddress": "21 2nd Street", - "city": "New York", - "state": "NY", - "postalCode": 10021 - }, - "phoneNumbers": [ - { "type": "home", "number": "212 555-1234" }, - { "type": "fax", "number": "646 555-4567" } - ], - "newSubscription": false, - "companyName": null - }""" -} diff --git a/core/json/src/test/scala/net/liftweb/json/SerializationBugs.scala b/core/json/src/test/scala/net/liftweb/json/SerializationBugs.scala deleted file mode 100644 index b7184ca7de..0000000000 --- a/core/json/src/test/scala/net/liftweb/json/SerializationBugs.scala +++ /dev/null @@ -1,208 +0,0 @@ -/* - * Copyright 2009-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package json - -import org.specs2.mutable.Specification -import java.util.UUID - -object SerializationBugs extends Specification { - import Serialization.{read, write => swrite} - - implicit val formats = Serialization.formats(NoTypeHints) - - "plan1.Plan can be serialized (issue 341)" in { - import plan1._ - - val game = Game(Map("a" -> Plan(Some(Action(1, None))))) - val ser = swrite(game) - read[Game](ser) mustEqual game - } - - "plan2.Plan can be serialized (issue 341)" in { - import plan2._ - - val g1 = Game(Map("a" -> Plan(Some(Action("f1", "s", Array(), None)), - Some("A"), - Some(Action("f2", "s2", Array(0, 1, 2), None))))) - val ser = swrite(g1) - val g2 = read[Game](ser) - val plan = g2.buy("a") - val leftOp = plan.leftOperand.get - val rightOp = plan.rightOperand.get - - (g2.buy.size mustEqual 1) and - (leftOp.functionName mustEqual "f1") and - (leftOp.symbol mustEqual "s") and - (leftOp.inParams.toList mustEqual Nil) and - (leftOp.subOperand mustEqual None) and - (plan.operator mustEqual Some("A")) and - (rightOp.functionName mustEqual "f2") and - (rightOp.symbol mustEqual "s2") and - (rightOp.inParams.toList mustEqual List(0, 1, 2)) and - (rightOp.subOperand mustEqual None) - } - - "null serialization bug" in { - val x = new X(null) - val ser = swrite(x) - read[X](ser) mustEqual x - } - - "StackOverflowError with large Lists" in { - val xs = LongList(List.fill(5000)(0).map(Num)) - val ser = swrite(xs) - read[LongList](ser).xs.length mustEqual 5000 - } - - "Custom serializer should work with Option" in { - class UUIDFormat extends Serializer[UUID] { - val UUIDClass = classOf[UUID] - - def deserialize(implicit format: Formats): PartialFunction[(TypeInfo, JValue), UUID] = { - case (TypeInfo(UUIDClass, _), JString(x)) => UUID.fromString(x) - } - - def serialize(implicit format: Formats): PartialFunction[Any, JValue] = { - case x: UUID => JString(x.toString) - } - } - - implicit val formats = Serialization.formats(NoTypeHints) + new UUIDFormat - val o1 = OptionalUUID(None) - val o2 = OptionalUUID(Some(UUID.randomUUID)) - - (read[OptionalUUID](swrite(o1)) mustEqual o1) and - (read[OptionalUUID](swrite(o2)) mustEqual o2) - } - - "TypeInfo is not correctly constructed for customer serializer -- 970" in { - class SeqFormat extends Serializer[Seq[_]] { - val SeqClass = classOf[Seq[_]] - - def serialize(implicit format: Formats) = { - case seq: Seq[_] => JArray(seq.toList.map(Extraction.decompose)) - } - - def deserialize(implicit format: Formats) = { - case (TypeInfo(SeqClass, parameterizedType), JArray(xs)) => - val typeInfo = TypeInfo(parameterizedType - .map(_.getActualTypeArguments()(0)) - .getOrElse(failure("No type parameter info for type Seq")).asInstanceOf[Class[_]], None) - xs.map(x => Extraction.extract(x, typeInfo)) - } - } - - implicit val formats = DefaultFormats + new SeqFormat - - val seq = Seq(1, 2, 3) - val ser = Extraction.decompose(seq) - Extraction.extract[Seq[Int]](ser) mustEqual seq - } - - "Serialization of an opaque value should not fail" in { - val o = Opaque(JObject(JField("some", JString("data")) :: Nil)) - val ser = Serialization.write(o) - ser mustEqual """{"x":{"some":"data"}}""" - } - - "Map with Map value" in { - val a = Map("a" -> Map("a" -> 5)) - val b = Map("b" -> 1) - val str = Serialization.write(MapWithMap(a, b)) - read[MapWithMap](str) mustEqual MapWithMap(a, b) - } - - "Either can't be deserialized with type hints" in { - implicit val formats = DefaultFormats + FullTypeHints(classOf[Either[_, _]] :: Nil) - val x = 
Eith(Left("hello")) - val s = Serialization.write(x) - read[Eith](s) mustEqual x - } - - "Custom serializer should work as Map key (scala 2.9) (issue #1077)" in { - class SingleOrVectorSerializer extends Serializer[SingleOrVector[Double]] { - private val singleOrVectorClass = classOf[SingleOrVector[Double]] - - def deserialize(implicit format: Formats) = { - case (TypeInfo(`singleOrVectorClass`, _), json) => json match { - case JObject(List(JField("val", JDouble(x)))) => SingleValue(x) - case JObject(List(JField("val", JArray(xs: List[_])))) => - VectorValue(xs.asInstanceOf[List[JDouble]].map(_.num).toIndexedSeq) - case x => throw new MappingException("Can't convert " + x + " to SingleOrVector") - } - } - - def serialize(implicit format: Formats) = { - case SingleValue(x: Double) => JObject(List(JField("val", JDouble(x)))) - case VectorValue(x: Vector[_]) => - JObject(List(JField("val", JArray(x.asInstanceOf[Vector[Double]].toList.map(JDouble(_)))))) - } - } - - implicit val formats = DefaultFormats + new SingleOrVectorSerializer - - val ser = swrite(MapHolder(Map("hello" -> SingleValue(2.0)))) - read[MapHolder](ser) mustEqual MapHolder(Map("hello" -> SingleValue(2.0))) - } - - "Constructor memoization should not ignore type parameters" in { - val jsonA = """ { "data": { "foo": "string" }, "success": true } """ - val jsonB = """ { "data": { "bar": "string" }, "success": true } """ - - (read[SomeContainer[TypeA]](jsonA) mustEqual SomeContainer(TypeA("string"))) and - (read[SomeContainer[TypeB]](jsonB) mustEqual SomeContainer(TypeB("string"))) - } -} - -case class TypeA(foo: String) -case class TypeB(bar: String) -case class SomeContainer[D](data: D) - -case class Eith(x: Either[String, Int]) - -case class MapWithMap(a: Map[String, Map[String, Int]], b: Map[String, Int]) - -case class LongList(xs: List[Num]) -case class Num(x: Int) - -case class X(yy: Y) -case class Y(ss: String) - -case class OptionalUUID(uuid: Option[UUID]) - -package plan1 { - case class Plan(plan: Option[Action]) - case class Game(game: Map[String, Plan]) - case class Action(id: Int, subAction: Option[Action]) -} - -package plan2 { - case class Plan(leftOperand: Option[Action], operator: Option[String], - rightOperand: Option[Action]) - case class Game(buy: Map[String, Plan]) - case class Action(functionName: String, symbol: String, - inParams: Array[Number], subOperand: Option[Action]) -} - -case class Opaque(x: JValue) - -sealed trait SingleOrVector[A] -case class SingleValue[A](value: A) extends SingleOrVector[A] -case class VectorValue[A](value: IndexedSeq[A]) extends SingleOrVector[A] - -case class MapHolder(a: Map[String, SingleOrVector[Double]]) diff --git a/core/json/src/test/scala/net/liftweb/json/SerializationExamples.scala b/core/json/src/test/scala/net/liftweb/json/SerializationExamples.scala deleted file mode 100644 index b5ebb8a851..0000000000 --- a/core/json/src/test/scala/net/liftweb/json/SerializationExamples.scala +++ /dev/null @@ -1,423 +0,0 @@ -/* - * Copyright 2009-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package json - -import java.util.Date -import org.specs2.mutable.Specification - -object SerializationExamples extends Specification { - import Serialization.{read, write => swrite} - - implicit val formats = Serialization.formats(NoTypeHints) - - val project = Project("test", new Date, Some(Language("Scala", 2.75)), List( - Team("QA", List(Employee("John Doe", 5), Employee("Mike", 3))), - Team("Impl", List(Employee("Mark", 4), Employee("Mary", 5), Employee("Nick Noob", 1))))) - - "Project serialization example" in { - val ser = swrite(project) - read[Project](ser) mustEqual project - } - - case class Project(name: String, startDate: Date, lang: Option[Language], teams: List[Team]) - case class Language(name: String, version: Double) - case class Team(role: String, members: List[Employee]) - case class Employee(name: String, experience: Int) - - "Null example" in { - val ser = swrite(Nullable(null)) - read[Nullable](ser) mustEqual Nullable(null) - } - - case class Nullable(name: String) - - "Lotto serialization example" in { - import LottoExample.{Lotto, lotto} - - val ser = swrite(lotto) - read[Lotto](ser) mustEqual lotto - } - - "Primitive-wrapping case class serialization example" in { - val primitives = Primitives(124, 123L, 126.5, 127.5.floatValue, "128", 's, 125, 129.byteValue, true) - val ser = swrite(primitives) - read[Primitives](ser) mustEqual primitives - } - - "Primitive Int serialization" in { - read[Int](swrite(42)) mustEqual 42 - } - - "Primitive Long serialization" in { - read[Long](swrite(42L)) mustEqual 42L - } - - "Primitive Double serialization" in { - read[Double](swrite(3.14)) mustEqual 3.14 - } - - "Primitive Float serialization" in { - read[Float](swrite(3.14.floatValue)) mustEqual 3.14.floatValue - } - - "Primitive Short serialization" in { - read[Short](swrite(88.shortValue)) mustEqual 88.shortValue - } - - "Primitive Byte serialization" in { - read[Byte](swrite(129.byteValue)) mustEqual 129.byteValue - } - - "Primitive Boolean serialization" in { - read[Boolean](swrite(false)) mustEqual false - } - - "String serialization" in { - val rt = "Roll Tide, Farmer!!" 
- read[String](swrite(rt)) mustEqual rt - } - - "Symbol serialization" in { - read[Symbol](swrite('j)) mustEqual 'j - } - - "Multidimensional list example" in { - val ints = Ints(List(List(1, 2), List(3), List(4, 5))) - val ser = swrite(ints) - read[Ints](ser) mustEqual ints - } - - "Map serialization example" in { - val p = PersonWithAddresses("joe", Map("address1" -> Address("Bulevard", "Helsinki"), - "address2" -> Address("Soho", "London"))) - val ser = swrite(p) - read[PersonWithAddresses](ser) mustEqual p - } - - "Recursive type serialization example" in { - val r1 = Rec(1, Nil) - val r2 = Rec(2, Nil) - val r3 = Rec(3, r1 :: r2 :: Nil) - - val ser = swrite(r3) - read[Rec](ser) mustEqual r3 - } - - "Set serialization example" in { - val s = SetContainer(Set("foo", "bar")) - val ser = swrite(s) - read[SetContainer](ser) mustEqual s - } - - "Array serialization example" in { - val s = ArrayContainer(Array("foo", "bar")) - val ser = swrite(s); - val unser = read[ArrayContainer](ser) - s.array.toList mustEqual unser.array.toList - } - - "Seq serialization example" in { - val s = SeqContainer(List("foo", "bar")) - val ser = swrite(s) - read[SeqContainer](ser) mustEqual s - } - - "Option serialization example" in { - val ser = swrite(Some(List(1, 2))) - (read[Option[List[Int]]](ser) mustEqual Some(List(1, 2))) and - (read[Option[List[Int]]]("") mustEqual None) - } - - "None Option of tuple serialization example" in { - // This is a regression test case, failed in lift json - val s = OptionOfTupleOfDouble(None) - val ser = swrite(s) - read[OptionOfTupleOfDouble](ser) mustEqual s - } - - "Case class with internal state example" in { - val m = Members("s", 1) - val ser = swrite(m) - (ser mustEqual """{"x":"s","y":1}""") and - (read[Members](ser) mustEqual m) - } - - "Case class from type constructors example" in { - val p = ProperType(TypeConstructor(Chicken(10)), Pair(25, Player("joe"))) - val ser = swrite(p) - read[ProperType](ser) mustEqual p - } - - case class Ints(x: List[List[Int]]) - - case class Rec(n: Int, xs: List[Rec]) - - case class Members(x: String, y: Int) { - val foo1 = "foo" - lazy val foo2 = "foo" - } -} - -object ShortTypeHintExamples extends TypeHintExamples { - implicit val formats = Serialization.formats(ShortTypeHints(classOf[Fish] :: classOf[Dog] :: Nil)) - - "Deserialization succeeds even if jsonClass is not the first field" in { - val ser = """{"animals":[],"pet":{"name":"pluto","jsonClass":"Dog"}}""" - Serialization.read[Animals](ser) mustEqual Animals(Nil, Dog("pluto")) - } -} - -object FullTypeHintExamples extends TypeHintExamples { - import Serialization.{read, write => swrite} - - implicit val formats = Serialization.formats(FullTypeHints(List[Class[_]](classOf[Animal], classOf[True], classOf[False], classOf[Falcon], classOf[Chicken]))) - - "Ambiguous field decomposition example" in { - val a = Ambiguous(False()) - - val ser = swrite(a) - read[Ambiguous](ser) mustEqual a - } - - "Ambiguous parameterized field decomposition example" in { - val o = AmbiguousP(Chicken(23)) - - val ser = swrite(o) - read[AmbiguousP](ser) mustEqual o - } - - "Option of ambiguous field decomposition example" in { - val o = OptionOfAmbiguous(Some(True())) - - val ser = swrite(o) - read[OptionOfAmbiguous](ser) mustEqual o - } - - "Option of ambiguous parameterized field decomposition example" in { - val o = OptionOfAmbiguousP(Some(Falcon(200.0))) - - val ser = swrite(o) - read[OptionOfAmbiguousP](ser) mustEqual o - } -} - -object CustomTypeHintFieldNameExample extends TypeHintExamples { - 
import Serialization.{read, write => swrite} - - implicit val formats = new Formats { - val dateFormat = DefaultFormats.lossless.dateFormat - override val typeHints = ShortTypeHints(classOf[Fish] :: classOf[Dog] :: Nil) - override val typeHintFieldName = "$type$" - } - - "Serialized JSON contains configured field name" in { - val animals = Animals(Dog("pluto") :: Fish(1.2) :: Nil, Dog("pluto")) - val ser = swrite(animals) - ser mustEqual """{"animals":[{"$type$":"Dog","name":"pluto"},{"$type$":"Fish","weight":1.2}],"pet":{"$type$":"Dog","name":"pluto"}}""" - } -} - -trait TypeHintExamples extends Specification { - import Serialization.{read, write => swrite} - - implicit val formats: Formats - - "Polymorphic List serialization example" in { - val animals = Animals(Dog("pluto") :: Fish(1.2) :: Dog("devil") :: Nil, Dog("pluto")) - val ser = swrite(animals) - read[Animals](ser) mustEqual animals - } - - "Parameterized type serialization example" in { - val objs = Objs(Obj(Fish(1.2)) :: Obj(Dog("pluto")) :: Nil) - val ser = swrite(objs) - read[Objs](ser) mustEqual objs - } - - "Tuple serialization example" in { - val t: (Animal, Animal) = (Fish(1.5), Dog("pluto")) - val ser = swrite(t) - read[(Animal, Animal)](ser) mustEqual t - } -} - -case class Animals(animals: List[Animal], pet: Animal) -trait Animal -case class Dog(name: String) extends Animal -case class Fish(weight: Double) extends Animal - -case class Objs(objects: List[Obj[_]]) -case class Obj[A](a: A) - -object CustomSerializerExamples extends Specification { - import Serialization.{read, write => swrite} - import JsonAST._ - import java.util.regex.Pattern - - class IntervalSerializer extends CustomSerializer[Interval](format => ( - { - case JObject(JField("start", JInt(s)) :: JField("end", JInt(e)) :: Nil) => - new Interval(s.longValue, e.longValue) - }, - { - case x: Interval => - JObject(JField("start", JInt(BigInt(x.startTime))) :: - JField("end", JInt(BigInt(x.endTime))) :: Nil) - } - )) - - class PatternSerializer extends CustomSerializer[Pattern](format => ( - { - case JObject(JField("$pattern", JString(s)) :: Nil) => Pattern.compile(s) - }, - { - case x: Pattern => JObject(JField("$pattern", JString(x.pattern)) :: Nil) - } - )) - - class DateSerializer extends CustomSerializer[Date](format => ( - { - case JObject(List(JField("$dt", JString(s)))) => - format.dateFormat.parse(s).getOrElse(throw new MappingException("Can't parse "+ s + " to Date")) - }, - { - case x: Date => JObject(JField("$dt", JString(format.dateFormat.format(x))) :: Nil) - } - )) - - class IndexedSeqSerializer extends Serializer[IndexedSeq[_]] { - def deserialize(implicit formats: Formats) = { - case (TypeInfo(clazz, ptype), json) if classOf[IndexedSeq[_]].isAssignableFrom(clazz) => json match { - case JArray(xs) => - val t = ptype.getOrElse(throw new MappingException("parameterized type not known")) - xs.map(x => Extraction.extract(x, TypeInfo(t.getActualTypeArguments()(0).asInstanceOf[Class[_]], None))).toIndexedSeq - case x => throw new MappingException("Can't convert " + x + " to IndexedSeq") - } - } - - def serialize(implicit formats: Formats) = { - case i: IndexedSeq[_] => JArray(i.map(Extraction.decompose).toList) - } - } - - implicit val formats = Serialization.formats(NoTypeHints) + - new IntervalSerializer + new PatternSerializer + new DateSerializer + new IndexedSeqSerializer - - "Interval serialization example" in { - val i = new Interval(1, 4) - val ser = swrite(i) - ser mustEqual """{"start":1,"end":4}""" - val i2 = read[Interval](ser) - 
i2.startTime mustEqual i.startTime - i2.endTime mustEqual i.endTime - } - - "Pattern serialization example" in { - val pat = Pattern.compile("^Curly") - val pser = swrite(pat) - pser mustEqual """{"$pattern":"^Curly"}""" - read[Pattern](pser).pattern mustEqual pat.pattern - } - - "Date serialization example" in { - val d = new Date(0) - val dser = swrite(d) - dser mustEqual """{"$dt":"1970-01-01T00:00:00.000Z"}""" - read[Date](dser) mustEqual d - } - - "Indexed serialization example" in { - val xs = Indexed(Vector("a", "b", "c")) - val iser = swrite(xs) - iser mustEqual """{"xs":["a","b","c"]}""" - read[Indexed](iser).xs.toList mustEqual List("a","b","c") - } -} - -case class Indexed(xs: IndexedSeq[String]) - -class Interval(start: Long, end: Long) { - val startTime = start - val endTime = end -} - -object CustomClassWithTypeHintsExamples extends Specification { - import Serialization.{read, write => swrite} - import JsonAST._ - - val hints = new ShortTypeHints(classOf[DateTime] :: Nil) { - override def serialize: PartialFunction[Any, JObject] = { - case t: DateTime => JObject(JField("t", JInt(t.time)) :: Nil) - } - - override def deserialize: PartialFunction[(String, JObject), Any] = { - case ("DateTime", JObject(JField("t", JInt(t)) :: Nil)) => new DateTime(t.longValue) - } - } - implicit val formats = Serialization.formats(hints) - - "Custom class serialization using provided serialization and deserialization functions" in { - val m = Meeting("The place", new DateTime(1256681210802L)) - val ser = swrite(m) - val m2 = read[Meeting](ser) - m.place mustEqual m2.place - m.time.time mustEqual m2.time.time - } - - "List of custom classes example" in { - val ts = Times(List(new DateTime(123L), new DateTime(234L))) - val ser = swrite(ts) - val ts2 = read[Times](ser) - ts2.times(0).time mustEqual 123L - ts2.times(1).time mustEqual 234L - ts2.times.size mustEqual 2 - } -} - -case class Meeting(place: String, time: DateTime) -class DateTime(val time: Long) - -case class Times(times: List[DateTime]) - -sealed abstract class Bool -case class True() extends Bool -case class False() extends Bool -case class Ambiguous(child: Bool) - -trait Bird -case class Falcon(weight: Double) extends Bird -case class Chicken(eggs: Int) extends Bird - -case class AmbiguousP(bird: Bird) - -case class OptionOfAmbiguous(opt: Option[Bool]) - -case class OptionOfAmbiguousP(opt: Option[Bird]) - -case class SetContainer(set: Set[String]) - -case class ArrayContainer(array: Array[String]) - -case class SeqContainer(seq: Seq[String]) - -case class OptionOfTupleOfDouble(position: Option[Tuple2[Double, Double]]) - -case class Player(name: String) -case class TypeConstructor[A](x: A) -case class Pair[A, B](fst: A, snd: B) -case class ProperType(x: TypeConstructor[Chicken], t: Pair[Int, Player]) diff --git a/core/json/src/test/scala/net/liftweb/json/XmlBugs.scala b/core/json/src/test/scala/net/liftweb/json/XmlBugs.scala deleted file mode 100644 index a44ff3ffa3..0000000000 --- a/core/json/src/test/scala/net/liftweb/json/XmlBugs.scala +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright 2009-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package json - -import org.specs2.mutable.Specification - - -object XmlBugs extends Specification { - import Xml._ - import scala.xml.{Group, Text} - - "HarryH's XML parses correctly" in { - val xml1 = 123 - val xml2 = {"1"}{"23"} - Xml.toJson(xml1) must_== Xml.toJson(xml2) - } - - "HarryH's XML with attributes parses correctly" in { - val json = toJson(10) - compactRender(json) mustEqual """{"tips":{"group":{"type":"Nearby","tip":{"id":"10"}}}}""" - } - - "Jono's XML with attributes parses correctly" in { - val example1 = content - val expected1 = """{"word":"content","self":"http://localhost:8080/word/example","term":"example","available":"true"}""" - - val example2 = - val expected2 = """{"self":"http://localhost:8080/word/example","term":"example","available":"true"}""" - - (toJson(example1) diff parse(expected1)) mustEqual Diff(JNothing, JNothing, JNothing) - (toJson(example2) diff parse(expected2)) mustEqual Diff(JNothing, JNothing, JNothing) - } - - "Nodes with attributes converted to correct JSON" in { - val xml = - - - - - val expected = """{"root":{"n":[{"x":"abc","id":"10"},{"x":"bcd","id":"11"}]}}""" - val expected210 = """{"root":{"n":[{"id":"10","x":"abc"},{"id":"11","x":"bcd"}]}}""" - val json = compactRender(toJson(xml)) - (json == expected || json == expected210) mustEqual true - } - - "XML with empty node is converted correctly to JSON" in { - val xml = - xxxyyy - val expected = """{"tips":{"group":[{"type":"Foo"},{"type":"Bar","tip":[{"text":"xxx"},{"text":"yyy"}]}]}}""" - compactRender(toJson(xml)) mustEqual expected - } -} diff --git a/core/json/src/test/scala/net/liftweb/json/XmlExamples.scala b/core/json/src/test/scala/net/liftweb/json/XmlExamples.scala deleted file mode 100644 index b09b5cf4ff..0000000000 --- a/core/json/src/test/scala/net/liftweb/json/XmlExamples.scala +++ /dev/null @@ -1,188 +0,0 @@ -/* - * Copyright 2009-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package json - -import org.specs2.mutable.Specification - -object XmlExamples extends Specification { - "XML Examples".title - import JsonDSL._ - import Xml._ - import scala.xml.{Group, Text} - - "Basic conversion example" in { - val json = toJson(users1) - compactRender(json) mustEqual """{"users":{"count":"2","user":[{"disabled":"true","id":"1","name":"Harry"},{"id":"2","name":"David","nickname":"Dave"}]}}""" - } - - "Conversion transformation example 1" in { - val json = toJson(users1).transformField { - case JField("id", JString(s)) => JField("id", JInt(s.toInt)) - } - compactRender(json) mustEqual """{"users":{"count":"2","user":[{"disabled":"true","id":1,"name":"Harry"},{"id":2,"name":"David","nickname":"Dave"}]}}""" - } - - "Conversion transformation example 2" in { - val json = toJson(users2).transformField { - case JField("id", JString(s)) => JField("id", JInt(s.toInt)) - case JField("user", x: JObject) => JField("user", JArray(x :: Nil)) - } - compactRender(json) mustEqual """{"users":{"user":[{"id":1,"name":"Harry"}]}}""" - } - - "Primitive array example" in { - val xml = abc - compactRender(toJson(xml)) mustEqual """{"chars":{"char":["a","b","c"]}}""" - } - - "Lotto example which flattens number arrays into encoded string arrays" in { - def flattenArray(nums: List[JValue]) = JString(nums.map(_.values).mkString(",")) - - val printer = new scala.xml.PrettyPrinter(100,2) - val lotto: JObject = LottoExample.json - val xml = toXml(lotto.transformField { - case JField("winning-numbers", JArray(nums)) => JField("winning-numbers", flattenArray(nums)) - case JField("numbers", JArray(nums)) => JField("numbers", flattenArray(nums)) - }) - - printer.format(xml(0)) mustEqual printer.format( - - 5 - 2,45,34,23,7,5,3 - - 23 - 2,45,34,23,3,5 - - - 54 - 52,3,12,11,18,22 - - ) - } - - "Band example with namespaces" in { - val json = toJson(band) - json mustEqual parse("""{ - "b:band":{ - "name":"The Fall", - "genre":"rock", - "influence":"", - "playlists":{ - "playlist":[{ - "name":"hits", - "song":["Hit the north","Victoria"] - },{ - "name":"mid 80s", - "song":["Eat your self fitter","My new house"] - }] - } - } -}""") - } - - val band = - - The Fall - rock - - - - Hit the north - Victoria - - - Eat your self fitter - My new house - - - - - "Grouped text example" in { - val json = toJson(groupedText) - compactRender(json) mustEqual """{"g":{"group":"foobar","url":"http://example.com/test"}}""" - } - - val users1 = - - - 1 - Harry - - - 2 - David - - - - val users2 = - - - 1 - Harry - - - - val url = "test" - val groupedText = - - { Group(List(Text("foo"), Text("bar"))) } - http://example.com/{ url } - - - // Examples by Jonathan Ferguson. See http://groups.google.com/group/liftweb/browse_thread/thread/f3bdfcaf1c21c615/c311a91e44f9c178?show_docid=c311a91e44f9c178 - // This example shows how to use a transformation function to correct JSON generated by - // default conversion rules. The transformation function 'attrToObject' makes following conversion: - // { ..., "fieldName": "", "attrName":"someValue", ...} -> - // { ..., "fieldName": { "attrName": f("someValue") }, ... 
} - def attrToObject(fieldName: String, attrName: String, f: JString => JValue)(json: JValue) = json.transformField { - case JField(n, v: JString) if n == attrName => JField(fieldName, JObject(JField(n, f(v)) :: Nil)) - case JField(n, JString("")) if n == fieldName => JField(n, JNothing) - } transformField { - case JField(n, x: JObject) if n == attrName => JField(fieldName, x) - } - - "Example with multiple attributes, multiple nested elements " in { - val a1 = attrToObject("stats", "count", s => JInt(s.s.toInt)) _ - val a2 = attrToObject("messages", "href", identity) _ - val json = a1(a2(toJson(messageXml1))) - (json diff parse(expected1)) mustEqual Diff(JNothing, JNothing, JNothing) - } - - "Example with one attribute, one nested element " in { - val a = attrToObject("stats", "count", s => JInt(s.s.toInt)) _ - compactRender(a(toJson(messageXml2))) mustEqual expected2 - compactRender(a(toJson(messageXml3))) mustEqual expected2 - } - - val messageXml1 = - - - - - - val expected1 = """{"message":{"expiry_date":"20091126","word":"ant","text":"text","self":"me","stats":{"count":0},"messages":{"href":"https://domain.com/message/ant"}}}""" - - val messageXml2 = - - - - - val messageXml3 = - - val expected2 = """{"message":{"expiry_date":"20091126","stats":{"count":0}}}""" -} diff --git a/core/markdown/src/main/scala/net/liftweb/markdown/BaseParsers.scala b/core/markdown/src/main/scala/net/liftweb/markdown/BaseParsers.scala index 75ce5bb4f6..82e65359c8 100644 --- a/core/markdown/src/main/scala/net/liftweb/markdown/BaseParsers.scala +++ b/core/markdown/src/main/scala/net/liftweb/markdown/BaseParsers.scala @@ -20,10 +20,9 @@ package net.liftweb.markdown */ import scala.language.postfixOps - -import util.parsing.json.Parser import util.parsing.combinator.RegexParsers import collection.SortedMap +import scala.annotation.nowarn /** * Basic parsers for Markdown Source. @@ -148,10 +147,10 @@ trait BaseParsers extends RegexParsers { if (in.atEnd) Failure("End of input.", in) else { val c = in.first - val lower:SortedMap[Char,Char] = rs.to(c) + val lower:SortedMap[Char,Char] = rs.rangeTo(c) val (begin:Char, end:Char) = if (lower.isEmpty) ('\u0001', '\u0000') //this invalid pair always causes failure else lower.last - + if (begin <= c && c <= end) Success(c, in.rest) else Failure(verboseString(c) + " not in range " + verboseString(begin) + " - " + verboseString(end), @@ -161,7 +160,7 @@ trait BaseParsers extends RegexParsers { /** * Succeeds if the given parsers succeeds and the given function is defined at the parse result. - * Returns the result of the method applied to the given parsers result. + * Returns the result of the method applied to the given parsers result. */ def acceptMatch[S,T](f:PartialFunction[S,T])(p:Parser[S]):Parser[T] = Parser { in => p(in) match { @@ -237,12 +236,13 @@ trait BaseParsers extends RegexParsers { def xmlNameChar:Parser[Char] = ranges(xmlNameCharRanges) /** Parses an XML name (tag or attribute name) */ - def xmlName:Parser[String] = xmlNameStartChar ~ (xmlNameChar*) ^^ {case c ~ cs => c + cs.mkString} + def xmlName:Parser[String] = xmlNameStartChar ~ (xmlNameChar*) ^^ {case c ~ cs => s"${c}${cs.mkString}"} /** Parses a Simplified xml attribute: everything between quotes ("foo") * everything between the quotes is run through the escape handling * That way you can omit xml escaping when writing inline XML in markdown. 
*/ - def xmlAttrVal:Parser[String] = + @nowarn("msg=method \\+ in class Char is deprecated.*") // keeping + because it's just much more readable in that case + def xmlAttrVal:Parser[String] = ('"' ~> ((not('"') ~> aChar)*) <~ '"' ^^ {'"' + _.mkString + '"' }) | ('\'' ~> ((not('\'') ~> aChar)*) <~ '\'' ^^ {'\'' + _.mkString + '\''}) /** Parses an XML Attribute with simplified value handling like xmlAttrVal. @@ -253,7 +253,7 @@ trait BaseParsers extends RegexParsers { /** Parses an xml start or empty tag, attribute values are escaped. */ def xmlStartOrEmptyTag:Parser[String] = '<' ~> xmlName ~ (xmlAttr*) ~ ows ~ (">" | "/>") ^^ { - case name ~ attrs ~ w ~ e => '<' + name + attrs.mkString + w + e + case name ~ attrs ~ w ~ e => s"<${name}${attrs.mkString}${w}${e}" } /** Parses closing xml tags. diff --git a/core/markdown/src/main/scala/net/liftweb/markdown/BlockParsers.scala b/core/markdown/src/main/scala/net/liftweb/markdown/BlockParsers.scala index b68b586cd2..a6a7e77619 100644 --- a/core/markdown/src/main/scala/net/liftweb/markdown/BlockParsers.scala +++ b/core/markdown/src/main/scala/net/liftweb/markdown/BlockParsers.scala @@ -46,14 +46,14 @@ trait BlockParsers extends Parsers { /** * returns the current indentation string repeated the given number of levels */ - def indent(level:Int):String = deco.indentation * level + def indent(level:Int):String = deco().indentation() * level private val tokenizer = new LineTokenizer() /** A markdown block element. */ sealed abstract class MarkdownBlock extends InlineParsers{ - override def deco = BlockParsers.this.deco + override def deco() = BlockParsers.this.deco() /** adds the resulting xhtml snippet to the given string builder */ @@ -77,24 +77,24 @@ trait BlockParsers extends Parsers { * Represents a block of verbatim xml */ class VerbatimXml(line:XmlChunk) extends MarkdownBlock { - def addResult(level:Int, out:StringBuilder) {out.append(line.content)} + def addResult(level:Int, out:StringBuilder): Unit = {out.append(line.content)} } /** * Represents a horizontal ruler */ object Ruler extends MarkdownBlock { - def addResult(level:Int, out:StringBuilder) {out.append(indent(level)).append(deco.decorateRuler)} + def addResult(level:Int, out:StringBuilder): Unit = {out.append(indent(level)).append(this.deco().decorateRuler())} } /** * Represents a header */ case class Header(content:String, headerLevel:Int, lookup:Map[String, LinkDefinition]) extends MarkdownBlock{ - def addResult(level:Int, out:StringBuilder) { - out.append(indent(level)).append(deco.decorateHeaderOpen(headerLevel)) + def addResult(level:Int, out:StringBuilder): Unit = { + out.append(indent(level)).append(this.deco().decorateHeaderOpen(headerLevel)) .append(applyInline(content, lookup)) - .append(indent(level)).append(deco.decorateHeaderClose(headerLevel)) + .append(indent(level)).append(this.deco().decorateHeaderClose(headerLevel)) } } @@ -102,26 +102,26 @@ trait BlockParsers extends Parsers { * Represents a block of verbatim qouted code */ class CodeBlock(lines:List[MarkdownLine]) extends MarkdownBlock{ - def addResult(level:Int, out:StringBuilder) { - out.append(indent(level)).append(deco.decorateCodeBlockOpen) + def addResult(level:Int, out:StringBuilder): Unit = { + out.append(indent(level)).append(this.deco().decorateCodeBlockOpen()) for (line <- lines) { val escaped = escapeXml(line.payload) out.append(escaped).append('\n') //out.append(line.content) } - out.append(indent(level)).append(deco.decorateCodeBlockClose) + 
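// Editor's sketch (not part of the file or of this diff): making the deco() indirection
// explicit here is what lets callers swap the generated markup. Mirroring the
// decorateItemOpen/decorateItemClose override used in BlockParsersTest further down in
// this diff, a custom wrapper for code blocks could look roughly like the commented
// example below; the empty-paren signatures of decorateCodeBlockOpen/Close are assumed
// to match those of decorateItemOpen/Close, and the class="highlight" markup is invented.
//
//   import net.liftweb.markdown.{Decorator, Transformer}
//
//   object HighlightDecorator extends Decorator {
//     override def decorateCodeBlockOpen(): String = "<pre class=\"highlight\"><code>"
//     override def decorateCodeBlockClose(): String = "</code></pre>\n"
//   }
//
//   object HighlightTransformer extends Transformer {
//     override def deco(): Decorator = HighlightDecorator
//   }
//
//   // Indented lines are treated as a code block, so this would use the custom wrapper:
//   HighlightTransformer("    println(42)\n")
// End of editor's sketch.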
out.append(indent(level)).append(this.deco().decorateCodeBlockClose()) } } class FencedCodeBlock(language:String, lines:List[MarkdownLine]) extends MarkdownBlock{ - def addResult(level:Int, out:StringBuilder) { - out.append(indent(level)).append(deco.decorateCodeBlockOpen) + def addResult(level:Int, out:StringBuilder): Unit = { + out.append(indent(level)).append(this.deco().decorateCodeBlockOpen()) for (line <- lines) { val escaped = escapeXml(line.fullLine) out.append(escaped).append('\n') //out.append(line.content) } - out.append(indent(level)).append(deco.decorateCodeBlockClose) + out.append(indent(level)).append(this.deco().decorateCodeBlockClose()) } } @@ -131,17 +131,17 @@ trait BlockParsers extends Parsers { class Paragraph(lines:List[MarkdownLine], lookup:Map[String, LinkDefinition]) extends MarkdownBlock{ - def addResult(level:Int, out:StringBuilder) { - out.append(indent(level)).append(deco.decorateParagraphOpen) + def addResult(level:Int, out:StringBuilder): Unit = { + out.append(indent(level)).append(this.deco().decorateParagraphOpen()) addResultPlain(level, out) - out.append(indent(level)).append(deco.decorateParagraphClose) + out.append(indent(level)).append(this.deco().decorateParagraphClose()) } /** * Adds the result without any decoration, (no wrapping tags) * Used for building list items that don't have their content wrappend in paragraphs */ - def addResultPlain(level:Int, out:StringBuilder) { + def addResultPlain(level:Int, out:StringBuilder): Unit = { val temp = new StringBuilder() lines.foreach(line => temp.append(indent(level)).append(line.payload).append('\n')) @@ -164,16 +164,16 @@ trait BlockParsers extends Parsers { */ class Blockquote(lines:List[MarkdownLine], lookup:Map[String, LinkDefinition]) extends MarkdownBlock { - def addResult(level:Int, out:StringBuilder) { + def addResult(level:Int, out:StringBuilder):Unit = { //the block parser needs to recurse: val innerLines = lines.map(line => line.payload) val reader = BlockParsers.this.tokenizer.innerTokenize(innerLines, lookup) //now apply the normal markdown parser to the new content val innerBlocks = BlockParsers.this.applyBlocks(reader) //wrap the resulting blocks in blockquote tags - out.append(indent(level)).append(deco.decorateBlockQuoteOpen) + out.append(indent(level)).append(this.deco().decorateBlockQuoteOpen()) innerBlocks.foreach(block => block.addResult(level+1, out)) - out.append(indent(level)).append(deco.decorateBlockQuoteClose) + out.append(indent(level)).append(this.deco().decorateBlockQuoteClose()) } } @@ -183,12 +183,12 @@ trait BlockParsers extends Parsers { */ class ListItem(val lines:List[MarkdownLine], lookup:Map[String, LinkDefinition]) extends LineParsers { - override def deco(): Decorator = BlockParsers.this.deco + override def deco(): Decorator = BlockParsers.this.deco() def endsWithNewline = lines.size > 1 && (lines.last.isInstanceOf[EmptyLine]) - def addResult(level:Int, out:StringBuilder, paragraph_? : Boolean) { - out.append(indent(level)).append(deco.decorateItemOpen) + def addResult(level:Int, out:StringBuilder, paragraph_? : Boolean):Unit = { + out.append(indent(level)).append(this.deco().decorateItemOpen()) //the block parser needs to recurse: val innerLines = lines.map(line => line.payload) val reader = BlockParsers.this.tokenizer.innerTokenize(innerLines, lookup) @@ -198,7 +198,7 @@ trait BlockParsers extends Parsers { case (p:Paragraph) :: Nil if (!paragraph_?) 
=> p.addResultPlain(level+1, out) case _ => innerBlocks.foreach(block => block.addResult(level+1, out)) } - out.append(indent(level)).append(deco.decorateItemClose) + out.append(indent(level)).append(this.deco().decorateItemClose()) } } @@ -224,7 +224,7 @@ trait BlockParsers extends Parsers { /** * calls recursive handling of nested items */ - def addResult(level:Int, out:StringBuilder) { + def addResult(level:Int, out:StringBuilder):Unit = { addResult(level, out, items.head::items) } } @@ -233,10 +233,10 @@ trait BlockParsers extends Parsers { * An ordered (i.e. numbered) list of items. */ class OList (items:List[ListItem]) extends ListBlock(items) { - override def addResult(level:Int, out:StringBuilder) { - out.append(indent(level)).append(deco.decorateOListOpen) + override def addResult(level:Int, out:StringBuilder):Unit = { + out.append(indent(level)).append(this.deco().decorateOListOpen()) super.addResult(level, out) - out.append(indent(level)).append(deco.decorateOListClose) + out.append(indent(level)).append(this.deco().decorateOListClose()) } } @@ -244,10 +244,10 @@ trait BlockParsers extends Parsers { * An unordered list of items. */ class UList (items:List[ListItem]) extends ListBlock(items) { - override def addResult(level:Int, out:StringBuilder) { - out.append(indent(level)).append(deco.decorateUListOpen) + override def addResult(level:Int, out:StringBuilder): Unit = { + out.append(indent(level)).append(this.deco().decorateUListOpen()) super.addResult(level, out) - out.append(indent(level)).append(deco.decorateUListClose) + out.append(indent(level)).append(this.deco().decorateUListClose()) } } @@ -304,7 +304,7 @@ trait BlockParsers extends Parsers { /////////////////// def atxHeader:Parser[Header] = line(classOf[AtxHeaderLine]) ~ lookup ^^ { - case l ~ lu => new Header(l.trimHashes, l.headerLevel, lu) + case l ~ lu => new Header(l.trimHashes(), l.headerLevel, lu) } def setExtHeader:Parser[Header] = diff --git a/core/markdown/src/main/scala/net/liftweb/markdown/InlineParsers.scala b/core/markdown/src/main/scala/net/liftweb/markdown/InlineParsers.scala index 4a0b281549..88e3b18608 100644 --- a/core/markdown/src/main/scala/net/liftweb/markdown/InlineParsers.scala +++ b/core/markdown/src/main/scala/net/liftweb/markdown/InlineParsers.scala @@ -174,7 +174,7 @@ trait InlineParsers extends BaseParsers { /** Parser for inline markdown, always consumes all input, returns the resulting HTML. */ - def inline(m:LinkMap):Parser[String] = (oneInline(new InlineContext(m))*) ^^ {_.mkString} + def inlineParser(m:LinkMap):Parser[String] = (oneInline(new InlineContext(m))*) ^^ {_.mkString} @@ -185,7 +185,7 @@ trait InlineParsers extends BaseParsers { /** Parses two spaces at the end of a line to a manual break (
) */ - val br:Parser[String] = (" \n") ^^^ {deco.decorateBreak() + "\n"} + val br:Parser[String] = (" \n") ^^^ {deco().decorateBreak() + "\n"} /** Parses an inline code element. @@ -194,13 +194,13 @@ trait InlineParsers extends BaseParsers { */ val code:Parser[String] = ((("``" ~> ((not("``")~> aChar)+) <~ "``")^^{_.mkString}) | ('`' ~> markdownText(Set('`'), false) <~ '`') ) ^^ { - c => deco.decorateCode(c.mkString) + c => deco().decorateCode(c.mkString) } /** Parses any xml tag and escapes attribute values. */ - val xmlTag:Parser[String] = if (deco.allowVerbatimXml) (xmlEndTag | xmlStartOrEmptyTag) + val xmlTag:Parser[String] = if (this.deco().allowVerbatimXml()) (xmlEndTag | xmlStartOrEmptyTag) else failure("Inline XML processing disabled.") @@ -210,7 +210,7 @@ trait InlineParsers extends BaseParsers { if (ctx.tags.contains("a")){ failure("Cannot nest a link in a link.") } else { - elem('<') ~> markdownText(Set('>',' ', '<', '\n'), true) <~ '>' ^^ { u => deco.decorateLink(u, u, None) } + elem('<') ~> markdownText(Set('>',' ', '<', '\n'), true) <~ '>' ^^ { u => deco().decorateLink(u, u, None) } } /** A link started by square brackets, either a reference or a a link with the full URL. @@ -224,7 +224,7 @@ trait InlineParsers extends BaseParsers { failure("Cannot nest a link in a link.") } else { '[' ~> linkInline(ctx.addTag("a")) ~ ("](" ~ ows) ~ url ~ ows ~ title <~ (ows ~ ')') ^^ { - case txt ~ _ ~ u ~ _ ~ ttl => deco.decorateLink(txt, u, ttl) + case txt ~ _ ~ u ~ _ ~ ttl => deco().decorateLink(txt, u, ttl) } } @@ -235,7 +235,7 @@ trait InlineParsers extends BaseParsers { failure("Cannot nest a link in a link.") } else { ref(ctx.addTag("a")) ^^ { - case (LinkDefinition(_, u, ttl), txt) => deco.decorateLink(txt, u, ttl) + case (LinkDefinition(_, u, ttl), txt) => deco().decorateLink(txt, u, ttl) } } @@ -293,13 +293,13 @@ trait InlineParsers extends BaseParsers { */ val directImg:Parser[String] = elem('[') ~> refText ~ ("](" ~ ows) ~ url ~ ows ~ title <~ (ows ~ ')') ^^ { - case altText ~ _ ~ path ~ _ ~ ttl => deco.decorateImg(altText, path, ttl) + case altText ~ _ ~ path ~ _ ~ ttl => deco().decorateImg(altText, path, ttl) } /** * Parses a referenced image. 
*/ def refImg(ctx:InlineContext):Parser[String] = ref(ctx) ^^ { - case (LinkDefinition(_, u, ttl), alt) => deco.decorateImg(alt, u, ttl) + case (LinkDefinition(_, u, ttl), alt) => deco().decorateImg(alt, u, ttl) } /** Parses inline in a span element like bold or emphasis or link up until the given end marker @@ -330,7 +330,7 @@ trait InlineParsers extends BaseParsers { if (ctx.tags.contains("em")) { failure("Cannot nest emphasis.") } else { - span("*", ctx.addTag("em")) ^^ { deco.decorateEmphasis(_) } + span("*", ctx.addTag("em")) ^^ { deco().decorateEmphasis(_) } } @@ -340,7 +340,7 @@ trait InlineParsers extends BaseParsers { if (ctx.tags.contains("em")) { failure("Cannot nest emphasis.") } else { - span("_", ctx.addTag("em")) ^^ { deco.decorateEmphasis(_) } + span("_", ctx.addTag("em")) ^^ { deco().decorateEmphasis(_) } } /**Parses strong text in asterisks: **foo** @@ -349,7 +349,7 @@ trait InlineParsers extends BaseParsers { if (ctx.tags.contains("strong")) { failure("Cannot nest strong text.") } else { - span("**", ctx.addTag("strong")) ^^ { deco.decorateStrong(_) } + span("**", ctx.addTag("strong")) ^^ { deco().decorateStrong(_) } } /**Parses strong text in underscores: __foo__ @@ -358,14 +358,14 @@ trait InlineParsers extends BaseParsers { if (ctx.tags.contains("strong")) { failure("Cannot nest strong text.") } else { - span("__", ctx.addTag("strong")) ^^ { deco.decorateStrong(_) } + span("__", ctx.addTag("strong")) ^^ { deco().decorateStrong(_) } } /** * Runs the inline parser on the given input and returns the result */ - def applyInline(s:String, m:LinkMap) = apply(inline(m), s) + def applyInline(s:String, m:LinkMap) = apply(inlineParser(m), s) /** * Escapes the given string so it it can be embedded in xml. @@ -388,7 +388,7 @@ trait InlineParsers extends BaseParsers { result.toString } - private lazy val entList = List(("quot",34), ("amp",38), ("lt",60), ("gt",62), ("nbsp",160), ("iexcl",161), ("cent",162), ("pound",163), ("curren",164), ("yen",165), + private lazy val entList = List(("quot",34), ("amp",38), ("lt",60), ("gt",62), ("nbsp",160), ("iexcl",161), ("cent",162), ("pound",163), ("curren",164), ("yen",165), ("euro",8364), ("brvbar",166), ("sect",167), ("uml",168), ("copy",169), ("ordf",170), ("laquo",171), ("shy",173), ("reg",174), ("trade",8482), ("macr",175), ("deg",176), ("plusmn",177), ("sup2",178), ("sup3",179), ("acute",180), ("micro",181), ("para",182), ("middot",183), ("cedil",184), ("sup1",185), ("ordm",186), ("raquo",187), ("frac14",188), ("frac12",189), ("frac34",190), ("iquest",191), ("times",215), ("divide",247), @@ -416,9 +416,9 @@ trait InlineParsers extends BaseParsers { ("xi",958), ("omicron",959), ("pi",960), ("rho",961), ("sigmaf",962), ("sigma",963), ("tau",964), ("upsilon",965), ("phi",966), ("chi",967), ("psi",968), ("omega",969), ("thetasym",977), ("upsih",978), ("piv",982)) - private lazy val validEntitySet = Set(entList.map(_._1) :_*) + private lazy val validEntitySet = Set(entList.map(_._1) :_*) - private def checkForSemi(i: Int, s: CharSequence, end: Int): Boolean = { + private def checkForSemi(i: Int, s: CharSequence, end: Int): Boolean = { var pos = i + 1 val last = i + 10 val sb = new StringBuffer(20) diff --git a/core/markdown/src/main/scala/net/liftweb/markdown/LineTokenizer.scala b/core/markdown/src/main/scala/net/liftweb/markdown/LineTokenizer.scala index 488067eca8..a2064706bd 100644 --- a/core/markdown/src/main/scala/net/liftweb/markdown/LineTokenizer.scala +++ b/core/markdown/src/main/scala/net/liftweb/markdown/LineTokenizer.scala @@ 
-190,13 +190,13 @@ class LineTokenizer() extends Parsers { /** Parses tokens that may occur inside a block. Works like the normal token parser except that * it does not check for link definitions and verbatim XML. */ - def innerTokens(lookup:Map[String, LinkDefinition]):Parser[MarkdownLineReader] = phrase(lineToken *) ^^ { + def innerTokens(lookup:Map[String, LinkDefinition]):Parser[MarkdownLineReader] = phrase(lineToken.*) ^^ { case ts => new MarkdownLineReader(ts, lookup) } /** Parses first level line tokens, i.e. Markdown lines, XML chunks and link definitions. */ - def tokens:Parser[MarkdownLineReader] = phrase((preprocessToken | lineToken) *) ^^ { case ts => + def tokens:Parser[MarkdownLineReader] = phrase((preprocessToken | lineToken).*) ^^ { case ts => val lines = new ArrayBuffer[MarkdownLine]() val lookup = new HashMap[String, LinkDefinition]() for (t <- ts) { t match { diff --git a/core/markdown/src/main/scala/net/liftweb/markdown/TimeTest.scala b/core/markdown/src/main/scala/net/liftweb/markdown/TimeTest.scala index 9a010d93f7..768f4a1f2c 100644 --- a/core/markdown/src/main/scala/net/liftweb/markdown/TimeTest.scala +++ b/core/markdown/src/main/scala/net/liftweb/markdown/TimeTest.scala @@ -35,16 +35,16 @@ trait TimedTransformer { def deco():Decorator = Decorator private object lineTokenizer extends LineTokenizer { - override def allowXmlBlocks() = TimedTransformer.this.deco().allowVerbatimXml() + override def allowXmlBlocks: Boolean = TimedTransformer.this.deco().allowVerbatimXml() } private object blockParser extends BlockParsers { - override def deco() = TimedTransformer.this.deco() + override def deco(): Decorator = TimedTransformer.this.deco() } /** * This is the method that turns markdown source into xhtml. */ - def apply(s:String) = { + def apply(s:String): String = { //first, run the input through the line parser val (ms1,lineReader:MarkdownLineReader) = TimeTest.executionTime(()=>lineTokenizer.tokenize(s)) @@ -66,11 +66,11 @@ object TimeTest { val reader = new InputStreamReader(Files.newInputStream(Paths.get(path))) val writer = new StringWriter() val buffer = new Array[Char](1024) - var read = reader.read(buffer) - while (read != -1) { - writer.write(buffer, 0, read) - read = reader.read(buffer) - } + var read = reader.read(buffer) + while (read != -1) { + writer.write(buffer, 0, read) + read = reader.read(buffer) + } //turn read input into a string writer.toString } @@ -82,12 +82,12 @@ object TimeTest { (end - start, t) } - private def runActuarius(markdown:String, iterations:Int) { + private def runActuarius(markdown:String, iterations:Int): Unit = { for (i <- 0 until iterations) actuariusProcessor(markdown) } - def testRun(markdown:String, iterations:Int) { + def testRun(markdown:String, iterations:Int): Unit = { println("Running Actuarius " + iterations + " times...") println("... 
took " + (executionTime(() => runActuarius(markdown, iterations)))._1 + "ms") } @@ -96,7 +96,7 @@ object TimeTest { //def ws1:Parser[String] = """( |\t|\v)+""".r def ws2:Parser[String] = rep1(elem(' ') | elem('\t') | elem('\u000B')) ^^ {_.mkString} - def runParser(s:String, p:Parser[String], iterations:Int) { + def runParser(s:String, p:Parser[String], iterations:Int) : Unit = { for (i <- 0 until iterations) { apply(p, s) } @@ -124,7 +124,7 @@ object TimeTest { } - def main(args:Array[String]) { + def main(args:Array[String]):Unit = { /* val markdown = readFile("/home/chris/sbt_projects/markdown_race/test.txt").mkString*100 val iterations = 10 diff --git a/core/markdown/src/main/scala/net/liftweb/markdown/Transformer.scala b/core/markdown/src/main/scala/net/liftweb/markdown/Transformer.scala index d6afbe77ea..5fe538901a 100644 --- a/core/markdown/src/main/scala/net/liftweb/markdown/Transformer.scala +++ b/core/markdown/src/main/scala/net/liftweb/markdown/Transformer.scala @@ -34,10 +34,10 @@ trait Transformer { def deco():Decorator = Decorator private object lineTokenizer extends LineTokenizer { - override def allowXmlBlocks() = Transformer.this.deco().allowVerbatimXml() + override def allowXmlBlocks: Boolean = Transformer.this.deco().allowVerbatimXml() } private object blockParser extends BlockParsers { - override def deco() = Transformer.this.deco() + override def deco(): Decorator = Transformer.this.deco() } /** diff --git a/core/markdown/src/test/scala/net/liftweb/markdown/BaseParsersTest.scala b/core/markdown/src/test/scala/net/liftweb/markdown/BaseParsersTest.scala index ccc0e12efe..5427face02 100644 --- a/core/markdown/src/test/scala/net/liftweb/markdown/BaseParsersTest.scala +++ b/core/markdown/src/test/scala/net/liftweb/markdown/BaseParsersTest.scala @@ -19,18 +19,15 @@ package net.liftweb.markdown * Christoph Henkelmann http://henkelmann.eu/ */ -import org.scalatestplus.junit.JUnitRunner -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import collection.SortedMap -import org.junit.runner.RunWith /** * Tests basic parsers that are used by the more complex parsing steps. 
*/ -@RunWith(classOf[JUnitRunner]) -class BaseParsersTest extends FlatSpec with Matchers with BaseParsers{ +class BaseParsersTest extends AnyFlatSpec with Matchers with BaseParsers{ "The BaseParsers" should "parse a newline" in { val p = nl @@ -46,13 +43,13 @@ class BaseParsersTest extends FlatSpec with Matchers with BaseParsers{ apply(p, " ") should equal (" ") apply(p, "\t\t") should equal ("\t\t") apply(p, " \t \t ") should equal (" \t \t ") - //we want newlines to be treated diferrently from other ws + //we want newlines to be treated differently from other ws an [IllegalArgumentException] should be thrownBy(apply(p, "\n")) } it should "be able to look behind" in { - apply (((elem('a') ~ lookbehind(Set('a')) ~ elem('b'))^^{case a~lb~b=>a+""+b}), "ab") should equal ("ab") - an [IllegalArgumentException] should be thrownBy { apply (((elem('a') ~ lookbehind(Set('b')) ~ elem('b'))^^{case a~b=>a+""+b}), "ab") } + apply (((elem('a') ~ lookbehind(Set('a')) ~ elem('b'))^^{case a~lb~b=>s"$a$b"}), "ab") should equal ("ab") + an [IllegalArgumentException] should be thrownBy { apply (((elem('a') ~ lookbehind(Set('b')) ~ elem('b'))^^{case a~b=>s"$a$b"}), "ab") } apply( (elem('a') ~ not(lookbehind(Set(' ', '\t', '\n'))) ~ '*' ), "a*" ) diff --git a/core/markdown/src/test/scala/net/liftweb/markdown/BlockParsersTest.scala b/core/markdown/src/test/scala/net/liftweb/markdown/BlockParsersTest.scala index 15ff828e11..452a58f97b 100644 --- a/core/markdown/src/test/scala/net/liftweb/markdown/BlockParsersTest.scala +++ b/core/markdown/src/test/scala/net/liftweb/markdown/BlockParsersTest.scala @@ -19,47 +19,46 @@ package net.liftweb.markdown * Christoph Henkelmann http://henkelmann.eu/ */ -import org.junit.runner.RunWith -import org.scalatestplus.junit.JUnitRunner -import org.scalatest.{Matchers,FlatSpec} -import scala.xml.{Group, NodeSeq} +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers +import scala.xml.Group +import scala.xml.NodeSeq /** * Tests the parsing on block level. 
*/ -@RunWith(classOf[JUnitRunner]) -class BlockParsersTest extends FlatSpec with Matchers with BlockParsers{ +class BlockParsersTest extends AnyFlatSpec with Matchers with BlockParsers { - "The BlockParsers" should "parse optional empty lines" in { - val p = optEmptyLines - val el = new EmptyLine(" \n") - apply(p, Nil) should equal (Nil) - apply(p, List(el)) should equal (List(el)) - apply(p, List(el, el)) should equal (List(el, el)) - } + "The BlockParsers" should "parse optional empty lines" in { + val p = optEmptyLines + val el = new EmptyLine(" \n") + apply(p, Nil) should equal(Nil) + apply(p, List(el)) should equal(List(el)) + apply(p, List(el, el)) should equal(List(el, el)) + } - it should "accept empty documents" in { - val p = markdown - val el = new EmptyLine(" \n") - apply(p, Nil) should equal (Nil) - apply(p, List(el)) should equal (Nil) - apply(p, List(el, el)) should equal (Nil) - } + it should "accept empty documents" in { + val p = markdown + val el = new EmptyLine(" \n") + apply(p, Nil) should equal(Nil) + apply(p, List(el)) should equal(Nil) + apply(p, List(el, el)) should equal(Nil) + } - it should "detect line types" in { - val p = line(classOf[CodeLine]) - apply(p, List(new CodeLine(" ", "code"))) should equal (new CodeLine(" ", "code")) - an [IllegalArgumentException] should be thrownBy(apply(p, List(new OtherLine("foo")))) - } + it should "detect line types" in { + val p = line(classOf[CodeLine]) + apply(p, List(new CodeLine(" ", "code"))) should equal(new CodeLine(" ", "code")) + an[IllegalArgumentException] should be thrownBy (apply(p, List(new OtherLine("foo")))) + } - it should "correctly override list items markup" in { - object MyDecorator extends Decorator { - override def decorateItemOpen(): String = "" - override def decorateItemClose(): String = "" - } - object MyTransformer extends Transformer { - override def deco(): Decorator = MyDecorator - } - MyTransformer.apply("* Content") should equal ("\n") + it should "correctly override list items markup" in { + object MyDecorator extends Decorator { + override def decorateItemOpen(): String = "" + override def decorateItemClose(): String = "" + } + object MyTransformer extends Transformer { + override def deco(): Decorator = MyDecorator } + MyTransformer.apply("* Content") should equal("\n") + } } diff --git a/core/markdown/src/test/scala/net/liftweb/markdown/InlineParsersTest.scala b/core/markdown/src/test/scala/net/liftweb/markdown/InlineParsersTest.scala index 96c69ccd45..74c871060f 100644 --- a/core/markdown/src/test/scala/net/liftweb/markdown/InlineParsersTest.scala +++ b/core/markdown/src/test/scala/net/liftweb/markdown/InlineParsersTest.scala @@ -19,21 +19,17 @@ package net.liftweb.markdown * Christoph Henkelmann http://henkelmann.eu/ */ -import org.scalatest.FlatSpec -import org.scalatest.Matchers -import org.junit.runner.RunWith -import org.scalatestplus.junit.JUnitRunner - +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers /** * Tests Inline Parsing, i.e. emphasis , strong text, links, escapes etc. 
*/ -@RunWith(classOf[JUnitRunner]) -class InlineParsersTest extends FlatSpec with Matchers with InlineParsers{ +class InlineParsersTest extends AnyFlatSpec with Matchers with InlineParsers{ /////////////////////////////////////////////////////////////// // Inline parsing Tests // /////////////////////////////////////////////////////////////// - def runSucceedingParsingTests(p:Parser[String], l:List[(String, String)]) { + def runSucceedingParsingTests(p:Parser[String], l:List[(String, String)]) : Unit = { for ((a, b) <- l) { try { apply(p, a) should equal (b) @@ -43,7 +39,7 @@ class InlineParsersTest extends FlatSpec with Matchers with InlineParsers{ } } - def runExceptionParsingTests(p:Parser[String], l:List[String]) { + def runExceptionParsingTests(p:Parser[String], l:List[String]) : Unit = { for (s <- l) { an [IllegalArgumentException] should be thrownBy { apply(p, s) } } @@ -221,11 +217,11 @@ class InlineParsersTest extends FlatSpec with Matchers with InlineParsers{ } it should "allow inline xml and escape its parameters" in { - runSucceedingParsingTests(inline(Map()), xmlInlineTests) + runSucceedingParsingTests(inlineParser(Map()), xmlInlineTests) } it should "parse mixed inline cases" in { - runSucceedingParsingTests(inline(Map()), mixedTests) + runSucceedingParsingTests(inlineParser(Map()), mixedTests) } val ld1 = new LinkDefinition("id", "http://www.example.com", Some("Title")) @@ -245,7 +241,7 @@ class InlineParsersTest extends FlatSpec with Matchers with InlineParsers{ } it should "resolve reference links" in { - val p = inline(map) + val p = inlineParser(map) apply(p, "[text][id]") should equal ("""text""") apply(p, "[text] [id]") should equal ("""text""") apply(p, "[id][]") should equal ("""id""") @@ -259,7 +255,7 @@ class InlineParsersTest extends FlatSpec with Matchers with InlineParsers{ } it should "resolve reference images" in { - val p = inline(map) + val p = inlineParser(map) apply(p, "![text][id]") should equal ("""text""") apply(p, "![text] [id]") should equal ("""text""") apply(p, "![id][]") should equal ("""id""") @@ -273,12 +269,12 @@ class InlineParsersTest extends FlatSpec with Matchers with InlineParsers{ } it should "handle all inline cases with the inline replacer" in { - runSucceedingParsingTests(inline(Map()), allInlineTests) + runSucceedingParsingTests(inlineParser(Map()), allInlineTests) val concatTests = for ( (a1, a2) <- allInlineTests; (b1, b2) <- allInlineTests; (c1, c2) <- allInlineTests) yield (a1+ " " + b1 + " " + c1, a2 + " " + b2 + " " +c2); - runSucceedingParsingTests(inline(Map()), concatTests) + runSucceedingParsingTests(inlineParser(Map()), concatTests) } } diff --git a/core/markdown/src/test/scala/net/liftweb/markdown/LineParsersTest.scala b/core/markdown/src/test/scala/net/liftweb/markdown/LineParsersTest.scala index 968635d740..129c7f89f4 100644 --- a/core/markdown/src/test/scala/net/liftweb/markdown/LineParsersTest.scala +++ b/core/markdown/src/test/scala/net/liftweb/markdown/LineParsersTest.scala @@ -19,15 +19,13 @@ package net.liftweb.markdown * Christoph Henkelmann http://henkelmann.eu/ */ -import org.scalatest.{Matchers,FlatSpec} -import org.junit.runner.RunWith -import org.scalatestplus.junit.JUnitRunner +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers /** * tests parsing of individual lines */ -@RunWith(classOf[JUnitRunner]) -class LineParsersTest extends FlatSpec with Matchers with LineParsers{ +class LineParsersTest extends AnyFlatSpec with Matchers with LineParsers{ "The LineParsers" should 
"parse horizontal rulers" in { val p = ruler diff --git a/core/markdown/src/test/scala/net/liftweb/markdown/LineTokenizerTest.scala b/core/markdown/src/test/scala/net/liftweb/markdown/LineTokenizerTest.scala index 6399f797b7..5ee2750246 100644 --- a/core/markdown/src/test/scala/net/liftweb/markdown/LineTokenizerTest.scala +++ b/core/markdown/src/test/scala/net/liftweb/markdown/LineTokenizerTest.scala @@ -19,15 +19,13 @@ package net.liftweb.markdown * Christoph Henkelmann http://henkelmann.eu/ */ -import org.scalatest.{Matchers,FlatSpec} -import org.junit.runner.RunWith -import org.scalatestplus.junit.JUnitRunner +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers /** * Tests the Line Tokenizer that prepares input for parsing. */ -@RunWith(classOf[JUnitRunner]) -class LineTokenizerTest extends FlatSpec with Matchers { +class LineTokenizerTest extends AnyFlatSpec with Matchers { val tokenizer = new LineTokenizer diff --git a/core/markdown/src/test/scala/net/liftweb/markdown/TransformerTest.scala b/core/markdown/src/test/scala/net/liftweb/markdown/TransformerTest.scala index b0a613e3fb..10521e63d8 100644 --- a/core/markdown/src/test/scala/net/liftweb/markdown/TransformerTest.scala +++ b/core/markdown/src/test/scala/net/liftweb/markdown/TransformerTest.scala @@ -19,15 +19,13 @@ package net.liftweb.markdown * Christoph Henkelmann http://henkelmann.eu/ */ -import org.scalatest.{Matchers,FlatSpec} -import org.junit.runner.RunWith -import org.scalatestplus.junit.JUnitRunner +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers /** * Tests the behavior of the complete parser, i.e. all parsing steps together. */ -@RunWith(classOf[JUnitRunner]) -class TransformerTest extends FlatSpec with Matchers with Transformer { +class TransformerTest extends AnyFlatSpec with Matchers with Transformer { "The Transformer" should "create xhtml fragments from markdown" in { apply("") should equal ("") diff --git a/core/util/src/main/java/net/liftweb/util/BCrypt.java b/core/util/src/main/java/net/liftweb/util/BCrypt.java deleted file mode 100644 index 6eb74f63ad..0000000000 --- a/core/util/src/main/java/net/liftweb/util/BCrypt.java +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Copyright 2007-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -// --------------- -// This is a derivative work of jBCrypt, distributed under BSD licence -// and copyrighted as follows: -// -// Copyright (c) 2006 Damien Miller -// -// Permission to use, copy, modify, and distribute this software for any -// purpose with or without fee is hereby granted, provided that the above -// copyright notice and this permission notice appear in all copies. -// -// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -// MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -// ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -// ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -// OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -package net.liftweb.util; - -import java.io.UnsupportedEncodingException; - -import java.security.SecureRandom; - -/** - * This class is a passthrough to org.mindrot.jbcrypt.BCrypt, provided for - * backwards compatibility as we kept a copy in the Lift codebase before it was - * available for package dependencies. Prefer using org.mindrot.jbcrypt.BCrypt. - */ -@Deprecated -public class BCrypt { - /** - * @see org.mindrot.jbcrypt.BCrypt#hashpw(String,String) - */ - @Deprecated - public static String hashpw(String password, String salt) { - return org.mindrot.jbcrypt.BCrypt.hashpw(password, salt); - } - - /** - * @see org.mindrot.jbcrypt.BCrypt#gensalt(int,SecureRandom) - */ - @Deprecated - public static String gensalt(int log_rounds, SecureRandom random) { - return org.mindrot.jbcrypt.BCrypt.gensalt(log_rounds, random); - } - - /** - * @see org.mindrot.jbcrypt.BCrypt#gensalt(int) - */ - @Deprecated - public static String gensalt(int log_rounds) { - return org.mindrot.jbcrypt.BCrypt.gensalt(log_rounds); - } - - /** - * @see org.mindrot.jbcrypt.BCrypt#gensalt() - */ - @Deprecated - public static String gensalt() { - return org.mindrot.jbcrypt.BCrypt.gensalt(); - } - - /** - * @see org.mindrot.jbcrypt.BCrypt#checkpw(String,String) - */ - @Deprecated - public static boolean checkpw(String plaintext, String hashed) { - return org.mindrot.jbcrypt.BCrypt.checkpw(plaintext, hashed); - } -} diff --git a/core/util/src/main/scala/net/liftweb/util/AnyVar.scala b/core/util/src/main/scala/net/liftweb/util/AnyVar.scala index 4b0c90ca6a..3786fb210a 100644 --- a/core/util/src/main/scala/net/liftweb/util/AnyVar.scala +++ b/core/util/src/main/scala/net/liftweb/util/AnyVar.scala @@ -107,14 +107,14 @@ trait AnyVarTrait[T, MyType <: AnyVarTrait[T, MyType]] extends PSettableValueHol protected def setFunc(name: String, value: T): Unit protected def clearFunc(name: String): Unit - private def _setFunc(name: String, value: T) { + private def _setFunc(name: String, value: T): Unit = { setFunc(name, value) val sd = settingDefault_? 
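// Editor's sketch (not part of this diff): the changeFuncs call just below is what
// drives listeners registered via onChange further down in this trait. A typical
// subscriber, written against a SessionVar from the web module, might look like the
// commented example below. The (Box[T], Boolean) => Unit shape is inferred from the
// f(Full(value), sd) and f(Empty, false) calls in _setFunc and _clearFunc, and
// currentTheme is a hypothetical name used only for illustration.
//
//   import net.liftweb.common.Box
//   import net.liftweb.http.SessionVar
//
//   object currentTheme extends SessionVar[String]("light")
//
//   currentTheme.onChange { (newValue: Box[String], settingDefault: Boolean) =>
//     if (!settingDefault) {
//       println("theme changed to " + newValue.openOr("<cleared>"))
//     }
//   }
// End of editor's sketch.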
changeFuncs.foreach(f => Helpers.tryo(f(Full(value), sd))) } - private def _clearFunc(name: String) { + private def _clearFunc(name: String): Unit = { clearFunc(name) changeFuncs.foreach(f => Helpers.tryo(f(Empty, false))) } @@ -145,7 +145,7 @@ trait AnyVarTrait[T, MyType <: AnyVarTrait[T, MyType]] extends PSettableValueHol * * @param f the function to execute on change */ - def onChange(f: FuncType) { + def onChange(f: FuncType): Unit = { changeFuncs ::= f } @@ -253,7 +253,7 @@ trait AnyVarTrait[T, MyType <: AnyVarTrait[T, MyType]] extends PSettableValueHol protected def registerCleanupFunc(in: CleanUpParam => Unit): Unit - protected final def registerGlobalCleanupFunc(in: CleanUpParam => Unit) { + protected final def registerGlobalCleanupFunc(in: CleanUpParam => Unit): Unit = { cuf ::= in } diff --git a/core/util/src/main/scala/net/liftweb/util/BaseField.scala b/core/util/src/main/scala/net/liftweb/util/BaseField.scala index f48d99ccb1..8f8f04870f 100644 --- a/core/util/src/main/scala/net/liftweb/util/BaseField.scala +++ b/core/util/src/main/scala/net/liftweb/util/BaseField.scala @@ -31,7 +31,7 @@ trait FieldIdentifier { * Associate a FieldIdentifier with a NodeSeq */ case class FieldError(field: FieldIdentifier, msg: NodeSeq) { - override def toString = field.uniqueFieldId + " : " + msg + override def toString = s"${field.uniqueFieldId} : ${msg}" } object FieldError { diff --git a/core/util/src/main/scala/net/liftweb/util/BasicTypesHelpers.scala b/core/util/src/main/scala/net/liftweb/util/BasicTypesHelpers.scala index a06bd564b8..3fc3798975 100644 --- a/core/util/src/main/scala/net/liftweb/util/BasicTypesHelpers.scala +++ b/core/util/src/main/scala/net/liftweb/util/BasicTypesHelpers.scala @@ -108,8 +108,8 @@ trait BasicTypesHelpers { self: StringHelpers with ControlHelpers => */ def compareElem(left: Elem, right: Elem): Boolean = compareXml(left.child, right.child) && - left.label == right.label && - (((null eq left.prefix) && (null eq right.prefix)) || left.prefix == right.prefix) && + left.label == right.label && + (((null eq left.prefix) && (null eq right.prefix)) || left.prefix == right.prefix) && left.scope == right.scope && compareMetaData(left.attributes.toList, right.attributes.toList) diff --git a/core/util/src/main/scala/net/liftweb/util/BundleBuilder.scala b/core/util/src/main/scala/net/liftweb/util/BundleBuilder.scala index 1922c68a80..2139c7d931 100644 --- a/core/util/src/main/scala/net/liftweb/util/BundleBuilder.scala +++ b/core/util/src/main/scala/net/liftweb/util/BundleBuilder.scala @@ -103,7 +103,7 @@ object BundleBuilder { val it = res.keys.iterator new Enumeration[String] { def hasMoreElements() = it.hasNext - def nextElement() = it.next + def nextElement() = it.next() } } diff --git a/core/util/src/main/scala/net/liftweb/util/CSSHelpers.scala b/core/util/src/main/scala/net/liftweb/util/CSSHelpers.scala index b938c8ff7e..001c63ff3e 100644 --- a/core/util/src/main/scala/net/liftweb/util/CSSHelpers.scala +++ b/core/util/src/main/scala/net/liftweb/util/CSSHelpers.scala @@ -19,10 +19,11 @@ package util import scala.language.postfixOps import scala.language.implicitConversions - import scala.util.parsing.combinator._ import common._ + import java.io._ +import scala.annotation.nowarn // FIXME This needs a capitalization update, but that may be impossible to do // FIXME without breaking code :/ @@ -35,6 +36,7 @@ object CSSHelpers extends ControlHelpers { * @param rootPrefix - the prefix to be added * @return (Box[String], String) - returns the tuple containing the parsing 
output and the original input (as a String) */ + @nowarn(s"msg=method .* is deprecated .* we are unifying capitalization across Lift.*") def fixCSS(in: Reader, rootPrefix: String): (Box[String], String) = { val reader = new BufferedReader(in) val res = new StringBuilder; diff --git a/core/util/src/main/scala/net/liftweb/util/CanResolveAsync.scala b/core/util/src/main/scala/net/liftweb/util/CanResolveAsync.scala index 80534148ac..0a6158496d 100644 --- a/core/util/src/main/scala/net/liftweb/util/CanResolveAsync.scala +++ b/core/util/src/main/scala/net/liftweb/util/CanResolveAsync.scala @@ -45,7 +45,7 @@ trait LowPriorityCanResolveAsyncImplicits { self: CanResolveAsync.type => // Low priority implicit for resolving Scala Futures. - implicit def resolveFuture[T](implicit executionContext: ExecutionContext) = { + implicit def resolveFuture[T](implicit executionContext: ExecutionContext): CanResolveAsync[Future[T], T] = { new CanResolveAsync[Future[T],T] { def resolveAsync(future: Future[T], onResolved: (T)=>Unit) = { future.foreach(onResolved) @@ -54,7 +54,7 @@ trait LowPriorityCanResolveAsyncImplicits { } // Low priority implicit for resolving Lift LAFutures. - implicit def resolveLaFuture[T] = { + implicit def resolveLaFuture[T]: CanResolveAsync[LAFuture[T], T] = { new CanResolveAsync[LAFuture[T],T] { def resolveAsync(future: LAFuture[T], onResolved: (T)=>Unit) = { future.onSuccess(onResolved) diff --git a/core/util/src/main/scala/net/liftweb/util/ClassHelpers.scala b/core/util/src/main/scala/net/liftweb/util/ClassHelpers.scala index e8aa422027..ebd3cf7e60 100644 --- a/core/util/src/main/scala/net/liftweb/util/ClassHelpers.scala +++ b/core/util/src/main/scala/net/liftweb/util/ClassHelpers.scala @@ -14,8 +14,8 @@ * limitations under the License. */ -package net.liftweb -package util +package net.liftweb +package util import java.lang.reflect.{Method, InvocationTargetException} import java.lang.reflect.Modifier._ @@ -197,10 +197,10 @@ trait ClassHelpers { self: ControlHelpers => */ def invokeControllerMethod(clz: Class[_], meth: String) = { try { - clz.getMethod(meth).invoke(clz.newInstance) + clz.getMethod(meth).invoke(clz.getDeclaredConstructor().newInstance()) } catch { case c : InvocationTargetException => { - def findRoot(e : Throwable) { if (e.getCause == null || e.getCause == e) throw e else findRoot(e.getCause) } + def findRoot(e : Throwable): Unit = { if (e.getCause == null || e.getCause == e) throw e else findRoot(e.getCause) } findRoot(c) } } @@ -332,7 +332,7 @@ trait ClassHelpers { self: ControlHelpers => * * @return a Full can with the instance or a Failure if the instance can't be created */ - def instantiate[C](clz: Class[C]): Box[C] = tryo { clz.newInstance } + def instantiate[C](clz: Class[C]): Box[C] = tryo { clz.getDeclaredConstructor().newInstance() } /** * Create a function (the 'invoker') which will trigger any public, parameterless method diff --git a/core/util/src/main/scala/net/liftweb/util/CombParserHelpers.scala b/core/util/src/main/scala/net/liftweb/util/CombParserHelpers.scala index 7bcbfa2e57..310de6f7f7 100644 --- a/core/util/src/main/scala/net/liftweb/util/CombParserHelpers.scala +++ b/core/util/src/main/scala/net/liftweb/util/CombParserHelpers.scala @@ -85,8 +85,8 @@ trait CombParserHelpers { * @return a unit parser which will succeed if the input matches the list of characters regardless * of the case (uppercase or lowercase) */ - def acceptCI[ES <% List[Elem]](es: ES): Parser[List[Elem]] = - es.foldRight[Parser[List[Elem]]]( + def acceptCI[ES](es: ES)(implicit 
ev: ES => List[Elem]): Parser[List[Elem]] = + ev(es).foldRight[Parser[List[Elem]]]( success(Nil)){(x, pxs) => acceptCIChar(x) ~ pxs ^^ mkList} def xform(in: Char): Char = Character.toUpperCase(in) diff --git a/core/util/src/main/scala/net/liftweb/util/CssSel.scala b/core/util/src/main/scala/net/liftweb/util/CssSel.scala index de4a4b4e91..763fdccccb 100755 --- a/core/util/src/main/scala/net/liftweb/util/CssSel.scala +++ b/core/util/src/main/scala/net/liftweb/util/CssSel.scala @@ -442,20 +442,20 @@ private class SelectorMap(binds: List[CssBind]) extends Function1[NodeSeq, NodeS } - final def forId(in: Elem, buff: ListBuffer[CssBind]) { + final def forId(in: Elem, buff: ListBuffer[CssBind]): Unit = { for { rid <- id bind <- idMap.get(rid) } buff ++= bind } - final def forElem(in: Elem, buff: ListBuffer[CssBind]) { + final def forElem(in: Elem, buff: ListBuffer[CssBind]): Unit = { for { bind <- elemMap.get(in.label) } buff ++= bind } - final def forStar(buff: ListBuffer[CssBind], depth: Int) { + final def forStar(buff: ListBuffer[CssBind], depth: Int): Unit = { for { binds <- starFunc bind <- binds if (bind match { @@ -465,14 +465,14 @@ private class SelectorMap(binds: List[CssBind]) extends Function1[NodeSeq, NodeS } buff += bind } - final def forName(in: Elem, buff: ListBuffer[CssBind]) { + final def forName(in: Elem, buff: ListBuffer[CssBind]): Unit = { for { rid <- name bind <- nameMap.get(rid) } buff ++= bind } - def findClass(clz: List[String], buff: ListBuffer[CssBind]) { + def findClass(clz: List[String], buff: ListBuffer[CssBind]): Unit = { clz match { case Nil => () case x :: xs => { @@ -485,11 +485,11 @@ private class SelectorMap(binds: List[CssBind]) extends Function1[NodeSeq, NodeS } } - def forClass(in: Elem, buff: ListBuffer[CssBind]) { + def forClass(in: Elem, buff: ListBuffer[CssBind]): Unit = { findClass(classes, buff) } - def forAttr(in: Elem, buff: ListBuffer[CssBind]) { + def forAttr(in: Elem, buff: ListBuffer[CssBind]): Unit = { if (attrMap.isEmpty || attrs.isEmpty) () else { for { @@ -870,8 +870,7 @@ object CanBind extends CssBindImplicits { implicit def iterableDouble[T[Double]](implicit f: T[Double] => Iterable[Double]): CanBind[T[Double]] = new CanBind[T[Double]] { - def apply(info: => T[Double])(ns: NodeSeq): Seq[NodeSeq] = f(info).toSeq.flatMap(a => - if (a equals null) Nil else List(Text(a.toString))) + def apply(info: => T[Double])(ns: NodeSeq): Seq[NodeSeq] = f(info).toSeq.flatMap(a => List(Text(a.toString))) } implicit def iterableBindableTransform[T[_]](implicit f: T[Bindable] => Iterable[Bindable]): CanBind[T[Bindable]] = diff --git a/core/util/src/main/scala/net/liftweb/util/CssSelector.scala b/core/util/src/main/scala/net/liftweb/util/CssSelector.scala index 063ca029aa..758d61a781 100755 --- a/core/util/src/main/scala/net/liftweb/util/CssSelector.scala +++ b/core/util/src/main/scala/net/liftweb/util/CssSelector.scala @@ -27,7 +27,7 @@ sealed trait CssSelector { def withSubnode(sn: SubNode): CssSelector } -final case class ElemSelector(elem: String, subNodes: Box[SubNode]) extends +final case class ElemSelector(elem: String, subNodes: Box[SubNode]) extends CssSelector { def withSubnode(sn: SubNode): CssSelector = this.copy(subNodes = Full(sn)) } @@ -36,7 +36,7 @@ final case class StarSelector(subNodes: Box[SubNode], singleDepth: Boolean) exte def withSubnode(sn: SubNode): CssSelector = this.copy(subNodes = Full(sn)) } -final case class IdSelector(id: String, subNodes: Box[SubNode]) extends +final case class IdSelector(id: String, subNodes: Box[SubNode]) 
extends CssSelector { def withSubnode(sn: SubNode): CssSelector = this.copy(subNodes = Full(sn)) } @@ -56,7 +56,7 @@ final case class EnclosedSelector(selector: CssSelector, kid: CssSelector) exten def withSubnode(sn: SubNode): CssSelector = this } -final case class AttrSelector(name: String, value: String, +final case class AttrSelector(name: String, value: String, subNodes: Box[SubNode]) extends CssSelector { def withSubnode(sn: SubNode): CssSelector = this.copy(subNodes = Full(sn)) } @@ -64,7 +64,7 @@ subNodes: Box[SubNode]) extends CssSelector { sealed trait SubNode object SubNode { - def unapply(bind: CssBind): Option[Box[SubNode]] = + def unapply(bind: CssBind): Option[Box[SubNode]] = Some(bind.css.flatMap(_.subNodes)) } @@ -80,7 +80,6 @@ final case class PrependKidsSubNode() extends SubNode with WithKids { def transform(original: NodeSeq, newNs: NodeSeq): NodeSeq = newNs ++ original } -@deprecated("Please use DontMergeClassValue instead.", "3.3.0") final case object DontMergeAttributes extends SubNode final case object DontMergeClass extends SubNode @@ -231,7 +230,7 @@ object CssSelectorParser extends PackratParsers with ImplicitConversions { } - private lazy val id: Parser[String] = letter ~ + private lazy val id: Parser[String] = letter ~ rep(letter | number | '-' | '_' | ':' | '.') ^^ { case first ~ rest => (first :: rest).mkString } @@ -240,7 +239,7 @@ object CssSelectorParser extends PackratParsers with ImplicitConversions { private def isNumber(c: Char): Boolean = c.isDigit - + private lazy val letter: Parser[Char] = elem("letter", isLetter) private lazy val number: Parser[Char] = elem("number", isNumber) @@ -248,7 +247,7 @@ object CssSelectorParser extends PackratParsers with ImplicitConversions { private lazy val subNode: Parser[SubNode] = rep(' ') ~> ((opt('*') ~ '[' ~> attrName <~ '+' ~ ']' ^^ { name => AttrAppendSubNode(name) - }) | + }) | (opt('*') ~ '[' ~> attrName <~ '!' 
~ ']' ^^ { name => AttrRemoveSubNode(name) }) | (opt('*') ~ '[' ~> attrName <~ ']' ^^ { @@ -286,9 +285,9 @@ object CssSelectorParser extends PackratParsers with ImplicitConversions { })) ^^ { case s => s.mkString }) - + } - + } diff --git a/core/util/src/main/scala/net/liftweb/util/CurrencyZone.scala b/core/util/src/main/scala/net/liftweb/util/CurrencyZone.scala index d37b267736..228603f659 100644 --- a/core/util/src/main/scala/net/liftweb/util/CurrencyZone.scala +++ b/core/util/src/main/scala/net/liftweb/util/CurrencyZone.scala @@ -18,7 +18,8 @@ package net.liftweb package util import java.util.Locale -import java.text.{NumberFormat, DecimalFormat} +import java.text.{DecimalFormat, NumberFormat} +import scala.annotation.nowarn trait TwoFractionDigits { def numberOfFractionDigits = 2 @@ -30,31 +31,32 @@ trait DollarCurrency extends TwoFractionDigits { } /* Various Currencies */ +@nowarn("msg=constructor Locale in class Locale is deprecated \\(since 19\\)") // Use Locale.of when only JDK 19+ is supported object AU extends CurrencyZone { type Currency = AUD - var locale = new Locale("en", "AU") - def make(x: BigDecimal) = new Currency{def amount = x} + var locale: Locale = new Locale("en", "AU") + def make(x: BigDecimal): AUD = new Currency{def amount: BigDecimal = x} abstract class AUD extends AbstractCurrency("AUD") with DollarCurrency {} } object US extends CurrencyZone { type Currency = USD - var locale = Locale.US - def make(x: BigDecimal) = new Currency{def amount = x} + var locale: Locale = Locale.US + def make(x: BigDecimal): USD = new Currency{def amount: BigDecimal = x} abstract class USD extends AbstractCurrency("USD") with DollarCurrency {} } object GB extends CurrencyZone { type Currency = GBP - var locale = Locale.UK - def make(x: BigDecimal) = new Currency{def amount = x} + var locale: Locale = Locale.UK + def make(x: BigDecimal): GBP = new Currency{def amount: BigDecimal = x} abstract class GBP extends AbstractCurrency("GBP") with TwoFractionDigits {def currencySymbol = "£"} } object EU extends CurrencyZone { type Currency = EUR - var locale = Locale.GERMANY // guess this is why its a var - def make(x: BigDecimal) = new Currency{def amount = x; override val _locale = locale} + var locale: Locale = Locale.GERMANY // guess this is why its a var + def make(x: BigDecimal): EUR = new Currency{def amount: BigDecimal = x; override val _locale: Locale = locale} abstract class EUR extends AbstractCurrency("EUR") with TwoFractionDigits {def currencySymbol = "€"} } @@ -85,8 +87,8 @@ abstract class CurrencyZone { val _locale: Locale = locale def amount: BigDecimal - def floatValue = amount.floatValue - def doubleValue = amount.doubleValue + def floatValue: Float = amount.floatValue + def doubleValue: Double = amount.doubleValue def currencySymbol: String def numberOfFractionDigits: Int def scale: Int @@ -101,21 +103,21 @@ abstract class CurrencyZone { def -(that: Int): Currency = this - make(that) def /(that: Currency): Currency = - make(new BigDecimal(this.amount.bigDecimal.divide(that.amount.bigDecimal, scale, java.math.BigDecimal.ROUND_HALF_UP)) ) + make(new BigDecimal(this.amount.bigDecimal.divide(that.amount.bigDecimal, scale, java.math.RoundingMode.HALF_UP)) ) def /(that: Int): Currency = this / make(that) - def compare(that: Currency) = this.amount compare that.amount + def compare(that: Currency): Int = this.amount compare that.amount - override def equals(that: Any) = that match { + override def equals(that: Any): Boolean = that match { case that: AbstractCurrency => 
this.designation+this.format("", scale) == that.designation+that.format("", scale) case _ => false } - override def hashCode = (this.designation+format("", scale)).hashCode + override def hashCode: Int = (this.designation+format("", scale)).hashCode def round(precision: Int) = make(BigDecimal(get(precision))) - override def toString = format("", numberOfFractionDigits) + override def toString: String = format("", numberOfFractionDigits) def format(fd: Int): String = format(currencySymbol, fd) @@ -127,9 +129,9 @@ abstract class CurrencyZone { case _ => amount.setScale(numberOfFractionDigits, BigDecimal.RoundingMode.HALF_UP).doubleValue; } - val numberFormat = NumberFormat.getCurrencyInstance(_locale); - numberFormat.setMinimumFractionDigits(numberOfFractionDigits); - numberFormat.setMaximumFractionDigits(numberOfFractionDigits); + val numberFormat = NumberFormat.getCurrencyInstance(_locale) + numberFormat.setMinimumFractionDigits(numberOfFractionDigits) + numberFormat.setMaximumFractionDigits(numberOfFractionDigits) val symbol=numberFormat.getCurrency.getSymbol(_locale) numberFormat.format(moneyValue).replace(symbol, currencySymbol) @@ -142,7 +144,7 @@ abstract class CurrencyZone { val df = nf.asInstanceOf[DecimalFormat] val groupingSeparator = df.getDecimalFormatSymbols.getGroupingSeparator - format("", numberOfFractionDigits).replaceAll(groupingSeparator+"", ""); + format("", numberOfFractionDigits).replaceAll(groupingSeparator.toString+"", "") } } diff --git a/core/util/src/main/scala/net/liftweb/util/EnumWithDescription.scala b/core/util/src/main/scala/net/liftweb/util/EnumWithDescription.scala deleted file mode 100644 index 6cbe3564fe..0000000000 --- a/core/util/src/main/scala/net/liftweb/util/EnumWithDescription.scala +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright 2007-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
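The CurrencyZone.scala hunks above replace the removed `BigDecimal.ROUND_HALF_UP` constant with `java.math.RoundingMode` and silence the JDK 19 deprecation of the two-argument `Locale` constructor with `@nowarn`. A minimal, self-contained sketch of both patterns (the object and method names here are illustrative, not part of the patch):

```
import java.math.{BigDecimal => JBigDecimal, RoundingMode}
import java.util.Locale
import scala.annotation.nowarn

object CurrencySketch {
  // divide(divisor, scale, RoundingMode) replaces the deprecated int ROUND_* constants
  def split(amount: JBigDecimal, parts: Int): JBigDecimal =
    amount.divide(JBigDecimal.valueOf(parts.toLong), 2, RoundingMode.HALF_UP)

  // new Locale(lang, country) is deprecated since JDK 19; suppress until Locale.of is the baseline
  @nowarn("msg=deprecated")
  val enAU: Locale = new Locale("en", "AU")
}
```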
- */ - -package net.liftweb -package util - - - -/* - A wrapper arround a Scala Enumeration Value that has a name, description for each object - */ -trait ValueWithDescription { - def description: String - def name: String -} - -abstract class EnumWithDescription { - import scala.language.reflectiveCalls - - type Value = enum.Value with ValueWithDescription - - private var _values: List[Value] = Nil - def values = _values - - // possibly not a good idea using this directly - val enum = new Enumeration { - def Value(inName: String, inDescription: String): Value with ValueWithDescription = { - new Val(nextId, inName) with ValueWithDescription { - def description = inDescription - def name = inName - } - } - } - - def Value(name: String, description: String): Value = { - val value = enum.Value(name, description) - _values = _values ::: List(value) // build in order - value - } - - def Value(name: String): Value = Value(name, name) - - def valueOf(name: String) = values find (_.name == name) - - def nameDescriptionList = values map(x => (x.name, x.description) ) - -} - diff --git a/core/util/src/main/scala/net/liftweb/util/FatLazy.scala b/core/util/src/main/scala/net/liftweb/util/FatLazy.scala index 994e617334..51602560a7 100644 --- a/core/util/src/main/scala/net/liftweb/util/FatLazy.scala +++ b/core/util/src/main/scala/net/liftweb/util/FatLazy.scala @@ -58,7 +58,7 @@ class FatLazy[T](f: => T) { * Test whether the value of this class has been set or initialized from the default. */ def defined_? = synchronized { - value != None + value.toOption != None } /** diff --git a/core/util/src/main/scala/net/liftweb/util/HtmlHelpers.scala b/core/util/src/main/scala/net/liftweb/util/HtmlHelpers.scala index 9957b8d2c8..375234caa9 100644 --- a/core/util/src/main/scala/net/liftweb/util/HtmlHelpers.scala +++ b/core/util/src/main/scala/net/liftweb/util/HtmlHelpers.scala @@ -320,7 +320,7 @@ trait HtmlHelpers extends CssBindImplicits { ) - case _ => Empty + case _ => Empty } } diff --git a/core/util/src/main/scala/net/liftweb/util/HtmlParser.scala b/core/util/src/main/scala/net/liftweb/util/HtmlParser.scala index ddd81a5190..0186a96e96 100644 --- a/core/util/src/main/scala/net/liftweb/util/HtmlParser.scala +++ b/core/util/src/main/scala/net/liftweb/util/HtmlParser.scala @@ -35,7 +35,7 @@ trait Html5Writer { * @param m the attributes * @param writer the place to write the attribute */ - protected def writeAttributes(m: MetaData, writer: Writer) { + protected def writeAttributes(m: MetaData, writer: Writer): Unit = { m match { case null => case Null => @@ -117,7 +117,7 @@ trait Html5Writer { * @param str the String to escape * @param the place to send the escaped characters */ - protected def escape(str: String, sb: Writer, reverse: Boolean) { + protected def escape(str: String, sb: Writer, reverse: Boolean): Unit = { val len = str.length var pos = 0 while (pos < len) { @@ -238,11 +238,6 @@ trait Html5Writer { pc.buildString(sb) writer.append(sb) } - case pc: scala.xml.PCData => { - val sb = new StringBuilder() - pc.buildString(sb) - writer.append(sb) - } case Unparsed(text) => writer.append(text) case a: Atom[_] if a.getClass eq classOf[Atom[_]] => writer.append(a.data.toString) @@ -363,20 +358,29 @@ trait Html5Parser { if (capture) { val text = buffer.toString() if (text.length() > 0) { - hStack.push(createText(text)) + hStack = hStack.prepended(createText(text)) } } buffer.setLength(0) } + + // Override endDocument to handle empty hStack case in Scala 3 + // Scala 3's List.last throws "None.get" error on 
empty lists + override def endDocument(): Unit = { + if (hStack.nonEmpty) { + epilogue = hStack.init.reverse + hStack = hStack.last :: Nil + } + } } - saxer.scopeStack.push(TopScope) + saxer.scopeStack = saxer.scopeStack.prepended(TopScope) hp.setContentHandler(saxer) val is = new InputSource(in) is.setEncoding("UTF-8") hp.parse(is) - saxer.scopeStack.pop + saxer.scopeStack = saxer.scopeStack.drop(1) in.close() saxer.rootElem match { diff --git a/core/util/src/main/scala/net/liftweb/util/HttpHelpers.scala b/core/util/src/main/scala/net/liftweb/util/HttpHelpers.scala index 164579acf5..26694de8ab 100644 --- a/core/util/src/main/scala/net/liftweb/util/HttpHelpers.scala +++ b/core/util/src/main/scala/net/liftweb/util/HttpHelpers.scala @@ -264,7 +264,7 @@ trait HttpHelpers { */ def findElems(nodes: NodeSeq)(f: Elem => Boolean): NodeSeq = { val ret = new ListBuffer[Elem] - def find(what: NodeSeq) { + def find(what: NodeSeq): Unit = { what.foreach { case Group(g) => find(g) case e: Elem => @@ -287,7 +287,7 @@ trait HttpHelpers { def findInElems[T](nodes: NodeSeq)(f: Elem => Iterable[T]): List[T] = { val ret = new ListBuffer[T] - def find(what: NodeSeq) { + def find(what: NodeSeq): Unit = { what.foreach { case Group(g) => find(g) case e: Elem => @@ -328,7 +328,7 @@ trait HttpHelpers { def deepFindKids(in: NodeSeq, prefix: String, label: String): NodeSeq = { val ret: ListBuffer[Node] = new ListBuffer - def doIt(in: NodeSeq) { + def doIt(in: NodeSeq): Unit = { in.foreach { case e: Elem if e.prefix == prefix && e.label == label => e.child.foreach(ret.+=) diff --git a/core/util/src/main/scala/net/liftweb/util/IoHelpers.scala b/core/util/src/main/scala/net/liftweb/util/IoHelpers.scala index 406751bb9a..a4f339d47e 100644 --- a/core/util/src/main/scala/net/liftweb/util/IoHelpers.scala +++ b/core/util/src/main/scala/net/liftweb/util/IoHelpers.scala @@ -33,7 +33,7 @@ trait IoHelpers { def exec(cmds: String*): Box[String] = { try { class ReadItAll(in: InputStream, done: String => Unit) extends Runnable { - def run { + def run: Unit = { val br = new BufferedReader(new InputStreamReader(in)) // default to platform character set val lines = new ListBuffer[String] var line = "" @@ -71,7 +71,7 @@ trait IoHelpers { val bos = new StringBuilder val ba = new Array[Char](4096) - def readOnce { + def readOnce: Unit = { val len = in.read(ba) if (len < 0) return if (len > 0) bos.appendAll(ba, 0, len) @@ -100,7 +100,7 @@ trait IoHelpers { val bos = new ByteArrayOutputStream val ba = new Array[Byte](4096) - def readOnce { + def readOnce: Unit = { val len = in.read(ba) if (len > 0) bos.write(ba, 0, len) if (len >= 0) readOnce diff --git a/core/util/src/main/scala/net/liftweb/util/IterableConst.scala b/core/util/src/main/scala/net/liftweb/util/IterableConst.scala index 77be6ca191..35a95846d6 100644 --- a/core/util/src/main/scala/net/liftweb/util/IterableConst.scala +++ b/core/util/src/main/scala/net/liftweb/util/IterableConst.scala @@ -14,7 +14,7 @@ trait IterableConst { def constList(nodeSeq: NodeSeq): Seq[NodeSeq] } -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ /** * The implementation for a NodeSeq Iterable Const @@ -140,5 +140,5 @@ object IterableConst { implicit def optionBindablePromotable(it: Option[Bindable]): IterableConst = SeqBindableIterableConst(it.toList) - implicit def optionStringPromotable[T](o: Option[T])(implicit view:T=>StringPromotable) = optionString(o.map(view(_).toString)) + implicit def optionStringPromotable[T](o: Option[T])(implicit view:T=>StringPromotable): 
IterableConst = optionString(o.map(view(_).toString)) } diff --git a/core/util/src/main/scala/net/liftweb/util/IterableFunc.scala b/core/util/src/main/scala/net/liftweb/util/IterableFunc.scala index e885dba42e..b7f6a79965 100644 --- a/core/util/src/main/scala/net/liftweb/util/IterableFunc.scala +++ b/core/util/src/main/scala/net/liftweb/util/IterableFunc.scala @@ -11,7 +11,7 @@ sealed trait IterableFunc extends Function1[NodeSeq, Seq[NodeSeq]] { } object IterableFunc { - implicit def itNodeSeq[C <% Iterable[NodeSeq]](it: NodeSeq => C): IterableFunc = + implicit def itNodeSeq[C <: Iterable[NodeSeq]](it: NodeSeq => C)(implicit ev: C => Iterable[NodeSeq]): IterableFunc = new IterableFunc { def apply(in: NodeSeq): Seq[NodeSeq] = it(in).toSeq } diff --git a/core/util/src/main/scala/net/liftweb/util/JsonCmd.scala b/core/util/src/main/scala/net/liftweb/util/JsonCmd.scala index 7a62d1c34f..f9b2b2dde9 100644 --- a/core/util/src/main/scala/net/liftweb/util/JsonCmd.scala +++ b/core/util/src/main/scala/net/liftweb/util/JsonCmd.scala @@ -31,7 +31,8 @@ trait HasParams { case class JsonCmd(command: String, target: String, params: Any, all: scala.collection.Map[String, Any]) -import net.liftweb.json.JsonAST._ +import org.json4s._ +import org.json4s.native.JsonMethods._ /** * A helpful extractor to take the JValue sent from the client-side JSON stuff and @@ -67,7 +68,7 @@ class ResponseInfoHolder { def docType = _docType - def docType_=(in: Box[String]) { + def docType_=(in: Box[String]): Unit = { _docType = in _setDocType = true } diff --git a/core/util/src/main/scala/net/liftweb/util/LD.scala b/core/util/src/main/scala/net/liftweb/util/LD.scala index 31b1abc082..7817c50967 100644 --- a/core/util/src/main/scala/net/liftweb/util/LD.scala +++ b/core/util/src/main/scala/net/liftweb/util/LD.scala @@ -50,11 +50,11 @@ object LD { (ly: @unchecked) match { case w :: Nil => (w, this(root, f(w))) - case w :: ws => - val tv = this(root, f(w)) - val rest = this(root, ws, f) - if (tv < rest._2) (w, tv) - else rest + case w :: ws => + val tv = this(root, f(w)) + val rest = this(root, ws, f) + if (tv < rest._2) (w, tv) + else rest } /** @@ -77,10 +77,10 @@ object LD { case Nil => acc.toList case c :: cs => val cost = if (c == ch) 0 else 1 - val i = dist.head - val calc = min(left + cost, i + 1, top + 1) - acc += calc - column(cs, dist.tail, i, calc, ch, acc) + val i = dist.head + val calc = min(left + cost, i + 1, top + 1) + acc += calc + column(cs, dist.tail, i, calc, ch, acc) } def matrix(word: List[Char], pos: Int, dist: List[Int]): List[Int] = diff --git a/core/util/src/main/scala/net/liftweb/util/ListHelpers.scala b/core/util/src/main/scala/net/liftweb/util/ListHelpers.scala index ae7b09f90d..4594185711 100644 --- a/core/util/src/main/scala/net/liftweb/util/ListHelpers.scala +++ b/core/util/src/main/scala/net/liftweb/util/ListHelpers.scala @@ -69,7 +69,7 @@ trait ListHelpers { val ret: ListBuffer[Res] = new ListBuffer() var insertAfter: Box[T] = Empty - @tailrec def loop(o: List[T], n: List[T]) { + @tailrec def loop(o: List[T], n: List[T]): Unit = { (o, n) match { case (o, Nil) => o.foreach(t => ret += f(RemoveDelta(t))) case (Nil, n) => { @@ -169,16 +169,16 @@ trait ListHelpers { /** * Convert a java.util.Enumeration to a List[T] */ - def enumToList[T](enum: java.util.Enumeration[T]): List[T] = { - import scala.collection.JavaConverters._ - enum.asScala.toList + def enumToList[T](`enum`: java.util.Enumeration[T]): List[T] = { + import scala.jdk.CollectionConverters._ + `enum`.asScala.toList } /** * Convert a 
java.util.Enumeration to a List[String] using the toString method on each element */ - def enumToStringList[C](enum: java.util.Enumeration[C]): List[String] = - enumToList(enum).map(_.toString) + def enumToStringList[C](`enum`: java.util.Enumeration[C]): List[String] = + enumToList(`enum`).map(_.toString) /** * Return the first element of a List or a default value if the list is empty diff --git a/core/util/src/main/scala/net/liftweb/util/Mailer.scala b/core/util/src/main/scala/net/liftweb/util/Mailer.scala deleted file mode 100644 index 11eabfc941..0000000000 --- a/core/util/src/main/scala/net/liftweb/util/Mailer.scala +++ /dev/null @@ -1,361 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package util - -import javax.mail._ -import javax.mail.internet._ -import javax.naming.{Context, InitialContext} -import java.util.Properties - -import scala.language.implicitConversions -import scala.xml.{Text, Elem, Node, NodeSeq} - -import common._ -import actor._ - - -/** - * Utilities for sending email. - */ -object Mailer extends Mailer { - - sealed abstract class MailTypes - /** - * Add message headers to outgoing messages - */ - final case class MessageHeader(name: String, value: String) extends MailTypes - abstract class MailBodyType extends MailTypes - final case class PlusImageHolder(name: String, mimeType: String, bytes: Array[Byte], attachment: Boolean = false) - - /** - * Represents a text/plain mail body. The given text will - * be encoded as UTF-8 when sent. - */ - final case class PlainMailBodyType(text: String) extends MailBodyType - - /** - * Represents a text/plain mail body that is encoded with the - * specified charset - */ - final case class PlainPlusBodyType(text: String, charset: String) extends MailBodyType - - final case class XHTMLMailBodyType(text: NodeSeq) extends MailBodyType - final case class XHTMLPlusImages(text: NodeSeq, items: PlusImageHolder*) extends MailBodyType - - - sealed abstract class RoutingType extends MailTypes - sealed abstract class AddressType extends RoutingType { - def address: String - def name: Box[String] - } - final case class From(address: String, name: Box[String] = Empty) extends AddressType - final case class To(address: String, name: Box[String] = Empty) extends AddressType - final case class CC(address: String, name: Box[String] = Empty) extends AddressType - final case class Subject(subject: String) extends RoutingType - final case class BCC(address: String, name: Box[String] = Empty) extends AddressType - final case class ReplyTo(address: String, name: Box[String] = Empty) extends AddressType - - final case class MessageInfo(from: From, subject: Subject, info: List[MailTypes]) -} - -/** - * This trait implmenets the mail sending. 
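Like the IterableConst.scala and ListHelpers.scala hunks above, the patch swaps the deprecated `scala.collection.JavaConverters` for `scala.jdk.CollectionConverters` and backticks `enum`, which is a hard keyword in Scala 3. A small sketch of the pattern (the method name is hypothetical):

```
import scala.jdk.CollectionConverters._

object EnumerationSketch {
  // `enum` must be backticked when used as an identifier under Scala 3
  def drain[T](`enum`: java.util.Enumeration[T]): List[T] =
    `enum`.asScala.toList
}
```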
You can create subclasses of this class/trait and - * implement your own mailer functionality - */ -trait Mailer extends SimpleInjector { - import Mailer._ - private val logger = Logger(classOf[Mailer]) - - implicit def xmlToMailBodyType(html: NodeSeq): MailBodyType = XHTMLMailBodyType(html) - - implicit def addressToAddress(in: AddressType): Address = { - val ret = new InternetAddress(in.address) - in.name.foreach{n => ret.setPersonal(n)} - ret - } - - implicit def adListToAdArray(in: List[AddressType]): Array[Address] = in.map(addressToAddress).toArray - - /** - * Passwords cannot be accessed via System.getProperty. Instead, we - * provide a means of explicitlysetting the authenticator. - */ - //def authenticator = authenticatorFunc - var authenticator: Box[Authenticator] = Empty - - /** - * Use the mailer resource in your container by specifying the JNDI name - */ - var jndiName: Box[String] = Empty - - /** - * Custom properties for the JNDI session - */ - var customProperties: Map[String, String] = Map() - - lazy val jndiSession: Box[Session] = - for{ - name <- jndiName - contextObj <- Helpers.tryo(new InitialContext().lookup("java:comp/env")) - context <- Box.asA[Context](contextObj) - sessionObj <- Helpers.tryo(context.lookup(name)) - session <- Box.asA[Session](sessionObj) - } yield session - - lazy val properties: Properties = { - val p = System.getProperties.clone.asInstanceOf[Properties] - customProperties.foreach {case (name, value) => p.put(name, value)} - // allow the properties file to set/override system properties - - Props.props.foreach { - case (name, value) => - p.setProperty(name, value) - } - p - } - - /** - * The host that should be used to send mail. - */ - def host = hostFunc() - - /** - * To change the way the host is calculated, set this to the function that calcualtes the host name. - * By default: System.getProperty("mail.smtp.host") - */ - var hostFunc: () => String = _host _ - - private def _host = properties.getProperty("mail.smtp.host") match { - case null => "localhost" - case s => s - } - - def buildProps: Properties = { - val p = properties.clone.asInstanceOf[Properties] - p.getProperty("mail.smtp.host") match { - case null => p.put("mail.smtp.host", host) - case _ => - } - - p - } - - /** - * Set the mail.charset property to something other than UTF-8 for non-UTF-8 - * mail. - */ - lazy val charSet = properties.getProperty("mail.charset") match { - case null => "UTF-8" - case x => x - } - - // def host_=(hostname: String) = System.setProperty("mail.smtp.host", hostname) - - protected class MsgSender extends SpecializedLiftActor[MessageInfo] { - protected def messageHandler = { - case MessageInfo(from, subject, info) => - try { - msgSendImpl(from, subject, info) - } catch { - case e: Exception => logger.error("Couldn't send mail", e) - } - } - } - - protected def performTransportSend(msg: MimeMessage) = { - import Props.RunModes._ - (Props.mode match { - case Development => devModeSend.vend - case Test => testModeSend.vend - case Staging => stagingModeSend.vend - case Production => productionModeSend.vend - case Pilot => pilotModeSend.vend - case Profile => profileModeSend.vend - }).apply(msg) - } - - /** - * How to send a message in dev mode. By default, use Transport.send(msg) - */ - lazy val devModeSend: Inject[MimeMessage => Unit] = new Inject[MimeMessage => Unit]((m: MimeMessage) => Transport.send(m)) {} - - /** - * How to send a message in test mode. 
By default, log the message - */ - lazy val testModeSend: Inject[MimeMessage => Unit] = new Inject[MimeMessage => Unit]((m: MimeMessage) => logger.info("Sending Mime Message: "+m)) {} - - /** - * How to send a message in staging mode. By default, use Transport.send(msg) - */ - lazy val stagingModeSend: Inject[MimeMessage => Unit] = new Inject[MimeMessage => Unit]((m: MimeMessage) => Transport.send(m)) {} - - /** - * How to send a message in production mode. By default, use Transport.send(msg) - */ - lazy val productionModeSend: Inject[MimeMessage => Unit] = new Inject[MimeMessage => Unit]((m: MimeMessage) => Transport.send(m)) {} - - /** - * How to send a message in pilot mode. By default, use Transport.send(msg) - */ - lazy val pilotModeSend: Inject[MimeMessage => Unit] = new Inject[MimeMessage => Unit]((m: MimeMessage) => Transport.send(m)) {} - - /** - * How to send a message in profile mode. By default, use Transport.send(msg) - */ - lazy val profileModeSend: Inject[MimeMessage => Unit] = new Inject[MimeMessage => Unit]((m: MimeMessage) => Transport.send(m)) {} - - /** - * Synchronously send an email. - */ - def blockingSendMail(from: From, subject: Subject, rest: MailTypes*) { - msgSendImpl(from, subject, rest.toList) - } - - def msgSendImpl(from: From, subject: Subject, info: List[MailTypes]) { - val session = authenticator match { - case Full(a) => jndiSession openOr Session.getInstance(buildProps, a) - case _ => jndiSession openOr Session.getInstance(buildProps) - } - val subj = MimeUtility.encodeText(subject.subject, "utf-8", "Q") - val message = new MimeMessage(session) - message.setFrom(from) - message.setRecipients(Message.RecipientType.TO, info.flatMap {case x: To => Some[To](x) case _ => None}) - message.setRecipients(Message.RecipientType.CC, info.flatMap {case x: CC => Some[CC](x) case _ => None}) - message.setRecipients(Message.RecipientType.BCC, info.flatMap {case x: BCC => Some[BCC](x) case _ => None}) - message.setSentDate(new java.util.Date()) - // message.setReplyTo(filter[MailTypes, ReplyTo](info, {case x @ ReplyTo(_) => Some(x); case _ => None})) - message.setReplyTo(info.flatMap {case x: ReplyTo => Some[ReplyTo](x) case _ => None}) - message.setSubject(subj) - info.foreach { - case MessageHeader(name, value) => message.addHeader(name, value) - case _ => - } - - val bodyTypes = info.flatMap {case x: MailBodyType => Some[MailBodyType](x); case _ => None} - bodyTypes match { - case PlainMailBodyType(txt) :: Nil => - message.setText(txt) - - case _ => - val multiPart = new MimeMultipart("alternative") - bodyTypes.foreach { - tab => - val bp = buildMailBody(tab) - multiPart.addBodyPart(bp) - } - message.setContent(multiPart); - } - - Mailer.this.performTransportSend(message) - } - - protected lazy val msgSender = new MsgSender - - /** - * The default mechanism for encoding a NodeSeq to a String representing HTML. By default, use Html5.toString(node) - */ - protected def encodeHtmlBodyPart(in: NodeSeq): String = Html5.toString(firstNode(in)) - - protected def firstNode(in: NodeSeq): Node = in match { - case n: Node => n - case ns => ns.toList.collect { - case e: Elem => e - } match { - case Nil => if (ns.length == 0) Text("") else ns(0) - case x :: xs => x - } - } - - /** - * Given a MailBodyType, convert it to a javax.mail.BodyPart. 
You can override this method if you - * add custom MailBodyTypes - */ - protected def buildMailBody(tab: MailBodyType): BodyPart = { - val bp = new MimeBodyPart - - tab match { - case PlainMailBodyType(txt) => - bp.setText(txt, "UTF-8") - - case PlainPlusBodyType(txt, charset) => - bp.setText(txt, charset) - - case XHTMLMailBodyType(html) => - bp.setContent(encodeHtmlBodyPart(html), "text/html; charset=" + charSet) - - case XHTMLPlusImages(html, img@_*) => - val (attachments, images) = img.partition(_.attachment) - val relatedMultipart = new MimeMultipart("related") - - val htmlBodyPart = new MimeBodyPart - htmlBodyPart.setContent(encodeHtmlBodyPart(html), "text/html; charset=" + charSet) - relatedMultipart.addBodyPart(htmlBodyPart) - - images.foreach { image => - relatedMultipart.addBodyPart(buildAttachment(image)) - } - - if (attachments.isEmpty) { - bp.setContent(relatedMultipart) - } else { - // Some old versions of Exchange server will not behave correclty without - // a mixed multipart wrapping file attachments. This appears to be linked to - // specific versions of Exchange and Outlook. See the discussion at - // https://github.com/lift/framework/pull/1569 for more details. - val mixedMultipart = new MimeMultipart("mixed") - - val relatedMultipartBodypart = new MimeBodyPart - relatedMultipartBodypart.setContent(relatedMultipart) - mixedMultipart.addBodyPart(relatedMultipartBodypart) - - attachments.foreach { attachment => - mixedMultipart.addBodyPart(buildAttachment(attachment)) - } - - bp.setContent(mixedMultipart) - } - } - - bp - } - - private def buildAttachment(holder: PlusImageHolder) = { - val part = new MimeBodyPart - - part.setFileName(holder.name) - part.setContentID(holder.name) - part.setDisposition(if (holder.attachment) Part.ATTACHMENT else Part.INLINE) - part.setDataHandler(new javax.activation.DataHandler(new javax.activation.DataSource { - def getContentType = holder.mimeType - def getInputStream = new java.io.ByteArrayInputStream(holder.bytes) - def getName = holder.name - def getOutputStream = throw new java.io.IOException("Unable to write to item") - })) - - part - } - - - /** - * Asynchronously send an email. - */ - def sendMail(from: From, subject: Subject, rest: MailTypes*) { - // forward it to an actor so there's no time on this thread spent sending the message - msgSender ! MessageInfo(from, subject, rest.toList) - } -} diff --git a/core/util/src/main/scala/net/liftweb/util/Maker.scala b/core/util/src/main/scala/net/liftweb/util/Maker.scala index ce5c2b0eb1..e8cacfbe8a 100644 --- a/core/util/src/main/scala/net/liftweb/util/Maker.scala +++ b/core/util/src/main/scala/net/liftweb/util/Maker.scala @@ -50,7 +50,7 @@ trait SimpleInjector extends Injector { /** * Register a function that will inject for the given Manifest */ - def registerInjection[T](f: () => T)(implicit man: Manifest[T]) { + def registerInjection[T](f: () => T)(implicit man: Manifest[T]): Unit = { diHash.put(man.toString, f) } diff --git a/core/util/src/main/scala/net/liftweb/util/MonadicConversions.scala b/core/util/src/main/scala/net/liftweb/util/MonadicConversions.scala index b203412abb..5d5b17b236 100644 --- a/core/util/src/main/scala/net/liftweb/util/MonadicConversions.scala +++ b/core/util/src/main/scala/net/liftweb/util/MonadicConversions.scala @@ -14,8 +14,8 @@ * limitations under the License. 
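Most of the `: Unit =` edits in this patch, such as the Maker.scala `registerInjection` change above, remove Scala 2 procedure syntax, which Scala 3 rejects. A hedged before-and-after sketch with a hypothetical method:

```
class Registry {
  private val items = scala.collection.mutable.ListBuffer.empty[String]

  // Scala 2 procedure syntax, no longer accepted by Scala 3:
  //   def register(name: String) { items += name }

  // Cross-buildable form with an explicit Unit result type:
  def register(name: String): Unit = {
    items += name
  }
}
```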
*/ -package net.liftweb -package util +package net.liftweb +package util /** * Holds the implicit conversions from/to MonadicCondition @@ -23,7 +23,7 @@ package util object MonadicConversions { import scala.language.implicitConversions - implicit def bool2Monadic(cond: Boolean) = cond match { + implicit def bool2Monadic(cond: Boolean): MonadicCondition = cond match { case true => True case _ => False(Nil) } diff --git a/core/util/src/main/scala/net/liftweb/util/PCDataMarkupParser.scala b/core/util/src/main/scala/net/liftweb/util/PCDataMarkupParser.scala index 709c22417c..8d7d78fb83 100644 --- a/core/util/src/main/scala/net/liftweb/util/PCDataMarkupParser.scala +++ b/core/util/src/main/scala/net/liftweb/util/PCDataMarkupParser.scala @@ -83,13 +83,13 @@ trait PCDataMarkupParser[PCM <: MarkupParser with MarkupHandler] extends MarkupP val sb: StringBuilder = new StringBuilder() while (true) { if (ch==']' && - { sb.append(ch); nextch; ch == ']' } && - { sb.append(ch); nextch; ch == '>' } ) { + { sb.append(ch); nextch(); ch == ']' } && + { sb.append(ch); nextch(); ch == '>' } ) { sb.setLength(sb.length - 2); - nextch; + nextch(); return PCData(sb.toString) } else sb.append( ch ); - nextch; + nextch(); } throw FatalError("this cannot happen"); } @@ -99,6 +99,9 @@ class PCDataXmlParser(val input: Source) extends ConstructingHandler with PCData val preserveWS = true ent ++= HtmlEntities() import scala.xml._ + + /** Public accessor for curInput.hasNext to work around Scala 3 protected access restrictions */ + def hasMoreInput: Boolean = curInput.hasNext /** parse attribute and create namespace scope, metadata * [41] Attributes ::= { S Name Eq AttValue } */ @@ -109,7 +112,7 @@ class PCDataXmlParser(val input: Source) extends ConstructingHandler with PCData val pos = this.pos val qname = xName - val _ = xEQ + val _ = xEQ() val value = xAttributeValue() Utility.prefix(qname) match { @@ -129,7 +132,7 @@ class PCDataXmlParser(val input: Source) extends ConstructingHandler with PCData } if ((ch != '/') && (ch != '>') && ('?' != ch)) - xSpace; + xSpace(); } def findIt(base: MetaData, what: MetaData): MetaData = (base, what) match { @@ -151,7 +154,7 @@ class PCDataXmlParser(val input: Source) extends ConstructingHandler with PCData /** * report a syntax error */ - override def reportSyntaxError(pos: Int, msg: String) { + override def reportSyntaxError(pos: Int, msg: String): Unit = { //error("MarkupParser::synerr") // DEBUG import scala.io._ @@ -190,8 +193,8 @@ object PCDataXmlParser { private def apply(source: Source): Box[NodeSeq] = { for { p <- tryo{new PCDataXmlParser(source)} - _ = while (p.ch != '<' && p.curInput.hasNext) p.nextch // side effects, baby - bd <- tryo(p.document) + _ = while (p.ch != '<' && p.hasMoreInput) p.nextch() // side effects, baby + bd <- tryo(p.document()) doc <- Box !! 
bd } yield (doc.children: NodeSeq) @@ -200,7 +203,7 @@ object PCDataXmlParser { def apply(in: String): Box[NodeSeq] = { var pos = 0 val len = in.length - def moveToLT() { + def moveToLT(): Unit = { while (pos < len && in.charAt(pos) != '<') { pos += 1 } @@ -327,7 +330,7 @@ object AltXML { case _ => // dunno what it is, but ignore it } - private def escape(str: String, sb: StringBuilder, reverse: Boolean) { + private def escape(str: String, sb: StringBuilder, reverse: Boolean): Unit = { val len = str.length var pos = 0 while (pos < len) { @@ -439,7 +442,7 @@ object AltXML { convertAmp: Boolean, legacyIeCompatibilityMode: Boolean): Unit = { val it = children.iterator while (it.hasNext) { - toXML(it.next, pscope, sb, stripComment, convertAmp, legacyIeCompatibilityMode) + toXML(it.next(), pscope, sb, stripComment, convertAmp, legacyIeCompatibilityMode) } } @@ -454,7 +457,7 @@ object AltXML { convertAmp: Boolean): Unit = { val it = children.iterator while (it.hasNext) { - toXML(it.next, pscope, sb, stripComment, convertAmp) + toXML(it.next(), pscope, sb, stripComment, convertAmp) } } diff --git a/core/util/src/main/scala/net/liftweb/util/Props.scala b/core/util/src/main/scala/net/liftweb/util/Props.scala index 13f8ef4d5f..6cb1198d85 100644 --- a/core/util/src/main/scala/net/liftweb/util/Props.scala +++ b/core/util/src/main/scala/net/liftweb/util/Props.scala @@ -51,7 +51,7 @@ private[util] trait Props extends Logger { } val interpolated = for { - interpolateRegex(before, key, after) <- interpolateRegex.findAllMatchIn(value.toString) + case interpolateRegex(before, key, after) <- interpolateRegex.findAllMatchIn(value.toString) } yield { val lookedUp = lookup(key).getOrElse(("${" + key + "}")) @@ -87,7 +87,7 @@ private[util] trait Props extends Logger { * Ensure that all of the specified properties exist; throw an exception if * any of the specified values are not keys for available properties. */ - def requireOrDie(what: String*) { + def requireOrDie(what: String*): Unit = { require(what :_*).toList match { case Nil => case bad => throw new Exception("The following required properties are not defined: "+bad.mkString(",")) @@ -214,7 +214,7 @@ private[util] trait Props extends Logger { def allowModification = !runModeInitialised - def onModificationProhibited() { + def onModificationProhibited(): Unit = { warn("Setting property " + name + " has no effect. Run mode already initialised to " + mode + ".") } } @@ -384,12 +384,12 @@ private[util] trait Props extends Logger { ret } } match { - // if we've got a propety file, create name/value pairs and turn them into a Map + // if we've got a property file, create name/value pairs and turn them into a Map case Full(prop) => Map(prop.entrySet.toArray.flatMap{ case s: JMap.Entry[_, _] => List((s.getKey.toString, s.getValue.toString)) case _ => Nil - } :_*) + }.toSeq :_*) case _ => error("Failed to find a properties file (but properties were accessed). 
Searched: "+tried.reverse.mkString(", ")) diff --git a/core/util/src/main/scala/net/liftweb/util/RE.scala b/core/util/src/main/scala/net/liftweb/util/RE.scala index c9be965959..48a4900451 100644 --- a/core/util/src/main/scala/net/liftweb/util/RE.scala +++ b/core/util/src/main/scala/net/liftweb/util/RE.scala @@ -129,8 +129,8 @@ class REMatcher(val str: String, val compiled: Pattern) { matcher.reset val cnt = matcher.groupCount - def doIt { - def runIt(pos: Int) { + def doIt: Unit = { + def runIt(pos: Int): Unit = { if (pos >= cnt) return else {ab += f(matcher.group(pos + 1)) ; runIt(pos + 1)} } diff --git a/core/util/src/main/scala/net/liftweb/util/RestoringWeakReference.scala b/core/util/src/main/scala/net/liftweb/util/RestoringWeakReference.scala index 5b8a95265a..4f60a0d5c9 100644 --- a/core/util/src/main/scala/net/liftweb/util/RestoringWeakReference.scala +++ b/core/util/src/main/scala/net/liftweb/util/RestoringWeakReference.scala @@ -74,10 +74,10 @@ class RestoringWeakReference[T <: AnyRef](private var reference:WeakReference[T] } object RestoringWeakReference { def apply[T <: AnyRef](restorer:()=>T) = { - new RestoringWeakReference(new WeakReference(restorer()), restorer) + new RestoringWeakReference(new WeakReference(restorer()), restorer) } def apply[T <: AnyRef](starter:T, restorer:()=>T) = { - new RestoringWeakReference(new WeakReference(starter), restorer) + new RestoringWeakReference(new WeakReference(starter), restorer) } } diff --git a/core/util/src/main/scala/net/liftweb/util/ScalaPosition.scala b/core/util/src/main/scala/net/liftweb/util/ScalaPosition.scala index fcade4c012..a73c5825eb 100644 --- a/core/util/src/main/scala/net/liftweb/util/ScalaPosition.scala +++ b/core/util/src/main/scala/net/liftweb/util/ScalaPosition.scala @@ -71,16 +71,16 @@ abstract class ScalaPosition { final def column(pos: Int): Int = pos & COLUMN_MASK /** Returns a string representation of the encoded position. 
 */ - def toString(pos: Int): String = line(pos) + ":" + column(pos) + def toString(pos: Int): String = s"${line(pos)}:${column(pos)}" } object ScalaPosition extends ScalaPosition { - def checkInput(line: Int, column: Int) { + def checkInput(line: Int, column: Int): Unit = { if (line < 0) - throw new IllegalArgumentException(line + " < 0") + throw new IllegalArgumentException(s"${line} < 0") if ((line == 0) && (column != 0)) - throw new IllegalArgumentException(line + "," + column + " not allowed") + throw new IllegalArgumentException(s"${line}, ${column} not allowed") if (column < 0) - throw new IllegalArgumentException(line + "," + column + " not allowed") + throw new IllegalArgumentException(s"${line}, ${column} not allowed") } } \ No newline at end of file diff --git a/core/util/src/main/scala/net/liftweb/util/Schedule.scala b/core/util/src/main/scala/net/liftweb/util/Schedule.scala index bc12e7f068..cb0fff3d32 100644 --- a/core/util/src/main/scala/net/liftweb/util/Schedule.scala +++ b/core/util/src/main/scala/net/liftweb/util/Schedule.scala @@ -58,7 +58,7 @@ sealed trait Schedule extends Loggable { */ @volatile var blockingQueueSize: Box[Int] = Full(200000) - @volatile var buildExecutor: () => ThreadPoolExecutor = + @volatile var buildExecutor: () => ExecutorService = () => new ThreadPoolExecutor(threadPoolSize, maxThreadPoolSize, 60, @@ -80,12 +80,12 @@ sealed trait Schedule extends Loggable { /** * Re-create the underlying SingleThreadScheduledExecutor */ - def restart: Unit = synchronized - { if ((service eq null) || service.isShutdown) - service = buildService() - if ((pool eq null) || pool.isShutdown) - pool = buildExecutor() - } + def restart: Unit = synchronized { + if ((service eq null) || service.isShutdown) + service = buildService() + if ((pool eq null) || pool.isShutdown) + pool = buildExecutor() + } /** @@ -150,7 +150,7 @@ sealed trait Schedule extends Loggable { def schedule(f: () => Unit, delay: TimeSpan): ScheduledFuture[Unit] = synchronized { val r = new Runnable { - def run() { + def run(): Unit = { try { f.apply() } catch { diff --git a/core/util/src/main/scala/net/liftweb/util/SecurityHelpers.scala b/core/util/src/main/scala/net/liftweb/util/SecurityHelpers.scala index 43ffb7ecd2..e03ef99878 100644 --- a/core/util/src/main/scala/net/liftweb/util/SecurityHelpers.scala +++ b/core/util/src/main/scala/net/liftweb/util/SecurityHelpers.scala @@ -17,20 +17,14 @@ package net.liftweb package util -import java.io._ -import java.security._ -import javax.crypto._ -import javax.crypto.spec._ -import javax.xml.parsers.SAXParserFactory -import javax.xml.XMLConstants - -import scala.xml.{Elem, XML} -import scala.xml.factory.XMLLoader - import org.apache.commons.codec.binary.Base64 import org.apache.xerces.impl.Constants -import common._ +import java.security._ +import javax.xml.XMLConstants +import javax.xml.parsers.SAXParserFactory +import scala.xml.factory.XMLLoader +import scala.xml.{Elem, XML} object SecurityHelpers extends StringHelpers with IoHelpers with SecurityHelpers @@ -189,7 +183,7 @@ trait SecurityHelpers { def hexEncode(in: Array[Byte]): String = { val sb = new StringBuilder val len = in.length - def addDigit(in: Array[Byte], pos: Int, len: Int, sb: StringBuilder) { + def addDigit(in: Array[Byte], pos: Int, len: Int, sb: StringBuilder): Unit = { if (pos < len) { val b: Int = in(pos) val msb = (b & 0xf0) >> 4 diff --git a/core/util/src/main/scala/net/liftweb/util/SoftReferenceCache.scala 
b/core/util/src/main/scala/net/liftweb/util/SoftReferenceCache.scala index 4db2159575..07de422640 100644 --- a/core/util/src/main/scala/net/liftweb/util/SoftReferenceCache.scala +++ b/core/util/src/main/scala/net/liftweb/util/SoftReferenceCache.scala @@ -54,8 +54,8 @@ object SoftReferenceCache { // A daemon thread is more approapriate here then an Actor as // we'll do blocking reads from the reference queue val thread = new Thread(new Runnable() { - def run() { - processQueue + def run(): Unit = { + processQueue() } }) thread.setDaemon(true) @@ -70,7 +70,7 @@ object SoftReferenceCache { terminated = true; } - private def processQueue { + private def processQueue(): Unit = { while (!terminated) { tryo { // Wait 30 seconds for something to appear in the queue. diff --git a/core/util/src/main/scala/net/liftweb/util/SourceInfo.scala b/core/util/src/main/scala/net/liftweb/util/SourceInfo.scala deleted file mode 100644 index 0780840228..0000000000 --- a/core/util/src/main/scala/net/liftweb/util/SourceInfo.scala +++ /dev/null @@ -1,132 +0,0 @@ -package net.liftweb.util - -import net.liftweb.common.Box -import scala.xml.NodeSeq -import util.parsing.json.JSONArray -import net.liftweb.json.JsonAST.JValue -import scala.reflect.runtime.universe._ - -/** - * A trait that allows an object to tell you about itself - * rather than using reflection - */ -trait SourceInfo { - /** - * Given a name, look up the field - * @param name the name of the field - * @return the metadata - */ - def findSourceField(name: String): Box[SourceFieldInfo] - - /** - * Get a list of all the fields - * @return a list of all the fields - */ - def allFieldNames(): Seq[(String, SourceFieldMetadata)] -} - -case class SourceFieldMetadataRep[A](name: String, manifest: TypeTag[A], converter: FieldConverter{type T = A}) extends SourceFieldMetadata { - type ST = A -} - -/** - * Metadata about a specific field - */ -trait SourceFieldMetadata { - /** - * The field's type - */ - type ST - - /** - * The fields name - * @return the field's name - */ - def name: String - - /** - * The field's manifest - * @return the field's manifest - */ - def manifest: TypeTag[ST] - - /** - * Something that will convert the field into known types like String and NodeSeq - * @return - */ - def converter: FieldConverter{ type T = ST} -} - -/** - * An inplementation of SourceFieldInfo - * - * @param value the value - * @param metaData the metadata - * @tparam A the type - */ -case class SourceFieldInfoRep[A](value: A, metaData: SourceFieldMetadata{type ST = A}) extends SourceFieldInfo { - type T = A -} - -/** - * Value and metadata for a field - */ -trait SourceFieldInfo{ - - /** - * The type of the field - */ - type T - - /** - * The field's value - * @return - */ - def value: T - - /** - * Metadata about the field - * @return - */ - def metaData: SourceFieldMetadata {type ST = T} -} - - -/** - * Convert the field into other representations - */ -trait FieldConverter { - /** - * The type of the field - */ - type T - - /** - * Convert the field to a String - * @param v the field value - * @return the string representation of the field value - */ - def asString(v: T): String - - /** - * Convert the field into NodeSeq, if possible - * @param v the field value - * @return a NodeSeq if the field can be represented as one - */ - def asNodeSeq(v: T): Box[NodeSeq] - - /** - * Convert the field into a JSON value - * @param v the field value - * @return the JSON representation of the field - */ - def asJson(v: T): Box[JValue] - - /** - * If the field can represent a 
sequence of SourceFields, - * get that - * @param v the field value - * @return the field as a sequence of SourceFields - */ - def asSeq(v: T): Box[Seq[SourceFieldInfo]] -} diff --git a/core/util/src/main/scala/net/liftweb/util/StringHelpers.scala b/core/util/src/main/scala/net/liftweb/util/StringHelpers.scala index 458b60eeb5..cfe8511564 100644 --- a/core/util/src/main/scala/net/liftweb/util/StringHelpers.scala +++ b/core/util/src/main/scala/net/liftweb/util/StringHelpers.scala @@ -54,11 +54,11 @@ trait StringHelpers { def splitNameValuePairs(props: String): Map[String, String] = { val list = props.split(",").toList.map(in => { val pair = in.roboSplit("=") - (pair(0), unquote(pair(1))) + (pair(0), unquote(pair(1))) }) val map: Map[String, String] = Map.empty - (map /: list)((m, next) => m + (next)) + list.foldLeft(map) { (m, next) => m + (next) } } /** diff --git a/core/util/src/main/scala/net/liftweb/util/TemplateCache.scala b/core/util/src/main/scala/net/liftweb/util/TemplateCache.scala index 495a759ee1..bfbc66379c 100644 --- a/core/util/src/main/scala/net/liftweb/util/TemplateCache.scala +++ b/core/util/src/main/scala/net/liftweb/util/TemplateCache.scala @@ -53,7 +53,7 @@ object NoCache extends TemplateCache[(Locale, List[String]), NodeSeq] { def set(key: T, node: NodeSeq): NodeSeq = node - def delete(key: T) { + def delete(key: T): Unit = { } } @@ -82,7 +82,7 @@ TemplateCache[(Locale, List[String]), NodeSeq] { node } - override def delete(key: T) { + override def delete(key: T): Unit = { cache.synchronized(cache.remove(key)) } diff --git a/core/util/src/main/scala/net/liftweb/util/TimeHelpers.scala b/core/util/src/main/scala/net/liftweb/util/TimeHelpers.scala index 51c49af1da..24e9023065 100644 --- a/core/util/src/main/scala/net/liftweb/util/TimeHelpers.scala +++ b/core/util/src/main/scala/net/liftweb/util/TimeHelpers.scala @@ -274,45 +274,17 @@ trait TimeHelpers { self: ControlHelpers => (total._1 / div._1, (total._1 % div._1, div._2) :: total._2) }._2 def formatAmount(amountUnit: (Long, String)) = amountUnit match { - case (amount, unit) if (amount == 1) => amount + " " + unit - case (amount, unit) => amount + " " + unit + "s" + case (amount, unit) if (amount == 1) => s"${amount} ${unit}" + case (amount, unit) => s"${amount} ${unit}s" } divideInUnits(millis).filter(_._1 > 0).map(formatAmount(_)).mkString(", ") } - /** - * Convert a Date to a TimeSpan - */ - @deprecated("Date to TimeSpan conversion will be removed for possibility of mistakes in on-duration operations", "3.0.0") - implicit def dateToTS(in: Date): TimeSpan = - new TimeSpan(Left(new Duration(in.getTime))) - - /** * Convert a Duration to a TimeSpan */ implicit def durationToTS(in: Duration): TimeSpan = new TimeSpan(Left(in)) - - /** - * Convert a Period to a TimeSpan - */ - @deprecated("Implicit conversion from Period to TimeSpan will be removed due to its unclear behavior; use new Period(timeSpan.millis) instead.", "3.0.0") - implicit def periodToTS(in: Period): TimeSpan = - new TimeSpan(Right(in)) - - /** - * Convert a TimeSpan to a Period - */ - @deprecated("Implicit conversion from TimeSpan to Period will be removed due to its unclear behavior; use new TimeSpan(period.toDurationFrom(startDateTime)) instead.", "3.0.0") - implicit def tsToPeriod[TS <% TimeSpan](in: TS): Period = in.toPeriod - - /** - * Convert a DateTime to a TimeSpan - */ - @deprecated("Implicit conversion from DateTime to TimeSpan will be removed due to its unclear behavior; use new TimeSpan(dateTime.getMillis) instead.", "3.0.0") - implicit def 
dateTimeToTS(in: DateTime): TimeSpan = - new TimeSpan(Left(new Duration(in.getMillis))) } /** @return the current System.nanoTime() */ @@ -337,7 +309,7 @@ trait TimeHelpers { self: ControlHelpers => def weeks(in: Long): Long = days(in) * 7L /** implicit def used to add the noTime method to the Date class */ - implicit def toDateExtension(d: Date) = new DateExtension(d) + implicit def toDateExtension(d: Date): DateExtension = new DateExtension(d) /** This class adds a noTime method the Date class, in order to get at Date object starting at 00:00 */ class DateExtension(date: Date) { @@ -359,7 +331,7 @@ trait TimeHelpers { self: ControlHelpers => } /** implicit def used to add the setXXX methods to the Calendar class */ - implicit def toCalendarExtension(c: Calendar) = new CalendarExtension(c) + implicit def toCalendarExtension(c: Calendar): CalendarExtension = new CalendarExtension(c) /** This class adds the setXXX methods to the Calendar class. Each setter returns the updated Calendar */ class CalendarExtension(c: Calendar) { @@ -517,13 +489,6 @@ trait TimeHelpers { self: ControlHelpers => case e: Exception => logger.debug("Error parsing date "+in, e); Failure("Bad date: "+in, Full(e), Empty) } } - - implicit class PeriodExtension[P <% Period](period: P) { - def later: DateTime = new DateTime(millis).plus(period) - - def ago: DateTime = new DateTime(millis).minus(period) - } - } trait ConvertableToDate { @@ -536,4 +501,4 @@ object ConvertableToDate { implicit def toDate(in: ConvertableToDate): Date = in.toDate implicit def toDateTime(in: ConvertableToDate): DateTime = in.toDateTime implicit def toMillis(in: ConvertableToDate): Long = in.millis -} \ No newline at end of file +} diff --git a/core/util/src/test/scala/net/liftweb/util/ClassHelpersSpec.scala b/core/util/src/test/scala/net/liftweb/util/ClassHelpersSpec.scala index 48bc36a423..287cdf1469 100644 --- a/core/util/src/test/scala/net/liftweb/util/ClassHelpersSpec.scala +++ b/core/util/src/test/scala/net/liftweb/util/ClassHelpersSpec.scala @@ -177,7 +177,7 @@ class ClassHelpersSpec extends Specification { createInvoker("length", "").openOrThrowException("Test").apply() must_== Full(0) } "The invoker function will throw the cause exception if the method can't be called" in { - (() => createInvoker("get", "").openOrThrowException("Test").apply)() must throwA[Exception] + (() => createInvoker("get", "").openOrThrowException("Test").apply())() must throwA[Exception] } } diff --git a/core/util/src/test/scala/net/liftweb/util/CombParserHelpersSpec.scala b/core/util/src/test/scala/net/liftweb/util/CombParserHelpersSpec.scala index 20e111ba49..a39549f811 100644 --- a/core/util/src/test/scala/net/liftweb/util/CombParserHelpersSpec.scala +++ b/core/util/src/test/scala/net/liftweb/util/CombParserHelpersSpec.scala @@ -125,7 +125,7 @@ class CombParserHelpersSpec extends Specification with ScalaCheck { } "provide a permuteAll parser succeeding if any permutation of the list given parsers, or a sublist of the given parsers succeeds" in { def permuteAllParsers(s: String) = shouldSucceed(permuteAll(parserA, parserB, parserC, parserD)(s)) - implicit def pick3Letters = AbcdStringGen.pickN(3, List("a", "b", "c")) + implicit def pick3Letters: Arbitrary[String] = AbcdStringGen.pickN(3, List("a", "b", "c")) forAll { (s: String) => ((new scala.collection.immutable.StringOps(s)).nonEmpty) ==> permuteAllParsers(s) @@ -133,7 +133,7 @@ class CombParserHelpersSpec extends Specification with ScalaCheck { } "provide a repNN parser succeeding if an input can be parsed 
n times with a parser" in { def repNNParser(s: String) = shouldSucceed(repNN(3, parserA)(s)) - implicit def pick3Letters = AbcdStringGen.pickN(3, List("a", "a", "a")) + implicit def pick3Letters: Arbitrary[String] = AbcdStringGen.pickN(3, List("a", "a", "a")) forAll { (s: String) => ((new scala.collection.immutable.StringOps(s)).nonEmpty) ==> repNNParser(s) @@ -144,7 +144,7 @@ class CombParserHelpersSpec extends Specification with ScalaCheck { object AbcdStringGen { - implicit def abcdString = + implicit def abcdString: Gen[String] = for ( len <- choose(4, 4); string <- pick(len, List("a", "b", "c", "d")) diff --git a/core/util/src/test/scala/net/liftweb/util/CssSelectorSpec.scala b/core/util/src/test/scala/net/liftweb/util/CssSelectorSpec.scala index 6a65929c25..828ed38202 100755 --- a/core/util/src/test/scala/net/liftweb/util/CssSelectorSpec.scala +++ b/core/util/src/test/scala/net/liftweb/util/CssSelectorSpec.scala @@ -37,7 +37,7 @@ class CssSelectorSpec extends Specification with XmlMatchers { } "select an id" in { - CssSelectorParser.parse("#foo").openOrThrowException("If the box is empty, we want a failure") must_== + CssSelectorParser.parse("#foo").openOrThrowException("If the box is empty, we want a failure") must_== IdSelector("foo", Empty) } @@ -50,53 +50,53 @@ class CssSelectorSpec extends Specification with XmlMatchers { } ":button must parse" in { - CssSelectorParser.parse(":button").openOrThrowException("If the box is empty, we want a failure") must_== + CssSelectorParser.parse(":button").openOrThrowException("If the box is empty, we want a failure") must_== AttrSelector("type", "button", Empty) } ":checkbox must parse" in { - CssSelectorParser.parse(":checkbox").openOrThrowException("If the box is empty, we want a failure") must_== + CssSelectorParser.parse(":checkbox").openOrThrowException("If the box is empty, we want a failure") must_== AttrSelector("type", "checkbox", Empty) } ":file must parse" in { - CssSelectorParser.parse(":file").openOrThrowException("If the box is empty, we want a failure") must_== + CssSelectorParser.parse(":file").openOrThrowException("If the box is empty, we want a failure") must_== AttrSelector("type", "file", Empty) } ":password must parse" in { - CssSelectorParser.parse(":password").openOrThrowException("If the box is empty, we want a failure") must_== + CssSelectorParser.parse(":password").openOrThrowException("If the box is empty, we want a failure") must_== AttrSelector("type", "password", Empty) } ":radio must parse" in { - CssSelectorParser.parse(":radio").openOrThrowException("If the box is empty, we want a failure") must_== + CssSelectorParser.parse(":radio").openOrThrowException("If the box is empty, we want a failure") must_== AttrSelector("type", "radio", Empty) } ":reset must parse" in { - CssSelectorParser.parse(":reset").openOrThrowException("If the box is empty, we want a failure") must_== + CssSelectorParser.parse(":reset").openOrThrowException("If the box is empty, we want a failure") must_== AttrSelector("type", "reset", Empty) } ":submit must parse" in { - CssSelectorParser.parse(":submit").openOrThrowException("If the box is empty, we want a failure") must_== + CssSelectorParser.parse(":submit").openOrThrowException("If the box is empty, we want a failure") must_== AttrSelector("type", "submit", Empty) } ":text must parse" in { - CssSelectorParser.parse(":text").openOrThrowException("If the box is empty, we want a failure") must_== + CssSelectorParser.parse(":text").openOrThrowException("If the box is empty, we want a 
failure") must_== AttrSelector("type", "text", Empty) } "select an id with attr subnodes" in { - CssSelectorParser.parse("#foo *[dog] ").openOrThrowException("If the box is empty, we want a failure") must_== + CssSelectorParser.parse("#foo *[dog] ").openOrThrowException("If the box is empty, we want a failure") must_== IdSelector("foo", Full(AttrSubNode("dog"))) } "select an id with no star attr subnodes" in { - CssSelectorParser.parse("#foo [woof] ").openOrThrowException("If the box is empty, we want a failure") must_== + CssSelectorParser.parse("#foo [woof] ").openOrThrowException("If the box is empty, we want a failure") must_== IdSelector("foo", Full(AttrSubNode("woof"))) } @@ -184,27 +184,27 @@ class CssSelectorSpec extends Specification with XmlMatchers { } "select a class with subnodes" in { - CssSelectorParser.parse(".foo * ").openOrThrowException("If the box is empty, we want a failure") must_== + CssSelectorParser.parse(".foo * ").openOrThrowException("If the box is empty, we want a failure") must_== ClassSelector("foo", Full(KidsSubNode())) } "Support selecting this node" in { - CssSelectorParser.parse(".foo ^^ ").openOrThrowException("If the box is empty, we want a failure") must_== + CssSelectorParser.parse(".foo ^^ ").openOrThrowException("If the box is empty, we want a failure") must_== ClassSelector("foo", Full(SelectThisNode(false))) } "Support selecting this node" in { - CssSelectorParser.parse(".foo ^* ").openOrThrowException("If the box is empty, we want a failure") must_== + CssSelectorParser.parse(".foo ^* ").openOrThrowException("If the box is empty, we want a failure") must_== ClassSelector("foo", Full(SelectThisNode(true))) } "select a class with attr subnodes" in { - CssSelectorParser.parse(".foo *[dog] ").openOrThrowException("If the box is empty, we want a failure") must_== + CssSelectorParser.parse(".foo *[dog] ").openOrThrowException("If the box is empty, we want a failure") must_== ClassSelector("foo", Full(AttrSubNode("dog"))) } "select an id with no star attr subnodes" in { - CssSelectorParser.parse(".foo [woof] ").openOrThrowException("If the box is empty, we want a failure") must_== + CssSelectorParser.parse(".foo [woof] ").openOrThrowException("If the box is empty, we want a failure") must_== ClassSelector("foo", Full(AttrSubNode("woof"))) } @@ -564,7 +564,7 @@ class CssBindHelpersSpec extends Specification with XmlMatchers { "Java number support" in { - val f = "a *" #> Full(new java.lang.Long(12)) + val f = "a *" #> Full(java.lang.Long.valueOf(12)) val xml = Hello f(xml) must ==/ (12) @@ -925,7 +925,7 @@ object CheckTheImplicitConversionsForToCssBindPromoter { bog #> "Hello" bog #> bog #> 1 - bog #> 'foo + bog #> Symbol("foo") bog #> 44L bog #> 1.22 bog #> false @@ -964,14 +964,14 @@ object CheckTheImplicitConversionsForToCssBindPromoter { val nsf: NodeSeq => NodeSeq = bog #> "Hello" & bog #> & bog #> 1 & - bog #> 'foo & + bog #> Symbol("foo") & bog #> 44L & bog #> false "foo" #> "Hello" "foo" #> "foo" #> 1 - "foo" #> 'foo + "foo" #> Symbol("foo") "foo" #> 44L "foo" #> false @@ -1004,7 +1004,7 @@ object CheckTheImplicitConversionsForToCssBindPromoter { val nsf2: NodeSeq => NodeSeq = "foo" #> "Hello" & "foo" #> & "foo" #> 1 & - "foo" #> 'foo & + "foo" #> Symbol("foo") & "foo" #> 44L & "foo" #> false diff --git a/core/util/src/test/scala/net/liftweb/util/EnumWithDescriptionSpec.scala b/core/util/src/test/scala/net/liftweb/util/EnumWithDescriptionSpec.scala deleted file mode 100644 index 9996640a86..0000000000 --- 
a/core/util/src/test/scala/net/liftweb/util/EnumWithDescriptionSpec.scala +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright 2007-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package util - -import org.specs2.mutable.Specification - - -/** - * Systems under specification for EnumWithDescription. - */ -class EnumWithDescriptionSpec extends Specification { - "EnumWithDescription Specification".title - - "An enumWithDescription" should { - "have a name" in { - val t = Title2.valueOf("MR") == Some(Title2.mr) - t must beTrue - } - - "have a name" in { - Title1.mr.toString must_== "MR" - } - - "have a type 1" in { - Title1.mr mustEqual Title1.mr - } - - "have a type 2" in { - Title1.mr mustEqual Title1.valueOf("MR").getOrElse(null) - } - - "have a type 3" in { - Title1.dr mustEqual Title1.valueOf("DR").getOrElse(null) - } - - "have a mr description" in { - Title1.mr.description must_== "Mr" - } - - "be able to be created from a string name" in { - Title1.valueOf("MRS").getOrElse(null) mustEqual Title1.mrs - } - - "have a mrs description" in { - Title1.valueOf("MRS").getOrElse(null).description must beMatching ("Mrs") - } - } - -} - -object Title1 extends EnumWithDescription { - val mr = Value("MR", "Mr") - val mrs = Value("MRS", "Mrs") - val dr = Value("DR", "Dr") - val sir = Value("SirS", "Sir") -} - - -object Title2 extends EnumWithDescription { - val mr = Value("MR", "Mr") - val mrs = Value("MRS", "Mrs") -} - diff --git a/core/util/src/test/scala/net/liftweb/util/JsonCommandSpec.scala b/core/util/src/test/scala/net/liftweb/util/JsonCommandSpec.scala index a234c80913..40347fd5ed 100644 --- a/core/util/src/test/scala/net/liftweb/util/JsonCommandSpec.scala +++ b/core/util/src/test/scala/net/liftweb/util/JsonCommandSpec.scala @@ -19,7 +19,9 @@ package util import org.specs2.mutable.Specification -import json._ +import org.json4s._ +import org.json4s.native.JsonParser +import org.json4s.native.JsonMethods._ /** diff --git a/core/util/src/test/scala/net/liftweb/util/MailerSpec.scala b/core/util/src/test/scala/net/liftweb/util/MailerSpec.scala deleted file mode 100644 index a5cb0ba238..0000000000 --- a/core/util/src/test/scala/net/liftweb/util/MailerSpec.scala +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package util - -import javax.mail.internet.{MimeMessage, MimeMultipart} - -import org.specs2.mutable.Specification - -import common._ - -import Mailer.{From, To, Subject, PlainMailBodyType, XHTMLMailBodyType, XHTMLPlusImages, PlusImageHolder} - -import scala.io.Source - -trait MailerForTesting { - def lastMessage_=(message: Box[MimeMessage]): Unit - def lastMessage: Box[MimeMessage] -} - -/** - * Systems under specification for Lift Mailer. - */ -class MailerSpec extends Specification { - "Mailer Specification".title - sequential - - Props.mode // touch the lazy val so it's detected correctly - - val myMailer = new Mailer with MailerForTesting { - @volatile var lastMessage: Box[MimeMessage] = Empty - - testModeSend.default.set((msg: MimeMessage) => { - lastMessage = Full(msg) - }) - } - - import myMailer._ - - private def doNewMessage(send: => Unit): MimeMessage = { - lastMessage = Empty - - send - - eventually { - lastMessage.isEmpty must_== false - } - lastMessage openOrThrowException("Checked") - } - - "A Mailer" should { - - "deliver simple messages as simple messages" in { - val msg = doNewMessage { - sendMail( - From("sender@nowhere.com"), - Subject("This is a simple email"), - To("recipient@nowhere.com"), - PlainMailBodyType("Here is some plain text.") - ) - } - - msg.getContent must beAnInstanceOf[String] - } - - "deliver multipart messages as multipart" in { - val msg = doNewMessage { - sendMail( - From("sender@nowhere.com"), - Subject("This is a multipart email"), - To("recipient@nowhere.com"), - PlainMailBodyType("Here is some plain text."), - PlainMailBodyType("Here is some more plain text.") - ) - } - - msg.getContent must beAnInstanceOf[MimeMultipart] - } - - "deliver rich messages as multipart" in { - val msg = doNewMessage { - sendMail( - From("sender@nowhere.com"), - Subject("This is a rich email"), - To("recipient@nowhere.com"), - XHTMLMailBodyType( Here is some rich text ) - ) - } - - msg.getContent must beAnInstanceOf[MimeMultipart] - } - - "deliver emails with attachments as mixed multipart" in { - val attachmentBytes = Source.fromInputStream( - getClass.getClassLoader.getResourceAsStream("net/liftweb/util/Html5ParserSpec.page1.html") - ).map(_.toByte).toArray - val msg = doNewMessage { - sendMail( - From("sender@nowhere.com"), - Subject("This is a mixed email"), - To("recipient@nowhere.com"), - XHTMLPlusImages( - Here is some rich text , - PlusImageHolder("awesome.pdf", "text/html", attachmentBytes, true) - ) - ) - } - - msg.getContent must beLike { - case mp: MimeMultipart => - mp.getContentType.substring(0, 21) must_== "multipart/alternative" - - mp.getBodyPart(0).getContent must beLike { - case mp2: MimeMultipart => - mp2.getContentType.substring(0, 15) must_== "multipart/mixed" - } - } - } - } -} diff --git a/core/util/src/test/scala/net/liftweb/util/ScheduleSpec.scala b/core/util/src/test/scala/net/liftweb/util/ScheduleSpec.scala index 0f5ddd1ccc..bd06058cbf 100644 --- a/core/util/src/test/scala/net/liftweb/util/ScheduleSpec.scala +++ b/core/util/src/test/scala/net/liftweb/util/ScheduleSpec.scala @@ -46,14 +46,14 @@ class ScheduleSpec extends Specification with PendingUntilFixed with PingedServi service.pinged must eventually(beTrue) } "honor shutdown followed by restart" in { - Schedule.shutdown + Schedule.shutdown() Schedule.restart Schedule.schedule(service, Alive, TimeSpan(10)) service.pinged must eventually(beTrue) } "not honor multiple shutdowns" in { - Schedule.shutdown - Schedule.shutdown + Schedule.shutdown() + 
Schedule.shutdown() // service.pinged must eventually(beFalse) service.pinged must throwA[ActorPingException] }.pendingUntilFixed diff --git a/core/util/src/test/scala/net/liftweb/util/TimeHelpersSpec.scala b/core/util/src/test/scala/net/liftweb/util/TimeHelpersSpec.scala index a837274cfd..2792fe20cc 100644 --- a/core/util/src/test/scala/net/liftweb/util/TimeHelpersSpec.scala +++ b/core/util/src/test/scala/net/liftweb/util/TimeHelpersSpec.scala @@ -66,17 +66,6 @@ class TimeHelpersSpec extends Specification with ScalaCheck with TimeAmountsGen "be converted to a date starting from the epoch time, using the date method" in forAllTimeZones { 3.seconds.after(new Date(0)) must beTrue } - "be implicitly converted to a Long" in forAllTimeZones { - (3.seconds == 3000L) must_== true - } - "be compared to an int" in forAllTimeZones { - (3.seconds == 3000) must_== true - (3.seconds != 2000) must_== true - } - "be compared to a long" in forAllTimeZones { - (3.seconds == 3000L) must_== true - (3.seconds != 2000L) must_== true - } "be compared to another TimeSpan" in forAllTimeZones { 3.seconds must_== 3.seconds 3.seconds must_!= 2.seconds @@ -93,16 +82,6 @@ class TimeHelpersSpec extends Specification with ScalaCheck with TimeAmountsGen "return a new TimeSpan representing the difference of the 2 times when substracted with another TimeSpan" in forAllTimeZones { 3.seconds - 4.seconds must_== (-1).seconds } - "have a later method returning a date relative to now plus the time span" in forAllTimeZones { - val expectedTime = new Date().getTime + 3.seconds.millis - - 3.seconds.later.getMillis must beCloseTo(expectedTime, 1000L) - } - "have an ago method returning a date relative to now minus the time span" in forAllTimeZones { - val expectedTime = new Date().getTime - 3.seconds.millis - - 3.seconds.ago.getMillis must beCloseTo(expectedTime, 1000L) - } "have a toString method returning the relevant number of weeks, days, hours, minutes, seconds, millis" in forAllTimeZones { val conversionIsOk = forAll(timeAmounts)((t: TimeAmounts) => { val (timeSpanToString, timeSpanAmounts) = t timeSpanAmounts forall { case (amount, unit) => @@ -140,11 +119,6 @@ class TimeHelpersSpec extends Specification with ScalaCheck with TimeAmountsGen hourFormat(now.noTime) must_== "00:00:00" } - "make sure noTime does not change the day" in forAllTimeZones { - dateFormatter.format(0.days.ago.noTime.toDate) must_== dateFormatter.format(new DateTime().toDate) - dateFormatter.format(3.days.ago.noTime.toDate) must_== dateFormatter.format(new Date(millis - (3 * 24 * 60 * 60 * 1000))) - } - "provide a day function returning the day of month corresponding to a given date (relative to UTC)" in forAllTimeZones { day(today.setTimezone(utc).setDay(3).getTime) must_== 3 } @@ -232,7 +206,7 @@ object forAllTimeZones extends Around { import MatchersImplicits._ override def around[T: AsResult](f: => T) = synchronized { - import scala.collection.JavaConverters._ + import scala.jdk.CollectionConverters._ // setDefault is on static context so tests should be sequenced // some timezones for java (used in formatters) and for Joda (other computations) has other offset val commonJavaAndJodaTimeZones = (TimeZone.getAvailableIDs.toSet & DateTimeZone.getAvailableIDs.asScala.toSet).filter { timeZoneId => diff --git a/core/util/src/test/scala/net/liftweb/util/VCardParserSpec.scala b/core/util/src/test/scala/net/liftweb/util/VCardParserSpec.scala index 3905ef27d3..0607b22749 100644 --- a/core/util/src/test/scala/net/liftweb/util/VCardParserSpec.scala +++ 
b/core/util/src/test/scala/net/liftweb/util/VCardParserSpec.scala @@ -27,20 +27,19 @@ import org.specs2.mutable.Specification class VCardParserSpec extends Specification with XmlMatchers { "VCardParser Specification".title + val vcard = + """BEGIN:VCARD + |VERSION:2.1 + |N:Gump;Forrest + |FN:Forrest Gump + |ORG:Bubba Gump Shrimp Co. + |TITLE:Shrimp Man + |TEL;WORK;VOICE:(111) 555-1212 + |TEL;HOME;VOICE:(404) 555-1212 + |END:VCARD""".stripMargin + "VCard" should { "parse a basic VCard (2.1) correctly" in { - - val vcard = - """BEGIN:VCARD - |VERSION:2.1 - |N:Gump;Forrest - |FN:Forrest Gump - |ORG:Bubba Gump Shrimp Co. - |TITLE:Shrimp Man - |TEL;WORK;VOICE:(111) 555-1212 - |TEL;HOME;VOICE:(404) 555-1212 - |END:VCARD""".stripMargin - val list = VCardParser.parse(vcard) list must beLike { case Left(l) => { @@ -58,7 +57,6 @@ class VCardParserSpec extends Specification with XmlMatchers { VCardEntry(VCardKey("END", Nil), List("VCARD"))) } } - } "parse a basic Apple VCard (3.0) correctly" in { diff --git a/dexy.conf b/dexy.conf deleted file mode 100644 index 4fde382cc2..0000000000 --- a/dexy.conf +++ /dev/null @@ -1 +0,0 @@ -outputroot: docs diff --git a/dexy.yaml b/dexy.yaml deleted file mode 100644 index 88a79420ea..0000000000 --- a/dexy.yaml +++ /dev/null @@ -1,14 +0,0 @@ - - html-docs: - - docs/*.adoc|asciidoctor - - - example-extraction: - - docs/scripts/extract-css-selector-examples.sh|bash: - - sh: - timeout: 500 - - html-docs - - - example-verification: - - docs/scripts/run-css-selector-examples.sh|bash: - - sh: - timeout: 500 - - example-extraction diff --git a/dexyplugin.yaml b/dexyplugin.yaml deleted file mode 100644 index c0a2b50b8f..0000000000 --- a/dexyplugin.yaml +++ /dev/null @@ -1,3 +0,0 @@ -reporter:output: - dir: target/docs - readme-filename: None diff --git a/liftsh b/liftsh deleted file mode 100755 index f96e94f03f..0000000000 --- a/liftsh +++ /dev/null @@ -1,35 +0,0 @@ -#!/bin/sh - -# Make sure to change the name of the launcher jar and the source when bumping sbt version -# so that the existence test below fails and we download the new jar. -SBT_LAUNCHER_PATH="project/sbt-launch-0.13.15.jar" -SBT_LAUNCHER_SOURCE="https://dl.bintray.com/sbt/native-packages/sbt/0.13.15/sbt-0.13.15.tgz" - -# Download the sbt launcher on-the-fly if it's not already in the repository. -if test ! -f $SBT_LAUNCHER_PATH; then - BASEDIR=$(dirname $SBT_LAUNCHER_PATH) - echo "Downloading sbt launcher..." - curl -L -o ${SBT_LAUNCHER_PATH}.tgz ${SBT_LAUNCHER_SOURCE} - tar xf $SBT_LAUNCHER_PATH.tgz -C $BASEDIR - mv $BASEDIR/sbt/bin/sbt-launch.jar $SBT_LAUNCHER_PATH - rm -rf $BASEDIR/sbt -fi - -# Load custom liftsh config -if test -f ~/.liftsh.config; then - . ~/.liftsh.config -fi - -# Internal options, always specified -INTERNAL_OPTS="-Dfile.encoding=UTF-8 -Xss256m -Xmx2048m -noverify -XX:ReservedCodeCacheSize=296m -XX:+CMSClassUnloadingEnabled -XX:+UseConcMarkSweepGC -XX:MaxPermSize=812m" - -# Add 64bit specific option -exec java -version 2>&1 | grep -q "64-Bit" && INTERNAL_OPTS="${INTERNAL_OPTS} -XX:+UseCompressedOops -XX:ReservedCodeCacheSize=328m" - -# Default options, if nothing is specified -DEFAULT_OPTS="" - -cd `dirname $0` - -# Call with INTERNAL_OPTS followed by LIFTSH_OPTS (or DEFAULT_OPTS). java always takes the last option when duplicate. 
-exec java ${INTERNAL_OPTS} ${LIFTSH_OPTS:-${DEFAULT_OPTS}} -jar ${SBT_LAUNCHER_PATH} "$@" diff --git a/liftsh.cmd b/liftsh.cmd deleted file mode 100644 index a47587972f..0000000000 --- a/liftsh.cmd +++ /dev/null @@ -1,27 +0,0 @@ -@rem This script needs to be updated for 0.13.8. It probably isn't working. - -@echo off - -set SBT_LAUNCHER_PATH="project\sbt-launch-0.13.5.jar" -set SBT_LAUNCHER_SOURCE="http://repo.typesafe.com/typesafe/ivy-releases/org.scala-sbt/sbt-launch/0.13.5/sbt-launch.jar" - -if not exist %SBT_LAUNCHER_PATH% powershell -Command "(New-Object Net.WebClient).DownloadFile('%SBT_LAUNCHER_SOURCE%', '%SBT_LAUNCHER_PATH%')" - -@REM Internal options, always specified -set INTERNAL_OPTS=-Dfile.encoding=UTF-8 -Xmx768m -noverify -XX:ReservedCodeCacheSize=256m -XX:+CMSClassUnloadingEnabled -XX:+UseConcMarkSweepGC -XX:MaxPermSize=512m - -@REM Add 64bit specific option -java -version 2>&1 | find "64-Bit" >nul: -if not errorlevel 1 ( - set INTERNAL_OPTS=%INTERNAL_OPTS% -XX:+UseCompressedOops -XX:ReservedCodeCacheSize=128m -) - -@REM Default options, if nothing is specified -set DEFAULT_OPTS= - -if "%LIFTSH_OPTS%"=="" ( - set LIFTSH_OPTS=%DEFAULT_OPTS% -) - -@REM Call with INTERNAL_OPTS followed by LIFTSH_OPTS (or DEFAULT_OPTS). java always takes the last option when duplicate. -java %INTERNAL_OPTS% %LIFTSH_OPTS% -jar "%~dp0\%SBT_LAUNCHER_PATH%" %* diff --git a/npmsh b/npmsh deleted file mode 100755 index dd0cfc7936..0000000000 --- a/npmsh +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/bash - -set -ev - -cd web/webkit -npm run lint -npm run test -cd - diff --git a/persistence/db/src/main/scala/net/liftweb/db/ConnectionManager.scala b/persistence/db/src/main/scala/net/liftweb/db/ConnectionManager.scala deleted file mode 100644 index 293c5b3125..0000000000 --- a/persistence/db/src/main/scala/net/liftweb/db/ConnectionManager.scala +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright 2006-2014 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package db - -import java.sql.Connection -import net.liftweb.common._ -import net.liftweb.util.ConnectionIdentifier - -/** - * Vend JDBC connections - */ -trait ConnectionManager { - def newConnection(name: ConnectionIdentifier): Box[Connection] - def releaseConnection(conn: Connection) - def newSuperConnection(name: ConnectionIdentifier): Box[SuperConnection] = Empty -} - diff --git a/persistence/db/src/main/scala/net/liftweb/db/DB.scala b/persistence/db/src/main/scala/net/liftweb/db/DB.scala deleted file mode 100644 index 39871470cb..0000000000 --- a/persistence/db/src/main/scala/net/liftweb/db/DB.scala +++ /dev/null @@ -1,1231 +0,0 @@ -/* - * Copyright 2006-2014 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package db - -import common._ -import util._ -import Helpers._ - -import net.liftweb.http.S - -import javax.sql.{DataSource} -import java.sql.{ResultSetMetaData, SQLException} -import java.sql.{Statement, ResultSet, Types, PreparedStatement, Connection, DriverManager} -import scala.collection.mutable.{HashMap, ListBuffer} -import javax.naming.{Context, InitialContext} - -trait DB1 - -object DB1 { - implicit def db1ToDb(in: DB1): DB = DB.theDB -} - -object DB extends DB1 { - - private[db] lazy val theDB = new DB{} -} - -trait DB extends Loggable { - private val threadStore = new ThreadLocal[HashMap[ConnectionIdentifier, ConnectionHolder]] - private val _postCommitFuncs = new ThreadLocal[List[() => Unit]] - - var globalDefaultSchemaName: Box[String] = Empty - - var queryTimeout: Box[Int] = Empty - - type LogFunc = (DBLog, Long) => Any - private var logFuncs: List[LogFunc] = Nil - - def addLogFunc(f: LogFunc): List[LogFunc] = { - logFuncs = logFuncs ::: List(f) - logFuncs - } - - def loggingEnabled_? = !logFuncs.isEmpty - - /** - * queryCollector can be used to collect all statements executed in a single request when passed to addLogFunc - * - * Use S.queryLog to get the list of (statement, duration) entries or set an analyzer function using - * S.addAnalyzer - */ - @volatile var queryCollector: LogFunc = { - case (query:DBLog, time) => - } - - - - /** - * Try to obtain a Connection using the jndiName of the ConnectionIdentifier - */ - private def jndiConnection(name: ConnectionIdentifier) : Box[Connection] = { - val toTry: List[() => Connection] = List( - () => { - logger.trace("Trying JNDI lookup on java:/comp/env followed by lookup on %s".format(name.jndiName)) - (new InitialContext).lookup("java:/comp/env").asInstanceOf[Context].lookup(name.jndiName).asInstanceOf[DataSource].getConnection - }, - () => { - logger.trace("Trying JNDI lookup on java:/comp/env/%s".format(name.jndiName)) - (new InitialContext).lookup("java:/comp/env/" + name.jndiName).asInstanceOf[DataSource].getConnection - - }, - () => { - logger.trace("Trying JNDI lookup on %s".format(name.jndiName)) - (new InitialContext).lookup(name.jndiName).asInstanceOf[DataSource].getConnection - - } - ) - - first(toTry) (f => tryo{t:Throwable => logger.trace("JNDI Lookup failed: "+t)}(f())) or { - logger.trace("Unable to obtain Connection for JNDI name %s".format(name.jndiName)) - Empty - } - } - - /** - * can we get a JDBC connection from JNDI? - */ - def jndiJdbcConnAvailable_? : Boolean = jndiConnection(DefaultConnectionIdentifier).isDefined - - private val connectionManagers = new HashMap[ConnectionIdentifier, ConnectionManager] - - private val threadLocalConnectionManagers = new ThreadGlobal[Map[ConnectionIdentifier, ConnectionManager]] - - def defineConnectionManager(name: ConnectionIdentifier, mgr: ConnectionManager) { - connectionManagers(name) = mgr - } - - /** - * Allows you to override the connection manager associated with particular connection identifiers for the duration - * of the call. 
- */ - def doWithConnectionManagers[T](mgrs: (ConnectionIdentifier, ConnectionManager)*)(f: => T): T = { - val newMap = mgrs.foldLeft(threadLocalConnectionManagers.box openOr Map())(_ + _) - threadLocalConnectionManagers.doWith(newMap)(f) - } - - case class ConnectionHolder(conn: SuperConnection, cnt: Int, postTransaction: List[Boolean => Unit], rolledBack: Boolean) - - private def info: HashMap[ConnectionIdentifier, ConnectionHolder] = { - threadStore.get match { - case null => - val tinfo = new HashMap[ConnectionIdentifier, ConnectionHolder] - threadStore.set(tinfo) - tinfo - - case v => v - } - } - - private def postCommit: List[() => Unit] = - _postCommitFuncs.get match { - case null => - _postCommitFuncs.set(Nil) - Nil - - case v => v - } - - private def postCommit_=(lst: List[() => Unit]): Unit = _postCommitFuncs.set(lst) - - // remove thread-local association - private def clearThread(success: Boolean): Unit = { - val ks = info.keySet - if (ks.isEmpty) { - postCommit.foreach(f => tryo(f.apply())) - - _postCommitFuncs.remove - threadStore.remove - } else { - ks.foreach(n => releaseConnectionNamed(n, !success)) - clearThread(success) - } - } - - private def newConnection(name: ConnectionIdentifier): SuperConnection = { - def cmSuperConnection(cm: ConnectionManager): Box[SuperConnection] = - cm.newSuperConnection(name) or cm.newConnection(name).map(c => new SuperConnection(c, () => cm.releaseConnection(c))) - - def jndiSuperConnection: Box[SuperConnection] = jndiConnection(name).map(c => { - val uniqueId = if (logger.isDebugEnabled) Helpers.nextNum.toString else "" - logger.debug("Connection ID " + uniqueId + " for JNDI connection " + name.jndiName + " opened") - new SuperConnection(c, () => {logger.debug("Connection ID " + uniqueId + " for JNDI connection " + name.jndiName + " closed"); c.close}) - }) - - - val cmConn = for { - connectionManager <- threadLocalConnectionManagers.box.flatMap(_.get(name)) or Box(connectionManagers.get(name)) - connection <- cmSuperConnection(connectionManager) - } yield connection - - val ret = cmConn or jndiSuperConnection - - ret.foreach (_.setAutoCommit(false)) - - ret openOr { - throw new NullPointerException("Looking for Connection Identifier " + name + " but failed to find either a JNDI data source " + - "with the name " + name.jndiName + " or a lift connection manager with the correct name") - } - } - - private class ThreadBasedConnectionManager(connections: List[ConnectionIdentifier]) { - private var used: Set[ConnectionIdentifier] = Set() - - def use(conn: ConnectionIdentifier): Int = if (connections.contains(conn)) { - used += conn - 1 - } else 0 - } - - private object CurrentConnectionSet extends DynoVar[ThreadBasedConnectionManager] - - /** - * Build a LoanWrapper to pass into S.addAround() to make requests for - * the DefaultConnectionIdentifier transactional for the complete HTTP request - */ - def buildLoanWrapper(): LoanWrapper = - buildLoanWrapper(List(DefaultConnectionIdentifier)) - - /** - * Build a LoanWrapper to pass into S.addAround() to make requests for - * the List of ConnectionIdentifiers transactional for the complete HTTP request - */ - def buildLoanWrapper(in: List[ConnectionIdentifier]): LoanWrapper = - buildLoanWrapper(true, in) - - /** - * Build a LoanWrapper to pass into S.addAround() to make requests for - * the DefaultConnectionIdentifier transactional for the complete HTTP request - */ - def buildLoanWrapper(eager: Boolean): LoanWrapper = - buildLoanWrapper(eager, List(DefaultConnectionIdentifier)) - - /** - * Build a 
LoanWrapper to pass into S.addAround() to make requests for - * the List of ConnectionIdentifiers transactional for the complete HTTP request - */ - def buildLoanWrapper(eager: Boolean, in: List[ConnectionIdentifier]): LoanWrapper = - new LoanWrapper { - private object DepthCnt extends DynoVar[Boolean] - - def apply[T](f: => T): T = if (DepthCnt.is == Full(true)) f - else DepthCnt.run(true) { - - var success = false - if (eager) { - def recurseMe(lst: List[ConnectionIdentifier]): T = lst match { - case Nil => - try { - try { - val ret = f - success = !S.exceptionThrown_? - ret - } catch { - // this is the case when we want to commit the transaction - // but continue to throw the exception - case e: LiftFlowOfControlException => { - success = !S.exceptionThrown_? - throw e - } - } - - } finally { - clearThread(success) - } - - case x :: xs => DB.use(x) {ignore => recurseMe(xs)} - } - recurseMe(in) - } else { - CurrentConnectionSet.run(new ThreadBasedConnectionManager(in)) { - try { - try { - val ret = f - success = !S.exceptionThrown_? - ret - } catch { - // this is the case when we want to commit the transaction - // but continue to throw the exception - case e: LiftFlowOfControlException => { - success = !S.exceptionThrown_? - throw e - } - } - } finally { - clearThread(success) - } - } - } - - } - } - - private def releaseConnection(conn: SuperConnection): Unit = conn.close - - private def calcBaseCount(conn: ConnectionIdentifier): Int = - CurrentConnectionSet.is.map(_.use(conn)) openOr 0 - - private def getConnection(name: ConnectionIdentifier): SuperConnection = { - logger.trace("Acquiring " + name + " On thread " + Thread.currentThread) - var ret = info.get(name) match { - case None => ConnectionHolder(newConnection(name), calcBaseCount(name) + 1, Nil, false) - case Some(ConnectionHolder(conn, cnt, post, rb)) => ConnectionHolder(conn, cnt + 1, post, rb) - } - info(name) = ret - logger.trace("Acquired " + name + " on thread " + Thread.currentThread + - " count " + ret.cnt) - ret.conn - } - - private def releaseConnectionNamed(name: ConnectionIdentifier, rollback: Boolean) { - logger.trace("Request to release %s on thread %s, auto rollback=%s".format(name,Thread.currentThread, rollback)) - - (info.get(name): @unchecked) match { - case Some(ConnectionHolder(c, 1, post, manualRollback)) => { - // stale and unexpectedly closed connections may throw here - try { - if (! (c.getAutoCommit() || manualRollback)) { - if (rollback) c.rollback - else c.commit - } - } catch { - case e: SQLException => - logger.error("Swallowed exception during connection release. ", e) - } finally { - tryo(c.releaseFunc()) - info -= name - val rolledback = rollback | manualRollback - logger.trace("Invoking %d postTransaction functions. rollback=%s".format(post.size, rolledback)) - post.reverse.foreach(f => tryo(f(!rolledback))) - logger.trace("Released %s on thread %s".format(name,Thread.currentThread)) - } - } - case Some(ConnectionHolder(c, n, post, rb)) => - logger.trace("Did not release " + name + " on thread " + Thread.currentThread + " count " + (n - 1)) - info(name) = ConnectionHolder(c, n - 1, post, rb) - case x => - // ignore - } - } - - /** - * Append a function to be invoked after the transaction on the specified connection identifier has ended. - * The value passed to the function indicates true for success/commit or false for failure/rollback. 
- * - * Note: the function will only be called when automatic transaction management is in effect, either by executing within - * the context of a buildLoanWrapper or a DB.use {} - */ - def appendPostTransaction(name: ConnectionIdentifier, func: Boolean => Unit) { - info.get(name) match { - case Some(ConnectionHolder(c, n, post, rb)) => - info(name) = ConnectionHolder(c, n, func :: post, rb) - logger.trace("Appended postTransaction function on %s, new count=%d".format(name, post.size+1)) - case _ => throw new IllegalStateException("Tried to append postTransaction function on illegal ConnectionIdentifer or outside transaction context") - } - } - - /** - * Append function to be invoked after the current transaction on DefaultConnectionIdentifier has ended - * - */ - def appendPostTransaction(func: Boolean => Unit):Unit = appendPostTransaction(DefaultConnectionIdentifier, func) - - private def runLogger(logged: Statement, time: Long) = logged match { - case st: DBLog => logFuncs.foreach(_(st, time)) - case _ => // NOP - } - - def statement[T](db: SuperConnection)(f: (Statement) => T): T = { - Helpers.calcTime { - val st = - if (loggingEnabled_?) { - DBLog.createStatement(db.connection) - } else { - db.createStatement - } - - queryTimeout.foreach(to => st.setQueryTimeout(to)) - try { - (st, f(st)) - } finally { - st.close - } - } match { - case (time, (query, res)) => runLogger(query, time); res - } - } - - def exec[T](db: SuperConnection, query: String)(f: (ResultSet) => T): T = - statement(db) { - st => - f(st.executeQuery(query)) - } - - private def asString(pos: Int, rs: ResultSet, md: ResultSetMetaData): String = { - import java.sql.Types._ - md.getColumnType(pos) match { - case ARRAY | BINARY | BLOB | DATALINK | DISTINCT | JAVA_OBJECT | LONGVARBINARY | NULL | OTHER | REF | STRUCT | VARBINARY => rs.getObject(pos) match { - case null => null - case s => s.toString - } - - case DECIMAL | NUMERIC => - rs.getBigDecimal(pos) match { - case null => null - case x => x.toString - } - - case BIGINT | INTEGER | /* DECIMAL | NUMERIC | */ SMALLINT | TINYINT => checkNull(rs, pos, rs.getLong(pos).toString) - - case BIT | BOOLEAN => checkNull(rs, pos, rs.getBoolean(pos).toString) - - case VARCHAR | CHAR | CLOB | LONGVARCHAR => rs.getString(pos) - - case DATE | TIME | TIMESTAMP => rs.getTimestamp(pos) match { - case null => null - case x => x.toString - } - - case DOUBLE | FLOAT | REAL => checkNull(rs, pos, rs.getDouble(pos).toString) - } - } - - /* - If the column is null, return null rather than the boxed primitive - */ - def checkNull[T](rs: ResultSet, pos: Int, res: => T): T = { - if (null eq rs.getObject(pos)) null.asInstanceOf[T] - else res - } - - private def asAny(pos: Int, rs: ResultSet, md: ResultSetMetaData): Any = { - import java.sql.Types._ - md.getColumnType(pos) match { - case ARRAY | BINARY | BLOB | DATALINK | DISTINCT | JAVA_OBJECT | LONGVARBINARY | NULL | OTHER | REF | STRUCT | VARBINARY => rs.getObject(pos) - - case DECIMAL | NUMERIC => rs.getBigDecimal(pos) - - case BIGINT | INTEGER | /* DECIMAL | NUMERIC | */ SMALLINT | TINYINT => checkNull(rs, pos, rs.getLong(pos)) - - case BIT | BOOLEAN => checkNull(rs, pos, rs.getBoolean(pos)) - - case VARCHAR | CHAR | CLOB | LONGVARCHAR => rs.getString(pos) - - case DATE | TIME | TIMESTAMP => rs.getTimestamp(pos) - - case DOUBLE | FLOAT | REAL => checkNull(rs, pos, rs.getDouble(pos)) - } - } - - def resultSetTo(rs: ResultSet): (List[String], List[List[String]]) = { - val md = rs.getMetaData - val cnt = md.getColumnCount - val cntList = (1 to 
cnt).toList - val colNames = cntList.map(i => md.getColumnName(i)) - - val lb = new ListBuffer[List[String]]() - - while (rs.next) { - lb += cntList.map(i => asString(i, rs, md)) - } - - (colNames, lb.toList) - } - - def resultSetToAny(rs: ResultSet): (List[String], List[List[Any]]) = { - val md = rs.getMetaData - val cnt = md.getColumnCount - val cntList = (1 to cnt).toList - val colNames = cntList.map(i => md.getColumnName(i)) - - val lb = new ListBuffer[List[Any]]() - - while (rs.next) { - lb += cntList.map(i => asAny(i, rs, md)) - } - - (colNames, lb.toList) - } - - /* - * This method handles the common task of setting arguments on a prepared - * statement based on argument type. Returns the properly updated PreparedStatement. - */ - private def setPreparedParams(ps : PreparedStatement, params: List[Any]): PreparedStatement = { - params.zipWithIndex.foreach { - case (null, idx) => ps.setNull(idx + 1, Types.VARCHAR) - case (i: Int, idx) => ps.setInt(idx + 1, i) - case (l: Long, idx) => ps.setLong(idx + 1, l) - case (d: Double, idx) => ps.setDouble(idx + 1, d) - case (f: Float, idx) => ps.setFloat(idx + 1, f) - // Allow the user to specify how they want the Date handled based on the input type - case (t: java.sql.Timestamp, idx) => ps.setTimestamp(idx + 1, t) - case (d: java.sql.Date, idx) => ps.setDate(idx + 1, d) - case (t: java.sql.Time, idx) => ps.setTime(idx + 1, t) - /* java.util.Date has to go last, since the java.sql date/time classes subclass it. By default we - * assume a Timestamp value */ - case (d: java.util.Date, idx) => ps.setTimestamp(idx + 1, new java.sql.Timestamp(d.getTime)) - case (b: Boolean, idx) => ps.setBoolean(idx + 1, b) - case (s: String, idx) => ps.setString(idx + 1, s) - case (bn: java.math.BigDecimal, idx) => ps.setBigDecimal(idx + 1, bn) - case (obj, idx) => ps.setObject(idx + 1, obj) - } - ps - } - - /** - * Executes the given parameterized query string with the given parameters. - * Parameters are substituted in order. For Date/Time types, passing a java.util.Date will result in a - * Timestamp parameter. If you want a specific SQL Date/Time type, use the corresponding - * java.sql.Date, java.sql.Time, or java.sql.Timestamp classes. - */ - def runQuery(query: String, params: List[Any]): (List[String], List[List[String]]) = - runQuery(query, params, DefaultConnectionIdentifier) - - /** - * Executes the given parameterized query string with the given parameters. - * Parameters are substituted in order. For Date/Time types, passing a java.util.Date will result in a - * Timestamp parameter. If you want a specific SQL Date/Time type, use the corresponding - * java.sql.Date, java.sql.Time, or java.sql.Timestamp classes. - */ - def runQuery(query: String, params: List[Any], connectionIdentifier: ConnectionIdentifier): (List[String], List[List[String]]) = { - use(connectionIdentifier)(conn => prepareStatement(query, conn) { - ps => resultSetTo(setPreparedParams(ps, params).executeQuery) - }) - } - - /** - * Executes the given parameterized query string with the given parameters. - * Parameters are substituted in order. For Date/Time types, passing a java.util.Date will result in a - * Timestamp parameter. If you want a specific SQL Date/Time type, use the corresponding - * java.sql.Date, java.sql.Time, or java.sql.Timestamp classes. 
- */ - def performQuery(query: String, params: List[Any]): (List[String], List[List[Any]]) = - performQuery(query, params, DefaultConnectionIdentifier) - - /** - * Executes the given parameterized query string with the given parameters. - * Parameters are substituted in order. For Date/Time types, passing a java.util.Date will result in a - * Timestamp parameter. If you want a specific SQL Date/Time type, use the corresponding - * java.sql.Date, java.sql.Time, or java.sql.Timestamp classes. - */ - def performQuery(query: String, params: List[Any], connectionIdentifier: ConnectionIdentifier): (List[String], List[List[Any]]) = { - use(connectionIdentifier)(conn => prepareStatement(query, conn) { - ps => resultSetToAny(setPreparedParams(ps, params).executeQuery) - }) - } - - /** - * Executes the given parameterized update string with the given parameters. - * Parameters are substituted in order. For Date/Time types, passing a java.util.Date will result in a - * Timestamp parameter. If you want a specific SQL Date/Time type, use the corresponding - * java.sql.Date, java.sql.Time, or java.sql.Timestamp classes. - */ - def runUpdate(query: String, params: List[Any]): Int = - runUpdate(query, params, DefaultConnectionIdentifier) - - /** - * Executes the given parameterized update string with the given parameters. - * Parameters are substituted in order. For Date/Time types, passing a java.util.Date will result in a - * Timestamp parameter. If you want a specific SQL Date/Time type, use the corresponding - * java.sql.Date, java.sql.Time, or java.sql.Timestamp classes. - */ - def runUpdate(query: String, params: List[Any], connectionIdentifier: ConnectionIdentifier): Int = { - use(connectionIdentifier)(conn => prepareStatement(query, conn) { - ps => setPreparedParams(ps, params).executeUpdate - }) - } - - def runQuery(query: String): (List[String], List[List[String]]) = - use(DefaultConnectionIdentifier)(conn => exec(conn, query)(resultSetTo)) - - - def performQuery(query: String): (List[String], List[List[Any]]) = - use(DefaultConnectionIdentifier)(conn => exec(conn, query)(resultSetToAny)) - - - def rollback(name: ConnectionIdentifier): Unit = { - info.get(name) match { - case Some(ConnectionHolder(c, n, post, _)) => - info(name) = ConnectionHolder(c, n, post, true) - logger.trace("Manual rollback on %s".format(name)) - use(name)(conn => conn.rollback) - case _ => throw new IllegalStateException("Tried to rollback transaction on illegal ConnectionIdentifer or outside transaction context") - } - } - - def rollback: Unit = rollback(DefaultConnectionIdentifier) - - /** - * Executes { @code statement } and converts the { @code ResultSet } to model - * instance { @code T } using { @code f } - */ - def exec[T](statement: PreparedStatement)(f: (ResultSet) => T): T = { - queryTimeout.foreach(to => statement.setQueryTimeout(to)) - val rs = statement.executeQuery - try { - f(rs) - } finally { - statement.close - rs.close - } - } - - /** - * Prepares the given statement and then passes it to the given function for use. This method - * represents a loan pattern, and will automatically handle creation and closing of the - * PreparedStatement. - */ - def prepareStatement[T](statement: String, conn: SuperConnection)(f: (PreparedStatement) => T): T = { - val st = - if (loggingEnabled_?) { - DBLog.prepareStatement(conn.connection, statement) - } else { - conn.prepareStatement(statement) - } - runPreparedStatement(st)(f) - } - - /** - * Prepares the given statement and then passes it to the given function for use. 
This method - * represents a loan pattern, and will automatically handle creation and closing of the - * PreparedStatement. - * - * Retrieval of generated keys is controlled with the autokeys parameter, corresponding to the - * constants defined on java.sql.Statement: RETURN_GENERATED_KEYS or NO_GENERATED_KEYS - */ - def prepareStatement[T](statement: String, autokeys: Int, conn: SuperConnection)(f: (PreparedStatement) => T): T = { - val st = - if (loggingEnabled_?) { - DBLog.prepareStatement(conn.connection, statement, autokeys) - } else { - conn.prepareStatement(statement, autokeys) - } - runPreparedStatement(st)(f) - } - - /** - * Prepares the given statement and then passes it to the given function for use. This method - * represents a loan pattern, and will automatically handle creation and closing of the - * PreparedStatement. - * - * If the driver supports it, generated keys for the given column indices can be retrieved. - */ - def prepareStatement[T](statement: String, autoColumns: Array[Int], conn: SuperConnection)(f: (PreparedStatement) => T): T = { - val st = - if (loggingEnabled_?) { - DBLog.prepareStatement(conn.connection, statement, autoColumns) - } else { - conn.prepareStatement(statement, autoColumns) - } - runPreparedStatement(st)(f) - } - - /** - * Prepares the given statement and then passes it to the given function for use. This method - * represents a loan pattern, and will automatically handle creation and closing of the - * PreparedStatement. - * - * If the driver supports it, generated keys for the given column names can be retrieved. - */ - def prepareStatement[T](statement: String, autoColumns: Array[String], conn: SuperConnection)(f: (PreparedStatement) => T): T = { - val st = - if (loggingEnabled_?) { - DBLog.prepareStatement(conn.connection, statement, autoColumns) - } else { - conn.prepareStatement(statement, autoColumns) - } - runPreparedStatement(st)(f) - } - - private def runPreparedStatement[T](st: PreparedStatement)(f: (PreparedStatement) => T): T = { - queryTimeout.foreach(to => st.setQueryTimeout(to)) - Helpers.calcTime { - try { - (st,f(st)) - } finally { - st.close - } - } match { - case (time, (query, res)) => runLogger(query, time); res - } - } - - private object currentConn extends DynoVar[SuperConnection] - - def currentConnection: Box[SuperConnection] = currentConn.is - - /** - * Executes function { @code f } with the connection named { @code name }. Releases the connection - * before returning. - * - * Only use within a stateful request - */ - def use[T](name: ConnectionIdentifier)(f: (SuperConnection) => T): T = { - val conn = getConnection(name) - currentConn.run(conn) { - var rollback = true - try { - val ret = f(conn) - rollback = S.exceptionThrown_? - ret - } catch { - // this is the case when we want to commit the transaction - // but continue to throw the exception - case e: LiftFlowOfControlException => { - rollback = S.exceptionThrown_? - throw e - } - } finally { - releaseConnectionNamed(name, rollback) - } - } - } - - /** - * The SQL reserved words. These words will be changed if they are used for column or table names. - */ - def reservedWords: scala.collection.immutable.Set[String] = userReservedWords openOr defaultReservedWords - - /** - * If you need to change some of the reserved word, you can supply your own set in Boot.scala: - * DB.userReservedWords = Full(Set("foo", "bar")) - */ - @volatile var userReservedWords: Box[ scala.collection.immutable.Set[String]] = Empty - - - /** - * The default reserved words. 
- * - * TODO : Maybe this should be refactored to allow for driver-specific reserved words - */ - lazy val defaultReservedWords: scala.collection.immutable.Set[String] = scala.collection.immutable.HashSet("abort", - "accept", - "access", - "add", - "admin", - "after", - "all", - "allocate", - "alter", - "analyze", - "and", - "any", - "archive", - "archivelog", - "array", - "arraylen", - "as", - "asc", - "assert", - "assign", - "at", - "audit", - "authorization", - "avg", - "backup", - "base_table", - "become", - "before", - "begin", - "between", - "binary_integer", - "blob", - "block", - "body", - "boolean", - "by", - "cache", - "cancel", - "cascade", - "case", - "change", - "char", - "character", - "char_base", - "check", - "checkpoint", - "close", - "cluster", - "clusters", - "cobol", - "colauth", - "column", - "columns", - "comment", - "commit", - "compile", - "compress", - "connect", - "constant", - "constraint", - "constraints", - "contents", - "continue", - "controlfile", - "count", - "crash", - "create", - "current", - "currval", - "cursor", - "cycle", - "database", - "data_base", - "datafile", - "date", - "dba", - "debugoff", - "debugon", - "dec", - "decimal", - "declare", - "default", - "definition", - "delay", - "delete", - "delta", - "desc", - "digits", - "disable", - "dismount", - "dispose", - "distinct", - "do", - "double", - "drop", - "dump", - "each", - "else", - "elsif", - "enable", - "end", - "entry", - "escape", - "events", - "except", - "exception", - "exception_init", - "exceptions", - "exclusive", - "exec", - "execute", - "exists", - "exit", - "explain", - "extent", - "externally", - "false", - "fetch", - "file", - "float", - "flush", - "for", - "force", - "foreign", - "form", - "fortran", - "found", - "freelist", - "freelists", - "from", - "function", - "generic", - "go", - "goto", - "grant", - "group", - "having", - "identified", - "if", - "immediate", - "in", - "including", - "increment", - "index", - "indexes", - "indicator", - "initial", - "initrans", - "insert", - "instance", - "notnull", // reserved word for PostgreSQL - "int", - "integer", - "intersect", - "into", - "is", - "key", - "language", - "layer", - "level", - "like", - "limit", // reserved word for PostgreSQL - "limited", - "link", - "lists", - "lock", - "logfile", - "long", - "loop", - "manage", - "manual", - "max", - "maxdatafiles", - "maxextents", - "maxinstances", - "maxlogfiles", - "maxloghistory", - "maxlogmembers", - "maxtrans", - "maxvalue", - "min", - "minextents", - "minus", - "minvalue", - "mlslabel", - "mod", - "mode", - "modify", - "module", - "mount", - "natural", - "new", - "next", - "nextval", - "noarchivelog", - "noaudit", - "nocache", - "nocompress", - "nocycle", - "nomaxvalue", - "nominvalue", - "none", - "noorder", - "noresetlogs", - "normal", - "nosort", - "not", - "notfound", - "nowait", - "null", - "number", - "number_base", - "numeric", - "of", - "off", - "offline", - "old", - "on", - "online", - "only", - "open", - "optimal", - "option", - "or", - "order", - "others", - "out", - "own", - "package", - "parallel", - "partition", - "pctfree", - "pctincrease", - "pctused", - "plan", - "pli", - "positive", - "pragma", - "precision", - "primary", - "prior", - "private", - "privileges", - "procedure", - "profile", - "public", - "quota", - "raise", - "range", - "raw", - "read", - "real", - "record", - "recover", - "references", - "referencing", - "release", - "remr", - "rename", - "resetlogs", - "resource", - "restricted", - "return", - "reuse", - "reverse", - "revoke", - "role", - 
"roles", - "rollback", - "row", - "rowid", - "rowlabel", - "rownum", - "rows", - "rowtype", - "run", - "savepoint", - "schema", - "scn", - "section", - "segment", - "select", - "separate", - "sequence", - "session", - "set", - "share", - "shared", - "show", // MySQL reserved word - "size", - "smallint", - "snapshot", - "some", - "sort", - "space", - "sql", - "sqlbuf", - "sqlcode", - "sqlerrm", - "sqlerror", - "sqlstate", - "start", - "statement", - "statement_id", - "statistics", - "stddev", - "stop", - "storage", - "subtype", - "successful", - "sum", - "switch", - "synonym", - "sysdate", - "system", - "tabauth", - "table", - "tables", - "tablespace", - "task", - "temporary", - "terminate", - "then", - "thread", - "time", - "timestamp", // reserved in Oracle - "to", - "tracing", - "transaction", - "trigger", - "triggers", - "true", - "truncate", - "type", - "uid", - "under", - "union", - "unique", - "unlimited", - "until", - "update", - "use", - "user", - "using", - "validate", - "values", - "varchar", - "varchar2", - "variance", - "view", - "views", - "when", - "whenever", - "where", - "while", - "with", - "work", - "write", - "xor") -} - -class SuperConnection(val connection: Connection, val releaseFunc: () => Unit, val schemaName: Box[String]) { - def this(c: Connection, rf: () => Unit) = this (c, rf, Empty) - - lazy val brokenLimit_? = driverType.brokenLimit_? - - def createTablePostpend: String = driverType.createTablePostpend - - def supportsForeignKeys_? : Boolean = driverType.supportsForeignKeys_? - - lazy val driverType: DriverType = DriverType.calcDriver(connection) - - lazy val metaData = connection.getMetaData -} - -object SuperConnection { - implicit def superToConn(in: SuperConnection): Connection = in.connection -} - -/** - * The standard DB vendor. - * @param driverName the name of the database driver - * @param dbUrl the URL for the JDBC data connection - * @param dbUser the optional username - * @param dbPassword the optional db password - */ -class StandardDBVendor(driverName: String, - dbUrl: String, - dbUser: Box[String], - dbPassword: Box[String]) extends ProtoDBVendor { - - private val logger = Logger(classOf[StandardDBVendor]) - - protected def createOne: Box[Connection] = { - tryo{t:Throwable => logger.error("Cannot load database driver: %s".format(driverName), t)}{Class.forName(driverName);()} - - (dbUser, dbPassword) match { - case (Full(user), Full(pwd)) => - tryo{t:Throwable => logger.error("Unable to get database connection. url=%s, user=%s".format(dbUrl, user),t)}(DriverManager.getConnection(dbUrl, user, pwd)) - case _ => - tryo{t:Throwable => logger.error("Unable to get database connection. url=%s".format(dbUrl),t)}(DriverManager.getConnection(dbUrl)) - } - } -} - -trait ProtoDBVendor extends ConnectionManager { - private val logger = Logger(classOf[ProtoDBVendor]) - private var pool: List[Connection] = Nil - private var poolSize = 0 - private var tempMaxSize = maxPoolSize - - /** - * Override and set to false if the maximum pool size can temporarilly be expanded to avoid pool starvation - */ - protected def allowTemporaryPoolExpansion = true - - /** - * Override this method if you want something other than - * 4 connections in the pool - */ - protected def maxPoolSize = 4 - - /** - * The absolute maximum that this pool can extend to - * The default is 20. Override this method to change. - */ - protected def doNotExpandBeyond = 20 - - /** - * The logic for whether we can expand the pool beyond the current size. 
By - * default, the logic tests allowTemporaryPoolExpansion && poolSize <= doNotExpandBeyond - */ - protected def canExpand_? : Boolean = allowTemporaryPoolExpansion && poolSize <= doNotExpandBeyond - - /** - * How is a connection created? - */ - protected def createOne: Box[Connection] - - /** - * Test the connection. By default, setAutoCommit(false), - * but you can do a real query on your RDBMS to see if the connection is alive - */ - protected def testConnection(conn: Connection) { - conn.setAutoCommit(false) - } - - def newConnection(name: ConnectionIdentifier): Box[Connection] = - synchronized { - pool match { - case Nil if poolSize < tempMaxSize => - val ret = createOne - ret.foreach(_.setAutoCommit(false)) - poolSize = poolSize + 1 - logger.debug("Created new pool entry. name=%s, poolSize=%d".format(name, poolSize)) - ret - - case Nil => - val curSize = poolSize - logger.trace("No connection left in pool, waiting...") - wait(50L) - // if we've waited 50 ms and the pool is still empty, temporarily expand it - if (pool.isEmpty && poolSize == curSize && canExpand_?) { - tempMaxSize += 1 - logger.debug("Temporarily expanding pool. name=%s, tempMaxSize=%d".format(name, tempMaxSize)) - } - newConnection(name) - - case x :: xs => - logger.trace("Found connection in pool, name=%s".format(name)) - pool = xs - try { - this.testConnection(x) - Full(x) - } catch { - case e: Exception => try { - logger.debug("Test connection failed, removing connection from pool, name=%s".format(name)) - poolSize = poolSize - 1 - tryo(x.close) - newConnection(name) - } catch { - case e: Exception => newConnection(name) - } - } - } - } - - def releaseConnection(conn: Connection): Unit = synchronized { - if (tempMaxSize > maxPoolSize) { - tryo {conn.close()} - tempMaxSize -= 1 - poolSize -= 1 - } else { - pool = conn :: pool - } - logger.debug("Released connection. poolSize=%d".format(poolSize)) - notifyAll - } - - def closeAllConnections_!(): Unit = _closeAllConnections_!(0) - - - private def _closeAllConnections_!(cnt: Int): Unit = synchronized { - logger.info("Closing all connections") - if (poolSize <= 0 || cnt > 10) () - else { - pool.foreach {c => tryo(c.close); poolSize -= 1} - pool = Nil - - if (poolSize > 0) wait(250) - - _closeAllConnections_!(cnt + 1) - } - } -} diff --git a/persistence/db/src/main/scala/net/liftweb/db/Driver.scala b/persistence/db/src/main/scala/net/liftweb/db/Driver.scala deleted file mode 100644 index 8fd76d2278..0000000000 --- a/persistence/db/src/main/scala/net/liftweb/db/Driver.scala +++ /dev/null @@ -1,489 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package db - -import java.sql.{Connection,PreparedStatement,ResultSet,Statement} -import net.liftweb.common._ - -/** - * JDBC Driver Abstraction base class. New driver types should extend this base - * class. New drivers should "register" in the companion object - * DriverType.calcDriver method. 
- */ -abstract class DriverType(val name : String) { - def binaryColumnType: String - def clobColumnType: String - def varcharColumnType(len : Int) : String = "VARCHAR(%d)".format(len) - def booleanColumnType: String - def dateTimeColumnType: String - def dateColumnType: String - def timeColumnType: String - def integerColumnType: String - def integerIndexColumnType: String - def enumColumnType: String - def longForeignKeyColumnType: String - def longIndexColumnType: String - def enumListColumnType: String - def longColumnType: String - def doubleColumnType: String - - /** - * This specifies that the driver supports FKs in tables. Note that - * to enable FK generation in Schemifier, you also need to set - * MapperRules.createForeignKeys_? to true before running it. - */ - def supportsForeignKeys_? : Boolean = false - - /** - * This indicates that Schemifier needs to run with a non-transacted - * connection. Certain databases require that gathering information - * on tables (which Schemifier uses for updates) run outside of a transaction. - */ - def schemifierMustAutoCommit_? : Boolean = false - - def createTablePostpend: String = "" - - /** - * Whether this database supports LIMIT clause in SELECTs. - */ - def brokenLimit_? : Boolean = false - - /** - * Whether the primary key has been defined by the index column. - */ - def pkDefinedByIndexColumn_? : Boolean = false - - /** - * Maximum value of the LIMIT clause in SELECT. - */ - def maxSelectLimit : String = java.lang.Long.MAX_VALUE.toString - - /** - * Performs an insert and optionally returns the ResultSet of the generated keys that were inserted. If no keys are - * specified, return the number of rows updated. - * - * @param conn A connection that the method can optionally use if it needs to execute ancillary statements - * @param query The prepared query string to use for the insert - * @param setter A function that will set the parameters on the prepared statement - * @param pkName Zero or more generated column names that need to be returned - */ - def performInsert [T](conn : SuperConnection, query : String, setter : PreparedStatement => Unit, tableName : String, genKeyNames : List[String])(handler : Either[ResultSet,Int] => T) : T = - genKeyNames match { - case Nil => - DB.prepareStatement(query, conn) { - stmt => - setter(stmt) - handler(Right(stmt.executeUpdate)) - } - case pk => - performInsertWithGenKeys(conn, query, setter, tableName, pk, handler) - } - - /* - * Subclasses should override this method if they don't have proper getGeneratedKey support (JDBC3) - */ - protected def performInsertWithGenKeys [T](conn : SuperConnection, query : String, setter : PreparedStatement => Unit, tableName : String, genKeyNames : List[String], handler : Either[ResultSet,Int] => T) : T = - DB.prepareStatement(query, Statement.RETURN_GENERATED_KEYS, conn) { - stmt => - setter(stmt) - stmt.executeUpdate - handler(Left(stmt.getGeneratedKeys)) - } - - /** - * Name of the default db schema. If not set, then the schema is assumed to - * equal the db user name. - */ - def defaultSchemaName : Box[String] = Empty - - type TypeMapFunc = PartialFunction[Int,Int] - /** - * Allow the driver to do specific remapping of column types for cases - * where not all types are supported. Classes that want to do custom type - * mapping for columns should override the customColumnTypeMap method. 
- */ - def columnTypeMap : TypeMapFunc = - customColumnTypeMap orElse { - case x => x - } - - /** - * Allows the Vendor-specific Driver to do custom type mapping for a particular - * column type. - */ - protected def customColumnTypeMap : TypeMapFunc = new TypeMapFunc { - def apply (in : Int) = -1 - def isDefinedAt (in : Int) = false - } - - /** - * This method can be overriden by DriverType impls to allow for custom setup - * of Primary Key Columns (creating sequeneces or special indices, for example). - * The List of commands will be executed in order. - */ - def primaryKeySetup(tableName : String, columnName : String) : List[String] = { - List("ALTER TABLE "+tableName+" ADD CONSTRAINT "+tableName+"_PK PRIMARY KEY("+columnName+")") - } - - /** This defines the syntax for adding a column in an alter. This is - * used because some DBs (Oracle, for one) use slightly different syntax. */ - def alterAddColumn = "ADD COLUMN" -} - -object DriverType { - var calcDriver: Connection => DriverType = conn => { - val meta = conn.getMetaData - - (meta.getDatabaseProductName,meta.getDatabaseMajorVersion,meta.getDatabaseMinorVersion) match { - case (DerbyDriver.name,_,_) => DerbyDriver - case (MySqlDriver.name,_,_) => MySqlDriver - case (PostgreSqlDriver.name, major, minor) if ((major == 8 && minor >= 2) || major > 8) => PostgreSqlDriver - case (PostgreSqlDriver.name, _, _) => PostgreSqlOldDriver - case (H2Driver.name,_,_) => H2Driver - case (SqlServerDriver.name,major,_) if major >= 9 => SqlServerDriver - case (SqlServerDriver.name,_,_) => SqlServerPre2005Driver - case (SybaseSQLAnywhereDriver.name,_,_) => SybaseSQLAnywhereDriver - case (SybaseASEDriver.name,_,_) => SybaseASEDriver - case (OracleDriver.name,_,_) => OracleDriver - case (MaxDbDriver.name,_,_) => MaxDbDriver - case (other, _, _) if other.startsWith(DB2Driver.name) => DB2Driver - case x => throw new Exception( - "Lift mapper does not support JDBC driver %s.\n".format(x) + - "See http://wiki.liftweb.net/index.php/Category:Database for a list of supported databases.") - } - } -} - -object DB2Driver extends DriverType("DB2") { - def binaryColumnType = "LONG VARCHAR FOR BIT DATA" - def booleanColumnType = "SMALLINT" - def clobColumnType = "LONG VARCHAR" - def dateTimeColumnType = "TIMESTAMP" - def dateColumnType = "DATE" - def timeColumnType = "TIME" - def integerColumnType = "INT" - def integerIndexColumnType = "INTEGER NOT NULL GENERATED BY DEFAULT AS IDENTITY" - def enumColumnType = "INT" - def longForeignKeyColumnType = "INT" - def longIndexColumnType = "BIGINT NOT NULL GENERATED BY DEFAULT AS IDENTITY" - def enumListColumnType = "INT" - def longColumnType = "INT" - def doubleColumnType = "DOUBLE" - - override def brokenLimit_? : Boolean = true - - override def pkDefinedByIndexColumn_? 
= true - - // This will let DB2 handle the schema name without case issues - override def defaultSchemaName = Full(null) -} - -object DerbyDriver extends DriverType("Apache Derby") { - def binaryColumnType = "LONG VARCHAR FOR BIT DATA" - def booleanColumnType = "SMALLINT" - def clobColumnType = "LONG VARCHAR" - def dateTimeColumnType = "TIMESTAMP" - def dateColumnType = "DATE" - def timeColumnType = "TIME" - def integerColumnType = "INTEGER" - def integerIndexColumnType = "INTEGER NOT NULL GENERATED BY DEFAULT AS IDENTITY" - def enumColumnType = "BIGINT" - def longForeignKeyColumnType = "BIGINT" - def longIndexColumnType = "BIGINT NOT NULL GENERATED BY DEFAULT AS IDENTITY" - def enumListColumnType = "BIGINT" - def longColumnType = "BIGINT" - def doubleColumnType = "DOUBLE" - - override def brokenLimit_? : Boolean = true -} - -object MySqlDriver extends DriverType("MySQL") { - def binaryColumnType = "MEDIUMBLOB" - def clobColumnType = "LONGTEXT" - def booleanColumnType = "BOOLEAN" - def dateTimeColumnType = "DATETIME" - def dateColumnType = "DATE" - def timeColumnType = "TIME" - def integerColumnType = "INTEGER" - def integerIndexColumnType = "INTEGER NOT NULL AUTO_INCREMENT UNIQUE" - def enumColumnType = "BIGINT" - def longForeignKeyColumnType = "BIGINT UNSIGNED" - def longIndexColumnType = "BIGINT UNSIGNED NOT NULL AUTO_INCREMENT UNIQUE KEY" - def enumListColumnType = "BIGINT" - def longColumnType = "BIGINT" - def doubleColumnType = "DOUBLE" - - override def createTablePostpend: String = " ENGINE = InnoDB " -} - -object H2Driver extends DriverType("H2") { - def binaryColumnType = "BINARY" - def clobColumnType = "LONGVARCHAR" - def booleanColumnType = "BOOLEAN" - def dateTimeColumnType = "TIMESTAMP" - def dateColumnType = "DATE" - def timeColumnType = "TIME" - def integerColumnType = "INTEGER" - def integerIndexColumnType = "INTEGER NOT NULL AUTO_INCREMENT" - def enumColumnType = "BIGINT" - def longForeignKeyColumnType = "BIGINT" - def longIndexColumnType = "BIGINT NOT NULL AUTO_INCREMENT" - def enumListColumnType = "BIGINT" - def longColumnType = "BIGINT" - def doubleColumnType = "DOUBLE" - - /** - * Whether the primary key has been defined by the index column. - * H2 creates primary key for a table, when AUTO_INCREMENT type - * is used. <--- NOT TRUE - * I went into the H2 console, created a table with auto_increment - * and was able to insert duplicate ids. Then I created it with - * AUTO_INCREMENT PRIMARY KEY and it did not allow it. - */ - override def pkDefinedByIndexColumn_? : Boolean = false //changed to false by nafg - override def supportsForeignKeys_? = true - override def maxSelectLimit = "0"; - override def defaultSchemaName : Box[String] = Full("PUBLIC") -} - -/** - * Provides some base definitions for PostgreSql databases. - */ -abstract class BasePostgreSQLDriver extends DriverType("PostgreSQL") { - def binaryColumnType = "BYTEA" - def clobColumnType = "TEXT" - def booleanColumnType = "BOOLEAN" - def dateTimeColumnType = "TIMESTAMP" - def dateColumnType = "DATE" - def timeColumnType = "TIME" - def integerColumnType = "INTEGER" - def integerIndexColumnType = "SERIAL" - def enumColumnType = "BIGINT" - def longForeignKeyColumnType = "BIGINT" - def longIndexColumnType = "BIGSERIAL" - def enumListColumnType = "BIGINT" - def longColumnType = "BIGINT" - def doubleColumnType = "DOUBLE PRECISION" - - override def maxSelectLimit = "ALL" - - /** - * "$user" schema is searched before "public", but it does not exist by default, - * so "public" is our default choice. 
- */ - override def defaultSchemaName : Box[String] = Full("public") -} - -/** - * PostgreSql driver for versions 8.2 and up. Tested with: - * - *
<ul> - *   <li>8.3</li> - * </ul>
- */ -object PostgreSqlDriver extends BasePostgreSQLDriver { - /* PostgreSQL doesn't support generated keys via the JDBC driver. Instead, we use the RETURNING clause on the insert. - * From: http://www.postgresql.org/docs/8.2/static/sql-insert.html - */ - override def performInsertWithGenKeys [T](conn : SuperConnection, query : String, setter : PreparedStatement => Unit, tableName : String, genKeyNames : List[String], handler : Either[ResultSet,Int] => T) : T = - DB.prepareStatement(query + " RETURNING " + genKeyNames.mkString(","), conn) { - stmt => - setter(stmt) - handler(Left(stmt.executeQuery)) - } - - override def supportsForeignKeys_? = true -} - -/** - * PostgreSql driver for versions 8.1 and earlier. Tested with - * - *
<ul> - *   <li>8.1</li> - *   <li>8.0</li> - * </ul>
- * - * Successfuly use of earlier versions should be reported to liftweb@googlegroups.com. - */ -object PostgreSqlOldDriver extends BasePostgreSQLDriver { - /* PostgreSQL doesn't support generated keys via the JDBC driver. - * Instead, we use the lastval() function to get the last inserted - * key from the DB. - */ - override def performInsertWithGenKeys [T](conn : SuperConnection, query : String, setter : PreparedStatement => Unit, tableName : String, genKeyNames : List[String], handler : Either[ResultSet,Int] => T) : T = { - DB.prepareStatement(query, conn) { - stmt => - setter(stmt) - stmt.executeUpdate - } - val pkValueQuery = genKeyNames.map(String.format("currval('%s_%s_seq')", tableName, _)).mkString(", ") - DB.statement(conn) { - stmt => - handler(Left(stmt.executeQuery("SELECT " + pkValueQuery))) - } - } -} - - -abstract class SqlServerBaseDriver extends DriverType("Microsoft SQL Server") { - def binaryColumnType = "IMAGE" - def booleanColumnType = "BIT" - override def varcharColumnType(len : Int) : String = "NVARCHAR(%d)".format(len) - def clobColumnType = "NTEXT" - def dateTimeColumnType = "DATETIME" - def dateColumnType = "DATE" - def timeColumnType = "TIME" - def integerColumnType = "INT" - def integerIndexColumnType = "INT IDENTITY NOT NULL" - def enumColumnType = "BIGINT" - def longForeignKeyColumnType = "BIGINT" - def longIndexColumnType = "BIGINT IDENTITY NOT NULL" - def enumListColumnType = "BIGINT" - def longColumnType = "BIGINT" - def doubleColumnType = "FLOAT" - - override def supportsForeignKeys_? = true - - override def defaultSchemaName : Box[String] = Full("dbo") - - // Microsoft doesn't use "COLUMN" syntax when adding a column to a table - override def alterAddColumn = "ADD" - - override def brokenLimit_? = true - -} - -/** - * Microsoft SQL Server driver for versions 2000 and below - */ -object SqlServerPre2005Driver extends SqlServerBaseDriver - -object SqlServerDriver extends SqlServerBaseDriver { - override def binaryColumnType = "VARBINARY(MAX)" - override def clobColumnType = "NVARCHAR(MAX)" -} - -/** - * Sybase SQL Anywhere Driver. Tested against version 10.0 - */ -object SybaseSQLAnywhereDriver extends SqlServerBaseDriver { - override val name = "SQL Anywhere" - - // SQL Anywhere prefers the default schema name for metadata calls - override val defaultSchemaName = Full(null) -} - -/** - * Sybase ASE Driver. Tested with ASE version 15, but should - * work with lower versions as well. - */ -object SybaseASEDriver extends SqlServerBaseDriver { - override val name = "ASE" - override def binaryColumnType = "VARBINARY(MAX)" - override def clobColumnType = "NVARCHAR(MAX)" - override def brokenLimit_? = true - override def schemifierMustAutoCommit_? = true -} - -/** - * Driver for Oracle databases. Tested with: - * - *
<ul> - *   <li>Oracle XE 10.2.0.1</li> - *   <li>Oracle Database 11g Enterprise Edition Release 11.2.0.1.0 - 64bit Production</li> - * </ul>
- * - * Other working install versions should be reported to liftweb@googlegroups.com. - */ -object OracleDriver extends DriverType("Oracle") { - def binaryColumnType = "LONG RAW" - def booleanColumnType = "NUMBER" - def clobColumnType = "CLOB" - def dateTimeColumnType = "TIMESTAMP" - /* - * It's unclear whether DATE would suffice here. The PL/SQL ref at - * http://download.oracle.com/docs/cd/B19306_01/java.102/b14355/apxref.htm - * seems to indicate that DATE and TIMESTAMP can both be used - * for java.sql.Date and java.sql.Time representations. - */ - def dateColumnType = "TIMESTAMP" - def timeColumnType = "TIMESTAMP" - def integerColumnType = "NUMBER" - def integerIndexColumnType = "NUMBER NOT NULL" - def enumColumnType = "NUMBER" - def longForeignKeyColumnType = "NUMBER" - def longIndexColumnType = "NUMBER NOT NULL" - def enumListColumnType = "NUMBER" - def longColumnType = "NUMBER" - def doubleColumnType = "NUMBER" - - /** - * Whether this database supports LIMIT clause in SELECTs. - */ - override def brokenLimit_? : Boolean = true - - import java.sql.Types - override def customColumnTypeMap = { - case Types.BOOLEAN => Types.INTEGER - } - - override def primaryKeySetup(tableName : String, columnName : String) : List[String] = { - /* - * This trigger and sequence setup is taken from http://www.databaseanswers.org/sql_scripts/ora_sequence.htm - */ - super.primaryKeySetup(tableName, columnName) ::: - List("CREATE SEQUENCE " + tableName + "_sequence START WITH 1 INCREMENT BY 1", - "CREATE OR REPLACE TRIGGER " + tableName + "_trigger BEFORE INSERT ON " + tableName + " " + - "FOR EACH ROW " + - "WHEN (new." + columnName + " is null) " + - "BEGIN " + - "SELECT " + tableName + "_sequence.nextval INTO :new." + columnName + " FROM DUAL; " + - "END;") - } - - // Oracle supports returning generated keys only if we specify the names of the column(s) to return. - override def performInsertWithGenKeys [T](conn : SuperConnection, query : String, setter : PreparedStatement => Unit, tableName : String , genKeyNames : List[String], handler : Either[ResultSet,Int] => T) : T = - DB.prepareStatement(query, genKeyNames.toArray, conn) { - stmt => - setter(stmt) - stmt.executeUpdate - handler(Left(stmt.getGeneratedKeys)) - } - - // Oracle doesn't use "COLUMN" syntax when adding a column to a table - override def alterAddColumn = "ADD" - - override def supportsForeignKeys_? = true -} - -object MaxDbDriver extends DriverType("MaxDB") { - def binaryColumnType = "BLOB" - def booleanColumnType = "BOOLEAN" - def clobColumnType = "CLOB" - def dateTimeColumnType = "TIMESTAMP" - def dateColumnType = "DATE" - def timeColumnType = "TIME" - def integerColumnType = "INTEGER" - def integerIndexColumnType = "FIXED(10) DEFAULT SERIAL" - def enumColumnType = "FIXED(38)" - def longForeignKeyColumnType = "FIXED(38)" - def longIndexColumnType = "FIXED(38) DEFAULT SERIAL" - def enumListColumnType = "FIXED(38)" - def longColumnType = "FIXED(38)" - def doubleColumnType = "FLOAT(38)" -} diff --git a/persistence/db/src/main/scala/net/liftweb/db/LoggingStatementWrappers.scala b/persistence/db/src/main/scala/net/liftweb/db/LoggingStatementWrappers.scala deleted file mode 100644 index 1ec5233a0c..0000000000 --- a/persistence/db/src/main/scala/net/liftweb/db/LoggingStatementWrappers.scala +++ /dev/null @@ -1,612 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package db - -import java.lang.reflect.{InvocationHandler,Method,Proxy} -import java.sql.{Array => SqlArray, _} - -import net.liftweb.util._ -import net.liftweb.common.{Box,Loggable} - -trait DBLogEntry { - def statement : String - def duration : Long -} -object DBLogEntry { - def unapply(obj : Any) = obj match { - case entry : DBLogEntry => Some(entry.statement,entry.duration) - case _ => None - } -} -case class DBStatementEntry(statement : String, duration : Long) extends DBLogEntry -case class DBMetaEntry(statement : String, duration : Long) extends DBLogEntry - -/** - * This trait is applied to JDBC statements and similar constructs that can log operations. - * - * To enable logging of DB operations, use DB.addLogFunc - */ -trait DBLog { - protected var executedStatements = List[DBLogEntry]() - - /* - Some convenience methods to simplify the statements. We defined methods that can either take a raw description, - or a function that can use the result of the operation to construct a description. - */ - protected def logStatement[T](description : String)(f : => T) : T = logStatement({ignore : T => description})(f) - - protected def logStatement[T](description : T => String)(f : => T) : T = Helpers.calcTime(f) match { - case (duration, result) => executedStatements ::= DBStatementEntry(description(result), duration); result - } - - protected def logMeta[T](description : String)(f : => T) : T = logMeta({ignore : T => description})(f) - - protected def logMeta[T](description : T => String)(f : => T) : T = Helpers.calcTime(f) match { - case (duration, result) => executedStatements ::= DBMetaEntry(description(result), duration); result - } - - /** Return a list of all of the DBStatementEntry instances in the log buffer */ - def statementEntries : List[DBStatementEntry] = executedStatements.filter(_.isInstanceOf[DBStatementEntry]).reverse.asInstanceOf[List[DBStatementEntry]] - - /** Return a list of all of the DBMetaEntry instances in the log buffer */ - def metaEntries : List[DBMetaEntry] = executedStatements.filter(_.isInstanceOf[DBMetaEntry]).reverse.asInstanceOf[List[DBMetaEntry]] - - /** Return all log buffer entries */ - def allEntries : List[DBLogEntry] = executedStatements.reverse -} - -object DBLog { - def createStatement (conn : Connection) = { - val stmt = conn.createStatement - Proxy.newProxyInstance(this.getClass.getClassLoader, - Array(classOf[java.sql.Statement], classOf[DBLog]), - new LoggedStatementHandler(stmt)).asInstanceOf[Statement] - } - - def prepareStatement (conn : Connection, query : String) = - proxyPreparedStatement(conn.prepareStatement(query), query) - - def prepareStatement (conn : Connection, query : String, autoKeys : Int) = - proxyPreparedStatement(conn.prepareStatement(query, autoKeys), query) - - def prepareStatement (conn : Connection, query : String, autoKeys : Array[Int]) = - proxyPreparedStatement(conn.prepareStatement(query, autoKeys), query) - - def prepareStatement (conn : Connection, query : String, autoKeys : Array[String]) = - proxyPreparedStatement(conn.prepareStatement(query, autoKeys), query) - - 
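The DBLog scaladoc above points at DB.addLogFunc as the hook for collecting these entries. For context, a minimal sketch of how such a hook might be registered, assuming the (DBLog, Long) => Any function shape of Lift's DB.addLogFunc; the SqlLogging object name is illustrative only and not part of this patch:

import net.liftweb.db.{DB, DBLog, DBLogEntry}

object SqlLogging {
  // Registers a log function that prints every statement and meta call
  // recorded by the DBLog proxies, plus the total time spent on the connection.
  def install(): Unit =
    DB.addLogFunc { (log: DBLog, totalTime: Long) =>
      log.allEntries.foreach {
        case DBLogEntry(statement, duration) =>
          println(s"$statement took $duration ms")
      }
      println(s"Total time on connection: $totalTime ms")
    }
}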
private def proxyPreparedStatement(stmt : => PreparedStatement, query : String) = { - try { - Proxy.newProxyInstance(this.getClass.getClassLoader, - Array(classOf[java.sql.PreparedStatement], classOf[DBLog]), - new LoggedPreparedStatementHandler(query, stmt)).asInstanceOf[PreparedStatement] - } catch { - case sqle : SQLException => throw new SQLException("Error preparing statement: \"%s\"".format(query), sqle) - } - } - - /** - * This class corresponds to a logged version of java.sql.Statement. All operations - * are supported via dynamic dispatch. This is done so that we can support both - * JDBC3 and JDBC4 without having two code trees. - * - * To enable logging of DB operations, use DB.addLogFunc - */ - sealed private[DBLog] class LoggedStatementHandler(underlying : Statement) extends InvocationHandler with DBLog with Loggable { - def underlyingClassname = "java.sql.Statement" - lazy val representative : Class[_] = Class.forName(underlyingClassname) - - def invoke (proxy : Object, method : Method, args : Array[Object]) : Object = method.getName match { - // Handle DBLog methods first. We have to do this since the end user expects a DBLog interface - // via the proxy. - case "statementEntries" => this.statementEntries - case "metaEntries" => this.metaEntries - case "allEntries" => this.allEntries - - // The rest are from Statement - case "addBatch" => { - logStatement("Batched: \"%s\"".format(args(0))) { - chain(method, args) - } - } - case "cancel" => { - logMeta("Cancelled Statement") { - chain(method, Array()) - } - } - case "clearBatch" => { - logMeta("Cleared Batch") { - chain(method, Array()) - } - } - case "clearWarnings" => { - logMeta("Cleared Warnings") { - chain(method, Array()) - } - } - case "close" => { - logMeta("Closed Statement") { - chain(method, Array()) - } - } - case "execute" if args.length == 1 => { - logStatement({ret : Object => "\"%s\" : result = %s".format(args(0), ret)}) { - chain(method, args) - } - } - case "execute" if args(1).getClass == classOf[Int] => { - logStatement({ret : Object => "Exec \"%s\", Auto-gen keys = %s : result = %s".format(args(0), StatementConstantDescriptions.genKeyDescriptions(args(1).asInstanceOf[Int]), ret)}) { - chain(method, args) - } - } - case "execute" => { - logStatement({ret : Object => "Exec \"%s\", Auto-gen keys for columns %s".format(args(0), args(1).asInstanceOf[Array[_]].mkString(", "), ret)}) { - chain(method, args) - } - } - case "executeBatch" => { - logStatement({result : Object => "Exec batch, counts = " + result.asInstanceOf[Array[Int]].mkString("(", ", ", ")")}) { - chain(method, Array()) - } - } - case "executeQuery" => { - logStatement({rs : Object => "Exec query \"%s\" : rs = %s".format(args(0),rs)}) { - chain(method, args) - } - } - case "executeUpdate" if args.length == 1 => { - logStatement({count : Object => "Exec update \"%s\" : count = %d".format(args(0),count)}) { - chain(method, args) - } - } - case "executeUpdate" if args(1).getClass == classOf[Int] => { - logStatement({count : Object => "Exec update \"%s\", Auto-gen keys = %s".format(args(0), StatementConstantDescriptions.genKeyDescriptions(args(1).asInstanceOf[Int]), count)}) { - chain(method, args) - } - } - case "executeUpdate" => { - logStatement({count : Object => "Exec update \"%s\", Auto-gen keys for columns %s".format(args(0), args(1).asInstanceOf[Array[_]].mkString(", "), count)}) { - chain(method, args) - } - } - case "getConnection" => { - logMeta("Get underlying Connection") { - chain(method, Array()) - } - } - case "getFetchDirection" => { 
- logMeta({ret : Object => "Get fetch direction : " + StatementConstantDescriptions.fetchDirDescriptions(ret.asInstanceOf[Int])}) { - chain(method, Array()) - } - } - case "getFetchSize" => { - logMeta({size : Object => "Get fetch size : " + size}) { - chain(method, Array()) - } - } - case "getGeneratedKeys" => { - logMeta({rs : Object => "Get generated keys : rs = " + rs}) { - chain(method, Array()) - } - } - case "getMaxFieldSize" => { - logMeta({size : Object => "Get max field size : " + size}) { - chain(method, Array()) - } - } - case "getMaxRows" => { - logMeta({maxRows : Object => "Get max rows : " + maxRows}) { - chain(method, Array()) - } - } - case "getMoreResults" if args.length == 0 => { - logMeta({hasMore : Object => "Get more results : " + hasMore}) { - chain(method, Array()) - } - } - case "getMoreResults" => { - logMeta({ret : Object => "Get more results (%s) : %s".format(StatementConstantDescriptions.getMoreResultsDescriptions(args(0).asInstanceOf[Int]), ret)}) { - chain(method, args) - } - } - case "getQueryTimeout" => { - logMeta({timeout : Object => "Get query timeout : %d seconds ".format(timeout)}) { - chain(method, Array()) - } - } - case "getResultSet" => { - logMeta({rs : Object => "Get result set : " + rs}) { - chain(method, Array()) - } - } - case "getResultSetConcurrency" => { - logMeta({ret : Object => "Get result set concurrency : " + StatementConstantDescriptions.resultSetConcurrencyDescs(ret.asInstanceOf[Int])}) { - chain(method, Array()) - } - } - case "getResultSetHoldability" => { - logMeta({ret : Object => "Get ResultSet holdability : " + StatementConstantDescriptions.resultSetHoldabilityDescs(ret.asInstanceOf[Int])}) { - chain(method, Array()) - } - } - case "getResultSetType" => { - logMeta({ret : Object => "Get ResultSet type : " + StatementConstantDescriptions.resultSetTypeDescs(ret.asInstanceOf[Int])}) { - chain(method, Array()) - } - } - case "getUpdateCount" => { - logMeta({count : Object => "Get update count : " + count}) { - chain(method, Array()) - } - } - case "getWarnings" => { - logMeta({ret : Object => "Get SQL Warnings: " + Box.!!(ret).map(_.toString).openOr("None")}) { - chain(method, Array()) - } - } - case "isClosed" => { - logMeta({ret : Object => "Check isClosed : " + ret}) { - chain(method, Array()) - } - } - case "isPoolable" => { - logMeta({ret : Object => "Check isPoolable : " + ret}) { - chain(method, Array()) - } - } - case "setCursorName" => { - logMeta("Set cursor name = %s" + args(0)) { - chain(method, args) - } - } - case "setEscapeProcessing" => { - logMeta("Set escape processing = " + args(0)) { - chain(method, args) - } - } - case "setFetchDirection" => { - logMeta("Set fetch direction = " + StatementConstantDescriptions.fetchDirDescriptions(args(0).asInstanceOf[Int])) { - chain(method, args) - } - } - case "setFetchSize" => { - logMeta("Set fetch size = " + args(0)) { - chain(method, args) - } - } - case "setMaxFieldSize" => { - logMeta("Set max field size = " + args(0)) { - chain(method, args) - } - } - case "setMaxRows" => { - logMeta("Set max rows = " + args(0)) { - chain(method, args) - } - } - case "setPoolable" => { - logMeta("Set poolable = " + args(0)) { - chain(method, args) - } - } - case "setQueryTimeout" => { - logMeta("Set query timeout = " + args(0)) { - chain(method, args) - } - } - case "toString" => { - // We'll call into our own representation here - this.toString - } - - // These are from wrapper and are required - case "isWrapperFor" => args(0).getClass match { - case `representative` => 
Boolean.box(true) - case _ => chain(method, args) - } - case "unwrap" => args(0).getClass match { - case `representative` => underlying - case _ => chain(method, args) - } - - case methodName => throw new NoSuchMethodException(methodName + " is not implemented here") - } - - protected def chain(method : Method, args : Array[Object]) : Object = - try { - val m = representative.getMethod(method.getName, method.getParameterTypes : _*) - - m.invoke(underlying, args : _*) - } catch { - case ite: java.lang.reflect.InvocationTargetException => throw ite.getCause - case nsme : NoSuchMethodException => logger.warn("Could not locate method %s for %s : %s".format(method.getName, underlyingClassname, nsme.getMessage)) - throw nsme - } - - /* This toString only gets invoked if we target this instance as a - * LoggedStatementHandler directly, or via the proxied "toString" above. - */ - override def toString = "Logged Statements =\n" + executedStatements.reverse.map(" " + _).mkString("\n") - } - - /** - * This class corresponds to a logged version of java.sql.PreparedStatement. All operations - * should be supported. - * - * To enable logging of DB operations, use DB.addLogFunc - */ - sealed private[DBLog] class LoggedPreparedStatementHandler (stmt : String, underlying : PreparedStatement) extends LoggedStatementHandler(underlying) { - override def underlyingClassname = "java.sql.PreparedStatement" - - private var paramMap = Map.empty[Int,Any] - - // utility method to fill in params - private def paramified : String = { - val sb = new StringBuilder(500) - def substitute (in : String, index : Int): Unit = in.indexOf('?') match { - case -1 => - sb.append(in) - - case j => - sb.append(in.substring(0,j)) - sb.append(paramMap(index)) - substitute(in.substring(j + 1), index + 1) - } - - substitute(stmt, 1) - sb.toString - } - - override def invoke (proxy : Object, method : Method, args : Array[Object]) : Object = { - method.getName match { - // All of the simple cases can be handled in one spot - case "setArray" | "setBigDecimal" | "setBoolean" | "setByte" | - "setBytes" | "setDouble" | "setFloat" | "setInt" | "setLong" | - "setNString" | "setRef" | "setRowId" | "setShort" | "setSQLXML" - => { - paramMap += args(0).asInstanceOf[Int] -> args(1) - chain(method, args) - } - - // Everything else gets special treatment - - case "addBatch" => { - logStatement("Batching \"%s\"".format(paramified)) { - chain(method, Array()) - } - } - - case "clearParameters" => { - paramMap = Map.empty[Int,Any] - logMeta("Clear parameters") { - chain(method, Array()) - } - } - - case "execute" => { - logStatement({ret : Object => "Exec \"%s\" : %s".format(paramified, ret)}) { - chain(method, Array()) - } - } - - case "executeQuery" => { - logStatement({rs : Object => "Exec query \"%s\" : %s".format(paramified, rs)}) { - chain(method, Array()) - } - } - - case "executeUpdate" => { - logStatement({ret : Object => "Exec update \"%s\" : updated %d rows".format(paramified, ret)}) { - chain(method, Array()) - } - } - - case "getMetaData" => { - logMeta({ret : Object => "Get metadata : " + ret}) { - chain(method, Array()) - } - } - - case "getParameterMetaData" => { - logMeta({ret : Object => "Get param metadata : " + ret}) { - chain(method, Array()) - } - } - - case "setAsciiStream" if args.length == 2 => { - paramMap += args(0).asInstanceOf[Int] -> "(Ascii Stream: %s)".format(args(1)) - chain(method, args) - } - - case "setAsciiStream" => { - paramMap += args(0).asInstanceOf[Int] -> "(Ascii Stream: %s (%d bytes))".format(args(1), args(2)) 
- chain(method, args) - } - - case "setBinaryStream" if args.length == 2 => { - paramMap += args(0).asInstanceOf[Int] -> "(Binary Stream: %s)".format(args(1)) - chain(method, args) - } - - case "setBinaryStream" => { - paramMap += args(0).asInstanceOf[Int] -> "(Binary Stream: %s (%d bytes))".format(args(1), args(2)) - chain(method, args) - } - - case "setBlob" if args.length == 2 => { - paramMap += args(0).asInstanceOf[Int] -> "(Blob : %s)".format(args(1)) - chain(method, args) - } - - case "setBlob" => { - paramMap += args(0).asInstanceOf[Int] -> "(Blob : %s (%d bytes))".format(args(1), args(2)) - chain(method, args) - } - - case "setCharacterStream" if args.length == 2 => { - paramMap += args(0).asInstanceOf[Int] -> "(Char stream : %s)".format(args(1)) - chain(method, args) - } - - case "setCharacterStream" => { - paramMap += args(0).asInstanceOf[Int] -> "(Char stream : %s (%d bytes))".format(args(1), args(2)) - chain(method, args) - } - - case "setClob" if args.length == 2 => { - paramMap += args(0).asInstanceOf[Int] -> "(Clob : %s)".format(args(1)) - chain(method, args) - } - - case "setClob" => { - paramMap += args(0).asInstanceOf[Int] -> "(Clob : %s (%d bytes))".format(args(1), args(2)) - chain(method, args) - } - - case "setDate" if args.length == 2 => { - paramMap += args(0).asInstanceOf[Int] -> args(1) - chain(method, args) - } - - case "setDate" => { - paramMap += args(0).asInstanceOf[Int] -> (args(1) + ":" + args(2)) - chain(method, args) - } - - case "setNCharacterStream" if args.length == 2 => { - paramMap += args(0).asInstanceOf[Int] -> "(NChar Stream : %s)".format(args(1)) - chain(method, args) - } - - case "setNCharacterStream" => { - paramMap += args(0).asInstanceOf[Int] -> "(NChar Stream : %s (%d bytes))".format(args(1), args(2)) - chain(method, args) - } - - case "setNClob" if args.length == 2 => { - paramMap += args(0).asInstanceOf[Int] -> "(NClob : %s)".format(args(1)) - chain(method, args) - } - - case "setNClob" => { - paramMap += args(0).asInstanceOf[Int] -> "(NClob : %s (%d bytes))".format(args(1), args(2)) - chain(method, args) - } - - case "setNull" => { - paramMap += args(0).asInstanceOf[Int] -> "NULL" - chain(method, args) - } - - case "setObject" if (args.length >= 2 && args.length < 4) => { - paramMap += args(0).asInstanceOf[Int] -> args(1) - chain(method, args) - } - - case "setObject" if args.length == 4 => { - paramMap += args(0).asInstanceOf[Int] -> "%s (scale %d)".format(args(1), args(3)) - chain(method, args) - } - - case "setString" => { - paramMap += args(0).asInstanceOf[Int] -> "\"%s\"".format(args(1)) - chain(method, args) - } - - case "setTime" if args.length == 2 => { - paramMap += args(0).asInstanceOf[Int] -> args(1) - chain(method, args) - } - - case "setTime" => { - paramMap += args(0).asInstanceOf[Int] -> (args(1) + ":" + args(2)) - chain(method, args) - } - - case "setTimestamp" if args.length == 2 => { - paramMap += args(0).asInstanceOf[Int] -> args(1) - chain(method, args) - } - - case "setTimestamp" => { - paramMap += args(0).asInstanceOf[Int] -> (args(1) + ":" + args(2)) - chain(method, args) - } - - case "setUnicodeStream" => { - paramMap += args(0).asInstanceOf[Int] -> "(Unicode Stream : %s (%d bytes))".format(args(1), args(2)) - chain(method, args) - } - - case "setURL" => { - paramMap += args(0).asInstanceOf[Int] -> "\"%s\"".format(args(1)) - chain(method, args) - } - - // Chain up to LoggedStatement if we don't handle it here - case _ => super.invoke(proxy, method, args) - } - } - } -} - -/** - * This object defines some conversions 
from Int JDBC constants to - * descriptive strings - */ -object StatementConstantDescriptions { - def genKeyDescriptions (in : Int) = in match { - case Statement.NO_GENERATED_KEYS => "NO_GENERATED_KEYS" - case Statement.RETURN_GENERATED_KEYS => "RETURN_GENERATED_KEYS" - case x => "Invalid Generated Keys Constant: " + x - } - - def fetchDirDescriptions (in : Int) = in match { - case ResultSet.FETCH_FORWARD => "FETCH_FORWARD" - case ResultSet.FETCH_REVERSE => "FETCH_REVERSE" - case ResultSet.FETCH_UNKNOWN => "FETCH_UNKNOWN" - case x => "Invalid Fetch Direction Constant: " + x - } - - def getMoreResultsDescriptions (in : Int) = in match { - case Statement.CLOSE_CURRENT_RESULT => "CLOSE_CURRENT_RESULT" - case Statement.KEEP_CURRENT_RESULT => "KEEP_CURRENT_RESULT" - case Statement.CLOSE_ALL_RESULTS => "CLOSE_ALL_RESULTS" - case x => "Invalid getMoreResults constant: " + x - } - - def resultSetConcurrencyDescs (in : Int) = in match { - case ResultSet.CONCUR_READ_ONLY => "CONCUR_READ_ONLY" - case ResultSet.CONCUR_UPDATABLE => "CONCUR_UPDATABLE" - case x => "Invalid ResultSet concurrency constant: " + x - } - - def resultSetHoldabilityDescs (in : Int) = in match { - case ResultSet.HOLD_CURSORS_OVER_COMMIT => "HOLD_CURSORS_OVER_COMMIT" - case ResultSet.CLOSE_CURSORS_AT_COMMIT => "CLOSE_CURSORS_AT_COMMIT" - case x => "Invalid ResultSet holdability constant: " + x - } - - def resultSetTypeDescs (in : Int) = in match { - case ResultSet.TYPE_FORWARD_ONLY => "TYPE_FORWARD_ONLY" - case ResultSet.TYPE_SCROLL_INSENSITIVE => "TYPE_SCROLL_INSENSITIVE" - case ResultSet.TYPE_SCROLL_SENSITIVE => "TYPE_SCROLL_SENSITIVE" - case x => "Invalid ResultSet type constant: " + x - } -} - diff --git a/persistence/db/src/test/scala/net/liftweb/db/DBSpec.scala b/persistence/db/src/test/scala/net/liftweb/db/DBSpec.scala deleted file mode 100644 index 29aae14f61..0000000000 --- a/persistence/db/src/test/scala/net/liftweb/db/DBSpec.scala +++ /dev/null @@ -1,172 +0,0 @@ -/* - * Copyright 2011-2014 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package db - -import org.specs2.mutable.Specification -import org.specs2.mock.Mockito - -import common._ -import util.DefaultConnectionIdentifier -import util.ControlHelpers._ - -import java.sql._ - -class DBSpec extends Specification with Mockito { - sequential - - trait CommitFunc { - def f(success: Boolean): Unit - } - - def dBVendor(connection: Connection): ProtoDBVendor = new ProtoDBVendor { - def createOne: Box[Connection] = { - connection.createStatement returns mock[PreparedStatement] - Full(connection) - } - } - - "eager buildLoanWrapper" should { - "call postTransaction functions with true if transaction is committed" in { - val m = mock[CommitFunc] - val activeConnection = mock[Connection] - DB.defineConnectionManager(DefaultConnectionIdentifier, dBVendor(activeConnection)) - - DB.buildLoanWrapper(true) { - DB.appendPostTransaction(DefaultConnectionIdentifier, m.f _) - DB.currentConnection.map{c => DB.exec(c, "stuff") {dummy => }} - } - there was one(activeConnection).commit - there was one(m).f(true) - } - - "call postTransaction functions with false if transaction is rolled back" in { - val m = mock[CommitFunc] - val activeConnection = mock[Connection] - DB.defineConnectionManager(DefaultConnectionIdentifier, dBVendor(activeConnection)) - - val lw = DB.buildLoanWrapper(true) - - tryo(lw.apply { - DB.appendPostTransaction(DefaultConnectionIdentifier, m.f _) - DB.currentConnection.map{c => DB.exec(c, "stuff") {dummy => }} - throw new RuntimeException("oh no") - 42 - }) - there was one(activeConnection).rollback - there was one(m).f(false) - } - } - - "lazy buildLoanWrapper" should { - "call postTransaction functions with true if transaction is committed" in { - val m = mock[CommitFunc] - val activeConnection = mock[Connection] - DB.defineConnectionManager(DefaultConnectionIdentifier, dBVendor(activeConnection)) - - DB.buildLoanWrapper(false) { - DB.use(DefaultConnectionIdentifier) {c => - DB.appendPostTransaction(DefaultConnectionIdentifier, m.f _) - DB.exec(c, "stuff") { - dummy => - } - } - DB.use(DefaultConnectionIdentifier) {c => - DB.exec(c, "more stuff") { dummy => } - } - } - there was one(activeConnection).commit - there was one(m).f(true) - } - - "call postTransaction functions with false if transaction is rolled back" in { - val m = mock[CommitFunc] - val activeConnection = mock[Connection] - DB.defineConnectionManager(DefaultConnectionIdentifier, dBVendor(activeConnection)) - - val lw = DB.buildLoanWrapper(false) - - tryo(lw.apply { - DB.use(DefaultConnectionIdentifier) {c => - DB.exec(c, "more stuff") { dummy => } - } - DB.use(DefaultConnectionIdentifier) {c => - DB.appendPostTransaction (m.f _) - DB.exec(c, "stuff") {dummy => throw new RuntimeException("oh no")} - } - 42 - }) - there was one(activeConnection).rollback - there was one(m).f(false) - } - } - - "DB.use" should { - "call postTransaction functions with true if transaction is committed" in { - val m = mock[CommitFunc] - val activeConnection = mock[Connection] - DB.defineConnectionManager(DefaultConnectionIdentifier, dBVendor(activeConnection)) - - DB.use(DefaultConnectionIdentifier) {c => - DB.appendPostTransaction(DefaultConnectionIdentifier, m.f _) - DB.exec(c, "stuff") {dummy => } - } - - there was one(activeConnection).commit - there was one(m).f(true) - } - - "call postTransaction functions with false if transaction is rolled back" in { - val m = mock[CommitFunc] - val activeConnection = mock[Connection] - DB.defineConnectionManager(DefaultConnectionIdentifier, 
dBVendor(activeConnection)) - - tryo(DB.use(DefaultConnectionIdentifier) {c => - DB.appendPostTransaction(DefaultConnectionIdentifier, m.f _) - DB.exec(c, "stuff") {dummy => throw new RuntimeException("Oh no")} - 42 - }) - - there was one(activeConnection).rollback - there was one(m).f(false) - success - } - } - - "appendPostTransaction" should { - "throw if called outside tx context" in { - DB.appendPostTransaction {committed => ()} must throwA[IllegalStateException] - } - } - - "DB.rollback" should { - "call postTransaction functions with false" in { - val m = mock[CommitFunc] - val activeConnection = mock[Connection] - DB.defineConnectionManager(DefaultConnectionIdentifier, dBVendor(activeConnection)) - - tryo(DB.use(DefaultConnectionIdentifier) {c => - DB.appendPostTransaction(DefaultConnectionIdentifier, m.f _) - DB.rollback(DefaultConnectionIdentifier) - 42 - }) - - there was one(activeConnection).rollback - there was one(m).f(false) - } - } -} diff --git a/persistence/mapper/src/main/resources/tableeditor/default.html b/persistence/mapper/src/main/resources/tableeditor/default.html deleted file mode 100644 index f967f50fb0..0000000000 --- a/persistence/mapper/src/main/resources/tableeditor/default.html +++ /dev/null @@ -1,39 +0,0 @@ - - - <table:title /> - -

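The DBSpec removed above exercises DB's post-transaction hooks; for context, a minimal sketch of how application code registers such a hook inside a transaction, assuming a connection manager has already been set up with DB.defineConnectionManager (PostTxAudit and the query are illustrative, not part of the patch):

import java.sql.ResultSet
import net.liftweb.db.DB
import net.liftweb.util.DefaultConnectionIdentifier

object PostTxAudit {
  // Runs a query inside DB.use and registers a callback that fires once the
  // surrounding transaction commits (true) or rolls back (false).
  def run(): Unit =
    DB.use(DefaultConnectionIdentifier) { conn =>
      DB.appendPostTransaction(DefaultConnectionIdentifier, committed =>
        println(s"transaction finished, committed = $committed"))
      DB.exec(conn, "SELECT 1") { (_: ResultSet) => () }
    }
}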
diff --git a/persistence/mapper/src/main/scala-2.11/net/liftweb/mapper/FieldFinder.scala b/persistence/mapper/src/main/scala-2.11/net/liftweb/mapper/FieldFinder.scala deleted file mode 100644 index 7736b1c190..0000000000 --- a/persistence/mapper/src/main/scala-2.11/net/liftweb/mapper/FieldFinder.scala +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mapper - -import scala.reflect.{ClassTag, classTag} - -class FieldFinder[T: ClassTag](metaMapper: AnyRef, logger: common.Logger) { - - import java.lang.reflect._ - - logger.debug("Created FieldFinder for " + classTag[T].runtimeClass) - - def isMagicObject(m: Method): Boolean = m.getReturnType.getName.endsWith("$" + m.getName + "$") && m.getParameterTypes.length == 0 - - def typeFilter: Class[_] => Boolean = classTag[T].runtimeClass.isAssignableFrom - - /** - * Find the magic mapper fields on the superclass - */ - def findMagicFields(onMagic: AnyRef, startingClass: Class[_]): List[Method] = { - // If a class name ends in $module, it's a subclass created for scala object instances - def deMod(in: String): String = - if (in.endsWith("$module")) in.substring(0, in.length - 7) - else in - - // find the magic fields for the given superclass - def findForClass(clz: Class[_]): List[Method] = clz match { - case null => Nil - case c => - // get the names of fields that represent the type we want - - val fields = Map(c.getDeclaredFields - .filter { f => - val ret = typeFilter(f.getType) - logger.trace("typeFilter(" + f.getType + "); T=" + classTag[T].runtimeClass) - ret - } - .map(f => (deMod(f.getName), f)): _*) - - logger.trace("fields: " + fields) - - // this method will find all the super classes and super-interfaces - def getAllSupers(clz: Class[_]): List[Class[_]] = clz match { - case null => Nil - case c => - c :: c.getInterfaces.toList.flatMap(getAllSupers) ::: - getAllSupers(c.getSuperclass) - } - - // does the method return an actual instance of an actual class that's - // associated with this Mapper class - def validActualType(meth: Method): Boolean = { - try { - // invoke the method - meth.invoke(onMagic) match { - case null => - logger.debug("Not a valid mapped field: %s".format(meth.getName)) - false - case inst => - // do we get a T of some sort back? - if (!typeFilter(inst.getClass)) false - else { - // find out if the class name of the actual thing starts - // with the name of this class or some superclass... - // basically, is an inner class of this class - getAllSupers(clz).exists(c => inst.getClass.getName.startsWith(c.getName)) - } - } - - } catch { - case e: Exception => - logger.debug("Not a valid mapped field: %s, got exception: %s".format(meth.getName, e)) - false - } - } - - // find all the declared methods - val meths = c.getDeclaredMethods.toList. - filter(_.getParameterTypes.length == 0). // that take no parameters - filter(m => Modifier.isPublic(m.getModifiers)). 
// that are public - filter(m => fields.contains(m.getName) && // that are associated with private fields - fields(m.getName).getType == m.getReturnType). - filter(validActualType) // and have a validated type - - meths ::: findForClass(clz.getSuperclass) - } - - findForClass(startingClass).distinct - } - - lazy val accessorMethods = findMagicFields(metaMapper, metaMapper.getClass.getSuperclass) -} diff --git a/persistence/mapper/src/main/scala-2.11/net/liftweb/mapper/ManyToMany.scala b/persistence/mapper/src/main/scala-2.11/net/liftweb/mapper/ManyToMany.scala deleted file mode 100644 index 441612eceb..0000000000 --- a/persistence/mapper/src/main/scala-2.11/net/liftweb/mapper/ManyToMany.scala +++ /dev/null @@ -1,217 +0,0 @@ -package net.liftweb -package mapper - -import common.{Empty, Full} - -import scala.annotation.tailrec - -/** - * Add this trait to a Mapper to add support for many-to-many relationships - * - * @author nafg - */ -trait ManyToMany extends BaseKeyedMapper { - this: KeyedMapper[_, _] => - - private[this] type K = TheKeyType - private[this] type T = KeyedMapperType - - private var manyToManyFields: List[MappedManyToMany[_,_,_]] = Nil - - /** - * An override for save to propagate the save to all children - * of this parent. - * Returns false as soon as the parent or a one-to-many field returns false. - * If they are all successful returns true. - */ - abstract override def save: Boolean = { - super.save && manyToManyFields.forall(_.save) - } - - /** - * An override for delete_! to propogate the deletion to all children - * of this parent. - * Returns false as soon as the parent or a one-to-many field returns false. - * If they are all successful returns true. - */ - abstract override def delete_! : Boolean = { - super.delete_! && - manyToManyFields.forall( _.delete_!) - } - - - /** - * This is the base class to extend for fields that track many-to-many relationships. - * @param joinMeta The singleton of the join table - * @param thisField The foreign key in the join table that refers to this mapper's primaryKey. - * @param otherField The foreign key in the join table that refers to the other mapper's primaryKey - * @param otherMeta The singleton of the other mapper - * @param qp Any QueryParams to limit entries in the join table (other than matching thisField to primaryKey) - * To limit children based on fields in the other table (not the join table), it is currently necessary - * to point the join mapper to a view which pulls the join table's fields as well as fields of the other table. 
- */ - class MappedManyToMany[O<:Mapper[O], K2, T2 <: KeyedMapper[K2,T2]]( - val joinMeta: MetaMapper[O], - thisField: MappedForeignKey[K,O,_ <: KeyedMapper[_,_]], - val otherField: MappedForeignKey[K2, O, T2], - val otherMeta: MetaMapper[T2], - val qp: QueryParam[O]*) extends scala.collection.mutable.Buffer[T2] { - - def otherFK[A](join: O)(f: MappedForeignKey[K2,O,T2] => A): A = - otherField.actualField(join) match { case mfk: MappedForeignKey[K2,O,T2] => f(mfk) } - - protected def children: List[T2] = joins.flatMap(otherFK(_)(_.obj)) - - protected var _joins: List[O] = _ - - /** - * Get the list of instances of joinMeta - */ - def joins: List[O] = _joins // read only to the public - protected var removedJoins: List[O] = Nil - - refresh - manyToManyFields ::= this - - protected def isJoinForChild(e: T2)(join: O): Boolean = otherField.actualField(join).get == e.primaryKeyField.get - - protected def joinForChild(e: T2): Option[O] = joins.find(isJoinForChild(e)) - - protected def own(e: T2): O = { - joinForChild(e).fold { - removedJoins - // first check if we can recycle a removed join - .find(otherField.actualField(_).get == e.primaryKeyField) - .fold{ - val newJoin = joinMeta.create - thisField.actualField(newJoin) match { - case mfk: MappedForeignKey[K, O, T] => mfk.set(primaryKeyField.get.asInstanceOf[K]) - } - otherFK(newJoin)(_.apply(e)) - newJoin - }{ removedJoin => - removedJoins = removedJoins filter removedJoin.ne - removedJoin // well, noLongerRemovedJoin... - } - }(join => join) - } - - protected def unown(e: T2): Option[O] = - joinForChild(e).map{ join => - removedJoins = join :: removedJoins - val o = otherField.actualField(join) - o.set(o.defaultValue) - thisField.actualField(join) match { case mfk => mfk set mfk.defaultValue } - join - } - - /** - * Get the List backing this Buffer. 
- */ - def all: List[T2] = children - - def length: Int = children.length - - def iterator: Iterator[T2] = children.iterator - - protected def childAt(n: Int): T2 = children(n) - def apply(n: Int): T2 = childAt(n) - def indexOf(e: T2): Int = children.indexWhere(e.eq) - - def insertAll(n: Int, traversable: Traversable[T2]) { - val ownedJoins = traversable map own - val n2 = joins.indexWhere(isJoinForChild(children(n))) - val before = joins.take(n2) - val after = joins.drop(n2) - - _joins = before ++ ownedJoins ++ after - } - - def +=:(elem: T2): MappedManyToMany.this.type = { - _joins ::= own(elem) - this - } - - def +=(elem: T2): MappedManyToMany.this.type = { - _joins ++= List(own(elem)) - this - } - - def update(n: Int, newelem: T2): Unit = { - unown(childAt(n)) match { - case Some(join) => - val n2 = joins.indexOf(join) - val (before, after) = (joins.take(n2), joins.drop(n2+1)) - _joins = before ++ List(own(newelem)) ++ after - case None => - } - } - - def remove(n: Int): T2 = { - val child = childAt(n) - unown(child).foreach(join => _joins = joins filterNot join.eq) - child - } - - override def remove(idx: Int, count: Int): Unit = { - if (count > 0) { - @tailrec - def loop(c: Int, a: List[T2]): List[T2] = - if (c == 0) childAt(idx) :: a - else loop(c - 1, childAt(idx + c) :: a) - - val joins0 = loop(count - 1, Nil) flatMap unown - _joins = joins filterNot joins0.contains - } - } - - def clear(): Unit = { - children foreach unown - _joins = Nil - } - - /** - * Discard the cached state of this MappedManyToMany's children and reinitialize it from the database - */ - def refresh: List[T2] = { - val by = new Cmp[O, TheKeyType](thisField, OprEnum.Eql, Full(primaryKeyField.get.asInstanceOf[K]), Empty, Empty) - - _joins = joinMeta.findAll( (by :: qp.toList): _*) - all - } - - /** - * Save the state of this MappedManyToMany to the database. - * This will do the following: - * 1) Prune join table instances whose "child" foreign key's value is its defaultValue, i.e., -1 - * 2) Set all join table instances' "parent" foreign key - * 3) Delete all join table instances whose child instance was removed - * 4) Save all child instances - * 5) If step 3 succeeds save all join instances - * 6) Return true if steps 2-4 all returned true; otherwise false - */ - def save: Boolean = { - _joins = joins.filter { join => - otherFK(join)(f => f.get != f.defaultValue) - } - _joins foreach { - thisField.actualField(_).asInstanceOf[MappedForeignKey[K,O,X] forSome {type X <: KeyedMapper[K,X]}] set ManyToMany.this.primaryKeyField.get.asInstanceOf[K] - } - - removedJoins.forall {_.delete_!} & ( // continue saving even if deleting fails - children.forall(_.save) && - joins.forall(_.save) - ) - } - - /** - * Deletes all join rows, including those - * marked for removal. - * Returns true if both succeed, otherwise false - */ - def delete_! : Boolean = { - removedJoins.forall(_.delete_!) & - joins.forall(_.delete_!) - } - } -} diff --git a/persistence/mapper/src/main/scala-2.11/net/liftweb/mapper/OneToMany.scala b/persistence/mapper/src/main/scala-2.11/net/liftweb/mapper/OneToMany.scala deleted file mode 100644 index b959516f7d..0000000000 --- a/persistence/mapper/src/main/scala-2.11/net/liftweb/mapper/OneToMany.scala +++ /dev/null @@ -1,284 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mapper - - -private[mapper] object RecursiveType { - val rec: { type R0 <: Mapper[R0] } = null - type Rec = rec.R0 -} -import net.liftweb.mapper.RecursiveType._ - -/** - * Add this trait to a Mapper for managed one-to-many support - * For example: class Contact extends LongKeyedMapper[Contact] with OneToMany[Long, Contact] { ... } - * @tparam K the type of the primary key - * @tparam T the mapper type - * @author nafg - */ -trait OneToMany[K,T<:KeyedMapper[K, T]] extends KeyedMapper[K,T] { this: T => - - private[mapper] lazy val oneToManyFields: List[MappedOneToManyBase[Rec]] = { - new FieldFinder[MappedOneToManyBase[Rec]]( - getSingleton, - net.liftweb.common.Logger(classOf[OneToMany[K,T]]) - ).accessorMethods map (_.invoke(this).asInstanceOf[MappedOneToManyBase[Rec]]) - } - - /** - * An override for save to propagate the save to all children - * of this parent. - * Returns false as soon as the parent or a one-to-many field returns false. - * If they are all successful returns true. - */ - override def save: Boolean = { - val ret = super.save && - oneToManyFields.forall(_.save) - ret - } - - /** - * An override for delete_! to propagate the deletion - * to all children of one-to-many fields implementing Cascade. - * Returns false as soon as the parent or a one-to-many field returns false. - * If they are all successful returns true. - */ - override def delete_! : Boolean = DB.use(connectionIdentifier){ _ => - if(oneToManyFields.forall{(_: MappedOneToManyBase[_ <: Mapper[_]]) match { - case f: Cascade[_] => f.delete_! - case _ => true - } - }) - super.delete_! - else { - DB.rollback(connectionIdentifier) - false - } - } - - - /** - * This implicit allows a MappedForeignKey to be used as foreignKey function. - * Returns a function that takes a Mapper and looks up the actualField of field on the Mapper. - */ - implicit def foreignKey[K, O<:Mapper[O], T<:KeyedMapper[K,T]](field: MappedForeignKey[K,O,T]): O=>MappedForeignKey[K,O,T] = - field.actualField(_).asInstanceOf[MappedForeignKey[K,O,T]] - - /** - * Simple OneToMany support for children from the same table - */ - class MappedOneToMany[O <: Mapper[O]](meta: MetaMapper[O], foreign: MappedForeignKey[K,O,T], qp: QueryParam[O]*) - extends MappedOneToManyBase[O]( - ()=>{ - val ret = meta.findAll(By(foreign, primaryKeyField.get) :: qp.toList : _*) - for(child <- ret) { - foreign.actualField(child).asInstanceOf[MappedForeignKey[K,O,T]].primeObj(net.liftweb.common.Full(OneToMany.this : T)) - } - ret - }, - foreign - ) - - /** - * This is the base class to use for fields that represent one-to-many or parent-child relationships. - * Maintains a list of children, tracking pending additions and deletions, and - * keeping their foreign key pointed to this mapper. - * Implements Buffer, so the children can be managed as one. - * Most users will use MappedOneToMany, however to support children from multiple tables - * it is necessary to use MappedOneToManyBase. - * @param reloadFunc A function that returns a sequence of children from storage. 
- * @param foreign A function that gets the MappedForeignKey on the child that refers to this parent - */ - class MappedOneToManyBase[O <: Mapper[_]](val reloadFunc: () => Seq[O], - val foreign: O => MappedForeignKey[K,_,T]) extends scala.collection.mutable.Buffer[O] { - private var inited = false - private var _delegate: List[O] = _ - /** - * children that were added before the parent was ever saved - */ - private var unlinked: List[O] = Nil - protected def delegate: List[O] = { - if(!inited) { - refresh() - inited = true - } - _delegate - } - protected def delegate_=(d: List[O]): Unit = _delegate = d - - /** - * Takes ownership of e. Sets e's foreign key to our primary key - */ - protected def own(e: O): O = { - val f0 = foreign(e).asInstanceOf[Any] - f0 match { - case f: MappedLongForeignKey[O,T] with MappedForeignKey[K,_,T] => - f.apply(OneToMany.this) - case f: MappedForeignKey[K,_,T] => - f.set(OneToMany.this.primaryKeyField.get) - } - if(!OneToMany.this.saved_?) - unlinked ::= e - e - } - /** - * Relinquishes ownership of e. Resets e's foreign key to its default value. - */ - protected def unown(e: O): O = { - val f = foreign(e) - f.set(f.defaultValue) - unlinked = unlinked filter {e.ne} - e - } - /** - * Returns the backing List - */ - def all: List[O] = delegate - - // 2.8: return this - def +=(elem: O): MappedOneToManyBase.this.type = { - delegate = delegate ++ List(own(elem)) - this - } - // 2.7 - //def readOnly = all - def length: Int = delegate.length - // 2.7 - //def elements = delegate.elements - // 2.8 - def iterator: Iterator[O] = delegate.iterator - - def apply(n: Int): O = delegate(n) - - // 2.7 - /* def +:(elem: O) = { - delegate ::= own(elem) - this - } */ - // 2.8 - def +=:(elem: O): MappedOneToManyBase.this.type = { - delegate ::= own(elem) - this - } - - override def indexOf[B >: O](e: B): Int = delegate.indexWhere(e.asInstanceOf[AnyRef].eq) - - // 2.7 - // def insertAll(n: Int, iter: Iterable[O]) { - // 2.8 - def insertAll(n: Int, iter: Traversable[O]) { - val (before, after) = delegate.splitAt(n) - iter foreach own - delegate = before ++ iter ++ after - } - - def update(n: Int, newelem: O): Unit = { - unown(delegate(n)) - delegate = delegate.take(n) ++ List(own(newelem)) ++ delegate.drop(n+1) - } - - def remove(n: Int): O = { - val e = unown(delegate(n)) - delegate = delegate.filterNot(e.eq) - e - } - - def clear(): Unit = { - while(delegate.nonEmpty) - remove(0) - } - - /** - * Reloads the children from storage. - * NOTE: This may leave children in an inconsistent state. - * It is recommended to call save or clear() before calling refresh. - */ - def refresh(): Unit = { - delegate = reloadFunc().toList - if(saved_?) - unlinked = Nil - else - unlinked = _delegate - } - - /** - * Saves this "field," i.e., all the children it represents. - * Returns false as soon as save on a child returns false. - * Returns true if all children were saved successfully. 
- */ - def save: Boolean = { - unlinked foreach {u => - val f = foreign(u) - if(f.obj.map(_ eq OneToMany.this) openOr true) // obj is Empty or this - f.apply(OneToMany.this) - } - unlinked = Nil - delegate = delegate.filter {e => - foreign(e).get == OneToMany.this.primaryKeyField.get || - foreign(e).obj.map(_ eq OneToMany.this).openOr(false) // obj is this but not Empty - } - delegate.forall(_.save) - } - - override def toString: String = { - val c = getClass.getSimpleName - val l = c.lastIndexOf("$") - c.substring(c.lastIndexOf("$",l-1)+1, l) + delegate.mkString("[",", ","]") - } - } - - /** - * Adds behavior to delete orphaned fields before save. - */ - trait Owned[O<:Mapper[_]] extends MappedOneToManyBase[O] { - var removed: List[O] = Nil - override def unown(e: O) = { - removed = e :: removed - super.unown(e) - } - override def own(e: O) = { - removed = removed filter {e.ne} - super.own(e) - } - override def save: Boolean = { - val unowned = removed.filter{ e => - val f = foreign(e) - f.get == f.defaultValue - } - unowned foreach {_.delete_!} - super.save - } - } - - /** - * Trait that indicates that the children represented - * by this field should be deleted when the parent is deleted. - */ - trait Cascade[O<:Mapper[_]] extends MappedOneToManyBase[O] { - def delete_! : Boolean = { - delegate.forall { e => - if(foreign(e).get == - OneToMany.this.primaryKeyField.get) { - e.delete_! - } - else - true // doesn't constitute a failure - } - } - } -} diff --git a/persistence/mapper/src/main/scala-2.12/net/liftweb/mapper/FieldFinder.scala b/persistence/mapper/src/main/scala-2.12/net/liftweb/mapper/FieldFinder.scala deleted file mode 100644 index 7736b1c190..0000000000 --- a/persistence/mapper/src/main/scala-2.12/net/liftweb/mapper/FieldFinder.scala +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package mapper - -import scala.reflect.{ClassTag, classTag} - -class FieldFinder[T: ClassTag](metaMapper: AnyRef, logger: common.Logger) { - - import java.lang.reflect._ - - logger.debug("Created FieldFinder for " + classTag[T].runtimeClass) - - def isMagicObject(m: Method): Boolean = m.getReturnType.getName.endsWith("$" + m.getName + "$") && m.getParameterTypes.length == 0 - - def typeFilter: Class[_] => Boolean = classTag[T].runtimeClass.isAssignableFrom - - /** - * Find the magic mapper fields on the superclass - */ - def findMagicFields(onMagic: AnyRef, startingClass: Class[_]): List[Method] = { - // If a class name ends in $module, it's a subclass created for scala object instances - def deMod(in: String): String = - if (in.endsWith("$module")) in.substring(0, in.length - 7) - else in - - // find the magic fields for the given superclass - def findForClass(clz: Class[_]): List[Method] = clz match { - case null => Nil - case c => - // get the names of fields that represent the type we want - - val fields = Map(c.getDeclaredFields - .filter { f => - val ret = typeFilter(f.getType) - logger.trace("typeFilter(" + f.getType + "); T=" + classTag[T].runtimeClass) - ret - } - .map(f => (deMod(f.getName), f)): _*) - - logger.trace("fields: " + fields) - - // this method will find all the super classes and super-interfaces - def getAllSupers(clz: Class[_]): List[Class[_]] = clz match { - case null => Nil - case c => - c :: c.getInterfaces.toList.flatMap(getAllSupers) ::: - getAllSupers(c.getSuperclass) - } - - // does the method return an actual instance of an actual class that's - // associated with this Mapper class - def validActualType(meth: Method): Boolean = { - try { - // invoke the method - meth.invoke(onMagic) match { - case null => - logger.debug("Not a valid mapped field: %s".format(meth.getName)) - false - case inst => - // do we get a T of some sort back? - if (!typeFilter(inst.getClass)) false - else { - // find out if the class name of the actual thing starts - // with the name of this class or some superclass... - // basically, is an inner class of this class - getAllSupers(clz).exists(c => inst.getClass.getName.startsWith(c.getName)) - } - } - - } catch { - case e: Exception => - logger.debug("Not a valid mapped field: %s, got exception: %s".format(meth.getName, e)) - false - } - } - - // find all the declared methods - val meths = c.getDeclaredMethods.toList. - filter(_.getParameterTypes.length == 0). // that take no parameters - filter(m => Modifier.isPublic(m.getModifiers)). // that are public - filter(m => fields.contains(m.getName) && // that are associated with private fields - fields(m.getName).getType == m.getReturnType). 
- filter(validActualType) // and have a validated type - - meths ::: findForClass(clz.getSuperclass) - } - - findForClass(startingClass).distinct - } - - lazy val accessorMethods = findMagicFields(metaMapper, metaMapper.getClass.getSuperclass) -} diff --git a/persistence/mapper/src/main/scala-2.12/net/liftweb/mapper/ManyToMany.scala b/persistence/mapper/src/main/scala-2.12/net/liftweb/mapper/ManyToMany.scala deleted file mode 100644 index 441612eceb..0000000000 --- a/persistence/mapper/src/main/scala-2.12/net/liftweb/mapper/ManyToMany.scala +++ /dev/null @@ -1,217 +0,0 @@ -package net.liftweb -package mapper - -import common.{Empty, Full} - -import scala.annotation.tailrec - -/** - * Add this trait to a Mapper to add support for many-to-many relationships - * - * @author nafg - */ -trait ManyToMany extends BaseKeyedMapper { - this: KeyedMapper[_, _] => - - private[this] type K = TheKeyType - private[this] type T = KeyedMapperType - - private var manyToManyFields: List[MappedManyToMany[_,_,_]] = Nil - - /** - * An override for save to propagate the save to all children - * of this parent. - * Returns false as soon as the parent or a one-to-many field returns false. - * If they are all successful returns true. - */ - abstract override def save: Boolean = { - super.save && manyToManyFields.forall(_.save) - } - - /** - * An override for delete_! to propogate the deletion to all children - * of this parent. - * Returns false as soon as the parent or a one-to-many field returns false. - * If they are all successful returns true. - */ - abstract override def delete_! : Boolean = { - super.delete_! && - manyToManyFields.forall( _.delete_!) - } - - - /** - * This is the base class to extend for fields that track many-to-many relationships. - * @param joinMeta The singleton of the join table - * @param thisField The foreign key in the join table that refers to this mapper's primaryKey. - * @param otherField The foreign key in the join table that refers to the other mapper's primaryKey - * @param otherMeta The singleton of the other mapper - * @param qp Any QueryParams to limit entries in the join table (other than matching thisField to primaryKey) - * To limit children based on fields in the other table (not the join table), it is currently necessary - * to point the join mapper to a view which pulls the join table's fields as well as fields of the other table. 
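A minimal declaration sketch for this kind of field, assuming hypothetical User, Role and UserRole mappers (none of these names are part of the framework itself):

import net.liftweb.mapper._

// Join table: one row per (user, role) pair.
class UserRole extends LongKeyedMapper[UserRole] with IdPK {
  def getSingleton = UserRole
  object user extends MappedLongForeignKey(this, User)
  object role extends MappedLongForeignKey(this, Role)
}
object UserRole extends UserRole with LongKeyedMetaMapper[UserRole]

class Role extends LongKeyedMapper[Role] with IdPK {
  def getSingleton = Role
  object name extends MappedString(this, 64)
}
object Role extends Role with LongKeyedMetaMapper[Role]

class User extends LongKeyedMapper[User] with IdPK with ManyToMany {
  def getSingleton = User
  object email extends MappedString(this, 128)

  // joinMeta = UserRole, thisField = UserRole.user,
  // otherField = UserRole.role, otherMeta = Role
  object roles extends MappedManyToMany(UserRole, UserRole.user, UserRole.role, Role)
}
object User extends User with LongKeyedMetaMapper[User]

The trailing qp varargs slot can carry extra QueryParams to restrict which join-table rows are loaded, as described above.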
- */ - class MappedManyToMany[O<:Mapper[O], K2, T2 <: KeyedMapper[K2,T2]]( - val joinMeta: MetaMapper[O], - thisField: MappedForeignKey[K,O,_ <: KeyedMapper[_,_]], - val otherField: MappedForeignKey[K2, O, T2], - val otherMeta: MetaMapper[T2], - val qp: QueryParam[O]*) extends scala.collection.mutable.Buffer[T2] { - - def otherFK[A](join: O)(f: MappedForeignKey[K2,O,T2] => A): A = - otherField.actualField(join) match { case mfk: MappedForeignKey[K2,O,T2] => f(mfk) } - - protected def children: List[T2] = joins.flatMap(otherFK(_)(_.obj)) - - protected var _joins: List[O] = _ - - /** - * Get the list of instances of joinMeta - */ - def joins: List[O] = _joins // read only to the public - protected var removedJoins: List[O] = Nil - - refresh - manyToManyFields ::= this - - protected def isJoinForChild(e: T2)(join: O): Boolean = otherField.actualField(join).get == e.primaryKeyField.get - - protected def joinForChild(e: T2): Option[O] = joins.find(isJoinForChild(e)) - - protected def own(e: T2): O = { - joinForChild(e).fold { - removedJoins - // first check if we can recycle a removed join - .find(otherField.actualField(_).get == e.primaryKeyField) - .fold{ - val newJoin = joinMeta.create - thisField.actualField(newJoin) match { - case mfk: MappedForeignKey[K, O, T] => mfk.set(primaryKeyField.get.asInstanceOf[K]) - } - otherFK(newJoin)(_.apply(e)) - newJoin - }{ removedJoin => - removedJoins = removedJoins filter removedJoin.ne - removedJoin // well, noLongerRemovedJoin... - } - }(join => join) - } - - protected def unown(e: T2): Option[O] = - joinForChild(e).map{ join => - removedJoins = join :: removedJoins - val o = otherField.actualField(join) - o.set(o.defaultValue) - thisField.actualField(join) match { case mfk => mfk set mfk.defaultValue } - join - } - - /** - * Get the List backing this Buffer. 
- */ - def all: List[T2] = children - - def length: Int = children.length - - def iterator: Iterator[T2] = children.iterator - - protected def childAt(n: Int): T2 = children(n) - def apply(n: Int): T2 = childAt(n) - def indexOf(e: T2): Int = children.indexWhere(e.eq) - - def insertAll(n: Int, traversable: Traversable[T2]) { - val ownedJoins = traversable map own - val n2 = joins.indexWhere(isJoinForChild(children(n))) - val before = joins.take(n2) - val after = joins.drop(n2) - - _joins = before ++ ownedJoins ++ after - } - - def +=:(elem: T2): MappedManyToMany.this.type = { - _joins ::= own(elem) - this - } - - def +=(elem: T2): MappedManyToMany.this.type = { - _joins ++= List(own(elem)) - this - } - - def update(n: Int, newelem: T2): Unit = { - unown(childAt(n)) match { - case Some(join) => - val n2 = joins.indexOf(join) - val (before, after) = (joins.take(n2), joins.drop(n2+1)) - _joins = before ++ List(own(newelem)) ++ after - case None => - } - } - - def remove(n: Int): T2 = { - val child = childAt(n) - unown(child).foreach(join => _joins = joins filterNot join.eq) - child - } - - override def remove(idx: Int, count: Int): Unit = { - if (count > 0) { - @tailrec - def loop(c: Int, a: List[T2]): List[T2] = - if (c == 0) childAt(idx) :: a - else loop(c - 1, childAt(idx + c) :: a) - - val joins0 = loop(count - 1, Nil) flatMap unown - _joins = joins filterNot joins0.contains - } - } - - def clear(): Unit = { - children foreach unown - _joins = Nil - } - - /** - * Discard the cached state of this MappedManyToMany's children and reinitialize it from the database - */ - def refresh: List[T2] = { - val by = new Cmp[O, TheKeyType](thisField, OprEnum.Eql, Full(primaryKeyField.get.asInstanceOf[K]), Empty, Empty) - - _joins = joinMeta.findAll( (by :: qp.toList): _*) - all - } - - /** - * Save the state of this MappedManyToMany to the database. - * This will do the following: - * 1) Prune join table instances whose "child" foreign key's value is its defaultValue, i.e., -1 - * 2) Set all join table instances' "parent" foreign key - * 3) Delete all join table instances whose child instance was removed - * 4) Save all child instances - * 5) If step 3 succeeds save all join instances - * 6) Return true if steps 2-4 all returned true; otherwise false - */ - def save: Boolean = { - _joins = joins.filter { join => - otherFK(join)(f => f.get != f.defaultValue) - } - _joins foreach { - thisField.actualField(_).asInstanceOf[MappedForeignKey[K,O,X] forSome {type X <: KeyedMapper[K,X]}] set ManyToMany.this.primaryKeyField.get.asInstanceOf[K] - } - - removedJoins.forall {_.delete_!} & ( // continue saving even if deleting fails - children.forall(_.save) && - joins.forall(_.save) - ) - } - - /** - * Deletes all join rows, including those - * marked for removal. - * Returns true if both succeed, otherwise false - */ - def delete_! : Boolean = { - removedJoins.forall(_.delete_!) & - joins.forall(_.delete_!) - } - } -} diff --git a/persistence/mapper/src/main/scala-2.12/net/liftweb/mapper/OneToMany.scala b/persistence/mapper/src/main/scala-2.12/net/liftweb/mapper/OneToMany.scala deleted file mode 100644 index d31e6a93f0..0000000000 --- a/persistence/mapper/src/main/scala-2.12/net/liftweb/mapper/OneToMany.scala +++ /dev/null @@ -1,284 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mapper - - -private[mapper] object RecursiveType { - val rec: { type R0 <: Mapper[R0] } = null - type Rec = rec.R0 -} -import net.liftweb.mapper.RecursiveType._ - -/** - * Add this trait to a Mapper for managed one-to-many support - * For example: class Contact extends LongKeyedMapper[Contact] with OneToMany[Long, Contact] { ... } - * @tparam K the type of the primary key - * @tparam T the mapper type - * @author nafg - */ -trait OneToMany[K,T<:KeyedMapper[K, T]] extends KeyedMapper[K,T] { this: T => - - private[mapper] lazy val oneToManyFields: List[MappedOneToManyBase[Rec]] = { - new FieldFinder[MappedOneToManyBase[Rec]]( - getSingleton, - net.liftweb.common.Logger(classOf[OneToMany[K,T]]) - ).accessorMethods map (_.invoke(this).asInstanceOf[MappedOneToManyBase[Rec]]) - } - - /** - * An override for save to propagate the save to all children - * of this parent. - * Returns false as soon as the parent or a one-to-many field returns false. - * If they are all successful returns true. - */ - override def save: Boolean = { - val ret = super.save && - oneToManyFields.forall(_.save) - ret - } - - /** - * An override for delete_! to propagate the deletion - * to all children of one-to-many fields implementing Cascade. - * Returns false as soon as the parent or a one-to-many field returns false. - * If they are all successful returns true. - */ - override def delete_! : Boolean = DB.use(connectionIdentifier){ _ => - if(oneToManyFields.forall{(_: MappedOneToManyBase[_ <: Mapper[_]]) match { - case f: Cascade[_] => f.delete_! - case _ => true - } - }) - super.delete_! - else { - DB.rollback(connectionIdentifier) - false - } - } - - - /** - * This implicit allows a MappedForeignKey to be used as foreignKey function. - * Returns a function that takes a Mapper and looks up the actualField of field on the Mapper. - */ - implicit def foreignKey[K, O<:Mapper[O], T<:KeyedMapper[K,T]](field: MappedForeignKey[K,O,T]): O=>MappedForeignKey[K,O,T] = - field.actualField(_).asInstanceOf[MappedForeignKey[K,O,T]] - - /** - * Simple OneToMany support for children from the same table - */ - class MappedOneToMany[O <: Mapper[O]](meta: MetaMapper[O], foreign: MappedForeignKey[K,O,T], qp: QueryParam[O]*) - extends MappedOneToManyBase[O]( - ()=>{ - val ret = meta.findAll(By(foreign, primaryKeyField.get) :: qp.toList : _*) - for(child <- ret) { - foreign.actualField(child).asInstanceOf[MappedForeignKey[K,O,T]].primeObj(net.liftweb.common.Full(OneToMany.this : T)) - } - ret - }, - foreign - ) - - /** - * This is the base class to use for fields that represent one-to-many or parent-child relationships. - * Maintains a list of children, tracking pending additions and deletions, and - * keeping their foreign key pointed to this mapper. - * Implements Buffer, so the children can be managed as one. - * Most users will use MappedOneToMany, however to support children from multiple tables - * it is necessary to use MappedOneToManyBase. - * @param reloadFunc A function that returns a sequence of children from storage. 
- * @param foreign A function that gets the MappedForeignKey on the child that refers to this parent - */ - class MappedOneToManyBase[O <: Mapper[_]](val reloadFunc: () => Seq[O], - val foreign: O => MappedForeignKey[K,_,T]) extends scala.collection.mutable.Buffer[O] { - private var inited = false - private var _delegate: List[O] = _ - /** - * children that were added before the parent was ever saved - */ - private var unlinked: List[O] = Nil - protected def delegate: List[O] = { - if(!inited) { - refresh() - inited = true - } - _delegate - } - protected def delegate_=(d: List[O]): Unit = _delegate = d - - /** - * Takes ownership of e. Sets e's foreign key to our primary key - */ - protected def own(e: O): O = { - val f0 = foreign(e).asInstanceOf[Any] - f0 match { - case f: MappedLongForeignKey[O,T] with MappedForeignKey[K,_,T] => - f.apply(OneToMany.this) - case f: MappedForeignKey[K,_,T] => - f.set(OneToMany.this.primaryKeyField.get) - } - if(!OneToMany.this.saved_?) - unlinked ::= e - e - } - /** - * Relinquishes ownership of e. Resets e's foreign key to its default value. - */ - protected def unown(e: O): O = { - val f = foreign(e) - f.set(f.defaultValue) - unlinked = unlinked filter {e.ne} - e - } - /** - * Returns the backing List - */ - def all: List[O] = delegate - - // 2.8: return this - def +=(elem: O): MappedOneToManyBase.this.type = { - delegate = delegate ++ List(own(elem)) - this - } - // 2.7 - //def readOnly = all - def length: Int = delegate.length - // 2.7 - //def elements = delegate.elements - // 2.8 - def iterator: Iterator[O] = delegate.iterator - - def apply(n: Int): O = delegate(n) - - // 2.7 - /* def +:(elem: O) = { - delegate ::= own(elem) - this - } */ - // 2.8 - def +=:(elem: O): MappedOneToManyBase.this.type = { - delegate ::= own(elem) - this - } - - override def indexOf[B >: O](e: B): Int = delegate.indexWhere(e.asInstanceOf[AnyRef].eq) - - // 2.7 - // def insertAll(n: Int, iter: Iterable[O]) { - // 2.8 - def insertAll(n: Int, iter: Traversable[O]) { - val (before, after) = delegate.splitAt(n) - iter foreach own - delegate = before ++ iter ++ after - } - - def update(n: Int, newelem: O): Unit = { - unown(delegate(n)) - delegate = delegate.take(n) ++ List(own(newelem)) ++ delegate.drop(n+1) - } - - def remove(n: Int): O = { - val e = unown(delegate(n)) - delegate = delegate.filterNot(e.eq) - e - } - - def clear(): Unit = { - while(delegate.nonEmpty) - remove(0) - } - - /** - * Reloads the children from storage. - * NOTE: This may leave children in an inconsistent state. - * It is recommended to call save or clear() before calling refresh. - */ - def refresh(): Unit = { - delegate = reloadFunc().toList - if(saved_?) - unlinked = Nil - else - unlinked = _delegate - } - - /** - * Saves this "field," i.e., all the children it represents. - * Returns false as soon as save on a child returns false. - * Returns true if all children were saved successfully. 
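Taken together, a minimal parent/child sketch (Contact follows the example given in the trait's own scaladoc; the PhoneNumber mapper and the field names are hypothetical):

import net.liftweb.mapper._

class PhoneNumber extends LongKeyedMapper[PhoneNumber] with IdPK {
  def getSingleton = PhoneNumber
  object contact extends MappedLongForeignKey(this, Contact)
  object number  extends MappedString(this, 32)
}
object PhoneNumber extends PhoneNumber with LongKeyedMetaMapper[PhoneNumber]

class Contact extends LongKeyedMapper[Contact] with IdPK with OneToMany[Long, Contact] {
  def getSingleton = Contact
  object name extends MappedString(this, 64)

  // children from the same table, loaded in primary-key order
  object numbers extends MappedOneToMany(PhoneNumber, PhoneNumber.contact,
    OrderBy(PhoneNumber.id, Ascending))
}
object Contact extends Contact with LongKeyedMetaMapper[Contact]

// e.g. in a snippet: children appended to the buffer get their foreign key
// pointed at the parent, and saving the parent saves them as well
val c = Contact.create.name("Alice")
c.numbers += PhoneNumber.create.number("555-0100")
c.save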
- */ - def save: Boolean = { - unlinked foreach {u => - val f = foreign(u) - if(f.obj.map(_ eq OneToMany.this) openOr true) // obj is Empty or this - f.apply(OneToMany.this) - } - unlinked = Nil - delegate = delegate.filter {e => - foreign(e).get == OneToMany.this.primaryKeyField.get || - foreign(e).obj.map(_ eq OneToMany.this).openOr(false) // obj is this but not Empty - } - delegate.forall(_.save) - } - - override def toString: String = { - val c = getClass.getSimpleName - val l = c.lastIndexOf("$") - c.substring(c.lastIndexOf("$",l-1)+1, l) + delegate.mkString("[",", ","]") - } - } - - /** - * Adds behavior to delete orphaned fields before save. - */ - trait Owned[O<:Mapper[_]] extends MappedOneToManyBase[O] { - var removed: List[O] = Nil - override def unown(e: O) = { - removed = e :: removed - super.unown(e) - } - override def own(e: O) = { - removed = removed filter {e.ne} - super.own(e) - } - override def save: Boolean = { - val unowned = removed.filter{ e => - val f = foreign(e) - f.get == f.defaultValue - } - unowned foreach {_.delete_!} - super.save - } - } - - /** - * Trait that indicates that the children represented - * by this field should be deleted when the parent is deleted. - */ - trait Cascade[O<:Mapper[_]] extends MappedOneToManyBase[O] { - def delete_! : Boolean = { - delegate.forall { e => - if(foreign(e).get == - OneToMany.this.primaryKeyField.get) { - e.delete_! - } - else - true // doesn't constitute a failure - } - } - } -} diff --git a/persistence/mapper/src/main/scala-2.13/net/liftweb/mapper/FieldFinder.scala b/persistence/mapper/src/main/scala-2.13/net/liftweb/mapper/FieldFinder.scala deleted file mode 100644 index ee258ab82c..0000000000 --- a/persistence/mapper/src/main/scala-2.13/net/liftweb/mapper/FieldFinder.scala +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package mapper - -import scala.reflect.{ClassTag, classTag} - -class FieldFinder[T: ClassTag](metaMapper: AnyRef, logger: common.Logger) { - import java.lang.reflect._ - - logger.debug("Created FieldFinder for " + classTag[T].runtimeClass) - - def isMagicObject(m: Method): Boolean = m.getReturnType.getName.endsWith("$"+m.getName+"$") && m.getParameterTypes.length == 0 - - def typeFilter: Class[_]=>Boolean = classTag[T].runtimeClass.isAssignableFrom - - /** - * Find the magic mapper fields on the superclass - */ - def findMagicFields(onMagic: AnyRef, startingClass: Class[_]): List[Method] = { - // If a class name ends in $module, it's a subclass created for scala object instances - def deMod(in: String): String = - if (in.endsWith("$module")) in.substring(0, in.length - 7) - else in - - // find the magic fields for the given superclass - def findForClass(clz: Class[_]): List[Method] = clz match { - case null => Nil - case c => - // get the names of fields that represent the type we want - - val fields = Map.from(c.getDeclaredFields - .filter{f => - val ret = typeFilter(f.getType) - logger.trace("typeFilter(" + f.getType + "); T=" + classTag[T].runtimeClass) - ret - } - .map(f => (deMod(f.getName), f))) - - logger.trace("fields: " + fields) - - // this method will find all the super classes and super-interfaces - def getAllSupers(clz: Class[_]): List[Class[_]] = clz match { - case null => Nil - case c => - c :: c.getInterfaces.toList.flatMap(getAllSupers) ::: - getAllSupers(c.getSuperclass) - } - - // does the method return an actual instance of an actual class that's - // associated with this Mapper class - def validActualType(meth: Method): Boolean = { - try { - // invoke the method - meth.invoke(onMagic) match { - case null => - logger.debug("Not a valid mapped field: %s".format(meth.getName)) - false - case inst => - // do we get a T of some sort back? - if (!typeFilter(inst.getClass)) false - else { - // find out if the class name of the actual thing starts - // with the name of this class or some superclass... - // basically, is an inner class of this class - getAllSupers(clz).exists(c => inst.getClass.getName.startsWith(c.getName)) - } - } - - } catch { - case e: Exception => - logger.debug("Not a valid mapped field: %s, got exception: %s".format(meth.getName, e)) - false - } - } - - // find all the declared methods - val meths = c.getDeclaredMethods.toList. - filter(_.getParameterTypes.length == 0). // that take no parameters - filter(m => Modifier.isPublic(m.getModifiers)). // that are public - filter(m => fields.contains(m.getName) && // that are associated with private fields - fields(m.getName).getType == m.getReturnType). - filter(validActualType) // and have a validated type - - meths ::: findForClass(clz.getSuperclass) - } - - findForClass(startingClass).distinct - } - - lazy val accessorMethods = findMagicFields(metaMapper, metaMapper.getClass.getSuperclass) -} diff --git a/persistence/mapper/src/main/scala-2.13/net/liftweb/mapper/ManyToMany.scala b/persistence/mapper/src/main/scala-2.13/net/liftweb/mapper/ManyToMany.scala deleted file mode 100644 index 636dd633db..0000000000 --- a/persistence/mapper/src/main/scala-2.13/net/liftweb/mapper/ManyToMany.scala +++ /dev/null @@ -1,241 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mapper - -import common._ - -import scala.annotation.tailrec -import scala.language.existentials - -/** - * Add this trait to a Mapper to add support for many-to-many relationships - * @author nafg - */ -trait ManyToMany extends BaseKeyedMapper { - this: KeyedMapper[_, _] => - - private[this] type K = TheKeyType - private[this] type T = KeyedMapperType - - private var manyToManyFields: List[MappedManyToMany[_,_,_]] = Nil - - /** - * An override for save to propagate the save to all children - * of this parent. - * Returns false as soon as the parent or a one-to-many field returns false. - * If they are all successful returns true. - */ - abstract override def save: Boolean = { - super.save && manyToManyFields.forall(_.save) - } - - /** - * An override for delete_! to propogate the deletion to all children - * of this parent. - * Returns false as soon as the parent or a one-to-many field returns false. - * If they are all successful returns true. - */ - abstract override def delete_! : Boolean = { - super.delete_! && - manyToManyFields.forall( _.delete_!) - } - - - /** - * This is the base class to extend for fields that track many-to-many relationships. - * @param joinMeta The singleton of the join table - * @param thisField The foreign key in the join table that refers to this mapper's primaryKey. - * @param otherField The foreign key in the join table that refers to the other mapper's primaryKey - * @param otherMeta The singleton of the other mapper - * @param qp Any QueryParams to limit entries in the join table (other than matching thisField to primaryKey) - * To limit children based on fields in the other table (not the join table), it is currently necessary - * to point the join mapper to a view which pulls the join table's fields as well as fields of the other table. 
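Because such a field is a scala.collection.mutable.Buffer[T2], the relationship can be edited in place and persisted in one pass. A short sketch, reusing the hypothetical User / Role / UserRole mappers from the declaration example further up:

val admin = Role.create.name("admin").saveMe()

val u = User.create.email("alice@example.com")
u.roles += admin   // stages a UserRole join row; nothing is written yet
u.save             // saves the User first, then the field saves its join rows

Removing an element works the same way: the corresponding join row is marked for removal and deleted on the next save.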
- */ - class MappedManyToMany[O<:Mapper[O], K2, T2 <: KeyedMapper[K2,T2]]( - val joinMeta: MetaMapper[O], - thisField: MappedForeignKey[K,O,_ <: KeyedMapper[_,_]], - val otherField: MappedForeignKey[K2, O, T2], - val otherMeta: MetaMapper[T2], - val qp: QueryParam[O]*) extends scala.collection.mutable.Buffer[T2] { - - def otherFK[A](join: O)(f: MappedForeignKey[K2,O,T2] => A): A = - otherField.actualField(join) match { case mfk: MappedForeignKey[K2,O,T2] => f(mfk) } - - protected def children: List[T2] = joins.flatMap(otherFK(_)(_.obj)) - - protected var _joins: List[O] = _ - - /** - * Get the list of instances of joinMeta - */ - def joins: List[O] = _joins // read only to the public - protected var removedJoins: List[O] = Nil - - refresh - manyToManyFields ::= this - - protected def isJoinForChild(e: T2)(join: O): Boolean = otherField.actualField(join).get == e.primaryKeyField.get - - protected def joinForChild(e: T2): Option[O] = joins.find(isJoinForChild(e)) - - protected def own(e: T2): O = { - joinForChild(e).fold { - removedJoins - // first check if we can recycle a removed join - .find(otherField.actualField(_).get == e.primaryKeyField) - .fold{ - val newJoin = joinMeta.create - thisField.actualField(newJoin) match { - case mfk: MappedForeignKey[K, O, T] => mfk.set(primaryKeyField.get.asInstanceOf[K]) - } - otherFK(newJoin)(_.apply(e)) - newJoin - }{ removedJoin => - removedJoins = removedJoins filter removedJoin.ne - removedJoin // well, noLongerRemovedJoin... - } - }(join => join) - } - - protected def unown(e: T2): Option[O] = - joinForChild(e).map{ join => - removedJoins = join :: removedJoins - val o = otherField.actualField(join) - o.set(o.defaultValue) - thisField.actualField(join) match { case mfk => mfk set mfk.defaultValue } - join - } - - /** - * Get the List backing this Buffer. 
- */ - def all: List[T2] = children - - def length: Int = children.length - - def iterator: Iterator[T2] = children.iterator - - protected def childAt(n: Int): T2 = children(n) - def apply(n: Int): T2 = childAt(n) - def indexOf(e: T2): Int = children.indexWhere(e.eq) - - def insert(idx: Int, elem: T2): Unit = insertAll(idx, Seq(elem)) - - def insertAll(n: Int, traversable: IterableOnce[T2]): Unit = { - val ownedJoins = traversable.iterator.map(own) - val n2 = joins.indexWhere(isJoinForChild(children(n))) - val before = joins.take(n2) - val after = joins.drop(n2) - - _joins = before ++ ownedJoins ++ after - } - - def prepend(elem: T2): MappedManyToMany.this.type = { - _joins ::= own(elem) - this - } - - def addOne(elem: T2): MappedManyToMany.this.type = { - _joins ++= List(own(elem)) - this - } - - def update(n: Int, newelem: T2): Unit = { - unown(childAt(n)) match { - case Some(join) => - val n2 = joins.indexOf(join) - val (before, after) = (joins.take(n2), joins.drop(n2+1)) - _joins = before ++ List(own(newelem)) ++ after - case None => - } - } - - def remove(n: Int): T2 = { - val child = childAt(n) - unown(child).foreach(join => _joins = joins filterNot join.eq) - child - } - - override def remove(idx: Int, count: Int): Unit = { - if (count > 0) { - @tailrec - def loop(c: Int, a: List[T2]): List[T2] = - if (c == 0) childAt(idx) :: a - else loop(c - 1, childAt(idx + c) :: a) - - val joins0 = loop(count - 1, Nil) flatMap unown - _joins = joins filterNot joins0.contains - } - } - - override def patchInPlace(from: Int, patch: IterableOnce[T2], replaced: Int): MappedManyToMany.this.type = { - remove(from, replaced) - insertAll(from, patch) - this - } - - def clear(): Unit = { - children foreach unown - _joins = Nil - } - - /** - * Discard the cached state of this MappedManyToMany's children and reinitialize it from the database - */ - def refresh: List[T2] = { - val by = new Cmp[O, TheKeyType](thisField, OprEnum.Eql, Full(primaryKeyField.get.asInstanceOf[K]), Empty, Empty) - - _joins = joinMeta.findAll( (by :: qp.toList): _*) - all - } - - /** - * Save the state of this MappedManyToMany to the database. - * This will do the following: - * 1) Prune join table instances whose "child" foreign key's value is its defaultValue, i.e., -1 - * 2) Set all join table instances' "parent" foreign key - * 3) Delete all join table instances whose child instance was removed - * 4) Save all child instances - * 5) If step 3 succeeds save all join instances - * 6) Return true if steps 2-4 all returned true; otherwise false - */ - def save: Boolean = { - _joins = joins.filter { join => - otherFK(join)(f => f.get != f.defaultValue) - } - _joins foreach { - thisField.actualField(_).asInstanceOf[MappedForeignKey[K,O,X] forSome {type X <: KeyedMapper[K,X]}] set ManyToMany.this.primaryKeyField.get.asInstanceOf[K] - } - - removedJoins.forall {_.delete_!} & ( // continue saving even if deleting fails - children.forall(_.save) && - joins.forall(_.save) - ) - } - - /** - * Deletes all join rows, including those - * marked for removal. - * Returns true if both succeed, otherwise false - */ - def delete_! : Boolean = { - removedJoins.forall(_.delete_!) & - joins.forall(_.delete_!) 
- } - } -} diff --git a/persistence/mapper/src/main/scala-2.13/net/liftweb/mapper/OneToMany.scala b/persistence/mapper/src/main/scala-2.13/net/liftweb/mapper/OneToMany.scala deleted file mode 100644 index b868894f6c..0000000000 --- a/persistence/mapper/src/main/scala-2.13/net/liftweb/mapper/OneToMany.scala +++ /dev/null @@ -1,296 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb.mapper - -private[mapper] object RecursiveType { - val rec: { type R0 <: Mapper[R0] } = null - type Rec = rec.R0 -} -import RecursiveType._ - -/** - * Add this trait to a Mapper for managed one-to-many support - * For example: class Contact extends LongKeyedMapper[Contact] with OneToMany[Long, Contact] { ... } - * @tparam K the type of the primary key - * @tparam T the mapper type - * @author nafg - */ -trait OneToMany[K,T<:KeyedMapper[K, T]] extends KeyedMapper[K,T] { this: T => - - private[mapper] lazy val oneToManyFields: List[MappedOneToManyBase[Rec]] = { - new FieldFinder[MappedOneToManyBase[Rec]]( - getSingleton, - net.liftweb.common.Logger(classOf[OneToMany[K,T]]) - ).accessorMethods map (_.invoke(this).asInstanceOf[MappedOneToManyBase[Rec]]) - } - - /** - * An override for save to propagate the save to all children - * of this parent. - * Returns false as soon as the parent or a one-to-many field returns false. - * If they are all successful returns true. - */ - override def save: Boolean = { - val ret = super.save && - oneToManyFields.forall(_.save) - ret - } - - /** - * An override for delete_! to propagate the deletion - * to all children of one-to-many fields implementing Cascade. - * Returns false as soon as the parent or a one-to-many field returns false. - * If they are all successful returns true. - */ - override def delete_! : Boolean = DB.use(connectionIdentifier){ _ => - if(oneToManyFields.forall{(_: MappedOneToManyBase[_ <: Mapper[_]]) match { - case f: Cascade[_] => f.delete_! - case _ => true - } - }) - super.delete_! - else { - DB.rollback(connectionIdentifier) - false - } - } - - - /** - * This implicit allows a MappedForeignKey to be used as foreignKey function. - * Returns a function that takes a Mapper and looks up the actualField of field on the Mapper. 
- */ - implicit def foreignKey[K, O<:Mapper[O], T<:KeyedMapper[K,T]](field: MappedForeignKey[K,O,T]): O=>MappedForeignKey[K,O,T] = - field.actualField(_).asInstanceOf[MappedForeignKey[K,O,T]] - - /** - * Simple OneToMany support for children from the same table - */ - class MappedOneToMany[O <: Mapper[O]](meta: MetaMapper[O], foreign: MappedForeignKey[K,O,T], qp: QueryParam[O]*) - extends MappedOneToManyBase[O]( - ()=>{ - val ret = meta.findAll(By(foreign, primaryKeyField.get) :: qp.toList : _*) - for(child <- ret) { - foreign.actualField(child).asInstanceOf[MappedForeignKey[K,O,T]].primeObj(net.liftweb.common.Full(OneToMany.this : T)) - } - ret - }, - foreign - ) - - /** - * This is the base class to use for fields that represent one-to-many or parent-child relationships. - * Maintains a list of children, tracking pending additions and deletions, and - * keeping their foreign key pointed to this mapper. - * Implements Buffer, so the children can be managed as one. - * Most users will use MappedOneToMany, however to support children from multiple tables - * it is necessary to use MappedOneToManyBase. - * @param reloadFunc A function that returns a sequence of children from storage. - * @param foreign A function that gets the MappedForeignKey on the child that refers to this parent - */ - class MappedOneToManyBase[O <: Mapper[_]](val reloadFunc: () => Seq[O], - val foreign: O => MappedForeignKey[K,_,T]) extends scala.collection.mutable.Buffer[O] { - private var inited = false - private var _delegate: List[O] = _ - /** - * children that were added before the parent was ever saved - */ - private var unlinked: List[O] = Nil - protected def delegate: List[O] = { - if(!inited) { - refresh() - inited = true - } - _delegate - } - protected def delegate_=(d: List[O]): Unit = _delegate = d - - /** - * Takes ownership of e. Sets e's foreign key to our primary key - */ - protected def own(e: O): O = { - val f0 = foreign(e).asInstanceOf[Any] - f0 match { - case f: MappedLongForeignKey[O,T] with MappedForeignKey[K,_,T] => - f.apply(OneToMany.this) - case f: MappedForeignKey[K,_,T] => - f.set(OneToMany.this.primaryKeyField.get) - } - if(!OneToMany.this.saved_?) - unlinked ::= e - e - } - /** - * Relinquishes ownership of e. Resets e's foreign key to its default value. 
- */ - protected def unown(e: O): O = { - val f = foreign(e) - f.set(f.defaultValue) - unlinked = unlinked filter {e.ne} - e - } - /** - * Returns the backing List - */ - def all: List[O] = delegate - - // 2.8: return this - def addOne(elem: O): MappedOneToManyBase.this.type = { - delegate = delegate ++ List(own(elem)) - this - } - // 2.7 - //def readOnly = all - def length: Int = delegate.length - // 2.7 - //def elements = delegate.elements - // 2.8 - def iterator: Iterator[O] = delegate.iterator - - def apply(n: Int): O = delegate(n) - - // 2.7 - /* def +:(elem: O) = { - delegate ::= own(elem) - this - } */ - // 2.8 - def prepend(elem: O): MappedOneToManyBase.this.type = { - delegate ::= own(elem) - this - } - - override def indexOf[B >: O](e: B): Int = delegate.indexWhere(e.asInstanceOf[AnyRef].eq) - - override def insert(idx: Int, elem: O): Unit = insertAll(idx, List(elem)) - - // 2.7 - // def insertAll(n: Int, iter: Iterable[O]) { - // 2.8 - def insertAll(n: Int, iter: IterableOnce[O]): Unit = { - val (before, after) = delegate.splitAt(n) - delegate = before ++ iter.iterator.map(own) ++ after - } - - def patchInPlace(from: Int, patch: IterableOnce[O], replaced: Int): MappedOneToManyBase.this.type = { - val endIds = from + replaced - delegate.slice(from, endIds).foreach(unown) - delegate = delegate.take(from) ++ patch.iterator.map(own) ++ delegate.drop(endIds) - this - } - - def update(n: Int, newelem: O): Unit = { - unown(delegate(n)) - delegate = delegate.take(n) ++ List(own(newelem)) ++ delegate.drop(n+1) - } - - def remove(n: Int): O = { - val e = unown(delegate(n)) - delegate = delegate.filterNot(e.eq) - e - } - - def remove(idx: Int, count: Int): Unit = { - val endIds = idx + count - delegate.slice(idx, endIds).foreach(unown) - delegate = delegate.take(idx) ++ delegate.drop(endIds) - } - - def clear(): Unit = { - while(delegate.nonEmpty) - remove(0) - } - - /** - * Reloads the children from storage. - * NOTE: This may leave children in an inconsistent state. - * It is recommended to call save or clear() before calling refresh. - */ - def refresh(): Unit = { - delegate = reloadFunc().toList - if(saved_?) - unlinked = Nil - else - unlinked = _delegate - } - - /** - * Saves this "field," i.e., all the children it represents. - * Returns false as soon as save on a child returns false. - * Returns true if all children were saved successfully. - */ - def save: Boolean = { - unlinked foreach {u => - val f = foreign(u) - if(f.obj.map(_ eq OneToMany.this) openOr true) // obj is Empty or this - f.apply(OneToMany.this) - } - unlinked = Nil - delegate = delegate.filter {e => - foreign(e).get == OneToMany.this.primaryKeyField.get || - foreign(e).obj.map(_ eq OneToMany.this).openOr(false) // obj is this but not Empty - } - delegate.forall(_.save) - } - - override def toString: String = { - val c = getClass.getSimpleName - val l = c.lastIndexOf("$") - c.substring(c.lastIndexOf("$",l-1)+1, l) + delegate.mkString("[",", ","]") - } - } - - /** - * Adds behavior to delete orphaned fields before save. 
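This mixin, together with Cascade described below, is stacked onto a concrete field declaration. A sketch, placed inside the hypothetical Contact mapper from the earlier OneToMany example:

object numbers extends MappedOneToMany(PhoneNumber, PhoneNumber.contact)
  with Owned[PhoneNumber]    // children that were un-linked are deleted before save
  with Cascade[PhoneNumber]  // remaining children are deleted when the Contact is deleted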
- */ - trait Owned[O<:Mapper[_]] extends MappedOneToManyBase[O] { - var removed: List[O] = Nil - override def unown(e: O) = { - removed = e :: removed - super.unown(e) - } - override def own(e: O) = { - removed = removed filter {e.ne} - super.own(e) - } - override def save: Boolean = { - val unowned = removed.filter{ e => - val f = foreign(e) - f.get == f.defaultValue - } - unowned foreach {_.delete_!} - super.save - } - } - - /** - * Trait that indicates that the children represented - * by this field should be deleted when the parent is deleted. - */ - trait Cascade[O<:Mapper[_]] extends MappedOneToManyBase[O] { - def delete_! : Boolean = { - delegate.forall { e => - if(foreign(e).get == - OneToMany.this.primaryKeyField.get) { - e.delete_! - } - else - true // doesn't constitute a failure - } - } - } -} diff --git a/persistence/mapper/src/main/scala/net/liftweb/mapper/AjaxMapper.scala b/persistence/mapper/src/main/scala/net/liftweb/mapper/AjaxMapper.scala deleted file mode 100644 index 565794bced..0000000000 --- a/persistence/mapper/src/main/scala/net/liftweb/mapper/AjaxMapper.scala +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright 2009-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mapper - -import scala.xml.Node -import net.liftweb.http.SHtml - -/** - * This trait can be added to existing Mapper fields to make them use AjaxUtils.editable - * for field display. - */ -trait AjaxEditableField[FieldType,OwnerType <: Mapper[OwnerType]] extends MappedField[FieldType,OwnerType] { - override def asHtml : Node = - if (editableField) { - { - toForm.map { form => - SHtml.ajaxEditable(super.asHtml, form, () => {fieldOwner.save; onSave(); net.liftweb.http.js.JsCmds.Noop}) - } openOr super.asHtml - } - } else { - super.asHtml - } - - /** This method is called when the element's data are saved. The default is to do nothing */ - def onSave(): Unit = {} - - /** This method allows you to do programmatic control of whether the field will display - * as editable. The default is true */ - def editableField = true -} - diff --git a/persistence/mapper/src/main/scala/net/liftweb/mapper/CRUDify.scala b/persistence/mapper/src/main/scala/net/liftweb/mapper/CRUDify.scala deleted file mode 100644 index 9ef7d5e594..0000000000 --- a/persistence/mapper/src/main/scala/net/liftweb/mapper/CRUDify.scala +++ /dev/null @@ -1,139 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package mapper - -import util._ -import common._ - -import scala.xml._ - -/** - * This trait automatically adds CRUD (Create, read, update and delete) operations - * to an existing MetaMapper object. Various methods can be overridden to - * customize which operations are available to a user and how things are displayed. - * For example, you can disable deletion of entities by overriding deleteMenuLoc to Empty. - * - * Note: Compilation will fail if you try to mix this into a Mapper instead of the - * associated MetaMapper. You have been warned. - */ -trait CRUDify[KeyType, CrudType <: KeyedMapper[KeyType, CrudType]] extends - net.liftweb.proto.Crudify { - self: CrudType with KeyedMetaMapper[KeyType, CrudType] => - - /** - * What's the record type for the underlying CRUDify? - */ - type TheCrudType = CrudType - - /** - * What's a field pointer for the underlying CRUDify - */ - type FieldPointerType = MappedField[_, CrudType] - - /** - * Given a field pointer and an instance, get the field on that instance - */ - protected def computeFieldFromPointer(instance: TheCrudType, pointer: FieldPointerType): Box[BaseField] = Full(getActualField(instance, pointer)) - - /** - * Given a String that represents the primary key, find an instance of - * TheCrudType - */ - def findForParam(in: String): Box[TheCrudType] = find(in) - - /** - * Get a List of items from the databased - */ - def findForList(start: Long, count: Int): List[TheCrudType] = - findAll(StartAt[CrudType](start) :: MaxRows[CrudType](count) :: - findForListParams :_*) - - /** - * What are the query parameters? Default to ascending on primary key - */ - def findForListParams: List[QueryParam[CrudType]] = - List(OrderBy(primaryKeyField, Ascending)) - - /** - * The fields to be displayed. By default all the displayed fields, - * but this list - * can be shortened. - */ - def fieldsForDisplay: List[MappedField[_, CrudType]] = - mappedFieldsForModel.filter(_.dbDisplay_?) - - /** - * What's the prefix for this CRUD. Typically the table name - */ - def calcPrefix = List(_dbTableNameLC) - - - protected class MyBridge(in: CrudType) extends CrudBridge { - /** - * Delete the instance of TheCrudType from the backing store - */ - def delete_! : Boolean = in.delete_! - - /** - * Save an instance of TheCrudType in backing store - */ - def save : Boolean = in.save - - /** - * Validate the fields in TheCrudType and return a List[FieldError] - * representing the errors. - */ - def validate: List[FieldError] = in.validate - - /** - * Return a string representation of the primary key field - */ - def primaryKeyFieldAsString: String = in.primaryKeyField.toString - } - - /** - * This method will instantiate a bridge from TheCrudType so - * that the appropriate logical operations can be performed - * on TheCrudType - */ - protected implicit def buildBridge(from: TheCrudType): CrudBridge = - new MyBridge(from) - - protected class MyPointer(in: MappedField[_, CrudType]) extends FieldPointerBridge { - /** - * What is the display name of this field? - */ - def displayHtml: NodeSeq = in.displayHtml - } - - /** - * Based on a FieldPointer, build a FieldPointerBridge - */ - protected implicit def buildFieldBridge(from: FieldPointerType): FieldPointerBridge = new MyPointer(from) - - -} - - -/** - * A specialization of CRUDify for LongKeyedMetaMappers. 
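A usage sketch with a hypothetical Book mapper; per the note above, the trait is mixed into the MetaMapper object, not into the Mapper class:

import net.liftweb.common.Empty
import net.liftweb.mapper._

class Book extends LongKeyedMapper[Book] with IdPK {
  def getSingleton = Book
  object title extends MappedString(this, 256)
}

object Book extends Book with LongKeyedMetaMapper[Book] with CRUDify[Long, Book] {
  // per the scaladoc above: returning Empty removes the delete screen
  override def deleteMenuLoc = Empty
}

// In Boot (sketch): the generated list/create/view/edit menus are added to the
// SiteMap, e.g. LiftRules.setSiteMap(SiteMap(Book.menus: _*))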
- */ -trait LongCRUDify[CrudType <: KeyedMapper[Long, CrudType]] extends CRUDify[Long, CrudType] { - self: CrudType with KeyedMetaMapper[Long, CrudType] => -} - diff --git a/persistence/mapper/src/main/scala/net/liftweb/mapper/DB.scala b/persistence/mapper/src/main/scala/net/liftweb/mapper/DB.scala deleted file mode 100644 index d6301d2dd0..0000000000 --- a/persistence/mapper/src/main/scala/net/liftweb/mapper/DB.scala +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mapper - -import http.S - -object DB extends db.DB1 { - db.DB.queryCollector = { - case (query, time) => - query.statementEntries.foreach{ case db.DBLogEntry(stmt, duration) => S.logQuery(stmt, duration) } - } -} diff --git a/persistence/mapper/src/main/scala/net/liftweb/mapper/HasManyThrough.scala b/persistence/mapper/src/main/scala/net/liftweb/mapper/HasManyThrough.scala deleted file mode 100644 index 3fa143467f..0000000000 --- a/persistence/mapper/src/main/scala/net/liftweb/mapper/HasManyThrough.scala +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mapper - -import util.FatLazy -import common._ - -import scala.collection.mutable - -class HasManyThrough[From <: KeyedMapper[ThroughType, From], - To <: Mapper[To], - Through <: Mapper[Through], - ThroughType <: Any]( - owner: From, - otherSingleton: MetaMapper[To], - through: MetaMapper[Through], - throughFromField: MappedField[ThroughType, Through], - throughToField: MappedField[ThroughType, Through]) - extends LifecycleCallbacks { - private var theSetList: Seq[ThroughType] = Nil - - private val others = FatLazy[List[To]] { - DB.use(owner.connectionIdentifier) { conn => - val query = "SELECT DISTINCT "+otherSingleton._dbTableNameLC+".* FROM "+otherSingleton._dbTableNameLC+","+ - through._dbTableNameLC+" WHERE "+ - otherSingleton._dbTableNameLC+"."+otherSingleton.indexedField(otherSingleton.asInstanceOf[To]).openOrThrowException("legacy code")._dbColumnNameLC+" = "+ - through._dbTableNameLC+"."+throughToField._dbColumnNameLC+" AND "+ - through._dbTableNameLC+"."+throughFromField._dbColumnNameLC+" = ?" - DB.prepareStatement(query, conn) { st => - owner.getSingleton.indexedField(owner).map { indVal => - if (indVal.dbIgnoreSQLType_?) 
- st.setObject(1, indVal.jdbcFriendly) - else - st.setObject(1, indVal.jdbcFriendly, indVal.targetSQLType) - - DB.exec(st) { rs => - otherSingleton.createInstances(owner.connectionIdentifier, rs, Empty, Empty) - } - } openOr Nil - } - } - } - - def apply(): List[To] = others.get - - def get: List[To] = this() - - def reset(): Unit = others.reset - - def set(what: Seq[ThroughType]): Seq[ThroughType] = { - theSetList = what - theSetList - } - - override def beforeDelete: Unit = { - through.findAll(By(throughFromField, owner.primaryKeyField.get)).foreach { - toDelete => toDelete.delete_! - } - } - - override def afterUpdate: Unit = { - val current = through.findAll(By(throughFromField, owner.primaryKeyField.get)) - - val newKeys = new mutable.HashSet[ThroughType] - - theSetList.foreach(i => newKeys += i) - val toDelete = current.filter(c => !newKeys.contains(throughToField.actualField(c).get)) - toDelete.foreach(_.delete_!) - - val oldKeys = new mutable.HashSet[ThroughType] - current.foreach(i => oldKeys += throughToField.actualField(i).get) - - theSetList.toList.distinct.filter(i => !oldKeys.contains(i)).foreach { i => - val toCreate = through.createInstance - throughFromField.actualField(toCreate).set(owner.primaryKeyField.get) - throughToField.actualField(toCreate).set(i) - toCreate.save - } - - theSetList = Nil - others.reset - super.afterUpdate - } - - override def afterCreate: Unit = { - theSetList.toList.distinct.foreach { i => - val toCreate = through.createInstance - throughFromField.actualField(toCreate)(owner.primaryKeyField.get) - throughToField.actualField(toCreate)(i) - toCreate.save - } - theSetList = Nil - others.reset - super.afterCreate - } -} - diff --git a/persistence/mapper/src/main/scala/net/liftweb/mapper/JSONComet.scala b/persistence/mapper/src/main/scala/net/liftweb/mapper/JSONComet.scala deleted file mode 100644 index 4a2778ea43..0000000000 --- a/persistence/mapper/src/main/scala/net/liftweb/mapper/JSONComet.scala +++ /dev/null @@ -1,210 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
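A usage sketch for the HasManyThrough helper above, assuming hypothetical Author and AuthorBook mappers and reusing the hypothetical Book mapper from the CRUDify sketch:

class AuthorBook extends LongKeyedMapper[AuthorBook] with IdPK {
  def getSingleton = AuthorBook
  object author extends MappedLongForeignKey(this, Author)
  object book   extends MappedLongForeignKey(this, Book)
}
object AuthorBook extends AuthorBook with LongKeyedMetaMapper[AuthorBook]

class Author extends LongKeyedMapper[Author] with IdPK {
  def getSingleton = Author
  object name extends MappedString(this, 64)

  // owner, other side's meta, join meta, join FK back to this mapper, join FK to the other side
  lazy val books = new HasManyThrough(this, Book, AuthorBook, AuthorBook.author, AuthorBook.book)
}
object Author extends Author with LongKeyedMetaMapper[Author]

// books() loads the related Book rows through the join table; books.set(...)
// takes the related primary keys, and the join rows are rewritten after the
// Author is created or updated.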
- */ - -package net.liftweb -package mapper - -import net.liftweb.common._ -import net.liftweb.util._ -import Helpers._ -import net.liftweb.http.js._ -import JsCmds._ -import JE._ -import net.liftweb.http._ -import scala.xml.NodeSeq - -/** -* Allows for the creation of JSON-based JavaScript widgets with very little -* code -*/ -/* -trait JsonComet { - self: CometActor => - - val keyStore = new KeyObfuscator - - trait JsonBridge[KeyType, FieldType, KMType <: KeyedMapper[KeyType, KMType]] { - def meta: KeyedMetaMapper[KeyType, KMType] - - def field: MappedField[FieldType, KMType] - - val FieldId = Helpers.nextFuncName - val handler: PartialFunction[Any, JsCmd] = { - case JsonCmd(FieldId, target, value, _) => - (for (key <- keyStore.recover(meta, target); - obj <- meta.find(key); - cannedNewValue <- Box(cvt)(value); - newValue <- cannedNewValue - ) yield { - val record = meta.getActualField(obj, field)(newValue) - record.validate match { - case Nil => record.save // FIXME notice updated - Noop - case xs => // FIXME display errors - Noop - } - }) openOr Noop - } - - def theCall(value: JsExp) = jsonCall(FieldId, JsVar("it", meta.primaryKeyField.name), value) - - - def cvt: PartialFunction[Any, Box[FieldType]] - - self.appendJsonHandler(handler) - } - - class JxCheckbox[KeyType, KMType <: KeyedMapper[KeyType, KMType]](val meta: KeyedMetaMapper[KeyType, KMType], - val field: MappedField[Boolean, KMType]) extends JxNodeBase with JsonBridge[KeyType, Boolean, KMType] { - - def child = Nil - - def appendToParent(parentName: String) = { - (renderExp).appendToParent(parentName) - } - - def renderExp: JsExp = (Jx(buildCheckbox).toJs ~> JsFunc("apply", JsRaw("null"), JsRaw("[it]"))) - - def buildCheckbox = - - def cvt: PartialFunction[Any, Box[Boolean]] = { - case b: Boolean => Full(b) - case "on" => Full(true) - case "off" => Full(false) - case x => Full(toBoolean(x)) - } - } - - class JxTextfield[KeyType, KMType <: KeyedMapper[KeyType, KMType]](val meta: KeyedMetaMapper[KeyType, KMType], - val field: MappedField[String, KMType]) extends JxNodeBase with JsonBridge[KeyType, String, KMType] { - - def child = Nil - - def appendToParent(parentName: String) = { - (renderExp).appendToParent(parentName) - } - - def renderExp: JsExp = Jx(buildInput).toJs ~> JsFunc("apply", JsRaw("null"), JsRaw("[it]")) - - def buildInput: NodeSeq = - - def onBlurCmd: JsCmd = theCall(JsRaw("this.value")) - - def cvt: PartialFunction[Any, Box[String]] = { - case null => Empty - case x => Full(x.toString) - } - } - - abstract class JxSelect[KeyType, FieldType, KMType <: KeyedMapper[KeyType, KMType]](val meta: KeyedMetaMapper[KeyType, KMType], - val field: MappedField[FieldType, KMType], val enum: List[(String, FieldType)]) extends JxNodeBase with JsonBridge[KeyType, FieldType, KMType] { - - def child = Nil - - def appendToParent(parentName: String) = { - (renderExp).appendToParent(parentName) - } - - def renderExp: JsExp = Jx(buildInput).toJs ~> JsFunc("apply", JsRaw("null"), JsRaw("[it]")) - - def buildInput: NodeSeq = - - def buildLine(v: (String, FieldType)) = - JxIfElse(JsRaw("it."+field.name+" == "+v._2), - , - ) - - def onChangeCmd: JsCmd = theCall(JsRaw("this.options[this.selectedIndex].value")) & JsRaw("this.blur()") - - - def values: List[(String, FieldType)] = enum - } - - abstract class JxBuiltSelect[KeyType, FieldType, KMType <: KeyedMapper[KeyType, KMType]](val meta: KeyedMetaMapper[KeyType, KMType], - val field: MappedField[FieldType, KMType]) extends JxNodeBase with JsonBridge[KeyType, FieldType, KMType] { - - def 
child = Nil - - def appendToParent(parentName: String) = { - (renderExp).appendToParent(parentName) - } - - def renderExp: JsExp = Jx(buildInput).toJs ~> JsFunc("apply", JsRaw("null"), JsRaw("[it]")) - - /** - * A JavaScript expression that builds an array of Name, Value pairs for valid - * select box stuff - */ - def buildMapList: JsExp - - def buildInput: NodeSeq = - - def buildLine = - Jx(JxIfElse(JsRaw("current."+field.name+" == it[1]"), - , - )) - - def onChangeCmd: JsCmd = theCall(JsRaw("this.options[this.selectedIndex].value")) & JsRaw("this.blur()") - - } - - class JxEnumSelect[KeyType, Enum <: Enumeration, KMType <: KeyedMapper[KeyType, KMType]](val meta: KeyedMetaMapper[KeyType, KMType], - val field: MappedEnum[KMType, Enum], val enum: Enum) extends JxNodeBase with JsonBridge[KeyType, Enum#Value, KMType] { - - def child = Nil - - def appendToParent(parentName: String) = { - (renderExp).appendToParent(parentName) - } - - def renderExp: JsExp = Jx(buildInput).toJs ~> JsFunc("apply", JsRaw("null"), JsRaw("[it]")) - - def buildInput: NodeSeq = - - def buildLine(v: Enum#Value) = - JxIfElse(JsRaw("it."+field.name+" == "+v.id), - , - ) - - def onChangeCmd: JsCmd = theCall(JsRaw("this.options[this.selectedIndex].value")) & JsRaw("this.blur()") - - def cvt: PartialFunction[Any, Box[Enum#Value]] = { - case null => Empty - case x: Int => tryo(enum(x)) - case x: String => tryo(x.toInt).flatMap(i => tryo(enum(i))) - case _ => Empty - } - - def values: List[Enum#Value] = enum.iterator.toList - } - -} -*/ diff --git a/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedBinary.scala b/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedBinary.scala deleted file mode 100644 index 08c34f952f..0000000000 --- a/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedBinary.scala +++ /dev/null @@ -1,411 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package mapper - -import java.sql.Types -import java.lang.reflect.Method -import java.util.Date -import net.liftweb.util._ -import Helpers._ -import net.liftweb.common._ -import net.liftweb.http.js._ -import net.liftweb.json._ -import scala.reflect.runtime.universe._ -import scala.xml.{Text, NodeSeq} -import json.JsonAST.JValue - - -abstract class MappedBinary[T<:Mapper[T]](val fieldOwner: T) extends MappedField[Array[Byte], T] { - private val data : FatLazy[Array[Byte]] = FatLazy(defaultValue) - private val orgData: FatLazy[Array[Byte]] = FatLazy(defaultValue) - - protected def real_i_set_!(value : Array[Byte]) : Array[Byte] = { - data() = value - this.dirty_?( true) - value - } - - def manifest: TypeTag[Array[Byte]] = typeTag[Array[Byte]] - - /** - * Get the source field metadata for the field - * @return the source field metadata for the field - */ - def sourceInfoMetadata(): SourceFieldMetadata{type ST = Array[Byte]} = - SourceFieldMetadataRep(name, manifest, new FieldConverter { - /** - * The type of the field - */ - type T = Array[Byte] - - /** - * Convert the field to a String - * @param v the field value - * @return the string representation of the field value - */ - def asString(v: T): String = "" - - /** - * Convert the field into NodeSeq, if possible - * @param v the field value - * @return a NodeSeq if the field can be represented as one - */ - def asNodeSeq(v: T): Box[NodeSeq] = Empty - - /** - * Convert the field into a JSON value - * @param v the field value - * @return the JSON representation of the field - */ - def asJson(v: T): Box[JValue] = Empty - - /** - * If the field can represent a sequence of SourceFields, - * get that - * @param v the field value - * @return the field as a sequence of SourceFields - */ - def asSeq(v: T): Box[Seq[SourceFieldInfo]] = Empty - }) - - def dbFieldClass: Class[Array[Byte]] = classOf[Array[Byte]] - - /** - * Get the JDBC SQL Type for this field - */ - // def getTargetSQLType(field : String) = Types.BINARY - def targetSQLType: Int = Types.BINARY - - def defaultValue: Array[Byte] = null - override def writePermission_? = true - override def readPermission_? = true - - protected def i_is_! : Array[Byte] = data.get - - protected def i_was_! : Array[Byte] = orgData.get - - protected[mapper] def doneWithSave(): Unit = {orgData.setFrom(data)} - - protected def i_obscure_!(in : Array[Byte]) : Array[Byte] = { - new Array[Byte](0) - } - - override def renderJs_? 
= false - - def asJsExp: JsExp = throw new NullPointerException("No way") - - def asJsonValue: Box[JsonAST.JValue] = Full(get match { - case null => JsonAST.JNull - case value => JsonAST.JString(base64Encode(value)) - }) - - override def setFromAny(f: Any): Array[Byte] = f match { - case null | JsonAST.JNull => this.set(null) - case JsonAST.JString(base64) => this.set(base64Decode(base64)) - case array: Array[Byte] => this.set(array) - case s => this.set(s.toString.getBytes("UTF-8")) - } - - def jdbcFriendly(field : String) : Object = get - - def real_convertToJDBCFriendly(value: Array[Byte]): Object = value - - def buildSetActualValue(accessor: Method, inst: AnyRef, columnName: String): (T, AnyRef) => Unit = - (inst, v) => doField(inst, accessor, {case f: MappedBinary[T] => - val toSet = v match { - case null => null - case ba: Array[Byte] => ba - case other => other.toString.getBytes("UTF-8") - } - f.data() = toSet - f.orgData() = toSet - }) - - def buildSetLongValue(accessor : Method, columnName : String): (T, Long, Boolean) => Unit = null - def buildSetStringValue(accessor : Method, columnName : String): (T, String) => Unit = null - def buildSetDateValue(accessor : Method, columnName : String): (T, Date) => Unit = null - def buildSetBooleanValue(accessor : Method, columnName : String): (T, Boolean, Boolean) => Unit = null - - /** - * Given the driver type, return the string required to create the column in the database - */ - def fieldCreatorString(dbType: DriverType, colName: String): String = colName + " " + dbType.binaryColumnType + notNullAppender() -} - -abstract class MappedText[T<:Mapper[T]](val fieldOwner: T) extends MappedField[String, T] { - private val data : FatLazy[String] = FatLazy(defaultValue) - private val orgData: FatLazy[String] = FatLazy(defaultValue) - - protected def real_i_set_!(value: String): String = { - data() = value - this.dirty_?( true) - value - } - - - def manifest: TypeTag[String] = typeTag[String] - - /** - * Get the source field metadata for the field - * @return the source field metadata for the field - */ - def sourceInfoMetadata(): SourceFieldMetadata{type ST = String} = - SourceFieldMetadataRep(name, manifest, new FieldConverter { - /** - * The type of the field - */ - type T = String - - /** - * Convert the field to a String - * @param v the field value - * @return the string representation of the field value - */ - def asString(v: T): String = v - - /** - * Convert the field into NodeSeq, if possible - * @param v the field value - * @return a NodeSeq if the field can be represented as one - */ - def asNodeSeq(v: T): Box[NodeSeq] = Full(Text(v)) - - /** - * Convert the field into a JSON value - * @param v the field value - * @return the JSON representation of the field - */ - def asJson(v: T): Box[JValue] = Full(JString(v)) - - /** - * If the field can represent a sequence of SourceFields, - * get that - * @param v the field value - * @return the field as a sequence of SourceFields - */ - def asSeq(v: T): Box[Seq[SourceFieldInfo]] = Empty - }) - - def dbFieldClass: Class[String] = classOf[String] - - /** - * Get the JDBC SQL Type for this field - */ - // def getTargetSQLType(field : String) = Types.BINARY - def targetSQLType: Int = Types.VARCHAR - - def defaultValue: String = null - override def writePermission_? = true - override def readPermission_? = true - - protected def i_is_! : String = data.get - - protected def i_was_! 
: String = orgData.get - - protected[mapper] def doneWithSave(): Unit = {orgData.setFrom(data)} - - def asJsExp: JsExp = JE.Str(get) - - def asJsonValue: Box[JsonAST.JValue] = Full(get match { - case null => JsonAST.JNull - case str => JsonAST.JString(str) - }) - - protected def i_obscure_!(in: String): String = "" - - override def setFromAny(in: Any): String = { - in match { - case JsonAST.JNull => this.set(null) - case JsonAST.JString(str) => this.set(str) - case seq: Seq[_] if seq.nonEmpty => seq.map(setFromAny).head - case (s: String) :: _ => this.set(s) - case s :: _ => this.setFromAny(s) - case null => this.set(null) - case s: String => this.set(s) - case Some(s: String) => this.set(s) - case Full(s: String) => this.set(s) - case None | Empty | Failure(_, _, _) => this.set(null) - case o => this.set(o.toString) - } - } - - def jdbcFriendly(field : String): Object = real_convertToJDBCFriendly(data.get) - - def real_convertToJDBCFriendly(value: String): Object = value match { - case null => null - case s => s - } - - def buildSetActualValue(accessor: Method, inst: AnyRef, columnName: String): (T, AnyRef) => Unit = - (inst, v) => doField(inst, accessor, {case f: MappedText[T] => - val toSet = v match { - case null => null - case s: String => s - case ba: Array[Byte] => new String(ba, "UTF-8") - case clob: java.sql.Clob => clob.getSubString(1,clob.length.toInt) - case other => other.toString - } - f.data() = toSet - f.orgData() = toSet - }) - - def buildSetLongValue(accessor : Method, columnName : String): (T, Long, Boolean) => Unit = null - def buildSetStringValue(accessor : Method, columnName : String): (T, String) => Unit = (inst, v) => doField(inst, accessor, {case f: MappedText[T] => - val toSet = v - f.data() = toSet - f.orgData() = toSet - }) - def buildSetDateValue(accessor : Method, columnName : String): (T, Date) => Unit = null - def buildSetBooleanValue(accessor : Method, columnName : String): (T, Boolean, Boolean) => Unit = null - - /** - * Given the driver type, return the string required to create the column in the database - */ - def fieldCreatorString(dbType: DriverType, colName: String): String = colName + " " + dbType.clobColumnType + notNullAppender() -} - -abstract class MappedFakeClob[T<:Mapper[T]](val fieldOwner: T) extends MappedField[String, T] { - private val data : FatLazy[String] = FatLazy(defaultValue) - private val orgData: FatLazy[String] = FatLazy(defaultValue) - - protected def real_i_set_!(value: String): String = { - data() = value - this.dirty_?( true) - value - } - - def dbFieldClass: Class[String] = classOf[String] - - def manifest: TypeTag[String] = typeTag[String] - - /** - * Get the source field metadata for the field - * @return the source field metadata for the field - */ - def sourceInfoMetadata(): SourceFieldMetadata{type ST = String} = - SourceFieldMetadataRep(name, manifest, new FieldConverter { - /** - * The type of the field - */ - type T = String - - /** - * Convert the field to a String - * @param v the field value - * @return the string representation of the field value - */ - def asString(v: T): String = v - - /** - * Convert the field into NodeSeq, if possible - * @param v the field value - * @return a NodeSeq if the field can be represented as one - */ - def asNodeSeq(v: T): Box[NodeSeq] = Full(Text(v)) - - /** - * Convert the field into a JSON value - * @param v the field value - * @return the JSON representation of the field - */ - def asJson(v: T): Box[JValue] = Full(JString(v)) - - /** - * If the field can represent a sequence of 
SourceFields, - * get that - * @param v the field value - * @return the field as a sequence of SourceFields - */ - def asSeq(v: T): Box[Seq[SourceFieldInfo]] = Empty - }) - - - /** - * Get the JDBC SQL Type for this field - */ - // def getTargetSQLType(field : String) = Types.BINARY - def targetSQLType: Int = Types.BINARY - - def defaultValue: String = null - override def writePermission_? = true - override def readPermission_? = true - - protected def i_is_! : String = data.get - - protected def i_was_! : String = orgData.get - - protected[mapper] def doneWithSave(): Unit = {orgData.setFrom(data)} - - protected def i_obscure_!(in: String): String = "" - - def asJsExp: JsExp = JE.Str(get) - - def asJsonValue: Box[JsonAST.JValue] = Full(get match { - case null => JsonAST.JNull - case str => JsonAST.JString(str) - }) - - override def setFromAny(in: Any): String = { - in match { - case JsonAST.JNull => this.set(null) - case JsonAST.JString(str) => this.set(str) - case seq: Seq[_] if seq.nonEmpty => seq.map(setFromAny).head - case (s: String) :: _ => this.set(s) - case s :: _ => this.setFromAny(s) - case null => this.set(null) - case s: String => this.set(s) - case Some(s: String) => this.set(s) - case Full(s: String) => this.set(s) - case None | Empty | Failure(_, _, _) => this.set(null) - case o => this.set(o.toString) - } - } - - def jdbcFriendly(field : String): Object = real_convertToJDBCFriendly(data.get) - - def real_convertToJDBCFriendly(value: String): Object = value match { - case null => null - case s => s.getBytes("UTF-8") - } - - def buildSetActualValue(accessor: Method, inst: AnyRef, columnName: String): (T, AnyRef) => Unit = - (inst, v) => doField(inst, accessor, {case f: MappedFakeClob[T] => - val toSet = v match { - case null => null - case ba: Array[Byte] => new String(ba, "UTF-8") - case clob: java.sql.Clob => clob.getSubString(1,clob.length.toInt) - case other => other.toString - } - f.data() = toSet - f.orgData() = toSet - }) - - def buildSetLongValue(accessor : Method, columnName : String): (T, Long, Boolean) => Unit = null - def buildSetStringValue(accessor : Method, columnName : String): (T, String) => Unit = (inst, v) => doField(inst, accessor, {case f: MappedFakeClob[T] => - val toSet = v - f.data() = toSet - f.orgData() = toSet - }) - def buildSetDateValue(accessor : Method, columnName : String): (T, Date) => Unit = null - def buildSetBooleanValue(accessor : Method, columnName : String): (T, Boolean, Boolean) => Unit = null - - /** - * Given the driver type, return the string required to create the column in the database - */ - def fieldCreatorString(dbType: DriverType, colName: String): String = colName + " " + dbType.binaryColumnType + notNullAppender() -} diff --git a/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedBoolean.scala b/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedBoolean.scala deleted file mode 100644 index 9c526e5520..0000000000 --- a/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedBoolean.scala +++ /dev/null @@ -1,168 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
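Editor's note: the hunk above removes `MappedBinary`, `MappedText`, and `MappedFakeClob`, which each back a single BINARY or CLOB column and expose their value through `setFromAny`/`asJsonValue` (base64 in the binary case). Below is a minimal sketch of how such fields were declared and exercised against the pre-removal `net.liftweb.mapper` API; the `Document` model and its field names are hypothetical, and a `lift-mapper` dependency is assumed.

```scala
import net.liftweb.mapper._

// Hypothetical model; only illustrates the deleted field types.
class Document extends LongKeyedMapper[Document] with IdPK {
  def getSingleton = Document

  // BINARY column; asJsonValue base64-encodes the bytes and
  // setFromAny accepts a JString carrying base64 text.
  object payload extends MappedBinary(this)

  // CLOB column; plain text in and out.
  object body extends MappedText(this)
}

object Document extends Document with LongKeyedMetaMapper[Document]

object DocumentDemo {
  def main(args: Array[String]): Unit = {
    val doc = Document.create            // no DB connection needed until save()
    doc.payload(Array[Byte](1, 2, 3))
    doc.body("hello")
    println(doc.payload.asJsonValue)     // Full(JString(AQID)) -- bytes rendered as base64
    println(doc.body.get)                // hello
  }
}
```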
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mapper - -import java.sql.Types -import java.lang.reflect.Method -import net.liftweb.util.Helpers._ -import net.liftweb.http.{S, SHtml} -import java.util.Date -import net.liftweb.util._ -import net.liftweb.common._ -import net.liftweb.json._ -import net.liftweb.http.js._ -import scala.xml._ -import scala.reflect.runtime.universe._ -import json.JsonAST.JValue - -abstract class MappedBoolean[T<:Mapper[T]](val fieldOwner: T) extends MappedField[Boolean, T] { - private var data : Box[Boolean] = Full(defaultValue) - private var orgData: Box[Boolean] = Full(defaultValue) - - def defaultValue: Boolean = false - - def dbFieldClass = classOf[Boolean] - - /** - * Get the JDBC SQL Type for this field - */ - def targetSQLType = Types.BOOLEAN - - - def manifest: TypeTag[Boolean] = typeTag[Boolean] - - /** - * Get the source field metadata for the field - * @return the source field metadata for the field - */ - def sourceInfoMetadata(): SourceFieldMetadata{type ST = Boolean} = - SourceFieldMetadataRep(name, manifest, new FieldConverter { - /** - * The type of the field - */ - type T = Boolean - - /** - * Convert the field to a String - * @param v the field value - * @return the string representation of the field value - */ - def asString(v: T): String = v.toString - - /** - * Convert the field into NodeSeq, if possible - * @param v the field value - * @return a NodeSeq if the field can be represented as one - */ - def asNodeSeq(v: T): Box[NodeSeq] = Full(Text(v.toString)) - - /** - * Convert the field into a JSON value - * @param v the field value - * @return the JSON representation of the field - */ - def asJson(v: T): Box[JValue] = Full(JBool(v)) - - /** - * If the field can represent a sequence of SourceFields, - * get that - * @param v the field value - * @return the field as a sequence of SourceFields - */ - def asSeq(v: T): Box[Seq[SourceFieldInfo]] = Empty - }) - - - protected def i_is_! : Boolean = data openOr false - protected def i_was_! : Boolean = orgData openOr false - protected[mapper] def doneWithSave(): Unit = {orgData = data} - - protected def real_i_set_!(value : Boolean) : Boolean = { - val boxed = Full(value) - if (boxed != data) { - data = boxed - dirty_?(true) - } - value - } - override def readPermission_? = true - override def writePermission_? 
= true - - def asJsonValue: Box[JsonAST.JValue] = Full(JsonAST.JBool(get)) - - def real_convertToJDBCFriendly(value: Boolean): Object = new java.lang.Integer(if (value) 1 else 0) - - def jdbcFriendly(field : String) = data.map(v => new java.lang.Integer(if(v) 1 else 0)) openOr null - - def asJsExp: JsExp = if (get) JE.JsTrue else JE.JsFalse - - override def setFromAny(in: Any): Boolean = { - in match { - case b: Boolean => this.set(b) - case JsonAST.JBool(v) => this.set(v) - case (b: Boolean) :: _ => this.set(b) - case Some(b: Boolean) => this.set(b) - case Full(b: Boolean) => this.set(b) - case Empty | Failure(_, _, _) | None => this.set(false) - case (s: String) :: _ => this.set(toBoolean(s)) - case s :: _ => this.setFromAny(s) - case null => this.set(false) - case s: String => this.set(toBoolean(s)) - case o => this.set(toBoolean(o)) - } - } - - protected def i_obscure_!(in : Boolean) = false - - def buildSetActualValue(accessor : Method, inst : AnyRef, columnName : String) : (T, AnyRef) => Unit = { - inst match { - case null => {(inst : T, v : AnyRef) => {val tv = getField(inst, accessor).asInstanceOf[MappedBoolean[T]]; tv.data = Full(false)}} - case _ => {(inst : T, v : AnyRef) => {val tv = getField(inst, accessor).asInstanceOf[MappedBoolean[T]]; tv.data = Full(toBoolean(v))}} - } - } - - private def allSet(in: Box[Boolean]): Unit = { - this.data = in - this.orgData = in - } - - def buildSetLongValue(accessor : Method, columnName : String): (T, Long, Boolean) => Unit = - (inst, v, isNull) => doField(inst, accessor, {case tv: MappedBoolean[T] => tv.allSet(if (isNull) Empty else Full(v != 0L))}) - - def buildSetStringValue(accessor : Method, columnName : String): (T, String) => Unit = - (inst, v) => doField(inst, accessor, {case tv: MappedBoolean[T] => tv.allSet(if (v == null) Empty else Full(toBoolean(v)))}) - - def buildSetDateValue(accessor: Method, columnName: String): (T, Date) => Unit = - (inst, v) => doField(inst, accessor, {case tv: MappedBoolean[T] => tv.allSet(if (v == null) Empty else Full(true))}) - - def buildSetBooleanValue(accessor: Method, columnName : String) : (T, Boolean, Boolean) => Unit = - (inst, v, isNull) => doField(inst, accessor, {case tv: MappedBoolean[T] => tv.allSet(if (isNull) Empty else Full(v))}) - - - /** - * Given the driver type, return the string required to create the column in the database - */ - def fieldCreatorString(dbType: DriverType, colName: String): String = colName + " " + dbType.booleanColumnType + notNullAppender() - - - /** - * Create an input field for the item - */ - override def _toForm: Box[NodeSeq] = Full(SHtml.checkbox(get,this.apply _)) -} - diff --git a/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedDate.scala b/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedDate.scala deleted file mode 100644 index f414e975f5..0000000000 --- a/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedDate.scala +++ /dev/null @@ -1,211 +0,0 @@ -/* - * Copyright 2009-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
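Editor's note: `MappedBoolean`, deleted above, hands the driver an integer 0/1 via `jdbcFriendly` and coerces a wide range of inputs in `setFromAny`. A small sketch under the same assumptions as before (hypothetical `Task` model, pre-removal mapper API):

```scala
import net.liftweb.mapper._

class Task extends LongKeyedMapper[Task] with IdPK {
  def getSingleton = Task
  object done extends MappedBoolean(this)
}
object Task extends Task with LongKeyedMetaMapper[Task]

object BooleanDemo {
  def main(args: Array[String]): Unit = {
    val t = Task.create
    t.done.setFromAny("true")             // strings go through Helpers.toBoolean
    println(t.done.get)                   // true
    println(t.done.jdbcFriendly("done"))  // 1 -- passed to the driver as an Integer 0/1
  }
}
```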
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mapper - -import java.sql.{ResultSet, Types} -import java.util.Date -import java.lang.reflect.Method - -import net.liftweb._ -import util._ -import common._ -import Helpers._ -import http._ -import S._ -import js._ -import json._ -import scala.xml.{Text, NodeSeq} -import scala.reflect.runtime.universe._ -/** - * Represents a date without hour, minute or second fields. The underlying type is - * java.util.Date to keep things simple, but be aware that the hour, minute and second - * values will most likely be discarded when this is saved to the database. - * - * @see MappedDateTime - * @see MappedTime - */ -abstract class MappedDate[T<:Mapper[T]](val fieldOwner: T) extends MappedField[Date, T] { - private val data = FatLazy(defaultValue) - private val orgData = FatLazy(defaultValue) - - - def manifest: TypeTag[Date] = typeTag[Date] - - /** - * Get the source field metadata for the field - * @return the source field metadata for the field - */ - def sourceInfoMetadata(): SourceFieldMetadata{type ST = Date} = - SourceFieldMetadataRep(name, manifest, new FieldConverter { - /** - * The type of the field - */ - type T = Date - - /** - * Convert the field to a String - * @param v the field value - * @return the string representation of the field value - */ - def asString(v: T): String = format(v) - - /** - * Convert the field into NodeSeq, if possible - * @param v the field value - * @return a NodeSeq if the field can be represented as one - */ - def asNodeSeq(v: T): Box[NodeSeq] = Full(Text(asString(v))) - - /** - * Convert the field into a JSON value - * @param v the field value - * @return the JSON representation of the field - */ - def asJson(v: T): Box[JValue] = Full(JInt(v.getTime)) - - /** - * If the field can represent a sequence of SourceFields, - * get that - * @param v the field value - * @return the field as a sequence of SourceFields - */ - def asSeq(v: T): Box[Seq[SourceFieldInfo]] = Empty - }) - - - /** - * This defines the string parsing semantics of this field. Used in setFromAny. - * By default uses LiftRules.dateTimeConverter's parseDate; override for field-specific behavior - */ - def parse(s: String): Box[Date] = LiftRules.dateTimeConverter().parseDate(s) - /** - * This method defines the string parsing semantics of this field. Used in toString, _toForm. - * By default uses LiftRules.dateTimeConverter's formatDate; override for field-specific behavior - */ - def format(d: Date): String = LiftRules.dateTimeConverter().formatDate(d) - - protected def real_i_set_!(value: Date): Date = { - if (value != data.get) { - data() = value - this.dirty_?( true) - } - data.get - } - - def dbFieldClass = classOf[Date] - - /** Returns the date as the number of seconds (not milliseconds) since January 1, 1970 */ - def toLong: Long = get match { - case null => 0L - case d: Date => d.getTime / 1000L - } - - def asJsExp: JsExp = JE.Num(toLong) - - def asJsonValue: Box[JsonAST.JValue] = Full(get match { - case null => JsonAST.JNull - case v => JsonAST.JInt(v.getTime) - }) - - /** - * Get the JDBC SQL Type for this field - */ - def targetSQLType = Types.DATE - - def defaultValue: Date = null - // private val defaultValue_i = new Date - - override def writePermission_? = true - override def readPermission_? = true - - protected def i_is_! = data.get - protected def i_was_! 
= orgData.get - protected[mapper] def doneWithSave(): Unit = {orgData.setFrom(data)} - - protected def i_obscure_!(in : Date) : Date = { - new Date(0L) - } - - /** - * Create an input field for the item - */ - override def _toForm: Box[NodeSeq] = - S.fmapFunc({s: List[String] => this.setFromAny(s)}){funcName => - Full(appendFieldId( "" case s => format(s)}}/>)) - } - - override def setFromAny(f : Any): Date = f match { - case JsonAST.JNull => this.set(null) - case JsonAST.JInt(v) => this.set(new Date(v.longValue)) - case n: Number => this.set(new Date(n.longValue)) - case "" | null => this.set(null) - case s: String => parse(s).map(d => this.set(d)).openOr(this.get) - case (s: String) :: _ => parse(s).map(d => this.set(d)).openOr(this.get) - case d: Date => this.set(d) - case Some(d: Date) => this.set(d) - case Full(d: Date) => this.set(d) - case None | Empty | Failure(_, _, _) => this.set(null) - case _ => this.get - } - - def jdbcFriendly(field : String) : Object = get match { - case null => null - case d => new java.sql.Date(d.getTime) - } - - def real_convertToJDBCFriendly(value: Date): Object = if (value == null) null else new java.sql.Date(value.getTime) - - private def st(in: Box[Date]): Unit = - in match { - case Full(d) => data.set(d); orgData.set(d) - case _ => data.set(null); orgData.set(null) - } - - def buildSetActualValue(accessor: Method, v: AnyRef, columnName: String): (T, AnyRef) => Unit = - (inst, v) => doField(inst, accessor, {case f: MappedDate[_] => f.st(toDate(v))}) - - def buildSetLongValue(accessor: Method, columnName: String): (T, Long, Boolean) => Unit = - (inst, v, isNull) => doField(inst, accessor, {case f: MappedDate[_] => f.st(if (isNull) Empty else Full(new Date(v)))}) - - def buildSetStringValue(accessor: Method, columnName: String): (T, String) => Unit = - (inst, v) => doField(inst, accessor, {case f: MappedDate[_] => f.st(toDate(v))}) - - def buildSetDateValue(accessor: Method, columnName: String): (T, Date) => Unit = - (inst, v) => doField(inst, accessor, {case f: MappedDate[_] => f.st(Full(v))}) - - def buildSetBooleanValue(accessor: Method, columnName: String): (T, Boolean, Boolean) => Unit = - (inst, v, isNull) => doField(inst, accessor, {case f: MappedDate[_] => f.st(Empty)}) - - /** - * Given the driver type, return the string required to create the column in the database - */ - def fieldCreatorString(dbType: DriverType, colName: String): String = colName + " " + dbType.dateColumnType + notNullAppender() - - def inFuture_? = data.get match { - case null => false - case d => d.getTime > millis - } - def inPast_? = data.get match { - case null => false - case d => d.getTime < millis - } - - override def toString: String = if(get == null) "NULL" else format(get) -} diff --git a/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedDateTime.scala b/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedDateTime.scala deleted file mode 100644 index 14dba95d00..0000000000 --- a/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedDateTime.scala +++ /dev/null @@ -1,205 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
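Editor's note: `MappedDate`, deleted above, keeps a `java.util.Date` but maps it to a DATE column, so the time-of-day portion is generally discarded on save; `toLong` reports seconds while the JSON representation carries milliseconds. A sketch with a hypothetical `Event` model:

```scala
import java.util.Date
import net.liftweb.mapper._

class Event extends LongKeyedMapper[Event] with IdPK {
  def getSingleton = Event
  object startsOn extends MappedDate(this)   // DATE column
}
object Event extends Event with LongKeyedMetaMapper[Event]

object DateDemo {
  def main(args: Array[String]): Unit = {
    val e = Event.create
    e.startsOn(new Date(0L))           // 1970-01-01
    println(e.startsOn.toLong)         // 0 -- seconds since the epoch, not milliseconds
    println(e.startsOn.inPast_?)       // true
    println(e.startsOn.asJsonValue)    // Full(JInt(0)) -- JSON carries getTime (milliseconds)
  }
}
```

String input is parsed through `LiftRules.dateTimeConverter().parseDate`, so the accepted format remains application-configurable.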
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mapper - - - -import java.sql.{ResultSet, Types} -import java.util.Date -import java.lang.reflect.Method - -import net.liftweb._ -import util._ -import common._ -import Helpers._ -import http._ -import json._ -import S._ -import js._ - -import scala.xml.{Text, NodeSeq} - -abstract class MappedDateTime[T<:Mapper[T]](val fieldOwner: T) extends MappedField[Date, T] { - private val data = FatLazy(defaultValue) - private val orgData = FatLazy(defaultValue) - - /** - * This method defines the string parsing semantics of this field. Used in setFromAny. - * By default uses LiftRules.dateTimeConverter's parseDateTime; override for field-specific behavior - */ - def parse(s: String): Box[Date] = LiftRules.dateTimeConverter().parseDateTime(s) - /** - * This method defines the string parsing semantics of this field. Used in toString, _toForm. - * By default uses LiftRules.dateTimeConverter's formatDateTime; override for field-specific behavior - */ - def format(d: Date): String = LiftRules.dateTimeConverter().formatDateTime(d) - - import scala.reflect.runtime.universe._ - def manifest: TypeTag[Date] = typeTag[Date] - - /** - * Get the source field metadata for the field - * @return the source field metadata for the field - */ - def sourceInfoMetadata(): SourceFieldMetadata{type ST = Date} = - SourceFieldMetadataRep(name, manifest, new FieldConverter { - /** - * The type of the field - */ - type T = Date - - /** - * Convert the field to a String - * @param v the field value - * @return the string representation of the field value - */ - def asString(v: T): String = format(v) - - /** - * Convert the field into NodeSeq, if possible - * @param v the field value - * @return a NodeSeq if the field can be represented as one - */ - def asNodeSeq(v: T): Box[NodeSeq] = Full(Text(asString(v))) - - /** - * Convert the field into a JSON value - * @param v the field value - * @return the JSON representation of the field - */ - def asJson(v: T): Box[JValue] = Full(JInt(v.getTime)) - - /** - * If the field can represent a sequence of SourceFields, - * get that - * @param v the field value - * @return the field as a sequence of SourceFields - */ - def asSeq(v: T): Box[Seq[SourceFieldInfo]] = Empty - }) - - protected def real_i_set_!(value: Date): Date = { - if (value != data.get) { - data() = value - this.dirty_?( true) - } - data.get - } - - def dbFieldClass = classOf[Date] - - def asJsonValue: Box[JsonAST.JValue] = Full(get match { - case null => JsonAST.JNull - case v => JsonAST.JInt(v.getTime) - }) - - def toLong: Long = get match { - case null => 0L - case d: Date => d.getTime / 1000L - } - - def asJsExp: JsExp = JE.Num(toLong) - - /** - * Get the JDBC SQL Type for this field - */ - def targetSQLType = Types.TIMESTAMP - - def defaultValue: Date = null - // private val defaultValue_i = new Date - - override def writePermission_? = true - override def readPermission_? = true - - protected def i_is_! = data.get - protected def i_was_! 
= orgData.get - protected[mapper] def doneWithSave(): Unit = {orgData.setFrom(data)} - - protected def i_obscure_!(in : Date) : Date = { - new Date(0L) - } - - /** - * Create an input field for the item - */ - override def _toForm: Box[NodeSeq] = - S.fmapFunc({s: List[String] => this.setFromAny(s)}){funcName => - Full(appendFieldId( "" case s => format(s)}}/>)) - } - - override def setFromAny(f: Any): Date = f match { - case JsonAST.JNull => this.set(null) - case JsonAST.JInt(v) => this.set(new Date(v.longValue)) - case n: Number => this.set(new Date(n.longValue)) - case "" | null => this.set(null) - case s: String => parse(s).map(d => this.set(d)).openOr(this.get) - case (s: String) :: _ => parse(s).map(d => this.set(d)).openOr(this.get) - case d: Date => this.set(d) - case Some(d: Date) => this.set(d) - case Full(d: Date) => this.set(d) - case None | Empty | Failure(_, _, _) => this.set(null) - case _ => this.get - } - - def jdbcFriendly(field : String) : Object = get match { - case null => null - case d => new java.sql.Timestamp(d.getTime) - } - - def real_convertToJDBCFriendly(value: Date): Object = if (value == null) null else new java.sql.Timestamp(value.getTime) - - private def st(in: Box[Date]): Unit = - in match { - case Full(d) => data.set(d); orgData.set(d) - case _ => data.set(null); orgData.set(null) - } - - def buildSetActualValue(accessor: Method, v: AnyRef, columnName: String): (T, AnyRef) => Unit = - (inst, v) => doField(inst, accessor, {case f: MappedDateTime[T] => f.st(toDate(v))}) - - def buildSetLongValue(accessor: Method, columnName: String): (T, Long, Boolean) => Unit = - (inst, v, isNull) => doField(inst, accessor, {case f: MappedDateTime[T] => f.st(if (isNull) Empty else Full(new Date(v)))}) - - def buildSetStringValue(accessor: Method, columnName: String): (T, String) => Unit = - (inst, v) => doField(inst, accessor, {case f: MappedDateTime[T] => f.st(toDate(v))}) - - def buildSetDateValue(accessor: Method, columnName: String): (T, Date) => Unit = - (inst, v) => doField(inst, accessor, {case f: MappedDateTime[T] => f.st(Full(v))}) - - def buildSetBooleanValue(accessor: Method, columnName: String): (T, Boolean, Boolean) => Unit = - (inst, v, isNull) => doField(inst, accessor, {case f: MappedDateTime[T] => f.st(Empty)}) - - /** - * Given the driver type, return the string required to create the column in the database - */ - def fieldCreatorString(dbType: DriverType, colName: String): String = colName + " " + dbType.dateTimeColumnType + notNullAppender() - - def inFuture_? = data.get match { - case null => false - case d => d.getTime > millis - } - def inPast_? = data.get match { - case null => false - case d => d.getTime < millis - } - - override def toString: String = if(get==null) "NULL" else format(get) -} - diff --git a/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedDecimal.scala b/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedDecimal.scala deleted file mode 100644 index 5a7582934a..0000000000 --- a/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedDecimal.scala +++ /dev/null @@ -1,219 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
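Editor's note: `MappedDateTime` differs from `MappedDate` mainly in the column type (TIMESTAMP vs DATE), the JDBC value handed to the driver (`java.sql.Timestamp` vs `java.sql.Date`), and the string converter used (`parseDateTime` vs `parseDate`). A sketch with a hypothetical `Meeting` model:

```scala
import java.util.Date
import net.liftweb.mapper._

class Meeting extends LongKeyedMapper[Meeting] with IdPK {
  def getSingleton = Meeting
  object day     extends MappedDate(this)      // DATE column
  object startAt extends MappedDateTime(this)  // TIMESTAMP column
}
object Meeting extends Meeting with LongKeyedMetaMapper[Meeting]

object DateTimeDemo {
  def main(args: Array[String]): Unit = {
    val m   = Meeting.create
    val now = new Date
    m.day(now)
    m.startAt(now)
    println(m.day.jdbcFriendly("day").getClass.getName)          // java.sql.Date
    println(m.startAt.jdbcFriendly("start_at").getClass.getName) // java.sql.Timestamp
  }
}
```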
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mapper - -import java.math.{MathContext,RoundingMode} -import java.sql.{ResultSet, Types} -import java.lang.reflect.Method -import net.liftweb.util.Helpers._ -import net.liftweb.http.{S, SHtml} -import java.util.Date -import net.liftweb.util._ -import net.liftweb.common._ -import net.liftweb.json._ -import net.liftweb.http.js._ -import scala.xml.{Text, NodeSeq} - -/** - *

- * <p>
- * A field that maps to a decimal value. Decimal precision and rounding
- * are controlled via the context parameter. The default value is zero.
- * </p>
- *
- * <p><em>Note:</em>
- * Using MathContext.UNLIMITED, whether explicitly or implicitly, means
- * that no precision or scaling will be used for the SQL field definition; the
- * default scale for DECIMAL is zero per the SQL standard, but the precision
- * for DECIMAL is vendor-specific. For example, PostgreSQL uses maximum precision
- * if it's not specified, but SQL Server uses a default precision of 18.
- * </p>
- * - * @author Derek Chen-Becker - * - * @param fieldOwner The Mapper that owns this field - * @param context The MathContext that controls precision and rounding - * @param scale Controls the scale of the underlying BigDecimal - */ -abstract class MappedDecimal[T <: Mapper[T]] (val fieldOwner : T, val context : MathContext, val scale : Int) extends MappedField[BigDecimal,T] { - - /** - * Constructs a MappedDecimal with the specified initial value and context. - * The scale is taken from the initial value. - * - * @param fieldOwner The Mapper that owns this field - * @param value The initial value - * @param context The MathContext that controls precision and rounding - */ - def this(fieldOwner : T, value : BigDecimal, context : MathContext) = { - this(fieldOwner, context, value.scale) - wholeSet(coerce(value)) - } - - /** - * Constructs a MappedDecimal with the specified initial value. The context - * is set to MathContext.UNLIMITED (see note above about default precision). - * The scale is taken from the initial value. - * - * @param fieldOwner The Mapper that owns this field - * @param value The initial value - */ - def this(fieldOwner : T, value : BigDecimal) = { - this(fieldOwner, MathContext.UNLIMITED, value.scale) - wholeSet(coerce(value)) - } - - private val zero = BigDecimal("0") - - def defaultValue = zero.setScale(scale) - - def dbFieldClass = classOf[BigDecimal] - - private var data : BigDecimal = defaultValue - private var orgData : BigDecimal = defaultValue - - private def wholeSet (in : BigDecimal) = { - data = in - orgData = in - } - - import scala.reflect.runtime.universe._ - def manifest: TypeTag[BigDecimal] = typeTag[BigDecimal] - - /** - * Get the source field metadata for the field - * @return the source field metadata for the field - */ - def sourceInfoMetadata(): SourceFieldMetadata{type ST = BigDecimal} = - SourceFieldMetadataRep(name, manifest, new FieldConverter { - /** - * The type of the field - */ - type T = BigDecimal - - /** - * Convert the field to a String - * @param v the field value - * @return the string representation of the field value - */ - def asString(v: T): String = v.toString() - - /** - * Convert the field into NodeSeq, if possible - * @param v the field value - * @return a NodeSeq if the field can be represented as one - */ - def asNodeSeq(v: T): Box[NodeSeq] = Full(Text(asString(v))) - - /** - * Convert the field into a JSON value - * @param v the field value - * @return the JSON representation of the field - */ - def asJson(v: T): Box[JValue] = Full(JDouble(v.toDouble)) - - /** - * If the field can represent a sequence of SourceFields, - * get that - * @param v the field value - * @return the field as a sequence of SourceFields - */ - def asSeq(v: T): Box[Seq[SourceFieldInfo]] = Empty - }) - - protected def i_is_! = data - protected def i_was_! = orgData - - override def doneWithSave(): Unit = { - orgData = data - } - - override def readPermission_? = true - override def writePermission_? 
= true - - protected def i_obscure_!(in : BigDecimal) = defaultValue - - protected def real_i_set_!(value : BigDecimal): BigDecimal = { - if (value != data) { - data = value - dirty_?(true) - } - data - } - - def asJsExp: JsExp = JE.Num(get) - def asJsonValue: Box[JsonAST.JValue] = Full(JsonAST.JDouble(get.doubleValue)) - - def setFromAny (in : Any) : BigDecimal = - in match { - // FIXME set for big decimal - // case JsonAST.JDouble(db) => MappedDecimal.this.setAll(java.math.BigDecimal.valueOf(db)) - // case JsonAST.JInt(bi) => MappedDecimal.this.set(new java.math.BigDecimal(bi.bigInteger)) - case bd : BigDecimal => setAll(bd) - case n :: _ => setFromString(n.toString) - case Some(n) => setFromString(n.toString) - case Full(n) => setFromString(n.toString) - case None | Empty | Failure(_, _, _) | null => setFromString("0") - case n => setFromString(n.toString) - } - - def setFromString (in : String) : BigDecimal = { - this.setAll(BigDecimal(in)) - data - } - - /** Set the value along with proper scale, precision, and rounding */ - protected def setAll (in : BigDecimal) = this.set(coerce(in)) - - // Set the scale on the given input - protected def coerce (in : BigDecimal) = new BigDecimal(in.bigDecimal.setScale(scale, context.getRoundingMode)) - - def targetSQLType = Types.DECIMAL - - def jdbcFriendly(field : String) = i_is_!.bigDecimal - - def real_convertToJDBCFriendly(value: BigDecimal): Object = value.bigDecimal - - def buildSetBooleanValue(accessor : Method, columnName : String) : (T, Boolean, Boolean) => Unit = null - - def buildSetDateValue(accessor : Method, columnName : String) : (T, Date) => Unit = - (inst, v) => doField(inst, accessor, {case f: MappedDecimal[T] => f.wholeSet(if (v == null) defaultValue else coerce(BigDecimal(v.getTime)))}) - - def buildSetStringValue(accessor: Method, columnName: String): (T, String) => - Unit = (inst, v) => doField(inst, accessor, {case f: MappedDecimal[T] => f.wholeSet(if (v == null) defaultValue else coerce(BigDecimal(v)))}) - - def buildSetLongValue(accessor: Method, columnName : String) : (T, Long, Boolean) => - Unit = (inst, v, isNull) => doField(inst, accessor, {case f: MappedDecimal[T] => f.wholeSet(if (isNull) defaultValue else coerce(BigDecimal(v)))}) - - def buildSetActualValue(accessor: Method, data: AnyRef, columnName: String) : (T, AnyRef) => - Unit = (inst, v) => doField(inst, accessor, {case f: MappedDecimal[T] => f.wholeSet(if (v == null) defaultValue else coerce(BigDecimal(v.toString)))}) - - /** - * Returns the SQL creation string for this field. See the note at the - * top of the page concerning default precision. - */ - def fieldCreatorString(dbType: DriverType, colName: String): String = { - val suffix = if (context.getPrecision == 0) { - "" - } else { - "(" + context.getPrecision + "," + scale + ")" - } - - colName + " DECIMAL" + suffix + notNullAppender() - } -} - diff --git a/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedDouble.scala b/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedDouble.scala deleted file mode 100644 index bcff6bd756..0000000000 --- a/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedDouble.scala +++ /dev/null @@ -1,165 +0,0 @@ -/* - * Copyright 2009-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
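Editor's note: `MappedDecimal`, deleted above, applies its `MathContext` and scale only on the `setFromAny`/`setFromString`/`setAll` path (via `coerce`), and derives the `DECIMAL(precision, scale)` DDL from the context unless `MathContext.UNLIMITED` is used. A sketch with a hypothetical `Invoice` model; `H2Driver` is one of the `DriverType` singletons defined in the same removed module:

```scala
import java.math.{MathContext, RoundingMode}
import net.liftweb.mapper._

class Invoice extends LongKeyedMapper[Invoice] with IdPK {
  def getSingleton = Invoice
  // 10 significant digits, scale 2, HALF_UP rounding applied by coerce.
  object total extends MappedDecimal(this, new MathContext(10, RoundingMode.HALF_UP), 2)
}
object Invoice extends Invoice with LongKeyedMetaMapper[Invoice]

object DecimalDemo {
  def main(args: Array[String]): Unit = {
    val inv = Invoice.create
    inv.total.setFromAny(BigDecimal("19.999"))                // routed through setAll/coerce
    println(inv.total.get)                                    // 20.00
    println(inv.total.fieldCreatorString(H2Driver, "total"))  // total DECIMAL(10,2)
  }
}
```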
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mapper - -import java.sql.Types -import java.lang.reflect.Method -import net.liftweb.common._ -import net.liftweb.util._ -import java.util.Date -import net.liftweb.http._ -import scala.xml.{Text, NodeSeq} -import js._ -import net.liftweb.json._ - -abstract class MappedDouble[T<:Mapper[T]](val fieldOwner: T) extends MappedField[Double, T] { - private var data: Double = defaultValue - private var orgData: Double = defaultValue - - private def st(in: Double): Unit = { - data = in - orgData = in - } - - def defaultValue: Double = 0.0 - def dbFieldClass: Class[Double] = classOf[Double] - - protected def i_is_! : Double = data - protected def i_was_! : Double = orgData - - override def doneWithSave(): Unit = { - orgData = data - } - - import scala.reflect.runtime.universe._ - def manifest: TypeTag[Double] = typeTag[Double] - - /** - * Get the source field metadata for the field - * @return the source field metadata for the field - */ - def sourceInfoMetadata(): SourceFieldMetadata{type ST = Double} = - SourceFieldMetadataRep(name, manifest, new FieldConverter { - /** - * The type of the field - */ - type T = Double - - /** - * Convert the field to a String - * @param v the field value - * @return the string representation of the field value - */ - def asString(v: T): String = v.toString - - /** - * Convert the field into NodeSeq, if possible - * @param v the field value - * @return a NodeSeq if the field can be represented as one - */ - def asNodeSeq(v: T): Box[NodeSeq] = Full(Text(asString(v))) - - /** - * Convert the field into a JSON value - * @param v the field value - * @return the JSON representation of the field - */ - def asJson(v: T): Box[JValue] = Full(JsonAST.JDouble(v)) - - /** - * If the field can represent a sequence of SourceFields, - * get that - * @param v the field value - * @return the field as a sequence of SourceFields - */ - def asSeq(v: T): Box[Seq[SourceFieldInfo]] = Empty - }) - - def toDouble(in: Any): Double = { - in match { - case null => 0.0 - case i: Int => i - case n: Long => n.toDouble - case n : Number => n.doubleValue - case (n: Number) :: _ => n.doubleValue - case Full(n) => toDouble(n) // fixes issue 185 - case _: EmptyBox => 0.0 - case Some(n) => toDouble(n) - case None => 0.0 - case s: String => s.toDouble - case x :: _ => toDouble(x) - case o => toDouble(o.toString) - } - } - - override def readPermission_? = true - override def writePermission_? 
= true - - protected def i_obscure_!(in : Double): Double = defaultValue - - protected def real_i_set_!(value : Double): Double = { - if (value != data) { - data = value - dirty_?(true) - } - data - } - - def asJsExp: JsExp = JE.Num(get) - - def asJsonValue: Box[JsonAST.JValue] = Full(JsonAST.JDouble(get)) - - override def setFromAny(in: Any): Double = { - in match { - case JsonAST.JDouble(db) => this.set(db) - case JsonAST.JInt(bi) => this.set(bi.doubleValue) - case n: Double => this.set(n) - case n: Number => this.set(n.doubleValue) - case (n: Number) :: _ => this.set(n.doubleValue) - case Some(n: Number) => this.set(n.doubleValue) - case None => this.set(0.0) - case (s: String) :: _ => this.set(toDouble(s)) - case null => this.set(0L) - case s: String => this.set(toDouble(s)) - case o => this.set(toDouble(o)) - } - } - - def real_convertToJDBCFriendly(value: Double): Object = new java.lang.Double(value) - - /** - * Get the JDBC SQL Type for this field - */ - def targetSQLType: Int = Types.DOUBLE - def jdbcFriendly(field : String) = new java.lang.Double(i_is_!) - def buildSetBooleanValue(accessor : Method, columnName : String) : (T, Boolean, Boolean) => Unit = null - def buildSetDateValue(accessor : Method, columnName : String) : (T, Date) => Unit = - (inst, v) => doField(inst, accessor, {case f: MappedDouble[T] => f.st(if (v == null) defaultValue else v.getTime.toDouble)}) - - def buildSetStringValue(accessor: Method, columnName: String): (T, String) => - Unit = (inst, v) => doField(inst, accessor, {case f: MappedDouble[T] => f.st(toDouble(v))}) - - def buildSetLongValue(accessor: Method, columnName : String) : (T, Long, Boolean) => - Unit = (inst, v, isNull) => doField(inst, accessor, {case f: MappedDouble[T] => f.st(if (isNull) defaultValue else v.toDouble)}) - - def buildSetActualValue(accessor: Method, data: AnyRef, columnName: String) : (T, AnyRef) => - Unit = (inst, v) => doField(inst, accessor, {case f: MappedDouble[T] => f.st(toDouble(v))}) - - def fieldCreatorString(dbType: DriverType, colName: String): String = colName + " " + dbType.doubleColumnType + notNullAppender() -} - diff --git a/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedEmail.scala b/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedEmail.scala deleted file mode 100644 index 538406cf0b..0000000000 --- a/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedEmail.scala +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
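Editor's note: `MappedDouble`, deleted above, never stores a null: `setFromAny` falls back to `toDouble`, which unwraps `Box`/`Option` values and parses strings, while empty inputs collapse to 0.0. A sketch with a hypothetical `Reading` model:

```scala
import net.liftweb.common.Full
import net.liftweb.mapper._

class Reading extends LongKeyedMapper[Reading] with IdPK {
  def getSingleton = Reading
  object celsius extends MappedDouble(this)
}
object Reading extends Reading with LongKeyedMetaMapper[Reading]

object DoubleDemo {
  def main(args: Array[String]): Unit = {
    val r = Reading.create
    r.celsius.setFromAny("21.5")     // strings are parsed
    println(r.celsius.get)           // 21.5
    r.celsius.setFromAny(Full(37))   // Boxes are unwrapped by toDouble
    println(r.celsius.get)           // 37.0
    r.celsius.setFromAny(None)       // empty inputs become the 0.0 default
    println(r.celsius.get)           // 0.0
  }
}
```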
- */ - -package net.liftweb -package mapper - -import http.S -import util.FieldError -import proto._ - -import scala.xml.Text - -object MappedEmail { - def emailPattern = ProtoRules.emailRegexPattern.vend - - def validEmailAddr_?(email: String): Boolean = emailPattern.matcher(email).matches -} - -abstract class MappedEmail[T<:Mapper[T]](owner: T, maxLen: Int) extends MappedString[T](owner, maxLen) { - - override def setFilter = notNull _ :: toLower _ :: trim _ :: super.setFilter - - override def validate = - (if (MappedEmail.emailPattern.matcher(i_is_!).matches) Nil else List(FieldError(this, Text(S.?("invalid.email.address"))))) ::: - super.validate - -} - diff --git a/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedField.scala b/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedField.scala deleted file mode 100644 index 256545095f..0000000000 --- a/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedField.scala +++ /dev/null @@ -1,723 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mapper - -import scala.collection.mutable._ -import java.lang.reflect.Method - -import scala.xml._ -import java.util.Date - -import net.liftweb.http.{S, SHtml} -import net.liftweb.http.js._ -import net.liftweb.common._ -import net.liftweb.json._ -import net.liftweb.util._ - -import scala.annotation.tailrec -import scala.reflect.runtime.universe._ - -/** - * This is the supertrait of all traits that can be mixed into a MappedField. - * All methods should be abstract. They will be made concrete in implementations. - */ -trait MixableMappedField extends BaseField { - /** - * Will be set to the type of the owner of the field - */ - type TheOwnerType <: Mapper[TheOwnerType] - - /** - * Return the field name and field value, delimited by an '=' - */ - def asString: String - - def dbColumnCount: Int - - def dbIndexed_? : Boolean - - def dbNotNull_? : Boolean - - def dbPrimaryKey_? : Boolean - - /** - * Is the field a foreign key reference - */ - def dbForeignKey_? : Boolean - - def asHtml: NodeSeq -} - -/** - * The base (not Typed) trait that defines a field that is mapped to a column or more than 1 column - * (e.g., MappedPassword) in the database - */ -trait BaseMappedField extends SelectableField with Bindable with MixableMappedField with Serializable{ - - def dbDisplay_? = true - - def dbIncludeInForm_? : Boolean = dbDisplay_? - - def asJsonField: Box[JsonAST.JField] = - asJsonValue.map(v => JsonAST.JField(name, v)) - - def asJsonValue: Box[JsonAST.JValue] - - /** - * Get a JDBC friendly representation of the named field (this is used for MappedFields that correspond to more than - * 1 column in the database.) 
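Editor's note: `MappedEmail`, deleted above, is a thin specialization of `MappedString`: its `setFilter` trims and lower-cases input, and its `validate` checks the value against `ProtoRules.emailRegexPattern`. A sketch with a hypothetical `Account` model (the max length of 254 is only an example):

```scala
import net.liftweb.mapper._

class Account extends LongKeyedMapper[Account] with IdPK {
  def getSingleton = Account
  object email extends MappedEmail(this, 254)
}
object Account extends Account with LongKeyedMetaMapper[Account]

object EmailDemo {
  def main(args: Array[String]): Unit = {
    val a = Account.create
    a.email("  Alice@Example.COM ")
    println(a.email.get)                                    // alice@example.com -- normalised by setFilter
    println(MappedEmail.validEmailAddr_?("not-an-email"))   // false
  }
}
```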
- * @param field -- the name of the field being mapped to - */ - def jdbcFriendly(field : String): AnyRef - - /** - * Get a JDBC friendly object for the part of this field that maps to the first - * column in the database - */ - def jdbcFriendly: AnyRef - - /** - * Get the JDBC SQL Type for this field - */ - def targetSQLType(field: String): Int - - /** - * Do we ignore the targetSQLType for setObject - */ - def dbIgnoreSQLType_? : Boolean = false - - /** - * Get the JDBC SQL Type for this field - */ - def targetSQLType: Int - - /** - * Given the driver type, return the string required to create the column in the database - */ - def fieldCreatorString(dbType: DriverType, colName: String): String - - /** - * Given the driver type, return a list of statements to create the columns in the database - */ - def fieldCreatorString(dbType: DriverType): List[String] - - /** - * Convert the field to its name/value pair (e.g., name=David) - */ - def asString: String - - /** - * The number of database columns that this field represents - */ - def dbColumnCount: Int - - def dbColumnNames(in: String): List[String] - - def dbColumnName: String - - /** - * The forced lower case column names - */ - final def _dbColumnNameLC: String = { - val name = dbColumnName - - val conn = DB.currentConnection - conn.map{ - c => - if (c.metaData.storesMixedCaseIdentifiers) name - else name.toLowerCase - }.openOr(name) - } - - /** - * Should the field be indexed? - */ - def dbIndexed_? : Boolean - - /** - * Set to true if the field should be created as NOT NULL - */ - def dbNotNull_? : Boolean = false - - /** - * Is the field the table's primary key - */ - def dbPrimaryKey_? : Boolean - - /** - * Is the primary key autogenerated - */ - def dbAutogenerated_? : Boolean = dbPrimaryKey_? - - /** - * Is the field a foreign key reference - */ - def dbForeignKey_? : Boolean - - /** - * Called when a column has been added to the database via Schemifier - */ - def dbAddedColumn: Box[() => Unit] - - /** - * Called when a column has indexed via Schemifier - */ - def dbAddedIndex: Box[() => Unit] - - def asHtml: NodeSeq - - /** - * Called after the field is saved to the database - */ - protected[mapper] def doneWithSave(): Unit - - def asJsExp: JsExp - - def asJs: List[(String, JsExp)] = List((name, asJsExp)) - - /** - * What form elements are we going to add to this field? - */ - def formElemAttrs: scala.Seq[SHtml.ElemAttr] = Nil - - def renderJs_? = true - - /** - * This is where the instance creates its "toForm" stuff. - * The actual toForm method wraps the information based on - * mode. - */ - def _toForm: Box[NodeSeq] -} - -/** - * Mix this trait into a BaseMappedField and it will be indexed - */ -trait DBIndexed extends BaseMappedField { - override def dbIndexed_? = true -} - - - -trait BaseOwnedMappedField[OwnerType <: Mapper[OwnerType]] extends BaseMappedField - -trait TypedField[FieldType] { - /** - * The default value for the field - */ - def defaultValue: FieldType - - /** - * What is the real class that corresponds to FieldType - */ - def dbFieldClass: Class[FieldType] -} - -/** -* A Mapped field that is Nullable in the database. Will return Empty box for NULL values and Full for non-null values -*/ -trait MappedNullableField[NullableFieldType <: Any,OwnerType <: Mapper[OwnerType]] extends MappedField[Box[NullableFieldType], OwnerType] { - /** - * All fields of this type are NULLable - */ - override final def dbNotNull_? 
: Boolean = false - - override def toString: String = get.map(_.toString) openOr "" - - /** - * Create an input field for the item - */ - override def _toForm: Box[NodeSeq] = - S.fmapFunc({s: List[String] => this.setFromAny(s)}){funcName => - Full(appendFieldId( "" - case Full(null) => "" - case Full(s) => s.toString - case _ => "" - }}/>)) - } -} - -/** - * The strongly typed field that's mapped to a column (or many columns) in the database. - * FieldType is the type of the field and OwnerType is the Owner of the field - */ -trait MappedField[FieldType <: Any,OwnerType <: Mapper[OwnerType]] extends TypedField[FieldType] with BaseOwnedMappedField[OwnerType] with FieldIdentifier with PSettableValueHolder[FieldType] with scala.Equals { - - /** - * Will be set to the type of the field - */ - override type ValueType = FieldType - - /** - * Will be set to the type of the owner of the field - */ - type TheOwnerType = OwnerType - - /** - * Should the field be ignored by the OR Mapper? - */ - def ignoreField_? = false - - - def manifest: TypeTag[FieldType] - - /** - * Get the source field metadata for the field - * @return the source field metadata for the field - */ - def sourceInfoMetadata(): SourceFieldMetadata{type ST = FieldType} - - def sourceFieldInfo(): SourceFieldInfo{type T = FieldType} = SourceFieldInfoRep(get, sourceInfoMetadata()) - - /** - * Get the field that this prototypical field represents - * - * @param actual the object to find the field on - */ - def actualField(actual: OwnerType): MappedField[FieldType, OwnerType] = actual.getSingleton.getActualField(actual, this) - - /** - * Given the driver type, return the string required to create the column in the database - */ - def fieldCreatorString(dbType: DriverType, colName: String): String - - /** - * Given the driver type, return a list of SQL creation strings for the columns represented by this field - */ - def fieldCreatorString(dbType: DriverType): List[String] = dbColumnNames(name).map{c => fieldCreatorString(dbType, c)} - - def notNullAppender(): String = if (dbNotNull_?) " NOT NULL " else "" - - /** - * Is the field dirty - */ - private var _dirty_? = false - - /** - * Is the field dirty (has it been changed since the record was loaded from the database - */ - def dirty_? : Boolean = !dbPrimaryKey_? && _dirty_? - - /** - * Make the field dirty - */ - protected def dirty_?(b: Boolean): Unit = _dirty_? = b - - /** - * Called when a column has been added to the database via Schemifier - */ - def dbAddedColumn: Box[() => Unit] = Empty - - /** - * Called when a column has indexed via Schemifier - */ - def dbAddedIndex: Box[() => Unit] = Empty - - /** - * override this method in indexed fields to indicate that the field has been saved - */ - def dbIndexFieldIndicatesSaved_? = false; - - /** - * Return the owner of this field - */ - def fieldOwner: OwnerType - - /** - * Are we in "safe" mode (i.e., the value of the field can be read or written without any security checks.) - */ - final def safe_? : Boolean = fieldOwner.safe_? - - /** - * Given the current execution state, can the field be written? - */ - def writePermission_? = false - - /** - * Given the current execution state, can the field be read? - */ - def readPermission_? = false - - /** - * Assignment from the underlying type. It's ugly, but:
- * field() = new_value <br />
- * field set new_value <br />
- * field.set(new_value) <br />
- * are all the same - */ - def update[Q](v: Q)(implicit implFn: Q => FieldType): Unit = { - this.set(v) - } - - def apply[Q](v: Q)(implicit implFn: Q => FieldType): OwnerType = { - this.set(v) - fieldOwner - } - - def apply(v: FieldType): OwnerType = { // issue 154 - this.set(v) - fieldOwner - } - - /** - * The unique field id is the field name and the mapper name - */ - override def uniqueFieldId: Box[String] = - Full(fieldOwner.getSingleton.dbTableName+"_"+name) - - /** - * Set the field to the value - */ - def set(value: FieldType): FieldType = { - if (safe_? || writePermission_?) i_set_!(value) - else throw new Exception("Do not have permissions to set this field") - } - - def :=[Q](v: Q)(implicit implFn: Q => FieldType): FieldType = { - set(v) - } - - def :=(v: FieldType): FieldType = { - set(v) - } - - private var _name : String = _ - - /** - * The internal name of this field. Use name - */ - private[mapper] final def i_name_! : String = _name - - /** - * The name of this field - */ - final def name: String = synchronized { - if (_name eq null) { - fieldOwner.checkNames() - } - _name - } - - /** - * Set the name of this field - */ - private[mapper] final def setName_!(newName : String) : String = { - if(safe_?) _name = newName - _name - } - - /** - * The display name of this field (e.g., "First Name") - */ - override def displayName: String = MapperRules.displayNameCalculator.vend(fieldOwner, S.locale, name) - - def resetDirty(): Unit = { - if (safe_?) dirty_?(false) - } - - /** - * Attempt to figure out what the incoming value is and set the field to that value. Return true if - * the value could be assigned - */ - def setFromAny(value: Any): FieldType - - def toFormAppendedAttributes: MetaData = - if (Props.mode == Props.RunModes.Test) - new PrefixedAttribute("lift", "field_name", Text(calcFieldName), Null) - else Null - - def calcFieldName: String = fieldOwner.getSingleton.internal_dbTableName+":"+name - - - def toForm: Box[NodeSeq] = { - def mf(in: scala.xml.Node): NodeSeq = in match { - case g: Group => g.nodes.flatMap(mf) - case e: Elem => e % toFormAppendedAttributes - case other => other - } - - _toForm.map(_.flatMap(mf) ).map(SHtml.ElemAttr.applyToAllElems(_, formElemAttrs)) - } - - /** - * Create an input field for the item - */ - override def _toForm: Box[NodeSeq] = - S.fmapFunc({s: List[String] => this.setFromAny(s)}){funcName => - Full(appendFieldId( "" case s => s.toString}}/>)) - } - - /** - * When building the form field, what's the input element's - * type attribute. Defaults to 'text', but change to 'email' - * or other HTML5 values. - */ - protected def formInputType = "text" - - /** - * If the field has a defined fieldId, append it - */ - protected def appendFieldId(in: Elem): Elem = fieldId match { - case Some(i) => - import util.Helpers._ - in % ("id" -> i) - case _ => in - } - - /** - * Set the field to the Box value if the Box is Full - */ - def set_?(value: Box[FieldType]): Box[FieldType] = { - value.foreach(v => this.set(v)) - value - } - - /** - * A list of functions that transform the value before it is set. The transformations - * are also applied before the value is used in a query. 
Typical applications - * of this are trimming and/or toLowerCase-ing strings - */ - def setFilter: List[FieldType => FieldType] = Nil - - protected final def i_set_!(value: FieldType): FieldType = { - real_i_set_!(runFilters(value, setFilter)) - } - - def runFilters(in: FieldType, filter: List[FieldType => FieldType]): FieldType = - filter match { - case Nil => in - case x :: xs => runFilters(x(in), xs) - } - - /** - * Must be implemented to store the value of the field - */ - protected def real_i_set_!(value: FieldType): FieldType - - def buildSetActualValue(accessor: Method, inst : AnyRef, columnName : String) : (OwnerType, AnyRef) => Unit - def buildSetLongValue(accessor: Method, columnName: String): (OwnerType, Long, Boolean) => Unit - def buildSetStringValue(accessor: Method, columnName: String): (OwnerType, String) => Unit - def buildSetDateValue(accessor: Method, columnName: String): (OwnerType, Date) => Unit - def buildSetBooleanValue(accessor: Method, columnName: String) : (OwnerType, Boolean, Boolean) => Unit - protected def getField(inst: OwnerType, meth: Method) = meth.invoke(inst).asInstanceOf[MappedField[FieldType,OwnerType]]; - protected def doField(inst: OwnerType, meth: Method, func: PartialFunction[MappedField[FieldType, OwnerType], Unit]): Unit = { - val f = getField(inst, meth) - if (func.isDefinedAt(f)) func(f) - } - - /** - * Convert the field to its "context free" type (e.g., String, Int, Long, etc.) - * If there are no read permissions, the value will be obscured - */ - def get: FieldType = { - if (safe_? || readPermission_?) i_is_! - else i_obscure_!(i_is_!) - } - - /** - * What value was the field's value when it was pulled from the DB? - */ - def was: FieldType = { - if (safe_? || readPermission_?) i_was_! - else i_obscure_!(i_was_!) - } - - - /** - * The actual value of the field - */ - protected def i_is_! : FieldType - - /** - * The value of the field when it was pulled from the DB - */ - protected def i_was_! : FieldType - - /** - * Obscure the incoming value to a "safe" value (e.g., if there are - * not enough rights to view the entire social security number 123-45-5678, this - * method might return ***-**-*678 - */ - protected def i_obscure_!(in : FieldType): FieldType - - /** - * Return the field name and field value, delimited by an '=' - */ - def asString: String = name + "=" + toString - - def dbColumnCount = 1 - - def dbColumnNames(in : String): List[String] = if (dbColumnCount == 1) List(_dbColumnNameLC) else List(in.toLowerCase) - - def dbColumnName: String = { - val columnName = MapperRules.columnName(fieldOwner.connectionIdentifier, name) - if(DB.reservedWords.contains(columnName.toLowerCase)) - columnName+"_c" - else - columnName - } - - def dbSelectString: String = fieldOwner.getSingleton._dbTableNameLC + "." + _dbColumnNameLC - - def dbIndexed_? : Boolean = false - - /** - * Set to true if the field should be created as NOT NULL - */ - override def dbNotNull_? : Boolean = false - - def dbPrimaryKey_? : Boolean = false - - /** - * Is the field a foreign key reference - */ - def dbForeignKey_? 
: Boolean = false - - def jdbcFriendly(field : String) : Object - - def jdbcFriendly: Object = jdbcFriendly(_dbColumnNameLC) - - /** - * Get the JDBC SQL Type for this field - */ - def targetSQLType(field : String): Int = targetSQLType - - /** - * Get the JDBC SQL Type for this field - */ - def targetSQLType: Int - - override def toString : String = - get match { - case null => "" - case v => v.toString - } - - def validations: List[FieldType => List[FieldError]] = Nil - - def validate: List[FieldError] = { - val cv = get - val errorRet: ListBuffer[FieldError] = new ListBuffer - - /* - validations.flatMap{ - case pf: PartialFunction[FieldType, List[FieldError]] => - if (pf.isDefinedAt(cv)) pf(cv) - else Nil - case f => f(cv) - } - */ - - @tailrec - def runValidations(validators: List[FieldType => List[FieldError]]): Unit = { - validators match { - case Nil => () - case x :: rest => - val errors = x match { - case pf: PartialFunction[FieldType, List[FieldError]] => - if (pf.isDefinedAt(cv)) pf(cv) - else Nil - case f => f(cv) - } - - (errors, x) match { - case (Nil, _) => runValidations(rest) - case (errors, e: StopValidationOnError[FieldType]) => errorRet.appendAll(errors) - case (errors, _) => errorRet.appendAll(errors) - runValidations(rest) - } - } - } - runValidations(validations) - errorRet.toList - } - - final def convertToJDBCFriendly(value: FieldType): Object = real_convertToJDBCFriendly(runFilters(value, setFilter)) - - protected def real_convertToJDBCFriendly(value: FieldType): Object - - override def hashCode(): Int = i_is_! match { - case null => 0 - case x => x.hashCode - } - - - /** - * Does the "right thing" comparing mapped fields - */ - override def equals(other: Any): Boolean = { - ( - other match { - case e: scala.Equals => e canEqual this - case _ => true} - ) && ( - other match { - case mapped: MappedField[_, _] => this.i_is_! == mapped.i_is_! - case ov: AnyRef if (ov ne null) && dbFieldClass.isAssignableFrom(ov.getClass) => this.get == runFilters(ov.asInstanceOf[FieldType], setFilter) - case ov => this.get == ov - } - ) - } - - def canEqual(that: Any): Boolean = that match { - case ar: AnyRef => ar.getClass==this.getClass - case _ => false - } - - override def asHtml: scala.xml.Node = Text(toString) -} - -trait IndexedField[O] extends BaseIndexedField { - def convertKey(in: String): Box[O] - def convertKey(in: Int): Box[O] - def convertKey(in: Long): Box[O] - def convertKey(in: AnyRef): Box[O] - def makeKeyJDBCFriendly(in: O): AnyRef - override def dbDisplay_? 
= false -} - -trait BaseIndexedField extends BaseMappedField { - -} - - -trait LifecycleCallbacks { - def beforeValidation: Unit = {} - def beforeValidationOnCreate: Unit = {} - def beforeValidationOnUpdate: Unit = {} - def afterValidation: Unit = {} - def afterValidationOnCreate: Unit = {} - def afterValidationOnUpdate: Unit = {} - - def beforeSave: Unit = {} - def beforeCreate: Unit = {} - def beforeUpdate: Unit = {} - - def afterSave: Unit = {} - def afterCreate: Unit = {} - def afterUpdate: Unit = {} - - def beforeDelete: Unit = {} - def afterDelete: Unit = {} -} - diff --git a/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedForeignKey.scala b/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedForeignKey.scala deleted file mode 100644 index d6ba790b06..0000000000 --- a/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedForeignKey.scala +++ /dev/null @@ -1,321 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mapper - -import common._ - - -import scala.xml.{NodeSeq, Text, Elem} -import http.{js, S, SHtml} -import js._ -import S.? -import json._ -import util.FieldError - - -/** - * A trait that defines foreign key references - */ -trait BaseForeignKey extends BaseMappedField { - - type KeyType - type KeyedForeignType <: KeyedMapper[KeyType, KeyedForeignType] - - type OwnerType <: Mapper[OwnerType] - - /** - * Is the key defined? - */ - def defined_? : Boolean - - /** - * get the object referred to by this foreign key - */ - - def dbKeyToTable: BaseMetaMapper - - def dbKeyToColumn: BaseMappedField - - def findFor(key: KeyType): List[OwnerType] - - def findFor(key: KeyedForeignType): List[OwnerType] - - /** - * Called when Schemifier adds a foreign key. Return a function that will be called when Schemifier - * is done with the schemification. - */ - def dbAddedForeignKey: Box[() => Unit] -} - - -object MappedForeignKey { - implicit def getObj[KeyType, - MyOwner <: Mapper[MyOwner], - Other <: KeyedMapper[KeyType, - Other]](in: - MappedForeignKey[KeyType, - MyOwner, - Other]): - Box[Other] = in.obj -} - -/** - * The Trait that defines a field that is mapped to a foreign key - */ -trait MappedForeignKey[KeyType, MyOwner <: Mapper[MyOwner], Other <: KeyedMapper[KeyType, Other]] -extends MappedField[KeyType, MyOwner] -with LifecycleCallbacks { - type FieldType <: KeyType - // type ForeignType <: KeyedMapper[KeyType, Other] - - /** - * What's the MetaMapper for the foreign key - */ - def foreignMeta: KeyedMetaMapper[KeyType, Other] - - /** - * Make sure the MetaMapper for the KeyedMapper we're checking - * is in fact the same one as we are associated with. Issue #532. 
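For orientation, here is a minimal sketch of how a `MappedLongForeignKey` column is declared and dereferenced through `obj` and `apply`. The `Author` and `Book` models, their column names, and the `LongKeyedMapper`/`IdPK` plumbing are assumptions made only for illustration, not code from this repository.

```scala
import net.liftweb.common.Box
import net.liftweb.mapper._

// Hypothetical one-to-many model: a Book row points at an Author row.
class Author extends LongKeyedMapper[Author] with IdPK {
  def getSingleton = Author
  object name extends MappedString(this, 64)
}
object Author extends Author with LongKeyedMetaMapper[Author]

class Book extends LongKeyedMapper[Book] with IdPK {
  def getSingleton = Book
  object title  extends MappedString(this, 128)
  // The foreign-key column; Author is the KeyedMetaMapper it resolves against.
  object author extends MappedLongForeignKey(this, Author)
}
object Book extends Book with LongKeyedMetaMapper[Book]

// obj loads (and caches) the referenced Author, yielding a Box.
def authorName(book: Book): Box[String] = book.author.obj.map(_.name.get)

// Setting from an instance copies the primary key into the column
// and primes the cached obj with that instance.
def assign(book: Book, a: Author): Book = book.author(a)
```

The `apply(v: Other)` overload defined later in this trait is what makes the last call both set the key column and prime the `obj` cache in one step.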
- */ - private def checkTypes(km: KeyedMapper[KeyType, _]): Boolean = - km.getSingleton eq foreignMeta - - override def equals(other: Any): Boolean = other match { - case km: KeyedMapper[KeyType, Other] if checkTypes(km) => this.get == km.primaryKeyField.get - case _ => super.equals(other) - } - - def dbKeyToTable: KeyedMetaMapper[KeyType, Other] - - def validSelectValues: Box[List[(KeyType, String)]] = Empty - - - def immutableMsg: NodeSeq = Text(?("Can't change")) - - override def _toForm: Box[Elem] = Full(validSelectValues.flatMap{ - case Nil => Empty - - case xs => - Full(SHtml.selectObj(xs, Full(this.get), this.set)) - }.openOr({immutableMsg})) - - /** - * Is the key defined - */ - def defined_? : Boolean - - /** - * Is the obj field cached - */ - def cached_? : Boolean = synchronized{ _calcedObj} - - override protected def dirty_?(b: Boolean): Unit = synchronized { // issue 165 - // invalidate if the primary key has changed Issue 370 - if (_obj.isEmpty || (_calcedObj && _obj.isDefined && - _obj.openOrThrowException("_obj was just checked as full.").primaryKeyField.get != this.i_is_!)) { - _obj = Empty - _calcedObj = false - } - super.dirty_?(b) - } - - /** - * Some people prefer the name foreign to materialize the - * foreign reference. This is a proxy to the obj method. - */ - def foreign: Box[Other] = obj - - /** - * Load and cache the record that this field references - */ - def obj: Box[Other] = synchronized { - if (!_calcedObj) { - _calcedObj = true - this._obj = if(defined_?) dbKeyToTable.find(i_is_!) else Empty - } - _obj - } - - private[mapper] def _primeObj(obj: Box[Any]): Unit = - primeObj(obj.asInstanceOf[Box[Other]]) - - /** - * Prime the reference of this FK reference - */ - def primeObj(obj: Box[Other]): Unit = synchronized { - _obj = obj - _calcedObj = true - } - - private var _obj: Box[Other] = Empty - private var _calcedObj = false - - - /** - * Set the value from a possible instance of the foreign mapper class. - * v will be cached in obj. - * If v is Empty, set the value to defaultValue (-1) - * @return the Mapper containing this field - */ - def apply(v: Box[Other]): MyOwner = { - apply(v.dmap(defaultValue)(_.primaryKeyField.get)) - primeObj(v) - fieldOwner - } - - /** - * Set the value from an instance of the foreign mapper class. - * obj will be set to Full(v) - * @return the Mapper containing this field - */ - def apply(v: Other): MyOwner = { - apply(v.primaryKeyField.get) - primeObj(Full(v)) - fieldOwner - } - - /** - * This method, which gets called when the mapper class is going to be saved, - * sets the field's value from obj if it's set to the default (!defined_?). - * Overrides LifecycleCallbacks.beforeSave - */ - override def beforeSave: Unit = { - if(!defined_?) - for(o <- obj) - set(o.primaryKeyField.get) - super.beforeSave - } - - /** - * A validation function that checks that obj is nonempty - */ - val valHasObj = (value: Long) => - if (obj.isEmpty) List(FieldError(this, scala.xml.Text("Required field: " + name))) - else Nil -} - - -abstract class MappedLongForeignKey[T<:Mapper[T],O<:KeyedMapper[Long, O]](theOwner: T, _foreignMeta: => KeyedMetaMapper[Long, O]) -extends MappedLong[T](theOwner) with MappedForeignKey[Long,T,O] with BaseForeignKey { - def defined_? : Boolean = i_is_! > 0L - - def foreignMeta = _foreignMeta - - def box: Box[Long] = if (defined_?) Full(get) else Empty - - type KeyType = Long - type KeyedForeignType = O - type OwnerType = T - - override def jdbcFriendly(field : String) = if (defined_?) new java.lang.Long(i_is_!) 
else null - override def jdbcFriendly = if (defined_?) new java.lang.Long(i_is_!) else null - - lazy val dbKeyToTable: KeyedMetaMapper[Long, O] = foreignMeta - - def dbKeyToColumn = dbKeyToTable.primaryKeyField - - override def dbIndexed_? = true - - override def dbForeignKey_? = true - - - def asSafeJs(obs: Box[KeyObfuscator]): JsExp = - obs.map(o => JE.Str(o.obscure(dbKeyToTable, get))).openOr(JE.Num(get)) - - override def asJsExp: JsExp = if (defined_?) super.asJsExp else JE.JsNull - - override def asJsonValue: Box[JsonAST.JValue] = - if (defined_?) super.asJsonValue else Full(JsonAST.JNull) - - override def setFromAny(in: Any): Long = - in match { - case JsonAST.JNull => this.set(0L) - case JsonAST.JInt(bigint) => this.set(bigint.longValue) - case o => super.setFromAny(o) - } - - /** - * Called when Schemifier adds a foreign key. Return a function that will be called when Schemifier - * is done with the schemification. - */ - def dbAddedForeignKey: Box[() => Unit] = Empty - - override def toString: String = if (defined_?) super.toString else "NULL" - - def findFor(key: KeyType): List[OwnerType] = theOwner.getSingleton.findAll(By(this, key)) - - def findFor(key: KeyedForeignType): List[OwnerType] = theOwner.getSingleton.findAll(By(this, key)) - - // def +(in: Long): Long = is + in - - /** - * Given the driver type, return the string required to create the column in the database - */ - override def fieldCreatorString(dbType: DriverType, colName: String): String = colName + " " + dbType.longForeignKeyColumnType + notNullAppender() - -} - -abstract class MappedStringForeignKey[T<:Mapper[T],O<:KeyedMapper[String, O]](override val fieldOwner: T, foreign: => KeyedMetaMapper[String, O],override val maxLen: Int) -extends MappedString[T](fieldOwner, maxLen) with MappedForeignKey[String,T,O] with BaseForeignKey { - def defined_? : Boolean = i_is_! ne null - - type KeyType = String - type KeyedForeignType = O - type OwnerType = T - - override def jdbcFriendly(field: String) = i_is_! - override def jdbcFriendly = i_is_! - - def dbKeyToTable: KeyedMetaMapper[String, O] = foreign - def dbKeyToColumn = dbKeyToTable.primaryKeyField - - override def dbIndexed_? = true - - override def dbForeignKey_? = true - - def asSafeJs(obs: Box[KeyObfuscator]): JsExp = - obs.map(o => JE.Str(o.obscure(dbKeyToTable, get))).openOr(JE.Str(get)) - - /** - * Called when Schemifier adds a foreign key. Return a function that will be called when Schemifier - * is done with the schemification. - */ - def dbAddedForeignKey: Box[() => Unit] = Empty - - override def toString: String = if (defined_?) 
super.toString else "NULL" - - def set(v: Box[O]): T = { - val toSet: String = v match { - case Full(i) => i.primaryKeyField.get - case _ => null - } - - this(toSet) - } - - def findFor(key: KeyType): List[OwnerType] = fieldOwner.getSingleton.findAll(By(this, key)) - - def findFor(key: KeyedForeignType): List[OwnerType] = fieldOwner.getSingleton.findAll(By(this, key)) - - /** - * Given the driver type, return the string required to create the column in the database - */ - // defect 79 override def fieldCreatorString(dbType: DriverType, colName: String): String = colName + " " + dbType.longForeignKeyColumnType - -} - diff --git a/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedInt.scala b/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedInt.scala deleted file mode 100644 index 71c80a066e..0000000000 --- a/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedInt.scala +++ /dev/null @@ -1,376 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mapper - -import java.sql.Types -import java.lang.reflect.Method -import net.liftweb.common._ -import net.liftweb.util._ -import Helpers._ -import java.util.Date -import net.liftweb.http._ -import reflect.runtime.universe._ -import net.liftweb.json._ -import scala.xml.{Text, NodeSeq} -import js._ - - -/** - * Warning: Do not use unnamed Enumerations with 2.8.1 as this will cause too many items to be displayed in the dropdown. - * - * See https://issues.scala-lang.org/browse/SI-3687 for details - */ -abstract class MappedEnum[T<:Mapper[T], ENUM <: Enumeration](val fieldOwner: T, val enum: ENUM)(implicit val manifest: TypeTag[ENUM#Value]) extends MappedField[ENUM#Value, T] { - private var data: ENUM#Value = defaultValue - private var orgData: ENUM#Value = defaultValue - def defaultValue: ENUM#Value = enum.values.iterator.next - def dbFieldClass: Class[ENUM#Value] = classOf[ENUM#Value] - - /** - * Get the JDBC SQL Type for this field - */ - def targetSQLType: Int = Types.BIGINT - - protected def i_is_! : ENUM#Value = data - protected def i_was_! 
: ENUM#Value = orgData - /** - * Called after the field is saved to the database - */ - override protected[mapper] def doneWithSave(): Unit = { - orgData = data - } - - /** - * Get the source field metadata for the field - * @return the source field metadata for the field - */ - def sourceInfoMetadata(): SourceFieldMetadata{type ST = ENUM#Value} = - SourceFieldMetadataRep(name, manifest, new FieldConverter { - /** - * The type of the field - */ - type T = ENUM#Value - - /** - * Convert the field to a String - * @param v the field value - * @return the string representation of the field value - */ - def asString(v: T): String = v.toString - - /** - * Convert the field into NodeSeq, if possible - * @param v the field value - * @return a NodeSeq if the field can be represented as one - */ - def asNodeSeq(v: T): Box[NodeSeq] = Full(Text(asString(v))) - - /** - * Convert the field into a JSON value - * @param v the field value - * @return the JSON representation of the field - */ - def asJson(v: T): Box[JValue] = Full(JsonAST.JInt(v.id)) - - /** - * If the field can represent a sequence of SourceFields, - * get that - * @param v the field value - * @return the field as a sequence of SourceFields - */ - def asSeq(v: T): Box[Seq[SourceFieldInfo]] = Empty - }) - - protected def real_i_set_!(value: ENUM#Value): ENUM#Value = { - if (value != data) { - data = value - dirty_?(true) - - } - data - } - override def readPermission_? = true - override def writePermission_? = true - - def real_convertToJDBCFriendly(value: ENUM#Value): Object = new java.lang.Integer(value.id) - - def toInt = get.id - def fromInt(in: Int): ENUM#Value = enum(in) - - def jdbcFriendly(field: String) = new java.lang.Integer(toInt) - override def jdbcFriendly = new java.lang.Integer(toInt) - - def asJsExp: JsExp = JE.Num(get.id) - - def asJsonValue: Box[JsonAST.JValue] = Full(JsonAST.JInt(get.id)) - - - override def setFromAny(in: Any): ENUM#Value = { - in match { - case JsonAST.JInt(bi) => this.set(fromInt(bi.intValue)) - case n: Int => this.set(fromInt(n)) - case n: Long => this.set(fromInt(n.toInt)) - case n: Number => this.set(fromInt(n.intValue)) - case (n: Number) :: _ => this.set(fromInt(n.intValue)) - case Some(n: Number) => this.set(fromInt(n.intValue)) - case Full(n: Number) => this.set(fromInt(n.intValue)) - case None | Empty | Failure(_, _, _) => this.set(defaultValue) - case (s: String) :: _ => this.set(fromInt(Helpers.toInt(s))) - case vs: ENUM#Value => this.set(vs) - case null => this.set(defaultValue) - case s: String => this.set(fromInt(Helpers.toInt(s))) - case o => this.set(fromInt(Helpers.toInt(o))) - } - } - - protected def i_obscure_!(in : ENUM#Value) = defaultValue - - private def st(in: ENUM#Value): Unit = { - data = in - orgData = in - } - - def buildSetActualValue(accessor: Method, data: AnyRef, columnName: String) : (T, AnyRef) => Unit = - (inst, v) => doField(inst, accessor, {case f: MappedEnum[T, ENUM] => f.st(if (v eq null) defaultValue else fromInt(Helpers.toInt(v.toString)))}) - - def buildSetLongValue(accessor: Method, columnName: String): (T, Long, Boolean) => Unit = - (inst, v, isNull) => doField(inst, accessor, {case f: MappedEnum[T, ENUM] => f.st(if (isNull) defaultValue else fromInt(v.toInt))}) - - def buildSetStringValue(accessor: Method, columnName: String): (T, String) => Unit = - (inst, v) => doField(inst, accessor, {case f: MappedEnum[T, ENUM] => f.st(if (v eq null) defaultValue else fromInt(Helpers.toInt(v)))}) - - def buildSetDateValue(accessor: Method, columnName: String): (T, Date) 
=> Unit = - (inst, v) => doField(inst, accessor, {case f: MappedEnum[T, ENUM] => f.st(if (v eq null) defaultValue else fromInt(Helpers.toInt(v)))}) - - def buildSetBooleanValue(accessor: Method, columnName: String): (T, Boolean, Boolean) => Unit = - (inst, v, isNull) => doField(inst, accessor, {case f: MappedEnum[T, ENUM] => f.st(defaultValue)}) - - /** - * Given the driver type, return the string required to create the column in the database - */ - def fieldCreatorString(dbType: DriverType, colName: String): String = colName + " " + dbType.enumColumnType + notNullAppender() - - /* - Mapper dependency on Widgets is the wrong order. There should be a trait in Widgets that's - mixed into this class that provides autocomplete. dpp 2009/12/01 - - /** - * Whether or not to use autocomplete in toForm - */ - def autocomplete_? = false -*/ - - /** - * Build a list for the select. Return a tuple of (String, String) where the first string - * is the id.string of the Value and the second string is the Text name of the Value. - */ - def buildDisplayList: List[(Int, String)] = enum.values.toList.map(a => (a.id, a.toString)) - - /** - * Create an input field for the item - */ - override def _toForm: Box[NodeSeq] = - /* - if (autocomplete_?) - Full(AutoComplete.autocompleteObj[Int](buildDisplayList, Full(toInt), - v => this.set(fromInt(v)))) - else - */ - Full(SHtml.selectObj[Int](buildDisplayList, Full(toInt), - v => this.set(fromInt(v)))) -} - -abstract class MappedIntIndex[T<:Mapper[T]](owner : T) extends MappedInt[T](owner) with IndexedField[Int] { - - override def writePermission_? = false // not writable - - override def dbPrimaryKey_? = true - - override def defaultValue = -1 - - def defined_? = i_is_! != defaultValue - - override def dbIndexFieldIndicatesSaved_? = {i_is_! != defaultValue} - - def makeKeyJDBCFriendly(in : Int) = new java.lang.Integer(in) - - def convertKey(in : String): Box[Int] = { - if (in eq null) Empty - try { - val what = if (in.startsWith(name + "=")) in.substring((name + "=").length) else in - Full(Integer.parseInt(what)) - } catch { - case _: Exception => Empty - } - } - - override def dbDisplay_? 
= false - - def convertKey(in : Int): Box[Int] = { - if (in < 0) Empty - else Full(in) - } - - def convertKey(in : Long): Box[Int] = { - if (in < 0 || in > Integer.MAX_VALUE) Empty - else Full(in.asInstanceOf[Int]) - } - - def convertKey(in : AnyRef): Box[Int] = { - if ((in eq null) || (in eq None)) None - try { - convertKey(in.toString) - } catch { - case _: Exception => Empty - } - } - - override def fieldCreatorString(dbType: DriverType, colName: String): String = colName + " " + dbType.integerIndexColumnType + notNullAppender() - -} - - -abstract class MappedInt[T<:Mapper[T]](val fieldOwner: T) extends MappedField[Int, T] { - private var data: Int = defaultValue - private var orgData: Int = defaultValue - - def defaultValue = 0 - def dbFieldClass: Class[Int] = classOf[Int] - - /** - * Get the JDBC SQL Type for this field - */ - def targetSQLType: Int = Types.INTEGER - - import scala.reflect.runtime.universe._ - def manifest: TypeTag[Int] = typeTag[Int] - - /** - * Get the source field metadata for the field - * @return the source field metadata for the field - */ - def sourceInfoMetadata(): SourceFieldMetadata{type ST = Int} = - SourceFieldMetadataRep(name, manifest, new FieldConverter { - /** - * The type of the field - */ - type T = Int - - /** - * Convert the field to a String - * @param v the field value - * @return the string representation of the field value - */ - def asString(v: T): String = v.toString - - /** - * Convert the field into NodeSeq, if possible - * @param v the field value - * @return a NodeSeq if the field can be represented as one - */ - def asNodeSeq(v: T): Box[NodeSeq] = Full(Text(asString(v))) - - /** - * Convert the field into a JSON value - * @param v the field value - * @return the JSON representation of the field - */ - def asJson(v: T): Box[JValue] = Full(JsonAST.JInt(v)) - - /** - * If the field can represent a sequence of SourceFields, - * get that - * @param v the field value - * @return the field as a sequence of SourceFields - */ - def asSeq(v: T): Box[Seq[SourceFieldInfo]] = Empty - }) - - protected def i_is_! : Int = data - protected def i_was_! : Int = orgData - /** - * Called after the field is saved to the database - */ - override protected[mapper] def doneWithSave(): Unit = { - orgData = data - } - - def asJsExp: JsExp = JE.Num(get) - - def asJsonValue: Box[JsonAST.JValue] = Full(JsonAST.JInt(get)) - - protected def real_i_set_!(value : Int) : Int = { - if (value != data) { - data = value - this.dirty_?( true) - } - data - } - override def readPermission_? = true - override def writePermission_? 
= true - - def +(in: Int): Int = get + in - - def real_convertToJDBCFriendly(value: Int): Object = new java.lang.Integer(value) - - def jdbcFriendly(field : String) = new java.lang.Integer(get) - - override def setFromAny(in: Any): Int = { - in match { - case n: Int => this.set(n) - case JsonAST.JInt(bigint) => this.set(bigint.intValue) - case n: Number => this.set(n.intValue) - case (n: Number) :: _ => this.set(n.intValue) - case Some(n: Number) => this.set(n.intValue) - case Full(n: Number) => this.set(n.intValue) - case None | Empty | Failure(_, _, _) => this.set(0) - case (s: String) :: _ => this.set(toInt(s)) - case null => this.set(0) - case s: String => this.set(toInt(s)) - case o => this.set(toInt(o)) - } - } - - protected def i_obscure_!(in : Int) = 0 - - private def st(in: Int): Unit = { - data = in - orgData = in - } - - def buildSetActualValue(accessor: Method, v: AnyRef, columnName: String): (T, AnyRef) => Unit = - (inst, v) => doField(inst, accessor, {case f: MappedInt[T] => f.st(toInt(v))}) - - def buildSetLongValue(accessor: Method, columnName: String): (T, Long, Boolean) => Unit = - (inst, v, isNull) => doField(inst, accessor, {case f: MappedInt[T] => f.st(if (isNull) 0 else v.toInt)}) - - def buildSetStringValue(accessor: Method, columnName: String): (T, String) => Unit = - (inst, v) => doField(inst, accessor, {case f: MappedInt[T] => f.st(toInt(v))}) - - def buildSetDateValue(accessor: Method, columnName: String): (T, Date) => Unit = - (inst, v) => doField(inst, accessor, {case f: MappedInt[T] => f.st(toInt(v))}) - - def buildSetBooleanValue(accessor: Method, columnName: String): (T, Boolean, Boolean) => Unit = - (inst, v, isNull) => doField(inst, accessor, {case f: MappedInt[T] => f.st(if (isNull || !v) 0 else 1)}) - - /** - * Given the driver type, return the string required to create the column in the database - */ - def fieldCreatorString(dbType: DriverType, colName: String): String = colName + " " + dbType.integerColumnType + notNullAppender() -} - diff --git a/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedLong.scala b/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedLong.scala deleted file mode 100644 index d06bd9af5e..0000000000 --- a/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedLong.scala +++ /dev/null @@ -1,495 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mapper - -import java.sql.Types -import java.lang.reflect.Method -import java.util.Date - -import common._ -import util.Helpers._ -import util._ -import http.SHtml -import http.js._ -import json._ - -import scala.xml.{Text, NodeSeq} - - -abstract class MappedLongIndex[T<:Mapper[T]](theOwner: T) extends MappedLong[T](theOwner) with IndexedField[Long] { - - override def writePermission_? = false // not writable - - override def dbIndexed_? = true - - def defined_? = i_is_! != defaultValue - override def dbPrimaryKey_? 
= true - - override def defaultValue = -1L - - override def dbIndexFieldIndicatesSaved_? = {i_is_! != defaultValue} - - def makeKeyJDBCFriendly(in: Long) = new java.lang.Long(in) - - def convertKey(in: String): Box[Long] = { - if (in eq null) Empty - else tryo(toLong(if (in.startsWith(name + "=")) in.substring((name + "=").length) else in)) - } - - override def dbDisplay_? = false - - def convertKey(in : Long): Box[Long] = { - if (in < 0L) Empty - else Full(in) - } - - def convertKey(in : Int): Box[Long] = { - if (in < 0) Empty - else Full(in) - } - - def convertKey(in : AnyRef): Box[Long] = { - if ((in eq null) || (in eq None)) Empty - else tryo(convertKey(in.toString)).flatMap(s => s) - } - - override def fieldCreatorString(dbType: DriverType, colName: String): String = colName + " " + dbType.longIndexColumnType + notNullAppender() - -} - -import scala.reflect.runtime.universe._ - -abstract class MappedEnumList[T<:Mapper[T], ENUM <: Enumeration](val fieldOwner: T, val enum: ENUM)(implicit val manifest: TypeTag[Seq[ENUM#Value]]) extends MappedField[Seq[ENUM#Value], T] { - type MyElem = ENUM#Value - type MyType = Seq[MyElem] - - private var data: Seq[ENUM#Value] = defaultValue - private var orgData: Seq[ENUM#Value] = defaultValue - - def defaultValue: Seq[ENUM#Value] = Nil - def dbFieldClass: Class[Seq[ENUM#Value]] = classOf[Seq[ENUM#Value]] - - /** - * Get the JDBC SQL Type for this field - */ - def targetSQLType: Int = Types.BIGINT - - protected def i_is_! : Seq[ENUM#Value] = data - protected def i_was_! : Seq[ENUM#Value] = orgData - /** - * Called after the field is saved to the database - */ - override protected[mapper] def doneWithSave(): Unit = orgData = data - - /** - * Get the source field metadata for the field - * @return the source field metadata for the field - */ - def sourceInfoMetadata(): SourceFieldMetadata{type ST = Seq[ENUM#Value]} = - SourceFieldMetadataRep(name, manifest, new FieldConverter { - /** - * The type of the field - */ - type T = Seq[ENUM#Value] - - /** - * Convert the field to a String - * @param v the field value - * @return the string representation of the field value - */ - def asString(v: T): String = v.map(_.toString).mkString(", ") - - /** - * Convert the field into NodeSeq, if possible - * @param v the field value - * @return a NodeSeq if the field can be represented as one - */ - def asNodeSeq(v: T): Box[NodeSeq] = Full(Text(asString(v))) - - /** - * Convert the field into a JSON value - * @param v the field value - * @return the JSON representation of the field - */ - def asJson(v: T): Box[JValue] = Full(JArray(v.toList.map(x => JsonAST.JInt(x.id)))) - - /** - * If the field can represent a sequence of SourceFields, - * get that - * @param v the field value - * @return the field as a sequence of SourceFields - */ - def asSeq(v: T): Box[Seq[SourceFieldInfo]] = Empty - }) - - protected def real_i_set_!(value: Seq[ENUM#Value]): Seq[ENUM#Value] = { - if (value != data) { - data = value - dirty_?(true) - } - data - } - override def readPermission_? = true - override def writePermission_? 
= true - - def asJsExp: JsExp = JE.JsArray(get.map(v => JE.Num(v.id)) :_*) - - def asJsonValue: Box[JsonAST.JValue] = Full(JsonAST.JInt(toLong)) - - def real_convertToJDBCFriendly(value: Seq[ENUM#Value]): Object = new java.lang.Long(Helpers.toLong(value)) - - private def rot(in: Int): Long = 1L << in - - private def toLong: Long = get.foldLeft(0L)((a,b) => a + rot(b.id)) - - def fromLong(in: Long): Seq[ENUM#Value] = - enum.values.iterator.toList.filter(v => (in & rot(v.id)) != 0) - - def jdbcFriendly(field: String) = new java.lang.Long(toLong) - override def jdbcFriendly = new java.lang.Long(toLong) - - - - override def setFromAny(in: Any): Seq[ENUM#Value] = { - in match { - case JsonAST.JInt(bi) => this.set(fromLong(bi.longValue)) - case n: Long => this.set( fromLong(n)) - case n: Number => this.set(fromLong(n.longValue)) - case (n: Number) :: _ => this.set(fromLong(n.longValue)) - case Some(n: Number) => this.set(fromLong(n.longValue)) - case None => this.set(Nil) - case (s: String) :: _ => this.set(fromLong(Helpers.toLong(s))) - case vs: List[_] => this.set(vs.asInstanceOf[List[ENUM#Value]]) - case null => this.set(Nil) - case s: String => this.set(fromLong(Helpers.toLong(s))) - case o => this.set(fromLong(Helpers.toLong(o))) - } - } - - protected def i_obscure_!(in : Seq[ENUM#Value]) = Nil - - private def st(in: Seq[ENUM#Value]): Unit = { - data = in - orgData = in - } - - def buildSetActualValue(accessor: Method, data: AnyRef, columnName: String): (T, AnyRef) => Unit = - (inst, v) => doField(inst, accessor, {case f: MappedEnumList[T, ENUM] => f.st(if (v eq null) defaultValue else fromLong(Helpers.toLong(v)))}) - - def buildSetLongValue(accessor: Method, columnName: String): (T, Long, Boolean) => Unit = - (inst, v, isNull) => doField(inst, accessor, {case f: MappedEnumList[T, ENUM] => f.st(if (isNull) defaultValue else fromLong(v))}) - - def buildSetStringValue(accessor: Method, columnName: String): (T, String) => Unit = - (inst, v) => doField(inst, accessor, {case f: MappedEnumList[T, ENUM] => f.st(if (v eq null) defaultValue else fromLong(Helpers.toLong(v)))}) - - def buildSetDateValue(accessor: Method, columnName: String): (T, Date) => Unit = - (inst, v) => doField(inst, accessor, {case f: MappedEnumList[T, ENUM] => f.st(if (v eq null) defaultValue else fromLong(Helpers.toLong(v)))}) - - def buildSetBooleanValue(accessor : Method, columnName : String): (T, Boolean, Boolean) => Unit = - (inst, v, isNull) => doField(inst, accessor, {case f: MappedEnumList[T, ENUM] => f.st(defaultValue)}) - - /** - * Given the driver type, return the string required to create the column in the database - */ - def fieldCreatorString(dbType: DriverType, colName: String): String = colName + " " + dbType.enumListColumnType + notNullAppender() - - /** - * Create an input field for the item - */ - override def _toForm: Box[NodeSeq] = - Full(SHtml.checkbox[ENUM#Value](enum.values.iterator.toList, get,this(_)).toForm) -} - -/** - * Mix with MappedLong to give a default time of millis - */ -trait DefaultMillis extends TypedField[Long] { - override def defaultValue: Long = millis -} - - -abstract class MappedNullableLong[T<:Mapper[T]](val fieldOwner: T) extends MappedNullableField[Long, T] { - private var data: Box[Long] = defaultValue - private var orgData: Box[Long] = defaultValue - - def defaultValue: Box[Long] = Empty - def dbFieldClass: Class[Box[Long]] = classOf[Box[Long]] - - /** - * Get the JDBC SQL Type for this field - */ - def targetSQLType: Int = Types.BIGINT - - import 
scala.reflect.runtime.universe._ - def manifest: TypeTag[Box[Long]] = typeTag[Box[Long]] - - /** - * Get the source field metadata for the field - * @return the source field metadata for the field - */ - def sourceInfoMetadata(): SourceFieldMetadata{type ST = Box[Long]} = - SourceFieldMetadataRep(name, manifest, new FieldConverter { - /** - * The type of the field - */ - type T = Box[Long] - - /** - * Convert the field to a String - * @param v the field value - * @return the string representation of the field value - */ - def asString(v: T): String = v.map(_.toString) openOr "" - - /** - * Convert the field into NodeSeq, if possible - * @param v the field value - * @return a NodeSeq if the field can be represented as one - */ - def asNodeSeq(v: T): Box[NodeSeq] = v.map(x => Text(x.toString)) - - /** - * Convert the field into a JSON value - * @param v the field value - * @return the JSON representation of the field - */ - def asJson(v: T): Box[JValue] = v.map(JsonAST.JInt(_)) - - /** - * If the field can represent a sequence of SourceFields, - * get that - * @param v the field value - * @return the field as a sequence of SourceFields - */ - def asSeq(v: T): Box[Seq[SourceFieldInfo]] = Empty - }) - - - protected def i_is_! : Box[Long] = data - protected def i_was_! : Box[Long] = orgData - /** - * Called after the field is saved to the database - */ - override protected[mapper] def doneWithSave(): Unit = { - orgData = data - } - - protected def real_i_set_!(value: Box[Long]): Box[Long] = { - if (value != data) { - data = value - dirty_?(true) - } - data - } - - def asJsExp: JsExp = get.map(v => JE.Num(v)) openOr JE.JsNull - - def asJsonValue: Box[JsonAST.JValue] = - Full(get.map(v => JsonAST.JInt(v)) openOr JsonAST.JNull) - - override def readPermission_? = true - override def writePermission_? = true - - def real_convertToJDBCFriendly(value: Box[Long]): Object = value match { - case Full(value) => new java.lang.Long(value) - case _ => null - } - - // def asJsExp = JE.Num(is) - - def jdbcFriendly(field : String) = real_convertToJDBCFriendly(i_is_!) - override def jdbcFriendly = real_convertToJDBCFriendly(i_is_!) 
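As a usage sketch only: a `MappedNullableLong` column round-trips SQL NULL as `Empty` and concrete values as `Full`, so callers deal in `Box[Long]` rather than a sentinel value. The `Event` model and `closedAt` field below are hypothetical, invented for illustration.

```scala
import net.liftweb.common.{Box, Empty, Full}
import net.liftweb.mapper._

// Hypothetical model with a nullable BIGINT column.
class Event extends LongKeyedMapper[Event] with IdPK {
  def getSingleton = Event
  object closedAt extends MappedNullableLong(this) // Empty <-> SQL NULL
}
object Event extends Event with LongKeyedMetaMapper[Event]

val e = Event.create
e.closedAt(Full(System.currentTimeMillis)) // store a concrete value
e.closedAt(Empty)                          // store NULL again
val stored: Box[Long] = e.closedAt.get     // Empty or Full(value), never a sentinel
```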
- - override def setFromAny(in: Any): Box[Long] = { - in match { - case n: Long => this.set(Full(n)) - case n: Number => this.set(Full(n.longValue)) - case JsonAST.JNothing | JsonAST.JNull => this.set(Empty) - case JsonAST.JInt(n) => this.set(Full(n.longValue)) - case (n: Number) :: _ => this.set(Full(n.longValue)) - case Some(n: Number) => this.set(Full(n.longValue)) - case Full(n: Number) => this.set(Full(n.longValue)) - case Empty | Failure(_, _, _) => this.set(Empty) - case None => this.set(Empty) - case (s: String) :: _ => this.set(Helpers.asLong(s)) - case s :: _ => this.setFromAny(s) - case null => this.set(Empty) - case s: String => this.set(Helpers.asLong(s)) - case o => this.set(Helpers.asLong(o)) - } - } - - protected def i_obscure_!(in: Box[Long]) = defaultValue - - private def st(in: Box[Long]): Unit = { - data = in - orgData = in - } - - def buildSetActualValue(accessor: Method, data: AnyRef, columnName: String) : (T, AnyRef) => Unit = - (inst, v) => doField(inst, accessor, {case f: MappedNullableLong[T] => f.st(asLong(v))}) - - def buildSetLongValue(accessor: Method, columnName : String) : (T, Long, Boolean) => Unit = - (inst, v, isNull) => doField(inst, accessor, {case f: MappedNullableLong[T] => f.st(if (isNull) Empty else Full(v))}) - - def buildSetStringValue(accessor: Method, columnName: String): (T, String) => Unit = - (inst, v) => doField(inst, accessor, {case f: MappedNullableLong[T] => f.st(asLong(v))}) - - def buildSetDateValue(accessor : Method, columnName : String) : (T, Date) => Unit = - (inst, v) => doField(inst, accessor, {case f: MappedNullableLong[T] => f.st(if (v == null) Empty else Full(v.getTime))}) - - def buildSetBooleanValue(accessor : Method, columnName : String) : (T, Boolean, Boolean) => Unit = null - - /** - * Given the driver type, return the string required to create the column in the database - */ - def fieldCreatorString(dbType: DriverType, colName: String): String = colName + " " + dbType.longColumnType + notNullAppender() -} - -abstract class MappedLong[T<:Mapper[T]](val fieldOwner: T) extends MappedField[Long, T] { - private var data: Long = defaultValue - private var orgData: Long = defaultValue - - import scala.reflect.runtime.universe._ - def manifest: TypeTag[Long] = typeTag[Long] - - /** - * Get the source field metadata for the field - * @return the source field metadata for the field - */ - def sourceInfoMetadata(): SourceFieldMetadata{type ST = Long} = - SourceFieldMetadataRep(name, manifest, new FieldConverter { - /** - * The type of the field - */ - type T = Long - - /** - * Convert the field to a String - * @param v the field value - * @return the string representation of the field value - */ - def asString(v: T): String = v.toString - - /** - * Convert the field into NodeSeq, if possible - * @param v the field value - * @return a NodeSeq if the field can be represented as one - */ - def asNodeSeq(v: T): Box[NodeSeq] = Full(Text(asString(v))) - - /** - * Convert the field into a JSON value - * @param v the field value - * @return the JSON representation of the field - */ - def asJson(v: T): Box[JValue] = Full(JsonAST.JInt(v)) - - /** - * If the field can represent a sequence of SourceFields, - * get that - * @param v the field value - * @return the field as a sequence of SourceFields - */ - def asSeq(v: T): Box[Seq[SourceFieldInfo]] = Empty - }) - - def defaultValue: Long = 0L - def dbFieldClass: Class[Long] = classOf[Long] - - /** - * Get the JDBC SQL Type for this field - */ - def targetSQLType: Int = Types.BIGINT - - protected 
def i_is_! : Long = data - protected def i_was_! : Long = orgData - /** - * Called after the field is saved to the database - */ - override protected[mapper] def doneWithSave(): Unit = { - orgData = data - } - - protected def real_i_set_!(value : Long): Long = { - if (value != data) { - data = value - dirty_?(true) - } - data - } - - def asJsExp: JsExp = JE.Num(get) - - def asJsonValue: Box[JsonAST.JValue] = Full(JsonAST.JInt(get)) - - override def readPermission_? = true - override def writePermission_? = true - - def real_convertToJDBCFriendly(value: Long): Object = new java.lang.Long(value) - - // def asJsExp: JsExp = JE.Num(is) - - def jdbcFriendly(field : String) = new java.lang.Long(i_is_!) - override def jdbcFriendly = new java.lang.Long(i_is_!) - - override def setFromAny(in: Any): Long = { - in match { - case n: Long => this.set(n) - case JsonAST.JInt(bigint) => this.set(bigint.longValue) - case n: Number => this.set(n.longValue) - case (n: Number) :: _ => this.set(n.longValue) - case Some(n: Number) => this.set(n.longValue) - case Full(n: Number) => this.set(n.longValue) - case Empty | Failure(_, _, _) => this.set(0L) - case None => this.set(0L) - case (s: String) :: _ => this.set(toLong(s)) - case s :: _ => this.setFromAny(s) - case null => this.set(0L) - case s: String => this.set(toLong(s)) - case o => this.set(toLong(o)) - } - } - - protected def i_obscure_!(in : Long) = defaultValue - - private def st(in: Long): Unit = { - data = in - orgData = in - } - - def buildSetActualValue(accessor: Method, data: AnyRef, columnName: String) : (T, AnyRef) => Unit = - (inst, v) => doField(inst, accessor, {case f: MappedLong[T] => f.st(toLong(v))}) - - def buildSetLongValue(accessor: Method, columnName : String) : (T, Long, Boolean) => Unit = - (inst, v, isNull) => doField(inst, accessor, {case f: MappedLong[T] => f.st(if (isNull) defaultValue else v)}) - - def buildSetStringValue(accessor: Method, columnName: String): (T, String) => Unit = - (inst, v) => doField(inst, accessor, {case f: MappedLong[T] => f.st(toLong(v))}) - - def buildSetDateValue(accessor : Method, columnName : String) : (T, Date) => Unit = - (inst, v) => doField(inst, accessor, {case f: MappedLong[T] => f.st(if (v == null) defaultValue else v.getTime)}) - - def buildSetBooleanValue(accessor : Method, columnName : String) : (T, Boolean, Boolean) => Unit = null - - /** - * Given the driver type, return the string required to create the column in the database - */ - def fieldCreatorString(dbType: DriverType, colName: String): String = colName + " " + dbType.longColumnType + notNullAppender() -} - diff --git a/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedPassword.scala b/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedPassword.scala deleted file mode 100644 index 588997e752..0000000000 --- a/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedPassword.scala +++ /dev/null @@ -1,266 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mapper - -import net.liftweb.util.Helpers._ -import net.liftweb.util.FatLazy -import java.sql.{ResultSet, Types} -import java.lang.reflect.Method -import scala.xml.{Node, Text, NodeSeq} -import java.util.Date -import net.liftweb.http.{S} -import net.liftweb.http.S._ -import net.liftweb.util._ -import net.liftweb.json._ -import net.liftweb.common._ -import net.liftweb.http.js._ - -import org.mindrot.jbcrypt.BCrypt - -object MappedPassword { - val blankPw = "*******" - - /** - * Set this in boot if you want Bcrypt salt strength to be - * something more than the default - */ - var bcryptStrength: Box[Int] = None -} - -abstract class MappedPassword[T<:Mapper[T]](val fieldOwner: T) -extends MappedField[String, T] { - override def dbColumnCount = 2 - def dbFieldClass = classOf[String] - - override def dbColumnNames(in : String) = in.toLowerCase+"_pw" :: in.toLowerCase+"_slt" :: Nil - - override lazy val dbSelectString = - dbColumnNames(name). - map(cn => fieldOwner.getSingleton._dbTableNameLC + "." + cn). - mkString(", ") - - def asJsonValue: Box[JsonAST.JValue] = Full(JsonAST.JNull) - - def salt = this.salt_i - - import scala.reflect.runtime.universe._ - def manifest: TypeTag[String] = typeTag[String] - - /** - * Get the source field metadata for the field - * @return the source field metadata for the field - */ - def sourceInfoMetadata(): SourceFieldMetadata{type ST = String} = - SourceFieldMetadataRep(name, manifest, new FieldConverter { - /** - * The type of the field - */ - type T = String - - /** - * Convert the field to a String - * @param v the field value - * @return the string representation of the field value - */ - def asString(v: T): String = "" - - /** - * Convert the field into NodeSeq, if possible - * @param v the field value - * @return a NodeSeq if the field can be represented as one - */ - def asNodeSeq(v: T): Box[NodeSeq] = Empty - - /** - * Convert the field into a JSON value - * @param v the field value - * @return the JSON representation of the field - */ - def asJson(v: T): Box[JValue] = Empty - - /** - * If the field can represent a sequence of SourceFields, - * get that - * @param v the field value - * @return the field as a sequence of SourceFields - */ - def asSeq(v: T): Box[Seq[SourceFieldInfo]] = Empty - }) - - - private var password = FatLazy(defaultValue) - private val salt_i = FatLazy(util.Safe.randomString(16)) - private var invalidPw = false - private var invalidMsg = "" - - protected def real_i_set_!(value : String) : String = { - value match { - case "*" | null | MappedPassword.blankPw if (value.length < 3) => - invalidPw = true ; invalidMsg = S.?("password.must.be.set") ; password.set("*") - case MappedPassword.blankPw => return "*" - case _ if (value.length > 4) => invalidPw = false; - val bcrypted = BCrypt.hashpw(value, MappedPassword.bcryptStrength.map(BCrypt.gensalt(_)) openOr BCrypt.gensalt()) - password.set("b;"+bcrypted.substring(0,44)) - salt_i.set(bcrypted.substring(44)) - case _ => invalidPw = true ; invalidMsg = S.?("password.too.short"); password.set("*") - } - this.dirty_?( true) - "*" - } - - def setList(in: List[String]): Boolean = - in match { - case x1 :: x2 :: Nil if x1 == x2 => this.set(x1) ; true - case _ => invalidPw = true; invalidMsg = S.?("passwords.do.not.match"); false - } - - - override def setFromAny(f: Any): String = { - f match { - case a : Array[String] if (a.length == 2 && a(0) == 
a(1)) => - this.set(a(0)) - case l : List[_] if (l.length == 2 && l.head == l(1)) => - this.set(l.head.asInstanceOf[String]) - case _ => - invalidPw = true - invalidMsg = S.?("passwords.do.not.match") - } - get - } - - override def renderJs_? = false - - def asJsExp: JsExp = throw new NullPointerException("No way") - - /** - * Test to see if an incoming password matches - * @param toMatch the password to test - * @return the matched value - */ - def match_?(toMatch : String): Boolean = { - if (password.get.startsWith("b;")) { - BCrypt.checkpw(toMatch, password.get.substring(2)+salt_i.get) - } else - hash("{"+toMatch+"} salt={"+salt_i.get+"}") == password.get - } - - override def validate : List[FieldError] = { - if (!invalidPw && password.get != "*") Nil - else if (invalidPw) List(FieldError(this, Text(invalidMsg))) - else List(FieldError(this, Text(S.?("password.must.be.set")))) - } - - def real_convertToJDBCFriendly(value: String): Object = - BCrypt.hashpw(value, MappedPassword.bcryptStrength.map(BCrypt.gensalt(_)) openOr BCrypt.gensalt()) - - /** - * Get the JDBC SQL Type for this field - */ - def targetSQLType = Types.VARCHAR - - def defaultValue = "*" - - override def writePermission_? = true - override def readPermission_? = true - - protected def i_is_! = MappedPassword.blankPw - protected def i_was_! = MappedPassword.blankPw - /** - * Called after the field is saved to the database - */ - override protected[mapper] def doneWithSave(): Unit = { - } - - protected def i_obscure_!(in : String) : String = in - - /** - * Create an input field for the item - */ - override def _toForm: Box[NodeSeq] = { - S.fmapFunc({s: List[String] => this.setFromAny(s)}){funcName => - Full({appendFieldId()} {S.?("repeat")} ) - } - } - - /** - * When building the form field, what's the input element's - * type attribute. 
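A short usage sketch for the password field, assuming a hypothetical `User` model; only calls defined by `MappedPassword` and its companion are used, plus the standard field `apply`.

```scala
import net.liftweb.common.Full
import net.liftweb.mapper._

// Hypothetical user model; only the two columns shown are assumed.
class User extends LongKeyedMapper[User] with IdPK {
  def getSingleton = User
  object email    extends MappedString(this, 128)
  object password extends MappedPassword(this)
}
object User extends User with LongKeyedMetaMapper[User]

// Optionally raise the BCrypt work factor in Boot, before any passwords are set.
MappedPassword.bcryptStrength = Full(12)

val u = User.create.email("alice@example.com")
u.password("s3cr3tpw")         // stored as a BCrypt hash, never in clear text
u.password.match_?("s3cr3tpw") // true
u.password.match_?("wrong")    // false
```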
- */ - override protected def formInputType = "password" - - - def jdbcFriendly(columnName : String) = { - if (columnName.endsWith("_slt")) { - salt_i.get - } else if (columnName.endsWith("_pw")) { - password.get - } else { - null - } - } - - def buildSetLongValue(accessor : Method, columnName : String) : (T, Long, Boolean) => Unit = { - if (columnName.endsWith("_slt")) { - {(inst : T, v: Long, isNull: Boolean ) => {val tv = getField(inst, accessor).asInstanceOf[MappedPassword[T]]; tv.salt_i() = if (isNull) null else v.toString}} - } else if (columnName.endsWith("_pw")) { - {(inst : T, v: Long, isNull: Boolean ) => {val tv = getField(inst, accessor).asInstanceOf[MappedPassword[T]]; tv.password() = if (isNull) null else v.toString}} - } else { - null - } - } - def buildSetStringValue(accessor : Method, columnName : String) : (T, String) => Unit = { - if (columnName.endsWith("_slt")) { - {(inst : T, v: String ) => {val tv = getField(inst, accessor).asInstanceOf[MappedPassword[T]]; tv.salt_i() = v}} - } else if (columnName.endsWith("_pw")) { - {(inst : T, v: String ) => {val tv = getField(inst, accessor).asInstanceOf[MappedPassword[T]]; tv.password() = v}} - } else { - null - } - } - def buildSetDateValue(accessor : Method, columnName : String) : (T, Date) => Unit = { - null - } - def buildSetBooleanValue(accessor : Method, columnName : String) : (T, Boolean, Boolean) => Unit = { - null - } - - def buildSetActualValue(accessor : Method, inst : AnyRef, columnName : String) : (T, AnyRef) => Unit = { - if (columnName.endsWith("_slt")) { - inst match { - case null => {(inst : T, v : AnyRef) => {}} - case _ => {(inst : T, v : AnyRef) => {val tv = getField(inst, accessor).asInstanceOf[MappedPassword[T]]; tv.salt_i() = (if (v == null) null else v.toString); tv.resetDirty}} - } - } else if (columnName.endsWith("_pw")) { - inst match { - case null => {(inst : T, v : AnyRef) => {}} - case _ => {(inst : T, v : AnyRef) => {val tv = getField(inst, accessor).asInstanceOf[MappedPassword[T]]; tv.password() = (if (v == null) null else v.toString); tv.resetDirty}} - } - - } else { - null - } - } - - /** - * Given the driver type, return the string required to create the column in the database - */ - def fieldCreatorString(dbType: DriverType, colName: String): String = (if (colName.endsWith("_pw")) colName+" VARCHAR(48)" else colName+" VARCHAR(20)") + notNullAppender() -} diff --git a/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedPostalCode.scala b/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedPostalCode.scala deleted file mode 100644 index 307b3c0e28..0000000000 --- a/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedPostalCode.scala +++ /dev/null @@ -1,157 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package mapper - -import java.util.{Locale, TimeZone} - -import scala.xml.{Text, Elem} - -import common._ -import util._ -import Helpers._ -import http.{S, SHtml} - -object Countries extends Enumeration(1) { - - val C1, C2, C3, C4, C5, C6, C7, C8, C9, C10, - C11, C12, C13, C14, C15, C16, C17, C18, C19, C20, - C21, C22, C23, C24, C25, C26, C27, C28, C29, C30, - C31, C32, C33, C34, C35, C36, C37, C38, C39, C40, - C41, C42, C43, C44, C45, C46, C47, C48, C49, C50, - C51, C52, C53, C54, C55, C56, C57, C58, C59, C60, - C61, C62, C63, C64, C65, C66, C67, C68, C69, C70, - C71, C72, C73, C74, C75, C76, C77, C78, C79, C80, - C81, C82, C83, C84, C85, C86, C87, C88, C89, C90, - C91, C92, C93, C94, C95, C96, C97, C98, C99, C100, - C101, C102, C103, C104, C105, C106, C107, C108, C109, C110, - C111, C112, C113, C114, C115, C116, C117, C118, C119, C120, - C121, C122, C123, C124, C125, C126, C127, C128, C129, C130, - C131, C132, C133, C134, C135, C136, C137, C138, C139, C140, - C141, C142, C143, C144, C145, C146, C147, C148, C149, C150, - C151, C152, C153, C154, C155, C156, C157, C158, C159, C160, - C161, C162, C163, C164, C165, C166, C167, C168, C169, C170, - C171, C172, C173, C174, C175, C176, C177, C178, C179, C180, - C181, C182, C183, C184, C185, C186, C187, C188, C189, C190, - C191, C192, C193, C194, C195, C196, C197, C198, C199, C200, - C201, C202, C203, C204, C205, C206, C207, C208, C209, C210, - C211, C212, C213, C214, C215, C216, C217, C218, C219, C220, - C221, C222, C223, C224, C225, C226, C227, C228, C229, C230, - C231, C232, C233, C234, C235, C236, C237, C238, C239, C240, - C241, C242, C243, C244, C245, C246, C247, C248, C249, C250, - C251, C252, C253, C254, C255, C256, C257, C258, C259, C260, - C261, C262, C263, C264, C265, C266, C267, C268, C269, C270, - C271, C272 = I18NCountry - - val USA = C1 - val Australia = C10 - val Canada = C32 - val Sweden = C167 - val UnitedKingdom = C184 - val Germany = C65 - val UK = C184 - - def I18NCountry = new I18NCountry - - class I18NCountry extends Val { - override def toString(): String = S.?("country_" + id) - } -} - -abstract class MappedLocale[T <: Mapper[T]](owner: T) extends MappedString[T](owner, 16) { - override def defaultValue: String = Locale.getDefault.toString - - def isAsLocale: Locale = Locale.getAvailableLocales.filter(_.toString == get).toList match { - case Nil => Locale.getDefault - case x :: _ => x - } - - override def _toForm: Box[Elem] = - Full(SHtml.select(Locale.getAvailableLocales. - toList.sortWith(_.getDisplayName < _.getDisplayName). - map(lo => (lo.toString, lo.getDisplayName)), - Full(this.get), set) % ("id" -> fieldId)) -} - -abstract class MappedTimeZone[T <: Mapper[T]](owner: T) extends MappedString[T](owner, 32) { - override def defaultValue: String = TimeZone.getDefault.getID - - def isAsTimeZone: TimeZone = TimeZone.getTimeZone(get) match { - case null => TimeZone.getDefault - case x => x - } - - override def _toForm: Box[Elem] = - Full(SHtml.select(MappedTimeZone.timeZoneList, Full(this.get), set) % - ("id" -> fieldId)) -} - -object MappedTimeZone { - lazy val timeZoneList = - TimeZone.getAvailableIDs.toList. - filter(!_.startsWith("SystemV/")). - filter(!_.startsWith("Etc/")).filter(_.length > 3). 
- sortWith(_ < _).map(tz => (tz, tz)) -} - -abstract class MappedCountry[T <: Mapper[T]](owner: T) extends MappedEnum[T, Countries.type](owner, Countries) { - - override def buildDisplayList: List[(Int, String)] = { - val collator = java.text.Collator.getInstance(S.locale) - - super.buildDisplayList.sortWith((s1, s2) => - collator.compare(s1._2, s2._2) < 0) - } - -} - -abstract class MappedPostalCode[T <: Mapper[T]](owner: T, country: MappedCountry[T]) extends MappedString[T](owner, 32) { - override def setFilter = notNull _ :: toUpper _ :: trim _ :: super.setFilter - - private def genericCheck(zip: String): List[FieldError] = { - zip match { - case null => List(FieldError(this, Text(S.?("invalid.postal.code")))) - case s if s.length < 3 => List(FieldError(this, Text(S.?("invalid.postal.code")))) - case _ => Nil - } - } - - import java.util.regex.{Pattern => REPat} - - override def validations = country.get match { - case Countries.USA => valRegex(REPat.compile("[0-9]{5}(\\-[0-9]{4})?"), - S.?("invalid.zip.code")) _ :: super.validations - - case Countries.Sweden => valRegex(REPat.compile("[0-9]{3}[ ]?[0-9]{2}"), - S.?("invalid.postal.code")) _ :: super.validations - - case Countries.Australia => valRegex(REPat.compile("(0?|[1-9])[0-9]{3}"), - S.?("invalid.postal.code")) _ :: super.validations - - case Countries.Canada => valRegex(REPat.compile("[A-Z][0-9][A-Z][ ][0-9][A-Z][0-9]"), - S.?("invalid.postal.code")) _ :: super.validations - - case Countries.Germany => valRegex(REPat.compile("[0-9]{5}"), - S.?("invalid.postal.code")) _ :: super.validations - - case Countries.UK => valRegex(REPat.compile("[A-Z]{1,2}[0-9R][0-9A-Z]?[0-9][ABD-HJLNP-UW-Z]{2}"), - S.?("invalid.postal.code")) _ :: super.validations - - case _ => genericCheck _ :: super.validations - } -} - diff --git a/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedString.scala b/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedString.scala deleted file mode 100644 index 45e47acd1d..0000000000 --- a/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedString.scala +++ /dev/null @@ -1,238 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
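// --- Editorial sketch (not part of the removed sources) -----------------------------------
// Wiring the removed MappedCountry/MappedPostalCode pair together: the postal-code field
// takes the country field in its constructor, so its validations switch on country.get and
// apply the per-country patterns above (US ZIP, UK postcode, ...). `Address` is a
// hypothetical mapper used only for illustration.
import net.liftweb.mapper._

class Address extends LongKeyedMapper[Address] with IdPK {
  def getSingleton = Address
  object country    extends MappedCountry(this)
  object postalCode extends MappedPostalCode(this, country)
}
object Address extends Address with LongKeyedMetaMapper[Address]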
- */ - -package net.liftweb -package mapper - -import java.sql.Types -import java.lang.reflect.Method -import java.util.Date - -import util._ -import common.{Box, Full, Empty, Failure} -import http.S -import http.js._ -import json._ -import S._ - -import scala.xml.{NodeSeq, Text, Elem} - -/** - * Just like MappedString, except it's defaultValue is "" and the length is auto-cropped to - * fit in the column - */ -abstract class MappedPoliteString[T <: Mapper[T]](towner: T, theMaxLen: Int) extends MappedString[T](towner, theMaxLen) { - override def defaultValue = "" - override def setFilter = crop _ :: super.setFilter -} - -/** - * Mix this trait into a MappedString and it will add maximum length validation to the MappedString - */ -trait ValidateLength extends MixableMappedField { - self: MappedString[_] => - - def defaultErrorMessage = S.?("Field too long. Maximum Length")+": "+maxLen - - abstract override def validations = valMaxLen(maxLen, defaultErrorMessage) _ :: super.validations - -} - -trait HasApplyBoxString[T] { - def apply(x: String): T -} -abstract class MappedString[T<:Mapper[T]](val fieldOwner: T,val maxLen: Int) extends MappedField[String, T] with net.liftweb.util.StringValidators with HasApplyBoxString[T] { - private val data: FatLazy[String] = FatLazy(defaultValue) // defaultValue - private val orgData: FatLazy[String] = FatLazy(defaultValue) // defaultValue - - def dbFieldClass: Class[String] = classOf[String] - - import scala.reflect.runtime.universe._ - def manifest: TypeTag[String] = typeTag[String] - - /** - * Get the source field metadata for the field - * @return the source field metadata for the field - */ - def sourceInfoMetadata(): SourceFieldMetadata{type ST = String} = - SourceFieldMetadataRep(name, manifest, new FieldConverter { - /** - * The type of the field - */ - type T = String - - /** - * Convert the field to a String - * @param v the field value - * @return the string representation of the field value - */ - def asString(v: T): String = v - - /** - * Convert the field into NodeSeq, if possible - * @param v the field value - * @return a NodeSeq if the field can be represented as one - */ - def asNodeSeq(v: T): Box[NodeSeq] = Full(Text(v)) - - /** - * Convert the field into a JSON value - * @param v the field value - * @return the JSON representation of the field - */ - def asJson(v: T): Box[JValue] = Full(JsonAST.JString(v)) - - /** - * If the field can represent a sequence of SourceFields, - * get that - * @param v the field value - * @return the field as a sequence of SourceFields - */ - def asSeq(v: T): Box[Seq[SourceFieldInfo]] = Empty - }) - - protected def valueTypeToBoxString(in: String): Box[String] = Full(in) - protected def boxStrToValType(in: Box[String]): String = in openOr "" - - - protected def real_i_set_!(value : String) : String = { - if (!data.defined_? || value != data.get) { - data() = value - this.dirty_?( true) - } - data.get - } - - /** - * Get the JDBC SQL Type for this field - */ - def targetSQLType: Int = Types.VARCHAR - - def defaultValue = "" - - override def writePermission_? = true - override def readPermission_? = true - - protected def i_is_! : String = data.get - protected def i_was_! 
: String = orgData.get - - def asJsonValue: Box[JsonAST.JValue] = Full(get match { - case null => JsonAST.JNull - case str => JsonAST.JString(str) - }) - - /** - * Called after the field is saved to the database - */ - override protected[mapper] def doneWithSave(): Unit = { - orgData.setFrom(data) - } - - override def _toForm: Box[Elem] = - fmapFunc({s: List[String] => this.setFromAny(s)}){name => - Full(appendFieldId( "" case s => s.toString}}/>))} - - protected def i_obscure_!(in : String) : String = { - "" - } - - override def toForm: Box[Elem] = { - - super.toForm match { - case Full(IsElem(elem)) => Full(elem) - case _ => - Empty - } - } - - override def setFromAny(in: Any): String = { - in match { - case JsonAST.JNull => this.set(null) - case seq: Seq[_] if seq.nonEmpty => seq.map(setFromAny).head - case (s: String) :: _ => this.set(s) - case s :: _ => this.setFromAny(s) - case JsonAST.JString(v) => this.set(v) - case null => this.set(null) - case s: String => this.set(s) - case Some(s: String) => this.set(s) - case Full(s: String) => this.set(s) - case None | Empty | Failure(_, _, _) => this.set(null) - case o => this.set(o.toString) - } - } - - override def apply(v: String): T = super.apply(v) - - def asJsExp: JsExp = JE.Str(get) - - def jdbcFriendly(field : String): String = data.get - - def real_convertToJDBCFriendly(value: String): Object = value - - private def wholeSet(in: String): Unit = { - this.data() = in - this.orgData() = in - } - - def buildSetActualValue(accessor: Method, inst: AnyRef, columnName: String): (T, AnyRef) => Unit = - (inst, v) => doField(inst, accessor, {case f: MappedString[T] => f.wholeSet(if (v eq null) null else v.toString)}) - - def buildSetLongValue(accessor: Method, columnName: String): (T, Long, Boolean) => Unit = - (inst, v, isNull) => doField(inst, accessor, {case f: MappedString[T] => f.wholeSet(if (isNull) null else v.toString)}) - - def buildSetStringValue(accessor: Method, columnName: String): (T, String) => Unit = - (inst, v) => doField(inst, accessor, {case f: MappedString[T] => f.wholeSet(if (v eq null) null else v)}) - - def buildSetDateValue(accessor: Method, columnName: String): (T, Date) => Unit = - (inst, v) => doField(inst, accessor, {case f: MappedString[T] => f.wholeSet(if (v eq null) null else v.toString)}) - - def buildSetBooleanValue(accessor: Method, columnName: String): (T, Boolean, Boolean) => Unit = - (inst, v, isNull) => doField(inst, accessor, {case f: MappedString[T] => f.wholeSet(if (isNull) null else v.toString)}) - - - - /** - * Make sure that the field is unique in the database - */ - def valUnique(msg: => String)(value: String): List[FieldError] = - fieldOwner.getSingleton.findAll(By(this,value)). 
- filter(!_.comparePrimaryKeys(this.fieldOwner)) match { - case Nil => Nil - case x :: _ => List(FieldError(this, Text(msg))) // issue 179 - } - - - /** - * Given the driver type, return the string required to create the column in the database - */ - def fieldCreatorString(dbType: DriverType, colName: String): String = colName+" "+dbType.varcharColumnType(maxLen) + notNullAppender() - -} - -private[mapper] object IsElem { - def unapply(in: NodeSeq): Option[Elem] = in match { - case e: Elem => Some(e) - case Seq(e: Elem) => Some(e) - case _ => None - } -} - -sealed trait BoxedStringToken -object BoxedStringToken { - implicit val theBoxedStringToken: BoxedStringToken = new BoxedStringToken {} -} diff --git a/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedTextarea.scala b/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedTextarea.scala deleted file mode 100644 index c4a1b161f0..0000000000 --- a/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedTextarea.scala +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mapper - -import http.S -import common._ - -import scala.xml.Elem - -abstract class MappedTextarea[T<:Mapper[T]](owner : T, maxLen: Int) extends MappedString[T](owner, maxLen) { - /** - * Create an input field for the item - */ - override def _toForm: Box[Elem] = { - S.fmapFunc({s: List[String] => this.setFromAny(s)}){funcName => - Full(appendFieldId())} - } - - override def toString: String = { - val v = get - if (v == null || v.length < 100) super.toString - else v.substring(0,40)+" ... "+v.substring(v.length - 40) - } - - def textareaRows = 8 - - def textareaCols = 20 - -} - diff --git a/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedTime.scala b/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedTime.scala deleted file mode 100644 index 66ebb6ccb8..0000000000 --- a/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedTime.scala +++ /dev/null @@ -1,202 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mapper - -import java.sql.Types -import java.util.Date -import java.lang.reflect.Method - -import util._ -import common._ -import Helpers._ -import http._ -import js._ -import json._ - -import scala.xml.{Text, NodeSeq} - -/** - * Represents a time with hour, minute and second fields. 
The underlying type is - * java.util.Date to keep things simple, but be aware that the date portion of the - * values will most likely be discarded when this is saved to the database. - * - * @see MappedDateTime - * @see MappedDate - */ - -abstract class MappedTime[T<:Mapper[T]](val fieldOwner: T) extends MappedField[Date, T] { - private val data = FatLazy(defaultValue) - private val orgData = FatLazy(defaultValue) - - /** - * This method defines the string parsing semantics of this field. Used in setFromAny. - * By default uses LiftRules.dateTimeConverter's parseTime; override for field-specific behavior - */ - def parse(s: String): Box[Date] = LiftRules.dateTimeConverter().parseTime(s) - /** - * This method defines the string parsing semantics of this field. Used in toString, _toForm. - * By default uses LiftRules.dateTimeConverter's formatTime; override for field-specific behavior - */ - def format(d: Date): String = LiftRules.dateTimeConverter().formatTime(d) - - - import scala.reflect.runtime.universe._ - def manifest: TypeTag[Date] = typeTag[Date] - - /** - * Get the source field metadata for the field - * @return the source field metadata for the field - */ - def sourceInfoMetadata(): SourceFieldMetadata{type ST = Date} = - SourceFieldMetadataRep(name, manifest, new FieldConverter { - /** - * The type of the field - */ - type T = Date - - /** - * Convert the field to a String - * @param v the field value - * @return the string representation of the field value - */ - def asString(v: T): String = format(v) - - /** - * Convert the field into NodeSeq, if possible - * @param v the field value - * @return a NodeSeq if the field can be represented as one - */ - def asNodeSeq(v: T): Box[NodeSeq] = Full(Text(asString(v))) - - /** - * Convert the field into a JSON value - * @param v the field value - * @return the JSON representation of the field - */ - def asJson(v: T): Box[JValue] = Full(JInt(v.getTime)) - - /** - * If the field can represent a sequence of SourceFields, - * get that - * @param v the field value - * @return the field as a sequence of SourceFields - */ - def asSeq(v: T): Box[Seq[SourceFieldInfo]] = Empty - }) - - protected def real_i_set_!(value: Date): Date = { - if (value != data.get) { - data() = value - this.dirty_?( true) - } - data.get - } - - def dbFieldClass = classOf[Date] - - def toLong: Long = get match { - case null => 0L - case d: Date => d.getTime / 1000L - } - - def asJsExp: JsExp = JE.Num(toLong) - - def asJsonValue: Box[JsonAST.JValue] = Full(get match { - case null => JsonAST.JNull - case x => JsonAST.JInt(x.getTime) - }) - - /** - * Get the JDBC SQL Type for this field - */ - def targetSQLType = Types.TIME - - def defaultValue: Date = null - // private val defaultValue_i = new Date - - override def writePermission_? = true - override def readPermission_? = true - - protected def i_is_! = data.get - protected def i_was_! 
= orgData.get - protected[mapper] def doneWithSave(): Unit = {orgData.setFrom(data)} - - protected def i_obscure_!(in : Date) : Date = { - new Date(0L) - } - - /** - * Create an input field for the item - */ - override def _toForm: Box[NodeSeq] = - S.fmapFunc({s: List[String] => this.setFromAny(s)}){funcName => - Full(appendFieldId( "" case s => format(s)}}/>)) - } - - override def setFromAny(f : Any): Date = f match { - case JsonAST.JNull => this.set(null) - case JsonAST.JInt(v) => this.set(new Date(v.longValue)) - case "" | null => this.set(null) - case s: String => parse(s).map(s => this.set(s)).openOr(this.get) - case x :: _ => setFromAny(x) - case d: Date => this.set(d) - case Some(d: Date) => this.set(d) - case Full(d: Date) => this.set(d) - case None | Empty | Failure(_, _, _) => this.set(null) - case f => toDate(f).map(d => this.set(d)).openOr(this.get) - } - - def jdbcFriendly(field : String) : Object = get match { - case null => null - case d => new java.sql.Time(d.getTime) - } - - def real_convertToJDBCFriendly(value: Date): Object = if (value == null) null else new java.sql.Time(value.getTime) - - private def st(in: Box[Date]): Unit = - in match { - case Full(d) => data.set(d); orgData.set(d) - case _ => data.set(null); orgData.set(null) - } - - def buildSetActualValue(accessor: Method, v: AnyRef, columnName: String): (T, AnyRef) => Unit = - (inst, v) => doField(inst, accessor, {case f: MappedTime[T] => f.st(toDate(v))}) - - def buildSetLongValue(accessor: Method, columnName: String): (T, Long, Boolean) => Unit = - (inst, v, isNull) => doField(inst, accessor, {case f: MappedTime[T] => f.st(if (isNull) Empty else Full(new Date(v)))}) - - def buildSetStringValue(accessor: Method, columnName: String): (T, String) => Unit = - (inst, v) => doField(inst, accessor, {case f: MappedTime[T] => f.st(toDate(v))}) - - def buildSetDateValue(accessor: Method, columnName: String): (T, Date) => Unit = - (inst, v) => doField(inst, accessor, {case f: MappedTime[T] => f.st(Full(v))}) - - def buildSetBooleanValue(accessor: Method, columnName: String): (T, Boolean, Boolean) => Unit = - (inst, v, isNull) => doField(inst, accessor, {case f: MappedTime[T] => f.st(Empty)}) - - /** - * Given the driver type, return the string required to create the column in the database - */ - def fieldCreatorString(dbType: DriverType, colName: String): String = colName + " " + dbType.timeColumnType + notNullAppender() - - - override def toString = if(get==null) "NULL" else format(get) -} - diff --git a/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedUniqueId.scala b/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedUniqueId.scala deleted file mode 100644 index c61c313d49..0000000000 --- a/persistence/mapper/src/main/scala/net/liftweb/mapper/MappedUniqueId.scala +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
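// --- Editorial sketch (not part of the removed sources) -----------------------------------
// The removed MappedTime (above) keeps a java.util.Date on the Scala side but persists only
// the time-of-day portion (java.sql.Time, Types.TIME); string parsing and formatting default
// to LiftRules.dateTimeConverter unless parse/format are overridden. `Appointment` is a
// hypothetical mapper used only for illustration.
import net.liftweb.mapper._

class Appointment extends LongKeyedMapper[Appointment] with IdPK {
  def getSingleton = Appointment
  object startsAt extends MappedTime(this)
}
object Appointment extends Appointment with LongKeyedMetaMapper[Appointment]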
- */ - -package net.liftweb -package mapper - -import common._ -import util._ -import Helpers._ -import http.{S, SHtml} - -import scala.xml.NodeSeq - -abstract class MappedUniqueId[T<:Mapper[T]](override val fieldOwner: T, override val maxLen: Int) extends MappedString[T](fieldOwner, maxLen) { - override def writePermission_? = false - override lazy val defaultValue = randomString(maxLen) - - def reset(): T = this(randomString(maxLen)) -} - -/** - * A field that holds the birth year for the user - */ -abstract class MappedBirthYear[T <: Mapper[T]](owner: T, minAge: Int) extends MappedInt[T](owner) { - override def defaultValue = year(now) - minAge - - override def _toForm: Box[NodeSeq] = { - val end = (year(now) - minAge) - val start = end - 100 - Full(SHtml.selectObj((start to end). - toList. - reverse. - map(y => (y, y.toString)), - Full(get), this.set) % ("id" -> fieldId)) - } -} - -abstract class MappedGender[T <: Mapper[T]](owner: T) extends MappedEnum(owner, Genders) { - override def defaultValue = Genders.Male -} - -object Genders extends Enumeration { - - val Male = new I18NGender(1, "male") - val Female = new I18NGender(2, "female") - - class I18NGender(id : Int, name: String) extends Val(id, name) { - override def toString = { - S.?(name) - } - } -} - -abstract class MappedStringIndex[T<:Mapper[T]](override val fieldOwner: T, override val maxLen: Int) extends MappedUniqueId[T](fieldOwner, maxLen) with IndexedField[String] { - - override def writePermission_? = false // not writable - - override def dbIndexed_? = true - - def defined_? = i_is_! ne null - - override def dbPrimaryKey_? = true - - override def dbDisplay_? = false - - def makeKeyJDBCFriendly(in: String) = in - - def convertKey(in: String): Box[String] = Box.legacyNullTest(in) - def convertKey(in: Int): Box[String] = Full(in.toString) - def convertKey(in: Long): Box[String] = Full(in.toString) - def convertKey(in: AnyRef): Box[String] = - Box.legacyNullTest(in).map(_.toString) -} - - diff --git a/persistence/mapper/src/main/scala/net/liftweb/mapper/Mapper.scala b/persistence/mapper/src/main/scala/net/liftweb/mapper/Mapper.scala deleted file mode 100644 index c3d65d03db..0000000000 --- a/persistence/mapper/src/main/scala/net/liftweb/mapper/Mapper.scala +++ /dev/null @@ -1,461 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mapper - -import java.util.Date - -import scala.xml.{Elem, NodeSeq} -import http.S -import http.js._ -import util._ -import common.{Box, Empty, Full, ParamFailure} - -import collection.mutable.StringBuilder - -trait BaseMapper extends FieldContainer { - type MapperType <: Mapper[MapperType] - - def dbName: String - def save: Boolean -} - -trait Mapper[A<:Mapper[A]] extends BaseMapper with Serializable with SourceInfo { - self: A => - type MapperType = A - - private var was_deleted_? 
= false - private var dbConnectionIdentifier: Box[ConnectionIdentifier] = Empty - private[mapper] var addedPostCommit = false - @volatile private[mapper] var persisted_? = false - - def getSingleton : MetaMapper[A]; - final def safe_? : Boolean = { - util.Safe.safe_?(System.identityHashCode(this)) - } - - def dbName:String = getSingleton.dbName - - implicit def thisToMappee(in: Mapper[A]): A = this.asInstanceOf[A] - - def runSafe[T](f : => T) : T = { - util.Safe.runSafe(System.identityHashCode(this))(f) - } - - def connectionIdentifier(id: ConnectionIdentifier): A = { - if (id != getSingleton.dbDefaultConnectionIdentifier || dbConnectionIdentifier.isDefined) dbConnectionIdentifier = Full(id) - thisToMappee(this) - } - - def connectionIdentifier = dbConnectionIdentifier openOr calcDbId - - def dbCalculateConnectionIdentifier: PartialFunction[A, ConnectionIdentifier] = Map.empty - - private def calcDbId = if (dbCalculateConnectionIdentifier.isDefinedAt(this)) dbCalculateConnectionIdentifier(this) - else getSingleton.dbDefaultConnectionIdentifier - - /** - * Append a function to perform after the commit happens - * @param func - the function to perform after the commit happens - */ - def doPostCommit(func: () => Unit): A = { - DB.appendPostTransaction(connectionIdentifier, dontUse => func()) - this - } - - /** - * Save the instance and return the instance - */ - def saveMe(): A = { - this.save - this - } - - def save: Boolean = { - runSafe { - getSingleton.save(this) - } - } - - def htmlLine : NodeSeq = { - getSingleton.doHtmlLine(this) - } - - def asHtml : NodeSeq = { - getSingleton.asHtml(this) - } - - /** - * If the instance calculates any additional - * fields for JSON object, put the calculated fields - * here - */ - def suplementalJs(ob: Box[KeyObfuscator]): List[(String, JsExp)] = Nil - - def validate : List[FieldError] = { - runSafe { - getSingleton.validate(this) - } - } - - /** - * Returns the instance in a Full Box if the instance is valid, otherwise - * returns a Failure with the validation errors - */ - def asValid: Box[A] = validate match { - case Nil => Full(this) - case xs => ParamFailure(xs.map(_.msg.text).mkString(", "), Empty, Empty, xs) - } - - /** - * Convert the model to a JavaScript object - */ - def asJs: JsExp = getSingleton.asJs(this) - - - /** - * Given a name, look up the field - * @param name the name of the field - * @return the metadata - */ - def findSourceField(name: String): Box[SourceFieldInfo] = - for { - mf <- getSingleton.fieldNamesAsMap.get(name.toLowerCase) - f <- fieldByName[mf.ST](name) - } yield SourceFieldInfoRep[mf.ST](f.get.asInstanceOf[mf.ST], mf).asInstanceOf[SourceFieldInfo] - - /** - * Get a list of all the fields - * @return a list of all the fields - */ - def allFieldNames(): Seq[(String, SourceFieldMetadata)] = getSingleton.doAllFieldNames - - /** - * Delete the model from the RDBMS - */ - def delete_! : Boolean = { - if (!db_can_delete_?) false else - runSafe { - was_deleted_? = getSingleton.delete_!(this) - was_deleted_? 
- } - } - - /** - * Get the fields (in order) for displaying a form - */ - def formFields: List[MappedField[_, A]] = - getSingleton.formFields(this) - - def allFields: Seq[BaseField] = formFields - - /** - * map the fields titles and forms to generate a list - * @param func called with displayHtml, fieldId, form - */ - def mapFieldTitleForm[T](func: (NodeSeq, Box[NodeSeq], NodeSeq) => T): List[T] = - getSingleton.mapFieldTitleForm(this, func) - - - /** - * flat map the fields titles and forms to generate a list - * @param func called with displayHtml, fieldId, form - */ - def flatMapFieldTitleForm[T] - (func: (NodeSeq, Box[NodeSeq], NodeSeq) => Seq[T]): List[T] = - getSingleton.flatMapFieldTitleForm(this, func) - - /** - * flat map the fields titles and forms to generate a list - * @param func called with displayHtml, fieldId, form - */ - def flatMapFieldTitleForm2[T] - (func: (NodeSeq, MappedField[_, A], NodeSeq) => Seq[T]): List[T] = - getSingleton.flatMapFieldTitleForm2(this, func) - - /** - * Present the model as a form and execute the function on submission of the form - * - * @param button - If it's Full, put a submit button on the form with the value of the parameter - * @param onSuccess - redirect to the URL if the model validates, otherwise display the errors - * - * @return the form - */ - def toForm(button: Box[String], onSuccess: String): NodeSeq = - toForm(button, (what: A) => {what.validate match { - case Nil => what.save ; S.redirectTo(onSuccess) - case xs => S.error(xs) - }}) - - /** - * Present the model as a HTML using the same formatting as toForm - * - * @return the html view of the model - */ - def toHtml: NodeSeq = getSingleton.toHtml(this) - - /** - * Present the model as a form and execute the function on submission of the form - * - * @param button - If it's Full, put a submit button on the form with the value of the parameter - * @param f - the function to execute on form submission - * - * @return the form - */ - def toForm(button: Box[String], f: A => Any): NodeSeq = - getSingleton.toForm(this) ++ - S.fmapFunc((ignore: List[String]) => f(this)){ - (name: String) => - ()} ++ - (button.map(b => getSingleton.formatFormElement(   , )) openOr scala.xml.Text("")) - - def toForm(button: Box[String], redoSnippet: NodeSeq => NodeSeq, onSuccess: A => Unit): NodeSeq = { - val snipName = S.currentSnippet - def doSubmit(): Unit = { - this.validate match { - case Nil => onSuccess(this) - case xs => S.error(xs) - snipName.foreach(n => S.mapSnippet(n, redoSnippet)) - } - } - - getSingleton.toForm(this) ++ - S.fmapFunc((ignore: List[String]) => doSubmit())(name => ) ++ - (button.map(b => getSingleton.formatFormElement(   , )) openOr scala.xml.Text("")) - } - - def saved_? : Boolean = getSingleton.saved_?(this) - - /** - * Can this model object be deleted? - */ - def db_can_delete_? : Boolean = getSingleton.saved_?(this) && !was_deleted_? - - def dirty_? 
: Boolean = getSingleton.dirty_?(this) - - override def toString: String = - new StringBuilder(this.getClass.getName) - .append("={") - .append(getSingleton.appendFieldToStrings(this)) - .append("}") - .toString - - def toXml: Elem = { - getSingleton.toXml(this) - } - - def checkNames(): Unit = { - runSafe { - getSingleton match { - case null => - case s => s.checkFieldNames(this) - } - } - } - - def comparePrimaryKeys(other: A) = false - - /** - * Find the field by name - * @param fieldName -- the name of the field to find - * - * @return Box[MappedField] - */ - def fieldByName[T](fieldName: String): Box[MappedField[T, A]] = getSingleton.fieldByName[T](fieldName, this) - - type FieldPF = PartialFunction[String, NodeSeq => NodeSeq] - - /** - * Given a function that takes a mapper field and returns a NodeSeq - * for the field, return, for this mapper instance, a set of CSS - * selector transforms that will transform a form for those fields - * into a fully-bound form that will interact with this instance. - */ - def fieldMapperTransforms(fieldTransform: (BaseOwnedMappedField[A] => NodeSeq)): scala.collection.Seq[CssSel] = { - getSingleton.fieldMapperTransforms(fieldTransform, this) - } - - private var fieldTransforms_i: scala.collection.Seq[CssSel] = Vector() - - /** - * A list of CSS selector transforms that will help render the fields - * of this mapper object. - */ - def fieldTransforms = fieldTransforms_i - - def appendFieldTransform(transform: CssSel): Unit = { - fieldTransforms_i = fieldTransforms_i :+ transform - } - - def prependFieldTransform(transform: CssSel): Unit = { - fieldTransforms_i = transform +: fieldTransforms_i - } - - /** - * If there's a field in this record that defines the locale, return it - */ - def localeField: Box[MappedLocale[A]] = Empty - - def timeZoneField: Box[MappedTimeZone[A]] = Empty - - def countryField: Box[MappedCountry[A]] = Empty -} - -trait LongKeyedMapper[OwnerType <: LongKeyedMapper[OwnerType]] extends KeyedMapper[Long, OwnerType] with BaseLongKeyedMapper { - self: OwnerType => -} - -trait BaseKeyedMapper extends BaseMapper { - type TheKeyType - type KeyedMapperType <: KeyedMapper[TheKeyType, KeyedMapperType] - - def primaryKeyField: MappedField[TheKeyType, MapperType] with IndexedField[TheKeyType] - /** - * Delete the model from the RDBMS - */ - def delete_! : Boolean -} - -trait BaseLongKeyedMapper extends BaseKeyedMapper { - override type TheKeyType = Long -} - -trait IdPK /* extends BaseLongKeyedMapper */ { - self: BaseLongKeyedMapper => - def primaryKeyField: MappedLongIndex[MapperType] = id - object id extends MappedLongIndex[MapperType](this.asInstanceOf[MapperType]) -} - -/** - * A trait you can mix into a Mapper class that gives you - * a createdat column - */ -trait CreatedTrait { - self: BaseMapper => - - import net.liftweb.util._ - - /** - * Override this method to index the createdAt field - */ - protected def createdAtIndexed_? = false - - /** - * The createdAt field. You can change the behavior of this - * field: - *
-   * override lazy val createdAt = new MyCreatedAt(this) {
-   *   override def dbColumnName = "i_eat_time"
-   * }
-   * 
- */ - lazy val createdAt: MappedDateTime[MapperType] = new MyCreatedAt(this) - - protected class MyCreatedAt(obj: self.type) extends MappedDateTime[MapperType](obj.asInstanceOf[MapperType]) { - override def defaultValue = Helpers.now - override def dbIndexed_? = createdAtIndexed_? - } - -} - -/** - * A trait you can mix into a Mapper class that gives you - * an updatedat column - */ -trait UpdatedTrait { - self: BaseMapper => - - import net.liftweb.util._ - - /** - * Override this method to index the updatedAt field - */ - protected def updatedAtIndexed_? = false - - /** - * The updatedAt field. You can change the behavior of this - * field: - *
-   * override lazy val updatedAt = new MyUpdatedAt(this) {
-   *   override def dbColumnName = "i_eat_time_for_breakfast"
-   * }
-   * 
- */ - lazy val updatedAt: MyUpdatedAt = new MyUpdatedAt(this) - - protected class MyUpdatedAt(obj: self.type) extends MappedDateTime(obj.asInstanceOf[MapperType]) with LifecycleCallbacks { - override def beforeSave: Unit = {super.beforeSave; this.set(Helpers.now)} - override def defaultValue: Date = Helpers.now - override def dbIndexed_? : Boolean = updatedAtIndexed_? - } - -} - -/** -* Mix this trait into your Mapper instance to get createdAt and updatedAt fields. -*/ -trait CreatedUpdated extends CreatedTrait with UpdatedTrait { - self: BaseMapper => - -} - -trait KeyedMapper[KeyType, OwnerType<:KeyedMapper[KeyType, OwnerType]] extends Mapper[OwnerType] with BaseKeyedMapper { - self: OwnerType => - - type TheKeyType = KeyType - type KeyedMapperType = OwnerType - - def primaryKeyField: MappedField[KeyType, OwnerType] with IndexedField[KeyType] - def getSingleton: KeyedMetaMapper[KeyType, OwnerType]; - - override def comparePrimaryKeys(other: OwnerType): Boolean = primaryKeyField.get == other.primaryKeyField.get - - def reload: OwnerType = getSingleton.find(By(primaryKeyField, primaryKeyField.get)) openOr this - - def asSafeJs(f: KeyObfuscator): JsExp = getSingleton.asSafeJs(this, f) - - override def hashCode(): Int = primaryKeyField.get.hashCode - - override def equals(other: Any): Boolean = { - other match { - case null => false - case km: KeyedMapper[_, _] if this.getClass.isAssignableFrom(km.getClass) || - km.getClass.isAssignableFrom(this.getClass) => - this.primaryKeyField == km.primaryKeyField - case k => super.equals(k) - } - } -} - -/** -* If this trait is mixed into a validation function, the validation for a field -* will stop if this validation function returns an error -*/ -trait StopValidationOnError[T] extends Function1[T, List[FieldError]] - -object StopValidationOnError { - def apply[T](f: T => List[FieldError]): StopValidationOnError[T] = - new StopValidationOnError[T] { - def apply(in: T): List[FieldError] = f(in) - } - - def apply[T](f: PartialFunction[T, List[FieldError]]): PartialFunction[T, List[FieldError]] with StopValidationOnError[T] = - new PartialFunction[T, List[FieldError]] with StopValidationOnError[T] { - def apply(in: T): List[FieldError] = f(in) - def isDefinedAt(in: T): Boolean = f.isDefinedAt(in) - } -} diff --git a/persistence/mapper/src/main/scala/net/liftweb/mapper/MetaMapper.scala b/persistence/mapper/src/main/scala/net/liftweb/mapper/MetaMapper.scala deleted file mode 100644 index a5c7cd89af..0000000000 --- a/persistence/mapper/src/main/scala/net/liftweb/mapper/MetaMapper.scala +++ /dev/null @@ -1,2178 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
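// --- Editorial sketch (not part of the removed sources) -----------------------------------
// Mixing the CreatedUpdated convenience trait above into a mapper to get the managed
// createdAt/updatedAt columns: createdAt defaults to Helpers.now at creation time, and
// MyUpdatedAt's beforeSave callback refreshes updatedAt on every save. `Ticket` and its
// title field are illustrative only.
import net.liftweb.mapper._

class Ticket extends LongKeyedMapper[Ticket] with IdPK with CreatedUpdated {
  def getSingleton = Ticket
  object title extends MappedString(this, 256)
}
object Ticket extends Ticket with LongKeyedMetaMapper[Ticket]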
- */ - -package net.liftweb -package mapper - -import java.lang.reflect.Method -import java.sql.{ResultSet, Types, PreparedStatement} -import java.util.{Date, Locale} - -import scala.language.existentials - -import scala.collection.mutable.{ListBuffer, HashMap} -import scala.collection.immutable.{SortedMap, TreeMap} -import scala.xml._ - -import common._ -import json._ -import util.Helpers._ -import util.{SourceFieldMetadata, NamedPF, FieldError, Helpers,CssSel,PassThru} -import http.{LiftRules, S, SHtml, RequestMemoize, Factory} -import http.js._ - -trait BaseMetaMapper { - type RealType <: Mapper[RealType] - - def beforeSchemifier: Unit - def afterSchemifier: Unit - - def dbTableName: String - def _dbTableNameLC: String - def mappedFields: Seq[BaseMappedField]; - def dbAddTable: Box[() => Unit] - - def dbIndexes: List[BaseIndex[RealType]] -} - -/** - * Rules and functions shared by all Mappers - */ -object MapperRules extends Factory { - /** - * This function converts a header name into the appropriate - * XHTML format for displaying across the headers of a - * formatted block. The default is <th> for use - * in XHTML tables. If you change this function, the change - * will be used for all MetaMappers, unless they've been - * explicitly changed. - */ - var displayNameToHeaderElement: String => NodeSeq = in => {in} - - /** - * This function converts an element into the appropriate - * XHTML format for displaying across a line - * formatted block. The default is <td> for use - * in XHTML tables. If you change this function, the change - * will be used for all MetaMappers, unless they've been - * explicitly changed. - */ - var displayFieldAsLineElement: NodeSeq => NodeSeq = in => {in} - - /** - * This function is the global (for all MetaMappers that have - * not changed their formatFormElement function) that - * converts a name and form for a given field in the - * model to XHTML for presentation in the browser. By - * default, a table row ( <tr> ) is presented, but - * you can change the function to display something else. - */ - var formatFormElement: (NodeSeq, NodeSeq) => NodeSeq = - (name, form) => - - {name} - {form} - - - /** - * What are the rules and mechanisms for putting quotes around table names? - */ - val quoteTableName: FactoryMaker[String => String] = - new FactoryMaker[String => String]((s: String) => if (s.indexOf(' ') >= 0) "\""+s+"\"" else s) {} - - /** - * What are the rules and mechanisms for putting quotes around column names? - */ - val quoteColumnName: FactoryMaker[String => String] = - new FactoryMaker[String => String]((s: String) => if (s.indexOf(' ') >= 0) "\""+s+"\"" else s) {} - - /** - * Function that determines if foreign key constraints are - * created by Schemifier for the specified connection. - * - * Note: The driver choosen must also support foreign keys for - * creation to happen - */ - var createForeignKeys_? : ConnectionIdentifier => Boolean = c => false - - - /** - * This function is used to calculate the displayName of a field. Can be - * used to easily localize fields based on the locale in the - * current request - */ - val displayNameCalculator: FactoryMaker[(BaseMapper, Locale, String) => String] = - new FactoryMaker[(BaseMapper, Locale, String) => String]((m: BaseMapper,l: Locale,name: String) => name) {} - - /** - * Calculate the name of a column based on the name - * of the MappedField. Must be set in Boot before any code - * that touches the MetaMapper. 
- * - * To get snake_case, use this: - * - * MapperRules.columnName = (_,name) => StringHelpers.snakify(name) - */ - var columnName: (ConnectionIdentifier,String) => String = (_,name) => name.toLowerCase - - /** - * Calculate the name of a table based on the name - * of the Mapper. Must be set in Boot before any code - * that tocuhes the MetaMapper. - * - * To get snake_case, use this - * - * MapperRules.tableName = (_,name) => StringHelpers.snakify(name) - */ - var tableName: (ConnectionIdentifier,String) => String = (_,name) => name.toLowerCase -} - -trait MetaMapper[A<:Mapper[A]] extends BaseMetaMapper with Mapper[A] { - self: A => - - private val logger = Logger(classOf[MetaMapper[A]]) - - case class FieldHolder(name: String, method: Method, field: MappedField[_, A]) - - type RealType = A - - def beforeValidation: List[A => Unit] = Nil - def beforeValidationOnCreate: List[A => Unit] = Nil - def beforeValidationOnUpdate: List[A => Unit] = Nil - def afterValidation: List[A => Unit] = Nil - def afterValidationOnCreate: List[A => Unit] = Nil - def afterValidationOnUpdate: List[A => Unit] = Nil - - def beforeSave: List[A => Unit] = Nil - def beforeCreate: List[(A) => Unit] = Nil - def beforeUpdate: List[(A) => Unit] = Nil - - def afterSave: List[(A) => Unit] = Nil - def afterCreate: List[(A) => Unit] = Nil - def afterUpdate: List[(A) => Unit] = Nil - - def beforeDelete: List[(A) => Unit] = Nil - def afterDelete: List[(A) => Unit] = Nil - - /** - * If there are model-specific validations to perform, override this - * method and return an additional list of validations to perform - */ - def validation: List[A => List[FieldError]] = Nil - - private def clearPostCommit(in: A): Unit = { - in.addedPostCommit = false - } - - private def clearPCFunc: A => Unit = clearPostCommit _ - - def afterCommit: List[A => Unit] = Nil - - def dbDefaultConnectionIdentifier: ConnectionIdentifier = DefaultConnectionIdentifier - - def findAll(): List[A] = - findMapDb(dbDefaultConnectionIdentifier, Nil :_*)(v => Full(v)) - - def findAllDb(dbId:ConnectionIdentifier): List[A] = - findMapDb(dbId, Nil :_*)(v => Full(v)) - - def countByInsecureSql(query: String, checkedBy: IHaveValidatedThisSQL): scala.Long = - countByInsecureSqlDb(dbDefaultConnectionIdentifier, query, checkedBy) - - def countByInsecureSqlDb(dbId: ConnectionIdentifier, query: String, checkedBy: IHaveValidatedThisSQL): scala.Long = - DB.use(dbId)(DB.prepareStatement(query, _)(DB.exec(_)(rs => if (rs.next) rs.getLong(1) else 0L))) - - def findAllByInsecureSql(query: String, checkedBy: IHaveValidatedThisSQL): List[A] = findAllByInsecureSqlDb(dbDefaultConnectionIdentifier, query, checkedBy) - - /** - * Execute a PreparedStatement and return a List of Mapper instances. {@code f} is - * where the user will do the work of creating the PreparedStatement and - * preparing it for execution. - * - * @param f A function that takes a SuperConnection and returns a PreparedStatement. - * @return A List of Mapper instances. 
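// --- Editorial sketch (not part of the removed sources) -----------------------------------
// The findAllByPreparedStatement contract documented just below: the caller builds and binds
// the PreparedStatement from the SuperConnection, and the MetaMapper maps the ResultSet rows
// back to instances. The `User` meta-mapper, the table and column names, and reaching the
// underlying JDBC Connection via SuperConnection.connection are assumptions for illustration.
def findAdults(): List[User] = User.findAllByPreparedStatement { superConn =>
  val st = superConn.connection.prepareStatement(
    "SELECT * FROM users WHERE age >= ?")
  st.setInt(1, 18) // bind parameters before handing the statement back to the mapper
  st
}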
- */ - def findAllByPreparedStatement(f: SuperConnection => PreparedStatement): List[A] = { - DB.use(dbDefaultConnectionIdentifier) { - conn => - findAllByPreparedStatement(dbDefaultConnectionIdentifier, f(conn)) - } - } - - def findAllByPreparedStatement(dbId: ConnectionIdentifier, stmt: PreparedStatement): List[A] = findAllByPreparedStatementDb(dbId, stmt)(a => Full(a)) - - def findAllByPreparedStatementDb[T](dbId: ConnectionIdentifier, stmt: PreparedStatement)(f: A => Box[T]): List[T] = { - DB.exec(stmt) { - rs => createInstances(dbId, rs, Empty, Empty, f) - } - } - - def findAllByInsecureSqlDb(dbId: ConnectionIdentifier, query: String, checkedBy: IHaveValidatedThisSQL): List[A] = - findMapByInsecureSqlDb(dbId, query, checkedBy)(a => Full(a)) - - - def findMapByInsecureSql[T](query: String, checkedBy: IHaveValidatedThisSQL) - (f: A => Box[T]): List[T] = - findMapByInsecureSqlDb(dbDefaultConnectionIdentifier, query, checkedBy)(f) - - def findMapByInsecureSqlDb[T](dbId: ConnectionIdentifier, query: String, checkedBy: IHaveValidatedThisSQL)(f: A => Box[T]): List[T] = { - DB.use(dbId) { - conn => - DB.prepareStatement(query, conn) { - st => - DB.exec(st) { - rs => - createInstances(dbId, rs, Empty, Empty, f) - } - } - } - } - - def dbAddTable: Box[() => Unit] = Empty - - def count: Long = countDb(dbDefaultConnectionIdentifier, Nil :_*) - - def count(by: QueryParam[A]*): Long = countDb(dbDefaultConnectionIdentifier, by:_*) - - def countDb(dbId: ConnectionIdentifier, by: QueryParam[A]*): Long = { - DB.use(dbId) { - conn => - val bl = by.toList ::: addlQueryParams.get - val (query, start, max) = addEndStuffs(addFields("SELECT COUNT(*) FROM "+MapperRules.quoteTableName.vend(_dbTableNameLC)+" ", false, bl, conn), bl, conn) - - DB.prepareStatement(query, conn) { - st => - setStatementFields(st, bl, 1, conn) - DB.exec(st) { - rs => - if (rs.next) rs.getLong(1) - else 0 - } - } - } - } - - //type KeyDude = T forSome {type T} - type OtherMapper = KeyedMapper[_, _] // T forSome {type T <: KeyedMapper[KeyDude, T]} - type OtherMetaMapper = KeyedMetaMapper[_, _] // T forSome {type T <: KeyedMetaMapper[KeyDude, OtherMapper]} - //type OtherMapper = KeyedMapper[_, (T forSome {type T})] - //type OtherMetaMapper = KeyedMetaMapper[_, OtherMapper] - - def findAllFields(fields: Seq[SelectableField], - by: QueryParam[A]*): List[A] = - findMapFieldDb(dbDefaultConnectionIdentifier, - fields, by :_*)(v => Full(v)) - - def findAllFieldsDb(dbId: ConnectionIdentifier, - fields: Seq[SelectableField], - by: QueryParam[A]*): - List[A] = findMapFieldDb(dbId, fields, by :_*)(v => Full(v)) - - private def dealWithPrecache(ret: List[A], by: Seq[QueryParam[A]]): List[A] = { - - val precache: List[PreCache[A, _, _]] = by.toList.flatMap{case j: PreCache[A, _, _] => List[PreCache[A, _, _]](j) case _ => Nil} - for (j <- precache) { - type FT = j.field.FieldType - type MT = T forSome {type T <: KeyedMapper[FT, T]} - - val ol: List[MT] = if (!j.deterministic) { - def filter(in: Seq[FT]): Seq[FT] = - in.flatMap{ - case null => Nil - case x: Number if x.longValue == 0L => Nil - case x => List(x) - } - - val lst: Set[FT] = Set(filter(ret.map(v => v.getSingleton.getActualField(v, j.field).get.asInstanceOf[FT])) :_*) - - j.field.dbKeyToTable. - asInstanceOf[MetaMapper[A]]. - findAll(ByList(j.field.dbKeyToTable.primaryKeyField. - asInstanceOf[MappedField[FT, A]], lst.toList)).asInstanceOf[List[MT]] - } else { - j.field.dbKeyToTable. - asInstanceOf[MetaMapper[A]]. 
- findAll(new InThing[A]{ - type JoinType = FT - type InnerType = A - - val outerField: MappedField[JoinType, A] = - j.field.dbKeyToTable.primaryKeyField.asInstanceOf[MappedField[JoinType, A]] - val innerField: MappedField[JoinType, A] = j.field.asInstanceOf[MappedField[JoinType, A]] - val innerMeta: MetaMapper[A] = j.field.fieldOwner.getSingleton - - def notIn = false - - val queryParams: List[QueryParam[A]] = by.toList - }.asInstanceOf[QueryParam[A]] ).asInstanceOf[List[MT]] - } - - val map: Map[FT, MT] = - Map(ol.map(v => (v.primaryKeyField.get, v)) :_*) - - for (i <- ret) { - val field: MappedForeignKey[FT, A, _] = - getActualField(i, j.field).asInstanceOf[MappedForeignKey[FT, A, _]] - - map.get(field.get) match { - case v => field._primeObj(Box(v)) - } - //field.primeObj(Box(map.get(field.get).map(_.asInstanceOf[QQ]))) - } - } - - ret - } - - def findAll(by: QueryParam[A]*): List[A] = - dealWithPrecache(findMapDb(dbDefaultConnectionIdentifier, by :_*) - (v => Full(v)), by) - - - def findAllDb(dbId: ConnectionIdentifier,by: QueryParam[A]*): List[A] = - dealWithPrecache(findMapDb(dbId, by :_*)(v => Full(v)), by) - - def bulkDelete_!!(by: QueryParam[A]*): Boolean = bulkDelete_!!(dbDefaultConnectionIdentifier, by :_*) - def bulkDelete_!!(dbId: ConnectionIdentifier, by: QueryParam[A]*): Boolean = { - DB.use(dbId) { - conn => - val bl = by.toList ::: addlQueryParams.get - val (query, start, max) = addEndStuffs(addFields("DELETE FROM "+MapperRules.quoteTableName.vend(_dbTableNameLC)+" ", false, bl, conn), bl, conn) - - DB.prepareStatement(query, conn) { - st => - setStatementFields(st, bl, 1, conn) - st.executeUpdate - true - } - } - } - - private def distinct(in: Seq[QueryParam[A]]): String = - in.find {case Distinct() => true case _ => false}.isDefined match { - case false => "" - case true => " DISTINCT " - } - - def findMap[T](by: QueryParam[A]*)(f: A => Box[T]) = - findMapDb(dbDefaultConnectionIdentifier, by :_*)(f) - - def findMapDb[T](dbId: ConnectionIdentifier, - by: QueryParam[A]*)(f: A => Box[T]): List[T] = - findMapFieldDb(dbId, mappedFields, by :_*)(f) - - /** - * Given fields, a connection and the query parameters, build a query and return the query String, - * and Start or MaxRows values (depending on whether the driver supports LIMIT and OFFSET) - * and the complete List of QueryParams based on any synthetic query parameters calculated during the - * query creation. - * - * @param fields -- a Seq of the fields to be selected - * @param conn -- the SuperConnection to be used for calculating the query - * @param by -- the varg of QueryParams - * - * @return a Tuple of the Query String, Start (offset), MaxRows (limit), and the list of all query parameters - * including and synthetic query parameters - */ - def buildSelectString(fields: Seq[SelectableField], conn: SuperConnection, by: QueryParam[A]*): - (String, Box[Long], Box[Long], List[QueryParam[A]]) = { - val bl = by.toList ::: addlQueryParams.get - val selectStatement = "SELECT "+ - distinct(by)+ - fields.map(_.dbSelectString). 
- mkString(", ")+ - " FROM "+MapperRules.quoteTableName.vend(_dbTableNameLC)+" " - - val (str, start, max) = addEndStuffs(addFields(selectStatement, false, bl, conn), bl, conn) - (str, start, max, bl) - } - - def findMapFieldDb[T](dbId: ConnectionIdentifier, fields: Seq[SelectableField], - by: QueryParam[A]*)(f: A => Box[T]): List[T] = { - DB.use(dbId) { - conn => - - val (query, start, max, bl) = buildSelectString(fields, conn, by :_*) - DB.prepareStatement(query, conn) { - st => - setStatementFields(st, bl, 1, conn) - DB.exec(st)(createInstances(dbId, _, start, max, f)) - } - } - } - - def create: A = createInstance - - object addlQueryParams extends net.liftweb.http.RequestVar[List[QueryParam[A]]](Nil) { - override val __nameSalt = randomString(10) - } - - private[mapper] def addFields(what: String, whereAdded: Boolean, - by: List[QueryParam[A]], conn: SuperConnection): String = { - - var wav = whereAdded - - def whereOrAnd = if (wav) " AND " else {wav = true; " WHERE "} - - class DBFuncWrapper(dbFunc: Box[String]) { - def apply(field: String) = dbFunc match { - case Full(f) => f+"("+field+")" - case _ => field - } - } - - implicit def dbfToFunc(in: Box[String]): DBFuncWrapper = new DBFuncWrapper(in) - - by match { - case Nil => what - case x :: xs => { - var updatedWhat = what - x match { - case Cmp(field, opr, Full(_), _, dbFunc) => - (1 to field.dbColumnCount).foreach { - cn => - updatedWhat = updatedWhat + whereOrAnd + dbFunc(MapperRules.quoteColumnName.vend(field.dbColumnNames(field.name)(cn - 1)))+" "+opr+" ? " - } - - case Cmp(field, opr, _, Full(otherField), dbFunc) => - (1 to field.dbColumnCount).foreach { - cn => - updatedWhat = updatedWhat + whereOrAnd + dbFunc(MapperRules.quoteColumnName.vend(field.dbColumnNames(field.name)(cn - 1)))+" "+opr+" "+ - MapperRules.quoteColumnName.vend(otherField.dbColumnNames(otherField.name)(cn - 1)) - } - - case Cmp(field, opr, Empty, Empty, dbFunc) => - (1 to field.dbColumnCount).foreach (cn => updatedWhat = updatedWhat + whereOrAnd + dbFunc(MapperRules.quoteColumnName.vend(field.dbColumnNames(field.name)(cn - 1)))+" "+opr+" ") - - // For vals, add "AND $fieldname = ? [OR $fieldname = ?]*" to the query. 
The number - // of fields you add onto the query is equal to vals.length - case ByList(field, orgVals) => - val vals = Set(orgVals :_*).toList // faster than list.removeDuplicates - - if (vals.isEmpty) updatedWhat = updatedWhat + whereOrAnd + " 0 = 1 " - else updatedWhat = updatedWhat + - vals.map(v => MapperRules.quoteColumnName.vend(field._dbColumnNameLC)+ " = ?").mkString(whereOrAnd+" (", " OR ", ")") - - case in: InRaw[A, _] => - updatedWhat = updatedWhat + whereOrAnd + (in.rawSql match { - case null | "" => " 0 = 1 " - case sql => " "+MapperRules.quoteColumnName.vend(in.field._dbColumnNameLC)+" IN ( "+sql+" ) " - }) - - case (in: InThing[A]) => - updatedWhat = updatedWhat + whereOrAnd + - MapperRules.quoteColumnName.vend(in.outerField._dbColumnNameLC)+in.inKeyword+ - "("+in.innerMeta.addEndStuffs(in.innerMeta.addFields("SELECT "+ - in.distinct+ - MapperRules.quoteColumnName.vend(in.innerField._dbColumnNameLC)+ - " FROM "+ - MapperRules.quoteTableName.vend(in.innerMeta._dbTableNameLC)+" ",false, - in.queryParams, conn), in.queryParams, conn)._1+" ) " - - // Executes a subquery with {@code query} - case BySql(query, _, _*) => - updatedWhat = updatedWhat + whereOrAnd + " ( "+ query +" ) " - case _ => - } - addFields(updatedWhat, wav, xs, conn) - } - } - } - - - private[mapper] def setStatementFields(st: PreparedStatement, by: List[QueryParam[A]], curPos: Int, conn: SuperConnection): Int = { - by match { - case Nil => curPos - case Cmp(field, _, Full(value), _, _) :: xs => - setPreparedStatementValue(conn, st, curPos, field, field.targetSQLType, field.convertToJDBCFriendly(value), objectSetterFor(field)) - setStatementFields(st, xs, curPos + 1, conn) - - case ByList(field, orgVals) :: xs => { - val vals = Set(orgVals :_*).toList - var newPos = curPos - vals.foreach(v => { - setPreparedStatementValue(conn, st, newPos, field, field.targetSQLType, field.convertToJDBCFriendly(v), objectSetterFor(field)) - newPos = newPos + 1 - }) - - setStatementFields(st, xs, newPos, conn) - } - - case (in: InThing[A]) :: xs => - val newPos = in.innerMeta.setStatementFields(st, in.queryParams, - curPos, conn) - setStatementFields(st, xs, newPos, conn) - - case BySql(query, who, params @ _*) :: xs => { - params.toList match { - case Nil => setStatementFields(st, xs, curPos, conn) - case List(i: Int) => - st.setInt(curPos, i) - setStatementFields(st, xs, curPos + 1, conn) - case List(lo: Long) => - st.setLong(curPos, lo) - setStatementFields(st, xs, curPos + 1, conn) - case List(s: String) => - st.setString(curPos, s) - setStatementFields(st, xs, curPos + 1, conn) - // Allow specialization of time-related values based on the input parameter - case List(t: java.sql.Timestamp) => - st.setTimestamp(curPos, t) - setStatementFields(st, xs, curPos + 1, conn) - case List(d: java.sql.Date) => - st.setDate(curPos, d) - setStatementFields(st, xs, curPos + 1, conn) - case List(t: java.sql.Time) => - st.setTime(curPos, t) - setStatementFields(st, xs, curPos + 1, conn) - // java.util.Date goes last, since it's a superclass of java.sql.{Date,Time,Timestamp} - case List(d: Date) => - st.setTimestamp(curPos, new java.sql.Timestamp(d.getTime)) - setStatementFields(st, xs, curPos + 1, conn) - case List(field: BaseMappedField) => - setPreparedStatementValue(conn, st, curPos, field, field.targetSQLType, field.jdbcFriendly, objectSetterFor(field)) - setStatementFields(st, xs, curPos + 1, conn) - case p :: ps => - setStatementFields(st, BySql[A](query, who, p) :: BySql[A](query, who, ps: _*) :: xs, curPos, conn) - } - } - case _ :: 
xs => { - setStatementFields(st, xs, curPos, conn) - } - } - } - - // def find(by: QueryParam): Box[A] = find(List(by)) - - private def _addOrdering(in: String, params: List[QueryParam[A]]): String = { - params.flatMap{ - case OrderBy(field, order, nullOrder) => List(MapperRules.quoteColumnName.vend(field._dbColumnNameLC)+" "+order.sql+" "+(nullOrder.map(_.getSql).openOr(""))) - case OrderBySql(sql, _) => List(sql) - case _ => Nil - } match { - case Nil => in - case xs => in + " ORDER BY "+xs.mkString(" , ") - } - } - - protected def addEndStuffs(in: String, params: List[QueryParam[A]], conn: SuperConnection): (String, Box[Long], Box[Long]) = { - val tmp = _addOrdering(in, params) - val max = params.foldRight(Empty.asInstanceOf[Box[Long]]){(a,b) => a match {case MaxRows(n) => Full(n); case _ => b}} - val start = params.foldRight(Empty.asInstanceOf[Box[Long]]){(a,b) => a match {case StartAt(n) => Full(n); case _ => b}} - - if (conn.brokenLimit_?) (tmp, start, max) else { - val ret = (max, start) match { - case (Full(max), Full(start)) => tmp + " LIMIT "+max+" OFFSET "+start - case (Full(max), _) => tmp + " LIMIT "+max - case (_, Full(start)) => tmp + " LIMIT "+conn.driverType.maxSelectLimit+" OFFSET "+start - case _ => tmp - } - (ret, Empty, Empty) - } - } - - def delete_!(toDelete : A): Boolean = - toDelete match { - case x: MetaMapper[_] => throw new MapperException("Cannot delete the MetaMapper singleton") - - case _ => - thePrimaryKeyField.map(im => - DB.use(toDelete.connectionIdentifier) { - conn => - _beforeDelete(toDelete) - val ret = DB.prepareStatement("DELETE FROM "+MapperRules.quoteTableName.vend(_dbTableNameLC) +" WHERE "+im+" = ?", conn) { - st => - val indVal = indexedField(toDelete) - indVal.map{indVal => - setPreparedStatementValue(conn, st, 1, indVal, im, objectSetterFor(indVal)) - st.executeUpdate == 1 - } openOr false - } - _afterDelete(toDelete) - ret - } - ).openOr(false) - } - - - - type AnyBound = T forSome {type T} - - private[mapper] def ??(meth: Method, inst: A) = meth.invoke(inst).asInstanceOf[MappedField[AnyBound, A]] - - def dirty_?(toTest: A): Boolean = mappedFieldList.exists( - mft => - ??(mft.method, toTest).dirty_? - ) - - def indexedField(toSave: A): Box[MappedField[Any, A]] = - thePrimaryKeyField.map(im => ??(mappedColumns(im.toLowerCase), toSave)) - - def saved_?(toSave: A): Boolean = - toSave match { - case x: MetaMapper[_] => throw new MapperException("Cannot test the MetaMapper singleton for saved status") - - case _ => toSave.persisted_? - } - - /** - * This method will update the instance from JSON. It allows for - * attacks from untrusted JSON as it bypasses normal security. By - * default, the method is protected. You can write a proxy method - * to expose the functionality. - */ - protected def updateFromJSON_!(toUpdate: A, json: JsonAST.JObject): A = { - import JsonAST._ - - toUpdate.runSafe { - - for { - field <- json.obj - meth <- _mappedFields.get(field.name) - } { - val f = ??(meth, toUpdate) - f.setFromAny(field.value) - } - } - - toUpdate - } - - /** - * This method will encode the instance as JSON. It may reveal - * data in fields that might otherwise be proprietary. It should - * be used with caution and only exposed as a public method - * after a security review. - */ - protected def encodeAsJSON_! (toEncode: A): JsonAST.JObject = { - toEncode.runSafe { - JsonAST.JObject(JsonAST.JField("$persisted", - JsonAST.JBool(toEncode.persisted_?)) :: - this.mappedFieldList. 
- flatMap(fh => ??(fh.method, toEncode).asJsonField)) - } - } - - /** - * Decode the fields from a JSON Object. Should the fields be marked as dirty? - */ - protected def decodeFromJSON_!(json: JsonAST.JObject, markFieldsAsDirty: Boolean): A = { - val ret: A = createInstance - import JsonAST._ - - ret.runSafe { - json.findField { - case JField("$persisted", JBool(per)) => - ret.persisted_? = per - true - case _ => false - } - - for { - field <- json.obj - meth <- _mappedFields.get(field.name) - } { - val f = ??(meth, ret) - f.setFromAny(field.value) - if (!markFieldsAsDirty) f.resetDirty - } - } - - ret - } - - - def whatToSet(toSave : A) : String = { - mappedColumns.filter{c => ??(c._2, toSave).dirty_?}.map{c => c._1 + " = ?"}.toList.mkString("", ",", "") - } - - /** - * Run the list of field validations, etc. This is the raw validation, - * without the notifications. This method can be over-ridden. - */ - protected def runValidationList(toValidate: A): List[FieldError] = - mappedFieldList.flatMap(f => ??(f.method, toValidate).validate) ::: - validation.flatMap{ - case pf: PartialFunction[A, List[FieldError]] => - if (pf.isDefinedAt(toValidate)) pf(toValidate) - else Nil - - case f => f(toValidate) - } - - final def validate(toValidate: A): List[FieldError] = { - logger.debug("Validating dbName=%s, entity=%s".format(dbName, toValidate)) - val saved_? = this.saved_?(toValidate) - _beforeValidation(toValidate) - if (saved_?) _beforeValidationOnUpdate(toValidate) else _beforeValidationOnCreate(toValidate) - - val ret: List[FieldError] = runValidationList(toValidate) - - _afterValidation(toValidate) - if (saved_?) _afterValidationOnUpdate(toValidate) else _afterValidationOnCreate(toValidate) - - logger.debug("Validated dbName=%s, entity=%s, result=%s".format(dbName, toValidate, ret)) - - ret - } - - val elemName = getClass.getSuperclass.getName.split("\\.").toList.last - - def toXml(what: A): Elem = - Elem(null,elemName, - mappedFieldList.foldRight[MetaData](Null) {(p, md) => val fld = ??(p.method, what) - new UnprefixedAttribute(p.name, Text(fld.toString), md)} - ,TopScope, true) - - /** - * Returns true if none of the fields are dirty - */ - def clean_?(toCheck: A): Boolean = mappedColumns.foldLeft(true)((bool, ptr) => bool && !(??(ptr._2, toCheck).dirty_?)) - - /** - * Sets a prepared statement value based on the given MappedField's value - * and column name. This delegates to the BaseMappedField overload of - * setPreparedStatementValue by retrieving the necessary values. - * - * @param conn The connection for this prepared statement - * @param st The prepared statement - * @param index The index for this prepared statement value - * @param field The field corresponding to this prepared statement value - * @param columnName The column name to use to retrieve the type and value - * @param setObj A function that we can delegate to for setObject calls - */ - private def setPreparedStatementValue(conn: SuperConnection, - st: PreparedStatement, - index: Int, - field: MappedField[_, A], - columnName : String, - setObj : (PreparedStatement, Int, AnyRef, Int) => Unit): Unit = { - setPreparedStatementValue(conn, st, index, field, - field.targetSQLType(columnName), - field.jdbcFriendly(columnName), - setObj) - } - - /** - * Sets a prepared statement value based on the given BaseMappedField's type and value. This - * allows us to do special handling based on the type in a central location. 
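The Scaladoc above notes that `updateFromJSON_!` and `encodeAsJSON_!` are deliberately `protected` and should only be exposed through a proxy after a security review. A minimal sketch of that proxy pattern, assuming a hypothetical `Note` model (only the two protected helpers and the `$persisted` handling come from the removed code above):

```scala
import net.liftweb.mapper._
import net.liftweb.json.JsonAST

class Note extends LongKeyedMapper[Note] with IdPK {
  def getSingleton = Note
  object title extends MappedString(this, 128)
  object body  extends MappedString(this, 2048)
}

object Note extends Note with LongKeyedMetaMapper[Note] {
  // Expose the protected JSON helpers only after your own access checks;
  // encodeAsJSON_! emits the "$persisted" flag that decodeFromJSON_! reads back.
  def toJson(note: Note): JsonAST.JObject = encodeAsJSON_!(note)

  def applyJson(note: Note, json: JsonAST.JObject): Note =
    updateFromJSON_!(note, json)
}
```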
- * - * @param conn The connection for this prepared statement - * @param st The prepared statement - * @param index The index for this prepared statement value - * @param field The field corresponding to this prepared statement value - * @param columnType The JDBC SQL Type for this value - * @param value The value itself - * @param setObj A function that we can delegate to for setObject calls - */ - private def setPreparedStatementValue(conn: SuperConnection, - st: PreparedStatement, - index: Int, - field: BaseMappedField, - columnType : Int, - value : Object, - setObj : (PreparedStatement, Int, AnyRef, Int) => Unit): Unit = { - // Remap the type if the driver wants - val mappedColumnType = conn.driverType.columnTypeMap(columnType) - - // We generally use setObject for everything, but we've found some broken JDBC drivers - // which has prompted us to use type-specific handling for certain types - mappedColumnType match { - case Types.VARCHAR => - // Set a string with a simple guard for null values - st.setString(index, if (value ne null) value.toString else value.asInstanceOf[String]) - - // Sybase SQL Anywhere and DB2 choke on using setObject for boolean data - case Types.BOOLEAN => value match { - case intData : java.lang.Integer => st.setBoolean(index, intData.intValue != 0) - case b : java.lang.Boolean => st.setBoolean(index, b.booleanValue) - // If we can't figure it out, maybe the driver can - case other => setObj(st, index, other, mappedColumnType) - } - - // In all other cases, delegate to the driver - case _ => setObj(st, index, value, mappedColumnType) - } - } - - /** - * This is a utility method to simplify using setObject. It's intended use is to - * generate a setObject proxy so that the intermediate code doesn't need to be aware - * of drivers that ignore column types. - */ - private def objectSetterFor(field : BaseMappedField) = { - (st : PreparedStatement, index : Int, value : AnyRef, columnType : Int) => { - if (field.dbIgnoreSQLType_?) { - st.setObject(index, value) - } else { - st.setObject(index, value, columnType) - } - } - } - - def save(toSave: A): Boolean = { - toSave match { - case x: MetaMapper[_] => throw new MapperException("Cannot save the MetaMapper singleton") - - case _ => - logger.debug("Saving dbName=%s, entity=%s".format(dbName, toSave)) - /** - * @return true if there was exactly one row in the result set, false if not. - */ - def runAppliers(rs: ResultSet) : Boolean = { - try { - if (rs.next) { - val meta = rs.getMetaData - toSave.runSafe { - for { - indexMap <- thePrimaryKeyField - auto <- primaryKeyAutogenerated if auto - } { - findApplier(indexMap, rs.getObject(1)) match { - case Full(ap) => ap.apply(toSave, rs.getObject(1)) - case _ => - } - } - } - !rs.next - } else false - } finally { - rs.close - } - } - - /** - * Checks whether the result set has exactly one row. 
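For orientation, the INSERT/UPDATE plumbing in `save` above and below is what backs the usual create, mutate, save flow: `persisted_?` selects the branch, `whatToSet` and `dirty_?` restrict the UPDATE to changed columns, and `runAppliers` reads an auto-generated primary key back after an INSERT. A minimal sketch against a hypothetical `Person` model (reused by later sketches in this section), assuming a JDBC connection manager has already been configured elsewhere, e.g. via `DB.defineConnectionManager` in Boot:

```scala
import net.liftweb.mapper._

class Person extends LongKeyedMapper[Person] with IdPK {
  def getSingleton = Person
  object name   extends MappedString(this, 64)
  object role   extends MappedString(this, 32)
  object active extends MappedBoolean(this)
}
object Person extends Person with LongKeyedMetaMapper[Person]

object SaveFlowSketch {
  def demo(): Unit = {
    // First save: persisted_? is false, so the INSERT branch runs and the
    // auto-generated id is applied back onto the instance.
    val p = Person.create.name("Ada").active(true)
    Person.save(p)

    // Second save: persisted_? is true and only `name` is dirty, so the
    // UPDATE statement touches just that column.
    p.name("Ada L.")
    Person.save(p)
  }
}
```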
- */ - def hasOneRow(rs: ResultSet) : Boolean = { - try { - val firstRow = rs.next - (firstRow && !rs.next) - } finally { - rs.close - } - } - - if (saved_?(toSave) && clean_?(toSave)) true else { - val ret = DB.use(toSave.connectionIdentifier) { - conn => - _beforeSave(toSave) - val ret = if (saved_?(toSave)) { - _beforeUpdate(toSave) - val ret: Boolean = if (!dirty_?(toSave)) true else { - val ret: Boolean = DB.prepareStatement("UPDATE "+MapperRules.quoteTableName.vend(_dbTableNameLC)+" SET "+whatToSet(toSave)+" WHERE "+thePrimaryKeyField.openOrThrowException("Cross your fingers") +" = ?", conn) { - st => - var colNum = 1 - - // Here we apply each column's value to the prepared statement - for (col <- mappedColumns) { - val colVal = ??(col._2, toSave) - if (!columnPrimaryKey_?(col._1) && colVal.dirty_?) { - setPreparedStatementValue(conn, st, colNum, colVal, col._1, objectSetterFor(colVal)) - colNum = colNum + 1 - } - } - - for { - indVal <- indexedField(toSave) - indexColumnName <- thePrimaryKeyField - } { - setPreparedStatementValue(conn, st, colNum, indVal, indexColumnName, objectSetterFor(indVal)) - } - - st.executeUpdate - true - } - ret - } - _afterUpdate(toSave) - ret - } else { - _beforeCreate(toSave) - - val query = "INSERT INTO "+MapperRules.quoteTableName.vend(_dbTableNameLC)+" ("+columnNamesForInsert+") VALUES ("+columnQueriesForInsert+")" - - def prepStat(st : PreparedStatement): Unit = { - var colNum = 1 - - for (col <- mappedColumns) { - if (!columnPrimaryKey_?(col._1)) { - val colVal = col._2.invoke(toSave).asInstanceOf[MappedField[AnyRef, A]] - setPreparedStatementValue(conn, st, colNum, colVal, col._1, objectSetterFor(colVal)) - colNum = colNum + 1 - } - } - } - - // Figure out which columns are auto-generated - val generatedColumns = (mappedColumnInfo.filter(_._2.dbAutogenerated_?).map(_._1)).toList - - val ret = conn.driverType.performInsert(conn, query, prepStat, MapperRules.quoteTableName.vend(_dbTableNameLC), generatedColumns) { - case Right(count) => count == 1 - case Left(rs) => runAppliers(rs) - } - - _afterCreate(toSave) - toSave.persisted_? = true - ret - } - _afterSave(toSave) - ret - } - - // clear dirty and get rid of history - for (col <- mappedColumns) { - val colVal = ??(col._2, toSave) - if (!columnPrimaryKey_?(col._1) && colVal.dirty_?) { - colVal.resetDirty - colVal.doneWithSave - } - } - - ret - } - } - } - - /** - * This method returns true if the named column is the primary key and - * it is autogenerated - */ - def columnPrimaryKey_?(name: String) = mappedColumnInfo.get(name).map(c => (c.dbPrimaryKey_? 
&& c.dbAutogenerated_?)) getOrElse false - - def createInstances(dbId: ConnectionIdentifier, rs: ResultSet, start: Box[Long], omax: Box[Long]) : List[A] = createInstances(dbId, rs, start, omax, v => Full(v)) - - - def createInstances[T](dbId: ConnectionIdentifier, rs: ResultSet, start: Box[Long], omax: Box[Long], f: A => Box[T]) : List[T] = { - var ret = new ListBuffer[T] - val bm = buildMapper(rs) - var pos = (start openOr 0L) * -1L - val max = omax openOr java.lang.Long.MAX_VALUE - - while (pos < max && rs.next()) { - if (pos >= 0L) { - f(createInstance(dbId, rs, bm)).foreach(v => ret += v) - } - pos = pos + 1L - } - - ret.toList - } - - def appendFieldToStrings(in: A): String = mappedFieldList.map(p => ??(p.method, in).asString).mkString(",") - - private val columnNameToMappee = new HashMap[String, Box[(ResultSet, Int, A) => Unit]] - - def buildMapper(rs: ResultSet): List[Box[(ResultSet,Int,A) => Unit]] = columnNameToMappee.synchronized { - val meta = rs.getMetaData - val colCnt = meta.getColumnCount - for { - pos <- (1 to colCnt).toList - colName = meta.getColumnName(pos).toLowerCase - } yield - columnNameToMappee.get(colName) match { - case None => - val colType = meta.getColumnType(pos) - - Box(mappedColumns.get(colName)).flatMap{ - fieldInfo => - val setTo = { - val tField = fieldInfo.invoke(this).asInstanceOf[MappedField[AnyRef, A]] - - Some(colType match { - case Types.INTEGER | Types.BIGINT => { - val bsl = tField.buildSetLongValue(fieldInfo, colName) - (rs: ResultSet, pos: Int, objInst: A) => bsl(objInst, rs.getLong(pos), rs.wasNull)} - case Types.VARCHAR => { - val bsl = tField.buildSetStringValue(fieldInfo, colName) - (rs: ResultSet, pos: Int, objInst: A) => bsl(objInst, rs.getString(pos))} - case Types.DATE | Types.TIME | Types.TIMESTAMP => - val bsl = tField.buildSetDateValue(fieldInfo, colName) - (rs: ResultSet, pos: Int, objInst: A) => bsl(objInst, rs.getTimestamp(pos)) - case Types.BOOLEAN | Types.BIT =>{ - val bsl = tField.buildSetBooleanValue(fieldInfo, colName) - (rs: ResultSet, pos: Int, objInst: A) => bsl(objInst, rs.getBoolean(pos), rs.wasNull)} - case _ => { - (rs: ResultSet, pos: Int, objInst: A) => { - val res = rs.getObject(pos) - findApplier(colName, res).foreach(f => f(objInst, res)) - } - } - }) - } - - columnNameToMappee(colName) = Box(setTo) - setTo - } - - case Some(of) => of - } - } - - def createInstance(dbId: ConnectionIdentifier, rs : ResultSet, mapFuncs: List[Box[(ResultSet,Int,A) => Unit]]) : A = { - val ret: A = createInstance.connectionIdentifier(dbId) - - ret.persisted_? = true - - for { - (fb, pos) <- mapFuncs.zipWithIndex - f <- fb - } f(rs, pos + 1, ret) - - ret - } - - protected def findApplier(name: String, inst: AnyRef): Box[((A, AnyRef) => Unit)] = synchronized { - val clz = inst match { - case null => null - case _ => inst.getClass.asInstanceOf[Class[(C forSome {type C})]] - } - val look = (name.toLowerCase, if (clz ne null) Full(clz) else Empty) - Box(mappedAppliers.get(look) orElse { - val newFunc = createApplier(name, inst) - mappedAppliers(look) = newFunc - Some(newFunc) - }) - } - - - private def createApplier(name : String, inst : AnyRef /*, clz : Class*/) : (A, AnyRef) => Unit = { - val accessor = mappedColumns.get(name) orElse mappedColumns.get(name.toLowerCase) - if ((accessor eq null) || accessor == None) { - null - } else { - (accessor.get.invoke(this).asInstanceOf[MappedField[AnyRef, A]]).buildSetActualValue(accessor.get, inst, name) - } - } - - /** - * A set of CssSels that can be used to bind this MetaMapper's fields. 
- * - * Elements with a class matching the field name are mapped to the NodeSeq - * produced by the fieldHtml function that is passed in. - * - * So, with a MetaMapper that has three fields, name, date, and description, - * the resulting CSS selector transforms are: - * - * {{{ - * Seq( - * ".name" #> fieldHtml(-name field-), - * ".date" #> fieldHtml(-date field-), - * ".description" #> fieldHtml(-description field-) - * ) - * }}} - * - * Above, -name field-, -date field-, and -description field- refer to the - * actual MappedField objects for those fields. - */ - def fieldMapperTransforms(fieldHtml: (BaseOwnedMappedField[A]=>NodeSeq), mappedObject: A): Seq[CssSel] = { - mappedFieldList.map { field => - s".${field.name}" #> fieldHtml(??(field.method, mappedObject)) - } - } - - private[mapper] def checkFieldNames(in: A): Unit = { - mappedFieldList.foreach(f => - ??(f.method, in) match { - case field if (field.i_name_! eq null) => field.setName_!(f.name) - case _ => - }) - } - - /** - * Get a field by the field name - * @param fieldName -- the name of the field to get - * @param actual -- the instance to get the field on - * - * @return Box[The Field] (Empty if the field is not found) - */ - def fieldByName[T](fieldName: String, actual: A): Box[MappedField[T, A]] = - Box(_mappedFields.get(fieldName)). - map(meth => ??(meth, actual).asInstanceOf[MappedField[T,A]]) - - /** - * A partial function that takes an instance of A and a field name and returns the mapped field - */ - lazy val fieldMatcher: PartialFunction[(A, String), MappedField[Any, A]] = { - case (actual, fieldName) if _mappedFields.contains(fieldName) => fieldByName[Any](fieldName, actual).openOrThrowException("we know this is defined") - } - - def createInstance: A = rootClass.newInstance.asInstanceOf[A] - - def fieldOrder: List[BaseOwnedMappedField[A]] = Nil - - protected val rootClass = this.getClass.getSuperclass - - private val mappedAppliers = new HashMap[(String, Box[Class[(C forSome {type C})]]), (A, AnyRef) => Unit]; - - private val _mappedFields = new HashMap[String, Method]; - - private[mapper] var mappedFieldList: List[FieldHolder] = Nil; // new Array[Triple[String, Method, MappedField[Any,Any]]](); - - private var mappedCallbacks: List[(String, Method)] = Nil - - private var mappedColumns: SortedMap[String, Method] = TreeMap() - - private var mappedColumnInfo: SortedMap[String, MappedField[AnyRef, A]] = TreeMap() - - - /** - * The primary key column. This used to be indexMap - */ - private var thePrimaryKeyField: Box[String] = Empty - - /** - * If the primary key field is autogenerated, this will be Full(true) - */ - private var primaryKeyAutogenerated: Box[Boolean] = Empty - - this.runSafe { - logger.debug("Initializing MetaMapper for %s".format(internalTableName_$_$)) - val tArray = new ListBuffer[FieldHolder] - def isLifecycle(m: Method) = classOf[LifecycleCallbacks].isAssignableFrom(m.getReturnType) - - val mapperAccessMethods = new FieldFinder[MappedField[_,_]](this, logger).accessorMethods - - mappedCallbacks = mapperAccessMethods.filter(isLifecycle).map(v => (v.getName, v)) - - for (v <- mapperAccessMethods) { - v.invoke(this) match { - case untypedMf: MappedField[_, _] if !untypedMf.ignoreField_? 
=> - val mf = untypedMf.asInstanceOf[MappedField[AnyRef,A]] - - mf.setName_!(v.getName) - tArray += FieldHolder(mf.name, v, mf) - for (colName <- mf.dbColumnNames(v.getName).map(MapperRules.quoteColumnName.vend).map(_.toLowerCase)) { - mappedColumnInfo += colName -> mf - mappedColumns += colName -> v - } - if (mf.dbPrimaryKey_?) { - thePrimaryKeyField = Full(MapperRules.quoteColumnName.vend(mf._dbColumnNameLC)) - primaryKeyAutogenerated = Full(mf.dbAutogenerated_?) - } - - case _ => - } - } - - def findPos(in: AnyRef): Box[Int] = { - tArray.toList.zipWithIndex.filter(mft => in eq mft._1.field) match { - case Nil => Empty - case x :: xs => Full(x._2) - } - } - - val resArray = new ListBuffer[FieldHolder]; - - fieldOrder.foreach(f => findPos(f).foreach(pos => resArray += tArray.remove(pos))) - - tArray.foreach(mft => resArray += mft) - - mappedFieldList = resArray.toList - mappedFieldList.foreach(ae => _mappedFields(ae.name) = ae.method) - - logger.trace("Mapped fields for %s: %s".format(dbName, mappedFieldList.map(_.name).mkString(","))) - } - - val columnNamesForInsert = (mappedColumnInfo.filter(c => !(c._2.dbPrimaryKey_? && c._2.dbAutogenerated_?)).map(_._1)).toList.mkString(",") - - val columnQueriesForInsert = { - (mappedColumnInfo.filter(c => !(c._2.dbPrimaryKey_? && c._2.dbAutogenerated_?)).map(p => "?")).toList.mkString(",") - } - - private def fixTableName(name: String) = { - val tableName = MapperRules.tableName(connectionIdentifier,clean(name)) - - if (DB.reservedWords.contains(tableName.toLowerCase)) - tableName+"_t" - else - tableName - } - - private def internalTableName_$_$ = getClass.getSuperclass.getName.split("\\.").toList.last; - - /** - * This function converts a header name into the appropriate - * XHTML format for displaying across the headers of a - * formatted block. The default is <th> for use - * in XHTML tables. If you change this function, the change - * will be used for this MetaMapper unless you override the - * htmlHeades method - */ - var displayNameToHeaderElement: String => NodeSeq = MapperRules.displayNameToHeaderElement - - def htmlHeaders: NodeSeq = - mappedFieldList.filter(_.field.dbDisplay_?). - flatMap(mft => displayNameToHeaderElement(mft.field.displayName)) - - /** - * The mapped fields - */ - lazy val mappedFields: Seq[BaseMappedField] = mappedFieldList.map(f => f.field) - - /** - * the mapped fields as MappedField rather than BaseMappedField - */ - lazy val mappedFieldsForModel: List[MappedField[_, A]] = mappedFieldList.map(_.field) - - /** - * This function converts an element into the appropriate - * XHTML format for displaying across a line - * formatted block. The default is <td> for use - * in XHTML tables. If you change this function, the change - * will be used for this MetaMapper unless you override the - * doHtmlLine method. - */ - var displayFieldAsLineElement: NodeSeq => NodeSeq = - MapperRules.displayFieldAsLineElement - - - def doHtmlLine(toLine: A): NodeSeq = - mappedFieldList.filter(_.field.dbDisplay_?). - flatMap(mft => displayFieldAsLineElement(??(mft.method, toLine).asHtml)) - - def asJs(actual: A): JsExp = { - JE.JsObj(("$lift_class", JE.Str(dbTableName)) :: mappedFieldList. 
- map(f => ??(f.method, actual)).filter(_.renderJs_?).flatMap(_.asJs).toList ::: - actual.suplementalJs(Empty) :_*) - } - - /** - * Get a list of all the fields - * @return a list of all the fields - */ - lazy val doAllFieldNames: Seq[(String, SourceFieldMetadata)] = - mappedFieldList.map(fh => fh.name.toLowerCase -> fh.field.sourceInfoMetadata()) - - /** - * Get a list of all the fields as a map - * @return a list of all the fields - */ - lazy val fieldNamesAsMap: Map[String, SourceFieldMetadata] = Map(doAllFieldNames :_*) - - def asHtml(toLine: A): NodeSeq = - Text(internalTableName_$_$) :: Text("={ ") :: - (for { - mft <- mappedFieldList if mft.field.dbDisplay_? - field = ??(mft.method, toLine) - } yield { - {field.displayName}={field.asHtml}  - }) ::: List(Text(" }")) - - - /** - * This function converts a name and form for a given field in the - * model to XHTML for presentation in the browser. By - * default, a table row ( <tr> ) is presented, but - * you can change the function to display something else. - */ - var formatFormElement: (NodeSeq, NodeSeq) => NodeSeq = - MapperRules.formatFormElement - - def formatFormLine(displayName: NodeSeq, form: NodeSeq): NodeSeq = - formatFormElement(displayName, form) - - def toForm(toMap: A): NodeSeq = - mappedFieldList.map(e => ??(e.method, toMap)). - filter(f => f.dbDisplay_? && f.dbIncludeInForm_?).flatMap ( - field => - field.toForm.toList. - flatMap(form => formatFormLine(Text(field.displayName), form)) - ) - - /** - * Present the model as a HTML using the same formatting as toForm - * - * @param toMap the instance to generate the HTML for - * - * @return the html view of the model - */ - def toHtml(toMap: A): NodeSeq = - mappedFieldList.map(e => ??(e.method, toMap)). - filter(f => f.dbDisplay_?).flatMap ( - field => - formatFormLine(Text(field.displayName), field.asHtml) - ) - - /** - * Get the fields (in order) for displaying a form - */ - def formFields(toMap: A): List[MappedField[_, A]] = - mappedFieldList.map(e => ??(e.method, toMap)).filter(f => f.dbDisplay_? && - f.dbIncludeInForm_?) - - - /** - * map the fields titles and forms to generate a list - * @param func called with displayHtml, fieldId, form - */ - def mapFieldTitleForm[T](toMap: A, - func: (NodeSeq, Box[NodeSeq], NodeSeq) => T): List[T] = - formFields(toMap).flatMap(field => field.toForm. - map(fo => func(field.displayHtml, field.fieldId, fo))) - - - /** - * flat map the fields titles and forms to generate a list - * @param func called with displayHtml, fieldId, form - */ - def flatMapFieldTitleForm[T](toMap: A, - func: (NodeSeq, Box[NodeSeq], NodeSeq) => Seq[T]): List[T] = - formFields(toMap).flatMap(field => field.toForm.toList. - flatMap(fo => func(field.displayHtml, - field.fieldId, fo))) - -/** - * flat map the fields titles and forms to generate a list - * @param func called with displayHtml, fieldId, form - */ - def flatMapFieldTitleForm2[T](toMap: A, - func: (NodeSeq, MappedField[_, A], NodeSeq) => Seq[T]): List[T] = - formFields(toMap).flatMap(field => field.toForm.toList. 
- flatMap(fo => func(field.displayHtml, - field, fo))) - - - /** - * Given the prototype field (the field on the Singleton), get the field from the instance - * @param actual -- the Mapper instance - * @param protoField -- the field from the MetaMapper (Singleton) - * - * @return the field from the actual object - */ - def getActualField[T](actual: A, protoField: MappedField[T, A]): MappedField[T, A] = - ??(_mappedFields(protoField.name), actual).asInstanceOf[MappedField[T,A]] - - - /** - * Given the prototype field (the field on the Singleton), get the field from the instance - * @param actual -- the Mapper instance - * @param protoField -- the field from the MetaMapper (Singleton) - * - * @return the field from the actual object - */ - def getActualBaseField(actual: A, protoField: BaseOwnedMappedField[A]): BaseOwnedMappedField[A] = - ??(_mappedFields(protoField.name), actual) // .asInstanceOf[MappedField[T,A]] - - /** - * The name of the database table. Override this method if you - * want to change the table to something other than the name of the Mapper class - */ - def dbTableName = internal_dbTableName - - /** - * The name of the mapped object - */ - override def dbName: String = internalTableName_$_$ - - /** - * The table name, to lower case... ensures that it works on all DBs - */ - final def _dbTableNameLC = { - val name = dbTableName - - val conn = DB.currentConnection - if (conn.isDefined) { - val rc = conn.openOrThrowException("We just checked that this is a Full Box") - if (rc.metaData.storesMixedCaseIdentifiers) name - else name.toLowerCase - } else name - } // dbTableName.toLowerCase - - private[mapper] lazy val internal_dbTableName = fixTableName(internalTableName_$_$) - - private def setupInstanceForPostCommit(inst: A): Unit = { - afterCommit match { - case Nil => - // If there's no post-commit functions, then don't - // record (and retain) the instance - - case pcf => - if (!inst.addedPostCommit) { - DB.appendPostTransaction(inst.connectionIdentifier, dontUse => (clearPCFunc :: pcf).foreach(_(inst))) - inst.addedPostCommit = true - } - } - } - - private def eachField(what: A, toRun: List[(A) => Any])(f: (LifecycleCallbacks) => Any): Unit = { - mappedCallbacks.foreach (e => - e._2.invoke(what) match { - case lccb: LifecycleCallbacks => f(lccb) - case _ => - }) - toRun.foreach{tf => tf(what)} - } - private def _beforeValidation(what: A): Unit = {setupInstanceForPostCommit(what); eachField(what, beforeValidation) { field => field.beforeValidation} } - private def _beforeValidationOnCreate(what: A): Unit = {eachField(what, beforeValidationOnCreate) { field => field.beforeValidationOnCreate} } - private def _beforeValidationOnUpdate(what: A): Unit = {eachField(what, beforeValidationOnUpdate) { field => field.beforeValidationOnUpdate} } - private def _afterValidation(what: A): Unit = { eachField(what, afterValidation) { field => field.afterValidation} } - private def _afterValidationOnCreate(what: A): Unit = {eachField(what, afterValidationOnCreate) { field => field.afterValidationOnCreate} } - private def _afterValidationOnUpdate(what: A): Unit = {eachField(what, afterValidationOnUpdate) { field => field.afterValidationOnUpdate} } - - private def _beforeSave(what: A): Unit = {setupInstanceForPostCommit(what); eachField(what, beforeSave) { field => field.beforeSave} } - private def _beforeCreate(what: A): Unit = { eachField(what, beforeCreate) { field => field.beforeCreate} } - private def _beforeUpdate(what: A): Unit = { eachField(what, beforeUpdate) { field => 
field.beforeUpdate} } - - private def _afterSave(what: A): Unit = {eachField(what, afterSave) { field => field.afterSave} } - private def _afterCreate(what: A): Unit = {eachField(what, afterCreate) { field => field.afterCreate} } - private def _afterUpdate(what: A): Unit = {eachField(what, afterUpdate) { field => field.afterUpdate} } - - private def _beforeDelete(what: A): Unit = {setupInstanceForPostCommit(what); eachField(what, beforeDelete) { field => field.beforeDelete} } - private def _afterDelete(what: A): Unit = {eachField(what, afterDelete) { field => field.afterDelete} } - - def beforeSchemifier: Unit = {} - def afterSchemifier: Unit = {} - - def dbIndexes: List[BaseIndex[A]] = Nil - - implicit def fieldToItem[T](in: MappedField[T, A]): IndexItem[A] = IndexField(in) - implicit def boundedFieldToItem(in: (MappedField[String, A], Int)): BoundedIndexField[A] = BoundedIndexField(in._1, in._2) - - // protected def getField(inst : Mapper[A], meth : Method) = meth.invoke(inst, null).asInstanceOf[MappedField[AnyRef,A]] -} - -object OprEnum extends Enumeration { - val Eql = Value(1, "=") - val <> = Value(2, "<>") - val >= = Value(3, ">=") - val != = <> - val <= = Value(4, "<=") - val > = Value(5, ">") - val < = Value(6, "<") - val IsNull = Value(7, "IS NULL") - val IsNotNull = Value(8, "IS NOT NULL") - val Like = Value(9, "LIKE") - val NotLike = Value(10, "NOT LIKE") -} - -sealed trait BaseIndex[A <: Mapper[A]] { - def columns: Seq[IndexItem[A]] -} - -final case class Index[A <: Mapper[A]](columns: List[IndexItem[A]]) extends BaseIndex[A] // (columns :_*) - -object Index { - def apply[A <: Mapper[A]](cols: IndexItem[A] *): Index[A] = new Index[A](cols.toList) -} - -/** - * Represents a unique index on the given columns - */ -final case class UniqueIndex[A <: Mapper[A]](columns: List[IndexItem[A]]) extends BaseIndex[A] // (uniqueColumns : _*) - -object UniqueIndex { - def apply[A <: Mapper[A]](cols: IndexItem[A] *): UniqueIndex[A] = new UniqueIndex[A](cols.toList) -} - -/** - * Represents a generic user-specified index on the given columns. The user provides a function to generate the SQL needed to create - * the index based on the table and columns. Validation is required since this is raw SQL being run on the database server. 
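For reference, the `dbIndexes` hook and the `Index`, `UniqueIndex`, and `BoundedIndexField` types defined here were typically wired up on a model's singleton, where the implicit `fieldToItem` and `boundedFieldToItem` conversions lift plain fields into `IndexItem`s. A minimal sketch with a hypothetical `Account` model:

```scala
import net.liftweb.mapper._

class Account extends LongKeyedMapper[Account] with IdPK {
  def getSingleton = Account
  object email extends MappedString(this, 128)
  object owner extends MappedString(this, 64)
}

object Account extends Account with LongKeyedMetaMapper[Account] {
  // UniqueIndex(email) relies on fieldToItem; Index((owner, 16)) relies on
  // boundedFieldToItem to index only the first 16 characters of the column.
  override def dbIndexes =
    UniqueIndex(email) :: Index((owner, 16)) :: Nil
}
```

Schemifier consults this list when it creates or updates the table's indexes.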
- */ -final case class GenericIndex[A <: Mapper[A]](createFunc: (String,List[String]) => String, validated: IHaveValidatedThisSQL, columns: List[IndexItem[A]]) extends BaseIndex[A] // (indexColumns : _*) - -object GenericIndex { - def apply[A <: Mapper[A]](createFunc: (String,List[String]) => String, validated: IHaveValidatedThisSQL, cols: IndexItem[A] *): GenericIndex[A] = - new GenericIndex[A](createFunc, validated, cols.toList) -} - -abstract class IndexItem[A <: Mapper[A]] { - def field: BaseMappedField - def indexDesc: String -} - -case class IndexField[A <: Mapper[A], T](field: MappedField[T, A]) extends IndexItem[A] { - def indexDesc: String = MapperRules.quoteColumnName.vend(field._dbColumnNameLC) -} -case class BoundedIndexField[A <: Mapper[A]](field: MappedField[String, A], len: Int) extends IndexItem[A] { - def indexDesc: String = MapperRules.quoteColumnName.vend(field._dbColumnNameLC)+"("+len+")" -} - -sealed trait QueryParam[O<:Mapper[O]] -final case class Cmp[O<:Mapper[O], T](field: MappedField[T,O], opr: OprEnum.Value, value: Box[T], - otherField: Box[MappedField[T, O]], dbFunc: Box[String]) extends QueryParam[O] - -final case class OrderBy[O<:Mapper[O], T](field: MappedField[T,O], - order: AscOrDesc, - nullOrder: Box[NullOrder]) extends QueryParam[O] - -sealed trait NullOrder { - def getSql: String -} -case object NullsFirst extends NullOrder { - def getSql: String = " NULLS FIRST " -} -case object NullsLast extends NullOrder { - def getSql: String = " NULLS LAST " -} - -object OrderBy { - def apply[O <: Mapper[O], T](field: MappedField[T, O], - order: AscOrDesc): OrderBy[O, T] = - new OrderBy[O, T](field, order, Empty) - - def apply[O <: Mapper[O], T](field: MappedField[T, O], - order: AscOrDesc, - no: NullOrder): OrderBy[O, T] = - new OrderBy[O, T](field, order, Full(no)) -} - - -trait AscOrDesc { - def sql: String -} - -case object Ascending extends AscOrDesc { - def sql: String = " ASC " -} - -case object Descending extends AscOrDesc { - def sql: String = " DESC " -} - -final case class Distinct[O <: Mapper[O]]() extends QueryParam[O] - -final case class OrderBySql[O <: Mapper[O]](sql: String, - checkedBy: IHaveValidatedThisSQL) extends QueryParam[O] - -final case class ByList[O<:Mapper[O], T](field: MappedField[T,O], vals: Seq[T]) extends QueryParam[O] -/** - * Represents a query criterion using a parameterized SQL string. Parameters are - * substituted in order. For Date/Time types, passing a java.util.Date will result in a - * Timestamp parameter. If you want a specific SQL Date/Time type, use the corresponding - * java.sql.Date, java.sql.Time, or java.sql.Timestamp classes. 
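Taken together, the `QueryParam` types in this file form the query DSL that `addFields`, `setStatementFields`, and `addEndStuffs` above translate into WHERE, ORDER BY, and LIMIT/OFFSET clauses. A sketch of typical usage, reusing the hypothetical `Person` model from the earlier sketch; `findAll` is the standard `MetaMapper` finder these parameters feed into, and the `last_login` column in the `BySql` fragment is hypothetical:

```scala
import net.liftweb.mapper._

object QuerySketch {
  // Roughly: WHERE active = ? AND (role = ? OR role = ?)
  // ORDER BY name ASC NULLS LAST LIMIT 10 OFFSET 20 (exact SQL varies by driver).
  def activeStaff: List[Person] =
    Person.findAll(
      By(Person.active, true),
      ByList(Person.role, List("admin", "editor")),
      OrderBy(Person.name, Ascending, NullsLast),
      StartAt(20),
      MaxRows(10)
    )

  // BySql takes an IHaveValidatedThisSQL marker (defined later in this file)
  // acknowledging that the raw SQL fragment has been reviewed.
  def recentlyActive: List[Person] =
    Person.findAll(
      BySql("last_login > ?",
            IHaveValidatedThisSQL("reviewer", "2024-01-01"),
            new java.sql.Timestamp(System.currentTimeMillis - 86400000L)), // one day ago
      OrderBy(Person.name, Descending)
    )
}
```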
- */ -final case class BySql[O<:Mapper[O]](query: String, - checkedBy: IHaveValidatedThisSQL, - params: Any*) extends QueryParam[O] -final case class MaxRows[O<:Mapper[O]](max: Long) extends QueryParam[O] -final case class StartAt[O<:Mapper[O]](start: Long) extends QueryParam[O] -final case class Ignore[O <: Mapper[O]]() extends QueryParam[O] - -sealed abstract class InThing[OuterType <: Mapper[OuterType]] extends QueryParam[OuterType] { - type JoinType - type InnerType <: Mapper[InnerType] - - def outerField: MappedField[JoinType, OuterType] - def innerField: MappedField[JoinType, InnerType] - def innerMeta: MetaMapper[InnerType] - def queryParams: List[QueryParam[InnerType]] - - def notIn: Boolean - - def inKeyword = if (notIn) " NOT IN " else " IN " - - def distinct: String = - queryParams.find {case Distinct() => true case _ => false}.isDefined match { - case false => "" - case true => " DISTINCT " - } -} - -/** - * This QueryParam can be put in a query and will cause the given foreign key field - * to be precached. - * @param field - the field to precache - * @param deterministic - true if the query is deterministic. Will be more efficient. - * false if the query is not deterministic. In this case, a SELECT * FROM FK_TABLE WHERE primary_key in (xxx) will - * be generated - */ -final case class PreCache[TheType <: Mapper[TheType], FieldType, OtherType <: KeyedMapper[FieldType, OtherType]](field: MappedForeignKey[FieldType, TheType, OtherType], deterministic: Boolean) -extends QueryParam[TheType] - -object PreCache { - def apply[TheType <: Mapper[TheType], FieldType, OtherType <: KeyedMapper[FieldType, OtherType]](field: MappedForeignKey[FieldType , TheType, OtherType]) = - new PreCache(field, true) -} - -final case class InRaw[TheType <: - Mapper[TheType], T](field: MappedField[T, TheType], - rawSql: String, - checkedBy: IHaveValidatedThisSQL) -extends QueryParam[TheType] - -object NotIn { - def fk[InnerMapper <: Mapper[InnerMapper], JoinTypeA, Zoom, OuterMapper <: KeyedMapper[JoinTypeA, OuterMapper]]( - fielda: MappedForeignKey[JoinTypeA, InnerMapper, OuterMapper], - qp: Zoom* - )(implicit ev: Zoom => QueryParam[InnerMapper]): InThing[OuterMapper] = { - new InThing[OuterMapper] { - type JoinType = JoinTypeA - type InnerType = InnerMapper - - val outerField: MappedField[JoinType, OuterMapper] = fielda.dbKeyToTable.primaryKeyField - val innerField: MappedField[JoinType, InnerMapper] = fielda - val innerMeta: MetaMapper[InnerMapper] = fielda.fieldOwner.getSingleton - - def notIn: Boolean = true - - val queryParams: List[QueryParam[InnerMapper]] = - qp.map{v => val r: QueryParam[InnerMapper] = v; r}.toList - } - } - - def apply[InnerMapper <: Mapper[InnerMapper], JoinTypeA, Zoom, OuterMapper <: Mapper[OuterMapper]]( - _outerField: MappedField[JoinTypeA, OuterMapper], - _innerField: MappedField[JoinTypeA, InnerMapper], - qp: Zoom* - )(implicit ev: Zoom => QueryParam[InnerMapper]): InThing[OuterMapper] = { - new InThing[OuterMapper] { - type JoinType = JoinTypeA - type InnerType = InnerMapper - - val outerField: MappedField[JoinType, OuterMapper] = _outerField - val innerField: MappedField[JoinType, InnerMapper] = _innerField - val innerMeta: MetaMapper[InnerMapper] = innerField.fieldOwner.getSingleton - - def notIn: Boolean = true - - val queryParams: List[QueryParam[InnerMapper]] = { - qp.map{v => val r: QueryParam[InnerMapper] = v; r}.toList - } - } - } -} - -object In { - def fk[InnerMapper <: Mapper[InnerMapper], JoinTypeA, Zoom, OuterMapper <: KeyedMapper[JoinTypeA, OuterMapper]]( - 
fielda: MappedForeignKey[JoinTypeA, InnerMapper, OuterMapper], - qp: Zoom* - )(implicit ev: Zoom => QueryParam[InnerMapper]): InThing[OuterMapper] = { - new InThing[OuterMapper] { - type JoinType = JoinTypeA - type InnerType = InnerMapper - - val outerField: MappedField[JoinType, OuterMapper] = fielda.dbKeyToTable.primaryKeyField - val innerField: MappedField[JoinType, InnerMapper] = fielda - val innerMeta: MetaMapper[InnerMapper] = fielda.fieldOwner.getSingleton - - def notIn: Boolean = false - - val queryParams: List[QueryParam[InnerMapper]] = - qp.map{v => val r: QueryParam[InnerMapper] = v; r}.toList - } - } - - def apply[InnerMapper <: Mapper[InnerMapper], JoinTypeA, Zoom, OuterMapper <: Mapper[OuterMapper]]( - _outerField: MappedField[JoinTypeA, OuterMapper], - _innerField: MappedField[JoinTypeA, InnerMapper], - qp: Zoom* - )(implicit ev: Zoom => QueryParam[InnerMapper]): InThing[OuterMapper] = { - new InThing[OuterMapper] { - type JoinType = JoinTypeA - type InnerType = InnerMapper - - val outerField: MappedField[JoinType, OuterMapper] = _outerField - val innerField: MappedField[JoinType, InnerMapper] = _innerField - val innerMeta: MetaMapper[InnerMapper] = innerField.fieldOwner.getSingleton - - def notIn: Boolean = false - - val queryParams: List[QueryParam[InnerMapper]] = { - qp.map{v => val r: QueryParam[InnerMapper] = v; r}.toList - } - } - } -} - -object Like { - def apply[O <: Mapper[O]](field: MappedField[String, O], value: String) = - Cmp[O, String](field, OprEnum.Like, Full(value), Empty, Empty) -} - -object NotLike { - def apply[O <: Mapper[O]](field: MappedField[String, O], value: String) = - Cmp[O, String](field, OprEnum.NotLike, Full(value), Empty, Empty) -} - -object By { - import OprEnum._ - - def apply[O <: Mapper[O], T, U](field: MappedField[T, O], value: U)(implicit ev: U => T) = Cmp[O,T](field, Eql, Full(value), Empty, Empty) - def apply[O <: Mapper[O], T](field: MappedNullableField[T, O], value: Box[T]) = value match { - case Full(x) => Cmp[O,Box[T]](field, Eql, Full(value), Empty, Empty) - case _ => NullRef(field) - } - def apply[O <: Mapper[O],T, Q <: KeyedMapper[T, Q]](field: MappedForeignKey[T, O, Q], value: Q) = - Cmp[O,T](field, Eql, Full(value.primaryKeyField.get), Empty, Empty) - - def apply[O <: Mapper[O],T, Q <: KeyedMapper[T, Q]](field: MappedForeignKey[T, O, Q], value: Box[Q]) = - value match { - case Full(v) => Cmp[O,T](field, Eql, Full(v.primaryKeyField.get), Empty, Empty) - case _ => Cmp(field, IsNull, Empty, Empty, Empty) - } -} - -object By_>= { - - import OprEnum._ - - def apply[O <: Mapper[O], T, U](field: MappedField[T, O], - value: U)(implicit ev: U => T) = Cmp[O, T](field, >=, Full(value), Empty, Empty) - - def apply[O <: Mapper[O], T](field: MappedField[T, O], otherField: - MappedField[T, O]) = Cmp[O, T](field, >=, Empty, Full(otherField), - Empty) -} - -object By_<= { - - import OprEnum._ - - def apply[O <: Mapper[O], T, U](field: MappedField[T, O], - value: U)(implicit ev: U => T) = Cmp[O, T](field, <=, Full(value), Empty, Empty) - - def apply[O <: Mapper[O], T](field: MappedField[T, O], otherField: - MappedField[T, O]) = Cmp[O, T](field, <=, Empty, Full(otherField), - Empty) -} - -object NotBy { - import OprEnum._ - - def apply[O <: Mapper[O], T, U](field: MappedField[T, O], value: U)(implicit ev: U => T) = Cmp[O,T](field, <>, Full(value), Empty, Empty) - - def apply[O <: Mapper[O], T](field: MappedNullableField[T, O], value: Box[T]) = value match { - case Full(x) => Cmp[O,Box[T]](field, <>, Full(value), Empty, Empty) - case _ => 
NotNullRef(field) - } - - def apply[O <: Mapper[O],T, Q <: KeyedMapper[T, Q]](field: MappedForeignKey[T, O, Q], value: Q) = - Cmp[O,T](field, <>, Full(value.primaryKeyField.get), Empty, Empty) - def apply[O <: Mapper[O],T, Q <: KeyedMapper[T, Q]](field: MappedForeignKey[T, O, Q], value: Box[Q]) = - value match { - case Full(v) => Cmp[O,T](field, <>, Full(v.primaryKeyField.get), Empty, Empty) - case _ => Cmp(field, IsNotNull, Empty, Empty, Empty) - } -} - -object ByRef { - import OprEnum._ - - def apply[O <: Mapper[O], T](field: MappedField[T, O], otherField: MappedField[T,O]) = Cmp[O,T](field, Eql, Empty, Full(otherField), Empty) -} - -object NotByRef { - import OprEnum._ - - def apply[O <: Mapper[O], T](field: MappedField[T, O], otherField: MappedField[T,O]) = Cmp[O,T](field, <>, Empty, Full(otherField), Empty) -} - -object By_> { - import OprEnum._ - - def apply[O <: Mapper[O], T, U](field: MappedField[T, O], value: U)(implicit ev: U => T) = Cmp[O,T](field, >, Full(value), Empty, Empty) - def apply[O <: Mapper[O], T](field: MappedField[T, O], otherField: MappedField[T,O]) = Cmp[O,T](field, >, Empty, Full(otherField), Empty) -} - -object By_< { - import OprEnum._ - - def apply[O <: Mapper[O], T, U](field: MappedField[T, O], value: U)(implicit ev: U => T) = Cmp[O,T](field, <, Full(value), Empty, Empty) - def apply[O <: Mapper[O], T](field: MappedField[T, O], otherField: MappedField[T,O]) = Cmp[O,T](field, <, Empty, Full(otherField), Empty) -} - -object NullRef { - import OprEnum._ - def apply[O <: Mapper[O], T](field: MappedField[T, O]) = Cmp(field, IsNull, Empty, Empty, Empty) -} - -object NotNullRef { - import OprEnum._ - def apply[O <: Mapper[O], T](field: MappedField[T, O]) = Cmp(field, IsNotNull, Empty, Empty, Empty) -} - -trait LongKeyedMetaMapper[A <: LongKeyedMapper[A]] extends KeyedMetaMapper[Long, A] { self: A => } - - -trait KeyedMetaMapper[Type, A<:KeyedMapper[Type, A]] extends MetaMapper[A] with KeyedMapper[Type, A] { - self: A with MetaMapper[A] with KeyedMapper[Type, A] => - - private def testProdArity(prod: Product): Boolean = { - var pos = 0 - while (pos < prod.productArity) { - if (!prod.productElement(pos).isInstanceOf[QueryParam[A]]) return false - pos = pos + 1 - } - true - } - - type Q = MappedForeignKey[AnyBound, A, OO] with MappedField[AnyBound, A] forSome - {type OO <: KeyedMapper[AnyBound, OO]} - - def asSafeJs(actual: A, f: KeyObfuscator): JsExp = { - val pk = actual.primaryKeyField - val first = (pk.name, JE.Str(f.obscure(self, pk.get))) - JE.JsObj( - first :: - ("$lift_class", JE.Str(dbTableName)) :: - mappedFieldList - .map(f => this.??(f.method, actual)) - .filter(f => !f.dbPrimaryKey_? && f.renderJs_?) 
- .flatMap{ - case fk0: MappedForeignKey[_, _, _] with MappedField[_, _] => - val fk = fk0.asInstanceOf[Q] - val key = f.obscure(fk.dbKeyToTable, fk.get) - List( - (fk.name, JE.Str(key)), - (fk.name+"_obj", JE.AnonFunc("index", JE.JsRaw("return index["+key.encJs+"];").cmd)) - ) - case x => x.asJs - } - .toList ::: - actual.suplementalJs(Full(f)) : _* - ) - } - - private def convertToQPList(prod: Product): Array[QueryParam[A]] = { - var pos = 0 - val ret = new Array[QueryParam[A]](prod.productArity) - while (pos < prod.productArity) { - ret(pos) = prod.productElement(pos).asInstanceOf[QueryParam[A]] - pos = pos + 1 - } - ret - } - - private def anyToFindString(in: Any): Box[String] = - in match { - case Empty | None | null | Failure(_, _, _) => Empty - case Full(n) => anyToFindString(n) - case Some(n) => anyToFindString(n) - case v => Full(v.toString) - } - - private object unapplyMemo extends RequestMemoize[Any, Box[A]] { - override protected def __nameSalt = Helpers.randomString(20) - } - - def unapply(key: Any): Option[A] = { - if (S.inStatefulScope_?) unapplyMemo(key, this.find(key)) - else this.find(key) - } - - def find(key: Any): Box[A] = - key match { - case qp: QueryParam[A] => find(qp) - case prod: Product if (testProdArity(prod)) => find(convertToQPList(prod).toIndexedSeq :_*) - case key => anyToFindString(key) flatMap (find(_)) - } - - def findDb(dbId: ConnectionIdentifier, key: Any): Box[A] = - key match { - case qp: QueryParam[A] => findDb(dbId, List(qp.asInstanceOf[QueryParam[A]]) :_*) - case prod: Product if (testProdArity(prod)) => findDb(dbId, convertToQPList(prod).toIndexedSeq :_*) - case key => anyToFindString(key) flatMap (find(dbId, _)) - } - - /** - * Find the element based on the first element of the List - */ - def find(key: List[String]): Box[A] = key match { - case Nil => Empty - case x :: _ => find(x) - } - - /** - * Find an element by primary key or create a new one - */ - def findOrCreate(key: Any): A = find(key) openOr create - - /** - * Find an element by primary key or create a new one - */ - def findOrCreate(key: List[String]): A = find(key) openOr create - - def find(key: String): Box[A] = dbStringToKey(key) flatMap (realKey => findDbByKey(selectDbForKey(realKey), realKey)) - - def find(dbId: ConnectionIdentifier, key: String): Box[A] = dbStringToKey(key) flatMap (realKey => findDbByKey(dbId, realKey)) - - def findByKey(key: Type): Box[A] = findDbByKey(selectDbForKey(key), key) - - def dbStringToKey(in: String): Box[Type] = primaryKeyField.convertKey(in) - - private def selectDbForKey(key: Type): ConnectionIdentifier = - if (dbSelectDBConnectionForFind.isDefinedAt(key)) dbSelectDBConnectionForFind(key) - else dbDefaultConnectionIdentifier - - def dbSelectDBConnectionForFind: PartialFunction[Type, ConnectionIdentifier] = Map.empty - - def findDbByKey(dbId: ConnectionIdentifier, key: Type): Box[A] = - findDbByKey(dbId, mappedFields, key) - - def findDbByKey(dbId: ConnectionIdentifier, fields: Seq[SelectableField], - key: Type): Box[A] = - DB.use(dbId) { conn => - val field = primaryKeyField - - DB.prepareStatement("SELECT "+ - fields.map(_.dbSelectString). - mkString(", ")+ - " FROM "+MapperRules.quoteTableName.vend(_dbTableNameLC)+" WHERE "+MapperRules.quoteColumnName.vend(field._dbColumnNameLC)+" = ?", conn) { - st => - if (field.dbIgnoreSQLType_?) - st.setObject(1, field.makeKeyJDBCFriendly(key)) - else - st.setObject(1, field.makeKeyJDBCFriendly(key), - conn.driverType. - columnTypeMap(field. 
- targetSQLType(field._dbColumnNameLC))) - DB.exec(st) { - rs => - val mi = buildMapper(rs) - if (rs.next) Full(createInstance(dbId, rs, mi)) - else Empty - } - } - } - - def find(by: QueryParam[A]): Box[A] = find(Seq(by): _*) - - def find(by: QueryParam[A]*): Box[A] = - findDb(dbDefaultConnectionIdentifier, by :_*) - - def findDb(dbId: ConnectionIdentifier, by: QueryParam[A]*): Box[A] = - findDb(dbId, mappedFields, by :_*) - - def findDb(dbId: ConnectionIdentifier, fields: Seq[SelectableField], - by: QueryParam[A]*): Box[A] = { - DB.use(dbId) { - conn => - - val (query, start, max, bl) = buildSelectString(fields, conn, by :_*) - DB.prepareStatement(query, conn) { - st => - setStatementFields(st, bl, 1, conn) - DB.exec(st) { - rs => - val mi = buildMapper(rs) - if (rs.next) Full(createInstance(dbId, rs, mi)) - else Empty - } - - } - } - } - - override def afterSchemifier: Unit = { - if (crudSnippets_?) { - LiftRules.snippets.append(crudSnippets) - } - } - - /** - * Override this definition in your model to enable CRUD snippets - * for that model. Set to false by default. - * - * Remember to override editSnippetSetup and viewSnippetSetup as well, - * as the defaults are broken. - * - * @return false - */ - def crudSnippets_? = false - - /** - * Defines the default CRUD snippets. Override if you want to change - * the names of the snippets. Defaults are "add", "edit", and "view". - * - * (No, there's no D in CRUD.) - */ - def crudSnippets: LiftRules.SnippetPF = { - val Name = internal_dbTableName - - NamedPF("crud "+Name) { - case Name :: "addForm" :: _ => addFormSnippet - case Name :: "editForm" :: _ => editFormSnippet - case Name :: "viewTransform" :: _ => viewTransform - } - } - - /** - * Provides basic transformation of html to a form for the - * given obj. When the form is submitted, cleanup - * is run. - */ - def formSnippet(html: NodeSeq, obj: A, cleanup: (A => Unit)): NodeSeq = { - val name = internal_dbTableName - - def callback(): Unit = { - cleanup(obj) - } - - val submitTransform: (NodeSeq)=>NodeSeq = - "type=submit" #> SHtml.onSubmitUnit(callback _) - - val otherTransforms = - obj.fieldMapperTransforms(_.toForm openOr Text("")).reverse ++ - obj.fieldTransforms.reverse - - otherTransforms.foldRight(submitTransform)(_ andThen _) apply html - } - - /** - * Base add form snippet. Fetches object from - * addSnippetSetup and invokes - * addSnippetCallback when the form is submitted. - */ - def addFormSnippet(html: NodeSeq): NodeSeq = { - formSnippet(html, addSnippetSetup, addSnippetCallback _) - } - - /** - * Base edit form snippet. Fetches object from - * editSnippetSetup and invokes - * editSnippetCallback when the form is submitted. - */ - def editFormSnippet(html: NodeSeq): NodeSeq = { - formSnippet(html, editSnippetSetup, editSnippetCallback _) - } - - /** - * Basic transformation of html to HTML for displaying - * the object from viewSnippetSetup. - */ - def viewTransform(html: NodeSeq): NodeSeq = { - val name = internal_dbTableName - val obj: A = viewSnippetSetup - - val otherTransforms = - obj.fieldMapperTransforms(_.asHtml).reverse ++ - obj.fieldTransforms.reverse - - otherTransforms.foldRight(PassThru: (NodeSeq)=>NodeSeq)(_ andThen _) apply html - } - - /** - * Lame attempt at automatically getting an object from the HTTP parameters. - * BROKEN! DO NOT USE! Only here so that existing sub-classes KeyedMetaMapper - * don't have to implement new methods when I commit the CRUD snippets code. 
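The CRUD snippet support above is opt-in: `crudSnippets_?` defaults to false, `afterSchemifier` registers `crudSnippets` with `LiftRules.snippets` only when that flag is overridden, and the comments below warn that the default `editSnippetSetup` and `viewSnippetSetup` are broken and must be overridden. A minimal sketch of turning it on for a hypothetical `Ticket` model; resolving the instance from an `id` request parameter is an assumption of this sketch, not something the removed code prescribes:

```scala
import net.liftweb.http.S
import net.liftweb.mapper._

class Ticket extends LongKeyedMapper[Ticket] with IdPK {
  def getSingleton = Ticket
  object subject extends MappedString(this, 128)
}

object Ticket extends Ticket with LongKeyedMetaMapper[Ticket] {
  // Opt in so afterSchemifier registers the addForm/editForm/viewTransform snippets.
  override def crudSnippets_? = true

  // Look the instance up from an "id" request parameter, falling back to a
  // fresh instance instead of the broken default behavior.
  private def fromParam: Ticket =
    S.param("id").flatMap(find(_)).openOr(create)

  override def editSnippetSetup: Ticket = fromParam
  override def viewSnippetSetup: Ticket = fromParam
}
```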
- */ - def objFromIndexedParam: Box[A] = { - val found = for ( - req <- S.request.toList; - (param, value :: _) <- req.params; - fh <- mappedFieldList if fh.field.dbIndexed_? == true && fh.name.equals(param) - ) yield find(value) - - found.filter(obj => obj match { - case Full(obj) => true - case _ => false - }) match { - case obj :: _ => obj - case _ => Empty - } - } - - /** - * Default setup behavior for the add snippet. Creates a new mapped object. - * - * @return new mapped object - */ - def addSnippetSetup: A = { - this.create - } - - /** - * Default setup behavior for the edit snippet. BROKEN! MUST OVERRIDE IF - * USING CRUD SNIPPETS! - * - * @return a mapped object of this metamapper's type - */ - def editSnippetSetup: A = { - objFromIndexedParam.openOrThrowException("Comment says this is broken") - } - /** - * Default setup behavior for the view snippet. BROKEN! MUST OVERRIDE IF - * USING CRUD SNIPPETS! - * - * @return a mapped object of this metamapper's type - */ - def viewSnippetSetup: A = { - objFromIndexedParam.openOrThrowException("Comment says this is broken") - } - /** - * Default callback behavior of the edit snippet. Called when the user - * presses submit. Saves the passed in object. - * - * @param obj mapped object of this metamapper's type - */ - def editSnippetCallback(obj: A): Unit = { obj.save } - /** - * Default callback behavior of the add snippet. Called when the user - * presses submit. Saves the passed in object. - * - * @param obj mapped object of this metamapper's type - */ - def addSnippetCallback(obj: A): Unit = { obj.save } -} - - -class KeyObfuscator { - private var to: Map[String, Map[Any, String]] = Map.empty - private var from: Map[String, Map[String, Any]] = Map.empty - - def obscure[KeyType, MetaType <: KeyedMapper[KeyType, MetaType]](theType: - KeyedMetaMapper[KeyType, MetaType], key: KeyType): String = synchronized { - val local: Map[Any, String] = to.getOrElse(theType._dbTableNameLC, Map.empty) - local.get(key) match { - case Some(s) => s - case _ => val ret = "r"+randomString(15) - - val l2: Map[Any, String] = local + ( (key -> ret) ) - to = to + ( (theType._dbTableNameLC -> l2) ) - - val lf: Map[String, Any] = from.getOrElse(theType._dbTableNameLC, Map.empty) + ( (ret -> key)) - // lf(ret) = key - from = from + ( (theType._dbTableNameLC -> lf) ) - - ret - } - } - - def obscure[KeyType, MetaType <: KeyedMapper[KeyType, MetaType]](what: KeyedMapper[KeyType, MetaType]): String = - { - obscure(what.getSingleton, what.primaryKeyField.get) - } - - def apply[KeyType, MetaType <: KeyedMapper[KeyType, MetaType], Q](theType: - KeyedMetaMapper[KeyType, MetaType], key: Q)(implicit ev: Q => KeyType): String = { - val k: KeyType = key - obscure(theType, k) - } - - def apply[KeyType, MetaType <: KeyedMapper[KeyType, MetaType]](what: KeyedMapper[KeyType, MetaType]): String = - { - obscure(what) - } - - - def recover[KeyType, MetaType <: KeyedMapper[KeyType, MetaType]](theType: - KeyedMetaMapper[KeyType, MetaType], id: String): Box[KeyType] = synchronized { - for { - map <- from.get(theType._dbTableNameLC) - item <- map.get(id) - } yield item.asInstanceOf[KeyType] - } -} - -case class IHaveValidatedThisSQL(who: String, date: String) - -trait SelectableField { - def dbSelectString: String -} - -class MapperException(msg: String) extends Exception(msg) - diff --git a/persistence/mapper/src/main/scala/net/liftweb/mapper/ProtoExtendedSession.scala b/persistence/mapper/src/main/scala/net/liftweb/mapper/ProtoExtendedSession.scala deleted file mode 100644 index 
a28bfc7ba1..0000000000 --- a/persistence/mapper/src/main/scala/net/liftweb/mapper/ProtoExtendedSession.scala +++ /dev/null @@ -1,152 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mapper - -import http.provider._ -import common._ -import util._ -import http._ -import Helpers._ - - -trait ProtoExtendedSession[T <: ProtoExtendedSession[T]] extends KeyedMapper[Long, T] { - self: T => - - override def primaryKeyField: MappedLongIndex[T] = id - - // the primary key for the database - object id extends MappedLongIndex(this) - - // uniqueId - object cookieId extends MappedUniqueId(this, 32) { - override def dbIndexed_? = true - } - - object userId extends MappedString(this, 64) - - object expiration extends MappedLong(this) { - override def defaultValue = expirationTime - override def dbColumnName = expirationColumnName - } - - /** - * Change this string to "experation" for compatibility with - * old mis-spelling - */ - protected def expirationColumnName = "expiration" - - def expirationTime: Long = millis + 180.days -} - -trait UserIdAsString { - def userIdAsString: String -} - -/** - * The root trait for defining the session cookie path for extended sessions - * that defines the default session cookie path: "/". - */ -trait ProtoSessionCookiePath { - def sessionCookiePath: String = "/" -} - -trait MetaProtoExtendedSession[T <: ProtoExtendedSession[T]] extends -KeyedMetaMapper[Long, T] with ProtoSessionCookiePath { - self: T => - - def CookieName = "ext_id" - type UserType <: UserIdAsString - - /* - private object myWrapper extends LoanWrapper { - def apply[N](f: => N): N = { - (recoverUserId, S.findCookie(CookieName)) match { - case (Empty, Full(c)) => - find(By(cookieId, c.value openOr "")) match { - case Full(es) if es.expiration.is < millis => es.delete_! - case Full(es) => logUserIdIn(es.userId) - case _ => - } - - case _ => - } - f - } - }*/ - - def logUserIdIn(uid: String): Unit - - def recoverUserId: Box[String] - - def userDidLogin(uid: UserType): Unit = { - userDidLogout(Full(uid)) - val inst = create.userId(uid.userIdAsString).saveMe() - val cookie = HTTPCookie(CookieName, inst.cookieId.get). - setMaxAge(((inst.expiration.get - millis) / 1000L).toInt). - setPath(sessionCookiePath) - S.addCookie(cookie) - } - - def userDidLogout(uid: Box[UserType]): Unit = { - for (cook <- S.findCookie(CookieName)) { - S.deleteCookie(cook) - find(By(cookieId, cook.value openOr "")).foreach(_.delete_!) - } - } - - // def requestLoans: List[LoanWrapper] = myWrapper :: Nil - - /** - * This does the cookie to User lookup. In Boot.scala: - * - LiftRules.earlyInStateful.append(ExtendedSession.testCookieEarlyInStateful) - * - */ - def testCookieEarlyInStateful: Box[Req] => Unit = { - ignoredReq => { - (recoverUserId, S.findCookie(CookieName)) match { - case (Empty, Full(c)) => - find(By(cookieId, c.value openOr "")) match { - case Full(es) if es.expiration.get < millis => es.delete_! 
- case Full(es) => logUserIdIn(es.userId.get) - case _ => - } - - case _ => - } - } - } -} - -/** - * Mix this in to your extended session singleton to set the cookie path - * to the context path for your application. This is useful if you have - * multiple applications on a single application server and want to ensure - * their cookies don't cross-pollinate. - * - * Example usage: - * - * {{{ - * case class AppExtendedSession extends ProtoExtendedSession[AppExtendedSession] - * object MetaAppExtendedSession extends MetaProtoExtendedSession[AppExtendedSession] - * with ContextPathExtendedCookie - * }}} - */ -trait ContextPathExtendedCookie extends ProtoSessionCookiePath { - override def sessionCookiePath = S.contextPath -} diff --git a/persistence/mapper/src/main/scala/net/liftweb/mapper/ProtoTag.scala b/persistence/mapper/src/main/scala/net/liftweb/mapper/ProtoTag.scala deleted file mode 100644 index e36e8eaebf..0000000000 --- a/persistence/mapper/src/main/scala/net/liftweb/mapper/ProtoTag.scala +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mapper - -import net.liftweb.util._ -import net.liftweb.common._ -import Helpers._ - -trait MetaProtoTag[ModelType <: ProtoTag[ModelType]] extends KeyedMetaMapper[Long, ModelType] { - self: ModelType => - override def dbTableName: String // = "tags" - def cacheSize: Int - - private val idCache = new LRU[Long, ModelType](cacheSize) - private val tagCache = new LRU[String, ModelType](cacheSize) - - def findOrCreate(ntag: String): ModelType = synchronized { - val tag = capify(ntag) - if (tagCache.contains(tag)) tagCache(tag) - else { - find(By(name, tag)) match { - case Full(t) => tagCache(tag) = t; t - case _ => val ret: ModelType = createInstance.name(tag).saveMe - tagCache(tag) = ret - ret - } - } - } - - override def findDbByKey(dbId: ConnectionIdentifier, key: Long): Box[ModelType] = synchronized { - if (idCache.contains(key)) Full(idCache(key)) - else { - val ret = super.findDbByKey(dbId,key) - ret.foreach(v => idCache(key) = v) - ret - } - } - - /** - * Split the String into tags - */ - def split(in: String): List[String] = in.roboSplit(",").map(capify) - - /** - * Split the String into tags and find all the tags - */ - def splitAndFind(in: String): List[ModelType] = split(in).map(findOrCreate) - - def capify: String => String = Helpers.capify _ -} - -abstract class ProtoTag[MyType <: ProtoTag[MyType]] extends KeyedMapper[Long, MyType] with Ordered[MyType] { - self: MyType => - - def getSingleton: MetaProtoTag[MyType] - - // the primary key for the database - object id extends MappedLongIndex(this) - - def primaryKeyField: MappedLongIndex[MyType] = id - - object name extends MappedPoliteString(this, 256) { - override def setFilter = getSingleton.capify :: super.setFilter - override def dbIndexed_? 
= true - } - - def compare(other: MyType): Int = name.get.compare(other.name.get) -} - diff --git a/persistence/mapper/src/main/scala/net/liftweb/mapper/ProtoUser.scala b/persistence/mapper/src/main/scala/net/liftweb/mapper/ProtoUser.scala deleted file mode 100644 index d181721212..0000000000 --- a/persistence/mapper/src/main/scala/net/liftweb/mapper/ProtoUser.scala +++ /dev/null @@ -1,426 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mapper - -import http._ -import util._ -import common._ -import proto.{ProtoUser => GenProtoUser} - -import scala.xml.{NodeSeq, Text} - -/** - * ProtoUser is a base class that gives you a "User" that has a first name, - * last name, email, etc. - */ -trait ProtoUser[T <: ProtoUser[T]] extends KeyedMapper[Long, T] with UserIdAsString { - self: T => - - override def primaryKeyField: MappedLongIndex[T] = id - - /** - * The primary key field for the User. You can override the behavior - * of this field: - *
-   * override lazy val id = new MyMappedLongClass(this) {
-   *   println("I am doing something different")
-   * }
-   * 
- */ - lazy val id: MappedLongIndex[T] = new MyMappedLongClass(this) - - protected class MyMappedLongClass(obj: T) extends MappedLongIndex(obj) - - /** - * Convert the id to a String - */ - def userIdAsString: String = id.get.toString - - /** - * The first name field for the User. You can override the behavior - * of this field: - *
-   * override lazy val firstName = new MyFirstName(this, 32) {
-   *   println("I am doing something different")
-   * }
-   * 
- */ - - lazy val firstName: MappedString[T] = new MyFirstName(this, 32) - - protected class MyFirstName(obj: T, size: Int) extends MappedString(obj, size) { - override def displayName = fieldOwner.firstNameDisplayName - override val fieldId = Some(Text("txtFirstName")) - } - - /** - * The string name for the first name field - */ - def firstNameDisplayName = S.?("first.name") - - /** - * The last field for the User. You can override the behavior - * of this field: - *
-   * override lazy val lastName = new MyLastName(this, 32) {
-   *   println("I am doing something different")
-   * }
-   * 
- */ - lazy val lastName: MappedString[T] = new MyLastName(this, 32) - - protected class MyLastName(obj: T, size: Int) extends MappedString(obj, size) { - override def displayName = fieldOwner.lastNameDisplayName - override val fieldId = Some(Text("txtLastName")) - } - - /** - * The last name string - */ - def lastNameDisplayName = S.?("last.name") - - /** - * The email field for the User. You can override the behavior - * of this field: - *
-   * override lazy val email = new MyEmail(this, 48) {
-   *   println("I am doing something different")
-   * }
-   * 
- */ - lazy val email: MappedEmail[T] = new MyEmail(this, 48) - - protected class MyEmail(obj: T, size: Int) extends MappedEmail(obj, size) { - override def dbIndexed_? = true - override def validations = valUnique(S.?("unique.email.address")) _ :: super.validations - override def displayName = fieldOwner.emailDisplayName - override val fieldId = Some(Text("txtEmail")) - } - - /** - * The email first name - */ - def emailDisplayName = S.?("email.address") - - /** - * The password field for the User. You can override the behavior - * of this field: - *
-   * override lazy val password = new MyPassword(this) {
-   *   println("I am doing something different")
-   * }
-   * 
- */ - lazy val password: MappedPassword[T] = new MyPassword(this) - - protected class MyPassword(obj: T) extends MappedPassword(obj) { - override def displayName = fieldOwner.passwordDisplayName - } - - /** - * The display name for the password field - */ - def passwordDisplayName = S.?("password") - - /** - * The superuser field for the User. You can override the behavior - * of this field: - *
-   * override lazy val superUser = new MySuperUser(this) {
-   *   println("I am doing something different")
-   * }
-   * 
- */ - lazy val superUser: MappedBoolean[T] = new MySuperUser(this) - - protected class MySuperUser(obj: T) extends MappedBoolean(obj) { - override def defaultValue = false - } - - def niceName: String = (firstName.get, lastName.get, email.get) match { - case (f, l, e) if f.length > 1 && l.length > 1 => f+" "+l+" ("+e+")" - case (f, _, e) if f.length > 1 => f+" ("+e+")" - case (_, l, e) if l.length > 1 => l+" ("+e+")" - case (_, _, e) => e - } - - def shortName: String = (firstName.get, lastName.get) match { - case (f, l) if f.length > 1 && l.length > 1 => f+" "+l - case (f, _) if f.length > 1 => f - case (_, l) if l.length > 1 => l - case _ => email.get - } - - def niceNameWEmailLink = {niceName} -} - -/** - * Mix this trait into the the Mapper singleton for User and you - * get a bunch of user functionality including password reset, etc. - */ -trait MetaMegaProtoUser[ModelType <: MegaProtoUser[ModelType]] extends KeyedMetaMapper[Long, ModelType] with GenProtoUser { - self: ModelType => - - type TheUserType = ModelType - - /** - * What's a field pointer for the underlying CRUDify - */ - type FieldPointerType = MappedField[_, TheUserType] - - /** - * Based on a FieldPointer, build a FieldPointerBridge - */ - protected implicit def buildFieldBridge(from: FieldPointerType): FieldPointerBridge = new MyPointer(from) - - - protected class MyPointer(from: FieldPointerType) extends FieldPointerBridge { - /** - * What is the display name of this field? - */ - def displayHtml: NodeSeq = from.displayHtml - - /** - * Does this represent a pointer to a Password field - */ - def isPasswordField_? : Boolean = from match { - case a: MappedPassword[_] => true - case _ => false - } - } - - /** - * Convert an instance of TheUserType to the Bridge trait - */ - protected implicit def typeToBridge(in: TheUserType): UserBridge = - new MyUserBridge(in) - - /** - * Bridges from TheUserType to methods used in this class - */ - protected class MyUserBridge(in: TheUserType) extends UserBridge { - /** - * Convert the user's primary key to a String - */ - def userIdAsString: String = in.id.toString - - /** - * Return the user's first name - */ - def getFirstName: String = in.firstName.get - - /** - * Return the user's last name - */ - def getLastName: String = in.lastName.get - - /** - * Get the user's email - */ - def getEmail: String = in.email.get - - /** - * Is the user a superuser - */ - def superUser_? : Boolean = in.superUser.get - - /** - * Has the user been validated? - */ - def validated_? : Boolean = in.validated.get - - /** - * Does the supplied password match the actual password? - */ - def testPassword(toTest: Box[String]): Boolean = - toTest.map(in.password.match_?) 
openOr false - - /** - * Set the validation flag on the user and return the user - */ - def setValidated(validation: Boolean): TheUserType = - in.validated(validation) - - /** - * Set the unique ID for this user to a new value - */ - def resetUniqueId(): TheUserType = { - in.uniqueId.reset() - } - - /** - * Return the unique ID for the user - */ - def getUniqueId(): String = in.uniqueId.get - - /** - * Validate the user - */ - def validate: List[FieldError] = in.validate - - /** - * Given a list of string, set the password - */ - def setPasswordFromListString(pwd: List[String]): TheUserType = { - in.password.setList(pwd) - in - } - - /** - * Save the user to backing store - */ - def save(): Boolean = in.save - } - - /** - * Given a field pointer and an instance, get the field on that instance - */ - protected def computeFieldFromPointer(instance: TheUserType, pointer: FieldPointerType): Box[BaseField] = Full(getActualField(instance, pointer)) - - - /** - * Given an username (probably email address), find the user - */ - protected def findUserByUserName(email: String): Box[TheUserType] = - find(By(this.email, email)) - - /** - * Given a unique id, find the user - */ - protected def findUserByUniqueId(id: String): Box[TheUserType] = - find(By(uniqueId, id)) - - /** - * Create a new instance of the User - */ - protected def createNewUserInstance(): TheUserType = self.create - - /** - * Given a String representing the User ID, find the user - */ - protected def userFromStringId(id: String): Box[TheUserType] = find(id) - - /** - * The list of fields presented to the user at sign-up - */ - def signupFields: List[FieldPointerType] = List(firstName, - lastName, - email, - locale, - timezone, - password) - - /** - * The list of fields presented to the user for editing - */ - def editFields: List[FieldPointerType] = List(firstName, - lastName, - email, - locale, - timezone) - -} - -/** - * ProtoUser is bare bones. MetaProtoUser contains a bunch - * more fields including a validated flag, locale, timezone, etc. - */ -trait MegaProtoUser[T <: MegaProtoUser[T]] extends ProtoUser[T] { - self: T => - - /** - * The unique id field for the User. This field - * is used for validation, lost passwords, etc. - * You can override the behavior - * of this field: - *
-   * override lazy val uniqueId = new MyUniqueId(this, 32) {
-   *   println("I am doing something different")
-   * }
-   * 
- */ - lazy val uniqueId: MappedUniqueId[T] = new MyUniqueId(this, 32) - - protected class MyUniqueId(obj: T, size: Int) extends MappedUniqueId(obj, size) { - override def dbIndexed_? = true - override def writePermission_? = true - } - - /** - * The has the user been validated. - * You can override the behavior - * of this field: - *
-   * override lazy val validated = new MyValidated(this) {
-   *   println("I am doing something different")
-   * }
-   * 
- */ - lazy val validated: MappedBoolean[T] = new MyValidated(this) - - protected class MyValidated(obj: T) extends MappedBoolean[T](obj) { - override def defaultValue = false - override val fieldId = Some(Text("txtValidated")) - } - - /** - * The locale field for the User. - * You can override the behavior - * of this field: - *
-   * override lazy val locale = new MyLocale(this) {
-   *   println("I am doing something different")
-   * }
-   * 
- */ - lazy val locale = new MyLocale(this) - - protected class MyLocale(obj: T) extends MappedLocale[T](obj) { - override def displayName = fieldOwner.localeDisplayName - override val fieldId = Some(Text("txtLocale")) - } - - /** - * The time zone field for the User. - * You can override the behavior - * of this field: - *
-   * override lazy val timezone = new MyTimeZone(this) {
-   *   println("I am doing something different")
-   * }
-   * 
- */ - lazy val timezone = new MyTimeZone(this) - - protected class MyTimeZone(obj: T) extends MappedTimeZone[T](obj) { - override def displayName = fieldOwner.timezoneDisplayName - override val fieldId = Some(Text("txtTimeZone")) - } - - /** - * The string for the timezone field - */ - def timezoneDisplayName = S.?("time.zone") - - /** - * The string for the locale field - */ - def localeDisplayName = S.?("locale") - -} - diff --git a/persistence/mapper/src/main/scala/net/liftweb/mapper/Schemifier.scala b/persistence/mapper/src/main/scala/net/liftweb/mapper/Schemifier.scala deleted file mode 100644 index 330f537232..0000000000 --- a/persistence/mapper/src/main/scala/net/liftweb/mapper/Schemifier.scala +++ /dev/null @@ -1,387 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mapper - -import java.sql.{Connection, DatabaseMetaData, ResultSet} - -import scala.collection.mutable.{HashMap, ListBuffer} -import common.{Box, Full, Loggable} -import util.Helpers -import Helpers._ - -import scala.annotation.tailrec - -/** - * Given a list of MetaMappers, make sure the database has the right schema - *
- * <ul>
- * <li>Make sure all the tables exist</li>
- * <li>Make sure the columns in the tables are correct</li>
- * <li>Create the indexes</li>
- * <li>Create the foreign keys</li>
- * </ul>
- */ -object Schemifier extends Loggable { - implicit def superToRegConnection(sc: SuperConnection): Connection = sc.connection - - /** - * Convenience function to be passed to schemify. Will log executed statements at the info level - * using Schemifier's logger - * - */ - def infoF(msg: => AnyRef) = logger.info(msg) - - /** - * Convenience function to be passed to schemify. Will not log any executed statements - */ - def neverF(msg: => AnyRef) = {} - - - def schemify(performWrite: Boolean, logFunc: (=> AnyRef) => Unit, stables: BaseMetaMapper*): List[String] = - schemify(performWrite, logFunc, DefaultConnectionIdentifier, stables :_*) - - def schemify(performWrite: Boolean, logFunc: (=> AnyRef) => Unit, dbId: ConnectionIdentifier, stables: BaseMetaMapper*): List[String] = - schemify(performWrite, false, logFunc, dbId, stables :_*) - - def schemify(performWrite: Boolean, structureOnly: Boolean, logFunc: (=> AnyRef) => Unit, stables: BaseMetaMapper*): List[String] = - schemify(performWrite, structureOnly, logFunc, DefaultConnectionIdentifier, stables :_*) - - private case class Collector(funcs: List[() => Any], cmds: List[String]) { - def +(other: Collector) = Collector(funcs ::: other.funcs, cmds ::: other.cmds) - } - - private val EmptyCollector = new Collector(Nil, Nil) - - private def using[RetType <: Any, VarType <: ResultSet](f: => VarType)(f2: VarType => RetType): RetType = { - val theVar = f - try { - f2(theVar) - } finally { - theVar.close() - } - } - - /** - * Modify database specified in dbId so it matches the structure specified in the MetaMappers - * - * @param performWrite if false, will not write any changes to the database, only collect them - * @param structureOnly if true, will only check tables and columns, not indexes and constraints. - * Useful if schema is maintained outside Lift, but still needs structure to be in sync - * @param logFunc A function that will be called for each statement being executed if performWrite == true - * @param dbId The ConnectionIdentifier to be used - * @param stables The MetaMapper instances to check - * - * @return The list of statements needed to bring the database in a consistent state. This list is created even if performWrite=false - */ - def schemify(performWrite: Boolean, structureOnly: Boolean, logFunc: (=> AnyRef) => Unit, dbId: ConnectionIdentifier, stables: BaseMetaMapper*): List[String] = { - val tables = stables.toList - DB.use(dbId) { con => - // Some databases (Sybase) don't like doing transactional DDL, so we disable transactions - if (con.driverType.schemifierMustAutoCommit_? && !con.connection.getAutoCommit()) { - con.connection.commit - con.connection.setAutoCommit(true) - } - logger.debug("Starting schemify. 
write=%s, structureOnly=%s, dbId=%s, schema=%s, tables=%s".format(performWrite, structureOnly, dbId, getDefaultSchemaName(con), tables.map(_.dbTableName))) - - val connection = con // SuperConnection(con) - val driver = DriverType.calcDriver(connection) - val actualTableNames = new HashMap[String, String] - if (performWrite) { - tables.foreach{t => - logger.debug("Running beforeSchemifier on table %s".format(t.dbTableName)) - t.beforeSchemifier - } - } - - def tableCheck(t: BaseMetaMapper, desc: String, f: => Collector): Collector = { - actualTableNames.get(t._dbTableNameLC).map(x => f).getOrElse{ - logger.warn("Skipping %s on table '%s' since it doesn't exist".format(desc, t.dbTableName)) - EmptyCollector - } - } - - val toRun = - tables.foldLeft(EmptyCollector)((b, t) => b + ensureTable(performWrite, logFunc, t, connection, actualTableNames)) + - tables.foldLeft(EmptyCollector)((b, t) => b + tableCheck(t, "ensureColumns", ensureColumns(performWrite, logFunc, t, connection, actualTableNames))) + - (if (structureOnly) - EmptyCollector - else - (tables.foldLeft(EmptyCollector)((b, t) => b + tableCheck(t, "ensureIndexes", ensureIndexes(performWrite, logFunc, t, connection, actualTableNames))) + - tables.foldLeft(EmptyCollector)((b, t) => b + tableCheck(t, "ensureConstraints", ensureConstraints(performWrite, logFunc, t, dbId, connection, actualTableNames))))) - - if (performWrite) { - logger.debug("Executing DDL statements") - toRun.funcs.foreach(f => f()) - tables.foreach{t => - logger.debug("Running afterSchemifier on table %s".format(t.dbTableName)) - t.afterSchemifier - } - } - - toRun.cmds - } - } - - def destroyTables_!!(logFunc: (=> AnyRef) => Unit, stables: BaseMetaMapper*): Unit = destroyTables_!!(DefaultConnectionIdentifier, logFunc, stables :_*) - - def destroyTables_!!(dbId: ConnectionIdentifier, logFunc: (=> AnyRef) => Unit, stables: BaseMetaMapper*): Unit = - destroyTables_!!(dbId, 0, logFunc, stables.toList) - - @tailrec - def destroyTables_!!(dbId: ConnectionIdentifier, cnt: Int, logFunc: (=> AnyRef) => Unit, stables: List[BaseMetaMapper]): Unit = { - val th = new HashMap[String, String]() - (DB.use(dbId) { - conn => - val sConn = conn // SuperConnection(conn) - val tables = stables.toList.filter(t => hasTable_?(t, sConn, th)) - - tables.foreach{ - table => - try { - val ct = "DROP TABLE "+table._dbTableNameLC - val st = conn.createStatement - st.execute(ct) - logFunc(ct) - st.close() - } catch { - case e: Exception => // dispose... probably just an SQL Exception - } - } - - tables - }) match { - case t if t.nonEmpty && cnt < 1000 => destroyTables_!!(dbId, cnt + 1, logFunc, t) - case _ => - } - } - - /** - * Retrieves schema name where the unqualified db objects are searched. - */ - def getDefaultSchemaName(connection: SuperConnection): String = - (connection.schemaName or connection.driverType.defaultSchemaName or DB.globalDefaultSchemaName).openOr(connection.getMetaData.getUserName) - - - private def hasTable_? (table: BaseMetaMapper, connection: SuperConnection, actualTableNames: HashMap[String, String]): Boolean = { - val md = connection.getMetaData - using(md.getTables(null, getDefaultSchemaName(connection), null, null)){ rs => - def hasTable(rs: ResultSet): Boolean = - if (!rs.next) false - else rs.getString(3) match { - case s if s.toLowerCase == table._dbTableNameLC.toLowerCase => actualTableNames(table._dbTableNameLC) = s; true - case _ => hasTable(rs) - } - - hasTable(rs) - } - } - - - /** - * Creates an SQL command and optionally executes it. 
- * - * @param performWrite Whether the SQL command should be executed. - * @param logFunc Logger. - * @param connection Database connection. - * @param makeSql Factory for SQL command. - * - * @return SQL command. - */ - private def maybeWrite(performWrite: Boolean, logFunc: (=> AnyRef) => Unit, connection: SuperConnection) (makeSql: () => String) : String ={ - val ct = makeSql() - logger.trace("maybeWrite DDL: "+ct) - if (performWrite) { - logFunc(ct) - val st = connection.createStatement - st.execute(ct) - st.close - } - ct - } - - private def ensureTable(performWrite: Boolean, logFunc: (=> AnyRef) => Unit, table: BaseMetaMapper, connection: SuperConnection, actualTableNames: HashMap[String, String]): Collector = { - val hasTable = logger.trace("Does table exist?: "+table.dbTableName, hasTable_?(table, connection, actualTableNames)) - val cmds = new ListBuffer[String]() - - if (!hasTable) { - cmds += maybeWrite(performWrite, logFunc, connection) { - () => "CREATE TABLE "+table._dbTableNameLC+" ("+createColumns(table, connection).mkString(" , ")+") "+connection.createTablePostpend - } - if (!connection.driverType.pkDefinedByIndexColumn_?) { - // Add primary key only when it has not been created by the index field itself. - table.mappedFields.filter{f => f.dbPrimaryKey_?}.foreach { - pkField => - connection.driverType.primaryKeySetup(table._dbTableNameLC, pkField._dbColumnNameLC) foreach { command => - cmds += maybeWrite(performWrite, logFunc, connection) { - () => command - } - } - } - } - hasTable_?(table, connection, actualTableNames) - Collector(table.dbAddTable.toList, cmds.toList) - } else Collector(Nil, cmds.toList) - } - - private def createColumns(table: BaseMetaMapper, connection: SuperConnection): Seq[String] = { - table.mappedFields.flatMap(_.fieldCreatorString(connection.driverType)) - } - - private def ensureColumns(performWrite: Boolean, logFunc: (=> AnyRef) => Unit, table: BaseMetaMapper, connection: SuperConnection, actualTableNames: HashMap[String, String]): Collector = { - val cmds = new ListBuffer[String]() - val rc = table.mappedFields.toList.flatMap { - field => - var hasColumn = 0 - var cols: List[String] = Nil - val totalColCnt = field.dbColumnCount - val md = connection.getMetaData - - using(md.getColumns(null, getDefaultSchemaName(connection), actualTableNames(table._dbTableNameLC), null))(rs => - while (hasColumn < totalColCnt && rs.next) { - val tableName = rs.getString(3).toLowerCase - val columnName = rs.getString(4).toLowerCase - - if (tableName == table._dbTableNameLC.toLowerCase && field.dbColumnNames(field.name).map(_.toLowerCase).contains(columnName)) { - cols = columnName :: cols - hasColumn = hasColumn + 1 - logger.trace("Column exists: %s.%s ".format(table.dbTableName, columnName)) - - } - }) - // FIXME deal with column types - (field.dbColumnNames(field.name).filter(f => !cols.map(_.toLowerCase).contains(f.toLowerCase))).foreach {colName => - logger.trace("Column does not exist: %s.%s ".format(table.dbTableName, colName)) - - cmds += maybeWrite(performWrite, logFunc, connection) { - () => "ALTER TABLE "+table._dbTableNameLC+" "+connection.driverType.alterAddColumn+" "+field.fieldCreatorString(connection.driverType, colName) - } - if ((!connection.driverType.pkDefinedByIndexColumn_?) && field.dbPrimaryKey_?) { - // Add primary key only when it has not been created by the index field itself. 
- cmds += maybeWrite(performWrite, logFunc, connection) { - () => "ALTER TABLE "+table._dbTableNameLC+" ADD CONSTRAINT "+table._dbTableNameLC+"_PK PRIMARY KEY("+field._dbColumnNameLC+")" - } - } - } - - field.dbAddedColumn.toList - - } - - Collector(rc, cmds.toList) - - } - - private def ensureIndexes(performWrite: Boolean, logFunc: (=> AnyRef) => Unit, table: BaseMetaMapper, connection: SuperConnection, actualTableNames: HashMap[String, String]): Collector = { - val cmds = new ListBuffer[String]() - // val byColumn = new HashMap[String, List[(String, String, Int)]]() - val byName = new HashMap[String, List[String]]() - - val md = connection.getMetaData - val q = using(md.getIndexInfo(null, getDefaultSchemaName(connection), actualTableNames(table._dbTableNameLC), false, false)) {rs => - def quad(rs: ResultSet): List[(String, String, Int)] = { - if (!rs.next) Nil else { - if (rs.getString(3).equalsIgnoreCase(table._dbTableNameLC)) { - // Skip index statistics - if (rs.getShort(7) != DatabaseMetaData.tableIndexStatistic) { - (rs.getString(6).toLowerCase, rs.getString(9).toLowerCase, rs.getInt(8)) :: quad(rs) - } - else quad(rs) - } - else Nil - } - } - quad(rs) - } - // val q = quad(rs) - // q.foreach{case (name, col, pos) => byColumn.get(col) match {case Some(li) => byColumn(col) = (name, col, pos) :: li case _ => byColumn(col) = List((name, col, pos))}} - q.foreach{case (name, col, pos) => byName.get(name) match {case Some(li) => byName(name) = col :: li case _ => byName(name) = List(col)}} - val indexedFields: List[List[String]] = byName.map{case (name, value) => value.sortWith(_ < _)}.toList - //rs.close - - val single = table.mappedFields.filter{f => f.dbIndexed_?}.toList.flatMap { - field => - if (!indexedFields.contains(List(field._dbColumnNameLC.toLowerCase))) { - cmds += maybeWrite(performWrite, logFunc, connection) { - () => "CREATE INDEX "+(table._dbTableNameLC+"_"+field._dbColumnNameLC)+" ON "+table._dbTableNameLC+" ( "+field._dbColumnNameLC+" )" - } - field.dbAddedIndex.toList - } else Nil - } - - table.dbIndexes.foreach { - index => - - val columns = index.columns.toList - - val standardCreationStatement = (table._dbTableNameLC+"_"+columns.map(_.field._dbColumnNameLC).mkString("_"))+" ON "+table._dbTableNameLC+" ( "+columns.map(_.indexDesc).comma+" )" - - val createStatement = index match { - case i: net.liftweb.mapper.Index[_] => "CREATE INDEX " + standardCreationStatement - case i: UniqueIndex[_] => "CREATE UNIQUE INDEX " + standardCreationStatement - case GenericIndex(createFunc, _, _) => createFunc(table._dbTableNameLC, columns.map(_.field._dbColumnNameLC)) - case _ => logger.error("Invalid index: " + index); "" - } - - val fn = columns.map(_.field._dbColumnNameLC.toLowerCase).sortWith(_ < _) - if (!indexedFields.contains(fn)) { - cmds += maybeWrite(performWrite, logFunc, connection) { - () => createStatement - } - } - } - - Collector(single, cmds.toList) - } - - private def ensureConstraints(performWrite: Boolean, logFunc: (=> AnyRef) => Unit, table: BaseMetaMapper, dbId: ConnectionIdentifier, connection: SuperConnection, actualTableNames: HashMap[String, String]): Collector = { - val cmds = new ListBuffer[String]() - val ret = if (connection.supportsForeignKeys_? 
&& MapperRules.createForeignKeys_?(dbId)) { - table.mappedFields.flatMap{f => f match {case f: BaseMappedField with BaseForeignKey => List(f); case _ => Nil}}.toList.flatMap { - field => - - val other = field.dbKeyToTable - val otherTable = actualTableNames(other._dbTableNameLC) - val myTable = actualTableNames(table._dbTableNameLC) - - val md = connection.getMetaData - // val rs = md.getCrossReference(null, null,otherTable , null, null, myTable) - var foundIt = false - using(md.getImportedKeys(null, getDefaultSchemaName(connection), myTable))(rs => - //val rs = md.getCrossReference(null, null,myTable , null, null, otherTable) - while (!foundIt && rs.next) { - val pkName = rs.getString(4) - val fkName = rs.getString(8) - foundIt = (field._dbColumnNameLC.toLowerCase == fkName.toLowerCase && field.dbKeyToColumn._dbColumnNameLC.toLowerCase == pkName.toLowerCase) - }) - - if (!foundIt) { - cmds += maybeWrite(performWrite, logFunc, connection) { - () => "ALTER TABLE "+table._dbTableNameLC+" ADD FOREIGN KEY ( "+field._dbColumnNameLC+" ) REFERENCES "+other._dbTableNameLC+" ( "+field.dbKeyToColumn._dbColumnNameLC+" ) " - } - field.dbAddedForeignKey.toList - } else { - Nil - } - } - } else { - Nil - } - - Collector(ret, cmds.toList) - } -} - diff --git a/persistence/mapper/src/main/scala/net/liftweb/mapper/package.scala b/persistence/mapper/src/main/scala/net/liftweb/mapper/package.scala deleted file mode 100644 index 650208e334..0000000000 --- a/persistence/mapper/src/main/scala/net/liftweb/mapper/package.scala +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright 2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb - -package object mapper { - type SuperConnection = db.SuperConnection - type ConnectionIdentifier = util.ConnectionIdentifier - type DriverType = db.DriverType - type ConnectionManager = db.ConnectionManager - type DBLogEntry = db.DBLogEntry - type StandardDBVendor = db.StandardDBVendor - - def DBLogEntry: db.DBLogEntry.type = db.DBLogEntry - def DefaultConnectionIdentifier: util.DefaultConnectionIdentifier.type = util.DefaultConnectionIdentifier - def DriverType: db.DriverType.type = db.DriverType -} diff --git a/persistence/mapper/src/main/scala/net/liftweb/mapper/view/ModelView.scala b/persistence/mapper/src/main/scala/net/liftweb/mapper/view/ModelView.scala deleted file mode 100644 index 41e11a7502..0000000000 --- a/persistence/mapper/src/main/scala/net/liftweb/mapper/view/ModelView.scala +++ /dev/null @@ -1,177 +0,0 @@ -/* - * Copyright 2009-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mapper -package view - -import http.{S, StatefulSnippet} -import S.? -import util.CssSel -import util.Helpers._ - -import scala.xml.{NodeSeq, Text} - - -/** - * A snippet that can list and edit items of a particular Mapper class - * This trait can help reduce boilerplate in the common scenario where - * you want a snippet class to provide list and edit snippets for a - * specific Mapper class. - * @author nafg - */ -trait ModelSnippet[T <: Mapper[T]] extends StatefulSnippet { - import mapper.view.{ModelView => MV} - class ModelView(e: T, snippet: ModelSnippet[T]) extends MV[T](e, snippet) { - def this(e: T) = { - this(e, this) - } - } - /** - * The instance of ModelView that wraps the currently loaded entity - */ - val view: MV[T] - - /** - * Action when save is successful. Defaults to using the ModelView's redirectOnSave - */ - var onSave: MV[T] => Unit = (view: MV[T]) => { - view.redirectOnSave.foreach(redirectTo) - } - - /** - * The list snippet - */ - def list(ns: NodeSeq): NodeSeq - /** - * The edit snippet - */ - def edit(ns: NodeSeq): NodeSeq - - def load(entity: T): Unit = view.entity = entity - - def dispatch: DispatchIt = { - case "list" => list _ - case "edit" => edit _ - case "newOrEdit" => view.newOrEdit - } - - /** - * A ".edit" CssSel - */ - def editAction(e: T): CssSel = ".edit" #> link("edit", ()=>load(e), Text(?("Edit"))) - /** - * A ".remove" CssSel - */ - def removeAction(e: T): CssSel = ".remove" #> link("list", ()=>e.delete_!, Text(?("Remove"))) -} - - -/** - * A wrapper around a Mapper that provides view-related utilities. Belongs to a parent ModelSnippet. - * @author nafg - */ -class ModelView[T <: Mapper[T]](var entity: T, val snippet: ModelSnippet[T]) { - /** - * If Some(string), will redirect to string on a successful save. - * If None, will load the same page. - * Defaults to Some("list"). - * This var is used by ModelSnippet.onSave, which is a ModelView=>Unit - */ - var redirectOnSave: Option[String] = Some("list") - - /** - * Loads this entity into the snippet so it can be edited - */ - def load(): Unit = snippet.load(entity) - - /** - * Delete the entity - */ - def remove: Boolean = - entity.delete_! - /** - * This function is used as a snippet in the edit view - * to provide alternate text depending on whether an - * existing entity is being edited or a new one is being - * created. - */ - def newOrEdit: CssSel = { - if (entity.saved_?) - ".edit ^^" #> "ignored" - else - ".new ^^" #> "ignored" - } - - /** - * This method checks whether the entity - * validates; if so it saves it, and if - * successful redirects to the location - * specified by redirectOnSave, if any. - * If save or validation fails, the - * appropriate message(s) is/are displayed - * and no redirect is performed. - */ - def save(): Unit = { - entity.validate match { - case Nil => - if(entity.save) - snippet.onSave(this) - else - S.error("Save failed") - case errors => - S.error(errors) - } - } - - /** - * returns a string that represents the id, or <new> - * if the entity is a new entity. 
- * If the entity has been saved then the id is determined - * as follows: If it is a KeyedMapper then it calls toString - * on the entity's primaryKeyField. Otherwise it - * calls toString on a field named "id." - */ - def idString: String = if(entity.saved_?) - entity match { - case e: KeyedMapper[_,T] => e.primaryKeyField.toString - case _ => entity.fieldByName("id").dmap("")((f: MappedField[_,T]) => f.toString) - } - else - "" - - - /** - * Returns a CssSel that binds a link to ".edit" to load and edit this entity - */ - lazy val editAction: CssSel = ".edit" #> snippet.link("edit", load, Text(?("Edit"))) - /** - * Returns a CssSel that binds a link to ".remove" that contains a link to delete this entity - */ - lazy val removeAction: CssSel = ".remove" #> snippet.link("list", ()=>remove, Text(?("Remove"))) - /** - * Returns a CssSel that binds the contents of an element with class "." - * to the field named `name`. - * If the field has a Full toForm implementation then that is used; - * otherwise its asHtml is called. - */ - def edit(name: String): CssSel = { - entity.fieldByName(name).map { (field: net.liftweb.mapper.MappedField[_,_]) => - s".$name *" #> field.toForm.openOr(field.asHtml) - }.openOrThrowException("If nobody has complained about this giving a NPE, I'll assume it is safe") - } -} - diff --git a/persistence/mapper/src/main/scala/net/liftweb/mapper/view/Paginator.scala b/persistence/mapper/src/main/scala/net/liftweb/mapper/view/Paginator.scala deleted file mode 100644 index 3ab8731088..0000000000 --- a/persistence/mapper/src/main/scala/net/liftweb/mapper/view/Paginator.scala +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Copyright 2009-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package net.liftweb -package mapper -package view - -import net.liftweb.http.{ Paginator, PaginatorSnippet, SortedPaginator, SortedPaginatorSnippet } - -/** - * Helper for when using paginators with a ModelSnippet. - * Adds a dispatch that delegates the "paginate" snippet to the paginator member. - * @author nafg and Timothy Perrett - */ -trait PaginatedModelSnippet[T <: Mapper[T]] extends ModelSnippet[T] { - abstract override def dispatch: DispatchIt = super.dispatch orElse Map("paginate" -> paginator.paginate) - /** - * The paginator to delegate to - */ - val paginator: PaginatorSnippet[T] -} - -/** - * Paginate mapper instances by supplying the model you - * wish to paginate and Paginator will run your query for you etc. 
- * - * @param meta The singleton of the Mapper class you're paginating - * @author nafg and Timothy Perrett - */ -class MapperPaginator[T <: Mapper[T]](val meta: MetaMapper[T]) extends Paginator[T] { - /** - * QueryParams to use always - */ - var constantParams: Seq[QueryParam[T]] = Nil - - def count: Long = meta.count(constantParams: _*) - def page: Seq[T] = meta.findAll(constantParams ++ Seq[QueryParam[T]](MaxRows(itemsPerPage), StartAt(first)): _*) -} - -/** - * Convenience class that combines MapperPaginator with PaginatorSnippet - * @param meta The singleton of the Mapper class you're paginating - */ -class MapperPaginatorSnippet[T <: Mapper[T]](meta: MetaMapper[T]) - extends MapperPaginator[T](meta) with PaginatorSnippet[T] - -/** - * Implements MapperPaginator and SortedPaginator. - * @param meta The singleton of the Mapper class you're paginating - * @param initialSort The field to sort by initially - * @param _headers Pairs of column labels and MappedFields. - */ -class SortedMapperPaginator[T <: Mapper[T]](meta: MetaMapper[T], - initialSort: net.liftweb.mapper.MappedField[_, T], - _headers: (String, MappedField[_, T])*) - extends MapperPaginator[T](meta) with SortedPaginator[T, MappedField[_, T]] { - - val headers = _headers.toList - sort = (headers.indexWhere{case (_,`initialSort`)=>true; case _ => false}, true) - - override def page: Seq[T] = meta.findAll(constantParams ++ Seq[QueryParam[T]](mapperSort, MaxRows(itemsPerPage), StartAt(first)): _*) - private def mapperSort = sort match { - case (fieldIndex, ascending) => - OrderBy( - headers(fieldIndex) match {case (_,f)=>f}, - if(ascending) Ascending else Descending - ) - } -} - -/** - * Convenience class that combines SortedMapperPaginator and SortedPaginatorSnippet. - * @param meta The singleton of the Mapper class you're paginating - * @param initialSort The field to sort by initially - * @param headers Pairs of column labels and MappedFields. - */ -class SortedMapperPaginatorSnippet[T <: Mapper[T]]( - meta: MetaMapper[T], - initialSort: net.liftweb.mapper.MappedField[_, T], - headers: (String, MappedField[_, T])* -) extends SortedMapperPaginator[T](meta, initialSort, headers: _*) - with SortedPaginatorSnippet[T, MappedField[_, T]] - diff --git a/persistence/mapper/src/main/scala/net/liftweb/mapper/view/TableEditor.scala b/persistence/mapper/src/main/scala/net/liftweb/mapper/view/TableEditor.scala deleted file mode 100644 index 65aa194e90..0000000000 --- a/persistence/mapper/src/main/scala/net/liftweb/mapper/view/TableEditor.scala +++ /dev/null @@ -1,314 +0,0 @@ -/* - * Copyright 2009-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mapper -package view - -import scala.xml.{NodeSeq, Text} - -import common.Box -import util.Helpers -import Helpers._ -import http.{SHtml, S, DispatchSnippet, js} -import S.? - -import js.JsCmds.{Script, Run} - -import Util._ - - -/** - * Keeps track of pending adds to and removes from a list of mappers. 
- * Supports in-memory sorting by a field. - * Usage: override metaMapper with a MetaMapper instance, call sortBy - * to specify the field to sort by. If it is already sorted by that - * field it will sort descending, otherwise ascending. - * Call save to actualize changes. - * @author nafg - */ -trait ItemsList[T <: Mapper[T]] { - /** - * The MetaMapper that provides create and findAll functionality etc. - * Must itself be a T (the mapper type it represents) - */ - def metaMapper: T with MetaMapper[T] - - /** - * Whether the sorting algorithm should put null first or last - */ - var sortNullFirst = true - /** - * The list of items that correspond to items in the database - */ - var current: List[T] = Nil - /** - * The list of items pending to be added to the database - */ - var added: List[T] = Nil - /** - * The list of items to be deleted from current - */ - var removed: List[T] = Nil - /** - * The field to sort by, if any - */ - var sortField: Option[MappedField[_, T]] = None - /** - * The sort direction - */ - var ascending = true - - /** - * Returns the items (current + added - removed), sorted. - * Sorting sorts strings case-insensitive, as well as Ordered and java.lang.Comparable. - * Anything else where both values are nonnull are sorted via their toString method (case sensitive) - */ - def items: Seq[T] = { - val unsorted: List[T] = current.filterNot(removed.contains) ++ added - sortField match { - case None => - unsorted - case Some(field) => - unsorted.sortWith { - (a, b) => ((field.actualField(a).get: Any, field.actualField(b).get: Any) match { - case (aval: String, bval: String) => aval.toLowerCase < bval.toLowerCase - case (aval: Ordered[_], bval: Ordered[_]) => - aval.asInstanceOf[Ordered[Any]] < bval.asInstanceOf[Ordered[Any]] - case (aval: java.lang.Comparable[_], bval: java.lang.Comparable[_]) => - (aval.asInstanceOf[java.lang.Comparable[Any]] compareTo bval.asInstanceOf[java.lang.Comparable[Any]]) < 0 - case (null, _) => sortNullFirst - case (_, null) => !sortNullFirst - case (aval, bval) => aval.toString < bval.toString - }) match { - case cmp => - if(ascending) cmp else !cmp - } - } - } - } - /** - * Adds a new, unsaved item - */ - def add(): Unit = { - added ::= metaMapper.create - } - /** - * Marks an item pending for removal - */ - def remove(i: T): Unit = { - if(added.exists(i.eq)) - added = added.filter(i.ne) - else if(current.contains(i)) - removed ::= i - } - /** - * Reset the ItemsList from the database: calls refresh, and 'added' and 'removed' are cleared. - */ - def reload(): Unit = { - refresh() - added = Nil - removed = Nil - } - /** - * Reloads the contents of 'current' from the database - */ - def refresh(): Unit = { - current = metaMapper.findAll() - } - /** - * Sends to the database: - * added is saved - * removed is deleted - * (current - removed) is saved - */ - def save(): Unit = { - val (successAdd, failAdd) = added.partition(_.save) - added = failAdd - - val (successRemove, failRemove) = removed.partition(_.delete_!) 
- current = current.filterNot(successRemove.contains) - removed = failRemove - - for(c <- current if c.validate.isEmpty) c.save - - current ++= successAdd - } - - - def sortBy(field: MappedField[_, T]): Unit = (sortField, ascending) match { - case (Some(f), true) if f eq field => - ascending = false - case _ | null => - sortField = Some(field) - ascending = true - } - def sortFn(field: MappedField[_, T]): () => Unit = (sortField, ascending) match { - case (Some(f), true) if f eq field => - () => ascending = false - case _ | null => - () => { - sortField = Some(field) - ascending = true - } - } - - reload() -} - - -/** - * Holds a registry of TableEditor delegates - * Call TableEditor.registerTable(name_to_use_in_view, meta_mapper_for_the_table, display_title) - * in Boot after DB.defineConnectionManager. - * Referencing TableEditor triggers registering its snippet package and enabling - * the provided template, /tableeditor/default. - * @author nafg - */ -object TableEditor { - net.liftweb.http.LiftRules.addToPackages("net.liftweb.mapper.view") - - private[view] val map = new scala.collection.mutable.HashMap[String, TableEditorImpl[_]] - def registerTable[T<:Mapper[T]](name: String, meta: T with MetaMapper[T], title: String): Unit = - map(name) = new TableEditorImpl(title, meta) -} - -package snippet { - /** - * This is the snippet that the view references. - * It requires the following contents: - * table:title - the title registered in Boot - * header:fields - repeated for every field of the MetaMapper, for the header. - * field:name - the displayName of the field, capified. Links to sort by the field. - * table:items - repeated for each record - * item:fields - repeated for each field of the current record - * field:form - the result of toForm on the field - * item:removeBtn - a button to remove the current item - * table:insertBtn - a button to insert another item - * For a default layout, use lift:embed what="/tableeditor/default", with - * @author nafg - */ - class TableEditor extends DispatchSnippet { - private def getInstance: Box[TableEditorImpl[_]] = S.attr("table").map(TableEditor.map(_)) - def dispatch: DispatchIt = { - case "edit" => - val o = getInstance.openOrThrowException("if we don't have the table attr, we want the dev to know about it.") - o.edit - } - } -} - -/** - * This class does the actual view binding against a ItemsList. - * The implementation is in the base trait ItemsListEditor - * @author nafg - */ -protected class TableEditorImpl[T <: Mapper[T]](val title: String, meta: T with MetaMapper[T]) extends ItemsListEditor[T] { - var items: ItemsList[T] = new ItemsList[T] { - def metaMapper: T with MetaMapper[T] = meta - } -} - -/** - * General trait to edit an ItemsList. - * @author nafg - */ -trait ItemsListEditor[T<:Mapper[T]] { - def items: ItemsList[T] - def title: String - - def onInsert(): Unit = items.add() - def onRemove(item: T): Unit = items.remove(item) - def onSubmit(): Unit = try { - items.save() - } catch { - case e: java.sql.SQLException => - S.error("Not all items could be saved!") - } - def sortFn(f: MappedField[_, T]): ()=>Unit = items.sortFn(f) - - val fieldFilter: MappedField[_,T]=>Boolean = (f: MappedField[_,T])=>true - - def customBind(item: T): NodeSeq=>NodeSeq = (ns: NodeSeq) => ns - - def edit: (NodeSeq)=>NodeSeq = { - def unsavedScript = ({Script(Run(""" - var safeToContinue = false - window.onbeforeunload = function(evt) {{ // thanks Tim! 
- if(!safeToContinue) {{ - var reply = "You have unsaved changes!"; - if(typeof evt == 'undefined') evt = window.event; - if(evt) evt.returnValue = reply; - return reply; - }} - }} - """))}) - val noPrompt = "onclick" -> "safeToContinue=true" - val optScript = if( - (items.added.length + items.removed.length == 0) && - items.current.forall(!_.dirty_?) - ) { - NodeSeq.Empty - } else { - unsavedScript - } - - val bindRemovedItems = - items.removed.map { item => - "^" #> customBind(item) andThen - ".fields" #> eachField(item, { f: MappedField[_, T] => ".form" #> {f.asHtml} }) & - ".removeBtn" #> SHtml.submit(?("Remove"), ()=>onRemove(item), noPrompt) & - ".msg" #> Text(?("Deleted")) - } - - val bindRegularItems = - items.items.map { item => - "^" #> customBind(item) andThen - ".fields" #> eachField(item, { f: MappedField[_, T] => ".form" #> f.toForm }) & - ".removeBtn" #> SHtml.submit(?("Remove"), ()=>onRemove(item), noPrompt) & - ".msg" #> { - item.validate match { - case Nil => - if (! item.saved_?) - Text(?("New")) - else if (item.dirty_?) - Text(?("Unsaved")) - else - NodeSeq.Empty - case errors => -
<ul>{errors.flatMap(e =>
-   <li>{e.msg}</li>
- )}</ul>
- } - } - } - - "^ >*" #> optScript andThen - ".fields *" #> { - eachField[T]( - items.metaMapper, - { f: MappedField[_, T] => - ".name" #> SHtml.link(S.uri, sortFn(f), Text(capify(f.displayName))) - }, - fieldFilter - ) - } & - ".table" #> { - ".title *" #> title & - ".insertBtn" #> SHtml.submit(?("Insert"), onInsert _, noPrompt) & - ".item" #> (bindRegularItems ++ bindRemovedItems) & - ".saveBtn" #> SHtml.submit(?("Save"), onSubmit _, noPrompt) - } - } -} diff --git a/persistence/mapper/src/main/scala/net/liftweb/mapper/view/Util.scala b/persistence/mapper/src/main/scala/net/liftweb/mapper/view/Util.scala deleted file mode 100644 index 0cda356c13..0000000000 --- a/persistence/mapper/src/main/scala/net/liftweb/mapper/view/Util.scala +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Copyright 2009-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mapper -package view - -import common.Full -import util._ -import Helpers._ - -import scala.xml.NodeSeq - -/** - * Provides a number of methods that make complex Mapper-based view snippets - * easier to build. - * @author nafg - */ -object Util { - /** - * Binds all nodes whose names are names of fields on the specified mapper. - * This makes it unnecessary to write repetitious bindings like - * "field1" -> field1.toForm, - * "field2" -> field2.toform - * Instead it automates such bindings but you have to pass it a function - * that will generate a NodeSeq from the field, e.g., - * (f: MappedField[_,_]) => f.toForm - * Usage: Pass as a Full Box to the bind overload that takes a nodeFailureXform - * argument. - */ - def bindFields[T <: Mapper[T]](mapper: T, nsfn: MappedField[_,T]=>NodeSeq): NodeSeq=>NodeSeq = { - case scala.xml.Elem(_, name, _, _, _*) => - mapper.fieldByName(name) match { - case Full(field) => nsfn(field) - case _ => NodeSeq.Empty - } - case ns => ns - } - - /** - * Iterates over the fields of the specified mapper. If the node currently being processed by bind - * has an attribute "fields" then it is taken as a whitespace-delimited list of fields to iterate - * over; otherwise all form fields are used. The specified function returns a BindParam for doing - * processing specific to that field. - * Returns a bind function (NodeSeq=>NodeSeq) that can be used to bind an xml node that should be - * repeated for each field. - * Usage: if you want to repeat xml markup for each field, the view should use the "field:" prefix - * for field-specific nodes. The snippet should bind the containing (repeating) node to the function - * returned by this method, passing this method the mapper instance whose fields should be used and - * a function that returns BindParams to process the "field:" prefixed nodes. - * This method takes an additional filter function to restrict certain fields from being - * displayed. There is an overload without it too. 
- */ - def eachField[T<:net.liftweb.mapper.Mapper[T]]( - mapper: T, - fn:MappedField[_,T]=>CssSel, - filter: MappedField[_,T]=>Boolean - ): NodeSeq=>NodeSeq = { - def fieldBindIfWanted(fieldName: String) = { - mapper.fieldByName(fieldName).filter(filter) match { - case Full(field) => - Some(fn(field)) - case _ => - None - } - } - - "^" #> { ns: NodeSeq => - val fieldsAttribute = (ns \ "@fields") - - val bind: Seq[CssSel] = - if (fieldsAttribute.nonEmpty) { - for { - fieldName <- fieldsAttribute.text.split("\\s+").toIndexedSeq - // the following hackery is brought to you by the Scala compiler not - // properly typing MapperField[_, T] in the context of the for - // comprehension - fieldBind <- fieldBindIfWanted(fieldName) - } yield { - ".field" #> fieldBind - } - } else { - mapper.formFields.filter(filter).map { - case field: MappedField[_, T] => - ".field" #> fn(field) - } - } - - bind.map(_(ns)) - } - } - - def eachField[T<:net.liftweb.mapper.Mapper[T]]( - mapper: T, - fn: MappedField[_,T] => CssSel - ): NodeSeq => NodeSeq = eachField(mapper, fn, (_: MappedField[_,T]) => true) - -} - diff --git a/persistence/mapper/src/test/resources/logback-test.xml b/persistence/mapper/src/test/resources/logback-test.xml deleted file mode 100644 index 58b9aa21d5..0000000000 --- a/persistence/mapper/src/test/resources/logback-test.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - - %d [%thread] %level %logger - %m%n - - - - - - - - - diff --git a/persistence/mapper/src/test/scala/bootstrap/liftweb/Boot.scala b/persistence/mapper/src/test/scala/bootstrap/liftweb/Boot.scala deleted file mode 100644 index dbdc6a0668..0000000000 --- a/persistence/mapper/src/test/scala/bootstrap/liftweb/Boot.scala +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright 2007-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package bootstrap.liftweb - -import net.liftweb.http._ -import net.liftweb.sitemap._ - -/** - * A class that's instantiated early and run. It allows the application - * to modify lift's environment - */ -class Boot { - def boot(): Unit = { - // where to search snippet - LiftRules.addToPackages("net.liftweb.webapptest") - - // Build SiteMap - val entries = Menu("Home") / "index" :: - Menu("htmlFragmentWithHead") / "htmlFragmentWithHead" :: - Menu("htmlSnippetWithHead") / "htmlSnippetWithHead" :: - Nil - - LiftRules.setSiteMap(SiteMap(entries:_*)) - } -} diff --git a/persistence/mapper/src/test/scala/net/liftweb/mapper/DBProviders.scala b/persistence/mapper/src/test/scala/net/liftweb/mapper/DBProviders.scala deleted file mode 100644 index cc415b4475..0000000000 --- a/persistence/mapper/src/test/scala/net/liftweb/mapper/DBProviders.scala +++ /dev/null @@ -1,168 +0,0 @@ -/* - * Copyright 2007-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mapper - -import java.io.File -import java.sql.{Connection, DriverManager} - -import common._ -import util._ - - -object DbProviders { - def asList = PostgreSqlProvider :: MySqlProvider :: DerbyProvider :: H2FileProvider :: H2MemoryProvider :: Nil - // Uncomment to run tests faster, but only against H2 def asList = H2MemoryProvider :: Nil - - - case object SnakeConnectionIdentifier extends ConnectionIdentifier { - var jndiName = "snake" - } - - trait Provider { - def name: String - def setupDB: Unit - def required_? : Boolean = Props.getBool(propsPrefix+"required", false) - def propName: String - lazy val propsPrefix: String = "mapper.test."+propName+"." - } - - trait FileDbSetup { - def filePath : String - def vendor : Vendor - - def setupDB: Unit = { - val f = new File(filePath) - DB.defineConnectionManager(DefaultConnectionIdentifier, vendor) - DB.defineConnectionManager(SnakeConnectionIdentifier, vendor) - } - } - - trait DbSetup { - def vendor : Vendor - - def setupDB: Unit = { - DB.defineConnectionManager(DefaultConnectionIdentifier, vendor) - DB.defineConnectionManager(SnakeConnectionIdentifier, vendor) - - def deleteAllTables: Unit = { - DB.use(DefaultConnectionIdentifier) { - conn => - val md = conn.getMetaData - val rs = md.getTables(null, Schemifier.getDefaultSchemaName(conn), null, null) - var toDelete: List[String] = Nil - while (rs.next) { - val tableName = rs.getString(3) - if (rs.getString(4).toLowerCase == "table") toDelete = tableName :: toDelete - } - rs.close - } - } - deleteAllTables - } - } - - abstract class Vendor(driverClass : String) extends ConnectionManager { - def newConnection(name: ConnectionIdentifier): Box[Connection] = { - Class.forName(driverClass) - Full(mkConn) - } - - def releaseConnection(conn: Connection): Unit = { - try { - conn.close - } catch { - case e: Exception => Empty //ignore - } - } - - def mkConn : Connection - } - - - object MySqlProvider extends Provider with DbSetup { - def name = "MySql" - def vendor = new Vendor("com.mysql.jdbc.Driver") { - def mkConn = { - DriverManager.getConnection("jdbc:mysql://localhost:3306/lift_test?autoReconnect=true", "dpp", "") - } - } - def propName: String = "mysql_local" - } - - object PostgreSqlProvider extends Provider with DbSetup { - def name = "PostgreSql" - def vendor = new Vendor("org.postgresql.Driver") { - def mkConn = DriverManager.getConnection("jdbc:postgresql://localhost/lift", "lift", "lift") - } - def propName: String = "psql_local" - } - - object DerbyProvider extends Provider with FileDbSetup { - def name = "Derby" - def filePath = "target/tests_derby_lift" - def vendor = new Vendor("org.apache.derby.jdbc.EmbeddedDriver") { - def mkConn = DriverManager.getConnection("jdbc:derby:" + filePath + ";create=true") - } - def propName: String = "derby_local" - override def required_? 
= true - } - - object H2FileProvider extends Provider with FileDbSetup { - def name = "H2" - def filePath = "target/tests_h2_lift" - def vendor = new Vendor("org.h2.Driver") { - def mkConn = DriverManager.getConnection("jdbc:h2:" + filePath + "/test.db") - } - def propName: String = "hs_fs" - override def required_? = true - } - - object H2MemoryProvider extends Provider with DbSetup { - def name = "H2 in memory" - def vendor = new Vendor("org.h2.Driver") { - def mkConn = DriverManager.getConnection("jdbc:h2:mem:lift;DB_CLOSE_DELAY=-1") - } - def propName: String = "hs_mem" - override def required_? = true - } - - object SqlServerProvider extends Provider with DbSetup { - def name = "Microsoft SQL Server" - def vendor = new Vendor("net.sourceforge.jtds.jdbc.Driver") { - def mkConn = DriverManager.getConnection("jdbc:jtds:sqlserver://localhost/lift", "lift", "lift") - } - def propName: String = "ms_sqlserver" - } - - object OracleProvider extends Provider with DbSetup { - def name = "Oracle" - def vendor = new Vendor("oracle.jdbc.OracleDriver") { - def mkConn = DriverManager.getConnection("jdbc:oracle:thin:lift/lift@//localhost:1521/lift") - } - def propName: String = "oracle_local" - } - - object MaxDbProvider extends Provider with DbSetup { - def name = "SAP MaxDB" - def vendor = new Vendor("com.sap.dbtech.jdbc.DriverSapDB") { - def mkConn = DriverManager.getConnection("jdbc:sapdb://localhost:7210/lift?user=lift&password=lift") - } - def propName: String = "maxdb_local" - } -} - diff --git a/persistence/mapper/src/test/scala/net/liftweb/mapper/DbSpec.scala b/persistence/mapper/src/test/scala/net/liftweb/mapper/DbSpec.scala deleted file mode 100644 index ebec64ae25..0000000000 --- a/persistence/mapper/src/test/scala/net/liftweb/mapper/DbSpec.scala +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright 2007-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mapper - -import org.specs2.mutable.Specification - -import common._ -import util._ -import http.{S, LiftSession} - - -/** - * Systems under specification for DB. 
- */ -class DbSpec extends Specification { - "DB Specification".title - - val provider = DbProviders.H2MemoryProvider - val logF = Schemifier.infoF _ - - def cleanup(): Unit = { - provider.setupDB - Schemifier.destroyTables_!!(DefaultConnectionIdentifier, logF , User) - Schemifier.schemify(true, logF, DefaultConnectionIdentifier, User) - } - - "DB" should { - "collect queries when queryCollector is added as logFunc" in { - cleanup() - DB.addLogFunc(DB.queryCollector) - - var statements: List[(String, Long)] = Nil - - S.addAnalyzer((r,t,ss) => statements=ss) - - val session = new LiftSession("hello", "", Empty) - val elwood = S.initIfUninitted(session) { - val r = User.find(By(User.firstName, "Elwood")) - S.queryLog.size must_== 1 - r - } - statements.size must_== 1 - elwood.map( _.firstName.get) must_== Full("Elwood") - } - } -} - diff --git a/persistence/mapper/src/test/scala/net/liftweb/mapper/ItemsListSpec.scala b/persistence/mapper/src/test/scala/net/liftweb/mapper/ItemsListSpec.scala deleted file mode 100644 index bcad0bf29a..0000000000 --- a/persistence/mapper/src/test/scala/net/liftweb/mapper/ItemsListSpec.scala +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Copyright 2010-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mapper - -import org.specs2.mutable.Specification - -import util._ -import view._ - - -/** - * Systems under specification for ItemsList. 
- */ -class ItemsListSpec extends Specification { - "ItemsList Specification".title - sequential - - val provider = DbProviders.H2MemoryProvider - - def init = { - provider.setupDB - Schemifier.destroyTables_!!(DefaultConnectionIdentifier, Schemifier.neverF _, SampleItem) - Schemifier.schemify(true, Schemifier.neverF _, SampleItem) - new ItemsList[SampleItem] { - def metaMapper = SampleItem - } - } - - "ItemsList" should { - "buffer items to save" in { - val il = init - il.add - il.add - il.add - il.current.length must_== 0 - il.added.length must_== 3 - - il.save - SampleItem.count must_== 3 - il.current.length must_== 3 - } - - "correctly handle removing an unsaved item" in { - val il = init - il.add - il.add - il.add - il.save - - il.add - il.add - il.add - il.remove(il.added(1)) - il.remove(il.added(0)) - il.save - SampleItem.count must_== 4 - il.added.length must_== 0 - il.removed.length must_== 0 - } - } - -} - -class SampleItem extends LongKeyedMapper[SampleItem] with IdPK { - def getSingleton = SampleItem - object field extends MappedInt(this) -} - -object SampleItem extends SampleItem with LongKeyedMetaMapper[SampleItem] { - var counter = 0 - override def create = { - val x: SampleItem = super.create - x.field(counter) - counter += 1 - x - } -} - diff --git a/persistence/mapper/src/test/scala/net/liftweb/mapper/ManyToManySpecs.scala b/persistence/mapper/src/test/scala/net/liftweb/mapper/ManyToManySpecs.scala deleted file mode 100644 index 6cd54a695e..0000000000 --- a/persistence/mapper/src/test/scala/net/liftweb/mapper/ManyToManySpecs.scala +++ /dev/null @@ -1,145 +0,0 @@ -/* - * Copyright 2009-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mapper - -import org.specs2.mutable.Specification - -/** - * Systems under specification for ManyToMany. - */ -class ManyToManySpec extends Specification { - "ManyToMany Specification".title - sequential - - val provider = DbProviders.H2MemoryProvider - - private def ignoreLogger(f: => AnyRef): Unit = () - def setupDB: Unit = { - MapperRules.createForeignKeys_? = c => false - provider.setupDB - Schemifier.destroyTables_!!(ignoreLogger _, PersonCompany, Company, Person) - Schemifier.schemify(true, ignoreLogger _, Person, Company, PersonCompany) - } - def createPerson: Person = { - val person = new Person - person.save - val companies = (1 to 10).toList map { i => - val c = new Company - c.name ()= i.toString - c.save - c - } - person.companies ++= companies - person.save - // Break some joins - companies(3).delete_! // delete "4" - companies(6).delete_! 
// delete "7" - person.companies.refresh // reload joins so joinEntity.company.obj isn't cached - person - } - - "ManyToMany" should { - "skip broken joins in children" in { - setupDB - val person = createPerson - person.companies.joins.length must_== 10 - person.companies.all.length must_== 8 - } - "handle missing joins in insertAll" in { - setupDB - val person = createPerson - val c = new Company - c.name ()= "new" - c.save - person.companies.insertAll(7, Seq(c)) - person.companies(7).name.get must_== "new" - } - - -// from Florian - - "count unsaved children" in { - setupDB - val person = new Person - val company = new Company - person.companies += company - person.companies.length must_== 1 - } - "count saved children" in { - setupDB - val person = new Person - val company = new Company - company.save - person.companies += company - person.companies.length must_== 1 - } -// "count unsaved children of a saved entity" in { -// setupDB -// val person = new Person -// val company = new Company -// person.companies += company -// person.save -// person.companies.length must_== 1 -// } - "count saved children of a saved entity" in { - setupDB - val person = new Person - val company = new Company - company.save - person.companies += company - person.save - person.companies.length must_== 1 - } - "count saved children of a saved entity after refresh" in { - setupDB - val person = new Person - person.save - val company = new Company - company.save - person.companies += company - person.save - person.companies.refresh - person.companies.length must_== 1 - } - } - -} - - - -class Person extends LongKeyedMapper[Person] with IdPK with ManyToMany { - def getSingleton = Person - object companies extends MappedManyToMany(PersonCompany, PersonCompany.person, PersonCompany.company, Company) -} -object Person extends Person with LongKeyedMetaMapper[Person] - -class Company extends LongKeyedMapper[Company] with IdPK { - def getSingleton = Company - object name extends MappedString(this, 10) -} -object Company extends Company with LongKeyedMetaMapper[Company] - -class PersonCompany extends Mapper[PersonCompany] { - def getSingleton = PersonCompany - object person extends MappedLongForeignKey(this, Person) - object company extends MappedLongForeignKey(this, Company) - - override def toString = "PersonCompany(person.is=%s, person.obj=%s, company.is=%s, company.obj=%s)".format(person.get,person.obj,company.get,company.obj) -} -object PersonCompany extends PersonCompany with MetaMapper[PersonCompany] - diff --git a/persistence/mapper/src/test/scala/net/liftweb/mapper/MappedBooleanSpec.scala b/persistence/mapper/src/test/scala/net/liftweb/mapper/MappedBooleanSpec.scala deleted file mode 100644 index 4517f7d68d..0000000000 --- a/persistence/mapper/src/test/scala/net/liftweb/mapper/MappedBooleanSpec.scala +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright 2010-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package mapper - -import org.specs2.mutable.Specification - -import common._ - - -/** - * Systems under specification for MappedDate. - */ -class MappedBooleanSpec extends Specification { - "MappedBoolean Specification".title - sequential - - val provider = DbProviders.H2MemoryProvider - - private def ignoreLogger(f: => AnyRef): Unit = () - def setupDB: Unit = { - provider.setupDB - Schemifier.destroyTables_!!(ignoreLogger _, Dog2, User) - Schemifier.schemify(true, ignoreLogger _, Dog2, User) - } - - "MappedBoolean" should { - "not be marked dirty on read" in { - setupDB - val charlie = Dog2.create - charlie.isDog(true).save - - val read = Dog2.find(charlie.dog2id) - read.map(_.dirty_?) must_== Full(false) - } - - "be marked dirty on update if value has changed" in { - setupDB - val charlie = Dog2.create - charlie.save - - val read = Dog2.find(charlie.dog2id).openOrThrowException("This is a test") - read.dirty_? must_== false - read.isDog(false) - read.dirty_? must_== false - - read.isDog(true) - read.isDog(true) - read.dirty_? must_== true - } - } -} - diff --git a/persistence/mapper/src/test/scala/net/liftweb/mapper/MappedDateSpec.scala b/persistence/mapper/src/test/scala/net/liftweb/mapper/MappedDateSpec.scala deleted file mode 100644 index 8d92f85198..0000000000 --- a/persistence/mapper/src/test/scala/net/liftweb/mapper/MappedDateSpec.scala +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright 2010-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mapper - -import org.specs2.mutable.Specification - -import common._ - - -/** - * Systems under specification for MappedDate. - */ -class MappedDateSpec extends Specification { - "MappedDate Specification".title - - "MappedDate" should { - "handle a Number in setFromAny" in { - val dog = Dog2.create - val currentDate = new java.util.Date() - dog.createdTime.setFromAny(BigInt(currentDate.getTime)) - dog.createdTime.get mustEqual currentDate - } - - "handle a full Box in setFromAny" in { - val dog = Dog2.create - val someDate = new java.util.Date(1000) - dog.createdTime.setFromAny(Full(someDate)) - dog.createdTime.get mustEqual someDate - } - - "handle en empty Box in setFromAny" in { - val dog = Dog2.create - dog.createdTime.setFromAny(Empty) - dog.createdTime.get must beNull - } - } -} - diff --git a/persistence/mapper/src/test/scala/net/liftweb/mapper/MappedDecimalSpec.scala b/persistence/mapper/src/test/scala/net/liftweb/mapper/MappedDecimalSpec.scala deleted file mode 100644 index 1c5486dbf2..0000000000 --- a/persistence/mapper/src/test/scala/net/liftweb/mapper/MappedDecimalSpec.scala +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright 2010-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mapper - -import org.specs2.mutable.Specification - -import common._ - - -/** - * Systems under specification for MappedDate. - */ -class MappedDecimalSpec extends Specification { - "MappedDecimal Specification".title - sequential - - val provider = DbProviders.H2MemoryProvider - - private def ignoreLogger(f: => AnyRef): Unit = () - def setupDB: Unit = { - provider.setupDB - Schemifier.destroyTables_!!(ignoreLogger _, Dog, User) - Schemifier.schemify(true, ignoreLogger _, Dog, User) - } - - "MappedDecimal" should { - "not be marked dirty on read" in { - setupDB - val charlie = Dog.create - charlie.price(42.42).save - - val read = Dog.find(charlie.id) - read.map(_.dirty_?) must_== Full(false) - } - - "be marked dirty on update" in { - setupDB - val charlie = Dog.create - charlie.price(42.42).save - - val read = Dog.find(charlie.id).openOrThrowException("This is a test") - read.dirty_? must_== false - read.price(100.42) - read.price(100.42) - read.dirty_? must_== true - } - } -} - diff --git a/persistence/mapper/src/test/scala/net/liftweb/mapper/MappedEnumSpec.scala b/persistence/mapper/src/test/scala/net/liftweb/mapper/MappedEnumSpec.scala deleted file mode 100644 index ef9db241fd..0000000000 --- a/persistence/mapper/src/test/scala/net/liftweb/mapper/MappedEnumSpec.scala +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright 2010-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package mapper - -import org.specs2.mutable.Specification - -object MyEnum extends Enumeration { - val a = Value - val b = Value - val c = Value - val d = Value - val e = Value -} - -class EnumObj extends LongKeyedMapper[EnumObj] with IdPK { - def getSingleton = EnumObj - - object enum extends MappedEnum(this, MyEnum) -} - -object EnumObj extends EnumObj with LongKeyedMetaMapper[EnumObj] - -class MappedEnumSpec extends Specification { - "MappedEnum Specification".title - - "MappedEnum" should { - "preserve enumeration order when building display list" in { - val v = EnumObj.create - - import MyEnum._ - v.enum.buildDisplayList must_== List(a.id -> a.toString, b.id -> b.toString, c.id -> c.toString, d.id -> d.toString, e.id -> e.toString) - } - } -} - diff --git a/persistence/mapper/src/test/scala/net/liftweb/mapper/MappedLongForeignKeySpec.scala b/persistence/mapper/src/test/scala/net/liftweb/mapper/MappedLongForeignKeySpec.scala deleted file mode 100644 index c09a8386e9..0000000000 --- a/persistence/mapper/src/test/scala/net/liftweb/mapper/MappedLongForeignKeySpec.scala +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Copyright 2010-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mapper - -import org.specs2.mutable.Specification - -import common._ - - -/** - * Systems under specification for MappedLongForeignKey. - */ -class MappedLongForeignKeySpec extends Specification with org.specs2.specification.BeforeEach { - "MappedLongForeignKey Specification".title - sequential - - // Make sure we have everything configured first - MapperSpecsModel.setup() - - def provider = DbProviders.H2MemoryProvider - - def before: Unit = MapperSpecsModel.cleanup() - - "MappedLongForeignKey" should { - (try { - provider.setupDB - } catch { - case e if !provider.required_? 
=> 1 must be_==(2).orSkip("Provider %s not available: %s".format(provider, e)) - }) must not(throwA[Exception]).orSkip - - "Not allow comparison to another FK" in { - val dog = Dog.create.name("Froo").saveMe - val user = { - def ret: User = { - val r = User.create.saveMe - if (r.id.get >= dog.id.get) r - else ret - } - - ret - } - dog.owner(user).save - val d2 = Dog.find(dog.id).openOrThrowException("Test") - d2.id.get must_== user.id.get - (d2.owner == user) must_== true - (d2.owner == d2) must_== false - } - - "be primed after setting a reference" in { - val dog = Dog.create - val user = User.create - dog.owner(user) - dog.owner.obj.isDefined must beTrue - } - - "be primed after setting a Boxed reference" in { - val dog = Dog.create - val user = User.create - dog.owner(Full(user)) - dog.owner.obj.isDefined must beTrue - } - - "be empty after setting an Empty" in { - val user = User.create - val dog = Dog.create.owner(user) - dog.owner(Empty) - - dog.owner.obj must_== Empty - dog.owner.get must_== 0L - } - } -} - diff --git a/persistence/mapper/src/test/scala/net/liftweb/mapper/MapperSpec.scala b/persistence/mapper/src/test/scala/net/liftweb/mapper/MapperSpec.scala deleted file mode 100644 index 3b19230a52..0000000000 --- a/persistence/mapper/src/test/scala/net/liftweb/mapper/MapperSpec.scala +++ /dev/null @@ -1,392 +0,0 @@ -/* - * Copyright 2007-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mapper - -import java.util.Locale - -import org.specs2.mutable.Specification -import org.specs2.specification.BeforeEach - -import common._ -import json._ -import util._ -import Helpers._ -import http.LiftRules -import http.provider.HTTPRequest - - -/** - * Systems under specification for Mapper. The model classes here are - * defined in MapperSpecsModel.scala - */ -class MapperSpec extends Specification with BeforeEach { - "Mapper Specification".title - // Do everything in order. - sequential - - // Make sure we have everything configured first - MapperSpecsModel.setup() - - def providers: List[DbProviders.Provider] = DbProviders.H2MemoryProvider :: Nil - - /* - private def logDBStuff(log: DBLog, len: Long) { - println(" in log stuff "+log.getClass.getName) - log match { - case null => - case _ => println(log.allEntries) - } - } - - DB.addLogFunc(logDBStuff) - */ - -// if (!DB.loggingEnabled_? 
&& doLog) DB.addLogFunc(logDBStuff) - - def before = MapperSpecsModel.cleanup() // before each example - - providers.foreach(provider => { - try { - provider.setupDB - - ("Mapper for " + provider.name) should { - - "schemify" in { - val elwood = SampleModel.find(By(SampleModel.firstName, "Elwood")).openOrThrowException("Test") - val madeline = SampleModel.find(By(SampleModel.firstName, "Madeline")).openOrThrowException("Test") - val archer = SampleModel.find(By(SampleModel.firstName, "Archer")).openOrThrowException("Test") - val notNull = SampleModel.find(By(SampleModel.firstName, "NotNull")).openOrThrowException("Test") - - elwood.firstName.get must_== "Elwood" - madeline.firstName.get must_== "Madeline" - archer.firstName.get must_== "Archer" - - archer.moose.get must_== Empty - notNull.moose.get must_== Full(99L) - - val disabled = SampleModel.find(By(SampleModel.status, SampleStatus.Disabled)) - - val meow = SampleTag.find(By(SampleTag.tag, "Meow")).openOrThrowException("Test") - - meow.tag.get must_== "Meow" - - elwood.id.get must be_<(madeline.id.get).eventually - } - - "non-snake connection should lower case default table & column names" in { - SampleModel.firstName.name must_== "firstName" - SampleModel.firstName.dbColumnName must_== "firstname" - SampleModel.dbTableName must_== "samplemodel" - } - - "should use displayNameCalculator for displayName" in { - val localeCalculator = LiftRules.localeCalculator - SampleModel.firstName.displayName must_== "DEFAULT:SampleModel.firstName" - - LiftRules.localeCalculator = (request: Box[HTTPRequest]) => request.flatMap(_.locale) - .openOr(new Locale("xx", "YY")) - SampleModel.firstName.displayName must_== "xx_YY:SampleModel.firstName" - - LiftRules.localeCalculator = localeCalculator - success - } - - "snake connection should snakify default table & column names" in { - SampleModelSnake.firstName.name must_== "firstName" - SampleModelSnake.firstName.dbColumnName must_== "first_name" - SampleModelSnake.dbTableName must_== "sample_model_snake" - } - - "user defined names are not changed" in { - SampleTag.extraColumn.name must_== "extraColumn" - SampleTag.extraColumn.dbColumnName must_== "AnExtraColumn" - Mixer.dbTableName must_== "MIXME_UP" - } - - "basic JSON encoding/decoding works" in { - val m = SampleModel.findAll().head - val json = m.encodeAsJson() - val rebuilt = SampleModel.buildFromJson(json) - m must_== rebuilt - } - - "basic JSON encoding/decoding works with snake_case" in { - val m = SampleModelSnake.findAll().head - val json = m.encodeAsJson() - val rebuilt = SampleModelSnake.buildFromJson(json) - m must_== rebuilt - } - - "Can JSON decode and write back" in { - val m = SampleModel.find(2).openOrThrowException("Test") - val json = m.encodeAsJson() - val rebuilt = SampleModel.buildFromJson(json) - rebuilt.firstName("yak").save - val recalled = SampleModel.find(2).openOrThrowException("Test") - recalled.firstName.get must_== "yak" - } - - "You can put stuff in a Set" in { - val m1 = SampleModel.find(1).openOrThrowException("Test") - val m2 = SampleModel.find(1).openOrThrowException("Test") - - (m1 == m2) must_== true - - val s1 = Set(SampleModel.findAll: _*) - - s1.contains(m1) must_== true - - val s2 = s1 ++ SampleModel.findAll - - s1.size must_== s2.size - } - - "Like works" in { - val oo = SampleTag.findAll(Like(SampleTag.tag, "%oo%")) - - (oo.length > 0) must beTrue - - for (t <- oo) - (t.tag.get.indexOf("oo") >= 0) must beTrue - - for (t <- oo) - t.model.cached_? 
must beFalse - - val mm = SampleTag.findAll(Like(SampleTag.tag, "M%")) - - for (t <- mm) - (t.tag.get.startsWith("M")) must beTrue - - for (t <- mm) yield { - t.model.cached_? must beFalse - t.model.obj - t.model.cached_? must beTrue - } - - (mm.length > 0) must beTrue - } - - "Nullable Long works" in { - SampleModel.create.firstName("fruit").moose(Full(77L)).save - - SampleModel.findAll(By(SampleModel.moose, Empty)).length must_== 3L - SampleModel.findAll(NotBy(SampleModel.moose, Empty)).length must_== 2L - SampleModel.findAll(NotNullRef(SampleModel.moose)).length must_== 2L - SampleModel.findAll(NullRef(SampleModel.moose)).length must_== 3L - } - - "enforce NOT NULL" in { - val nullString: String = null - SampleModel.create.firstName("Not Null").notNull(nullString).save must throwA[java.sql.SQLException] - } - - "enforce FK constraint on DefaultConnection" in { - val supportsFK = DB.use(DefaultConnectionIdentifier) { conn => conn.driverType.supportsForeignKeys_? } - if (!supportsFK) skipped("Driver %s does not support FK constraints".format(provider)) - - SampleTag.create.model(42).save must throwA[java.sql.SQLException] - } - - "not enforce FK constraint on SnakeConnection" in { - SampleTagSnake.create.model(42).save must_== true - } - - "Precache works" in { - val oo = SampleTag.findAll(By(SampleTag.tag, "Meow"), PreCache(SampleTag.model)) - - for (t <- oo) yield t.model.cached_? must beTrue - - (oo.length > 0) must beTrue - } - - "Precache works with OrderBy" in { - if ((provider ne DbProviders.DerbyProvider) - && (provider ne DbProviders.MySqlProvider)) { - // this doesn't work for Derby, but it's a derby bug - // nor does it work in MySQL, but it's a MySQL limitation - // try { provider.setupDB } catch { case e => skip(e.getMessage) } - val dogs = Dog.findAll(By(Dog.name, "fido"), OrderBy(Dog.name, Ascending), PreCache(Dog.owner)) - val oo = SampleTag.findAll(OrderBy(SampleTag.tag, Ascending), MaxRows(2), PreCache(SampleTag.model)) - - (oo.length > 0) must beTrue - for (t <- oo) t.model.cached_? must beTrue - } - success - } - - "Non-deterministic Precache works" in { - val dogs = Dog.findAll(By(Dog.name, "fido"), PreCache(Dog.owner, false)) - val oo = SampleTag.findAll(By(SampleTag.tag, "Meow"), PreCache(SampleTag.model, false)) - - for (t <- oo) yield t.model.cached_? must beTrue - - (oo.length > 0) must beTrue - } - - "Non-deterministic Precache works with OrderBy" in { - val dogs = Dog.findAll(By(Dog.name, "fido"), OrderBy(Dog.name, Ascending), PreCache(Dog.owner, false)) - val oo = SampleTag.findAll(OrderBy(SampleTag.tag, Ascending), MaxRows(2), PreCache(SampleTag.model, false)) - - for (t <- oo) yield t.model.cached_? 
must beTrue - - (oo.length > 0) must beTrue - } - - "work with Mixed case" in { - val elwood = Mixer.find(By(Mixer.name, "Elwood")).openOrThrowException("Test") - val madeline = Mixer.find(By(Mixer.name, "Madeline")).openOrThrowException("Test") - val archer = Mixer.find(By(Mixer.name, "Archer")).openOrThrowException("Test") - - elwood.name.get must_== "Elwood" - madeline.name.get must_== "Madeline" - archer.name.get must_== "Archer" - - elwood.weight.get must_== 33 - madeline.weight.get must_== 44 - archer.weight.get must_== 105 - } - - "work with Mixed case update and delete" in { - val elwood = Mixer.find(By(Mixer.name, "Elwood")).openOrThrowException("Test") - elwood.name.get must_== "Elwood" - elwood.name("FruitBar").weight(966).save - - val fb = Mixer.find(By(Mixer.weight, 966)).openOrThrowException("Test") - - fb.name.get must_== "FruitBar" - fb.weight.get must_== 966 - fb.delete_! - - Mixer.find(By(Mixer.weight, 966)).isDefined must_== false - Mixer.find(By(Mixer.name, "FruitBar")).isDefined must_== false - Mixer.find(By(Mixer.name, "Elwood")).isDefined must_== false - - } - - "work with Mixed case update and delete for Dog2" in { - val elwood = Dog2.find(By(Dog2.name, "Elwood")).openOrThrowException("Test") - elwood.name.get must_== "Elwood" - elwood.name("FruitBar").actualAge(966).save - - val fb = Dog2.find(By(Dog2.actualAge, 966)).openOrThrowException("Test") - - fb.name.get must_== "FruitBar" - fb.actualAge.get must_== 966 - fb.delete_! - - Dog2.find(By(Dog2.actualAge, 966)).isDefined must_== false - Dog2.find(By(Dog2.name, "FruitBar")).isDefined must_== false - Dog2.find(By(Dog2.name, "Elwood")).isDefined must_== false - } - - "Non-autogenerated primary key items should be savable after a field has been changed" in { - val item = TstItem.create.tmdbId(1L).saveMe - item.name("test").save must_== true - } - - "we can read and write String primary keys" in { - val i1 = Thing.create.name("frog").saveMe - val i2 = Thing.create.name("dog").saveMe - - Thing.find(By(Thing.thing_id, i1.thing_id.get)).openOrThrowException("Test").name.get must_== "frog" - Thing.find(By(Thing.thing_id, i2.thing_id.get)).openOrThrowException("Test").name.get must_== "dog" - } - - - "Precache works with OrderBy with Mixed Case" in { - if ((provider ne DbProviders.DerbyProvider) - && (provider ne DbProviders.MySqlProvider)) { - // this doesn't work for Derby, but it's a derby bug - // nor does it work in MySQL, but it's a MySQL limitation - // try { provider.setupDB } catch { case e => skip(e.getMessage) } - val dogs = Dog2.findAll(By(Dog2.name, "fido"), OrderBy(Dog2.name, Ascending), PreCache(Dog2.owner)) - val oo = SampleTag.findAll(OrderBy(SampleTag.tag, Ascending), MaxRows(2), PreCache(SampleTag.model)) - - (oo.length > 0) must beTrue - for (t <- oo) yield t.model.cached_? must beTrue - } - success - } - - "Non-deterministic Precache works with Mixed Case" in { - val dogs = Dog2.findAll(By(Dog2.name, "fido"), PreCache(Dog2.owner, false)) - val oo = SampleTag.findAll(By(SampleTag.tag, "Meow"), PreCache(SampleTag.model, false)) - - for (t <- oo) yield t.model.cached_? 
must beTrue - - (oo.length > 0) must beTrue - } - - - "CreatedAt and UpdatedAt work" in { - val now = Helpers.now - val dog = Dog2.find().openOrThrowException("Test") - - val oldUpdate = dog.updatedAt.get - - val d1 = (now.getTime - dog.createdAt.get.getTime) / 100000L - d1 must_== 0L - - val d2 = (now.getTime - dog.updatedAt.get.getTime) / 100000L - d2 must_== 0L - - dog.name("ralph").save - - val dog2 = Dog2.find(dog.dog2id.get).openOrThrowException("Test") - - dog.createdAt.get.getTime must_== dog2.createdAt.get.getTime - oldUpdate.getTime must_!= dog2.updatedAt.get.getTime - } - - "Non-deterministic Precache works with OrderBy with Mixed Case" in { - val dogs = Dog2.findAll(By(Dog2.name, "fido"), OrderBy(Dog2.name, Ascending), PreCache(Dog2.owner, false)) - - val oo = SampleTag.findAll(OrderBy(SampleTag.tag, Ascending), MaxRows(2), PreCache(SampleTag.model, false)) - - for (t <- oo) yield t.model.cached_? must beTrue - - (oo.length > 0) must beTrue - } - - "Save flag results in update rather than insert" in { - val elwood = SampleModel.find(By(SampleModel.firstName, "Elwood")).openOrThrowException("Test") - elwood.firstName.get must_== "Elwood" - elwood.firstName("Frog").save - - val frog = SampleModel.find(By(SampleModel.firstName, "Frog")).openOrThrowException("Test") - frog.firstName.get must_== "Frog" - - SampleModel.findAll().length must_== 4 - SampleModel.find(By(SampleModel.firstName, "Elwood")).isEmpty must_== true - } - - "accept a Seq[T] as argument to ByList query parameter" in { - // See http://github.com/dpp/liftweb/issues#issue/77 for original request - val seq: Seq[String] = List("Elwood", "Archer") - val result = SampleModel.findAll(ByList(SampleModel.firstName, seq)) - result.length must_== 2 - } - } - } catch { - case e if !provider.required_? => skipped("Provider %s not available: %s".format(provider, e)) - case _: Exception => skipped - } - }) -} - - diff --git a/persistence/mapper/src/test/scala/net/liftweb/mapper/MapperSpecsModel.scala b/persistence/mapper/src/test/scala/net/liftweb/mapper/MapperSpecsModel.scala deleted file mode 100644 index a9edbd8449..0000000000 --- a/persistence/mapper/src/test/scala/net/liftweb/mapper/MapperSpecsModel.scala +++ /dev/null @@ -1,419 +0,0 @@ -/* - * Copyright 2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mapper - -import java.util.Locale - -import common._ -import json._ -import util._ -import Helpers._ - -/* - * This file contains a number of objects that are common to several - * of the Mapper specs. By placing them here we reduce code duplication - * and get rid of some timing errors found when we moved to SBT for build. 
- * - * Derek Chen-Becker, Mar 8, 2011 - */ - -object MapperSpecsModel { - // These rules are common to all Mapper specs - def snakify(connid: ConnectionIdentifier, name: String): String = { - if (connid.jndiName == "snake") { - StringHelpers.snakify(name) - } else { - name.toLowerCase - } - } - - MapperRules.columnName = snakify - MapperRules.tableName = snakify - - // Simple name calculator - def displayNameCalculator(bm: BaseMapper, l: Locale, name: String) = { - val mapperName = bm.dbName - val displayName = name match { - case "firstName" if l == Locale.getDefault() => "DEFAULT:" + mapperName + "." + name - case "firstName" if l == new Locale("xx", "YY") => "xx_YY:" + mapperName + "." + name - case _ => name - } - displayName - } - - MapperRules.displayNameCalculator.default.set(displayNameCalculator _) - - def setup(): Unit = { - // For now, do nothing. Just force this object to load - } - - def doLog = false - - private def ignoreLogger(f: => AnyRef): Unit = () - - def cleanup(): Unit = { - // Snake connection doesn't create FK constraints (put this here to be absolutely sure it gets set before Schemify) - MapperRules.createForeignKeys_? = c => { - c.jndiName != "snake" - } - - Schemifier.destroyTables_!!(DefaultConnectionIdentifier, if (doLog) Schemifier.infoF _ else ignoreLogger _, SampleTag, SampleModel, Dog, Mixer, Dog2, User, TstItem, Thing) - Schemifier.destroyTables_!!(DbProviders.SnakeConnectionIdentifier, if (doLog) Schemifier.infoF _ else ignoreLogger _, SampleTagSnake, SampleModelSnake) - Schemifier.schemify(true, if (doLog) Schemifier.infoF _ else ignoreLogger _, DefaultConnectionIdentifier, SampleModel, SampleTag, User, Dog, Mixer, Dog2, TstItem, Thing) - Schemifier.schemify(true, if (doLog) Schemifier.infoF _ else ignoreLogger _, DbProviders.SnakeConnectionIdentifier, SampleModelSnake, SampleTagSnake) - } -} - - -object SampleTag extends SampleTag with LongKeyedMetaMapper[SampleTag] { - override def dbAddTable = Full(populate) - - private def populate(): Unit = { - val samp = SampleModel.findAll() - val tags = List("Hello", "Moose", "Frog", "WooHoo", "Sloth", - "Meow", "Moof") - for (t <- tags; - m <- samp) SampleTag.create.tag(t).model(m).save - } -} - - -class SampleTag extends LongKeyedMapper[SampleTag] with IdPK { - def getSingleton = SampleTag - - // what's the "meta" server - object tag extends MappedString(this, 32) - - object model extends MappedLongForeignKey(this, SampleModel) - - object extraColumn extends MappedString(this, 32) { - override def dbColumnName = "AnExtraColumn" - } -} - -object SampleStatus extends Enumeration { - val Active, Disabled, Hiatus = Value -} - -object SampleModel extends SampleModel with KeyedMetaMapper[Long, SampleModel] { - override def dbAddTable = Full(populate) - - def encodeAsJson(in: SampleModel): JsonAST.JObject = encodeAsJSON_!(in) - - def buildFromJson(json: JsonAST.JObject): SampleModel = decodeFromJSON_!(json, false) - - private def populate(): Unit = { - create.firstName("Elwood").save - create.firstName("Madeline").save - create.firstName("Archer").status(SampleStatus.Disabled).save - create.firstName("NotNull").moose(Full(99L)).save - } -} - -class SampleModel extends KeyedMapper[Long, SampleModel] { - def getSingleton = SampleModel - - // what's the "meta" server - def primaryKeyField: MappedLongIndex[SampleModel] = id - - object id extends MappedLongIndex(this) - - object firstName extends MappedString(this, 32) - - object moose extends MappedNullableLong(this) - - object notNull extends MappedString(this, 32) { - 
override def dbNotNull_? = true - } - - object status extends MappedEnum(this, SampleStatus) - - def encodeAsJson(): JsonAST.JObject = SampleModel.encodeAsJson(this) -} - - -object SampleTagSnake extends SampleTagSnake with LongKeyedMetaMapper[SampleTagSnake] { - override def dbAddTable = Full(populate) - - private def populate(): Unit = { - val samp = SampleModelSnake.findAll() - val tags = List("Hello", "Moose", "Frog", "WooHoo", "Sloth", - "Meow", "Moof") - for (t <- tags; - m <- samp) SampleTagSnake.create.tag(t).model(m).save - } - - override def dbDefaultConnectionIdentifier = DbProviders.SnakeConnectionIdentifier -} - - -class SampleTagSnake extends LongKeyedMapper[SampleTagSnake] with IdPK { - def getSingleton = SampleTagSnake - - // what's the "meta" server - - object tag extends MappedString(this, 32) - - object model extends MappedLongForeignKey(this, SampleModelSnake) - - object extraColumn extends MappedString(this, 32) { - override def dbColumnName = "AnExtraColumn" - } - -} - - -object SampleModelSnake extends SampleModelSnake with KeyedMetaMapper[Long, SampleModelSnake] { - override def dbAddTable = Full(populate) - - def encodeAsJson(in: SampleModelSnake): JsonAST.JObject = encodeAsJSON_!(in) - - def buildFromJson(json: JsonAST.JObject): SampleModelSnake = decodeFromJSON_!(json, false) - - private def populate(): Unit = { - create.firstName("Elwood").save - create.firstName("Madeline").save - create.firstName("Archer").save - create.firstName("NotNull").moose(Full(99L)).save - } - - override def dbDefaultConnectionIdentifier = DbProviders.SnakeConnectionIdentifier -} - - -class SampleModelSnake extends KeyedMapper[Long, SampleModelSnake] { - def getSingleton = SampleModelSnake - - // what's the "meta" server - def primaryKeyField = id - - object id extends MappedLongIndex(this) - - object firstName extends MappedString(this, 32) - - object moose extends MappedNullableLong(this) - - object notNull extends MappedString(this, 32) { - override def dbNotNull_? 
= true - } - - def encodeAsJson(): JsonAST.JObject = SampleModelSnake.encodeAsJson(this) -} - - -/** - * The singleton that has methods for accessing the database - */ -object User extends User with MetaMegaProtoUser[User] { - override def dbAddTable = Full(populate) - - private def populate(): Unit = { - create.firstName("Elwood").save - create.firstName("Madeline").save - create.firstName("Archer").save - } - - override def dbTableName = "users" - - // define the DB table name - override def screenWrap = Full() - - // define the order fields will appear in forms and output - override def fieldOrder = List(id, firstName, lastName, email, locale, timezone, password, textArea) - - // comment this line out to require email validations - override def skipEmailValidation = true -} - - -/** - * An O-R mapped "User" class that includes first name, last name, password and we add a "Personal Essay" to it - */ -class User extends MegaProtoUser[User] { - def getSingleton = User - - - // what's the "meta" server - - // define an additional field for a personal essay - object textArea extends MappedTextarea(this, 2048) { - override def textareaRows = 10 - - override def textareaCols = 50 - - override def displayName = "Personal Essay" - } - - -} - - -class Dog extends LongKeyedMapper[Dog] with IdPK { - def getSingleton = Dog - - object name extends MappedPoliteString(this, 128) - - object weight extends MappedInt(this) - - object owner extends MappedLongForeignKey(this, User) - - object price extends MappedDecimal(this, new java.math.MathContext(7), 2) -} - - -object Dog extends Dog with LongKeyedMetaMapper[Dog] { - override def dbAddTable = Full(populate) - - private def populate(): Unit = { - create.name("Elwood").save - create.name("Madeline").save - create.name("Archer").save - create.name("fido").owner(User.find(By(User.firstName, "Elwood"))).save - } - - def who(in: Dog): Box[User] = in.owner -} - - -class Mixer extends LongKeyedMapper[Mixer] with IdPK { - def getSingleton = Mixer - - object name extends MappedPoliteString(this, 128) { - override def dbColumnName = "NaM_E" - override def defaultValue = "wrong" - } - - object weight extends MappedInt(this) { - override def dbColumnName = "WEIGHT" - override def defaultValue = -99 - } -} - - -object Mixer extends Mixer with LongKeyedMetaMapper[Mixer] { - override def dbAddTable = Full(populate) - override def dbTableName = "MIXME_UP" - - private def populate(): Unit = { - create.name("Elwood").weight(33).save - create.name("Madeline").weight(44).save - create.name("Archer").weight(105).save - } -} - -object Thing extends Thing with KeyedMetaMapper[String, Thing] { - override def dbTableName = "things" - - import java.util.UUID - override def beforeCreate = List((thing: Thing) => { - thing.thing_id(UUID.randomUUID().toString) - }) -} - - -class Thing extends KeyedMapper[String, Thing] { - def getSingleton = Thing - - def primaryKeyField = thing_id - - object thing_id extends MappedStringIndex(this, 36) { - override def writePermission_? = true - override def dbAutogenerated_? = false - override def dbNotNull_? = true - } - - object name extends MappedString(this, 64) -} - - -/** - * Test class to see if you can have a non-autogenerated primary key - * Issue 552 - */ -class TstItem extends LongKeyedMapper[TstItem] { - def getSingleton = TstItem - def primaryKeyField = tmdbId - - object tmdbId extends MappedLongIndex(this) { - override def writePermission_? = true - override def dbAutogenerated_? 
= false - } - - object name extends MappedText(this) -} - - -object TstItem extends TstItem with LongKeyedMetaMapper[TstItem] - - -class Dog2 extends LongKeyedMapper[Dog2] with CreatedUpdated { - def getSingleton = Dog2 - override def primaryKeyField = dog2id - - object dog2id extends MappedLongIndex[Dog2](this.asInstanceOf[MapperType]) { - override def dbColumnName = "DOG2_Id" - } - - object name extends MappedPoliteString(this, 128) - - object weight extends MappedInt(this) - - object owner extends MappedLongForeignKey(this, User) - - object actualAge extends MappedInt(this) { - override def dbColumnName = "ACTUAL_AGE" - override def defaultValue = 1 - override def dbIndexed_? = true - } - - - object isDog extends MappedBoolean(this) { - override def dbColumnName = "is_a_dog" - override def defaultValue = false - override def dbIndexed_? = true - } - - - object createdTime extends MappedDateTime(this) { - override def dbColumnName = "CreatedTime" - override def defaultValue = new _root_.java.util.Date() - override def dbIndexed_? = true - } - - -} - - -object Dog2 extends Dog2 with LongKeyedMetaMapper[Dog2] { - override def dbTableName = "DOG2" - override def dbAddTable = Full(populate) - - private def populate(): Unit = { - create.name("Elwood").actualAge(66).save - create.name("Madeline").save - create.name("Archer").save - create.name("fido").owner(User.find(By(User.firstName, "Elwood"))).isDog(true).save - create.name("toto").owner(User.find(By(User.firstName, "Archer"))).actualAge(3).isDog(true) - .createdTime(Dog2.getRefDate).save - } - - // Get new instance of fixed point-in-time reference date - def getRefDate: _root_.java.util.Date = { - new _root_.java.util.Date(1257089309453L) - } -} diff --git a/persistence/mapper/src/test/scala/net/liftweb/mapper/OneToManySpecs.scala b/persistence/mapper/src/test/scala/net/liftweb/mapper/OneToManySpecs.scala deleted file mode 100644 index 099cfac1cf..0000000000 --- a/persistence/mapper/src/test/scala/net/liftweb/mapper/OneToManySpecs.scala +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright 2009-2010 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb { -package mapper { - -import org.specs2.mutable.Specification - -class OneToManySpecs extends Specification { - "One to Many Specification".title - sequential - - val provider = DbProviders.H2MemoryProvider - - private def ignoreLogger(f: => AnyRef): Unit = () - def setupDB: Unit = { - MapperRules.createForeignKeys_? 
= c => false - provider.setupDB - Schemifier.destroyTables_!!(ignoreLogger _, Contact, Phone) - Schemifier.schemify(true, ignoreLogger _, Contact, Phone) - } - - "OneToMany" should { - "detect all MappedOneToMany fields" in { - setupDB - val contact = Contact.create - val fields = contact.oneToManyFields - fields.length must_== 1 - fields(0).asInstanceOf[Any] must_== contact.phones - } - "cascade delete" in { - val contact = Contact.create - contact.phones += Phone.create - contact.save - - Contact.count must_== 1 - Phone.count must_== 1 - - contact.delete_! - - Contact.count must_== 0 - Phone.count must_== 0 - } - } - -} - - - -class Contact extends LongKeyedMapper[Contact] with IdPK with OneToMany[Long, Contact] { - def getSingleton = Contact - object phones extends MappedOneToMany(Phone, Phone.contact) with Cascade[Phone] -} -object Contact extends Contact with LongKeyedMetaMapper[Contact] - -class Phone extends LongKeyedMapper[Phone] with IdPK { - def getSingleton = Phone - object contact extends MappedLongForeignKey(this, Contact) - object number extends MappedString(this, 10) -} -object Phone extends Phone with LongKeyedMetaMapper[Phone] - - -} -} diff --git a/persistence/mapper/src/test/scala/net/liftweb/mapper/SchemifierSpec.scala b/persistence/mapper/src/test/scala/net/liftweb/mapper/SchemifierSpec.scala deleted file mode 100644 index e591375ac2..0000000000 --- a/persistence/mapper/src/test/scala/net/liftweb/mapper/SchemifierSpec.scala +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright 2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mapper - -import org.specs2.mutable.Specification - - -/** - * Systems under specification for Schemifier. - */ -class SchemifierSpec extends Specification { - "Schemifier Specification".title - - val provider = DbProviders.H2MemoryProvider - - "Schemifier" should { - "not crash in readonly if table doesn't exist" in { - provider.setupDB - Schemifier.schemify(false, Schemifier.neverF _, Thing) - success - } - } -} - diff --git a/persistence/mapper/src/test/webapp/WEB-INF/web.xml b/persistence/mapper/src/test/webapp/WEB-INF/web.xml deleted file mode 100644 index 677471014e..0000000000 --- a/persistence/mapper/src/test/webapp/WEB-INF/web.xml +++ /dev/null @@ -1,21 +0,0 @@ - - - - - - - LiftFilter - Lift Filter - The Filter that intercepts lift calls - net.liftweb.http.LiftFilter - - - - - LiftFilter - /* - - - diff --git a/persistence/mapper/src/test/webapp/htmlFragmentWithHead.html b/persistence/mapper/src/test/webapp/htmlFragmentWithHead.html deleted file mode 100644 index ec053346db..0000000000 --- a/persistence/mapper/src/test/webapp/htmlFragmentWithHead.html +++ /dev/null @@ -1,7 +0,0 @@ - - - - -

- Welcome to your project!
-
diff --git a/persistence/mapper/src/test/webapp/htmlSnippetWithHead.html b/persistence/mapper/src/test/webapp/htmlSnippetWithHead.html
deleted file mode 100644
index 71577c1c25..0000000000
--- a/persistence/mapper/src/test/webapp/htmlSnippetWithHead.html
+++ /dev/null
@@ -1,5 +0,0 @@
- Welcome to your project!
-
diff --git a/persistence/mapper/src/test/webapp/index.html b/persistence/mapper/src/test/webapp/index.html
deleted file mode 100644
index 5692497160..0000000000
--- a/persistence/mapper/src/test/webapp/index.html
+++ /dev/null
@@ -1,5 +0,0 @@
- Welcome to your project!
-
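Taken together, the mapper specs deleted above all exercised the same skeleton: define a `LongKeyedMapper` model, point `DefaultConnectionIdentifier` at the in-memory H2 provider, schemify, then assert on a save/find round trip. The sketch below restates that shared pattern with a hypothetical `Cat` model; it is illustrative only, assumes the deleted `DbProviders` test helper (or an equivalent `ConnectionManager`) is still on the classpath, and is not drawn from any single removed file.

```scala
import net.liftweb.mapper._

// Hypothetical model, in the style of SampleItem / Dog from the removed specs.
class Cat extends LongKeyedMapper[Cat] with IdPK {
  def getSingleton = Cat
  object name extends MappedString(this, 32)
}
object Cat extends Cat with LongKeyedMetaMapper[Cat]

object MapperPatternSketch {
  def run(): Unit = {
    // The removed DbProviders.H2MemoryProvider bound DefaultConnectionIdentifier
    // to an in-memory H2 database; any ConnectionManager set up the same way works.
    DbProviders.H2MemoryProvider.setupDB

    // Create the table, then do a simple save/find round trip.
    Schemifier.schemify(true, Schemifier.neverF _, Cat)
    Cat.create.name("Felix").save
    assert(Cat.find(By(Cat.name, "Felix")).isDefined)
  }
}
```

Each removed spec varied this skeleton in one direction: MappedBooleanSpec and MappedDecimalSpec around dirty tracking, ManyToManySpec around broken joins, OneToManySpecs around cascading deletes, and SchemifierSpec around read-only schemification.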
- diff --git a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/AsMongoRecord.scala b/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/AsMongoRecord.scala deleted file mode 100644 index 9fea1550e0..0000000000 --- a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/AsMongoRecord.scala +++ /dev/null @@ -1,36 +0,0 @@ -/** - * Copyright 2014 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb -package record - -import org.bson.types.ObjectId - -/** - * Extend this to create extractors for your MongoRecords. - * - * Example: - * object AsUser extends AsMongoRecord(User) - */ -class AsMongoRecord[A <: MongoRecord[A]](meta: MongoMetaRecord[A]) { - - def unapply(in: String): Option[A] = asMongoRecord(in) - - def asMongoRecord(in: String): Option[A] = - if (ObjectId.isValid(in)) meta.find(new ObjectId(in)) - else None -} diff --git a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/BsonRecord.scala b/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/BsonRecord.scala deleted file mode 100644 index 42e01103b6..0000000000 --- a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/BsonRecord.scala +++ /dev/null @@ -1,238 +0,0 @@ -/* - * Copyright 2011-2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package mongodb -package record - -import net.liftweb.common._ - -import java.util.prefs.BackingStoreException -import java.util.regex.Pattern -import scala.collection.JavaConverters._ - -import net.liftweb.mongodb.record.codecs.{RecordCodec, RecordTypedCodec} -import net.liftweb.mongodb.record.field._ -import net.liftweb.record.{Field, MetaRecord, Record} -import net.liftweb.record.field._ - -import org.bson._ -import org.bson.codecs.{BsonTypeClassMap, Codec, DecoderContext, EncoderContext} -import org.bson.codecs.configuration.{CodecRegistries, CodecRegistry} -import org.bson.conversions.Bson -import com.mongodb._ - -/** Specialized Record that can be encoded and decoded from BSON (DBObject) */ -trait BsonRecord[MyType <: BsonRecord[MyType]] extends Record[MyType] { - self: MyType => - - /** Refines meta to require a BsonMetaRecord */ - def meta: BsonMetaRecord[MyType] - - /** - * Encode a record instance into a DBObject - */ - @deprecated("RecordCodec is now used instead.", "3.4.3") - def asDBObject: DBObject = meta.asDBObject(this) - - @deprecated("RecordCodec is now used instead.", "3.4.3") - def asDocument: Document = meta.asDocument(this) - - /** - * Set the fields of this record from the given DBObject - */ - @deprecated("RecordCodec is now used instead.", "3.4.3") - def setFieldsFromDBObject(dbo: DBObject): Unit = meta.setFieldsFromDBObject(this, dbo) - - /** - * Save the instance and return the instance - */ - override def saveTheRecord(): Box[MyType] = throw new BackingStoreException("BSON Records don't save themselves") - - /** - * Pattern.equals doesn't work properly so it needs a special check. If you use PatternField, be sure to override equals with this. - */ - @deprecated("PatternField now has a properly functioning `equals` method.", "3.4.1") - protected def equalsWithPatternCheck(other: Any): Boolean = { - other match { - case that: BsonRecord[MyType] => - that.fields.corresponds(this.fields) { (a,b) => - (a.name == b.name) && ((a.valueBox, b.valueBox) match { - case (Full(ap: Pattern), Full(bp: Pattern)) => ap.pattern == bp.pattern && ap.flags == bp.flags - case _ => a.valueBox == b.valueBox - }) - } - case _ => false - } - } -} - -/** Specialized MetaRecord that deals with BsonRecords */ -trait BsonMetaRecord[BaseRecord <: BsonRecord[BaseRecord]] extends MetaRecord[BaseRecord] with JsonFormats with MongoCodecs { - self: BaseRecord => - - def codecRegistry: CodecRegistry = MongoRecordRules.defaultCodecRegistry.vend - - /** - * The `BsonTypeClassMap` to use with this record. - */ - def bsonTypeClassMap: BsonTypeClassMap = MongoRecordRules.defaultBsonTypeClassMap.vend - def bsonTransformer: Transformer = MongoRecordRules.defaultTransformer.vend - - def codec: RecordTypedCodec[BaseRecord] = - RecordCodec(this, introspectedCodecRegistry, bsonTypeClassMap, bsonTransformer) - - /** - * Check this record's fields and add any Codecs needed. - */ - protected lazy val introspectedCodecRegistry: CodecRegistry = { - val fields = metaFields() - - val codecs: List[Codec[_]] = fields.map { field => field match { - case f: BsonRecordTypedField[BaseRecord, _] => - f.valueMeta.codec :: Nil - case f: BsonRecordListField[BaseRecord, _] => - f.valueMeta.codec :: Nil - case f: BsonRecordMapField[BaseRecord, _] => - f.valueMeta.codec :: Nil - case _ => - Nil - }}.flatten - - CodecRegistries.fromRegistries( - CodecRegistries.fromCodecs(codecs.distinct.asJava), - codecRegistry - ) - } - - /** - * Create a BasicDBObject from the field names and values. 
- * - MongoFieldFlavor types (List) are converted to DBObjects - * using asDBObject - */ - @deprecated("RecordCodec is now used instead.", "3.4.3") - def asDBObject(inst: BaseRecord): DBObject = { - val dbo = BasicDBObjectBuilder.start // use this so regex patterns can be stored. - - for { - field <- fields(inst) - dbValue <- fieldDbValue(field) - } { dbo.add(field.name, dbValue) } - - dbo.get - } - - @deprecated("RecordCodec is now used instead.", "3.4.3") - def asDocument(inst: BaseRecord): Document = { - val dbo = new Document() - - for { - field <- fields(inst) - dbValue <- fieldDbValue(field) - } { dbo.append(field.name, dbValue) } - - dbo - } - - /** - * Return the value of a field suitable to be put in a DBObject - */ - @deprecated("RecordCodec is now used instead.", "3.4.3") - def fieldDbValue(f: Field[_, BaseRecord]): Box[Any] = { - import Meta.Reflection._ - import field.MongoFieldFlavor - - f match { - case field if (field.optional_? && field.valueBox.isEmpty) => Empty // don't add to DBObject - case field: EnumTypedField[_] => - field.asInstanceOf[EnumTypedField[Enumeration]].valueBox map { - v => v.id - } - case field: EnumNameTypedField[_] => - field.asInstanceOf[EnumNameTypedField[Enumeration]].valueBox map { - v => v.toString - } - case field: MongoFieldFlavor[_] => - Full(field.asInstanceOf[MongoFieldFlavor[Any]].asDBObject) - case field => field.valueBox map (_.asInstanceOf[AnyRef] match { - case null => null - case x if primitive_?(x.getClass) => x - case x if mongotype_?(x.getClass) => x - case x if datetype_?(x.getClass) => datetype2dbovalue(x) - case x: BsonRecord[_] => x.asDBObject - case x: Array[Byte] => x - case o => o.toString - }) - } - } - - /** - * Creates a new record, then sets the fields with the given DBObject. - * - * @param dbo - the DBObject - * @return Box[BaseRecord] - */ - @deprecated("RecordCodec is now used instead.", "3.4.3") - def fromDBObject(dbo: DBObject): BaseRecord = { - val inst: BaseRecord = createRecord - setFieldsFromDBObject(inst, dbo) - inst - } - - /** - * Populate the inst's fields with the values from a DBObject. Values are set - * using setFromAny passing it the DBObject returned from Mongo. 
- * - * @param inst - the record that will be populated - * @param dbo - The DBObject - * @return Unit - */ - @deprecated("RecordCodec is now used instead.", "3.4.3") - def setFieldsFromDBObject(inst: BaseRecord, dbo: DBObject): Unit = { - for (k <- dbo.keySet.asScala; field <- inst.fieldByName(k.toString)) { - field.setFromAny(dbo.get(k.toString)) - } - inst.runSafe { - inst.fields.foreach(_.resetDirty) - } - } - - def setFieldsFromDocument(inst: BaseRecord, doc: Document): Unit = { - for (k <- doc.keySet.asScala; field <- inst.fieldByName(k.toString)) { - field.setFromAny(doc.get(k.toString)) - } - inst.runSafe { - inst.fields.foreach(_.resetDirty) - } - } - - def fromDocument(doc: Document): BaseRecord = { - val inst: BaseRecord = createRecord - setFieldsFromDocument(inst, doc) - inst - } - - def diff(inst: BaseRecord, other: BaseRecord): Seq[(String, Any, Any)] = { - fields(inst).flatMap(field => { - val otherValue = other.fieldByName(field.name).flatMap(_.valueBox) - if (otherValue != field.valueBox) { - Seq((field.name, field.valueBox, otherValue)) - } else { - Seq.empty[(String, String, String)] - } - }) - } -} diff --git a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/MongoMetaRecord.scala b/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/MongoMetaRecord.scala deleted file mode 100644 index 573a327a85..0000000000 --- a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/MongoMetaRecord.scala +++ /dev/null @@ -1,526 +0,0 @@ -/* - * Copyright 2010-2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package mongodb -package record - -import scala.reflect.ClassTag -import java.util.UUID - -import net.liftweb.common._ -import net.liftweb.json._ -import net.liftweb.mongodb.record.codecs.CollectibleRecordCodec -import net.liftweb.mongodb.record.field._ -import net.liftweb.record.MandatoryTypedField -import net.liftweb.util.Helpers.tryo - -import org.bson.Document -import org.bson.codecs.Codec -import org.bson.codecs.configuration.CodecRegistries -import org.bson.conversions.Bson -import org.bson.types.ObjectId - -import com.mongodb._ -import com.mongodb.async.SingleResultCallback -import com.mongodb.client.{FindIterable, MongoCollection, MongoDatabase} -import com.mongodb.client.model.Filters.{eq => eqs, in} -import com.mongodb.client.model.Updates._ -import com.mongodb.client.model.{DeleteOptions, InsertOneOptions, ReplaceOptions, UpdateOptions} -import com.mongodb.client.result.{DeleteResult, UpdateResult} - -import scala.collection.JavaConverters._ -import scala.concurrent.{Future, Promise} - -trait MongoMetaRecord[BaseRecord <: MongoRecord[BaseRecord]] - extends BsonMetaRecord[BaseRecord] - with MongoMeta[BaseRecord, BaseRecord] -{ - self: BaseRecord => - - lazy val recordClass: Class[BaseRecord] = createRecord.getClass.asInstanceOf[Class[BaseRecord]] - - override def codec = - CollectibleRecordCodec(this, introspectedCodecRegistry, bsonTypeClassMap, bsonTransformer) - - /** - * Utility method for determining the value of _id. - * This is needed for backwards compatibility with MongoId. This is - * due to the fact that MongoRecord.id is of type Any. That will - * be changed to type MandatoryTypedField in a future version. When - * that happens this will no longer be necessary. - */ - private def idValue(inst: BaseRecord): Any = inst.id match { - case f: MandatoryTypedField[_] => f.value - case x => x - } - - @deprecated("Use useCollection instead", "3.4.3") - def useColl[T](f: DBCollection => T): T = - MongoDB.useCollection(connectionIdentifier, collectionName)(f) - - /** - * A CodecRegistry that contains the codec for this Record. Used by `useCollection` and `useDatabase`. - */ - private lazy val recordCodecRegistry = CodecRegistries.fromRegistries( - CodecRegistries.fromCodecs(codec), - codecRegistry - ) - - /** - * Use the collection associated with this Meta. - */ - def useCollection[T](f: MongoCollection[BaseRecord] => T): T = { - MongoDB.useMongoCollection(connectionIdentifier, collectionName, recordClass) { mc => - f(mc.withCodecRegistry(recordCodecRegistry).withWriteConcern(writeConcern)) - } - } - - /** - * Use the db associated with this Meta. - */ - def useDatabase[T](f: MongoDatabase => T): T = { - MongoDB.useDatabase(connectionIdentifier) { md => - f(md.withCodecRegistry(recordCodecRegistry).withWriteConcern(writeConcern)) - } - } - - @deprecated("Use useDatabase instead", "3.4.3") - def useDb[T](f: DB => T): T = MongoDB.use(connectionIdentifier)(f) - - @deprecated("No longer supported. 
This will be removed in Lift 4.", "3.4.3") - def useCollAsync[T](f: com.mongodb.async.client.MongoCollection[Document] => T): T = { - MongoAsync.useCollection[T](connectionIdentifier, collectionName)(f) - } - - /** - * Delete the instance from backing store - */ - def delete_!(inst: BaseRecord): Boolean = { - foreachCallback(inst, _.beforeDelete) - deleteOne("_id", idValue(inst)) - foreachCallback(inst, _.afterDelete) - true - } - - def deleteOne(inst: BaseRecord, opts: DeleteOptions): Box[DeleteResult] = { - foreachCallback(inst, _.beforeDelete) - val result = deleteOne("_id", idValue(inst)) - foreachCallback(inst, _.afterDelete) - result - } - - def deleteOne(inst: BaseRecord): Box[DeleteResult] = { - deleteOne(inst, new DeleteOptions) - } - - def bulkDelete_!!(qry: Bson): Unit = { - useCollection(_.deleteMany(qry)) - } - - def bulkDelete_!!(k: String, o: Any): Unit = bulkDelete_!!(eqs(k, o)) - - /** - * Find a single row by a qry, using a Bson. - */ - def find(qry: Bson): Box[BaseRecord] = { - useCollection { coll => - coll.find(qry).limit(1).first match { - case null => Empty - case doc => Full(doc) - } - } - } - - /** - * Find a single row by an ObjectId - */ - def find(oid: ObjectId): Box[BaseRecord] = find(eqs("_id", oid)) - - /** - * Find a single row by a UUID - */ - def find(uid: UUID): Box[BaseRecord] = find(eqs("_id", uid)) - - /** - * Find a single row by Any - * This doesn't work as find because we need JObject's to be implicitly converted. - */ - def findAny(a: Any): Box[BaseRecord] = find(eqs("_id", a)) - - /** - * Find a single row by a String id - */ - def find(s: String): Box[BaseRecord] = - if (ObjectId.isValid(s)) - find(eqs("_id", new ObjectId(s))) - else - find(eqs("_id", s)) - - /** - * Find a single row by an Int id - */ - def find(id: Int): Box[BaseRecord] = find(eqs("_id", id)) - - /** - * Find a single row by a Long id - */ - def find(id: Long): Box[BaseRecord] = find(eqs("_id", id)) - - /** - * Find a single document by a qry using a json value - */ - def find(json: JObject): Box[BaseRecord] = find(BsonParser.parse(json)) - - /** - * Find a single row by a qry using String key and Any value - */ - def find(k: String, o: Any): Box[BaseRecord] = find(eqs(k, o)) - - /** - * Find all rows in this collection. - * Retrieves all documents and puts them in memory. - */ - def findAll: List[BaseRecord] = useCollection { coll => - /** Mongo Cursors are both Iterable and Iterator, - * so we need to reduce ambiguity for implicits - */ - coll.find.iterator.asScala.toList - } - - /** - * Find all rows using a Bson query. - */ - def findAll(qry: Bson, sort: Option[Bson], opts: FindOption*): List[BaseRecord] = { - findAll(sort, opts:_*) { coll => coll.find(qry) } - } - - /** - * Find all rows and retrieve only keys fields. - */ - def findAll(qry: Bson, keys: Bson, sort: Option[Bson], opts: FindOption*): List[BaseRecord] = { - findAll(sort, opts:_*) { coll => coll.find(qry).projection(keys) } - } - - protected def findAll(sort: Option[Bson], opts: FindOption*)(f: (MongoCollection[BaseRecord]) => FindIterable[BaseRecord]): List[BaseRecord] = { - val findOpts = opts.toList - - useCollection { coll => - val cur = f(coll).limit( - findOpts.find(_.isInstanceOf[Limit]).map(_.value).getOrElse(0) - ).skip( - findOpts.find(_.isInstanceOf[Skip]).map(_.value).getOrElse(0) - ) - sort.foreach(s => cur.sort(s)) - // This retrieves all documents and puts them in memory. - cur.iterator.asScala.toList - } - } - - /** - * Find all rows and retrieve only keys fields. 
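// Illustrative sketch (hedged; `Item` and its field names are hypothetical — a minimal
// MongoRecord/MongoMetaRecord pair is sketched further below, next to the MongoRecord
// trait). It shows how the Bson-based findAll above is typically called; Limit and Skip
// are the FindOption helpers whose values drive the limit/skip handling in the generic
// findAll.
//
//   import com.mongodb.client.model.Filters.{eq => eqs}
//   import com.mongodb.client.model.Sorts
//
//   // at most 10 "open" items, skipping the first 20, newest first
//   val openItems: List[Item] =
//     Item.findAll(eqs("status", "open"), Some(Sorts.descending("createdAt")), Limit(10), Skip(20))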
- */ - def findAll(qry: JObject, keys: JObject, sort: Option[JObject], opts: FindOption*): List[BaseRecord] = { - val s = sort.map(BsonParser.parse(_)) - findAll(BsonParser.parse(qry), BsonParser.parse(keys), s, opts :_*) - } - - /** - * Find all documents using a Bson query. These are for passing in regex queries. - */ - def findAll(qry: Bson, opts: FindOption*): List[BaseRecord] = - findAll(qry, None, opts :_*) - - /** - * Find all documents using a Bson query with Bson sort - */ - def findAll(qry: Bson, sort: Bson, opts: FindOption*): List[BaseRecord] = - findAll(qry, Some(sort), opts :_*) - - /** - * Find all documents using a JObject query - */ - def findAll(qry: JObject, opts: FindOption*): List[BaseRecord] = { - findAll(BsonParser.parse(qry), None, opts :_*) - } - - /** - * Find all documents using a JObject query with sort - */ - def findAll(qry: JObject, sort: JObject, opts: FindOption*): List[BaseRecord] = - findAll(BsonParser.parse(qry), Some(BsonParser.parse(sort)), opts :_*) - - /** - * Find all documents using a k, v query - */ - def findAll(k: String, o: Any, opts: FindOption*): List[BaseRecord] = - findAll(eqs(k, o), None, opts :_*) - - /** - * Find all documents using a k, v query with JOBject sort - */ - def findAll(k: String, o: Any, sort: JObject, opts: FindOption*): List[BaseRecord] = - findAll(eqs(k, o), Some(BsonParser.parse(sort)), opts :_*) - - /** - * Find all documents with the given ids - */ - def findAllByList[T](ids: List[T]): List[BaseRecord] = if (ids.isEmpty) Nil else { - val list = new java.util.ArrayList[T]() - for (id <- ids.distinct) list.add(id) - findAll(in("_id", list)) - } - - def findAll(ids: List[ObjectId]): List[BaseRecord] = findAllByList[ObjectId](ids) - - @deprecated("Use saveOperation instead", "3.4.3") - protected def saveOp[T](inst: BaseRecord)(f: => T): Boolean = { - foreachCallback(inst, _.beforeSave) - f - foreachCallback(inst, _.afterSave) - inst.allFields.foreach { _.resetDirty } - true - } - - protected def saveOperation[T](inst: BaseRecord)(f: => T): T = { - foreachCallback(inst, _.beforeSave) - val result = f - foreachCallback(inst, _.afterSave) - inst.allFields.foreach { _.resetDirty } - result - } - - protected def updateOp[T](inst: BaseRecord)(f: => T): T = { - foreachCallback(inst, _.beforeUpdate) - val res = f - foreachCallback(inst, _.afterUpdate) - inst.allFields.foreach { _.resetDirty } - res - } - - /** - * Save the instance in the appropriate backing store. - */ - def save(inst: BaseRecord): UpdateResult = saveOperation(inst) { - val opts = new ReplaceOptions().upsert(true) - useCollection { coll => - val id = idValue(inst) - coll.replaceOne(eqs("_id", id), inst, opts) - } - } - - /** - * Insert multiple records - */ - def insertAll(insts: List[BaseRecord]): Unit = { - val list = new java.util.ArrayList[BaseRecord]() - insts.foreach { rec => list.add(rec) } - - insts.foreach(inst => foreachCallback(inst, _.beforeSave)) - useCollection(_.insertMany(list)) - insts.foreach(inst => foreachCallback(inst, _.afterSave)) - } - - @deprecated("No longer supported. 
This will be removed in Lift 4.", "3.4.3") - def insertAsync(inst: BaseRecord): Future[Boolean] = { - useCollAsync { coll => - val cb = new SingleBooleanVoidCallback( () => { - foreachCallback(inst, _.afterSave) - inst.allFields.foreach { _.resetDirty } - }) - foreachCallback(inst, _.beforeSave) - coll.insertOne(inst.asDocument, cb) - cb.future - } - } - - def insertOne(inst: BaseRecord, opts: InsertOneOptions = new InsertOneOptions): Box[BaseRecord] = tryo { - useCollection { coll => - coll.insertOne(inst, opts) - inst - } - } - - def replaceOne(inst: BaseRecord, opts: ReplaceOptions = new ReplaceOptions): Box[UpdateResult] = tryo { - useCollection { coll => - coll.replaceOne(eqs("_id", idValue(inst)), inst, opts) - } - } - - /** - * Replaces document with new one with given id. if `upsert` is set to true inserts eqs - * in similar way as save() from sync api. - * - */ - @deprecated("No longer supported. This will be removed in Lift 4.", "3.4.3") - def replaceOneAsync(inst: BaseRecord, upsert: Boolean = true, concern: WriteConcern = MongoRules.defaultWriteConcern.vend): Future[BaseRecord] = { - useCollAsync { coll => - val p = Promise[BaseRecord] - val doc: Document = inst.asDocument - val options = new UpdateOptions().upsert(upsert) - foreachCallback(inst, _.beforeSave) - val filter = new Document("_id", doc.get("_id")) - coll.withWriteConcern(concern).replaceOne(filter, doc, options, new SingleResultCallback[UpdateResult] { - override def onResult(result: UpdateResult, t: Throwable): Unit = { - if (Option(t).isEmpty) { - Option(result.getUpsertedId).filter(_.isObjectId).foreach { upsertedId => - inst.fieldByName("_id").foreach(fld => fld.setFromAny(upsertedId.asObjectId().getValue)) - } - foreachCallback(inst, _.afterSave) - inst.allFields.foreach { _.resetDirty } - p.success(inst) - } else { - p.failure(t) - } - } - }) - p.future - } - } - - - /** - * Save the instance in the appropriate backing store - */ - @deprecated("Set WriteConcern in MongoClientOptions or on this MongoMetaRecord", "3.4.3") - def save(inst: BaseRecord, concern: WriteConcern): Boolean = saveOp(inst) { - useColl { coll => - coll.save(inst.asDBObject, concern) - } - } - - /** - * Save a document to the db using the given Mongo instance - */ - @deprecated("Set WriteConcern in MongoClientOptions or on this MongoMetaRecord", "3.4.3") - def save(inst: BaseRecord, db: DB, concern: WriteConcern): Boolean = saveOp(inst) { - db.getCollection(collectionName).save(inst.asDBObject, concern) - } - - /** - * Update records with a JObject query using the given Mongo instance - */ - @deprecated("Use updateOne, updateMany, or replaceOne instead", "3.4.3") - def update(qry: JObject, newbr: BaseRecord, db: DB, opts: UpdateOption*): Unit = { - update(JObjectParser.parse(qry), newbr.asDBObject, db, opts :_*) - } - - /** - * Update records with a JObject query - */ - @deprecated("Use updateOne, updateMany, or replaceOne instead", "3.4.3") - def update(qry: JObject, newbr: BaseRecord, opts: UpdateOption*): Unit = { - useDb ( db => - update(qry, newbr, db, opts :_*) - ) - } - - /** - * Upsert records with a DBObject query - */ - @deprecated("Use updateOne, updateMany, or replaceOne instead", "3.4.3") - def upsert(query: DBObject, update: DBObject): Unit = { - useColl( coll => - coll.update(query, update, true, false) - ) - } - - /** - * Update one record with a DBObject query - */ - @deprecated("Use updateOne, updateMany, or replaceOne instead", "3.4.3") - def update(query: DBObject, update: DBObject): Unit = { - useColl( coll => - 
coll.update(query, update) - ) - } - - /** - * Update multiple records with a DBObject query - */ - @deprecated("Use updateMany instead", "3.4.3") - def updateMulti(query: DBObject, update: DBObject): Unit = { - useColl( coll => - coll.updateMulti(query, update) - ) - } - - /** - * Update a record with a DBObject query - */ - @deprecated("Use updateOne, or replaceOne instead", "3.4.3") - def update(obj: BaseRecord, update: DBObject): Unit = { - val query = (BasicDBObjectBuilder.start - .add("_id", idValue(obj)) - .get) - this.update(query, update) - } - - def updateOne(inst: BaseRecord, update: Bson, opts: UpdateOptions = new UpdateOptions): Box[UpdateResult] = tryo { - updateOp(inst) { - val id = idValue(inst) - useCollection(_.updateOne(eqs("_id", id), update, opts)) - } - } - - /** - * Update only the dirty fields. - * - * Note: PatternField will always set the dirty flag when set. - */ - @deprecated("Use updateOne, or replaceOne instead", "3.4.3") - def update(inst: BaseRecord): Unit = updateOp(inst) { - val dirtyFields = fields(inst).filter(_.dirty_?) - if (dirtyFields.length > 0) { - val (fullFields, otherFields) = dirtyFields - .map(field => (field.name, fieldDbValue(field))) - .partition(pair => pair._2.isDefined) - - val fieldsToSet = fullFields.map(pair => (pair._1, pair._2.openOrThrowException("these are all Full"))) - - val fieldsToUnset: List[String] = otherFields.filter( - pair => pair._2 match { - case Empty => true - case _ => false - } - ).map(_._1) - - if (fieldsToSet.length > 0 || fieldsToUnset.length > 0) { - val dbo = BasicDBObjectBuilder.start - - if (fieldsToSet.length > 0) { - dbo.add( - "$set", - fieldsToSet.foldLeft(BasicDBObjectBuilder.start) { - (builder, pair) => builder.add(pair._1, pair._2) - }.get - ) - } - - if (fieldsToUnset.length > 0) { - dbo.add( - "$unset", - fieldsToUnset.foldLeft(BasicDBObjectBuilder.start) { - (builder, fieldName) => builder.add(fieldName, 1) - }.get - ) - } - - update(inst, dbo.get) - } - } - } -} diff --git a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/MongoRecord.scala b/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/MongoRecord.scala deleted file mode 100644 index 59cc72840c..0000000000 --- a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/MongoRecord.scala +++ /dev/null @@ -1,138 +0,0 @@ -/* - * Copyright 2010-2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb -package record - -import net.liftweb.record.{MetaRecord, Record} -import net.liftweb.util.Helpers.tryo - -import com.mongodb.{BasicDBObject, DBObject, DBRef, WriteConcern} - -import org.bson.types.ObjectId -import common.{Full, Box} -import scala.concurrent.Future - -trait MongoRecord[MyType <: MongoRecord[MyType]] extends BsonRecord[MyType] { - self: MyType => - - /** - * Every MongoRecord must have an _id field. Use a MongoPkField to - * satisfy this. 
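// Illustrative sketch (hedged): the usual way the _id requirement below is satisfied is
// by mixing in one of the MongoPk traits, e.g. ObjectIdPk. `Item` and its `title` field
// are hypothetical names used only for this example.
import net.liftweb.mongodb.record.{MongoMetaRecord, MongoRecord}
import net.liftweb.mongodb.record.field.ObjectIdPk
import net.liftweb.record.field.StringField

class Item extends MongoRecord[Item] with ObjectIdPk[Item] {
  def meta = Item                              // ties the record to its meta object
  object title extends StringField(this, 256)  // an ordinary persisted field
}
object Item extends Item with MongoMetaRecord[Item]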
- * - * This may change to type MandatoryTypedField in the - * future (once MongoId is removed.) - */ - def id: Any - - /** - * The meta record (the object that contains the meta result for this type) - */ - def meta: MongoMetaRecord[MyType] - - /** - * Save the instance and return the instance - */ - @deprecated("Set WriteConcern in MongoClientOptions or on the MongoMetaRecord", "3.4.3") - def save(concern: WriteConcern): MyType = { - runSafe { - meta.save(this, concern) - } - this - } - - /** - * Inserts record and returns Future that completes when mongo driver finishes operation - */ - @deprecated("No longer supported. This will be removed in Lift 4.", "3.4.3") - def insertAsync():Future[Boolean] = { - runSafe { - meta.insertAsync(this) - } - } - - /** - * Save the instance and return the instance - */ - override def saveTheRecord(): Box[MyType] = saveBox() - - /** - * Save the instance and return the instance - * @param safe - if true will use WriteConcern ACKNOWLEDGED else UNACKNOWLEDGED - */ - @deprecated("Set WriteConcern in MongoClientOptions or on the MongoMetaRecord", "3.4.3") - def save(safe: Boolean = true): MyType = { - save(if (safe) WriteConcern.ACKNOWLEDGED else WriteConcern.UNACKNOWLEDGED) - } - - def save(): MyType = { - runSafe { - meta.save(this) - } - this - } - - /** - * Try to save the instance and return the instance in a Box. - */ - def saveBox(): Box[MyType] = tryo { - runSafe { - meta.save(this) - } - this - } - - /** - * Update only the dirty fields - */ - @deprecated("Use updateOne, or replaceOne instead", "3.4.3") - def update: MyType = { - runSafe { - meta.update(this) - } - this - } - - /** - * Try to update only the dirty fields - */ - @deprecated("Use updateOne, or replaceOne instead", "3.4.3") - def updateBox: Box[MyType] = tryo { - runSafe { - meta.update(this) - } - this - } - - /** - * Delete the instance from backing store - */ - def delete_! : Boolean = { - runSafe { - meta.delete_!(this) - } - } - - /** - * Try to delete the instance from backing store - */ - def deleteBox_! : Box[Boolean] = tryo { - runSafe { - meta.delete_!(this) - } - } -} diff --git a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/MongoRecordRules.scala b/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/MongoRecordRules.scala deleted file mode 100644 index 90c8e88499..0000000000 --- a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/MongoRecordRules.scala +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright 2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb -package record - -import net.liftweb.mongodb.record.codecs.RecordCodec -import net.liftweb.util.SimpleInjector - -import org.bson.Transformer -import org.bson.codecs.BsonTypeClassMap -import org.bson.codecs.configuration.CodecRegistry - -/** - * MongoRecordRules holds Lift Mongo Record's configuration. 
- */ -object MongoRecordRules extends SimpleInjector { - /** - * The default CodecRegistry used. - */ - val defaultCodecRegistry = new Inject[CodecRegistry](RecordCodec.defaultLegacyRegistry) {} - - /** - * The default BsonTypeClassMap used. - */ - val defaultBsonTypeClassMap = new Inject[BsonTypeClassMap](RecordCodec.defaultLegacyBsonTypeClassMap) {} - - /** - * The default transformer used - */ - val defaultTransformer = new Inject[Transformer](RecordCodec.defaultTransformer) {} -} diff --git a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/codecs/CollectibleRecordCodec.scala b/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/codecs/CollectibleRecordCodec.scala deleted file mode 100644 index 6631d6f106..0000000000 --- a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/codecs/CollectibleRecordCodec.scala +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Copyright 2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb.mongodb -package record -package codecs - -import scala.collection.mutable -import java.util.{Calendar, ArrayList, List => JavaList, Map => JavaMap, UUID} -import java.util.regex.Pattern -import java.util.Arrays.asList - -import net.liftweb.common._ -import net.liftweb.mongodb.codecs._ -import net.liftweb.mongodb.record.field._ -import net.liftweb.record.{Field, MandatoryTypedField, MetaRecord, Record} -import net.liftweb.util.Helpers.tryo - -import org.bson._ -import org.bson.codecs._ -import org.bson.codecs.configuration.{CodecRegistry, CodecRegistries} -import com.mongodb._ -import com.mongodb.client.gridfs.codecs.GridFSFileCodecProvider - -/** - * A Collectible (requires an _id field) codec for Record instances. - */ -object CollectibleRecordCodec { - private val idFieldName: String = "_id" -} - -case class CollectibleRecordCodec[T <: Record[T]]( - metaRecord: MetaRecord[T], - codecRegistry: CodecRegistry = RecordCodec.defaultLegacyRegistry, - bsonTypeClassMap: BsonTypeClassMap = RecordCodec.defaultLegacyBsonTypeClassMap, - valueTransformer: Transformer = RecordCodec.defaultTransformer -) - extends RecordTypedCodec[T] - with CollectibleCodec[T] -{ - override def getEncoderClass(): Class[T] = metaRecord.createRecord.getClass.asInstanceOf[Class[T]] - - /** - * Fields must be predefined on Records so there's no way to add one if missing. 
- */ - def generateIdIfAbsentFromDocument(rec: T): T = { - rec - } - - private def findIdField(rec: T): Option[Field[_, T]] = { - rec.fieldByName(CollectibleRecordCodec.idFieldName).toOption - } - - def documentHasId(rec: T): Boolean = { - findIdField(rec).nonEmpty - } - - def getDocumentId(rec: T): BsonValue = { - if (!documentHasId(rec)) { - throw new IllegalStateException("The rec does not contain an _id") - } - - findIdField(rec) match { - case Some(field: Field[_, T] with MandatoryTypedField[_]) => - val idHoldingDocument = new BsonDocument() - val writer = new BsonDocumentWriter(idHoldingDocument) - writer.writeStartDocument() - writer.writeName(CollectibleRecordCodec.idFieldName) - - writeValue(writer, EncoderContext.builder().build(), field.value.asInstanceOf[Object]) - - writer.writeEndDocument() - idHoldingDocument.get(CollectibleRecordCodec.idFieldName) - case _ => - throw new IllegalStateException("The _id field could not be found") - } - } -} diff --git a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/codecs/RecordCodec.scala b/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/codecs/RecordCodec.scala deleted file mode 100644 index d9c0ec465d..0000000000 --- a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/codecs/RecordCodec.scala +++ /dev/null @@ -1,291 +0,0 @@ -/* - * Copyright 2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb.mongodb -package record -package codecs - -import scala.collection.mutable - -import java.util.{Calendar, GregorianCalendar, ArrayList, List => JavaList, Map => JavaMap, UUID} -import java.util.regex.Pattern -import java.util.Arrays.asList - -import net.liftweb.common._ -import net.liftweb.mongodb.codecs._ -import net.liftweb.mongodb.record.field._ -import net.liftweb.record.{Field, MandatoryTypedField, MetaRecord, Record} -import net.liftweb.record.field._ -import net.liftweb.record.field.joda.JodaTimeTypedField -import net.liftweb.util.Helpers.tryo - -import org.bson._ -import org.bson.codecs.{BsonTypeCodecMap, Codec, Decoder, DecoderContext, Encoder, EncoderContext} -import org.bson.codecs.configuration.{CodecRegistry, CodecRegistries} -import com.mongodb._ - -import org.joda.time.DateTime - -/** - * A codec for Record instances. 
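// Illustrative sketch (hedged): one way a RecordCodec for the hypothetical `Item` record
// sketched earlier could be combined with the default registry defined below. A meta
// record normally builds its own registry from its `codec`, so this is illustration only;
// `ItemRegistrySketch` is a made-up name.
import org.bson.codecs.configuration.CodecRegistries

object ItemRegistrySketch {
  val itemRegistry = CodecRegistries.fromRegistries(
    CodecRegistries.fromCodecs(RecordCodec(Item)),
    RecordCodec.defaultLegacyRegistry
  )
}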
- */ -object RecordCodec { - - val defaultLegacyBsonTypeClassMap: BsonTypeClassMap = - BsonTypeClassMap( - (BsonType.REGULAR_EXPRESSION -> classOf[Pattern]), - (BsonType.BINARY -> classOf[Array[Byte]]), - (BsonType.DOCUMENT, classOf[BsonDocument]) - ) - - val defaultLegacyRegistry: CodecRegistry = CodecRegistries.fromRegistries( - MongoClientSettings.getDefaultCodecRegistry(), - CodecRegistries.fromCodecs(BigDecimalStringCodec(), CalendarCodec(), JodaDateTimeCodec()) - ) - - val defaultBsonTypeClassMap: BsonTypeClassMap = - BsonTypeClassMap( - (BsonType.BINARY -> classOf[Array[Byte]]), - (BsonType.DECIMAL128 -> classOf[BigDecimal]), - (BsonType.DOCUMENT, classOf[BsonDocument]) - ) - - val defaultRegistry: CodecRegistry = CodecRegistries.fromRegistries( - MongoClientSettings.getDefaultCodecRegistry(), - CodecRegistries.fromCodecs(BigDecimalCodec()) - ) - - val defaultTransformer: Transformer = new Transformer() { - override def transform(value: Object): Object = value - } -} - -/** - * A Codec trait for Record typed instances - */ -trait RecordTypedCodec[T <: Record[T]] extends Codec[T] with Loggable { - def metaRecord: MetaRecord[T] - def codecRegistry: CodecRegistry - def bsonTypeClassMap: BsonTypeClassMap - def valueTransformer: Transformer - - lazy val bsonTypeCodecMap = new BsonTypeCodecMap(bsonTypeClassMap, codecRegistry) - - private val uuidClass = classOf[UUID] - - def decode(reader: BsonReader, decoderContext: DecoderContext): T = { - val rec: T = metaRecord.createRecord - - reader.readStartDocument() - - while (reader.readBsonType() != BsonType.END_OF_DOCUMENT) { - val fieldName: String = reader.readName() - - // find the field and set it - rec.fieldByName(fieldName) match { - case Full(field) => - readAndSetFieldValue(reader, decoderContext, field) match { - case f: Failure => - logger.error(f) - case _ => - } - case _ => - // this field is not in the Record, skip it - reader.skipValue() - } - } - - reader.readEndDocument() - - rec.runSafe { - rec.fields.foreach(_.resetDirty) - } - - rec - } - - /** - * Read the Bson data and set the Field. 
- */ - private def readAndSetFieldValue(reader: BsonReader, decoderContext: DecoderContext, field: Field[_, T]): Box[Any] = { - val currentBsonType = reader.getCurrentBsonType - - def readNext(): Any = { - bsonTypeCodecMap.get(currentBsonType).decode(reader, decoderContext) - } - - field match { - case f: BsonableField[_] => - f.setFromBsonReader(reader, decoderContext, codecRegistry, bsonTypeCodecMap) - case f: DateTimeTypedField if currentBsonType == BsonType.DATE_TIME => // Calendar - f.setBox(tryo(CalendarCodec().decode(reader, decoderContext).asInstanceOf[Calendar])) - case f: JodaTimeTypedField if currentBsonType == BsonType.DATE_TIME => // joda.DateTime - f.setBox(tryo(JodaDateTimeCodec().decode(reader, decoderContext).asInstanceOf[DateTime])) - case f: DecimalTypedField if currentBsonType == BsonType.STRING => - f.setFromString(readNext().asInstanceOf[String]) - case f: EnumTypedField[_] if currentBsonType == BsonType.INT32 => - f.setFromInt(readNext().asInstanceOf[Int]) - case f: EnumNameTypedField[_] if currentBsonType == BsonType.STRING => - f.setFromString(readNext().asInstanceOf[String]) - case _ => - (currentBsonType match { - case BsonType.NULL => - reader.readNull() - Empty - case BsonType.BINARY if BsonBinarySubType.isUuid(reader.peekBinarySubType) && reader.peekBinarySize == 16 => - tryo(codecRegistry.get(uuidClass).decode(reader, decoderContext)) - case bsonType: BsonType => - tryo(valueTransformer.transform(readNext)) - }) match { - case Full(v: field.MyType) => - field.setBox(Full(v)) - case Empty => - field.setBox(Empty) - case f: Failure => - field.setBox(Failure(s"Error reading Bson value: ${reader.getCurrentBsonType}", Empty, f)) - } - } - } - - private def readValue(reader: BsonReader, decoderContext: DecoderContext): Any = { - reader.getCurrentBsonType match { - case BsonType.NULL => - reader.readNull() - null - case BsonType.ARRAY => - readList(reader, decoderContext) - case BsonType.DOCUMENT => - readMap(reader, decoderContext) - case BsonType.BINARY if BsonBinarySubType.isUuid(reader.peekBinarySubType) && reader.peekBinarySize == 16 => - codecRegistry.get(uuidClass).decode(reader, decoderContext) - case bsonType: BsonType => - valueTransformer.transform(bsonTypeCodecMap.get(bsonType).decode(reader, decoderContext)) - } - } - - private def readMap(reader: BsonReader, decoderContext: DecoderContext): Map[String, _] = { - val map = mutable.Map[String, Any]() - reader.readStartDocument() - while (reader.readBsonType ne BsonType.END_OF_DOCUMENT) { - map += (reader.readName -> readValue(reader, decoderContext)) - } - reader.readEndDocument() - map.toMap - } - - private def readList(reader: BsonReader, decoderContext: DecoderContext): List[_] = { - reader.readStartArray() - val list = mutable.ListBuffer[Any]() - while (reader.readBsonType ne BsonType.END_OF_DOCUMENT) { - list.append(readValue(reader, decoderContext)) - } - reader.readEndArray() - list.toList - } - - private def readListToDBObject(reader: BsonReader, decoderContext: DecoderContext): DBObject = { - reader.readStartArray() - val list = new BasicDBList - while (reader.readBsonType ne BsonType.END_OF_DOCUMENT) { - list.add(readValue(reader, decoderContext).asInstanceOf[Object]) - } - reader.readEndArray() - list - } - - def encode(writer: BsonWriter, record: T, encoderContext: EncoderContext): Unit = { - - writer.writeStartDocument() - - record.fields().foreach { _ match { - case field if (field.optional_? 
&& field.valueBox.isEmpty) => // don't write anything - case field: BsonableField[_] => - Option(field.asInstanceOf[BsonableField[Any]]).foreach { bf => - bf.writeToBsonWriter(writer, encoderContext, codecRegistry, bsonTypeCodecMap) - } - case field: EnumTypedField[_] => - field.asInstanceOf[EnumTypedField[Enumeration]].valueBox - .map(_.id) - .foreach { id => - writer.writeName(field.name) - writer.writeInt32(id) - } - case field: EnumNameTypedField[_] => - field.asInstanceOf[EnumNameTypedField[Enumeration]].valueBox - .map(_.toString) - .foreach { s => - writer.writeName(field.name) - writer.writeString(s) - } - case field => field.valueBox match { - case Empty if field.optional_? => // don't write anything - case Empty => - sys.error(s"Field value is Empty. Field name: ${field.name}.") - case Failure(msg, _, _) => - sys.error(s"Error reading value. Field name: ${field.name}. Error message: ${msg}") - case Full(v: Array[Byte]) => - writer.writeName(field.name) - writer.writeBinaryData(new BsonBinary(v)) - case Full(v) => - writer.writeName(field.name) - writeValue(writer, encoderContext, v) - } - }} - - writer.writeEndDocument() - } - - protected def writeValue[T](writer: BsonWriter, encoderContext: EncoderContext, value: T): Unit = { - value match { - case isNull if value == null => - writer.writeNull() - case map: Map[_, _] => - writeMap(writer, map.asInstanceOf[Map[String, Any]], encoderContext.getChildContext) - case list: Iterable[_] => - writeIterable(writer, list, encoderContext.getChildContext) - case _ => - val codec = codecRegistry.get(value.getClass).asInstanceOf[Encoder[T]] - encoderContext.encodeWithChildContext(codec, writer, value) - } - } - - protected def writeMap(writer: BsonWriter, map: Map[String, Any], encoderContext: EncoderContext): Unit = { - writer.writeStartDocument() - map.foreach(kv => { - writer.writeName(kv._1) - writeValue(writer, encoderContext, kv._2) - }) - writer.writeEndDocument() - } - - protected def writeIterable(writer: BsonWriter, list: Iterable[_], encoderContext: EncoderContext): Unit = { - writer.writeStartArray() - list.foreach(value => writeValue(writer, encoderContext, value)) - writer.writeEndArray() - } -} - -/** - * A Codec for Record instances - */ -case class RecordCodec[T <: Record[T]]( - metaRecord: MetaRecord[T], - codecRegistry: CodecRegistry = RecordCodec.defaultLegacyRegistry, - bsonTypeClassMap: BsonTypeClassMap = RecordCodec.defaultLegacyBsonTypeClassMap, - valueTransformer: Transformer = RecordCodec.defaultTransformer -) extends RecordTypedCodec[T] { - def getEncoderClass(): Class[T] = metaRecord.createRecord.getClass.asInstanceOf[Class[T]] -} diff --git a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/BsonRecordField.scala b/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/BsonRecordField.scala deleted file mode 100644 index da396059c8..0000000000 --- a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/BsonRecordField.scala +++ /dev/null @@ -1,237 +0,0 @@ -/* - * Copyright 2011-2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb -package record -package field - -import net.liftweb.common._ -import net.liftweb.http.js.JsExp -import net.liftweb.http.js.JE.JsNull -import net.liftweb.json._ -import net.liftweb.record._ -import net.liftweb.util.Helpers.tryo - -import com.mongodb._ - -import org.bson._ -import org.bson.codecs.{BsonDocumentCodec, BsonTypeCodecMap, Codec, DecoderContext, Encoder, EncoderContext, StringCodec} -import org.bson.codecs.configuration.CodecRegistry - -import scala.collection.mutable -import scala.reflect.Manifest -import scala.xml.NodeSeq - -/** Field that contains an entire record represented as an inline object value. Inspired by JSONSubRecordField */ -abstract class BsonRecordTypedField[OwnerType <: BsonRecord[OwnerType], SubRecordType <: BsonRecord[SubRecordType]] -(val owner: OwnerType, val valueMeta: BsonMetaRecord[SubRecordType])(implicit subRecordType: Manifest[SubRecordType]) - extends Field[SubRecordType, OwnerType] - with BsonableField[SubRecordType] -{ - - def this(owner: OwnerType, valueMeta: BsonMetaRecord[SubRecordType], value: Box[SubRecordType]) - (implicit subRecordType: Manifest[SubRecordType]) = { - this(owner, valueMeta) - setBox(value) - } - - def setFromBsonReader(reader: BsonReader, context: DecoderContext, registry: CodecRegistry, bsonTypeCodecMap: BsonTypeCodecMap): Box[MyType] = { - reader.getCurrentBsonType match { - case BsonType.DOCUMENT => - setBox(tryo(valueMeta.codec.decode(reader, context))) - case BsonType.NULL => - reader.readNull() - Empty - case bsonType => - Failure(s"Invalid BsonType for field ${name}: ${bsonType}") - } - } - - def writeToBsonWriter(writer: BsonWriter, context: EncoderContext, registry: CodecRegistry, bsonTypeCodecMap: BsonTypeCodecMap): Unit = { - valueBox match { - case Empty if optional_? => - case Empty => - writer.writeName(name) - val codec = new StringCodec() - context.encodeWithChildContext(codec, writer, "Empty") - case Full(v) => - writer.writeName(name) - val codec = registry.get(v.getClass).asInstanceOf[Encoder[SubRecordType]] - context.encodeWithChildContext(codec, writer, v) - case Failure(msg, _, _) => - writer.writeName(name) - val codec = new StringCodec() - context.encodeWithChildContext(codec, writer, s"Failure: ${msg}") - } - } - - def asJs = asJValue match { - case JNothing => JsNull - case jv => new JsExp { - lazy val toJsCmd = compactRender(jv) - } - } - - def toForm: Box[NodeSeq] = Empty - - def setFromString(s: String): Box[SubRecordType] = valueMeta.fromJsonString(s) - - def setFromAny(in: Any): Box[SubRecordType] = in match { - case dbo: DBObject => setBox(Full(valueMeta.fromDBObject(dbo))) - case dbo: Document => setBox(Full(valueMeta.fromDocument(dbo))) - case _ => genericSetFromAny(in) - } - - def asJValue: JValue = valueBox.map(_.asJValue) openOr (JNothing: JValue) - - def setFromJValue(jvalue: JValue): Box[SubRecordType] = jvalue match { - case JNothing|JNull if optional_? 
=> setBox(Empty) - case _ => setBox(valueMeta.fromJValue(jvalue)) - } -} - -class BsonRecordField[OwnerType <: BsonRecord[OwnerType], SubRecordType <: BsonRecord[SubRecordType]] -(@deprecatedName('rec) owner: OwnerType, valueMeta: BsonMetaRecord[SubRecordType])(implicit subRecordType: Manifest[SubRecordType]) - extends BsonRecordTypedField(owner, valueMeta) with MandatoryTypedField[SubRecordType] { - - def this(@deprecatedName('rec) owner: OwnerType, valueMeta: BsonMetaRecord[SubRecordType], value: SubRecordType)(implicit subRecordType: Manifest[SubRecordType]) = { - this(owner, value.meta) - set(value) - } - - def defaultValue = valueMeta.createRecord -} - -class OptionalBsonRecordField[OwnerType <: BsonRecord[OwnerType], SubRecordType <: BsonRecord[SubRecordType]] -(owner: OwnerType, valueMeta: BsonMetaRecord[SubRecordType])(implicit subRecordType: Manifest[SubRecordType]) - extends BsonRecordTypedField(owner, valueMeta) with OptionalTypedField[SubRecordType] - - -/** - * List of BsonRecords - */ -class BsonRecordListField[OwnerType <: BsonRecord[OwnerType], SubRecordType <: BsonRecord[SubRecordType]] - (@deprecatedName('rec) owner: OwnerType, val valueMeta: BsonMetaRecord[SubRecordType])(implicit mf: Manifest[SubRecordType]) - extends MongoListField[OwnerType, SubRecordType](owner: OwnerType) { - - import scala.collection.JavaConverters._ - - override def validations = ((elems: ValueType) => elems.flatMap(_.validate)) :: super.validations - - @deprecated("This was replaced with the functions from 'BsonableField'.", "3.4.3") - override def asDBObject: DBObject = { - val dbl = new BasicDBList - value.foreach { v => dbl.add(v.asDBObject) } - dbl - } - - @deprecated("This was replaced with the functions from 'BsonableField'.", "3.4.3") - override def setFromDBObject(dbo: DBObject): Box[List[SubRecordType]] = { - setBox(Full(dbo.keySet.asScala.toList.map { k => - valueMeta.fromDBObject(dbo.get(k).asInstanceOf[DBObject]) - })) - } - - override def asJValue: JValue = JArray(value.map(_.asJValue)) - - override def setFromJValue(jvalue: JValue) = jvalue match { - case JNothing|JNull if optional_? 
=> setBox(Empty) - case JArray(arr) => setBox(Full(arr.map { jv => - valueMeta.fromJValue(jv) openOr valueMeta.createRecord - })) - case other => setBox(FieldHelpers.expectedA("JArray", other)) - } - - @deprecated("This was replaced with the functions from 'BsonableField'.", "3.4.3") - override def setFromDocumentList(list: java.util.List[Document]): Box[List[SubRecordType]] = { - setBox(Full( - list.asScala.toList.map { valueMeta.fromDocument } - )) - } - - override def setFromBsonReader(reader: BsonReader, context: DecoderContext, registry: CodecRegistry, bsonTypeCodecMap: BsonTypeCodecMap): Box[List[SubRecordType]] = { - reader.getCurrentBsonType match { - case BsonType.ARRAY => - setBox(tryo { - reader.readStartArray() - val list = mutable.ListBuffer[SubRecordType]() - while (reader.readBsonType ne BsonType.END_OF_DOCUMENT) { - list.append(valueMeta.codec.decode(reader, context)) - } - reader.readEndArray() - list.toList - }) - case BsonType.NULL => - reader.readNull() - Empty - case bsonType => - Failure(s"Invalid BsonType for field ${name}: ${bsonType}") - } - } - - override def writeToBsonWriter(writer: BsonWriter, context: EncoderContext, registry: CodecRegistry, bsonTypeCodecMap: BsonTypeCodecMap): Unit = { - writer.writeName(name) - - writer.writeStartArray() - value.foreach { v => - val codec = registry.get(v.getClass).asInstanceOf[Encoder[SubRecordType]] - context.encodeWithChildContext(codec, writer, v) - } - writer.writeEndArray() - } -} - -/** - * Map of BsonRecords - */ -class BsonRecordMapField[OwnerType <: BsonRecord[OwnerType], SubRecordType <: BsonRecord[SubRecordType]] - (owner: OwnerType, val valueMeta: BsonMetaRecord[SubRecordType])(implicit mf: Manifest[SubRecordType]) - extends MongoMapField[OwnerType, SubRecordType](owner: OwnerType) -{ - override def validations = ((elems: ValueType) => elems.values.toList.flatMap(_.validate)) :: super.validations - - override def setFromBsonReader(reader: BsonReader, context: DecoderContext, registry: CodecRegistry, bsonTypeCodecMap: BsonTypeCodecMap): Box[Map[String, SubRecordType]] = { - reader.getCurrentBsonType match { - case BsonType.NULL => - reader.readNull() - Empty - case BsonType.DOCUMENT => - setBox(tryo { - val map = mutable.Map[String, SubRecordType]() - reader.readStartDocument() - while (reader.readBsonType ne BsonType.END_OF_DOCUMENT) { - map += (reader.readName -> valueMeta.codec.decode(reader, context)) - } - reader.readEndDocument() - map.toMap - }) - case bsonType => - Failure(s"Invalid BsonType for field ${name}: ${bsonType}") - } - } - - override def writeToBsonWriter(writer: BsonWriter, context: EncoderContext, registry: CodecRegistry, bsonTypeCodecMap: BsonTypeCodecMap): Unit = { - writer.writeName(name) - writer.writeStartDocument() - value.foreach(kv => { - writer.writeName(kv._1) - val codec = registry.get(kv._2.getClass).asInstanceOf[Encoder[SubRecordType]] - context.encodeWithChildContext(codec, writer, kv._2) - }) - writer.writeEndDocument() - } -} diff --git a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/BsonableField.scala b/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/BsonableField.scala deleted file mode 100644 index db1ac64fe6..0000000000 --- a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/BsonableField.scala +++ /dev/null @@ -1,195 +0,0 @@ -/* - * Copyright 2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in 
compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb -package record -package field - -import scala.collection.mutable - -import java.util.UUID - -import net.liftweb.common._ -import net.liftweb.http.js.JsExp -import net.liftweb.http.js.JE.{JsNull, JsRaw} -import net.liftweb.json._ -import net.liftweb.record.Field - -import org.bson._ -import org.bson.codecs._ -import org.bson.codecs.configuration.CodecRegistry - -/** - * A trait for creating custom Fields. Allows writing your own - * functions for encoding to and decoding from Bson. - */ -trait BsonableField[T] { - this: Field[T, _] => - - private val uuidClass = classOf[UUID] - - /** - * Set this field's value from a BsonReader. - */ - def setFromBsonReader(reader: BsonReader, context: DecoderContext, registry: CodecRegistry, bsonTypeCodecMap: BsonTypeCodecMap): Box[T] - - /** - * Write this field's value to a BsonWriter. - */ - def writeToBsonWriter(writer: BsonWriter, context: EncoderContext, registry: CodecRegistry, bsonTypeCodecMap: BsonTypeCodecMap): Unit - - /** - * Helper function to read a value to a BsonDocument. - */ - protected def readValueToBsonDocument(reader: BsonReader, context: DecoderContext, registry: CodecRegistry): BsonDocument = { - val codec = new BsonDocumentCodec(registry) - val result = codec.decode(reader, context).asInstanceOf[BsonDocument] - result - } - - /** - * Helper function to read an array to a List of BsonDocuments. - */ - protected def readArrayToBsonDocument(reader: BsonReader, context: DecoderContext, registry: CodecRegistry): List[BsonDocument] = { - reader.readStartArray() - val list = mutable.ListBuffer[BsonDocument]() - while (reader.readBsonType ne BsonType.END_OF_DOCUMENT) { - val codec = new BsonDocumentCodec(registry) - list.append(codec.decode(reader, context).asInstanceOf[BsonDocument]) - } - reader.readEndArray() - list.toList - } - - /** - * Helper function to write a value to a BsonWriter. - */ - protected def writeValue[T](writer: BsonWriter, encoderContext: EncoderContext, value: T, codecRegistry: CodecRegistry): Unit = { - value match { - case isNull if value == null => - writer.writeNull() - case map: Map[_, _] => - writeMap(writer, map.asInstanceOf[Map[String, Any]], encoderContext.getChildContext, codecRegistry) - case list: Iterable[_] => - writeIterable(writer, list, encoderContext.getChildContext, codecRegistry) - case _ => - val codec = codecRegistry.get(value.getClass).asInstanceOf[Encoder[T]] - encoderContext.encodeWithChildContext(codec, writer, value) - } - } - - /** - * Helper function to write a Map to a BsonWriter. - */ - protected def writeMap(writer: BsonWriter, map: Map[String, Any], encoderContext: EncoderContext, codecRegistry: CodecRegistry): Unit = { - writer.writeStartDocument() - map.foreach(kv => { - writer.writeName(kv._1) - writeValue(writer, encoderContext, kv._2, codecRegistry) - }) - writer.writeEndDocument() - } - - /** - * Helper function to write an Iterable to a BsonWriter. 
- */ - protected def writeIterable(writer: BsonWriter, list: Iterable[_], encoderContext: EncoderContext, codecRegistry: CodecRegistry): Unit = { - writer.writeStartArray() - list.foreach(value => writeValue(writer, encoderContext, value, codecRegistry)) - writer.writeEndArray() - } - - /** - * Helper function to read a value from a BsonReader. - */ - protected def readValue(reader: BsonReader, decoderContext: DecoderContext, codecRegistry: CodecRegistry, bsonTypeCodecMap: BsonTypeCodecMap): Any = { - reader.getCurrentBsonType match { - case BsonType.NULL => - reader.readNull() - null - case BsonType.ARRAY => - readList(reader, decoderContext, codecRegistry, bsonTypeCodecMap) - case BsonType.DOCUMENT => - readMap(reader, decoderContext, codecRegistry, bsonTypeCodecMap) - case BsonType.BINARY if BsonBinarySubType.isUuid(reader.peekBinarySubType) && reader.peekBinarySize == 16 => - codecRegistry.get(uuidClass).decode(reader, decoderContext) - case bsonType: BsonType => - bsonTypeCodecMap.get(bsonType).decode(reader, decoderContext) - } - } - - /** - * Helper function to read a Map from a BsonReader. - */ - protected def readMap(reader: BsonReader, decoderContext: DecoderContext, codecRegistry: CodecRegistry, bsonTypeCodecMap: BsonTypeCodecMap): Map[String, _] = { - val map = mutable.Map[String, Any]() - reader.readStartDocument() - while (reader.readBsonType ne BsonType.END_OF_DOCUMENT) { - map += (reader.readName -> readValue(reader, decoderContext, codecRegistry, bsonTypeCodecMap)) - } - reader.readEndDocument() - map.toMap - } - - /** - * Helper function to read a List from a BsonReader. - */ - protected def readList(reader: BsonReader, decoderContext: DecoderContext, codecRegistry: CodecRegistry, bsonTypeCodecMap: BsonTypeCodecMap): List[_] = { - reader.readStartArray() - val list = mutable.ListBuffer[Any]() - while (reader.readBsonType ne BsonType.END_OF_DOCUMENT) { - list.append(readValue(reader, decoderContext, codecRegistry, bsonTypeCodecMap)) - } - reader.readEndArray() - list.toList - } -} - -/** - * A trait for creating custom Fields that are based on JObject and are saved as a document (BsonType.DOCUMENT). - */ -trait BsonDocumentJObjectField[T] extends BsonableField[T] { - this: Field[T, _] => - - implicit def formats: Formats = DefaultFormats - - def setFromBsonReader(reader: BsonReader, context: DecoderContext, registry: CodecRegistry, bsonTypeCodecMap: BsonTypeCodecMap): Box[MyType] = { - reader.getCurrentBsonType match { - case BsonType.DOCUMENT => - val doc = readValueToBsonDocument(reader, context, registry) - setFromJValue(BsonParser.serialize(doc)(formats)) - case BsonType.NULL => - reader.readNull() - Empty - case bsonType => - Failure(s"Invalid BsonType for field ${name}: ${bsonType}") - } - } - - def writeToBsonWriter(writer: BsonWriter, context: EncoderContext, registry: CodecRegistry, bsonTypeCodecMap: BsonTypeCodecMap): Unit = { - asJValue match { - case jo: JObject => - writer.writeName(name) - val codec = (new BsonDocumentCodec(registry)).asInstanceOf[Codec[Any]] - context.encodeWithChildContext(codec, writer, BsonParser.parse(jo)(formats)) - case JNothing | JNull if optional_? 
=> - case _ => - writer.writeName(name) - writer.writeNull() - } - } -} diff --git a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/CaseClassField.scala b/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/CaseClassField.scala deleted file mode 100644 index a7da757197..0000000000 --- a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/CaseClassField.scala +++ /dev/null @@ -1,259 +0,0 @@ -/* -* Copyright 2010-2020 WorldWide Conferencing, LLC -* -* Licensed under the Apache License, Version 2.0 (the "License"); -* you may not use this file except in compliance with the License. -* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 -* Unless required by applicable law or agreed to in writing, software -* distributed under the License is distributed on an "AS IS" BASIS, -* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -* See the License for the specific language governing permissions and -* limitations under the License. -*/ - -package net.liftweb -package mongodb -package record -package field - -import net.liftweb.common.{Failure, Empty, Full, Box} -import net.liftweb.http.js.JsExp -import net.liftweb.http.js.JE.{JsObj, JsRaw, Num, Str, JsNull} -import net.liftweb.json._ -import net.liftweb.record._ -import net.liftweb.record.RecordHelpers.jvalueToJsExp -import net.liftweb.record.field._ -import net.liftweb.util.Helpers - -import scala.collection.JavaConverters._ -import scala.reflect.Manifest -import scala.xml.{Text, NodeSeq} - -import org.bson._ -import org.bson.codecs.{BsonDocumentCodec, BsonTypeCodecMap, Codec, DecoderContext, EncoderContext} -import org.bson.codecs.configuration.CodecRegistry -import com.mongodb.{BasicDBList, DBObject} - -abstract class CaseClassTypedField[OwnerType <: Record[OwnerType], CaseType](val owner: OwnerType)(implicit mf: Manifest[CaseType]) - extends Field[CaseType, OwnerType] with MongoFieldFlavor[CaseType] with BsonableField[CaseType] { - - // override this for custom formats - def formats: Formats = DefaultFormats - - implicit lazy val _formats = formats - - override type MyType = CaseType - - def toForm: Box[NodeSeq] = Empty - - def asJValue: JValue = valueBox.map(Extraction.decompose) openOr (JNothing: JValue) - - def setFromJValue(jvalue: JValue): Box[CaseType] = jvalue match { - case JNothing | JNull => setBox(Empty) - case s => setBox(Helpers.tryo[CaseType] { s.extract[CaseType] }) - } - - def setFromBsonReader(reader: BsonReader, context: DecoderContext, registry: CodecRegistry, bsonTypeCodecMap: BsonTypeCodecMap): Box[MyType] = { - reader.getCurrentBsonType match { - case BsonType.DOCUMENT => - val doc = readValueToBsonDocument(reader, context, registry) - setFromJValue(BsonParser.serialize(doc)) - case BsonType.NULL => - reader.readNull() - Empty - case bsonType => - Failure(s"Invalid BsonType for field ${name}: ${bsonType}") - } - } - - def writeToBsonWriter(writer: BsonWriter, context: EncoderContext, registry: CodecRegistry, bsonTypeCodecMap: BsonTypeCodecMap): Unit = { - asJValue match { - case jo: JObject => - writer.writeName(name) - val codec = (new BsonDocumentCodec(registry)).asInstanceOf[Codec[Any]] - context.encodeWithChildContext(codec, writer, BsonParser.parse(jo)) - case JNull if optional_? 
=> - case JNull => - writer.writeName(name) - writer.writeNull() - case _ => - } - } - - /** - * Returns the field's value as a valid JavaScript expression - */ - override def asJs = asJValue match { - case JNothing => JsNull - case jv => JsRaw(compactRender(jv)) - } - - @deprecated("This was replaced with the functions from 'BsonableField'.", "3.4.2") - def asDBObject: DBObject = asJValue match { - case JNothing | JNull => null - case other => JObjectParser.parse(other.asInstanceOf[JObject]) - } - - def setFromDocument(doc: Document): Box[CaseType] = { - val jv = JObjectParser.serialize(doc) - setFromJValue(jv) - } - - @deprecated("This was replaced with the functions from 'BsonableField'.", "3.4.2") - def setFromDBObject(dbo: DBObject): Box[CaseType] = { - val jvalue = JObjectParser.serialize(dbo) - setFromJValue(jvalue) - } - - def setFromString(in: String): Box[CaseType] = Helpers.tryo { - JsonParser.parse(in).extract[CaseType] - } - - def setFromAny(in: Any): Box[CaseType] = in match { - case dbo: DBObject => setFromDBObject(dbo) - case doc: org.bson.Document => setFromDocument(doc) - case c if mf.runtimeClass.isInstance(c) => setBox(Full(c.asInstanceOf[CaseType])) - case Full(c) if mf.runtimeClass.isInstance(c) => setBox(Full(c.asInstanceOf[CaseType])) - case null|None|Empty => setBox(defaultValueBox) - case (failure: Failure) => setBox(failure) - case _ => setBox(defaultValueBox) - } -} - -class CaseClassField[OwnerType <: Record[OwnerType], CaseType](owner: OwnerType)(implicit mf: Manifest[CaseType]) - extends CaseClassTypedField[OwnerType, CaseType](owner) with MandatoryTypedField[CaseType] { - - - def this(owner: OwnerType, value: CaseType)(implicit mf: Manifest[CaseType]) = { - this(owner) - setBox(Full(value)) - } - - def defaultValue = null.asInstanceOf[MyType] -} - -@deprecated("Use the more consistently named 'CaseClassField' instead. 
This class will be removed in Lift 4.", "3.2") -class MongoCaseClassField[OwnerType <: Record[OwnerType], CaseType](@deprecatedName('rec) owner: OwnerType)(implicit mf: Manifest[CaseType]) - extends CaseClassField[OwnerType, CaseType](owner) - -class OptionalCaseClassField[OwnerType <: Record[OwnerType], CaseType](owner: OwnerType)(implicit mf: Manifest[CaseType]) - extends CaseClassTypedField[OwnerType, CaseType](owner) with OptionalTypedField[CaseType] { - - def this(owner: OwnerType, value: Box[CaseType])(implicit mf: Manifest[CaseType]) = { - this(owner) - setBox(value) - } -} - -class CaseClassListField[OwnerType <: Record[OwnerType], CaseType](val owner: OwnerType)(implicit mf: Manifest[CaseType]) - extends Field[List[CaseType], OwnerType] - with MandatoryTypedField[List[CaseType]] - with MongoFieldFlavor[List[CaseType]] - with BsonableField[List[CaseType]] -{ - // override this for custom formats - def formats: Formats = DefaultFormats - implicit lazy val _formats = formats - - override type MyType = List[CaseType] - - def asXHtml = Text(value.toString) - - def toForm: Box[NodeSeq] = Empty - - def defaultValue: MyType = Nil - - def asJValue: JValue = JArray(value.map(v => Extraction.decompose(v))) - - /** - * Returns the field's value as a valid JavaScript expression - */ - override def asJs = asJValue match { - case JNothing => JsNull - case jv => JsRaw(compactRender(jv)) - } - - def setFromJValue(jvalue: JValue): Box[MyType] = jvalue match { - case JArray(contents) => setBox(Full(contents.flatMap(s => Helpers.tryo[CaseType]{ s.extract[CaseType] }))) - case _ => setBox(Empty) - } - - @deprecated("This was replaced with the functions from 'BsonableField'.", "3.4.2") - def setFromDocumentList(list: java.util.List[Document]): Box[MyType] = { - val objs = list.asScala.map { JObjectParser.serialize } - setFromJValue(JArray(objs.toList)) - } - - def setFromBsonReader(reader: BsonReader, context: DecoderContext, registry: CodecRegistry, bsonTypeCodecMap: BsonTypeCodecMap): Box[MyType] = { - reader.getCurrentBsonType match { - case BsonType.ARRAY => - setFromJValue(JArray(readArrayToBsonDocument(reader, context, registry).map { BsonParser.serialize _ })) - case BsonType.NULL => - reader.readNull() - Empty - case bsonType => - Failure(s"Invalid BsonType for field ${name}: ${bsonType}") - } - } - - def writeToBsonWriter(writer: BsonWriter, context: EncoderContext, registry: CodecRegistry, bsonTypeCodecMap: BsonTypeCodecMap): Unit = { - writer.writeName(name) - writer.writeStartArray() - - asJValue match { - case JArray(list) => - list.foreach { v => - val codec = (new BsonDocumentCodec(registry)).asInstanceOf[Codec[Any]] - context.encodeWithChildContext(codec, writer, BsonParser.parse(v.asInstanceOf[JObject])) - } - case _ => - } - - writer.writeEndArray() - } - - @deprecated("This was replaced with the functions from 'BsonableField'.", "3.4.2") - def asDBObject: DBObject = { - val dbl = new BasicDBList - - asJValue match { - case JArray(list) => - list.foreach(v => dbl.add(JObjectParser.parse(v.asInstanceOf[JObject]))) - case _ => - } - - dbl - } - - @deprecated("This was replaced with the functions from 'BsonableField'.", "3.4.2") - def setFromDBObject(dbo: DBObject): Box[MyType] = { - val jvalue = JObjectParser.serialize(dbo) - setFromJValue(jvalue) - } - - def setFromAny(in: Any): Box[MyType] = in match { - case dbo: DBObject => setFromDBObject(dbo) - case list@c::xs if mf.runtimeClass.isInstance(c) => setBox(Full(list.asInstanceOf[MyType])) - case jlist: java.util.List[_] => { - if 
(!jlist.isEmpty) { - val elem = jlist.get(0) - if (elem.isInstanceOf[org.bson.Document]) { - setFromDocumentList(jlist.asInstanceOf[java.util.List[org.bson.Document]]) - } else { - setBox(Full(jlist.asScala.toList.asInstanceOf[MyType])) - } - } else { - setBox(Full(Nil)) - } - } - case _ => setBox(Empty) - } - - def setFromString(in: String): Box[MyType] = { - setFromJValue(JsonParser.parse(in)) - } -} - -@deprecated("Please use the more consistently named 'CaseClassListField' instead. This class will be removed in Lift 4.", "3.2") -class MongoCaseClassListField[OwnerType <: Record[OwnerType], CaseType](@deprecatedName('rec) owner: OwnerType)(implicit mf: Manifest[CaseType]) - extends CaseClassListField[OwnerType, CaseType](owner) diff --git a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/DBRefField.scala b/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/DBRefField.scala deleted file mode 100644 index 6476a22339..0000000000 --- a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/DBRefField.scala +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Copyright 2010-2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb -package record -package field - -import net.liftweb.common.{Box, Empty, Failure, Full} -import net.liftweb.http.js.JE.Str -import net.liftweb.json.JsonAST.{JNothing, JObject, JValue} -import net.liftweb.record.{Field, MandatoryTypedField, Record} -import scala.xml.NodeSeq - -import com.mongodb.{BasicDBObject, BasicDBObjectBuilder, DBObject, DBRef} -import com.mongodb.util.JSON -import org.bson.types.ObjectId - -/* -* Field for storing a DBRef -*/ -@deprecated("DBref is being removed. See 'MongoRef' for an alternative", "3.4.3") -class DBRefField[OwnerType <: BsonRecord[OwnerType], RefType <: MongoRecord[RefType]](rec: OwnerType, ref: RefType) - extends Field[DBRef, OwnerType] with MandatoryTypedField[DBRef] { - - /* - * get the referenced object - */ - def obj = synchronized { - if (!_calcedObj) { - _calcedObj = true - this._obj = ref.meta.findAny(value.getId) - } - _obj - } - - def cached_? : Boolean = synchronized { _calcedObj } - - def primeObj(obj: Box[RefType]) = synchronized { - _obj = obj - _calcedObj = true - } - - private var _obj: Box[RefType] = Empty - private var _calcedObj = false - - def asJs = Str(toString) - - def asJValue: JValue = (JNothing: JValue) // not implemented - - def setFromJValue(jvalue: JValue) = Empty // not implemented - - def asXHtml =
- - def defaultValue = new DBRef("", null) - - def setFromAny(in: Any): Box[DBRef] = in match { - case ref: DBRef => Full(set(ref)) - case Some(ref: DBRef) => Full(set(ref)) - case Full(ref: DBRef) => Full(set(ref)) - case seq: Seq[_] if !seq.isEmpty => seq.map(setFromAny).apply(0) - case (s: String) :: _ => setFromString(s) - case null => Full(set(null)) - case s: String => setFromString(s) - case None | Empty | Failure(_, _, _) => Full(set(null)) - case o => setFromString(o.toString) - } - - // assume string is json - def setFromString(in: String): Box[DBRef] = { - val dbo = JSON.parse(in).asInstanceOf[BasicDBObject] - val id = dbo.get("$id").toString - ObjectId.isValid(id) match { - case true => Full(set(new DBRef(dbo.get("$ref").toString, new ObjectId(id)))) - case false => Full(set(new DBRef(dbo.get("$ref").toString, id))) - } - } - - def toForm: Box[NodeSeq] = Empty - - def owner = rec -} - diff --git a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/DateField.scala b/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/DateField.scala deleted file mode 100644 index d58a599991..0000000000 --- a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/DateField.scala +++ /dev/null @@ -1,115 +0,0 @@ -/* -* Copyright 2010-2014 WorldWide Conferencing, LLC -* -* Licensed under the Apache License, Version 2.0 (the "License"); -* you may not use this file except in compliance with the License. -* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 -* Unless required by applicable law or agreed to in writing, software -* distributed under the License is distributed on an "AS IS" BASIS, -* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -* See the License for the specific language governing permissions and -* limitations under the License. -*/ - -package net.liftweb -package mongodb -package record -package field - -import java.util.Date - -import scala.xml.NodeSeq - -import common._ -import http.js.JE.{JsNull, JsRaw} -import http.S -import json._ -import mongodb.record._ -import net.liftweb.record._ -import net.liftweb.record.field._ -import util.Helpers._ - -trait DateTypedField extends TypedField[Date] { - - def formats: Formats - - def setFromAny(in: Any): Box[Date] = in match { - case d: Date => setBox(Full(d)) - case Some(d: Date) => setBox(Full(d)) - case Full(d: Date) => setBox(Full(d)) - case (d: Date) :: _ => setBox(Full(d)) - case s: String => setFromString(s) - case Some(s: String) => setFromString(s) - case Full(s: String) => setFromString(s) - case null|None|Empty => setBox(defaultValueBox) - case f: Failure => setBox(f) - case o => setFromString(o.toString) - } - - def setFromString(in: String): Box[Date] = formats.dateFormat.parse(in) match { - case Some(d: Date) => setBox(Full(d)) - case other => setBox(Failure("Invalid Date string: "+in)) - } - - def setFromJValue(jvalue: JValue): Box[Date] = jvalue match { - case JNothing|JNull if optional_? 
=> setBox(Empty) - case JObject(JField("$dt", JString(s)) :: Nil) => setFromString(s) - case other => setBox(FieldHelpers.expectedA("JObject", other)) - } - - private def elem = - S.fmapFunc(S.SFuncHolder(this.setFromAny(_))){funcName => - formats.dateFormat.format(v)) openOr ""} - tabindex={tabIndex.toString}/> - } - - def toForm = - uniqueFieldId match { - case Full(id) => Full(elem % ("id" -> id)) - case _ => Full(elem) - } - - def asJs = asJValue match { - case JNothing => JsNull - case jv => JsRaw(compactRender(jv)) - } - - def asJValue: JValue = valueBox.map(v => JsonDate(v)(formats)) openOr (JNothing: JValue) -} - -class DateField[OwnerType <: BsonRecord[OwnerType]](rec: OwnerType) - extends Field[Date, OwnerType] with MandatoryTypedField[Date] with DateTypedField { - - def owner = rec - - def formats = owner.meta.formats - - def this(rec: OwnerType, value: Date) = { - this(rec) - setBox(Full(value)) - } - - def defaultValue = new Date - - override def toString = value match { - case null => "null" - case d => valueBox.map { - v => formats.dateFormat.format(v) - } openOr "" - } -} - -class OptionalDateField[OwnerType <: BsonRecord[OwnerType]](rec: OwnerType) - extends Field[Date, OwnerType] with OptionalTypedField[Date] with DateTypedField { - - def owner = rec - - def formats = owner.meta.formats - - def this(rec: OwnerType, value: Box[Date]) = { - this(rec) - setBox(value) - } -} diff --git a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/JObjectField.scala b/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/JObjectField.scala deleted file mode 100644 index 9d4b3491ca..0000000000 --- a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/JObjectField.scala +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Copyright 2010-2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb -package record -package field - -import com.mongodb._ -import net.liftweb.common._ -import net.liftweb.json._ -import net.liftweb.record._ -import net.liftweb.util.Helpers.tryo - -import org.bson._ -import org.bson.codecs.{BsonDocumentCodec, BsonTypeCodecMap, Codec, DecoderContext, EncoderContext} -import org.bson.codecs.configuration.CodecRegistry - -import scala.xml.NodeSeq - -trait JObjectTypedField[OwnerType <: BsonRecord[OwnerType]] extends TypedField[JObject] - with Field[JObject, OwnerType] - with MongoFieldFlavor[JObject] - with BsonDocumentJObjectField[JObject] -{ - - override implicit val formats = owner.meta.formats - - def setFromJValue(jvalue: JValue): Box[JObject] = jvalue match { - case JNothing|JNull if optional_? 
=> setBox(Empty) - case jo: JObject => setBox(Full(jo)) - case other => setBox(FieldHelpers.expectedA("JObject", other)) - } - - def setFromAny(in: Any): Box[JObject] = in match { - case dbo: DBObject => setBox(setFromDBObject(dbo)) - case doc: Document => setBox(setFromDocument(doc)) - case jv: JObject => setBox(Full(jv)) - case Some(jv: JObject) => setBox(Full(jv)) - case Full(jv: JObject) => setBox(Full(jv)) - case seq: Seq[_] if seq.nonEmpty => seq.map(setFromAny).head - case (s: String) :: _ => setFromString(s) - case null => setBox(Full(null)) - case s: String => setFromString(s) - case None | Empty | Failure(_, _, _) => setBox(Full(null)) - case o => setFromString(o.toString) - } - - // assume string is json - def setFromString(in: String): Box[JObject] = { - // use lift-json to parse string into a JObject - setBox(tryo(JsonParser.parse(in).asInstanceOf[JObject])) - } - - def toForm: Box[NodeSeq] = Empty - - @deprecated("This was replaced with the functions from 'BsonableField'.", "3.4.3") - def asDBObject: DBObject = valueBox - .map { v => JObjectParser.parse(v)(owner.meta.formats) } - .openOr(new BasicDBObject) - - @deprecated("This was replaced with the functions from 'BsonableField'.", "3.4.3") - def setFromDBObject(obj: DBObject): Box[JObject] = - Full(JObjectParser.serialize(obj)(owner.meta.formats).asInstanceOf[JObject]) - - def setFromDocument(obj: Document): Box[JObject] = - Full(JObjectParser.serialize(obj)(owner.meta.formats).asInstanceOf[JObject]) - - def asJValue: JValue = valueBox openOr (JNothing: JValue) -} - -class JObjectField[OwnerType <: BsonRecord[OwnerType]](@deprecatedName('rec) val owner: OwnerType) - extends JObjectTypedField[OwnerType] with MandatoryTypedField[JObject] { - - def this(owner: OwnerType, value: JObject) = { - this(owner) - setBox(Full(value)) - } - - def defaultValue = JObject(List()) - -} - -class OptionalJObjectField[OwnerType <: BsonRecord[OwnerType]](val owner: OwnerType) - extends JObjectTypedField[OwnerType] with OptionalTypedField[JObject] { - - def this(owner: OwnerType, value: Box[JObject]) = { - this(owner) - setBox(value) - } - -} diff --git a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/JsonObjectField.scala b/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/JsonObjectField.scala deleted file mode 100644 index f7aa030ebd..0000000000 --- a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/JsonObjectField.scala +++ /dev/null @@ -1,199 +0,0 @@ -/* -* Copyright 2010-2020 WorldWide Conferencing, LLC -* -* Licensed under the Apache License, Version 2.0 (the "License"); -* you may not use this file except in compliance with the License. -* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 -* Unless required by applicable law or agreed to in writing, software -* distributed under the License is distributed on an "AS IS" BASIS, -* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -* See the License for the specific language governing permissions and -* limitations under the License. 
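For context on the field deleted above: `JObjectField` stored a raw lift-json `JObject` on a record. A minimal sketch of how it was typically used, with hypothetical record and field names (`Event`, `metadata`):

```scala
import net.liftweb.json.JsonDSL._
import net.liftweb.mongodb.record.{MongoMetaRecord, MongoRecord}
import net.liftweb.mongodb.record.field.{JObjectField, ObjectIdPk}

// Hypothetical record that keeps free-form JSON metadata in a single field.
class Event private () extends MongoRecord[Event] with ObjectIdPk[Event] {
  def meta = Event
  object metadata extends JObjectField(this)
}
object Event extends Event with MongoMetaRecord[Event]

// Setting and reading the field; the JsonDSL `~` operator builds the JObject.
val e = Event.createRecord.metadata(("source" -> "import") ~ ("retries" -> 3))
e.metadata.get // JObject holding the "source" and "retries" fields
```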
-*/ - -package net.liftweb -package mongodb -package record -package field - -import scala.xml.NodeSeq -import net.liftweb.common.{Box, Empty, Failure, Full} -import net.liftweb.http.js.JE.{JsNull, JsRaw} -import net.liftweb.json._ -import net.liftweb.record.{Field, FieldHelpers, MandatoryTypedField, OptionalTypedField} -import net.liftweb.util.Helpers.tryo - -import org.bson._ -import org.bson.codecs.{BsonDocumentCodec, BsonTypeCodecMap, Codec, DecoderContext, EncoderContext} -import org.bson.codecs.configuration.CodecRegistry - -import com.mongodb.{BasicDBList, DBObject} - -import scala.collection.JavaConverters._ - -abstract class JsonObjectTypedField[OwnerType <: BsonRecord[OwnerType], JObjectType <: JsonObject[JObjectType]] -(val owner: OwnerType, valueMeta: JsonObjectMeta[JObjectType]) - extends Field[JObjectType, OwnerType] - with MongoFieldFlavor[JObjectType] - with BsonDocumentJObjectField[JObjectType] -{ - - override implicit val formats = owner.meta.formats - - /** - * Convert the field value to an XHTML representation - */ - def toForm: Box[NodeSeq] = Empty // FIXME - - /** Encode the field value into a JValue */ - def asJValue: JValue = valueBox.map(_.asJObject) openOr (JNothing: JValue) - - /** - * Returns the field's value as a valid JavaScript expression - */ - override def asJs = asJValue match { - case JNothing => JsNull - case jv => JsRaw(compactRender(jv)) - } - - /* - * Decode the JValue and set the field to the decoded value. - * Returns Empty or Failure if the value could not be set - */ - def setFromJValue(jvalue: JValue): Box[JObjectType] = jvalue match { - case JNothing | JNull if optional_? => setBox(Empty) - case o: JObject => setBox(tryo(valueMeta.create(o))) - case other => setBox(FieldHelpers.expectedA("JObject", other)) - } - - def setFromAny(in: Any): Box[JObjectType] = in match { - case dbo: DBObject => setFromDBObject(dbo) - case value: JsonObject[_] => setBox(Full(value.asInstanceOf[JObjectType])) - case Some(value: JsonObject[_]) => setBox(Full(value.asInstanceOf[JObjectType])) - case Full(value: JsonObject[_]) => setBox(Full(value.asInstanceOf[JObjectType])) - case (value: JsonObject[_]) :: _ => setBox(Full(value.asInstanceOf[JObjectType])) - case s: String => setFromString(s) - case Some(s: String) => setFromString(s) - case Full(s: String) => setFromString(s) - case null|None|Empty => setBox(defaultValueBox) - case f: Failure => setBox(f) - case o => setFromString(o.toString) - } - - // parse String into a JObject - def setFromString(in: String): Box[JObjectType] = tryo(JsonParser.parse(in)) match { - case Full(jv: JValue) => setFromJValue(jv) - case f: Failure => setBox(f) - case _ => setBox(Failure(s"Error parsing String into a JValue: $in")) - } - - /* - * Convert this field's value into a DBObject so it can be stored in Mongo. - */ - @deprecated("This was replaced with the functions from 'BsonableField'.", "3.4.3") - def asDBObject: DBObject = JObjectParser.parse(asJValue.asInstanceOf[JObject]) - - // set this field's value using a DBObject returned from Mongo. 
- @deprecated("This was replaced with the functions from 'BsonableField'.", "3.4.3") - def setFromDBObject(dbo: DBObject): Box[JObjectType] = - setFromJValue(JObjectParser.serialize(dbo).asInstanceOf[JObject]) -} - -abstract class JsonObjectField[OwnerType <: BsonRecord[OwnerType], JObjectType <: JsonObject[JObjectType]] -(@deprecatedName('rec) owner: OwnerType, valueMeta: JsonObjectMeta[JObjectType]) - extends JsonObjectTypedField(owner, valueMeta) with MandatoryTypedField[JObjectType] { - - def this(owner: OwnerType, valueMeta: JsonObjectMeta[JObjectType], value: JObjectType) = { - this(owner, valueMeta) - setBox(Full(value)) - } -} - -class OptionalJsonObjectField[OwnerType <: BsonRecord[OwnerType], JObjectType <: JsonObject[JObjectType]] -(owner: OwnerType, valueMeta: JsonObjectMeta[JObjectType]) - extends JsonObjectTypedField(owner, valueMeta) with OptionalTypedField[JObjectType] { - - def this(owner: OwnerType, valueMeta: JsonObjectMeta[JObjectType], valueBox: Box[JObjectType]) = { - this(owner, valueMeta) - setBox(valueBox) - } -} - -/* -* List of JsonObject case classes -*/ -class JsonObjectListField[OwnerType <: BsonRecord[OwnerType], JObjectType <: JsonObject[JObjectType]] -(owner: OwnerType, valueMeta: JsonObjectMeta[JObjectType])(implicit mf: Manifest[JObjectType]) - extends MongoListField[OwnerType, JObjectType](owner: OwnerType) -{ - - @deprecated("This was replaced with the functions from 'BsonableField'.", "3.4.3") - override def asDBObject: DBObject = { - val dbl = new BasicDBList - value.foreach { v => dbl.add(JObjectParser.parse(v.asJObject()(owner.meta.formats))(owner.meta.formats)) } - dbl - } - - @deprecated("This was replaced with the functions from 'BsonableField'.", "3.4.3") - override def setFromDBObject(dbo: DBObject): Box[List[JObjectType]] = - setBox(Full(dbo.keySet.asScala.toList.map { k => - val v = dbo.get(k) - valueMeta.create(JObjectParser.serialize(v)(owner.meta.formats).asInstanceOf[JObject])(owner.meta.formats) - })) - - override def asJValue: JValue = JArray(value.map(_.asJObject()(owner.meta.formats))) - - /** - * Returns the field's value as a valid JavaScript expression - */ - override def asJs = asJValue match { - case JNothing => JsNull - case jv => JsRaw(compactRender(jv)) - } - - override def setFromJValue(jvalue: JValue) = jvalue match { - case JNothing | JNull if optional_? 
=> setBox(Empty) - case JArray(arr) => setBox(Full(arr.map { jv => - valueMeta.create(jv.asInstanceOf[JObject])(owner.meta.formats) - })) - case other => setBox(FieldHelpers.expectedA("JArray", other)) - } - - @deprecated("This was replaced with the functions from 'BsonableField'.", "3.4.3") - override def setFromDocumentList(list: java.util.List[Document]): Box[MyType] = { - val objs = list.asScala.map { JObjectParser.serialize } - setFromJValue(JArray(objs.toList)) - } - - override def setFromBsonReader(reader: BsonReader, context: DecoderContext, registry: CodecRegistry, bsonTypeCodecMap: BsonTypeCodecMap): Box[List[JObjectType]] = { - reader.getCurrentBsonType match { - case BsonType.ARRAY => - setFromJValue(JArray(readArrayToBsonDocument(reader, context, registry).map { BsonParser.serialize _ })) - case BsonType.NULL => - reader.readNull() - Empty - case bsonType => - Failure(s"Invalid BsonType for field ${name}: ${bsonType}") - } - } - - override def writeToBsonWriter(writer: BsonWriter, context: EncoderContext, registry: CodecRegistry, bsonTypeCodecMap: BsonTypeCodecMap): Unit = { - writer.writeName(name) - writer.writeStartArray() - - asJValue match { - case JArray(list) => - list.foreach { v => - val codec = (new BsonDocumentCodec(registry)).asInstanceOf[Codec[Any]] - context.encodeWithChildContext(codec, writer, BsonParser.parse(v.asInstanceOf[JObject])) - } - case _ => - } - - writer.writeEndArray() - } -} - -@deprecated("Use the more consistently named 'JsonObjectListField' instead. This class will be removed in Lift 4.", "3.2") -class MongoJsonObjectListField[OwnerType <: BsonRecord[OwnerType], JObjectType <: JsonObject[JObjectType]] -(@deprecatedName('rec) owner: OwnerType, valueMeta: JsonObjectMeta[JObjectType])(implicit mf: Manifest[JObjectType]) extends JsonObjectListField(owner, valueMeta) diff --git a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/MongoFieldFlavor.scala b/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/MongoFieldFlavor.scala deleted file mode 100644 index cf23eb89ef..0000000000 --- a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/MongoFieldFlavor.scala +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright 2010-2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb -package record -package field - -import net.liftweb.common.{Box, Empty, Failure, Full} -import net.liftweb.http.js.JE.{JsNull, JsRaw} -import net.liftweb.json._ -import com.mongodb.DBObject - -/** -* Describes common aspects related to Mongo fields -*/ -@deprecated("Please use 'BsonableField' instead.", "3.4.3") -trait MongoFieldFlavor[MyType] { - - /* - * convert this field's value into a DBObject so it can be stored in Mongo. - */ - @deprecated("This was replaced with the functions from 'BsonableField'.", "3.4.3") - def asDBObject: DBObject - - // set this field's value using a DBObject returned from Mongo. 
- @deprecated("This was replaced with the functions from 'BsonableField'.", "3.4.3") - def setFromDBObject(obj: DBObject): Box[MyType] - - /** - * Returns the field's value as a valid JavaScript expression - */ - def asJs = asJValue match { - case JNothing => JsNull - case jv => JsRaw(compactRender(jv)) - } - - /** Encode the field value into a JValue */ - def asJValue: JValue - -} diff --git a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/MongoListField.scala b/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/MongoListField.scala deleted file mode 100644 index ac5a81ad7f..0000000000 --- a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/MongoListField.scala +++ /dev/null @@ -1,197 +0,0 @@ -/* - * Copyright 2010-2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb -package record -package field - -import java.util.{List => JavaList, UUID} -import java.util.regex.Pattern - -import com.mongodb._ -import net.liftweb.common.{Box, Empty, Failure, Full} -import net.liftweb.http.SHtml -import net.liftweb.json._ -import net.liftweb.record.{Field, FieldHelpers, MandatoryTypedField} -import net.liftweb.util.Helpers._ - -import org.bson._ -import org.bson.codecs.{BsonDocumentCodec, BsonTypeCodecMap, Codec, DecoderContext, Encoder, EncoderContext} -import org.bson.codecs.configuration.CodecRegistry - -import scala.collection.mutable -import scala.collection.JavaConverters._ -import scala.xml.NodeSeq - -object BsonBinary { - def apply(subtype: BsonBinarySubType, data: Array[Byte]): BsonBinary = new BsonBinary(subtype, data) -} - -/** - * List field. - * - * Supported types: - * primitives - String, Int, Long, Double, BigDecimal, Byte, BigInt, - * Boolean (and their Java equivalents) - * date types - java.util.Date, org.joda.time.DateTime - * mongo types - ObjectId, Pattern, UUID - * - * If you need to support other types, you will need to override the - * `readValue` function and either override the `writeValue` function - * or create a custom codec for it and add it to your registry. You'll - * also need to override the `asJValue` and `setFromJValue` functions if you - * will be using them. - * - * Note: setting optional_? 
= false will result in incorrect equals behavior when using setFromJValue - */ -class MongoListField[OwnerType <: BsonRecord[OwnerType], ListType: Manifest](rec: OwnerType) - extends Field[List[ListType], OwnerType] - with MandatoryTypedField[List[ListType]] - with MongoFieldFlavor[List[ListType]] - with BsonableField[List[ListType]] -{ - import mongodb.Meta.Reflection._ - - lazy val mf = manifest[ListType] - - override type MyType = List[ListType] - - def owner = rec - - def defaultValue = List.empty[ListType] - - implicit def formats = owner.meta.formats - - def setFromAny(in: Any): Box[MyType] = { - in match { - case dbo: DBObject => setFromDBObject(dbo) - case list@c::xs if mf.runtimeClass.isInstance(c) => setBox(Full(list.asInstanceOf[MyType])) - case Some(list@c::xs) if mf.runtimeClass.isInstance(c) => setBox(Full(list.asInstanceOf[MyType])) - case Full(list@c::xs) if mf.runtimeClass.isInstance(c) => setBox(Full(list.asInstanceOf[MyType])) - case jlist: JavaList[_] => { - if(!jlist.isEmpty) { - val elem = jlist.get(0) - if(elem.isInstanceOf[Document]) { - setFromDocumentList(jlist.asInstanceOf[JavaList[Document]]) - } else { - setBox(Full(jlist.asScala.toList.asInstanceOf[MyType])) - } - } else { - setBox(Full(Nil)) - } - } - case s: String => setFromString(s) - case Some(s: String) => setFromString(s) - case Full(s: String) => setFromString(s) - case null|None|Empty => setBox(defaultValueBox) - case f: Failure => setBox(f) - case o => setFromString(o.toString) - } - } - - def setFromJValue(jvalue: JValue): Box[MyType] = jvalue match { - case JNothing|JNull if optional_? => setBox(Empty) - case JArray(array) => setBox(Full((array.map { - case JsonObjectId(objectId) => objectId - case JsonRegex(regex) => regex - case JsonUUID(uuid) => uuid - case JsonDateTime(dt) if (mf.toString == "org.joda.time.DateTime") => dt - case JsonDate(date) => date - case other => other.values - }).asInstanceOf[MyType])) - case other => setBox(FieldHelpers.expectedA("JArray", other)) - } - - // parse String into a JObject - def setFromString(in: String): Box[List[ListType]] = tryo(JsonParser.parse(in)) match { - case Full(jv: JValue) => setFromJValue(jv) - case f: Failure => setBox(f) - case other => setBox(Failure("Error parsing String into a JValue: "+in)) - } - - /** Options for select list **/ - def options: List[(ListType, String)] = Nil - - private def elem = { - def elem0 = SHtml.multiSelectObj[ListType]( - options, - value, - set(_) - ) % ("tabindex" -> tabIndex.toString) - - SHtml.hidden(() => set(Nil)) ++ (uniqueFieldId match { - case Full(id) => (elem0 % ("id" -> id)) - case _ => elem0 - }) - } - - def toForm: Box[NodeSeq] = - if (options.length > 0) Full(elem) - else Empty - - def asJValue: JValue = JArray(value.map(li => li.asInstanceOf[AnyRef] match { - case x if primitive_?(x.getClass) => primitive2jvalue(x) - case x if mongotype_?(x.getClass) => mongotype2jvalue(x)(owner.meta.formats) - case x if datetype_?(x.getClass) => datetype2jvalue(x)(owner.meta.formats) - case _ => JNothing - })) - - /* - * Convert this field's value into a DBObject so it can be stored in Mongo. 
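The scaladoc above lists the element types `MongoListField` supports out of the box; anything else means overriding `readValue`/`writeValue` or registering a custom codec. A minimal sketch of the common case (a list of one of the supported primitives), with hypothetical names:

```scala
import net.liftweb.mongodb.record.{MongoMetaRecord, MongoRecord}
import net.liftweb.mongodb.record.field.{MongoListField, ObjectIdPk}

// Hypothetical record with a list of Strings, one of the supported primitive types.
class Post private () extends MongoRecord[Post] with ObjectIdPk[Post] {
  def meta = Post
  object tags extends MongoListField[Post, String](this)
}
object Post extends Post with MongoMetaRecord[Post]

val p = Post.createRecord.tags(List("scala", "lift"))
p.tags.get // List("scala", "lift")
```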
- */ - @deprecated("This was replaced with the functions from 'BsonableField'.", "3.4.3") - def asDBObject: DBObject = { - val dbl = new BasicDBList - - value.foreach { - case f => f.asInstanceOf[AnyRef] match { - case x if primitive_?(x.getClass) => dbl.add(x) - case x if mongotype_?(x.getClass) => dbl.add(x) - case x if datetype_?(x.getClass) => dbl.add(datetype2dbovalue(x)) - case o => dbl.add(o.toString) - } - } - dbl - } - - // set this field's value using a DBObject returned from Mongo. - @deprecated("This was replaced with the functions from 'BsonableField'.", "3.4.3") - def setFromDBObject(dbo: DBObject): Box[MyType] = - setBox(Full(dbo.asInstanceOf[BasicDBList].asScala.toList.asInstanceOf[MyType])) - - @deprecated("This was replaced with the functions from 'BsonableField'.", "3.4.3") - def setFromDocumentList(list: JavaList[Document]): Box[MyType] = { - throw new RuntimeException("Warning, setting Document as field with no conversion, probably not something you want to do") - } - - def setFromBsonReader(reader: BsonReader, context: DecoderContext, registry: CodecRegistry, bsonTypeCodecMap: BsonTypeCodecMap): Box[List[ListType]] = { - reader.getCurrentBsonType match { - case BsonType.NULL => - reader.readNull() - Empty - case BsonType.ARRAY => - setBox(tryo(readList(reader, context, registry, bsonTypeCodecMap).asInstanceOf[List[ListType]])) - case bsonType => - Failure(s"Invalid BsonType for field ${name}: ${bsonType}") - } - } - - def writeToBsonWriter(writer: BsonWriter, context: EncoderContext, registry: CodecRegistry, bsonTypeCodecMap: BsonTypeCodecMap): Unit = { - writer.writeName(name) - writeIterable(writer, value, context.getChildContext, registry) - } -} diff --git a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/MongoMapField.scala b/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/MongoMapField.scala deleted file mode 100644 index 8ef962df68..0000000000 --- a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/MongoMapField.scala +++ /dev/null @@ -1,162 +0,0 @@ -/* - * Copyright 2010-2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb -package record -package field - -import scala.collection.mutable -import scala.collection.JavaConverters._ -import scala.xml.NodeSeq - -import net.liftweb.common.{Box, Empty, Failure, Full} -import net.liftweb.http.js.JsExp -import net.liftweb.http.js.JE.{JsNull, JsRaw} -import net.liftweb.json._ -import net.liftweb.record._ -import net.liftweb.util.Helpers.tryo - -import com.mongodb._ -import org.bson._ -import org.bson.codecs.{BsonDocumentCodec, BsonTypeCodecMap, Codec, DecoderContext, Encoder, EncoderContext} -import org.bson.codecs.configuration.CodecRegistry - -/** - * Note: setting optional_? 
= false will result in incorrect equals behavior when using setFromJValue - */ -class MongoMapField[OwnerType <: BsonRecord[OwnerType], MapValueType](rec: OwnerType) - extends Field[Map[String, MapValueType], OwnerType] with MandatoryTypedField[Map[String, MapValueType]] - with MongoFieldFlavor[Map[String, MapValueType]] - with BsonableField[Map[String, MapValueType]] -{ - - import mongodb.Meta.Reflection._ - - def owner = rec - - def defaultValue = Map[String, MapValueType]() - - def setFromAny(in: Any): Box[Map[String, MapValueType]] = { - in match { - case dbo: DBObject => setFromDBObject(dbo) - case doc: Document => setFromDocument(doc) - case map: Map[_, _] => setBox(Full(map.asInstanceOf[Map[String, MapValueType]])) - case Some(map: Map[_, _]) => setBox(Full(map.asInstanceOf[Map[String, MapValueType]])) - case Full(map: Map[_, _]) => setBox(Full(map.asInstanceOf[Map[String, MapValueType]])) - case (map: Map[_, _]) :: _ => setBox(Full(map.asInstanceOf[Map[String, MapValueType]])) - case s: String => setFromString(s) - case Some(s: String) => setFromString(s) - case Full(s: String) => setFromString(s) - case null|None|Empty => setBox(defaultValueBox) - case f: Failure => setBox(f) - case o => setFromString(o.toString) - } - } - - def setFromJValue(jvalue: JValue) = jvalue match { - case JNothing|JNull if optional_? => setBox(Empty) - case JObject(obj) => setBox(Full( - Map() ++ obj.map(jf => (jf.name, jf.value.values.asInstanceOf[MapValueType])) - )) - case other => setBox(FieldHelpers.expectedA("JObject", other)) - } - - def setFromString(in: String): Box[Map[String, MapValueType]] = tryo(JsonParser.parse(in)) match { - case Full(jv: JValue) => setFromJValue(jv) - case f: Failure => setBox(f) - case other => setBox(Failure("Error parsing String into a JValue: "+in)) - } - - def toForm: Box[NodeSeq] = Empty - - def asJValue: JValue = JObject(value.keys.map { - k => - JField(k, value(k).asInstanceOf[AnyRef] match { - case x if primitive_?(x.getClass) => primitive2jvalue(x) - case x if mongotype_?(x.getClass) => mongotype2jvalue(x)(owner.meta.formats) - case x if datetype_?(x.getClass) => datetype2jvalue(x)(owner.meta.formats) - case _ => JNothing - }) - }.toList) - - /* - * Convert this field's value into a DBObject so it can be stored in Mongo. - */ - @deprecated("This was replaced with the functions from 'BsonableField'.", "3.4.3") - def asDBObject: DBObject = { - val dbo = new BasicDBObject - value.keys.foreach { key => - value.get(key).foreach { innerValue => - dbo.put(key.toString, innerValue.asInstanceOf[Object]) - } - } - dbo - } - - // set this field's value using a DBObject returned from Mongo. - @deprecated("This was replaced with the functions from 'BsonableField'.", "3.4.3") - def setFromDBObject(dbo: DBObject): Box[Map[String, MapValueType]] = { - setBox(Full( - Map() ++ dbo.keySet.asScala.map { - k => (k.toString, dbo.get(k).asInstanceOf[MapValueType]) - } - )) - } - - // set this field's value using a bson.Document returned from Mongo. 
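As with the list field, the `MongoMapField` removed above holds a string-keyed map whose values are written directly into the enclosing document. A minimal sketch, with hypothetical names:

```scala
import net.liftweb.mongodb.record.{MongoMetaRecord, MongoRecord}
import net.liftweb.mongodb.record.field.{MongoMapField, ObjectIdPk}

// Hypothetical record counting page hits in a Map[String, Int].
class PageStats private () extends MongoRecord[PageStats] with ObjectIdPk[PageStats] {
  def meta = PageStats
  object hits extends MongoMapField[PageStats, Int](this)
}
object PageStats extends PageStats with MongoMetaRecord[PageStats]

val stats = PageStats.createRecord.hits(Map("home" -> 3, "about" -> 1))
stats.hits.get // Map("home" -> 3, "about" -> 1)
```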
- @deprecated("This was replaced with the functions from 'BsonableField'.", "3.4.3") - def setFromDocument(doc: Document): Box[Map[String, MapValueType]] = { - val map = scala.collection.mutable.Map[String, MapValueType]() - - doc.keySet.asScala.foreach { k => - map += k -> doc.get(k).asInstanceOf[MapValueType] - } - - setBox { - Full(map.toMap) - } - } - - @deprecated("This was replaced with the functions from 'BsonableField'.", "3.4.3") - def asDocument: Document = { - val doc = new Document() - - value.keys.view.foreach { k => - doc.append(k, value.getOrElse(k, "").asInstanceOf[AnyRef]) - } - - doc - } - - def setFromBsonReader(reader: BsonReader, context: DecoderContext, registry: CodecRegistry, bsonTypeCodecMap: BsonTypeCodecMap): Box[Map[String, MapValueType]] = { - reader.getCurrentBsonType match { - case BsonType.NULL => - reader.readNull() - Empty - case BsonType.DOCUMENT => - setBox(tryo(readMap(reader, context, registry, bsonTypeCodecMap).asInstanceOf[Map[String, MapValueType]])) - case bsonType => - Failure(s"Invalid BsonType for field ${name}: ${bsonType}") - } - } - - def writeToBsonWriter(writer: BsonWriter, context: EncoderContext, registry: CodecRegistry, bsonTypeCodecMap: BsonTypeCodecMap): Unit = { - writer.writeName(name) - writeMap(writer, value.asInstanceOf[Map[String, Any]], context.getChildContext, registry) - } -} - diff --git a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/MongoPasswordField.scala b/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/MongoPasswordField.scala deleted file mode 100644 index 3bf015e88e..0000000000 --- a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/MongoPasswordField.scala +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Copyright 2010-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package mongodb -package record -package field - -import scala.xml.{Node, NodeSeq, Text} - -import net.liftweb.common.{Box, Empty, Failure, Full} -import net.liftweb.http.S -import net.liftweb.http.js.JE._ -import net.liftweb.util.{FatLazy, FieldError, Helpers, Safe} - -import Helpers._ - -case class Password(pwd: String, salt: String) extends JsonObject[Password] { - def meta = Password -} - -object Password extends JsonObjectMeta[Password] { - def apply(in: String): Password = Password(in, "") -} - -object MongoPasswordField { - val blankPw = "*******" - - def encrypt(s: String, salt: String) = hash("{"+s+"} salt={" + salt + "}") -} - -class MongoPasswordField[OwnerType <: BsonRecord[OwnerType]](rec: OwnerType, minLen: Int) extends JsonObjectField[OwnerType, Password](rec, Password) { - - def this(rec: OwnerType) = { - this(rec, 3) - } - - def setPassword(in: String) = set(Password(in)) - - private val salt_i = FatLazy(Safe.randomString(16)) - - var validatorValue: Box[Password] = valueBox - - override def set_!(in: Box[Password]): Box[Password] = { - validatorValue = in - in.map(p => - if (p.salt.length == 0) // only encrypt the password if it hasn't already been encrypted - Password(MongoPasswordField.encrypt(p.pwd, salt_i.get), salt_i.get) - else - p - ) - } - - override def validate: List[FieldError] = runValidation(validatorValue) - - private def elem = S.fmapFunc(S.SFuncHolder(this.setPassword(_))) { - funcName => } - - override def toForm: Box[NodeSeq] = - uniqueFieldId match { - case Full(id) => Full(elem % ("id" -> id)) - case _ => Full(elem) - } - - private def validatePassword(pwd: Password): List[FieldError] = pwd match { - case null | Password("", _) | Password("*", _) | Password(MongoPasswordField.blankPw, _) => - Text(S.?("password.must.be.set")) - case Password(pwd, _) if pwd.length < minLen => - Text(S.?("password.too.short")) - case _ => Nil - } - - override def validations = validatePassword _ :: Nil - - override def defaultValue = Password("") - - override def asJs = valueBox.map(vb => Str(vb.pwd)) openOr Str(defaultValue.pwd) - - def isMatch(toMatch: String): Boolean = - MongoPasswordField.encrypt(toMatch, value.salt) == value.pwd -} - diff --git a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/MongoPk.scala b/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/MongoPk.scala deleted file mode 100644 index 03a20d7845..0000000000 --- a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/MongoPk.scala +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright 2010 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
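The `MongoPasswordField` deleted above salts and hashes the password in `set_!` and exposes `isMatch` for checking a candidate value. A minimal sketch of that flow, with hypothetical names:

```scala
import net.liftweb.mongodb.record.{MongoMetaRecord, MongoRecord}
import net.liftweb.mongodb.record.field.{MongoPasswordField, ObjectIdPk}

// Hypothetical user record; the second constructor argument is the minimum length.
class User private () extends MongoRecord[User] with ObjectIdPk[User] {
  def meta = User
  object password extends MongoPasswordField(this, 8)
}
object User extends User with MongoMetaRecord[User]

val u = User.createRecord
u.password.setPassword("s3cret-pw")  // stored as Password(hash, salt), not plain text
u.password.isMatch("s3cret-pw")      // true
u.password.isMatch("nope")           // false
```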
- */ - -package net.liftweb -package mongodb -package record -package field - -import common.{Box, Empty, Full} -import util.StringHelpers - -import scala.util.Random -import java.util.UUID - -import org.bson.types.ObjectId -import net.liftweb.record.field.{IntField, LongField, StringField} - -/* - * Trait for creating a "Primary Key" Field. These are all an id field - * that is saved as _id in the database. Mix one of these into your - * MongoRecord. - */ -trait MongoPk[PkType] { - def id: PkType - /** Override this to set default value of id field */ - def defaultIdValue: Any -} - -trait ObjectIdPk[OwnerType <: MongoRecord[OwnerType]] - extends MongoPk[ObjectIdField[OwnerType]] -{ - self: OwnerType => - - def defaultIdValue = ObjectId.get - - object id extends ObjectIdField(this.asInstanceOf[OwnerType]) { - override def name = "_id" - override def defaultValue = defaultIdValue - override def shouldDisplay_? = false - } -} - -trait UUIDPk[OwnerType <: MongoRecord[OwnerType]] - extends MongoPk[UUIDField[OwnerType]] -{ - self: OwnerType => - - def defaultIdValue = UUID.randomUUID - - object id extends UUIDField(this.asInstanceOf[OwnerType]) { - override def name = "_id" - override def defaultValue = defaultIdValue - override def shouldDisplay_? = false - } -} - -trait StringPk[OwnerType <: MongoRecord[OwnerType]] - extends MongoPk[StringField[OwnerType]] -{ - self: OwnerType => - - def defaultIdValue = StringHelpers.randomString(maxIdLength) - def maxIdLength: Int = 32 - - object id extends StringField(this.asInstanceOf[OwnerType], maxIdLength) { - override def name = "_id" - override def defaultValue = defaultIdValue - override def shouldDisplay_? = false - } -} - -trait IntPk[OwnerType <: MongoRecord[OwnerType]] - extends MongoPk[IntField[OwnerType]] -{ - self: OwnerType => - - def defaultIdValue = Random.nextInt - - object id extends IntField(this.asInstanceOf[OwnerType]) { - override def name = "_id" - override def defaultValue = defaultIdValue - override def shouldDisplay_? = false - } -} - -trait LongPk[OwnerType <: MongoRecord[OwnerType]] - extends MongoPk[LongField[OwnerType]] -{ - self: OwnerType => - - def defaultIdValue = Random.nextLong - - object id extends LongField(this.asInstanceOf[OwnerType]) { - override def name = "_id" - override def defaultValue = defaultIdValue - override def shouldDisplay_? = false - } -} diff --git a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/MongoRefField.scala b/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/MongoRefField.scala deleted file mode 100644 index a77122fe5a..0000000000 --- a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/MongoRefField.scala +++ /dev/null @@ -1,162 +0,0 @@ -/* - * Copyright 2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
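The `MongoPk` traits deleted above each contribute an `id` field that is persisted as `_id`; mixing one into a `MongoRecord` is all that is required. A minimal sketch with a hypothetical record:

```scala
import net.liftweb.mongodb.record.{MongoMetaRecord, MongoRecord}
import net.liftweb.mongodb.record.field.ObjectIdPk
import net.liftweb.record.field.StringField

// ObjectIdPk supplies `object id`, named "_id" and defaulting to a fresh ObjectId.
class Book private () extends MongoRecord[Book] with ObjectIdPk[Book] {
  def meta = Book
  object title extends StringField(this, 256)
}
object Book extends Book with MongoMetaRecord[Book]

val b = Book.createRecord.title("Exploring Lift")
b.id.get // the generated ObjectId that will be saved as _id
```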
- */ - -package net.liftweb -package mongodb -package record -package field - -import common.{Box, Empty, Full} -import http.SHtml -import util.Helpers._ - -import java.util.UUID - -import org.bson.types.ObjectId -import net.liftweb.record.TypedField -import net.liftweb.record.field._ - -/* - * Trait for creating a Field for storing a "foreign key". Caches the - * item after fetching. Implementations are available for ObjectId, UUID, String, - * Int, and Long, but you can mix this into any Field. - * - * toForm produces a select form element. You just need to supply the - * options by overriding the options method. - */ -trait MongoRefField[RefType <: MongoRecord[RefType], MyType] extends TypedField[MyType] { - - /** The MongoMetaRecord of the referenced object **/ - def refMeta: MongoMetaRecord[RefType] - - /** - * Find the referenced object - */ - def find: Box[RefType] = valueBox.flatMap(v => refMeta.findAny(v)) - - /** - * Get the cacheable referenced object - */ - def obj = synchronized { - if (!_calcedObj) { - _calcedObj = true - this._obj = find - } - _obj - } - - def cached_? : Boolean = synchronized { _calcedObj } - - def primeObj(obj: Box[RefType]) = synchronized { - _obj = obj - _calcedObj = true - } - - private[this] var _obj: Box[RefType] = Empty - private[this] var _calcedObj = false - - override def setBox(in: Box[MyType]): Box[MyType] = synchronized { - _calcedObj = false // invalidate the cache - super.setBox(in) - } - - /** Options for select list **/ - def options: List[(Box[MyType], String)] = Nil - - /** Label for the selection item representing Empty, show when this field is optional. Defaults to the empty string. */ - def emptyOptionLabel: String = "" - - def buildDisplayList: List[(Box[MyType], String)] = { - if (optional_?) (Empty, emptyOptionLabel)::options else options - } - - private[this] def elem = SHtml.selectObj[Box[MyType]]( - buildDisplayList, - Full(valueBox), - setBox - ) % ("tabindex" -> tabIndex.toString) - - override def toForm = - if (options.nonEmpty) { - uniqueFieldId match { - case Full(id) => Full(elem % ("id" -> id)) - case _ => Full(elem) - } - } else { - Empty - } -} - -class ObjectIdRefField[OwnerType <: BsonRecord[OwnerType], RefType <: MongoRecord[RefType]]( - @deprecatedName('rec) owner: OwnerType, val refMeta: MongoMetaRecord[RefType] -) extends ObjectIdField[OwnerType](owner) with MongoRefField[RefType, ObjectId] { - override def find: Box[RefType] = valueBox.flatMap(v => refMeta.find(v)) -} - -class OptionalObjectIdRefField[OwnerType <: BsonRecord[OwnerType], RefType <: MongoRecord[RefType]]( - owner: OwnerType, val refMeta: MongoMetaRecord[RefType] -) extends OptionalObjectIdField[OwnerType](owner) with MongoRefField[RefType, ObjectId] { - override def find: Box[RefType] = valueBox.flatMap(v => refMeta.find(v)) -} - - -class UUIDRefField[OwnerType <: BsonRecord[OwnerType], RefType <: MongoRecord[RefType]]( - @deprecatedName('rec) owner: OwnerType, val refMeta: MongoMetaRecord[RefType] -) extends UUIDField[OwnerType](owner) with MongoRefField[RefType, UUID] { - override def find: Box[RefType] = valueBox.flatMap(v => refMeta.find(v)) -} - -class OptionalUUIDRefField[OwnerType <: BsonRecord[OwnerType], RefType <: MongoRecord[RefType]]( - owner: OwnerType, val refMeta: MongoMetaRecord[RefType] -) extends OptionalUUIDField[OwnerType](owner) with MongoRefField[RefType, UUID] { - override def find: Box[RefType] = valueBox.flatMap(v => refMeta.find(v)) -} - -class StringRefField[OwnerType <: BsonRecord[OwnerType], RefType <: 
MongoRecord[RefType]]( - @deprecatedName('rec) owner: OwnerType, val refMeta: MongoMetaRecord[RefType], maxLen: Int -) extends StringField[OwnerType](owner, maxLen) with MongoRefField[RefType, String] { - override def find: Box[RefType] = valueBox.flatMap(v => refMeta.find(v)) -} - -class OptionalStringRefField[OwnerType <: BsonRecord[OwnerType], RefType <: MongoRecord[RefType]]( - owner: OwnerType, val refMeta: MongoMetaRecord[RefType], maxLen: Int -) extends OptionalStringField[OwnerType](owner, maxLen) with MongoRefField[RefType, String] { - override def find: Box[RefType] = valueBox.flatMap(v => refMeta.find(v)) -} - -class IntRefField[OwnerType <: BsonRecord[OwnerType], RefType <: MongoRecord[RefType]]( - @deprecatedName('rec) owner: OwnerType, val refMeta: MongoMetaRecord[RefType] -) extends IntField[OwnerType](owner) with MongoRefField[RefType, Int] { - override def find: Box[RefType] = valueBox.flatMap(v => refMeta.find(v)) -} - -class OptionalIntRefField[OwnerType <: BsonRecord[OwnerType], RefType <: MongoRecord[RefType]]( - owner: OwnerType, val refMeta: MongoMetaRecord[RefType] -) extends OptionalIntField[OwnerType](owner) with MongoRefField[RefType, Int] { - override def find: Box[RefType] = valueBox.flatMap(v => refMeta.find(v)) -} - -class LongRefField[OwnerType <: BsonRecord[OwnerType], RefType <: MongoRecord[RefType]]( - @deprecatedName('rec) owner: OwnerType, val refMeta: MongoMetaRecord[RefType] -) extends LongField[OwnerType](owner) with MongoRefField[RefType, Long] { - override def find: Box[RefType] = valueBox.flatMap(v => refMeta.find(v)) -} - -class OptionalLongRefField[OwnerType <: BsonRecord[OwnerType], RefType <: MongoRecord[RefType]]( - owner: OwnerType, val refMeta: MongoMetaRecord[RefType] -) extends OptionalLongField[OwnerType](owner) with MongoRefField[RefType, Long] { - override def find: Box[RefType] = valueBox.flatMap(v => refMeta.find(v)) -} diff --git a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/MongoRefListField.scala b/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/MongoRefListField.scala deleted file mode 100644 index 41028f9398..0000000000 --- a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/MongoRefListField.scala +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Copyright 2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb -package record -package field - -import common._ -import http.{S, SHtml} -import net.liftweb.record.{Field, MandatoryTypedField, TypedField} - -import java.util.UUID - -import org.bson.types.ObjectId - -/* - * Trait for creating a Field for storing a list of "foreign keys". Caches the - * items after fetching. Implementations are available for ObjectId, UUID, String, - * Int, and Long, but you can extend this. - * - * toForm produces a multi-select form element. You just need to supply the - * options by overriding the options method. 
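As the comments above describe, the ref fields render a select (or a multi-select, for the list variant) once `options` is overridden, and `obj` fetches and caches the referenced record. A minimal sketch with hypothetical `Customer`/`Order` records:

```scala
import net.liftweb.common.Full
import net.liftweb.mongodb.record.{MongoMetaRecord, MongoRecord}
import net.liftweb.mongodb.record.field.{ObjectIdPk, ObjectIdRefField}
import net.liftweb.record.field.StringField

class Customer private () extends MongoRecord[Customer] with ObjectIdPk[Customer] {
  def meta = Customer
  object name extends StringField(this, 128)
}
object Customer extends Customer with MongoMetaRecord[Customer]

class Order private () extends MongoRecord[Order] with ObjectIdPk[Order] {
  def meta = Order
  object customer extends ObjectIdRefField(this, Customer) {
    // Supplying options is what makes toForm produce a <select> of customers.
    override def options = Customer.findAll.map(c => (Full(c.id.get), c.name.get))
  }
}
object Order extends Order with MongoMetaRecord[Order]

// Order.createRecord.customer.obj fetches (and caches) the referenced Customer as a Box[Customer].
```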
- */ -abstract class MongoRefListField[OwnerType <: BsonRecord[OwnerType], RefType <: MongoRecord[RefType], MyType] - (rec: OwnerType)(implicit mf: Manifest[MyType]) extends MongoListField[OwnerType, MyType](rec) { - - /** The MongoMetaRecord of the referenced object **/ - def refMeta: MongoMetaRecord[RefType] - - /** - * Find the referenced objects - */ - def findAll = refMeta.findAllByList(this.value) - - /* - * get the referenced objects - */ - def objs = synchronized { - if (!_calcedObjs) { - _calcedObjs = true - this._objs = findAll - } - _objs - } - - def cached_? : Boolean = synchronized { _calcedObjs } - - def primeObjs(objs: List[RefType]) = synchronized { - _objs = objs - _calcedObjs = true - } - - private var _objs: List[RefType] = Nil - private var _calcedObjs = false - - override def setBox(in: Box[MyType]): Box[MyType] = synchronized { - _calcedObjs = false // invalidate the cache - super.setBox(in) - } -} - -class ObjectIdRefListField[OwnerType <: BsonRecord[OwnerType], RefType <: MongoRecord[RefType]]( - rec: OwnerType, val refMeta: MongoMetaRecord[RefType] -) extends MongoRefListField[OwnerType, RefType, ObjectId](rec) {} - -class UUIDRefListField[OwnerType <: BsonRecord[OwnerType], RefType <: MongoRecord[RefType]]( - rec: OwnerType, val refMeta: MongoMetaRecord[RefType] -) extends MongoRefListField[OwnerType, RefType, UUID](rec) {} - -class StringRefListField[OwnerType <: BsonRecord[OwnerType], RefType <: MongoRecord[RefType]]( - rec: OwnerType, val refMeta: MongoMetaRecord[RefType] -) extends MongoRefListField[OwnerType, RefType, String](rec) {} - -class IntRefListField[OwnerType <: BsonRecord[OwnerType], RefType <: MongoRecord[RefType]]( - rec: OwnerType, val refMeta: MongoMetaRecord[RefType] -) extends MongoRefListField[OwnerType, RefType, Int](rec) {} - -class LongRefListField[OwnerType <: BsonRecord[OwnerType], RefType <: MongoRecord[RefType]]( - rec: OwnerType, val refMeta: MongoMetaRecord[RefType] -) extends MongoRefListField[OwnerType, RefType, Long](rec) {} diff --git a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/ObjectIdField.scala b/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/ObjectIdField.scala deleted file mode 100644 index 98e610fb15..0000000000 --- a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/ObjectIdField.scala +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright 2010-2017 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package mongodb -package record -package field - -import java.util.Date - -import net.liftweb.common.{Box, Empty, Failure, Full} -import net.liftweb.http.S -import net.liftweb.http.js.JE.{JsNull, JsRaw} -import net.liftweb.http.js.JsExp -import net.liftweb.json._ -import net.liftweb.record._ -import net.liftweb.util.Helpers._ -import org.bson.types.ObjectId - -trait ObjectIdTypedField[OwnerType <: BsonRecord[OwnerType]] extends TypedField[ObjectId] with Field[ObjectId, OwnerType] { - - def setFromAny(in: Any): Box[ObjectId] = in match { - case oid: ObjectId => setBox(Full(oid)) - case Some(oid: ObjectId) => setBox(Full(oid)) - case Full(oid: ObjectId) => setBox(Full(oid)) - case (oid: ObjectId) :: _ => setBox(Full(oid)) - case s: String => setFromString(s) - case Some(s: String) => setFromString(s) - case Full(s: String) => setFromString(s) - case null|None|Empty => setBox(defaultValueBox) - case f: Failure => setBox(f) - case o => setFromString(o.toString) - } - - def setFromJValue(jvalue: JValue): Box[ObjectId] = jvalue match { - case JNothing | JNull if optional_? => setBox(Empty) - case JObject(JField("$oid", JString(s)) :: Nil) => setFromString(s) - case JString(s) => setFromString(s) - case other => setBox(FieldHelpers.expectedA("JObject", other)) - } - - def setFromString(in: String): Box[ObjectId] = { - if (ObjectId.isValid(in)) { - setBox (Full(new ObjectId(in))) - } else { - setBox(Failure(s"Invalid ObjectId string: $in")) - } - } - - private def elem = - S.fmapFunc(S.SFuncHolder(this.setFromAny(_))) { funcName => - s.toString) openOr ""} - tabindex={tabIndex.toString}/> - } - - def toForm = uniqueFieldId match { - case Full(id) => Full(elem % ("id" -> id)) - case _ => Full(elem) - } - - def asJs: JsExp = asJValue match { - case JNothing => JsNull - case jv => JsRaw(compactRender(jv)) - } - - def asJValue: JValue = valueBox.map(v => JsonObjectId.asJValue(v, owner.meta.formats)) openOr (JNothing: JValue) - -} - -class ObjectIdField[OwnerType <: BsonRecord[OwnerType]](@deprecatedName('rec) val owner: OwnerType) - extends MandatoryTypedField[ObjectId] with ObjectIdTypedField[OwnerType] { - - def this(owner: OwnerType, value: ObjectId) = { - this(owner) - setBox(Full(value)) - } - - def defaultValue = new ObjectId - - def createdAt: Date = this.get.getDate - -} - -class OptionalObjectIdField[OwnerType <: BsonRecord[OwnerType]](val owner: OwnerType) - extends OptionalTypedField[ObjectId] with ObjectIdTypedField[OwnerType] { - - def this(owner: OwnerType, value: Box[ObjectId]) = { - this(owner) - setBox(value) - } - -} diff --git a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/PatternField.scala b/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/PatternField.scala deleted file mode 100644 index e251c8b14a..0000000000 --- a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/PatternField.scala +++ /dev/null @@ -1,96 +0,0 @@ -/* -* Copyright 2010-2020 WorldWide Conferencing, LLC -* -* Licensed under the Apache License, Version 2.0 (the "License"); -* you may not use this file except in compliance with the License. -* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 -* Unless required by applicable law or agreed to in writing, software -* distributed under the License is distributed on an "AS IS" BASIS, -* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-* See the License for the specific language governing permissions and -* limitations under the License. -*/ - -package net.liftweb -package mongodb -package record -package field - -import java.util.regex.Pattern - -import net.liftweb.common.{Box, Empty, Failure, Full} -import net.liftweb.http.js.JE.{JsNull, Str} -import net.liftweb.json._ -import net.liftweb.record.{Field, FieldHelpers, MandatoryTypedField, OptionalTypedField, Record} -import net.liftweb.util.Helpers.tryo - -import scala.xml.NodeSeq - -abstract class PatternTypedField[OwnerType <: Record[OwnerType]](val owner: OwnerType) extends Field[Pattern, OwnerType] { - def setFromAny(in: Any): Box[Pattern] = in match { - case p: Pattern => setBox(Full(p)) - case Some(p: Pattern) => setBox(Full(p)) - case Full(p: Pattern) => setBox(Full(p)) - case (p: Pattern) :: _ => setBox(Full(p)) - case s: String => setFromString(s) - case Some(s: String) => setFromString(s) - case Full(s: String) => setFromString(s) - case null|None|Empty => setBox(defaultValueBox) - case f: Failure => setBox(f) - case o => { - setFromString(o.toString) - } - } - - def setFromJValue(jvalue: JValue): Box[Pattern] = jvalue match { - case JNothing|JNull if optional_? => setBox(Empty) - case JObject(JField("$regex", JString(s)) :: JField("$flags", JInt(f)) :: Nil) => - setBox(Full(Pattern.compile(s, f.intValue))) - case other => setBox(FieldHelpers.expectedA("JObject", other)) - } - - // parse String into a JObject - def setFromString(in: String): Box[Pattern] = tryo(JsonParser.parse(in)) match { - case Full(jv: JValue) => setFromJValue(jv) - case f: Failure => setBox(f) - case other => setBox(Failure("Error parsing String into a JValue: "+in)) - } - - def toForm: Box[NodeSeq] = Empty - - def asJs = asJValue match { - case JNothing => JsNull - case jv => Str(compactRender(jv)) - } - - def asJValue: JValue = valueBox.map(v => JsonRegex(v)) openOr (JNothing: JValue) - - override def equals(other: Any): Boolean = other match { - case that: PatternTypedField[OwnerType] => - (that.valueBox, this.valueBox) match { - case (Full(a), Full(b)) => - a.pattern == b.pattern && - a.flags == b.flags - case _ => - that.valueBox == this.valueBox - } - case _ => - false - } -} - -class PatternField[OwnerType <: Record[OwnerType]](@deprecatedName('rec) owner: OwnerType) extends PatternTypedField[OwnerType](owner) with MandatoryTypedField[Pattern] { - def this(owner: OwnerType, value: Pattern) = { - this(owner) - setBox(Full(value)) - } - - def defaultValue = Pattern.compile("") -} - -class OptionalPatternField[OwnerType <: Record[OwnerType]](owner: OwnerType) extends PatternTypedField[OwnerType](owner) with OptionalTypedField[Pattern] { - def this(owner: OwnerType, value: Box[Pattern]) = { - this(owner) - setBox(value) - } -} diff --git a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/UUIDField.scala b/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/UUIDField.scala deleted file mode 100644 index 90fcf19619..0000000000 --- a/persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/UUIDField.scala +++ /dev/null @@ -1,98 +0,0 @@ -/* -* Copyright 2010-2020 WorldWide Conferencing, LLC -* -* Licensed under the Apache License, Version 2.0 (the "License"); -* you may not use this file except in compliance with the License. 
-* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 -* Unless required by applicable law or agreed to in writing, software -* distributed under the License is distributed on an "AS IS" BASIS, -* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -* See the License for the specific language governing permissions and -* limitations under the License. -*/ - -package net.liftweb -package mongodb -package record -package field - -import java.util.UUID - -import net.liftweb.common.{Box, Empty, Failure, Full} -import net.liftweb.http.S -import net.liftweb.http.js.JE.{JsNull, JsRaw} -import net.liftweb.http.js.JsExp -import net.liftweb.json._ -import net.liftweb.record._ -import net.liftweb.util.Helpers._ - -import scala.xml.NodeSeq - -trait UUIDTypedField[OwnerType <: Record[OwnerType]] extends TypedField[UUID] with Field[UUID, OwnerType] { - def setFromAny(in: Any): Box[UUID] = in match { - case uid: UUID => setBox(Full(uid)) - case Some(uid: UUID) => setBox(Full(uid)) - case Full(uid: UUID) => setBox(Full(uid)) - case (uid: UUID) :: _ => setBox(Full(uid)) - case s: String => setFromString(s) - case Some(s: String) => setFromString(s) - case Full(s: String) => setFromString(s) - case null|None|Empty => setBox(defaultValueBox) - case f: Failure => setBox(f) - case o => setFromString(o.toString) - } - - def setFromJValue(jvalue: JValue): Box[UUID] = jvalue match { - case JNothing|JNull if optional_? => setBox(Empty) - case JObject(JField("$uuid", JString(s)) :: Nil) => setFromString(s) - case other => setBox(FieldHelpers.expectedA("JObject", other)) - } - - def setFromString(in: String): Box[UUID] = tryo(UUID.fromString(in)) match { - case Full(uid: UUID) => setBox(Full(uid)) - case f: Failure => setBox(f) - case _ => setBox(Failure(s"Invalid UUID string: $in")) - } - - private[this] def elem = S.fmapFunc(S.SFuncHolder(this.setFromAny(_))) { funcName => - v.toString) openOr ""} - tabindex={tabIndex.toString}/> - } - - def toForm: Box[NodeSeq] = uniqueFieldId match { - case Full(id) => Full(elem % ("id" -> id)) - case _ => Full(elem) - } - - def asJs: JsExp = asJValue match { - case JNothing => JsNull - case jv => JsRaw(compactRender(jv)) - } - - def asJValue: JValue = valueBox.map(v => JsonUUID(v)) openOr (JNothing: JValue) -} - -class UUIDField[OwnerType <: Record[OwnerType]](@deprecatedName('rec) val owner: OwnerType) - extends UUIDTypedField[OwnerType] with MandatoryTypedField[UUID] { - - def this(owner: OwnerType, value: UUID) = { - this(owner) - setBox(Full(value)) - } - - def defaultValue = UUID.randomUUID - -} - -class OptionalUUIDField[OwnerType <: Record[OwnerType]](val owner: OwnerType) - extends UUIDTypedField[OwnerType] with OptionalTypedField[UUID] { - - def this(owner: OwnerType, value: Box[UUID]) = { - this(owner) - setBox(value) - } - -} - diff --git a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/BsonRecordSpec.scala b/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/BsonRecordSpec.scala deleted file mode 100644 index 3cdb72f262..0000000000 --- a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/BsonRecordSpec.scala +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright 2010-2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb -package record - -import org.specs2.mutable.Specification - -class BsonRecordSpec extends Specification with MongoTestKit { - "BsonRecordSpec Specification".title - - import fixtures._ - import testmodels._ - - override def before = { - super.before - checkMongoIsRunning - } - - "BsonRecord" should { - "compare properly with set values" in { - - val subRec = SubSubRecord.createRecord.name("subrecord") - val subRec2 = SubSubRecord.createRecord.name("subrecord") - - (subRec == subRec2) must_== true - - subRec2.name("subrecord2") - - (subRec == subRec2) must_== false - - } - - "compare properly with default values" in { - val subRec = SubSubRecord.createRecord - val subRec2 = SubSubRecord.createRecord - - (subRec == subRec2) must_== true - } - } -} diff --git a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/CustomSerializersSpec.scala b/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/CustomSerializersSpec.scala deleted file mode 100644 index a0c6d937fe..0000000000 --- a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/CustomSerializersSpec.scala +++ /dev/null @@ -1,416 +0,0 @@ -/* - * Copyright 2010-2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package mongodb -package record - -import common._ -import field._ -import http.js.JE._ -import http.{LiftSession, S} -import json.JsonAST._ -import util.Helpers._ - -import java.util.{Calendar, Date} - -import org.bson.types.ObjectId -import org.specs2.mutable.Specification - -import net.liftweb.record.field._ -import scala.xml.{Elem, NodeSeq} -import util.Helpers - -package customserializersspecs { - -case class Child(name: String, birthdate: Date) extends JsonObject[Child] { - def meta = Child -} - -object Child extends JsonObjectMeta[Child] - -/* -* Date as String -*/ -class Person extends MongoRecord[Person] with ObjectIdPk[Person] { - def meta = Person - - object children extends JsonObjectListField(this, Child) - - object firstBorn extends JsonObjectField(this, Child) { - def defaultValue = Child("", now) - } - -} - -object Person extends Person with MongoMetaRecord[Person] - -/* -* Date as Date -*/ -class Person2 extends MongoRecord[Person2] with ObjectIdPk[Person2] { - def meta = Person2 - - object children extends JsonObjectListField(this, Child) - - object firstBorn extends JsonObjectField(this, Child) { - def defaultValue = Child("", now) - } - -} - -object Person2 extends Person2 with MongoMetaRecord[Person2] { - override def formats = allFormats -} - -class Player extends MongoRecord[Player] with ObjectIdPk[Player] { - def meta = Player - - object name extends StringField(this, 256) - -} - -object Player extends Player with MongoMetaRecord[Player] - -/* -* ObjectId as String -*/ -case class Team(id: String, name: String, qb: String) extends JsonObject[Team] { - def meta = Team -} - -object Team extends JsonObjectMeta[Team] - -class League extends MongoRecord[League] with ObjectIdPk[League] { - def meta = League - - object teams extends JsonObjectListField(this, Team) - - object champion extends JsonObjectField(this, Team) { - def defaultValue = Team("", "", "") - } - -} - -object League extends League with MongoMetaRecord[League] - -/* -* ObjectId as ObjectId -*/ -case class Team2(id: ObjectId, name: String, qb: ObjectId) extends JsonObject[Team2] { - def meta = Team2 -} - -object Team2 extends JsonObjectMeta[Team2] - -class League2 extends MongoRecord[League2] with ObjectIdPk[League2] { - def meta = League2 - - object teams extends JsonObjectListField(this, Team2) - - object champion extends JsonObjectField(this, Team2) { - def defaultValue = Team2(ObjectId.get, "", ObjectId.get) - } - -} - -object League2 extends League2 with MongoMetaRecord[League2] { - override def formats = super.formats + new ObjectIdSerializer -} - -object WeekDay extends Enumeration { - type WeekDay = Value - val Mon, Tue, Wed, Thu, Fri, Sat, Sun = Value -} - -class EnumRec extends MongoRecord[EnumRec] with ObjectIdPk[EnumRec] { - def meta = EnumRec - - object dow extends EnumField(this, WeekDay) - -} - -object EnumRec extends EnumRec with MongoMetaRecord[EnumRec] { - override def collectionName = "enumrecs" -} - -} - - -/** - * Systems under specification for CustomSerializers. 
- */ -class CustomSerializersSpec extends Specification with MongoTestKit { - "CustomSerializers Specification".title - - import customserializersspecs._ - - "CustomSerializers" should { - "handle Date as String value in JsonObjects" in { - checkMongoIsRunning - - // test data - val bdjack = Calendar.getInstance.setTimezone(utc) - bdjack.setTimeInMillis(1288742280000L) - val bdjill = Calendar.getInstance.setTimezone(utc) - bdjill.setTimeInMillis(1288742880000L) - val jack = Child("Jack", bdjack.getTime) - val jill = Child("Jill", bdjill.getTime) - - // create and save a Person record - val mother = Person.createRecord - mother.children(List(jack, jill)) - mother.firstBorn(jack) - mother.save - - // retrieve it and compare - val mother2 = Person.find(mother.id.get) - mother2.isDefined must_== true - mother2 foreach { - m => - m.children.value mustEqual mother.children.value - m.firstBorn.value mustEqual mother.firstBorn.value - } - - // check the conversion functions - /* - mother.children.asJs mustEqual JsArray( - JsObj(("name", Str("Jack")), ("birthdate", Str("2010-11-02T23:58:00.000Z"))), - JsObj(("name", Str("Jill")), ("birthdate", Str("2010-11-03T00:08:00.000Z"))) - )*/ - - mother.children.asJValue mustEqual JArray(List( - JObject(List( - JField("name", JString("Jack")), - JField("birthdate", JString("2010-11-02T23:58:00.000Z")) - )), - JObject(List( - JField("name", JString("Jill")), - JField("birthdate", JString("2010-11-03T00:08:00.000Z")))) - )) - mother.children.toForm must beEmpty - /* - mother.firstBorn.asJs mustEqual - JsObj(("name", Str("Jack")), ("birthdate", Str("2010-11-02T23:58:00.000Z"))) - */ - mother.firstBorn.asJValue mustEqual - JObject(List( - JField("name", JString("Jack")), - JField("birthdate", JString("2010-11-02T23:58:00.000Z")) - )) - mother.firstBorn.toForm must beEmpty - } - - "handle Date as Date value in JsonObjects using DateSerializer" in { - checkMongoIsRunning - - // test data - val bdjack = Calendar.getInstance.setTimezone(utc) - bdjack.setTimeInMillis(1288742280000L) - val bdjill = Calendar.getInstance.setTimezone(utc) - bdjill.setTimeInMillis(1288742880000L) - val jack = Child("Jack", bdjack.getTime) - val jill = Child("Jill", bdjill.getTime) - - // create and save a Person record - val mother = Person2.createRecord - mother.children(List(jack, jill)) - mother.firstBorn(jack) - mother.save() - - // retrieve it and compare - val mother2 = Person2.find(mother.id.get) - mother2.isDefined must_== true - mother2 foreach { - m => - m.children.value mustEqual mother.children.value - m.firstBorn.value mustEqual mother.firstBorn.value - } - - // check the conversion functions - /* - mother.children.asJs mustEqual JsArray( - JsObj(("name", Str("Jack")), ("birthdate", JsObj(("$dt", Str("2010-11-02T23:58:00.000Z"))))), - JsObj(("name", Str("Jill")), ("birthdate", JsObj(("$dt", Str("2010-11-03T00:08:00.000Z"))))) - )*/ - - mother.children.asJValue mustEqual JArray(List( - JObject(List( - JField("name", JString("Jack")), - JField("birthdate", JObject(List(JField("$dt", JString("2010-11-02T23:58:00.000Z"))))) - )), - JObject(List( - JField("name", JString("Jill")), - JField("birthdate", JObject(List(JField("$dt", JString("2010-11-03T00:08:00.000Z"))))) - )) - )) - mother.children.toForm must beEmpty - - /* - mother.firstBorn.asJs mustEqual - JsObj(("name", Str("Jack")), ("birthdate", JsObj(("$dt", Str("2010-11-02T23:58:00.000Z"))))) - */ - - mother.firstBorn.asJValue mustEqual - JObject(List( - JField("name", JString("Jack")), - JField("birthdate", 
JObject(List(JField("$dt", JString("2010-11-02T23:58:00.000Z"))))) - )) - mother.firstBorn.toForm must beEmpty - } - - "handle ObjectId as String value in JsonObjects" in { - checkMongoIsRunning - - // test data - val rmoss = Player.createRecord.name("Randy Moss").save() - val bfavre = Player.createRecord.name("Brett Favre").save() - val vikes = Team(ObjectId.get.toString, "Vikings", bfavre.id.toString) - val jets = Team(ObjectId.get.toString, "Jets", "") - val saints = Team(ObjectId.get.toString, "Saints", "") - - // create and save a League record - val nfl = League.createRecord - nfl.teams(List(vikes, jets, saints)) - nfl.champion(saints) - nfl.save() - - // retrieve it and compare - val nfl2 = League.find(nfl.id.get) - nfl2.isDefined must_== true - nfl2 foreach { - l => - l.teams.value mustEqual nfl.teams.value - l.champion.value mustEqual nfl.champion.value - } - - // find a player - val vqb = Player.find(vikes.qb) - vqb.isDefined must_== true - vqb foreach { - p => - p.name.value mustEqual "Brett Favre" - } - - // check the conversion functions - nfl.id.asJs mustEqual Str(nfl.id.value.toString) - nfl.id.asJValue mustEqual JString(nfl.id.value.toString) - val session = new LiftSession("", randomString(20), Empty) - val formPattern = - S.initIfUninitted(session) { - val form = nfl.id.toForm - form.isDefined must_== true - form foreach { - fprime => - val f = ("* [name]" #> ".*" & "select *" #> (((ns: NodeSeq) => ns.filter { - case e: Elem => e.attribute("selected").map(_.text) == Some("selected") - case _ => false - }) andThen "* [value]" #> ".*"))(fprime) - val ret: Boolean = Helpers.compareXml(f, formPattern) - ret must_== true - } - } - - // check the setFrom* functions - val nflid = ObjectId.get - nfl.id.setFromString(nflid.toString) - nfl.id.value mustEqual nflid - - nfl.id.setFromString("garbage") - nfl.id.valueBox mustEqual Failure("Invalid ObjectId string: garbage") - - nfl.id.setFromJValue(JString(nflid.toString)) - nfl.id.value mustEqual nflid - - nfl.id.setFromAny(nflid) - nfl.id.value mustEqual nflid - - nfl.id.setFromAny(nflid.toString) - nfl.id.value mustEqual nflid - - } - - "handle ObjectId as ObjectId values in JsonObjects using ObjectIdSerializer" in { - checkMongoIsRunning - - // test data - val rmoss = Player.createRecord.name("Randy Moss").save() - val bfavre = Player.createRecord.name("Brett Favre").save() - val vikes = Team2(ObjectId.get, "Vikings", bfavre.id.get) - val jets = Team2(ObjectId.get, "Jets", bfavre.id.get) - val saints = Team2(ObjectId.get, "Saints", bfavre.id.get) - - // create and save a League record - val nfl = League2.createRecord - nfl.teams(List(vikes, jets, saints)) - nfl.champion(saints) - nfl.save() - - // retrieve it and compare - val nfl2 = League2.find(nfl.id.toString) - nfl2.isDefined must_== true - nfl2 foreach { - l => - l.teams.value mustEqual nfl.teams.value - l.champion.value mustEqual nfl.champion.value - } - - // find a player - val vqb = Player.find(vikes.qb) - vqb.isDefined must_== true - vqb foreach { - p => - p.name.value mustEqual "Brett Favre" - } - - // check the conversion functions - nfl.id.asJs.toJsCmd mustEqual """{"$oid":"%s"}""".format(nfl.id.value.toString) - nfl.id.asJValue mustEqual JObject(List(JField("$oid", JString(nfl.id.value.toString)))) - val session = new LiftSession("", randomString(20), Empty) - val formPattern = - S.initIfUninitted(session) { - val form = nfl.id.toForm - form.isDefined must_== true - form foreach { - fprime => - val f = ("* [name]" #> ".*" & "select *" #> (((ns: NodeSeq) => ns.filter { 
- case e: Elem => e.attribute("selected").map(_.text) == Some("selected") - case _ => false - }) andThen "* [value]" #> ".*"))(fprime) - val ret: Boolean = Helpers.compareXml(f, formPattern) - ret must_== true - } - } - - // check the setFrom* functions - val nflid = ObjectId.get - nfl.id.setFromString(nflid.toString) - nfl.id.value mustEqual nflid - - nfl.id.setFromString("garbage") - nfl.id.valueBox mustEqual Failure("Invalid ObjectId string: garbage") - - nfl.id.setFromJValue(JObject(List(JField("$oid", JString(nflid.toString))))) - nfl.id.value mustEqual nflid - - nfl.id.setFromAny(nflid) - nfl.id.value mustEqual nflid - - nfl.id.setFromAny(nflid.toString) - nfl.id.value mustEqual nflid - } - } -} diff --git a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/Fixtures.scala b/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/Fixtures.scala deleted file mode 100644 index 135ec67b08..0000000000 --- a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/Fixtures.scala +++ /dev/null @@ -1,345 +0,0 @@ -/* - * Copyright 2010-2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb -package record -package fixtures - -import field._ - -import common._ -import json._ -import json.ext.{EnumSerializer, JsonBoxSerializer} -import http.SHtml -import util.{FieldError, Helpers} - -import java.math.MathContext -import java.util.{Date, UUID} -import java.util.regex.Pattern -import scala.xml.Text - -import net.liftweb.mongodb.record.testmodels._ -import net.liftweb.record._ -import net.liftweb.record.field._ -import net.liftweb.record.field.joda._ - -import org.bson.types.ObjectId -import org.joda.time.DateTime - -trait HarnessedLifecycleCallbacks extends LifecycleCallbacks { - this: BaseField => - - var beforeValidationHarness: () => Unit = () => () - override def beforeValidation = beforeValidationHarness() - var afterValidationHarness: () => Unit = () => () - override def afterValidation = afterValidationHarness() - - var beforeSaveHarness: () => Unit = () => () - override def beforeSave = beforeSaveHarness() - var beforeCreateHarness: () => Unit = () => () - override def beforeCreate = beforeCreateHarness() - var beforeUpdateHarness: () => Unit = () => () - override def beforeUpdate = beforeUpdateHarness() - - var afterSaveHarness: () => Unit = () => () - override def afterSave = afterSaveHarness() - var afterCreateHarness: () => Unit = () => () - override def afterCreate = afterCreateHarness() - var afterUpdateHarness: () => Unit = () => () - override def afterUpdate = afterUpdateHarness() - - var beforeDeleteHarness: () => Unit = () => () - override def beforeDelete = beforeDeleteHarness() - var afterDeleteHarness: () => Unit = () => () - override def afterDelete = afterDeleteHarness() -} - -class FieldTypeTestRecord private () extends MongoRecord[FieldTypeTestRecord] with ObjectIdPk[FieldTypeTestRecord] { - def meta = FieldTypeTestRecord - - object 
mandatoryBooleanField extends BooleanField(this) - object legacyOptionalBooleanField extends BooleanField(this) { override def optional_? = true } - object optionalBooleanField extends OptionalBooleanField(this) - - object mandatoryCountryField extends CountryField(this) - object legacyOptionalCountryField extends CountryField(this) { override def optional_? = true } - object optionalCountryField extends OptionalCountryField(this) - - /* - object mandatoryDateTimeField extends DateTimeField(this) - object legacyOptionalDateTimeField extends DateTimeField(this) { override def optional_? = true } - object optionalDateTimeField extends OptionalDateTimeField(this) - */ - - object mandatoryDecimalField extends DecimalField(this, MathContext.UNLIMITED, 2) - object legacyOptionalDecimalField extends DecimalField(this, MathContext.UNLIMITED, 2) { override def optional_? = true } - object optionalDecimalField extends OptionalDecimalField(this, MathContext.UNLIMITED, 2) - - object mandatoryDoubleField extends DoubleField(this) - object legacyOptionalDoubleField extends DoubleField(this) { override def optional_? = true } - object optionalDoubleField extends OptionalDoubleField(this) - - object mandatoryEmailField extends EmailField(this, 100) - object legacyOptionalEmailField extends EmailField(this, 100) { override def optional_? = true } - object optionalEmailField extends OptionalEmailField(this, 100) - - object mandatoryEnumField extends EnumField(this, MyTestEnum) - object legacyOptionalEnumField extends EnumField(this, MyTestEnum) { override def optional_? = true } - object optionalEnumField extends OptionalEnumField(this, MyTestEnum) - - object mandatoryIntField extends IntField(this) - object legacyOptionalIntField extends IntField(this) { override def optional_? = true } - object optionalIntField extends OptionalIntField(this) - - object mandatoryLocaleField extends LocaleField(this) - object legacyOptionalLocaleField extends LocaleField(this) { override def optional_? = true } - object optionalLocaleField extends OptionalLocaleField(this) - - object mandatoryLongField extends LongField(this) - object legacyOptionalLongField extends LongField(this) { override def optional_? = true } - object optionalLongField extends OptionalLongField(this) - - // FIXME would be nice to have some of these PostalCode fields depend on an OptionalCountryField, but the type sig of - // PostalCodeField does not yet allow it. - object mandatoryPostalCodeField extends PostalCodeField(this, mandatoryCountryField) - object legacyOptionalPostalCodeField extends PostalCodeField(this, mandatoryCountryField) { override def optional_? = true } - object optionalPostalCodeField extends OptionalPostalCodeField(this, mandatoryCountryField) - - object mandatoryStringField extends StringField(this, 100) - object legacyOptionalStringField extends StringField(this, 100) { override def optional_? = true } - object optionalStringField extends OptionalStringField(this, 100) - - object mandatoryTextareaField extends TextareaField(this, 100) - object legacyOptionalTextareaField extends TextareaField(this, 100) { override def optional_? = true } - object optionalTextareaField extends OptionalTextareaField(this, 100) - - object mandatoryTimeZoneField extends TimeZoneField(this) - object legacyOptionalTimeZoneField extends TimeZoneField(this) { override def optional_? 
= true } - object optionalTimeZoneField extends OptionalTimeZoneField(this) - - object mandatoryJodaTimeField extends JodaTimeField(this) - object legacyOptionalJodaTimeField extends JodaTimeField(this) { override def optional_? = true } - object optionalJodaTimeField extends OptionalJodaTimeField(this) -} - -object FieldTypeTestRecord extends FieldTypeTestRecord with MongoMetaRecord[FieldTypeTestRecord] - -class BinaryFieldTestRecord extends MongoRecord[BinaryFieldTestRecord] with IntPk[BinaryFieldTestRecord] { - def meta = BinaryFieldTestRecord - - object mandatoryBinaryField extends BinaryField(this) { - // compare the elements of the Array - override def equals(other: Any): Boolean = other match { - case that: BinaryField[_] => - this.value.zip(that.value).filter(t => t._1 != t._2).length == 0 - case _ => false - } - } - object legacyOptionalBinaryField extends BinaryField(this) { - override def optional_? = true - // compare the elements of the Array - override def equals(other: Any): Boolean = other match { - case that: BinaryField[_] => (this.valueBox, that.valueBox) match { - case (Empty, Empty) => true - case (Full(a), Full(b)) => - a.zip(b).filter(t => t._1 != t._2).length == 0 - case _ => false - } - case _ => false - } - } - object optionalBinaryField extends OptionalBinaryField(this) { - // compare the elements of the Array - override def equals(other: Any): Boolean = other match { - case that: OptionalBinaryField[_] => (this.valueBox, that.valueBox) match { - case (Empty, Empty) => true - case (Full(a), Full(b)) => - a.zip(b).filter(t => t._1 != t._2).length == 0 - case _ => false - } - case _ => false - } - } - - override def equals(other: Any): Boolean = other match { - case that:BinaryFieldTestRecord => - this.id.value == that.id.value && - this.mandatoryBinaryField == that.mandatoryBinaryField && - this.legacyOptionalBinaryField == that.legacyOptionalBinaryField && - this.optionalBinaryField == that.optionalBinaryField - case _ => false - } -} -object BinaryFieldTestRecord extends BinaryFieldTestRecord with MongoMetaRecord[BinaryFieldTestRecord] - - -case class TypeTestJsonObject( - intField: Int, - stringField: String, - mapField: Map[String, String] -) extends JsonObject[TypeTestJsonObject] -{ - // TODO: Add more types - def meta = TypeTestJsonObject -} -object TypeTestJsonObject extends JsonObjectMeta[TypeTestJsonObject] - -class DBRefTestRecord private () extends MongoRecord[DBRefTestRecord] with ObjectIdPk[DBRefTestRecord] { - def meta = DBRefTestRecord -} -object DBRefTestRecord extends DBRefTestRecord with MongoMetaRecord[DBRefTestRecord] - -class MongoFieldTypeTestRecord private () extends MongoRecord[MongoFieldTypeTestRecord] with ObjectIdPk[MongoFieldTypeTestRecord] { - def meta = MongoFieldTypeTestRecord - - object mandatoryDateField extends DateField(this) - object optionalDateField extends OptionalDateField(this) - object legacyOptionalDateField extends DateField(this) { override def optional_? = true } - - - object mandatoryJsonObjectField extends JsonObjectField(this, TypeTestJsonObject) { - def defaultValue = TypeTestJsonObject(0, "", Map[String, String]()) - } - object optionalJsonObjectField extends OptionalJsonObjectField(this, TypeTestJsonObject) - object legacyOptionalJsonObjectField extends JsonObjectField(this, TypeTestJsonObject) { - override def optional_? 
= true - def defaultValue = TypeTestJsonObject(0, "", Map[String, String]()) - } - - object mandatoryObjectIdField extends ObjectIdField(this) - object optionalObjectIdField extends OptionalObjectIdField(this) - object legacyOptionalObjectIdField extends ObjectIdField(this) { override def optional_? = true } - - object mandatoryUUIDField extends UUIDField(this) - object optionalUUIDField extends OptionalUUIDField(this) - object legacyOptionalUUIDField extends UUIDField(this) { override def optional_? = true } - - object mandatoryCaseClassField extends CaseClassField[MongoFieldTypeTestRecord, CaseClassTestObject](this) { - override def formats = owner.meta.formats - } - object optionalCaseClassField extends OptionalCaseClassField[MongoFieldTypeTestRecord, CaseClassTestObject](this) { - override def formats = owner.meta.formats - } -} - -object MongoFieldTypeTestRecord extends MongoFieldTypeTestRecord with MongoMetaRecord[MongoFieldTypeTestRecord] { - override def formats = allFormats + new EnumSerializer(MyTestEnum) -} - -class PatternFieldTestRecord private () extends MongoRecord[PatternFieldTestRecord] with ObjectIdPk[PatternFieldTestRecord] { - def meta = PatternFieldTestRecord - - object mandatoryPatternField extends PatternField(this) - object optionalPatternField extends OptionalPatternField(this) - object legacyOptionalPatternField extends PatternField(this) { override def optional_? = true } -} - -object PatternFieldTestRecord extends PatternFieldTestRecord with MongoMetaRecord[PatternFieldTestRecord] { - override def formats = allFormats -} - -class PasswordTestRecord private () extends MongoRecord[PasswordTestRecord] with ObjectIdPk[PasswordTestRecord] { - def meta = PasswordTestRecord - - object password extends MongoPasswordField(this, 3) -} -object PasswordTestRecord extends PasswordTestRecord with MongoMetaRecord[PasswordTestRecord] - -class LifecycleTestRecord private () - extends MongoRecord[LifecycleTestRecord] - with ObjectIdPk[LifecycleTestRecord] -{ - def meta = LifecycleTestRecord - - def foreachCallback(f: LifecycleCallbacks => Any): Unit = - meta.foreachCallback(this, f) - - object stringFieldWithCallbacks extends StringField(this, 100) with HarnessedLifecycleCallbacks -} - -object LifecycleTestRecord extends LifecycleTestRecord with MongoMetaRecord[LifecycleTestRecord] - -case class JsonObj(id: String, name: String) extends JsonObject[JsonObj] { - def meta = JsonObj -} -object JsonObj extends JsonObjectMeta[JsonObj] - -class NullTestRecord private () extends MongoRecord[NullTestRecord] with IntPk[NullTestRecord] { - - def meta = NullTestRecord - - object nullstring extends StringField(this, 32) { - override def optional_? 
= true - } - - object jsonobj extends JsonObjectField[NullTestRecord, JsonObj](this, JsonObj) { - def defaultValue = JsonObj("1", null) - } - object jsonobjlist extends JsonObjectListField[NullTestRecord, JsonObj](this, JsonObj) -} - -object NullTestRecord extends NullTestRecord with MongoMetaRecord[NullTestRecord] - -case class BoxTestJsonObj(id: String, boxEmpty: Box[String], boxFull: Box[String], boxFail: Box[String]) -extends JsonObject[BoxTestJsonObj] { - def meta = BoxTestJsonObj -} -object BoxTestJsonObj extends JsonObjectMeta[BoxTestJsonObj] - -class BoxTestRecord private () extends MongoRecord[BoxTestRecord] with LongPk[BoxTestRecord] { - def meta = BoxTestRecord - - object jsonobj extends JsonObjectField[BoxTestRecord, BoxTestJsonObj](this, BoxTestJsonObj) { - def defaultValue = BoxTestJsonObj("0", Empty, Full("Full String"), Failure("Failure")) - } - object jsonobjlist extends JsonObjectListField[BoxTestRecord, BoxTestJsonObj](this, BoxTestJsonObj) - -} -object BoxTestRecord extends BoxTestRecord with MongoMetaRecord[BoxTestRecord] { - override def formats = super.formats + new JsonBoxSerializer -} - -/* - * MongoRefFields - */ -class RefFieldTestRecord private () extends MongoRecord[RefFieldTestRecord] with ObjectIdPk[RefFieldTestRecord] { - def meta = RefFieldTestRecord - - object mandatoryObjectIdRefField extends ObjectIdRefField(this, FieldTypeTestRecord) - object mandatoryUUIDRefField extends UUIDRefField(this, ListTestRecord) - object mandatoryStringRefField extends StringRefField(this, MapTestRecord, 100) - object mandatoryIntRefField extends IntRefField(this, NullTestRecord) - object mandatoryLongRefField extends LongRefField(this, BoxTestRecord) - - object mandatoryObjectIdRefListField extends ObjectIdRefListField(this, FieldTypeTestRecord) - object mandatoryUUIDRefListField extends UUIDRefListField(this, ListTestRecord) - object mandatoryStringRefListField extends StringRefListField(this, MapTestRecord) - object mandatoryIntRefListField extends IntRefListField(this, NullTestRecord) - object mandatoryLongRefListField extends LongRefListField(this, BoxTestRecord) -} - -object RefFieldTestRecord extends RefFieldTestRecord with MongoMetaRecord[RefFieldTestRecord] { - override def formats = allFormats -} - -class CustomFieldName private () extends MongoRecord[CustomFieldName] with ObjectIdPk[CustomFieldName] { - def meta = CustomFieldName - - object customField extends StringField(this, 256) -} - -object CustomFieldName extends CustomFieldName with MongoMetaRecord[CustomFieldName] diff --git a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/LegacyMongoClientSaveSpec.scala b/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/LegacyMongoClientSaveSpec.scala deleted file mode 100644 index 299c497bd3..0000000000 --- a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/LegacyMongoClientSaveSpec.scala +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright 2014-2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb -package record - -import net.liftweb.common._ -import net.liftweb.record.field._ - -import org.specs2.mutable.Specification - -import com.mongodb._ - - -package legacymongoclientsaverecords { - - import field._ - - class SaveDoc private () extends MongoRecord[SaveDoc] with ObjectIdPk[SaveDoc] { - def meta = SaveDoc - - object name extends StringField(this, 12) - } - object SaveDoc extends SaveDoc with MongoMetaRecord[SaveDoc] { - import BsonDSL._ - - createIndex(("name" -> 1), true) // unique name - } -} - - -/** - * Systems under specification for LegacyMongoClientSave. - */ -class LegacyMongoClientSaveSpec extends Specification with MongoTestKit { - "LegacyMongoClientSave Specification".title - - import legacymongoclientsaverecords._ - - "MongoMetaRecord with Mongo save" in { - - checkMongoIsRunning - - val sd1 = SaveDoc.createRecord.name("MongoSession") - val sd2 = SaveDoc.createRecord.name("MongoSession") - val sd3 = SaveDoc.createRecord.name("MongoDB") - - // save to db - sd1.save() - sd2.save(false) // no exception thrown - sd2.save(true) must throwA[MongoException] - sd2.saveBox() must beLike { - case Failure(msg, _, _) => msg must contain("E11000") - } - sd3.save() - - success - } -} diff --git a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/LegacyMongoRecordExamplesSpec.scala b/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/LegacyMongoRecordExamplesSpec.scala deleted file mode 100644 index af092e9d01..0000000000 --- a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/LegacyMongoRecordExamplesSpec.scala +++ /dev/null @@ -1,522 +0,0 @@ -/* - * Copyright 2010-2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package mongodb -package record - -import java.util.{Calendar, Date, UUID} -import java.util.regex.Pattern - -import net.liftweb.common.{Box, Empty, Failure, Full} -import net.liftweb.json.DefaultFormats -import net.liftweb.json.JsonDSL._ -import net.liftweb.record.field._ -import net.liftweb.util.TimeHelpers._ -import net.liftweb.mongodb.record.field._ - -import org.specs2.mutable.Specification - -import com.mongodb._ -import org.bson.types.ObjectId -import http.{S, LiftSession} - - -package legacymongotestrecords { - - import field._ - - class TstRecord private () extends MongoRecord[TstRecord] with UUIDPk[TstRecord] { - - def meta = TstRecord - - object booleanfield extends BooleanField(this) - object datetimefield extends DateTimeField(this) - object doublefield extends DoubleField(this) - object emailfield extends EmailField(this, 220) - object intfield extends IntField(this) - object localefield extends LocaleField(this) - object longfield extends LongField(this) - object passwordfield extends MongoPasswordField(this) - object stringfield extends StringField(this, 32) - object timezonefield extends TimeZoneField(this) - object patternfield extends PatternField(this) - object datefield extends DateField(this) - - // JsonObjectField (requires a definition for defaultValue) - object person extends JsonObjectField[TstRecord, Person](this, Person) { - def defaultValue = Person("", 0, Address("", ""), Nil) - } - } - - object TstRecord extends TstRecord with MongoMetaRecord[TstRecord] - - case class Address(street: String, city: String) - case class Child(name: String, age: Int, birthdate: Option[Date]) - - case class Person(name: String, age: Int, address: Address, children: List[Child]) - extends JsonObject[Person] { - def meta = Person - } - - object Person extends JsonObjectMeta[Person] - - class MainDoc private () extends MongoRecord[MainDoc] with ObjectIdPk[MainDoc] { - def meta = MainDoc - - object name extends StringField(this, 12) - object cnt extends IntField(this) - object refdocId extends ObjectIdRefField(this, RefDoc) - object refuuid extends UUIDRefField(this, RefUuidDoc) - } - object MainDoc extends MainDoc with MongoMetaRecord[MainDoc] - - class RefDoc private () extends MongoRecord[RefDoc] with ObjectIdPk[RefDoc] { - def meta = RefDoc - } - object RefDoc extends RefDoc with MongoMetaRecord[RefDoc] - - // uuid as id - class RefUuidDoc private () extends MongoRecord[RefUuidDoc] with UUIDPk[RefUuidDoc] { - def meta = RefUuidDoc - } - object RefUuidDoc extends RefUuidDoc with MongoMetaRecord[RefUuidDoc] - - class ListDoc private () extends MongoRecord[ListDoc] with ObjectIdPk[ListDoc] { - def meta = ListDoc - - import scala.collection.JavaConverters._ - - // standard list types - object name extends StringField(this, 10) - object stringlist extends MongoListField[ListDoc, String](this) - object intlist extends MongoListField[ListDoc, Int](this) - object doublelist extends MongoListField[ListDoc, Double](this) - object boollist extends MongoListField[ListDoc, Boolean](this) - object objidlist extends MongoListField[ListDoc, ObjectId](this) - object dtlist extends MongoListField[ListDoc, Date](this) - object patternlist extends MongoListField[ListDoc, Pattern](this) - object binarylist extends MongoListField[ListDoc, Array[Byte]](this) - - // specialized list types - object jsonobjlist extends JsonObjectListField(this, JsonDoc) - - // these require custom setFromDBObject methods - object maplist extends MongoListField[ListDoc, Map[String, 
String]](this) { - override def asDBObject: DBObject = { - val dbl = new BasicDBList - - value.foreach { - m => { - val dbo = new BasicDBObject - - m.keys.foreach(k => { - dbo.put(k.toString, m.getOrElse(k, "")) - }) - - dbl.add(dbo) - } - } - - dbl - } - - override def setFromDBObject(dbo: DBObject): Box[List[Map[String, String]]] = { - val lst: List[Map[String, String]] = - dbo.keySet.asScala.toList.map(dbo.get).collect { - case bdbo: BasicDBObject if bdbo.containsField("name") && bdbo.containsField("type") => - Map("name"-> bdbo.getString("name"), "type" -> bdbo.getString("type")) - } - Full(set(lst)) - } - } - - } - object ListDoc extends ListDoc with MongoMetaRecord[ListDoc] - - case class JsonDoc(id: String, name: String) extends JsonObject[JsonDoc] { - def meta = JsonDoc - } - object JsonDoc extends JsonObjectMeta[JsonDoc] - - class MapDoc private () extends MongoRecord[MapDoc] with ObjectIdPk[MapDoc] { - def meta = MapDoc - - object stringmap extends MongoMapField[MapDoc, String](this) - } - object MapDoc extends MapDoc with MongoMetaRecord[MapDoc] { - override def formats = DefaultFormats.lossless // adds .000 - } - - class OptionalDoc private () extends MongoRecord[OptionalDoc] with ObjectIdPk[OptionalDoc] { - def meta = OptionalDoc - // optional fields - object stringbox extends StringField(this, 32) { - override def optional_? = true - override def defaultValue = "nothin" - } - } - object OptionalDoc extends OptionalDoc with MongoMetaRecord[OptionalDoc] - - class StrictDoc private () extends MongoRecord[StrictDoc] with ObjectIdPk[StrictDoc] { - def meta = StrictDoc - object name extends StringField(this, 32) - } - object StrictDoc extends StrictDoc with MongoMetaRecord[StrictDoc] { - - import net.liftweb.json.JsonDSL._ - - createIndex(("name" -> 1), true) // unique name - } -} - - -/** - * Systems under specification for LegacyMongoRecordExamples. 
- */ -class LegacyMongoRecordExamplesSpec extends Specification with MongoTestKit { - "LegacyMongoRecordExamples Specification".title - - import legacymongotestrecords._ - import net.liftweb.util.TimeHelpers._ - - val session = new LiftSession("hello", "", Empty) - "TstRecord example" in { - - checkMongoIsRunning - - S.initIfUninitted(session) { - - val pwd = "test" - val cal = Calendar.getInstance - cal.set(2009, 10, 2) - - val tr = TstRecord.createRecord - tr.stringfield("test record string field") - tr.emailfield("test") - tr.validate.size must_== 2 - tr.passwordfield.setPassword(pwd) - tr.emailfield("test@example.com") - tr.datetimefield(cal) - tr.patternfield(Pattern.compile("^Mo", Pattern.CASE_INSENSITIVE)) - tr.validate.size must_== 0 - - // JsonObjectField - val dob1 = Calendar.getInstance.setYear(2005).setMonth(7).setDay(4) - val per = Person("joe", 27, Address("Bulevard", "Helsinki"), List(Child("Mary", 5, Some(dob1.getTime)), Child("Mazy", 3, None))) - tr.person(per) - - // save the record in the db - tr.save() - - // retrieve from db - def fromDb = TstRecord.find("_id", tr.id.value) - - fromDb.isDefined must_== true - - for (t <- fromDb) { - t.id.value must_== tr.id.value - t.booleanfield.value must_== tr.booleanfield.value - TstRecord.formats.dateFormat.format(t.datetimefield.value.getTime) must_== - TstRecord.formats.dateFormat.format(tr.datetimefield.value.getTime) - t.doublefield.value must_== tr.doublefield.value - t.intfield.value must_== tr.intfield.value - t.localefield.value must_== tr.localefield.value - t.longfield.value must_== tr.longfield.value - t.passwordfield.isMatch(pwd) must_== true - t.stringfield.value must_== tr.stringfield.value - t.timezonefield.value must_== tr.timezonefield.value - t.datetimefield.value must_== tr.datetimefield.value - t.patternfield.value.pattern must_== tr.patternfield.value.pattern - t.patternfield.value.flags must_== tr.patternfield.value.flags - t.datefield.value must_== tr.datefield.value - t.person.value.name must_== tr.person.value.name - t.person.value.age must_== tr.person.value.age - t.person.value.address.street must_== tr.person.value.address.street - t.person.value.address.city must_== tr.person.value.address.city - t.person.value.children.size must_== tr.person.value.children.size - for (i <- List.range(0, t.person.value.children.size-1)) { - t.person.value.children(i).name must_== tr.person.value.children(i).name - t.person.value.children(i).age must_== tr.person.value.children(i).age - t.person.value.children(i).birthdate must_== tr.person.value.children(i).birthdate - } - } - - if (!debug) TstRecord.drop - } - - success - } - - "Ref example" in { - - checkMongoIsRunning - - val ref1 = RefDoc.createRecord - val ref2 = RefDoc.createRecord - - ref1.save() must_== ref1 - ref2.save() must_== ref2 - - val refUuid1 = RefUuidDoc.createRecord - val refUuid2 = RefUuidDoc.createRecord - - refUuid1.save() must_== refUuid1 - refUuid2.save() must_== refUuid2 - - val md1 = MainDoc.createRecord - val md2 = MainDoc.createRecord - val md3 = MainDoc.createRecord - val md4 = MainDoc.createRecord - - md1.name.set("md1") - md2.name.set("md2") - md3.name.set("md3") - md4.name.set("md4") - - md1.refdocId.set(ref1.id.get) - md2.refdocId.set(ref1.id.get) - md3.refdocId.set(ref2.id.get) - md4.refdocId.set(ref2.id.get) - - md1.refuuid.set(refUuid1.id.get) - md2.refuuid.set(refUuid1.id.get) - md3.refuuid.set(refUuid2.id.get) - md4.refuuid.set(refUuid2.id.get) - - md1.save() must_== md1 - md2.save() must_== md2 - md3.save() must_== md3 - 
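// Editor's note (illustrative aside, not part of the deleted spec): the Person/Child/Address values
// used in the TstRecord example above are case classes stored through JsonObject/JsonObjectMeta,
// which is a thin layer over lift-json extraction. The sketch below shows that underlying round trip
// with Extraction directly; the object name is hypothetical and the sample values are placeholders,
// assuming only lift-json's DefaultFormats.
import java.util.Date
import net.liftweb.json._

case class Address(street: String, city: String)
case class Child(name: String, age: Int, birthdate: Option[Date])
case class Person(name: String, age: Int, address: Address, children: List[Child])

object JsonObjectRoundTripSketch extends App {
  implicit val formats: Formats = DefaultFormats

  val per = Person("joe", 27, Address("Bulevard", "Helsinki"),
                   List(Child("Mary", 5, None), Child("Mazy", 3, None)))

  val jv: JValue   = Extraction.decompose(per)      // case class -> JValue
  val back: Person = Extraction.extract[Person](jv) // JValue -> case class

  println(compactRender(jv))
  assert(back == per)
}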
md4.save() must_== md4 - - MainDoc.count must_== 4 - RefDoc.count must_== 2 - - // get the docs back from the db - MainDoc.find(md1.id.get).foreach(m => { - m.name.value must_== md1.name.value - m.cnt.value must_== md1.cnt.value - m.refdocId.value must_== md1.refdocId.value - m.refuuid.value must_== md1.refuuid.value - }) - - // fetch a refdoc - val refFromFetch = md1.refdocId.obj - refFromFetch.isDefined must_== true - refFromFetch.openOrThrowException("we know this is Full").id.get must_== ref1.id.get - - // query for a single doc with a JObject query - val md1a = MainDoc.find(("name") -> "md1") - md1a.isDefined must_== true - md1a.foreach(o => o.id.get must_== md1.id.get) - - // query for a single doc with a k, v query - val md1b = MainDoc.find("_id", md1.id.get) - md1b.isDefined must_== true - md1b.foreach(o => o.id.get must_== md1.id.get) - - // query for a single doc with a Map query - val md1c = MainDoc.find(("name" -> "md1")) - md1c.isDefined must_== true - md1c.foreach(o => o.id.get must_== md1.id.get) - - // find all documents - MainDoc.findAll.size must_== 4 - RefDoc.findAll.size must_== 2 - - // find all documents with JObject query - val mdq1 = MainDoc.findAll(("name" -> "md1")) - mdq1.size must_== 1 - - // find all documents with $in query, sorted - val qry = ("name" -> ("$in" -> List("md1", "md2"))) - val mdq2 = MainDoc.findAll(qry, ("name" -> -1)) - mdq2.size must_== 2 - mdq2.head.id.get must_== md2.id.get - - // Find all documents using a k, v query - val mdq3 = MainDoc.findAll("_id", md1.id.get) - mdq3.size must_== 1 - - // find all documents with field selection - val mdq4 = MainDoc.findAll(("name" -> "md1"), ("name" -> 1), Empty) - mdq4.size must_== 1 - - // Upsert - this should add a new row - val md5 = MainDoc.createRecord - md5.name.set("md5") - md5.refdocId.set(ref1.id.get) - MainDoc.update(("name" -> "nothing"), md5, Upsert) - MainDoc.findAll.size must_== 5 - - // modifier operations $inc, $set, $push... - val o2 = (("$inc" -> ("cnt" -> 1)) ~ ("$set" -> ("name" -> "md1a"))) - MainDoc.update(("name" -> "md1"), o2) - // get the doc back from the db and compare - val mdq5 = MainDoc.find("_id", md1.id.get) - mdq5.isDefined must_== true - mdq5.map ( m => { - m.name.value must_== "md1a" - m.cnt.value must_== 1 - }) - - if (!debug) { - // delete them - md1.delete_! - md2.delete_! - md3.delete_! - md4.delete_! - md5.delete_! - ref1.delete_! - ref2.delete_! 
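// Editor's note (illustrative aside, not part of the deleted spec): the queries, sorts and update
// modifiers passed to find/findAll/update in the Ref example above are ordinary lift-json values
// built with JsonDSL. A minimal standalone sketch of those same shapes, with no MongoDB connection
// involved; the object name is hypothetical and the field values are the ones from the example.
import net.liftweb.json._
import net.liftweb.json.JsonDSL._

object QueryShapeSketch extends App {
  val byName:   JObject = ("name" -> "md1")                          // simple equality query
  val inQuery:  JObject = ("name" -> ("$in" -> List("md1", "md2")))  // $in query
  val sortDesc: JObject = ("name" -> -1)                             // sort specifier
  val modifier: JObject = ("$inc" -> ("cnt" -> 1)) ~ ("$set" -> ("name" -> "md1a"))

  List(byName, inQuery, sortDesc, modifier).foreach(q => println(compactRender(q)))
}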
- - MainDoc.findAll.size must_== 0 - - MainDoc.drop - RefDoc.drop - } - - success - } - - "List example" in { - checkMongoIsRunning - - val ref1 = RefDoc.createRecord - val ref2 = RefDoc.createRecord - - ref1.save() must_== ref1 - ref2.save() must_== ref2 - - val name = "ld1" - val strlist = List("string1", "string2", "string3", "string1") - val jd1 = JsonDoc("1", "jsondoc1") - - val ld1 = ListDoc.createRecord - ld1.name.set(name) - ld1.stringlist.set(strlist) - ld1.intlist.set(List(99988,88, 88)) - ld1.doublelist.set(List(997655.998,88.8)) - ld1.boollist.set(List(true,true,false)) - ld1.objidlist.set(List(ObjectId.get, ObjectId.get)) - ld1.dtlist.set(List(now, now)) - ld1.jsonobjlist.set(List(jd1, JsonDoc("2", "jsondoc2"), jd1)) - ld1.patternlist.set(List(Pattern.compile("^Mongo"), Pattern.compile("^Mongo2"))) - ld1.maplist.set(List(Map("name" -> "map1", "type" -> "map"), Map("name" -> "map2", "type" -> "map"))) - ld1.binarylist.set(List[Array[Byte]]("foo".getBytes(), "bar".getBytes())) - - ld1.save() must_== ld1 - - val qld1 = ListDoc.find(ld1.id.get) - - qld1.isDefined must_== true - - qld1.foreach { l => - l.name.value must_== ld1.name.value - l.stringlist.value must_== ld1.stringlist.value - l.intlist.value must_== ld1.intlist.value - l.doublelist.value must_== ld1.doublelist.value - l.boollist.value must_== ld1.boollist.value - l.objidlist.value must_== ld1.objidlist.value - l.dtlist.value must_== ld1.dtlist.value - l.jsonobjlist.value must_== ld1.jsonobjlist.value - for (i <- List.range(0, l.patternlist.value.size-1)) { - l.patternlist.value(i).pattern must_== ld1.patternlist.value(i).pattern - } - l.maplist.value must_== ld1.maplist.value - for (i <- List.range(0, l.jsonobjlist.value.size-1)) { - l.jsonobjlist.value(i).id must_== ld1.jsonobjlist.value(i).id - l.jsonobjlist.value(i).name must_== ld1.jsonobjlist.value(i).name - } - for { - orig <- ld1.binarylist.value.headOption - queried <- l.binarylist.value.headOption - } new String(orig) must_== new String(queried) - } - - if (!debug) { - ListDoc.drop - RefDoc.drop - } - - success - } - - "Map Example" in { - - checkMongoIsRunning - - val md1 = MapDoc.createRecord - md1.stringmap.set(Map("h" -> "hola")) - - md1.save() must_== md1 - - md1.delete_! 
- - if (!debug) MapDoc.drop - - success - } - - "Optional Example" in { - - checkMongoIsRunning - - val od1 = OptionalDoc.createRecord - od1.stringbox.valueBox must_== Empty - od1.save() must_== od1 - - OptionalDoc.find(od1.id.get).foreach { - od1FromDB => - od1FromDB.stringbox.valueBox must_== od1.stringbox.valueBox - } - - - val od2 = OptionalDoc.createRecord - od1.stringbox.valueBox must_== Empty - od2.stringbox.set("aloha") - od2.save() must_== od2 - - OptionalDoc.find(od2.id.get).foreach { - od2FromDB => - od2FromDB.stringbox.valueBox must_== od2.stringbox.valueBox - } - - if (!debug) OptionalDoc.drop - - success - } - - "Strict Example" in { - - checkMongoIsRunning - - val sd1 = StrictDoc.createRecord.name("sd1") - val sd2 = StrictDoc.createRecord.name("sd1") - - sd1.save(true) must_== sd1 - sd2.save(true) must throwA[MongoException] - - sd1.save() - - sd2.name("sd2") - sd2.save(true) must_== sd2 - - if (!debug) StrictDoc.drop - - success - } -} diff --git a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/MongoAsyncTestKit.scala b/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/MongoAsyncTestKit.scala deleted file mode 100644 index 4bab4774e4..0000000000 --- a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/MongoAsyncTestKit.scala +++ /dev/null @@ -1,137 +0,0 @@ -/* - * Copyright 2010-2017 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package mongodb -package record - -import util.{ConnectionIdentifier, DefaultConnectionIdentifier, Props} - -import scala.collection.JavaConverters._ -import scala.concurrent.{Await, Promise} -import scala.concurrent.duration._ -import java.util.concurrent.TimeoutException - -import org.specs2.mutable.Specification -import org.specs2.specification.BeforeAfterEach - -import org.bson.Document - -import com.mongodb.Block -import com.mongodb.async.SingleResultCallback -import com.mongodb.async.client.{MongoClients, MongoDatabase} - -// The sole mongo object for testing async -object TestMongoAsync { - val mongo = { - val uri = Props.get("mongo.test.uri", "127.0.0.1:27017") - MongoClients.create(s"mongodb://$uri") - } - - class SingleResultCallbackF[A]() extends SingleResultCallback[A] { - private[this] val p = Promise[A]() - - override def onResult(result: A, error: Throwable): Unit = { - Option(error) match { - case None => - p.success(result) - case Some(t) => - p.failure(t) - } - } - - def future = p.future - } - - lazy val isMongoRunning: Boolean = - try { - val res = mongo.listDatabaseNames - val cb = new SingleResultCallbackF[Void]() - - res.forEach( - new Block[String]() { - override def apply(name: String): Unit = { } - }, - cb - ) - - // this will throw an exception if it can't connect to the db - Await.result(cb.future, Duration(2000, MILLISECONDS)) - true - } catch { - case _: TimeoutException => - false - } -} - -trait MongoAsyncTestKit extends Specification with BeforeAfterEach { - sequential - - protected def dbName = "lift_record_"+this.getClass.getName - .replace("$", "") - .replace("net.liftweb.mongodb.record.", "") - .replace(".", "_") - .toLowerCase - - // If you need more than one db, override this - protected def dbs: List[(ConnectionIdentifier, String)] = - (DefaultConnectionIdentifier, dbName) :: Nil - - def debug: Boolean = false - - def before = { - // define the dbs - dbs.foreach { case (id, db) => - MongoAsync.defineDb(id, TestMongoAsync.mongo.getDatabase(db)) - MongoDB.defineDb(id, TestMongo.mongo, db) - } - } - - def checkMongoIsRunning = { - TestMongoAsync.isMongoRunning must beEqualTo(true).orSkip - TestMongo.isMongoRunning must beEqualTo(true).orSkip - } - - def after = { - if (!debug && TestMongoAsync.isMongoRunning) { - val cb = new SingleResultCallback[Void] { - override def onResult(result: Void, t: Throwable) = { } - } - // drop the databases - dbs.foreach { case (id, _) => - MongoAsync.use(id) { _.drop(cb) } - } - } - - // clear the mongo instances - dbs.foreach { case (id, _) => - MongoAsync.remove(id) - } - - if (!debug && TestMongo.isMongoRunning) { - // drop the databases - dbs.foreach { case (id, _) => - MongoDB.use(id) { db => db.dropDatabase } - } - } - - // clear the mongo instances - dbs.foreach { case (id, _) => - MongoDB.remove(id) - } - } -} - diff --git a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/MongoFieldSpec.scala b/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/MongoFieldSpec.scala deleted file mode 100644 index ae190f741a..0000000000 --- a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/MongoFieldSpec.scala +++ /dev/null @@ -1,756 +0,0 @@ -/* - * Copyright 2006-2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb -package record - -import java.util.{Calendar, Date, UUID} -import java.util.regex.Pattern - -import org.bson.types.ObjectId -import org.specs2.mutable._ -import org.specs2.specification._ -import org.specs2.execute.AsResult -import org.joda.time.DateTime -import common._ -import mongodb.BsonDSL._ -import util.Helpers.randomString -import http.{LiftSession, S} -import http.js.JE._ -import http.js.JsExp -import net.liftweb.record._ -import common.Box._ - -import scala.xml.{Elem, NodeSeq, Text} -import util.{FieldError, Helpers} -import Helpers._ -import net.liftweb.json.JsonAST._ -import org.bson.Document -import org.bson.types.ObjectId - -/** - * Systems under specification for MongoField. - */ -class MongoFieldSpec extends Specification with MongoTestKit with AroundEach { - "MongoField Specification".title - sequential - - import fixtures._ - import testmodels._ - - lazy val session = new LiftSession("", randomString(20), Empty) - - // One of these is for specs2 2.x, the other for specs2 1.x - protected def around[T : AsResult](t: =>T) = S.initIfUninitted(session) { AsResult(t) } - protected def around[T <% org.specs2.execute.Result](t: =>T) = S.initIfUninitted(session) { t } - - def passBasicTests[A]( - example: A, - example2: A, - mandatory: MandatoryTypedField[A], - optionalBox: Box[OptionalTypedField[A]], - legacyOptionalBox: Box[MandatoryTypedField[A]], - canCheckDefaultValues: Boolean = true - )(implicit m: scala.reflect.Manifest[A]): Unit = { - - def commonBehaviorsForAll(field: TypedField[A]) = { - "which are only flagged as dirty_? when setBox is called with a different value" in { - field.clear - field match { - case owned: OwnedField[_] => owned.owner.runSafe { - field.resetDirty - } - case _ => field.resetDirty - } - field.dirty_? must_== false - val valueBox = field.valueBox - field.setBox(valueBox) - field.dirty_? must_== false - val exampleBox = Full(example) - (valueBox === exampleBox) must_== false - field.setBox(exampleBox) - field.dirty_? must_== true - val exampleBox2 = Full(example2) - (exampleBox === exampleBox2) must_== false - field.setBox(exampleBox2) - field.dirty_? 
must_== true - field.setBox(valueBox) - success - } - } - - def commonBehaviorsForMandatory(field: MandatoryTypedField[A]) = { - commonBehaviorsForAll(field) - - "which have the correct initial value" in { - field.value must be_==(field.defaultValue).when(canCheckDefaultValues) - field.valueBox must be_==(field.defaultValueBox).when(canCheckDefaultValues) - } - - "which are readable and writable" in { - field.set(example) - field.value must_== example - field.valueBox must_== Full(example) - field.clear - field.value must_!= example - field.valueBox must_!= Full(example) - field.setBox(Full(example)) - field.value must_== example - field.valueBox must_== Full(example) - } - - "which correctly clear back to the default" in { - { field.clear; field.valueBox } must be_==(field.defaultValueBox).when(canCheckDefaultValues) - } - - "which capture error conditions set in" in { - // FIXME: This needs to be rearranged just so that it doesn't foul with subsequent examples - // field.setBox(Failure("my failure")) - // Failure("my failure") must_== Failure("my failure") - pending - } - - - } - - "support mandatory fields" in { - "which are configured correctly" in { - mandatory.optional_? must_== false - } - - "which initialize to some value" in { - mandatory.valueBox.isDefined must_== true - } - - "common behaviors for all MandatoryTypedField fields" in { - commonBehaviorsForMandatory(mandatory) - } - - "which correctly fail to be set to Empty" in { - mandatory.valueBox.isDefined must_== true - mandatory.setBox(Empty) - mandatory.valueBox must beLike { case Failure(s, _, _) => s must_== mandatory.notOptionalErrorMessage } - } - } - - legacyOptionalBox map { legacyOptional => - "support 'legacy' optional fields (override optional_?)" in { - "which are configured correctly" in { - legacyOptional.optional_? must_== true - } - - "which initialize to Empty" in { - legacyOptional.valueBox must_== Empty - } - - "common behaviors for all MandatoryTypedField fields" in { - commonBehaviorsForMandatory(legacyOptional) - } - - "which do not fail when set to Empty" in { - legacyOptional.set(example) - legacyOptional.value must_== example - legacyOptional.valueBox must_== Full(example) - legacyOptional.clear - if (canCheckDefaultValues) { - legacyOptional.value must_== legacyOptional.defaultValue - legacyOptional.valueBox must_== legacyOptional.defaultValueBox - } - legacyOptional.set(example) - legacyOptional.value must_== example - legacyOptional.valueBox must_== Full(example) - legacyOptional.setBox(Empty) - if (canCheckDefaultValues) { - legacyOptional.value must_== legacyOptional.defaultValue - legacyOptional.valueBox must_== legacyOptional.defaultValueBox - } - success - } - } - } - - optionalBox map { optional => - "support optional fields" in { - commonBehaviorsForAll(optional) - - "which are configured correctly" in { - optional.optional_? 
must_== true - } - - "which initialize to Empty" in { - optional.valueBox must_== Empty - } - - "which have the correct initial value" in { - optional.valueBox must be_== (optional.defaultValueBox) when canCheckDefaultValues - optional.value must beNone when canCheckDefaultValues - } - - "which are readable and writable with box values" in { - optional.setBox(Full(example)) - optional.valueBox must_== Full(example) - optional.value must beSome(example) - optional.clear - optional.valueBox must be (Empty) - optional.value must beNone - optional.set(Some(example)) - optional.valueBox must_== Full(example) - } - - "which correctly clear back to the default box value" in { - { optional.clear; optional.value } must beNone when canCheckDefaultValues - - { optional.clear; optional.valueBox } must be_== (optional.defaultValueBox) when canCheckDefaultValues - } - - "which capture error conditions set in" in { - // FIXME: This needs to be rearranged just so that it doesn't foul with subsequent examples - // field.setBox(Failure("my failure")) - // Failure("my failure") must_== Failure("my failure") - pending - } - - "which do not fail when set to Empty" in { - optional.setBox(Empty) - optional.value must beNone when canCheckDefaultValues - optional.valueBox must be_== (Empty) when canCheckDefaultValues - success - } - } - } - } - - def passConversionTests[A](example: A, mandatory: MandatoryTypedField[A], jsexp: JsExp, jvalue: JValue, formPattern: Box[NodeSeq], canCheckSetFromJValue: Boolean = true) = { - - /* - "convert to JsExp" in { - mandatory.set(example) - mandatory.asJs mustEqual jsexp - }*/ - - "convert to JValue" in { - mandatory.set(example) - mandatory.asJValue mustEqual jvalue - } - - if (canCheckSetFromJValue) { - "get set from JValue" in { - mandatory.setFromJValue(jvalue) mustEqual Full(example) - mandatory.value mustEqual example - } - } - - "convert to form XML" in { - formPattern foreach { fp => - mandatory.set(example) - val session = new LiftSession("", randomString(20), Empty) - S.initIfUninitted(session) { - val formXml = mandatory.toForm - formXml.isDefined must_== true - formXml foreach { fprime => - val f = ("* [name]" #> ".*" & "select *" #> (((ns: NodeSeq) => ns.filter { - case e: Elem => e.attribute("selected").map(_.text) == Some("selected") - case _ => false - }) andThen "* [value]" #> ".*"))(fprime) - val ret: Boolean = Helpers.compareXml(f, fp) - - ret must_== true - } - } - } - success - } - } - - "DateField" should { - val rec = MongoFieldTypeTestRecord.createRecord - val now = new Date - val nowStr = rec.meta.formats.dateFormat.format(now) - val now2 = Calendar.getInstance() - now2.add(Calendar.DATE, 1) - passBasicTests(now, now2.getTime, rec.mandatoryDateField, Full(rec.optionalDateField), Full(rec.legacyOptionalDateField), false) - passConversionTests( - now, - rec.mandatoryDateField, - JsObj(("$dt", Str(nowStr))), - JObject(List(JField("$dt", JString(nowStr)))), - Full() - ) - } - - "JsonObjectField" should { - val rec = MongoFieldTypeTestRecord.createRecord - val ttjo = TypeTestJsonObject(1, "jsonobj1", Map("x" -> "a")) - val ttjo2 = TypeTestJsonObject(2, "jsonobj2", Map("x" -> "b")) - val json = ("intField" -> 1) ~ ("stringField" -> "jsonobj1") ~ ("mapField" -> (("x" -> "a"))) - passBasicTests(ttjo, ttjo2, rec.mandatoryJsonObjectField, Full(rec.optionalJsonObjectField), Full(rec.legacyOptionalJsonObjectField)) - passConversionTests( - ttjo, - rec.mandatoryJsonObjectField, - new JsExp { - def toJsCmd = compactRender(json) - }, - json, - Empty - ) - } - - 
"ObjectIdField" should { - // The extra `in` here is required for compilation, or we get a strange ambiguous overload warning. - "work and provide the appropriate date" in { - val rec = MongoFieldTypeTestRecord.createRecord - val oid = ObjectId.get - val oid2 = ObjectId.get - passBasicTests(oid, oid2, rec.mandatoryObjectIdField, Full(rec.optionalObjectIdField), Full(rec.legacyOptionalObjectIdField), false) - passConversionTests( - oid, - rec.mandatoryObjectIdField, - JsObj(("$oid", oid.toString)), - JObject(List(JField("$oid", JString(oid.toString)))), - Full() - ) - rec.mandatoryObjectIdField(oid) - - oid.getDate must_== rec.mandatoryObjectIdField.createdAt - } - } - - "PatternField" should { - val rec = PatternFieldTestRecord.createRecord - val ptrn = Pattern.compile("^Mo", Pattern.CASE_INSENSITIVE) - val ptrn2 = Pattern.compile("^MON", Pattern.CASE_INSENSITIVE) - passBasicTests(ptrn, ptrn2, rec.mandatoryPatternField, Full(rec.optionalPatternField), Full(rec.legacyOptionalPatternField), false) - passConversionTests( - ptrn, - rec.mandatoryPatternField, - JsObj(("$regex", Str(ptrn.toString)), ("$flags", Num(2))), - JObject(List(JField("$regex", JString(ptrn.toString)), JField("$flags", JInt(2)))), - Empty, - false - ) - } - - "UUIDField" should { - val rec = MongoFieldTypeTestRecord.createRecord - val uuid = UUID.randomUUID - val uuid2 = UUID.randomUUID - passBasicTests(uuid, uuid2, rec.mandatoryUUIDField, Full(rec.optionalUUIDField), Full(rec.legacyOptionalUUIDField), false) - passConversionTests( - uuid, - rec.mandatoryUUIDField, - JsObj(("$uuid", Str(uuid.toString))), - JObject(List(JField("$uuid", JString(uuid.toString)))), - Full() - ) - } - - "PasswordField" should { - "require a nonempty password" in { - val rec = PasswordTestRecord.createRecord - rec.password.setPassword("") - rec.validate must_== ( - FieldError(rec.password, Text(S.?("password.must.be.set"))) :: - Nil - ) - } - - "require at least 3 character password" in { - val rec = PasswordTestRecord.createRecord - rec.password.setPassword("ab") - rec.validate must_== ( - FieldError(rec.password, Text(S.?("password.too.short"))) :: - Nil - ) - } - } - - "MongoListField (String)" should { - "function correctly" in { - val rec = ListTestRecord.createRecord - val lst = List("abc", "def", "ghi") - val lst2 = List("ab", "de", "gh") - passBasicTests(lst, lst2, rec.mandatoryStringListField, Empty, Empty) - passConversionTests( - lst, - rec.mandatoryStringListField, - JsArray(Str("abc"), Str("def"), Str("ghi")), - JArray(List(JString("abc"), JString("def"), JString("ghi"))), - Empty - ) - } - } - - "MongoListField (Int)" should { - "function correctly" in { - val rec = ListTestRecord.createRecord - val lst = List(4, 5, 6) - val lst2 = List(1, 2, 3) - passBasicTests(lst, lst2, rec.mandatoryIntListField, Empty, Empty) - passConversionTests( - lst, - rec.mandatoryIntListField, - JsArray(Num(4), Num(5), Num(6)), - JArray(List(JInt(4), JInt(5), JInt(6))), - Empty - ) - } - } - - "MongoListField (ObjectId)" should { - "function correctly" in { - val rec = MongoListTestRecord.createRecord - val oid1 = ObjectId.get - val oid2 = ObjectId.get - val oid3 = ObjectId.get - val oid4 = ObjectId.get - val oid5 = ObjectId.get - val oid6 = ObjectId.get - val lst = List(oid1, oid2, oid3) - val lst2 = List(oid4, oid5, oid6) - passBasicTests(lst, lst2, rec.objectIdRefListField, Empty, Empty) - passConversionTests( - lst, - rec.objectIdRefListField, - JsArray(Str(oid1.toString), Str(oid2.toString), Str(oid3.toString)), - JArray(List( - 
JObject(List(JField("$oid", JString(oid1.toString)))), - JObject(List(JField("$oid", JString(oid2.toString)))), - JObject(List(JField("$oid", JString(oid3.toString)))) - )), - Empty - ) - } - } - - "MongoListField (Pattern)" should { - "function correctly" in { - val rec = MongoListTestRecord.createRecord - val ptrn1 = Pattern.compile("^Mo", Pattern.CASE_INSENSITIVE) - val ptrn2 = Pattern.compile("^MON", Pattern.CASE_INSENSITIVE) - val ptrn3 = Pattern.compile("^TUE") - val ptrn4 = Pattern.compile("^WED") - val lst1 = List(ptrn1, ptrn2) - val lst2 = List(ptrn3, ptrn4) - passBasicTests(lst1, lst2, rec.patternListField, Empty, Empty) - passConversionTests( - lst1, - rec.patternListField, - JsArray(Str(ptrn1.toString), Str(ptrn2.toString)), - JArray(List( - JsonRegex(ptrn1), - JsonRegex(ptrn2) - )), - Empty, - false - ) - } - } - - "MongoListField (Date)" should { - "function correctly" in { - val rec = MongoListTestRecord.createRecord - val dt1 = new Date - val dt2 = new Date - val dt3 = new Date - val dt4 = new Date - val dt5 = new Date - val dt6 = new Date - val lst = List(dt1, dt2, dt3) - val lst2 = List(dt4, dt5, dt6) - passBasicTests(lst, lst2, rec.dateListField, Empty, Empty) - passConversionTests( - lst, - rec.dateListField, - JsArray(Str(dt1.toString), Str(dt2.toString), Str(dt3.toString)), - JArray(List( - JsonDate(dt1)(MongoListTestRecord.formats), - JsonDate(dt2)(MongoListTestRecord.formats), - JsonDate(dt3)(MongoListTestRecord.formats) - )), - Empty - ) - } - } - - "MongoListField (UUID)" should { - "function correctly" in { - val rec = MongoListTestRecord.createRecord - val uuid1 = UUID.randomUUID - val uuid2 = UUID.randomUUID - val uuid3 = UUID.randomUUID - val uuid4 = UUID.randomUUID - val uuid5 = UUID.randomUUID - val uuid6 = UUID.randomUUID - val lst = List(uuid1, uuid2, uuid3) - val lst2 = List(uuid4, uuid5, uuid6) - passBasicTests(lst, lst2, rec.uuidListField, Empty, Empty) - passConversionTests( - lst, - rec.uuidListField, - JsArray(Str(uuid1.toString), Str(uuid2.toString), Str(uuid3.toString)), - JArray(List( - JsonUUID(uuid1), - JsonUUID(uuid2), - JsonUUID(uuid3) - )), - Empty - ) - } - } - - "MongoListField (DateTime)" should { - "function correctly" in { - val rec = MongoJodaListTestRecord.createRecord - val dt1 = new DateTime - val dt2 = new DateTime - val dt3 = new DateTime - val dt4 = new DateTime - val dt5 = new DateTime - val dt6 = new DateTime - val lst = List(dt1, dt2, dt3) - val lst2 = List(dt4, dt5, dt6) - passBasicTests(lst, lst2, rec.dateTimeListField, Empty, Empty) - passConversionTests( - lst, - rec.dateTimeListField, - JsArray(Str(dt1.toString), Str(dt2.toString), Str(dt3.toString)), - JArray(List( - JsonDate(dt1.toDate)(MongoListTestRecord.formats), - JsonDate(dt2.toDate)(MongoListTestRecord.formats), - JsonDate(dt3.toDate)(MongoListTestRecord.formats) - )), - Empty - ) - } - } - - "JsonObjectListField" should { - "function correctly" in { - val rec = ListTestRecord.createRecord - val lst = List(TypeTestJsonObject(1, "jsonobj1", Map("x" -> "1")), TypeTestJsonObject(2, "jsonobj2", Map("x" -> "2"))) - val lst2 = List(TypeTestJsonObject(3, "jsonobj3", Map("x" -> "3")), TypeTestJsonObject(4, "jsonobj4", Map("x" -> "4"))) - val json = List( - ("intField" -> 1) ~ ("stringField" -> "jsonobj1") ~ ("mapField" -> (("x" -> "1"))), - ("intField" -> 2) ~ ("stringField" -> "jsonobj2") ~ ("mapField" -> (("x" -> "2"))) - ) - passBasicTests(lst, lst2, rec.mandatoryJsonObjectListField, Empty, Empty) - passConversionTests( - lst, - rec.mandatoryJsonObjectListField, - new 
JsExp { - def toJsCmd = compactRender(json) - }, - json, - Empty - ) - } - } - - "CaseClassListField" should { - "setFromAny a List" in { - val rec = ListTestRecord.createRecord - val lst = List(CaseClassTestObject(1,"str1", MyTestEnum.THREE)) - rec.caseClassListField.setFromAny(lst) - rec.caseClassListField.value must_== lst - } - } - - "MongoMapField (String)" should { - "function correctly" in { - val rec = MapTestRecord.createRecord - val map = Map("a" -> "abc", "b" -> "def", "c" -> "ghi") - val map2 = Map("a" -> "ab", "b" -> "de", "c" -> "gh") - passBasicTests(map, map2, rec.mandatoryStringMapField, Empty, Empty) - passConversionTests( - map, - rec.mandatoryStringMapField, - JsObj(("a", Str("abc")), ("b", Str("def")), ("c", Str("ghi"))), - JObject(List( - JField("a", JString("abc")), - JField("b", JString("def")), - JField("c", JString("ghi")) - )), - Empty - ) - } - "set proper maxlength on form element" in { - val rec = MapTestRecord.createRecord - val session = new LiftSession("", randomString(20), Empty) - S.initIfUninitted(session) { - val maxLenAttribute: Box[String] = rec.id.toForm.map(f => (f \ "@maxlength").text) - maxLenAttribute must_== Full(rec.maxIdLength.toString) - } - } - } - - "MongoMapField (Int)" should { - "function correctly" in { - val rec = MapTestRecord.createRecord - val map = Map("a" -> 4, "b" -> 5, "c" -> 6) - val map2 = Map("a" -> 1, "b" -> 2, "c" -> 3) - passBasicTests(map, map2, rec.mandatoryIntMapField, Empty, Empty) - passConversionTests( - map, - rec.mandatoryIntMapField, - JsObj(("a", Num(4)), ("b", Num(5)), ("c", Num(6))), - JObject(List( - JField("a", JInt(4)), - JField("b", JInt(5)), - JField("c", JInt(6)) - )), - Empty - ) - } - } - - "MongoMapField" should { - "create itself from bson doc" in { - import scala.collection.JavaConverters._ - val rec = MapTestRecord.createRecord - val map = Map[String, AnyRef]("a" -> "4", "b" -> "5", "c" -> "6") - val doc = new Document(map.asJava) - rec.mandatoryStringMapField.setFromDocument(doc) - rec.mandatoryStringMapField.value must_== map - rec.mandatoryStringMapField.asDocument must_== doc - } - } - - "BsonRecordField" should { - "function correctly" in { - val rec = SubRecordTestRecord.createRecord - val subSubRec = SubSubRecord.createRecord.name("subsub") - val subRec = SubRecord.createRecord.name("subrecord").subsub(subSubRec) - val subRec2 = SubRecord.createRecord.name("subrecord2") - - val srJson = - ("name" -> "subrecord") ~ - ("subsub" -> ("name" -> "subsub")) ~ - ("subsublist" -> JArray(Nil)) ~ - ("when" -> ("$dt" -> rec.meta.formats.dateFormat.format(subRec.when.value))) ~ - ("slist" -> JArray(Nil)) ~ - ("smap" -> JObject(Nil)) ~ - ("oid" -> ("$oid" -> subRec.oid.value.toString)) ~ - ("pattern" -> - ("$regex" -> subRec.pattern.value.pattern) ~ - ("$flags" -> subRec.pattern.value.flags) - ) ~ - ("uuid" -> ("$uuid" -> subRec.uuid.value.toString)) - - val srJsExp = new JsExp { - def toJsCmd = compactRender(srJson) - } - - passBasicTests(subRec, subRec2, rec.mandatoryBsonRecordField, Full(rec.optioalBsonRecordField), Full(rec.legacyOptionalBsonRecordField), false) - passConversionTests( - subRec, - rec.mandatoryBsonRecordField, - srJsExp, - srJson, - Empty - ) - } - } - - "BsonRecordListField" should { - "function correctly" in { - val rec = SubRecordTestRecord.createRecord - val subSubRec = SubSubRecord.createRecord.name("subsub") - val lst = List(SubRecord.createRecord.name("subrec1").subsub(subSubRec), SubRecord.createRecord.name("subrec2").subsub(subSubRec)) - val lst2 = 
List(SubRecord.createRecord.name("subrec3"), SubRecord.createRecord.name("subrec4")) - val sr1Json = - ("name" -> "subrec1") ~ - ("subsub" -> ("name" -> "subsub")) ~ - ("subsublist" -> JArray(Nil)) ~ - ("when" -> ("$dt" -> rec.meta.formats.dateFormat.format(lst(0).when.value))) ~ - ("slist" -> JArray(Nil)) ~ - ("smap" -> JObject(Nil)) ~ - ("oid" -> ("$oid" -> lst(0).oid.value.toString)) ~ - ("pattern" -> - ("$regex" -> lst(0).pattern.value.pattern) ~ - ("$flags" -> lst(0).pattern.value.flags) - ) ~ - ("uuid" -> ("$uuid" -> lst(0).uuid.value.toString)) - - val sr2Json = - ("name" -> "subrec2") ~ - ("subsub" -> ("name" -> "subsub")) ~ - ("subsublist" -> JArray(Nil)) ~ - ("when" -> ("$dt" -> rec.meta.formats.dateFormat.format(lst(1).when.value))) ~ - ("slist" -> JArray(Nil)) ~ - ("smap" -> JObject(Nil)) ~ - ("oid" -> ("$oid" -> lst(1).oid.value.toString)) ~ - ("pattern" -> - ("$regex" -> lst(1).pattern.value.pattern) ~ - ("$flags" -> lst(1).pattern.value.flags) - ) ~ - ("uuid" -> ("$uuid" -> lst(1).uuid.value.toString)) - - val sr1JsExp = new JsExp { - def toJsCmd = compactRender(sr1Json) - } - val sr2JsExp = new JsExp { - def toJsCmd = compactRender(sr2Json) - } - - passBasicTests(lst, lst2, rec.mandatoryBsonRecordListField, Empty, Full(rec.legacyOptionalBsonRecordListField)) - passConversionTests( - lst, - rec.mandatoryBsonRecordListField, - JsArray(sr1JsExp, sr2JsExp), - JArray(List(sr1Json, sr2Json)), - Empty - ) - } - } - - "JObjectField" should { - val jo: JValue = ("minutes" -> 59) - val json: JObject = ("mandatoryJObjectField" -> jo) - - "convert to JValue" in { - val rec = JObjectFieldTestRecord.createRecord - .mandatoryJObjectField(json) - - rec.mandatoryJObjectField.asJValue must_== json - - } - "get set from JValue" in { - val rec = JObjectFieldTestRecord.createRecord - val recFromJson = rec.mandatoryJObjectField.setFromJValue(json) - - recFromJson.isDefined must_== true - recFromJson foreach { r => - r must_== json - } - success - } - "get set from JValue after BSON roundtrip" in { - val joftrJson: JObject = ("_id" -> ("$oid" -> ObjectId.get.toString)) ~ ("mandatoryJObjectField" -> ("minutes" -> 59)) - val fromJsonBox = JObjectFieldTestRecord.fromJValue(joftrJson) - - fromJsonBox.isDefined must_== true - - fromJsonBox foreach { fromJson => - //Convert the test record, make a DBObject out of it, and make a record from that DBObject - val fromBson = JObjectFieldTestRecord.fromDBObject(fromJson.asDBObject) - fromBson.asJValue must_== fromJson.asJValue - } - success - } - } -} - diff --git a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/MongoRecordAsyncSpec.scala b/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/MongoRecordAsyncSpec.scala deleted file mode 100644 index 678654bd56..0000000000 --- a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/MongoRecordAsyncSpec.scala +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Copyright 2017-2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package mongodb -package record - -import org.specs2.mutable.Specification - -import org.specs2.concurrent.ExecutionEnv - -class MongoRecordAsyncSpec(implicit ee: ExecutionEnv) extends Specification with MongoAsyncTestKit { - "MongoRecord Async Specification".title - - import fixtures.FieldTypeTestRecord - - "MongoRecord Async" should { - - "insert asynchronously" in { - checkMongoIsRunning - - val obj = FieldTypeTestRecord.createRecord - .mandatoryLongField(42L) - .mandatoryIntField(27) - - FieldTypeTestRecord.insertAsync(obj) must beEqualTo[Boolean](true).await - - val fetched = FieldTypeTestRecord.find(obj.id.get) - - fetched.isDefined must_== true - - fetched.foreach { o => - o.id.get must_== obj.id.get - o.mandatoryLongField.get must_== 42L - o.mandatoryIntField.get must_== 27 - } - - success - } - - "replaceOne asynchronously" in { - checkMongoIsRunning - - val obj = FieldTypeTestRecord.createRecord - .mandatoryLongField(42L) - .mandatoryIntField(27) - - FieldTypeTestRecord.replaceOneAsync(obj) must beEqualTo[FieldTypeTestRecord](obj).await - - val fetched = FieldTypeTestRecord.find(obj.id.get) - - fetched.isDefined must_== true - - fetched.foreach { o => - o.id.get must_== obj.id.get - o.mandatoryLongField.get must_== 42L - o.mandatoryIntField.get must_== 27 - } - - obj - .mandatoryLongField(44L) - .mandatoryIntField(29) - - FieldTypeTestRecord.replaceOneAsync(obj) must beEqualTo[FieldTypeTestRecord](obj).await - - val fetched2 = FieldTypeTestRecord.find(obj.id.get) - - fetched2.isDefined must_== true - - fetched2.foreach { o => - o.id.get must_== obj.id.get - o.mandatoryLongField.get must_== 44L - o.mandatoryIntField.get must_== 29 - } - - success - } - - "replaceOne without upsert" in { - checkMongoIsRunning - - val obj = FieldTypeTestRecord.createRecord - - FieldTypeTestRecord.replaceOneAsync(obj, false) must beEqualTo[FieldTypeTestRecord](obj).await - FieldTypeTestRecord.find(obj.id.get).isDefined must_== false - } - } -} diff --git a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/MongoRecordExamplesSpec.scala b/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/MongoRecordExamplesSpec.scala deleted file mode 100644 index 18bce4842e..0000000000 --- a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/MongoRecordExamplesSpec.scala +++ /dev/null @@ -1,487 +0,0 @@ -/* - * Copyright 2010-2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package mongodb -package record - -import java.util.{Calendar, Date, UUID} -import java.util.regex.Pattern - -import net.liftweb.common.{Box, Empty, Failure, Full} -import net.liftweb.http.{S, LiftSession} -import net.liftweb.json._ -import net.liftweb.json.JsonDSL._ -import net.liftweb.record.field._ -import net.liftweb.util.TimeHelpers._ -import net.liftweb.mongodb.record.field._ - -import org.specs2.mutable.Specification - -import org.bson.Document -import org.bson.types.ObjectId -import com.mongodb._ - -package mongotestrecords { - - import field._ - - class TstRecord private () extends MongoRecord[TstRecord] with UUIDPk[TstRecord] { - - def meta = TstRecord - - object booleanfield extends BooleanField(this) - object datetimefield extends DateTimeField(this) - object doublefield extends DoubleField(this) - object emailfield extends EmailField(this, 220) - object intfield extends IntField(this) - object localefield extends LocaleField(this) - object longfield extends LongField(this) - object passwordfield extends MongoPasswordField(this) - object stringfield extends StringField(this, 32) - object timezonefield extends TimeZoneField(this) - object patternfield extends PatternField(this) - object datefield extends DateField(this) - - // JsonObjectField (requires a definition for defaultValue) - object person extends JsonObjectField[TstRecord, Person](this, Person) { - def defaultValue = Person("", 0, Address("", ""), Nil) - } - } - - object TstRecord extends TstRecord with MongoMetaRecord[TstRecord] - - case class Address(street: String, city: String) - case class Child(name: String, age: Int, birthdate: Option[Date]) - - case class Person(name: String, age: Int, address: Address, children: List[Child]) - extends JsonObject[Person] { - def meta = Person - } - - object Person extends JsonObjectMeta[Person] - - class MainDoc private () extends MongoRecord[MainDoc] with ObjectIdPk[MainDoc] { - def meta = MainDoc - - object name extends StringField(this, 12) - object cnt extends IntField(this) - object refdocId extends ObjectIdRefField(this, RefDoc) - object refuuid extends UUIDRefField(this, RefUuidDoc) - } - object MainDoc extends MainDoc with MongoMetaRecord[MainDoc] - - class RefDoc private () extends MongoRecord[RefDoc] with ObjectIdPk[RefDoc] { - def meta = RefDoc - } - object RefDoc extends RefDoc with MongoMetaRecord[RefDoc] - - // uuid as id - class RefUuidDoc private () extends MongoRecord[RefUuidDoc] with UUIDPk[RefUuidDoc] { - def meta = RefUuidDoc - } - object RefUuidDoc extends RefUuidDoc with MongoMetaRecord[RefUuidDoc] - - class ListDoc private () extends MongoRecord[ListDoc] with ObjectIdPk[ListDoc] { - def meta = ListDoc - - import scala.collection.JavaConverters._ - - // standard list types - object name extends StringField(this, 10) - object stringlist extends MongoListField[ListDoc, String](this) - object intlist extends MongoListField[ListDoc, Int](this) - object doublelist extends MongoListField[ListDoc, Double](this) - object boollist extends MongoListField[ListDoc, Boolean](this) - object objidlist extends MongoListField[ListDoc, ObjectId](this) - object dtlist extends MongoListField[ListDoc, Date](this) - object patternlist extends MongoListField[ListDoc, Pattern](this) - object binarylist extends MongoListField[ListDoc, Array[Byte]](this) - - // specialized list types - object jsonobjlist extends JsonObjectListField(this, JsonDoc) - object maplist extends MongoListField[ListDoc, Map[String, String]](this) {} - } - object ListDoc extends 
ListDoc with MongoMetaRecord[ListDoc] - - case class JsonDoc(id: String, name: String) extends JsonObject[JsonDoc] { - def meta = JsonDoc - } - object JsonDoc extends JsonObjectMeta[JsonDoc] - - class MapDoc private () extends MongoRecord[MapDoc] with ObjectIdPk[MapDoc] { - def meta = MapDoc - - object stringmap extends MongoMapField[MapDoc, String](this) - } - object MapDoc extends MapDoc with MongoMetaRecord[MapDoc] { - override def formats = DefaultFormats.lossless // adds .000 - } - - class OptionalDoc private () extends MongoRecord[OptionalDoc] with ObjectIdPk[OptionalDoc] { - def meta = OptionalDoc - // optional fields - object stringbox extends StringField(this, 32) { - override def optional_? = true - override def defaultValue = "nothin" - } - } - object OptionalDoc extends OptionalDoc with MongoMetaRecord[OptionalDoc] - - class StrictDoc private () extends MongoRecord[StrictDoc] with ObjectIdPk[StrictDoc] { - def meta = StrictDoc - object name extends StringField(this, 32) - } - object StrictDoc extends StrictDoc with MongoMetaRecord[StrictDoc] { - - import net.liftweb.json.JsonDSL._ - - createIndex(("name" -> 1), true) // unique name - } -} - - -/** - * Systems under specification for MongoRecordExamples. - */ -class MongoRecordExamplesSpec extends Specification with MongoTestKit { - "MongoRecordExamples Specification".title - - import mongotestrecords._ - import net.liftweb.util.TimeHelpers._ - - val session = new LiftSession("hello", "", Empty) - "TstRecord example" in { - - checkMongoIsRunning - - S.initIfUninitted(session) { - - val pwd = "test" - val cal = Calendar.getInstance - cal.set(2009, 10, 2) - - val tr = TstRecord.createRecord - tr.stringfield("test record string field") - tr.emailfield("test") - tr.validate.size must_== 2 - tr.passwordfield.setPassword(pwd) - tr.emailfield("test@example.com") - tr.datetimefield(cal) - tr.patternfield(Pattern.compile("^Mo", Pattern.CASE_INSENSITIVE)) - tr.validate.size must_== 0 - - // JsonObjectField - val dob1 = Calendar.getInstance.setYear(2005).setMonth(7).setDay(4) - val per = Person("joe", 27, Address("Bulevard", "Helsinki"), List(Child("Mary", 5, Some(dob1.getTime)), Child("Mazy", 3, None))) - tr.person(per) - - // save the record in the db - tr.save() - - // retrieve from db - def fromDb = TstRecord.find("_id", tr.id.value) - - fromDb.isDefined must_== true - - for (t <- fromDb) { - t.id.value must_== tr.id.value - t.booleanfield.value must_== tr.booleanfield.value - TstRecord.formats.dateFormat.format(t.datetimefield.value.getTime) must_== - TstRecord.formats.dateFormat.format(tr.datetimefield.value.getTime) - t.doublefield.value must_== tr.doublefield.value - t.intfield.value must_== tr.intfield.value - t.localefield.value must_== tr.localefield.value - t.longfield.value must_== tr.longfield.value - - t.stringfield.value must_== tr.stringfield.value - t.timezonefield.value must_== tr.timezonefield.value - t.datetimefield.value must_== tr.datetimefield.value - t.patternfield.value.pattern must_== tr.patternfield.value.pattern - t.patternfield.value.flags must_== tr.patternfield.value.flags - t.datefield.value must_== tr.datefield.value - t.person.value.name must_== tr.person.value.name - t.person.value.age must_== tr.person.value.age - t.person.value.address.street must_== tr.person.value.address.street - t.person.value.address.city must_== tr.person.value.address.city - t.person.value.children.size must_== tr.person.value.children.size - for (i <- List.range(0, t.person.value.children.size-1)) { - 
t.person.value.children(i).name must_== tr.person.value.children(i).name - t.person.value.children(i).age must_== tr.person.value.children(i).age - t.person.value.children(i).birthdate must_== tr.person.value.children(i).birthdate - } - t.passwordfield.isMatch(pwd) must_== true - } - - if (!debug) TstRecord.drop - } - - success - } - - "Ref example" in { - - checkMongoIsRunning - - val ref1 = RefDoc.createRecord - val ref2 = RefDoc.createRecord - - ref1.save() must_== ref1 - ref2.save() must_== ref2 - - val refUuid1 = RefUuidDoc.createRecord - val refUuid2 = RefUuidDoc.createRecord - - refUuid1.save() must_== refUuid1 - refUuid2.save() must_== refUuid2 - - val md1 = MainDoc.createRecord - val md2 = MainDoc.createRecord - val md3 = MainDoc.createRecord - val md4 = MainDoc.createRecord - - md1.name.set("md1") - md2.name.set("md2") - md3.name.set("md3") - md4.name.set("md4") - - md1.refdocId.set(ref1.id.get) - md2.refdocId.set(ref1.id.get) - md3.refdocId.set(ref2.id.get) - md4.refdocId.set(ref2.id.get) - - md1.refuuid.set(refUuid1.id.get) - md2.refuuid.set(refUuid1.id.get) - md3.refuuid.set(refUuid2.id.get) - md4.refuuid.set(refUuid2.id.get) - - md1.save() must_== md1 - md2.save() must_== md2 - md3.save() must_== md3 - md4.save() must_== md4 - - MainDoc.count must_== 4 - RefDoc.count must_== 2 - - // get the docs back from the db - MainDoc.find(md1.id.get).foreach(m => { - m.name.value must_== md1.name.value - m.cnt.value must_== md1.cnt.value - m.refdocId.value must_== md1.refdocId.value - m.refuuid.value must_== md1.refuuid.value - }) - - // fetch a refdoc - val refFromFetch = md1.refdocId.obj - refFromFetch.isDefined must_== true - refFromFetch.openOrThrowException("we know this is Full").id.get must_== ref1.id.get - - // query for a single doc with a JObject query - val md1a = MainDoc.find(("name") -> "md1") - md1a.isDefined must_== true - md1a.foreach(o => o.id.get must_== md1.id.get) - - // query for a single doc with a k, v query - val md1b = MainDoc.find("_id", md1.id.get) - md1b.isDefined must_== true - md1b.foreach(o => o.id.get must_== md1.id.get) - - // query for a single doc with a Map query - val md1c = MainDoc.find(("name" -> "md1")) - md1c.isDefined must_== true - md1c.foreach(o => o.id.get must_== md1.id.get) - - // find all documents - MainDoc.findAll.size must_== 4 - RefDoc.findAll.size must_== 2 - - // find all documents with JObject query - val mdq1 = MainDoc.findAll(("name" -> "md1")) - mdq1.size must_== 1 - - // find all documents with $in query, sorted - val qry = ("name" -> ("$in" -> List("md1", "md2"))) - val mdq2 = MainDoc.findAll(qry, ("name" -> -1)) - mdq2.size must_== 2 - mdq2.head.id.get must_== md2.id.get - - // Find all documents using a k, v query - val mdq3 = MainDoc.findAll("_id", md1.id.get) - mdq3.size must_== 1 - - // find all documents with field selection - val mdq4 = MainDoc.findAll(("name" -> "md1"), ("name" -> 1), Empty) - mdq4.size must_== 1 - - // modifier operations $inc, $set, $push... - val o2 = (("$inc" -> ("cnt" -> 1)) ~ ("$set" -> ("name" -> "md1a"))) - MainDoc.updateOne(("name" -> "md1"), o2) - // get the doc back from the db and compare - val mdq5 = MainDoc.find("_id", md1.id.get) - mdq5.isDefined must_== true - mdq5.map ( m => { - m.name.value must_== "md1a" - m.cnt.value must_== 1 - }) - - if (!debug) { - // delete them - md1.delete_! - md2.delete_! - md3.delete_! - md4.delete_! - ref1.delete_! - ref2.delete_! 
- - MainDoc.findAll.size must_== 0 - - MainDoc.drop - RefDoc.drop - } - - success - } - - "List example" in { - checkMongoIsRunning - - val ref1 = RefDoc.createRecord - val ref2 = RefDoc.createRecord - - ref1.save() must_== ref1 - ref2.save() must_== ref2 - - val name = "ld1" - val strlist = List("string1", "string2", "string3", "string1") - val jd1 = JsonDoc("1", "jsondoc1") - - val ld1 = ListDoc.createRecord - ld1.name.set(name) - ld1.stringlist.set(strlist) - ld1.intlist.set(List(99988,88, 88)) - ld1.doublelist.set(List(997655.998,88.8)) - ld1.boollist.set(List(true,true,false)) - ld1.objidlist.set(List(ObjectId.get, ObjectId.get)) - ld1.dtlist.set(List(now, now)) - ld1.jsonobjlist.set(List(jd1, JsonDoc("2", "jsondoc2"), jd1)) - ld1.patternlist.set(List(Pattern.compile("^Mongo"), Pattern.compile("^Mongo2"))) - ld1.maplist.set(List(Map("name" -> "map1", "type" -> "map"), Map("name" -> "map2", "type" -> "map"))) - ld1.binarylist.set(List[Array[Byte]]("foo".getBytes(), "bar".getBytes())) - - ld1.save() must_== ld1 - - val qld1 = ListDoc.find(ld1.id.get) - - qld1.isDefined must_== true - - qld1.foreach { l => - l.name.value must_== ld1.name.value - l.stringlist.value must_== ld1.stringlist.value - l.intlist.value must_== ld1.intlist.value - l.doublelist.value must_== ld1.doublelist.value - l.boollist.value must_== ld1.boollist.value - l.objidlist.value must_== ld1.objidlist.value - l.dtlist.value must_== ld1.dtlist.value - l.jsonobjlist.value must_== ld1.jsonobjlist.value - for (i <- List.range(0, l.patternlist.value.size-1)) { - l.patternlist.value(i).pattern must_== ld1.patternlist.value(i).pattern - } - l.maplist.value must_== ld1.maplist.value - for (i <- List.range(0, l.jsonobjlist.value.size-1)) { - l.jsonobjlist.value(i).id must_== ld1.jsonobjlist.value(i).id - l.jsonobjlist.value(i).name must_== ld1.jsonobjlist.value(i).name - } - for { - orig <- ld1.binarylist.value.headOption - queried <- l.binarylist.value.headOption - } new String(orig) must_== new String(queried) - } - - if (!debug) { - ListDoc.drop - RefDoc.drop - } - - success - } - - "Map Example" in { - - checkMongoIsRunning - - val md1 = MapDoc.createRecord - md1.stringmap.set(Map("h" -> "hola")) - - md1.save() must_== md1 - - md1.delete_! 
- - if (!debug) MapDoc.drop - - success - } - - "Optional Example" in { - - checkMongoIsRunning - - val od1 = OptionalDoc.createRecord - od1.stringbox.valueBox must_== Empty - od1.save() must_== od1 - - OptionalDoc.find(od1.id.get).foreach { - od1FromDB => - od1FromDB.stringbox.valueBox must_== od1.stringbox.valueBox - } - - - val od2 = OptionalDoc.createRecord - od1.stringbox.valueBox must_== Empty - od2.stringbox.set("aloha") - od2.save() must_== od2 - - OptionalDoc.find(od2.id.get).foreach { - od2FromDB => - od2FromDB.stringbox.valueBox must_== od2.stringbox.valueBox - } - - if (!debug) OptionalDoc.drop - - success - } - - "Strict Example" in { - - checkMongoIsRunning - - val sd1 = StrictDoc.createRecord.name("sd1") - val sd2 = StrictDoc.createRecord.name("sd1") - - sd1.save() must_== sd1 - sd2.save() must throwA[MongoException] - sd2.saveBox() must beLike { - case Failure(msg, _, _) => msg must contain("E11000") - } - - sd2.name("sd2") - sd2.save() must_== sd2 - - - if (!debug) StrictDoc.drop - - success - } -} diff --git a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/MongoRecordSpec.scala b/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/MongoRecordSpec.scala deleted file mode 100644 index b903d4c0c2..0000000000 --- a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/MongoRecordSpec.scala +++ /dev/null @@ -1,954 +0,0 @@ -/* - * Copyright 2010-2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb -package record - -import java.util.{Date, Locale, UUID} -import java.util.regex.Pattern - -import org.joda.time.DateTime -import org.specs2.mutable.Specification - -import common._ -import http.{S, LiftSession} -import http.js.JsExp -import json._ -import JsonDSL._ -import util.Helpers.snakify - -import net.liftweb.record.RecordRules -import net.liftweb.record.field.Countries - -import org.bson.Document -import org.bson.types.ObjectId -import com.mongodb._ - - -/** - * Systems under specification for MongoRecord. 
- */ -class MongoRecordSpec extends Specification with MongoTestKit { - "MongoRecord Specification".title - - import fixtures._ - import testmodels._ - val session = new LiftSession("hello", "", Empty) - - override def before = { - super.before - checkMongoIsRunning - } - - "MongoRecord field introspection" should { - val rec = MongoFieldTypeTestRecord.createRecord - val allExpectedFieldNames: List[String] = "_id" :: "mandatoryCaseClassField" :: "optionalCaseClassField" :: - (for { - typeName <- "Date JsonObject ObjectId UUID".split(" ") - flavor <- "mandatory legacyOptional optional".split(" ") - } yield flavor + typeName + "Field").toList - - "introspect only the expected fields" in { - rec.fields().map(_.name).filterNot(allExpectedFieldNames.contains(_)) must_== Nil - } - - "correctly look up fields by name" in { - val fields = - allExpectedFieldNames.flatMap { name => - rec.fieldByName(name) - } - - fields.length must_== allExpectedFieldNames.length - } - - "not look up fields by bogus names" in { - val fields = - allExpectedFieldNames.flatMap { name => - rec.fieldByName("x" + name + "y") - } - - fields.length must_== 0 - } - } - - "MongoRecord lifecycle callbacks" should { - def testOneHarness(scope: String, f: LifecycleTestRecord => HarnessedLifecycleCallbacks) = { - ("be called before validation when specified at " + scope) in { - val rec = LifecycleTestRecord.createRecord - var triggered = false - f(rec).beforeValidationHarness = () => triggered = true - rec.foreachCallback(_.beforeValidation) - triggered must_== true - } - - ("be called after validation when specified at " + scope) in { - val rec = LifecycleTestRecord.createRecord - var triggered = false - f(rec).afterValidationHarness = () => triggered = true - rec.foreachCallback(_.afterValidation) - triggered must_== true - } - - ("be called around validate when specified at " + scope) in { - val rec = LifecycleTestRecord.createRecord - var triggeredBefore = false - var triggeredAfter = false - f(rec).beforeValidationHarness = () => triggeredBefore = true - f(rec).afterValidationHarness = () => triggeredAfter = true - rec.validate must_== Nil - triggeredBefore must_== true - triggeredAfter must_== true - } - - ("be called before save when specified at " + scope) in { - val rec = LifecycleTestRecord.createRecord - var triggered = false - f(rec).beforeSaveHarness = () => triggered = true - rec.foreachCallback(_.beforeSave) - triggered must_== true - } - - ("be called before create when specified at " + scope) in { - val rec = LifecycleTestRecord.createRecord - var triggered = false - f(rec).beforeCreateHarness = () => triggered = true - rec.foreachCallback(_.beforeCreate) - triggered must_== true - } - - ("be called before update when specified at " + scope) in { - val rec = LifecycleTestRecord.createRecord - var triggered = false - f(rec).beforeUpdateHarness = () => triggered = true - rec.foreachCallback(_.beforeUpdate) - triggered must_== true - } - - ("be called after save when specified at " + scope) in { - val rec = LifecycleTestRecord.createRecord - var triggered = false - f(rec).afterSaveHarness = () => triggered = true - rec.foreachCallback(_.afterSave) - triggered must_== true - } - - ("be called after create when specified at " + scope) in { - val rec = LifecycleTestRecord.createRecord - var triggered = false - f(rec).afterCreateHarness = () => triggered = true - rec.foreachCallback(_.afterCreate) - triggered must_== true - } - - ("be called after update when specified at " + scope) in { - val rec = 
LifecycleTestRecord.createRecord - var triggered = false - f(rec).afterUpdateHarness = () => triggered = true - rec.foreachCallback(_.afterUpdate) - triggered must_== true - } - - ("be called before delete when specified at " + scope) in { - val rec = LifecycleTestRecord.createRecord - var triggered = false - f(rec).beforeDeleteHarness = () => triggered = true - rec.foreachCallback(_.beforeDelete) - triggered must_== true - } - - ("be called after delete when specified at " + scope) in { - val rec = LifecycleTestRecord.createRecord - var triggered = false - f(rec).afterDeleteHarness = () => triggered = true - rec.foreachCallback(_.afterDelete) - triggered must_== true - } - } - - testOneHarness("the field level", rec => rec.stringFieldWithCallbacks: HarnessedLifecycleCallbacks) - } - - "MongoRecord" should { - val binData: Array[Byte] = Array(18, 19, 20) - - val dt = DateTime.now.plusHours(1) - - val fttr = FieldTypeTestRecord.createRecord - .mandatoryBooleanField(false) - .mandatoryCountryField(Countries.USA) - .mandatoryDecimalField(BigDecimal("3.14")) - .mandatoryDoubleField(1999) - .mandatoryEmailField("test@liftweb.net") - .mandatoryEnumField(MyTestEnum.ONE) - .mandatoryIntField(99) - .mandatoryLocaleField("en_US") - .mandatoryLongField(100L) - .mandatoryPostalCodeField("55401") - .mandatoryStringField("string") - .mandatoryTextareaField("string") - .mandatoryTimeZoneField("America/Chicago") - .mandatoryJodaTimeField(dt) - - val bftr = BinaryFieldTestRecord.createRecord - .mandatoryBinaryField(binData) - - val mfttr = MongoFieldTypeTestRecord.createRecord - .mandatoryDateField(new Date) - .mandatoryJsonObjectField(TypeTestJsonObject(1, "jsonobj1", Map("x" -> "1"))) - .mandatoryObjectIdField(ObjectId.get) - .mandatoryUUIDField(UUID.randomUUID) - .mandatoryCaseClassField(CaseClassTestObject(1,"str",MyTestEnum.TWO)) - - val mfttrJson = - ("_id" -> ("$oid" -> mfttr.id.toString)) ~ - ("mandatoryDateField" -> ("$dt" -> mfttr.meta.formats.dateFormat.format(mfttr.mandatoryDateField.value))) ~ - ("legacyOptionalDateField" -> (None: Option[JObject])) ~ - ("optionalDateField" -> JNothing) ~ - ("mandatoryJsonObjectField" -> (("intField" -> 1) ~ ("stringField" -> "jsonobj1") ~ ("mapField" -> ("x" -> "1")))) ~ - ("optionalJsonObjectField" -> JNothing) ~ - ("legacyOptionalJsonObjectField" -> (None: Option[JObject])) ~ - ("mandatoryObjectIdField", ("$oid" -> mfttr.mandatoryObjectIdField.value.toString)) ~ - ("optionalObjectIdField" -> JNothing) ~ - ("legacyOptionalObjectIdField" -> (None: Option[JObject])) ~ - ("mandatoryUUIDField" -> ("$uuid" -> mfttr.mandatoryUUIDField.value.toString)) ~ - ("optionalUUIDField" -> JNothing) ~ - ("legacyOptionalUUIDField" -> (None: Option[JObject])) ~ - ("mandatoryCaseClassField" -> ("intField" -> 1) ~ ("stringField" -> "str") ~ ("enum" -> 1)) ~ - ("optionalCaseClassField" -> JNothing) - - val pftr = PatternFieldTestRecord.createRecord - .mandatoryPatternField(Pattern.compile("^Mo", Pattern.CASE_INSENSITIVE)) - - val pftrJson = - ("_id" -> ("$oid" -> pftr.id.toString)) ~ - ("mandatoryPatternField" -> (("$regex" -> pftr.mandatoryPatternField.value.pattern) ~ ("$flags" -> pftr.mandatoryPatternField.value.flags))) ~ - ("optionalPatternField" -> JNothing) ~ - ("legacyOptionalPatternField" -> (None: Option[JObject])) - - val ltr = ListTestRecord.createRecord - .mandatoryStringListField(List("abc", "def", "ghi")) - .mandatoryIntListField(List(4, 5, 6)) - .mandatoryJsonObjectListField(List(TypeTestJsonObject(1, "jsonobj1", Map("x" -> "1")), TypeTestJsonObject(2, "jsonobj2", 
Map("x" -> "2")))) - .caseClassListField(List(CaseClassTestObject(1,"str",MyTestEnum.TWO))) - .mandatoryMongoRefListField(List(fttr.id.get)) - - val ltrJson = - ("mandatoryMongoRefListField" -> JArray(List(("$oid" -> fttr.id.get.toString)))) ~ - ("mandatoryJsonObjectListField" -> List( - (("intField" -> 1) ~ ("stringField" -> "jsonobj1") ~ ("mapField" -> ("x" -> "1"))), - (("intField" -> 2) ~ ("stringField" -> "jsonobj2") ~ ("mapField" -> ("x" -> "2"))) - )) ~ - ("mandatoryStringListField" -> List("abc", "def", "ghi")) ~ - ("_id" -> ("$uuid" -> ltr.id.toString)) ~ - ("mandatoryIntListField" -> List(4, 5, 6)) ~ - ("caseClassListField" -> List( - ("intField" -> 1) ~ ("stringField" -> "str") ~ ("enum" -> 1) - )) - - - val mtr = MapTestRecord.createRecord - .mandatoryStringMapField(Map("a" -> "abc", "b" -> "def", "c" -> "ghi")) - .mandatoryIntMapField(Map("a" -> 4, "b" -> 5, "c" -> 6)) - - val mtrJson = - ("_id" -> mtr.id.toString) ~ - ("mandatoryStringMapField" -> ( - ("a" -> "abc") ~ - ("b" -> "def") ~ - ("c" -> "ghi") - )) ~ - ("mandatoryIntMapField" -> ( - ("a" -> 4) ~ - ("b" -> 5) ~ - ("c" -> 6) - )) - - // SubRecord - val ssr1 = SubSubRecord.createRecord.name("SubSubRecord1") - val ssr2 = SubSubRecord.createRecord.name("SubSubRecord2") - - val sr1 = SubRecord.createRecord - .name("SubRecord1") - .subsub(ssr1) - .subsublist(ssr1 :: ssr2 :: Nil) - .slist("s1" :: "s2" :: Nil) - .smap(Map("a" -> "s1", "b" -> "s2")) - .pattern(Pattern.compile("^Mo", Pattern.CASE_INSENSITIVE)) - - val sr2 = SubRecord.createRecord.name("SubRecord2") - - val srtr = SubRecordTestRecord.createRecord - .mandatoryBsonRecordField(sr1) - .mandatoryBsonRecordListField(List(sr1,sr2)) - - val sr1Json = - ("name" -> "SubRecord1") ~ - ("subsub" -> ("name" -> "SubSubRecord1")) ~ - ("subsublist" -> List( - ("name" -> "SubSubRecord1"), - ("name" -> "SubSubRecord2") - )) ~ - ("when" -> ("$dt" -> srtr.meta.formats.dateFormat.format(sr1.when.value))) ~ - ("slist" -> List("s1", "s2")) ~ - ("smap" -> (("a" -> "s1") ~ ("b" -> "s2"))) ~ - ("oid" -> ("$oid" -> sr1.oid.value.toString)) ~ - ("pattern" -> (("$regex" -> sr1.pattern.value.pattern) ~ ("$flags" -> sr1.pattern.value.flags))) ~ - ("uuid" -> ("$uuid" -> sr1.uuid.value.toString)) - - val sr2Json = - ("name" -> "SubRecord2") ~ - ("subsub" -> ("name" -> "")) ~ - ("subsublist" -> JArray(Nil)) ~ - ("when" -> ("$dt" -> srtr.meta.formats.dateFormat.format(sr2.when.value))) ~ - ("slist" -> JArray(Nil)) ~ - ("smap" -> JObject(Nil)) ~ - ("oid" -> ("$oid" -> sr2.oid.value.toString)) ~ - ("pattern" -> (("$regex" -> sr2.pattern.value.pattern) ~ ("$flags" -> sr2.pattern.value.flags))) ~ - ("uuid" -> ("$uuid" -> sr2.uuid.value.toString)) - - val srtrJson = - ("_id" -> ("$oid" -> srtr.id.toString)) ~ - ("mandatoryBsonRecordField" -> sr1Json) ~ - ("legacyOptionalBsonRecordField" -> JNothing) ~ - ("mandatoryBsonRecordListField" -> List(sr1Json, sr2Json)) ~ - ("legacyOptionalBsonRecordListField", JArray(Nil)) - - // JObjectField - val joftrFieldJObject: JObject = ("minutes" -> 59) - val joftr = JObjectFieldTestRecord.createRecord.mandatoryJObjectField(joftrFieldJObject) - val joftrJson: JValue = ("_id" -> ("$oid" -> joftr.id.toString)) ~ ("mandatoryJObjectField" -> ("minutes" -> 59)) - - "save and retrieve 'standard' type fields" in { - checkMongoIsRunning - - S.initIfUninitted(session) { - fttr.save() - - val fttrFromDb = FieldTypeTestRecord.find(fttr.id.value) - fttrFromDb.isDefined must_== true - fttrFromDb foreach { tr => - tr mustEqual fttr - } - - bftr.save() - - val bftrFromDb = 
BinaryFieldTestRecord.find(bftr.id.value) - bftrFromDb must beLike { - case Full(tr) => - tr mustEqual bftr - } - } - } - - "delete record properly" in { - checkMongoIsRunning - - S.initIfUninitted(session) { - fttr.save() - FieldTypeTestRecord.find(fttr.id.value).isDefined must_== true - fttr.delete_! - FieldTypeTestRecord.find(fttr.id.value) must beEmpty - } - } - - "save and retrieve Mongo type fields with set values" in { - mfttr.save() - - val mfttrFromDb = MongoFieldTypeTestRecord.find(mfttr.id.value) - mfttrFromDb.isDefined must_== true - mfttrFromDb foreach { tr => - tr mustEqual mfttr - } - - val recs = MongoFieldTypeTestRecord.findAll(List(mfttr.id.value)) - - recs.length mustEqual 1 - - pftr.save() - - val pftrFromDb = PatternFieldTestRecord.find(pftr.id.value) - pftrFromDb.isDefined must_== true - pftrFromDb foreach { tr => - tr mustEqual pftr - } - - ltr.save() - - val ltrFromDb = ListTestRecord.find(ltr.id.value) - ltrFromDb.isDefined must_== true - ltrFromDb foreach { tr => - tr mustEqual ltr - } - - mtr.save() - - val mtrFromDb = MapTestRecord.find(mtr.id.value) - mtrFromDb.isDefined must_== true - mtrFromDb foreach { tr => - tr mustEqual mtr - } - - srtr.save() - - val srtrFromDb = SubRecordTestRecord.find(srtr.id.value) - srtrFromDb.isDefined must_== true - srtrFromDb foreach { tr => - tr mustEqual srtr - } - - joftr.save() - - val joftrFromDb = JObjectFieldTestRecord.find(joftr.id.get) - joftrFromDb.isDefined must_== true - joftrFromDb foreach { tr => - tr must_== joftr - } - success - } - - "save and retrieve Mongo type fields with default values" in { - val mfttrDef = MongoFieldTypeTestRecord.createRecord - mfttrDef.save() - - val mfttrFromDb = MongoFieldTypeTestRecord.find(mfttrDef.id.value) - mfttrFromDb.isDefined must_== true - mfttrFromDb foreach { tr => - tr mustEqual mfttrDef - } - - val pftrDef = PatternFieldTestRecord.createRecord - pftrDef.save() - - val pftrFromDb = PatternFieldTestRecord.find(pftrDef.id.value) - pftrFromDb.isDefined must_== true - pftrFromDb foreach { tr => - tr mustEqual pftrDef - } - - val ltrDef = ListTestRecord.createRecord - ltrDef.save() - - val ltrFromDb = ListTestRecord.find(ltrDef.id.value) - ltrFromDb.isDefined must_== true - ltrFromDb foreach { tr => - tr mustEqual ltrDef - } - - val mtrDef = MapTestRecord.createRecord - mtrDef.save() - - val mtrFromDb = MapTestRecord.find(mtrDef.id.value) - mtrFromDb.isDefined must_== true - mtrFromDb foreach { tr => - tr mustEqual mtrDef - } - - val srtrDef = SubRecordTestRecord.createRecord - srtrDef.save() - - val srtrFromDb = SubRecordTestRecord.find(srtrDef.id.value) - srtrFromDb.isDefined must_== true - srtrFromDb.toList map { tr => - tr mustEqual srtrDef - } - - val joftrDef = JObjectFieldTestRecord.createRecord - joftrDef.save() - - val joftrFromDb = JObjectFieldTestRecord.find(joftrDef.id.value) - joftrFromDb.isDefined must_== true - joftrFromDb foreach { tr => - tr mustEqual joftrDef - } - success - } - - "convert Mongo type fields to JValue" in { - mfttr.asJValue mustEqual mfttrJson - } - - "convert pattern field to JValue" in { - pftr.asJValue mustEqual pftrJson - } - - "convert list fields to JValue" in { - ltr.asJValue mustEqual ltrJson - } - - "convert map fields to JValue" in { - mtr.asJValue mustEqual mtrJson - } - - "convert JObject fields to JValue" in { - joftr.asJValue mustEqual joftrJson - } - - "convert BsonRecord fields to JValue" in { - val srtrAsJValue = srtr.asJValue - srtrAsJValue \\ "_id" mustEqual srtrJson \\ "_id" - srtrAsJValue \\ "mandatoryBsonRecordField" 
mustEqual srtrJson \\ "mandatoryBsonRecordField" - srtrAsJValue \\ "legacyOptionalBsonRecordField" mustEqual srtrJson \\ "legacyOptionalBsonRecordField" - srtrAsJValue \\ "mandatoryBsonRecordListField" mustEqual srtrJson \\ "mandatoryBsonRecordListField" - srtrAsJValue \\ "legacyOptionalBsonRecordListField" mustEqual srtrJson \\ "legacyOptionalBsonRecordListField" - } - - "get set from json string using lift-json parser" in { - val mfftrFromJson = MongoFieldTypeTestRecord.fromJsonString(compactRender(mfttrJson)) - mfftrFromJson.isDefined must_== true - mfftrFromJson foreach { tr => - tr mustEqual mfttr - } - - val pftrFromJson = PatternFieldTestRecord.fromJsonString(compactRender(pftrJson)) - pftrFromJson.isDefined must_== true - pftrFromJson foreach { tr => - tr mustEqual pftr - } - - val ltrFromJson = ListTestRecord.fromJsonString(compactRender(ltrJson)) - ltrFromJson.isDefined must_== true - ltrFromJson foreach { tr => - tr mustEqual ltr - } - - val mtrFromJson = MapTestRecord.fromJsonString(compactRender(mtrJson)) - mtrFromJson.isDefined must_== true - mtrFromJson.toList map { tr => - tr mustEqual mtr - } - - val joftrFromJson = JObjectFieldTestRecord.fromJsonString(compactRender(joftrJson)) - joftrFromJson must_== Full(joftr) - } - - "handle null" in { - val ntr = NullTestRecord.createRecord - ntr.nullstring.set(null) - ntr.jsonobjlist.set(List(JsonObj("1", null), JsonObj("2", "jsonobj2"))) - - ntr.save() must_== ntr - - val ntrFromDb = NullTestRecord.find(ntr.id.value) - - ntrFromDb must beLike { - case Full(n) => - // goes in as - ntr.nullstring.valueBox.map(_ must beNull) - ntr.nullstring.value must beNull - // comes out as - n.nullstring.valueBox.map(_ must_== "") - n.nullstring.value must_== "" - // JsonObjects - n.jsonobjlist.value.size must_== 2 - ntr.jsonobjlist.value.size must_== 2 - n.jsonobjlist.value(0).id must_== ntr.jsonobjlist.value(0).id - n.jsonobjlist.value(0).name must beNull - ntr.jsonobjlist.value(0).name must beNull - n.jsonobjlist.value(1).id must_== ntr.jsonobjlist.value(1).id - n.jsonobjlist.value(1).name must_== ntr.jsonobjlist.value(1).name - } - } - - "handle Box using JsonBoxSerializer" in { - val btr = BoxTestRecord.createRecord - btr.jsonobjlist.set( - BoxTestJsonObj("1", Empty, Full("Full String1"), Failure("Failure1")) :: - BoxTestJsonObj("2", Empty, Full("Full String2"), Failure("Failure2")) :: - Nil - ) - - btr.save() - - val btrFromDb = BoxTestRecord.find(btr.id.value) - - btrFromDb must beLike { - case Full(b) => - b.jsonobjlist.value.size must_== 2 - btr.jsonobjlist.value.size must_== 2 - val sortedList = b.jsonobjlist.value.sortWith(_.id < _.id) - sortedList(0).boxEmpty must_== Empty - sortedList(0).boxFull must_== Full("Full String1") - sortedList(0).boxFail must_== Failure("Failure1") - } - } - - "retrieve MongoRef objects properly" in { - S.initIfUninitted(session) { - val ntr = NullTestRecord.createRecord - val btr = BoxTestRecord.createRecord - - fttr.save() - ltr.save() - mtr.save() - ntr.save() - btr.save() - - val rftr = RefFieldTestRecord.createRecord - .mandatoryObjectIdRefField(fttr.id.get) - .mandatoryUUIDRefField(ltr.id.get) - .mandatoryStringRefField(mtr.id.get) - .mandatoryIntRefField(ntr.id.get) - .mandatoryLongRefField(btr.id.get) - .mandatoryObjectIdRefListField(List(fttr.id.get)) - .mandatoryUUIDRefListField(List(ltr.id.get)) - .mandatoryStringRefListField(List(mtr.id.get)) - .mandatoryIntRefListField(List(ntr.id.get)) - .mandatoryLongRefListField(List(btr.id.get)) - - // single objects - rftr.mandatoryObjectIdRefField.obj 
mustEqual Full(fttr) - rftr.mandatoryUUIDRefField.obj mustEqual Full(ltr) - rftr.mandatoryStringRefField.obj mustEqual Full(mtr) - rftr.mandatoryIntRefField.obj mustEqual Full(ntr) - rftr.mandatoryLongRefField.obj mustEqual Full(btr) - - val fttr2 = FieldTypeTestRecord.createRecord.save() - - rftr.mandatoryObjectIdRefField.cached_? mustEqual true - rftr.mandatoryObjectIdRefField(fttr2.id.get) - rftr.mandatoryObjectIdRefField.cached_? mustEqual false - rftr.mandatoryObjectIdRefField.find mustEqual Full(fttr2) - rftr.mandatoryObjectIdRefField.obj mustEqual Full(fttr2) - rftr.mandatoryObjectIdRefField.cached_? mustEqual true - - // lists - rftr.mandatoryObjectIdRefListField.objs mustEqual List(fttr) - rftr.mandatoryUUIDRefListField.objs mustEqual List(ltr) - rftr.mandatoryStringRefListField.objs mustEqual List(mtr) - rftr.mandatoryIntRefListField.objs mustEqual List(ntr) - rftr.mandatoryLongRefListField.objs mustEqual List(btr) - - val fttr3 = FieldTypeTestRecord.createRecord.save() - val objList = List(fttr2, fttr3) - - rftr.mandatoryObjectIdRefListField.cached_? mustEqual true - rftr.mandatoryObjectIdRefListField(objList.map(_.id.get)) - rftr.mandatoryObjectIdRefListField.cached_? mustEqual false - rftr.mandatoryObjectIdRefListField.findAll mustEqual objList - rftr.mandatoryObjectIdRefListField.objs mustEqual objList - rftr.mandatoryObjectIdRefListField.cached_? mustEqual true - } - } - - "use defaultValue when field is not present in the database" in { - S.initIfUninitted(session) { - val missingFieldDocId = ObjectId.get - - // create a Document with no fields manually - val doc = new Document("_id", missingFieldDocId) - - FieldTypeTestRecord.useDatabase { db => - db.getCollection(FieldTypeTestRecord.collectionName, classOf[Document]).insertOne(doc) - } - - val recFromDb = FieldTypeTestRecord.find(missingFieldDocId) - - recFromDb must beLike { - case Full(r) => - r.mandatoryBooleanField.get must_== false - r.legacyOptionalBooleanField - r.optionalBooleanField.get must beEmpty - r.mandatoryCountryField.get must_== Countries.C1 - r.legacyOptionalCountryField.valueBox must beEmpty - r.optionalCountryField.get must beEmpty - r.mandatoryDecimalField.get must_== 0.00 - r.legacyOptionalDecimalField.valueBox must beEmpty - r.optionalDecimalField.get must beEmpty - r.mandatoryDoubleField.get must_== 0d - r.legacyOptionalDoubleField.valueBox must beEmpty - r.optionalDoubleField.get must beEmpty - r.mandatoryEmailField.get must_== "" - r.legacyOptionalEmailField.valueBox must beEmpty - r.optionalEmailField.get must beEmpty - r.mandatoryEnumField.get must_== MyTestEnum.ONE - r.legacyOptionalEnumField.valueBox must beEmpty - r.optionalEnumField.get must beEmpty - r.mandatoryIntField.get must_== 0 - r.legacyOptionalIntField.valueBox must beEmpty - r.optionalIntField.get must beEmpty - r.mandatoryLocaleField.get must_== Locale.getDefault.toString - r.legacyOptionalLocaleField.valueBox must beEmpty - r.optionalLocaleField.get must beEmpty - r.mandatoryLongField.get must_== 0L - r.legacyOptionalLongField.valueBox must beEmpty - r.optionalLongField.get must beEmpty - r.mandatoryPostalCodeField.get must_== "" - r.legacyOptionalPostalCodeField.valueBox must beEmpty - r.optionalPostalCodeField.get must beEmpty - r.mandatoryStringField.get must_== "" - r.legacyOptionalStringField.valueBox must beEmpty - r.optionalStringField.get must beEmpty - r.mandatoryTextareaField.get must_== "" - r.legacyOptionalTextareaField.valueBox must beEmpty - r.optionalTextareaField.get must beEmpty - // r.mandatoryTimeZoneField.get 
must_== "America/Chicago" - r.legacyOptionalTimeZoneField.valueBox must beEmpty - r.optionalTimeZoneField.get must beEmpty - } - } - } - - "reset dirty flags on save" in { - val fttr = FieldTypeTestRecord.createRecord.save() - fttr.mandatoryDecimalField(BigDecimal("3.14")) - fttr.dirty_? must_== true - fttr.save() - fttr.dirty_? must_== false - } - - "update dirty fields for a FieldTypeTestRecord" in { - S.initIfUninitted(session) { - val fttr1 = FieldTypeTestRecord.createRecord - .legacyOptionalStringField("legacy optional string") - .optionalStringField("optional string") - .save() - - fttr1.mandatoryBooleanField(true) - fttr1.mandatoryBooleanField.dirty_? must_== true - - fttr1.mandatoryDecimalField(BigDecimal("3.14")) - fttr1.mandatoryDecimalField.dirty_? must_== true - - fttr1.mandatoryDoubleField(1999) - fttr1.mandatoryDoubleField.dirty_? must_== true - - fttr1.mandatoryEnumField(MyTestEnum.TWO) - fttr1.mandatoryEnumField.dirty_? must_== true - - fttr1.mandatoryIntField(99) - fttr1.mandatoryIntField.dirty_? must_== true - - fttr1.mandatoryLongField(100L) - fttr1.mandatoryLongField.dirty_? must_== true - - fttr1.mandatoryStringField("string") - fttr1.mandatoryStringField.dirty_? must_== true - - fttr1.optionalStringField(Empty) - fttr1.optionalStringField.dirty_? must_== true - - fttr1.legacyOptionalStringField(Empty) - fttr1.legacyOptionalStringField.dirty_? must_== true - - fttr1.dirty_? must_== true - fttr1.update - fttr1.dirty_? must_== false - - val fromDb = FieldTypeTestRecord.find(fttr1.id.get) - fromDb.isDefined must_== true - fromDb foreach { rec => - rec must_== fttr1 - rec.dirty_? must_== false - } - - val fttr2 = FieldTypeTestRecord.createRecord.save() - - fttr2.legacyOptionalStringField("legacy optional string") - fttr2.legacyOptionalStringField.dirty_? must_== true - - fttr2.optionalStringField("optional string") - fttr2.optionalStringField.dirty_? must_== true - - fttr2.dirty_? must_== true - fttr2.update - fttr2.dirty_? must_== false - - val fromDb2 = FieldTypeTestRecord.find(fttr2.id.get) - fromDb2 must beLike { - case Full(rec) => - rec must_== fttr2 - rec.dirty_? must_== false - } - } - } - - "update dirty fields for a MongoFieldTypeTestRecord" in { - val mfttr = MongoFieldTypeTestRecord.createRecord - .legacyOptionalDateField(new Date) - .legacyOptionalObjectIdField(ObjectId.get) - .save() - - Thread.sleep(100) // sleep so dates will be different - - mfttr.mandatoryDateField(new Date) - mfttr.mandatoryDateField.dirty_? must_== true - - mfttr.mandatoryJsonObjectField(TypeTestJsonObject(1, "jsonobj1", Map("x" -> "1"))) - mfttr.mandatoryJsonObjectField.dirty_? must_== true - - mfttr.mandatoryObjectIdField(ObjectId.get) - mfttr.mandatoryObjectIdField.dirty_? must_== true - - mfttr.mandatoryUUIDField(UUID.randomUUID) - mfttr.mandatoryUUIDField.dirty_? must_== true - - mfttr.legacyOptionalDateField(Empty) - mfttr.legacyOptionalDateField.dirty_? must_== true - - mfttr.legacyOptionalObjectIdField(Empty) - mfttr.legacyOptionalObjectIdField.dirty_? must_== true - - mfttr.dirty_? must_== true - mfttr.update - mfttr.dirty_? must_== false - - val fromDb = MongoFieldTypeTestRecord.find(mfttr.id.get) - fromDb.isDefined must_== true - fromDb foreach { rec => - rec must_== mfttr - rec.dirty_? must_== false - } - - val mfttr2 = MongoFieldTypeTestRecord.createRecord.save() - - mfttr2.legacyOptionalDateField(new Date) - mfttr2.legacyOptionalDateField.dirty_? must_== true - - mfttr2.optionalDateField(new Date) - mfttr2.optionalDateField.dirty_? 
must_== true - - mfttr2.optionalObjectIdField(ObjectId.get) - mfttr2.optionalObjectIdField.dirty_? must_== true - - mfttr2.optionalUUIDField(UUID.randomUUID()) - mfttr2.optionalUUIDField.dirty_? must_== true - - mfttr2.dirty_? must_== true - mfttr2.update - mfttr2.dirty_? must_== false - - val fromDb2 = MongoFieldTypeTestRecord.find(mfttr2.id.get) - fromDb2 must beLike { - case Full(rec) => - rec must_== mfttr2 - rec.dirty_? must_== false - } - } - - "update dirty fields for a PatternFieldTestRecord" in { - val pftrd = PatternFieldTestRecord.createRecord.save() - - pftrd.mandatoryPatternField(Pattern.compile("^Mon", Pattern.CASE_INSENSITIVE)) - pftrd.mandatoryPatternField.dirty_? must_== true - - pftrd.dirty_? must_== true - pftrd.update - pftrd.dirty_? must_== false - - val fromDb = PatternFieldTestRecord.find(pftrd.id.get) - fromDb must beLike { - case Full(rec) => - rec must_== pftrd - rec.dirty_? must_== false - } - } - - "update dirty fields for a ListTestRecord" in { - val ltr1 = ListTestRecord.createRecord.save() - - ltr1.mandatoryStringListField(List("xyz", "lmn", "opqr")) - ltr1.mandatoryStringListField.dirty_? must_== true - - ltr1.mandatoryIntListField(List(7, 8, 9)) - ltr1.mandatoryIntListField.dirty_? must_== true - - ltr1.mandatoryJsonObjectListField(List(TypeTestJsonObject(1, "jsonobj1", Map("x" -> "1")), TypeTestJsonObject(2, "jsonobj2", Map("x" -> "2")))) - ltr1.mandatoryJsonObjectListField.dirty_? must_== true - - ltr1.caseClassListField(List(CaseClassTestObject(2,"string",MyTestEnum.TWO))) - ltr1.caseClassListField.dirty_? must_== true - - ltr1.dirty_? must_== true - ltr1.update - ltr1.dirty_? must_== false - - val fromDb = ListTestRecord.find(ltr1.id.get) - fromDb must beLike { - case Full(rec) => - rec must_== ltr1 - rec.dirty_? must_== false - } - } - - "update dirty fields for a MapTestRecord" in { - val mtr = MapTestRecord.createRecord.save() - - mtr.mandatoryStringMapField(Map("a" -> "abc", "b" -> "def", "c" -> "ghi")) - mtr.mandatoryStringMapField.dirty_? must_== true - - mtr.mandatoryIntMapField(Map("a" -> 4, "b" -> 5, "c" -> 6)) - mtr.mandatoryIntMapField.dirty_? must_== true - - mtr.dirty_? must_== true - mtr.update - mtr.dirty_? must_== false - - val fromDb = MapTestRecord.find(mtr.id.get) - fromDb must beLike { - case Full(rec) => - rec must_== mtr - rec.dirty_? must_== false - } - } - - "update dirty fields for a SubRecordTestRecord" in { - val ssr1 = SubSubRecord.createRecord.name("SubSubRecord1") - val ssr2 = SubSubRecord.createRecord.name("SubSubRecord2") - - val sr1 = SubRecord.createRecord - .name("SubRecord1") - .subsub(ssr1) - .subsublist(ssr1 :: ssr2 :: Nil) - .slist("s1" :: "s2" :: Nil) - .smap(Map("a" -> "s1", "b" -> "s2")) - .pattern(Pattern.compile("^Mon", Pattern.CASE_INSENSITIVE)) - - val srtr = SubRecordTestRecord.createRecord - .mandatoryBsonRecordField(sr1) - .save() - - val sr2 = sr1.copy.name("SubRecord2") - - srtr.mandatoryBsonRecordField(sr2) - srtr.mandatoryBsonRecordField.dirty_? must_== true - - srtr.mandatoryBsonRecordListField(List(sr1,sr2)) - srtr.mandatoryBsonRecordListField.dirty_? must_== true - - srtr.dirty_? must_== true - srtr.update - srtr.dirty_? must_== false - - val fromDb = SubRecordTestRecord.find(srtr.id.get) - fromDb must beLike { - case Full(rec) => - rec must_== srtr - rec.dirty_? 
must_== false - } - } - - "support custom field name" in { - RecordRules.fieldName.doWith((_, name) => snakify(name)) { - val rec = CustomFieldName.createRecord - rec.customField.name must_== "custom_field" - rec.save() - - CustomFieldName.find(rec.id.get) must_== Full(rec) - } - } - } -} diff --git a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/MongoTestKit.scala b/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/MongoTestKit.scala deleted file mode 100644 index 1d97ccb461..0000000000 --- a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/MongoTestKit.scala +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Copyright 2010-2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb -package record - -import util.{ConnectionIdentifier, DefaultConnectionIdentifier, Props} - - -import org.specs2.mutable.Specification -import org.specs2.specification.BeforeAfterEach - -import com.mongodb._ - -// The sole mongo object for testing -object TestMongo { - val mongo = { - val uri = Props.get("mongo.test.uri", "127.0.0.1:27017") - val opts = MongoClientOptions.builder.serverSelectionTimeout(2000) - new MongoClient(new MongoClientURI(s"mongodb://$uri", opts)) - } - - lazy val isMongoRunning: Boolean = - try { - // this will throw an exception if it can't connect to the db - mongo.listDatabases - true - } catch { - case _: MongoTimeoutException => - false - } -} - -trait MongoTestKit extends Specification with BeforeAfterEach { - sequential - - def dbName = "lift_record_"+this.getClass.getName - .replace("$", "") - .replace("net.liftweb.mongodb.record.", "") - .replace(".", "_") - .toLowerCase - - // If you need more than one db, override this - def dbs: List[(ConnectionIdentifier, String)] = - (DefaultConnectionIdentifier, dbName) :: Nil - - def debug = false - - def before = { - // define the dbs - dbs.foreach { case (id, db) => - MongoDB.defineDb(id, TestMongo.mongo, db) - } - } - - def checkMongoIsRunning = { - TestMongo.isMongoRunning must beEqualTo(true).orSkip - } - - def after = { - if (!debug && TestMongo.isMongoRunning) { - // drop the databases - dbs.foreach { case (id, _) => - MongoDB.useDatabase(id) { db => db.drop() } - } - } - - // clear the mongo instances - dbs.foreach { case (id, _) => - MongoDB.remove(id) - } - } -} - diff --git a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/PlainRecordSpec.scala b/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/PlainRecordSpec.scala deleted file mode 100644 index 0f4ef431a9..0000000000 --- a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/PlainRecordSpec.scala +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright 2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb -package record - -import org.specs2.mutable.Specification - -import org.bson._ -import org.bson.codecs.{DecoderContext, EncoderContext} - -import com.mongodb.client.MongoCollection -import com.mongodb.client.model.Filters.{eq => eqs} - -import net.liftweb.mongodb.record.testmodels._ -import net.liftweb.record._ - -class PlainRecordSpec extends Specification with MongoTestKit { - "PlainRecordSpec Specification".title - - /** - * Encodes then decodes a Record instance to/from Bson and asserts they are equal. - */ - private def testEncodeDecode(record: RecordTest) = { - val bson = new BsonDocument() - val writer = new BsonDocumentWriter(bson) - - RecordTestStore.codec.encode(writer, record, EncoderContext.builder.build) - - val reader = new BsonDocumentReader(bson) - val result: RecordTest = RecordTestStore.codec.decode(reader, DecoderContext.builder.build) - - result must_== record - } - - override def before = { - super.before - checkMongoIsRunning - } - - "Record" should { - "encode and decode properly" in { - val rec0 = RecordTest.createRecord - val rec1 = RecordTest.createRecord.stringfield("hello") - - testEncodeDecode(rec0) - testEncodeDecode(rec1) - } - - "save and find properly" in { - val rec0 = RecordTest.createRecord - val rec1 = RecordTest.createRecord.stringfield("hello") - - RecordTestStore.collection.insertOne(rec0) - RecordTestStore.collection.insertOne(rec1) - - val rec0fromDb = RecordTestStore.collection.find(eqs("_id", rec0.id.get)).first() - val rec1fromDb = RecordTestStore.collection.find(eqs("_id", rec1.id.get)).first() - - rec0fromDb must_== rec0 - rec1fromDb must_== rec1 - - MongoConfig.main.drop() - - success - } - } -} diff --git a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/QueryExamplesSpec.scala b/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/QueryExamplesSpec.scala deleted file mode 100644 index 92a773b588..0000000000 --- a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/QueryExamplesSpec.scala +++ /dev/null @@ -1,163 +0,0 @@ -/* - * Copyright 2011-2015 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package mongodb -package record - -import BsonDSL._ -import json.JObject -import field._ -import net.liftweb.record.field._ - -import java.util.{Calendar, Date, UUID} -import java.util.regex.Pattern - -import org.bson.types.ObjectId -import org.specs2.mutable.Specification - -package queryexamplesfixtures { - class Person private () extends MongoRecord[Person] with ObjectIdPk[Person] { - def meta = Person - - object name extends StringField(this, 100) - object birthDate extends DateField(this) - object childId extends UUIDField(this) - object petId extends ObjectIdField(this) { - override def optional_? = true - } - } - object Person extends Person with MongoMetaRecord[Person] { - // index name - createIndex(("name" -> 1)) - - // implicit formats already exists - def findAllBornAfter(dt: Date) = findAll(("birthDate" -> ("$gt" -> dt))) - } -} - -class QueryExamplesSpec extends Specification with MongoTestKit { - "QueryExamples Specification".title - - import queryexamplesfixtures._ - - "Query examples" in { - checkMongoIsRunning - - val fredsBirthDate = Calendar.getInstance - fredsBirthDate.set(1970, 1, 1, 19, 0) - - val wilmasBirthDate = Calendar.getInstance - wilmasBirthDate.set(1971, 8, 30, 19, 0) - - val barneysBirthDate = Calendar.getInstance - barneysBirthDate.set(1972, 8, 30, 19, 0) - - val bettysBirthDate = Calendar.getInstance - bettysBirthDate.set(1973, 8, 30, 19, 0) - - val dinoId = ObjectId.get - val pebblesId = UUID.randomUUID - val bammbammId = UUID.randomUUID - - val fred = Person.createRecord - .name("Flinstone, Fred") - .birthDate(fredsBirthDate.getTime) - .childId(pebblesId) - .petId(dinoId) - .save() - val wilma = Person.createRecord - .name("Flinstone, Wilma") - .birthDate(wilmasBirthDate.getTime) - .childId(pebblesId) - .petId(dinoId) - .save() - val barney = Person.createRecord - .name("Rubble, Barney") - .birthDate(barneysBirthDate.getTime) - .childId(bammbammId) - .save() - val betty = Person.createRecord - .name("Rubble, Betty") - .birthDate(bettysBirthDate.getTime) - .childId(bammbammId) - .save() - - val flinstonesIds = List(fred.id.get, wilma.id.get) - val rubblesIds = List(barney.id.get, betty.id.get) - - // query for Bamm-Bamm's parents (UUID) - val pebblesParents = Person.findAll(("childId" -> bammbammId)) - - pebblesParents.length must_== 2 - pebblesParents.map(_.id.get).filterNot(rubblesIds.contains(_)) must_== List() - - // query for Bamm-Bamm's and Pebbles' parents using List[UUID] - val pebblesAndBammBammsParents = Person.findAll(("childId" -> ("$in" -> List(pebblesId, bammbammId)))) - - pebblesAndBammBammsParents.length must_== 4 - - // query for Dino's owners (ObjectId) - val dinosOwners = Person.findAll(("petId" -> dinoId)) - - dinosOwners.length must_== 2 - dinosOwners.map(_.id.get).filterNot(flinstonesIds.contains(_)) must_== List() - - // query for the Rubbles using a Regex - val rubbles = Person.findAll(("name" -> "^Rubble".r)) - - rubbles.length must_== 2 - rubbles.map(_.id.get).filterNot(rubblesIds.contains(_)) must_== List() - - // query for the Flinstones using a Pattern - val flinstones = Person.findAll(("name" -> Pattern.compile("^flinst", Pattern.CASE_INSENSITIVE))) - - flinstones.length must_== 2 - flinstones.map(_.id.get).filterNot(flinstonesIds.contains(_)) must_== List() - - // query for the Flinstones using a List[ObjectId] - val flinstones2 = Person.findAll(("_id" -> ("$in" -> flinstonesIds))) - - flinstones2.length must_== 2 - flinstones2.map(_.id.get).filterNot(flinstonesIds.contains(_)) must_== List() - 
- // query using Dates - implicit val formats = Person.formats // this is needed for Dates - val qryDate = Calendar.getInstance - qryDate.set(1971, 1, 1, 19, 0) - val people = Person.findAll(("birthDate" -> ("$gt" -> qryDate.getTime))) - - people.length must_== 3 - people.map(_.id.get).filterNot(List(wilma.id.get, barney.id.get, betty.id.get).contains(_)) must_== List() - - // you do not need to define the implicit formats val if you write your query in the MongoMetaRecord object. - val people2 = Person.findAllBornAfter(qryDate.getTime) - - people2.length must_== 3 - people2.map(_.id.get).filterNot(List(wilma.id.get, barney.id.get, betty.id.get).contains(_)) must_== List() - - // query with Sort - val people3 = Person.findAll(JObject(Nil), ("birthDate" -> -1)) - - people3.length must_== 4 - people3.map(_.id.get) must_== List(betty.id.get, barney.id.get, wilma.id.get, fred.id.get) - - val people4 = Person.findAll(JObject(Nil), ("birthDate" -> 1)) - - people4.length must_== 4 - people4.map(_.id.get) must_== List(fred.id.get, wilma.id.get, barney.id.get, betty.id.get) - } -} diff --git a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/codecs/RecordCodecSpec.scala b/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/codecs/RecordCodecSpec.scala deleted file mode 100644 index 61cb75efe4..0000000000 --- a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/codecs/RecordCodecSpec.scala +++ /dev/null @@ -1,333 +0,0 @@ -/* - * Copyright 2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb -package record -package codecs - -import java.util.{Calendar, Date, UUID} -import java.util.regex.Pattern - -import org.bson.types.ObjectId -import org.specs2.mutable.Specification - -import net.liftweb.common._ -import net.liftweb.json._ -import net.liftweb.json.JsonDSL._ -import net.liftweb.mongodb.record.fixtures._ -import net.liftweb.mongodb.record.testmodels._ -import net.liftweb.record.{MetaRecord, Record} -import net.liftweb.util.Helpers._ - -import com.mongodb._ -import org.bson._ -import org.bson.codecs.{BsonTypeClassMap, DecoderContext, EncoderContext} - -import org.joda.time.DateTime - -/** - * Systems under specification for RecordCodec. - */ -object RecordCodecSpec extends Specification { - "RecordCodec Specification".title - - /** - * Encodes then decodes a BsonRecord instance to/from Bson and asserts they are equal. 
- */ - private def testEncodeDecode[T <: BsonRecord[T]](metaRecord: BsonMetaRecord[T], record: T) = { - val bson = new BsonDocument() - val writer = new BsonDocumentWriter(bson) - - metaRecord.codec.encode(writer, record, EncoderContext.builder.build) - - val reader = new BsonDocumentReader(bson) - val result: T = metaRecord.codec.decode(reader, DecoderContext.builder.build) - - result must_== record - } - - "RecordCodec" should { - - "support Binary fields" in { - val binData: Array[Byte] = Array(18, 19, 20) - val rec0 = BinaryTest.createRecord - val rec1 = BinaryTest.createRecord.binaryfield(binData) - - testEncodeDecode(BinaryTest, rec0) - testEncodeDecode(BinaryTest, rec1) - } - - "support Boolean fields" in { - val rec0 = BooleanTest.createRecord - val rec1 = BooleanTest.createRecord.booleanfield(true) - val rec2 = BooleanTest.createRecord.booleanfield(false) - - testEncodeDecode(BooleanTest, rec0) - testEncodeDecode(BooleanTest, rec1) - testEncodeDecode(BooleanTest, rec2) - } - - "support Calendar fields (DateTimeField)" in { - val rec0 = CalendarTest.createRecord - val rec1 = CalendarTest.createRecord.calendarfield(Calendar.getInstance) - - testEncodeDecode(CalendarTest, rec0) - testEncodeDecode(CalendarTest, rec1) - } - - "support Case Class fields" in { - val rec0 = CaseClassTest.createRecord - val tcc = TestCaseClass("hi", 9) - val rec1 = CaseClassTest.createRecord.caseclassfield(tcc) - - testEncodeDecode(CaseClassTest, rec0) - testEncodeDecode(CaseClassTest, rec1) - } - - "support Date fields" in { - val rec0 = DateTest.createRecord - val rec1 = DateTest.createRecord.datefield(new Date) - - testEncodeDecode(DateTest, rec0) - testEncodeDecode(DateTest, rec1) - } - - "support Decimal fields (legacy)" in { - val rec0 = LegacyDecimalTest.createRecord - val rec1 = LegacyDecimalTest.createRecord.decimalfield(BigDecimal("1234.25")) - - testEncodeDecode(LegacyDecimalTest, rec0) - testEncodeDecode(LegacyDecimalTest, rec1) - } - - "support Decimal fields" in { - val rec0 = DecimalTest.createRecord - val rec1 = DecimalTest.createRecord.decimalfield(BigDecimal("1234.25")) - - testEncodeDecode(DecimalTest, rec0) - testEncodeDecode(DecimalTest, rec1) - } - - "support Double fields" in { - val rec0 = DoubleTest.createRecord - val rec1 = DoubleTest.createRecord.doublefield(1234) - - testEncodeDecode(DoubleTest, rec0) - testEncodeDecode(DoubleTest, rec1) - } - - "support Enum fields" in { - val rec0 = EnumTest.createRecord - val rec1 = EnumTest.createRecord.enumfield(TestEnum.Three) - - testEncodeDecode(EnumTest, rec0) - testEncodeDecode(EnumTest, rec1) - } - - "support Int fields" in { - val rec0 = IntTest.createRecord - val rec1 = IntTest.createRecord.intfield(1234) - - testEncodeDecode(IntTest, rec0) - testEncodeDecode(IntTest, rec1) - } - - "support Long fields" in { - val rec0 = LongTest.createRecord - val rec1 = LongTest.createRecord.longfield(1234L) - - testEncodeDecode(LongTest, rec0) - testEncodeDecode(LongTest, rec1) - } - - "support String fields" in { - val rec0 = StringTest.createRecord - val rec1 = StringTest.createRecord.stringfield("abc") - val rec2 = StringTest.createRecord.optstringfield("def") - val rec3 = StringTest.createRecord.stringfield("abc").optstringfield("def") - val rec4 = StringTest.createRecord.stringfieldopt("abc") - - testEncodeDecode(StringTest, rec0) - testEncodeDecode(StringTest, rec1) - testEncodeDecode(StringTest, rec2) - testEncodeDecode(StringTest, rec3) - testEncodeDecode(StringTest, rec4) - } - - // joda - "support Joda DateTime fields (JodaTimeField)" 
in { - val rec0 = JodaTimeTest.createRecord - val rec1 = JodaTimeTest.createRecord.jodatimefield(DateTime.now) - - testEncodeDecode(JodaTimeTest, rec0) - testEncodeDecode(JodaTimeTest, rec1) - } - - // mongodb.record.field - "support BsonRecord fields" in { - val sub = TestSubRecord.createRecord.name("mrx") - - val rec0 = BsonRecordTest.createRecord - val rec1 = BsonRecordTest.createRecord.bsonrecordfield(sub) - val rec2 = BsonRecordTest.createRecord.bsonrecordlistfield(List(sub)) - val rec3 = BsonRecordTest.createRecord.bsonrecordmapfield(Map("a" -> sub)) - - testEncodeDecode(BsonRecordTest, rec0) - testEncodeDecode(BsonRecordTest, rec1) - testEncodeDecode(BsonRecordTest, rec2) - testEncodeDecode(BsonRecordTest, rec3) - - val mrec0 = BsonRecordMapTest.createRecord - val mrec1 = BsonRecordMapTest.createRecord.bsonrecordmapfield(Map("a" -> sub)) - - testEncodeDecode(BsonRecordMapTest, mrec0) - testEncodeDecode(BsonRecordMapTest, mrec1) - - val lrec0 = BsonRecordListTest.createRecord - val lrec1 = BsonRecordListTest.createRecord.bsonrecordlistfield(List(sub)) - - testEncodeDecode(BsonRecordListTest, lrec0) - testEncodeDecode(BsonRecordListTest, lrec1) - } - - "support List fields" in { - val binData: Array[Byte] = Array(11, 19, 20) - - val sub0 = FieldTypeTestRecord.createRecord - val sub1 = FieldTypeTestRecord.createRecord.mandatoryStringField("mrx") - - val rec0 = ListTestRecord.createRecord - val rec1 = ListTestRecord.createRecord.mandatoryStringListField(List("a", "b")) - val rec2 = ListTestRecord.createRecord.mandatoryIntListField(List(1,12)) - val rec3 = ListTestRecord.createRecord.mandatoryMongoRefListField(List(sub1.id.get)) - val rec4 = ListTestRecord.createRecord.mandatoryMongoRefListField(List(sub0.id.get, sub1.id.get)) - val rec5 = ListTestRecord.createRecord.mandatoryJsonObjectListField(List(TypeTestJsonObject(1, "jsonobj1", Map("x" -> "1")), TypeTestJsonObject(2, "jsonobj2", Map("x" -> "2")))) - val rec6 = ListTestRecord.createRecord.caseClassListField(List(CaseClassTestObject(12, "twelve", MyTestEnum.THREE))) - - testEncodeDecode(ListTestRecord, rec0) - testEncodeDecode(ListTestRecord, rec1) - testEncodeDecode(ListTestRecord, rec2) - testEncodeDecode(ListTestRecord, rec3) - testEncodeDecode(ListTestRecord, rec4) - testEncodeDecode(ListTestRecord, rec5) - testEncodeDecode(ListTestRecord, rec6) - - val mrec0 = MongoListTestRecord.createRecord - val mrec1 = MongoListTestRecord.createRecord.patternListField(List(Pattern.compile("^Mongo"))) - val mrec2 = MongoListTestRecord.createRecord.dateListField(List(new Date)) - val mrec3 = MongoListTestRecord.createRecord.uuidListField(List(UUID.randomUUID)) - - testEncodeDecode(MongoListTestRecord, mrec0) - testEncodeDecode(MongoListTestRecord, mrec1) - testEncodeDecode(MongoListTestRecord, mrec2) - testEncodeDecode(MongoListTestRecord, mrec3) - - val brec0 = BasicListTestRecord.createRecord - val brec1 = BasicListTestRecord.createRecord.binaryListField(List(binData)) - val brec2 = BasicListTestRecord.createRecord.booleanListField(List(false)) - val brec3 = BasicListTestRecord.createRecord.decimalListField(List(BigDecimal(27.33))) - val brec4 = BasicListTestRecord.createRecord.doubleListField(List(12.34)) - val brec5 = BasicListTestRecord.createRecord.longListField(List(876000L)) - val brec6 = BasicListTestRecord.createRecord.stringListListField(List(List("abc"))) - val brec7 = BasicListTestRecord.createRecord.stringMapListField(List(Map("key" -> "abc"))) - val brec8 = BasicListTestRecord.createRecord.bigIntListField(List(BigInt(2000L))) - 
- testEncodeDecode(BasicListTestRecord, brec0) - testEncodeDecode(BasicListTestRecord, brec1) - testEncodeDecode(BasicListTestRecord, brec2) - testEncodeDecode(BasicListTestRecord, brec3) - testEncodeDecode(BasicListTestRecord, brec4) - testEncodeDecode(BasicListTestRecord, brec5) - testEncodeDecode(BasicListTestRecord, brec6) - testEncodeDecode(BasicListTestRecord, brec7) - testEncodeDecode(BasicListTestRecord, brec8) - - val jrec0 = MongoJodaListTestRecord.createRecord - val jrec1 = MongoJodaListTestRecord.createRecord.dateTimeListField(List(DateTime.now)) - - testEncodeDecode(MongoJodaListTestRecord, jrec0) - testEncodeDecode(MongoJodaListTestRecord, jrec1) - } - - "support Map fields" in { - val binData: Array[Byte] = Array(12, 19, 20) - - val sub0 = FieldTypeTestRecord.createRecord - val sub1 = FieldTypeTestRecord.createRecord.mandatoryStringField("mrx") - - val rec0 = MapTest.createRecord - val rec1 = MapTest.createRecord.mandatoryStringMapField(Map("a" -> "b")) - val rec2 = MapTest.createRecord.mandatoryIntMapField(Map("one" -> 1, "twelve" -> 12)) - val rec3 = MapTest.createRecord.binaryMapField(Map("bin" -> binData)) - val rec4 = MapTest.createRecord.booleanMapField(Map("bool" -> false)) - val rec5 = MapTest.createRecord.dateMapField(Map("when" -> new Date)) - val rec6 = MapTest.createRecord.decimalMapField(Map("bigd" -> BigDecimal(1.23))) - val rec7 = MapTest.createRecord.doubleMapField(Map("double" -> 1234.0)) - val rec8 = MapTest.createRecord.longMapField(Map("long" -> 1234L)) - val rec9 = MapTest.createRecord.patternMapField(Map("regex" -> Pattern.compile("^Mongo"))) - val rec10 = MapTest.createRecord.stringListMapField(Map("list" -> List("hello"))) - val rec11 = MapTest.createRecord.stringMapMapField(Map("map" -> Map("a" -> "hello"))) - val rec12 = MapTest.createRecord.uuidMapField(Map("id" -> UUID.randomUUID)) - - testEncodeDecode(MapTest, rec0) - testEncodeDecode(MapTest, rec1) - testEncodeDecode(MapTest, rec2) - testEncodeDecode(MapTest, rec3) - testEncodeDecode(MapTest, rec4) - testEncodeDecode(MapTest, rec5) - testEncodeDecode(MapTest, rec6) - testEncodeDecode(MapTest, rec7) - testEncodeDecode(MapTest, rec8) - testEncodeDecode(MapTest, rec9) - testEncodeDecode(MapTest, rec10) - testEncodeDecode(MapTest, rec11) - testEncodeDecode(MapTest, rec12) - - val jrec0 = JodaTimeMapTest.createRecord - val jrec1 = JodaTimeMapTest.createRecord.jodatimeMapField(Map("dt" -> DateTime.now)) - - testEncodeDecode(JodaTimeMapTest, jrec0) - testEncodeDecode(JodaTimeMapTest, jrec1) - } - - "support Pattern fields" in { - val rec0 = PatternTest.createRecord - val rec1 = PatternTest.createRecord.patternfield(Pattern.compile("^Mo", Pattern.CASE_INSENSITIVE)) - - testEncodeDecode(PatternTest, rec0) - testEncodeDecode(PatternTest, rec1) - } - - "support UUID fields" in { - val rec0 = UUIDTest.createRecord - val rec1 = UUIDTest.createRecord.uuidfield(UUID.randomUUID) - - testEncodeDecode(UUIDTest, rec0) - testEncodeDecode(UUIDTest, rec1) - } - - "support JObject fields" in { - val joftrFieldJObject: JObject = ("minutes" -> 59) - - val rec0 = JObjectFieldTestRecord.createRecord - val rec1 = JObjectFieldTestRecord.createRecord.mandatoryJObjectField(joftrFieldJObject) - - testEncodeDecode(JObjectFieldTestRecord, rec0) - testEncodeDecode(JObjectFieldTestRecord, rec1) - } - } -} diff --git a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/field/BsonRecordListFieldSpec.scala b/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/field/BsonRecordListFieldSpec.scala 
deleted file mode 100644 index 97fc3f0bc4..0000000000 --- a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/field/BsonRecordListFieldSpec.scala +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright 2010-2014 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb -package record -package field - -import org.specs2.mutable.Specification -import net.liftweb.common._ -import net.liftweb.record.field.StringField - -package bsonlistfieldspecs { - class BookShelf extends MongoRecord[BookShelf] with ObjectIdPk[BookShelf] { - def meta = BookShelf - - object books extends BsonRecordListField(this, Book) - } - object BookShelf extends BookShelf with MongoMetaRecord[BookShelf] { - override def collectionName = "bookshelf" - } - - class Book extends BsonRecord[Book] { - override def meta = Book - - object title extends StringField(this, 512) - } - object Book extends Book with BsonMetaRecord[Book] -} - -class BsonRecordListFieldSpec extends Specification { - "BsonRecordListField Specification".title - - import bsonlistfieldspecs._ - - "BsonRecordListFieldSpec" should { - - "fail validation if at least one of its elements fails validation" in { - val scalaBook = Book.createRecord.title("Programming in Scala") - val liftBook = Book.createRecord - liftBook.title.setBox(Failure("Bad format")) - val shelf = BookShelf.createRecord.books(scalaBook :: liftBook :: Nil) - - shelf.validate must have size(1) - } - - "pass validation if all of its elements pass validation" in { - val scalaBook = Book.createRecord.title("Programming in Scala") - val liftBook = Book.createRecord.title("Simply Lift") - val shelf = BookShelf.createRecord.books(scalaBook :: liftBook :: Nil) - - shelf.validate must be empty - } - - } -} diff --git a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/field/EnumFieldSpec.scala b/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/field/EnumFieldSpec.scala deleted file mode 100644 index 1045c27dd1..0000000000 --- a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/field/EnumFieldSpec.scala +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Copyright 2010-2014 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package mongodb -package record -package field - -import org.bson.types.ObjectId -import org.specs2.mutable.Specification - -import net.liftweb.common._ -import net.liftweb.json.ext.EnumSerializer -import net.liftweb.record.field.{EnumField, OptionalEnumField} -import net.liftweb.util.Helpers._ - -import com.mongodb._ - -package enumfieldspecs { - object WeekDay extends Enumeration { - type WeekDay = Value - val Mon, Tue, Wed, Thu, Fri, Sat, Sun = Value - } - - case class JsonObj(dow: WeekDay.WeekDay) extends JsonObject[JsonObj] { - def meta = JsonObj - } - object JsonObj extends JsonObjectMeta[JsonObj] - - class EnumRec extends MongoRecord[EnumRec] with ObjectIdPk[EnumRec] { - def meta = EnumRec - - object dow extends EnumField(this, WeekDay) - object dowOptional extends OptionalEnumField(this, WeekDay) - object jsonobj extends JsonObjectField[EnumRec, JsonObj](this, JsonObj) { - def defaultValue = JsonObj(WeekDay.Mon) - } - - override def equals(other: Any): Boolean = other match { - case that: EnumRec => - this.id.get == that.id.get && - this.dow.value == that.dow.value && - this.dowOptional.valueBox == that.dowOptional.valueBox && - this.jsonobj.value == that.jsonobj.value - case _ => false - } - } - object EnumRec extends EnumRec with MongoMetaRecord[EnumRec] { - override def collectionName = "enumrecs" - override def formats = super.formats + new EnumSerializer(WeekDay) - } -} - - -/** - * Systems under specification for EnumField. - */ -class EnumFieldSpec extends Specification with MongoTestKit { - "EnumField Specification".title - - import enumfieldspecs._ - - "EnumField" should { - - "work with default values" in { - checkMongoIsRunning - - val er = EnumRec.createRecord.save() - - val erFromDb = EnumRec.find(er.id.get) - erFromDb must beLike { - case Full(er2) => - er2 mustEqual er - er2.dow.value mustEqual WeekDay.Mon - er2.dowOptional.valueBox mustEqual Empty - er2.jsonobj.value mustEqual JsonObj(WeekDay.Mon) - } - } - - "work with set values" in { - checkMongoIsRunning - - val er = EnumRec.createRecord - .dow(WeekDay.Tue) - .jsonobj(JsonObj(WeekDay.Sun)) - .save() - - val erFromDb = EnumRec.find(er.id.get) - erFromDb must beLike { - case Full(er2) => - er2 mustEqual er - er2.dow.value mustEqual WeekDay.Tue - er2.jsonobj.value mustEqual JsonObj(WeekDay.Sun) - } - } - - "work with Empty optional values" in { - checkMongoIsRunning - - val er = EnumRec.createRecord - er.dowOptional.setBox(Empty) - er.save() - - val erFromDb = EnumRec.find(er.id.get) - erFromDb must beLike { - case Full(er2) => - er2 mustEqual er - er2.dowOptional.valueBox mustEqual Empty - } - } - - "work with Full optional values" in { - checkMongoIsRunning - - val er = EnumRec.createRecord - er.dowOptional.setBox(Full(WeekDay.Sat)) - er.save() - - val erFromDb = EnumRec.find(er.id.get) - erFromDb must beLike { - case Full(er2) => - er2 mustEqual er - er2.dowOptional.valueBox mustEqual Full(WeekDay.Sat) - } - } - } -} diff --git a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/field/EnumNameFieldSpec.scala b/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/field/EnumNameFieldSpec.scala deleted file mode 100644 index 02a1383217..0000000000 --- a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/field/EnumNameFieldSpec.scala +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Copyright 2010-2014 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in 
compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb -package record -package field - -import org.bson.types.ObjectId -import org.specs2.mutable.Specification - -import net.liftweb.common._ -import net.liftweb.json.ext.EnumNameSerializer -import net.liftweb.record.field.{EnumNameField, OptionalEnumNameField} -import net.liftweb.util.Helpers._ - -import com.mongodb._ - -package enumnamefieldspecs { - object WeekDay extends Enumeration { - type WeekDay = Value - val Mon, Tue, Wed, Thu, Fri, Sat, Sun = Value - } - - case class JsonObj(dow: WeekDay.WeekDay) extends JsonObject[JsonObj] { - def meta = JsonObj - } - object JsonObj extends JsonObjectMeta[JsonObj] - - class EnumNameRec extends MongoRecord[EnumNameRec] with ObjectIdPk[EnumNameRec] { - def meta = EnumNameRec - - object dow extends EnumNameField(this, WeekDay) - object dowOptional extends OptionalEnumNameField(this, WeekDay) - object jsonobj extends JsonObjectField[EnumNameRec, JsonObj](this, JsonObj) { - def defaultValue = JsonObj(WeekDay.Mon) - } - - override def equals(other: Any): Boolean = other match { - case that: EnumNameRec => - this.id.get == that.id.get && - this.dow.value == that.dow.value && - this.dowOptional.valueBox == that.dowOptional.valueBox && - this.jsonobj.value == that.jsonobj.value - case _ => false - } - } - object EnumNameRec extends EnumNameRec with MongoMetaRecord[EnumNameRec] { - override def collectionName = "enumnamerecs" - override def formats = super.formats + new EnumNameSerializer(WeekDay) - } -} - - -/** - * Systems under specification for EnumNameField. 
- */ -class EnumNameFieldSpec extends Specification with MongoTestKit { - "EnumNameField Specification".title - - import enumnamefieldspecs._ - - "EnumNameField" should { - - "work with default values" in { - checkMongoIsRunning - - val er = EnumNameRec.createRecord.save() - - val erFromDb = EnumNameRec.find(er.id.get) - erFromDb must beLike { - case Full(er2) => - er2 mustEqual er - er2.dow.value mustEqual WeekDay.Mon - er2.dowOptional.valueBox mustEqual Empty - er2.jsonobj.value mustEqual JsonObj(WeekDay.Mon) - } - } - - "work with set values" in { - checkMongoIsRunning - - val er = EnumNameRec.createRecord - .dow(WeekDay.Tue) - .jsonobj(JsonObj(WeekDay.Sun)) - .save() - - val erFromDb = EnumNameRec.find(er.id.get) - erFromDb must beLike { - case Full(er2) => - er2 mustEqual er - er2.dow.value mustEqual WeekDay.Tue - er2.jsonobj.value mustEqual JsonObj(WeekDay.Sun) - } - } - - "work with Empty optional values" in { - checkMongoIsRunning - - val er = EnumNameRec.createRecord - er.dowOptional.setBox(Empty) - er.save() - - val erFromDb = EnumNameRec.find(er.id.get) - erFromDb must beLike { - case Full(er2) => - er2 mustEqual er - er2.dowOptional.valueBox mustEqual Empty - } - } - - "work with Full optional values" in { - checkMongoIsRunning - - val er = EnumNameRec.createRecord - er.dowOptional.setBox(Full(WeekDay.Sat)) - er.save() - - val erFromDb = EnumNameRec.find(er.id.get) - erFromDb must beLike { - case Full(er2) => - er2 mustEqual er - er2.dowOptional.valueBox mustEqual Full(WeekDay.Sat) - } - } - } -} diff --git a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/BinaryTest.scala b/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/BinaryTest.scala deleted file mode 100644 index ec067075ae..0000000000 --- a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/BinaryTest.scala +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright 2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package mongodb -package record -package testmodels - -import net.liftweb.common._ -import net.liftweb.mongodb.record.field._ -import net.liftweb.record.field.BinaryField - -import com.mongodb._ - -class BinaryTest private () extends MongoRecord[BinaryTest] with ObjectIdPk[BinaryTest] { - - def meta = BinaryTest - - object binaryfield extends BinaryField(this) {} -} - -object BinaryTest extends BinaryTest with MongoMetaRecord[BinaryTest] diff --git a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/BooleanTest.scala b/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/BooleanTest.scala deleted file mode 100644 index b49fa9b6a8..0000000000 --- a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/BooleanTest.scala +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright 2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb -package record -package testmodels - -import net.liftweb.common._ -import net.liftweb.mongodb.record.field._ -import net.liftweb.record.field.BooleanField - -import com.mongodb._ - -class BooleanTest private () extends MongoRecord[BooleanTest] with ObjectIdPk[BooleanTest] { - - def meta = BooleanTest - - object booleanfield extends BooleanField(this) -} - -object BooleanTest extends BooleanTest with MongoMetaRecord[BooleanTest] diff --git a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/BsonRecordTest.scala b/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/BsonRecordTest.scala deleted file mode 100644 index 2b1d0904d2..0000000000 --- a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/BsonRecordTest.scala +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Copyright 2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package mongodb -package record -package testmodels - -import net.liftweb.common._ -import net.liftweb.mongodb.record.field._ -import net.liftweb.record.field.StringField - -import com.mongodb._ - -import org.bson.codecs.Codec -import org.bson.codecs.configuration.CodecRegistries - -class TestSubRecord private () extends BsonRecord[TestSubRecord] { - def meta = TestSubRecord - - object name extends StringField(this, 12) -} -object TestSubRecord extends TestSubRecord with BsonMetaRecord[TestSubRecord] { - override def formats = allFormats -} - - -class BsonRecordTest private () extends MongoRecord[BsonRecordTest] with ObjectIdPk[BsonRecordTest] { - - def meta = BsonRecordTest - - object bsonrecordfield extends BsonRecordField(this, TestSubRecord) - object bsonrecordlistfield extends BsonRecordListField(this, TestSubRecord) - object bsonrecordmapfield extends BsonRecordMapField(this, TestSubRecord) -} - -object BsonRecordTest extends BsonRecordTest with MongoMetaRecord[BsonRecordTest] - -/* - * SubRecord fields - */ -class SubRecord private () extends BsonRecord[SubRecord] { - def meta = SubRecord - - object name extends StringField(this, 12) - object subsub extends BsonRecordField(this, SubSubRecord) - object subsublist extends BsonRecordListField(this, SubSubRecord) - object when extends DateField(this) - object slist extends MongoListField[SubRecord, String](this) - object smap extends MongoMapField[SubRecord, String](this) - object oid extends ObjectIdField(this) - object pattern extends PatternField(this) - object uuid extends UUIDField(this) -} -object SubRecord extends SubRecord with BsonMetaRecord[SubRecord] { - override def formats = allFormats -} - -class SubSubRecord private () extends BsonRecord[SubSubRecord] { - def meta = SubSubRecord - - object name extends StringField(this, 12) -} -object SubSubRecord extends SubSubRecord with BsonMetaRecord[SubSubRecord] { - override def formats = allFormats -} - -class SubRecordTestRecord private () extends MongoRecord[SubRecordTestRecord] with ObjectIdPk[SubRecordTestRecord] { - def meta = SubRecordTestRecord - - object mandatoryBsonRecordField extends BsonRecordField(this, SubRecord) - object optioalBsonRecordField extends OptionalBsonRecordField(this, SubRecord) - object legacyOptionalBsonRecordField extends BsonRecordField(this, SubRecord) { - override def optional_? = true - } - - object mandatoryBsonRecordListField extends BsonRecordListField(this, SubRecord) - object legacyOptionalBsonRecordListField extends BsonRecordListField(this, SubRecord) { - override def optional_? 
= true - } -} -object SubRecordTestRecord extends SubRecordTestRecord with MongoMetaRecord[SubRecordTestRecord] { - override def formats = allFormats -} - -class BsonRecordMapTest private () extends MongoRecord[BsonRecordMapTest] with ObjectIdPk[BsonRecordMapTest] { - - def meta = BsonRecordMapTest - - object bsonrecordmapfield extends BsonRecordMapField(this, TestSubRecord) -} - -object BsonRecordMapTest extends BsonRecordMapTest with MongoMetaRecord[BsonRecordMapTest] - -class BsonRecordListTest private () extends MongoRecord[BsonRecordListTest] with ObjectIdPk[BsonRecordListTest] { - - def meta = BsonRecordListTest - - object bsonrecordlistfield extends BsonRecordListField(this, TestSubRecord) -} - -object BsonRecordListTest extends BsonRecordListTest with MongoMetaRecord[BsonRecordListTest] diff --git a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/CalendarTest.scala b/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/CalendarTest.scala deleted file mode 100644 index e954fc2591..0000000000 --- a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/CalendarTest.scala +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright 2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb -package record -package testmodels - -import java.util.{Date, GregorianCalendar} - -import net.liftweb.common._ -import net.liftweb.mongodb.codecs.{BsonTypeClassMap, CalendarCodec} -import net.liftweb.mongodb.record.field._ -import net.liftweb.record.field.DateTimeField - -import org.bson.BsonType -import org.bson.codecs.configuration.CodecRegistries - -import com.mongodb._ - -class CalendarTest private () extends MongoRecord[CalendarTest] with ObjectIdPk[CalendarTest] { - - def meta = CalendarTest - - object calendarfield extends DateTimeField(this) -} - -object CalendarTest extends CalendarTest with MongoMetaRecord[CalendarTest] { - - override def codecRegistry = CodecRegistries.fromRegistries( - CodecRegistries.fromCodecs(CalendarCodec()), - super.codecRegistry - ) - - override def bsonTypeClassMap = BsonTypeClassMap((BsonType.DATE_TIME -> classOf[GregorianCalendar])) -} diff --git a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/CaseClassTest.scala b/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/CaseClassTest.scala deleted file mode 100644 index 1c6c36e835..0000000000 --- a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/CaseClassTest.scala +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright 2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb -package record -package testmodels - -import net.liftweb.common._ -import net.liftweb.mongodb.record.field._ - -import com.mongodb._ - -case class TestCaseClass(a: String, b: Int) - -class CaseClassTest private () extends MongoRecord[CaseClassTest] with ObjectIdPk[CaseClassTest] { - - def meta = CaseClassTest - - object caseclassfield extends CaseClassField[CaseClassTest, TestCaseClass](this) -} - -object CaseClassTest extends CaseClassTest with MongoMetaRecord[CaseClassTest] diff --git a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/DateTest.scala b/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/DateTest.scala deleted file mode 100644 index 0f843f1537..0000000000 --- a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/DateTest.scala +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright 2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb -package record -package testmodels - -import net.liftweb.mongodb.record.field._ - -class DateTest private () extends MongoRecord[DateTest] with ObjectIdPk[DateTest] { - - def meta = DateTest - - object datefield extends DateField(this) -} - -object DateTest extends DateTest with MongoMetaRecord[DateTest] diff --git a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/DecimalTest.scala b/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/DecimalTest.scala deleted file mode 100644 index 2dbcb4f318..0000000000 --- a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/DecimalTest.scala +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright 2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package mongodb -package record -package testmodels - -import java.math.MathContext - -import net.liftweb.common._ -import net.liftweb.mongodb.record.field._ -import net.liftweb.mongodb.record.codecs.RecordCodec -import net.liftweb.record.field.DecimalField - -import com.mongodb._ - -class DecimalTest private () extends MongoRecord[DecimalTest] with ObjectIdPk[DecimalTest] { - - def meta = DecimalTest - - object decimalfield extends DecimalField(this, MathContext.UNLIMITED, 2) -} - -object DecimalTest extends DecimalTest with MongoMetaRecord[DecimalTest] { - override def codecRegistry = RecordCodec.defaultRegistry - override def bsonTypeClassMap = RecordCodec.defaultBsonTypeClassMap -} - -class LegacyDecimalTest private () extends MongoRecord[LegacyDecimalTest] with ObjectIdPk[LegacyDecimalTest] { - - def meta = LegacyDecimalTest - - object decimalfield extends DecimalField(this, MathContext.UNLIMITED, 2) -} - -object LegacyDecimalTest extends LegacyDecimalTest with MongoMetaRecord[LegacyDecimalTest] diff --git a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/DoubleTest.scala b/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/DoubleTest.scala deleted file mode 100644 index 46aab0acf5..0000000000 --- a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/DoubleTest.scala +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright 2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb -package record -package testmodels - -import net.liftweb.common._ -import net.liftweb.mongodb.record.field._ -import net.liftweb.record.field.DoubleField - -import com.mongodb._ - -class DoubleTest private () extends MongoRecord[DoubleTest] with ObjectIdPk[DoubleTest] { - - def meta = DoubleTest - - object doublefield extends DoubleField(this) -} - -object DoubleTest extends DoubleTest with MongoMetaRecord[DoubleTest] diff --git a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/EnumTest.scala b/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/EnumTest.scala deleted file mode 100644 index 3805c0e2a9..0000000000 --- a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/EnumTest.scala +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright 2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package mongodb -package record -package testmodels - -import net.liftweb.common._ -import net.liftweb.mongodb.record.field._ -import net.liftweb.record.Field -import net.liftweb.record.field._ - -import com.mongodb._ - -import org.bson._ -import org.bson.codecs._ -import org.bson.codecs.configuration.{CodecProvider, CodecRegistries, CodecRegistry} - -object TestEnum extends Enumeration { - val One = Value("One") - val Two = Value("Two") - val Three = Value("Three") -} - -class EnumTest private () extends MongoRecord[EnumTest] with ObjectIdPk[EnumTest] { - - def meta = EnumTest - - object enumfield extends EnumField(this, TestEnum) - object enumnamefield extends EnumNameField(this, TestEnum) -} - -object EnumTest extends EnumTest with MongoMetaRecord[EnumTest] diff --git a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/IntTest.scala b/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/IntTest.scala deleted file mode 100644 index 71948f6e3b..0000000000 --- a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/IntTest.scala +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright 2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb -package record -package testmodels - -import net.liftweb.common._ -import net.liftweb.mongodb.record.field._ -import net.liftweb.record.field.IntField - -import com.mongodb._ - -class IntTest private () extends MongoRecord[IntTest] with ObjectIdPk[IntTest] { - - def meta = IntTest - - object intfield extends IntField(this) -} - -object IntTest extends IntTest with MongoMetaRecord[IntTest] diff --git a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/JObjectTest.scala b/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/JObjectTest.scala deleted file mode 100644 index bd1b221841..0000000000 --- a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/JObjectTest.scala +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright 2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package mongodb -package record -package testmodels - -import net.liftweb.common._ -import net.liftweb.mongodb.record.field._ - -import com.mongodb._ - -class JObjectFieldTestRecord private () extends MongoRecord[JObjectFieldTestRecord] with ObjectIdPk[JObjectFieldTestRecord] { - def meta = JObjectFieldTestRecord - - object mandatoryJObjectField extends JObjectField(this) -} - -object JObjectFieldTestRecord extends JObjectFieldTestRecord with MongoMetaRecord[JObjectFieldTestRecord] { - override def formats = allFormats -} diff --git a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/JodaTimeTest.scala b/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/JodaTimeTest.scala deleted file mode 100644 index 56f302d13c..0000000000 --- a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/JodaTimeTest.scala +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb -package record -package testmodels - -import net.liftweb.common._ -import net.liftweb.mongodb.codecs.{BsonTypeClassMap, JodaDateTimeCodec} -import net.liftweb.mongodb.record.field._ -import net.liftweb.record.field.joda.JodaTimeField - -import com.mongodb._ - -import org.bson.BsonType -import org.bson.codecs.configuration.CodecRegistries - -import org.joda.time.DateTime - -class JodaTimeTest private () extends MongoRecord[JodaTimeTest] with ObjectIdPk[JodaTimeTest] { - - def meta = JodaTimeTest - - object jodatimefield extends JodaTimeField(this) -} - -object JodaTimeTest extends JodaTimeTest with MongoMetaRecord[JodaTimeTest] {} diff --git a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/ListTest.scala b/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/ListTest.scala deleted file mode 100644 index 30d5e99668..0000000000 --- a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/ListTest.scala +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Copyright 2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package mongodb -package record -package testmodels - -import fixtures._ - -import java.util.{Date, UUID} -import java.util.regex.Pattern - -import net.liftweb.common._ -import net.liftweb.json._ -import net.liftweb.json.ext.EnumSerializer -import net.liftweb.mongodb.codecs.{BigIntLongCodec, BsonTypeClassMap, JodaDateTimeCodec} -import net.liftweb.mongodb.record.codecs.{RecordCodec} -import net.liftweb.mongodb.record.field._ -import net.liftweb.record.field.IntField - -import org.bson.BsonType -import org.bson.codecs.configuration.CodecRegistries -import org.bson.types.ObjectId -import org.joda.time.DateTime - -import com.mongodb._ - -class BasicListTestRecord private () extends MongoRecord[BasicListTestRecord] with UUIDPk[BasicListTestRecord] { - def meta = BasicListTestRecord - - object bigIntListField extends MongoListField[BasicListTestRecord, BigInt](this) - object binaryListField extends MongoListField[BasicListTestRecord, Array[Byte]](this) - object booleanListField extends MongoListField[BasicListTestRecord, Boolean](this) - object decimalListField extends MongoListField[BasicListTestRecord, BigDecimal](this) - object doubleListField extends MongoListField[BasicListTestRecord, Double](this) - object longListField extends MongoListField[BasicListTestRecord, Long](this) - object stringListListField extends MongoListField[BasicListTestRecord, List[String]](this) - object stringMapListField extends MongoListField[BasicListTestRecord, Map[String, String]](this) -} - -object BasicListTestRecord extends BasicListTestRecord with MongoMetaRecord[BasicListTestRecord] { - override def formats = allFormats - - override def codecRegistry = CodecRegistries.fromRegistries( - CodecRegistries.fromCodecs(BigIntLongCodec()), - RecordCodec.defaultRegistry - ) - override def bsonTypeClassMap = RecordCodec.defaultBsonTypeClassMap -} - -class ListTestRecord private () extends MongoRecord[ListTestRecord] with UUIDPk[ListTestRecord] { - def meta = ListTestRecord - - object mandatoryStringListField extends MongoListField[ListTestRecord, String](this) - object mandatoryMongoRefListField extends ObjectIdRefListField(this, FieldTypeTestRecord) - object mandatoryIntListField extends MongoListField[ListTestRecord, Int](this) - object mandatoryJsonObjectListField extends JsonObjectListField(this, TypeTestJsonObject) - object caseClassListField extends CaseClassListField[ListTestRecord, CaseClassTestObject](this) { - override def formats = owner.meta.formats - } -} - -object ListTestRecord extends ListTestRecord with MongoMetaRecord[ListTestRecord] { - override def formats = allFormats + new EnumSerializer(MyTestEnum) -} - - -class MongoListTestRecord private () extends MongoRecord[MongoListTestRecord] with UUIDPk[MongoListTestRecord] { - def meta = MongoListTestRecord - - object objectIdRefListField extends ObjectIdRefListField(this, FieldTypeTestRecord) - - object patternListField extends MongoListField[MongoListTestRecord, Pattern](this) { - override def equals(other: Any): Boolean = { - other match { - case that: MongoListField[MongoListTestRecord, Pattern] => - that.value.corresponds(this.value) { (a,b) => - a.pattern == b.pattern && a.flags == b.flags - } - case _ => - false - } - } - } - - object dateListField extends MongoListField[MongoListTestRecord, Date](this) - object uuidListField extends MongoListField[MongoListTestRecord, UUID](this) -} - -object MongoListTestRecord extends MongoListTestRecord with MongoMetaRecord[MongoListTestRecord] { - override def formats = 
DefaultFormats.lossless + new ObjectIdSerializer + new PatternSerializer + new DateSerializer -} - - -class MongoJodaListTestRecord private () extends MongoRecord[MongoJodaListTestRecord] with UUIDPk[MongoJodaListTestRecord] { - def meta = MongoJodaListTestRecord - - object dateTimeListField extends MongoListField[MongoJodaListTestRecord, DateTime](this) -} - -object MongoJodaListTestRecord extends MongoJodaListTestRecord with MongoMetaRecord[MongoJodaListTestRecord] { - override def formats = DefaultFormats.lossless + new DateTimeSerializer - override def bsonTypeClassMap: BsonTypeClassMap = BsonTypeClassMap((BsonType.DATE_TIME -> classOf[DateTime])) -} diff --git a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/LongTest.scala b/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/LongTest.scala deleted file mode 100644 index 80f74c419e..0000000000 --- a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/LongTest.scala +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright 2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb -package record -package testmodels - -import net.liftweb.common._ -import net.liftweb.mongodb.record.field._ -import net.liftweb.record.field.LongField - -import com.mongodb._ - -class LongTest private () extends MongoRecord[LongTest] with ObjectIdPk[LongTest] { - - def meta = LongTest - - object longfield extends LongField(this) -} - -object LongTest extends LongTest with MongoMetaRecord[LongTest] diff --git a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/MapTest.scala b/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/MapTest.scala deleted file mode 100644 index 463d5a3dd1..0000000000 --- a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/MapTest.scala +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Copyright 2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package mongodb -package record -package testmodels - -import fixtures._ - -import java.util.{Date, UUID} -import java.util.regex.Pattern - -import net.liftweb.common._ -import net.liftweb.json._ -import net.liftweb.json.ext.EnumSerializer -import net.liftweb.mongodb.codecs.{BsonTypeClassMap, JodaDateTimeCodec} -import net.liftweb.mongodb.record.codecs.RecordCodec -import net.liftweb.mongodb.record.field._ - -import org.bson.{BsonDocument, BsonType} -import org.bson.codecs.configuration.CodecRegistries -import org.bson.types.ObjectId -import org.joda.time.DateTime - -import com.mongodb._ - -class MapTest private () extends MongoRecord[MapTest] with StringPk[MapTest] { - def meta = MapTest - - object mandatoryStringMapField extends MongoMapField[MapTest, String](this) - object mandatoryIntMapField extends MongoMapField[MapTest, Int](this) - - object binaryMapField extends MongoMapField[MapTest, Array[Byte]](this) - object booleanMapField extends MongoMapField[MapTest, Boolean](this) - object dateMapField extends MongoMapField[MapTest, Date](this) - object decimalMapField extends MongoMapField[MapTest, BigDecimal](this) - object doubleMapField extends MongoMapField[MapTest, Double](this) - object longMapField extends MongoMapField[MapTest, Long](this) - - object patternMapField extends MongoMapField[MapTest, Pattern](this) { - override def equals(other: Any): Boolean = { - other match { - case that: MongoMapField[MapTest, Pattern] => - that.value.toSeq.corresponds(this.value.toSeq) { (a,b) => - a._1 == b._1 && // keys - a._2.pattern == b._2.pattern && a._2.flags == b._2.flags - } - case _ => - false - } - } - } - - object stringListMapField extends MongoMapField[MapTest, List[String]](this) - object stringMapMapField extends MongoMapField[MapTest, Map[String, String]](this) - object uuidMapField extends MongoMapField[MapTest, UUID](this) -} - -object MapTest extends MapTest with MongoMetaRecord[MapTest] { - override def formats = allFormats - - override def codecRegistry = RecordCodec.defaultRegistry - override def bsonTypeClassMap = BsonTypeClassMap( - (BsonType.REGULAR_EXPRESSION -> classOf[Pattern]), - (BsonType.BINARY -> classOf[Array[Byte]]), - (BsonType.DECIMAL128 -> classOf[BigDecimal]), - (BsonType.DOCUMENT, classOf[BsonDocument]) - ) -} - -class MapTestRecord private () extends MongoRecord[MapTestRecord] with StringPk[MapTestRecord] { - def meta = MapTestRecord - - object mandatoryStringMapField extends MongoMapField[MapTestRecord, String](this) - object mandatoryIntMapField extends MongoMapField[MapTestRecord, Int](this) -} - -object MapTestRecord extends MapTestRecord with MongoMetaRecord[MapTestRecord] { - override def formats = allFormats -} - -class JodaTimeMapTest private () extends MongoRecord[JodaTimeMapTest] with ObjectIdPk[JodaTimeMapTest] { - - def meta = JodaTimeMapTest - - object jodatimeMapField extends MongoMapField[JodaTimeMapTest, DateTime](this) -} - -object JodaTimeMapTest extends JodaTimeMapTest with MongoMetaRecord[JodaTimeMapTest] { - override def bsonTypeClassMap: BsonTypeClassMap = BsonTypeClassMap((BsonType.DATE_TIME -> classOf[DateTime])) -} diff --git a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/PatternTest.scala b/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/PatternTest.scala deleted file mode 100644 index be98e5f031..0000000000 --- a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/PatternTest.scala +++ /dev/null @@ -1,34 
+0,0 @@ -/* - * Copyright 2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb -package record -package testmodels - -import net.liftweb.common._ -import net.liftweb.mongodb.record.field._ - -import com.mongodb._ - -class PatternTest private () extends MongoRecord[PatternTest] with ObjectIdPk[PatternTest] { - - def meta = PatternTest - - object patternfield extends PatternField(this) -} - -object PatternTest extends PatternTest with MongoMetaRecord[PatternTest] diff --git a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/RecordTest.scala b/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/RecordTest.scala deleted file mode 100644 index 12b0761311..0000000000 --- a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/RecordTest.scala +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright 2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package mongodb -package record -package testmodels - -import scala.util.Random - -import net.liftweb.common._ -import net.liftweb.record._ -import net.liftweb.record.field._ - -import org.bson._ -import org.bson.codecs.{DecoderContext, EncoderContext} -import org.bson.codecs.configuration.{CodecRegistry, CodecRegistries} - -import com.mongodb._ -import com.mongodb.client.{MongoClients, MongoCollection} -import com.mongodb.client.model.Filters.{eq => eqs} - -import net.liftweb.mongodb.codecs.{BigDecimalCodec, BsonTypeClassMap} -import net.liftweb.mongodb.record.codecs.CollectibleRecordCodec -import net.liftweb.mongodb.record.testmodels._ -import net.liftweb.record._ - -class RecordTest private () extends Record[RecordTest] { - - def meta = RecordTest - - object id extends IntField(this) { - override def name = "_id" - override def defaultValue = Random.nextInt - } - - object stringfield extends StringField(this, 100) -} - -object RecordTest extends RecordTest with MetaRecord[RecordTest] - -object MongoConfig { - val defaultBsonTypeClassMap: BsonTypeClassMap = - BsonTypeClassMap( - (BsonType.BINARY -> classOf[Array[Byte]]), - (BsonType.DECIMAL128 -> classOf[BigDecimal]), - (BsonType.DOCUMENT, classOf[BsonDocument]) - ) - - val defaultRegistry: CodecRegistry = CodecRegistries.fromRegistries( - MongoClientSettings.getDefaultCodecRegistry(), - CodecRegistries.fromCodecs(BigDecimalCodec()) - ) - - private val mongoClient = MongoClients.create() - - val main = mongoClient.getDatabase("record_test_db") -} - -object RecordTestStore { - val codec = CollectibleRecordCodec(RecordTest, MongoConfig.defaultRegistry, MongoConfig.defaultBsonTypeClassMap) - - private val registry = CodecRegistries.fromRegistries( - CodecRegistries.fromCodecs(codec), - MongoConfig.defaultRegistry - ) - - val collection: MongoCollection[RecordTest] = MongoConfig.main.getCollection("record_test", classOf[RecordTest]).withCodecRegistry(registry) -} diff --git a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/StringTest.scala b/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/StringTest.scala deleted file mode 100644 index c1d6c5b228..0000000000 --- a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/StringTest.scala +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright 2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb -package record -package testmodels - -import net.liftweb.common._ -import net.liftweb.mongodb.record.field._ -import net.liftweb.record.field.{OptionalStringField, StringField} - -import com.mongodb._ - -class StringTest private () extends MongoRecord[StringTest] with ObjectIdPk[StringTest] { - - def meta = StringTest - - object stringfield extends StringField(this, 100) - object optstringfield extends OptionalStringField(this, 100) - object stringfieldopt extends StringField(this, 100) { - override def optional_? 
= true - } -} - -object StringTest extends StringTest with MongoMetaRecord[StringTest] diff --git a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/TestModels.scala b/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/TestModels.scala deleted file mode 100644 index dc608c8b43..0000000000 --- a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/TestModels.scala +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright 2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb -package record -package testmodels - -case class CaseClassTestObject(intField: Int, stringField: String, enum: MyTestEnum.Value) - -object MyTestEnum extends Enumeration { - val ONE = Value("ONE") - val TWO = Value("TWO") - val THREE = Value("THREE") -} diff --git a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/UUIDTest.scala b/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/UUIDTest.scala deleted file mode 100644 index 9c0a9f4535..0000000000 --- a/persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/testmodels/UUIDTest.scala +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright 2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb -package record -package testmodels - -import net.liftweb.common._ -import net.liftweb.mongodb.record.field._ - -import com.mongodb._ - -class UUIDTest private () extends MongoRecord[UUIDTest] with ObjectIdPk[UUIDTest] { - - def meta = UUIDTest - - object uuidfield extends UUIDField(this) -} - -object UUIDTest extends UUIDTest with MongoMetaRecord[UUIDTest] diff --git a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/AsObjectId.scala b/persistence/mongodb/src/main/scala/net/liftweb/mongodb/AsObjectId.scala deleted file mode 100644 index c8868cc664..0000000000 --- a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/AsObjectId.scala +++ /dev/null @@ -1,31 +0,0 @@ -/** - * Copyright 2014 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb - -import org.bson.types.ObjectId - -/** - * An ObjectId extractor. - */ -object AsObjectId { - def unapply(in: String): Option[ObjectId] = asObjectId(in) - - def asObjectId(in: String): Option[ObjectId] = - if (ObjectId.isValid(in)) Some(new ObjectId(in)) - else None -} diff --git a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/BsonDSL.scala b/persistence/mongodb/src/main/scala/net/liftweb/mongodb/BsonDSL.scala deleted file mode 100644 index 5b4881679c..0000000000 --- a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/BsonDSL.scala +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright 2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb - -import json._ - -import scala.util.matching.Regex -import java.util.{Date, UUID} -import java.util.regex.Pattern - -import org.bson.types.ObjectId -import org.joda.time.DateTime - -object BsonDSL extends JsonDSL { - implicit def objectid2jvalue(oid: ObjectId): JValue = JsonObjectId(oid) - implicit def pattern2jvalue(p: Pattern): JValue = JsonRegex(p) - implicit def regex2jvalue(r: Regex): JValue = JsonRegex(r.pattern) - implicit def uuid2jvalue(u: UUID): JValue = JsonUUID(u) - implicit def date2jvalue(d: Date)(implicit formats: Formats): JValue = JsonDate(d) - implicit def datetime2jvalue(d: DateTime)(implicit formats: Formats): JValue = JsonDateTime(d) -} diff --git a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/BsonParser.scala b/persistence/mongodb/src/main/scala/net/liftweb/mongodb/BsonParser.scala deleted file mode 100644 index de9bffcce7..0000000000 --- a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/BsonParser.scala +++ /dev/null @@ -1,145 +0,0 @@ -/* - * Copyright 2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package mongodb - -import scala.collection.JavaConverters._ - -import java.util.{Date, UUID} -import java.util.regex.Pattern - -import net.liftweb.json._ -import net.liftweb.common.Box -import net.liftweb.util.SimpleInjector - -import com.mongodb.{BasicDBObject, BasicDBList, DBObject} -import org.bson.types.ObjectId -import org.bson._ - -object BsonParser extends SimpleInjector { - /** - * Set this to override BsonParser turning strings that are valid - * ObjectIds into actual ObjectIds. For example, place the following in Boot.boot: - * - * BsonParser.stringProcessor.default.set((s: String) => BsonString(s)) - */ - val stringProcessor = new Inject(() => defaultStringProcessor _) {} - - def defaultStringProcessor(s: String): BsonValue = { - if (ObjectId.isValid(s)) new BsonObjectId(new ObjectId(s)) - else new BsonString(s) - } - - /** - * Parse a JObject into a BsonDocument - */ - def parse(jo: JObject)(implicit formats: Formats): BsonDocument = - Parser.parse(jo, formats) - - /* - * Serialize a BsonDocument into a JObject - */ - def serialize(a: Any)(implicit formats: Formats): JValue = { - import mongodb.Meta.Reflection._ - a.asInstanceOf[AnyRef] match { - case null => JNull - case x if primitive_?(x.getClass) => primitive2jvalue(x) - case x if datetype_?(x.getClass) => datetype2jvalue(x)(formats) - case x if mongotype_?(x.getClass) => mongotype2jvalue(x)(formats) - case x if bsontype_?(x.getClass) => bsontype2jvalue(x)(formats) - case x: BasicDBList => JArray(x.asScala.toList.map(x => serialize(x)(formats))) - case x: BasicDBObject => JObject( - x.keySet.asScala.toList.map { f => - JField(f.toString, serialize(x.get(f.toString))(formats)) - } - ) - case x: BsonDocument => JObject( - x.keySet.asScala.toList.map { f => - JField(f.toString, serialize(x.get(f.toString))(formats)) - } - ) - case x: BsonArray => - JArray(x.getValues.asScala.toList.map(x => serialize(x)(formats))) - case x => - JNothing - } - } - - object Parser { - - def parse(jo: JObject, formats: Formats): BsonDocument = { - parseObject(jo.obj)(formats) - } - - private def parseArray(arr: List[JValue])(implicit formats: Formats): BsonArray = { - val dbl = new BsonArray - trimArr(arr).map { a => - a match { - case JsonObjectId(objectId) => dbl.add(new BsonObjectId(objectId)) - case JsonRegex(pattern) => dbl.add(new BsonRegularExpression(pattern.pattern, PatternHelper.flagsToString(pattern.flags))) - case JsonUUID(uuid) => dbl.add(new BsonBinary(uuid, UuidRepresentation.JAVA_LEGACY)) - case JsonDate(date) => dbl.add(new BsonDateTime(date.getTime)) - case JArray(arr) => dbl.add(parseArray(arr)) - case JObject(jo) => dbl.add(parseObject(jo)) - case jv: JValue => dbl.add(renderValue(jv)) - } - } - dbl - } - - private def parseObject(obj: List[JField])(implicit formats: Formats): BsonDocument = { - val dbo = new BsonDocument - trimObj(obj).foreach { jf => - jf.value match { - case JsonObjectId(objectId) => dbo.put(jf.name, new BsonObjectId(objectId)) - case JsonRegex(pattern) => dbo.put(jf.name, new BsonRegularExpression(pattern.pattern, PatternHelper.flagsToString(pattern.flags))) - case JsonUUID(uuid) => dbo.put(jf.name, new BsonBinary(uuid, UuidRepresentation.JAVA_LEGACY)) - case JsonDate(date) => dbo.put(jf.name, new BsonDateTime(date.getTime)) - case JArray(arr) => dbo.put(jf.name, parseArray(arr)) - case JObject(jo) => dbo.put(jf.name, parseObject(jo)) - case jv: JValue => dbo.put(jf.name, renderValue(jv)) - } - } - dbo - } - - private def renderValue(jv: JValue)(implicit formats: Formats): 
BsonValue = jv match { - case JBool(b) => new BsonBoolean(java.lang.Boolean.valueOf(b)) - case JInt(n) => renderInteger(n) - case JDouble(n) => new BsonDouble(new java.lang.Double(n)) - case JNull => new BsonNull() - case JNothing => sys.error("can't render 'nothing'") - case JString(null) => new BsonString("null") - case JString(s) => stringProcessor.vend(s) - case x => new BsonString(x.toString) - } - - private def renderInteger(i: BigInt): BsonValue = { - if (i.isValidInt) { - new BsonInt32(new java.lang.Integer(i.intValue)) - } else if (i.isValidLong) { - new BsonInt64(new java.lang.Long(i.longValue)) - } - else { - new BsonString(i.toString) - } - } - - private def trimArr(xs: List[JValue]) = xs.filter(_ != JNothing) - private def trimObj(xs: List[JField]) = xs.filter(_.value != JNothing) - } -} diff --git a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/JObjectParser.scala b/persistence/mongodb/src/main/scala/net/liftweb/mongodb/JObjectParser.scala deleted file mode 100644 index 13aef4b3b2..0000000000 --- a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/JObjectParser.scala +++ /dev/null @@ -1,152 +0,0 @@ -/* - * Copyright 2010-2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb - -import scala.collection.JavaConverters._ - -import java.util.{Date, UUID} -import java.util.regex.Pattern - -import net.liftweb.json._ -import net.liftweb.common.Box -import net.liftweb.util.SimpleInjector - -import com.mongodb.{BasicDBObject, BasicDBList, DBObject} -import org.bson.types.ObjectId -import org.bson.Document - -@deprecated("Please use BsonParser instead.", "3.4.3") -object JObjectParser extends SimpleInjector { - /** - * Set this to override JObjectParser turning strings that are valid - * ObjectIds into actual ObjectIds. 
For example, place the following in Boot.boot: - * - * JObjectParser.stringProcessor.default.set((s: String) => s) - */ - @deprecated("Please use BsonParser instead.", "3.4.3") - val stringProcessor = new Inject(() => defaultStringProcessor _) {} - - def defaultStringProcessor(s: String): Object = { - if (ObjectId.isValid(s)) new ObjectId(s) - else s - } - - /* - * Parse a JObject into a DBObject - */ - @deprecated("Please use BsonParser instead.", "3.4.3") - def parse(jo: JObject)(implicit formats: Formats): DBObject = - Parser.parse(jo, formats) - - /* - * Serialize a DBObject into a JObject - */ - @deprecated("Please use BsonParser instead.", "3.4.3") - def serialize(a: Any)(implicit formats: Formats): JValue = { - import mongodb.Meta.Reflection._ - a.asInstanceOf[AnyRef] match { - case null => JNull - case x if primitive_?(x.getClass) => primitive2jvalue(x) - case x if datetype_?(x.getClass) => datetype2jvalue(x)(formats) - case x if mongotype_?(x.getClass) => mongotype2jvalue(x)(formats) - case x: BasicDBList => JArray(x.asScala.toList.map( x => serialize(x)(formats))) - case x: BasicDBObject => JObject( - x.keySet.asScala.toList.map { f => - JField(f.toString, serialize(x.get(f.toString))(formats)) - } - ) - case x: Document => JObject( - x.keySet.asScala.toList.map { f => - JField(f.toString, serialize(x.get(f.toString), formats)) - } - ) - case x => { - JNothing - } - } - } - - @deprecated("Please use BsonParser instead.", "3.4.3") - object Parser { - - @deprecated("Please use BsonParser instead.", "3.4.3") - def parse(jo: JObject, formats: Formats): DBObject = { - parseObject(jo.obj)(formats) - } - - private def parseArray(arr: List[JValue])(implicit formats: Formats): BasicDBList = { - val dbl = new BasicDBList - trimArr(arr).foreach { a => - a match { - case JsonObjectId(objectId) => dbl.add(objectId) - case JsonRegex(regex) => dbl.add(regex) - case JsonUUID(uuid) => dbl.add(uuid) - case JsonDate(date) => dbl.add(date) - case JArray(arr) => dbl.add(parseArray(arr)) - case JObject(jo) => dbl.add(parseObject(jo)) - case jv: JValue => dbl.add(renderValue(jv)) - } - } - dbl - } - - private def parseObject(obj: List[JField])(implicit formats: Formats): BasicDBObject = { - val dbo = new BasicDBObject - trimObj(obj).foreach { jf => - jf.value match { - case JsonObjectId(objectId) => dbo.put(jf.name, objectId) - case JsonRegex(regex) => dbo.put(jf.name, regex) - case JsonUUID(uuid) => dbo.put(jf.name, uuid) - case JsonDate(date) => dbo.put(jf.name, date) - case JArray(arr) => dbo.put(jf.name, parseArray(arr)) - case JObject(jo) => dbo.put(jf.name, parseObject(jo)) - case jv: JValue => dbo.put(jf.name, renderValue(jv)) - } - } - dbo - } - - private def renderValue(jv: JValue)(implicit formats: Formats): Object = jv match { - case JBool(b) => java.lang.Boolean.valueOf(b) - case JInt(n) => renderInteger(n) - case JDouble(n) => new java.lang.Double(n) - case JNull => null - case JNothing => sys.error("can't render 'nothing'") - case JString(null) => "null" - case JString(s) => stringProcessor.vend(s) - case _ => "" - } - - // FIXME: This is not ideal. 
- private def renderInteger(i: BigInt): Object = { - if (i <= java.lang.Integer.MAX_VALUE && i >= java.lang.Integer.MIN_VALUE) { - new java.lang.Integer(i.intValue) - } - else if (i <= java.lang.Long.MAX_VALUE && i >= java.lang.Long.MIN_VALUE) { - new java.lang.Long(i.longValue) - } - else { - i.toString - } - } - - private def trimArr(xs: List[JValue]) = xs.filter(_ != JNothing) - private def trimObj(xs: List[JField]) = xs.filter(_.value != JNothing) - } -} - diff --git a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/JsonExtractors.scala b/persistence/mongodb/src/main/scala/net/liftweb/mongodb/JsonExtractors.scala deleted file mode 100644 index 3ab6ae1fca..0000000000 --- a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/JsonExtractors.scala +++ /dev/null @@ -1,108 +0,0 @@ -/** - * Copyright 2014-2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb - -import json._ -import util.Helpers.tryo -import JsonDSL._ - -import java.util.{Date, UUID} -import java.util.regex.Pattern - -import org.bson.types.ObjectId -import org.joda.time.DateTime - -object JsonObjectId { - def unapply(json: JValue): Option[ObjectId] = { - json match { - case JObject(JField("$oid", JString(objectIdString)) :: Nil) if ObjectId.isValid(objectIdString) => - Some(new ObjectId(objectIdString)) - case _ => - None - } - } - - def apply(objectId: ObjectId): JValue = ("$oid" -> objectId.toString) - - def asJValue(objectId: ObjectId, formats: Formats): JValue = - if (isObjectIdSerializerUsed(formats)) - apply(objectId) - else - JString(objectId.toString) - - /** - * Check to see if the ObjectIdSerializer is being used. 
- */ - private def isObjectIdSerializerUsed(formats: Formats): Boolean = - formats.customSerializers.exists(_.getClass == objectIdSerializerClass) - - private val objectIdSerializerClass = classOf[net.liftweb.mongodb.ObjectIdSerializer] -} - -object JsonRegex { - def unapply(json: JValue): Option[Pattern] = { - json match { - case JObject(JField("$regex", JString(regex)) :: JField("$flags", JInt(f)) :: Nil) => - Some(Pattern.compile(regex, f.intValue)) - case _ => - None - } - } - - def apply(p: Pattern): JValue = ("$regex" -> p.pattern) ~ ("$flags" -> p.flags) -} - -object JsonUUID { - def unapply(json: JValue): Option[UUID] = { - json match { - case JObject(JField("$uuid", JString(s)) :: Nil) => - tryo(UUID.fromString(s)) - case _ => - None - } - } - - def apply(uuid: UUID): JValue = ("$uuid" -> uuid.toString) -} - -object JsonDate { - def unapply(json: JValue)(implicit formats: Formats): Option[Date] = { - json match { - case JObject(JField("$dt", JString(s)) :: Nil) => - formats.dateFormat.parse(s) - case _ => - None - } - } - - def apply(dt: Date)(implicit formats: Formats): JValue = ("$dt" -> formats.dateFormat.format(dt)) - def apply(dt: Long)(implicit formats: Formats): JValue = ("$dt" -> formats.dateFormat.format(new Date(dt))) -} - -object JsonDateTime { - def unapply(json: JValue)(implicit formats: Formats): Option[DateTime] = { - json match { - case JObject(JField("$dt", JString(s)) :: Nil) => - formats.dateFormat.parse(s).map(dt => new DateTime(dt)) - case _ => - None - } - } - - def apply(dt: DateTime)(implicit formats: Formats): JValue = ("$dt" -> formats.dateFormat.format(dt.toDate)) -} diff --git a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/JsonObject.scala b/persistence/mongodb/src/main/scala/net/liftweb/mongodb/JsonObject.scala deleted file mode 100644 index b2b0a94c43..0000000000 --- a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/JsonObject.scala +++ /dev/null @@ -1,58 +0,0 @@ -/* -* Copyright 2010-2020 WorldWide Conferencing, LLC -* -* Licensed under the Apache License, Version 2.0 (the "License"); -* you may not use this file except in compliance with the License. -* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 -* Unless required by applicable law or agreed to in writing, software -* distributed under the License is distributed on an "AS IS" BASIS, -* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -* See the License for the specific language governing permissions and -* limitations under the License. -*/ - -package net.liftweb -package mongodb - -import json.Formats -import json.JsonAST.JObject - -import scala.reflect.Manifest - -import org.bson.types.ObjectId - -/* -* These traits provide lift-json related convenience methods for case classes -* and their companion objects. 
Used by MongoDocument, JsonObjectField, and -* JsonObjectListField -*/ -trait JsonObject[BaseDocument] { - self: BaseDocument => - - def meta: JsonObjectMeta[BaseDocument] - - // convert class to a json value - def asJObject()(implicit formats: Formats): JObject = meta.toJObject(this) - -} - -class JsonObjectMeta[BaseDocument](implicit mf: Manifest[BaseDocument]) { - - import net.liftweb.json.Extraction._ - - // create an instance of BaseDocument from a JObject - def create(in: JObject)(implicit formats: Formats): BaseDocument = - extract(in)(formats, mf) - - // convert class to a JObject - def toJObject(in: BaseDocument)(implicit formats: Formats): JObject = - decompose(in)(formats).asInstanceOf[JObject] -} - -/* -* Case class for a db reference (foreign key). To be used in a JsonObject -* ref = collection name, id is the value of the reference -* Only works with ObjectIds. -*/ -case class MongoRef(ref: String, id: ObjectId) - diff --git a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/Meta.scala b/persistence/mongodb/src/main/scala/net/liftweb/mongodb/Meta.scala deleted file mode 100644 index 5abc21c518..0000000000 --- a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/Meta.scala +++ /dev/null @@ -1,163 +0,0 @@ -/* - * Copyright 2010-2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package mongodb - -import json._ -import util.JodaHelpers - -import java.util.{Calendar, Date, GregorianCalendar, UUID} -import java.util.regex.Pattern - -import org.bson._ -import org.bson.types.ObjectId -import org.joda.time._ - -import com.mongodb._ - -import JsonDSL._ - -private[mongodb] object Meta { - - /* - * For converting scala objects into DBObject values - */ - object Reflection { - import java.lang.reflect._ - - /* - * These don't require a conversion and can be put directly into a DBObject - */ - val primitives = Set[Class[_]](classOf[String], classOf[Int], classOf[Long], classOf[Double], - classOf[Float], classOf[Byte], classOf[BigInt], classOf[Boolean], - classOf[Short], classOf[java.lang.Integer], classOf[java.lang.Long], - classOf[java.lang.Double], classOf[java.lang.Float], - classOf[java.lang.Byte], classOf[java.lang.Boolean], - classOf[java.lang.Short], classOf[scala.Array[Byte]]) - - def primitive_?(clazz: Class[_]) = primitives contains clazz - - /* - * This is used to convert DBObjects into JObjects - */ - def primitive2jvalue(a: Any): JValue = a match { - case x: String => JString(x) - case x: Int => JInt(x) - case x: Long => JInt(x) - case x: Double => JDouble(x) - case x: Float => JDouble(x) - case x: Byte => JInt(BigInt(x)) - case x: BigInt => JInt(x) - case x: Boolean => JBool(x) - case x: Short => JInt(BigInt(x)) - case x: java.lang.Integer => JInt(BigInt(x.asInstanceOf[Int])) - case x: java.lang.Long => JInt(BigInt(x.asInstanceOf[Long])) - case x: java.lang.Double => JDouble(x.asInstanceOf[Double]) - case x: java.lang.Float => JDouble(x.asInstanceOf[Float]) - case x: java.lang.Byte => JInt(BigInt(x.asInstanceOf[Byte])) - case x: java.lang.Boolean => JBool(x.asInstanceOf[Boolean]) - case x: java.lang.Short => JInt(BigInt(x.asInstanceOf[Short])) - case _ => sys.error("not a primitive " + a.asInstanceOf[AnyRef].getClass) - } - - /* - * Date types require formatting - */ - val datetypes = Set[Class[_]](classOf[Calendar], classOf[Date], classOf[GregorianCalendar], classOf[DateTime]) - - def datetype_?(clazz: Class[_]) = datetypes contains clazz - - def datetype2jvalue(a: Any)(implicit formats: Formats): JValue = a match { - case x: Calendar => JsonDate(x.getTime)(formats) - case x: Date => JsonDate(x)(formats) - case x: DateTime => JsonDateTime(x)(formats) - } - - def datetype2dbovalue(a: Any) = a match { - case x: Calendar => x.getTime - case x: Date => x - case x: DateTime => x.toDate - } - - /* - * Extended Mongo types. 
- */ - val mongotypes = Set[Class[_]]( - classOf[DBRef], classOf[ObjectId], classOf[Pattern], classOf[UUID]) - - def mongotype_?(clazz: Class[_]) = mongotypes contains clazz - - /* - * Definitive place for JValue conversion of mongo types - */ - def mongotype2jvalue(a: Any)(implicit formats: Formats): JValue = a match { - case x: ObjectId => JsonObjectId.asJValue(x, formats) - case x: Pattern => JsonRegex(x) - case x: UUID => JsonUUID(x) - case x: DBRef => sys.error("DBRefs are not supported.") - case _ => sys.error("not a mongotype " + a.asInstanceOf[AnyRef].getClass) - } - - /** - * Bson types - */ - - val bsontypes = Set[Class[_]]( - classOf[BsonBinary], classOf[BsonBoolean], classOf[BsonDateTime], - classOf[BsonDouble], classOf[BsonInt32], classOf[BsonInt64], classOf[BsonNull], - classOf[BsonObjectId], classOf[BsonRegularExpression], classOf[BsonString]) - - def bsontype_?(clazz: Class[_]) = bsontypes contains clazz - - def bsontype2jvalue(a: Any)(implicit formats: Formats): JValue = a match { - case x: BsonBinary => { - val binaryType = x.getType() - - if (binaryType == BsonBinarySubType.UUID_LEGACY.getValue()) { - JsonUUID(x.asUuid(UuidRepresentation.JAVA_LEGACY)) - } else { - sys.error("invalid binary type " + binaryType) - } - } - case x: BsonBoolean => JBool(x.getValue) - case x: BsonDateTime => JsonDate(x.getValue)(formats) - case x: BsonDouble => JDouble(x.getValue) - case x: BsonInt32 => JInt(x.getValue) - case x: BsonInt64 => JInt(x.getValue) - case x: BsonNull => JNull - case x: BsonObjectId => JsonObjectId.asJValue(x.getValue, formats) - case x: BsonRegularExpression => JsonRegex(Pattern.compile(x.getPattern, PatternHelper.optionsToFlags(x.getOptions))) - case x: BsonString => JString(x.getValue) - case _ => sys.error("not a bsontype " + a.asInstanceOf[AnyRef].getClass) - } - } - - @deprecated("use JsonDate.apply", "2.6") - def dateAsJValue(d: Date, formats: Formats): JValue = ("$dt" -> formats.dateFormat.format(d)) - @deprecated("use JsonObjectId.apply", "2.6") - def objectIdAsJValue(oid: ObjectId): JValue = JsonObjectId(oid) - @deprecated("use JsonRegex.apply", "2.6") - def patternAsJValue(p: Pattern): JValue = JsonRegex(p) - @deprecated("use JsonUUID.apply", "2.6") - def uuidAsJValue(u: UUID): JValue = JsonUUID(u) - - @deprecated("use JsonObjectId.asJValue", "2.6") - def objectIdAsJValue(oid: ObjectId, formats: Formats): JValue = - JsonObjectId.asJValue(oid, formats) - - -} diff --git a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/Mongo.scala b/persistence/mongodb/src/main/scala/net/liftweb/mongodb/Mongo.scala deleted file mode 100644 index b56c5b5f1c..0000000000 --- a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/Mongo.scala +++ /dev/null @@ -1,260 +0,0 @@ -/* - * Copyright 2010-2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package mongodb - -import net.liftweb.json.{Formats, JObject} -import net.liftweb.util.{ConnectionIdentifier, DefaultConnectionIdentifier} - -import java.util.concurrent.ConcurrentHashMap - -import scala.collection.immutable.HashSet - -import org.bson.Document -import org.bson.conversions.Bson -import com.mongodb.{DB, DBCollection, Mongo, MongoClient, MongoException, MongoOptions, ServerAddress} -import com.mongodb.client.{MongoCollection, MongoDatabase} -import com.mongodb.client.model.{IndexModel, IndexOptions} - - -/** - * Main Mongo object - */ -object MongoDB { - - /** - * HashMap of Mongo instance and db name tuples, keyed by ConnectionIdentifier - */ - private[this] val dbs = new ConcurrentHashMap[ConnectionIdentifier, (MongoClient, String)] - - /** - * Define a MongoClient db using a MongoClient instance. - */ - def defineDb(name: ConnectionIdentifier, mngo: MongoClient, dbName: String) { - dbs.put(name, (mngo, dbName)) - } - - /** - * Get a MongoClient reference - */ - private[this] def getClient(name: ConnectionIdentifier): Option[MongoClient] = { - Option(dbs.get(name)).map { case (mngo, db) => mngo } - } - - /** - * Get a DB reference - */ - @deprecated("Use useDatabase instead", "3.4.3") - def getDb(name: ConnectionIdentifier): Option[DB] = dbs.get(name) match { - case null => None - case (mngo, db) => Some(mngo.getDB(db)) - } - - /** - * Get a MongoDatabase reference - */ - private[this] def getDatabase(name: ConnectionIdentifier): Option[MongoDatabase] = { - Option(dbs.get(name)).map { case (mngo, db) => mngo.getDatabase(db) } - } - - // for legacy purposes - @deprecated("Use getCollection instead", "3.4.3") - private[this] def getColl(name: ConnectionIdentifier, collectionName: String): Option[DBCollection] = - getDb(name) match { - case Some(mongo) if mongo != null => - Some(mongo.getCollection(collectionName)) - case _ => - None - } - - /** - * Get a Mongo collection. Gets a Mongo db first. - */ - private[this] def getCollection[TDocument](name: ConnectionIdentifier, collectionName: String, documentClass: Class[TDocument]): Option[MongoCollection[TDocument]] = - getDatabase(name) match { - case Some(mongo) if mongo != null => - Some(mongo.getCollection(collectionName, documentClass)) - case _ => - None - } - - /** - * Executes function {@code f} with the MongoClient. - */ - def useClient[T](name: ConnectionIdentifier)(f: (MongoClient) => T): T = { - val client = getClient(name) match { - case Some(mongo) => - mongo - case _ => - throw new MongoException("Mongo not found: "+name.toString) - } - - f(client) - } - - /** - * Executes function {@code f} with the mongo db named {@code name}. - */ - @deprecated("Use useDatabase instead", "3.4.3") - def use[T](name: ConnectionIdentifier)(f: (DB) => T): T = { - - val db = getDb(name) match { - case Some(mongo) => mongo - case _ => throw new MongoException("Mongo not found: "+name.toString) - } - - f(db) - } - - /** - * Executes function {@code f} with the mongo db named {@code name}. - */ - def useDatabase[T](name: ConnectionIdentifier)(f: (MongoDatabase) => T): T = { - - val db = getDatabase(name) match { - case Some(mongo) => - mongo - case _ => - throw new MongoException("Mongo not found: "+name.toString) - } - - f(db) - } - - /** - * Executes function {@code f} with the mongo named {@code name}. 
- * Uses the default ConnectionIdentifier - */ - @deprecated("Use useDefaultDatabase instead", "3.4.3") - def use[T](f: (DB) => T): T = { - - val db = getDb(DefaultConnectionIdentifier) match { - case Some(mongo) => mongo - case _ => throw new MongoException("Mongo not found: "+DefaultConnectionIdentifier.toString) - } - - f(db) - } - - /** - * Executes function {@code f} with the DefaultConnectionIdentifier - */ - def useDefaultDatabase[T](f: (MongoDatabase) => T): T = { - val db = getDatabase(DefaultConnectionIdentifier) match { - case Some(mongo) => - mongo - case _ => - throw new MongoException("Mongo not found: "+DefaultConnectionIdentifier.toString) - } - - f(db) - } - - /** - * Executes function {@code f} with the mongo named {@code name} and - * collection names {@code collectionName}. Gets a collection for you. - */ - @deprecated("Use useMongoCollection instead", "3.4.3") - def useCollection[T](name: ConnectionIdentifier, collectionName: String)(f: (DBCollection) => T): T = { - val coll = getColl(name, collectionName) match { - case Some(collection) => collection - case _ => throw new MongoException("Mongo not found: "+collectionName+". ConnectionIdentifier: "+name.toString) - } - - f(coll) - } - - /** - * Executes function {@code f} with the mongo named {@code name} and - * collection names {@code collectionName}. Gets a collection for you. - */ - def useMongoCollection[TDocument, T](name: ConnectionIdentifier, collectionName: String, documentClass: Class[TDocument])(f: (MongoCollection[TDocument]) => T): T = { - val coll = getCollection[TDocument](name, collectionName, documentClass) match { - case Some(collection) => - collection - case _ => - throw new MongoException("Mongo not found: "+collectionName+". ConnectionIdentifier: "+name.toString) - } - - f(coll) - } - - /** - * Same as above except uses DefaultConnectionIdentifier - */ - @deprecated("Use useMongoCollection instead", "3.4.3") - def useCollection[T](collectionName: String)(f: (DBCollection) => T): T = { - val coll = getColl(DefaultConnectionIdentifier, collectionName) match { - case Some(collection) => collection - case _ => throw new MongoException("Mongo not found: "+collectionName+". ConnectionIdentifier: "+DefaultConnectionIdentifier.toString) - } - - f(coll) - } - - /** - * Same as above except uses DefaultConnectionIdentifier - */ - def useMongoCollection[TDocument, T](collectionName: String, documentClass: Class[TDocument])(f: (MongoCollection[TDocument]) => T): T = { - val coll = getCollection[TDocument](DefaultConnectionIdentifier, collectionName, documentClass) match { - case Some(collection) => - collection - case _ => - throw new MongoException("Mongo not found: "+collectionName+". ConnectionIdentifier: "+DefaultConnectionIdentifier.toString) - } - - f(coll) - } - - def useMongoCollection[T](collectionName: String)(f: (MongoCollection[Document]) => T): T = { - val coll = getCollection[Document](DefaultConnectionIdentifier, collectionName, classOf[Document]) match { - case Some(collection) => - collection - case _ => - throw new MongoException("Mongo not found: "+collectionName+". ConnectionIdentifier: "+DefaultConnectionIdentifier.toString) - } - - f(coll) - } - - /** - * Calls close on each MongoClient instance and clears the HashMap. - */ - def closeAll(): Unit = { - import scala.collection.JavaConverters._ - dbs.values.asScala.foreach { case (mngo, _) => - mngo.close() - } - dbs.clear() - } - - /** - * Clear the HashMap. 
- */ - def clear(): Unit = { - dbs.clear() - } - - /** - * Remove a specific ConnectionIdentifier from the HashMap. - */ - def remove(id: ConnectionIdentifier): Option[MongoDatabase] = { - val db = getDatabase(id) - dbs.remove(id) - db - } -} diff --git a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/MongoAsync.scala b/persistence/mongodb/src/main/scala/net/liftweb/mongodb/MongoAsync.scala deleted file mode 100644 index e20862ff98..0000000000 --- a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/MongoAsync.scala +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Copyright 2017-2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb.mongodb - -import java.util.concurrent.ConcurrentHashMap - -import com.mongodb.MongoException -import com.mongodb.async.client.{MongoCollection, MongoDatabase} -import com.mongodb.async.SingleResultCallback -import net.liftweb.util.ConnectionIdentifier -import org.bson.Document - -import scala.concurrent.Promise - -@deprecated("No longer supported. This will be removed in Lift 4.", "3.4.3") -private[mongodb] class SingleBooleanVoidCallback(f: () => Unit) extends SingleResultCallback[Void] { - private[this] val p = Promise[Boolean]() - - override def onResult(result: java.lang.Void, error: Throwable): Unit = { - Option(error) match { - case None => - f() - p.success(true) - case Some(t) => - p.failure(t) - } - } - - def future = p.future -} - -/** - * Async version of MongoDB. - * - * You should only have one instance of MongoClient in a JVM. - * - * Example: - * - * {{{ - * import com.mongodb.MongoClientSettings - * import com.mongodb.async.client.MongoClients - * import net.liftweb.util.{ConnectionIdentifier, DefaultConnectionIdentifier} - * import org.bson.codecs.configuration.CodecRegistries - * - * val client = MongoClients.create("mongodb://127.0.0.1:27017") - * - * // main database - * MongoAsync.defineDb(DefaultConnectionIdentifier, client.getDatabase("mydb")) - * - * // admin database - * case object AdminIdentifier extends ConnectionIdentifier { - * val jndiName = "admin" - * } - * - * val codecRegistry = CodecRegistries.fromRegistries( - * MongoClientSettings.getDefaultCodecRegistry(), - * CodecRegistries.fromCodecs(new LongPrimitiveCodec, new IntegerPrimitiveCodec) - * ) - - * val admin = client.getDatabase("admin").withCodecRegistry(codecRegistry) - * MongoAsync.defineDb(AdminIdentifier, admin) - * - * }}} - */ -@deprecated("No longer supported. This will be removed in Lift 4.", "3.4.3") -object MongoAsync { - - /** - * HashMap of MongoDatabase instances keyed by ConnectionIdentifier - */ - private[this] val dbs = new ConcurrentHashMap[ConnectionIdentifier, MongoDatabase] - - /** - * Define a Mongo db using a MongoDatabase instance. - */ - @deprecated("No longer supported. 
This will be removed in Lift 4.", "3.4.3") - def defineDb(id: ConnectionIdentifier, db: MongoDatabase): Unit = { - dbs.put(id, db) - } - - /** - * Get a MongoDatabase reference - */ - private[this] def getDatabase(name: ConnectionIdentifier): Option[MongoDatabase] = { - Option(dbs.get(name)) - } - - /** - * Executes function {@code f} with the mongo database identified by {@code name}. - */ - @deprecated("No longer supported. This will be removed in Lift 4.", "3.4.3") - def use[T](name: ConnectionIdentifier)(f: (MongoDatabase) => T): T = { - val db = getDatabase(name) match { - case Some(mongo) => mongo - case _ => throw new MongoException("Mongo not found: "+name.toString) - } - f(db) - } - - /** - * Executes function {@code f} with the collection named {@code collectionName} from - * the mongo database identified by {@code name}. - */ - @deprecated("No longer supported. This will be removed in Lift 4.", "3.4.3") - def useCollection[T](name: ConnectionIdentifier, collectionName: String)(f: (MongoCollection[Document]) => T): T = { - val coll = getCollection(name, collectionName) match { - case Some(collection) => collection - case _ => throw new MongoException("Mongo not found: "+collectionName+". ConnectionIdentifier: "+name.toString) - } - - f(coll) - } - - private[this] def getCollection(name: ConnectionIdentifier, collectionName: String): Option[MongoCollection[Document]] = { - getDatabase(name).map(_.getCollection(collectionName)) - } - - /** - * Clear the HashMap. - */ - @deprecated("No longer supported. This will be removed in Lift 4.", "3.4.3") - def clear(): Unit = { - dbs.clear() - } - - /** - * Remove a specific ConnectionIdentifier from the HashMap. - */ - @deprecated("No longer supported. This will be removed in Lift 4.", "3.4.3") - def remove(id: ConnectionIdentifier): Option[MongoDatabase] = { - Option(dbs.remove(id)) - } -} diff --git a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/MongoCodecs.scala b/persistence/mongodb/src/main/scala/net/liftweb/mongodb/MongoCodecs.scala deleted file mode 100644 index 6a43ed92aa..0000000000 --- a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/MongoCodecs.scala +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright 2017 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb.mongodb - -import org.bson.codecs._ - -/** - * Codec for java.lang.Long - */ -class LongPrimitiveCodec extends LongCodec { - override def getEncoderClass() = java.lang.Long.TYPE -} - -/** - * Codec for java.lang.Integer - */ -class IntegerPrimitiveCodec extends IntegerCodec { - override def getEncoderClass() = java.lang.Integer.TYPE -} diff --git a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/MongoDocument.scala b/persistence/mongodb/src/main/scala/net/liftweb/mongodb/MongoDocument.scala deleted file mode 100644 index 0c64b193df..0000000000 --- a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/MongoDocument.scala +++ /dev/null @@ -1,292 +0,0 @@ -/* - * Copyright 2010-2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb - -import net.liftweb.common.Box -import net.liftweb.json._ -import net.liftweb.util.{ConnectionIdentifier, DefaultConnectionIdentifier} -import net.liftweb.util.Helpers.tryo - -import scala.collection.JavaConverters._ - -import java.util.UUID - -import org.bson.{BsonDocument, Document, UuidRepresentation} -import org.bson.codecs.{PatternCodec, UuidCodecProvider} -import org.bson.codecs.configuration.{CodecRegistries, CodecRegistry} -import org.bson.conversions.Bson -import org.bson.types.ObjectId - -import com.mongodb._ -import com.mongodb.client.{MongoCollection, MongoDatabase} -import com.mongodb.client.model.{DeleteOptions, IndexOptions, InsertOneOptions, ReplaceOptions, UpdateOptions} -import com.mongodb.client.model.Filters.{eq => eqs} -import com.mongodb.client.result.{DeleteResult, UpdateResult} - -/** - * extend case class with this trait - */ -trait MongoDocument[BaseDocument] extends JsonObject[BaseDocument] { - self: BaseDocument => - - def _id: Any - - def meta: MongoDocumentMeta[BaseDocument] - - def delete: Box[DeleteResult] = { - meta.deleteOne("_id", _id) - } - - def save: UpdateResult = meta.save(this) - - def getRef: Option[MongoRef] = _id match { - case oid: ObjectId => Some(MongoRef(meta.collectionName, oid)) - case _ => None - } -} - -/** - * extend case class companion objects with this trait - */ -trait MongoDocumentMeta[BaseDocument] extends JsonObjectMeta[BaseDocument] with MongoMeta[BaseDocument, BsonDocument] { - - private val bsonDocumentClass = classOf[BsonDocument] - - def codecRegistry: CodecRegistry = CodecRegistries.fromRegistries( - MongoClientSettings.getDefaultCodecRegistry(), - CodecRegistries.fromProviders(new UuidCodecProvider(UuidRepresentation.JAVA_LEGACY)), - CodecRegistries.fromCodecs(new PatternCodec()) - ) - - /** - * Override this to specify a ConnectionIdentifier. - */ - def connectionIdentifier: ConnectionIdentifier = DefaultConnectionIdentifier - - /** - * Use the collection associated with this Meta. 
- */ - def useCollection[T](f: MongoCollection[BsonDocument] => T): T = - MongoDB.useMongoCollection(connectionIdentifier, collectionName, bsonDocumentClass) { mc => - f(mc.withCodecRegistry(codecRegistry).withWriteConcern(writeConcern)) - } - - def useCollection[T](db: MongoDatabase)(f: MongoCollection[BsonDocument] => T): T = { - val mc = db.getCollection(collectionName, bsonDocumentClass) - - f(mc.withCodecRegistry(codecRegistry).withWriteConcern(writeConcern)) - } - - @deprecated("Use useCollection instead", "3.4.3") - def useColl[T](f: DBCollection => T): T = - MongoDB.useCollection(connectionIdentifier, collectionName)(f) - - /** - * Use the db associated with this Meta. - */ - def useDatabase[T](f: MongoDatabase => T): T = - MongoDB.useDatabase(connectionIdentifier) { md => - f(md.withCodecRegistry(codecRegistry).withWriteConcern(writeConcern)) - } - - @deprecated("Use useDatabase instead", "3.4.3") - def useDb[T](f: DB => T): T = MongoDB.use(connectionIdentifier)(f) - - def create(dbo: Bson): BaseDocument = { - val jv = BsonParser.serialize(dbo) - create(jv.asInstanceOf[JObject]) - } - - /** - * Find a single row by a qry, using a Bson. - */ - def find(qry: Bson): Option[BaseDocument] = { - useCollection { coll => - coll.find(qry).limit(1).first match { - case null => None - case dbo => { - Some(create(dbo)) - } - } - } - } - - /** - * Find a single document by _id using a String. - */ - def find(s: String): Option[BaseDocument] = - if (ObjectId.isValid(s)) - find(eqs("_id", new ObjectId(s))) - else - find(eqs("_id", s)) - - /** - * Find a single document by _id using an ObjectId. - */ - def find(oid: ObjectId): Option[BaseDocument] = find(eqs("_id", oid)) - - /** - * Find a single document by _id using a UUID. - */ - def find(uuid: UUID): Option[BaseDocument] = find(eqs("_id", uuid)) - - /** - * Find a single document by a qry using String, Any inputs - */ - def find(k: String, v: Any): Option[BaseDocument] = find(eqs(k, v)) - - /** - * Find a single document by a qry using a json query - */ - def find(json: JObject): Option[BaseDocument] = find(BsonParser.parse(json)) - - /** - * Find all documents in this collection - */ - def findAll: List[BaseDocument] = { - useCollection { coll => - /** Mongo Cursors are both Iterable and Iterator, - * so we need to reduce ambiguity for implicits - */ - coll.find.iterator.asScala.map(create).toList - } - } - - /** - * Find all documents using a Bson query. - */ - def findAll(qry: Bson, sort: Option[Bson], opts: FindOption*): List[BaseDocument] = { - val findOpts = opts.toList - - useCollection { coll => - val cur = coll.find(qry).limit( - findOpts.find(_.isInstanceOf[Limit]).map(x => x.value).getOrElse(0) - ).skip( - findOpts.find(_.isInstanceOf[Skip]).map(x => x.value).getOrElse(0) - ) - sort.foreach(s => cur.sort(s)) - /** Mongo Cursors are both Iterable and Iterator, - * so we need to reduce ambiguity for implicits - */ - cur.iterator.asScala.map(create).toList - } - } - - /** - * Find all documents using a Bson query. 
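A brief sketch of the document pattern these traits support, assuming a connection has already been registered with MongoDB.defineDb; the Person class and its field are illustrative:

```
import net.liftweb.mongodb.{MongoDocument, MongoDocumentMeta}
import org.bson.types.ObjectId

// Stored in the "persons" collection (class name, lower-cased, plus "s").
case class Person(_id: ObjectId, name: String) extends MongoDocument[Person] {
  def meta = Person
}

object Person extends MongoDocumentMeta[Person] {
  override def formats = allFormats // ObjectId/Date/DateTime/UUID serializers
}

val jane = Person(ObjectId.get, "Jane")
jane.save                          // upserts by _id
val found = Person.find(jane._id)  // Option[Person]
jane.delete                        // Box[DeleteResult]
```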
- */ - def findAll(qry: Bson, opts: FindOption*): List[BaseDocument] = - findAll(qry, None, opts :_*) - - /** - * Find all documents using a Bson query with sort - */ - def findAll(qry: Bson, sort: Bson, opts: FindOption*): List[BaseDocument] = - findAll(qry, Some(sort), opts :_*) - - /** - * Find all documents using a JObject query - */ - def findAll(qry: JObject, opts: FindOption*): List[BaseDocument] = - findAll(BsonParser.parse(qry), None, opts :_*) - - /** - * Find all documents using a JObject query with sort - */ - def findAll(qry: JObject, sort: JObject, opts: FindOption*): List[BaseDocument] = - findAll(BsonParser.parse(qry), Some(BsonParser.parse(sort)), opts :_*) - - /** - * Find all documents using a k, v query - */ - def findAll(k: String, o: Any, opts: FindOption*): List[BaseDocument] = - findAll(eqs(k, o), None, opts :_*) - - /** - * Find all documents using a k, v query with JObject sort - */ - def findAll(k: String, o: Any, sort: JObject, opts: FindOption*): List[BaseDocument] = - findAll(eqs(k, o), Some(BsonParser.parse(sort)), opts :_*) - - def insertOne(inst: BaseDocument, opts: InsertOneOptions = new InsertOneOptions): Box[BaseDocument] = tryo { - useCollection { coll => - val bson = BsonParser.parse(toJObject(inst)) - coll.insertOne(bson, opts) - inst - } - } - - def replaceOne(inst: BaseDocument, opts: ReplaceOptions = new ReplaceOptions): Box[UpdateResult] = tryo { - useCollection { coll => - val bson = BsonParser.parse(toJObject(inst)) - val id = bson.get("_id") - coll.replaceOne(eqs("_id", id), bson, opts) - } - } - - def replaceOne(qry: Bson, inst: BaseDocument, opts: ReplaceOptions): Box[UpdateResult] = tryo { - useCollection { coll => - val bson = BsonParser.parse(toJObject(inst)) - coll.replaceOne(qry, bson, opts) - } - } - - def replaceOne(qry: Bson, inst: BaseDocument): Box[UpdateResult] = - replaceOne(qry, inst, new ReplaceOptions) - - def replaceOne(qry: JObject, inst: BaseDocument, opts: ReplaceOptions): Box[UpdateResult] = tryo { - useCollection { coll => - val bson = BsonParser.parse(toJObject(inst)) - coll.replaceOne(BsonParser.parse(qry), bson, opts) - } - } - - def replaceOne(qry: JObject, inst: BaseDocument): Box[UpdateResult] = - replaceOne(qry, inst, new ReplaceOptions) - - /** - * Save a document to the db - */ - def save(inst: BaseDocument): UpdateResult = { - val opts = new ReplaceOptions().upsert(true) - useCollection { coll => - val bson = BsonParser.parse(toJObject(inst)) - val id = bson.get("_id") - coll.replaceOne(eqs("_id", id), bson, opts) - } - } - - @deprecated("Use save instead", "3.4.3") - def save(in: BaseDocument, db: DB) { - db.getCollection(collectionName).save(JObjectParser.parse(toJObject(in))) - } - - @deprecated("Use updateOne, updateMany, or replaceOne instead", "3.4.3") - def update(qry: JObject, newbd: BaseDocument, db: DB, opts: UpdateOption*) { - update(qry, toJObject(newbd), db, opts :_*) - } - - @deprecated("Use updateOne, updateMany, or replaceOne instead", "3.4.3") - def update(qry: JObject, newbd: BaseDocument, opts: UpdateOption*) { - MongoDB.use(connectionIdentifier) ( db => { - update(qry, newbd, db, opts :_*) - }) - } - -} - diff --git a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/MongoMeta.scala b/persistence/mongodb/src/main/scala/net/liftweb/mongodb/MongoMeta.scala deleted file mode 100644 index 74f1957b4d..0000000000 --- a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/MongoMeta.scala +++ /dev/null @@ -1,346 +0,0 @@ -/* -* Copyright 2010-2020 WorldWide Conferencing, LLC -* -* Licensed 
under the Apache License, Version 2.0 (the "License"); -* you may not use this file except in compliance with the License. -* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 -* Unless required by applicable law or agreed to in writing, software -* distributed under the License is distributed on an "AS IS" BASIS, -* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -* See the License for the specific language governing permissions and -* limitations under the License. -*/ - -package net.liftweb -package mongodb - -import net.liftweb.common.Box -import net.liftweb.json.{DefaultFormats, Formats} -import net.liftweb.json.JsonAST.JObject -import net.liftweb.util.ConnectionIdentifier -import net.liftweb.util.Helpers.tryo - -import scala.collection.JavaConverters._ - -import com.mongodb.{BasicDBObject, DB, DBCollection, DBObject, MongoClientSettings, WriteConcern} -import com.mongodb.client.{FindIterable, MongoCollection, MongoDatabase} -import com.mongodb.client.model.{DeleteOptions, IndexOptions, UpdateOptions} -import com.mongodb.client.result.{DeleteResult, UpdateResult} - -import org.bson.{BsonDocument, Document, UuidRepresentation} -import org.bson.codecs.configuration.CodecRegistry - -import org.bson.conversions.Bson -import org.bson.types.ObjectId - -trait JsonFormats { - // override this for custom Formats - def formats: Formats = DefaultFormats.lossless - - implicit lazy val _formats: Formats = formats - - lazy val allFormats = DefaultFormats.lossless + new ObjectIdSerializer + new DateSerializer + new DateTimeSerializer + new PatternSerializer + new UUIDSerializer -} - -trait MongoCodecs { - def codecRegistry: CodecRegistry -} - -/** - * This is used by both MongoDocumentMeta and MongoMetaRecord - */ -trait MongoMeta[BaseDocument, TDocument] extends JsonFormats with MongoCodecs { - - def connectionIdentifier: ConnectionIdentifier - - // class name has a $ at the end. - private lazy val _collectionName = getClass.getName.replaceAllLiterally("$", "") - - /** - * Collection names should begin with letters or an underscore and may include - * numbers; $ is reserved. Collections can be organized in namespaces; these - * are named groups of collections defined using a dot notation. For example, - * you could define collections blog.posts and blog.authors, both reside under - * "blog". Note that this is simply an organizational mechanism for the user - * -- the collection namespace is flat from the database's perspective. - * From: http://www.mongodb.org/display/DOCS/Collections - */ - def fixCollectionName = { - val colName = MongoRules.collectionName.vend.apply(connectionIdentifier, _collectionName) - - if (colName.contains("$")) colName.replaceAllLiterally("$", "_d_") - else colName - } - - /** - * The name of the database collection. Override this method if you - * want to change the collection to something other than the name of - * the class with an 's' appended to the end. - */ - def collectionName: String = fixCollectionName - - /** - * This will be used if set to Some, otherwise the WriteConcern set - * in MongoClientOptions will be used. Used by useCollection and useDatabase. - */ - def writeConcern: WriteConcern = MongoRules.defaultWriteConcern.vend - - /** - * Use the collection associated with this Meta. - */ - def useCollection[T](f: MongoCollection[TDocument] => T): T - - @deprecated("Use useCollection instead", "3.4.3") - def useColl[T](f: DBCollection => T): T - - /** - * Use the db associated with this Meta. 
- */ - def useDatabase[T](f: MongoDatabase => T): T - - @deprecated("Use useDatabase instead", "3.4.3") - def useDb[T](f: DB => T): T - - /** - * Count all documents - */ - def count: Box[Long] = tryo { useCollection { coll => coll.countDocuments } } - - /** - * Count documents by Bson query - */ - def count(qry: Bson): Box[Long] = tryo { useCollection { coll => coll.countDocuments(qry) } } - - /** - * Count documents by JObject query - */ - def count(qry: JObject): Box[Long] = count(BsonParser.parse(qry)) - - /** - * Count distinct records on a given field. - * - * **Warning:** This retrieves all matching documents and puts them in memory. - */ - def countDistinct(key: String, query: Bson): Box[Long] = tryo { - useCollection { coll => coll.distinct(key, query, classOf[Document]).iterator.asScala.toList.length } - } - - def createIndex(keys: Bson, opts: IndexOptions): Box[String] = tryo { - useCollection(_.createIndex(keys, opts)) - } - - def createIndex(keys: Bson): Box[String] = { - val options = new IndexOptions - createIndex(keys, options) - } - - def createIndex(keys: Bson, uniq: Boolean): Box[String] = { - val options = (new IndexOptions).unique(uniq) - createIndex(keys, options) - } - - def createIndex(keys: JObject, opts: IndexOptions): Box[String] = tryo { - useCollection(_.createIndex(BsonParser.parse(keys), opts)) - } - - def createIndex(keys: JObject): Box[String] = tryo { - useCollection(_.createIndex(BsonParser.parse(keys))) - } - - def createIndex(keys: JObject, uniq: Boolean = false): Box[String] = { - val options = (new IndexOptions).unique(uniq) - createIndex(BsonParser.parse(keys), options) - } - - /** - * Delete a single document by a Bson query - */ - def deleteOne(qry: Bson): Box[DeleteResult] = tryo { - useCollection(_.deleteOne(qry)) - } - - /** - * Delete a single document by a Bson query with the given DeleteOptions - */ - def deleteOne(qry: Bson, opts: DeleteOptions): Box[DeleteResult] = tryo { - useCollection(_.deleteOne(qry, opts)) - } - - /** - * Delete a single document by a JObject query - */ - def deleteOne(qry: JObject): Box[DeleteResult] = - deleteOne(BsonParser.parse(qry)) - - /** - * Delete a single document by a JObject query with the given DeleteOptions - */ - def deleteOne(qry: JObject, opts: DeleteOptions): Box[DeleteResult] = - deleteOne(BsonParser.parse(qry), opts) - - /** - * Delete a single document by a key-value pair query - */ - def deleteOne(k: String, v: Any, opts: DeleteOptions = new DeleteOptions): Box[DeleteResult] = { - deleteOne(new Document(k, v match { - case s: String if (ObjectId.isValid(s)) => new ObjectId(s) - case _ => v - }), opts) - } - - /** - * Delete many documents by a Bson query - */ - def deleteMany(qry: Bson): Box[DeleteResult] = tryo { - useCollection(_.deleteMany(qry)) - } - - /** - * Delete many documents by a Bson query with the given DeleteOptions - */ - def deleteMany(qry: Bson, opts: DeleteOptions): Box[DeleteResult] = tryo { - useCollection(_.deleteMany(qry, opts)) - } - - /** - * Delete many documents by a JObject query - */ - def deleteMany(qry: JObject): Box[DeleteResult] = - deleteMany(BsonParser.parse(qry)) - - /** - * Delete many documents by a JObject query with the given DeleteOptions - */ - def deleteMany(qry: JObject, opts: DeleteOptions): Box[DeleteResult] = - deleteMany(BsonParser.parse(qry), opts) - - /** - * Delete documents by a DBObject query - */ - @deprecated("Use deleteOne or deleteMany instead", "3.4.3") - def delete(qry: DBObject): Unit = - useColl { coll => coll.remove(qry) } - - // delete 
a document - @deprecated("Use deleteOne or deleteMany instead", "3.4.3") - def delete(k: String, v: Any) { - delete(new BasicDBObject(k, v match { - case s: String if (ObjectId.isValid(s)) => new ObjectId(s) - case _ => v - })) - } - - /** - * Delete documents by a JObject query - */ - @deprecated("Use deleteOne or deleteMany instead", "3.4.3") - def delete(qry: JObject): Unit = delete(JObjectParser.parse(qry)) - - /* drop this document collection */ - def drop: Box[Unit] = tryo { useCollection { coll => coll.drop() } } - - @deprecated("Use createIndex that takes IndexOptions as argument instead", "3.4.3") - def createIndex(keys: JObject, opts: JObject): Unit = - useColl { coll => - coll.createIndex(JObjectParser.parse(keys), JObjectParser.parse(opts)) - } - - /** - * Update many documents with a Bson query - */ - def updateMany(qry: Bson, update: Bson): Box[UpdateResult] = tryo { - useCollection(_.updateMany(qry, update)) - } - - /** - * Update many documents with a Bson query with the given UpdateOptions - */ - def updateMany(qry: Bson, update: Bson, opts: UpdateOptions): Box[UpdateResult] = tryo { - useCollection(_.updateMany(qry, update, opts)) - } - - /** - * Update many documents with a JObject query - */ - def updateMany(qry: JObject, update: JObject): Box[UpdateResult] = - updateMany(BsonParser.parse(qry), BsonParser.parse(update)) - - /** - * Update many documents with a JObject query with the given UpdateOptions - */ - def updateMany(qry: JObject, update: JObject, opts: UpdateOptions): Box[UpdateResult] = - updateMany(BsonParser.parse(qry), BsonParser.parse(update)) - - /** - * Update a single document with a Bson query - */ - def updateOne(qry: Bson, update: Bson): Box[UpdateResult] = tryo { - useCollection(_.updateOne(qry, update)) - } - - /** - * Update a single document with a Bson query with the given UpdateOptions - */ - def updateOne(qry: Bson, update: Bson, opts: UpdateOptions): Box[UpdateResult] = tryo { - useCollection(_.updateOne(qry, update, opts)) - } - - /** - * Update a single document with a JObject query - */ - def updateOne(qry: JObject, update: JObject): Box[UpdateResult] = { - updateOne(BsonParser.parse(qry), BsonParser.parse(update)) - } - - /** - * Update a single document with a JObject query with the given UpdateOptions - */ - def updateOne(qry: JObject, update: JObject, opts: UpdateOptions): Box[UpdateResult] = - updateOne(BsonParser.parse(qry), BsonParser.parse(update)) - - @deprecated("Use updateOne or updateMany instead", "3.4.3") - def update(qry: DBObject, newobj: DBObject, db: DB, opts: UpdateOption*) { - val dboOpts = opts.toList - db.getCollection(collectionName).update( - qry, - newobj, - dboOpts.find(_ == Upsert).map(x => true).getOrElse(false), - dboOpts.find(_ == Multi).map(x => true).getOrElse(false) - ) - } - - @deprecated("Use updateOne or updateMany instead", "3.4.3") - def update(qry: JObject, newobj: JObject, db: DB, opts: UpdateOption*) { - update( - JObjectParser.parse(qry), - JObjectParser.parse(newobj), - db, - opts :_* - ) - } - - @deprecated("Use updateOne or updateMany instead", "3.4.3") - def update(qry: JObject, newobj: JObject, opts: UpdateOption*) { - useDb { db => update(qry, newobj, db, opts :_*) } - } -} - -/** - * For passing in options to the find function - */ -abstract sealed class FindOption { - def value: Int -} -case class Limit(value: Int) extends FindOption -case class Skip(value: Int) extends FindOption - -/* -* For passing in options to the update function -*/ -@deprecated("Use 
com.mongodb.client.model.UpdateOptions instead", "3.4.3") -abstract sealed class UpdateOption -@deprecated("Use com.mongodb.client.model.UpdateOptions instead", "3.4.3") -case object Upsert extends UpdateOption -@deprecated("Use com.mongodb.client.model.UpdateOptions instead", "3.4.3") -case object Multi extends UpdateOption - diff --git a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/MongoRules.scala b/persistence/mongodb/src/main/scala/net/liftweb/mongodb/MongoRules.scala deleted file mode 100644 index 1c8e4c4bd4..0000000000 --- a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/MongoRules.scala +++ /dev/null @@ -1,42 +0,0 @@ -/** - * Copyright 2014-2017 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb - -import util.{ConnectionIdentifier, SimpleInjector} -import util.Helpers._ - -import com.mongodb.WriteConcern - -object MongoRules extends SimpleInjector { - private def defaultCollectionNameFunc(conn: ConnectionIdentifier, name: String): String = { - charSplit(name, '.').last.toLowerCase+"s" - } - - /** - * Calculate the name of a collection based on the full - * class name of the MongoDocument/MongoRecord. Must be - * set in Boot before any code that touches the - * MongoDocumentMeta/MongoMetaRecord. - * - * To get snake_case, use this - * - * MongoRules.collectionName.default.set((_,name) => StringHelpers.snakify(name)) - */ - val collectionName = new Inject[(ConnectionIdentifier, String) => String](defaultCollectionNameFunc _) {} - - /** The default WriteConcern used in some places. - */ - val defaultWriteConcern = new Inject[WriteConcern](WriteConcern.ACKNOWLEDGED) {} -} diff --git a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/PatternHelper.scala b/persistence/mongodb/src/main/scala/net/liftweb/mongodb/PatternHelper.scala deleted file mode 100644 index 43c4af4fb9..0000000000 --- a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/PatternHelper.scala +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright 2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb.mongodb - -import java.util.regex.Pattern - -// only i,x,m,s options are supported: https://docs.mongodb.com/manual/reference/operator/query/regex/ -object PatternHelper { - // values - // 128 -> Pattern.CANON_EQ - // 2 -> Pattern.CASE_INSENSITIVE - // 4 -> Pattern.COMMENTS - // 32 -> Pattern.DOTALL - // 16 -> Pattern.LITERAL - // 8 -> Pattern.MULTILINE - // 64 -> Pattern.UNICODE_CASE - // 1 -> Pattern.UNIX_LINES - - private val flagMap = Map( - Pattern.CANON_EQ -> "c", - Pattern.CASE_INSENSITIVE -> "i", - Pattern.COMMENTS -> "x", - Pattern.DOTALL -> "s", - Pattern.LITERAL -> "t", - Pattern.MULTILINE -> "m", - Pattern.UNICODE_CASE -> "u", - Pattern.UNIX_LINES -> "d" - ) - - def flagsToString(flags: Int): String = { - (for { - (mask, char) <- flagMap - if (flags & mask) != 0 - } yield char).mkString - } - - def optionsToFlags(opts: String): Int = { - opts.foldLeft(0) { (result, char) => char match { - case 'i' => result | Pattern.CASE_INSENSITIVE - case 'x' => result | Pattern.COMMENTS - case 'm' => result | Pattern.MULTILINE - case 's' => result | Pattern.DOTALL - case _ => result - } } - } -} \ No newline at end of file diff --git a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/Serializers.scala b/persistence/mongodb/src/main/scala/net/liftweb/mongodb/Serializers.scala deleted file mode 100644 index 7c1ae6d822..0000000000 --- a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/Serializers.scala +++ /dev/null @@ -1,133 +0,0 @@ -/* -* Copyright 2010-2014 WorldWide Conferencing, LLC -* -* Licensed under the Apache License, Version 2.0 (the "License"); -* you may not use this file except in compliance with the License. -* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 -* Unless required by applicable law or agreed to in writing, software -* distributed under the License is distributed on an "AS IS" BASIS, -* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -* See the License for the specific language governing permissions and -* limitations under the License. -*/ - -package net.liftweb -package mongodb - -import json.{Formats, MappingException, Serializer, TypeInfo} -import json.JsonAST._ - - -import java.util.{Date, UUID} -import java.util.regex.Pattern - -import org.bson.types.ObjectId - -import org.joda.time.DateTime - -/* -* Provides a way to serialize/de-serialize ObjectIds. -* -* Queries for a ObjectId (oid) using the lift-json DSL look like: -* ("_id" -> ("$oid" -> oid.toString)) -*/ -class ObjectIdSerializer extends Serializer[ObjectId] { - private val ObjectIdClass = classOf[ObjectId] - - def deserialize(implicit format: Formats): PartialFunction[(TypeInfo, JValue), ObjectId] = { - case (TypeInfo(ObjectIdClass, _), json) => json match { - case JsonObjectId(objectId) => objectId - case x => throw new MappingException("Can't convert " + x + " to ObjectId") - } - } - - def serialize(implicit formats: Formats): PartialFunction[Any, JValue] = { - case x: ObjectId => JsonObjectId(x) - } -} - -/* -* Provides a way to serialize/de-serialize Patterns. 
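A quick sketch of the flag handling used for Pattern values in this module: PatternHelper converts between java.util.regex flag bits and MongoDB-style option letters. Note that flagsToString builds its result from an unordered Map, so letter order is not guaranteed:

```
import java.util.regex.Pattern
import net.liftweb.mongodb.PatternHelper

val flags = Pattern.CASE_INSENSITIVE | Pattern.MULTILINE

// Java flag bits -> option letters ("im", in some order).
val opts = PatternHelper.flagsToString(flags)

// Option letters -> Java flag bits; letters outside i, x, m, s are ignored.
assert(PatternHelper.optionsToFlags("im") == flags)
```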
-* -* Queries for a Pattern (pattern) using the lift-json DSL look like: -* ("pattern" -> (("$regex" -> pattern.pattern) ~ ("$flags" -> pattern.flags))) -* ("pattern" -> (("$regex" -> "^Mo") ~ ("$flags" -> Pattern.CASE_INSENSITIVE))) -*/ -class PatternSerializer extends Serializer[Pattern] { - private val PatternClass = classOf[Pattern] - - def deserialize(implicit format: Formats): PartialFunction[(TypeInfo, JValue), Pattern] = { - case (TypeInfo(PatternClass, _), json) => json match { - case JsonRegex(regex) => regex - case x => throw new MappingException("Can't convert " + x + " to Pattern") - } - } - - def serialize(implicit formats: Formats): PartialFunction[Any, JValue] = { - case x: Pattern => JsonRegex(x) - } -} - -/* -* Provides a way to serialize/de-serialize Dates. -* -* Queries for a Date (dt) using the lift-json DSL look like: -* ("dt" -> ("$dt" -> formats.dateFormat.format(dt))) -*/ -class DateSerializer extends Serializer[Date] { - private val DateClass = classOf[Date] - - def deserialize(implicit format: Formats): PartialFunction[(TypeInfo, JValue), Date] = { - case (TypeInfo(DateClass, _), json) => json match { - case JsonDate(dt) => dt - case x => throw new MappingException("Can't convert " + x + " to Date") - } - } - - def serialize(implicit format: Formats): PartialFunction[Any, JValue] = { - case x: Date => JsonDate(x) - } -} - -/* -* Provides a way to serialize/de-serialize joda time DateTimes. -* -* Queries for a Date (dt) using the lift-json DSL look like: -* ("dt" -> ("$dt" -> formats.dateFormat.format(dt))) -*/ -class DateTimeSerializer extends Serializer[DateTime] { - private val DateTimeClass = classOf[DateTime] - - def deserialize(implicit format: Formats): PartialFunction[(TypeInfo, JValue), DateTime] = { - case (TypeInfo(DateTimeClass, _), json) => json match { - case JsonDateTime(dt) => dt - case x => throw new MappingException("Can't convert " + x + " to Date") - } - } - - def serialize(implicit format: Formats): PartialFunction[Any, JValue] = { - case x: DateTime => JsonDateTime(x) - } -} - -/* -* Provides a way to serialize/de-serialize UUIDs. -* -* Queries for a UUID (u) using the lift-json DSL look like: -* ("uuid" -> ("$uuid" -> u.toString)) -*/ -class UUIDSerializer extends Serializer[UUID] { - private val UUIDClass = classOf[UUID] - - def deserialize(implicit format: Formats): PartialFunction[(TypeInfo, JValue), UUID] = { - case (TypeInfo(UUIDClass, _), json) => json match { - case JsonUUID(uuid) => uuid - case x => throw new MappingException("Can't convert " + x + " to UUID") - } - } - - def serialize(implicit format: Formats): PartialFunction[Any, JValue] = { - case x: UUID => JsonUUID(x) - } -} - diff --git a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/codecs/BigDecimalCodec.scala b/persistence/mongodb/src/main/scala/net/liftweb/mongodb/codecs/BigDecimalCodec.scala deleted file mode 100644 index 0098e81246..0000000000 --- a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/codecs/BigDecimalCodec.scala +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright 2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb.mongodb -package codecs - -import scala.math.BigDecimal - -import org.bson.{BsonReader, BsonWriter} -import org.bson.codecs._ -import org.bson.types.Decimal128 - -/** - * A Codec for BigDecimal instances. - */ -case class BigDecimalCodec() extends Codec[BigDecimal] { - override def encode(writer: BsonWriter, value: BigDecimal, encoderContext: EncoderContext): Unit = { - writer.writeDecimal128(new Decimal128(value.bigDecimal)) - } - - override def decode(reader: BsonReader, decoderContext: DecoderContext): BigDecimal = { - BigDecimal(reader.readDecimal128().bigDecimalValue()) - } - - override def getEncoderClass(): Class[BigDecimal] = classOf[BigDecimal] -} \ No newline at end of file diff --git a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/codecs/BigDecimalStringCodec.scala b/persistence/mongodb/src/main/scala/net/liftweb/mongodb/codecs/BigDecimalStringCodec.scala deleted file mode 100644 index c938cc3863..0000000000 --- a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/codecs/BigDecimalStringCodec.scala +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright 2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb.mongodb -package codecs - -import scala.math.BigDecimal - -import org.bson.{BsonReader, BsonWriter} -import org.bson.codecs._ -import org.bson.types.Decimal128 - -/** - * A Codec for BigDecimal instances that saves the value as a String. - */ -case class BigDecimalStringCodec() extends Codec[BigDecimal] { - override def encode(writer: BsonWriter, value: BigDecimal, encoderContext: EncoderContext): Unit = { - writer.writeString(value.toString) - } - - override def decode(reader: BsonReader, decoderContext: DecoderContext): BigDecimal = { - BigDecimal(reader.readString) - } - - override def getEncoderClass(): Class[BigDecimal] = classOf[BigDecimal] -} \ No newline at end of file diff --git a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/codecs/BigIntCodec.scala b/persistence/mongodb/src/main/scala/net/liftweb/mongodb/codecs/BigIntCodec.scala deleted file mode 100644 index aaf3d27b00..0000000000 --- a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/codecs/BigIntCodec.scala +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright 2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb.mongodb -package codecs - -import scala.math.BigInt - -import org.bson.{BsonReader, BsonWriter} -import org.bson.codecs._ - -/** - * A Codec for BigInt instances. Values are stored as INT64. - */ -case class BigIntLongCodec() extends Codec[BigInt] { - override def encode(writer: BsonWriter, value: BigInt, encoderContext: EncoderContext): Unit = { - writer.writeInt64(value.longValue) - } - - override def decode(reader: BsonReader, decoderContext: DecoderContext): BigInt = { - BigInt(reader.readInt64()) - } - - override def getEncoderClass(): Class[BigInt] = classOf[BigInt] -} \ No newline at end of file diff --git a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/codecs/BsonTypeClassMap.scala b/persistence/mongodb/src/main/scala/net/liftweb/mongodb/codecs/BsonTypeClassMap.scala deleted file mode 100644 index 03bba78a52..0000000000 --- a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/codecs/BsonTypeClassMap.scala +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright 2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb.mongodb -package codecs - -import org.bson.BsonType - -/** - * A companion object for BsonTypeClassMap. - */ -object BsonTypeClassMap { - def apply(replacements: (BsonType, Class[_])*): BsonTypeClassMap = { - val jreplacements = new java.util.HashMap[BsonType, Class[_]]() - replacements.foreach(kv => jreplacements.put(kv._1, kv._2)) - new BsonTypeClassMap(jreplacements) - } -} \ No newline at end of file diff --git a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/codecs/CalendarCodec.scala b/persistence/mongodb/src/main/scala/net/liftweb/mongodb/codecs/CalendarCodec.scala deleted file mode 100644 index 8ac0a7fc17..0000000000 --- a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/codecs/CalendarCodec.scala +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb -package codecs - -import java.util.{Calendar, GregorianCalendar} - -import org.bson.codecs._ -import org.bson.{BsonReader, BsonWriter} - -/** - * A Codec for Calendar instances. 
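A sketch of registering these codecs alongside the driver defaults, using the same CodecRegistries combinators that appear elsewhere in this module; which codecs to include is an application-level choice:

```
import com.mongodb.MongoClientSettings
import org.bson.codecs.configuration.CodecRegistries
import net.liftweb.mongodb.codecs.{BigDecimalCodec, BigIntLongCodec}

// Driver defaults first, then the Lift-provided scala.math codecs.
val registry = CodecRegistries.fromRegistries(
  MongoClientSettings.getDefaultCodecRegistry(),
  CodecRegistries.fromCodecs(BigDecimalCodec(), BigIntLongCodec())
)
```

Such a registry can then be supplied via withCodecRegistry on a database or collection, as the Meta traits above do.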
- */ -case class CalendarCodec() extends Codec[GregorianCalendar] { - override def decode(reader: BsonReader, decoderContext: DecoderContext): GregorianCalendar = { - val cal = new GregorianCalendar() - cal.setTimeInMillis(reader.readDateTime()) - cal - } - - override def encode(writer: BsonWriter, value: GregorianCalendar, encoderContext: EncoderContext): Unit = { - writer.writeDateTime(value.getTimeInMillis()) - } - - override def getEncoderClass(): Class[GregorianCalendar] = classOf[GregorianCalendar] -} \ No newline at end of file diff --git a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/codecs/JodaDateTimeCodec.scala b/persistence/mongodb/src/main/scala/net/liftweb/mongodb/codecs/JodaDateTimeCodec.scala deleted file mode 100644 index 4e85f09689..0000000000 --- a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/codecs/JodaDateTimeCodec.scala +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright 2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb.mongodb -package codecs - -import org.joda.time.DateTime - -import org.bson.codecs._ -import org.bson.{BsonReader, BsonWriter} - -/** - * A Codec for joda DateTime instances. - */ -case class JodaDateTimeCodec() extends Codec[DateTime] { - override def decode(reader: BsonReader, decoderContext: DecoderContext): DateTime = { - new DateTime(reader.readDateTime()) - } - - override def encode(writer: BsonWriter, value: DateTime, encoderContext: EncoderContext): Unit = { - writer.writeDateTime(value.getMillis()) - } - - override def getEncoderClass(): Class[DateTime] = classOf[DateTime] -} \ No newline at end of file diff --git a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/codecs/package.scala b/persistence/mongodb/src/main/scala/net/liftweb/mongodb/codecs/package.scala deleted file mode 100644 index 9f897d84ae..0000000000 --- a/persistence/mongodb/src/main/scala/net/liftweb/mongodb/codecs/package.scala +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright 2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - - -package net.liftweb.mongodb - -package object codecs { - type BsonTypeClassMap = org.bson.codecs.BsonTypeClassMap -} diff --git a/persistence/mongodb/src/test/resources/logging.properties b/persistence/mongodb/src/test/resources/logging.properties deleted file mode 100644 index 9e6ae16de7..0000000000 --- a/persistence/mongodb/src/test/resources/logging.properties +++ /dev/null @@ -1,16 +0,0 @@ -# Specify the handlers to create in the root logger -# (all loggers are children of the root logger) -# The following creates two handlers -handlers = java.util.logging.ConsoleHandler - -# Set the default logging level for the root logger -.level = ALL - -# Set the default logging level for new ConsoleHandler instances -java.util.logging.ConsoleHandler.level = INFO - -# Set the default formatter for new ConsoleHandler instances -java.util.logging.ConsoleHandler.formatter = java.util.logging.SimpleFormatter - -# Set the default logging level for the named logger -org.mongodb.driver.level = WARNING diff --git a/persistence/mongodb/src/test/scala/net/liftweb/mongodb/BsonDSLSpec.scala b/persistence/mongodb/src/test/scala/net/liftweb/mongodb/BsonDSLSpec.scala deleted file mode 100644 index e2dea8f81e..0000000000 --- a/persistence/mongodb/src/test/scala/net/liftweb/mongodb/BsonDSLSpec.scala +++ /dev/null @@ -1,178 +0,0 @@ -/* - * Copyright 2011-2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package mongodb - -import BsonDSL._ -import json._ - -import scala.collection.JavaConverters._ -import scala.util.matching.Regex - -import java.util.{Date, UUID} -import java.util.regex.Pattern - -import org.bson.types.ObjectId -import org.joda.time.DateTime -import org.specs2.mutable.Specification - -import com.mongodb.{BasicDBList, DBObject} - -class BsonDSLSpec extends Specification { - "BsonDSL Specification".title - - "BsonDSL" should { - "Convert ObjectId properly" in { - val oid: ObjectId = ObjectId.get - val qry: JObject = ("id" -> oid) - val dbo: DBObject = JObjectParser.parse(qry)(DefaultFormats) - - dbo.get("id") must_== oid - } - - "Convert List[ObjectId] properly" in { - val oidList = ObjectId.get :: ObjectId.get :: ObjectId.get :: Nil - val qry: JObject = ("ids" -> oidList) - val dbo: DBObject = JObjectParser.parse(qry)(DefaultFormats) - val oidList2: List[ObjectId] = - dbo - .get("ids") - .asInstanceOf[BasicDBList] - .asScala - .toList - .map(_.asInstanceOf[ObjectId]) - - oidList2 must_== oidList - } - - "Convert Pattern properly" in { - val ptrn: Pattern = Pattern.compile("^Mongo", Pattern.MULTILINE | Pattern.CASE_INSENSITIVE) - val qry: JObject = ("ptrn" -> ptrn) - val dbo: DBObject = JObjectParser.parse(qry)(DefaultFormats) - val ptrn2: Pattern = dbo.get("ptrn").asInstanceOf[Pattern] - - ptrn2.pattern must_== ptrn.pattern - ptrn2.flags must_== ptrn.flags - } - - "Convert List[Pattern] properly" in { - val ptrnList = - Pattern.compile("^Mongo1", Pattern.MULTILINE | Pattern.CASE_INSENSITIVE) :: - Pattern.compile("^Mongo2", Pattern.CASE_INSENSITIVE) :: - Pattern.compile("^Mongo3") :: Nil - val qry: JObject = ("ptrns" -> ptrnList) - val dbo: DBObject = JObjectParser.parse(qry)(DefaultFormats) - val ptrnList2: List[Pattern] = - dbo - .get("ptrns") - .asInstanceOf[BasicDBList] - .asScala - .toList - .map(_.asInstanceOf[Pattern]) - - for (i <- 0 to 2) yield { - ptrnList(i).pattern must_== ptrnList2(i).pattern - ptrnList(i).flags must_== ptrnList2(i).flags - } - - ptrnList2.length must_== ptrnList.length - } - - "Convert Regex properly" in { - val regex: Regex = "^Mongo".r - val qry: JObject = ("regex" -> regex) - val dbo: DBObject = JObjectParser.parse(qry)(DefaultFormats) - val ptrn: Pattern = dbo.get("regex").asInstanceOf[Pattern] - - regex.pattern.pattern must_== ptrn.pattern - regex.pattern.flags must_== ptrn.flags - } - - "Convert UUID properly" in { - val uuid: UUID = UUID.randomUUID - val qry: JObject = ("uuid" -> uuid) - val dbo: DBObject = JObjectParser.parse(qry)(DefaultFormats) - - dbo.get("uuid") must_== uuid - } - - "Convert List[UUID] properly" in { - val uuidList = UUID.randomUUID :: UUID.randomUUID :: UUID.randomUUID :: Nil - val qry: JObject = ("ids" -> uuidList) - val dbo: DBObject = JObjectParser.parse(qry)(DefaultFormats) - val uuidList2: List[UUID] = - dbo - .get("ids") - .asInstanceOf[BasicDBList] - .asScala - .toList - .map(_.asInstanceOf[UUID]) - - uuidList2 must_== uuidList - } - - "Convert Date properly" in { - implicit val formats = DefaultFormats.lossless - val dt: Date = new Date - val qry: JObject = ("now" -> dt) - val dbo: DBObject = JObjectParser.parse(qry) - - dbo.get("now") must_== dt - } - - "Convert List[Date] properly" in { - implicit val formats = DefaultFormats.lossless - val dateList = new Date :: new Date :: new Date :: Nil - val qry: JObject = ("dts" -> dateList) - val dbo: DBObject = JObjectParser.parse(qry) - val dateList2: List[Date] = - dbo - .get("dts") - .asInstanceOf[BasicDBList] - .asScala - .toList - 
.map(_.asInstanceOf[Date]) - - dateList2 must_== dateList - } - - "Convert DateTime properly" in { - implicit val formats = DefaultFormats.lossless - val dt: DateTime = new DateTime - val qry: JObject = ("now" -> dt) - val dbo: DBObject = JObjectParser.parse(qry) - - new DateTime(dbo.get("now")) must_== dt - } - - "Convert List[DateTime] properly" in { - implicit val formats = DefaultFormats.lossless - val dateList = new DateTime :: new DateTime :: new DateTime :: Nil - val qry: JObject = ("dts" -> dateList) - val dbo: DBObject = JObjectParser.parse(qry) - val dateList2: List[DateTime] = - dbo - .get("dts") - .asInstanceOf[BasicDBList] - .asScala - .toList - .map(_.asInstanceOf[Date]).map(d => new DateTime(d)) - - dateList2 must_== dateList - } - } -} \ No newline at end of file diff --git a/persistence/mongodb/src/test/scala/net/liftweb/mongodb/BsonParserSpec.scala b/persistence/mongodb/src/test/scala/net/liftweb/mongodb/BsonParserSpec.scala deleted file mode 100644 index 9c93072e69..0000000000 --- a/persistence/mongodb/src/test/scala/net/liftweb/mongodb/BsonParserSpec.scala +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright 2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb - -import json._ -import JsonDSL._ -import util.Helpers._ - -import org.bson.types.ObjectId -import org.specs2.mutable.Specification - -import org.bson._ - -class BsonParserSpec extends Specification { - "BsonParser Specification".title - - def buildTestData: (ObjectId, BsonDocument) = { - val oid = ObjectId.get - val dbo = BsonParser.parse(("x" -> oid.toString))(DefaultFormats) - (oid, dbo) - } - - "BsonParser" should { - "convert strings to ObjectId by default" in { - val (oid, dbo) = buildTestData - val xval = tryo(dbo.getObjectId("x")) - - xval.toList map { x => - x.getValue must_== oid - } - - xval.isDefined must_== true - } - "not convert strings to ObjectId when configured not to" in { - BsonParser.stringProcessor.doWith((s: String) => new BsonString(s)) { - val (oid, dbo) = buildTestData - val xval = tryo(dbo.getString("x")) - - xval.toList map { x => - x.getValue must_== oid.toString - } - - xval.isDefined must_== true - } - } - } -} diff --git a/persistence/mongodb/src/test/scala/net/liftweb/mongodb/CustomSerializersSpec.scala b/persistence/mongodb/src/test/scala/net/liftweb/mongodb/CustomSerializersSpec.scala deleted file mode 100644 index 3f5a61032e..0000000000 --- a/persistence/mongodb/src/test/scala/net/liftweb/mongodb/CustomSerializersSpec.scala +++ /dev/null @@ -1,210 +0,0 @@ -/* - * Copyright 2010-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb - -import java.util.{Calendar, Date, TimeZone, UUID} -import java.util.regex.Pattern - -import org.bson.types.ObjectId - -import org.joda.time.{Instant, DateTime} - -import org.specs2.mutable.Specification - - -package customserializersspecs { - - /* - * ObjectId as String - */ - case class Person(_id: String) - extends MongoDocument[Person] - { - def meta = Person - } - object Person extends MongoDocumentMeta[Person] - - /* - * ObjectId as ObjectId - */ - case class PersonWithObjectId(_id: ObjectId) - extends MongoDocument[PersonWithObjectId] - { - def meta = PersonWithObjectId - } - object PersonWithObjectId extends MongoDocumentMeta[PersonWithObjectId] { - override def formats = allFormats - } - - /* - * Pattern as Pattern - */ - case class PersonWithPattern(_id:ObjectId, pattern: Pattern) extends MongoDocument[PersonWithPattern] { - def meta = PersonWithPattern - } - object PersonWithPattern extends MongoDocumentMeta[PersonWithPattern] { - override def formats = allFormats - } - - /* - * Date as Date - */ - case class PersonWithDate(_id: ObjectId, birthDate: Date) extends MongoDocument[PersonWithDate] { - def meta = PersonWithDate - } - object PersonWithDate extends MongoDocumentMeta[PersonWithDate] { - override def formats = allFormats - } - - /* - * DateTime as DateTime - */ - case class PersonWithDateTime(_id: ObjectId, birthDate: DateTime) extends MongoDocument[PersonWithDateTime] { - def meta = PersonWithDateTime - } - object PersonWithDateTime extends MongoDocumentMeta[PersonWithDateTime] { - override def formats = allFormats - } - - /* - * UUID as UUID - */ - case class PersonWithUUID(_id: UUID) extends MongoDocument[PersonWithUUID] { - def meta = PersonWithUUID - } - object PersonWithUUID extends MongoDocumentMeta[PersonWithUUID] { - override def formats = allFormats - } -} - - -/** - * Systems under specification for CustomSerializers. 
- */ -class CustomSerializersSpec extends Specification with MongoTestKit { - "CustomSerializers Specification".title - - import customserializersspecs._ - - val utc = TimeZone.getTimeZone("UTC") - - "CustomSerializers" should { - "handle ObjectId as String value" in { - checkMongoIsRunning - - // test data - val jack = Person(ObjectId.get.toString) - - // save the Person document - jack.save - - // retrieve it and compare - Person.find(jack._id) must beLike { - case Some(j) => - j._id mustEqual jack._id - } - } - - "handle ObjectId as ObjectId value using ObjectIdSerializer" in { - checkMongoIsRunning - - // test data - val jack = PersonWithObjectId(ObjectId.get) - - // save the PersonWithObjectId document - jack.save - - // retrieve it and compare - PersonWithObjectId.find(jack._id) must beLike { - case Some(j) => - j._id mustEqual jack._id - } - } - - "handle Pattern as Pattern value using PatternSerializer" in { - checkMongoIsRunning - - // test data - val pattern = Pattern.compile("(?idmsux-idmsux)m(a)gi(?:ic)?[a-zA-Z]+boom") - val jack = PersonWithPattern(ObjectId.get, pattern) - - // save the PersonWithPattern document - jack.save - - // retrieve it and compare - PersonWithPattern.find(jack._id) must beLike { - case Some(j) => - j.pattern.pattern mustEqual jack.pattern.pattern - j.pattern.flags mustEqual jack.pattern.flags - } - } - - "handle DateTime as DateTime value using DateTimeSerializer" in { - checkMongoIsRunning - - // test data - val birthday = (new Instant(1288742280000L)).toDateTime - val jack = PersonWithDateTime(ObjectId.get, birthday) - - // save the Person document - jack.save - - // retrieve it and compare - PersonWithDateTime.find(jack._id) must beLike { - case Some(j) => - j.birthDate mustEqual jack.birthDate - } - } - - "handle Date as Date value using DateSerializer" in { - checkMongoIsRunning - - // test data - val bdjack = Calendar.getInstance - bdjack.setTimeZone(utc) - bdjack.setTimeInMillis(1288742280000L) - val jack = PersonWithDate(ObjectId.get, bdjack.getTime) - - // save the Person document - jack.save - - // retrieve it and compare - PersonWithDate.find(jack._id) must beLike { - case Some(j) => - j.birthDate mustEqual jack.birthDate - } - } - - "handle UUID as UUID value using UUIDSerializer" in { - checkMongoIsRunning - - // test data - val uuid = UUID.randomUUID - val jack = PersonWithUUID(uuid) - - // save the Person document - jack.save - - // retrieve it and compare - PersonWithUUID.find(jack._id) must beLike { - case Some(j) => - j._id mustEqual jack._id - } - } - } -} diff --git a/persistence/mongodb/src/test/scala/net/liftweb/mongodb/LegacyJObjectParserSpec.scala b/persistence/mongodb/src/test/scala/net/liftweb/mongodb/LegacyJObjectParserSpec.scala deleted file mode 100644 index 415c3bcff9..0000000000 --- a/persistence/mongodb/src/test/scala/net/liftweb/mongodb/LegacyJObjectParserSpec.scala +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright 2012-2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package mongodb - -import json._ -import JsonDSL._ -import util.Helpers._ - -import org.bson.types.ObjectId -import org.specs2.mutable.Specification - -import com.mongodb.DBObject - -class LegacyJObjectParserSpec extends Specification { - "LegacyJObjectParser Specification".title - - def buildTestData: (ObjectId, DBObject) = { - val oid = ObjectId.get - val dbo = JObjectParser.parse(("x" -> oid.toString))(DefaultFormats) - (oid, dbo) - } - - "JObjectParser" should { - "convert strings to ObjectId by default" in { - val (oid, dbo) = buildTestData - val xval = tryo(dbo.get("x").asInstanceOf[ObjectId]) - - xval.toList map { x => - x must_== oid - } - - xval.isDefined must_== true - } - "not convert strings to ObjectId when configured not to" in { - JObjectParser.stringProcessor.doWith((s: String) => s) { - val (oid, dbo) = buildTestData - val xval = tryo(dbo.get("x").asInstanceOf[String]) - - xval.toList map { x => - x must_== oid.toString - } - - xval.isDefined must_== true - } - } - } -} diff --git a/persistence/mongodb/src/test/scala/net/liftweb/mongodb/LegacyMongoDirectMongoClientSpec.scala b/persistence/mongodb/src/test/scala/net/liftweb/mongodb/LegacyMongoDirectMongoClientSpec.scala deleted file mode 100644 index 9b2bcad4e2..0000000000 --- a/persistence/mongodb/src/test/scala/net/liftweb/mongodb/LegacyMongoDirectMongoClientSpec.scala +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright 2014-2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb - -import json.DefaultFormats - -import com.mongodb._ - -import org.specs2.mutable.Specification - -/** - * System under specification for MongoDirectMonoClient. 
- */ -class LegacyMongoDirectMongoClientSpec extends Specification with MongoTestKit { - "LegacyMongoDirectMongoClient Specification".title - - "MongoClient example" in { - - checkMongoIsRunning - - // use a Mongo instance directly - MongoDB.use( db => { - val coll = db.getCollection("testCollection") - - // create a unique index on name - coll.createIndex(new BasicDBObject("name", 1), new BasicDBObject("unique", true)) - - // build the DBObjects - val doc = new BasicDBObject - val doc2 = new BasicDBObject - val doc3 = new BasicDBObject - - doc.put("name", "MongoSession") - doc.put("type", "db") - doc.put("count", 1: java.lang.Integer) - - doc2.put("name", "MongoSession") - doc2.put("type", "db") - doc2.put("count", 1: java.lang.Integer) - - doc3.put("name", "MongoDB") - doc3.put("type", "db") - doc3.put("count", 1: java.lang.Integer) - - // save the docs to the db - coll.save(doc) - coll.save(doc2) must throwA[MongoException] - coll.save(doc3) - }) - success - } -} diff --git a/persistence/mongodb/src/test/scala/net/liftweb/mongodb/LegacyMongoDirectSpec.scala b/persistence/mongodb/src/test/scala/net/liftweb/mongodb/LegacyMongoDirectSpec.scala deleted file mode 100644 index b3fe0ef92f..0000000000 --- a/persistence/mongodb/src/test/scala/net/liftweb/mongodb/LegacyMongoDirectSpec.scala +++ /dev/null @@ -1,290 +0,0 @@ -/* - * Copyright 2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb - -import net.liftweb.util.{Helpers, DefaultConnectionIdentifier} - -import java.util.UUID -import java.util.regex.Pattern - -import com.mongodb.{WriteConcern, BasicDBObject, BasicDBObjectBuilder, MongoException} - -import org.specs2.mutable.Specification - -import json.DefaultFormats -import net.liftweb.common.Failure - - -/** - * System under specification for MongoDirect. 
- */ -class LegacyMongoDirectSpec extends Specification with MongoTestKit { - "LegacyMongoDirect Specification".title - - def date(s: String) = DefaultFormats.dateFormat.parse(s).get - - "Mongo tutorial example" in { - - checkMongoIsRunning - - // build the DBObject - val doc = new BasicDBObject - - doc.put("name", "MongoDB") - doc.put("type", "database") - doc.put("count", 1: java.lang.Integer) - - val info = new BasicDBObject - - info.put("x", 203: java.lang.Integer) - info.put("y", 102: java.lang.Integer) - - doc.put("info", info) - - // use the Mongo instance directly - MongoDB.use(DefaultConnectionIdentifier) ( db => { - val coll = db.getCollection("testCollection") - - // save the doc to the db - coll.save(doc) - - // get the doc back from the db and compare them - coll.findOne must_== doc - - // upsert - doc.put("type", "document") - doc.put("count", 2: java.lang.Integer) - val q = new BasicDBObject("name", "MongoDB") // the query to select the document(s) to update - val o = doc // the new object to update with, replaces the entire document, except possibly _id - val upsert = false // if the database should create the element if it does not exist - val apply = false // if an _id field should be added to the new object - coll.update(q, o, upsert, apply) - - // get the doc back from the db and compare - coll.findOne.get("type") must_== "document" - coll.findOne.get("count") must_== 2 - - // modifier operations $inc, $set, $push... - val o2 = new BasicDBObject - o2.put("$inc", new BasicDBObject("count", 1)) // increment count by 1 - o2.put("$set", new BasicDBObject("type", "docdb")) // set type - coll.update(q, o2, false, false) - - // get the doc back from the db and compare - coll.findOne.get("type") must_== "docdb" - coll.findOne.get("count") must_== 3 - - if (!debug) { - // delete it - coll.remove(new BasicDBObject("_id", doc.get("_id"))) - coll.find.count must_== 0 - coll.drop - } - - // server-side eval - val six = db.eval(" function() { return 3+3; } ") - six must_== 6 - }) - } - - "Mongo tutorial 2 example" in { - - checkMongoIsRunning - - // use a DBCollection directly - MongoDB.useCollection("iDoc") ( coll => { - // insert multiple documents - for (i <- List.range(1, 101)) { - coll.insert(new BasicDBObject().append("i", i)) - } - - // create an index - coll.createIndex(new BasicDBObject("i", 1)) // create index on "i", ascending - - // count the docs - coll.getCount must_== 100 - - // get the count using a query - coll.getCount(new BasicDBObject("i", new BasicDBObject("$gt", 50))) must_== 50 - - // use a cursor to get all docs - val cur = coll.find - - cur.count must_== 100 - - // get a single document with a query ( i = 71 ) - val query = new BasicDBObject("i", 71) - val cur2 = coll.find(query) - - cur2.count must_== 1 - cur2.next.get("i") must_== 71 - - // get a set of documents with a query - // e.g. 
find all where i > 50 - val cur3 = coll.find(new BasicDBObject("i", new BasicDBObject("$gt", 50))) - - cur3.count must_== 50 - - // range - 20 < i <= 30 - val cur4 = coll.find(new BasicDBObject("i", new BasicDBObject("$gt", 20).append("$lte", 30))) - - cur4.count must_== 10 - - // limiting result set - val cur5 = coll.find(new BasicDBObject("i", new BasicDBObject("$gt", 50))).limit(3) - - var cntr5 = 0 - while(cur5.hasNext) { - cur5.next - cntr5 += 1 - } - cntr5 must_== 3 - - // skip - val cur6 = coll.find(new BasicDBObject("i", new BasicDBObject("$gt", 50))).skip(10) - - var cntr6 = 0 - while(cur6.hasNext) { - cntr6 += 1 - cur6.next.get("i") must_== 60+cntr6 - } - cntr6 must_== 40 - - /* skip and limit */ - val cur7 = coll.find.skip(10).limit(20) - - var cntr7 = 0 - while(cur7.hasNext) { - cntr7 += 1 - cur7.next.get("i") must_== 10+cntr7 - } - cntr7 must_== 20 - - // sorting - val cur8 = coll.find.sort(new BasicDBObject("i", -1)) // descending - - var cntr8 = 100 - while(cur8.hasNext) { - cur8.next.get("i") must_== cntr8 - cntr8 -= 1 - } - - // remove some docs by a query - coll.remove(new BasicDBObject("i", new BasicDBObject("$gt", 50))) - - coll.find.count must_== 50 - - if (!debug) { - // delete the rest of the rows - coll.remove(new BasicDBObject("i", new BasicDBObject("$lte", 50))) - coll.find.count must_== 0 - coll.drop - } - }) - success - } - - "Mongo more examples" in { - - checkMongoIsRunning - - // use a Mongo instance directly - MongoDB.use ( db => { - val coll = db.getCollection("testCollection") - - // create a unique index on name - coll.createIndex(new BasicDBObject("name", 1), new BasicDBObject("unique", true)) - - // build the DBObjects - val doc = new BasicDBObject - val doc2 = new BasicDBObject - val doc3 = new BasicDBObject - - doc.put("name", "MongoSession") - doc.put("type", "db") - doc.put("count", 1: java.lang.Integer) - - doc2.put("name", "MongoSession") - doc2.put("type", "db") - doc2.put("count", 1: java.lang.Integer) - - doc3.put("name", "MongoDB") - doc3.put("type", "db") - doc3.put("count", 1: java.lang.Integer) - - // save the docs to the db - Helpers.tryo(coll.save(doc, WriteConcern.SAFE)).toOption must beSome - coll.save(doc2, WriteConcern.SAFE) must throwA[MongoException] - Helpers.tryo(coll.save(doc2, WriteConcern.SAFE)) must beLike { - case Failure(msg, _, _) => - msg must contain("E11000") - } - Helpers.tryo(coll.save(doc3, WriteConcern.SAFE)).toOption must beSome - - // query for the docs by type - val qry = new BasicDBObject("type", "db") - coll.find(qry).count must_== 2 - - // modifier operations $inc, $set, $push... 
- val o2 = new BasicDBObject - o2.put("$inc", new BasicDBObject("count", 1)) // increment count by 1 - coll.update(qry, o2, false, false).getN must_== 1 - coll.update(qry, o2, false, false).isUpdateOfExisting must_== true - - // this update query won't find any docs to update - coll.update(new BasicDBObject("name", "None"), o2, false, false).getN must_== 0 - - // regex query example - val key = "name" - val regex = "^Mongo" - val cur = coll.find( - BasicDBObjectBuilder.start.add(key, Pattern.compile(regex)).get) - cur.count must_== 2 - - // use regex and another dbobject - val cur2 = coll.find( - BasicDBObjectBuilder.start.add(key, Pattern.compile(regex)).add("count", 1).get) - cur2.count must_== 1 - - if (!debug) { - // delete them - coll.remove(new BasicDBObject("type", "db")).getN must_== 2 - coll.find.count must_== 0 - coll.drop - } - }) - success - } - - "UUID Example" in { - - checkMongoIsRunning - - MongoDB.useCollection("examples.uuid") { coll => - val uuid = UUID.randomUUID - val dbo = new BasicDBObject("_id", uuid).append("name", "dbo") - coll.save(dbo) - - val qry = new BasicDBObject("_id", uuid) - val dbo2 = coll.findOne(qry) - - dbo2.get("_id") must_== dbo.get("_id") - dbo2.get("name") must_== dbo.get("name") - } - } -} diff --git a/persistence/mongodb/src/test/scala/net/liftweb/mongodb/LegacyMongoDocumentExamplesSpec.scala b/persistence/mongodb/src/test/scala/net/liftweb/mongodb/LegacyMongoDocumentExamplesSpec.scala deleted file mode 100644 index d9691e6b98..0000000000 --- a/persistence/mongodb/src/test/scala/net/liftweb/mongodb/LegacyMongoDocumentExamplesSpec.scala +++ /dev/null @@ -1,668 +0,0 @@ -/* - * Copyright 2010-2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package mongodb - -import BsonDSL._ -import net.liftweb.util.{Helpers, ConnectionIdentifier, DefaultConnectionIdentifier} - -import java.util.{Calendar, Date, UUID} -import java.util.regex.Pattern - -import org.bson.types.ObjectId -import com.mongodb._ - -import org.specs2.mutable.Specification - -import json.DefaultFormats -import net.liftweb.common.Failure - - -package legacymongotestdocs { - /* - * ConnectionIdentifiers - */ - object TstDBa extends ConnectionIdentifier { - val jndiName = "test_a" - } - object TstDBb extends ConnectionIdentifier { - val jndiName = "test_b" - } - - /* - * _id as a ObjectId - */ - case class SimplePerson(_id: ObjectId, name: String, age: Int) extends MongoDocument[SimplePerson] { - def meta = SimplePerson - } - object SimplePerson extends MongoDocumentMeta[SimplePerson] { - override val collectionName = "simplepersons" - override def connectionIdentifier = DefaultConnectionIdentifier - override def formats = super.formats + new ObjectIdSerializer - // index name - createIndex(("name" -> 1)) - } - - case class Address(street: String, city: String) - case class Child(name: String, age: Int, birthdate: Option[Date]) - - /* - * _id as UUID - */ - case class Person(_id: UUID, name: String, age: Int, address: Address, children: List[Child], dob: Date) - extends MongoDocument[Person] { - - def meta = Person - } - - object Person extends MongoDocumentMeta[Person] { - override def connectionIdentifier = TstDBa - override def collectionName = "mypersons" - override def formats = super.formats + new UUIDSerializer - } - - /* - * _id as ObjectId.toString - */ - case class TCInfo(x: Int, y: Int, uuid: UUID) - case class TstCollection(_id: String, name: String, dbtype: String, count: Int, info: TCInfo) - extends MongoDocument[TstCollection] { - - def meta = TstCollection - } - - object TstCollection extends MongoDocumentMeta[TstCollection] { - override def formats = super.formats + new UUIDSerializer - // create a unique index on name - createIndex(("name" -> 1), true) - // create a non-unique index on dbtype passing unique = false. 
- createIndex(("dbtype" -> 1), false) - } - - case class IDoc(_id: ObjectId, i: Int) extends MongoDocument[IDoc] { - - def meta = IDoc - } - - object IDoc extends MongoDocumentMeta[IDoc] { - override def formats = super.formats + new ObjectIdSerializer - // create an index on "i", descending with custom name - createIndex(("i" -> -1), ("name" -> "i_ix1")) - } - - case class SessCollection(_id: ObjectId, name: String, dbtype: String, count: Int) - extends MongoDocument[SessCollection] { - - def meta = SessCollection - } - - object SessCollection extends MongoDocumentMeta[SessCollection] { - override def formats = super.formats + new ObjectIdSerializer - // create a unique index on name - createIndex(("name" -> 1), true) - } - - /* - * mongo-java-driver is not compatible with numbers that have an e in them - */ - case class Primitive( - _id: ObjectId, - intfield: Int, - longfield: Long, - doublefield: Double, - floatfield: Float, - bigintfield: BigInt, - bytefield: Byte, - booleanfield: Boolean, - shortfield: Short, - datefield: Date - ) extends MongoDocument[Primitive] { - - def meta = Primitive - } - - object Primitive extends MongoDocumentMeta[Primitive] { - override def formats = super.formats + new ObjectIdSerializer - } - - case class MainJDoc(_id: ObjectId, name: String, refdoc: Option[MongoRef], refId: Option[ObjectId]) extends MongoDocument[MainJDoc] { - def meta = MainJDoc - } - - object MainJDoc extends MongoDocumentMeta[MainJDoc] { - override def formats = super.formats + new ObjectIdSerializer - } - - case class RefJDoc(_id: ObjectId) extends MongoDocument[RefJDoc] { - def meta = RefJDoc - } - - object RefJDoc extends MongoDocumentMeta[RefJDoc] { - override def formats = super.formats + new ObjectIdSerializer - } - - case class PatternDoc(_id: ObjectId, regx: Pattern) extends MongoDocument[PatternDoc] { - def meta = PatternDoc - } - object PatternDoc extends MongoDocumentMeta[PatternDoc] { - override def formats = super.formats + new ObjectIdSerializer + new PatternSerializer - } - - case class StringDateDoc(_id: ObjectId, dt: Date) extends MongoDocument[StringDateDoc] { - def meta = StringDateDoc - } - object StringDateDoc extends MongoDocumentMeta[StringDateDoc] { - override def formats = new DefaultFormats { - override def dateFormatter = new java.text.SimpleDateFormat("yyyy-MM-dd'T'HH:mm'Z'") - } + new DateSerializer + new ObjectIdSerializer - } - -} - - -/** - * Systems under specification for MongoDocumentExamples. 
- */ -class LegacyMongoDocumentExamplesSpec extends Specification with MongoTestKit { - "LegacyMongoDocumentExamples Specification".title - - import legacymongotestdocs._ - - override def dbName = "lift_legacymongodocumentexamples" - - override def dbs = (TstDBa, "lift_legacymongodocumentexamples_a") :: super.dbs - - "Simple Person example" in { - - checkMongoIsRunning - - // create a new SimplePerson - val pid = ObjectId.get - val p = SimplePerson(pid, "Tim", 38) - - // save it - p.save - - // retrieve it - def pFromDb = SimplePerson.find(pid) - - pFromDb.isDefined must_== true - p mustEqual pFromDb.get - - // retrieve it using a Json query - def pFromDbViaJson = SimplePerson.find(("_id" -> pid)) - - pFromDbViaJson.isDefined must_== true - - p mustEqual pFromDbViaJson.get - - // modify and save the person - val p2 = p.copy(name="Timm", age=27) - p2.save - pFromDb.isDefined must_== true - p2 must_== pFromDb.get - p2.name must_== pFromDb.get.name - - // find all documents - val all = SimplePerson.findAll - - all.isEmpty must_== false - - all.size must_== 1 - all.head must_== p2 - - // delete it - p2.delete - - pFromDb.isEmpty must_== true - pFromDbViaJson.isEmpty must_== true - - if (!debug) { - SimplePerson.drop - } - - success - } - - "Multiple Simple Person example" in { - - checkMongoIsRunning - - // create new SimplePersons - val p = SimplePerson(ObjectId.get, "Jill", 27) - val p2 = SimplePerson(ObjectId.get, "Bob", 25) - val p3 = SimplePerson(ObjectId.get, "Bob", 29) - - // save them - p.save - p2.save - p3.save - - // retrieve them - def pFromDb = SimplePerson.find(p._id) - def p2FromDb = SimplePerson.find(p2._id) - def p3FromDb = SimplePerson.find(("_id" -> ("$oid" -> p3._id.toString))) - - pFromDb.isDefined must_== true - p2FromDb.isDefined must_== true - p3FromDb.isDefined must_== true - - p mustEqual pFromDb.get - p2 mustEqual p2FromDb.get - p3 mustEqual p3FromDb.get - - // find all persons named 'Bob' - val allBobs = SimplePerson.findAll(("name" -> "Bob")) - val allBobs2 = SimplePerson.findAll("name", "Bob") - - allBobs.isEmpty must_== false - allBobs2.isEmpty must_== false - - if (!debug) { - allBobs.size must_== 2 - allBobs2.size must_== 2 - - // delete them - p.delete - p2.delete - p3.delete - - pFromDb.isEmpty must_== true - p2FromDb.isEmpty must_== true - p3FromDb.isEmpty must_== true - - SimplePerson.drop - } - - success - } - - "Person example" in { - - checkMongoIsRunning - - def date(s: String) = Person.formats.dateFormat.parse(s).get - - val cal = Calendar.getInstance - cal.set(2009, 10, 2) - - // create a new Person UUID.randomUUID.toString - val p = Person(UUID.randomUUID, "joe", 27, Address("Bulevard", "Helsinki"), List(Child("Mary", 5, Some(cal.getTime)), Child("Mazy", 3, None)), date("2004-09-04T18:06:22.000Z")) - - // save it - p.save - - // retrieve it - def pFromDb = Person.find(p._id) - - // compare to original - val p2 = pFromDb - p2.isDefined must_== true - p must_== p2.get - - Person.count must_== 1 - - if (!debug) { - // delete it - p.delete - - pFromDb.isEmpty must_== true - - Person.drop - } - - success - } - - "Mongo tutorial example" in { - - import scala.collection.JavaConverters._ - - checkMongoIsRunning - - // get the indexes - val ixs = MongoDB.useCollection(TstCollection.collectionName)( coll => { - coll.getIndexInfo.asScala - }) - - // unique index on name - val ixName = ixs.find(dbo => dbo.get("name") == "name_1") - ixName.isDefined must_== true - ixName foreach { ix => - ix.containsField("unique") must beTrue - 
ix.get("unique").asInstanceOf[Boolean] must beTrue - } - - // non-unique index on dbtype - val ixDbtype = ixs.find(dbo => dbo.get("name") == "dbtype_1") - ixDbtype.isDefined must_== true - ixDbtype foreach { ix => - ix.containsField("unique") must beFalse - } - - // build a TstCollection - val info = TCInfo(203, 102, UUID.randomUUID) - val tc = TstCollection(ObjectId.get.toString, "MongoDB", "database", 1, info) - val tc2 = TstCollection(ObjectId.get.toString, "OtherDB", "database", 1, info) - - // save to db - tc.save - tc2.save - - // Query - def tcFromDb = TstCollection.find(tc._id) - def tc2FromDb = TstCollection.find(tc2._id) - - tcFromDb.isDefined must_== true - tcFromDb.get must_== tc - tc2FromDb.isDefined must_== true - tc2FromDb.get must_== tc2 - - // update - val tc3 = TstCollection(tc._id, "MongoDB", "document", 2, info) // the new object to update with, replaces the entire document, except possibly _id - val q = ("name" -> "MongoDB") // the query to select the document(s) to update - TstCollection.update(q, tc3) - tcFromDb.isDefined must_== true - tcFromDb.get must_== tc3 - - // Upsert - this should add a new row - val tc4 = TstCollection(ObjectId.get.toString, "nothing", "document", 1, info) - TstCollection.update(("name" -> "nothing"), tc4, Upsert) - TstCollection.findAll.length must_== 3 - - // modifier operations $inc, $set, $push... - val o2 = (("$inc" -> ("count" -> 1)) ~ ("$set" -> ("dbtype" -> "docdb"))) - TstCollection.update(q, o2) - tcFromDb.isDefined must_== true - tcFromDb.get must_== TstCollection(tc._id, tc.name, "docdb", 3, info) - - // this one shouldn't update anything - val o3 = (("$inc" -> ("count" -> 1)) ~ ("$set" -> ("dbtype" -> "docdb"))) - // when using $ modifiers, apply has to be false - TstCollection.update(("name" -> "nothing"), o3) - TstCollection.findAll.length must_== 3 - - if (!debug) { - // delete them - tc.delete - tc2.delete - tc4.delete - - TstCollection.findAll.size must_== 0 - } - - // insert multiple documents - for (i <- List.range(1, 101)) { - IDoc(ObjectId.get, i).save - } - - // count the docs - IDoc.count must_== 100 - - // get the count using a query - IDoc.count(("i" -> ("$gt" -> 50))) must_== 50 - - // get a List of all documents - val all = IDoc.findAll - all.length must_== 100 - - // get a single document with a query ( i = 71 ) - val doc = IDoc.find(("i" -> 71)) - - doc.isDefined must_== true - doc.get.i must_== 71 - - // get a set of documents with a query - // e.g. 
find all where i > 50 - val list1 = IDoc.findAll(("i" -> ("$gt" -> 50))) - - list1.length must_== 50 - - // range - 20 < i <= 30 - val list2 = IDoc.findAll(("i" -> ("$gt" -> 20) ~ ("$lte" -> 30))) - - list2.length must_== 10 - - // limiting result set - val list3 = IDoc.findAll(("i" -> ("$gt" -> 50)), Limit(3)) - - list3.length must_== 3 - - // skip - val list4 = IDoc.findAll(("i" -> ("$gt" -> 50)), ("i" -> 1), Skip(10)) - list4.size must_== 40 - var cntr4 = 0 - for (idoc <- list4) { - cntr4 += 1 - idoc.i must_== 60+cntr4 - } - - // skip and limit (get first 10, skipping the first 5, where i > 50) - val list5 = IDoc.findAll(("i" -> ("$gt" -> 50)), ("i" -> 1), Limit(10), Skip(5)) - var cntr5 = 0 - for (idoc <- list5) { - cntr5 += 1 - idoc.i must_== 55+cntr5 - } - list5.length must_== 10 - - // sorting (it's also easy to sort the List after it's returned) - val list6 = IDoc.findAll(("i" -> ("$gt" -> 0)), ("i" -> -1)) // descending - var cntr6 = 100 - for (idoc <- list6) { - idoc.i must_== cntr6 - cntr6 -= 1 - } - list6.length must_== 100 - - // remove some docs by a query - IDoc.delete(("i" -> ("$gt" -> 50))) - IDoc.findAll.length must_== 50 - - IDoc.drop - - success - } - - "Mongo examples" in { - - checkMongoIsRunning - - val tc = SessCollection(ObjectId.get, "MongoSession", "db", 1) - val tc2 = SessCollection(ObjectId.get, "MongoSession", "db", 1) - val tc3 = SessCollection(ObjectId.get, "MongoDB", "db", 1) - - // use a Mongo instance directly - MongoDB.use( db => { - - // save to db - Helpers.tryo(SessCollection.save(tc, db)).toOption must beSome - SessCollection.save(tc2, db) must throwA[MongoException] - Helpers.tryo(SessCollection.save(tc2, db)) must beLike { - case Failure(msg, _, _) => - msg must contain("E11000") - } - - Helpers.tryo(SessCollection.save(tc3, db)).toOption must beSome - - // query for the docs by type - val qry = ("dbtype" -> "db") - SessCollection.findAll(qry).size must_== 2 - - // modifier operations $inc, $set, $push... 
- val o2 = ("$inc" -> ("count" -> 1)) // increment count by 1 - SessCollection.update(qry, o2, db) - SessCollection.update(qry, o2, db, Multi) - - // regex query example - val lst = SessCollection.findAll(new BasicDBObject("name", Pattern.compile("^Mongo"))) - lst.size must_== 2 - - // jobject query now also works - val lstjobj = SessCollection.findAll(("name" -> (("$regex" -> "^Mon") ~ ("$flags" -> 0)))) - lstjobj.size must_== 2 - - // use regex and another clause - val lst2 = SessCollection.findAll(new BasicDBObject("name", Pattern.compile("^Mon")).append("count", 2)) - lst2.size must_== 1 - - val lstjobj2 = SessCollection.findAll(("name" -> (("$regex" -> "^Mongo") ~ ("$flags" -> 0))) ~ ("count" -> 3)) - lstjobj2.size must_== 1 - - if (!debug) { - // delete them - SessCollection.delete(qry) - SessCollection.findAll.size must_== 0 - - SessCollection.drop - } - - }) - - success - } - - "Primitives example" in { - - checkMongoIsRunning - - def date(s: String) = Primitive.formats.dateFormat.parse(s).get - - val p = Primitive(ObjectId.get, 2147483647, 2147483648L, 1797693, 3.4028235F, 1000, 0, true, 512, date("2004-09-04T18:06:22.000Z")) - - // save it - p.save - - // retrieve it - def pFromDb = Primitive.find(p._id) - - pFromDb.isDefined must_== true - - p mustEqual pFromDb.get - - if (!debug) { - // delete it - p.delete - - pFromDb.isEmpty must_== true - Primitive.drop - } - - success - } - - "Ref example" in { - - checkMongoIsRunning - - val ref1 = RefJDoc(ObjectId.get) - val ref2 = RefJDoc(ObjectId.get) - - ref1.save - ref2.save - - val md1 = MainJDoc(ObjectId.get, "md1", ref1.getRef, Some(ref1._id)) - val md2 = MainJDoc(ObjectId.get, "md2", ref1.getRef, None) - val md3 = MainJDoc(ObjectId.get, "md3", ref2.getRef, None) - val md4 = MainJDoc(ObjectId.get, "md4", ref2.getRef, None) - - md1.save - md2.save - md3.save - md4.save - - MainJDoc.count must_== 4 - RefJDoc.count must_== 2 - - // query for a single doc with a JObject query - val md1a = MainJDoc.find(("name") -> "md1") - md1a.isDefined must_== true - md1a.foreach(o => o._id must_== md1._id) - - // query for a single doc with a k, v query - val md1b = MainJDoc.find(md1._id) - md1b.isDefined must_== true - md1b.foreach(o => o._id must_== md1._id) - - // find all documents - MainJDoc.findAll.size must_== 4 - RefJDoc.findAll.size must_== 2 - - // find all documents with JObject query - val mdq1 = MainJDoc.findAll(("name" -> "md1")) - mdq1.size must_== 1 - - // find all documents with $in query, sorted - val qry = ("name" -> ("$in" -> List("md1", "md2"))) - val mdq2 = MainJDoc.findAll(qry, ("name" -> -1)) - mdq2.size must_== 2 - mdq2.head._id must_== md2._id - - // Find all documents using a k, v query - val mdq3 = MainJDoc.findAll("_id", md1._id) - mdq3.size must_== 1 - - MainJDoc.drop - RefJDoc.drop - - success - } - - "Pattern example" in { - - checkMongoIsRunning - - val pdoc1 = PatternDoc(ObjectId.get, Pattern.compile("^Mo", Pattern.CASE_INSENSITIVE)) - pdoc1.save - - PatternDoc.find(pdoc1._id) must beLike { - case Some(pdoc) => - pdoc._id must_== pdoc1._id - pdoc.regx.pattern must_== pdoc1.regx.pattern - pdoc.regx.flags must_== pdoc1.regx.flags - } - } - - "Issue 586 Date test" in { - - checkMongoIsRunning - - def date(s: String): Date = StringDateDoc.formats.dateFormat.parse(s).get - - val newId = ObjectId.get - val dtStr = "2004-09-04T18:06Z" - val newDt = date(dtStr) - - // create a document manually with a String for the Date field - MongoDB.useCollection("stringdatedocs") { coll => - coll.save(new BasicDBObject("_id", 
newId).append("dt", dtStr)) - } - - val fromDb = StringDateDoc.find(newId) - fromDb must beLike { - case Some(sdd) => - sdd._id must_== newId - sdd.dt must_== newDt - sdd.save - - StringDateDoc.find(newId) must beLike { - case Some(sdd2) => - sdd2.dt must_== sdd.dt - } - } - } -} diff --git a/persistence/mongodb/src/test/scala/net/liftweb/mongodb/LegacyMongoDocumentMongoClientSpec.scala b/persistence/mongodb/src/test/scala/net/liftweb/mongodb/LegacyMongoDocumentMongoClientSpec.scala deleted file mode 100644 index 07fae4150b..0000000000 --- a/persistence/mongodb/src/test/scala/net/liftweb/mongodb/LegacyMongoDocumentMongoClientSpec.scala +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright 2010-2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb - -import BsonDSL._ - -import java.util.{Calendar, Date, UUID} -import java.util.regex.Pattern - -import org.bson.types.ObjectId -import com.mongodb._ - -import org.specs2.mutable.Specification - -import json._ - -package legacymongoclienttestdocs { - case class SessCollection(_id: ObjectId, name: String, dbtype: String, count: Int) - extends MongoDocument[SessCollection] { - - def meta = SessCollection - } - - object SessCollection extends MongoDocumentMeta[SessCollection] { - override def formats = super.formats + new ObjectIdSerializer - // create a unique index on name - createIndex(("name" -> 1), true) - } -} - -/** - * Systems under specification for MongoDocumentMongoClient. - */ -class LegacyMongoDocumentMongoClientSpec extends Specification with MongoTestKit { - "LegacyMongoDocumentMongoClient Specification".title - - import legacymongoclienttestdocs._ - - "MongoClient example" in { - - checkMongoIsRunning - - val tc = SessCollection(ObjectId.get, "MongoSession", "db", 1) - val tc2 = SessCollection(ObjectId.get, "MongoSession", "db", 1) - val tc3 = SessCollection(ObjectId.get, "MongoDB", "db", 1) - - // save to db - SessCollection.save(tc) - SessCollection.save(tc2) must throwA[MongoException] - SessCollection.save(tc3) - - success - } - -} diff --git a/persistence/mongodb/src/test/scala/net/liftweb/mongodb/MongoDirectSpec.scala b/persistence/mongodb/src/test/scala/net/liftweb/mongodb/MongoDirectSpec.scala deleted file mode 100644 index 3271eabb12..0000000000 --- a/persistence/mongodb/src/test/scala/net/liftweb/mongodb/MongoDirectSpec.scala +++ /dev/null @@ -1,288 +0,0 @@ -/* - * Copyright 2010-2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb - -import net.liftweb.util.{Helpers, DefaultConnectionIdentifier} - -import scala.collection.JavaConverters._ -import java.util.UUID -import java.util.regex.Pattern - -import org.bson.Document -import com.mongodb.{WriteConcern, BasicDBObject, BasicDBObjectBuilder, MongoException} -import com.mongodb.client.model.{IndexOptions, ReplaceOptions, UpdateOptions} -import com.mongodb.client.model.Filters.{and, eq => eqs} - -import org.specs2.mutable.Specification - -import json.DefaultFormats -import net.liftweb.common.Failure - - -/** - * System under specification for MongoDirect. - */ -class MongoDirectSpec extends Specification with MongoTestKit { - "MongoDirect Specification".title - - def date(s: String) = DefaultFormats.dateFormat.parse(s).get - - "Mongo tutorial example" in { - - checkMongoIsRunning - - // build the DBObject - val doc = new Document - - doc.put("name", "MongoDB") - doc.put("type", "database") - doc.put("count", 1: java.lang.Integer) - - val info = new Document - - info.put("x", 203: java.lang.Integer) - info.put("y", 102: java.lang.Integer) - - doc.put("info", info) - - // use the Mongo instance directly - MongoDB.useDatabase(DefaultConnectionIdentifier) { db => - val coll = db.getCollection("testCollection") - - // save the doc to the db - coll.insertOne(doc) - - // get the doc back from the db and compare them - coll.find().first() must_== doc - - // upsert - doc.put("type", "document") - doc.put("count", 2: java.lang.Integer) - val q = new Document("name", "MongoDB") // the query to select the document(s) to update - val o = doc // the new object to update with, replaces the entire document, except possibly _id - val ropts = new ReplaceOptions().upsert(false) - coll.replaceOne(q, o, ropts) - - // get the doc back from the db and compare - coll.find.first.get("type") must_== "document" - coll.find.first.get("count") must_== 2 - - // modifier operations $inc, $set, $push... - val o2 = new Document - o2.put("$inc", new Document("count", 1)) // increment count by 1 - o2.put("$set", new Document("type", "docdb")) // set type - val uopts = new UpdateOptions().upsert(false) - coll.updateOne(q, o2, uopts) - - // get the doc back from the db and compare - coll.find.first.get("type") must_== "docdb" - coll.find.first.get("count") must_== 3 - - if (!debug) { - // delete it - coll.deleteOne(new Document("_id", doc.get("_id"))) - coll.countDocuments() must_== 0 - coll.drop - } - - success - } - } - - "Mongo tutorial 2 example" in { - - checkMongoIsRunning - - // use a DBCollection directly - MongoDB.useMongoCollection("iDoc", classOf[Document]) { coll => - // insert multiple documents - for (i <- List.range(1, 101)) { - coll.insertOne(new Document().append("i", i)) - } - - // create an index - coll.createIndex(new Document("i", 1)) // create index on "i", ascending - - // count the docs - coll.countDocuments() must_== 100 - - // get the count using a query - coll.countDocuments(new Document("i", new Document("$gt", 50))) must_== 50 - - // use a cursor to get all docs - val cur = coll.find - - cur.iterator.asScala.toList.size must_== 100 - - // get a single document with a query ( i = 71 ) - val query = new Document("i", 71) - val cur2 = coll.find(query) - - cur2.iterator.asScala.toList.size must_== 1 - cur2.first.get("i") must_== 71 - - // get a set of documents with a query - // e.g. 
find all where i > 50 - val cur3 = coll.find(new Document("i", new Document("$gt", 50))) - - cur3.iterator.asScala.toList.size must_== 50 - - // range - 20 < i <= 30 - val cur4 = coll.find(new Document("i", new Document("$gt", 20).append("$lte", 30))) - - cur4.iterator.asScala.toList.size must_== 10 - - // limiting result set - val cur5 = coll.find(new Document("i", new Document("$gt", 50))).limit(3).iterator - - var cntr5 = 0 - while(cur5.hasNext) { - cur5.next - cntr5 += 1 - } - cntr5 must_== 3 - - // skip - val cur6 = coll.find(new Document("i", new Document("$gt", 50))).skip(10).iterator - - var cntr6 = 0 - while(cur6.hasNext) { - cntr6 += 1 - cur6.next.get("i") must_== 60+cntr6 - } - cntr6 must_== 40 - - /* skip and limit */ - val cur7 = coll.find.skip(10).limit(20).iterator - - var cntr7 = 0 - while(cur7.hasNext) { - cntr7 += 1 - cur7.next.get("i") must_== 10+cntr7 - } - cntr7 must_== 20 - - // sorting - val cur8 = coll.find.sort(new Document("i", -1)).iterator // descending - - var cntr8 = 100 - while(cur8.hasNext) { - cur8.next.get("i") must_== cntr8 - cntr8 -= 1 - } - - // remove some docs by a query - coll.deleteMany(new Document("i", new Document("$gt", 50))) - - coll.countDocuments() must_== 50 - - if (!debug) { - // delete the rest of the rows - coll.deleteMany(new Document("i", new Document("$lte", 50))) - coll.countDocuments() must_== 0 - coll.drop - } - } - success - } - - "Mongo more examples" in { - - checkMongoIsRunning - - // use a Mongo instance directly - MongoDB.useDefaultDatabase { db => - val coll = db.getCollection("testCollection") - - // create a unique index on name - coll.createIndex(new Document("name", 1), (new IndexOptions).unique(true)) - - // build the DBObjects - val doc = new Document - val doc2 = new Document - val doc3 = new Document - - doc.put("name", "MongoSession") - doc.put("type", "db") - doc.put("count", 1: java.lang.Integer) - - doc2.put("name", "MongoSession") - doc2.put("type", "db") - doc2.put("count", 1: java.lang.Integer) - - doc3.put("name", "MongoDB") - doc3.put("type", "db") - doc3.put("count", 1: java.lang.Integer) - - // save the docs to the db - Helpers.tryo(coll.withWriteConcern(WriteConcern.ACKNOWLEDGED).insertOne(doc)).toOption must beSome - coll.withWriteConcern(WriteConcern.ACKNOWLEDGED).insertOne(doc2) must throwA[MongoException] - Helpers.tryo(coll.withWriteConcern(WriteConcern.ACKNOWLEDGED).insertOne(doc2)) must beLike { - case Failure(msg, _, _) => - msg must contain("E11000") - } - Helpers.tryo(coll.withWriteConcern(WriteConcern.ACKNOWLEDGED).insertOne(doc3)).toOption must beSome - - // query for the docs by type - val qry = eqs("type", "db") - coll.countDocuments(qry) must_== 2 - - // modifier operations $inc, $set, $push... 
- val o2 = new Document - o2.put("$inc", new Document("count", 1)) // increment count by 1 - coll.updateOne(qry, o2).getModifiedCount must_== 1 - coll.updateOne(qry, o2).getMatchedCount must_== 1 - - // this update query won't find any docs to update - coll.updateOne(eqs("name", "None"), o2).getModifiedCount must_== 0 - - // regex query example - val key = "name" - val regex = "^Mongo" - coll.countDocuments(eqs(key, Pattern.compile(regex))) must_== 2 - - // use regex and another dbobject - coll.countDocuments(and(eqs(key, Pattern.compile(regex)), eqs("count", 1))) must_== 1 - - if (!debug) { - // delete them - coll.deleteMany(eqs("type", "db")).getDeletedCount must_== 2 - coll.countDocuments must_== 0 - coll.drop - } - } - success - } - - "UUID Example" in { - - checkMongoIsRunning - - MongoDB.useMongoCollection("examples.uuid", classOf[Document]) { coll => - val uuid = UUID.randomUUID - val doc = new Document("_id", uuid).append("name", "doc") - coll.insertOne(doc) - - val qry = eqs("_id", uuid) - val fromDb = coll.find(qry).first - - fromDb.get("_id") must_== doc.get("_id") - fromDb.get("name") must_== doc.get("name") - } - } -} diff --git a/persistence/mongodb/src/test/scala/net/liftweb/mongodb/MongoDocumentExamplesSpec.scala b/persistence/mongodb/src/test/scala/net/liftweb/mongodb/MongoDocumentExamplesSpec.scala deleted file mode 100644 index b687b6db38..0000000000 --- a/persistence/mongodb/src/test/scala/net/liftweb/mongodb/MongoDocumentExamplesSpec.scala +++ /dev/null @@ -1,678 +0,0 @@ -/* - * Copyright 2010-2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package mongodb - -import BsonDSL._ - -import net.liftweb.common.Failure -import net.liftweb.json.{DefaultFormats, JObject} -import net.liftweb.util.{Helpers, ConnectionIdentifier, DefaultConnectionIdentifier} - -import java.util.{Calendar, Date, UUID} -import java.util.regex.Pattern - -import org.bson.Document -import org.bson.types.ObjectId -import com.mongodb._ -import com.mongodb.client.model.{IndexOptions, ReplaceOptions, UpdateOptions} -import com.mongodb.client.model.Filters.{and, eq => eqs} -import com.mongodb.client.model.Indexes._ - -import org.specs2.mutable.Specification - -package mongotestdocs { - /* - * ConnectionIdentifiers - */ - object TstDBa extends ConnectionIdentifier { - val jndiName = "test_a" - } - object TstDBb extends ConnectionIdentifier { - val jndiName = "test_b" - } - - /* - * _id as a ObjectId - */ - case class SimplePerson(_id: ObjectId, name: String, age: Int) extends MongoDocument[SimplePerson] { - def meta = SimplePerson - } - object SimplePerson extends MongoDocumentMeta[SimplePerson] { - override val collectionName = "simplepersons" - override def connectionIdentifier = DefaultConnectionIdentifier - override def formats = super.formats + new ObjectIdSerializer - - def createIndexes(): Unit = { - SimplePerson.createIndex(ascending("name")) - } - } - - case class Address(street: String, city: String) - case class Child(name: String, age: Int, birthdate: Option[Date]) - - /* - * _id as UUID - */ - case class Person(_id: UUID, name: String, age: Int, address: Address, children: List[Child], dob: Date) - extends MongoDocument[Person] { - - def meta = Person - } - - object Person extends MongoDocumentMeta[Person] { - override def connectionIdentifier = TstDBa - override def collectionName = "mypersons" - override def formats = super.formats + new UUIDSerializer - } - - /* - * _id as ObjectId.toString - */ - case class TCInfo(x: Int, y: Int, uuid: UUID) - case class TstCollection(_id: String, name: String, dbtype: String, count: Int, info: TCInfo) - extends MongoDocument[TstCollection] { - - def meta = TstCollection - } - - object TstCollection extends MongoDocumentMeta[TstCollection] { - override def formats = super.formats + new UUIDSerializer - - def createIndexes(): Unit = { - TstCollection.createIndex(ascending("name"), true) - TstCollection.createIndex(ascending("dbtype")) - } - } - - case class IDoc(_id: ObjectId, i: Int) extends MongoDocument[IDoc] { - - def meta = IDoc - } - - object IDoc extends MongoDocumentMeta[IDoc] { - override def formats = super.formats + new ObjectIdSerializer - - def createIndexes(): Unit = { - IDoc.createIndex(descending("i"), (new IndexOptions).name("i_ix1")) - } - } - - case class SessCollection(_id: ObjectId, name: String, dbtype: String, count: Int) - extends MongoDocument[SessCollection] { - - def meta = SessCollection - } - - object SessCollection extends MongoDocumentMeta[SessCollection] { - override def formats = super.formats + new ObjectIdSerializer - - def createIndexes(): Unit = { - SessCollection.createIndex(ascending("name"), true) - } - } - - /* - * mongo-java-driver is not compatible with numbers that have an e in them - */ - case class Primitive( - _id: ObjectId, - intfield: Int, - longfield: Long, - doublefield: Double, - floatfield: Float, - bigintfield: BigInt, - bytefield: Byte, - booleanfield: Boolean, - shortfield: Short, - datefield: Date - ) extends MongoDocument[Primitive] { - - def meta = Primitive - } - - object Primitive extends MongoDocumentMeta[Primitive] { - 
override def formats = super.formats + new ObjectIdSerializer - } - - case class MainJDoc(_id: ObjectId, name: String, refdoc: Option[MongoRef], refId: Option[ObjectId]) extends MongoDocument[MainJDoc] { - def meta = MainJDoc - } - - object MainJDoc extends MongoDocumentMeta[MainJDoc] { - override def formats = super.formats + new ObjectIdSerializer - } - - case class RefJDoc(_id: ObjectId) extends MongoDocument[RefJDoc] { - def meta = RefJDoc - } - - object RefJDoc extends MongoDocumentMeta[RefJDoc] { - override def formats = super.formats + new ObjectIdSerializer - } - - case class PatternDoc(_id: ObjectId, regx: Pattern) extends MongoDocument[PatternDoc] { - def meta = PatternDoc - } - object PatternDoc extends MongoDocumentMeta[PatternDoc] { - override def formats = super.formats + new ObjectIdSerializer + new PatternSerializer - } - - case class StringDateDoc(_id: ObjectId, dt: Date) extends MongoDocument[StringDateDoc] { - def meta = StringDateDoc - } - object StringDateDoc extends MongoDocumentMeta[StringDateDoc] { - override def formats = new DefaultFormats { - override def dateFormatter = new java.text.SimpleDateFormat("yyyy-MM-dd'T'HH:mm'Z'") - } + new DateSerializer + new ObjectIdSerializer - } - -} - - -/** - * Systems under specification for MongoDocumentExamples. - */ -class MongoDocumentExamplesSpec extends Specification with MongoTestKit { - "MongoDocumentExamples Specification".title - - import mongotestdocs._ - - override def dbName = "lift_mongodocumentexamples" - - override def dbs = (TstDBa, "lift_mongodocumentexamples_a") :: super.dbs - - "Simple Person example" in { - - checkMongoIsRunning - - SimplePerson.createIndexes() - - // create a new SimplePerson - val pid = ObjectId.get - val p = SimplePerson(pid, "Tim", 38) - - // save it - p.save - - // retrieve it - def pFromDb = SimplePerson.find(pid) - - pFromDb.isDefined must_== true - p mustEqual pFromDb.get - - // retrieve it using a Json query - def pFromDbViaJson = SimplePerson.find(("_id" -> pid)) - - pFromDbViaJson.isDefined must_== true - - p mustEqual pFromDbViaJson.get - - // modify and save the person - val p2 = p.copy(name="Timm", age=27) - p2.save - pFromDb.isDefined must_== true - p2 must_== pFromDb.get - p2.name must_== pFromDb.get.name - - // find all documents - val all = SimplePerson.findAll - - all.isEmpty must_== false - - all.size must_== 1 - all.head must_== p2 - - // delete it - p2.delete - - pFromDb.isEmpty must_== true - pFromDbViaJson.isEmpty must_== true - - if (!debug) { - SimplePerson.drop - } - - success - } - - "Multiple Simple Person example" in { - - checkMongoIsRunning - - // create new SimplePersons - val p = SimplePerson(ObjectId.get, "Jill", 27) - val p2 = SimplePerson(ObjectId.get, "Bob", 25) - val p3 = SimplePerson(ObjectId.get, "Bob", 29) - - // save them - p.save - p2.save - p3.save - - // retrieve them - def pFromDb = SimplePerson.find(p._id) - def p2FromDb = SimplePerson.find(p2._id) - def p3FromDb = SimplePerson.find(("_id" -> ("$oid" -> p3._id.toString))) - - pFromDb.isDefined must_== true - p2FromDb.isDefined must_== true - p3FromDb.isDefined must_== true - - p mustEqual pFromDb.get - p2 mustEqual p2FromDb.get - p3 mustEqual p3FromDb.get - - // find all persons named 'Bob' - val allBobs = SimplePerson.findAll(("name" -> "Bob")) - val allBobs2 = SimplePerson.findAll("name", "Bob") - - allBobs.isEmpty must_== false - allBobs2.isEmpty must_== false - - if (!debug) { - allBobs.size must_== 2 - allBobs2.size must_== 2 - - // delete them - p.delete - p2.delete - p3.delete - - 
pFromDb.isEmpty must_== true - p2FromDb.isEmpty must_== true - p3FromDb.isEmpty must_== true - - SimplePerson.drop - } - - success - } - - "Person example" in { - - checkMongoIsRunning - - def date(s: String) = Person.formats.dateFormat.parse(s).get - - val cal = Calendar.getInstance - cal.set(2009, 10, 2) - - // create a new Person UUID.randomUUID.toString - val p = Person(UUID.randomUUID, "joe", 27, Address("Bulevard", "Helsinki"), List(Child("Mary", 5, Some(cal.getTime)), Child("Mazy", 3, None)), date("2004-09-04T18:06:22.000Z")) - - // save it - p.save - - // retrieve it - def pFromDb = Person.find(p._id) - - // compare to original - val p2 = pFromDb - p2.isDefined must_== true - p must_== p2.get - - Person.count must_== 1 - - if (!debug) { - // delete it - p.delete - - pFromDb.isEmpty must_== true - - Person.drop - } - - success - } - - "Mongo tutorial example" in { - - import scala.collection.JavaConverters._ - - checkMongoIsRunning - - IDoc.createIndexes() - TstCollection.createIndexes() - - // get the indexes - val ixs = MongoDB.useMongoCollection(TstCollection.collectionName)(_.listIndexes.asScala) - - // unique index on name - val ixName = ixs.find(_.get("name") == "name_1") - ixName.isDefined must_== true - ixName foreach { ix => - ix.containsKey("unique") must beTrue - ix.get("unique").asInstanceOf[Boolean] must beTrue - } - - // non-unique index on dbtype - val ixDbtype = ixs.find(_.get("name") == "dbtype_1") - ixDbtype.isDefined must_== true - ixDbtype foreach { ix => - ix.containsKey("unique") must beFalse - } - - // build a TstCollection - val info = TCInfo(203, 102, UUID.randomUUID) - val tc = TstCollection(ObjectId.get.toString, "MongoDB", "database", 1, info) - val tc2 = TstCollection(ObjectId.get.toString, "OtherDB", "database", 1, info) - - // save to db - tc.save - tc2.save - - // Query - def tcFromDb = TstCollection.find(tc._id) - def tc2FromDb = TstCollection.find(tc2._id) - - tcFromDb.isDefined must_== true - tcFromDb.get must_== tc - tc2FromDb.isDefined must_== true - tc2FromDb.get must_== tc2 - - // update - val tc3 = TstCollection(tc._id, "MongoDB", "document", 2, info) // the new object to update with, replaces the entire document, except possibly _id - val q: JObject = ("name" -> "MongoDB") // the query to select the document(s) to update - TstCollection.replaceOne(q, tc3) - tcFromDb.isDefined must_== true - tcFromDb.get must_== tc3 - - // Upsert - this should add a new row - val tc4 = TstCollection(ObjectId.get.toString, "nothing", "document", 1, info) - TstCollection.replaceOne(("name" -> "nothing"), tc4, (new ReplaceOptions).upsert(true)) - TstCollection.count must_== 3 - - // modifier operations $inc, $set, $push... 
- val o2 = (("$inc" -> ("count" -> 1)) ~ ("$set" -> ("dbtype" -> "docdb"))) - TstCollection.updateOne(q, o2) - tcFromDb.isDefined must_== true - tcFromDb.get must_== TstCollection(tc._id, tc.name, "docdb", 3, info) - - // this one shouldn't update anything - val o3 = (("$inc" -> ("count" -> 1)) ~ ("$set" -> ("dbtype" -> "docdb"))) - // when using $ modifiers, apply has to be false - TstCollection.updateOne(("name" -> "nothing"), o3) - TstCollection.count must_== 3 - - if (!debug) { - // delete them - tc.delete - tc2.delete - tc4.delete - - TstCollection.findAll.size must_== 0 - } - - // insert multiple documents - for (i <- List.range(1, 101)) { - IDoc(ObjectId.get, i).save - } - - // count the docs - IDoc.count must_== 100 - - // get the count using a query - IDoc.count(("i" -> ("$gt" -> 50))) must_== 50 - - // get a List of all documents - val all = IDoc.findAll - all.length must_== 100 - - // get a single document with a query ( i = 71 ) - val doc = IDoc.find(("i" -> 71)) - - doc.isDefined must_== true - doc.get.i must_== 71 - - // get a set of documents with a query - // e.g. find all where i > 50 - val list1 = IDoc.findAll(("i" -> ("$gt" -> 50))) - - list1.length must_== 50 - - // range - 20 < i <= 30 - val list2 = IDoc.findAll(("i" -> ("$gt" -> 20) ~ ("$lte" -> 30))) - - list2.length must_== 10 - - // limiting result set - val list3 = IDoc.findAll(("i" -> ("$gt" -> 50)), Limit(3)) - - list3.length must_== 3 - - // skip - val list4 = IDoc.findAll(("i" -> ("$gt" -> 50)), ("i" -> 1), Skip(10)) - list4.size must_== 40 - var cntr4 = 0 - for (idoc <- list4) { - cntr4 += 1 - idoc.i must_== 60+cntr4 - } - - // skip and limit (get first 10, skipping the first 5, where i > 50) - val list5 = IDoc.findAll(("i" -> ("$gt" -> 50)), ("i" -> 1), Limit(10), Skip(5)) - var cntr5 = 0 - for (idoc <- list5) { - cntr5 += 1 - idoc.i must_== 55+cntr5 - } - list5.length must_== 10 - - // sorting (it's also easy to sort the List after it's returned) - val list6 = IDoc.findAll(("i" -> ("$gt" -> 0)), ("i" -> -1)) // descending - var cntr6 = 100 - for (idoc <- list6) { - idoc.i must_== cntr6 - cntr6 -= 1 - } - list6.length must_== 100 - - // remove some docs by a query - IDoc.deleteMany(("i" -> ("$gt" -> 50))) - IDoc.findAll.length must_== 50 - - IDoc.drop - - success - } - - "Mongo examples" in { - - checkMongoIsRunning - - SessCollection.createIndexes() - - val tc = SessCollection(ObjectId.get, "MongoSession", "db", 1) - val tc2 = SessCollection(ObjectId.get, "MongoSession", "db", 1) - val tc3 = SessCollection(ObjectId.get, "MongoDB", "db", 1) - - // save to db - Helpers.tryo(SessCollection.insertOne(tc)).toOption must beSome - SessCollection.save(tc2) must throwA[MongoException] - SessCollection.insertOne(tc2) must beLike { - case Failure(msg, _, _) => - msg must contain("E11000") - } - - Helpers.tryo(SessCollection.insertOne(tc3)).toOption must beSome - - // query for the docs by type - val qry: JObject = ("dbtype" -> "db") - SessCollection.count(qry) must_== 2 - - // modifier operations $inc, $set, $push... 
- val o2 = ("$inc" -> ("count" -> 1)) // increment count by 1 - SessCollection.updateOne(qry, o2) - SessCollection.updateMany(qry, o2) - - // regex query example - val lst = SessCollection.findAll(eqs("name", Pattern.compile("^Mongo"))) - lst.size must_== 2 - - // jobject query now also works - val lstjobj = SessCollection.findAll(("name" -> (("$regex" -> "^Mon") ~ ("$flags" -> 0)))) - lstjobj.size must_== 2 - - // use regex and another clause - val lst2 = SessCollection.findAll(and(eqs("name", Pattern.compile("^Mon")), eqs("count", 2))) - lst2.size must_== 1 - - val lstjobj2 = SessCollection.findAll(("name" -> (("$regex" -> "^Mongo") ~ ("$flags" -> 0))) ~ ("count" -> 3)) - lstjobj2.size must_== 1 - - if (!debug) { - // delete them - SessCollection.deleteMany(qry) - SessCollection.count must_== 0 - - SessCollection.drop - } - - success - } - - "Primitives example" in { - - checkMongoIsRunning - - def date(s: String) = Primitive.formats.dateFormat.parse(s).get - - val p = Primitive(ObjectId.get, 2147483647, 2147483648L, 1797693, 3.4028235F, 1000, 0, true, 512, date("2004-09-04T18:06:22.000Z")) - - // save it - p.save - - // retrieve it - def pFromDb = Primitive.find(p._id) - - pFromDb.isDefined must_== true - - p mustEqual pFromDb.get - - if (!debug) { - // delete it - p.delete - - pFromDb.isEmpty must_== true - Primitive.drop - } - - success - } - - "Ref example" in { - - checkMongoIsRunning - - val ref1 = RefJDoc(ObjectId.get) - val ref2 = RefJDoc(ObjectId.get) - - ref1.save - ref2.save - - val md1 = MainJDoc(ObjectId.get, "md1", ref1.getRef, Some(ref1._id)) - val md2 = MainJDoc(ObjectId.get, "md2", ref1.getRef, None) - val md3 = MainJDoc(ObjectId.get, "md3", ref2.getRef, None) - val md4 = MainJDoc(ObjectId.get, "md4", ref2.getRef, None) - - md1.save - md2.save - md3.save - md4.save - - MainJDoc.count must_== 4 - RefJDoc.count must_== 2 - - // query for a single doc with a JObject query - val md1a = MainJDoc.find(("name") -> "md1") - md1a.isDefined must_== true - md1a.foreach(o => o._id must_== md1._id) - - // query for a single doc with a k, v query - val md1b = MainJDoc.find(md1._id) - md1b.isDefined must_== true - md1b.foreach(o => o._id must_== md1._id) - - // find all documents - MainJDoc.findAll.size must_== 4 - RefJDoc.findAll.size must_== 2 - - // find all documents with JObject query - val mdq1 = MainJDoc.findAll(("name" -> "md1")) - mdq1.size must_== 1 - - // find all documents with $in query, sorted - val qry = ("name" -> ("$in" -> List("md1", "md2"))) - val mdq2 = MainJDoc.findAll(qry, ("name" -> -1)) - mdq2.size must_== 2 - mdq2.head._id must_== md2._id - - // Find all documents using a k, v query - val mdq3 = MainJDoc.findAll("_id", md1._id) - mdq3.size must_== 1 - - MainJDoc.drop - RefJDoc.drop - - success - } - - "Pattern example" in { - - checkMongoIsRunning - - val pdoc1 = PatternDoc(ObjectId.get, Pattern.compile("^Mo", Pattern.CASE_INSENSITIVE)) - pdoc1.save - - PatternDoc.find(pdoc1._id) must beLike { - case Some(pdoc) => - pdoc._id must_== pdoc1._id - pdoc.regx.pattern must_== pdoc1.regx.pattern - pdoc.regx.flags must_== pdoc1.regx.flags - } - } - - "Issue 586 Date test" in { - - checkMongoIsRunning - - def date(s: String): Date = StringDateDoc.formats.dateFormat.parse(s).get - - val newId = ObjectId.get - val dtStr = "2004-09-04T18:06Z" - val newDt = date(dtStr) - - // create a document manually with a String for the Date field - MongoDB.useMongoCollection("stringdatedocs") { coll => - coll.insertOne(new Document("_id", newId).append("dt", dtStr)) - } - - val fromDb 
= StringDateDoc.find(newId) - fromDb must beLike { - case Some(sdd) => - sdd._id must_== newId - sdd.dt must_== newDt - StringDateDoc.replaceOne(sdd) - - StringDateDoc.find(newId) must beLike { - case Some(sdd2) => - sdd2.dt must_== sdd.dt - } - } - } -} diff --git a/persistence/mongodb/src/test/scala/net/liftweb/mongodb/MongoDocumentSpec.scala b/persistence/mongodb/src/test/scala/net/liftweb/mongodb/MongoDocumentSpec.scala deleted file mode 100644 index 18168249ed..0000000000 --- a/persistence/mongodb/src/test/scala/net/liftweb/mongodb/MongoDocumentSpec.scala +++ /dev/null @@ -1,203 +0,0 @@ -/* - * Copyright 2010 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb - -import org.bson.types.ObjectId -import org.joda.time._ -import org.specs2.mutable.Specification -import org.specs2.execute.Result - -import common._ -import json.ext._ - - -package mongodocumentspecs { - case class Primitives( - _id: ObjectId, - s: String, - i: Int, - l: Long, - d: Double, - f: Float, - b: Byte, - bi: BigInt, - bool: Boolean, - sh: Short, - jli: java.lang.Integer, - jll: java.lang.Long, - jld: java.lang.Double, - jlf: java.lang.Float, - jlb: java.lang.Byte, - jlbool: java.lang.Boolean, - jlsh: java.lang.Short - ) extends MongoDocument[Primitives] { - - def meta = Primitives - } - object Primitives extends MongoDocumentMeta[Primitives] { - override def formats = super.formats + new ObjectIdSerializer - } - - case class NullTestEmbed(nul: String) - case class NullTestDoc(_id: ObjectId, nul: String, ent: NullTestEmbed) - extends MongoDocument[NullTestDoc] { - def meta = NullTestDoc - } - object NullTestDoc extends MongoDocumentMeta[NullTestDoc] { - override def formats = super.formats + new ObjectIdSerializer - } - - case class OptionTestDoc(_id: ObjectId, optNone: Option[String], - optSome: Option[String]) - extends MongoDocument[OptionTestDoc] { - def meta = OptionTestDoc - } - object OptionTestDoc extends MongoDocumentMeta[OptionTestDoc] { - override def formats = super.formats + new ObjectIdSerializer - } - - case class BoxTestDoc(_id: ObjectId, boxEmpty: Box[String], - boxFull: Box[String], boxFail: Box[String]) - extends MongoDocument[BoxTestDoc] { - def meta = BoxTestDoc - } - object BoxTestDoc extends MongoDocumentMeta[BoxTestDoc] { - override def formats = super.formats + new JsonBoxSerializer + new ObjectIdSerializer - } - - case class MapTestDoc(_id: ObjectId, aMap: Map[String, String]) - extends MongoDocument[MapTestDoc] { - def meta = MapTestDoc - } - object MapTestDoc extends MongoDocumentMeta[MapTestDoc] { - override def formats = super.formats + new ObjectIdSerializer - } - - case class DateTimeTestDoc(_id: ObjectId, dt: DateTime) extends MongoDocument[DateTimeTestDoc] { - def meta = DateTimeTestDoc - } - object DateTimeTestDoc extends MongoDocumentMeta[DateTimeTestDoc] { - override def formats = super.formats + new ObjectIdSerializer + new DateTimeSerializer - } - - case class LocalDateTestDoc(_id: ObjectId, ld: 
LocalDate) extends MongoDocument[LocalDateTestDoc] { - def meta = LocalDateTestDoc - } - object LocalDateTestDoc extends MongoDocumentMeta[LocalDateTestDoc] { - override def formats = super.formats + new ObjectIdSerializer + LocalDateSerializer() - } -} - -/** - * System specification for MongoDocument - */ -class MongoDocumentSpec extends Specification with MongoTestKit { - "MongoDocument Specification".title - - def passSaveAndRetrieveTests(obj: MongoDocument[_], meta: MongoDocumentMeta[_]): Result = { - obj.save - val objFromDb = meta.find(obj._id.asInstanceOf[ObjectId]) - objFromDb.isDefined must_== true - objFromDb.get must_== obj - } - - "MongoDocument" should { - - "handle primitives" in { - checkMongoIsRunning - import mongodocumentspecs._ - - val primitives = Primitives( - ObjectId.get, - "This is a String", - 123, - 124L, - (125.5).toDouble, - (126.5).toFloat, - (127).toByte, - BigInt(128999), - true, - (129).toShort, - new java.lang.Integer(130), - new java.lang.Long(131L), - new java.lang.Double(132.5), - new java.lang.Float(133.5), - new java.lang.Byte("12"), - java.lang.Boolean.TRUE, - new java.lang.Short("135") - ) - passSaveAndRetrieveTests(primitives, Primitives) - } - - "handle null" in { - checkMongoIsRunning - import mongodocumentspecs._ - - val ntd = NullTestDoc(ObjectId.get, null, NullTestEmbed(null)) - passSaveAndRetrieveTests(ntd, NullTestDoc) - } - - "handle Option" in { - checkMongoIsRunning - import mongodocumentspecs._ - - val otd = OptionTestDoc(ObjectId.get, None, Some("Some String")) - passSaveAndRetrieveTests(otd, OptionTestDoc) - } - - "handle Box using JsonBoxSerializer" in { - checkMongoIsRunning - import mongodocumentspecs._ - - val btd = BoxTestDoc(ObjectId.get, Empty, Full("Full String"), Failure("This is a failure")) - passSaveAndRetrieveTests(btd, BoxTestDoc) - } - - "handle Maps properly" in { - checkMongoIsRunning - import mongodocumentspecs._ - - val mtd = MapTestDoc(ObjectId.get, Map("x" -> "1")) - passSaveAndRetrieveTests(mtd, MapTestDoc) - - // empty map - val mtd2 = MapTestDoc(ObjectId.get, Map[String, String]()) - passSaveAndRetrieveTests(mtd2, MapTestDoc) - } - - "handle DateTime properly" in { - checkMongoIsRunning - import mongodocumentspecs._ - - val dt = DateTime.now.plusMinutes(10) - - val dttd = DateTimeTestDoc(ObjectId.get, dt) - passSaveAndRetrieveTests(dttd, DateTimeTestDoc) - } - - "handle LocalDate properly" in { - checkMongoIsRunning - import mongodocumentspecs._ - - val ld = LocalDate.now - - val dttd = LocalDateTestDoc(ObjectId.get, ld) - passSaveAndRetrieveTests(dttd, LocalDateTestDoc) - } - } -} diff --git a/persistence/mongodb/src/test/scala/net/liftweb/mongodb/MongoRulesSpec.scala b/persistence/mongodb/src/test/scala/net/liftweb/mongodb/MongoRulesSpec.scala deleted file mode 100644 index e49f520d0e..0000000000 --- a/persistence/mongodb/src/test/scala/net/liftweb/mongodb/MongoRulesSpec.scala +++ /dev/null @@ -1,49 +0,0 @@ -/** - * Copyright 2014 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package mongodb - -import common._ -import util.Helpers._ - -import org.specs2.mutable._ - -import org.bson.types.ObjectId - -case class CollectionNameTestDoc(_id: ObjectId) extends MongoDocument[CollectionNameTestDoc] { - def meta = CollectionNameTestDoc -} -object CollectionNameTestDoc extends MongoDocumentMeta[CollectionNameTestDoc] - -/** - * Systems under specification for MongoRules. - */ -class MongoRulesSpec extends Specification { - "Mongo Rules Specification".title - sequential - - "MongoRules" should { - "default collection name" in { - CollectionNameTestDoc.collectionName must_== "collectionnametestdocs" - } - "snakify collection name" in { - MongoRules.collectionName.doWith((_, name) => snakify(name)+"s") { - CollectionNameTestDoc.collectionName must_== "net.liftweb.mongodb.collection_name_test_docs" - } - } - } -} diff --git a/persistence/mongodb/src/test/scala/net/liftweb/mongodb/MongoSpec.scala b/persistence/mongodb/src/test/scala/net/liftweb/mongodb/MongoSpec.scala deleted file mode 100644 index 4a0d65ed3e..0000000000 --- a/persistence/mongodb/src/test/scala/net/liftweb/mongodb/MongoSpec.scala +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright 2014-2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package mongodb - -import util.{ConnectionIdentifier, DefaultConnectionIdentifier} - -import org.specs2.mutable.Specification -import org.specs2.execute.Result - -import com.mongodb._ - -class MongoSpec extends Specification { - "Mongo Specification".title - - case object TestMongoIdentifier extends ConnectionIdentifier { - val jndiName = "test_a" - } - - def passDefinitionTests(id: ConnectionIdentifier, mc: MongoClient, db: String): Result = { - // define the db - MongoDB.defineDb(id, mc, db) - - // make sure mongo is running - try { - // this will throw an exception if it can't connect to the db - mc.listDatabaseNames() - } catch { - case _: MongoTimeoutException => - skipped("MongoDB is not running") - } - - // using an undefined identifier throws an exception - MongoDB.useDatabase(DefaultConnectionIdentifier) { db => - db.listCollections - } must throwA(new MongoException("Mongo not found: ConnectionIdentifier(lift)")) - - // remove defined db - MongoDB.remove(id) - - success - } - - "Mongo" should { - - "Define DB with MongoClient instance" in { - val opts = MongoClientOptions.builder - .connectionsPerHost(12) - .serverSelectionTimeout(2000) - .build - passDefinitionTests(TestMongoIdentifier, new MongoClient(new ServerAddress("localhost"), opts), "test_default_b") - } - } -} diff --git a/persistence/mongodb/src/test/scala/net/liftweb/mongodb/MongoTestKit.scala b/persistence/mongodb/src/test/scala/net/liftweb/mongodb/MongoTestKit.scala deleted file mode 100644 index 83b35a4d8c..0000000000 --- a/persistence/mongodb/src/test/scala/net/liftweb/mongodb/MongoTestKit.scala +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright 2010-2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package mongodb - -import util.{ConnectionIdentifier, DefaultConnectionIdentifier, Props} - -import org.specs2.mutable.Specification -import org.specs2.specification.BeforeAfterEach - -import com.mongodb._ - -// The sole mongo object for testing -object TestMongo { - val mongo = { - val uri = Props.get("mongo.test.uri", "127.0.0.1:27017") - val opts = MongoClientOptions.builder.serverSelectionTimeout(2000) - new MongoClient(new MongoClientURI(s"mongodb://$uri", opts)) - } - - lazy val isMongoRunning: Boolean = - try { - // this will throw an exception if it can't connect to the db - mongo.listDatabaseNames() - true - } catch { - case _: MongoTimeoutException => - false - } -} - -trait MongoTestKit extends Specification with BeforeAfterEach { - sequential - - def dbName = "lift_"+this.getClass.getName - .replace("$", "") - .replace("net.liftweb.mongodb.", "") - .replace(".", "_") - .toLowerCase - - // If you need more than one db, override this - def dbs: List[(ConnectionIdentifier, String)] = - (DefaultConnectionIdentifier, dbName) :: Nil - - def debug = false - - def before = { - // define the dbs - dbs.foreach { case (id, db) => - MongoDB.defineDb(id, TestMongo.mongo, db) - } - } - - def checkMongoIsRunning = { - TestMongo.isMongoRunning must beEqualTo(true).orSkip - } - - def after = { - if (!debug && TestMongo.isMongoRunning) { - // drop the databases - dbs.foreach { case (id, _) => - MongoDB.useDatabase(id) { _.drop() } - } - } - - // clear the mongo instances - dbs.foreach { case (id, _) => - MongoDB.remove(id) - } - } -} - diff --git a/persistence/mongodb/src/test/scala/net/liftweb/mongodb/QueryExamplesSpec.scala b/persistence/mongodb/src/test/scala/net/liftweb/mongodb/QueryExamplesSpec.scala deleted file mode 100644 index c384cc5536..0000000000 --- a/persistence/mongodb/src/test/scala/net/liftweb/mongodb/QueryExamplesSpec.scala +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Copyright 2011-2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
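The MongoSpec and MongoTestKit removed above register databases through `MongoDB.defineDb` and tear them down with `MongoDB.useDatabase` and `MongoDB.remove`. A minimal sketch of the same wiring in application code, using only the calls visible in those files (the connection string, database name, and the `MongoBoot` object are illustrative placeholders, not part of the original sources):

```scala
import com.mongodb.{MongoClient, MongoClientOptions, MongoClientURI}
import net.liftweb.mongodb.MongoDB
import net.liftweb.util.DefaultConnectionIdentifier

object MongoBoot {
  def init(): Unit = {
    // Fail fast if no server can be selected within two seconds.
    val opts = MongoClientOptions.builder.serverSelectionTimeout(2000)
    val client = new MongoClient(new MongoClientURI("mongodb://127.0.0.1:27017", opts))

    // Register the database under the default identifier; MongoDocumentMeta
    // operations resolve their collections through this registration.
    MongoDB.defineDb(DefaultConnectionIdentifier, client, "myapp")
  }

  def shutdown(): Unit = {
    // Drop the registration (the test kit above does the same in its after hook).
    MongoDB.remove(DefaultConnectionIdentifier)
  }
}
```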
- */ - -package net.liftweb -package mongodb - -import BsonDSL._ -import json.JObject - -import java.util.{Calendar, Date, UUID} -import java.util.regex.Pattern - -import org.bson.types.ObjectId -import org.specs2.mutable.Specification - -package queryexamplesfixtures { - import com.mongodb.client.model.Indexes._ - - case class Person(_id: ObjectId, name: String, birthDate: Date, childId: UUID, petId: Option[ObjectId]) extends MongoDocument[Person] { - def meta = Person - } - object Person extends MongoDocumentMeta[Person] { - override def formats = allFormats - - def createIndexes(): Unit = { - Person.createIndex(ascending("name")) - } - - // implicit formats already exists - def findAllBornAfter(dt: Date) = findAll(("birthDate" -> ("$gt" -> dt))) - } -} - -class QueryExamplesSpec extends Specification with MongoTestKit { - "QueryExamples Specification".title - - import queryexamplesfixtures._ - - "Query examples" in { - checkMongoIsRunning - - Person.createIndexes() - - val fredsBirthDate = Calendar.getInstance - fredsBirthDate.set(1970, 1, 1, 19, 0) - - val wilmasBirthDate = Calendar.getInstance - wilmasBirthDate.set(1971, 8, 30, 19, 0) - - val barneysBirthDate = Calendar.getInstance - barneysBirthDate.set(1972, 8, 30, 19, 0) - - val bettysBirthDate = Calendar.getInstance - bettysBirthDate.set(1973, 8, 30, 19, 0) - - val dinoId = ObjectId.get - val pebblesId = UUID.randomUUID - val bammbammId = UUID.randomUUID - - val fred = Person(ObjectId.get, "Flinstone, Fred", fredsBirthDate.getTime, pebblesId, Some(dinoId)) - val wilma = Person(ObjectId.get, "Flinstone, Wilma", wilmasBirthDate.getTime, pebblesId, Some(dinoId)) - val barney = Person(ObjectId.get, "Rubble, Barney", barneysBirthDate.getTime, bammbammId, None) - val betty = Person(ObjectId.get, "Rubble, Betty", bettysBirthDate.getTime, bammbammId, None) - - fred.save - wilma.save - barney.save - betty.save - - val flinstonesIds = List(fred._id, wilma._id) - val rubblesIds = List(barney._id, betty._id) - - // query for Bamm-Bamm's parents (UUID) by childId - val pebblesParents = Person.findAll(("childId" -> bammbammId)) - - pebblesParents.length must_== 2 - pebblesParents.map(_._id).filterNot(rubblesIds.contains(_)) must_== List() - - // query for Bamm-Bamm's and Pebbles' parents using List[UUID] - val pebblesAndBammBammsParents = Person.findAll(("childId" -> ("$in" -> List(pebblesId, bammbammId)))) - - pebblesAndBammBammsParents.length must_== 4 - - // query for Dino's owners (ObjectId) - val dinosOwners = Person.findAll(("petId" -> dinoId)) - - dinosOwners.length must_== 2 - dinosOwners.map(_._id).filterNot(flinstonesIds.contains(_)) must_== List() - - // query for the Rubbles using a Regex - val rubbles = Person.findAll(("name" -> "^Rubble".r)) - - rubbles.length must_== 2 - rubbles.map(_._id).filterNot(rubblesIds.contains(_)) must_== List() - - // query for the Flinstones using a Pattern - val flinstones = Person.findAll(("name" -> Pattern.compile("^flinst", Pattern.CASE_INSENSITIVE))) - - flinstones.length must_== 2 - flinstones.map(_._id).filterNot(flinstonesIds.contains(_)) must_== List() - - // query for the Flinstones using a List[ObjectId] - val flinstones2 = Person.findAll(("_id" -> ("$in" -> flinstonesIds))) - - flinstones2.length must_== 2 - flinstones2.map(_._id).filterNot(flinstonesIds.contains(_)) must_== List() - - // query using Dates - implicit val formats = Person.formats // this is needed for Dates - val qryDate = Calendar.getInstance - qryDate.set(1971, 1, 1, 19, 0) - val people = Person.findAll(("birthDate" -> 
("$gt" -> qryDate.getTime))) - - people.length must_== 3 - people.map(_._id).filterNot(List(wilma._id, barney._id, betty._id).contains(_)) must_== List() - - // you do not need to define the implicit formats val if you write your query in the DocumentMeta object. - val people2 = Person.findAllBornAfter(qryDate.getTime) - - people2.length must_== 3 - people2.map(_._id).filterNot(List(wilma._id, barney._id, betty._id).contains(_)) must_== List() - - // query all with Sort - val people3 = Person.findAll(JObject(Nil), ("birthDate" -> -1)) - - people3.length must_== 4 - people3.map(_._id) must_== List(betty._id, barney._id, wilma._id, fred._id) - - val people4 = Person.findAll(JObject(Nil), ("birthDate" -> 1)) - - people4.length must_== 4 - people4.map(_._id) must_== List(fred._id, wilma._id, barney._id, betty._id) - } -} diff --git a/persistence/proto/src/main/scala/net/liftweb/proto/Crudify.scala b/persistence/proto/src/main/scala/net/liftweb/proto/Crudify.scala deleted file mode 100755 index 7fe8ef0146..0000000000 --- a/persistence/proto/src/main/scala/net/liftweb/proto/Crudify.scala +++ /dev/null @@ -1,807 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package proto - -import sitemap._ -import Loc._ -import http._ -import util._ -import common._ -import Helpers._ - -import scala.xml._ - -/** - * This trait automatically adds CRUD (Create, read, update and delete) operations - * to an existing persistence mechanism. - * Various methods can be overridden to - * customize which operations are available to a user and how things are displayed. - * For example, you can disable deletion of entities by overriding deleteMenuLoc to Empty. - * - */ -trait Crudify { - /** - * The type of records we're manipulating - */ - type TheCrudType - - /** - * A generic representation of a field. For example, this represents the - * abstract "name" field and is used along with an instance of TheCrudType - * to compute the BaseField that is the "name" field on the specific instance - * of TheCrudType - */ - type FieldPointerType - - /** - * This trait represents a Bridge between TheCrudType - * and the Crudify trait. It's not necessary to mix this - * trait into TheCrudType, but instead provide a mechanism - * for promoting a TheCrudType to CrudBridge - */ - protected trait CrudBridge { - /** - * Delete the instance of TheCrudType from the backing store - */ - def delete_! : Boolean - - /** - * Save an instance of TheCrudType in backing store - */ - def save : Boolean - - /** - * Validate the fields in TheCrudType and return a List[FieldError] - * representing the errors. 
- */ - def validate: List[FieldError] - - /** - * Return a string representation of the primary key field - */ - def primaryKeyFieldAsString: String - } - - /** - * This method will instantiate a bridge from TheCrudType so - * that the appropriate logical operations can be performed - * on TheCrudType - */ - protected implicit def buildBridge(from: TheCrudType): CrudBridge - - protected trait FieldPointerBridge { - /** - * What is the display name of this field? - */ - def displayHtml: NodeSeq - } - - /** - * Based on a FieldPointer, build a FieldPointerBridge - */ - protected implicit def buildFieldBridge(from: FieldPointerType): FieldPointerBridge - - lazy val Prefix = calcPrefix - lazy val ListItems = calcListItems - lazy val ViewItem = calcViewItem - lazy val CreateItem = calcCreateItem - lazy val EditItem = calcEditItem - lazy val DeleteItem = calcDeleteItem - - /** - * What's the prefix for this CRUD. Typically the table name. - */ - def calcPrefix: List[String] - - /** - * Vend a new instance of TheCrudType - */ - def create: TheCrudType - - def calcListItems = "list" - - def calcViewItem = "view" - - def calcCreateItem = "create" - - def calcEditItem = "edit" - - def calcDeleteItem = "delete" - - def displayName = displayHtml.text - - def displayHtml: NodeSeq = Text(calcPrefix.head) - - /** - * The fields displayed on the list page. By default all - * the displayed fields, but this list - * can be shortened. - */ - def fieldsForList: List[FieldPointerType] = fieldsForDisplay - - /** - * When displaying a record, what fields do we display - */ - def fieldsForDisplay: List[FieldPointerType] - - /** - * The list of fields to present on a form form editing - */ - def fieldsForEditing: List[FieldPointerType] = fieldsForDisplay - - def pageWrapper(body: NodeSeq): NodeSeq = - - { - body - } - - - /** - * The menu item for listing items (make this "Empty" to disable) - */ - def showAllMenuLoc: Box[Menu] = - Full(Menu(Loc("List "+Prefix, listPath, showAllMenuName, - addlMenuLocParams ::: ( - locSnippets :: Loc.Template(showAllTemplate) :: - showAllMenuLocParams)))) - - /** - * Override to include new Params for the show all menu - */ - def showAllMenuLocParams: List[Loc.AnyLocParam] = Nil - - /** - * The menu item for creating items (make this "Empty" to disable) - */ - def createMenuLoc: Box[Menu] = - Full(Menu(Loc("Create "+Prefix, createPath, createMenuName, - (addlMenuLocParams ::: ( - locSnippets :: Loc.Template(createTemplate) :: - createMenuLocParams))))) - /** - * Override to include new Params for the create menu - */ - def createMenuLocParams: List[Loc.AnyLocParam] = Nil - - /** - * If there are any Loc.LocParams that need to be - * added to every menu (e.g., a guard for access control - * of the Crudify screens) - */ - protected def addlMenuLocParams: List[Loc.AnyLocParam] = Nil - - - /** - * Customize the display of a row for displayRecord - */ - protected def doDisplayRecordRow(entry: TheCrudType): (NodeSeq)=>NodeSeq = { - "^" #> { - for { - pointer <- fieldsForDisplay - field <- computeFieldFromPointer(entry, pointer).toList - if field.shouldDisplay_? 
- } yield { - ".name *" #> field.displayHtml & - ".value *" #> field.asHtml - } - } - } - - /** - * Customize the display of records for view menu loc - */ - protected def displayRecord(entry: TheCrudType): (NodeSeq)=>NodeSeq = { - ".row" #> doDisplayRecordRow(entry) - } - - - /** - * The menu item for viewing an item (make this "Empty" to disable) - */ - def viewMenuLoc: Box[Menu] = - Full(Menu(new Loc[TheCrudType]{ - // the name of the page - def name = "View "+Prefix - - override val snippets: SnippetTest = { - case ("crud.view", Full(wp)) => displayRecord(wp.asInstanceOf[TheCrudType]) - } - - def defaultValue = Empty - - lazy val params = addlMenuLocParams ::: viewMenuLocParams - - /** - * What's the text of the link? - */ - val text = new Loc.LinkText(calcLinkText _) - - def calcLinkText(in: TheCrudType): NodeSeq = Text(S.?("crudify.menu.view.displayName", displayName)) - - /** - * Rewrite the request and emit the type-safe parameter - */ - override val rewrite: LocRewrite = - Full(NamedPF(name) { - case RewriteRequest(pp , _, _) if hasParamFor(pp, viewPath) => - (RewriteResponse(viewPath), findForParam(pp.wholePath.last)) - }) - - override def calcTemplate = Full(viewTemplate) - - val link = - new Loc.Link[TheCrudType](viewPath, false) { - override def createLink(in: TheCrudType) = - Full(Text(viewPathString+"/"+obscurePrimaryKey(in))) - } - })) - /** - * Override to include new Params for the view menu - */ - def viewMenuLocParams: List[Loc.LocParam[TheCrudType]] = Nil - - - /** - * The menu item for editing an item (make this "Empty" to disable) - */ - def editMenuLoc: Box[Menu] = { - Full(Menu(new Loc[TheCrudType]{ - // the name of the page - def name = "Edit "+Prefix - - override val snippets: SnippetTest = { - case ("crud.edit", Full(wp)) => crudDoForm(wp.asInstanceOf[TheCrudType], S.?("Save")) - } - - def defaultValue = Empty - - lazy val params = addlMenuLocParams ::: editMenuLocParams - - /** - * What's the text of the link? - */ - val text = new Loc.LinkText(calcLinkText _) - - def calcLinkText(in: TheCrudType): NodeSeq = Text(S.?("crudify.menu.edit.displayName", displayName)) - - /** - * Rewrite the request and emit the type-safe parameter - */ - override val rewrite: LocRewrite = - Full(NamedPF(name) { - case RewriteRequest(pp , _, _) if hasParamFor(pp, editPath) => - (RewriteResponse(editPath), findForParam(pp.wholePath.last)) - }) - - override def calcTemplate = Full(editTemplate) - - val link = - new Loc.Link[TheCrudType](editPath, false) { - override def createLink(in: TheCrudType) = - Full(Text(editPathString+"/"+obscurePrimaryKey(in))) - } - })) - } - - /** - * Override to include new Params for the edit menu - */ - def editMenuLocParams: List[Loc.LocParam[TheCrudType]] = Nil - - - /** - * The String displayed for menu editing - */ - def editMenuName = S.?("Edit")+" "+displayName - - /** - * This is the template that's used to render the page after the - * optional wrapping of the template in the page wrapper - */ - def editTemplate(): NodeSeq = pageWrapper(_editTemplate) - - def editId = "edit_page" - def editClass = "edit_class" - def editErrorClass = "edit_error_class" - - /** - * The core template for editing. Does not include any - * page wrapping. - */ - protected def _editTemplate = { -
[edit-template XML markup lost in extraction: a form-and-table layout with .field rows holding .name / .form cells and a submit button]
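The stripped markup cannot be recovered verbatim, but the hooks it has to expose follow from `editId` / `editClass` / `editButton` and from the `crudDoForm` bindings later in this file (`.field`, `.name *`, `.form *`, `type=submit`). A hypothetical reconstruction, not the original source:

```scala
// Hypothetical sketch of _editTemplate inside the Crudify trait; the exact
// original markup was lost, only the bound ids/classes/labels are certain.
protected def _editTemplate = {
  <form method="post" action={S.uri}>
    <table id={editId} class={editClass}>
      <tr class="field">
        <td class="name"></td>
        <td class="form"></td>
      </tr>
      <tr>
        <td></td>
        <td><button type="submit">{editButton}</button></td>
      </tr>
    </table>
  </form>
}
```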
- } - - def editButton = S.?("Save") - - /** - * Override this method to change how fields are displayed for delete - */ - protected def doDeleteFields(item: TheCrudType): (NodeSeq)=>NodeSeq = { - "^" #> { - for { - pointer <- fieldsForDisplay - field <- computeFieldFromPointer(item, pointer).toList - if field.shouldDisplay_? - } yield { - ".name *" #> field.displayHtml & - ".value *" #> field.asHtml - } - } - } - - /** - * Override this method to change the behavior of deleting an item - */ - protected def doDeleteSubmit(item: TheCrudType, from: String)() = { - S.notice(S ? "Deleted") - item.delete_! - S.redirectTo(from) - } - - - - /** - * Override this method to change how the delete screen is built - */ - protected def crudyDelete(item: TheCrudType): (NodeSeq)=>NodeSeq = { - val from = referer - - ".field" #> doDeleteFields(item) & - "type=submit" #> SHtml.onSubmitUnit(doDeleteSubmit(item, from) _) - } - - - /** - * The menu item for deleting an item (make this "Empty" to disable) - */ - def deleteMenuLoc: Box[Menu] = { - Full(Menu(new Loc[TheCrudType]{ - // the name of the page - def name = "Delete "+Prefix - - override val snippets: SnippetTest = { - case ("crud.delete", Full(wp)) => crudyDelete(wp.asInstanceOf[TheCrudType]) - } - - def defaultValue = Empty - - lazy val params = addlMenuLocParams ::: deleteMenuLocParams - - /** - * What's the text of the link? - */ - val text = new Loc.LinkText(calcLinkText _) - - def calcLinkText(in: TheCrudType): NodeSeq = Text(S.?("crudify.menu.delete.displayName", displayName)) - - /** - * Rewrite the request and emit the type-safe parameter - */ - override val rewrite: LocRewrite = - Full(NamedPF(name) { - case RewriteRequest(pp , _, _) if hasParamFor(pp, deletePath) => - (RewriteResponse(deletePath), findForParam(pp.wholePath.last)) - }) - - override def calcTemplate = Full(deleteTemplate) - - val link = - new Loc.Link[TheCrudType](deletePath, false) { - override def createLink(in: TheCrudType) = - Full(Text(deletePathString+"/"+obscurePrimaryKey(in))) - } - })) - } - - private def hasParamFor(pp: ParsePath, toTest: List[String]): Boolean = { - pp.wholePath.startsWith(toTest) && - pp.wholePath.length == (toTest.length + 1) && - findForParam(pp.wholePath.last).isDefined - } - - /** - * Override to include new Params for the delete menu - */ - def deleteMenuLocParams: List[Loc.LocParam[TheCrudType]] = Nil - - - def deleteMenuName = S.?("Delete")+" "+displayName - - /** - * This is the template that's used to render the page after the - * optional wrapping of the template in the page wrapper - */ - def deleteTemplate(): NodeSeq = pageWrapper(_deleteTemplate) - - def deleteId = "delete_page" - def deleteClass = "delete_class" - - /** - * The core template for deleting. Does not include any - * page wrapping. - */ - def _deleteTemplate = { -
[delete-template XML markup lost in extraction: a table of .field rows holding .name / .value cells and a submit button]
- } - - def deleteButton = S.?("Delete") - - - def createMenuName = S.?("Create")+" "+displayName - - /** - * This is the template that's used to render the page after the - * optional wrapping of the template in the page wrapper. - */ - def createTemplate(): NodeSeq = pageWrapper(_createTemplate) - - def createId = "create_page" - def createClass = "create_class" - - /** - * The core template for creating. Does not include any - * page wrapping. - */ - def _createTemplate = { -
[create-template XML markup lost in extraction: the same .field / .name / .form / submit layout as the edit template]
- } - - def createButton = S.?("Create") - - def viewMenuName = S.?("View")+" "+displayName - - /** - * This is the template that's used to render the page after the - * optional wrapping of the template in the page wrapper - */ - def viewTemplate(): NodeSeq = pageWrapper(_viewTemplate) - - def viewId = "view_page" - def viewClass = "view_class" - - /** - * The core template for viewing. Does not include any - * page wrapping. - */ - def _viewTemplate = { -
[view-template XML markup lost in extraction: a table of .row entries holding .name / .value cells]
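As with the edit template, only the bound class names are certain here: `displayRecord` fills `.row` entries through `.name *` / `.value *`. A hypothetical reconstruction, not the original source:

```scala
// Hypothetical sketch of _viewTemplate inside the Crudify trait.
def _viewTemplate = {
  <table id={viewId} class={viewClass}>
    <tr class="row">
      <td class="name"></td>
      <td class="value"></td>
    </tr>
  </table>
}
```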
- } - - def showAllMenuName = S.?("List", displayName) - - /** - * This is the template that's used to render the page after the - * optional wrapping of the template in the page wrapper - */ - def showAllTemplate(): NodeSeq = pageWrapper(_showAllTemplate) - - def showAllId = "show_all" - def showAllClass = "show_all" - - /** - * The core template for showing record. Does not include any - * page wrapping - */ - def _showAllTemplate = { -
[show-all template XML markup lost in extraction: a listing table with .header-item cells and {S ? "View"} / {S ? "Edit"} / {S ? "Delete"} column headers, .row entries holding .row-item cells plus .view / .edit / .delete links, and a pager row with {previousWord} and {nextWord} in .previous / .next]
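All of these screens are reached through the menu Locs that `menus` (defined just below) collects. A rough sketch of installing them in a Lift `Boot` class, where `MyThingCrud` stands in for some concrete Crudify implementation (illustrative, not from the original sources):

```scala
import net.liftweb.http.LiftRules
import net.liftweb.sitemap.{Menu, SiteMap}

class Boot {
  def boot(): Unit = {
    // Home page plus the list/create/view/edit/delete menus vended by Crudify.
    val entries = (Menu.i("Home") / "index") :: MyThingCrud.menus
    LiftRules.setSiteMap(SiteMap(entries: _*))
  }
}
```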
- } - - def nextWord = S.?("Next") - def previousWord = S.?("Previous") - - lazy val listPath = Prefix ::: List(ListItems) - - lazy val listPathString: String = mp(listPath) - - lazy val createPath = Prefix ::: List(CreateItem) - - lazy val createPathString: String = mp(createPath) - - lazy val viewPath = Prefix ::: List(ViewItem) - - lazy val viewPathString: String = mp(viewPath) - - lazy val editPath = Prefix ::: List(EditItem) - - lazy val editPathString: String = mp(editPath) - - lazy val deletePath = Prefix ::: List(DeleteItem) - - lazy val deletePathString: String = mp(deletePath) - - private def mp(in: List[String]) = in.mkString("/", "/", "") - - def menus: List[Menu] = - List(showAllMenuLoc, createMenuLoc, viewMenuLoc, - editMenuLoc, deleteMenuLoc).flatMap(x => x) - - /** - * Given a range, find the records. Your implementation of this - * method should enforce ordering (e.g., on primary key). - */ - def findForList(start: Long, count: Int): List[TheCrudType] - - /** - * Given a String that represents the primary key, find an instance of - * TheCrudType - */ - def findForParam(in: String): Box[TheCrudType] - - /** - * Given an instance of TheCrudType and FieldPointerType, convert - * that to an actual instance of a BaseField on the instance of TheCrudType - */ - protected def computeFieldFromPointer(instance: TheCrudType, pointer: FieldPointerType): Box[BaseField] - - /** - * This method defines how many rows are displayed per page. By - * default, it's hard coded at 20, but you can make it session specific - * or change the default by overriding this method. - */ - protected def rowsPerPage: Int = 20 - - /** - * Override this method to customize how header items are treated - */ - protected def doCrudAllHeaderItems: (NodeSeq)=>NodeSeq = { - "^ *" #> fieldsForList.map(_.displayHtml) - } - - /** - * Override this method to customize how a crudAll line is generated - */ - protected def doCrudAllRowItem(c: TheCrudType): (NodeSeq)=>NodeSeq = { - "^" #> { - for { - pointer <- fieldsForList - field <- computeFieldFromPointer(c, pointer).toList - } yield { - ".row-item *" #> field.asHtml - } - } - } - - /** - * Override this method to determine how all the rows on a crud - * page are displayed - */ - protected def doCrudAllRows(list: List[TheCrudType]): (NodeSeq)=>NodeSeq = { - "^" #> list.take(rowsPerPage).map { rowItem => - ".row-item" #> doCrudAllRowItem(rowItem) & - ".view [href]" #> (s"$viewPathString/${obscurePrimaryKey(rowItem)}") & - ".edit [href]" #> (s"$editPathString/${obscurePrimaryKey(rowItem)}") & - ".delete [href]" #> (s"$deletePathString/${obscurePrimaryKey(rowItem)}") - } - } - - /** - * Override this method to change how the previous link is - * generated - */ - protected def crudAllPrev(first: Long): (NodeSeq)=>NodeSeq = { - if (first < rowsPerPage) { - ClearNodes - } else { - "^ <*>" #> - " #> - - } - } - - /** - * Override this method if you want to change the behavior - * of displaying records via the crud.all snippet - */ - protected def doCrudAll: (NodeSeq)=>NodeSeq = { - val first = S.param("first").map(toLong) openOr 0L - val list = findForList(first, rowsPerPage) - - ".header-item" #> doCrudAllHeaderItems & - ".row" #> doCrudAllRows(list) & - ".previous" #> crudAllPrev(first) & - ".next" #> crudAllNext(first, list) - } - - - lazy val locSnippets = new DispatchLocSnippets { - val dispatch: PartialFunction[String, NodeSeq => NodeSeq] = { - case "crud.all" => doCrudAll - case "crud.create" => crudDoForm(create, S.?("Created")) - } - - } - - /** - * This method can 
be used to obscure the primary key. This is more secure - * because end users will not have access to the primary key. - */ - def obscurePrimaryKey(in: TheCrudType): String = obscurePrimaryKey(in.primaryKeyFieldAsString) - - /** - * This method can be used to obscure the primary key. This is more secure - * because end users will not have access to the primary key. This method - * actually does the obfuscation. You can use Mapper's KeyObfuscator class - * to implement a nice implementation of this method for session-by-session - * obfuscation.

- * - * By default, there's no obfuscation. Note that if you obfuscate the - * primary key, you need to update the findForParam method to accept - * the obfuscated keys (and translate them back.) - */ - def obscurePrimaryKey(in: String): String = in - - def referer: String = S.referer openOr listPathString - - /** - * As the field names are being displayed for editing, this method - * is called with the XHTML that will be displayed as the field name - * and a flag indicating whether the field is required. You - * can wrap the fieldName in a span with a css class indicating that - * the field is required or otherwise do something to update the field - * name indicating to the user that the field is required. By default - * the method wraps the fieldName in a span with the class attribute set - * to "required_field". - */ - def wrapNameInRequired(fieldName: NodeSeq, required: Boolean): NodeSeq = { - if (required) { - {fieldName} - } else { - fieldName - } - } - - def crudDoForm(item: TheCrudType, noticeMsg: String)(in: NodeSeq): NodeSeq = { - val from = referer - val snipName = S.currentSnippet - - def loop(html:NodeSeq): NodeSeq = { - def error(field: BaseField): NodeSeq = { - field.uniqueFieldId match { - case fid @ Full(id) => S.getNotices.filter(_._3 == fid).flatMap(err => - List(Text(" "), {err._2}) ) - - case _ => NodeSeq.Empty - } - } - - def doFields(html: NodeSeq): NodeSeq = - for { - pointer <- fieldsForEditing - field <- computeFieldFromPointer(item, pointer).toList - if field.show_? - form <- field.toForm.toList - bindNode = - ".name *" #> { - wrapNameInRequired(field.displayHtml, field.required_?) ++ - error(field) - } & - ".form *" #> form - node <- bindNode(html) - } yield node - - def doSubmit() = item.validate match { - case Nil => - S.notice(noticeMsg) - item.save - S.redirectTo(from) - - case xs => - S.error(xs) - snipName.foreach(S.mapSnippet(_, loop)) - } - - val bind = - ".field" #> doFields _ & - "type=submit" #> SHtml.onSubmitUnit(doSubmit _) - - bind(html) - } - - loop(in) - } - - -} - diff --git a/persistence/proto/src/main/scala/net/liftweb/proto/ProtoRules.scala b/persistence/proto/src/main/scala/net/liftweb/proto/ProtoRules.scala deleted file mode 100644 index d31d8b4dce..0000000000 --- a/persistence/proto/src/main/scala/net/liftweb/proto/ProtoRules.scala +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright 2010-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package proto - -import net.liftweb.common._ -import net.liftweb.util._ -import net.liftweb.http._ -import scala.reflect.Manifest - -import java.util.regex.Pattern - -/** - * This singleton contains the rules for persistence - */ -object ProtoRules extends Factory with LazyLoggable { - /** - * The regular expression pattern for matching email addresses. 
- */ - val emailRegexPattern = new FactoryMaker(Pattern.compile("^[a-z0-9._%\\-+]+@(?:[a-z0-9\\-]+\\.)+[a-z]{2,}$", Pattern.CASE_INSENSITIVE)) {} - -} - diff --git a/persistence/proto/src/main/scala/net/liftweb/proto/ProtoUser.scala b/persistence/proto/src/main/scala/net/liftweb/proto/ProtoUser.scala deleted file mode 100644 index 84ee3f5f70..0000000000 --- a/persistence/proto/src/main/scala/net/liftweb/proto/ProtoUser.scala +++ /dev/null @@ -1,1139 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package proto - -import net.liftweb.http._ -import js._ -import JsCmds._ -import scala.xml.{NodeSeq, Node, Text, Elem} -import scala.xml.transform._ -import net.liftweb.sitemap._ -import net.liftweb.sitemap.Loc._ -import net.liftweb.util.Helpers._ -import net.liftweb.util._ -import net.liftweb.common._ -import net.liftweb.util.Mailer._ -import S._ - -/** - * A prototypical user class with abstractions to the underlying storage - */ -trait ProtoUser { - /** - * The underlying record for the User - */ - type TheUserType - - /** - * Bridges from TheUserType to methods used in this class - */ - protected trait UserBridge { - /** - * Convert the user's primary key to a String - */ - def userIdAsString: String - - /** - * Return the user's first name - */ - def getFirstName: String - - /** - * Return the user's last name - */ - def getLastName: String - - /** - * Get the user's email - */ - def getEmail: String - - /** - * Is the user a superuser - */ - def superUser_? : Boolean - - /** - * Has the user been validated? - */ - def validated_? : Boolean - - /** - * Does the supplied password match the actual password? 
- */ - def testPassword(toTest: Box[String]): Boolean - - /** - * Set the validation flag on the user and return the user - */ - def setValidated(validation: Boolean): TheUserType - - /** - * Set the unique ID for this user to a new value - */ - def resetUniqueId(): TheUserType - - /** - * Return the unique ID for the user - */ - def getUniqueId(): String - - /** - * Validate the user - */ - def validate: List[FieldError] - - /** - * Given a list of string, set the password - */ - def setPasswordFromListString(in: List[String]): TheUserType - - /** - * Save the user to backing store - */ - def save: Boolean - - /** - * Get a nice name for the user - */ - def niceName: String = (getFirstName, getLastName, getEmail) match { - case (f, l, e) if f.length > 1 && l.length > 1 => f+" "+l+" ("+e+")" - case (f, _, e) if f.length > 1 => f+" ("+e+")" - case (_, l, e) if l.length > 1 => l+" ("+e+")" - case (_, _, e) => e - } - - /** - * Get a short name for the user - */ - def shortName: String = (getFirstName, getLastName) match { - case (f, l) if f.length > 1 && l.length > 1 => f+" "+l - case (f, _) if f.length > 1 => f - case (_, l) if l.length > 1 => l - case _ => getEmail - } - - /** - * Get an email link - */ - def niceNameWEmailLink = {niceName} - - } - - /** - * Convert an instance of TheUserType to the Bridge trait - */ - protected implicit def typeToBridge(in: TheUserType): UserBridge - - /** - * Get a nice name for the user - */ - def niceName(inst: TheUserType): String = inst.niceName - - /** - * Get a nice name for the user - */ - def shortName(inst: TheUserType): String = inst.shortName - - /** - * Get an email link for the user - */ - def niceNameWEmailLink(inst: TheUserType): Elem = inst.niceNameWEmailLink - - /** - * A generic representation of a field. For example, this represents the - * abstract "name" field and is used along with an instance of TheCrudType - * to compute the BaseField that is the "name" field on the specific instance - * of TheCrudType - */ - type FieldPointerType - - /** - * Based on a FieldPointer, build a FieldPointerBridge - */ - protected implicit def buildFieldBridge(from: FieldPointerType): FieldPointerBridge - - protected trait FieldPointerBridge { - /** - * What is the display name of this field? - */ - def displayHtml: NodeSeq - - /** - * Does this represent a pointer to a Password field - */ - def isPasswordField_? : Boolean - } - - /** - * The list of fields presented to the user at sign-up - */ - def signupFields: List[FieldPointerType] - - - /** - * The list of fields presented to the user for editing - */ - def editFields: List[FieldPointerType] - - /** - * What template are you going to wrap the various nodes in - */ - def screenWrap: Box[Node] = Empty - - /** - * The base path for the user related URLs. 
Override this - * method to change the base path - */ - def basePath: List[String] = "user_mgt" :: Nil - - /** - * The path suffix for the sign up screen - */ - def signUpSuffix: String = "sign_up" - - /** - * The computed path for the sign up screen - */ - lazy val signUpPath = thePath(signUpSuffix) - - /** - * The path suffix for the login screen - */ - def loginSuffix = "login" - - /** - * The computed path for the login screen - */ - lazy val loginPath = thePath(loginSuffix) - - /** - * The path suffix for the lost password screen - */ - def lostPasswordSuffix = "lost_password" - - /** - * The computed path for the lost password screen - */ - lazy val lostPasswordPath = thePath(lostPasswordSuffix) - - /** - * The path suffix for the reset password screen - */ - def passwordResetSuffix = "reset_password" - - /** - * The computed path for the reset password screen - */ - lazy val passwordResetPath = thePath(passwordResetSuffix) - - /** - * The path suffix for the change password screen - */ - def changePasswordSuffix = "change_password" - - /** - * The computed path for change password screen - */ - lazy val changePasswordPath = thePath(changePasswordSuffix) - - /** - * The path suffix for the logout screen - */ - def logoutSuffix = "logout" - - /** - * The computed pat for logout - */ - lazy val logoutPath = thePath(logoutSuffix) - - /** - * The path suffix for the edit screen - */ - def editSuffix = "edit" - - /** - * The computed path for the edit screen - */ - lazy val editPath = thePath(editSuffix) - - /** - * The path suffix for the validate user screen - */ - def validateUserSuffix = "validate_user" - - /** - * The calculated path to the user validation screen - */ - lazy val validateUserPath = thePath(validateUserSuffix) - - /** - * The application's home page - */ - def homePage = "/" - - /** - * If you want to redirect a user to a different page after login, - * put the page here - */ - object loginRedirect extends SessionVar[Box[String]](Empty) { - override lazy val __nameSalt = Helpers.nextFuncName - } - - /** - * A helper class that holds menu items for the path - */ - case class MenuItem(name: String, path: List[String], - loggedIn: Boolean) { - lazy val endOfPath = path.last - lazy val pathStr: String = path.mkString("/", "/", "") - lazy val display = name match { - case null | "" => false - case _ => true - } - } - - /** - * Calculate the path given a suffix by prepending the basePath to the suffix - */ - protected def thePath(end: String): List[String] = basePath ::: List(end) - - /** - * Return the URL of the "login" page - */ - def loginPageURL = loginPath.mkString("/","/", "") - - /** - * Inverted loggedIn_? - */ - def notLoggedIn_? = !loggedIn_? - - /** - * A Menu.LocParam to test if the user is logged in - */ - lazy val testLogginIn = If(loggedIn_? _, S.?("must.be.logged.in")) ; - - /** - * A Menu.LocParam to test if the user is a super user - */ - lazy val testSuperUser = If(superUser_? _, S.?("must.be.super.user")) - - /** - * A Menu.LocParam for testing if the user is logged in and if they're not, - * redirect them to the login page - */ - def loginFirst = If( - loggedIn_? _, - () => { - import net.liftweb.http.{RedirectWithState, RedirectState} - val uri = S.uriAndQueryString - RedirectWithState( - loginPageURL, - RedirectState( ()=>{loginRedirect.set(uri)}) - ) - } - ) - - /** - * Is there a user logged in and are they a superUser? - */ - def superUser_? : Boolean = currentUser.map(_.superUser_?) 
openOr false - - /** - * The menu item for login (make this "Empty" to disable) - */ - def loginMenuLoc: Box[Menu] = - Full(Menu(Loc("Login" + menuNameSuffix, loginPath, S.?("login"), loginMenuLocParams ::: globalUserLocParams))) - - - /** - * If you want to include a LocParam (e.g. LocGroup) on all the - * User menus, add them here - */ - protected def globalUserLocParams: List[LocParam[Unit]] = Nil - - /** - * The LocParams for the menu item for login. - * Overwrite in order to add custom LocParams. Attention: Not calling super will change the default behavior! - */ - protected def loginMenuLocParams: List[LocParam[Unit]] = - If(notLoggedIn_? _, S.?("already.logged.in")) :: - Template(() => wrapIt(login)) :: - Nil - - /** - * If you have more than 1 ProtoUser in your application, you'll need to distinguish the menu names. - * Do so by changing the menu name suffix so that there are no name clashes - */ - protected def menuNameSuffix: String = "" - - /** - * The menu item for logout (make this "Empty" to disable) - */ - def logoutMenuLoc: Box[Menu] = - Full(Menu(Loc("Logout" + menuNameSuffix, logoutPath, S.?("logout"), logoutMenuLocParams ::: globalUserLocParams))) - - /** - * The LocParams for the menu item for logout. - * Overwrite in order to add custom LocParams. Attention: Not calling super will change the default behavior! - */ - protected def logoutMenuLocParams: List[LocParam[Unit]] = - Template(() => wrapIt(logout)) :: - testLogginIn :: - Nil - - /** - * The menu item for creating the user/sign up (make this "Empty" to disable) - */ - def createUserMenuLoc: Box[Menu] = - Full(Menu(Loc("CreateUser" + menuNameSuffix, signUpPath, S.?("sign.up"), createUserMenuLocParams ::: globalUserLocParams))) - - /** - * The LocParams for the menu item for creating the user/sign up. - * Overwrite in order to add custom LocParams. Attention: Not calling super will change the default behavior! - */ - protected def createUserMenuLocParams: List[LocParam[Unit]] = - Template(() => wrapIt(signupFunc.map(_()) openOr signup)) :: - If(notLoggedIn_? _, S.?("logout.first")) :: - Nil - - /** - * The menu item for lost password (make this "Empty" to disable) - */ - def lostPasswordMenuLoc: Box[Menu] = - Full(Menu(Loc("LostPassword" + menuNameSuffix, lostPasswordPath, S.?("lost.password"), lostPasswordMenuLocParams ::: globalUserLocParams))) // not logged in - - /** - * The LocParams for the menu item for lost password. - * Overwrite in order to add custom LocParams. Attention: Not calling super will change the default behavior! - */ - protected def lostPasswordMenuLocParams: List[LocParam[Unit]] = - Template(() => wrapIt(lostPassword)) :: - If(notLoggedIn_? _, S.?("logout.first")) :: - Nil - - /** - * The menu item for resetting the password (make this "Empty" to disable) - */ - def resetPasswordMenuLoc: Box[Menu] = - Full(Menu(Loc("ResetPassword" + menuNameSuffix, (passwordResetPath, true), S.?("reset.password"), resetPasswordMenuLocParams ::: globalUserLocParams))) //not Logged in - - /** - * The LocParams for the menu item for resetting the password. - * Overwrite in order to add custom LocParams. Attention: Not calling super will change the default behavior! - */ - protected def resetPasswordMenuLocParams: List[LocParam[Unit]] = - Hidden :: - Template(() => wrapIt(passwordReset(snarfLastItem))) :: - If(notLoggedIn_? 
_, S.?("logout.first")) :: - Nil - - /** - * The menu item for editing the user (make this "Empty" to disable) - */ - def editUserMenuLoc: Box[Menu] = - Full(Menu(Loc("EditUser" + menuNameSuffix, editPath, S.?("edit.user"), editUserMenuLocParams ::: globalUserLocParams))) - - /** - * The LocParams for the menu item for editing the user. - * Overwrite in order to add custom LocParams. Attention: Not calling super will change the default behavior! - */ - protected def editUserMenuLocParams: List[LocParam[Unit]] = - Template(() => wrapIt(editFunc.map(_()) openOr edit)) :: - testLogginIn :: - Nil - - /** - * The menu item for changing password (make this "Empty" to disable) - */ - def changePasswordMenuLoc: Box[Menu] = - Full(Menu(Loc("ChangePassword" + menuNameSuffix, changePasswordPath, S.?("change.password"), changePasswordMenuLocParams ::: globalUserLocParams))) - - /** - * The LocParams for the menu item for changing password. - * Overwrite in order to add custom LocParams. Attention: Not calling super will change the default behavior! - */ - protected def changePasswordMenuLocParams: List[LocParam[Unit]] = - Template(() => wrapIt(changePassword)) :: - testLogginIn :: - Nil - - /** - * The menu item for validating a user (make this "Empty" to disable) - */ - def validateUserMenuLoc: Box[Menu] = - Full(Menu(Loc("ValidateUser" + menuNameSuffix, (validateUserPath, true), S.?("validate.user"), validateUserMenuLocParams ::: globalUserLocParams))) - - /** - * The LocParams for the menu item for validating a user. - * Overwrite in order to add custom LocParams. Attention: Not calling super will change the default behavior! - */ - protected def validateUserMenuLocParams: List[LocParam[Unit]] = - Hidden :: - Template(() => wrapIt(validateUser(snarfLastItem))) :: - If(notLoggedIn_? _, S.?("logout.first")) :: - Nil - - /** - * An alias for the sitemap property - */ - def menus: List[Menu] = sitemap // issue 182 - - /** - * Insert this LocParam into your menu if you want the - * User's menu items to be inserted at the same level - * and after the item - */ - final case object AddUserMenusAfter extends Loc.LocParam[Any] - - /** - * replace the menu that has this LocParam with the User's menu - * items - */ - final case object AddUserMenusHere extends Loc.LocParam[Any] - - /** - * Insert this LocParam into your menu if you want the - * User's menu items to be children of that menu - */ - final case object AddUserMenusUnder extends Loc.LocParam[Any] - - private lazy val AfterUnapply = SiteMap.buildMenuMatcher(_ == AddUserMenusAfter) - private lazy val HereUnapply = SiteMap.buildMenuMatcher(_ == AddUserMenusHere) - private lazy val UnderUnapply = SiteMap.buildMenuMatcher(_ == AddUserMenusUnder) - - /** - * The SiteMap mutator function - */ - def sitemapMutator: SiteMap => SiteMap = SiteMap.sitemapMutator { - case AfterUnapply(menu) => menu :: sitemap - case HereUnapply(_) => sitemap - case UnderUnapply(menu) => List(menu.rebuild(_ ::: sitemap)) - }(SiteMap.addMenusAtEndMutator(sitemap)) - - lazy val sitemap: List[Menu] = - List(loginMenuLoc, createUserMenuLoc, - lostPasswordMenuLoc, resetPasswordMenuLoc, - editUserMenuLoc, changePasswordMenuLoc, - validateUserMenuLoc, logoutMenuLoc).flatten(a => a) - - - def skipEmailValidation = false - - def userMenu: List[Node] = { - val li = loggedIn_? - ItemList. - filter(i => i.display && i.loggedIn == li). 
- map(i => ({i.name})) - } - - protected def snarfLastItem: String = - (for (r <- S.request) yield r.path.wholePath.last) openOr "" - - lazy val ItemList: List[MenuItem] = - List(MenuItem(S.?("sign.up"), signUpPath, false), - MenuItem(S.?("log.in"), loginPath, false), - MenuItem(S.?("lost.password"), lostPasswordPath, false), - MenuItem("", passwordResetPath, false), - MenuItem(S.?("change.password"), changePasswordPath, true), - MenuItem(S.?("log.out"), logoutPath, true), - MenuItem(S.?("edit.profile"), editPath, true), - MenuItem("", validateUserPath, false)) - - var onLogIn: List[TheUserType => Unit] = Nil - - var onLogOut: List[Box[TheUserType] => Unit] = Nil - - /** - * This function is given a chance to log in a user - * programmatically when needed - */ - var autologinFunc: Box[()=>Unit] = Empty - - def loggedIn_? = { - if(!currentUserId.isDefined) - for(f <- autologinFunc) f() - currentUserId.isDefined - } - - def logUserIdIn(id: String) { - curUser.remove() - curUserId(Full(id)) - } - - def logUserIn(who: TheUserType, postLogin: () => Nothing): Nothing = { - if (destroySessionOnLogin) { - S.session.openOrThrowException("we have a session here").destroySessionAndContinueInNewSession(() => { - logUserIn(who) - postLogin() - }) - } else { - logUserIn(who) - postLogin() - } - } - - def logUserIn(who: TheUserType) { - curUserId.remove() - curUser.remove() - curUserId(Full(who.userIdAsString)) - curUser(Full(who)) - onLogIn.foreach(_(who)) - } - - def logoutCurrentUser = logUserOut() - - def logUserOut() { - onLogOut.foreach(_(curUser)) - curUserId.remove() - curUser.remove() - S.session.foreach(_.destroySession()) - } - - /** - * There may be times when you want to be another user - * for some stack frames. Here's how to do it. - */ - def doWithUser[T](u: Box[TheUserType])(f: => T): T = - curUserId.doWith(u.map(_.userIdAsString)) { - curUser.doWith(u) { - f - } - } - - - private object curUserId extends SessionVar[Box[String]](Empty) { - override lazy val __nameSalt = Helpers.nextFuncName - } - - - def currentUserId: Box[String] = curUserId.get - - private object curUser extends RequestVar[Box[TheUserType]](currentUserId.flatMap(userFromStringId)) with CleanRequestVarOnSessionTransition { - override lazy val __nameSalt = Helpers.nextFuncName - } - - - /** - * Given a String representing the User ID, find the user - */ - protected def userFromStringId(id: String): Box[TheUserType] - - def currentUser: Box[TheUserType] = curUser.get - - def signupXhtml(user: TheUserType) = { - (
- {localForm(user, false, signupFields)} [surrounding signup-form XML markup lost in extraction: a form wrapping the generated fields with a submit control labelled { S.?("sign.up") }]
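The signup page above, like the other user-management screens, is normally installed through `sitemapMutator` (defined earlier in this trait), which splices the trait's menus into an application SiteMap. A rough sketch of the usual `Boot` wiring, where `User` stands in for an application object mixing in a concrete ProtoUser implementation (illustrative):

```scala
import net.liftweb.http.LiftRules
import net.liftweb.sitemap.{Menu, SiteMap}

class Boot {
  def boot(): Unit = {
    // sitemapMutator appends the user menus, or inserts them wherever an
    // AddUserMenusAfter / AddUserMenusHere / AddUserMenusUnder marker appears.
    val siteMap = SiteMap(Menu.i("Home") / "index")
    LiftRules.setSiteMap(User.sitemapMutator(siteMap))
  }
}
```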
) - } - - - def signupMailBody(user: TheUserType, validationLink: String): Elem = { - ( - - {S.?("sign.up.confirmation")} - - -

{S.?("dear")} {user.getFirstName}, -
-
- {S.?("sign.up.validation.link")} -
{validationLink} -
-
- {S.?("thank.you")} -

- - ) - } - - def signupMailSubject = S.?("sign.up.confirmation") - - /** - * Send validation email to the user. The XHTML version of the mail - * body is generated by calling signupMailBody. You can customize the - * mail sent to users by override generateValidationEmailBodies to - * send non-HTML mail or alternative mail bodies. - */ - def sendValidationEmail(user: TheUserType) { - val resetLink = S.hostAndPath+"/"+validateUserPath.mkString("/")+ - "/"+urlEncode(user.getUniqueId()) - - val email: String = user.getEmail - - val msgXml = signupMailBody(user, resetLink) - - Mailer.sendMail(From(emailFrom),Subject(signupMailSubject), - (To(user.getEmail) :: - generateValidationEmailBodies(user, resetLink) ::: - (bccEmail.toList.map(BCC(_)))) :_* ) - } - - /** - * Generate the mail bodies to send with the valdiation link. - * By default, just an HTML mail body is generated by calling signupMailBody - * but you can send additional or alternative mail by override this method. - */ - protected def generateValidationEmailBodies(user: TheUserType, - resetLink: String): - List[MailBodyType] = List(xmlToMailBodyType(signupMailBody(user, resetLink))) - - protected object signupFunc extends RequestVar[Box[() => NodeSeq]](Empty) { - override lazy val __nameSalt = Helpers.nextFuncName - } - - - /** - * Override this method to do something else after the user signs up - */ - protected def actionsAfterSignup(theUser: TheUserType, func: () => Nothing): Nothing = { - theUser.setValidated(skipEmailValidation).resetUniqueId() - theUser.save - if (!skipEmailValidation) { - sendValidationEmail(theUser) - S.notice(S.?("sign.up.message")) - func() - } else { - logUserIn(theUser, () => { - S.notice(S.?("welcome")) - func() - }) - } - } - - /** - * Override this method to validate the user signup (eg by adding captcha verification) - */ - def validateSignup(user: TheUserType): List[FieldError] = user.validate - - /** - * Create a new instance of the User - */ - protected def createNewUserInstance(): TheUserType - - /** - * If there's any mutation to do to the user on creation for - * signup, override this method and mutate the user. This can - * be used to pull query parameters from the request and assign - * certain fields. . Issue #722 - * - * @param user the user to mutate - * @return the mutated user - */ - protected def mutateUserOnSignup(user: TheUserType): TheUserType = user - - def signup = { - val theUser: TheUserType = mutateUserOnSignup(createNewUserInstance()) - val theName = signUpPath.mkString("") - - def testSignup() { - validateSignup(theUser) match { - case Nil => - actionsAfterSignup(theUser, () => S.redirectTo(homePage)) - - case xs => S.error(xs) ; signupFunc(Full(innerSignup _)) - } - } - - def innerSignup = { - ("type=submit" #> signupSubmitButton(S ? "sign.up", testSignup _)) apply signupXhtml(theUser) - } - - innerSignup - } - - def signupSubmitButton(name: String, func: () => Any = () => {}): NodeSeq = { - standardSubmitButton(name, func) - } - - def emailFrom = "noreply@"+S.hostName - - def bccEmail: Box[String] = Empty - - def testLoggedIn(page: String): Boolean = - ItemList.filter(_.endOfPath == page) match { - case x :: xs if x.loggedIn == loggedIn_? => true - case _ => false - } - - - def validateUser(id: String): NodeSeq = findUserByUniqueId(id) match { - case Full(user) if !user.validated_? 
=> - user.setValidated(true).resetUniqueId().save - logUserIn(user, () => { - S.notice(S.?("account.validated")) - S.redirectTo(homePage) - }) - - case _ => S.error(S.?("invalid.validation.link")); S.redirectTo(homePage) - } - - /** - * How do we prompt the user for the username. By default, - * it's S.?("email.address"), you can can change it to something else - */ - def userNameFieldString: String = S.?("email.address") - - /** - * The string that's generated when the user name is not found. By - * default: S.?("email.address.not.found") - */ - def userNameNotFoundString: String = S.?("email.address.not.found") - - def loginXhtml = { - (
- - -
{S.?("log.in")}
{userNameFieldString}
{S.?("password")}
{S.?("recover.password")}
-
) - } - - /** - * Given an username (probably email address), find the user - */ - protected def findUserByUserName(username: String): Box[TheUserType] - - /** - * Given a unique id, find the user - */ - protected def findUserByUniqueId(id: String): Box[TheUserType] - - /** - * By default, destroy the session on login. - * Change this is some of the session information needs to - * be preserved. - */ - protected def destroySessionOnLogin = true - - /** - * If there's any state that you want to capture pre-login - * to be set post-login (the session is destroyed), - * then set the state here. Just make a function - * that captures the state... that function will be applied - * post login. - */ - protected def capturePreLoginState(): () => Unit = () => {} - - def login = { - if (S.post_?) { - S.param("username"). - flatMap(username => findUserByUserName(username)) match { - case Full(user) if user.validated_? && - user.testPassword(S.param("password")) => { - val preLoginState = capturePreLoginState() - val redir = loginRedirect.get match { - case Full(url) => - loginRedirect(Empty) - url - case _ => - homePage - } - - logUserIn(user, () => { - S.notice(S.?("logged.in")) - - preLoginState() - - S.redirectTo(redir) - }) - } - - case Full(user) if !user.validated_? => - S.error(S.?("account.validation.error")) - - case _ => S.error(S.?("invalid.credentials")) - } - } - - val emailElemId = nextFuncName - S.appendJs(Focus(emailElemId)) - val bind = - ".email [id]" #> emailElemId & - ".email [name]" #> "username" & - ".password [name]" #> "password" & - "type=submit" #> loginSubmitButton(S.?("log.in")) - - bind(loginXhtml) - } - - def loginSubmitButton(name: String, func: () => Any = () => {}): NodeSeq = { - standardSubmitButton(name, func) - } - - def standardSubmitButton(name: String, func: () => Any = () => {}) = { - SHtml.submit(name, func) - } - - def lostPasswordXhtml = { - (
- - - -
{S.?("enter.email")}
{userNameFieldString}
 
-
) - } - - def passwordResetMailBody(user: TheUserType, resetLink: String): Elem = { - ( - - {S.?("reset.password.confirmation")} - - -

{S.?("dear")} {user.getFirstName}, -
-
- {S.?("click.reset.link")} -
{resetLink} -
-
- {S.?("thank.you")} -

- - ) - } - - /** - * Generate the mail bodies to send with the password reset link. - * By default, just an HTML mail body is generated by calling - * passwordResetMailBody - * but you can send additional or alternative mail by overriding this method. - */ - protected def generateResetEmailBodies(user: TheUserType, - resetLink: String): - List[MailBodyType] = - List(xmlToMailBodyType(passwordResetMailBody(user, resetLink))) - - - def passwordResetEmailSubject = S.?("reset.password.request") - - /** - * Send password reset email to the user. The XHTML version of the mail - * body is generated by calling passwordResetMailBody. You can customize the - * mail sent to users by overriding generateResetEmailBodies to - * send non-HTML mail or alternative mail bodies. - */ - def sendPasswordReset(email: String) { - findUserByUserName(email) match { - case Full(user) if user.validated_? => - user.resetUniqueId().save - val resetLink = S.hostAndPath+ - passwordResetPath.mkString("/", "/", "/")+urlEncode(user.getUniqueId()) - - val email: String = user.getEmail - - Mailer.sendMail(From(emailFrom),Subject(passwordResetEmailSubject), - (To(user.getEmail) :: - generateResetEmailBodies(user, resetLink) ::: - (bccEmail.toList.map(BCC(_)))) :_*) - - S.notice(S.?("password.reset.email.sent")) - S.redirectTo(homePage) - - case Full(user) => - sendValidationEmail(user) - S.notice(S.?("account.validation.resent")) - S.redirectTo(homePage) - - case _ => S.error(userNameNotFoundString) - } - } - - def lostPassword = { - val bind = - ".email" #> SHtml.text("", sendPasswordReset _) & - "type=submit" #> lostPasswordSubmitButton(S.?("send.it")) - - bind(lostPasswordXhtml) - } - - def lostPasswordSubmitButton(name: String, func: () => Any = () => {}): NodeSeq = { - standardSubmitButton(name, func) - } - - def passwordResetXhtml = { - (
- - - - -
{S.?("reset.your.password")}
{S.?("enter.your.new.password")}
{S.?("repeat.your.new.password")}
 
-
) - } - - def passwordReset(id: String) = - findUserByUniqueId(id) match { - case Full(user) => - def finishSet() { - user.validate match { - case Nil => S.notice(S.?("password.changed")) - user.resetUniqueId().save - logUserIn(user, () => S.redirectTo(homePage)) - - case xs => S.error(xs) - } - } - - val passwordInput = SHtml.password_*("", - (p: List[String]) => user.setPasswordFromListString(p)) - - - val bind = { - "type=password" #> passwordInput & - "type=submit" #> resetPasswordSubmitButton(S.?("set.password"), finishSet _) - } - - bind(passwordResetXhtml) - case _ => S.error(S.?("password.link.invalid")); S.redirectTo(homePage) - } - - def resetPasswordSubmitButton(name: String, func: () => Any = () => {}): NodeSeq = { - standardSubmitButton(name, func) - } - - def changePasswordXhtml = { - (
- - - - - -
{S.?("change.password")}
{S.?("old.password")}
{S.?("new.password")}
{S.?("repeat.password")}
 
-
) - } - - def changePassword = { - val user = currentUser.openOrThrowException("we can do this because the logged in test has happened") - var oldPassword = "" - var newPassword: List[String] = Nil - - def testAndSet() { - if (!user.testPassword(Full(oldPassword))) S.error(S.?("wrong.old.password")) - else { - user.setPasswordFromListString(newPassword) - user.validate match { - case Nil => user.save; S.notice(S.?("password.changed")); S.redirectTo(homePage) - case xs => S.error(xs) - } - } - } - - val bind = { - // Use the same password input for both new password fields. - val passwordInput = SHtml.password_*("", LFuncHolder(s => newPassword = s)) - - ".old-password" #> SHtml.password("", s => oldPassword = s) & - ".new-password" #> passwordInput & - "type=submit" #> changePasswordSubmitButton(S.?("change"), testAndSet _) - } - - bind(changePasswordXhtml) - } - - def changePasswordSubmitButton(name: String, func: () => Any = () => {}): NodeSeq = { - standardSubmitButton(name, func) - } - - def editXhtml(user: TheUserType) = { - (
- - {localForm(user, true, editFields)} - -
{S.?("edit")}
 
-
) - } - - object editFunc extends RequestVar[Box[() => NodeSeq]](Empty) { - override lazy val __nameSalt = Helpers.nextFuncName - } - - - /** - * If there's any mutation to do to the user on retrieval for - * editing, override this method and mutate the user. This can - * be used to pull query parameters from the request and assign - * certain fields. Issue #722 - * - * @param user the user to mutate - * @return the mutated user - */ - protected def mutateUserOnEdit(user: TheUserType): TheUserType = user - - def edit = { - val theUser: TheUserType = - mutateUserOnEdit(currentUser.openOrThrowException("we know we're logged in")) - - val theName = editPath.mkString("") - - def testEdit() { - theUser.validate match { - case Nil => - theUser.save - S.notice(S.?("profile.updated")) - S.redirectTo(homePage) - - case xs => S.error(xs) ; editFunc(Full(innerEdit _)) - } - } - - def innerEdit = { - ("type=submit" #> editSubmitButton(S.?("save"), testEdit _)) apply editXhtml(theUser) - } - - innerEdit - } - - def editSubmitButton(name: String, func: () => Any = () => {}): NodeSeq = { - standardSubmitButton(name, func) - } - - def logout = { - logoutCurrentUser - S.redirectTo(homePage) - } - - /** - * Given an instance of TheCrudType and FieldPointerType, convert - * that to an actual instance of a BaseField on the instance of TheCrudType - */ - protected def computeFieldFromPointer(instance: TheUserType, pointer: FieldPointerType): Box[BaseField] - - - - protected def localForm(user: TheUserType, ignorePassword: Boolean, fields: List[FieldPointerType]): NodeSeq = { - for { - pointer <- fields - field <- computeFieldFromPointer(user, pointer).toList - if field.show_? && (!ignorePassword || !pointer.isPasswordField_?) - form <- field.toForm.toList - } yield {field.displayName}{form} - } - - protected def wrapIt(in: NodeSeq): NodeSeq = - screenWrap.map(new RuleTransformer(new RewriteRule { - override def transform(n: Node) = n match { - case e: Elem if "bind" == e.label && "lift" == e.prefix => in - case _ => n - } - })) openOr in -} - diff --git a/persistence/proto/src/test/scala/net/liftweb/CrudifySpec.scala b/persistence/proto/src/test/scala/net/liftweb/CrudifySpec.scala deleted file mode 100755 index fde7a3cb39..0000000000 --- a/persistence/proto/src/test/scala/net/liftweb/CrudifySpec.scala +++ /dev/null @@ -1,293 +0,0 @@ -package net.liftweb - -import net.liftweb.common.{Box, Empty, Full} -import net.liftweb.fixtures.RequestContext._ -import net.liftweb.fixtures._ -import net.liftweb.http.{Req, ResponseShortcutException, S} -import org.specs2.matcher.XmlMatchers -import org.specs2.mutable.Specification -import org.specs2.specification.Scope - -import scala.collection.immutable -import scala.xml.{NodeSeq, Text} - -class CrudifySpec extends Specification with XmlMatchers { - "Crudify Trait Specification".title - - class SpecCrudifyWithContext extends SpecCrudify with Scope { - val repo: SpecCrudRepo = SpecCrudType.defaultRepo - val firstItem: SpecCrudType = repo.content(0, 1).head - - def all: NodeSeq = withRequest(Req.nil) { - doCrudAll(this.showAllTemplate()) - } - - def viewItem(item: SpecCrudType = firstItem): NodeSeq = withRequest(Req.nil) { - displayRecord(item)(viewTemplate()) - } - - def editItem(item: SpecCrudType = firstItem): NodeSeq = withSession(Req.nil) { - crudDoForm(item, "EditMsg")(editTemplate()) - } - - def deleteItem(item: SpecCrudType = firstItem): NodeSeq = withSession(Req.nil) { - crudyDelete(item)(deleteTemplate()) - } - } - - "doCrudAll method `showAllTemplate`" should { - - 
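For orientation before the individual examples: every spec below builds a `SpecCrudifyWithContext`, renders one of Crudify's templates inside a simulated request (stateless `withRequest` for pure rendering, `withSession` when form functions must be registered), and asserts on the returned NodeSeq. An illustrative example in the same style — not one of the original examples — looks like this:

```
// Illustrative only; mirrors the shape of the examples that follow.
"render the listing for a simulated ?first=0 request" in new SpecCrudifyWithContext {
  withRequest(params(Map("first" -> "0"))) {
    val html = doCrudAll(this.showAllTemplate())
    html \\ "tbody" \\ "tr" must have size rowsPerPage
  }
}
```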
"render proper rows count" in new SpecCrudifyWithContext { - all \\ "tbody" \\ "tr" must have size rowsPerPage - } - - "honor rowsPerPage settings" in new SpecCrudifyWithContext { - override def rowsPerPage = 1 - - all \\ "tbody" \\ "tr" must have size 1 - } - - "use `first` params for pagination" in new SpecCrudifyWithContext { - withRequest(params(Map("first" -> "10"))) { - val result = doCrudAll(this.showAllTemplate()) - val rowData = (result \\ "tbody" \\ "tr" \\ "td").take(fieldsForDisplay.size).map(_.text) - val repoData = repo.content(10, 1).flatMap(i => List(i.id, i.value)) - rowData === repoData - } - } - - "render proper headers content" in new SpecCrudifyWithContext { - val th: NodeSeq = all \\ "thead" \\ "th" - val renderedHeaders = th.map(_.text).filterNot(_ == " ") - renderedHeaders must contain(exactly(fieldsForDisplay.map(_.fieldName): _*)) - } - - "render proper columns content" in new SpecCrudifyWithContext { - val tr: NodeSeq = all \\ "tbody" \\ "tr" - val renderedValues: List[List[String]] = tr.map(row => { - (row \ "td") - .filter(td => (td \ "@class").nonEmpty) - .map(_.text).toList - }).toList - val expectedValues: List[List[String]] = repo.content(0, rowsPerPage).map(i => List(i.id, i.value)) - renderedValues === expectedValues - } - - "render only next link on first page" in new SpecCrudifyWithContext { - withRequest(Req.nil) { - val html = doCrudAll(this.showAllTemplate()) - val nextLinkContainer = (html \\ "td").find(td => (td \ "@class").text == "next") - val prevLinkContainer = (html \\ "td").find(td => (td \ "@class").text == "previous") - nextLinkContainer must beSome[NodeSeq] - val nextLink = nextLinkContainer.get - nextLink must \\("a", "href") - nextLink must \\("a").textIs(nextWord) - prevLinkContainer must beNone - } - } - - "render both naviagation links inbeetwen" in new SpecCrudifyWithContext { - withRequest(params(Map("first" -> s"$rowsPerPage"))) { - val html = doCrudAll(this.showAllTemplate()) - val nextLinkContainer = (html \\ "td").find(td => (td \ "@class").text == "next") - val prevLinkContainer = (html \\ "td").find(td => (td \ "@class").text == "previous") - prevLinkContainer must beSome[NodeSeq] - nextLinkContainer must beSome[NodeSeq] - - val prevLink = prevLinkContainer.get - prevLink must \\("a", "href") - prevLink must \\("a").textIs(previousWord) - val nextLink = nextLinkContainer.get - nextLink must \\("a", "href") - nextLink must \\("a").textIs(nextWord) - } - } - } - - - "displayRecord on `viewTemplate`" should { - - "render row for each field" in new SpecCrudifyWithContext { - viewItem() \\ "table" \\ "tr" must have size fieldsForDisplay.size - } - - "render correct field names" in new SpecCrudifyWithContext { - val filedNames: Seq[String] = (viewItem() \\ "table" \\ "tr" \\ "td"). - filter(e => (e \ "@class").text == "name") - .map(_.text) - filedNames must contain(exactly(fieldsForDisplay.map(_.fieldName): _*)) - } - - "render correct field values" in new SpecCrudifyWithContext { - val filedNames: Seq[String] = (viewItem() \\ "table" \\ "tr" \\ "td"). 
- filter(e => (e \ "@class").text == "value") - .map(_.text) - filedNames must contain(exactly(firstItem.id, firstItem.value)) - } - } - - trait FormHelpers { - this: SpecCrudifyWithContext => - def buildEditForm(): NodeSeq = { - crudDoForm(firstItem, "Edit Notice")(editTemplate()) - } - - def setId(form: NodeSeq, newId: String): Unit = { - val setIdFunc: String = ((form \\ "input").find(i => (i \\ "@id").text == "id").head \\ "@name").text - S.functionMap(setIdFunc).asInstanceOf[Any => Any].apply(List(newId)) - } - - def setValue(form: NodeSeq, newValue: String): Unit = { - val setValueFunc: String = ((form \\ "input").find(i => (i \\ "@id").text == "value").head \\ "@name").text - S.functionMap(setValueFunc).asInstanceOf[Any => Any].apply(List(newValue)) - } - - def submitForm(form: NodeSeq, expectRedirect: Boolean = true): Unit = { - val submitFunc: String = ((form \\ "button").find(i => (i \\ "@type").text == "submit").head \\ "@name").text - val lazySubmit = () => S.functionMap(submitFunc).asInstanceOf[Any => Any].apply(List("")) - if (expectRedirect) { - lazySubmit() must throwA[ResponseShortcutException] - } else { - lazySubmit() - } - } - } - - "crudDoForm on `editTemplate`" should { - - "render row for each field" in new SpecCrudifyWithContext { - val trElements = (editItem() \\ "table" \\ "tr") - .filter(tr => (tr \ "@class").text == "field") - - trElements must haveSize(fieldsForDisplay.length) - } - - "render label for each field" in new SpecCrudifyWithContext { - val labels = (editItem() \\ "table" \\ "tr" \\ "td" \\ "label") - .map(_.text) - - labels must contain(exactly(fieldsForDisplay.map(_.fieldName): _*)) - } - - "render inputs for each field" in new SpecCrudifyWithContext { - val values = (editItem() \\ "table" \\ "tr" \\ "td" \\ "input") - .map(i => (i \ "@value").text) - - values must contain(exactly(firstItem.id, firstItem.value)) - } - - "render save button" in new SpecCrudifyWithContext { - val button = editItem() \\ "table" \\ "tr" \\ "td" \\ "button" - button must haveSize(1) - button must \\("button", "type" -> "submit") - } - - "render error message for each filed" in new SpecCrudifyWithContext with FormHelpers { - fieldsForDisplay.map { fp => - withSession(Req.nil) { - S.error(fp.fieldName, s"Dummy error for ${fp.fieldName}") - val form = buildEditForm() - val filedRow = (form \\ "tr").filter(tr => { - (tr \\ "td" \\ "input" \\ "@id").text == fp.fieldName - }) - filedRow must \\("span").textIs(s"Dummy error for ${fp.fieldName}") - } - } - } - - "produce notice on update" in new SpecCrudifyWithContext with FormHelpers { - withSession(Req.nil) { - val form = buildEditForm() - submitForm(form) - - val notices: immutable.Seq[(NodeSeq, Box[String])] = S.notices - notices.map(_._1).map(_.text) must contain(exactly("Edit Notice")) - } - } - - "validate input" in new SpecCrudifyWithContext with FormHelpers { - withSession(Req.nil) { - val form = buildEditForm() - setId(form, "INVALID") - submitForm(form, expectRedirect = false) - S.errors == List((Text("Id filed must be numeric"), Full("id"))) - } - } - - "allow to save modified content" in new SpecCrudifyWithContext with FormHelpers { - withSession(Req.nil) { - val form = buildEditForm() - val oldId = firstItem.id - val newId = "300" - val newValue = "UPDATED LINE 300" - - setId(form, newId) - setValue(form, newValue) - submitForm(form) - - repo.find(oldId) === Empty - val updated = repo.find(newId) - - updated.isDefined must beTrue - val item = updated.openOrThrowException("Guarded before") - item.id === newId - 
item.value === newValue - } - } - } - - "crudyDelete on `deleteTemplate`" should { - - "render row for each field" in new SpecCrudifyWithContext { - val trElements = (deleteItem() \\ "table" \\ "tr") - .filter(tr => (tr \ "@class").text == "field") - - trElements must haveSize(fieldsForDisplay.length) - } - - "render label for each field" in new SpecCrudifyWithContext { - val labels = (deleteItem() \\ "table" \\ "tr" \\ "td" \\ "label") - .map(_.text) - - labels must contain(exactly(fieldsForDisplay.map(_.fieldName): _*)) - } - - "render values for each field" in new SpecCrudifyWithContext { - val values = (deleteItem() \\ "table" \\ "tr" \\ "td") - .filter(td => (td \\ "@class").text == "value") - .map(_.text) - - values must contain(exactly(firstItem.id, firstItem.value)) - } - - "render delete button" in new SpecCrudifyWithContext { - withSession(Req.nil) { - val form = crudyDelete(firstItem)(deleteTemplate()) - val button = form - button must haveSize(1) - button must \\("button", "type" -> "submit") - button must \\("button").textIs(deleteButton) - } - } - - "produce notice on delete" in new SpecCrudifyWithContext with FormHelpers { - withSession(Req.nil) { - val form = crudyDelete(firstItem)(deleteTemplate()) - submitForm(form) - - val notices: immutable.Seq[(NodeSeq, Box[String])] = S.notices - notices.map(_._1).map(_.text) must contain(exactly(S ? "Deleted")) - } - } - - "remove item from repo on submit" in new SpecCrudifyWithContext with FormHelpers { - withSession(Req.nil) { - val form = crudyDelete(firstItem)(deleteTemplate()) - submitForm(form) - - repo.find(firstItem.id) === Empty - } - } - - } -} diff --git a/persistence/proto/src/test/scala/net/liftweb/fixtures/Fixtures.scala b/persistence/proto/src/test/scala/net/liftweb/fixtures/Fixtures.scala deleted file mode 100755 index 7df1d99039..0000000000 --- a/persistence/proto/src/test/scala/net/liftweb/fixtures/Fixtures.scala +++ /dev/null @@ -1,254 +0,0 @@ -package net.liftweb -package fixtures - -import net.liftweb.common.{Box, Empty, Full} -import net.liftweb.http.S.SFuncHolder -import net.liftweb.http.{LiftSession, Req, RewriteRequest, RewriteResponse, S} -import net.liftweb.proto.Crudify -import net.liftweb.util.{BaseField, FieldError, FieldIdentifier, LiftValue} - -import scala.collection.mutable -import scala.xml._ - -/** - * Helper type represents content for [[net.liftweb.proto.Crudify]] - * - * @param id fake data `id` field - * @param value fake data `value` field - */ -case class SpecCrudType(var id: String, var value: String) { - val _id = id - val _value = value -} - -/** Helper object for [[net.liftweb.proto.Crudify]] trait testing */ -object SpecCrudType { - /** [[net.liftweb.proto.Crudify.FieldPointerType]] */ - type FieldType = SpecField[SpecCrudType] - - /** Default initial [[SpecCrudRepo]] content */ - def DEFAULT_REPO_CONTENT = (0 until 100).map(n => n.toString -> new SpecCrudType(n.toString, s"Line number $n")) - - /** Default fields for [[SpecCrudType]] */ - val FIELDS: List[FieldType] = List( - SpecField("id", _.id, (h, v) => h.id = v), - SpecField("value", _.value, (h, v) => h.value = v)) - - /** Build [[SpecCrudRepo]] with default content */ - def defaultRepo: SpecCrudRepo = new SpecCrudRepo(DEFAULT_REPO_CONTENT: _*) -} - -/** - * Helper type witch should be used as [[net.liftweb.proto.Crudify.FieldPointerType]] - * - * @param fieldName fake date field na,e - * @param getter return `fieldName` value as [[String]] from [[T]] instance - * @param setter convert given [[String]] and it as set `fieldName` 
value in [[T]] instance - * @tparam T target fake data holder type ([[SpecCrudType]] for now) - */ -case class SpecField[T](fieldName: String, getter: T => String, setter: (T, String) => Unit) extends FieldIdentifier { - - /** - * Field name as HTML - * - * @return Element with represents field name in HTML - */ - def displayHtml: NodeSeq = Text(fieldName) - - override def uniqueFieldId: Box[String] = Full(fieldName) -} - - -/** - * Helper class for creating fake data repository for using as [[net.liftweb.proto.Crudify.CrudBridge]] and for others - * methods needed by [[net.liftweb.proto.Crudify]] implementation - * - * @param initialContent initial content for repor - */ -class SpecCrudRepo(initialContent: (String, SpecCrudType)*) { - private val dict: mutable.Map[String, SpecCrudType] = mutable.LinkedHashMap(initialContent: _*) - - /** Return items count in repo */ - def size: Int = dict.size - - /** - * Return repo content part restricted by `start` and `count` parameters - * - * @param start first returned item index - * @param count maximal returned items count - * @return Repo items starting form `start` and truncated to `count` size - */ - def content(start: Long, count: Int): List[SpecCrudType] = { - val startIndex = start.toInt - dict.values.slice(startIndex, startIndex + count).toList - } - - /** Find content in repo by [[String]] `id` param */ - def find(id: String): Box[SpecCrudType] = { - dict.get(id) - } - - /** Delete content from repo */ - def delete_!(target: SpecCrudType): Boolean = { - dict.remove(target.id).isDefined - } - - /** Save new instance to repo or replace previous value inside repo if present */ - def save(target: SpecCrudType): Boolean = { - dict.remove(target._id) //remove previous id if present - val newValue = SpecCrudType(target.id, target.value) - dict += newValue.id -> newValue - true - } - - /** Validate instance */ - def validate(target: SpecCrudType): List[FieldError] = { - val numbersOnly = "(\\d+)".r - target.id match { - case numbersOnly(_) => - Nil - case _ => - List( - FieldError(SpecCrudType.FIELDS.head, "Id filed must be numeric") - ) - } - } - - - /** Return [[String]] representation of instance primary field */ - def primaryKeyFieldAsString(target: SpecCrudType): String = target.id -} - - -/** Spec class implementation of [[net.liftweb.proto.Crudify]] trait */ -trait SpecCrudify extends Crudify { - - def repo: SpecCrudRepo - - override type TheCrudType = SpecCrudType - - override type FieldPointerType = SpecCrudType.FieldType - - - override def calcPrefix: List[String] = List("Prefix") - - override def create: SpecCrudType = new SpecCrudType("", "") - - override def fieldsForDisplay: List[FieldPointerType] = SpecCrudType.FIELDS - - override def findForList(start: Long, count: Int): List[TheCrudType] = repo.content(start, count) - - override def findForParam(in: String): Box[TheCrudType] = repo.find(in) - - - override protected implicit def buildBridge(from: TheCrudType): CrudBridge = new CrudBridge { - - override def delete_! 
: Boolean = repo.delete_!(from) - - - override def save: Boolean = repo.save(from) - - override def validate: List[FieldError] = repo.validate(from) - - - override def primaryKeyFieldAsString: String = repo.primaryKeyFieldAsString(from) - } - - override protected implicit def buildFieldBridge(from: FieldPointerType): FieldPointerBridge = new FieldPointerBridge { - override def displayHtml: NodeSeq = from.displayHtml - } - - override protected def computeFieldFromPointer(instance: TheCrudType, pointer: FieldPointerType): Box[BaseField] = { - val result: BaseField = new BaseField with LiftValue[String] { - override def setFilter: List[String => String] = Nil - - override def validations: List[String => List[FieldError]] = Nil - - override def validate: List[FieldError] = Nil - - override def toForm: Box[NodeSeq] = { - Full( - S.fmapFunc(SFuncHolder(set)) { - funcName => - - } % idAttribute() - ) - } - - override def displayNameHtml: Box[NodeSeq] = { - Full( % idAttribute("for")) - } - - override def name: String = pointer.fieldName - - override def set(in: String): String = { - pointer.setter(instance, in) - in - } - - override def get: String = pointer.getter(instance) - - override def fieldId: Option[NodeSeq] = Some(Text(displayName)) - - override def uniqueFieldId: Box[String] = Some(pointer.fieldName) - - def idAttribute(name: String = "id"): MetaData = fieldId match { - case Some(nodeSeq) => Attribute.apply(name, nodeSeq, Null) - case _ => Null - } - } - Full(result) - } -} - - -/** Helper object for calling method inside context of `Lift` request */ -object RequestContext { - val testSession = new LiftSession("/context-path", "underlying id", Empty) - - /** - * Produce HTTP request field with params to build Lift` context - * - * @param params HTTP request params - * @return Test HTTP request filled with params` - */ - def params(params: Map[String, String]): Req = { - Req(Req.nil, List({ - case r: RewriteRequest => - RewriteResponse(r.path, params, stopRewriting = true) - })) - } - - /** - * Call `function` inside `Lift` context produced from `request` - * Make functions like `S ?` or `S.param` works during call of `function` - * - * @param request HTTP request filled with needed params - * @param function target function to execute in context of given `Lift` request - * @tparam T `function` return type - * @return result of `function` execution - */ - def withRequest[T](request: Req)(function: => T): T = { - S.statelessInit(request)({ - function - }) - } - - /** - * Call `function` inside session based `Lift` context produced from `request` - * Make functions like `S.fmapFunc` or `SHtml.onSubmitUnit` works during call of `function` - * - * @param request HTTP request filled with needed params - * @param function target function to execute in context of given `Lift` request - * @tparam T `function` return type - * @return result of `function` execution - */ - def withSession[T](request: Req)(function: => T): T = { - S.init(Some(request), testSession)({ - function - }) - } -} - - - diff --git a/persistence/record/src/main/scala/net/liftweb/record/Field.scala b/persistence/record/src/main/scala/net/liftweb/record/Field.scala deleted file mode 100644 index c561c1664d..0000000000 --- a/persistence/record/src/main/scala/net/liftweb/record/Field.scala +++ /dev/null @@ -1,509 +0,0 @@ -/* - * Copyright 2007-20190 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package record - -import net.liftweb.common._ -import net.liftweb.http.S -import net.liftweb.http.js.{JsExp} -import net.liftweb.json.JsonAST.{JNothing, JNull, JString, JValue} -import net.liftweb.util._ -import scala.reflect.Manifest -import scala.xml._ -import http.SHtml - -/** Base trait of record fields, with functionality common to any type of field owned by any type of record */ -trait BaseField extends FieldIdentifier with util.BaseField { - private[record] var fieldName: String = _ - private[record] var dirty = false - - protected def dirty_?(b: Boolean) = dirty = b - - def resetDirty { - if (safe_?) dirty_?(false) - } - - def dirty_? : Boolean = dirty - - /** - * Should the dirty flag always be set when setBox is called - */ - def forceDirty_? : Boolean = false - - /** - * Should the field be ignored by the OR Mapper? - */ - def ignoreField_? = false - - /** - * Is the value of this field optional (e.g. NULLable)? - */ - def optional_? = false - - /** - * Can the value of this field be read without obscuring the result? - */ - def canRead_? : Boolean = safe_? || checkCanRead_? - - /** - * If the owner is not in "safe" mode, check the current environment to see if - * the field can be read - */ - def checkCanRead_? = true - - /** - * Can the value of this field be written? - */ - def canWrite_? : Boolean = safe_? || checkCanWrite_? - - /** - * If the owner is not in "safe" mode, check the current environment to see if - * the field can be written - */ - def checkCanWrite_? = true - - /** - * Convert the field value to an XHTML representation - */ - def toXHtml: NodeSeq = Text(toString) - - /** - * Generate a form control for the field - */ - def toForm: Box[NodeSeq] - - /** - * Returns the field's value as a valid JavaScript expression - */ - def asJs: JsExp - - /** Encode the field value into a JValue */ - def asJValue: JValue - - /** - * What form elements are we going to add to this field? - */ - def formElemAttrs: Seq[SHtml.ElemAttr] = Nil - - /** - * Set the name of this field - */ - private[record] final def setName_!(newName : String) : String = { - if (safe_?) fieldName = newName - fieldName - } - - /** - * The error message used when the field value could not be set - */ - def noValueErrorMessage : String = "Value cannot be changed" - - /** - * The error message used when the field value must be set - */ - def notOptionalErrorMessage : String = "Value required" - - /** - * Form field's type. - * Defaults to 'text', but you may want to change it to other HTML5 values. - */ - def formInputType = "text" - - def tabIndex: Int = 1 - - override def uniqueFieldId: Box[String] = Full(name+"_id") - - def label: NodeSeq = uniqueFieldId match { - case Full(id) => - case _ => NodeSeq.Empty - } - - def asString: String - - def safe_? 
: Boolean = true // let owned fields make it unsafe some times -} - -/** Refined trait for fields owned by a particular record type */ -trait OwnedField[OwnerType <: Record[OwnerType]] extends BaseField { - /** - * Return the owner of this field - */ - def owner: OwnerType - - /** - * The text name of this field - */ - def name: String = RecordRules.fieldName.vend.apply(owner.meta.connectionIdentifier, fieldName) - - /** - * The display name of this field (e.g., "First Name") - */ - override def displayName: String = RecordRules.displayName.vend.apply(owner, S.locale, name) - - /** - * Are we in "safe" mode (i.e., the value of the field can be read or written without any security checks.) - */ - override final def safe_? : Boolean = owner.safe_? -} - -/** Refined trait for fields holding a particular value type */ -trait TypedField[ThisType] extends BaseField { - - /* - * Unless overridden, MyType is equal to ThisType. Available for - * backwards compatibility - */ - type MyType = ThisType // For backwards compatibility - - type ValidationFunction = ValueType => List[FieldError] - - private[record] var data: Box[MyType] = Empty - private[record] var needsDefault: Boolean = true - - /** - * Helper for implementing asJValue for a conversion to an encoded JString - * - * @param encode function to transform the field value into a String - */ - protected def asJString(encode: MyType => String): JValue = - valueBox.map(v => JString(encode(v))) openOr (JNothing: JValue) - - /** Decode the JValue and set the field to the decoded value. Returns Empty or Failure if the value could not be set */ - def setFromJValue(jvalue: JValue): Box[MyType] - - /** - * Helper for implementing setFromJValue for a conversion from an encoded JString - * - * @param decode function to try and transform a String into a field value - */ - protected def setFromJString(jvalue: JValue)(decode: String => Box[MyType]): Box[MyType] = jvalue match { - case JNothing|JNull if optional_? => setBox(Empty) - case JString(s) => setBox(decode(s)) - case other => setBox(FieldHelpers.expectedA("JString", other)) - } - - def validations: List[ValidationFunction] = Nil - - /** Validate this field's setting, returning any errors found */ - def validate: List[FieldError] = runValidation(valueBox) - - /** Helper function that does validation of a value by using the validators specified for the field */ - protected def runValidation(in: Box[MyType]): List[FieldError] = in match { - case Full(_) => validations.flatMap(_(toValueType(in))).distinct - case Empty => Nil - case Failure(msg, _, _) => Text(msg) - } - - protected implicit def boxNodeToFieldError(in: Box[Node]): List[FieldError] = - in match { - case Full(node) => List(FieldError(this, node)) - case _ => Nil - } - - protected implicit def nodeToFieldError(node: Node): List[FieldError] = - List(FieldError(this, node)) - - protected implicit def boxNodeFuncToFieldError(in: Box[MyType] => Box[Node]): - Box[MyType] => List[FieldError] = - param => boxNodeToFieldError(in(param)) - - /** The default value of the field when no value is set. Must return a Full Box unless optional_? is true */ - def defaultValueBox: Box[MyType] - - /** - * Convert the field to a String... usually of the form "displayName=value" - */ - def asString = displayName + "=" + data - - def obscure(in: MyType): Box[MyType] = Failure("value obscured") - - def setBox(in: Box[MyType]): Box[MyType] = synchronized { - needsDefault = false - val oldValue = data - data = in match { - case _ if !canWrite_? 
=> Failure(noValueErrorMessage) - case Full(_) => set_!(in) - case _ if optional_? => set_!(in) - case (f: Failure) => set_!(f) // preserve failures set in - case _ => Failure(notOptionalErrorMessage) - } - if (forceDirty_?) { - dirty_?(true) - } - else if (!dirty_?) { - val same = (oldValue, data) match { - case (Full(ov), Full(nv)) => ov == nv - case (a, b) => a == b - } - dirty_?(!same) - } - data - } - - // Helper methods for things to easily use mixins and so on that use ValueType instead of Box[MyType], regardless of the optional-ness of the field - protected def toValueType(in: Box[MyType]): ValueType - - protected def toBoxMyType(in: ValueType): Box[MyType] - - protected def set_!(in: Box[MyType]): Box[MyType] = runFilters(in, setFilterBox) - - def setFilter: List[ValueType => ValueType] = Nil - - /** OptionalTypedField and MandatoryTypedField implement this to do the appropriate lifting of Box[MyType] to ValueType */ - protected def liftSetFilterToBox(in: Box[MyType]): Box[MyType] - - /** - * A list of functions that transform the value before it is set. The transformations - * are also applied before the value is used in a query. Typical applications - * of this are trimming and/or toLowerCase-ing strings - */ - protected def setFilterBox: List[Box[MyType] => Box[MyType]] = liftSetFilterToBox _ :: Nil - - def runFilters(in: Box[MyType], filter: List[Box[MyType] => Box[MyType]]): Box[MyType] = filter match { - case Nil => in - case x :: xs => runFilters(x(in), xs) - } - - /** - * Set the value of the field from anything. - * Implementations of this method should accept at least the following (pattern => valueBox) - * - value: MyType => setBox(Full(value)) - * - Some(value: MyType) => setBox(Full(value)) - * - Full(value: MyType) => setBox(Full(value)) - * - (value: MyType)::_ => setBox(Full(value)) - * - s: String => setFromString(s) - * - Some(s: String) => setFromString(s) - * - Full(s: String) => setFromString(s) - * - null|None|Empty => setBox(defaultValueBox) - * - f: Failure => setBox(f) - * And usually convert the input to a string and uses setFromString as a last resort. - * - * Note that setFromAny should _always_ call setBox, even if the conversion fails. This is so that validation - * properly notes the error. - * - * The method genericSetFromAny implements this guideline. 
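To make the accepted input shapes listed above concrete, here is a hedged usage sketch. `UserSketch` and its `firstName` field are invented purely for illustration (they are not part of this file); the calls follow the pattern list that genericSetFromAny implements.

```
import net.liftweb.common.{Empty, Full}
import net.liftweb.record.{MetaRecord, Record}
import net.liftweb.record.field.StringField

// Hypothetical record used only for this sketch.
class UserSketch extends Record[UserSketch] {
  def meta = UserSketch
  object firstName extends StringField(this, 64)
}
object UserSketch extends UserSketch with MetaRecord[UserSketch]

object SetFromAnySketch {
  val user = UserSketch.createRecord
  user.firstName.setFromAny("Archer")        // a raw value of MyType
  user.firstName.setFromAny(Some("Archer"))  // Option[MyType]
  user.firstName.setFromAny(Full("Archer"))  // Box[MyType]
  user.firstName.setFromAny(List("Archer"))  // head of a List
  user.firstName.setFromAny(Empty)           // null | None | Empty => defaultValueBox
}
```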
- */ - def setFromAny(in: Any): Box[MyType] - - /** Generic implementation of setFromAny that implements exactly what the doc for setFromAny specifies, using a Manifest to check types */ - protected final def genericSetFromAny(in: Any)(implicit m: Manifest[MyType]): Box[MyType] = in match { - case value if m.runtimeClass.isInstance(value) => setBox(Full(value.asInstanceOf[MyType])) - case Some(value) if m.runtimeClass.isInstance(value) => setBox(Full(value.asInstanceOf[MyType])) - case Full(value) if m.runtimeClass.isInstance(value) => setBox(Full(value.asInstanceOf[MyType])) - case (value)::_ if m.runtimeClass.isInstance(value) => setBox(Full(value.asInstanceOf[MyType])) - case (value: String) => setFromString(value) - case Some(value: String) => setFromString(value) - case Full(value: String) => setFromString(value) - case (value: String)::_ => setFromString(value) - case null|None|Empty => setBox(defaultValueBox) - case (failure: Failure) => setBox(failure) - case Some(other) => setFromString(String.valueOf(other)) - case Full(other) => setFromString(String.valueOf(other)) - case other => setFromString(String.valueOf(other)) - } - - /** - * Set the value of the field using some kind of type-specific conversion from a String. - * By convention, if the field is optional_?, then the empty string should be treated as no-value (Empty). - * Note that setFromString should _always_ call setBox, even if the conversion fails. This is so that validation - * properly notes the error. - * - * @return Full(convertedValue) if the conversion succeeds (the field value will be set by side-effect) - * Empty or Failure if the conversion does not succeed - */ - def setFromString(s: String): Box[MyType] - - def valueBox: Box[MyType] = synchronized { - if (needsDefault) { - needsDefault = false - data = defaultValueBox - } - - if (canRead_?) data - else data.flatMap(obscure) - } - - /** Clear the value of this field */ - def clear: Unit = optional_? match { - case true => setBox(Empty) - case false => setBox(defaultValueBox) - } - - override def equals(other: Any): Boolean = { - other match { - case that: TypedField[ThisType] => - that.name == this.name && that.valueBox == this.valueBox - case _ => - false - } - } -} - -trait MandatoryTypedField[ThisType] extends TypedField[ThisType] with Product1[ThisType] { - - /** - * ValueType represents the type that users will work with. For MandatoryTypeField, this is - * equal to ThisType. - */ - type ValueType = ThisType // For util.BaseField - - //TODO: fullfil the contract of Product1[ThisType] - def canEqual(a:Any) = false - - def _1 = value - - override def optional_? = false - - /** - * Set the value of the field to the given value. - * Note: Because setting a field can fail (return non-Full), this method will - * return defaultValue if the field could not be set. - */ - def set(in: MyType): MyType = setBox(Full(in)) openOr defaultValue - - def toValueType(in: Box[MyType]) = in openOr defaultValue - def toBoxMyType(in: ValueType) = Full(in) - - def value: MyType = valueBox openOr defaultValue - - def get: MyType = value - - protected def liftSetFilterToBox(in: Box[MyType]): Box[MyType] = in.map(v => setFilter.foldLeft(v)((prev, f) => f(prev))) - - /** - * The default value of the field when a field has no value set and is optional, or a method that must return a value (e.g. value) is used - */ - def defaultValue: MyType - - def defaultValueBox: Box[MyType] = if (optional_?) 
Empty else Full(defaultValue) - - override def toString = valueBox match { - case Full(null)|null => "null" - case Full(v) => v.toString - case _ => defaultValueBox.map(v => if (v != null) v.toString else "null") openOr "" - } -} - -trait OptionalTypedField[ThisType] extends TypedField[ThisType] with Product1[Box[ThisType]] { - - /** - * ValueType represents the type that users will work with. For OptionalTypedField, this is - * equal to Option[ThisType]. - */ - type ValueType = Option[ThisType] // For util.BaseField - - //TODO: fullfil the contract of Product1[ThisType] - def canEqual(a:Any) = false - - def _1 = value - - final override def optional_? = true - - /** - * Set the value of the field to the given value. - * Note: Because setting a field can fail (return non-Full), this method will - * return defaultValueBox if the field could not be set. - */ - def set(in: Option[MyType]): Option[MyType] = setBox(in) or defaultValueBox - - def toValueType(in: Box[MyType]) = in - - def toBoxMyType(in: ValueType) = in - - def value: Option[MyType] = valueBox - - def get: Option[MyType] = value - - protected def liftSetFilterToBox(in: Box[MyType]): Box[MyType] = setFilter.foldLeft(in){ (prev, f) => - prev match { - case fail: Failure => fail //stop on failure, otherwise some filters will clober it to Empty - case other => f(other) - } - } - - def defaultValueBox: Box[MyType] = Empty - - override def toString = valueBox match { - case Full(null)|null => "null" - case Full(v) => v.toString - case _ => defaultValueBox.map(v => if (v != null) v.toString else "null") openOr "" - } - -} - -/** - * A simple field that can store and retrieve a value of a given type - */ -trait Field[ThisType, OwnerType <: Record[OwnerType]] extends OwnedField[OwnerType] with TypedField[ThisType] { - - def apply(in: MyType): OwnerType = apply(Full(in)) - - def apply(in: Box[MyType]): OwnerType = if (owner.meta.mutable_?) { - this.setBox(in) - owner - } else { - owner.meta.createWithMutableField(owner, this, in) - } -} - -/** - * Mix in to a field to change its form display to be formatted with the label aside. - * - * E.g. - *
- *
- * { control } - *
- */ -trait DisplayWithLabel[OwnerType <: Record[OwnerType]] extends OwnedField[OwnerType] { - override abstract def toForm: Box[NodeSeq] = - for (id <- uniqueFieldId; control <- super.toForm) - yield -
-
- { control } - -
-} - -trait KeyField[MyType, OwnerType <: Record[OwnerType] with KeyedRecord[OwnerType, MyType]] extends Field[MyType, OwnerType] { - def ===(other: KeyField[MyType, OwnerType]): Boolean = this.valueBox == other.valueBox -} - - -object FieldHelpers { - def expectedA(what: String, notA: AnyRef): Failure = Failure("Expected a " + what + ", not a " + (if (notA == null) "null" else notA.getClass.getName)) -} - - -trait LifecycleCallbacks { - this: BaseField => - - def beforeValidation {} - def afterValidation {} - - def beforeSave {} - def beforeCreate {} - def beforeUpdate {} - - def afterSave {} - def afterCreate {} - def afterUpdate {} - - def beforeDelete {} - def afterDelete {} -} diff --git a/persistence/record/src/main/scala/net/liftweb/record/MetaRecord.scala b/persistence/record/src/main/scala/net/liftweb/record/MetaRecord.scala deleted file mode 100644 index d1f9c16b50..0000000000 --- a/persistence/record/src/main/scala/net/liftweb/record/MetaRecord.scala +++ /dev/null @@ -1,474 +0,0 @@ -/* - * Copyright 2007-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package record - -import scala.language.existentials - -import java.lang.reflect.Modifier -import net.liftweb._ -import util._ -import common._ -import scala.collection.mutable.{ListBuffer} -import scala.xml._ -import net.liftweb.http.js.{JsExp, JE, JsObj} -import net.liftweb.http.{SHtml, Req, LiftResponse, LiftRules} -import net.liftweb.json._ -import net.liftweb.record.FieldHelpers.expectedA -import java.lang.reflect.Method -import field._ -import Box._ -import JE._ -import Helpers._ - -/** - * Holds meta information and operations on a record - */ -trait MetaRecord[BaseRecord <: Record[BaseRecord]] { - self: BaseRecord => - - private var fieldList: List[FieldHolder] = Nil - private var fieldMap: Map[String, FieldHolder] = Map.empty - - private var lifecycleCallbacks: List[(String, Method)] = Nil - - def connectionIdentifier: ConnectionIdentifier = DefaultConnectionIdentifier - - /** - * Set this to use your own form template when rendering a Record to a form. - * - * This template is any given XHtml that contains three nodes acting as placeholders such as: - * - *
-   *
-   * <lift:field_label name="firstName"/> - the label for firstName field will be rendered here
-   * <lift:field name="firstName"/> - the firstName field will be rendered here (typically an input field)
-   * <lift:field_msg name="firstName"/> - the  will be rendered here having the id given by
-   *                                             uniqueFieldId of the firstName field.
-   *
-   *
-   * Example.
-   *
-   * Having:
-   *
-   * class MyRecord extends Record[MyRecord] {
-   *
-   * 	def meta = MyRecordMeta
-   *
-   * 	object firstName extends StringField(this, "John")
-   *
-   * }
-   *
-   * object MyRecordMeta extends MyRecord with MetaRecord[MyRecord] {
-   *  override def mutable_? = false
-   * }
-   *
-   * ...
-   *
-   * val rec = MyRecordMeta.createRecord.firstName("McLoud")
-   *
-   * val template =
-   * <div>
-   * 	<div>
-   * 		<div><lift:field_label name="firstName"/></div>
-   * 		<div><lift:field name="firstName"/></div>
-   * 		<div><lift:field_msg name="firstName"/></div>
-   * 	</div>
-   * </div>
-   *
-   * MyRecordMeta.formTemplate = Full(template)
-   * rec.toForm((r:MyRecord) => println(r));
-   *
-   * 
- * - */ - var formTemplate: Box[NodeSeq] = Empty - - protected val rootClass = this.getClass.getSuperclass - - private def isLifecycle(m: Method) = classOf[LifecycleCallbacks].isAssignableFrom(m.getReturnType) - - private def isField(m: Method) = { - val ret = !m.isSynthetic && classOf[Field[_, _]].isAssignableFrom(m.getReturnType) - ret - } - - def introspect(rec: BaseRecord, methods: Array[Method])(f: (Method, Field[_, BaseRecord]) => Any): Unit = { - - // find all the potential fields - val potentialFields = methods.toList.filter(isField) - - // any fields with duplicate names get put into a List - val map: Map[String, List[Method]] = potentialFields.foldLeft[Map[String, List[Method]]](Map()){ - case (map, method) => val name = method.getName - map + (name -> (method :: map.getOrElse(name, Nil))) - } - - // sort each list based on having the most specific type and use that method - val realMeth = map.values.map(_.sortWith { - case (a, b) => !a.getReturnType.isAssignableFrom(b.getReturnType) - }).map(_.head) - - for (v <- realMeth) { - v.invoke(rec) match { - case mf: Field[_, BaseRecord] if !mf.ignoreField_? => - mf.setName_!(v.getName) - f(v, mf) - case _ => - } - } - - } - - this.runSafe { - val tArray = new ListBuffer[FieldHolder] - - val methods = rootClass.getMethods - - lifecycleCallbacks = (for (v <- methods - if v.getName != "meta" && isLifecycle(v)) yield (v.getName, v)).toList - - introspect(this, methods) { - case (v, mf) => tArray += FieldHolder(mf.name, v, mf) - } - - fieldList = { - val ordered = fieldOrder.flatMap(f => tArray.find(_.metaField == f)) - ordered ++ (tArray --= ordered) - } - - fieldMap = Map() ++ fieldList.map(i => (i.name, i)) - } - - /** - * Specifies if this Record is mutable or not - */ - def mutable_? = true - - /** - * Creates a new record - */ - def createRecord: BaseRecord = { - val rec = instantiateRecord - rec runSafe { - fieldList.foreach(fh => fh.field(rec).setName_!(fh.name)) - } - rec - } - - /** Make a new record instance. This method can be overridden to provide caching behavior or what have you. */ - protected def instantiateRecord: BaseRecord = rootClass.newInstance.asInstanceOf[BaseRecord] - - /** - * Creates a new record, setting the value of the fields from the original object but - * applying the new value for the specific field - * - * @param - original the initial record - * @param - field the new mutated field - * @param - the new value of the field - */ - def createWithMutableField[FieldType](original: BaseRecord, - field: Field[FieldType, BaseRecord], - newValue: Box[FieldType]): BaseRecord = { - val rec = createRecord - - for (fh <- fieldList) { - val recField = fh.field(rec) - if (fh.name == field.name) - recField.asInstanceOf[Field[FieldType, BaseRecord]].setBox(newValue) - else - recField.setFromAny(fh.field(original).valueBox) - } - - rec - } - - /** - * Returns the HTML representation of inst Record. - * - * @param inst - th designated Record - * @return a NodeSeq - */ - def toXHtml(inst: BaseRecord): NodeSeq = fieldList.flatMap(_.field(inst).toXHtml ++ Text("\n")) - - - /** - * Validates the inst Record by calling validators for each field - * - * @param inst - the Record to be validated - * @return a List of FieldError. 
If this list is empty you can assume that record was validated successfully - */ - def validate(inst: BaseRecord): List[FieldError] = { - foreachCallback(inst, _.beforeValidation) - try{ - fieldList.flatMap(_.field(inst).validate) - } finally { - foreachCallback(inst, _.afterValidation) - } - } - - /** - * Returns the JSON representation of inst record - * - * @param inst: BaseRecord - * @return JsObj - */ - def asJSON(inst: BaseRecord): JsObj = { - val tups = fieldList.map{ fh => - val field = fh.field(inst) - field.name -> field.asJs - } - JsObj(tups:_*) - } - - /** - * Returns the JSON representation of inst record, converts asJValue to JsObj - * - * @return a JsObj - */ - def asJsExp(inst: BaseRecord): JsExp = new JsExp { - lazy val toJsCmd = compactRender(asJValue(inst)) - } - - /** Encode a record instance into a JValue */ - def asJValue(rec: BaseRecord): JValue = { - asJObject(rec) - } - - /** Encode a record instance into a JObject */ - def asJObject(rec: BaseRecord): JObject = { - JObject(fields(rec).map(f => JField(f.name, f.asJValue))) - } - - /** Create a record by decoding a JValue which must be a JObject */ - def fromJValue(jvalue: JValue): Box[BaseRecord] = { - val inst = createRecord - setFieldsFromJValue(inst, jvalue) map (_ => inst) - } - - /** Attempt to decode a JValue, which must be a JObject, into a record instance */ - def setFieldsFromJValue(rec: BaseRecord, jvalue: JValue): Box[Unit] = { - def fromJFields(jfields: List[JField]): Box[Unit] = { - for { - jfield <- jfields - field <- rec.fieldByName(jfield.name) - } field.setFromJValue(jfield.value) - - Full(()) - } - - jvalue match { - case JObject(jfields) => fromJFields(jfields) - case other => expectedA("JObject", other) - } - } - - /** - * Create a record with fields populated with values from the JSON construct - * - * @param json - The stringified JSON object - * @return Box[BaseRecord] - */ - def fromJsonString(json: String): Box[BaseRecord] = { - val inst = createRecord - setFieldsFromJsonString(inst, json) map (_ => inst) - } - - /** - * Set from a Json String using the lift-json parser - */ - def setFieldsFromJsonString(inst: BaseRecord, json: String): Box[Unit] = - setFieldsFromJValue(inst, JsonParser.parse(json)) - - def foreachCallback(inst: BaseRecord, f: LifecycleCallbacks => Any) { - lifecycleCallbacks.foreach(m => f(m._2.invoke(inst).asInstanceOf[LifecycleCallbacks])) - } - - /** - * Returns the XHTML representation of inst Record. If formTemplate is set, - * this template will be used otherwise a default template is considered. - * - * @param inst - the record to be rendered - * @return the XHTML content as a NodeSeq - */ - def toForm(inst: BaseRecord): NodeSeq = { - formTemplate match { - case Full(template) => toForm(inst, template) - case _ => fieldList.flatMap(_.field(inst).toForm.openOr(NodeSeq.Empty) ++ Text("\n")) - } - } - - /** - * Returns the XHTML representation of inst Record. You must provide the Node template - * to represent this record in the proprietary layout. - * - * @param inst - the record to be rendered - * @param template - The markup template forthe form. 
See also the formTemplate variable - * @return the XHTML content as a NodeSeq - */ - def toForm(inst: BaseRecord, template: NodeSeq): NodeSeq = { - template match { - case e @ {_*} => e.attribute("name") match { - case Some(name) => fieldByName(name.toString, inst).map(_.label).openOr(NodeSeq.Empty) - case _ => NodeSeq.Empty - } - - case e @ {_*} => e.attribute("name") match { - case Some(name) => fieldByName(name.toString, inst).flatMap(_.toForm).openOr(NodeSeq.Empty) - case _ => NodeSeq.Empty - } - - case e @ {_*} => e.attribute("name") match { - case Some(name) => fieldByName(name.toString, inst).map(_.uniqueFieldId match { - case Full(id) => - case _ => NodeSeq.Empty - }).openOr(NodeSeq.Empty) - case _ => NodeSeq.Empty - } - - case elem: Elem => - elem.copy(child = toForm(inst, elem.child.flatMap(n => toForm(inst, n)))) - - case s : Seq[_] => s.flatMap(e => e match { - case elem: Elem => - elem.copy(child = toForm(inst, elem.child.flatMap(n => toForm(inst, n)))) - - case x => x - }) - - } - } - - /** - * Get a field by the field name - * @param fieldName -- the name of the field to get - * @param actual -- the instance to get the field on - * - * @return Box[The Field] (Empty if the field is not found) - */ - def fieldByName(fieldName: String, inst: BaseRecord): Box[Field[_, BaseRecord]] = { - Box(fieldMap.get(fieldName).map(_.field(inst))) - } - - /** - * Prepend a DispatchPF function to LiftRules.dispatch. If the partial function is defined for a give Req - * it will construct a new Record based on the HTTP query string parameters - * and will pass this Record to the function returned by func parameter. - * - * @param func - a PartialFunction for associating a request with a user-provided function and the proper Record - */ - def prependDispatch(func: PartialFunction[Req, BaseRecord => Box[LiftResponse]])= { - LiftRules.dispatch.prepend (makeFunc(func)) - } - - /** - * Append a DispatchPF function to LiftRules.dispatch. If the partial function is defined for a give Req - * it will construct a new Record based on the HTTP query string parameters - * and will pass this Record to the function returned by func parameter. 
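A hedged sketch of how appendDispatch might be wired up: `MyRecordMeta` is the meta object from the formTemplate example above, while the `/api/echo` path, the GET match and the JSON response are illustrative assumptions rather than anything defined in this file.

```
import net.liftweb.common.Full
import net.liftweb.http.{GetRequest, JsonResponse, Req}

class Boot {
  def boot: Unit = {
    // For GET /api/echo, Lift builds a MyRecord from the query-string
    // parameters (via fromReq) and passes it to the returned function.
    MyRecordMeta.appendDispatch {
      case Req("api" :: "echo" :: Nil, _, GetRequest) =>
        record => Full(JsonResponse(MyRecordMeta.asJValue(record)))
    }
  }
}
```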
- * - * @param func - a PartialFunction for associating a request with a user-provided function and the proper Record - */ - def appendDispatch(func: PartialFunction[Req, BaseRecord => Box[LiftResponse]])= { - LiftRules.dispatch.append (makeFunc(func)) - } - - - private def makeFunc(func: PartialFunction[Req, BaseRecord => Box[LiftResponse]]) = new PartialFunction[Req, () => Box[LiftResponse]] { - - def isDefinedAt(r: Req): Boolean = func.isDefinedAt(r) - - def apply(r: Req): () => Box[LiftResponse] = { - val rec = fromReq(r) - () => func(r)(rec) - } - } - - /** - * Create a record with fields populated with values from the request - * - * @param req - The Req to read from - * @return the created record - */ - def fromReq(r: Req): BaseRecord = { - val inst = createRecord - setFieldsFromReq(inst, r) - inst - } - - /** - * Populate the fields of the record with values from the request - * - * @param inst - The record to populate - * @param req - The Req to read from - */ - def setFieldsFromReq(inst: BaseRecord, req: Req) { - for(fh <- fieldList){ - fh.field(inst).setFromAny(req.param(fh.name)) - } - } - - /** - * Populate the fields of the record with values from an existing record - * - * @param inst - The record to populate - * @param rec - The Record to read from - */ - def setFieldsFromRecord(inst: BaseRecord, rec: BaseRecord) { - for { - fh <- fieldList - fld <- rec.fieldByName(fh.name) - } { - fh.field(inst).setFromAny(fld.valueBox) - } - } - - def copy(rec: BaseRecord): BaseRecord = { - val inst = createRecord - setFieldsFromRecord(inst, rec) - inst - } - - /** - * Defines the order of the fields in this record - * - * @return a List of Field - */ - def fieldOrder: List[Field[_, BaseRecord]] = Nil - - /** - * Renamed from fields() due to a clash with fields() in Record. Use this method - * to obtain a list of fields defined in the meta companion objects. Possibly a - * breaking change? (added 14th August 2009, Tim Perrett) - * - * @see Record - */ - def metaFields() : List[Field[_, BaseRecord]] = fieldList.map(_.metaField) - - /** - * Obtain the fields for a particular Record or subclass instance by passing - * the instance itself. - * (added 14th August 2009, Tim Perrett) - */ - def fields(rec: BaseRecord) : List[Field[_, BaseRecord]] = fieldList.map(_.field(rec)) - - case class FieldHolder(name: String, method: Method, metaField: Field[_, BaseRecord]) { - def field(inst: BaseRecord): Field[_, BaseRecord] = method.invoke(inst).asInstanceOf[Field[_, BaseRecord]] - } - - def dirty_?(inst: BaseRecord): Boolean = !fields(inst).filter(_.dirty_?).isEmpty -} - diff --git a/persistence/record/src/main/scala/net/liftweb/record/ProtoUser.scala b/persistence/record/src/main/scala/net/liftweb/record/ProtoUser.scala deleted file mode 100644 index 97e81602df..0000000000 --- a/persistence/record/src/main/scala/net/liftweb/record/ProtoUser.scala +++ /dev/null @@ -1,428 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
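The dispatch hooks above (prependDispatch, appendDispatch) pair with fromReq: the MetaRecord builds an instance from the query string and hands it to the supplied handler. A hedged sketch, reusing the hypothetical Person record and an illustrative /person/echo path:

```scala
import net.liftweb.common.Full
import net.liftweb.http.{GetRequest, JsonResponse, Req}

object PersonDispatchExample {
  // Typically called from Boot.boot. For GET /person/echo, MetaRecord.fromReq
  // builds a Person from the query-string parameters before our handler runs.
  def init(): Unit =
    Person.prependDispatch {
      case Req("person" :: "echo" :: Nil, _, GetRequest) =>
        (person: Person) => Full(JsonResponse(person.asJValue))
    }
}
```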
- */ - -package net.liftweb -package record - -import net.liftweb.http.S -import net.liftweb.http.S._ -import net.liftweb.http.js._ -import JsCmds._ -import scala.xml.{NodeSeq, Node, Text, Elem} -import scala.xml.transform._ -import net.liftweb.sitemap._ -import net.liftweb.sitemap.Loc._ -import net.liftweb.util.Helpers._ -import net.liftweb.util._ -import net.liftweb.common._ -import net.liftweb.util.Mailer._ -import net.liftweb.record.field._ -import net.liftweb.proto.{ProtoUser => GenProtoUser} - -/** - * ProtoUser provides a "User" with a first name, last name, email, etc. - */ -trait ProtoUser[T <: ProtoUser[T]] extends Record[T] { - self: T => - - /** - * The primary key field for the User. You can override the behavior - * of this field: - *
-   * override lazy val id = new MyMappedLongClass(this) {
-   *   println("I am doing something different")
-   * }
-   * 
- */ - lazy val id: LongField[T] = new MyMappedLongClass(this) - - protected class MyMappedLongClass(obj: T) extends LongField(obj) - - /** - * Convert the id to a String - */ - def userIdAsString: String = id.get.toString - - /** - * The first name field for the User. You can override the behavior - * of this field: - *
-   * override lazy val firstName = new MyFirstName(this, 32) {
-   *   println("I am doing something different")
-   * }
-   * 
- */ - - lazy val firstName: StringField[T] = new MyFirstName(this, 32) - - protected class MyFirstName(obj: T, size: Int) extends StringField(obj, size) { - override def displayName = owner.firstNameDisplayName - override val fieldId = Some(Text("txtFirstName")) - } - - /** - * The string name for the first name field - */ - def firstNameDisplayName = S.?("first.name") - - /** - * The last field for the User. You can override the behavior - * of this field: - *
-   * override lazy val lastName = new MyLastName(this, 32) {
-   *   println("I am doing something different")
-   * }
-   * 
- */ - lazy val lastName: StringField[T] = new MyLastName(this, 32) - - protected class MyLastName(obj: T, size: Int) extends StringField(obj, size) { - override def displayName = owner.lastNameDisplayName - override val fieldId = Some(Text("txtLastName")) - } - - /** - * The last name string - */ - def lastNameDisplayName = S.?("last.name") - - /** - * The email field for the User. You can override the behavior - * of this field: - *
-   * override lazy val email = new MyEmail(this, 48) {
-   *   println("I am doing something different")
-   * }
-   * 
- */ - lazy val email: EmailField[T] = new MyEmail(this, 48) - - protected class MyEmail(obj: T, size: Int) extends EmailField(obj, size) { - override def validations = valUnique(S.?("unique.email.address")) _ :: super.validations - override def displayName = owner.emailDisplayName - override val fieldId = Some(Text("txtEmail")) - } - - protected def valUnique(errorMsg: => String)(email: String): List[FieldError] - - /** - * The email first name - */ - def emailDisplayName = S.?("email.address") - - /** - * The password field for the User. You can override the behavior - * of this field: - *
-   * override lazy val password = new MyPassword(this) {
-   *   println("I am doing something different")
-   * }
-   * 
- */ - lazy val password: PasswordField[T] = new MyPassword(this) - - protected class MyPassword(obj: T) extends PasswordField(obj) { - override def displayName = owner.passwordDisplayName - } - - /** - * The display name for the password field - */ - def passwordDisplayName = S.?("password") - - /** - * The superuser field for the User. You can override the behavior - * of this field: - *
-   * override lazy val superUser = new MySuperUser(this) {
-   *   println("I am doing something different")
-   * }
-   * 
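Each Scaladoc block above shows the same customization hook: every ProtoUser field is a lazy val backed by a protected inner class, so a concrete record can override it in place, as sketched below (the User record and its stubbed valUnique are illustrative; a real backend would check uniqueness against its store):

```scala
import net.liftweb.record.{MetaRecord, ProtoUser}
import net.liftweb.util.FieldError

class User extends ProtoUser[User] {
  def meta = User

  // ProtoUser leaves uniqueness checking to the persistence layer; stubbed here.
  protected def valUnique(errorMsg: => String)(email: String): List[FieldError] = Nil

  // The override pattern from the Scaladoc: swap a field for a tweaked subclass.
  override lazy val firstName = new MyFirstName(this, 32) {
    override def displayName = "Given name"
  }
}

object User extends User with MetaRecord[User]
```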
- */ - lazy val superUser: BooleanField[T] = new MySuperUser(this) - - protected class MySuperUser(obj: T) extends BooleanField(obj) { - override def defaultValue = false - } - - def niceName: String = (firstName.get, lastName.get, email.get) match { - case (f, l, e) if f.length > 1 && l.length > 1 => f+" "+l+" ("+e+")" - case (f, _, e) if f.length > 1 => f+" ("+e+")" - case (_, l, e) if l.length > 1 => l+" ("+e+")" - case (_, _, e) => e - } - - def shortName: String = (firstName.get, lastName.get) match { - case (f, l) if f.length > 1 && l.length > 1 => f+" "+l - case (f, _) if f.length > 1 => f - case (_, l) if l.length > 1 => l - case _ => email.get - } - - def niceNameWEmailLink = {niceName} -} - -/** - * Mix this trait into the Mapper singleton for User and you - * get a bunch of user functionality including password reset, etc. - */ -trait MetaMegaProtoUser[ModelType <: MegaProtoUser[ModelType]] extends MetaRecord[ModelType] with GenProtoUser { - self: ModelType => - - type TheUserType = ModelType - - /** - * What's a field pointer for the underlying CRUDify - */ - type FieldPointerType = Field[_, TheUserType] - - /** - * Based on a FieldPointer, build a FieldPointerBridge - */ - protected implicit def buildFieldBridge(from: FieldPointerType): FieldPointerBridge = new MyPointer(from) - - - protected class MyPointer(from: FieldPointerType) extends FieldPointerBridge { - /** - * What is the display name of this field? - */ - def displayHtml: NodeSeq = from.displayHtml - - /** - * Does this represent a pointer to a Password field? - */ - def isPasswordField_? : Boolean = from match { - case a: PasswordField[_] => true - case _ => false - } - } - - /** - * Convert an instance of TheUserType to the Bridge trait - */ - protected implicit def typeToBridge(in: TheUserType): UserBridge = - new MyUserBridge(in) - - /** - * Bridges from TheUserType to methods used in this class - */ - protected class MyUserBridge(in: TheUserType) extends UserBridge { - /** - * Convert the user's primary key to a String - */ - def userIdAsString: String = in.id.toString - - /** - * Return the user's first name - */ - def getFirstName: String = in.firstName.get - - /** - * Return the user's last name - */ - def getLastName: String = in.lastName.get - - /** - * Get the user's email - */ - def getEmail: String = in.email.get - - /** - * Is the user a superuser - */ - def superUser_? : Boolean = in.superUser.get - - /** - * Has the user been validated? - */ - def validated_? : Boolean = in.validated.get - - /** - * Does the supplied password match the actual password? - */ - def testPassword(toTest: Box[String]): Boolean = - toTest.map(in.password.match_?) 
openOr false - - /** - * Set the validation flag on the user and return the user - */ - def setValidated(validation: Boolean): TheUserType = - in.validated(validation) - - /** - * Set the unique ID for this user to a new value - */ - def resetUniqueId(): TheUserType = { - in.uniqueId.reset() - } - - /** - * Return the unique ID for the user - */ - def getUniqueId(): String = in.uniqueId.get - - /** - * Validate the user - */ - def validate: List[FieldError] = in.validate - - /** - * Given a list of string, set the password - */ - def setPasswordFromListString(pwd: List[String]): TheUserType = { - in.password.setFromAny(pwd) - in - } - - /** - * Save the user to backing store - */ - def save(): Boolean = in.saveTheRecord().isDefined - } - - /** - * Given a field pointer and an instance, get the field on that instance - */ - protected def computeFieldFromPointer(instance: TheUserType, pointer: FieldPointerType): Box[BaseField] = fieldByName(pointer.name, instance) - - - /** - * Given an username (probably email address), find the user - */ - protected def findUserByUserName(email: String): Box[TheUserType] - - /** - * Given a unique id, find the user - */ - protected def findUserByUniqueId(id: String): Box[TheUserType] - - /** - * Create a new instance of the User - */ - protected def createNewUserInstance(): TheUserType = self.createRecord - - /** - * Given a String representing the User ID, find the user - */ - protected def userFromStringId(id: String): Box[TheUserType] - - /** - * The list of fields presented to the user at sign-up - */ - def signupFields: List[FieldPointerType] = List(firstName, - lastName, - email, - locale, - timezone, - password) - - /** - * The list of fields presented to the user for editing - */ - def editFields: List[FieldPointerType] = List(firstName, - lastName, - email, - locale, - timezone) - -} - -/** - * ProtoUser is bare bones. MetaProtoUser contains a bunch - * more fields including a validated flag, locale, timezone, etc. - */ -trait MegaProtoUser[T <: MegaProtoUser[T]] extends ProtoUser[T] { - self: T => - - /** - * The unique id field for the User. This field - * is used for validation, lost passwords, etc. - * You can override the behavior - * of this field: - *
-   * override lazy val uniqueId = new MyUniqueId(this, 32) {
-   *   println("I am doing something different")
-   * }
-   * 
- */ - lazy val uniqueId: UniqueIdField[T] = new MyUniqueId(this, 32) - - protected class MyUniqueId(obj: T, size: Int) extends UniqueIdField(obj, size) { - - } - - /** - * Whether the user has been validated. - * You can override the behavior - * of this field: - *
-   * override lazy val validated = new MyValidated(this, 32) {
-   *   println("I am doing something different")
-   * }
-   * 
- */ - lazy val validated: BooleanField[T] = new MyValidated(this) - - protected class MyValidated(obj: T) extends BooleanField[T](obj) { - override def defaultValue = false - override val fieldId = Some(Text("txtValidated")) - } - - /** - * The locale field for the User. - * You can override the behavior - * of this field: - *
-   * override lazy val locale = new MyLocale(this, 32) {
-   *   println("I am doing something different")
-   * }
-   * 
- */ - lazy val locale: LocaleField[T] = new MyLocale(this) - - protected class MyLocale(obj: T) extends LocaleField[T](obj) { - override def displayName = owner.localeDisplayName - override val fieldId = Some(Text("txtLocale")) - } - - /** - * The time zone field for the User. - * You can override the behavior - * of this field: - *
-   * override lazy val timezone = new MyTimeZone(this, 32) {
-   *   println("I am doing something different")
-   * }
-   * 
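MegaProtoUser adds the uniqueId, validated, locale and timezone fields documented above. A small, hedged sketch of a concrete record using them; the Member record and its no-op valUnique are illustrative only:

```scala
import java.util.Locale
import net.liftweb.record.{MegaProtoUser, MetaRecord}
import net.liftweb.util.FieldError

class Member extends MegaProtoUser[Member] {
  def meta = Member

  // A real backend would enforce e-mail uniqueness against its store; stubbed here.
  protected def valUnique(errorMsg: => String)(email: String): List[FieldError] = Nil
}

object Member extends Member with MetaRecord[Member]

object MemberExample {
  def newMember(address: String): Member = {
    val m = Member.createRecord
    m.email.set(address)
    m.locale.set(Locale.CANADA.toString)  // LocaleField stores the Locale's string form
    m.validated.set(false)                // stays false until e-mail confirmation
    m
  }
}
```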
- */ - lazy val timezone: TimeZoneField[T] = new MyTimeZone(this) - - protected class MyTimeZone(obj: T) extends TimeZoneField[T](obj) { - override def displayName = owner.timezoneDisplayName - override val fieldId = Some(Text("txtTimeZone")) - } - - /** - * The string for the timezone field - */ - def timezoneDisplayName = S.?("time.zone") - - /** - * The string for the locale field - */ - def localeDisplayName = S.?("locale") - -} diff --git a/persistence/record/src/main/scala/net/liftweb/record/Record.scala b/persistence/record/src/main/scala/net/liftweb/record/Record.scala deleted file mode 100644 index 038756c3a6..0000000000 --- a/persistence/record/src/main/scala/net/liftweb/record/Record.scala +++ /dev/null @@ -1,192 +0,0 @@ -/* - * Copyright 2007-2019 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package record - -import common._ -import http.js.{JsExp, JsObj} -import http.{Req, SHtml} -import json.JsonAST._ -import util._ -import field._ - -import scala.xml._ -import java.util.prefs.BackingStoreException - -trait Record[MyType <: Record[MyType]] extends FieldContainer { - self: MyType => - - /** - * Get the fields defined on the meta object for this record instance - */ - def fields() = meta.fields(this) - - def allFields = fields() - - /** - * The meta record (the object that contains the meta result for this type) - */ - def meta: MetaRecord[MyType] - - /** - * Is it safe to make changes to the record (or should we check access control?) - */ - final def safe_? : Boolean = { - Safe.safe_?(System.identityHashCode(this)) - } - - def runSafe[T](f : => T) : T = { - Safe.runSafe(System.identityHashCode(this))(f) - } - - /** - * Returns the HTML representation of this Record - */ - def toXHtml: NodeSeq = { - meta.toXHtml(this) - } - - /** - * Validates this Record by calling validators for each field - * - * @return a List of FieldError. If this list is empty you can assume that record was validated successfully - */ - def validate : List[FieldError] = { - runSafe { - meta.validate(this) - } - } - - /** - * Returns the JSON representation of this record - * - * @return a JsObj - */ - def asJSON: JsExp = meta.asJSON(this) - - /** - * Save the instance and return the instance - */ - def saveTheRecord(): Box[MyType] = throw new BackingStoreException("Raw Records don't save themselves") - - /** - * Returns the JSON representation of this record, converts asJValue to JsObj - * - * @return a JsObj - */ - def asJsExp: JsExp = meta.asJsExp(this) - - /** Encode this record instance as a JValue */ - def asJValue: JValue = meta.asJValue(this) - - /** Encode this record instance as a JObject */ - def asJObject: JObject = meta.asJObject(this) - - /** Set the fields of this record from the given JValue */ - def setFieldsFromJValue(jvalue: JValue): Box[Unit] = meta.setFieldsFromJValue(this, jvalue) - - /** - * Sets the fields of this Record from the given JSON. 
- */ - def setFieldsFromJsonString(json: String): Box[Unit] = meta.setFieldsFromJsonString(this, json) - - /** - * Sets the fields of this Record from the given Req. - */ - def setFieldsFromReq(req: Req){ meta.setFieldsFromReq(this, req) } - - /** - * Present the model as a form and execute the function on submission of the form - * - * @param button - If it's Full, put a submit button on the form with the value of the parameter - * @param f - the function to execute on form submission - * - * @return the form - */ - def toForm(button: Box[String])(f: MyType => Unit): NodeSeq = { - meta.toForm(this) ++ - (SHtml.hidden(() => f(this))) ++ - ((button.map(b => ()) openOr scala.xml.Text(""))) - } - - /** - * Present the model as a form and execute the function on submission of the form - * - * @param f - the function to execute on form submission - * - * @return the form - */ - def toForm(f: MyType => Unit): NodeSeq = meta.toForm(this) ++ (SHtml.hidden(() => f(this))) - - /** - * Find the field by name - * @param fieldName -- the name of the field to find - * - * @return Box[MappedField] - */ - def fieldByName(fieldName: String): Box[Field[_, MyType]] = meta.fieldByName(fieldName, this) - - override def equals(other: Any): Boolean = { - other match { - case that: Record[MyType] => - that.fields.corresponds(this.fields) { (a,b) => - a == b - } - case _ => - false - } - } - - override def toString = { - val fieldList = this.fields.map(f => "%s=%s" format (f.name, - f.valueBox match { - case Full(c: java.util.Calendar) => c.getTime().toString() - case Full(null) => "null" - case Full(v) => v.toString - case x => x.toString - })) - - "%s={%s}" format (this.getClass.toString, fieldList.mkString(", ")) - } - - def copy: MyType = meta.copy(this) - - def dirty_? : Boolean = meta.dirty_?(this) -} - -trait ExpandoRecord[MyType <: Record[MyType] with ExpandoRecord[MyType]] { - self: MyType => - - /** - * If there's a field in this record that defines the locale, return it - */ - def localeField: Box[LocaleField[MyType]] = Empty - - def timeZoneField: Box[TimeZoneField[MyType]] = Empty - - def countryField: Box[CountryField[MyType]] = Empty -} - - -trait KeyedRecord[MyType <: KeyedRecord[MyType, KeyType], KeyType] extends Record[MyType] { - self: MyType => - - def primaryKey: KeyField[KeyType, MyType] - - def comparePrimaryKeys(other: MyType) = primaryKey === other.primaryKey -} - diff --git a/persistence/record/src/main/scala/net/liftweb/record/RecordHelpers.scala b/persistence/record/src/main/scala/net/liftweb/record/RecordHelpers.scala deleted file mode 100644 index fe566947f9..0000000000 --- a/persistence/record/src/main/scala/net/liftweb/record/RecordHelpers.scala +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright 2007-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
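Record.toForm pairs the rendered fields with a submit callback, and validate runs every field's validators. A sketch of how a snippet might have used the two together, assuming the hypothetical Person record from the earlier sketch:

```scala
import net.liftweb.common.Full
import net.liftweb.http.S
import scala.xml.NodeSeq

class PersonFormSnippet {
  // Render the record as a form; on submit, run the field validators.
  def render: NodeSeq = {
    val person = Person.createRecord
    person.toForm(Full("Save")) { submitted =>
      submitted.validate match {
        case Nil    => S.notice("Saved " + submitted.name.get)
        case errors => S.error(errors)   // FieldErrors surface as Lift error notices
      }
    }
  }
}
```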
- */ - -package net.liftweb -package record - -import net.liftweb.http.js.{JsExp, JsObj} -import net.liftweb.http.js.JE.{JsArray, JsFalse, JsNull, JsObj, JsTrue, Num, Str} -import net.liftweb.json.JsonAST.{JArray, JBool, JInt, JDouble, JField, JNothing, JNull, JObject, JString, JValue} - -object RecordHelpers { - - /* For the moment, I couldn't find any other way to bridge JValue and JsExp, so I wrote something simple here */ - implicit def jvalueToJsExp(jvalue: JValue): JsExp = { - jvalue match { - case JArray(vs) => JsArray(vs.map(jvalueToJsExp): _*) - case JBool(b) => if (b) JsTrue else JsFalse - case JDouble(d) => Num(d) - case JInt(i) => Num(i) - case JNothing => JsNull - case JNull => JsNull - case JObject(fs) => JsObj(fs.map(f => (f.name, jvalueToJsExp(f.value))): _*) - case JString(s) => Str(s) - } - } -} - diff --git a/persistence/record/src/main/scala/net/liftweb/record/RecordRules.scala b/persistence/record/src/main/scala/net/liftweb/record/RecordRules.scala deleted file mode 100644 index 1db9ad4efc..0000000000 --- a/persistence/record/src/main/scala/net/liftweb/record/RecordRules.scala +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright 2013 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package record - -import http.Factory -import util.ConnectionIdentifier -import util.Helpers._ - -import java.util.Locale - -object RecordRules extends Factory { - /** - * Calculate the name of a field based on the name - * of the Field. Must be set in Boot before any code - * that touches the MetaRecord. - * - * To get snake_case, use this: - * - * RecordRules.fieldName.default.set((_, name) => StringHelpers.snakify(name)) - */ - val fieldName = new FactoryMaker[(ConnectionIdentifier, String) => String]((_: ConnectionIdentifier, name: String) => name) {} - - /** - * This function is used to calculate the displayName of a field. Can be - * used to easily localize fields based on the locale in the - * current request - */ - val displayName: FactoryMaker[(Record[_], Locale, String) => String] = - new FactoryMaker[(Record[_], Locale, String) => String]((m: Record[_], l: Locale, name: String) => name) {} -} diff --git a/persistence/record/src/main/scala/net/liftweb/record/field/BinaryField.scala b/persistence/record/src/main/scala/net/liftweb/record/field/BinaryField.scala deleted file mode 100644 index 38c4785714..0000000000 --- a/persistence/record/src/main/scala/net/liftweb/record/field/BinaryField.scala +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright 2007-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
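RecordRules, removed above, exposed two FactoryMakers intended to be set in Boot. A hedged sketch combining the snake_case example from its Scaladoc with a locale-aware display name; the "field.&lt;name&gt;" resource-key convention is illustrative:

```scala
import java.util.Locale
import net.liftweb.http.S
import net.liftweb.record.{Record, RecordRules}
import net.liftweb.util.{ConnectionIdentifier, Helpers}

object RecordBootConfig {
  def init(): Unit = {
    // Persist fields under snake_case names (the Scaladoc's StringHelpers.snakify example).
    RecordRules.fieldName.default.set(
      (_: ConnectionIdentifier, name: String) => Helpers.snakify(name))

    // Resolve display names through the resource bundles, keyed as "field.<name>".
    RecordRules.displayName.default.set(
      (_: Record[_], _: Locale, name: String) => S.?("field." + name))
  }
}
```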
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package record -package field - -import scala.xml._ -import net.liftweb.common._ -import net.liftweb.http.{S} -import net.liftweb.http.js._ -import net.liftweb.json.JsonAST.JValue -import net.liftweb.util._ -import Helpers._ -import S._ -import JE._ - - -trait BinaryTypedField extends TypedField[Array[Byte]] { - - def setFromAny(in: Any): Box[Array[Byte]] = genericSetFromAny(in) - - def setFromString(s: String): Box[Array[Byte]] = s match { - case null|"" if optional_? => setBox(Empty) - case null|"" => setBox(Failure(notOptionalErrorMessage)) - case _ => setBox(tryo(s.getBytes("UTF-8"))) - } - - def toForm: Box[NodeSeq] = Empty - - def asJs = valueBox.map(v => Str(hexEncode(v))) openOr JsNull - - def asJValue: JValue = asJString(base64Encode _) - def setFromJValue(jvalue: JValue) = setFromJString(jvalue)(s => tryo(base64Decode(s))) -} - -class BinaryField[OwnerType <: Record[OwnerType]](@deprecatedName('rec) val owner: OwnerType) - extends Field[Array[Byte], OwnerType] with MandatoryTypedField[Array[Byte]] with BinaryTypedField { - - def this(@deprecatedName('rec) owner: OwnerType, value: Array[Byte]) = { - this(owner) - set(value) - } - - def defaultValue = Array(0) -} - -class OptionalBinaryField[OwnerType <: Record[OwnerType]](@deprecatedName('rec) val owner: OwnerType) - extends Field[Array[Byte], OwnerType] with OptionalTypedField[Array[Byte]] with BinaryTypedField { - - def this(@deprecatedName('rec) owner: OwnerType, value: Box[Array[Byte]]) = { - this(owner) - setBox(value) - } -} - diff --git a/persistence/record/src/main/scala/net/liftweb/record/field/BooleanField.scala b/persistence/record/src/main/scala/net/liftweb/record/field/BooleanField.scala deleted file mode 100644 index 6219df1861..0000000000 --- a/persistence/record/src/main/scala/net/liftweb/record/field/BooleanField.scala +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright 2007-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
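As the removed BinaryTypedField shows, binary values round-trip through JSON as base64-encoded strings (asJValue uses base64Encode, setFromJValue decodes). A small sketch with a hypothetical Attachment record:

```scala
import net.liftweb.json.JsonAST.JValue
import net.liftweb.record.{MetaRecord, Record}
import net.liftweb.record.field.BinaryField

class Attachment extends Record[Attachment] {
  def meta = Attachment
  object payload extends BinaryField(this)
}

object Attachment extends Attachment with MetaRecord[Attachment]

object AttachmentExample {
  def encode(bytes: Array[Byte]): JValue = {
    val a = Attachment.createRecord
    a.payload.set(bytes)
    a.payload.asJValue   // JString carrying the base64 form of the bytes
  }
}
```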
- */ - -package net.liftweb -package record -package field - -import scala.xml._ -import net.liftweb.common._ -import net.liftweb.http.js._ -import net.liftweb.http.{S, SHtml} -import net.liftweb.json.JsonAST.{JBool, JNothing, JNull, JValue} -import net.liftweb.util._ -import Helpers._ -import S._ -import JE._ - -trait BooleanTypedField extends TypedField[Boolean] { - - def setFromAny(in: Any): Box[Boolean] = in match{ - case b: java.lang.Boolean => setBox(Full(b.booleanValue)) - case Full(b: java.lang.Boolean) => setBox(Full(b.booleanValue)) - case Some(b: java.lang.Boolean) => setBox(Full(b.booleanValue)) - case (b: java.lang.Boolean) :: _ => setBox(Full(b.booleanValue)) - case _ => genericSetFromAny(in) - } - - def setFromString(s: String): Box[Boolean] = - if(s == null || s.isEmpty) { - if(optional_?) - setBox(Empty) - else - setBox(Failure(notOptionalErrorMessage)) - } else { - setBox(tryo(toBoolean(s))) - } - - private def elem(attrs: SHtml.ElemAttr*) = - SHtml.checkbox(valueBox openOr false, (b: Boolean) => this.setBox(Full(b)), (("tabindex" -> tabIndex.toString): SHtml.ElemAttr) :: attrs.toList: _*) - - def toForm: Box[NodeSeq] = - // FIXME? no support for optional_? - uniqueFieldId match { - case Full(id) => Full(elem("id" -> id)) - case _ => Full(elem()) - } - - def asJs: JsExp = valueBox.map(boolToJsExp) openOr JsNull - - def asJValue: JValue = valueBox.map(JBool) openOr (JNothing: JValue) - def setFromJValue(jvalue: JValue) = jvalue match { - case JNothing|JNull if optional_? => setBox(Empty) - case JBool(b) => setBox(Full(b)) - case other => setBox(FieldHelpers.expectedA("JBool", other)) - } -} - -class BooleanField[OwnerType <: Record[OwnerType]](@deprecatedName('rec) val owner: OwnerType) - extends Field[Boolean, OwnerType] with MandatoryTypedField[Boolean] with BooleanTypedField { - - def this(@deprecatedName('rec) owner: OwnerType, value: Boolean) = { - this(owner) - set(value) - } - - def defaultValue = false -} - -class OptionalBooleanField[OwnerType <: Record[OwnerType]](@deprecatedName('rec) val owner: OwnerType) - extends Field[Boolean, OwnerType] with OptionalTypedField[Boolean] with BooleanTypedField { - - def this(@deprecatedName('rec) owner: OwnerType, value: Box[Boolean]) = { - this(owner) - setBox(value) - } -} - diff --git a/persistence/record/src/main/scala/net/liftweb/record/field/CountryField.scala b/persistence/record/src/main/scala/net/liftweb/record/field/CountryField.scala deleted file mode 100644 index 5df760a142..0000000000 --- a/persistence/record/src/main/scala/net/liftweb/record/field/CountryField.scala +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright 2007-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
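BooleanTypedField accepts booleans in several shapes (raw, Some, Full, head of a List) and falls back to string parsing, while the optional variant clears to Empty on JNull. A brief sketch with a hypothetical Flags record:

```scala
import net.liftweb.json.JsonAST.JNull
import net.liftweb.record.{MetaRecord, Record}
import net.liftweb.record.field.{BooleanField, OptionalBooleanField}

class Flags extends Record[Flags] {
  def meta = Flags
  object enabled  extends BooleanField(this)          // mandatory, defaults to false
  object verified extends OptionalBooleanField(this)  // optional, defaults to Empty
}

object Flags extends Flags with MetaRecord[Flags]

object FlagsExample {
  def demo(): Flags = {
    val f = Flags.createRecord
    f.enabled.setFromAny("true")     // string input is parsed via setFromString
    f.verified.setFromJValue(JNull)  // optional field: JNull/JNothing clears to Empty
    f
  }
}
```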
- */ - -package net.liftweb -package record -package field - -import net.liftweb.http.S - - -object Countries extends Enumeration(1) { - - val C1, C2, C3, C4, C5, C6, C7, C8, C9, C10, - C11, C12, C13, C14, C15, C16, C17, C18, C19, C20, - C21, C22, C23, C24, C25, C26, C27, C28, C29, C30, - C31, C32, C33, C34, C35, C36, C37, C38, C39, C40, - C41, C42, C43, C44, C45, C46, C47, C48, C49, C50, - C51, C52, C53, C54, C55, C56, C57, C58, C59, C60, - C61, C62, C63, C64, C65, C66, C67, C68, C69, C70, - C71, C72, C73, C74, C75, C76, C77, C78, C79, C80, - C81, C82, C83, C84, C85, C86, C87, C88, C89, C90, - C91, C92, C93, C94, C95, C96, C97, C98, C99, C100, - C101, C102, C103, C104, C105, C106, C107, C108, C109, C110, - C111, C112, C113, C114, C115, C116, C117, C118, C119, C120, - C121, C122, C123, C124, C125, C126, C127, C128, C129, C130, - C131, C132, C133, C134, C135, C136, C137, C138, C139, C140, - C141, C142, C143, C144, C145, C146, C147, C148, C149, C150, - C151, C152, C153, C154, C155, C156, C157, C158, C159, C160, - C161, C162, C163, C164, C165, C166, C167, C168, C169, C170, - C171, C172, C173, C174, C175, C176, C177, C178, C179, C180, - C181, C182, C183, C184, C185, C186, C187, C188, C189, C190, - C191, C192, C193, C194, C195, C196, C197, C198, C199, C200, - C201, C202, C203, C204, C205, C206, C207, C208, C209, C210, - C211, C212, C213, C214, C215, C216, C217, C218, C219, C220, - C221, C222, C223, C224, C225, C226, C227, C228, C229, C230, - C231, C232, C233, C234, C235, C236, C237, C238, C239, C240, - C241, C242, C243, C244, C245, C246, C247, C248, C249, C250, - C251, C252, C253, C254, C255, C256, C257, C258, C259, C260, - C261, C262, C263, C264, C265, C266, C267, C268, C269, C270, - C271, C272 = I18NCountry - - val USA = C1 - val Australia = C10 - val Canada = C32 - val Sweden = C167 - - - def I18NCountry = new I18NCountry - - class I18NCountry extends Val { - override def toString() = - S.?("country_" + id) - } -} - - -class CountryField[OwnerType <: Record[OwnerType]](@deprecatedName('rec) owner: OwnerType) - extends EnumField(owner, Countries) - -class OptionalCountryField[OwnerType <: Record[OwnerType]](@deprecatedName('rec) owner: OwnerType) - extends OptionalEnumField(owner, Countries) - diff --git a/persistence/record/src/main/scala/net/liftweb/record/field/DateTimeField.scala b/persistence/record/src/main/scala/net/liftweb/record/field/DateTimeField.scala deleted file mode 100644 index d5897c9635..0000000000 --- a/persistence/record/src/main/scala/net/liftweb/record/field/DateTimeField.scala +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Copyright 2007-2012 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
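CountryField is an EnumField over the Countries enumeration above, so its JSON form is the country's ordinal and its display text comes from the country_&lt;id&gt; resource keys. A hedged sketch with a hypothetical Address record:

```scala
import net.liftweb.json.JsonAST.JValue
import net.liftweb.record.{MetaRecord, Record}
import net.liftweb.record.field.{Countries, CountryField}

class Address extends Record[Address] {
  def meta = Address
  object country extends CountryField(this)
}

object Address extends Address with MetaRecord[Address]

object AddressExample {
  def canadian(): JValue = {
    val a = Address.createRecord
    a.country.set(Countries.Canada)  // alias for C32 above
    a.country.asJValue               // EnumField serializes the ordinal: JInt(32)
  }
}
```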
- */ - -package net.liftweb -package record -package field - -import scala.xml._ -import net.liftweb.common._ -import net.liftweb.http.{S} -import net.liftweb.http.js._ -import net.liftweb.json._ -import net.liftweb.util._ -import java.util.{Calendar, Date} -import Helpers._ -import S._ -import JE._ - -trait DateTimeTypedField extends TypedField[Calendar] { - private final def dateToCal(d: Date): Calendar = { - val cal = Calendar.getInstance() - cal.setTime(d) - cal - } - - val formats = new DefaultFormats { - override def dateFormatter = Helpers.internetDateFormatter - } - - def setFromAny(in : Any): Box[Calendar] = toDate(in).flatMap(d => setBox(Full(dateToCal(d)))) or genericSetFromAny(in) - - def setFromString(s: String): Box[Calendar] = s match { - case null|"" if optional_? => setBox(Empty) - case null|"" => setBox(Failure(notOptionalErrorMessage)) - case other => setBox(tryo(dateToCal(parseInternetDate(s)))) - } - - private def elem = - S.fmapFunc(SFuncHolder(this.setFromAny(_))){funcName => - toInternetDate(s.getTime)) openOr ""} - tabindex={tabIndex.toString}/> - } - - def toForm: Box[NodeSeq] = - uniqueFieldId match { - case Full(id) => Full(elem % ("id" -> id)) - case _ => Full(elem) - } - - def asJs = valueBox.map(v => Str(formats.dateFormat.format(v.getTime))) openOr JsNull - - def asJValue: JValue = asJString(v => formats.dateFormat.format(v.getTime)) - def setFromJValue(jvalue: JValue) = setFromJString(jvalue) { - v => formats.dateFormat.parse(v).map(d => { - val cal = Calendar.getInstance - cal.setTime(d) - cal - }) - } -} - -class DateTimeField[OwnerType <: Record[OwnerType]](@deprecatedName('rec) val owner: OwnerType) - extends Field[Calendar, OwnerType] with MandatoryTypedField[Calendar] with DateTimeTypedField { - - def this(@deprecatedName('rec) owner: OwnerType, value: Calendar) = { - this(owner) - setBox(Full(value)) - } - - def defaultValue = Calendar.getInstance -} - -class OptionalDateTimeField[OwnerType <: Record[OwnerType]](@deprecatedName('rec) val owner: OwnerType) - extends Field[Calendar, OwnerType] with OptionalTypedField[Calendar] with DateTimeTypedField { - - def this(@deprecatedName('rec) owner: OwnerType, value: Box[Calendar]) = { - this(owner) - setBox(value) - } -} - diff --git a/persistence/record/src/main/scala/net/liftweb/record/field/DecimalField.scala b/persistence/record/src/main/scala/net/liftweb/record/field/DecimalField.scala deleted file mode 100644 index 53251d9629..0000000000 --- a/persistence/record/src/main/scala/net/liftweb/record/field/DecimalField.scala +++ /dev/null @@ -1,159 +0,0 @@ -/* - * Copyright 2007-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
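DateTimeTypedField stores a java.util.Calendar; setFromAny accepts anything Helpers.toDate understands and the JSON form is a formatted date string. A small sketch with a hypothetical Event record:

```scala
import java.util.Date
import net.liftweb.json.JsonAST.JValue
import net.liftweb.record.{MetaRecord, Record}
import net.liftweb.record.field.DateTimeField

class Event extends Record[Event] {
  def meta = Event
  object startsAt extends DateTimeField(this)
}

object Event extends Event with MetaRecord[Event]

object EventExample {
  def startingNow(): JValue = {
    val e = Event.createRecord
    e.startsAt.setFromAny(new Date)  // toDate handles Date, Calendar, parsable strings, ...
    e.startsAt.asJValue              // JString rendered with formats.dateFormat
  }
}
```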
- */ - -package net.liftweb -package record -package field - -import java.math.{BigDecimal => JBigDecimal,MathContext,RoundingMode} -import scala.xml._ -import net.liftweb.common._ -import net.liftweb.http.{S} -import net.liftweb.json.JsonAST.JValue -import net.liftweb.util._ -import Helpers._ -import S._ - - -trait DecimalTypedField extends NumericTypedField[BigDecimal] { - protected val scale: Int - protected val context: MathContext - private val zero = BigDecimal("0") - - def defaultValue = zero.setScale(scale) - - def setFromAny(in : Any): Box[BigDecimal] = setNumericFromAny(in, n => BigDecimal(n.toString)) - - def setFromString (s : String) : Box[BigDecimal] = - if(s == null || s.isEmpty) { - if(optional_?) - setBox(Empty) - else - setBox(Failure(notOptionalErrorMessage)) - } else { - setBox(tryo(BigDecimal(s))) - } - - def set_!(in: BigDecimal): BigDecimal = new BigDecimal(in.bigDecimal.setScale(scale, context.getRoundingMode)) - - def asJValue: JValue = asJString(_.toString) - def setFromJValue(jvalue: JValue) = setFromJString(jvalue)(s => tryo(BigDecimal(s))) -} - - -/** - *

- * A field that maps to a decimal value. Decimal precision and rounding
- * are controlled via the context parameter. The default value is zero.
- *
- * Note:
- * Using MathContext.UNLIMITED, whether explicitly or implicitly, means
- * that no precision or scaling will be used for the SQL field definition; the
- * default scale for DECIMAL is zero per the SQL standard, but the precision
- * for DECIMAL is vendor-specific. For example, PostgreSQL uses maximum precision
- * if it's not specified, but SQL Server uses a default precision of 18.
- * - * @author Derek Chen-Becker - * - * @param owner The Record that owns this field - * @param context The MathContext that controls precision and rounding - * @param scale Controls the scale of the underlying BigDecimal - */ -class DecimalField[OwnerType <: Record[OwnerType]](@deprecatedName('rec) val owner: OwnerType, val context : MathContext, val scale : Int) - extends Field[BigDecimal, OwnerType] with MandatoryTypedField[BigDecimal] with DecimalTypedField { - - /** - * Constructs a DecimalField with the specified initial value. The context - * is set to MathContext.UNLIMITED (see note above about default precision). - * The scale is taken from the initial value. - * - * @param owner The Record that owns this field - * @param value The initial value - */ - def this(@deprecatedName('rec) owner : OwnerType, value : BigDecimal) = { - this(owner, MathContext.UNLIMITED, value.scale) - set(value) - } - - /** - * Constructs a DecimalField with the specified initial value and context. - * The scale is taken from the initial value. - * - * @param owner The Record that owns this field - * @param value The initial value - * @param context The MathContext that controls precision and rounding - */ - def this(@deprecatedName('rec) owner : OwnerType, value : BigDecimal, context : MathContext) = { - this(owner, context, value.scale) - set(value) - } -} - - -/** - *

- * A field that maps to a decimal value. Decimal precision and rounding
- * are controlled via the context parameter. The default value is zero.
- *
- * Note:
- * Using MathContext.UNLIMITED, whether explicitly or implicitly, means
- * that no precision or scaling will be used for the SQL field definition; the
- * default scale for DECIMAL is zero per the SQL standard, but the precision
- * for DECIMAL is vendor-specific. For example, PostgreSQL uses maximum precision
- * if it's not specified, but SQL Server uses a default precision of 18.
- * - * @author Derek Chen-Becker - * - * @param owner The Record that owns this field - * @param context The MathContext that controls precision and rounding - * @param scale Controls the scale of the underlying BigDecimal - */ -class OptionalDecimalField[OwnerType <: Record[OwnerType]](@deprecatedName('rec) val owner: OwnerType, val context : MathContext, val scale : Int) - extends Field[BigDecimal, OwnerType] with OptionalTypedField[BigDecimal] with DecimalTypedField { - - /** - * Constructs a DecimalField with the specified initial value. The context - * is set to MathContext.UNLIMITED (see note above about default precision). - * The scale is taken from the initial value. - * - * @param owner The Record that owns this field - * @param value The initial value - * @param scale the scale of the decimal field, since there might be no value - */ - def this(@deprecatedName('rec) owner : OwnerType, value : Box[BigDecimal], scale : Int) = { - this(owner, MathContext.UNLIMITED, scale) - setBox(value) - } - - /** - * Constructs a DecimalField with the specified initial value and context. - * The scale is taken from the initial value. - * - * @param owner The Record that owns this field - * @param value The initial value - * @param scale the scale of the decimal field, since there might be no value - * @param context The MathContext that controls precision and rounding - */ - def this(@deprecatedName('rec) owner : OwnerType, value : Box[BigDecimal], scale : Int, context : MathContext) = { - this(owner, context, scale) - setBox(value) - } -} - diff --git a/persistence/record/src/main/scala/net/liftweb/record/field/DoubleField.scala b/persistence/record/src/main/scala/net/liftweb/record/field/DoubleField.scala deleted file mode 100644 index 8aff8c103f..0000000000 --- a/persistence/record/src/main/scala/net/liftweb/record/field/DoubleField.scala +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright 2007-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package record -package field - -import net.liftweb.common._ -import net.liftweb.http.{S} -import json._ -import net.liftweb.util._ -import Helpers._ - -trait DoubleTypedField extends NumericTypedField[Double] { - - def setFromAny(in: Any): Box[Double] = setNumericFromAny(in, _.doubleValue) - - def setFromString(s: String): Box[Double] = - if(s == null || s.isEmpty) { - if(optional_?) - setBox(Empty) - else - setBox(Failure(notOptionalErrorMessage)) - } else { - setBox(tryo(java.lang.Double.parseDouble(s))) - } - - def defaultValue = 0.0 - - def asJValue: JValue = valueBox.map(JDouble) openOr (JNothing: JValue) - - def setFromJValue(jvalue: JValue) = jvalue match { - case JNothing|JNull if optional_? 
=> setBox(Empty) - case JDouble(d) => setBox(Full(d)) - case JInt(i) => setBox(Full(i.toDouble)) - case other => setBox(FieldHelpers.expectedA("JDouble", other)) - } -} - -class DoubleField[OwnerType <: Record[OwnerType]](@deprecatedName('rec) val owner: OwnerType) - extends Field[Double, OwnerType] with MandatoryTypedField[Double] with DoubleTypedField { - - def this(@deprecatedName('rec) owner: OwnerType, value: Double) = { - this(owner) - set(value) - } -} - -class OptionalDoubleField[OwnerType <: Record[OwnerType]](@deprecatedName('rec) val owner: OwnerType) - extends Field[Double, OwnerType] with OptionalTypedField[Double] with DoubleTypedField { - - def this(@deprecatedName('rec) owner: OwnerType, value: Box[Double]) = { - this(owner) - setBox(value) - } -} - diff --git a/persistence/record/src/main/scala/net/liftweb/record/field/EmailField.scala b/persistence/record/src/main/scala/net/liftweb/record/field/EmailField.scala deleted file mode 100644 index 4e2e682151..0000000000 --- a/persistence/record/src/main/scala/net/liftweb/record/field/EmailField.scala +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright 2007-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package record -package field - -import scala.xml._ -import net.liftweb.util._ -import net.liftweb.common._ -import net.liftweb.proto._ -import net.liftweb.http.{S} -import java.util.regex._ -import Helpers._ -import S._ - -object EmailField { - def emailPattern = ProtoRules.emailRegexPattern.vend - - def validEmailAddr_?(email: String): Boolean = emailPattern.matcher(email).matches -} - -trait EmailTypedField extends TypedField[String] { - private def validateEmail(emailValue: ValueType): List[FieldError] = { - toBoxMyType(emailValue) match { - case Full(email) if (optional_? && email.isEmpty) => Nil - case Full(email) if EmailField.validEmailAddr_?(email) => Nil - case _ => Text(S.?("invalid.email.address")) - } - } - - override def validations = validateEmail _ :: Nil -} - -class EmailField[OwnerType <: Record[OwnerType]](@deprecatedName('rec) owner: OwnerType, maxLength: Int) - extends StringField[OwnerType](owner, maxLength) with EmailTypedField - -class OptionalEmailField[OwnerType <: Record[OwnerType]](@deprecatedName('rec) owner: OwnerType, maxLength: Int) - extends OptionalStringField[OwnerType](owner, maxLength) with EmailTypedField - diff --git a/persistence/record/src/main/scala/net/liftweb/record/field/EnumField.scala b/persistence/record/src/main/scala/net/liftweb/record/field/EnumField.scala deleted file mode 100644 index 9d9257dce2..0000000000 --- a/persistence/record/src/main/scala/net/liftweb/record/field/EnumField.scala +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Copyright 2007-2020 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
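EmailTypedField validates against ProtoRules.emailRegexPattern, a FactoryMaker, so the accepted pattern can be swapped in Boot. A hedged sketch; the Contact record and the stricter example.org pattern are illustrative:

```scala
import java.util.regex.Pattern
import net.liftweb.proto.ProtoRules
import net.liftweb.record.{MetaRecord, Record}
import net.liftweb.record.field.EmailField
import net.liftweb.util.FieldError

class Contact extends Record[Contact] {
  def meta = Contact
  object email extends EmailField(this, 128)
}

object Contact extends Contact with MetaRecord[Contact]

object EmailValidationExample {
  // Optionally tighten the framework-wide e-mail regex (normally done in Boot).
  def restrictToExampleOrg(): Unit =
    ProtoRules.emailRegexPattern.default.set(
      Pattern.compile("""[^@]+@example\.org"""))

  def check(address: String): List[FieldError] = {
    val c = Contact.createRecord
    c.email.set(address)
    c.email.validate   // Nil when the address matches the configured pattern
  }
}
```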
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package record -package field - -import reflect.Manifest -import scala.xml._ - -import common._ -import Box.option2Box -import json._ -import util._ -import Helpers._ -import http.js._ -import http.{S, SHtml} -import S._ -import JE._ - - -trait EnumTypedField[EnumType <: Enumeration] extends TypedField[EnumType#Value] { - protected val enum: EnumType - protected val valueManifest: Manifest[EnumType#Value] - - def toInt: Box[Int] = valueBox.map(_.id) - - def fromInt(in: Int): Box[EnumType#Value] = tryo(enum(in)) - - def setFromAny(in: Any): Box[EnumType#Value] = in match { - case (value: Int) => setFromInt(value) - case Some(value: Int) => setFromInt(value) - case Full(value: Int) => setFromInt(value) - case (value: Int)::_ => setFromInt(value) - case (value: Number) => setFromInt(value.intValue) - case Some(value: Number) => setFromInt(value.intValue) - case Full(value: Number) => setFromInt(value.intValue) - case (value: Number)::_ => setFromInt(value.intValue) - case _ => genericSetFromAny(in)(valueManifest) - } - - def setFromString(s: String): Box[EnumType#Value] = - if(s == null || s.isEmpty) { - if(optional_?) - setBox(Empty) - else - setBox(Failure(notOptionalErrorMessage)) - } else { - setBox(asInt(s).flatMap(fromInt)) - } - - def setFromInt(in: Int): Box[EnumType#Value] = setBox(fromInt(in)) - - /** Label for the selection item representing Empty, show when this field is optional. Defaults to the empty string. */ - def emptyOptionLabel: String = "" - - /** - * Build a list of (value, label) options for a select list. Return a tuple of (Box[String], String) where the first string - * is the value of the field and the second string is the Text name of the Value. - */ - def buildDisplayList: List[(Box[EnumType#Value], String)] = { - val options = enum.values.toList.map(a => (Full(a), a.toString)) - if (optional_?) (Empty, emptyOptionLabel)::options else options - } - - - private def elem = SHtml.selectObj[Box[EnumType#Value]](buildDisplayList, Full(valueBox), setBox(_)) % ("tabindex" -> tabIndex.toString) - - def toForm: Box[NodeSeq] = - uniqueFieldId match { - case Full(id) => Full(elem % ("id" -> id)) - case _ => Full(elem) - } - - def defaultValue: EnumType#Value = enum.values.iterator.next - - def asJs = valueBox.map(_ => Str(toString)) openOr JsNull - - def asJIntOrdinal: JValue = toInt.map(i => JInt(BigInt(i))) openOr (JNothing: JValue) - def setFromJIntOrdinal(jvalue: JValue): Box[EnumType#Value] = jvalue match { - case JNothing|JNull if optional_? => setBox(Empty) - case JInt(i) => setBox(fromInt(i.intValue)) - case other => setBox(FieldHelpers.expectedA("JInt", other)) - } - - def asJStringName: JValue = valueBox.map(v => JString(v.toString)) openOr (JNothing: JValue) - def setFromJStringName(jvalue: JValue): Box[EnumType#Value] = jvalue match { - case JNothing|JNull if optional_? 
=> setBox(Empty) - case JString(s) => setBox(Option(enum.withName(s)) ?~ ("Unknown value \"" + s + "\"")) - case other => setBox(FieldHelpers.expectedA("JString", other)) - } - - def asJValue: JValue = asJIntOrdinal - def setFromJValue(jvalue: JValue): Box[EnumType#Value] = setFromJIntOrdinal(jvalue) -} - -class EnumField[OwnerType <: Record[OwnerType], EnumType <: Enumeration](@deprecatedName('rec) val owner: OwnerType, - protected val enum: EnumType)(implicit m: Manifest[EnumType#Value] -) extends Field[EnumType#Value, OwnerType] with MandatoryTypedField[EnumType#Value] with EnumTypedField[EnumType] { - - def this(@deprecatedName('rec) owner: OwnerType, enum: EnumType, value: EnumType#Value)(implicit m: Manifest[EnumType#Value]) = { - this(owner, enum) - set(value) - } - - protected val valueManifest = m -} - -class OptionalEnumField[OwnerType <: Record[OwnerType], EnumType <: Enumeration](@deprecatedName('rec) val owner: OwnerType, - protected val enum: EnumType)(implicit m: Manifest[EnumType#Value] -) extends Field[EnumType#Value, OwnerType] with OptionalTypedField[EnumType#Value] with EnumTypedField[EnumType] { - - def this(@deprecatedName('rec) owner: OwnerType, enum: EnumType, value: Box[EnumType#Value])(implicit m: Manifest[EnumType#Value]) = { - this(owner, enum) - setBox(value) - } - - protected val valueManifest = m -} - diff --git a/persistence/record/src/main/scala/net/liftweb/record/field/EnumNameField.scala b/persistence/record/src/main/scala/net/liftweb/record/field/EnumNameField.scala deleted file mode 100644 index 366b08897f..0000000000 --- a/persistence/record/src/main/scala/net/liftweb/record/field/EnumNameField.scala +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Copyright 2007-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package record -package field - -import reflect.Manifest -import scala.xml._ - -import common._ -import Box.option2Box -import json._ -import util._ -import http.js._ -import http.{S, SHtml} -import S._ -import Helpers._ -import JE._ - - -trait EnumNameTypedField[EnumType <: Enumeration] extends TypedField[EnumType#Value] { - protected val enum: EnumType - protected val valueManifest: Manifest[EnumType#Value] - - def setFromAny(in: Any): Box[EnumType#Value] = genericSetFromAny(in)(valueManifest) - - def setFromString(s: String): Box[EnumType#Value] = s match { - case null|"" if optional_? => setBox(Empty) - case null|"" => setBox(Failure(notOptionalErrorMessage)) - case _ => setBox(enum.values.find(_.toString == s)) - } - - /** Label for the selection item representing Empty, show when this field is optional. Defaults to the empty string. */ - def emptyOptionLabel: String = "" - - /** - * Build a list of (value, label) options for a select list. Return a tuple of (Box[String], String) where the first string - * is the value of the field and the second string is the Text name of the Value. 
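The two enum fields removed here differ mainly in their JSON form: EnumField serializes the ordinal (asJIntOrdinal) while EnumNameField serializes the value's name (asJStringName). A short sketch with a hypothetical Status enumeration:

```scala
import net.liftweb.json.JsonAST.JValue
import net.liftweb.record.{MetaRecord, Record}
import net.liftweb.record.field.{EnumField, EnumNameField}

object Status extends Enumeration {
  val Draft, Published, Archived = Value
}

class Article extends Record[Article] {
  def meta = Article
  object statusById   extends EnumField(this, Status)      // JSON: JInt(ordinal)
  object statusByName extends EnumNameField(this, Status)  // JSON: JString(name)
}

object Article extends Article with MetaRecord[Article]

object EnumExample {
  def demo(): (JValue, JValue) = {
    val art = Article.createRecord
    art.statusById.set(Status.Published)
    art.statusByName.set(Status.Published)
    (art.statusById.asJValue, art.statusByName.asJValue)  // (JInt(1), JString("Published"))
  }
}
```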
- */ - def buildDisplayList: List[(Box[EnumType#Value], String)] = { - val options = enum.values.toList.map(a => (Full(a), a.toString)) - if (optional_?) (Empty, emptyOptionLabel)::options else options - } - - private def elem = SHtml.selectObj[Box[EnumType#Value]](buildDisplayList, Full(valueBox), setBox(_)) % ("tabindex" -> tabIndex.toString) - - def toForm: Box[NodeSeq] = - uniqueFieldId match { - case Full(id) => Full(elem % ("id" -> id)) - case _ => Full(elem) - } - - def defaultValue: EnumType#Value = enum.values.iterator.next - - def asJs = valueBox.map(_ => Str(toString)) openOr JsNull - - def asJStringName: JValue = valueBox.map(v => JString(v.toString)) openOr (JNothing: JValue) - def setFromJStringName(jvalue: JValue): Box[EnumType#Value] = jvalue match { - case JNothing|JNull if optional_? => setBox(Empty) - case JString(s) => setBox(enum.values.find(_.toString == s) ?~ ("Unknown value \"" + s + "\"")) - case other => setBox(FieldHelpers.expectedA("JString", other)) - } - - def asJValue: JValue = asJStringName - def setFromJValue(jvalue: JValue): Box[EnumType#Value] = setFromJStringName(jvalue) -} - -class EnumNameField[OwnerType <: Record[OwnerType], EnumType <: Enumeration](@deprecatedName('rec) val owner: OwnerType, - protected val enum: EnumType)(implicit m: Manifest[EnumType#Value] -) extends Field[EnumType#Value, OwnerType] with MandatoryTypedField[EnumType#Value] with EnumNameTypedField[EnumType] { - - def this(@deprecatedName('rec) owner: OwnerType, enum: EnumType, value: EnumType#Value)(implicit m: Manifest[EnumType#Value]) = { - this(owner, enum) - set(value) - } - - protected val valueManifest = m -} - -class OptionalEnumNameField[OwnerType <: Record[OwnerType], EnumType <: Enumeration](@deprecatedName('rec) val owner: OwnerType, - protected val enum: EnumType)(implicit m: Manifest[EnumType#Value] -) extends Field[EnumType#Value, OwnerType] with OptionalTypedField[EnumType#Value] with EnumNameTypedField[EnumType] { - - def this(@deprecatedName('rec) owner: OwnerType, enum: EnumType, value: Box[EnumType#Value])(implicit m: Manifest[EnumType#Value]) = { - this(owner, enum) - setBox(value) - } - - protected val valueManifest = m -} - diff --git a/persistence/record/src/main/scala/net/liftweb/record/field/IntField.scala b/persistence/record/src/main/scala/net/liftweb/record/field/IntField.scala deleted file mode 100644 index d02cf60ff2..0000000000 --- a/persistence/record/src/main/scala/net/liftweb/record/field/IntField.scala +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright 2007-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package record -package field - -import scala.xml._ -import net.liftweb.common._ -import net.liftweb.http.S -import json._ -import net.liftweb.util._ -import Helpers._ -import S._ - -trait IntTypedField extends NumericTypedField[Int] { - - def setFromAny(in: Any): Box[Int] = setNumericFromAny(in, _.intValue) - - def setFromString(s: String): Box[Int] = s match { - case null|"" if optional_? => setBox(Empty) - case null|"" => setBox(Failure(notOptionalErrorMessage)) - case _ => setBox(tryo(java.lang.Integer.parseInt(s))) - } - - def defaultValue = 0 - - def asJValue: JValue = valueBox.map(i => JInt(BigInt(i))) openOr (JNothing: JValue) - - def setFromJValue(jvalue: JValue): Box[Int] = jvalue match { - case JNothing|JNull if optional_? => setBox(Empty) - case JInt(i) => setBox(Full(i.intValue)) - case JDouble(d) => setBox(Full(d.toInt)) - case other => setBox(FieldHelpers.expectedA("JInt", other)) - } -} - -class IntField[OwnerType <: Record[OwnerType]](@deprecatedName('rec) val owner: OwnerType) - extends Field[Int, OwnerType] with MandatoryTypedField[Int] with IntTypedField { - - def this(@deprecatedName('rec) owner: OwnerType, value: Int) = { - this(owner) - set(value) - } -} - -class OptionalIntField[OwnerType <: Record[OwnerType]](@deprecatedName('rec) val owner: OwnerType) - extends Field[Int, OwnerType] with OptionalTypedField[Int] with IntTypedField { - - def this(@deprecatedName('rec) owner: OwnerType, value: Box[Int]) = { - this(owner) - setBox(value) - } -} - diff --git a/persistence/record/src/main/scala/net/liftweb/record/field/LocaleField.scala b/persistence/record/src/main/scala/net/liftweb/record/field/LocaleField.scala deleted file mode 100644 index a341ed447a..0000000000 --- a/persistence/record/src/main/scala/net/liftweb/record/field/LocaleField.scala +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright 2007-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package record -package field - -import java.util.{Locale} -import scala.xml._ - -import common._ -import util._ -import Helpers._ -import http.{S, SHtml} -import S._ - - -object LocaleField { - lazy val localeList = Locale - .getAvailableLocales.toList - .sortWith(_.getDisplayName < _.getDisplayName) - .map(lo => (lo.toString, lo.getDisplayName)) -} - -trait LocaleTypedField extends TypedField[String] { - /** Build a list of string pairs for a select list. 
*/ - def buildDisplayList: List[(String, String)] - - private def elem = SHtml.select(buildDisplayList, Full(valueBox.map(_.toString) openOr ""), - locale => setBox(Full(locale))) % ("tabindex" -> tabIndex.toString) - - override def toForm: Box[NodeSeq] = - uniqueFieldId match { - case Full(id) => Full(elem % ("id" -> id)) - case _ => Full(elem) - } -} - -class LocaleField[OwnerType <: Record[OwnerType]](@deprecatedName('rec) owner: OwnerType) - extends StringField(owner, 16) with LocaleTypedField { - - override def defaultValue = Locale.getDefault.toString - - def isAsLocale: Locale = Locale.getAvailableLocales.filter(_.toString == value).toList match { - case Nil => Locale.getDefault - case x :: xs => x - } - - def buildDisplayList: List[(String, String)] = LocaleField.localeList - -} - -class OptionalLocaleField[OwnerType <: Record[OwnerType]](@deprecatedName('rec) owner: OwnerType) - extends OptionalStringField(owner, 16) with LocaleTypedField { - - /** Label for the selection item representing Empty, show when this field is optional. Defaults to the empty string. */ - def emptyOptionLabel: String = "" - - def buildDisplayList: List[(String, String)] = ("", emptyOptionLabel)::LocaleField.localeList -} - diff --git a/persistence/record/src/main/scala/net/liftweb/record/field/LongField.scala b/persistence/record/src/main/scala/net/liftweb/record/field/LongField.scala deleted file mode 100644 index 5a70da19d6..0000000000 --- a/persistence/record/src/main/scala/net/liftweb/record/field/LongField.scala +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright 2007-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package record -package field - -import scala.xml._ -import net.liftweb.common._ -import net.liftweb.http.{S} -import json._ -import net.liftweb.util._ -import Helpers._ -import S._ - -trait LongTypedField extends NumericTypedField[Long] { - - def setFromAny(in: Any): Box[Long] = setNumericFromAny(in, _.longValue) - - def setFromString(s: String): Box[Long] = - if(s == null || s.isEmpty) { - if(optional_?) - setBox(Empty) - else - setBox(Failure(notOptionalErrorMessage)) - } else { - setBox(asLong(s)) - } - - def defaultValue = 0L - - def asJValue: JValue = valueBox.map(l => JInt(BigInt(l))) openOr (JNothing: JValue) - def setFromJValue(jvalue: JValue): Box[Long] = jvalue match { - case JNothing|JNull if optional_? 
=> setBox(Empty) - case JInt(i) => setBox(Full(i.longValue)) - case JDouble(d) => setBox(Full(d.toLong)) - case other => setBox(FieldHelpers.expectedA("JLong", other)) - } -} - -class LongField[OwnerType <: Record[OwnerType]](@deprecatedName('rec) val owner: OwnerType) - extends Field[Long, OwnerType] with MandatoryTypedField[Long] with LongTypedField { - - def this(@deprecatedName('rec) owner: OwnerType, value: Long) = { - this(owner) - set(value) - } -} - -class OptionalLongField[OwnerType <: Record[OwnerType]](@deprecatedName('rec) val owner: OwnerType) - extends Field[Long, OwnerType] with OptionalTypedField[Long] with LongTypedField { - - def this(@deprecatedName('rec) owner: OwnerType, value: Box[Long]) = { - this(owner) - setBox(value) - } -} - diff --git a/persistence/record/src/main/scala/net/liftweb/record/field/NumericField.scala b/persistence/record/src/main/scala/net/liftweb/record/field/NumericField.scala deleted file mode 100644 index 0df7c872ab..0000000000 --- a/persistence/record/src/main/scala/net/liftweb/record/field/NumericField.scala +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright 2007-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package record -package field - -import net.liftweb.http.{S} -import net.liftweb.http.js._ -import net.liftweb.util._ -import net.liftweb.common._ -import scala.reflect.Manifest -import scala.xml._ -import S._ -import Helpers._ -import JE._ - -trait NumericTypedField[MyType] extends TypedField[MyType] { - - /** Augments genericSetFromAny with support for values of type Number (optionally wrapped in any of the usual suspects) */ - protected final def setNumericFromAny(in: Any, f: Number => MyType)(implicit m: Manifest[MyType]): Box[MyType] = - in match { - case (n: Number) => setBox(Full(f(n))) - case Some(n: Number) => setBox(Full(f(n))) - case Full(n: Number) => setBox(Full(f(n))) - case (n: Number)::_ => setBox(Full(f(n))) - case _ => genericSetFromAny(in) - } - - private def elem = S.fmapFunc((s: List[String]) => setFromAny(s)) { - funcName => - } - - /** - * Returns form input of this field - */ - def toForm: Box[NodeSeq] = - uniqueFieldId match { - case Full(id) => Full(elem % ("id" -> id)) - case _ => Full(elem) - } - - override def noValueErrorMessage = S.?("number.required") - - def asJs = valueBox.map(v => JsRaw(String.valueOf(v))) openOr JsNull - -} - diff --git a/persistence/record/src/main/scala/net/liftweb/record/field/PasswordField.scala b/persistence/record/src/main/scala/net/liftweb/record/field/PasswordField.scala deleted file mode 100644 index 21be922e98..0000000000 --- a/persistence/record/src/main/scala/net/liftweb/record/field/PasswordField.scala +++ /dev/null @@ -1,170 +0,0 @@ -/* - * Copyright 2007-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package record -package field - -import scala.xml._ - -import common._ -import json._ -import util._ -import Helpers._ -import http.S -import http.js._ -import S._ -import JE._ - -import org.mindrot.jbcrypt.BCrypt - -object PasswordField { - @volatile var blankPw = "*******" - @volatile var minPasswordLength = 5 - @volatile var logRounds = 10 - def hashpw(in: String): Box[String] = tryo(BCrypt.hashpw(in, BCrypt.gensalt(logRounds))) -} - -trait PasswordTypedField extends TypedField[String] { - private var invalidPw = false - private var invalidMsg = "" - - def match_?(toTest: String): Boolean = - valueBox.filter(_.length > 0) - .flatMap(p => tryo(BCrypt.checkpw(toTest, p))) - .openOr(false) - - override def set_!(in: Box[String]): Box[String] = { - // can't be hashed here, because this get's called when setting value from database (Squeryl) - in - } - - def setPlain(in: String): String = setBoxPlain(Full(in)) openOr defaultValue - - def setBoxPlain(in: Box[String]): Box[String] = { - if(!validatePassword(in)) { - val hashed = in.map(s => PasswordField.hashpw(s) openOr s) - setBox(hashed) - } - else setBox(defaultValueBox) - } - - /** - * If passed value is an Array[String] or a List[String] containing 2 items with equal value, it it hashes this value and sets it as new password. - * If passed value is a String or a Full[String] that starts with "$2a$", it assumes that it's a hashed version, thus sets it as it is, without hashing. - * In any other case, it fails the validation with "Passwords do not match" error - */ - def setFromAny(in: Any): Box[String] = { - in match { - case (a: Array[String]) if a.length == 2 && a(0) == a(1) => setBoxPlain(Full(a(0))) - case (h1: String) :: (h2: String) :: Nil if h1 == h2 => setBoxPlain(Full(h1)) - case (hash: String) if hash.startsWith("$2a$") => setBox(Full(hash)) - case Full(hash: String) if hash.startsWith("$2a$") => setBox(Full(hash)) - case _ => - invalidPw = true - invalidMsg = S.?("passwords.do.not.match") - Failure(invalidMsg) - } - } - - def setFromString(s: String): Box[String] = s match { - case null|"" if optional_? => setBoxPlain(Empty) - case null|"" => setBoxPlain(Failure(notOptionalErrorMessage)) - case _ => setBoxPlain(Full(s)) - } - - override def validate: List[FieldError] = { - if (!invalidPw && valueBox != defaultValueBox) Nil - else if (invalidPw) List(FieldError(this, Text(invalidMsg))) - else List(FieldError(this, Text(notOptionalErrorMessage))) - } - - override def notOptionalErrorMessage = S.?("password.must.be.set") - - override def formInputType = "password" - - private def elem = S.fmapFunc(SFuncHolder(this.setFromAny(_))){ - funcName => } - - def toForm: Box[NodeSeq] = - uniqueFieldId match { - case Full(id) => Full(elem % ("id" -> id)) - case _ => Full(elem) - } - - protected def validatePassword(pwdValue: Box[String]): Boolean = { - pwdValue match { - case Empty|Full(""|null) if !optional_? 
=> { invalidPw = true ; invalidMsg = notOptionalErrorMessage } - case Full(s) if s == "" || s == PasswordField.blankPw || s.length < PasswordField.minPasswordLength => - { invalidPw = true ; invalidMsg = S.?("password.too.short") } - case _ => { invalidPw = false; invalidMsg = "" } - } - invalidPw - } - - def defaultValue = "" - - def asJs = valueBox.map(Str) openOr JsNull - - def asJValue: JValue = valueBox.map(v => JString(v)) openOr (JNothing: JValue) - - def setFromJValue(jvalue: JValue): Box[MyType] = jvalue match { - case JNothing|JNull if optional_? => setBoxPlain(Empty) - case JString(s) => setFromString(s) - case other => setBoxPlain(FieldHelpers.expectedA("JString", other)) - } - - -} - -class PasswordField[OwnerType <: Record[OwnerType]](@deprecatedName('rec) val owner: OwnerType) - extends Field[String, OwnerType] with MandatoryTypedField[String] with PasswordTypedField { - - def this(@deprecatedName('rec) owner: OwnerType, value: String) = { - this(owner) - setPlain(value) - } - - override def apply(in: Box[String]): OwnerType = - if(owner.meta.mutable_?) { - this.setBoxPlain(in) - owner - } else { - owner.meta.createWithMutableField(owner, this, in) - } - -} - -class OptionalPasswordField[OwnerType <: Record[OwnerType]](@deprecatedName('rec) val owner: OwnerType) - extends Field[String, OwnerType] with OptionalTypedField[String] with PasswordTypedField { - - def this(@deprecatedName('rec) owner: OwnerType, value: Box[String]) = { - this(owner) - setBoxPlain(value) - } - - override def apply(in: Box[String]): OwnerType = - if(owner.meta.mutable_?) { - this.setBoxPlain(in) - owner - } else { - owner.meta.createWithMutableField(owner, this, in) - } -} - diff --git a/persistence/record/src/main/scala/net/liftweb/record/field/PostalCodeField.scala b/persistence/record/src/main/scala/net/liftweb/record/field/PostalCodeField.scala deleted file mode 100644 index 71ce1c26c0..0000000000 --- a/persistence/record/src/main/scala/net/liftweb/record/field/PostalCodeField.scala +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright 2007-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package record -package field - -import scala.xml._ -import net.liftweb.util._ -import net.liftweb.common._ -import net.liftweb.http.{S} -import java.util.regex._ -import java.util.regex.{Pattern => RegexPattern} -import Helpers._ -import S._ - - -trait PostalCodeTypedField extends StringTypedField { - - protected val country: CountryField[_] - - override def setFilter = toUpper _ :: trim _ :: super.setFilter - - override def validations = validatePostalCode _ :: Nil - - def validatePostalCode(in: ValueType): List[FieldError] = { - toBoxMyType(in) match { - case Full(zip) if optional_? 
&& zip.isEmpty => Nil - case _ => - country.value match { - case Countries.USA => valRegex(RegexPattern.compile("[0-9]{5}(\\-[0-9]{4})?"), S.?("invalid.zip.code"))(in) - case Countries.Sweden => valRegex(RegexPattern.compile("[0-9]{3}[ ]?[0-9]{2}"), S.?("invalid.postal.code"))(in) - case Countries.Australia => valRegex(RegexPattern.compile("(0?|[1-9])[0-9]{3}"), S.?("invalid.postal.code"))(in) - case Countries.Canada => valRegex(RegexPattern.compile("[A-Z][0-9][A-Z][ ][0-9][A-Z][0-9]"), S.?("invalid.postal.code"))(in) - case _ => genericCheck(in) - } - } - } - private def genericCheck(zip: ValueType): List[FieldError] = { - toBoxMyType(zip) flatMap { - case null => Full(Text(S.?("invalid.postal.code"))) - case s if s.length < 3 => Full(Text(S.?("invalid.postal.code"))) - case _ => Empty - } - } -} - -class PostalCodeField[OwnerType <: Record[OwnerType]](@deprecatedName('rec) owner: OwnerType, val country: CountryField[OwnerType]) extends StringField(owner, 32) with PostalCodeTypedField - -class OptionalPostalCodeField[OwnerType <: Record[OwnerType]](@deprecatedName('rec) owner: OwnerType, val country: CountryField[OwnerType]) extends OptionalStringField(owner, 32) with PostalCodeTypedField - diff --git a/persistence/record/src/main/scala/net/liftweb/record/field/StringField.scala b/persistence/record/src/main/scala/net/liftweb/record/field/StringField.scala deleted file mode 100644 index 5580f03dda..0000000000 --- a/persistence/record/src/main/scala/net/liftweb/record/field/StringField.scala +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Copyright 2007-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package record -package field - -import scala.xml._ - -import common._ -import json._ -import util._ -import Helpers._ -import http.js._ -import http.S -import S._ -import JE._ - - -trait StringTypedField extends TypedField[String] with StringValidators { - val maxLength: Int - - def maxLen = maxLength - - def setFromAny(in: Any): Box[String] = in match { - case seq: Seq[_] if seq.nonEmpty => setFromAny(seq.head) - case _ => genericSetFromAny(in) - } - - def setFromString(s: String): Box[String] = s match { - case null|"" if optional_? => setBox(Empty) - case null|"" => setBox(Failure(notOptionalErrorMessage)) - case _ => setBox(Full(s)) - } - - private def elem = S.fmapFunc(SFuncHolder(this.setFromAny(_))) { - funcName => - - } - - def toForm: Box[NodeSeq] = - uniqueFieldId match { - case Full(id) => Full(elem % ("id" -> id)) - case _ => Full(elem) - } - - - def defaultValue = "" - - def asJs = valueBox.map(Str) openOr JsNull - - def asJValue: JValue = valueBox.map(v => JString(v)) openOr (JNothing: JValue) - def setFromJValue(jvalue: JValue): Box[MyType] = jvalue match { - case JNothing|JNull if optional_? 
=> setBox(Empty) - case JString(s) => setFromString(s) - case other => setBox(FieldHelpers.expectedA("JString", other)) - } -} - -class StringField[OwnerType <: Record[OwnerType]](@deprecatedName('rec) val owner: OwnerType, val maxLength: Int) - extends Field[String, OwnerType] with MandatoryTypedField[String] with StringTypedField { - - def this(@deprecatedName('rec) owner: OwnerType, maxLength: Int, value: String) = { - this(owner, maxLength) - set(value) - } - - def this(@deprecatedName('rec) owner: OwnerType, value: String) = { - this(owner, 100) - set(value) - } - - protected def valueTypeToBoxString(in: ValueType): Box[String] = toBoxMyType(in) - protected def boxStrToValType(in: Box[String]): ValueType = toValueType(in) -} - -abstract class UniqueIdField[OwnerType <: Record[OwnerType]](rec: OwnerType, override val maxLength: Int) extends StringField[OwnerType](rec, maxLength) { - override lazy val defaultValue = randomString(maxLen) - - def reset(): OwnerType = this(randomString(maxLen)) -} - - -class OptionalStringField[OwnerType <: Record[OwnerType]](@deprecatedName('rec) val owner: OwnerType, val maxLength: Int) - extends Field[String, OwnerType] with OptionalTypedField[String] with StringTypedField { - - def this(@deprecatedName('rec) owner: OwnerType, maxLength: Int, value: Box[String]) = { - this(owner, maxLength) - setBox(value) - } - - def this(@deprecatedName('rec) owner: OwnerType, value: Box[String]) = { - this(owner, 100) - setBox(value) - } - - protected def valueTypeToBoxString(in: ValueType): Box[String] = toBoxMyType(in) - protected def boxStrToValType(in: Box[String]): ValueType = toValueType(in) -} - diff --git a/persistence/record/src/main/scala/net/liftweb/record/field/TextareaField.scala b/persistence/record/src/main/scala/net/liftweb/record/field/TextareaField.scala deleted file mode 100644 index 31dd07a5f0..0000000000 --- a/persistence/record/src/main/scala/net/liftweb/record/field/TextareaField.scala +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright 2007-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package record -package field - -import scala.xml._ -import net.liftweb.util._ -import net.liftweb.common._ -import net.liftweb.http.{S} -import S._ -import Helpers._ - -trait TextareaTypedField extends StringTypedField { - private def elem = S.fmapFunc(SFuncHolder(this.setFromAny(_))){ - funcName => - } - - override def toForm: Box[NodeSeq] = - uniqueFieldId match { - case Full(id) => Full(elem % ("id" -> id)) - case _ => Full(elem) - } - - - override def toString = valueBox match { - case Full(s) if s.length >= 100 => s.substring(0,40) + " ... 
" + s.substring(s.length - 40) - case _ => super.toString - } - - def textareaRows = 8 - - def textareaCols = 20 -} - -class TextareaField[OwnerType <: Record[OwnerType]](@deprecatedName('rec) owner: OwnerType, maxLength: Int) - extends StringField(owner, maxLength) with TextareaTypedField - -class OptionalTextareaField[OwnerType <: Record[OwnerType]](@deprecatedName('rec) owner: OwnerType, maxLength: Int) - extends OptionalStringField(owner, maxLength) with TextareaTypedField - diff --git a/persistence/record/src/main/scala/net/liftweb/record/field/TimeZoneField.scala b/persistence/record/src/main/scala/net/liftweb/record/field/TimeZoneField.scala deleted file mode 100644 index 9c4d4015e5..0000000000 --- a/persistence/record/src/main/scala/net/liftweb/record/field/TimeZoneField.scala +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright 2007-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package record -package field - -import java.util.TimeZone -import scala.xml._ - -import common._ -import util._ -import Helpers._ -import http.{S, SHtml} -import S._ - - -object TimeZoneField { - lazy val timeZoneList: List[(String, String)] = TimeZone.getAvailableIDs.toList. - filter(!_.startsWith("SystemV/")). - filter(!_.startsWith("Etc/")).filter(_.length > 3). - sortWith(_ < _).map(tz => (tz, tz)) -} - -trait TimeZoneTypedField extends StringTypedField { - /** Label for the selection item representing Empty, show when this field is optional. Defaults to the empty string. */ - def emptyOptionLabel: String = "" - - def buildDisplayList: List[(String, String)] = - if (optional_?) 
("", emptyOptionLabel)::TimeZoneField.timeZoneList else TimeZoneField.timeZoneList - - private def elem = SHtml.select(buildDisplayList, Full(valueBox openOr ""), - timezone => setBox(Full(timezone))) % ("tabindex" -> tabIndex.toString) - - override def toForm: Box[NodeSeq] = - uniqueFieldId match { - case Full(id) => Full(elem % ("id" -> id)) - case _ => Full(elem) - } -} - -class TimeZoneField[OwnerType <: Record[OwnerType]](@deprecatedName('rec) owner: OwnerType) - extends StringField(owner, 32) with TimeZoneTypedField { - - override def defaultValue = TimeZone.getDefault.getID - - def isAsTimeZone: TimeZone = TimeZone.getTimeZone(value) match { - case null => TimeZone.getDefault - case x => x - } -} - -class OptionalTimeZoneField[OwnerType <: Record[OwnerType]](@deprecatedName('rec) owner: OwnerType) - extends OptionalStringField(owner, 32) with TimeZoneTypedField - diff --git a/persistence/record/src/main/scala/net/liftweb/record/field/joda/JodaTimeField.scala b/persistence/record/src/main/scala/net/liftweb/record/field/joda/JodaTimeField.scala deleted file mode 100644 index f33642a4c1..0000000000 --- a/persistence/record/src/main/scala/net/liftweb/record/field/joda/JodaTimeField.scala +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Copyright 2013 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package record -package field -package joda - -import scala.xml._ - -import common._ -import http.S -import http.js._ -import json._ -import util._ - -import Helpers._ -import S._ -import JE._ - -import org.joda.time._ - -trait JodaTimeTypedField extends TypedField[DateTime] with JodaHelpers { - - def setFromAny(in: Any): Box[DateTime] = toDateTime(in).flatMap(d => setBox(Full(d))) or genericSetFromAny(in) - - def setFromString(s: String): Box[DateTime] = s match { - case null|"" if optional_? => setBox(Empty) - case null|"" => setBox(Failure(notOptionalErrorMessage)) - case other => setBox(toDateTime(other)) - } - - private def elem = - S.fmapFunc(SFuncHolder(this.setFromAny(_))){funcName => - dateTimeFormatter.print(v)) openOr ""} - tabindex={tabIndex.toString}/> - } - - def toForm: Box[NodeSeq] = - uniqueFieldId match { - case Full(id) => Full(elem % ("id" -> id)) - case _ => Full(elem) - } - - def asJs = valueBox.map(v => Num(v.getMillis)) openOr JsNull - - protected def asJInt(encode: MyType => BigInt): JValue = - valueBox.map(v => JInt(encode(v))) openOr (JNothing: JValue) - - def asJValue: JValue = asJInt(v => v.getMillis) - def setFromJValue(jvalue: JValue) = setFromJInt(jvalue) { - v => toDateTime(v) - } - - protected def setFromJInt(jvalue: JValue)(decode: BigInt => Box[MyType]): Box[MyType] = jvalue match { - case JNothing|JNull if optional_? 
=> setBox(Empty) - case JInt(n) => setBox(decode(n)) - case other => setBox(FieldHelpers.expectedA("JInt", other)) - } -} - -class JodaTimeField[OwnerType <: Record[OwnerType]](@deprecatedName('rec) val owner: OwnerType) - extends Field[DateTime, OwnerType] with MandatoryTypedField[DateTime] with JodaTimeTypedField { - - def this(@deprecatedName('rec) owner: OwnerType, value: DateTime) = { - this(owner) - setBox(Full(value)) - } - - def defaultValue = DateTime.now -} - -class OptionalJodaTimeField[OwnerType <: Record[OwnerType]](@deprecatedName('rec) val owner: OwnerType) - extends Field[DateTime, OwnerType] with OptionalTypedField[DateTime] with JodaTimeTypedField { - - def this(@deprecatedName('rec) owner: OwnerType, value: Box[DateTime]) = { - this(owner) - setBox(value) - } -} diff --git a/persistence/record/src/test/scala/net/liftweb/record/FieldSpec.scala b/persistence/record/src/test/scala/net/liftweb/record/FieldSpec.scala deleted file mode 100644 index fdb0e5056b..0000000000 --- a/persistence/record/src/test/scala/net/liftweb/record/FieldSpec.scala +++ /dev/null @@ -1,732 +0,0 @@ -/* - * Copyright 2010-2012 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package record - -import field.{Countries, PasswordField, StringField} - -import common.{Box, Empty, Failure, Full} -import http.{LiftSession, S} -import http.js.JE._ -import http.js.JsExp -import json.JsonAST._ -import util.Helpers._ - -import java.util.Calendar -import org.specs2.mutable._ -import org.joda.time.DateTime - -import fixtures._ -import net.liftweb.util.{Helpers, FieldError, JodaHelpers} -import scala.xml.{NodeSeq, Elem, Node, Text} - - -/** - * Systems under specification for RecordField. - */ -class FieldSpec extends Specification { - "Record Field Specification".title - sequential - - lazy val session = new LiftSession("", randomString(20), Empty) - - def passBasicTests[A](example: A, example2: A, mandatory: MandatoryTypedField[A], legacyOptional: MandatoryTypedField[A], optional: OptionalTypedField[A], canCheckDefaultValues: Boolean = true)(implicit m: scala.reflect.Manifest[A]) = { - def commonBehaviorsForMandatory(in: MandatoryTypedField[A]): Unit = { - - if (canCheckDefaultValues) { - "which have the correct initial value" in S.initIfUninitted(session) { - in.get must_== in.defaultValue - } - } - - "which are readable and writable" in { - in.set(example) - in.get must_== example - in.clear - in.get must_!= example - in.setBox(Box !! example) - in.get must_== example - in.clear - success - } - - if (canCheckDefaultValues) { - "which correctly clear back to the default value" in { - in.set(example) - in.clear - in.get must_== in.defaultValue - } - } - - if(!in.optional_?) 
{ - "which fail when set with an empty string when not optional" in { - in.setFromString(null) - in.valueBox must beLike { case f: Failure => ok } - in.setFromString("") - in.valueBox must beLike { case f: Failure => ok } - } - } else { - "which don't fail when set with an empty string when optional" in { - in.setFromString(null) - in.valueBox must_== Empty - in.setFromString("") - in.valueBox must_== Empty - } - } - } - - def commonBehaviorsForAllFlavors(in: TypedField[A]): Unit = { - if (canCheckDefaultValues) { - "which have the correct initial boxed value" in { - in match { - case mandatory: MandatoryTypedField[_] => - mandatory.value must_== mandatory.defaultValue - case _ => () - } - in.valueBox must_== in.defaultValueBox - } - } - - "which have readable and writable boxed values" in S.initIfUninitted(session) { - in.setBox(Full(example)) - in.valueBox.isDefined must_== true - in.valueBox must_== Full(example) - in.clear - in.valueBox must_!= Full(example) - } - - if (canCheckDefaultValues) { - "which correctly clear back to the default box value" in S.initIfUninitted(session) { - in.setBox(Full(example)) - in.valueBox.isDefined must_== true - in.clear - in.valueBox must_== in.defaultValueBox - } - } - - "which capture error conditions set in" in { - val old = in.valueBox - in.setBox(Failure("my failure")) - in.valueBox must_== Failure("my failure") - in.setBox(old) - success - } - - "which are only flagged as dirty_? when setBox is called with a different value" in { - in.clear - in match { - case owned: OwnedField[_] => owned.owner.runSafe { - in.resetDirty - } - case _ => in.resetDirty - } - in.dirty_? must_== false - val valueBox = in.valueBox - in.setBox(valueBox) - in.dirty_? must_== false - val exampleBox = Full(example) - (valueBox === exampleBox) must_== false - in.setBox(exampleBox) - in.dirty_? must_== true - val exampleBox2 = Full(example2) - (exampleBox === exampleBox2) must_== false - in.setBox(exampleBox2) - in.dirty_? must_== true - //dirty value should remain true, even if the same value is set twice before persisting - in.setBox(exampleBox) - in.dirty_? must_== true - in.setBox(valueBox) - success - } - } - - "support mandatory fields" in { - commonBehaviorsForAllFlavors(mandatory) - commonBehaviorsForMandatory(mandatory) - - "which are configured correctly" in { - mandatory.optional_? must_== false - } - - "which initialize to some value" in { - mandatory.clear - mandatory.valueBox.isDefined must_== true - } - - "which correctly fail to be set to Empty" in { - mandatory.valueBox.isDefined must_== true - mandatory.setBox(Empty) - mandatory.valueBox must beLike { case Failure(s, _, _) => s must_== mandatory.notOptionalErrorMessage} - } - } - - "support 'legacy' optional fields (override optional_?)" in { - commonBehaviorsForAllFlavors(legacyOptional) - commonBehaviorsForMandatory(legacyOptional) - - "which are configured correctly" in { - legacyOptional.optional_? 
must_== true - } - - "which initialize to Empty" in { - legacyOptional.valueBox must_== Empty - } - - "which do not fail when set to Empty" in { - legacyOptional.set(example) - legacyOptional.value must_== example - legacyOptional.valueBox must_== Full(example) - legacyOptional.clear - if (canCheckDefaultValues) { - legacyOptional.value must_== legacyOptional.defaultValue - legacyOptional.valueBox must_== legacyOptional.defaultValueBox - } - legacyOptional.set(example) - legacyOptional.value must_== example - legacyOptional.valueBox must_== Full(example) - legacyOptional.setBox(Empty) - if (canCheckDefaultValues) { - legacyOptional.value must_== legacyOptional.defaultValue - legacyOptional.valueBox must_== legacyOptional.defaultValueBox - } - success - } - } - - "support optional fields" in { - commonBehaviorsForAllFlavors(optional) - - "which are configured correctly" in { - optional.optional_? must_== true - } - - "which initialize to Empty" in { - optional.valueBox must_== Empty - } - - "which don't fail when set with an empty string" in { - optional.setFromString(null) - optional.value must_== None - optional.valueBox must_== Empty - optional.setFromString("") - optional.value must_== None - optional.valueBox must_== Empty - } - - "which do not fail when set to Empty" in { - optional.set(Some(example)) - optional.value must_== Some(example) - optional.valueBox must_== Full(example) - optional.set(None) - optional.value must_== None - optional.valueBox must_== Empty - optional.set(Some(example)) - optional.value must_== Some(example) - optional.valueBox must_== Full(example) - optional.setBox(Empty) - optional.value must_== None - optional.valueBox must_== Empty - } - } - success - } - - def passConversionTests[A](example: A, mandatory: MandatoryTypedField[A], jsexp: JsExp, jvalue: JValue, formPattern: Box[NodeSeq]) = { - - "convert to JsExp" in S.initIfUninitted(session) { - mandatory.set(example) - mandatory.asJs mustEqual jsexp - } - - "convert to JValue" in { - mandatory.set(example) - mandatory.asJValue mustEqual jvalue - } - - // toInternetDate doesn't retain millisecond data so, dates can't be compared accurately. 
- if (!mandatory.defaultValue.isInstanceOf[Calendar]) { - "get set from JValue" in { - mandatory.setFromJValue(jvalue) mustEqual Full(example) - mandatory.value mustEqual example - } - } - - formPattern foreach { fp => - "convert to form XML" in { - mandatory.set(example) - val session = new LiftSession("", randomString(20), Empty) - S.initIfUninitted(session) { - val formXml = mandatory.toForm - formXml must beLike { - case Full(fprime) => - val f = ("* [name]" #> ".*" & "select *" #> (((ns: NodeSeq) => ns.filter { - case e: Elem => e.attribute("selected").map(_.text) == Some("selected") - case _ => false - }) andThen "* [value]" #> ".*"))(fprime) - val ret: Boolean = Helpers.compareXml(f, fp) - ret must_== true - } - } - } - } - success - } - - /* Since Array[Byte]s cannot be compared, commenting out this test for now - "BinaryField" should { - val rec = FieldTypeTestRecord.createRecord - val a = new Array[Byte](3) - a(0) = 1 - a(1) = 2 - a(2) = 3 - passBasicTests(a, rec.mandatoryBinaryField, rec.legacyOptionalBinaryField, rec.optionalBinaryField) - } - */ - - "BooleanField" should { - val rec = FieldTypeTestRecord.createRecord - val bool = true - val bool2 = false - passBasicTests(bool, bool2, rec.mandatoryBooleanField, rec.legacyOptionalBooleanField, rec.optionalBooleanField) - passConversionTests( - bool, - rec.mandatoryBooleanField, - JsTrue, - JBool(bool), - Full() - ) - "support java.lang.Boolean" in { - rec.mandatoryBooleanField.setFromAny(java.lang.Boolean.TRUE) - rec.optionalBooleanField.setFromAny(java.lang.Boolean.TRUE) - (rec.mandatoryBooleanField.get && (rec.optionalBooleanField.get getOrElse false)) must_== true - } - "support Full(java.lang.Boolean)" in { - rec.mandatoryBooleanField.setFromAny(Full(java.lang.Boolean.TRUE)) - rec.optionalBooleanField.setFromAny(Full(java.lang.Boolean.TRUE)) - (rec.mandatoryBooleanField.get && (rec.optionalBooleanField.get getOrElse false)) must_== true - } - "support Some(java.lang.Boolean)" in { - rec.mandatoryBooleanField.setFromAny(Some(java.lang.Boolean.TRUE)) - rec.optionalBooleanField.setFromAny(Some(java.lang.Boolean.TRUE)) - (rec.mandatoryBooleanField.get && (rec.optionalBooleanField.get getOrElse false)) must_== true - } - } - - "CountryField" should { - val session = new LiftSession("", randomString(20), Empty) - S.initIfUninitted(session){ - val rec = FieldTypeTestRecord.createRecord - val country = Countries.Canada - val country2 = Countries.USA - passBasicTests(country, country2, rec.mandatoryCountryField, rec.legacyOptionalCountryField, rec.optionalCountryField) - passConversionTests( - country, - rec.mandatoryCountryField, - Str(country.toString), - JInt(country.id), - Full() - ) - } - } - - "DateTimeField" should { - val rec = FieldTypeTestRecord.createRecord - val dt = Calendar.getInstance - val dt2 = Calendar.getInstance - dt2.add(Calendar.DATE, 1) - val dtStr = toInternetDate(dt.getTime) - // don't try to use the default value of date/time typed fields, because it changes from moment to moment! 
- passBasicTests(dt, dt2, rec.mandatoryDateTimeField, rec.legacyOptionalDateTimeField, rec.optionalDateTimeField, false) - passConversionTests( - dt, - rec.mandatoryDateTimeField, - Str(dtStr), - JString(dtStr), - Full() - ) - } - - "DateTimeField with custom format" should { - val rec = CustomFormatDateTimeRecord.createRecord - val dt = Calendar.getInstance - val dtStr = rec.customFormatDateTimeField.formats.dateFormat.format(dt.getTime) - passConversionTests( - dt, - rec.customFormatDateTimeField, - Str(dtStr), - JString(dtStr), - Full() - ) - } - - "DecimalField" should { - val rec = FieldTypeTestRecord.createRecord - val bd = BigDecimal("12.34") - val bd2 = BigDecimal("1.22") - passBasicTests(bd, bd2, rec.mandatoryDecimalField, rec.legacyOptionalDecimalField, rec.optionalDecimalField) - passConversionTests( - bd, - rec.mandatoryDecimalField, - JsRaw(bd.toString), - JString(bd.toString), - Full() - ) - } - - "DoubleField" should { - val rec = FieldTypeTestRecord.createRecord - val d = 12.34 - val d2 = 1.00 - passBasicTests(d, d2, rec.mandatoryDoubleField, rec.legacyOptionalDoubleField, rec.optionalDoubleField) - passConversionTests( - d, - rec.mandatoryDoubleField, - JsRaw(d.toString), - JDouble(d), - Full() - ) - - "get set from JInt" in { - rec.mandatoryDoubleField.setFromJValue( JInt(1) ) mustEqual Full( d2 ) - rec.mandatoryDoubleField.value mustEqual d2 - } - } - - "EmailField" should { - val session = new LiftSession("", randomString(20), Empty) - val rec = FieldTypeTestRecord.createRecord - val email = "foo@bar.baz" - val email2 = "foo2@bar.baz" - passBasicTests(email, email2, rec.mandatoryEmailField, rec.legacyOptionalEmailField, rec.optionalEmailField) - passConversionTests( - email, - rec.mandatoryEmailField, - Str(email), - JString(email), - Full() - ) - "pass validation if field is optional and value is Empty" in { - S.initIfUninitted(session) { - rec.legacyOptionalEmailField(Empty) - rec.legacyOptionalEmailField.validate must have length(0) - - rec.optionalEmailField(Empty) - rec.optionalEmailField.validate must have length(0) - } - } - "pass validation if field is optional and value is an empty string" in { - S.initIfUninitted(session) { - rec.legacyOptionalEmailField("") - rec.legacyOptionalEmailField.validate must have length(0) - - rec.optionalEmailField("") - rec.optionalEmailField.validate must have length(0) - } - } - "fail validation if value is invalid" in { - S.initIfUninitted(session) { - rec.mandatoryEmailField("invalid email") - rec.mandatoryEmailField.validate must have length(1) - } - } - } - - "EnumField" should { - val rec = FieldTypeTestRecord.createRecord - val ev = MyTestEnum.TWO - val ev2 = MyTestEnum.ONE - passBasicTests(ev, ev2, rec.mandatoryEnumField, rec.legacyOptionalEnumField, rec.optionalEnumField) - passConversionTests( - ev, - rec.mandatoryEnumField, - Str(ev.toString), - JInt(ev.id), - Full() - ) - } - - "IntField" should { - val rec = FieldTypeTestRecord.createRecord - val num = 123 - val num2 = 456 - passBasicTests(num, num2, rec.mandatoryIntField, rec.legacyOptionalIntField, rec.optionalIntField) - passConversionTests( - num, - rec.mandatoryIntField, - JsRaw(num.toString), - JInt(num), - Full() - ) - - "get set from JDouble" in { - rec.mandatoryIntField.setFromJValue( JDouble(num) ) mustEqual Full( num ) - rec.mandatoryIntField.value mustEqual num - } - } - - "IntField with custom HTML5 type" should { - val rec = CustomTypeIntFieldRecord.createRecord - val num = 123 - passConversionTests( - num, - rec.customIntField, - JsRaw(num.toString), - 
JInt(num), - Full() - ) - } - - "LocaleField" should { - val rec = FieldTypeTestRecord.createRecord - val example = java.util.Locale.getDefault.toString match { - case "en_US" => "en_GB" - case _ => "en_US" - } - val example2 = java.util.Locale.getDefault.toString match { - case "es_ES" => "en_NZ" - case _ => "es_ES" - } - passBasicTests(example, example2, rec.mandatoryLocaleField, rec.legacyOptionalLocaleField, rec.optionalLocaleField) - } - - "LongField" should { - val rec = FieldTypeTestRecord.createRecord - val lng = 1234L - val lng2 = 5678L - passBasicTests(lng, lng2, rec.mandatoryLongField, rec.legacyOptionalLongField, rec.optionalLongField) - passConversionTests( - lng, - rec.mandatoryLongField, - JsRaw(lng.toString), - JInt(lng), - Full() - ) - - "get set from JDouble" in { - rec.mandatoryLongField.setFromJValue( JDouble(lng) ) mustEqual Full( lng ) - rec.mandatoryLongField.value mustEqual lng - } - } - - "PasswordField" should { - "require a nonempty password" in S.initIfUninitted(session) { - val rec = PasswordTestRecord.createRecord.password("") - - rec.validate must_== ( - FieldError(rec.password, Text(S.?("password.must.be.set"))) :: - Nil - ) - } - - "correctly validate the unencrypted value" in S.initIfUninitted(session) { - val rec = PasswordTestRecord.createRecord.password("testvalue") - rec.validate must_== Nil - - rec.password("1234") - rec.validate must_== ( - FieldError(rec.password, Text(S.?("password.too.short"))) :: - Nil - ) - } - - "match with encrypted value" in { - val rec = PasswordTestRecord.createRecord.password("testpassword") - rec.password.match_?("testpassword") must_== true - - rec.password.set("$2a$10$6CJWdXpKoP8bVTjGH8SbKOWevNQVL8MkYVlBLmqtywVi7dp/YgPXC") - rec.password.match_?("dummyPassw0rd") must_== true - } - } - - "PostalCodeField" should { - val session = new LiftSession("", randomString(20), Empty) - val rec = FieldTypeTestRecord.createRecord - val zip = "02452" - val zip2 = "03344" - rec.mandatoryCountryField.set(Countries.USA) - passBasicTests(zip, zip2, rec.mandatoryPostalCodeField, rec.legacyOptionalPostalCodeField, rec.optionalPostalCodeField) - passConversionTests( - zip, - rec.mandatoryPostalCodeField, - Str(zip), - JString(zip), - Full() - ) - "pass validation if field is optional and value is Empty" in { - S.initIfUninitted(session) { - rec.legacyOptionalPostalCodeField(Empty) - rec.legacyOptionalPostalCodeField.validate must have length(0) - - rec.optionalPostalCodeField(Empty) - rec.optionalPostalCodeField.validate must have length(0) - } - } - "pass validation if field is optional and value is an empty string" in { - S.initIfUninitted(session) { - rec.legacyOptionalPostalCodeField("") - rec.legacyOptionalPostalCodeField.validate must have length(0) - - rec.optionalPostalCodeField("") - rec.optionalPostalCodeField.validate must have length(0) - } - } - "fail validation if value is invalid" in { - S.initIfUninitted(session) { - rec.mandatoryPostalCodeField("invalid zip") - rec.mandatoryPostalCodeField.validate must have length(1) - } - } - } - - "StringField" should { - { - val rec = FieldTypeTestRecord.createRecord - val str = "foobar" - val str2 = "foobaz" - passBasicTests(str, str2, rec.mandatoryStringField, rec.legacyOptionalStringField, rec.optionalStringField) - passConversionTests( - str, - rec.mandatoryStringField, - Str(str), - JString(str), - Full() - ) - } - - "honor validators configured in the usual way" in { - val rec = StringTestRecord.createRecord - - rec.validate must_== ( - FieldError(rec.string, Text("String field 
name must be at least 3 characters.")) :: - Nil - ) - } - - "honor harnessed validators" in { - val rec = ValidationTestRecord.createRecord - val field = rec.stringFieldWithValidation - - "which always succeed" in { - field.validationHarness = _ => Nil - rec.validate must_== Nil - } - - "which always fail" in { - val fieldError = FieldError(field, Text("failed")) - field.validationHarness = s => FieldError(rec.stringFieldWithValidation, Text("failed")) :: Nil - rec.validate must_== (fieldError :: Nil) - } - - "which receive the value" in { - var received: String = null - field.set("foobar") - field.validationHarness = s => { received = s; Nil } - rec.validate must_== Nil - received must_== "foobar" - } - } - - "support filtering" in { - val rec = FilterTestRecord.createRecord - val field = rec.stringFieldWithFiltering - - "which does nothing" in { - field.set("foobar") - field.value must_== "foobar" - field.valueBox must_== Full("foobar") - } - - "which trims the input at the value level" in { - field.setFilterHarness = _.trim - field.set(" foobar ") - field.value must_== "foobar" - field.valueBox must_== Full("foobar") - } - - "which trims the input at the box level" in { - field.setFilterBoxHarness = _.map(_.trim) - field.set(" foobar ") - field.value must_== "foobar" - field.valueBox must_== Full("foobar") - } - - "which Empties the box" in { - field.setFilterBoxHarness = s => Empty - field.set("foobar") - field.value must_== field.defaultValue - field.valueBox must_== Empty - } - - "which Fails" in { - field.setFilterBoxHarness = s => Failure("my failure") - field.set("foobar") - field.value must_== field.defaultValue - field.valueBox must_== Failure("my failure") - } - } - } - - "TextareaField" should { - val rec = FieldTypeTestRecord.createRecord - val txt = "foobar" - val txt2 = "foobaz" - passBasicTests(txt, txt2, rec.mandatoryTextareaField, rec.legacyOptionalTextareaField, rec.optionalTextareaField) - passConversionTests( - txt, - rec.mandatoryTextareaField, - Str(txt), - JString(txt), - Full() - ) - } - - "TimeZoneField" should { - val rec = FieldTypeTestRecord.createRecord - val example = java.util.TimeZone.getDefault.getID match { - case "America/New_York" => "Europe/London" - case _ => "America/New_York" - } - val example2 = java.util.TimeZone.getDefault.getID match { - case "America/Chicago" => "Europe/Paris" - case _ => "America/Chicago" - } - passBasicTests(example, example2, rec.mandatoryTimeZoneField, rec.legacyOptionalTimeZoneField, rec.optionalTimeZoneField) - passConversionTests( - example, - rec.mandatoryTimeZoneField, - Str(example), - JString(example), - Full() - ) - } - - "JodaTimeField" should { - val rec = FieldTypeTestRecord.createRecord - val dt = DateTime.now - val dt2 = DateTime.now.plusDays(1) - val dtStr = JodaHelpers.dateTimeFormatter.print(dt) - passBasicTests(dt, dt2, rec.mandatoryJodaTimeField, rec.legacyOptionalJodaTimeField, rec.optionalJodaTimeField, false) - passConversionTests( - dt, - rec.mandatoryJodaTimeField, - Num(dt.getMillis), - JInt(dt.getMillis), - Full() - ) - } -} - diff --git a/persistence/record/src/test/scala/net/liftweb/record/Fixtures.scala b/persistence/record/src/test/scala/net/liftweb/record/Fixtures.scala deleted file mode 100644 index f90d921897..0000000000 --- a/persistence/record/src/test/scala/net/liftweb/record/Fixtures.scala +++ /dev/null @@ -1,278 +0,0 @@ -/* - * Copyright 2010-2014 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in 
compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package record -package fixtures - -import java.math.MathContext -import scala.xml.Text -import common.{Box, Empty, Full} -import json._ -import util.{FieldError, Helpers} -import org.specs2.mutable._ - -import field._ -import field.joda._ - -class BasicTestRecord private () extends Record[BasicTestRecord] { - def meta = BasicTestRecord - - object field1 extends StringField(this,10) - object field2 extends StringField(this,10) - object fieldThree extends StringField(this,10) -} - -object BasicTestRecord extends BasicTestRecord with MetaRecord[BasicTestRecord] { - override def fieldOrder = List(field2,field1) -} - -class PasswordTestRecord private () extends Record[PasswordTestRecord] { - def meta = PasswordTestRecord - - object password extends PasswordField(this) { - override def validations = validateNonEmptyPassword _ :: - super.validations - - def validateNonEmptyPassword(v: String): List[FieldError] = - v match { - case "testvalue" => Text("no way!") - case _ => Nil - } - } -} - -object PasswordTestRecord extends PasswordTestRecord with MetaRecord[PasswordTestRecord] - -class StringTestRecord private () extends Record[StringTestRecord] { - def meta = StringTestRecord - - object string extends StringField(this, 32) { - override def validations = - valMinLen(3, "String field name must be at least 3 characters.") _ :: - super.validations - } -} - -object StringTestRecord extends StringTestRecord with MetaRecord[StringTestRecord] - -object MyTestEnum extends Enumeration { - val ONE = Value("ONE") - val TWO = Value("TWO") - val THREE = Value("THREE") -} - -trait HarnessedLifecycleCallbacks extends LifecycleCallbacks { - this: BaseField => - - var beforeValidationHarness: () => Unit = () => () - override def beforeValidation = beforeValidationHarness() - var afterValidationHarness: () => Unit = () => () - override def afterValidation = afterValidationHarness() - - var beforeSaveHarness: () => Unit = () => () - override def beforeSave = beforeSaveHarness() - var beforeCreateHarness: () => Unit = () => () - override def beforeCreate = beforeCreateHarness() - var beforeUpdateHarness: () => Unit = () => () - override def beforeUpdate = beforeUpdateHarness() - - var afterSaveHarness: () => Unit = () => () - override def afterSave = afterSaveHarness() - var afterCreateHarness: () => Unit = () => () - override def afterCreate = afterCreateHarness() - var afterUpdateHarness: () => Unit = () => () - override def afterUpdate = afterUpdateHarness() - - var beforeDeleteHarness: () => Unit = () => () - override def beforeDelete = beforeDeleteHarness() - var afterDeleteHarness: () => Unit = () => () - override def afterDelete = afterDeleteHarness() -} - -class LifecycleTestRecord private () extends Record[LifecycleTestRecord] { - def meta = LifecycleTestRecord - - def foreachCallback(f: LifecycleCallbacks => Any): Unit = - meta.foreachCallback(this, f) - - object stringFieldWithCallbacks extends StringField(this, 100) with HarnessedLifecycleCallbacks -} - -object LifecycleTestRecord extends LifecycleTestRecord with 
MetaRecord[LifecycleTestRecord] - - -class ValidationTestRecord private() extends Record[ValidationTestRecord] { - def meta = ValidationTestRecord - - object stringFieldWithValidation extends StringField(this, 100) { - var validationHarness: ValueType => List[FieldError] = x => Nil - override def validations = validationHarness :: super.validations - } -} - -object ValidationTestRecord extends ValidationTestRecord with MetaRecord[ValidationTestRecord] - - -class FilterTestRecord private() extends Record[FilterTestRecord] { - def meta = FilterTestRecord - - object stringFieldWithFiltering extends StringField(this, 100) { - var setFilterHarness: ValueType => ValueType = identity _ - override def setFilter = setFilterHarness :: super.setFilter - - var setFilterBoxHarness: Box[MyType] => Box[MyType] = identity _ - override protected def setFilterBox = setFilterBoxHarness :: super.setFilterBox - } -} - -object FilterTestRecord extends FilterTestRecord with MetaRecord[FilterTestRecord] - - -class FieldTypeTestRecord private () extends Record[FieldTypeTestRecord] { - def meta = FieldTypeTestRecord - - object mandatoryBinaryField extends BinaryField(this) - object legacyOptionalBinaryField extends BinaryField(this) { override def optional_? = true } - object optionalBinaryField extends OptionalBinaryField(this) - - object mandatoryBooleanField extends BooleanField(this) - object legacyOptionalBooleanField extends BooleanField(this) { override def optional_? = true } - object optionalBooleanField extends OptionalBooleanField(this) - - object mandatoryCountryField extends CountryField(this) - object legacyOptionalCountryField extends CountryField(this) { override def optional_? = true } - object optionalCountryField extends OptionalCountryField(this) - - object mandatoryDateTimeField extends DateTimeField(this) - object legacyOptionalDateTimeField extends DateTimeField(this) { override def optional_? = true } - object optionalDateTimeField extends OptionalDateTimeField(this) - - object mandatoryDecimalField extends DecimalField(this, MathContext.UNLIMITED, 2) - object legacyOptionalDecimalField extends DecimalField(this, MathContext.UNLIMITED, 2) { override def optional_? = true } - object optionalDecimalField extends OptionalDecimalField(this, MathContext.UNLIMITED, 2) - - object mandatoryDoubleField extends DoubleField(this) - object legacyOptionalDoubleField extends DoubleField(this) { override def optional_? = true } - object optionalDoubleField extends OptionalDoubleField(this) - - object mandatoryEmailField extends EmailField(this, 100) - object legacyOptionalEmailField extends EmailField(this, 100) { override def optional_? = true } - object optionalEmailField extends OptionalEmailField(this, 100) - - object mandatoryEnumField extends EnumField(this, MyTestEnum) - object legacyOptionalEnumField extends EnumField(this, MyTestEnum) { override def optional_? = true } - object optionalEnumField extends OptionalEnumField(this, MyTestEnum) - - object mandatoryIntField extends IntField(this) - object legacyOptionalIntField extends IntField(this) { override def optional_? = true } - object optionalIntField extends OptionalIntField(this) - - object mandatoryLocaleField extends LocaleField(this) - object legacyOptionalLocaleField extends LocaleField(this) { override def optional_? = true } - object optionalLocaleField extends OptionalLocaleField(this) - - object mandatoryLongField extends LongField(this) - object legacyOptionalLongField extends LongField(this) { override def optional_? 
= true } - object optionalLongField extends OptionalLongField(this) - - /* - object mandatoryPasswordField extends PasswordField(this) - object legacyOptionalPasswordField extends PasswordField(this) { override def optional_? = true } - object optionalPasswordField extends OptionalPasswordField(this) - */ - - // FIXME would be nice to have some of these PostalCode fields depend on an OptionalCountryField, but the type sig of - // PostalCodeField does not yet allow it. - object mandatoryPostalCodeField extends PostalCodeField(this, mandatoryCountryField) - object legacyOptionalPostalCodeField extends PostalCodeField(this, mandatoryCountryField) { override def optional_? = true } - object optionalPostalCodeField extends OptionalPostalCodeField(this, mandatoryCountryField) - - object mandatoryStringField extends StringField(this, 100) - object legacyOptionalStringField extends StringField(this, 100) { override def optional_? = true } - object optionalStringField extends OptionalStringField(this, 100) - - object mandatoryTextareaField extends TextareaField(this, 100) - object legacyOptionalTextareaField extends TextareaField(this, 100) { override def optional_? = true } - object optionalTextareaField extends OptionalTextareaField(this, 100) - - object mandatoryTimeZoneField extends TimeZoneField(this) - object legacyOptionalTimeZoneField extends TimeZoneField(this) { override def optional_? = true } - object optionalTimeZoneField extends OptionalTimeZoneField(this) - - object mandatoryJodaTimeField extends JodaTimeField(this) - object legacyOptionalJodaTimeField extends JodaTimeField(this) { override def optional_? = true } - object optionalJodaTimeField extends OptionalJodaTimeField(this) - - def fieldsToCompare = { - fields - .filterNot(_.name == "mandatoryBinaryField") // binarys don't compare - .filterNot(_.name == "mandatoryDateTimeField") // toInternetDate is lossy (doesn't retain time to ms precision) - } - - override def equals(other: Any): Boolean = other match { - case that: FieldTypeTestRecord => - that.fieldsToCompare.corresponds(this.fieldsToCompare) { (a,b) => - a.name == b.name && a.valueBox == b.valueBox - } - case _ => false - } -} - -object FieldTypeTestRecord extends FieldTypeTestRecord with MetaRecord[FieldTypeTestRecord] - -trait SyntheticTestTrait{ - - val genericField: StringField[_] - -} - -class SyntheticTestRecord extends Record[SyntheticTestRecord] with SyntheticTestTrait{ - - object genericField extends StringField(this, 1024) - - def meta = SyntheticTestRecord - -} - -object SyntheticTestRecord extends SyntheticTestRecord with MetaRecord[SyntheticTestRecord] - -class CustomFormatDateTimeRecord private () extends Record[CustomFormatDateTimeRecord] { - import java.text.SimpleDateFormat - - def meta = CustomFormatDateTimeRecord - - object customFormatDateTimeField extends DateTimeField(this) { - override val formats = new DefaultFormats { - override def dateFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'") - } - } - -} - -object CustomFormatDateTimeRecord extends CustomFormatDateTimeRecord with MetaRecord[CustomFormatDateTimeRecord] - -class CustomTypeIntFieldRecord private () extends Record[CustomTypeIntFieldRecord] { - - def meta = CustomTypeIntFieldRecord - - object customIntField extends IntField(this) { - override def formInputType = "number" - } - -} - -object CustomTypeIntFieldRecord extends CustomTypeIntFieldRecord with MetaRecord[CustomTypeIntFieldRecord] diff --git a/persistence/record/src/test/scala/net/liftweb/record/RecordRulesSpec.scala 
b/persistence/record/src/test/scala/net/liftweb/record/RecordRulesSpec.scala deleted file mode 100644 index c0955011a3..0000000000 --- a/persistence/record/src/test/scala/net/liftweb/record/RecordRulesSpec.scala +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright 2014 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package record - -import common._ -import http.{LiftSession, S} -import util.Helpers._ - -import org.specs2.mutable._ - -import fixtures._ - - -/** - * Systems under specification for RecordField. - */ -class RecordRulesSpec extends Specification { - "Record Rules Specification".title - sequential - - "RecordRules" should { - "snakify custom field name" in { - RecordRules.fieldName.doWith((_, name) => snakify(name)) { - val rec = BasicTestRecord.createRecord - - rec.fieldThree.name must_== "field_three" - } - } - "camelify custom field display name" in { - RecordRules.displayName.doWith((_, _, name) => camelify(name)) { - val rec = BasicTestRecord.createRecord - - rec.fieldThree.displayName must_== "FieldThree" - } - } - } -} diff --git a/persistence/record/src/test/scala/net/liftweb/record/RecordSpec.scala b/persistence/record/src/test/scala/net/liftweb/record/RecordSpec.scala deleted file mode 100644 index c43c22db0f..0000000000 --- a/persistence/record/src/test/scala/net/liftweb/record/RecordSpec.scala +++ /dev/null @@ -1,346 +0,0 @@ -/* - * Copyright 2010-2015 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package record - -import java.util.Calendar - -import org.specs2.mutable.Specification -import org.joda.time.DateTime -import common._ -import http.{LiftSession, S} -import util._ -import util.Helpers._ -import field.Countries -import fixtures._ -import net.liftweb.http.js.JE._ -import net.liftweb.json.JsonAST._ -import net.liftweb.json.JsonDSL._ - - -/** - * Systems under specification for Record. 
- */ -class RecordSpec extends Specification { - "Record Specification".title - - "Record field introspection" should { - val rec = FieldTypeTestRecord.createRecord - val allExpectedFieldNames: List[String] = (for { - typeName <- "Binary Boolean Country DateTime Decimal Double Email Enum Int Locale Long PostalCode String Textarea TimeZone JodaTime".split(" ") - flavor <- "mandatory legacyOptional optional".split(" ") - } yield flavor + typeName + "Field").toList - - "introspect only the expected fields" in { - rec.fields().map(_.name).sortWith(_ < _) must_== allExpectedFieldNames.sortWith(_ < _) - } - - "correctly look up fields by name" in { - val fields = allExpectedFieldNames.flatMap(rec.fieldByName _) - - fields.length must_== allExpectedFieldNames.length - } - - "not look up fields by bogus names" in { - val fields = - allExpectedFieldNames.flatMap { name => - rec.fieldByName("x" + name + "y") - } - - fields.length must_== 0 - } - - "ignore synthetic methods" in { - SyntheticTestRecord.metaFields.size must_== 1 - } - - } - - "Record lifecycle callbacks" should { - def testOneHarness(scope: String, f: LifecycleTestRecord => HarnessedLifecycleCallbacks) = { - ("be called before validation when specified at " + scope) in { - val rec = LifecycleTestRecord.createRecord - var triggered = false - f(rec).beforeValidationHarness = () => triggered = true - rec.foreachCallback(_.beforeValidation) - triggered must_== true - } - - ("be called after validation when specified at " + scope) in { - val rec = LifecycleTestRecord.createRecord - var triggered = false - f(rec).afterValidationHarness = () => triggered = true - rec.foreachCallback(_.afterValidation) - triggered must_== true - } - - ("be called around validate when specified at " + scope) in { - val rec = LifecycleTestRecord.createRecord - var triggeredBefore = false - var triggeredAfter = false - f(rec).beforeValidationHarness = () => triggeredBefore = true - f(rec).afterValidationHarness = () => triggeredAfter = true - rec.validate must_== Nil - triggeredBefore must_== true - triggeredAfter must_== true - } - - ("be called before save when specified at " + scope) in { - val rec = LifecycleTestRecord.createRecord - var triggered = false - f(rec).beforeSaveHarness = () => triggered = true - rec.foreachCallback(_.beforeSave) - triggered must_== true - } - - ("be called before create when specified at " + scope) in { - val rec = LifecycleTestRecord.createRecord - var triggered = false - f(rec).beforeCreateHarness = () => triggered = true - rec.foreachCallback(_.beforeCreate) - triggered must_== true - } - - ("be called before update when specified at " + scope) in { - val rec = LifecycleTestRecord.createRecord - var triggered = false - f(rec).beforeUpdateHarness = () => triggered = true - rec.foreachCallback(_.beforeUpdate) - triggered must_== true - } - - ("be called after save when specified at " + scope) in { - val rec = LifecycleTestRecord.createRecord - var triggered = false - f(rec).afterSaveHarness = () => triggered = true - rec.foreachCallback(_.afterSave) - triggered must_== true - } - - ("be called after create when specified at " + scope) in { - val rec = LifecycleTestRecord.createRecord - var triggered = false - f(rec).afterCreateHarness = () => triggered = true - rec.foreachCallback(_.afterCreate) - triggered must_== true - } - - ("be called after update when specified at " + scope) in { - val rec = LifecycleTestRecord.createRecord - var triggered = false - f(rec).afterUpdateHarness = () => triggered = true - 
rec.foreachCallback(_.afterUpdate) - triggered must_== true - } - - ("be called before delete when specified at " + scope) in { - val rec = LifecycleTestRecord.createRecord - var triggered = false - f(rec).beforeDeleteHarness = () => triggered = true - rec.foreachCallback(_.beforeDelete) - triggered must_== true - } - - ("be called after delete when specified at " + scope) in { - val rec = LifecycleTestRecord.createRecord - var triggered = false - f(rec).afterDeleteHarness = () => triggered = true - rec.foreachCallback(_.afterDelete) - triggered must_== true - } - } - - testOneHarness("the field level", rec => rec.stringFieldWithCallbacks: HarnessedLifecycleCallbacks) - } - - "Record" should { - val session = new LiftSession("", randomString(20), Empty) - S.initIfUninitted(session){ - val gu: Array[Byte] = Array(18, 19, 20) - val cal = Calendar.getInstance - val dt: DateTime = DateTime.now - - val fttr = FieldTypeTestRecord.createRecord - .mandatoryBinaryField(gu) - .mandatoryBooleanField(false) - .mandatoryCountryField(Countries.USA) - .mandatoryDateTimeField(cal) - .mandatoryDecimalField(BigDecimal("3.14")) - .mandatoryDoubleField(1999) - .mandatoryEmailField("test@liftweb.net") - .mandatoryEnumField(MyTestEnum.ONE) - .mandatoryIntField(99) - .mandatoryLocaleField("en_US") - .mandatoryLongField(100L) - .mandatoryPostalCodeField("55401") - .mandatoryStringField("foobar") - .mandatoryTextareaField("foobar") - .mandatoryTimeZoneField("America/Chicago") - .mandatoryJodaTimeField(dt) - - val fttrJValue: JValue = - ("mandatoryBooleanField" -> false) ~ - ("mandatoryCountryField" -> 1) ~ - ("mandatoryDateTimeField" -> Helpers.toInternetDate(cal.getTime)) ~ - ("mandatoryDecimalField" -> "3.14") ~ - ("mandatoryDoubleField" -> 1999.0) ~ - ("mandatoryEmailField" -> "test@liftweb.net") ~ - ("mandatoryEnumField" -> 0) ~ - ("mandatoryIntField" -> 99) ~ - ("mandatoryLocaleField" -> "en_US") ~ - ("mandatoryLongField" -> 100) ~ - ("mandatoryPostalCodeField" -> "55401") ~ - ("mandatoryStringField" -> "foobar") ~ - ("mandatoryTextareaField" -> "foobar") ~ - ("mandatoryTimeZoneField" -> "America/Chicago") ~ - ("mandatoryBinaryField" -> "EhMU") ~ - ("mandatoryJodaTimeField" -> dt.getMillis) - - val fttrJson: String = compactRender(fttrJValue) - - val fttrAsJsObj = JsObj( - ("mandatoryBooleanField", JsFalse), - ("mandatoryCountryField", Str(Countries.USA.toString)), - ("mandatoryDateTimeField", Str(Helpers.toInternetDate(cal.getTime))), - ("mandatoryDecimalField", Num(3.14)), - ("mandatoryDoubleField", Num(1999.0)), - ("mandatoryEmailField", Str("test@liftweb.net")), - ("mandatoryEnumField", Str(MyTestEnum.ONE.toString)), - ("mandatoryIntField", Num(99)), - ("mandatoryLocaleField", Str("en_US")), - ("mandatoryLongField", Num(100)), - ("mandatoryPostalCodeField", Str("55401")), - ("mandatoryStringField", Str("foobar")), - ("mandatoryTextareaField", Str("foobar")), - ("mandatoryTimeZoneField", Str("America/Chicago")), - ("mandatoryBinaryField", Str("121314")), - ("mandatoryJodaTimeField", Num(dt.getMillis)), - ("legacyOptionalBooleanField", JsNull), - ("optionalBooleanField", JsNull), - ("legacyOptionalCountryField", JsNull), - ("optionalCountryField", JsNull), - ("legacyOptionalDateTimeField", JsNull), - ("optionalDateTimeField", JsNull), - ("legacyOptionalDecimalField", JsNull), - ("optionalDecimalField", JsNull), - ("legacyOptionalDoubleField", JsNull), - ("optionalDoubleField", JsNull), - ("legacyOptionalEmailField", JsNull), - ("optionalEmailField", JsNull), - ("legacyOptionalEnumField", JsNull), - 
("optionalEnumField", JsNull), - ("legacyOptionalIntField", JsNull), - ("optionalIntField", JsNull), - ("legacyOptionalLocaleField", JsNull), - ("optionalLocaleField", JsNull), - ("legacyOptionalLongField", JsNull), - ("optionalLongField", JsNull), - ("legacyOptionalPostalCodeField", JsNull), - ("optionalPostalCodeField", JsNull), - ("legacyOptionalStringField", JsNull), - ("optionalStringField", JsNull), - ("legacyOptionalTextareaField", JsNull), - ("optionalTextareaField", JsNull), - ("legacyOptionalTimeZoneField", JsNull), - ("optionalTimeZoneField", JsNull), - ("optionalBinaryField", JsNull), - ("legacyOptionalBinaryField", JsNull), - ("legacyOptionalJodaTimeField", JsNull), - ("optionalJodaTimeField", JsNull) - ) - - "convert to JsExp (via asJSON)" in { - S.initIfUninitted(new LiftSession("", randomString(20), Empty)) { - fttr.asJSON mustEqual fttrAsJsObj - } - } - - /* Test broken - "convert to JsExp (via asJsExp)" in { - fttr.asJsExp mustEqual fttrAsJsObj - }*/ - - "convert to JValue" in { - fttr.asJValue mustEqual JObject(List( - JField("mandatoryBooleanField", JBool(false)), - JField("legacyOptionalBooleanField", JNothing), - JField("optionalBooleanField", JNothing), - JField("mandatoryCountryField", JInt(Countries.USA.id)), - JField("legacyOptionalCountryField", JNothing), - JField("optionalCountryField", JNothing), - JField("mandatoryDateTimeField", JString(Helpers.toInternetDate(cal.getTime))), - JField("legacyOptionalDateTimeField", JNothing), - JField("optionalDateTimeField", JNothing), - JField("mandatoryDecimalField", JString("3.14")), - JField("legacyOptionalDecimalField", JNothing), - JField("optionalDecimalField", JNothing), - JField("mandatoryDoubleField", JDouble(1999.0)), - JField("legacyOptionalDoubleField", JNothing), - JField("optionalDoubleField", JNothing), - JField("mandatoryEmailField", JString("test@liftweb.net")), - JField("legacyOptionalEmailField", JNothing), - JField("optionalEmailField", JNothing), - JField("mandatoryEnumField", JInt(MyTestEnum.ONE.id)), - JField("legacyOptionalEnumField", JNothing), - JField("optionalEnumField", JNothing), - JField("mandatoryIntField", JInt(99)), - JField("legacyOptionalIntField", JNothing), - JField("optionalIntField", JNothing), - JField("mandatoryLocaleField", JString("en_US")), - JField("legacyOptionalLocaleField", JNothing), - JField("optionalLocaleField", JNothing), - JField("mandatoryLongField", JInt(100)), - JField("legacyOptionalLongField", JNothing), - JField("optionalLongField", JNothing), - JField("mandatoryPostalCodeField", JString("55401")), - JField("legacyOptionalPostalCodeField", JNothing), - JField("optionalPostalCodeField", JNothing), - JField("mandatoryStringField", JString("foobar")), - JField("legacyOptionalStringField", JNothing), - JField("optionalStringField", JNothing), - JField("mandatoryTextareaField", JString("foobar")), - JField("legacyOptionalTextareaField", JNothing), - JField("optionalTextareaField", JNothing), - JField("mandatoryTimeZoneField", JString("America/Chicago")), - JField("legacyOptionalTimeZoneField", JNothing), - JField("optionalTimeZoneField", JNothing), - JField("mandatoryBinaryField", JString("EhMU")), - JField("legacyOptionalBinaryField", JNothing), - JField("optionalBinaryField", JNothing), - JField("mandatoryJodaTimeField", JInt(dt.getMillis)), - JField("legacyOptionalJodaTimeField", JNothing), - JField("optionalJodaTimeField", JNothing) - )) - } - - "get set from json string using lift-json parser" in { - S.initIfUninitted(new LiftSession("", randomString(20), Empty)) 
{ - val fttrFromJson = FieldTypeTestRecord.fromJsonString(fttrJson) - - fttrFromJson must_== Full(fttr) - } - } - } - } - - "basic record" should { - "order fields according to fieldOrder" in { - BasicTestRecord.metaFields must_== List(BasicTestRecord.field2, BasicTestRecord.field1, BasicTestRecord.fieldThree) - } - } -} - diff --git a/persistence/record/src/test/webapp/WEB-INF/web.xml b/persistence/record/src/test/webapp/WEB-INF/web.xml deleted file mode 100644 index 677471014e..0000000000 --- a/persistence/record/src/test/webapp/WEB-INF/web.xml +++ /dev/null @@ -1,21 +0,0 @@ - - - - - - - LiftFilter - Lift Filter - The Filter that intercepts lift calls - net.liftweb.http.LiftFilter - - - - - LiftFilter - /* - - - diff --git a/persistence/record/src/test/webapp/htmlFragmentWithHead.html b/persistence/record/src/test/webapp/htmlFragmentWithHead.html deleted file mode 100644 index ec053346db..0000000000 --- a/persistence/record/src/test/webapp/htmlFragmentWithHead.html +++ /dev/null @@ -1,7 +0,0 @@ - - - - -

Welcome to your project!

-
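For readers skimming this removal, the JSON round-trip that the deleted RecordSpec above exercised looks roughly like the following. This is a minimal sketch assuming the lift-record APIs used in that spec (Record, MetaRecord, StringField, asJValue, fromJsonString); TinyRecord and its title field are hypothetical names introduced only for illustration and are not part of this change.

import net.liftweb.common.{Empty, Full}
import net.liftweb.http.{LiftSession, S}
import net.liftweb.json.JsonAST._
import net.liftweb.record.{MetaRecord, Record}
import net.liftweb.record.field.StringField
import net.liftweb.util.Helpers.randomString

// Hypothetical record, shaped like the fixtures used by the deleted spec.
class TinyRecord private () extends Record[TinyRecord] {
  def meta = TinyRecord
  object title extends StringField(this, 100)
}
object TinyRecord extends TinyRecord with MetaRecord[TinyRecord]

object TinyRecordRoundTrip extends App {
  // The deleted spec runs serialization inside a throwaway LiftSession.
  S.initIfUninitted(new LiftSession("", randomString(20), Empty)) {
    val rec  = TinyRecord.createRecord.title("foobar")
    val json = compactRender(rec.asJValue)      // roughly {"title":"foobar"}
    val back = TinyRecord.fromJsonString(json)  // Box[TinyRecord]
    assert(back.map(_.title.get) == Full("foobar"))
  }
}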
- diff --git a/persistence/record/src/test/webapp/htmlSnippetWithHead.html b/persistence/record/src/test/webapp/htmlSnippetWithHead.html deleted file mode 100644 index 71577c1c25..0000000000 --- a/persistence/record/src/test/webapp/htmlSnippetWithHead.html +++ /dev/null @@ -1,5 +0,0 @@ - -

Welcome to your project!

-

-
- diff --git a/persistence/record/src/test/webapp/index.html b/persistence/record/src/test/webapp/index.html deleted file mode 100644 index 5692497160..0000000000 --- a/persistence/record/src/test/webapp/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -

Welcome to your project!

-

-
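The squeryl-record integration deleted below (CRUDify, KeyedRecord, RecordMetaDataFactory, RecordTypeMode, SquerylRecord, plus fixtures and specs) was typically wired up as sketched here. This is a minimal sketch based only on the DBHelper and query code in the deleted sources that follow; MySchema refers to the test schema defined in the deleted Fixtures.scala, and the in-memory H2 URL is illustrative.

import java.sql.DriverManager
import net.liftweb.squerylrecord.{MySchema, SquerylRecord}
import net.liftweb.squerylrecord.RecordTypeMode._
import org.squeryl.Session
import org.squeryl.adapters.H2Adapter

object SquerylRecordWiring {
  // Install the Record-aware FieldMetaData factory and a Squeryl session factory in one call,
  // mirroring DBHelper.initSquerylRecordWithInMemoryDB in the deleted fixtures.
  def boot(): Unit = {
    Class.forName("org.h2.Driver")
    SquerylRecord.initWithSquerylSession(
      Session.create(DriverManager.getConnection("jdbc:h2:mem:example"), new H2Adapter)
    )
  }

  // RecordTypeMode's implicit conversions let record fields stand in for primitive values
  // inside the Squeryl DSL, as the deleted SquerylRecordSpec demonstrates.
  def companiesNamed(name: String) = inTransaction {
    from(MySchema.companies)(c => where(c.name === name) select (c)).toList
  }
}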
- diff --git a/persistence/squeryl-record/src/main/scala/net/liftweb/squerylrecord/CRUDify.scala b/persistence/squeryl-record/src/main/scala/net/liftweb/squerylrecord/CRUDify.scala deleted file mode 100644 index 39fc3a5cc1..0000000000 --- a/persistence/squeryl-record/src/main/scala/net/liftweb/squerylrecord/CRUDify.scala +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright 2006-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package squerylrecord - -import net.liftweb.record.{Record, MetaRecord} -import net.liftweb.proto.Crudify -import org.squeryl._ -import net.liftweb.squerylrecord.RecordTypeMode._ -import net.liftweb.record.Field -import net.liftweb.common.{Box, Empty, Full} -import scala.xml.NodeSeq - -trait CRUDify[K, T <: Record[T] with KeyedEntity[K]] extends Crudify { - self: MetaRecord[T] => - - type TheCrudType = T - - type FieldPointerType = Field[_, TheCrudType] - - def table: Table[TheCrudType] - - def idFromString(in: String): K - - override def calcPrefix = table.name :: Nil - - override def fieldsForDisplay: List[FieldPointerType] = metaFields.filter(_.shouldDisplay_?) - - override def computeFieldFromPointer(instance: TheCrudType, pointer: FieldPointerType): Box[FieldPointerType] = instance.fieldByName(pointer.name) - - override def findForParam(in: String): Box[TheCrudType] = - inTransaction{ - table.lookup(idFromString(in)) - } - - override def findForList(start: Long, count: Int) = - inTransaction{ - from(table)(t => select(t)).page(start.toInt, count).toList - } - - override def create = createRecord - - override def buildBridge(in: TheCrudType) = new SquerylBridge(in) - - protected class SquerylBridge(in: TheCrudType) extends CrudBridge { - - def delete_! = inTransaction { - table.delete(in.id) - } - - def save = { - if (in.isPersisted) { - inTransaction{ - table.update(in) - } - } - else { - inTransaction { - table.insert(in) - } - } - true - } - - def validate = in.validate - - def primaryKeyFieldAsString = in.id.toString - } - - def buildFieldBridge(from: FieldPointerType): FieldPointerBridge = new SquerylFieldBridge(from) - - protected class SquerylFieldBridge(in: FieldPointerType) extends FieldPointerBridge { - def displayHtml: NodeSeq = in.displayHtml - } - -} diff --git a/persistence/squeryl-record/src/main/scala/net/liftweb/squerylrecord/KeyedRecord.scala b/persistence/squeryl-record/src/main/scala/net/liftweb/squerylrecord/KeyedRecord.scala deleted file mode 100644 index ce60575d84..0000000000 --- a/persistence/squeryl-record/src/main/scala/net/liftweb/squerylrecord/KeyedRecord.scala +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright 2010 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package squerylrecord - -import record.MandatoryTypedField -import org.squeryl.IndirectKeyedEntity - -/** - * Trait to mix into records that have a primary key. The primary key field must be named "idField", though - * the name of the database column can be changed from that using @Column(name="id") or similar. - */ -trait KeyedRecord[K] extends IndirectKeyedEntity[K, MandatoryTypedField[K]] { - /** The primary key field of the record. Must not be optional. */ - def idField: MandatoryTypedField[K] - - /** Implement requirements of KeyedEntity by returning the current value of idField */ - def id = idField.value -} - diff --git a/persistence/squeryl-record/src/main/scala/net/liftweb/squerylrecord/RecordMetaDataFactory.scala b/persistence/squeryl-record/src/main/scala/net/liftweb/squerylrecord/RecordMetaDataFactory.scala deleted file mode 100644 index 11753a1bf5..0000000000 --- a/persistence/squeryl-record/src/main/scala/net/liftweb/squerylrecord/RecordMetaDataFactory.scala +++ /dev/null @@ -1,231 +0,0 @@ -/* - * Copyright 2010 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package squerylrecord - -import common.{ Box, Full } -import record.{ BaseField, MetaRecord, Record, TypedField, OwnedField } -import record.field._ -import org.squeryl.internals.{ FieldMetaData, PosoMetaData, FieldMetaDataFactory } -import org.squeryl.annotations.Column -import java.lang.reflect.{ Method, Field } -import java.lang.annotation.Annotation -import java.sql.{ ResultSet, Timestamp } -import java.util.{ Calendar, Date } -import scala.collection.immutable.Map -import net.liftweb.util.Settable -import net.liftweb.record.OptionalTypedField - -/** FieldMetaDataFactory that allows Squeryl to use Records as model objects. 
*/ -class RecordMetaDataFactory extends FieldMetaDataFactory { - private val rec: { type R0 <: Record[R0] } = null - private type Rec = rec.R0 - - /** Cache MetaRecords by the model object class (Record class) */ - private var metaRecordsByClass: Map[Class[Rec], MetaRecord[Rec]] = Map.empty - - - /** Given a model object class (Record class) and field name, return the BaseField from the meta record */ - private def findMetaField(clasz: Class[Rec], name: String): BaseField = { - def fieldFrom(mr: MetaRecord[Rec]): BaseField = - mr.asInstanceOf[Record[Rec]].fieldByName(name) match { - case Full(f: BaseField) => f - case Full(_) => org.squeryl.internals.Utils.throwError("field " + name + " in Record metadata for " + clasz + " is not a TypedField") - case _ => org.squeryl.internals.Utils.throwError("failed to find field " + name + " in Record metadata for " + clasz) - } - - metaRecordsByClass get clasz match { - case Some(mr) => fieldFrom(mr) - case None => - try { - val rec = clasz.newInstance.asInstanceOf[Record[Rec]] - val mr = rec.meta - metaRecordsByClass = metaRecordsByClass updated (clasz, mr) - fieldFrom(mr) - } catch { - case ex: Exception => org.squeryl.internals.Utils.throwError("failed to find MetaRecord for " + clasz + " due to exception " + ex.toString) - } - } - } - - /** Build a Squeryl FieldMetaData for a particular field in a Record */ - def build(parentMetaData: PosoMetaData[_], name: String, - property: (Option[Field], Option[Method], Option[Method], Set[Annotation]), - sampleInstance4OptionTypeDeduction: AnyRef, isOptimisticCounter: Boolean): FieldMetaData = { - if (!isRecord(parentMetaData.clasz) || isOptimisticCounter) { - // Either this is not a Record class, in which case we'll - //treat it as a normal class in primitive type mode, or the field - //was mixed in by the Optimisitic trait and is not a Record field. - return SquerylRecord.posoMetaDataFactory.build(parentMetaData, name, property, - sampleInstance4OptionTypeDeduction, isOptimisticCounter) - } - - val metaField = findMetaField(parentMetaData.clasz.asInstanceOf[Class[Rec]], name) - - val (field, getter, setter, annotations) = property - - val colAnnotation = annotations.find(a => a.isInstanceOf[Column]).map(a => a.asInstanceOf[Column]) - - val fieldsValueType = metaField match { - case (f: SquerylRecordField) => f.classOfPersistentField - case (_: BooleanTypedField) => classOf[Boolean] - case (_: DateTimeTypedField) => classOf[Timestamp] - case (_: DoubleTypedField) => classOf[Double] - case (_: IntTypedField) => classOf[java.lang.Integer] - case (_: LongTypedField) => classOf[java.lang.Long] - case (_: DecimalTypedField) => classOf[BigDecimal] - case (_: TimeZoneTypedField) => classOf[String] - case (_: StringTypedField) => classOf[String] - case (_: PasswordTypedField) => classOf[String] - case (_: BinaryTypedField) => classOf[Array[Byte]] - case (_: LocaleTypedField) => classOf[String] - case (_: EnumTypedField[_]) => classOf[Int] - case (_: EnumNameTypedField[_]) => classOf[String] - case _ => org.squeryl.internals.Utils.throwError("Unsupported field type. Consider implementing " + - "SquerylRecordField for defining the persistent class." + - "Field: " + metaField) - } - - new FieldMetaData( - parentMetaData, - name, - fieldsValueType, // if isOption, this fieldType is the type param of Option, i.e. the T in Option[T] - fieldsValueType, //in primitive type mode fieldType == wrappedFieldType, in custom type mode wrappedFieldType is the 'real' type, i.e. 
the (primitive) type that jdbc understands - None, //val customTypeFactory: Option[AnyRef=>Product1[Any]], - metaField.optional_?, - getter, - setter, - field, - colAnnotation, - isOptimisticCounter, - metaField) { - - override def length = { - import java.math.MathContext - val fieldLength = - metaField match { - case (stringTypedField: StringTypedField) => Some(stringTypedField.maxLength) - case decimalField: DecimalField[_] => { - val precision = decimalField.context.getPrecision(); - if (precision != 0) - Some(precision) - else - None - } - case decimalField: OptionalDecimalField[_] => { - val precision = decimalField.context.getPrecision(); - if (precision != 0) - Some(precision) - else - None - } - case _ => None - } - fieldLength getOrElse super.length - } - - override def scale = { - val fieldScale = - metaField match { - case decimalField: DecimalField[_] => Some(decimalField.scale) - case decimalField: OptionalDecimalField[_] => Some(decimalField.scale) - case _ => None - } - fieldScale getOrElse super.scale - } - - private def fieldFor(o: AnyRef) = getter.get.invoke(o) match { - case tf: TypedField[_] => tf - case other => org.squeryl.internals.Utils.throwError("Field's used with Squeryl must inherit from net.liftweb.record.TypedField : " + other ) - } - - /** - * Sets the value which was retrieved from the DB into the appropriate Record field - */ - override def set(target: AnyRef, value: AnyRef) = target match { - case record: Record[_] => - record.runSafe { - val typedField: TypedField[_] = fieldFor(target) - typedField.setFromAny(Box !! value) - typedField.resetDirty - } - case other => - org.squeryl.internals.Utils.throwError("RecordMetaDataFactory can not set fields on non Record objects : " + other) - } - - override def setFromResultSet(target: AnyRef, rs: ResultSet, index: Int) = set(target, resultSetHandler(rs, index)) - - /** - * Extracts the value from the field referenced by o that will be stored in the DB - */ - override def get(o: AnyRef) = fieldFor(o) match { - case enumField: EnumTypedField[_] => enumField.valueBox match { - case Full(enum: Enumeration#Value) => enum.id: java.lang.Integer - case _ => null - } - case enumNameField: EnumNameTypedField[_] => enumNameField.valueBox match { - case Full(enum: Enumeration#Value) => enum.toString - case _ => null - } - case other => other.valueBox match { - case Full(c: Calendar) => new Timestamp(c.getTime.getTime) - case Full(other: AnyRef) => other - case _ => null - } - } - } - } - - /** - * Checks if the given class is a subclass of Record. A special handling is only - * needed for such subtypes. For other classes, use the standard squeryl methods. - */ - private def isRecord(clasz: Class[_]) = { - classOf[Record[_]].isAssignableFrom(clasz) - } - - /** - * For records, the constructor must not be used directly when - * constructing Objects. Instead, the createRecord method must be called. - */ - def createPosoFactory(posoMetaData: PosoMetaData[_]): () => AnyRef = { - if (!isRecord(posoMetaData.clasz)) { - // No record class - use standard poso meta data factory - return SquerylRecord.posoMetaDataFactory.createPosoFactory(posoMetaData); - } - - // Extract the MetaRecord for the companion object. This - // is done only once for each class. 
- val metaRecord = Class.forName(posoMetaData.clasz.getName + - "$").getField("MODULE$").get(null).asInstanceOf[MetaRecord[_]] - - () => metaRecord.createRecord.asInstanceOf[AnyRef] - } - - /** - * There needs to be a special handling for squeryl-record when single fields are selected. - * - * The problem was that record fields reference the record itself and thus Squeryl was of the - * opinion that the whole record should be returned, as well as the selected field. - * It is described in detail in this bug report: - * https://www.assembla.com/spaces/liftweb/tickets/876-record-squeryl-selecting-unspecified-columns-in-generated-sql - * - * By overriding this function, the reference to the record is excluded from - * the reference finding algorithm in Squeryl. - */ - override def hideFromYieldInspection(o: AnyRef, f: Field): Boolean = { - o.isInstanceOf[OwnedField[_]] && isRecord(f.getType) - } - -} diff --git a/persistence/squeryl-record/src/main/scala/net/liftweb/squerylrecord/RecordTypeMode.scala b/persistence/squeryl-record/src/main/scala/net/liftweb/squerylrecord/RecordTypeMode.scala deleted file mode 100644 index 0b9bcea1c4..0000000000 --- a/persistence/squeryl-record/src/main/scala/net/liftweb/squerylrecord/RecordTypeMode.scala +++ /dev/null @@ -1,259 +0,0 @@ -/* - * Copyright 2010 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package squerylrecord - -import record.{ MandatoryTypedField, OptionalTypedField, TypedField, Record} -import record.field.{EnumNameField, OptionalEnumNameField, EnumField, OptionalEnumField} - -import org.squeryl.{ PrimitiveTypeMode, Schema, Query } -import org.squeryl.dsl.{ BooleanExpression, DateExpression, EnumExpression, NumericalExpression, StringExpression, NonNumericalExpression } -import org.squeryl.dsl.ast.{ SelectElementReference, SelectElement, ConstantExpressionNode, RightHandSideOfIn } -import org.squeryl.internals.{ AttributeValidOnNonNumericalColumn, AttributeValidOnNumericalColumn, FieldReferenceLinker, OutMapper } - -import java.util.{ Calendar, Date } -import java.sql.Timestamp - -/** - * All methods from this object should be imported when creating queries using the Squeryl DSL with lift records. - * - * It provides implicit conversions for all record field types to the underlying primitive types. Thus, you can use - * record fields in the Squeryl DSL as if they were primitive types. - */ -object RecordTypeMode extends RecordTypeMode - -trait RecordTypeMode extends PrimitiveTypeMode { - - /** Conversion of mandatory Long fields to Squeryl Expressions. */ - implicit def long2ScalarLong(f: MandatoryTypedField[Long]) = convertNumericalMandatory(f, createOutMapperLongType) - - /** Conversion of mandatory Int fields to Squeryl Expressions. */ - implicit def int2ScalarInt(f: MandatoryTypedField[Int]) = convertNumericalMandatory(f, createOutMapperIntType) - - /** Conversion of mandatory Double fields to Squeryl Expressions. 
*/ - implicit def double2ScalarDouble(f: MandatoryTypedField[Double]) = convertNumericalMandatory(f, createOutMapperDoubleType) - - /** Conversion of mandatory BigDecimal fields to Squeryl Expressions. */ - implicit def decimal2ScalarDecimal(f: MandatoryTypedField[BigDecimal]) = convertNumericalMandatory(f, createOutMapperBigDecimalType) - - /** Conversion of optional Int fields to Squeryl Expressions. */ - implicit def optionInt2ScalarInt(f: OptionalTypedField[Int]) = convertNumericalOptional(f, createOutMapperIntTypeOption) - - /** Conversion needed for outer joins */ - implicit def optionIntField2OptionInt(f: Option[TypedField[Int]]) = convertNumericalOption(f, createOutMapperIntTypeOption) - - /** Conversion of optional Long fields to Squeryl Expressions. */ - implicit def optionLong2ScalarLong(f: OptionalTypedField[Long]) = convertNumericalOptional(f, createOutMapperLongTypeOption) - - /** Conversion needed for outer joins */ - implicit def optionLongField2OptionLong(f: Option[TypedField[Long]]) = convertNumericalOption(f, createOutMapperLongTypeOption) - - /** Conversion of optional Double fields to Squeryl Expressions. */ - implicit def optionDouble2ScalarDouble(f: OptionalTypedField[Double]) = convertNumericalOptional(f, createOutMapperDoubleTypeOption) - - /** Conversion needed for outer joins */ - implicit def optionDoubleField2OptionDouble(f: Option[TypedField[Double]]) = convertNumericalOption(f, createOutMapperDoubleTypeOption) - - /** Conversion of optional BigDecimal fields to Squeryl Expressions. */ - implicit def optionDecimal2ScalarBoolean(f: OptionalTypedField[BigDecimal]) = convertNumericalOptional(f, createOutMapperBigDecimalTypeOption) - - /** Conversion needed for outer joins */ - implicit def optionDecimalField2OptionDecimal(f: Option[TypedField[BigDecimal]]) = convertNumericalOption(f, createOutMapperBigDecimalTypeOption) - - - /** Conversion of mandatory String fields to Squeryl Expressions. */ - implicit def string2ScalarString(f: MandatoryTypedField[String]) = fieldReference match { - case Some(e) => new SelectElementReference[String](e)(createOutMapperStringType) with StringExpression[String] with SquerylRecordNonNumericalExpression[String] - case None => new ConstantExpressionNode[String](f.get)(createOutMapperStringType) with StringExpression[String] with SquerylRecordNonNumericalExpression[String] - } - - /** Conversion of optional String fields to Squeryl Expressions. */ - implicit def optionString2ScalarString(f: OptionalTypedField[String]) = fieldReference match { - case Some(e) => new SelectElementReference[Option[String]](e)(createOutMapperStringTypeOption) with StringExpression[Option[String]] with SquerylRecordNonNumericalExpression[Option[String]] - case None => new ConstantExpressionNode[Option[String]](f.get)(createOutMapperStringTypeOption) with StringExpression[Option[String]] with SquerylRecordNonNumericalExpression[Option[String]] - } - - /** Needed for outer joins */ - implicit def optionStringField2OptionString(f: Option[TypedField[String]]) = fieldReference match { - case Some(e) => new SelectElementReference[String](e)(createOutMapperStringType) with StringExpression[String] with SquerylRecordNonNumericalExpression[String] - case None => new ConstantExpressionNode[String](getValueOrNull(f))(createOutMapperStringType) with StringExpression[String] with SquerylRecordNonNumericalExpression[String] - } - - /** Conversion of mandatory Boolean fields to Squeryl Expressions. 
*/ - implicit def bool2ScalarBoolean(f: MandatoryTypedField[Boolean]) = fieldReference match { - case Some(e) => new SelectElementReference[Boolean](e)(createOutMapperBooleanType) with BooleanExpression[Boolean] with SquerylRecordNonNumericalExpression[Boolean] - case None => new ConstantExpressionNode[Boolean](f.get)(createOutMapperBooleanType) with BooleanExpression[Boolean] with SquerylRecordNonNumericalExpression[Boolean] - } - - /** Conversion of optional Boolean fields to Squeryl Expressions. */ - implicit def optionBoolean2ScalarBoolean(f: OptionalTypedField[Boolean]) = fieldReference match { - case Some(e) => new SelectElementReference[Option[Boolean]](e)(createOutMapperBooleanTypeOption) with BooleanExpression[Option[Boolean]] with SquerylRecordNonNumericalExpression[Option[Boolean]] - case None => new ConstantExpressionNode[Option[Boolean]](f.get)(createOutMapperBooleanTypeOption) with BooleanExpression[Option[Boolean]] with SquerylRecordNonNumericalExpression[Option[Boolean]] - } - - /** Needed for outer joins. */ - implicit def optionBooleanField2Boolean(f: Option[TypedField[Boolean]]) = fieldReference match { - case Some(e) => new SelectElementReference[Boolean](e)(createOutMapperBooleanType) with BooleanExpression[Boolean] with SquerylRecordNonNumericalExpression[Boolean] - case None => new ConstantExpressionNode[Boolean](getValue(f).getOrElse(false))(createOutMapperBooleanType) with BooleanExpression[Boolean] with SquerylRecordNonNumericalExpression[Boolean] - } - - /** Conversion of mandatory Calendar fields to Squeryl Expressions. */ - implicit def date2ScalarDate(f: MandatoryTypedField[Calendar]) = fieldReference match { - case Some(e) => new SelectElementReference[Timestamp](e)(createOutMapperTimestampType) with DateExpression[Timestamp] with SquerylRecordNonNumericalExpression[Timestamp] - case None => new ConstantExpressionNode[Timestamp](new Timestamp(f.get.getTimeInMillis))(createOutMapperTimestampType) with DateExpression[Timestamp] with SquerylRecordNonNumericalExpression[Timestamp] - } - - /** Conversion of optional Calendar fields to Squeryl Expressions. */ - implicit def optionDate2ScalarDate(f: OptionalTypedField[Calendar]) = fieldReference match { - case Some(e) => new SelectElementReference[Option[Timestamp]](e)(createOutMapperTimestampTypeOption) with DateExpression[Option[Timestamp]] with SquerylRecordNonNumericalExpression[Option[Timestamp]] - case None => { - val date = f.get match { - case Some(calendar) => Some(new Timestamp(calendar.getTimeInMillis)) - case None => None - } - new ConstantExpressionNode[Option[Timestamp]](date)(createOutMapperTimestampTypeOption) with DateExpression[Option[Timestamp]] with SquerylRecordNonNumericalExpression[Option[Timestamp]] - } - } - - /** Needed for inner selects. The cast is possible here because the type is not - * used in the in query. Only the AST of the query is needed. */ - //implicit def queryStringField2QueryString[T <: TypedField[String]](q: Query[T]): Query[String] = q.asInstanceOf[Query[String]] - - /** Needed for outer joins. 
*/ - implicit def optionDateField2OptionDate(f: Option[TypedField[Calendar]]) = fieldReference match { - case Some(e) => new SelectElementReference[Timestamp](e)(createOutMapperTimestampType) with DateExpression[Timestamp] with SquerylRecordNonNumericalExpression[Timestamp] - case None => new ConstantExpressionNode[Timestamp](getValue(f).map(field => new Timestamp(field.getTimeInMillis)).orNull)(createOutMapperTimestampType) with DateExpression[Timestamp] with SquerylRecordNonNumericalExpression[Timestamp] - } - - /** Needed for inner queries on date fields */ - //implicit def dateField2Timestamp(f: MandatoryTypedField[Calendar]) = new java.sql.Timestamp(f.get.getTime.getTime) - //implicit def optionalDateField2Timestamp(f: OptionalTypedField[Calendar]): Option[java.sql.Timestamp] = f.get.map(d => new java.sql.Timestamp(d.getTime.getTime)) - implicit def calendarFieldQuery2RightHandSideOfIn[F <: TypedField[Calendar]](q: org.squeryl.Query[F]) = new RightHandSideOfIn[Timestamp](q.ast) - - /** - * Needed for queries on constant calendar values. - */ - implicit def calendarToTimestampExpression(c: Calendar) = dateToTimestampExpression(c.getTime) - - /** - * Neeed for queries on constant date values. - */ - implicit def dateToTimestampExpression(d: java.util.Date) = - new ConstantExpressionNode[Timestamp](new java.sql.Timestamp(d.getTime))(createOutMapperTimestampType) with DateExpression[Timestamp] with SquerylRecordNonNumericalExpression[Timestamp] - - /** Conversion of mandatory Enum fields to Squeryl Expressions. */ - implicit def enum2EnumExpr[EnumType <: Enumeration](f: MandatoryTypedField[EnumType#Value]) = fieldReference match { - case Some(e) => new SelectElementReference[Enumeration#Value](e)(e.createEnumerationMapper(f.defaultValue)) with EnumExpression[Enumeration#Value] with SquerylRecordNonNumericalExpression[Enumeration#Value] - case None => new ConstantExpressionNode[Enumeration#Value](f.get)(outMapperFromEnumValue(f.get)) with EnumExpression[Enumeration#Value] with SquerylRecordNonNumericalExpression[Enumeration#Value] - } - - def reifySingleton[T](m: Manifest[T]) = { - val cls = m.runtimeClass - val field = cls.getField("MODULE$") - field.get(null).asInstanceOf[T] - } - - /** Conversion of optional Enum fields to Squeryl Expressions. */ - implicit def optionEnum2ScalaEnum[EnumType <: Enumeration](f: OptionalTypedField[EnumType#Value])(implicit m: Manifest[EnumType]) = - fieldReference match { - case Some(e) => - new SelectElementReference[Option[Enumeration#Value]](e)(e.createEnumerationOptionMapper(Some(reifySingleton(m).values.iterator.next))) with EnumExpression[Option[Enumeration#Value]] with SquerylRecordNonNumericalExpression[Option[Enumeration#Value]] - case None => - new ConstantExpressionNode[Option[Enumeration#Value]](f.get)(outMapperOptionFromOptionEnumValue(f.get).orNull) with EnumExpression[Option[Enumeration#Value]] with SquerylRecordNonNumericalExpression[Option[Enumeration#Value]] - } - - /** Needed for outer joins. 
*/ - implicit def optionEnumField2OptionEnum[EnumType <: Enumeration](f: Option[TypedField[EnumType#Value]])(implicit m: Manifest[EnumType]) = fieldReference match { - case Some(e) => - new SelectElementReference[Enumeration#Value](e)(e.createEnumerationMapper(reifySingleton(m).values.iterator.next)) with EnumExpression[Enumeration#Value] with SquerylRecordNonNumericalExpression[Enumeration#Value] - case None => new ConstantExpressionNode[Enumeration#Value](getValue(f).orNull)({ - val enumOption = f flatMap { f1: TypedField[EnumType#Value] => f1.valueBox.toOption } - val outMapperOption: Option[OutMapper[Enumeration#Value]] = enumOption map { e: EnumType#Value => outMapperFromEnumValue(e) : OutMapper[Enumeration#Value] /*crashes scala 2.9.1 without explicit type */ } - outMapperOption.orNull - }) with EnumExpression[Enumeration#Value] with SquerylRecordNonNumericalExpression[Enumeration#Value] - } - - implicit def enumFieldQuery2RightHandSideOfIn[EnumType <: Enumeration, T <: Record[T]](q: org.squeryl.Query[EnumNameField[T, EnumType]]) = new RightHandSideOfIn[Enumeration#Value](q.ast) - - - /** Needed for inner queries on certain non-numerical fields: */ - /*implicit def mandatoryTypedField2Value[T](f: MandatoryTypedField[T]): T = f.get - implicit def optionalTypedField2Value[T](f: OptionalTypedField[T]): Option[T] = f.get*/ - - implicit def typedFieldQuery2RightHandSideOfIn[T, F <: TypedField[T]](q: org.squeryl.Query[F]) = new RightHandSideOfIn[T](q.ast) - - - /** - * Helper method for converting mandatory numerical fields to Squeryl Expressions. - */ - private def convertNumericalMandatory[T](f: MandatoryTypedField[T], outMapper: OutMapper[T]) = fieldReference match { - case Some(e) => new SelectElementReference[T](e)(outMapper) with NumericalExpression[T] with SquerylRecordNumericalExpression[T] - case None => new ConstantExpressionNode[T](f.get)(outMapper) with NumericalExpression[T] with SquerylRecordNumericalExpression[T] - } - - /** - * Helper method for converting optional numerical fields to Squeryl Expressions. - */ - private def convertNumericalOptional[T](f: OptionalTypedField[T], outMapper: OutMapper[Option[T]]) = fieldReference match { - case Some(e: SelectElement) => new SelectElementReference[Option[T]](e)(outMapper) with NumericalExpression[Option[T]] with SquerylRecordNumericalExpression[Option[T]] - case None => new ConstantExpressionNode[Option[T]](f.get)(outMapper) with NumericalExpression[Option[T]] with SquerylRecordNumericalExpression[Option[T]] - } - - private def convertNumericalOption[T](f: Option[TypedField[T]], outMapper: OutMapper[Option[T]]) = fieldReference match { - case Some(e) => new SelectElementReference[Option[T]](e)(outMapper) with NumericalExpression[Option[T]] with SquerylRecordNumericalExpression[Option[T]] - case None => new ConstantExpressionNode[Option[T]](getValue(f))(outMapper) with NumericalExpression[Option[T]] with SquerylRecordNumericalExpression[Option[T]] - } - - private def getValue[T](f: Option[TypedField[T]]): Option[T] = f match { - case Some(field) => field.valueBox - case None => None - } - - private def getValueOrNull[T <: AnyRef](f: Option[TypedField[T]]): T = f match { - case Some(field) => field.valueBox.openOr(null.asInstanceOf[T]) - case None => null.asInstanceOf[T] - } - - /** - * Returns the field that was last referenced by Squeryl. Can also be None. - */ - private def fieldReference = FieldReferenceLinker.takeLastAccessedFieldReference - -} - -/** - * Record-Specific extensions to numerical Squeryl Expressions. 
- */ -trait SquerylRecordNumericalExpression[T] { this: NumericalExpression[T] => - - /** - * Can be used instead of the often conflicting "is" function. - */ - def defineAs(columnAttributes: AttributeValidOnNumericalColumn*)(implicit restrictUsageWithinSchema: Schema) = { - is(columnAttributes: _*)(restrictUsageWithinSchema) - } -} - -/** - * Record-Specific extensions to non-numerical Squeryl Expressions. - */ -trait SquerylRecordNonNumericalExpression[T] { this: NonNumericalExpression[T] => - - /** - * Can be used instead of the often conflicting "is" function. - */ - def defineAs(columnAttributes: AttributeValidOnNonNumericalColumn*)(implicit restrictUsageWithinSchema: Schema) = { - is(columnAttributes: _*)(restrictUsageWithinSchema) - } -} diff --git a/persistence/squeryl-record/src/main/scala/net/liftweb/squerylrecord/SquerylRecord.scala b/persistence/squeryl-record/src/main/scala/net/liftweb/squerylrecord/SquerylRecord.scala deleted file mode 100644 index b213ea82e6..0000000000 --- a/persistence/squeryl-record/src/main/scala/net/liftweb/squerylrecord/SquerylRecord.scala +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright 2010 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package squerylrecord - -import common.{Box, Full, Loggable} -import db.DB -import util.DynoVar -import org.squeryl.{Session, SessionFactory} -import org.squeryl.internals.{DatabaseAdapter, FieldMetaData} -import net.liftweb.util.LoanWrapper -import RecordTypeMode._ - -/** Object containing initialization logic for the Squeryl/Record integration */ -object SquerylRecord extends Loggable { - - /** - * We have to remember the default Squeryl metadata factory before - * we override it with our own implementation, so that we can use - * the original factory for non-record classes. - */ - private[squerylrecord] val posoMetaDataFactory = FieldMetaData.factory - - - /** - * Initialize the Squeryl/Record integration. This must be called somewhere during your Boot before you use any - * Records with Squeryl. When using this method, configure your Session separately - * (see [[http://squeryl.org/sessions-and-tx.html]] for details) or you can use initWithSquerylSession to do both at once. - */ - def init() { - FieldMetaData.factory = new RecordMetaDataFactory - } - - /** - * Initialize the Squeryl/Record integration and configure a default Session at the same time. 
- */ - def initWithSquerylSession(sessionFactory: => Session) { - init() - SessionFactory.concreteFactory = Some(() => sessionFactory) - } - - def buildLoanWrapper() = new LoanWrapper { - override def apply[T](f: => T): T = inTransaction { - f - } - } - - /** - * - * NOTE: Remove this along with the deprecated method below - * Keep track of the current Squeryl Session we've created using DB - * */ - private object currentSession extends DynoVar[Session] -} diff --git a/persistence/squeryl-record/src/main/scala/net/liftweb/squerylrecord/SquerylRecordField.scala b/persistence/squeryl-record/src/main/scala/net/liftweb/squerylrecord/SquerylRecordField.scala deleted file mode 100644 index 6281ca0f81..0000000000 --- a/persistence/squeryl-record/src/main/scala/net/liftweb/squerylrecord/SquerylRecordField.scala +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright 2010 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package squerylrecord - -/** - * This trait has to be extended for new fields that - * are derived from net.liftweb.record.BaseField or TypedField and should be used - * in squeryl records. - * - * This is necessary because the class of the field's value in the database - * has to be known for squeryl, and this information is not directly - * available in BaseField or TypedField. - * - * For all standard fields in record, there is a special handling in - * squeryl-record. That means, for example, that you can create a subclass - * of StringTypedField without the need to extend this trait. - */ -trait SquerylRecordField { - - /** - * Should return the class of the field's value in the database. - */ - def classOfPersistentField: Class[_] -} diff --git a/persistence/squeryl-record/src/test/scala/net/liftweb/squerylrecord/Fixtures.scala b/persistence/squeryl-record/src/test/scala/net/liftweb/squerylrecord/Fixtures.scala deleted file mode 100644 index acb77966d9..0000000000 --- a/persistence/squeryl-record/src/test/scala/net/liftweb/squerylrecord/Fixtures.scala +++ /dev/null @@ -1,271 +0,0 @@ -/* - * Copyright 2010-2015 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.liftweb -package squerylrecord - -import RecordTypeMode._ -import record.{ MetaRecord, Record, TypedField, MandatoryTypedField } -import common.{ Box, Full } -import record.field._ -import record.Field -import json.JsonAST.{ JValue, JString } -import http.js.JE.Str -import org.squeryl.{ SessionFactory, Session, Schema, Optimistic } -import org.squeryl.adapters.H2Adapter -import org.squeryl.annotations.Column -import org.squeryl.internals.AutoIncremented -import org.squeryl.internals.PrimaryKey -import org.squeryl.dsl.CompositeKey2 -import org.squeryl.KeyedEntity -import java.math.MathContext -import java.sql.DriverManager -import java.util.Calendar - -object DBHelper { - def initSquerylRecordWithInMemoryDB() { - SquerylRecord.initWithSquerylSession { - // TODO: Use mapper.StandardDBVendor - Class.forName("org.h2.Driver") - val session = Session.create(DriverManager.getConnection("jdbc:h2:mem:testSquerylRecordDB;DB_CLOSE_DELAY=-1;LOCK_TIMEOUT=3000"), new H2Adapter) - //session.setLogger(statement => println(statement)) - session - } - } - - /** - * Creates the test schema in a new transaction. Drops an old schema if - * it exists. - */ - def createSchema() { - inTransaction { - try { - //MySchema.printDdl - MySchema.dropAndCreate - MySchema.createTestData - } catch { - case e: Exception => e.printStackTrace() - throw e; - } - } - } -} - -/** - * Test Record: Company. It has many different field types for test purposes. - */ -class Company private () extends Record[Company] with KeyedRecord[Long] with Optimistic{ - - override def meta = Company - - @Column(name = "id") - override val idField = new LongField(this) - - val name = new StringField(this, "") - val description = new OptionalTextareaField(this, 1000) - val country = new CountryField(this) - val postCode = new PostalCodeField(this, country) - val created = new DateTimeField(this) - val employeeSatisfaction = new OptionalDecimalField(this, new MathContext(10), 5) - - lazy val employees = MySchema.companyToEmployees.left(this) - -} -object Company extends Company with MetaRecord[Company] with CRUDify[Long, Company]{ - - def table = MySchema.companies - - def idFromString(in: String) = in.toLong - -} - -object EmployeeRole extends Enumeration { - - type EmployeeRole = Value - - val Programmer, Manager = Value -} - -/** - * A field type that works just like a String field. - * Only for testing that custom fields derived from - * TypedField are also supported. - */ -class SpecialField[OwnerType <: Record[OwnerType]](rec: OwnerType) - extends Field[String, OwnerType] with TypedField[String] - with SquerylRecordField with MandatoryTypedField[String] { - - override def owner = rec - override def classOfPersistentField = classOf[String] - override def defaultValue = "" - override def setFromString(s: String) = setBox(Full(s)) - override def setFromAny(c: Any) = c match { - case Full(v) => setBox(Full(v.toString)) - case None => setBox(None) - case v => setBox(Full(v.toString)) - } - override def setFromJValue(jValue: JValue) = setBox(Full(jValue.toString)) - override def asJValue: JValue = JString(get) - override def asJs = Str(get) - override def toForm = Full(scala.xml.Text(get)) -} - -/** - * Test record: An employee belongs to a company. 
- */ -class Employee private () extends Record[Employee] with KeyedRecord[Long]{ - - override def meta = Employee - - @Column(name = "id") - override val idField = new LongField(this) - - val name = new SpecialField(this) - val companyId = new LongField(this) - val email = new EmailField(this, 100) - val salary = new DecimalField(this, MathContext.UNLIMITED, 2) - val locale = new LocaleField(this) - val timeZone = new TimeZoneField(this) - val password = new PasswordField(this) - val photo = new OptionalBinaryField(this) - val admin = new BooleanField(this) - val departmentNumber = new IntField(this) - val role = new EnumNameField(this, EmployeeRole) - - lazy val company = MySchema.companyToEmployees.right(this) - lazy val rooms = MySchema.roomAssignments.left(this) - -} -object Employee extends Employee with MetaRecord[Employee] - -/** - * Test record: One or more employees can have a room (one-to-many-relation). - */ -class Room private() extends Record[Room] with KeyedRecord[Long] { - override def meta = Room - - override val idField = new LongField(this) - - val name = new StringField(this, 50) - - lazy val employees = MySchema.roomAssignments.right(this) -} - -object Room extends Room with MetaRecord[Room] - -/** - * Relation table for assignments of rooms to employees. - * This must not be a Record. However, it's ok if it is not - * a record, because we won't use a relation table for - * a web form or similar. - */ -class RoomAssignment(val employeeId: Long, val roomId: Long) extends KeyedEntity[CompositeKey2[Long,Long]] { - def id = compositeKey(employeeId, roomId) -} - - -/** - * Schema for the test database. - */ -object MySchema extends Schema { - val companies = table[Company] - val employees = table[Employee] - val rooms = table[Room] - - val companyToEmployees = - oneToManyRelation(companies, employees).via((c, e) => c.id === e.companyId) - - val roomAssignments = manyToManyRelation(employees, rooms). - via[RoomAssignment]((employee, room, roomAssignment) => - (roomAssignment.employeeId === employee.idField, roomAssignment.roomId === room.idField)) - - on(employees)(e => - declare(e.companyId defineAs (indexed("idx_employee_companyId")), - e.email defineAs indexed("idx_employee_email"))) - - /** - * Drops an old schema if exists and then creates - * the new schema. - */ - def dropAndCreate { - drop - create - } - - /** - * Creates some test instances of companies and employees - * and saves them in the database. - */ - def createTestData { - import TestData._ - - allCompanies.foreach(companies.insert(_)) - allEmployees.foreach(employees.insert(_)) - allRooms.foreach(rooms.insert(_)) - - e1.rooms.associate(r1) - e1.rooms.associate(r2) - } - - object TestData { - - val c1 = Company.createRecord.name("First Company USA"). - created(Calendar.getInstance()). - country(Countries.USA).postCode("12345") - - val c2 = Company.createRecord.name("Second Company USA"). - created(Calendar.getInstance()). - country(Countries.USA).postCode("54321") - - val c3 = Company.createRecord.name("Company or Employee"). - created(Calendar.getInstance()). - country(Countries.Canada).postCode("1234") - - val allCompanies = List(c1, c2, c3) - - lazy val e1 = Employee.createRecord.companyId(c1.idField.get). - name("Peter Example"). - email("peter@example.com").salary(BigDecimal(345)). - locale(java.util.Locale.GERMAN.toString()). - timeZone("Europe/Berlin").password("exampletest"). - admin(false).departmentNumber(2).role(EmployeeRole.Programmer). 
- photo(Array[Byte](0, 1, 2, 3, 4)) - - lazy val e2 = Employee.createRecord.companyId(c2.idField.get). - name("Company or Employee"). - email("test@example.com").salary(BigDecimal("123.123")). - locale(java.util.Locale.US.toString()). - timeZone("America/Los_Angeles").password("test"). - admin(true).departmentNumber(1).role(EmployeeRole.Manager). - photo(Array[Byte](1)) - - lazy val e3 = Employee.createRecord.companyId(c2.idField.get). - name("Joe Shmo"). - email("joe@shmo.com").salary(BigDecimal("100000.00")). - locale(java.util.Locale.US.toString()). - timeZone("America/Los_Angeles").password("test"). - admin(false).departmentNumber(1).role(EmployeeRole.Programmer). - photo(Array[Byte](1)) - - lazy val allEmployees = List(e1, e2, e3) - - val r1 = Room.createRecord.name("Room 1") - - val r2 = Room.createRecord.name("Room 2") - - val r3 = Room.createRecord.name("Room 3") - - val allRooms = List(r1, r2, r3) - } -} - diff --git a/persistence/squeryl-record/src/test/scala/net/liftweb/squerylrecord/SquerylRecordSpec.scala b/persistence/squeryl-record/src/test/scala/net/liftweb/squerylrecord/SquerylRecordSpec.scala deleted file mode 100644 index b2fbd714da..0000000000 --- a/persistence/squeryl-record/src/test/scala/net/liftweb/squerylrecord/SquerylRecordSpec.scala +++ /dev/null @@ -1,472 +0,0 @@ -/* - * Copyright 2010-2011 WorldWide Conferencing, LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package net.liftweb -package squerylrecord - -import org.squeryl.Session -import org.squeryl.dsl.ast.FunctionNode -import org.squeryl.internals.OutMapper -import org.squeryl.dsl.StringExpression -import org.squeryl.dsl.DateExpression - -import org.specs2.mutable.Specification -import org.specs2.specification.AroundEach -import org.specs2.execute.{ AsResult , Result } - -import record.{ BaseField, Record } -import record.field._ -import RecordTypeMode._ -import MySchema.{ TestData => td, _ } -import java.util.Calendar -import java.sql.Timestamp -import java.text.SimpleDateFormat -import org.squeryl.adapters.PostgreSqlAdapter -import common.Empty -import http.{LiftSession, S} -import util.Helpers - -/** - * Systems under specification for SquerylRecord. 
- */ -class SquerylRecordSpec extends Specification with AroundEach { - "SquerylRecord Specification".title - sequential - - lazy val session = new LiftSession("", Helpers.randomString(20), Empty) - // One of these is for specs2 2.x, the other for specs2 1.x - protected def around[T <% Result](t: =>T) = { - S.initIfUninitted(session) { - DBHelper.initSquerylRecordWithInMemoryDB() - DBHelper.createSchema() - t - } - } - - protected def around[T : AsResult](t: =>T) = { - S.initIfUninitted(session) { - DBHelper.initSquerylRecordWithInMemoryDB() - DBHelper.createSchema() - AsResult(t) - } - } - - "SquerylRecord" should { - "load record by ID" in { - transaction { - S.initIfUninitted(session) { - val company = companies.lookup(td.c2.id) - checkCompaniesEqual(company.get, td.c2) - - val employee = employees.lookup(td.e1.id) - checkEmployeesEqual(employee.get, td.e1) - } - } - } - - "load record by string field value" in { - transaction { - S.initIfUninitted(session){ - val company = from(companies)(c => - where(c.name === td.c1.name.get) select (c)) - checkCompaniesEqual(company.single, td.c1) - } - } - } - - "support order by" in { - transaction { - val orderedCompanies = from(companies)(c => - select(c) orderBy (c.name)) - val ids = orderedCompanies.map(_.id) - // NOTE: This circumvents implicit conversion for the contents on List - // ids must containInOrder( - // td.allCompanies.sortBy(_.name.get).map(_.id)) - ids.mkString("(", ",", ")") must_== td.allCompanies.sortBy(_.name.get).map(_.id).mkString("(", ",", ")") - } - } - - "support normal joins" in { - transaction { - val companiesWithEmployees = from(companies, employees)((c, e) => - where(c.id === e.companyId.get) - select ((c.id, e.id))).toList - companiesWithEmployees must haveSize(td.allEmployees.size) - companiesWithEmployees must containAllOf(td.allEmployees map { e => (e.companyId.get, e.id) }) - } - } - - "support left outer joins" in { - transaction { - S.initIfUninitted(session){ - val companiesWithEmployees = join(companies, employees.leftOuter)((c, e) => - select(c, e) - on (c.id === e.map(_.companyId)) - ) - - companiesWithEmployees must haveSize(4) - // One company doesn't have an employee, two have - companiesWithEmployees.filter(ce => ce._2.isEmpty) must haveSize(1) - - val companiesAndEmployeesWithSameName = join(companies, employees.leftOuter)((c, e) => - groupBy(c.id) - compute (countDistinct(e.map(_.id))) - on (c.name === e.map(_.name)) - ) - - // There are three companies - companiesAndEmployeesWithSameName must haveSize(3) - // One company has the same name as an employee, two don't - companiesAndEmployeesWithSameName.filter(ce => ce.measures == 0) must haveSize(2) - - val employeesWithSameAdminSetting = join(employees, employees.leftOuter)((e1, e2) => - select(e1, e2) - on (e1.admin === e2.map(_.admin)) - ) - - employeesWithSameAdminSetting.foreach { ee => - ee._2 must not(beEmpty) - } - - val companiesWithSameCreationDate = join(companies, companies.leftOuter)((c1, c2) => - select(c1, c2) - on (c1.created === c2.map(_.created)) - ) - companiesWithSameCreationDate must not(beEmpty) - - val employeesWithSameDepartmentNumber = join(employees, employees.leftOuter)((e1, e2) => - select(e1, e2) - on (e1.departmentNumber === e2.map(_.departmentNumber)) - ) - employeesWithSameDepartmentNumber must not(beEmpty) - - val employeesWithSameRoles = join(employees, employees.leftOuter)((e1, e2) => - select(e1, e2) - on (e1.role === e2.map(_.role)) - ) - employeesWithSameRoles must not(beEmpty) - } - } - } - - "support one to many 
relations" in { - transaction { - val company = companies.lookup(td.c1.id) - company must beSome[Company] - val employees = company.get.employees - employees must haveSize(1) - checkEmployeesEqual(td.e1, employees.head) - employees.associate(td.e3) - td.e3.companyId.get must_== company.get.id - } - } - - "support many to many relations" in { - transactionWithRollback { - td.e1.rooms must haveSize(2) - - td.e2.rooms must beEmpty - - td.r1.employees must haveSize(1) - td.r3.employees must beEmpty - - td.r3.employees.associate(td.e2) - td.e2.rooms must haveSize(1) - } - } - - "support updates" in { - val id = td.c1.id - - transactionWithRollback { - S.initIfUninitted(session) { - val company = companies.lookup(id).get - company.name("New Name") - company.postCode("11111") - companies.update(company) - - val loaded = companies.lookup(id).get - checkCompaniesEqual(company, loaded) - - update(companies)(c => where(c.id === id) - set (c.name := "Name2")) - val afterPartialUpdate = companies.lookup(id).get - afterPartialUpdate.name.get must_== "Name2" - } - } - - // After rollback, the company should still be the same: - transaction { - S.initIfUninitted(session) { - val company = companies.lookup(id).get - checkCompaniesEqual(td.c1, company) - } - } - } - - "support delete" in { - transactionWithRollback { - employees.delete(td.e2.id) - employees.lookup(td.e2.id) must beNone - } - } - - "support select with properties of formerly fetched objects" in { - transaction { - S.initIfUninitted(session) { - val company = companies.lookup(td.c2.id).head - val employee = from(employees)(e => - where(e.companyId === company.idField) select (e)).head - employee.id must_== td.e2.id - - val loadedCompanies = from(companies)(c => - where(c.created === company.created) select (c)) - loadedCompanies.size must beGreaterThanOrEqualTo(1) - } - } - } - - "support many to many relations" >> { - transactionWithRollback { - td.e1.rooms must haveSize(2) - } - } - - "support date/time queries" >> { - transaction { - val c1 = from(companies)(c => - where(c.created <= Calendar.getInstance) - select (c)) - c1.size must beGreaterThan(1) - - val c2 = from(companies)(c => - where(c.created <= Calendar.getInstance.getTime) - select (c)) - c2.size must beGreaterThan(1) - } - } - - "support inner queries" >> { - import record.field._ - - transaction { - // Should work with the ID function (returns a long): - val companyId: Long = from(companies)(c => where(c.id in - from(companies)(c2 => where(c2.id === td.c1.id) select (c2.id))) - select (c.id)).single - companyId must_== td.c1.id - - // It should also be possible to select the ID field directly: - val companyIdField: LongField[Company] = from(companies)(c => where(c.idField in - from(companies)(c2 => where(c2.id === td.c1.id) select (c2.idField))) - select (c.idField)).single - companyIdField.get must_== td.c1.id - - // Strings should also be selectable in inner queries - val companyIdByName: Long = from(companies)(c => where(c.name in - from(companies)(c2 => where(c2.name === td.c1.name) select (c2.name))) - select (c.id)).single - companyIdByName must_== td.c1.id - - // ...And DateTime-Fields: - val companyIdByCreated: DateTimeField[Company] = from(companies)(c => where(c.created in - from(companies)(c2 => where(c2.id === td.c1.id) select (c2.created))) - select (c.created)).single - companyIdByCreated.get must_== td.c1.created.get - - // Decimal Fields: - val empSalary: DecimalField[Employee] = from(employees)(e => where(e.salary in - from(employees)(e2 => where(e2.id === 
td.e1.id) select (e2.salary))) - select (e.salary)).single - empSalary.get must_== td.e1.salary.get - - // Email fields: - val empEmail: EmailField[Employee] = from(employees)(e => where(e.email in - from(employees)(e2 => where(e2.id === td.e1.id) select (e2.email))) - select (e.email)).single - empSalary.get must_== td.e1.salary.get - - // Boolean fields: - val empAdmin: BooleanField[Employee] = from(employees)(e => where(e.admin in - from(employees)(e2 => where(e2.id === td.e2.id) select (e2.admin))) - select (e.admin)).single - empAdmin.get must_== td.e2.admin.get - - // Enum fields: - val empRoleQuery = from(employees)(e => where(e.role in - from(employees)(e2 => where(e2.id === td.e2.id) select (e2.role))) - select (e.role.get)) - val empRole = empRoleQuery.single - empRole must_== td.e2.role.get - } - - } - - "support the CRUDify trait" >> { - transaction { - val company = Company.create.name("CRUDify Company"). - created(Calendar.getInstance()). - country(Countries.USA).postCode("90210") - val bridge = Company.buildBridge(company) - bridge.save - val id = company.id - company.isPersisted must_== true - id must be_>(0l) - company.postCode("10001") - bridge.save - val company2 = Company.findForParam(id.toString) - company2.isDefined must_== true - company2.foreach(c2 => { - c2.postCode.get must_== "10001" - }) - val allCompanies = Company.findForList(0, 1000) - allCompanies.size must be_>(0) - bridge.delete_! - val allCompanies2 = Company.findForList(0, 1000) - allCompanies2.size must_== (allCompanies.size - 1) - } - } - - "Support Optimistic Locking" >> { - val company = Company.create.name("Optimistic Company"). - created(Calendar.getInstance()). - country(Countries.USA). - postCode("90210") - //First insert the company in one transaction - transaction { - companies.insert(company) - } - //Retrieve and modify in another transaction - val innerUpdate = new Thread(new Runnable { - override def run() { - transaction { - val company2 = companies.lookup(company.id).get - company2.created(Calendar.getInstance()) - companies.update(company2) - } - } - }) - innerUpdate.start - innerUpdate.join - //Then in a third transaction, try to update the original object - transaction { - import org.squeryl.StaleUpdateException - company.created(Calendar.getInstance()) - companies.update(company) must throwAn[StaleUpdateException] - } - } - - "Allow custom functions" in { - inTransaction { - val created = - from(companies)(c => - where(c.name === "First Company USA") - select (&(toChar(c.created, "EEE, d MMM yyyy"))) - ) - created.head must_== new SimpleDateFormat("EEE, d MMM yyyy").format(Calendar.getInstance().getTime()) - } - } - - "Support precision and scale taken from DecimalTypedField" >> { - val posoMetaData = companies.posoMetaData - val fieldMetaData = posoMetaData.findFieldMetaDataForProperty("employeeSatisfaction").get - val columnDefinition = new PostgreSqlAdapter().writeColumnDeclaration(fieldMetaData, false, MySchema) - columnDefinition.endsWith("numeric(" + Company.employeeSatisfaction.context.getPrecision() +"," + Company.employeeSatisfaction.scale + ")") must_== true - } - - "Properly reset the dirty_? flag after loading entities" >> inTransaction { - val company = from(companies)(company => - select(company)).page(0, 1).single - company.allFields map { f => f.dirty_? 
must_== false } - success - } - } - class ToChar(d: DateExpression[Timestamp], e: StringExpression[String], m: OutMapper[String]) - extends FunctionNode[String]("FORMATDATETIME", Some(m), Seq(d, e)) with StringExpression[String] - - def toChar(d: DateExpression[Timestamp], e: StringExpression[String])(implicit m: OutMapper[String]) = new ToChar(d, e, m) - - class TransactionRollbackException extends RuntimeException - - /** - * Runs the given code in a transaction and rolls - * back the transaction afterwards. - */ - private def transactionWithRollback[T](code: => T): T = { - - def rollback: Unit = throw new TransactionRollbackException() - - var result: T = null.asInstanceOf[T] - try { - transaction { - result = code - rollback - } - } catch { - case e: TransactionRollbackException => // OK, was rolled back - } - - result - } - - private def checkCompaniesEqual(c1: Company, c2: Company): Result = { - val cmp = new RecordComparer[Company](c1, c2) - cmp.check(_.idField) - cmp.check(_.description) - cmp.check(_.country) - cmp.check(_.postCode) - cmp.check(_.created) - - cmp.checkXHtml() - } - - private def checkEmployeesEqual(e1: Employee, e2: Employee): Result = { - val cmp = new RecordComparer[Employee](e1, e2) - cmp.check(_.name) - cmp.check(_.companyId) - cmp.check(_.email) - cmp.check(_.salary) - cmp.check(_.locale) - cmp.check(_.timeZone) - //cmp.check(_.password) - cmp.check(_.admin) - cmp.check(_.departmentNumber) - cmp.check(_.role) - - // Photo must be checked separately - e1.photo.get match { - case Some(p) => { - val p2 = e2.photo.get - p2 must beSome[Array[Byte]] - p2.get.size must_== p.size - - (0 until p.size) map { i => - p2.get(i) must_== p(i) - } - } - case None => e2.photo.get must beNone - } - } - - class RecordComparer[T <: Record[T]](val r1: T, val r2: T) { - def check(fieldExtractor: (T) => BaseField): Result = { - val f1 = fieldExtractor(r1) - val f2 = fieldExtractor(r2) - f1.get must_== f2.get - f1.name must_== f2.name - } - - def checkXHtml(): Result = - r1.toXHtml must_== r2.toXHtml - } -} diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 3cfff34d3c..0ac8a6296e 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -21,32 +21,39 @@ object Dependencies { type ModuleMap = String => ModuleID - lazy val slf4jVersion = "1.7.25" + lazy val slf4jVersion = "1.7.36" // Compile scope: // Scope available in all classpath, transitive by default. 
- lazy val commons_codec = "commons-codec" % "commons-codec" % "1.11" - lazy val commons_fileupload = "commons-fileupload" % "commons-fileupload" % "1.3.3" + lazy val commons_codec = "commons-codec" % "commons-codec" % "1.19.0" + lazy val commons_fileupload = "org.apache.commons" % "commons-fileupload2-jakarta-servlet6" % "2.0.0-M4" lazy val commons_httpclient = "commons-httpclient" % "commons-httpclient" % "3.1" - lazy val javamail = "javax.mail" % "mail" % "1.4.7" - lazy val jbcrypt = "org.mindrot" % "jbcrypt" % "0.4" - lazy val joda_time = "joda-time" % "joda-time" % "2.10" - lazy val joda_convert = "org.joda" % "joda-convert" % "2.1" - lazy val htmlparser = "nu.validator" % "htmlparser" % "1.4.12" + lazy val joda_time = "joda-time" % "joda-time" % "2.14.0" + lazy val joda_convert = "org.joda" % "joda-convert" % "3.0.1" + lazy val json4s_ext = "org.json4s" %% "json4s-ext" % "4.0.7" + lazy val json4s_native = "org.json4s" %% "json4s-native" % "4.0.7" + lazy val json4s_xml = "org.json4s" %% "json4s-xml" % "4.0.7" + lazy val htmlparser = "nu.validator" % "htmlparser" % "1.4.16" lazy val mongo_java_driver = "org.mongodb" % "mongodb-driver" % "3.12.7" lazy val mongo_java_driver_async = "org.mongodb" % "mongodb-driver-async" % "3.12.7" - lazy val paranamer = "com.thoughtworks.paranamer" % "paranamer" % "2.8" + lazy val paranamer = "com.thoughtworks.paranamer" % "paranamer" % "2.8.3" lazy val scalajpa = "org.scala-libs" % "scalajpa" % "1.5" lazy val scalap: ModuleMap = "org.scala-lang" % "scalap" % _ - lazy val scala_compiler: ModuleMap = "org.scala-lang" % "scala-compiler" % _ - lazy val scalaz7_core = "org.scalaz" %% "scalaz-core" % "7.2.28" - lazy val squeryl = "org.squeryl" %% "squeryl" % "0.9.5-7" + lazy val scalaz7_core = "org.scalaz" %% "scalaz-core" % "7.3.8" lazy val slf4j_api = "org.slf4j" % "slf4j-api" % slf4jVersion - lazy val scala_xml = "org.scala-lang.modules" %% "scala-xml" % "1.3.0" + lazy val scala_xml = "org.scala-lang.modules" %% "scala-xml" % "2.4.0" lazy val scala_parallel_collections = "org.scala-lang.modules" %% "scala-parallel-collections" % "0.2.0" - lazy val rhino = "org.mozilla" % "rhino" % "1.7.10" - lazy val scala_parser = "org.scala-lang.modules" %% "scala-parser-combinators" % "1.1.2" - lazy val xerces = "xerces" % "xercesImpl" % "2.11.0" + lazy val rhino = "org.mozilla" % "rhino" % "1.7.15" + lazy val scala_parser = "org.scala-lang.modules" %% "scala-parser-combinators" % "2.4.0" + lazy val xerces = "xerces" % "xercesImpl" % "2.12.2" + + lazy val scala_compiler: ModuleMap = (version: String) => { + if (version.startsWith("2")) { + "org.scala-lang" % "scala-compiler" % version + } else { + "org.scala-lang" % "scala3-compiler_3" % version + } + } // Aliases lazy val mongo_driver = mongo_java_driver @@ -55,11 +62,10 @@ object Dependencies { // Provided scope: // Scope provided by container, available only in compile and test classpath, non-transitive by default. 
- lazy val logback = "ch.qos.logback" % "logback-classic" % "1.2.3" % Provided + lazy val logback = "ch.qos.logback" % "logback-classic" % "1.2.13" % Provided lazy val log4j = "log4j" % "log4j" % "1.2.17" % Provided lazy val slf4j_log4j12 = "org.slf4j" % "slf4j-log4j12" % slf4jVersion % Provided - lazy val persistence_api = "javax.persistence" % "persistence-api" % "1.0.2" % Provided - lazy val servlet_api = "javax.servlet" % "javax.servlet-api" % "3.1.0" % Provided + lazy val servlet_api = "jakarta.servlet" % "jakarta.servlet-api" % "6.1.0" % Provided lazy val jquery = "org.webjars.bower" % "jquery" % "1.11.3" % Provided lazy val jasmineCore = "org.webjars.bower" % "jasmine-core" % "2.4.1" % Provided lazy val jasmineAjax = "org.webjars.bower" % "jasmine-ajax" % "3.2.0" % Provided @@ -67,22 +73,46 @@ object Dependencies { // Test scope: // Scope available only in test classpath, non-transitive by default. - lazy val jetty6 = "org.mortbay.jetty" % "jetty" % "6.1.26" % Test - lazy val jwebunit = "net.sourceforge.jwebunit" % "jwebunit-htmlunit-plugin" % "2.5" % Test - lazy val derby = "org.apache.derby" % "derby" % "10.7.1.1" % Test - lazy val h2database = "com.h2database" % "h2" % "1.2.147" % Test - - lazy val specs2 = "org.specs2" %% "specs2-core" % "4.9.4" % Test - lazy val scalacheck = "org.specs2" %% "specs2-scalacheck" % specs2.revision % Test - lazy val specs2Prov = "org.specs2" %% "specs2-core" % specs2.revision % Provided - lazy val specs2Matchers = "org.specs2" %% "specs2-matcher-extra" % specs2.revision % Test - lazy val specs2MatchersProv = "org.specs2" %% "specs2-matcher-extra" % specs2.revision % Provided - lazy val specs2Mock = "org.specs2" %% "specs2-mock" % specs2.revision % Test - - lazy val scalactic = "org.scalactic" %% "scalactic" % "3.1.2" % Test - lazy val scalatest = "org.scalatest" %% "scalatest" % "3.1.2" % Test + lazy val jetty11 = "org.eclipse.jetty" % "jetty-servlet" % "11.0.15" % Test + lazy val jettywebapp = "org.eclipse.jetty" % "jetty-webapp" % "11.0.15" % Test + lazy val jwebunit = "net.sourceforge.jwebunit" % "jwebunit-htmlunit-plugin" % "2.5" % Test + lazy val derby = "org.apache.derby" % "derby" % "10.7.1.1" % Test + lazy val h2database = "com.h2database" % "h2" % "1.2.147" % Test + + // Specs2 versions differ between Scala 2 and Scala 3 + def specs2Version(scalaVersion: String): String = { + CrossVersion.partialVersion(scalaVersion) match { + case Some((2, 13)) => "4.21.0" + case Some((3, _)) => "5.6.4" + case _ => "4.21.0" + } + } + + lazy val specs2: ModuleMap = (version: String) => "org.specs2" %% "specs2-core" % specs2Version(version) % Test + lazy val scalacheck: ModuleMap = (version: String) => "org.specs2" %% "specs2-scalacheck" % specs2Version(version) % Test + lazy val specs2Prov: ModuleMap = (version: String) => "org.specs2" %% "specs2-core" % specs2Version(version) % Provided + lazy val specs2Matchers: ModuleMap = (version: String) => "org.specs2" %% "specs2-matcher-extra" % specs2Version(version) % Test + lazy val specs2MatchersProv: ModuleMap = (version: String) => "org.specs2" %% "specs2-matcher-extra" % specs2Version(version) % Provided + lazy val specs2Mock: ModuleMap = (version: String) => { + CrossVersion.partialVersion(version) match { + case Some((2, 13)) => "org.specs2" %% "specs2-mock" % specs2Version(version) % Test + case Some((3, _)) => "org.scalatestplus" %% "mockito-5-18" % "3.2.19.0" % Test + case _ => "org.specs2" %% "specs2-mock" % specs2Version(version) % Test + } + } + + lazy val scalactic = "org.scalactic" %% "scalactic" % 
"3.2.19" % Test + lazy val scalatest = "org.scalatest" %% "scalatest" % "3.2.19" % Test lazy val scalatest_junit = "org.scalatestplus" %% "junit-4-12" % "3.1.2.0" % Test - lazy val mockito_scalatest = "org.mockito" %% "mockito-scala-scalatest" % "1.14.3" % Test + lazy val mockito_scalatest: ModuleMap = (version: String) => { + CrossVersion.partialVersion(version) match { + case Some((2, 13)) => "org.mockito" %% "mockito-scala-scalatest" % "1.14.3" % Test + case Some((3, _)) => "org.scalatestplus" %% "mockito-5-18" % "3.2.19.0" % Test + case _ => "org.mockito" %% "mockito-scala-scalatest" % "1.14.3" % Test + } + } + + lazy val scalamock = "org.scalamock" %% "scalamock" % "7.4.1" % Test // Aliases lazy val h2 = h2database diff --git a/project/LiftSbtHelpers.scala b/project/LiftSbtHelpers.scala index 64b676559a..7dfc3eea11 100644 --- a/project/LiftSbtHelpers.scala +++ b/project/LiftSbtHelpers.scala @@ -41,7 +41,7 @@ object LiftSbtHelpers { .settings( autoAPIMappings := true, apiMappings ++= { - val cp: Seq[Attributed[File]] = (fullClasspath in Compile).value + val cp: Seq[Attributed[File]] = (Compile / fullClasspath) .value findManagedDependency(cp, "org.scala-lang.modules", "scala-xml").map { case (revision, file) => diff --git a/project/build.properties b/project/build.properties index c8fcab543a..6520f6981d 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.6.2 +sbt.version=1.11.0 diff --git a/project/metals.sbt b/project/metals.sbt new file mode 100644 index 0000000000..da486d8ca8 --- /dev/null +++ b/project/metals.sbt @@ -0,0 +1,8 @@ +// format: off +// DO NOT EDIT! This file is auto-generated. + +// This file enables sbt-bloop to create bloop config files. + +addSbtPlugin("ch.epfl.scala" % "sbt-bloop" % "2.0.16") + +// format: on diff --git a/project/plugins.sbt b/project/plugins.sbt index d087ed7317..456061f4ef 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -4,7 +4,10 @@ resolvers += Resolver.typesafeRepo("releases") addSbtPlugin("com.typesafe.sbt" % "sbt-web" % "1.4.4") //addSbtPlugin("org.jetbrains" % "sbt-idea-plugin" % "2.1.3") -lazy val buildPlugin = RootProject(uri("https://github.com/lift/sbt-lift-build.git#f9c52bda7b43a98b9f8805c654c713d99db0a58f")) +lazy val buildPlugin = RootProject(uri("https://github.com/lift/sbt-lift-build.git#01af51e838d2162ebeae56505a635860392b09a6")) lazy val root = (project in file(".")).dependsOn(buildPlugin) addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0") +addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.6.4") +addSbtPlugin("com.github.sbt" % "sbt-pgp" % "2.3.1") +addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.12.2") diff --git a/scripts/check-2.6-to-3.0-migration.sh b/scripts/check-2.6-to-3.0-migration.sh deleted file mode 100644 index 5bef9f2935..0000000000 --- a/scripts/check-2.6-to-3.0-migration.sh +++ /dev/null @@ -1,38 +0,0 @@ -#!/bin/bash -LIFT_SCREEN_TEMPLATE=`find ./ -name "wizard-all.html"` -LIFT_SCREEN_EXTENDS=`grep -E "extends +LiftScreen"` -LIFT_SCREEN_WITH=`grep -E "with +LiftScreen"` - -if [[ -n "$LIFT_SCREEN_TEMPLATE" ]]; then - echo "You're likely using an outdated base LiftScreen template at:" - echo "$LIFT_SCREEN_TEMPLATE" - echo "Assuming you haven't changed it, you can replace it with the lift_basic version for" - echo "exactly the same result:" - echo "https://github.com/lift/lift_30_sbt/blob/master/lift_basic/src/main/webapp/templates-hidden/wizard-all.html" - echo "----------------------------------------------------------------------------------" -fi 
- -if [[ -n $LIFT_SCREEN_EXTENDS ]] || [[ -n $LIFT_SCREEN_WITH ]]; then - echo "You're extending LiftScreen. LiftScreen as of Lift 3.0 is the equivalent of 2.6's" - echo "CssBoundLiftScreen. This means it binds using CSS selector transforms instead of" - echo "the Lift 2.x series's \`bind\` function, which no longer exists. See this document" - echo "for porting instructions:" - echo "https://github.com/lift/framework/docs/migration/2.6-to-3.0-lift-screen.adoc" - echo "Here are the uses of LiftScreen we found:" - [[ -n $LIFT_SCREEN_EXTENDS ]] && echo "$LIFT_SCREEN_EXTENDS" - [[ -n $LIFT_SCREEN_WITH ]] && echo "$LIFT_SCREEN_WITH" - echo "----------------------------------------------------------------------------------" -fi - -BIND_USES=`grep -E "bind\("` - -if [[ -n $BIND_USES ]]; then - echo "You seem to be using Lift's bind helpers. These have been removed from Lift 3.0," - echo "superseded by Lift's CSS selector transforms. You can port your application to CSS" - echo "selector transforms piecewise while still on Lift 2.6, as they are supported in" - echo "both versions. For a primer on CSS selector transforms, look at this document:" - echo "https://github.com/lift/framework/blob/master/docs/css-selectors.adoc" - echo "" - echo "If you find yourself with additional questions, please ask on the Lift mailing list" - echo "and you should find willing helpers." -fi diff --git a/travis.sh b/travis.sh deleted file mode 100755 index 5acd4c3e2a..0000000000 --- a/travis.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/bash - -set -e - -sbt "++$TRAVIS_SCALA_VERSION test" - -./npmsh - -if [ "${TRAVIS_PULL_REQUEST}" = "false" ] && [ -z "$DISABLE_PUBLISH" ]; then - mkdir -p ~/.sbt/1.0/ - openssl aes-256-cbc -K $encrypted_a177bbd76133_key -iv $encrypted_a177bbd76133_iv -in .credentials.enc -out ~/.sbt/1.0/.credentials -d - - if [[ "$TRAVIS_SCALA_VERSION" =~ ^2.13 ]]; then - # we only have certain modules available for publishing in 2.13 - sbt ++$TRAVIS_SCALA_VERSION lift-webkit/publish lift-json/publish lift-actor/publish lift-json-ext/publish lift-record/publish lift-proto/publish lift-mapper/publish lift-common/publish lift-db/publish lift-markdown/publish lift-util/publish lift-testkit/publish lift-mongodb/publish lift-mongodb-record/publish lift-json-scalaz7/publish - else - sbt ++$TRAVIS_SCALA_VERSION publish - fi - - rm ~/.sbt/1.0/.credentials -fi diff --git a/unsafePublishLift.sh b/unsafePublishLift.sh deleted file mode 100755 index 161ee87439..0000000000 --- a/unsafePublishLift.sh +++ /dev/null @@ -1,174 +0,0 @@ -#!/bin/bash -#It is called unsafe so most people stay away from this -#But this script is safe to use if you are trying to publish Lift Framework to sonatype -#as a release version (including Milestones and RC's - - -## This scripts runs on mac's bash terminal - -# Exit on any errors and on unbound vars to be safe -set -o errexit -set -o nounset - - -BUILDLOG=/tmp/Lift-do-release-`date "+%Y%m%d-%H%M%S"`.log - -# This script is an attempt to automate the Lift release process -# -# From Indrajit, the steps on each module (superbuild, framework, examples) are: -# -# 1. git checkout -b -# 2. ./liftsh 'set project.version ' -# 3. Edit project/plugins/Plugins.scala to change the version of lift-sbt -# 4. git commit -v -a -m "Prepare for " -# 5. git push origin -# 6. git tag -release -# 7. git push origin -release -# 8. LIFTSH_OPTS="-Dpublish.remote=true -Dsbt.log.noformat=true" ./liftsh clean-cache clean-plugins reload +clean-lib +update +clean +publish-signed -# 9. 
Wait for happiness - -SCRIPTVERSION=0.1 - -##### Utility functions (break these out into an include?) ##### -# Basically yes/no confirmation with customized messages -# Usage: confirm "prompt" -# Returns 0 for yes, 1 for no -function confirm { - while read -p "$1 [yes/no] " CONFIRM; do - case "`echo $CONFIRM | tr [:upper:] [:lower:]`" in - yes) - return 0 - ;; - no) - return 1 - ;; - *) - echo "Please enter yes or no" - ;; - esac - done -} - -function debug { - #echo $@ - echo -n "" -} - -function die { - echo $@ - exit 1 -} - -# Locate our base directory (taken from http://blog.eitchnet.ch/?p=242) -SCRIPT_NAME="${PWD##*/}" -SCRIPT_DIR="${PWD%/*}" - -# if the script was started from the base directory, then the -# expansion returns a period -if test "$SCRIPT_DIR" == "." ; then - SCRIPT_DIR="$PWD" -# if the script was not called with an absolute path, then we need to add the -# current working directory to the relative path of the script -elif test "${SCRIPT_DIR:0:1}" != "/" ; then - SCRIPT_DIR="$PWD/$SCRIPT_DIR" -fi - -echo -e "\n*********************************************************************" -echo -e "SCRIPT_DIR is ${SCRIPT_DIR}" -echo -e "\n*********************************************************************" - -##### End Utility Functions ##### - - -echo -e "\n*********************************************************************" -printf "* Lift Full Release build script version %-26s *\n" "$SCRIPTVERSION" -#echo "* Default choices for prompts are marked in capitals *" -printf "*********************************************************************\n\n" - -echo -e "Build output logged to $BUILDLOG\n" - - -# CouchDB will blow up with HTTP proxy set because it doesn't correctly interpret the return codes -set +o nounset -if [ ! -z "${http_proxy}" -o ! -z "${HTTP_PROXY}" ]; then - echo -e "CouchDB tests will fail with http_proxy set! Please unset and re-run.\n" - exit -fi -set -o nounset - -# First, let's confirm that we really want to release... -confirm "Are you certain you want a release build?" || die "Cancelling release build." - -echo -e "\nProceeding...\n" - -# Now we need to know what version we're releasing -read -p "Please enter the version of the release: " RELEASE_VERSION - -# Sanity check on the release version -if ! echo $RELEASE_VERSION | egrep -x '[0-9]+\.[0-9]+(-(M|RC)[0-9]+)?' > /dev/null; then - confirm "$RELEASE_VERSION does not appear to be a valid version. Are you sure?" || - die "Cencelling release build!" -fi - -# Perform a sanity check on the modules first -for MODULE in framework ; do - cd ${SCRIPT_DIR}/${MODULE} - - echo "We cd'ed into `pwd`" - - # ensure that we're on master, and that we're up-to-date - CURRENT_BRANCH=`git branch 2> /dev/null | sed -e '/^[^*]/d' -e 's/* \(.*\)/\1/'` - debug "Current branch for $MODULE is $CURRENT_BRANCH" - - if [ "${CURRENT_BRANCH}" != "master" ]; then - echo "Currently releases can only be built from master. $MODULE is on branch $CURRENT_BRANCH. Aborting build." - exit - fi - - # ensure that we don't have any outstanding changes - if git status | grep -q "Changes not staged for commit" ; then - die "There are outstanding changes in $MODULE. Aborting build." - else - echo "All changes are committed, moving on" - fi - -done - -echo -e "\nPre-build tests passed. 
Initiating release build of LiftWeb version $RELEASE_VERSION\n" - -# For the remaining modules, we follow indrajit's steps outlined above -for MODULE in framework ; do - echo -e "\nStarting build on $MODULE module" - cd ${SCRIPT_DIR}/${MODULE} || die "Could not change to $MODULE directory!" - - git checkout -b ${RELEASE_VERSION} >> ${BUILDLOG} || die "Error creating work branch!" - - - ./liftsh ";set version in ThisBuild := \"${RELEASE_VERSION}\" ; session save " >> ${BUILDLOG} || die "Could not update project version in SBT!" - - git commit -v -a -m "Prepare for ${RELEASE_VERSION}" >> ${BUILDLOG} || die "Could not commit project version change!" - -#git push origin ${RELEASE_VERSION} >> ${BUILDLOG} || die "Could not push project version change!" - - git tag ${RELEASE_VERSION}-release >> ${BUILDLOG} || die "Could not tag release!" - -#git push origin ${RELEASE_VERSION}-release >> ${BUILDLOG} || die "Could not push release tag!" - - # Do a separate build for each configured Scala version so we don't blow the heap - for SCALA_VERSION in $(grep crossScalaVersions build.sbt | cut -d '(' -f 2 | sed s/[,\)\"]//g ); do - echo -n " Building against Scala ${SCALA_VERSION}..." - if ! ./liftsh ++${SCALA_VERSION} clean update test publishSigned >> ${BUILDLOG} ; then - echo "failed! See build log for details" - exit - fi - echo "complete" - done - - echo "Build complete for module ${MODULE}" - -done - -echo -e "\n\nRelease complete!" -echo -e "\n\nPlease update the lift_30_sbt templates!" -echo -e "\n\nwrite something about this release on the liftweb.net site." -echo -e "\n\nand if all went well, push tags to github" diff --git a/web/testkit/src/main/scala/net/liftweb/http/testing/TestFramework.scala b/web/testkit/src/main/scala/net/liftweb/http/testing/TestFramework.scala index 988164cfd1..b5bbf39e11 100644 --- a/web/testkit/src/main/scala/net/liftweb/http/testing/TestFramework.scala +++ b/web/testkit/src/main/scala/net/liftweb/http/testing/TestFramework.scala @@ -22,8 +22,9 @@ import scala.language.implicitConversions import net.liftweb.util.Helpers._ import net.liftweb.util._ -import net.liftweb.json._ -import JsonDSL._ +import org.json4s._ +import org.json4s.native._ +import org.json4s.JsonDSL._ import net.liftweb.common._ import scala.xml._ import scala.xml.Utility.trim @@ -83,14 +84,12 @@ trait ToBoxTheResponse { (baseUrl + fullUrl, httpClient.executeMethod(getter)) match { case (server, responseCode) => val respHeaders = slurpApacheHeaders(getter.getResponseHeaders) + val body = for { + st <- Box !! getter.getResponseBodyAsStream + bytes <- tryo(readWholeStream(st)) + } yield bytes - Full(new TheResponse(baseUrl, - responseCode, getter.getStatusText, - respHeaders, - for {st <- Box !! 
getter.getResponseBodyAsStream - bytes <- tryo(readWholeStream(st)) - } yield bytes, - httpClient)) + Full(new TheResponse(baseUrl, responseCode, getter.getStatusText, respHeaders, body, httpClient)) } } catch { case e: IOException => Failure(baseUrl + fullUrl, Full(e), Empty) @@ -201,14 +200,14 @@ trait BaseGetPoster { def isRepeatable() = true - def writeRequest(out: OutputStream) { + def writeRequest(out: OutputStream): Unit = { out.write(bytes) } } implicit def jsonToRequestEntity(body: JValue): RequestEntity = new RequestEntity { - val bytes = compactRender(body).toString.getBytes("UTF-8") + val bytes = JsonMethods.compact(JsonMethods.render(body)).toString.getBytes("UTF-8") def getContentLength() = bytes.length @@ -216,7 +215,7 @@ trait BaseGetPoster { def isRepeatable() = true - def writeRequest(out: OutputStream) { + def writeRequest(out: OutputStream): Unit = { out.write(bytes) } } @@ -269,7 +268,7 @@ trait BaseGetPoster { def isRepeatable() = true - def writeRequest(out: OutputStream) { + def writeRequest(out: OutputStream): Unit = { out.write(bytes) } }) @@ -342,7 +341,7 @@ trait BaseGetPoster { def isRepeatable() = true - def writeRequest(out: OutputStream) { + def writeRequest(out: OutputStream): Unit = { out.write(bytes) } }) @@ -456,7 +455,7 @@ trait TestKit extends ClientBuilder with GetPoster with GetPosterHelper { def baseUrl: String class TestHandler(res: TestResponse) { - def then(f: TestResponse => TestResponse): TestResponse = f(res) + def `then`(f: TestResponse => TestResponse): TestResponse = f(res) def also(f: TestResponse => Any): TestResponse = {f(res); res} } @@ -518,14 +517,14 @@ trait TestFramework extends TestKit { // protected lazy val httpClient = new HttpClient(new MultiThreadedHttpConnectionManager) - def fork(cnt: Int)(f: Int => Any) { + def fork(cnt: Int)(f: Int => Any): Unit = { val threads = for (t <- (1 to cnt).toList) yield { - val th = new Thread(new Runnable {def run {f(t)}}) + val th = new Thread(new Runnable {def run: Unit = {f(t)}}) th.start th } - def waitAll(in: List[Thread]) { + def waitAll(in: List[Thread]): Unit = { in match { case Nil => case x :: xs => x.join; waitAll(xs) @@ -612,7 +611,7 @@ object TestHelpers { type CRK = JavaList[String] implicit def jitToIt[T](in: JavaIterator[T]): Iterator[T] = new Iterator[T] { - def next: T = in.next + def next(): T = in.next() def hasNext = in.hasNext } @@ -792,9 +791,7 @@ class HttpResponse(baseUrl: String, code: Int, msg: String, headers: Map[String, List[String]], body: Box[Array[Byte]], - theHttpClient: HttpClient) extends - BaseResponse(baseUrl, code, msg, headers, body, theHttpClient) with - ToResponse with TestResponse { + theHttpClient: HttpClient) extends BaseResponse(baseUrl, code, msg, headers, body, theHttpClient) with ToResponse with TestResponse { } /** @@ -805,9 +802,7 @@ class TheResponse(baseUrl: String, code: Int, msg: String, headers: Map[String, List[String]], body: Box[Array[Byte]], - theHttpClient: HttpClient) extends - BaseResponse(baseUrl, code, msg, headers, body, theHttpClient) with - ToBoxTheResponse { + theHttpClient: HttpClient) extends BaseResponse(baseUrl, code, msg, headers, body, theHttpClient) with ToBoxTheResponse { type SelfType = TheResponse } @@ -831,7 +826,7 @@ abstract class BaseResponse(override val baseUrl: String, private object FindElem { def unapply(in: NodeSeq): Option[Elem] = in match { case e: Elem => Some(e) - case d: Document => unapply(d.docElem) + case d: scala.xml.Document => unapply(d.docElem) case g: Group => unapply(g.nodes) case n: Text => 
None case sn: SpecialNode => None diff --git a/web/testkit/src/main/scala/net/liftweb/http/testing/TestRunner.scala b/web/testkit/src/main/scala/net/liftweb/http/testing/TestRunner.scala index 6d60a2f415..01513fcc21 100644 --- a/web/testkit/src/main/scala/net/liftweb/http/testing/TestRunner.scala +++ b/web/testkit/src/main/scala/net/liftweb/http/testing/TestRunner.scala @@ -54,12 +54,12 @@ class TestRunner(clearDB: Box[() => Any], setupDB: Box[() => Any],beforeAssertLi } } - def beforeTest(name: String) { + def beforeTest(name: String): Unit = { log += Tracker(name, false, true, true, Empty, Nil) beforeTestListeners.foreach(_(name)) } - def afterTest(name: String, success: Boolean, excp: Box[Throwable], trace: List[StackTraceElement]) { + def afterTest(name: String, success: Boolean, excp: Box[Throwable], trace: List[StackTraceElement]): Unit = { log += Tracker(name, false, false, success, excp, trace) afterTestListeners.foreach(_(name, success, excp, trace)) } @@ -68,7 +68,7 @@ class TestRunner(clearDB: Box[() => Any], setupDB: Box[() => Any],beforeAssertLi def run: TestResults = { - def doResetDB { + def doResetDB: Unit = { clearDB.foreach(_()) setupDB.foreach(_()) } @@ -76,7 +76,7 @@ class TestRunner(clearDB: Box[() => Any], setupDB: Box[() => Any],beforeAssertLi doResetDB - def runASingleTest(testItem: Item) { + def runASingleTest(testItem: Item): Unit = { beforeTest(testItem.name) val myTrace = @@ -103,9 +103,9 @@ class TestRunner(clearDB: Box[() => Any], setupDB: Box[() => Any],beforeAssertLi afterTest(testItem.name, success, excp, trace) } - def runForkTest(testItem: Item, cnt: Int) { + def runForkTest(testItem: Item, cnt: Int): Unit = { val threads = for (n <- (1 to cnt).toList) yield { - val thread = new Thread(new Runnable {def run { + val thread = new Thread(new Runnable {def run: Unit = { beforeTest(testItem.name+" thread "+n) val myTrace = @@ -136,7 +136,7 @@ class TestRunner(clearDB: Box[() => Any], setupDB: Box[() => Any],beforeAssertLi thread } - def waitAll(in: List[Thread]) { + def waitAll(in: List[Thread]): Unit = { in match { case Nil => case x :: xs => x.join; waitAll(xs) @@ -159,7 +159,7 @@ class TestRunner(clearDB: Box[() => Any], setupDB: Box[() => Any],beforeAssertLi TestResults(log.toList) } - (run _, applyAssert _) + (() => run, applyAssert _) } } diff --git a/web/testkit/src/main/scala/net/liftweb/mocks/MockHttpServletRequest.scala b/web/testkit/src/main/scala/net/liftweb/mocks/MockHttpServletRequest.scala index 22746db9af..163113a0d4 100644 --- a/web/testkit/src/main/scala/net/liftweb/mocks/MockHttpServletRequest.scala +++ b/web/testkit/src/main/scala/net/liftweb/mocks/MockHttpServletRequest.scala @@ -26,10 +26,10 @@ import java.util.Date import java.util.Locale import java.util.{Enumeration => JEnum} import java.util.{HashMap => JHash} -import javax.servlet._ -import javax.servlet.http._ +import jakarta.servlet._ +import jakarta.servlet.http._ -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.collection.mutable.ListBuffer import scala.xml.NodeSeq @@ -37,7 +37,9 @@ import scala.xml.NodeSeq import common.{Box,Empty} import util.Helpers -import json.JsonAST._ +import org.json4s._ +import org.json4s.native._ +import org.json4s.JsonAST._ /** * A Mock ServletRequest. Change its state to create the request you are @@ -84,7 +86,7 @@ class MockHttpServletRequest(val url : String = null, var contextPath : String = * Note that the String will be converted to bytes * based on the current setting of charEncoding. 
*/ - def body_= (s : String) : Unit = body_=(s, "text/plain") + def body_= (s : String): Unit = body_=(s, "text/plain") /** * Sets the body to the given string and content type. @@ -92,7 +94,7 @@ class MockHttpServletRequest(val url : String = null, var contextPath : String = * Note that the String will be converted to bytes * based on the current setting of charEncoding. */ - def body_= (s : String, contentType : String) : Unit = { + def body_= (s : String, contentType : String): Unit = { body = s.getBytes(charEncoding) this.contentType = contentType } @@ -104,7 +106,7 @@ class MockHttpServletRequest(val url : String = null, var contextPath : String = * Note that the elements will be converted to bytes * based on the current setting of charEncoding. */ - def body_= (nodes : NodeSeq) : Unit = body_=(nodes, "text/xml") + def body_= (nodes : NodeSeq): Unit = body_=(nodes, "text/xml") /** * Sets the body to the given elements and content type. @@ -112,7 +114,7 @@ class MockHttpServletRequest(val url : String = null, var contextPath : String = * Note that the elements will be converted to bytes * based on the current setting of charEncoding. */ - def body_= (nodes : NodeSeq, contentType : String) : Unit = { + def body_= (nodes : NodeSeq, contentType : String): Unit = { body = nodes.toString.getBytes(charEncoding) this.contentType = contentType } @@ -121,16 +123,13 @@ class MockHttpServletRequest(val url : String = null, var contextPath : String = * Sets the body to the given json value. Also * sets the contentType to "application/json" */ - def body_= (jval : JValue) : Unit = body_=(jval, "application/json") + def body_= (jval : JValue): Unit = body_=(jval, "application/json") /** * Sets the body to the given json value and content type. */ - def body_= (jval : JValue, contentType : String) : Unit = { - import json.JsonDSL._ - import json.JsonAST - - body = JsonAST.prettyRender(jval).getBytes(charEncoding) + def body_= (jval : JValue, contentType : String): Unit = { + body = JsonMethods.pretty(JsonMethods.render(jval)).getBytes(charEncoding) this.contentType = contentType } @@ -192,7 +191,7 @@ class MockHttpServletRequest(val url : String = null, var contextPath : String = null } - def queryString_= (q : String) { + def queryString_= (q : String): Unit = { if (q != null && q.length > 0) { val newParams = ListBuffer[(String,String)]() @@ -307,7 +306,7 @@ class MockHttpServletRequest(val url : String = null, var contextPath : String = * * @param url The URL to extract from */ - def processUrl (url : String) { + def processUrl (url : String): Unit = { if (url.toLowerCase.startsWith("http")) { processUrl(new URL(url)) } else if (url.startsWith("/")) { @@ -334,7 +333,7 @@ class MockHttpServletRequest(val url : String = null, var contextPath : String = * @param url The URL to extract from * @param contextPath The servlet context of the request. Defaults to "" */ - def processUrl (url : URL) { + def processUrl (url : URL): Unit = { // Deconstruct the URL to set values url.getProtocol match { case "http" => scheme = "http"; secure = false @@ -373,7 +372,7 @@ class MockHttpServletRequest(val url : String = null, var contextPath : String = /** * Adds an "Authorization" header, per RFC1945. 
*/ - def addBasicAuth (user : String, pass : String) { + def addBasicAuth (user : String, pass : String): Unit = { val hashedCredentials = Helpers.base64Encode((user + ":" + pass).getBytes) headers += "Authorization" -> List("Basic " + hashedCredentials) @@ -547,7 +546,7 @@ class MockHttpServletRequest(val url : String = null, var contextPath : String = * A utility method to set the given header to an RFC1123 date * based on the given long value (epoch seconds). */ - def setDateHeader(s: String, l: Long) { + def setDateHeader(s: String, l: Long): Unit = { headers += (s -> List(Helpers.toInternetDate(l))) } @@ -570,10 +569,16 @@ class MockHttpServletRequest(val url : String = null, var contextPath : String = def getServletContext(): ServletContext = null def isAsyncStarted(): Boolean = false def isAsyncSupported(): Boolean = false - def startAsync(request: javax.servlet.ServletRequest, response: javax.servlet.ServletResponse): AsyncContext = null + def startAsync(request: jakarta.servlet.ServletRequest, response: jakarta.servlet.ServletResponse): AsyncContext = null def startAsync(): AsyncContext = null def changeSessionId(): String = null def getContentLengthLong(): Long = body.length - def upgrade[T <: javax.servlet.http.HttpUpgradeHandler](x$1: Class[T]): T = ??? + override def getRequestId: String = ??? + + override def getProtocolRequestId: String = ??? + + override def getServletConnection: ServletConnection = ??? + + def upgrade[T <: jakarta.servlet.http.HttpUpgradeHandler](x$1: Class[T]): T = ??? } diff --git a/web/testkit/src/main/scala/net/liftweb/mocks/MockHttpServletResponse.scala b/web/testkit/src/main/scala/net/liftweb/mocks/MockHttpServletResponse.scala index 10dd2a7b29..4efc312a86 100644 --- a/web/testkit/src/main/scala/net/liftweb/mocks/MockHttpServletResponse.scala +++ b/web/testkit/src/main/scala/net/liftweb/mocks/MockHttpServletResponse.scala @@ -18,7 +18,7 @@ package net.liftweb package mocks import scala.collection.mutable.HashMap -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import java.io.PrintWriter import java.io.StringReader import java.io.BufferedReader @@ -32,8 +32,8 @@ import java.util.Arrays import java.util.Date import java.util.Locale import java.util.Vector -import javax.servlet._ -import javax.servlet.http._ +import jakarta.servlet._ +import jakarta.servlet.http._ /** * A Mock HttpServletResponse. 
Take a peek at it's writer or @@ -66,16 +66,16 @@ class MockHttpServletResponse(var writer: PrintWriter, var outputStream: Servlet } def getStatus = statusCode - def addIntHeader(s: String, i: Int) { + def addIntHeader(s: String, i: Int): Unit = { addHeader(s, i.toString) } - def setIntHeader(s: String, i: Int) { + def setIntHeader(s: String, i: Int): Unit = { setHeader(s, i.toString) } - def addHeader(s1: String, s2: String) { + def addHeader(s1: String, s2: String): Unit = { headers += (s1 -> (headers.getOrElse(s1, Nil) ::: List(s2))) } - def setHeader(s1: String, s2: String) { + def setHeader(s1: String, s2: String): Unit = { headers += (s1 -> List(s2)) } @@ -89,24 +89,24 @@ class MockHttpServletResponse(var writer: PrintWriter, var outputStream: Servlet headers.keySet.toSeq.asJava } - def addDateHeader(s: String, l: Long) { + def addDateHeader(s: String, l: Long): Unit = { addHeader(s, (new Date(l)).toString) } - def setDateHeader(s: String, l: Long) { + def setDateHeader(s: String, l: Long): Unit = { setHeader(s, (new Date(l)).toString) } - def sendRedirect(uri: String) { + override def sendRedirect(uri: String): Unit = { // Send back a 301 to the URL mentioned statusCode = 301 addHeader("Location", uri) } - def sendError(code: Int) { + def sendError(code: Int): Unit = { statusCode = code } - def sendError(code: Int, s: String) { + def sendError(code: Int, s: String): Unit = { sendError(code) statusString = s } @@ -129,14 +129,14 @@ class MockHttpServletResponse(var writer: PrintWriter, var outputStream: Servlet } def getLocale: Locale = locale def setLocale(l: Locale) = locale = l - def reset { + def reset: Unit = { // well, reset all the state to it's original values. yikes. later. } def isCommitted = false - def resetBuffer { + def resetBuffer: Unit = { // reset the buffer. } - def flushBuffer { + def flushBuffer: Unit = { // flush the buffer } def getBufferSize(): Int = bufferSize @@ -149,4 +149,6 @@ class MockHttpServletResponse(var writer: PrintWriter, var outputStream: Servlet def getContentType(): String = contentType def getCharacterEncoding(): String = charEncoding def setContentLengthLong(l: Long): Unit = contentType = l.toString + + override def sendRedirect(location: String, sc: Int, clearBuffer: Boolean): Unit = ??? 
} diff --git a/web/testkit/src/main/scala/net/liftweb/mocks/MockServletContext.scala b/web/testkit/src/main/scala/net/liftweb/mocks/MockServletContext.scala index 02af378db3..dd24a4739b 100644 --- a/web/testkit/src/main/scala/net/liftweb/mocks/MockServletContext.scala +++ b/web/testkit/src/main/scala/net/liftweb/mocks/MockServletContext.scala @@ -20,8 +20,7 @@ package mocks import common.Logger import scala.collection.mutable.HashMap -import scala.collection.JavaConverters._ - +import scala.jdk.CollectionConverters._ import java.io.PrintWriter import java.io.StringReader import java.io.BufferedReader @@ -34,8 +33,10 @@ import java.util.Arrays import java.util.Date import java.util.Locale import java.util.Vector -import javax.servlet._ -import javax.servlet.http._ +import jakarta.servlet._ +import jakarta.servlet.http._ + +import scala.annotation.nowarn /** * An example of how to use these mock classes in your unit tests: @@ -71,8 +72,8 @@ class MockServletContext(var target: String) extends ServletContext { def getInitParameterNames(): java.util.Enumeration[String] = new Vector[String]().elements def getAttribute(f: String): Object = null def getAttributeNames(): java.util.Enumeration[String] = new Vector[String]().elements - def removeAttribute(name: String) {} - def setAttribute(name: String, o: Object) {} + def removeAttribute(name: String): Unit = {} + def setAttribute(name: String, o: Object): Unit = {} def getContext(path: String): ServletContext = this def getMajorVersion() = 2 def getMimeType(file: String): String = null @@ -81,6 +82,13 @@ class MockServletContext(var target: String) extends ServletContext { def getNamedDispatcher(name: String): RequestDispatcher = null def getRequestDispatcher(path: String): RequestDispatcher = null def getResource(path: String): java.net.URL = null + def addJspFile(x$1: String, x$2: String): jakarta.servlet.ServletRegistration.Dynamic = null + def getRequestCharacterEncoding(): String = null + def getResponseCharacterEncoding(): String = null + def getSessionTimeout(): Int = 5 + def setRequestCharacterEncoding(x$1: String): Unit = {} + def setResponseCharacterEncoding(x$1: String): Unit = {} + def setSessionTimeout(x$1: Int): Unit = {} def getResourceAsStream(path: String): java.io.InputStream = { val file = Paths.get(target + path) if (Files.exists(file)) { @@ -96,33 +104,33 @@ class MockServletContext(var target: String) extends ServletContext { def getServletContextName(): String = null def getServletNames(): java.util.Enumeration[String] = new Vector[String]().elements def getServlets(): java.util.Enumeration[Servlet] = new Vector[Servlet]().elements - def log(msg: String, t: Throwable) { + def log(msg: String, t: Throwable): Unit = { t.printStackTrace log(msg) } - def log(e: Exception, msg: String) { + def log(e: Exception, msg: String): Unit = { e.printStackTrace log(msg) } def log(msg: String) = println("MockServletContext.log: " + msg) def getContextPath(): String = null - def addFilter(x$1: String,x$2: Class[_ <: javax.servlet.Filter]): FilterRegistration.Dynamic = null - def addFilter(x$1: String,x$2: javax.servlet.Filter): FilterRegistration.Dynamic = null + def addFilter(x$1: String,x$2: Class[_ <: jakarta.servlet.Filter]): FilterRegistration.Dynamic = null + def addFilter(x$1: String,x$2: jakarta.servlet.Filter): FilterRegistration.Dynamic = null def addFilter(x$1: String,x$2: String): FilterRegistration.Dynamic = null def addListener(listenerClass: Class[_ <: java.util.EventListener]): Unit = () def addListener[T <: 
java.util.EventListener](listener: T): Unit = () def addListener(listenerClass: String): Unit = () - def addServlet(servletNAme: String, servletClass: Class[_ <: javax.servlet.Servlet]): ServletRegistration.Dynamic = null - def addServlet(servletName: String, servlet: javax.servlet.Servlet): ServletRegistration.Dynamic = null + def addServlet(servletNAme: String, servletClass: Class[_ <: jakarta.servlet.Servlet]): ServletRegistration.Dynamic = null + def addServlet(servletName: String, servlet: jakarta.servlet.Servlet): ServletRegistration.Dynamic = null def addServlet(servletName: String, servletClass: String): ServletRegistration.Dynamic = null // This remains unimplemented since we can't provide a Null here due to type restrictions. - def createFilter[T <: javax.servlet.Filter](filter: Class[T]): T = ??? + def createFilter[T <: jakarta.servlet.Filter](filter: Class[T]): T = ??? def createListener[T <: java.util.EventListener](listener: Class[T]): T = ??? - def createServlet[T <: javax.servlet.Servlet](servletClass: Class[T]): T = ??? + def createServlet[T <: jakarta.servlet.Servlet](servletClass: Class[T]): T = ??? def getDefaultSessionTrackingModes(): java.util.Set[SessionTrackingMode] = Set.empty[SessionTrackingMode].asJava @@ -130,15 +138,15 @@ class MockServletContext(var target: String) extends ServletContext { def getClassLoader(): ClassLoader = getClass.getClassLoader def getEffectiveMajorVersion(): Int = 0 def getEffectiveMinorVersion(): Int = 0 - def getEffectiveSessionTrackingModes(): java.util.Set[javax.servlet.SessionTrackingMode] = null - def getFilterRegistration(x$1: String): javax.servlet.FilterRegistration = null - def getFilterRegistrations(): java.util.Map[String, _ <: javax.servlet.FilterRegistration] = null - def getJspConfigDescriptor(): javax.servlet.descriptor.JspConfigDescriptor = null - def getServletRegistration(x$1: String): javax.servlet.ServletRegistration = null - def getServletRegistrations(): java.util.Map[String, _ <: javax.servlet.ServletRegistration] = null - def getSessionCookieConfig(): javax.servlet.SessionCookieConfig = null + def getEffectiveSessionTrackingModes(): java.util.Set[jakarta.servlet.SessionTrackingMode] = null + def getFilterRegistration(x$1: String): jakarta.servlet.FilterRegistration = null + def getFilterRegistrations(): java.util.Map[String, _ <: jakarta.servlet.FilterRegistration] = null + def getJspConfigDescriptor(): jakarta.servlet.descriptor.JspConfigDescriptor = null + def getServletRegistration(x$1: String): jakarta.servlet.ServletRegistration = null + def getServletRegistrations(): java.util.Map[String, _ <: jakarta.servlet.ServletRegistration] = null + def getSessionCookieConfig(): jakarta.servlet.SessionCookieConfig = null def setInitParameter(key: String,value: String): Boolean = true - def setSessionTrackingModes(trackingModes: java.util.Set[javax.servlet.SessionTrackingMode]): Unit = () + def setSessionTrackingModes(trackingModes: java.util.Set[jakarta.servlet.SessionTrackingMode]): Unit = () def getVirtualServerName(): String = null } @@ -160,7 +168,7 @@ class MockFilterConfig(servletContext: ServletContext) extends FilterConfig { * @author Steve Jenson (stevej@pobox.com) */ class DoNothingFilterChain extends FilterChain with Logger { - def doFilter(req: ServletRequest, res: ServletResponse) { debug("Doing nothing on filter chain") } + def doFilter(req: ServletRequest, res: ServletResponse): Unit = { debug("Doing nothing on filter chain") } } /** @@ -172,7 +180,7 @@ class MockServletInputStream(is: InputStream) 
extends ServletInputStream { def read() = is.read() def isFinished(): Boolean = is.available() > 0 def isReady(): Boolean = true - def setReadListener(x$1: javax.servlet.ReadListener): Unit = () + def setReadListener(x$1: jakarta.servlet.ReadListener): Unit = () } /** @@ -181,12 +189,12 @@ class MockServletInputStream(is: InputStream) extends ServletInputStream { * @author Steve Jenson (stevej@pobox.com) */ class MockServletOutputStream(os: ByteArrayOutputStream) extends ServletOutputStream { - def write(b: Int) { + def write(b: Int): Unit = { os.write(b) } def isReady(): Boolean = true - def setWriteListener(x$1: javax.servlet.WriteListener): Unit = () + def setWriteListener(x$1: jakarta.servlet.WriteListener): Unit = () } /** @@ -201,7 +209,7 @@ class MockHttpSession extends HttpSession { protected var maxii: Int = 0 protected var creationTime: Long = System.currentTimeMillis def isNew = false - def invalidate {} + def invalidate: Unit = {} def getValue(key: String): Object = values.get(key) match { case Some(v) => v case None => null @@ -218,9 +226,8 @@ class MockHttpSession extends HttpSession { def getAttributeNames(): java.util.Enumeration[String] = new java.util.Enumeration[String] { private val keys = attr.keys.iterator def hasMoreElements() = keys.hasNext - def nextElement(): String = keys.next + def nextElement(): String = keys.next() } - def getSessionContext(): HttpSessionContext = null def getMaxInactiveInterval(): Int = maxii def setMaxInactiveInterval(i: Int): Unit = maxii = i def getServletContext(): ServletContext = null diff --git a/web/testkit/src/test/scala/net/liftweb/http/testing/MockHttpRequestSpec.scala b/web/testkit/src/test/scala/net/liftweb/http/testing/MockHttpRequestSpec.scala index 393559c7c3..914e53e14a 100644 --- a/web/testkit/src/test/scala/net/liftweb/http/testing/MockHttpRequestSpec.scala +++ b/web/testkit/src/test/scala/net/liftweb/http/testing/MockHttpRequestSpec.scala @@ -18,8 +18,7 @@ package mocks import org.specs2.mutable.Specification -import json.JsonDSL._ - +import org.json4s.JsonDSL._ /** * System under specification for MockHttpRequest. 
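The MockHttpRequestSpec hunks above and below track the move from Lift's bundled `json` package to `org.json4s` and switch to the explicit `body_=` setter, whose overloads pick a default content type. A minimal sketch of that usage, outside the patch, with an arbitrary URL and object name:

```scala
// Sketch only, not part of the patch: exercising the json4s DSL against the
// testkit mock request, mirroring the assertions in the surrounding hunks.
import org.json4s.JsonDSL._
import net.liftweb.mocks.MockHttpServletRequest

object JsonBodyExample extends App {
  // Arbitrary URL and context path, chosen only for illustration.
  val testRequest = new MockHttpServletRequest("http://test.example.com/test", "/test")

  // Assigning a JValue body lets the mock infer "application/json".
  testRequest.body_=(("name" -> "joe") ~ ("age" -> 35))
  assert(testRequest.contentType == "application/json")
}
```

As the neighbouring hunks show, the String and NodeSeq overloads of `body_=` default the content type to `text/plain` and `text/xml` respectively.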
@@ -95,7 +94,7 @@ class MockHttpRequestSpec extends Specification { "properly set a default content type for JSON" in { val testRequest = new MockHttpServletRequest(TEST_URL, "/test") - testRequest.body = ("name" -> "joe") + testRequest.body_=("name" -> "joe") testRequest.contentType must_== "application/json" } @@ -111,7 +110,7 @@ class MockHttpRequestSpec extends Specification { "properly set a default content type for XML" in { val testRequest = new MockHttpServletRequest(TEST_URL, "/test") - testRequest.body = + testRequest.body_=() testRequest.contentType must_== "text/xml" } @@ -127,7 +126,7 @@ class MockHttpRequestSpec extends Specification { "properly set a default content type for a String" in { val testRequest = new MockHttpServletRequest(TEST_URL, "/test") - testRequest.body = "test" + testRequest.body_=("test") testRequest.contentType must_== "text/plain" } diff --git a/web/testkit/src/test/scala/net/liftweb/http/testing/TestObjects.scala b/web/testkit/src/test/scala/net/liftweb/http/testing/TestObjects.scala index 4a3d594b48..1a86e74e4b 100644 --- a/web/testkit/src/test/scala/net/liftweb/http/testing/TestObjects.scala +++ b/web/testkit/src/test/scala/net/liftweb/http/testing/TestObjects.scala @@ -33,10 +33,8 @@ object MyCode extends TestKit { val baseUrl = "" val l2: TestResponse = post("/foo") - l2.foreach { - x: HttpResponse => - val l3: TestResponse = x.get("ddd") - println("Hello") + l2.foreach { (x: HttpResponse) => + val l3: TestResponse = x.get("ddd") } @@ -50,10 +48,8 @@ object MyBoxCode extends RequestKit { def baseUrl = "" val l2: Box[TheResponse] = post("/foo") - l2.foreach { - x: TheResponse => - val l3: Box[TheResponse] = x.get("ddd") - println("Hello") + l2.foreach { (x: TheResponse) => + val l3: Box[TheResponse] = x.get("ddd") } diff --git a/web/webkit/src/main/scala/net/liftweb/builtin/comet/AsyncRenderComet.scala b/web/webkit/src/main/scala/net/liftweb/builtin/comet/AsyncRenderComet.scala index 900aef014b..e7d6f5aa17 100644 --- a/web/webkit/src/main/scala/net/liftweb/builtin/comet/AsyncRenderComet.scala +++ b/web/webkit/src/main/scala/net/liftweb/builtin/comet/AsyncRenderComet.scala @@ -40,7 +40,7 @@ class AsyncRenderComet extends MessageCometActor { override def lifespan: Box[TimeSpan] = Full(90.seconds) // make this method visible so that we can initialize the actor - override def initCometActor(creationInfo: CometCreationInfo) { + override def initCometActor(creationInfo: CometCreationInfo): Unit = { super.initCometActor(creationInfo) } diff --git a/web/webkit/src/main/scala/net/liftweb/builtin/snippet/Comet.scala b/web/webkit/src/main/scala/net/liftweb/builtin/snippet/Comet.scala index ba21e120d4..0f37edfc99 100644 --- a/web/webkit/src/main/scala/net/liftweb/builtin/snippet/Comet.scala +++ b/web/webkit/src/main/scala/net/liftweb/builtin/snippet/Comet.scala @@ -69,7 +69,7 @@ object Comet extends DispatchSnippet with LazyLoggable { cometActor.buildSpan(response.inSpan) case failedResult => - cometActor.cometRenderTimeoutHandler openOr { + cometActor.cometRenderTimeoutHandler() openOr { throw new CometTimeoutException(s"Type: ${cometActor.theType}, name: ${cometActor.name}; result was: $failedResult") } } diff --git a/web/webkit/src/main/scala/net/liftweb/builtin/snippet/Menu.scala b/web/webkit/src/main/scala/net/liftweb/builtin/snippet/Menu.scala index 72f8775684..00604d952f 100644 --- a/web/webkit/src/main/scala/net/liftweb/builtin/snippet/Menu.scala +++ b/web/webkit/src/main/scala/net/liftweb/builtin/snippet/Menu.scala @@ -428,8 +428,6 @@ object Menu 
extends DispatchSnippet { for { name <- S.attr("name").toList } yield { - type T = Q forSome { type Q } - // Builds a link for the given loc def buildLink[T](loc : Loc[T]) = { Group(SiteMap.buildLink(name, text) match { @@ -441,8 +439,8 @@ object Menu extends DispatchSnippet { } (S.originalRequest.flatMap(_.location), S.attr("param"), SiteMap.findAndTestLoc(name)) match { - case (_, Full(param), Full(loc: Loc[_] with ConvertableLoc[_])) => { - val typedLoc = loc.asInstanceOf[Loc[T] with ConvertableLoc[T]] + case (_, Full(param), Full(loc: Loc[t] with ConvertableLoc[_])) => { + val typedLoc = loc.asInstanceOf[Loc[t] with ConvertableLoc[t]] (for { pv <- typedLoc.convert(param) diff --git a/web/webkit/src/main/scala/net/liftweb/builtin/snippet/Tail.scala b/web/webkit/src/main/scala/net/liftweb/builtin/snippet/Tail.scala index 40b70885e9..1cc2c11869 100644 --- a/web/webkit/src/main/scala/net/liftweb/builtin/snippet/Tail.scala +++ b/web/webkit/src/main/scala/net/liftweb/builtin/snippet/Tail.scala @@ -60,10 +60,10 @@ object Head extends DispatchSnippet { case e: Elem if (null eq e.prefix) => NodeSeq.Empty case x => x } - - val xhtml = validHeadTagsOnly(_xhtml) - { + val xhtml = validHeadTagsOnly(_xhtml) + + { if ((S.attr("withResourceId") or S.attr("withresourceid")).filter(Helpers.toBoolean).isDefined) { WithResourceId.render(xhtml) } else { diff --git a/web/webkit/src/main/scala/net/liftweb/http/CometActor.scala b/web/webkit/src/main/scala/net/liftweb/http/CometActor.scala index 9e412d1ed2..660613dc21 100644 --- a/web/webkit/src/main/scala/net/liftweb/http/CometActor.scala +++ b/web/webkit/src/main/scala/net/liftweb/http/CometActor.scala @@ -21,7 +21,8 @@ import net.liftweb.common._ import net.liftweb.actor._ import net.liftweb.util.Helpers._ import net.liftweb.util._ -import net.liftweb.json._ +import org.json4s._ +import org.json4s.native._ import scala.xml.{NodeSeq, Text, Elem, Node, Group, Null, PrefixedAttribute, UnprefixedAttribute} import scala.collection.mutable.ListBuffer import net.liftweb.http.js._ @@ -222,13 +223,13 @@ trait ListenerManager { * Called after RemoveAListener-message is processed and no more listeners exist. * Default does nothing. */ - protected def onListenersListEmptied() { + protected def onListenersListEmptied(): Unit ={ } /** * Update the listeners with the message generated by createUpdate */ - protected def updateListeners(listeners: List[ActorTest] = listeners) { + protected def updateListeners(listeners: List[ActorTest] = listeners): Unit ={ val update = createUpdate listeners foreach { @@ -241,7 +242,7 @@ trait ListenerManager { * Send a message we create to all of the listeners. Note that with this * invocation the createUpdate method is not used. */ - protected def sendListenersMessage(msg: Any) { + protected def sendListenersMessage(msg: Any): Unit ={ listeners foreach (_._1 ! msg) } @@ -275,16 +276,6 @@ trait ListenerManager { protected def lowPriority: PartialFunction[Any, Unit] = Map.empty } -/** - * A LiftActorJ with ListenerManager. Subclass this class - * to get a Java-usable LiftActorJ with ListenerManager - */ -abstract class LiftActorJWithListenerManager extends LiftActorJ with ListenerManager { - protected override def messageHandler: PartialFunction[Any, Unit] = - highPriority orElse mediumPriority orElse - listenerService orElse lowPriority orElse _messageHandler -} - /** * This trait adds functionality to automatically register with a given * Actor using AddAListener and RemoveAListener control messages. 
The most @@ -306,12 +297,12 @@ trait CometListener extends BaseCometActor { */ protected def registerWith: SimpleActor[Any] - abstract override protected def localSetup() { + abstract override protected def localSetup(): Unit ={ registerWith ! AddAListener(this, { case _ => true }) super.localSetup() } - abstract override protected def localShutdown() { + abstract override protected def localShutdown(): Unit ={ registerWith ! RemoveAListener(this) super.localShutdown() } @@ -327,7 +318,7 @@ trait LiftCometActor extends TypedActor[Any, Any] with ForwardableActor[Any, Any def lastListenerTime: Long def lastRenderTime: Long - private[http] def callInitCometActor(creationInfo: CometCreationInfo) { + private[http] def callInitCometActor(creationInfo: CometCreationInfo): Unit ={ initCometActor(creationInfo) } @@ -422,29 +413,11 @@ trait LiftCometActor extends TypedActor[Any, Any] with ForwardableActor[Any, Any private var _myLocale = Locale.getDefault() - private[http] def setCometActorLocale(loc: Locale) { + private[http] def setCometActorLocale(loc: Locale): Unit = { _myLocale = loc } } -/** - * Subclass from this class if you're in Java-land - * and want a CometActor - */ -abstract class CometActorJ extends LiftActorJ with CometActor { - - override def lowPriority = _messageHandler - -} - -/** - * Subclass from this class if you want a CometActorJ with - * CometListeners - */ -abstract class CometActorJWithCometListener extends CometActorJ with CometListener { - override def lowPriority = _messageHandler -} - trait CometActor extends BaseCometActor { override final private[http] def partialUpdateStream_? = false } @@ -454,7 +427,7 @@ trait MessageCometActor extends BaseCometActor { override final def render = NodeSeq.Empty - protected def pushMessage(cmd: => JsCmd) { + protected def pushMessage(cmd: => JsCmd): Unit = { partialUpdate(cmd) } } @@ -528,7 +501,7 @@ trait BaseCometActor extends LiftActor with LiftCometActor with CssBindImplicits /** * set the last rendering... ignore if we're not caching */ - private def lastRendering_=(last: RenderOut) { + private def lastRendering_=(last: RenderOut): Unit = { if (!dontCacheRendering) { _realLastRendering = last } @@ -604,7 +577,7 @@ trait BaseCometActor extends LiftActor with LiftCometActor with CssBindImplicits * It's seriously suboptimal to override this method. 
Instead * use localSetup() */ - protected def initCometActor(creationInfo: CometCreationInfo) { + protected def initCometActor(creationInfo: CometCreationInfo): Unit = { if (!dontCacheRendering) { lastRendering = RenderOut(Full(defaultHtml), Empty, Empty, Empty, false) @@ -653,7 +626,7 @@ trait BaseCometActor extends LiftActor with LiftCometActor with CssBindImplicits * * @param h -- the PartialFunction that can handle a JSON request */ - def appendJsonHandler(h: PartialFunction[Any, JsCmd]) { + def appendJsonHandler(h: PartialFunction[Any, JsCmd]): Unit = { jsonHandlerChain = h orElse jsonHandlerChain } @@ -683,7 +656,7 @@ trait BaseCometActor extends LiftActor with LiftCometActor with CssBindImplicits */ def jsonToIncludeInCode: JsCmd = _jsonToIncludeCode - private lazy val (_sendJson, _jsonToIncludeCode) = S.createJsonFunc(Full(_defaultPrefix), onJsonError, receiveJson _) + private lazy val (_sendJson, _jsonToIncludeCode) = S.createJsonFunc(Full(_defaultPrefix), onJsonError, () => receiveJson) /** * Set this method to true to have the Json call code included in the Comet output @@ -700,7 +673,7 @@ trait BaseCometActor extends LiftActor with LiftCometActor with CssBindImplicits /** * How to report an error that occurs during message dispatch */ - protected def reportError(msg: String, exception: Exception) { + protected def reportError(msg: String, exception: Exception): Unit = { logger.error(msg, exception) } @@ -961,7 +934,7 @@ trait BaseCometActor extends LiftActor with LiftCometActor with CssBindImplicits case ShutdownIfPastLifespan => for { - ls <- lifespan if listeners.isEmpty && (lastListenerTime + ls.millis + 1000l) < millis + ls <- lifespan if listeners.isEmpty && (lastListenerTime + ls.millis + 1000L) < millis } { this ! ShutDown } @@ -987,7 +960,7 @@ trait BaseCometActor extends LiftActor with LiftCometActor with CssBindImplicits _localShutdown() case PartialUpdateMsg(cmdF) => { - val cmd: JsCmd = cmdF.apply + val cmd: JsCmd = cmdF.apply() val time = Helpers.nextNum val delta = JsDelta(time, cmd) receivedDelta = true @@ -1043,7 +1016,7 @@ trait BaseCometActor extends LiftActor with LiftCometActor with CssBindImplicits * @param sendAll -- Should the fixed part of the CometActor be * rendered. */ - def reRender(sendAll: Boolean) { + def reRender(sendAll: Boolean): Unit = { this ! ReRender(sendAll) } @@ -1054,7 +1027,7 @@ trait BaseCometActor extends LiftActor with LiftCometActor with CssBindImplicits * be sent to the client. It's a much better practice to use * partialUpdate for non-trivial CometActor components. */ - def reRender() { + def reRender(): Unit = { reRender(false) } @@ -1073,7 +1046,7 @@ trait BaseCometActor extends LiftActor with LiftCometActor with CssBindImplicits * You can change the behavior of the wiring dependency management * by overriding this method */ - protected def clearWiringDependencies() { + protected def clearWiringDependencies(): Unit = { if (!manualWiringDependencyManagement) { theSession.clearPostPageJavaScriptForThisPage() unregisterFromAllDependencies() @@ -1149,11 +1122,11 @@ trait BaseCometActor extends LiftActor with LiftCometActor with CssBindImplicits * all of the currently listening browser tabs. This is the * preferred method over reRender to update the component */ - protected def partialUpdate(cmd: => JsCmd) { + protected def partialUpdate(cmd: => JsCmd): Unit = { this ! 
PartialUpdateMsg(() => cmd) } - protected def startQuestion(what: Any) { + protected def startQuestion(what: Any): Unit = { } /** @@ -1173,10 +1146,10 @@ trait BaseCometActor extends LiftActor with LiftCometActor with CssBindImplicits * or capture any request parameters that you care about rather * the keeping the whole Req reference. */ - protected def captureInitialReq(initialReq: Box[Req]) { + protected def captureInitialReq(initialReq: Box[Req]): Unit = { } - private def _localShutdown() { + private def _localShutdown(): Unit = { localShutdown() clearNotices listeners = Nil @@ -1218,7 +1191,7 @@ trait BaseCometActor extends LiftActor with LiftCometActor with CssBindImplicits * Ask another CometActor a question. That other CometActor will * take over the screen real estate until the question is answered. */ - protected def ask(who: LiftCometActor, what: Any)(answerWith: Any => Unit) { + protected def ask(who: LiftCometActor, what: Any)(answerWith: Any => Unit): Unit = { who.callInitCometActor(CometCreationInfo(who.uniqueId, name, defaultHtml, attributes, theSession)) theSession.addCometActor(who) @@ -1228,7 +1201,7 @@ trait BaseCometActor extends LiftActor with LiftCometActor with CssBindImplicits who ! AskQuestion(what, this, listeners) } - protected def answer(answer: Any) { + protected def answer(answer: Any): Unit = { whosAsking.foreach(_ !? AnswerQuestion(answer, listeners)) whosAsking = Empty performReRender(false) @@ -1241,7 +1214,7 @@ trait BaseCometActor extends LiftActor with LiftCometActor with CssBindImplicits * is helpful if you use Lift's CSS Selector Transforms to define * rendering. */ - protected implicit def nsToNsFuncToRenderOut(f: NodeSeq => NodeSeq) = { + protected implicit def nsToNsFuncToRenderOut(f: NodeSeq => NodeSeq) : RenderOut = { val additionalJs = if (autoIncludeJsonCode) { Full(jsonToIncludeInCode) @@ -1267,7 +1240,7 @@ trait BaseCometActor extends LiftActor with LiftCometActor with CssBindImplicits Empty } - new RenderOut(Full(in: NodeSeq), internalFixedRender, additionalJs, Empty, false) + new RenderOut(Full(in: NodeSeq), internalFixedRender, additionalJs, Empty, false) } protected implicit def jsToXmlOrJsCmd(in: JsCmd): RenderOut = { @@ -1293,89 +1266,89 @@ trait BaseCometActor extends LiftActor with LiftCometActor with CssBindImplicits /** * Similar with S.error */ - def error(n: String) { + def error(n: String): Unit = { error(Text(n)) } /** * Similar with S.error */ - def error(n: NodeSeq) { + def error(n: NodeSeq): Unit = { notices += ((NoticeType.Error, n, Empty)) } /** * Similar with S.error */ - def error(id: String, n: NodeSeq) { + def error(id: String, n: NodeSeq): Unit = { notices += ((NoticeType.Error, n, Full(id))) } /** * Similar with S.error */ - def error(id: String, n: String) { + def error(id: String, n: String): Unit = { error(id, Text(n)) } /** * Similar with S.notice */ - def notice(n: String) { + def notice(n: String): Unit = { notice(Text(n)) } /** * Similar with S.notice */ - def notice(n: NodeSeq) { + def notice(n: NodeSeq): Unit = { notices += ((NoticeType.Notice, n, Empty)) } /** * Similar with S.notice */ - def notice(id: String, n: NodeSeq) { + def notice(id: String, n: NodeSeq): Unit = { notices += ((NoticeType.Notice, n, Full(id))) } /** * Similar with S.notice */ - def notice(id: String, n: String) { + def notice(id: String, n: String): Unit = { notice(id, Text(n)) } /** * Similar with S.warning */ - def warning(n: String) { + def warning(n: String): Unit = { warning(Text(n)) } /** * Similar with S.warning */ - def 
warning(n: NodeSeq) { + def warning(n: NodeSeq): Unit = { notices += ((NoticeType.Warning, n, Empty)) } /** * Similar with S.warning */ - def warning(id: String, n: NodeSeq) { + def warning(id: String, n: NodeSeq): Unit = { notices += ((NoticeType.Warning, n, Full(id))) } /** * Similar with S.warning */ - def warning(id: String, n: String) { + def warning(id: String, n: String): Unit = { warning(id, Text(n)) } - private def clearNotices { - notices.clear + private def clearNotices: Unit = { + notices.clear() } } diff --git a/web/webkit/src/main/scala/net/liftweb/http/ContentParser.scala b/web/webkit/src/main/scala/net/liftweb/http/ContentParser.scala index bb6bfae5dc..e0a4ff65ea 100644 --- a/web/webkit/src/main/scala/net/liftweb/http/ContentParser.scala +++ b/web/webkit/src/main/scala/net/liftweb/http/ContentParser.scala @@ -54,7 +54,7 @@ object ContentParser { * @return your parser wrapped up to handle an `InputStream` */ def toInputStreamParser(simpleParser: String=>Box[NodeSeq]): InputStream=>Box[NodeSeq] = { - is:InputStream => + (is: InputStream) => for { bytes <- Helpers.tryo(Helpers.readWholeStream(is)) elems <- simpleParser(new String(bytes, "UTF-8")) diff --git a/web/webkit/src/main/scala/net/liftweb/http/LiftMerge.scala b/web/webkit/src/main/scala/net/liftweb/http/LiftMerge.scala index 5fda671df3..a720887f1b 100644 --- a/web/webkit/src/main/scala/net/liftweb/http/LiftMerge.scala +++ b/web/webkit/src/main/scala/net/liftweb/http/LiftMerge.scala @@ -44,8 +44,8 @@ private[this] case class HtmlState( private[http] trait LiftMerge { self: LiftSession => - private def scriptUrl(scriptFile: String) = { - S.encodeURL(s"${LiftRules.liftPath}/$scriptFile") + private def pageJsUrl = { + S.encodeURL(s"${S.contextPath}/${LiftRules.pageJsFunc().mkString("/")}/${RenderVersion.get}.js") } // Gather all page-specific JS into one JsCmd. @@ -63,7 +63,7 @@ private[http] trait LiftMerge { private def pageScopedScriptFileWith(cmd: JsCmd) = { pageScript(Full(JavaScriptResponse(cmd, Nil, Nil, 200))) - + } /** @@ -74,10 +74,10 @@ private[http] trait LiftMerge { val waitUntil = millis + LiftRules.lazySnippetTimeout.vend.millis val stripComments: Boolean = LiftRules.stripComments.vend - def waitUntilSnippetsDone() { + def waitUntilSnippetsDone(): Unit ={ val myMillis = millis snippetHashs.synchronized { - if (myMillis >= waitUntil || snippetHashs.isEmpty || !snippetHashs.values.toIterator.contains(Empty)) () + if (myMillis >= waitUntil || snippetHashs.isEmpty || !snippetHashs.values.iterator.contains(Empty)) () else { snippetHashs.wait(waitUntil - myMillis) waitUntilSnippetsDone() @@ -160,59 +160,59 @@ private[http] trait LiftMerge { startingState.copy(headChild = false, headInBodyChild = false, tailInBodyChild = false, bodyChild = false) } - val bodyHead = childInfo.headInBodyChild && ! headInBodyChild - val bodyTail = childInfo.tailInBodyChild && ! tailInBodyChild + val bodyHead = childInfo.headInBodyChild && ! headInBodyChild + val bodyTail = childInfo.tailInBodyChild && ! 
tailInBodyChild - HtmlNormalizer - .normalizeNode(node, contextPath, stripComments, LiftRules.extractInlineJavaScript) - .map { - case normalized @ NodeAndEventJs(normalizedElement: Elem, _) => - val normalizedChildren = - normalizeMergeAndExtractEvents(normalizedElement.child, childInfo) + HtmlNormalizer + .normalizeNode(node, contextPath, stripComments, LiftRules.extractInlineJavaScript) + .map { + case normalized @ NodeAndEventJs(normalizedElement: Elem, _) => + val normalizedChildren = + normalizeMergeAndExtractEvents(normalizedElement.child, childInfo) - normalized.copy( - normalizedElement.copy(child = normalizedChildren.nodes), - js = normalized.js & normalizedChildren.js - ) + normalized.copy( + normalizedElement.copy(child = normalizedChildren.nodes), + js = normalized.js & normalizedChildren.js + ) - case other => - other - } - .map { normalizedResults: NodeAndEventJs => - node match { - case e: Elem if e.label == "node" && - e.prefix == "lift_deferred" => - val deferredNodes: Seq[NodesAndEventJs] = { - for { - idAttribute <- e.attributes("id").take(1) - id = idAttribute.text - nodes <- processedSnippets.get(id) - } yield { - normalizeMergeAndExtractEvents(nodes, startingState) - }}.toSeq - - deferredNodes.foldLeft(soFar.append(normalizedResults))(_ append _) - - case _ => - if (headChild) { - headChildren ++= normalizedResults.node - } else if (headInBodyChild) { - addlHead ++= normalizedResults.node - } else if (tailInBodyChild) { - addlTail ++= normalizedResults.node - } else if (_bodyChild && ! bodyHead && ! bodyTail) { - bodyChildren ++= normalizedResults.node - } - - if (bodyHead || bodyTail) { - soFar.append(normalizedResults.js) - } else { - soFar.append(normalizedResults) - } - } - } getOrElse { - soFar + case other => + other + } + .map { (normalizedResults: NodeAndEventJs) => + node match { + case e: Elem if e.label == "node" && + e.prefix == "lift_deferred" => + val deferredNodes: Seq[NodesAndEventJs] = { + for { + idAttribute <- e.attributes("id").take(1) + id = idAttribute.text + nodes <- processedSnippets.get(id) + } yield { + normalizeMergeAndExtractEvents(nodes, startingState) + }}.toSeq + + deferredNodes.foldLeft(soFar.append(normalizedResults))(_ append _) + + case _ => + if (headChild) { + headChildren ++= normalizedResults.node + } else if (headInBodyChild) { + addlHead ++= normalizedResults.node + } else if (tailInBodyChild) { + addlTail ++= normalizedResults.node + } else if (_bodyChild && ! bodyHead && ! 
bodyTail) { + bodyChildren ++= normalizedResults.node + } + + if (bodyHead || bodyTail) { + soFar.append(normalizedResults.js) + } else { + soFar.append(normalizedResults) + } } + } getOrElse { + soFar + } } } @@ -248,7 +248,7 @@ private[http] trait LiftMerge { } // Appends ajax script to body - if (LiftRules.autoIncludeAjaxCalc.vend().apply(this)) { + if (LiftRules.autoIncludeAjaxCalc.vend()(this)) { bodyChildren += , , , @@ -70,7 +66,7 @@ class LiftMergeSpec extends Specification with XmlMatchers with Mockito { ): NodeSeq) } - "merge tail segments in the page body in order at the end of the body" in new WithRules(testRules) { + "merge tail segments in the page body in order at the end of the body" in withLiftRules(testRules) { val result = testSession.merge( @@ -97,14 +93,14 @@ class LiftMergeSpec extends Specification with XmlMatchers with Mockito { mockReq ) - (result \ "body" \ "_").takeRight(3) must_== (Seq( + (result \ "body" \ "_").takeRight(3) === (Seq( , , ): NodeSeq) } - "not merge tail segments in the head" in new WithRules(testRules) { + "not merge tail segments in the head" in withLiftRules(testRules) { val result = testSession.merge( @@ -133,14 +129,14 @@ class LiftMergeSpec extends Specification with XmlMatchers with Mockito { mockReq ) - (result \ "body" \ "_").takeRight(3) must_== (Seq( + (result \ "body" \ "_").takeRight(3) === (Seq( , , ): NodeSeq) } - "normalize absolute link hrefs everywhere" in new WithLiftContext(testRules, testSession) { + "normalize absolute link hrefs everywhere" in withLiftContext(testRules, testSession) { val result = testSession.merge( @@ -168,13 +164,13 @@ class LiftMergeSpec extends Specification with XmlMatchers with Mockito { mockReq ) - (result \\ "link").map(_ \@ "href") must_== + (result \\ "link").map(_ \@ "href") === "/context-path/testlink" :: "/context-path/testlink2" :: "/context-path/testlink3" :: Nil } - "normalize absolute script srcs everywhere" in new WithLiftContext(testRules, testSession) { + "normalize absolute script srcs everywhere" in withLiftContext(testRules, testSession) { val result = testSession.merge( @@ -202,12 +198,12 @@ class LiftMergeSpec extends Specification with XmlMatchers with Mockito { mockReq ) - (result \\ "script").map(_ \@ "src") must_== + (result \\ "script").map(_ \@ "src") === "/context-path/testscript" :: "/context-path/testscript2" :: Nil } - "normalize absolute a hrefs everywhere" in new WithLiftContext(testRules, testSession) { + "normalize absolute a hrefs everywhere" in withLiftContext(testRules, testSession) { val result = testSession.merge( @@ -235,7 +231,7 @@ class LiftMergeSpec extends Specification with XmlMatchers with Mockito { mockReq ) - (result \\ "a").map(_ \@ "href") must_== + (result \\ "a").map(_ \@ "href") === "/context-path/testa1" :: "testa3" :: "/context-path/testa2" :: @@ -244,7 +240,7 @@ class LiftMergeSpec extends Specification with XmlMatchers with Mockito { "/context-path/testa5" :: Nil } - "normalize absolute form actions everywhere" in new WithLiftContext(testRules, testSession) { + "normalize absolute form actions everywhere" in withLiftContext(testRules, testSession) { val result = testSession.merge( @@ -272,7 +268,7 @@ class LiftMergeSpec extends Specification with XmlMatchers with Mockito { mockReq ) - (result \\ "form").map(_ \@ "action") must_== + (result \\ "form").map(_ \@ "action") === "/context-path/testform1" :: "testform3" :: "/context-path/testform2" :: @@ -281,7 +277,7 @@ class LiftMergeSpec extends Specification with XmlMatchers with Mockito { 
"/context-path/testform5" :: Nil } - "not rewrite script srcs anywhere" in new WithLiftContext(testRules, testSession) { + "not rewrite script srcs anywhere" in withLiftContext(testRules, testSession) { val result = URLRewriter.doWith((_: String) => "rewritten") { testSession.merge( @@ -309,13 +305,13 @@ class LiftMergeSpec extends Specification with XmlMatchers with Mockito { ) } - (result \\ "script").map(_ \@ "src") must_== + (result \\ "script").map(_ \@ "src") === "testscript" :: "testscript2" :: "testscript3" :: Nil } - "not rewrite link hrefs anywhere" in new WithLiftContext(testRules, testSession) { + "not rewrite link hrefs anywhere" in withLiftContext(testRules, testSession) { val result = URLRewriter.doWith((_: String) => "rewritten") { testSession.merge( @@ -343,13 +339,13 @@ class LiftMergeSpec extends Specification with XmlMatchers with Mockito { ) } - (result \\ "link").map(_ \@ "href") must_== + (result \\ "link").map(_ \@ "href") === "testlink" :: "testlink2" :: "testlink3" :: Nil } - "rewrite a hrefs everywhere" in new WithLiftContext(testRules, testSession) { + "rewrite a hrefs everywhere" in withLiftContext(testRules, testSession) { val result = URLRewriter.doWith((_: String) => "rewritten") { testSession.merge( @@ -377,13 +373,13 @@ class LiftMergeSpec extends Specification with XmlMatchers with Mockito { ) } - (result \\ "a").map(_ \@ "href") must_== + (result \\ "a").map(_ \@ "href") === "rewritten" :: "rewritten" :: "rewritten" :: Nil } - "rewrite form actions everywhere" in new WithLiftContext(testRules, testSession) { + "rewrite form actions everywhere" in withLiftContext(testRules, testSession) { val result = URLRewriter.doWith((_: String) => "rewritten") { testSession.merge( @@ -411,13 +407,13 @@ class LiftMergeSpec extends Specification with XmlMatchers with Mockito { ) } - (result \\ "form").map(_ \@ "action") must_== + (result \\ "form").map(_ \@ "action") === "rewritten" :: "rewritten" :: "rewritten" :: Nil } - "include a page script in the page tail if events are extracted" in new WithLiftContext(eventExtractingTestRules, testSession) { + "include a page script in the page tail if events are extracted" in withLiftContext(eventExtractingTestRules, testSession) { val result = testSession.merge( @@ -437,7 +433,7 @@ class LiftMergeSpec extends Specification with XmlMatchers with Mockito { val scripts = (result \\ "script") - scripts must have length(1) + scripts must haveLength(1) scripts.map(_ \@ "src") must beLike { case scriptSrc :: Nil => scriptSrc must beMatching("/context-path/lift/page/F[^.]+.js") @@ -449,7 +445,7 @@ class LiftMergeSpec extends Specification with XmlMatchers with Mockito { } } - "include a page script in the page tail even if the page doesn't have a head and body" in new WithLiftContext(eventExtractingTestRules, testSession) { + "include a page script in the page tail even if the page doesn't have a head and body" in withLiftContext(eventExtractingTestRules, testSession) { val result = testSession.merge(
@@ -462,7 +458,7 @@ class LiftMergeSpec extends Specification with XmlMatchers with Mockito { val scripts = (result \\ "script") - scripts must have length(1) + scripts must haveLength(1) scripts.map(_ \@ "src") must beLike { case scriptSrc :: Nil => scriptSrc must beMatching("/context-path/lift/page/F[^.]+.js") diff --git a/web/webkit/src/test/scala-2.13/net/liftweb/http/LiftSessionSpec.scala b/web/webkit/src/test/scala-2.13/net/liftweb/http/LiftSessionSpec.scala new file mode 100644 index 0000000000..50701622c6 --- /dev/null +++ b/web/webkit/src/test/scala-2.13/net/liftweb/http/LiftSessionSpec.scala @@ -0,0 +1,141 @@ +/* + * Copyright 2010-2015 WorldWide Conferencing, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package net.liftweb +package http + +import scala.concurrent.ExecutionContext.Implicits.global +import scala.xml.NodeSeq +import net.liftweb.common.{Full, Empty, Failure} +import net.liftweb.util.Helpers.tryo +import org.specs2.specification.BeforeEach +import org.specs2.mutable.Specification + +object LiftSessionSpec { + private var receivedMessages = Vector[Int]() + private object NoOp + + private[LiftSessionSpec] class TestCometActor extends CometActor { + def render = NodeSeq.Empty + + override def lowPriority = { + case n: Int => + receivedMessages :+= n + case NoOp => + reply(NoOp) + case _ => + } + } + + private[LiftSessionSpec] class ExplodesInConstructorCometActor extends CometActor { + def render = NodeSeq.Empty + + throw new RuntimeException("boom, this explodes in the constructor!") + override def lowPriority = { + case _ => + } + } +} + +class LiftSessionSpec extends Specification with BeforeEach { + import LiftSessionSpec._ + + sequential + + // specs2 4.x: before method executes directly without step wrapper + override def before = { receivedMessages = Vector[Int]() } + + "A LiftSession" should { + + "Send accumulated messages to a newly-created comet actor in the order in which they arrived" in { + val session = new LiftSession("Test Session", "", Empty) + + S.init(Empty, session) { + val cometName = "TestCometActor" + val sendingMessages = 1 to 20 + + sendingMessages.foreach { message => + session.sendCometMessage(cometName, Full(cometName), message) + } + + session.findOrCreateComet[TestCometActor](Full(cometName), NodeSeq.Empty, Map.empty).map { comet => + comet !? 
NoOp /* Block to allow time for all messages to be collected */ + } + + receivedMessages === sendingMessages.toVector + } + } + + "Send messages to all comets of a particular type, regardless of name" in { + val session = new LiftSession("Test Session", "", Empty) + + S.init(Empty, session) { + val cometType = "TestCometActor" + val cometName = "Comet1" + + // Spin up two comets: one with a name and one without + session.sendCometMessage(cometType, Full(cometName), NoOp) + session.sendCometMessage(cometType, Empty, NoOp) + + // Send a message to both + session.sendCometMessage(cometType, 1) + + // Ensure both process the message + session.findOrCreateComet[TestCometActor](Full(cometName), NodeSeq.Empty, Map.empty).map { comet => + comet !? NoOp + } + session.findOrCreateComet[TestCometActor](Empty, NodeSeq.Empty, Map.empty).map { comet => + comet !? NoOp + } + + // Assert that the message was seen twice + receivedMessages === Vector(1, 1) + } + } + + "Surface exceptions from the no-arg comet constructor" in { + val session = new LiftSession("Test Session", "", Empty) + + S.init(Empty, session) { + val result = session.findOrCreateComet[ExplodesInConstructorCometActor](Empty, NodeSeq.Empty, Map.empty) + + result match { + case Failure(_, Full(ex: java.lang.reflect.InvocationTargetException), _) => + success + + case other => + failure("Comet did not fail with an InvocationTargetException. Please check to ensure error handling in no-arg comet constructors wasn't broken.") + } + } + } + } + + "LiftSession when building deferred functions" should { + + "not fail when the underlying container request is null" in { + val session = new LiftSession("Test Session", "", Empty) + + def stubFunction: () => Int = () => 3 + + S.init(Full(Req.nil), session) { + + val attempt = tryo(session.buildDeferredFunction(stubFunction)) + + attempt.toOption must beSome + } + } + } +} diff --git a/web/webkit/src/test/scala/net/liftweb/http/ReqSpec.scala b/web/webkit/src/test/scala-2.13/net/liftweb/http/ReqSpec.scala similarity index 86% rename from web/webkit/src/test/scala/net/liftweb/http/ReqSpec.scala rename to web/webkit/src/test/scala-2.13/net/liftweb/http/ReqSpec.scala index 06afd96ced..1c4a85ed93 100644 --- a/web/webkit/src/test/scala/net/liftweb/http/ReqSpec.scala +++ b/web/webkit/src/test/scala-2.13/net/liftweb/http/ReqSpec.scala @@ -22,16 +22,14 @@ import java.io.ByteArrayInputStream import scala.xml.XML import org.specs2.matcher.XmlMatchers - -import org.mockito.Mockito._ - import org.specs2.mutable.Specification -import org.specs2.mock.Mockito import org.specs2.specification.Scope +import org.specs2.mock.Mockito import common._ -import json.JsonDSL._ -import json.JsonParser +import org.json4s._ +import org.json4s.JsonDSL._ +import org.json4s.native._ import util.Helpers.tryo import provider._ @@ -42,11 +40,11 @@ import provider._ class ReqSpec extends Specification with XmlMatchers with Mockito { "Req Specification".title - private val iPhoneUserAgents = + private val iPhoneUserAgents = List("Mozilla/5.0 (iPhone Simulator; U; CPU iPhone OS 3_0 like Mac OS X; en-us) AppleWebKit/528.18 (KHTML, like Gecko) Version/4.0 Mobile/7A341 Safari/528.16", "Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_2_1 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8C148 Safari/6533.18.5") - private val iPadUserAgents = + private val iPadUserAgents = List("Mozilla/5.0 (iPad; U; CPU OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B367 Safari/531.21.10", 
"Mozilla/5.0 (iPad; U; CPU OS 4_2_1 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8C148 Safari/6533.18.5") @@ -64,7 +62,7 @@ class ReqSpec extends Specification with XmlMatchers with Mockito { val uac = new UserAgentCalculator { def userAgent = Full("Mozilla/5.0 (Windows; U; Windows NT 6.1; zh-HK) AppleWebKit/533.18.1 (KHTML, like Gecko) Version/5.0.2 Safari/533.18.5") } - uac.safariVersion.openOrThrowException("legacy code") must_== 5 + uac.safariVersion.openOrThrowException("legacy code") === 5 } "Do the right thing with iPhone" in { @@ -73,8 +71,8 @@ class ReqSpec extends Specification with XmlMatchers with Mockito { val uac = new UserAgentCalculator { def userAgent = Full(agent) } - uac.isIPhone must_== true - uac.isIPad must_== false + uac.isIPhone === true + uac.isIPad === false } } @@ -87,8 +85,8 @@ class ReqSpec extends Specification with XmlMatchers with Mockito { val uac = new UserAgentCalculator { def userAgent = Full(agent) } - uac.isIPhone must_== false - uac.isIPad must_== true + uac.isIPhone === false + uac.isIPad === true } } @@ -104,7 +102,7 @@ class ReqSpec extends Specification with XmlMatchers with Mockito { userAgentCalculator.ieVersion } - ieVersions must_== List(6, 7, 8, 9, 10, 11) + ieVersions === List(6, 7, 8, 9, 10, 11) } trait mockReq extends Scope { @@ -118,7 +116,7 @@ class ReqSpec extends Specification with XmlMatchers with Mockito { Req.NilPath, "/", GetRequest, Full(contentType), mockHttpRequest, - 0l, 1l, true, + 0L, 1L, true, () => paramCalcInfo, Map.empty ) @@ -127,7 +125,7 @@ class ReqSpec extends Specification with XmlMatchers with Mockito { class mockJsonReq(jsonString: String = """{ "booyan": "shazam", "booyak": 5, "bazam": 2.5 }""") extends mockReq { val testJson = jsonString - val parsedJson = tryo(JsonParser.parse(jsonString)) openOr json.JsonAST.JNothing + val parsedJson = tryo(JsonParser.parse(jsonString)) openOr JsonAST.JNothing def bodyBytes = { testJson.getBytes("UTF-8") @@ -149,11 +147,11 @@ class ReqSpec extends Specification with XmlMatchers with Mockito { } "with an application/json Content-Type should return the result of parsing the JSON" in new mockJsonReq { - req("application/json").json should_== Full(parsedJson) + req("application/json").json === Full(parsedJson) } "with a text/json Content-Type should return the result of parsing the JSON" in new mockJsonReq { - req("text/json").json should_== Full(parsedJson) + req("text/json").json === Full(parsedJson) } "with invalid JSON and a text/json Content-Type should return a Failure" in new mockJsonReq("epic fail") { @@ -163,15 +161,15 @@ class ReqSpec extends Specification with XmlMatchers with Mockito { "when forcing a request body JSON parse with forcedBodyAsJson" in { "with an invalid Content-Type should return the result of parsing the JSON" in new mockJsonReq { - req("text/plain").forcedBodyAsJson should_== Full(parsedJson) + req("text/plain").forcedBodyAsJson === Full(parsedJson) } "with an application/json Content-Type should return the result of parsing the JSON" in new mockJsonReq { - req("application/json").forcedBodyAsJson should_== Full(parsedJson) + req("application/json").forcedBodyAsJson === Full(parsedJson) } "with a text/json Content-Type should return the result of parsing the JSON" in new mockJsonReq { - req("text/json").forcedBodyAsJson should_== Full(parsedJson) + req("text/json").forcedBodyAsJson === Full(parsedJson) } "with invalid JSON should return a Failure" in new mockJsonReq("epic fail") { @@ -185,11 +183,11 @@ class 
ReqSpec extends Specification with XmlMatchers with Mockito { } "with an application/xml Content-Type should return the result of parsing the JSON" in new mockXmlReq { - req("application/xml").xml should_== Full(parsedXml) + req("application/xml").xml === Full(parsedXml) } "with a text/xml Content-Type should return the result of parsing the JSON" in new mockXmlReq { - req("text/xml").xml should_== Full(parsedXml) + req("text/xml").xml === Full(parsedXml) } "with invalid XML and a text/xml Content-Type should return a Failure" in new mockXmlReq("epic fail") { @@ -199,15 +197,15 @@ class ReqSpec extends Specification with XmlMatchers with Mockito { "when forcing a request body XML parse with forcedBodyAsXml" in { "with an invalid Content-Type should return the result of parsing the JSON" in new mockXmlReq { - req("text/plain").forcedBodyAsXml should_== Full(parsedXml) + req("text/plain").forcedBodyAsXml === Full(parsedXml) } "with an application/json Content-Type should return the result of parsing the JSON" in new mockXmlReq { - req("application/xml").forcedBodyAsXml should_== Full(parsedXml) + req("application/xml").forcedBodyAsXml === Full(parsedXml) } "with a text/json Content-Type should return the result of parsing the JSON" in new mockXmlReq { - req("text/xml").forcedBodyAsXml should_== Full(parsedXml) + req("text/xml").forcedBodyAsXml === Full(parsedXml) } "with invalid XML should return a Failure" in new mockXmlReq("epic fail") { diff --git a/web/webkit/src/test/scala-2.13/net/liftweb/http/SpecContextHelpers.scala b/web/webkit/src/test/scala-2.13/net/liftweb/http/SpecContextHelpers.scala new file mode 100644 index 0000000000..19881f88b1 --- /dev/null +++ b/web/webkit/src/test/scala-2.13/net/liftweb/http/SpecContextHelpers.scala @@ -0,0 +1,59 @@ +package net.liftweb +package http + +import org.specs2.execute.{Result, AsResult} +import org.specs2.mutable.Specification + +import common.{Box, Empty} + +/** + * Helper functions for wrapping test execution with Lift context. + * + * These functions properly wrap test code with ThreadLocal state management, + * ensuring that LiftRules and S (session) scope remain active during test execution. + */ +object SpecContextHelpers { + /** + * Wraps test execution with LiftRules context. + * The rules are active for the duration of the test execution. + * + * Example usage: + * {{{ + * import SpecContextHelpers._ + * + * "my test" in withLiftRules(testRules) { + * // test code here - LiftRules are available + * } + * }}} + */ + def withLiftRules[T: AsResult](rules: LiftRules)(test: =>T): Result = { + LiftRulesMocker.devTestLiftRulesInstance.doWith(rules) { + AsResult(test) + } + } + + /** + * Wraps test execution with both LiftRules and S (session) context. + * Both the rules and S scope are active for the duration of the test execution. 
+ * + * Example usage: + * {{{ + * import SpecContextHelpers._ + * + * "my test" in withLiftContext(testRules, testSession) { + * // test code here - LiftRules and S scope are available + * } + * }}} + */ + def withLiftContext[T: AsResult]( + rules: LiftRules, + session: LiftSession, + req: Box[Req] = Empty + )(test: =>T): Result = { + LiftRulesMocker.devTestLiftRulesInstance.doWith(rules) { + S.init(req, session) { + AsResult(test) + } + } + } +} diff --git a/web/webkit/src/test/scala-2.13/net/liftweb/http/js/LiftJavaScriptSpec.scala b/web/webkit/src/test/scala-2.13/net/liftweb/http/js/LiftJavaScriptSpec.scala new file mode 100644 index 0000000000..6393305620 --- /dev/null +++ b/web/webkit/src/test/scala-2.13/net/liftweb/http/js/LiftJavaScriptSpec.scala @@ -0,0 +1,240 @@ +/* + * Copyright 2013 WorldWide Conferencing, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package net.liftweb +package http +package js + +import java.util.Locale + +import net.liftweb.http.js.extcore.ExtCoreArtifacts +import net.liftweb.http.js.jquery.JQueryArtifacts +import org.specs2.execute.{Result, AsResult} +import org.specs2.specification.Scope +import org.specs2.mutable.Specification + +import common._ +import http.js._ +import http.js.JsCmds._ +import http.js.JE._ +import util.Props +import util.Helpers._ + +/** + * System under specification for LiftJavaScript. 
+ */ +class LiftJavaScriptSpec extends Specification { + sequential + "LiftJavaScript Specification".title + + private def session = new LiftSession("", randomString(20), Empty) + + "LiftJavaScript" should { + "create default settings" in new WithLocale(Locale.ENGLISH) { + S.initIfUninitted(session) { + val settings = LiftJavaScript.settings + settings.toJsCmd === formatjs( + """{"liftPath": "/lift", + |"ajaxRetryCount": 3, + |"ajaxPostTimeout": 5000, + |"gcPollingInterval": 75000, + |"gcFailureRetryTimeout": 15000, + |"cometGetTimeout": 140000, + |"cometFailureRetryTimeout": 10000, + |"cometServer": null, + |"logError": function(msg) {}, + |"ajaxOnFailure": function() {alert("The server cannot be contacted at this time");}, + |"ajaxOnStart": function() {}, + |"ajaxOnEnd": function() {}}""" + ) + } + } + "create internationalized default settings" in new WithLocale(Locale.forLanguageTag("pl-PL")) { + S.initIfUninitted(session) { + val settings = LiftJavaScript.settings + val internationalizedMessage = "Nie mo\\u017cna skontaktowa\\u0107 si\\u0119 z serwerem" + settings.toJsCmd === formatjs( + s"""{"liftPath": "/lift", + |"ajaxRetryCount": 3, + |"ajaxPostTimeout": 5000, + |"gcPollingInterval": 75000, + |"gcFailureRetryTimeout": 15000, + |"cometGetTimeout": 140000, + |"cometFailureRetryTimeout": 10000, + |"cometServer": null, + |"logError": function(msg) {}, + |"ajaxOnFailure": function() {alert("$internationalizedMessage");}, + |"ajaxOnStart": function() {}, + |"ajaxOnEnd": function() {}}""" + ) + } + } + "create custom static settings" in new WithLocale(Locale.ENGLISH) { + S.initIfUninitted(session) { + LiftRules.ajaxRetryCount = Full(4) + val settings = LiftJavaScript.settings + settings.toJsCmd === formatjs( + """{"liftPath": "/lift", + |"ajaxRetryCount": 4, + |"ajaxPostTimeout": 5000, + |"gcPollingInterval": 75000, + |"gcFailureRetryTimeout": 15000, + |"cometGetTimeout": 140000, + |"cometFailureRetryTimeout": 10000, + |"cometServer": null, + |"logError": function(msg) {}, + |"ajaxOnFailure": function() {alert("The server cannot be contacted at this time");}, + |"ajaxOnStart": function() {}, + |"ajaxOnEnd": function() {}}""" + ) + } + } + "create custom dynamic settings" in new WithLocale(Locale.ENGLISH) { + S.initIfUninitted(session) { + LiftRules.cometServer = () => Some("srvr1") + val settings = LiftJavaScript.settings + settings.toJsCmd === formatjs( + """{"liftPath": "/lift", + |"ajaxRetryCount": 4, + |"ajaxPostTimeout": 5000, + |"gcPollingInterval": 75000, + |"gcFailureRetryTimeout": 15000, + |"cometGetTimeout": 140000, + |"cometFailureRetryTimeout": 10000, + |"cometServer": "srvr1", + |"logError": function(msg) {}, + |"ajaxOnFailure": function() {alert("The server cannot be contacted at this time");}, + |"ajaxOnStart": function() {}, + |"ajaxOnEnd": function() {}}""" + ) + } + } + "create custom function settings" in new WithLocale(Locale.ENGLISH) { + S.initIfUninitted(session) { + LiftRules.jsLogFunc = Full(v => JE.Call("lift.logError", v)) + val settings = LiftJavaScript.settings + settings.toJsCmd === formatjs( + """{"liftPath": "/lift", + |"ajaxRetryCount": 4, + |"ajaxPostTimeout": 5000, + |"gcPollingInterval": 75000, + |"gcFailureRetryTimeout": 15000, + |"cometGetTimeout": 140000, + |"cometFailureRetryTimeout": 10000, + |"cometServer": "srvr1", + |"logError": function(msg) {lift.logError(msg);}, + |"ajaxOnFailure": function() {alert("The server cannot be contacted at this time");}, + |"ajaxOnStart": function() {}, + |"ajaxOnEnd": function() {}}""" + ) + } + } + "create init 
command" in new WithLocale(Locale.ENGLISH) { + S.initIfUninitted(session) { + val init = LiftRules.javaScriptSettings.vend().map(_.apply(session)).map(LiftJavaScript.initCmd(_).toJsCmd) + init === Full(formatjs(List( + "var lift_settings = {};", + "window.lift.extend(lift_settings,window.liftJQuery);", + """window.lift.extend(lift_settings,{"liftPath": "/lift", + |"ajaxRetryCount": 4, + |"ajaxPostTimeout": 5000, + |"gcPollingInterval": 75000, + |"gcFailureRetryTimeout": 15000, + |"cometGetTimeout": 140000, + |"cometFailureRetryTimeout": 10000, + |"cometServer": "srvr1", + |"logError": function(msg) {lift.logError(msg);}, + |"ajaxOnFailure": function() {alert("The server cannot be contacted at this time");}, + |"ajaxOnStart": function() {}, + |"ajaxOnEnd": function() {}});""", + "window.lift.init(lift_settings);" + ))) + } + } + "create init command with VanillaJS" in new WithLocale(Locale.ENGLISH) { + S.initIfUninitted(session) { + LiftRules.jsArtifacts = ExtCoreArtifacts + val init = LiftRules.javaScriptSettings.vend().map(_.apply(session)).map(LiftJavaScript.initCmd(_).toJsCmd) + init === Full(formatjs(List( + "var lift_settings = {};", + "window.lift.extend(lift_settings,window.liftVanilla);", + """window.lift.extend(lift_settings,{"liftPath": "/lift", + |"ajaxRetryCount": 4, + |"ajaxPostTimeout": 5000, + |"gcPollingInterval": 75000, + |"gcFailureRetryTimeout": 15000, + |"cometGetTimeout": 140000, + |"cometFailureRetryTimeout": 10000, + |"cometServer": "srvr1", + |"logError": function(msg) {lift.logError(msg);}, + |"ajaxOnFailure": function() {alert("The server cannot be contacted at this time");}, + |"ajaxOnStart": function() {}, + |"ajaxOnEnd": function() {}});""", + "window.lift.init(lift_settings);" + ))) + } + } + "create init command with custom setting" in new WithLocale(Locale.ENGLISH) { + S.initIfUninitted(session) { + LiftRules.jsArtifacts = JQueryArtifacts + val settings = LiftJavaScript.settings.extend(JsObj("liftPath" -> "liftyStuff", "mysetting" -> 99)) + val init = LiftJavaScript.initCmd(settings) + init.toJsCmd === formatjs(List( + "var lift_settings = {};", + "window.lift.extend(lift_settings,window.liftJQuery);", + """window.lift.extend(lift_settings,{"liftPath": "liftyStuff", + |"ajaxRetryCount": 4, + |"ajaxPostTimeout": 5000, + |"gcPollingInterval": 75000, + |"gcFailureRetryTimeout": 15000, + |"cometGetTimeout": 140000, + |"cometFailureRetryTimeout": 10000, + |"cometServer": "srvr1", + |"logError": function(msg) {lift.logError(msg);}, + |"ajaxOnFailure": function() {alert("The server cannot be contacted at this time");}, + |"ajaxOnStart": function() {}, + |"ajaxOnEnd": function() {}, + |"mysetting": 99});""", + "window.lift.init(lift_settings);" + )) + } + } + } + + def formatjs(line:String):String = formatjs(line :: Nil) + def formatjs(lines:List[String]):String = lines.map { _.stripMargin.linesIterator.toList match { + case init :+ last => (init.map(_ + " ") :+ last).mkString + case Nil => "" + }}.mkString("\n") + + object withEnglishLocale extends WithLocale(Locale.ENGLISH) + + object withPolishLocale extends WithLocale(Locale.forLanguageTag("pl-PL")) + + class WithLocale(locale: Locale) extends Scope { + val savedDefaultLocale = Locale.getDefault + Locale.setDefault(locale) + + // Cleanup happens automatically when scope exits via try/finally in specs2 + override def toString = { + try { + super.toString + } finally { + Locale.setDefault(savedDefaultLocale) + } + } + } +} diff --git 
a/web/webkit/src/test/scala-2.13/net/liftweb/http/provider/servlet/OfflineRequestSnapshotSpec.scala b/web/webkit/src/test/scala-2.13/net/liftweb/http/provider/servlet/OfflineRequestSnapshotSpec.scala new file mode 100644 index 0000000000..9db267cf6d --- /dev/null +++ b/web/webkit/src/test/scala-2.13/net/liftweb/http/provider/servlet/OfflineRequestSnapshotSpec.scala @@ -0,0 +1,86 @@ +/* + * Copyright 2010-2011 WorldWide Conferencing, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package net.liftweb.http.provider.servlet + +import net.liftweb.http.provider._ +import net.liftweb.mockweb.WebSpec +import org.specs2.mock.Mockito + + +object OfflineRequestSnapshotSpec extends WebSpec with Mockito { + + private[this] val X_SSL = "X-SSL" + private[this] val xSSLHeader = HTTPParam(X_SSL, List("true")) :: Nil + + "OfflineRequestSnapshot" should { + "have a 'headers' method that returns the list of headers with a given name" in { + val req = getRequestSnapshot(originalPort = 80, headers = xSSLHeader) + req.headers("X-SSL") === List("true") + req.headers("Unknown") must beEmpty + } + + "have the serverPort value" in { + "443 when the 'X-SSL' header is set to the string 'true' (case-insensitive) and original port is 80" in { + val port80Req = getRequestSnapshot(originalPort = 80, headers = xSSLHeader) + port80Req.serverPort === 443 + } + + s"equal to the original request-port when" in { + s"the '$X_SSL' header is absent" in { + val nonSSLReq = getRequestSnapshot(originalPort = 80) + nonSSLReq.serverPort === 80 + } + + s"the '$X_SSL' header is not set to the string 'true' (case-insensitive)" in { + val falseSSLHeaderReq = getRequestSnapshot(originalPort = 90, headers = HTTPParam(X_SSL, List("anything")) :: Nil) + falseSSLHeaderReq.serverPort === 90 + } + + "the original request-port is not 80" in { + val req = getRequestSnapshot(originalPort = 90, headers = xSSLHeader) + req.serverPort === 90 + } + } + } + + "have a 'param' method that returns the list of parameters with a given name (case-sensitive)" in { + val tennisParams = List("Roger Federer", "Raphael Nadal") + val swimmingParams = List("Michael Phelps", "Ian Thorpe") + val params = HTTPParam("tennis", tennisParams) :: HTTPParam("swimming", swimmingParams) :: Nil + val snapshot = getRequestSnapshot(80, params = params) + + snapshot.param("tennis") === tennisParams + snapshot.param("Tennis") should beEmpty + snapshot.param("swimming") === swimmingParams + } + } + + + private[this] def getRequestSnapshot(originalPort: Int, headers: List[HTTPParam] = Nil, params: List[HTTPParam] = Nil) = { + val mockHttpRequest = mock[HTTPRequest] + val httpProvider = new HTTPProvider { + override protected def context: HTTPContext = null + } + + mockHttpRequest.headers returns headers + mockHttpRequest.cookies returns Nil + mockHttpRequest.params returns params + mockHttpRequest.serverPort returns originalPort + new OfflineRequestSnapshot(mockHttpRequest, httpProvider) + } + +} diff --git 
a/web/webkit/src/test/scala/net/liftweb/http/rest/XMLApiSpec.scala b/web/webkit/src/test/scala-2.13/net/liftweb/http/rest/XMLApiSpec.scala similarity index 93% rename from web/webkit/src/test/scala/net/liftweb/http/rest/XMLApiSpec.scala rename to web/webkit/src/test/scala-2.13/net/liftweb/http/rest/XMLApiSpec.scala index b671597a38..8a89f0573d 100644 --- a/web/webkit/src/test/scala/net/liftweb/http/rest/XMLApiSpec.scala +++ b/web/webkit/src/test/scala-2.13/net/liftweb/http/rest/XMLApiSpec.scala @@ -91,12 +91,12 @@ class XmlApiSpec extends Specification { * new attributes makes comparison fail. Instead, we simply stringify and * reparse the response contents and that seems to fix the issue. */ val converted = secureXML.loadString(x.xml.toString) - result(converted == expected, - "%s matches %s".format(converted,expected), - "%s does not match %s".format(converted, expected), - response) + result(converted == expected, + "%s matches %s".format(converted,expected), + "%s does not match %s".format(converted, expected), + response) } - case other => result(false,"matches","not an XmlResponse", response) + case other => result(false, "XmlResponse", "not an XmlResponse", response) } } @@ -121,8 +121,8 @@ class XmlApiSpec extends Specification { failure must haveClass[XmlResponse] failure match { case x : XmlResponse => { - x.xml.attribute("success").map(_.text) must_== Some("false") - x.xml.attribute("msg").isDefined must_== true + x.xml.attribute("success").map(_.text) === Some("false") + x.xml.attribute("msg").isDefined === true } } } diff --git a/web/webkit/src/test/scala/net/liftweb/mockweb/MockWebSpec.scala b/web/webkit/src/test/scala-2.13/net/liftweb/mockweb/MockWebSpec.scala similarity index 84% rename from web/webkit/src/test/scala/net/liftweb/mockweb/MockWebSpec.scala rename to web/webkit/src/test/scala-2.13/net/liftweb/mockweb/MockWebSpec.scala index 838cfb093b..1e58dcee27 100644 --- a/web/webkit/src/test/scala/net/liftweb/mockweb/MockWebSpec.scala +++ b/web/webkit/src/test/scala-2.13/net/liftweb/mockweb/MockWebSpec.scala @@ -78,8 +78,8 @@ class MockWebSpec extends Specification { "provide a Req corresponding to a string url" in { testReq("http://foo.com/test/this?a=b&a=c", "/test") { req => - req.uri must_== "/this" - req.params("a") must_== List("b","c") + req.uri === "/this" + req.params("a") === List("b","c") } } @@ -89,13 +89,15 @@ class MockWebSpec extends Specification { mockReq.method = "POST" - import json.JsonDSL._ + import org.json4s.JsonDSL._ + import org.json4s.native.JsonMethods._ - mockReq.body = ("name" -> "joe") ~ ("age" -> 35) + mockReq.body = compact(render(("name" -> "joe") ~ ("age" -> 35))).getBytes("UTF-8") + mockReq.contentType = "application/json" testReq(mockReq) { req => - req.json_? must_== true + req.json_? 
must_=== true } } @@ -103,7 +105,7 @@ class MockWebSpec extends Specification { LiftRulesMocker.devTestLiftRulesInstance.doWith(mockLiftRules) { useLiftRules.doWith(true) { testReq("http://foo.com/test/this") { - req => req.remoteAddr must_== "1.2.3.4" + req => req.remoteAddr === "1.2.3.4" } } } @@ -113,7 +115,7 @@ class MockWebSpec extends Specification { LiftRulesMocker.devTestLiftRulesInstance.doWith(mockLiftRules) { useLiftRules.doWith(true) { testReq("http://foo.com/test/stateless") { - req => req.path.partPath must_== List("stateless", "works") + req => req.path.partPath === List("stateless", "works") } } } @@ -121,7 +123,7 @@ class MockWebSpec extends Specification { "initialize S based on a string url" in { testS("http://foo.com/test/that?a=b&b=c") { - S.param("b") must_== Full("c") + S.param("b") mustEqual Full("c") } } @@ -130,9 +132,9 @@ class MockWebSpec extends Specification { new MockHttpServletRequest("http://foo.com/test/this?foo=bar", "/test") testS(mockReq) { - S.param("foo") must_== Full("bar") + S.param("foo") === Full("bar") - S.uri must_== "/this" + S.uri === "/this" } } @@ -140,7 +142,7 @@ class MockWebSpec extends Specification { LiftRulesMocker.devTestLiftRulesInstance.doWith(mockLiftRules) { useLiftRules.doWith(true) { testS("http://foo.com/test/stateless") { - S.request.foreach(_.path.partPath must_== List("stateless", "works")) + S.request.foreach(_.path.partPath === List("stateless", "works")) } } } @@ -151,7 +153,7 @@ class MockWebSpec extends Specification { LiftRulesMocker.devTestLiftRulesInstance.doWith(mockLiftRules) { useLiftRules.doWith(true) { testS("http://foo.com/test/stateful") { - S.request.foreach(_.path.partPath must_== List("stateful", "works")) + S.request.foreach(_.path.partPath === List("stateful", "works")) } } } @@ -161,8 +163,8 @@ class MockWebSpec extends Specification { "emulate a snippet invocation" in { testS("http://foo.com/test/stateful") { withSnippet("MyWidget.foo", new UnprefixedAttribute("bar", Text("bat"), Null)) { - S.currentSnippet must_== Full("MyWidget.foo") - S.attr("bar") must_== Full("bat") + S.currentSnippet mustEqual Full("MyWidget.foo") + S.attr("bar") mustEqual Full("bat") } } } @@ -178,7 +180,7 @@ class MockWebSpec extends Specification { // A second test testS("http://foo.com/test2", session) { - testVar.is must_== "Foo!" + testVar.is === "Foo!" } } diff --git a/web/webkit/src/test/scala/net/liftweb/mockweb/WebSpecSpec.scala b/web/webkit/src/test/scala-2.13/net/liftweb/mockweb/WebSpecSpec.scala similarity index 81% rename from web/webkit/src/test/scala/net/liftweb/mockweb/WebSpecSpec.scala rename to web/webkit/src/test/scala-2.13/net/liftweb/mockweb/WebSpecSpec.scala index 68b1655002..378efda9e8 100644 --- a/web/webkit/src/test/scala/net/liftweb/mockweb/WebSpecSpec.scala +++ b/web/webkit/src/test/scala-2.13/net/liftweb/mockweb/WebSpecSpec.scala @@ -20,8 +20,8 @@ package mockweb import common.Full import http._ import http.rest._ -import json._ -import json.JsonDSL._ +import org.json4s._ +import org.json4s.JsonDSL._ import mocks.MockHttpServletRequest /** @@ -29,7 +29,7 @@ import mocks.MockHttpServletRequest * you could just use "() => bootstrap.Boot.boot". 
*/ object WebSpecSpecBoot { - def boot() { + def boot() : Unit = { // Add this so that withTemplateFor test works LiftRules.addToPackages("net.liftweb.mockweb") @@ -69,7 +69,7 @@ class WebSpecSpec extends WebSpec(WebSpecSpecBoot.boot _) { "WebSpec" should { val testUrl = "http://foo.com/test/stateless" - val testReq = + val testReq = new MockHttpServletRequest("http://foo.com/test/this?foo=bar", "/test") // Create a new session for use in the tests @@ -81,53 +81,53 @@ class WebSpecSpec extends WebSpec(WebSpecSpecBoot.boot _) { "properly set up S with a String url" withSFor(testUrl) in { S.request match { - case Full(req) => req.path.partPath must_== List("stateless", "works") + case Full(req) => req.path.partPath === List("stateless", "works") case _ => failure("No request in S") } } "properly set up S with a String url and session" withSFor(testUrl, testSession) in { TestVar("foo!") - TestVar.is must_== "foo!" + TestVar.is === "foo!" } "properly re-use a provided session" withSFor(testUrl, testSession) in { - TestVar.is must_== "foo!" - } + TestVar.is === "foo!" + } "properly set up S with a HttpServletRequest" withSFor(testReq) in { - S.uri must_== "/this" - S.param("foo") must_== Full("bar") + S.uri must_=== "/this" + S.param("foo") must_=== Full("bar") } "properly set up a Req with a String url" withReqFor(testUrl) in { - _.path.partPath must_== List("stateless", "works") + _.path.partPath === List("stateless", "works") } "properly set up a Req with a String url and context path" withReqFor(testUrl, "/test") in { - _.path.partPath must_== List("stateless") + _.path.partPath === List("stateless") } "properly set up a Req with a HttpServletRequest" withReqFor(testReq) in { - _.uri must_== "/this" + _.uri === "/this" } "properly set a plain text body" withReqFor(testUrl) withPost("This is a test") in { req => - req.contentType must_== Full("text/plain") - req.post_? must_== true + req.contentType === Full("text/plain") + req.post_? === true req.body match { - case Full(body) => (new String(body)) must_== "This is a test" + case Full(body) => (new String(body)) === "This is a test" case _ => failure("No body set") } } "properly set a JSON body" withReqFor(testUrl) withPut(("name" -> "Joe")) in { req => - req.json_? must_== true - req.put_? must_== true + req.json_? === true + req.put_? === true req.json match { - case Full(jval) => jval must_== JObject(List(JField("name", JString("Joe")))) + case Full(jval) => jval === JObject(List(JField("name", JString("Joe")))) case _ => failure("No body set") } } @@ -135,15 +135,15 @@ class WebSpecSpec extends WebSpec(WebSpecSpecBoot.boot _) { "properly set an XML body" withSFor(testUrl) withPost() in { S.request match { case Full(req) => - req.xml_? must_== true - req.post_? must_== true - req.xml must_== Full() + req.xml_? must_=== true + req.post_? must_=== true + req.xml must_=== Full() case _ => failure("No request found in S") } } "properly mutate the request" withSFor(testUrl) withMods(_.contentType = "application/xml") in { - (S.request.map(_.xml_?) openOr false) must_== true + (S.request.map(_.xml_?) 
openOr false) === true } "process a JSON RestHelper Request" withReqFor("http://foo.com/api/info.json") in { req => @@ -154,7 +154,7 @@ class WebSpecSpec extends WebSpec(WebSpecSpecBoot.boot _) { } "properly process a template" withTemplateFor("http://foo.com/net/liftweb/mockweb/webspecspectemplate") in { - case Full(template) => template.toString.contains("Hello, WebSpec!") must_== true + case Full(template) => template.toString.contains("Hello, WebSpec!") === true case other => failure("Error on template : " + other) } } diff --git a/web/webkit/src/test/scala/net/liftweb/sitemap/LocSpec.scala b/web/webkit/src/test/scala-2.13/net/liftweb/sitemap/LocSpec.scala similarity index 94% rename from web/webkit/src/test/scala/net/liftweb/sitemap/LocSpec.scala rename to web/webkit/src/test/scala-2.13/net/liftweb/sitemap/LocSpec.scala index 7fbb50caa2..bdf58d7dd2 100644 --- a/web/webkit/src/test/scala/net/liftweb/sitemap/LocSpec.scala +++ b/web/webkit/src/test/scala-2.13/net/liftweb/sitemap/LocSpec.scala @@ -38,12 +38,12 @@ class LocSpec extends Specification { "calculate default href for basic menu definition" in { val loc = (Menu("Test") / "foo" / "bar").toMenu.loc - loc.calcDefaultHref mustEqual "/foo/bar" + loc.calcDefaultHref === "/foo/bar" } "calculate href for menu with parameters" in { val loc = (Menu.param[Param]("Test", "Test", s => Full(Param(s)), p => p.s) / "foo" / "bar" / *).toLoc - loc.calcHref(Param("myparam")) mustEqual "/foo/bar/myparam" + loc.calcHref(Param("myparam")) === "/foo/bar/myparam" } "should not match a Req matching its Link when currentValue is Empty" in { @@ -55,7 +55,7 @@ class LocSpec extends Specification { testS(mockReq) { testReq(mockReq) { req => - testLoc.doesMatch_?(req) mustEqual false + testLoc.doesMatch_?(req) === false } } } @@ -87,7 +87,7 @@ class LocSpec extends Specification { val rewriteFn = testLoc.rewrite.openOrThrowException("No rewrite function") rewriteFn(rrq) must not(throwA[Exception]) - rewriteFn(rrq)._2 must_== Empty + rewriteFn(rrq)._2 mustEqual Empty } } } diff --git a/web/webkit/src/test/scala/net/liftweb/webapptest/MemoizeSpec.scala b/web/webkit/src/test/scala-2.13/net/liftweb/webapptest/MemoizeSpec.scala similarity index 58% rename from web/webkit/src/test/scala/net/liftweb/webapptest/MemoizeSpec.scala rename to web/webkit/src/test/scala-2.13/net/liftweb/webapptest/MemoizeSpec.scala index 9834ad2983..fa68cf8b2b 100644 --- a/web/webkit/src/test/scala/net/liftweb/webapptest/MemoizeSpec.scala +++ b/web/webkit/src/test/scala-2.13/net/liftweb/webapptest/MemoizeSpec.scala @@ -42,43 +42,43 @@ class MemoizeSpec extends Specification { import SessionInfo._ "Memoize" should { - "Session memo should default to empty" >> { - S.initIfUninitted(session1) { - sessionMemo.get(3) must_== Empty + "Session memo should default to empty" in { + S.init(Full(Req.nil), session1) { + sessionMemo.get(3) mustEqual Empty } } - "Session memo should be settable" >> { - S.initIfUninitted(session1) { - sessionMemo.get(3, 8) must_== 8 + "Session memo should be settable" in { + S.init(Full(Req.nil), session1) { + sessionMemo.get(3, 8) mustEqual 8 - sessionMemo.get(3) must_== Full(8) + sessionMemo.get(3) mustEqual Full(8) } } - "Session memo should survive across calls" >> { - S.initIfUninitted(session1) { - sessionMemo.get(3) must_== Full(8) + "Session memo should survive across calls" in { + S.init(Full(Req.nil), session1) { + sessionMemo.get(3) mustEqual Full(8) } } - "Session memo should not float across sessions" >> { - S.initIfUninitted(session2) { - 
sessionMemo.get(3) must_== Empty + "Session memo should not float across sessions" in { + S.init(Full(Req.nil), session2) { + sessionMemo.get(3) mustEqual Empty } } - "Request memo should work in the same request" >> { - S.initIfUninitted(session1) { - requestMemo(3) must_== Empty - requestMemo(3, 44) must_== 44 - requestMemo(3) must_== Full(44) + "Request memo should work in the same request" in { + S.init(Full(Req.nil), session1) { + requestMemo(3) mustEqual Empty + requestMemo(3, 44) mustEqual 44 + requestMemo(3) mustEqual Full(44) } } - "Request memo should not span requests" >> { - S.initIfUninitted(session1) { - requestMemo(3) must_== Empty + "Request memo should not span requests" in { + S.init(Full(Req.nil), session1) { + requestMemo(3) mustEqual Empty } } diff --git a/web/webkit/src/test/scala-3/net/liftweb/http/LAFutureWithSessionSpec.scala b/web/webkit/src/test/scala-3/net/liftweb/http/LAFutureWithSessionSpec.scala new file mode 100644 index 0000000000..c4a283c402 --- /dev/null +++ b/web/webkit/src/test/scala-3/net/liftweb/http/LAFutureWithSessionSpec.scala @@ -0,0 +1,314 @@ +package net.liftweb.http + +import net.liftweb.actor.LAFuture +import net.liftweb.common.{Box, Empty, Failure, Full} +import net.liftweb.mockweb.WebSpec +import org.specs2.matcher.ThrownMessages + +class LAFutureWithSessionSpec extends WebSpec with ThrownMessages { + + sequential + + object SessionVar1 extends SessionVar[String]("Uninitialized1") + object SessionVar2 extends SessionVar[String]("Uninitialized2") + + object ReqVar1 extends RequestVar[String]("Uninitialized1") + object ReqVar2 extends RequestVar[String]("Uninitialized2") + + val timeout = 10000L + + "LAFutureWithSession" should { + + "fail if session is not available" in { + val future = LAFutureWithSession.withCurrentSession("kaboom") + + future.get(timeout) must be_== (Failure("LiftSession not available in this thread context", Empty, Empty)) + } + + "succeed with original value if session is available" withSFor "/" in { + val future = LAFutureWithSession.withCurrentSession("works!") + + future.get(timeout) must be_== (Full("works!")) + } + + "have access to session variables in LAFuture task" withSFor "/" in { + SessionVar1("dzien dobry") + + val future = LAFutureWithSession.withCurrentSession(SessionVar1.is) + + future.get(timeout) must be_== (Full("dzien dobry")) + } + + "have access to request variables in LAFuture task" withSFor "/" in { + ReqVar1("guten tag") + + val future = LAFutureWithSession.withCurrentSession(ReqVar1.is) + + future.get(timeout) must be_== (Full("guten tag")) + } + + "have access to session variables in onComplete()" withSFor "/" in { + // workaround for a possible race condition in AnyVarTrait + // https://groups.google.com/forum/#!topic/liftweb/V1pWy14Wl3A + SessionVar1.is + + val future = LAFutureWithSession.withCurrentSession { + Thread.sleep(Long.MaxValue) + "292 billion years" + } + + future.onComplete { + case Full(v) => SessionVar1(v) + case problem => ko("Future computation failed: " + problem) + } + + future.satisfy("thorgal") + + SessionVar1.is must eventually(beEqualTo("thorgal")) + } + + "have access to request variables in onComplete()" withSFor "/" in { + // workaround for a possible race condition in AnyVarTrait + // https://groups.google.com/forum/#!topic/liftweb/V1pWy14Wl3A + ReqVar1.is + + val future = LAFutureWithSession.withCurrentSession { + Thread.sleep(Long.MaxValue) + "292 billion years" + } + + future.onComplete { + case Full(v) => ReqVar1(v) + case problem => ko("Future computation 
failed: " + problem) + } + + future.satisfy("thor") + + ReqVar1.is must eventually(beEqualTo("thor")) + } + + "have access to session variables in onFail()" withSFor "/" in { + // workaround for a possible race condition in SessionVar + // https://groups.google.com/forum/#!topic/liftweb/V1pWy14Wl3A + SessionVar1.is + + val future = LAFutureWithSession.withCurrentSession { + Thread.sleep(Long.MaxValue) + "292 billion years" + } + + future.onFail { + case f: Failure => SessionVar1(f.msg) + case _ => fail("The Future should have failed") + } + + future.fail(new Exception("kaboom!")) + + SessionVar1.is must eventually(beEqualTo("kaboom!")) + } + + "have access to request variables in onFail()" withSFor "/" in { + // workaround for a possible race condition in AnyVarTrait + // https://groups.google.com/forum/#!topic/liftweb/V1pWy14Wl3A + ReqVar1.is + + val future = LAFutureWithSession.withCurrentSession { + Thread.sleep(Long.MaxValue) + "292 billion years" + } + + future.onFail { + case f: Failure => ReqVar1(f.msg) + case _ => fail("The Future should have failed") + } + + future.fail(new Exception("nope!")) + + ReqVar1.is must eventually(beEqualTo("nope!")) + } + + "have access to session variables in onSuccess()" withSFor "/" in { + // workaround for a possible race condition in AnyVarTrait + // https://groups.google.com/forum/#!topic/liftweb/V1pWy14Wl3A + SessionVar1.is + + val future = LAFutureWithSession.withCurrentSession { + Thread.sleep(Long.MaxValue) + "292 billion years" + } + + future.onSuccess(SessionVar1(_)) + + future.satisfy("done") + + SessionVar1.is must eventually(beEqualTo("done")) + } + + "have access to request variables in onSuccess()" withSFor "/" in { + // workaround for a possible race condition in AnyVarTrait + // https://groups.google.com/forum/#!topic/liftweb/V1pWy14Wl3A + ReqVar1.is + + val future = LAFutureWithSession.withCurrentSession { + Thread.sleep(Long.MaxValue) + "292 billion years" + } + + future.onSuccess(ReqVar1(_)) + + future.satisfy("my preciousss") + + ReqVar1.is must eventually(beEqualTo("my preciousss")) + } + + "have access to session variables in chains of filter()" withSFor "/" in { + SessionVar1("see") + SessionVar2("me") + + val future = LAFutureWithSession.withCurrentSession("they see me rollin") + val filtered = future + .filter(_.contains(SessionVar1.is)) + .filter(_.contains(SessionVar2.is)) + + filtered.get(timeout) must eventually(===(Full("they see me rollin"): Box[String])) + } + + "have access to request variables in chains of filter()" withSFor "/" in { + ReqVar1("see") + ReqVar2("me") + + val future = LAFutureWithSession.withCurrentSession("they see me rollin") + val filtered = future + .filter(_.contains(ReqVar1.is)) + .filter(_.contains(ReqVar2.is)) + + filtered.get(timeout) must eventually(===(Full("they see me rollin"): Box[String])) + } + + "have access to session variables in chains of withFilter()" withSFor "/" in { + SessionVar1("come") + SessionVar2("prey") + + val future = LAFutureWithSession.withCurrentSession("do not come between the nazgul and his prey") + val filtered = future + .withFilter(_.contains(SessionVar1.is)) + .withFilter(_.contains(SessionVar2.is)) + + filtered.get(timeout) must eventually(===(Full("do not come between the nazgul and his prey"): Box[String])) + } + + "have access to request variables in chains of withFilter()" withSFor "/" in { + ReqVar1("hurt") + ReqVar2("precious") + + val future = LAFutureWithSession.withCurrentSession("mustn't go that way, mustn't hurt the precious!") + val filtered = 
future + .withFilter(_.contains(ReqVar1.is)) + .withFilter(_.contains(ReqVar2.is)) + + filtered.get(timeout) must eventually(===(Full("mustn't go that way, mustn't hurt the precious!"): Box[String])) + } + + "have access to session variables in chains of map()" withSFor "/" in { + SessionVar1("b") + SessionVar2("c") + + val future = LAFutureWithSession.withCurrentSession("a") + val mapped = future.map(_ + SessionVar1.is).map(_ + SessionVar2.is) + + mapped.get(timeout) must be_== (Full("abc")) + } + + "have access to request variables in chains of map()" withSFor "/" in { + ReqVar1("b") + ReqVar2("c") + + val future = LAFutureWithSession.withCurrentSession("a") + val mapped = future.map(_ + ReqVar1.is).map(_ + ReqVar2.is) + + mapped.get(timeout) must be_== (Full("abc")) + } + + "have access to session variables in chains of flatMap()" withSFor "/" in { + SessionVar1("e") + SessionVar2("f") + + val future = LAFutureWithSession.withCurrentSession("d") + val mapped = future + .flatMap { s => + val out = s + SessionVar1.is + LAFuture.build(out) + } + .flatMap { s => + val out = s + SessionVar2.is + LAFuture.build(out) + } + + mapped.get(timeout) must be_== (Full("def")) + } + + "have access to request variables in chains of flatMap()" withSFor "/" in { + ReqVar1("e") + ReqVar2("f") + + val future = LAFutureWithSession.withCurrentSession("d") + val mapped = future + .flatMap { s => + val out = s + ReqVar1.is + LAFuture.build(out) + } + .flatMap { s => + val out = s + ReqVar2.is + LAFuture.build(out) + } + + mapped.get(timeout) must be_== (Full("def")) + } + + "have access to session variables in foreach()" withSFor "/" in { + // workaround for a possible race condition in AnyVarTrait + // https://groups.google.com/forum/#!topic/liftweb/V1pWy14Wl3A + SessionVar1.is + + val future = LAFutureWithSession.withCurrentSession("cookie") + future.foreach(SessionVar1(_)) + + SessionVar1.is must eventually(beEqualTo("cookie")) + } + + "have access to request variables in foreach()" withSFor "/" in { + // workaround for a possible race condition in AnyVarTrait + // https://groups.google.com/forum/#!topic/liftweb/V1pWy14Wl3A + ReqVar1.is + + val future = LAFutureWithSession.withCurrentSession("monster") + future.foreach(ReqVar1(_)) + + ReqVar1.is must eventually(beEqualTo("monster")) + } + + "not leak out initial session between threads with their own sessions" in { + val session1 = new LiftSession("Test session 1", "", Empty) + val session2 = new LiftSession("Test session 2", "", Empty) + val session3 = new LiftSession("Test session 3", "", Empty) + + S.initIfUninitted(session1)(SessionVar1("one")) + S.initIfUninitted(session2)(SessionVar1("two")) + S.initIfUninitted(session3)(SessionVar1("three")) + + val future = S.initIfUninitted(session1)(LAFutureWithSession.withCurrentSession("zero")) + + S.initIfUninitted(session2) { + future.map(v => SessionVar1.is).get(timeout) must eventually(===(Full("two"): Box[String])) + } + + S.initIfUninitted(session3) { + future.map(v => SessionVar1.is).get(timeout) must eventually(===(Full("three"): Box[String])) + } + + S.initIfUninitted(session1) { + future.map(v => SessionVar1.is).get(timeout) must eventually(===(Full("one"): Box[String])) + } + } + } +} diff --git a/web/webkit/src/test/scala-3/net/liftweb/http/LiftMergeSpec.scala b/web/webkit/src/test/scala-3/net/liftweb/http/LiftMergeSpec.scala new file mode 100644 index 0000000000..7e79d4d34b --- /dev/null +++ b/web/webkit/src/test/scala-3/net/liftweb/http/LiftMergeSpec.scala @@ -0,0 +1,473 @@ +package net.liftweb 
+package http
+
+import scala.xml._
+
+import org.specs2.mutable.Specification
+import org.specs2.matcher.XmlMatchers
+import org.mockito.Mockito.{mock, when}
+
+import common._
+
+import js.JE.JsObj
+import js.pageScript
+import SpecContextHelpers._
+
+class LiftMergeSpec extends Specification with XmlMatchers {
+  val mockReq = mock(classOf[Req])
+  when(mockReq.contextPath).thenReturn("/context-path")
+
+  val testSession = new LiftSession("/context-path", "underlying id", Empty)
+
+  val testRules = new LiftRules()
+  // Avoid extra appended elements by default.
+  testRules.javaScriptSettings.default.set(() => () => Empty)
+  testRules.autoIncludeAjaxCalc.default.set(() => () => (_: LiftSession) => false)
+  testRules.excludePathFromContextPathRewriting.default
+    .set(
+      () => (in: String) => in.startsWith("exclude-me")
+    )
+
+  val eventExtractingTestRules = new LiftRules()
+  eventExtractingTestRules.javaScriptSettings.default.set(() => () => Empty)
+  eventExtractingTestRules.autoIncludeAjaxCalc.default.set(() => () => (_: LiftSession) => false)
+  eventExtractingTestRules.extractInlineJavaScript = true
+
+  "LiftMerge when doing the final page merge" should {
+    "merge head segments in the page body in order into main head" in withLiftRules(testRules) {
+      val result =
+        testSession.merge(
+          /* page template elided */,
+          mockReq
+        )
+
+      (result \ "head" \ "_") === (Seq(/* merged head elements elided */): NodeSeq)
+    }
+
+    "merge tail segments in the page body in order at the end of the body" in withLiftRules(testRules) {
+      val result =
+        testSession.merge(
+          /* page template elided */,
+          mockReq
+        )
+
+      (result \ "body" \ "_").takeRight(3) === (Seq(/* trailing body elements elided */): NodeSeq)
+    }
+
+    "not merge tail segments in the head" in withLiftRules(testRules) {
+      val result =
+        testSession.merge(
+          /* page template elided */,
+          mockReq
+        )
+
+      (result \ "body" \ "_").takeRight(3) === (Seq(/* trailing body elements elided */): NodeSeq)
+    }
+
+    "normalize absolute link hrefs everywhere" in withLiftContext(testRules, testSession) {
+      val result =
+        testSession.merge(
+          /* page template elided */,
+          mockReq
+        )
+
+      (result \\ "link").map(_ \@ "href") ===
+        "/context-path/testlink" ::
+        "/context-path/testlink2" ::
+        "/context-path/testlink3" :: Nil
+    }
+
+    "normalize absolute script srcs everywhere" in withLiftContext(testRules, testSession) {
+      val result =
+        testSession.merge(
+          /* page template elided */,
+          mockReq
+        )
+
+      (result \\ "script").map(_ \@ "src") ===
+        "/context-path/testscript" ::
+        "/context-path/testscript2" :: Nil
+    }
+
+    "normalize absolute a hrefs everywhere" in withLiftContext(testRules, testSession) {
+      val result =
+        testSession.merge(
+          /* page template elided */,
+          mockReq
+        )
+
+      (result \\ "a").map(_ \@ "href") ===
+        "/context-path/testa1" ::
+        "testa3" ::
+        "/context-path/testa2" ::
+        "testa4" ::
+        "/context-path/testa6" ::
+        "/context-path/testa5" :: Nil
+    }
+
+    "normalize absolute form actions everywhere" in withLiftContext(testRules, testSession) {
+      val result =
+        testSession.merge(
+          /* page template elided */,
+          mockReq
+        )
+
+      (result \\ "form").map(_ \@ "action") ===
+        "/context-path/testform1" ::
+        "testform3" ::
+        "/context-path/testform2" ::
+        "testform4" ::
+        "/context-path/testform6" ::
+        "/context-path/testform5" :: Nil
+    }
+
+    "not rewrite script srcs anywhere" in withLiftContext(testRules, testSession) {
+      val result =
+        URLRewriter.doWith((_: String) => "rewritten") {
+          testSession.merge(