diff --git a/.github/workflows/main_test_release.yml b/.github/workflows/main_test_release.yml index 8e1fb226..091d77b6 100644 --- a/.github/workflows/main_test_release.yml +++ b/.github/workflows/main_test_release.yml @@ -13,7 +13,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - mongodb-version: [ '4.4', '5.0', '6.0', '7.0' , '8.0' ] + mongodb-version: ['4.4', '5.0', '6.0', '7.0', '8.0'] java: [ '21', '23' ] steps: - uses: actions/checkout@main diff --git a/.github/workflows/other_test.yml b/.github/workflows/other_test.yml index 495a43b8..af8748c2 100644 --- a/.github/workflows/other_test.yml +++ b/.github/workflows/other_test.yml @@ -13,7 +13,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - mongodb-version: [ '4.4', '5.0', '6.0', '7.0' , '8.0' ] + mongodb-version: [ '4.4', '5.0', '6.0', '7.0', '8.0' ] java: [ '21', '23' ] steps: - uses: actions/checkout@main diff --git a/README.md b/README.md index e0736c14..d42e9bfc 100644 --- a/README.md +++ b/README.md @@ -3,9 +3,7 @@ A library for easy usage of the mongo-scala-driver (5.1.xa). Full MongoDB Functionality in Scala with a few lines of code. ## MongoDB Support - -Support MongoDB 3.6 to 7.0.x. - +We are currently testing with MongoDB '4.4', '5.0', '6.0', '7.0' and '8.0'. ## Features @@ -28,7 +26,7 @@ Documentation can be found [here](https://mongodb-driver.mongocamp.dev/). ## Version -Scala Version is 2.13.x / 2.12.x. 
+Scala Version is 3.6.x / 2.13.x ## CI @@ -46,7 +44,7 @@ Add following lines to your build.sbt (replace x.x with the actual Version) ``` -libraryDependencies += "dev.mongocamp" %% "mongodb-driver" % "2.x.x" +libraryDependencies += "dev.mongocamp" %% "mongodb-driver" % "3.x.x" ``` @@ -61,6 +59,8 @@ import dev.mongocamp.driver.mongodb.database.DatabaseProvider import org.bson.codecs.configuration.CodecRegistries._ import org.mongodb.scala.bson.ObjectId import org.mongodb.scala.bson.codecs.Macros._ +import dev.mongocamp.driver.mongodb.json._ +import io.circe.generic.auto._ /** * import mongodb restaurants sample data @@ -71,12 +71,9 @@ object RestaurantDatabase { case class Grade(date: Date, grade: String, score: Int) - case class Restaurant(restaurant_id: String, name: String, borough: String, cuisine: String, - grades: List[Grade], address: Address, _id: ObjectId = new ObjectId()) - - private val registry = fromProviders(classOf[Restaurant], classOf[Address], classOf[Grade]) + case class Restaurant(restaurant_id: String, name: String, borough: String, cuisine: String, grades: List[Grade], address: Address, _id: ObjectId = new ObjectId()) - val provider = DatabaseProvider("test", registry) + val provider = DatabaseProvider.fromPath("dev.mongocamp") object RestaurantDAO extends MongoDAO[Restaurant](provider, "restaurants") @@ -88,7 +85,6 @@ object RestaurantDatabase { Import the database object and execute some find and CRUD functions on the DAO object ... 
```scala - import dev.mongocamp.driver.mongodb.demo.restaurant.RestaurantDemoDatabase._ import dev.mongocamp.driver.mongodb._ @@ -97,17 +93,14 @@ trait RestaurantDemoDatabaseFunctions { /** * single result with implicit conversion to Entity Option */ - def findRestaurantByName(name: String): Option[Restaurant] = - RestaurantDAO.find("name", name) + def findRestaurantByName(name: String): Option[Restaurant] = RestaurantDAO.find("name", name) def restaurantsSize: Long = RestaurantDAO.count() /** * result with implicit conversion to List of Entities */ - def findAllRestaurants(filterValues: Map[String, Any] = Map()): List[Restaurant] = - RestaurantDAO.find(filterValues) - + def findAllRestaurants(filterValues: Map[String, Any] = Map()): List[Restaurant] = RestaurantDAO.find(filterValues) ``` @@ -137,7 +130,6 @@ Use the mongodb functions in your app ... Write some spec tests ... ```scala - import dev.mongocamp.driver.mongodb.demo.restaurant.RestaurantDemoDatabase._ import org.specs2.mutable.Specification @@ -159,9 +151,9 @@ class RestaurantDemoSpec extends Specification with RestaurantDemoDatabaseFuncti ## Run Tests ```shell -docker run -d --publish 27017:27017 --name mongodb mongocamp/mongodb:latest; -sbt test; docker rm -f mongodb; +docker run -d --publish 27017:27017 --name mongodb mongocamp/mongodb:latest; +sbt +test ``` ## Supporters diff --git a/build.sbt b/build.sbt index 9684e320..67bdfa49 100644 --- a/build.sbt +++ b/build.sbt @@ -40,7 +40,8 @@ developers := List( licenses += ("Apache-2.0", url("https://www.apache.org/licenses/LICENSE-2.0.html")) -crossScalaVersions := Seq("2.13.15", "2.12.20") +//crossScalaVersions := Seq("2.13.16") +crossScalaVersions := Seq("3.6.0", "2.13.16") scalaVersion := crossScalaVersions.value.head @@ -59,12 +60,6 @@ buildInfoOptions += BuildInfoOption.BuildTime resolvers += "Sonatype OSS Snapshots".at("https://oss.sonatype.org/content/repositories/snapshots") -// Test - -libraryDependencies += "org.specs2" %% "specs2-core" % 
"4.20.9" % Test - -libraryDependencies += "ch.qos.logback" % "logback-classic" % "1.5.12" % Test - libraryDependencies += "joda-time" % "joda-time" % "2.13.0" val circeVersion = "0.14.10" @@ -75,10 +70,9 @@ libraryDependencies ++= Seq( "io.circe" %% "circe-parser" ).map(_ % circeVersion) -libraryDependencies += "org.mongodb.scala" %% "mongo-scala-driver" % "5.1.4" +libraryDependencies += ("org.mongodb.scala" %% "mongo-scala-driver" % "5.3.1").cross(CrossVersion.for3Use2_13) -// MongoDB 5.2.0 not supported for de.bwaldvogel -> https://github.com/bwaldvogel/mongo-java-server/issues/233 -val MongoJavaServerVersion = "1.45.0" +val MongoJavaServerVersion = "1.46.0" libraryDependencies += "de.bwaldvogel" % "mongo-java-server" % MongoJavaServerVersion % Provided @@ -86,9 +80,9 @@ libraryDependencies += "de.bwaldvogel" % "mongo-java-server-h2-backend" % MongoJ libraryDependencies += "org.xerial.snappy" % "snappy-java" % "1.1.10.7" % Provided -libraryDependencies += "com.github.luben" % "zstd-jni" % "1.5.6-7" % Provided +libraryDependencies += "com.github.luben" % "zstd-jni" % "1.5.6-9" % Provided -libraryDependencies += "org.apache.lucene" % "lucene-queryparser" % "10.0.0" +libraryDependencies += "org.apache.lucene" % "lucene-queryparser" % "10.1.0" libraryDependencies += "com.github.pathikrit" %% "better-files" % "3.9.2" @@ -96,13 +90,11 @@ libraryDependencies += "com.typesafe" % "config" % "1.4.3" libraryDependencies += "com.typesafe.scala-logging" %% "scala-logging" % "3.9.5" -libraryDependencies += "org.scala-lang.modules" %% "scala-collection-compat" % "2.12.0" +//libraryDependencies += "org.scala-lang.modules" %% "scala-collection-compat" % "2.12.0" libraryDependencies += "com.vdurmont" % "semver4j" % "3.1.0" -libraryDependencies += "com.github.jsqlparser" % "jsqlparser" % "5.0" - -libraryDependencies += "org.liquibase" % "liquibase-core" % "4.30.0" % Test +libraryDependencies += "com.github.jsqlparser" % "jsqlparser" % "5.1" buildInfoPackage := 
"dev.mongocamp.driver.mongodb" diff --git a/build_release.sbt b/build_release.sbt index 6ba48a77..35fb513b 100644 --- a/build_release.sbt +++ b/build_release.sbt @@ -1,10 +1,10 @@ import com.vdurmont.semver4j.Semver import dev.quadstingray.sbt.json.JsonFile import sbtrelease.ReleasePlugin.autoImport.ReleaseKeys.versions -import sbtrelease.ReleasePlugin.autoImport.ReleaseTransformations.* +import sbtrelease.ReleasePlugin.autoImport.ReleaseTransformations._ import sbtrelease.ReleasePlugin.runtimeVersion -import scala.sys.process.* +import scala.sys.process._ releaseVersionBump := sbtrelease.Version.Bump.NextStable diff --git a/build_test.sbt b/build_test.sbt new file mode 100644 index 00000000..086e2bda --- /dev/null +++ b/build_test.sbt @@ -0,0 +1,9 @@ +Test / parallelExecution := false + +libraryDependencies += "org.liquibase" % "liquibase-core" % "4.31.0" % Test + +// Test + +libraryDependencies += "ch.qos.logback" % "logback-classic" % "1.5.16" % Test + +libraryDependencies += "org.scalameta" %% "munit" % "1.1.0" diff --git a/docs/documentation/collection/aggregation.md b/docs/documentation/collection/aggregation.md index 74e1ad23..b1346ffc 100644 --- a/docs/documentation/collection/aggregation.md +++ b/docs/documentation/collection/aggregation.md @@ -7,10 +7,10 @@ MongoDB support an easy to use [Aggregation Handling](https://docs.mongodb.com/m ## Demo ### Setup imports -<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/operation/AggregationSpec.scala#agg_imports +<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/operation/AggregationSuite.scala#agg_imports ### Define stages -<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/operation/AggregationSpec.scala#agg_stages +<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/operation/AggregationSuite.scala#agg_stages ### Execute Aggregation @@ -19,10 +19,10 @@ MongoDB support an easy to use [Aggregation Handling](https://docs.mongodb.com/m ::: -<<< 
@/../src/test/scala/dev/mongocamp/driver/mongodb/operation/AggregationSpec.scala#agg_execute +<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/operation/AggregationSuite.scala#agg_execute ### Convert Result For easy result handling, using the implicit Document to Map conversion can be useful. -<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/operation/AggregationSpec.scala#agg_convert +<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/operation/AggregationSuite.scala#agg_convert diff --git a/docs/documentation/collection/analyse-schema.md b/docs/documentation/collection/analyse-schema.md index 6910c041..2008c588 100644 --- a/docs/documentation/collection/analyse-schema.md +++ b/docs/documentation/collection/analyse-schema.md @@ -6,7 +6,7 @@ The driver supports an automated detection of the schema of an existing collecti ### Schema Analysis Analyse a collection to detect the values for each field and the percentage distribution of the types. -<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/schema/SchemaSpec.scala#schema-analysis +<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/schema/SchemaSuite.scala#schema-analysis ### Detect Schema The Schema Detector can be used to detect the schema of a collection and is based on [Schema Anaysis](analyse-schema.md#schema-analysis). The schema is used to detect the types of the columns and generate a [JSON Schema](https://json-schema.org) for the collection. In case of multiple types of a field the Generation of the JSON Schema use the type with the most elements. @@ -15,4 +15,4 @@ The Schema Detector can be used to detect the schema of a collection and is base The [JSON Schema](https://json-schema.org) format can be use to validate or generate data, as well to secure your [Mongo Collection](https://www.mongodb.com/docs/manual/core/schema-validation/). 
::: -<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/schema/SchemaSpec.scala#schema-explorer \ No newline at end of file +<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/schema/SchemaSuite.scala#schema-explorer \ No newline at end of file diff --git a/docs/documentation/collection/pagination.md b/docs/documentation/collection/pagination.md index c20c5d87..af81c332 100644 --- a/docs/documentation/collection/pagination.md +++ b/docs/documentation/collection/pagination.md @@ -8,16 +8,16 @@ In many cases you want to have the possibility to paginate over the response of The Pagination over an aggregation pipeline supports only the response of `Document`, also if you use an case class MongoDAO you will got an `Document` back. ::: -<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/pagination/PaginationAggregationSpec.scala#aggregation-pagination +<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/pagination/PaginationAggregationSuite.scala#aggregation-pagination ## Find Pagination -<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/pagination/PaginationFilterSpec.scala#filter-pagination +<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/pagination/PaginationFilterSuite.scala#filter-pagination ## Foreach over Pagination result ### With default row count -<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/pagination/PaginationIterationSpec.scala#foreach-default-rows +<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/pagination/PaginationIterationSuite.scala#foreach-default-rows ### With specific row count -<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/pagination/PaginationIterationSpec.scala#foreach-with-rows +<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/pagination/PaginationIterationSuite.scala#foreach-with-rows diff --git a/docs/documentation/database/bson.md b/docs/documentation/database/bson.md index 1763250c..024634b6 100644 --- a/docs/documentation/database/bson.md +++ b/docs/documentation/database/bson.md @@ -14,31 +14,36 @@ 
BSON converter is used for bidirectional converting of BSON data. ### toBson ```scala - val result = BsonConverter.toBson(3) // result = BsonInt32(3) +val result = BsonConverter.toBson(3) // result = BsonInt32(3) ``` ### fromBson ```scala - val result = BsonConverter.fromBson(BsonInt32(3)) // result = 3 +val result = BsonConverter.fromBson(BsonInt32(3)) // result = 3 ``` ### toDocument ```scala - case class Base(int: Int, Long: Long, float: Float, double: Double, - string: String, date: Date = new Date()) - object Base { - def apply(): Base = new Base(1, 2, 3, 4, "test") - } +case class Base(int: Int, Long: Long, float: Float, double: Double, string: String, date: Date = new Date()) - val document = Converter.toDocument(Base()) +object Base { + def apply(): Base = new Base(1, 2, 3, 4, "test") +} - // Document((float,BsonDouble{value=3.0}), (string,BsonString{value='test'}), - // (double,BsonDouble{value=4.0}), (Long,BsonInt64{value=2}), (date, - // BsonDateTime{value=1531166757627}), (int,BsonInt32{value=1})) +val document = Converter.toDocument(Base()) + +// Document( +// (float,BsonDouble{value=3.0}), +// (string,BsonString{value='test'}), +// (double,BsonDouble{value=4.0}), +// (Long,BsonInt64{value=2}), +// (date, BsonDateTime{value=1531166757627}), +// (int,BsonInt32{value=1}) +// ) ``` ## Plugins diff --git a/docs/documentation/database/config.md b/docs/documentation/database/config.md index 96de2d6b..14405e1f 100644 --- a/docs/documentation/database/config.md +++ b/docs/documentation/database/config.md @@ -40,7 +40,7 @@ mongo.db.test { Scala Code Snippet ```scala - val customConfig: MongoConfig = MongoConfig.fromPath("mongodb.db.prod") +val customConfig: MongoConfig = MongoConfig.fromPath("mongodb.db.prod") ``` @@ -76,5 +76,3 @@ It is used for DatabaseProvider creation. 
| minSize | 0 | | DefaultMaintenanceInitialDelay | 0 | -## Multiple databases access - diff --git a/docs/documentation/database/index.md b/docs/documentation/database/index.md index b9d934c6..09bbe97c 100644 --- a/docs/documentation/database/index.md +++ b/docs/documentation/database/index.md @@ -13,10 +13,10 @@ DatabaseProvider is the central repository for MongoClient, registries, database Every [Mongo DAO](../mongo-dao/index.md) / [GridFs DAO](../gridfs-dao/index.md) Instance needs this class. ```scala - val provider: DatabaseProvider = DatabaseProvider(MongoConfig.fromPath()) +val provider: DatabaseProvider = DatabaseProvider(MongoConfig.fromPath()) - val database: MongoDatabase = provider.database() +val database: MongoDatabase = provider.database() - // Infos for all collections in the default database - val collectionInfos: List[CollectionInfo] = provider.collectionInfos() +// Infos for all collections in the default database +val collectionInfos: List[CollectionInfo] = provider.collectionInfos() ``` \ No newline at end of file diff --git a/docs/documentation/database/lucene.md b/docs/documentation/database/lucene.md index 1fb5e0c1..ef673954 100644 --- a/docs/documentation/database/lucene.md +++ b/docs/documentation/database/lucene.md @@ -6,17 +6,17 @@ MongoCamp Mongo Driver support the usage of [Lucene Query](https://lucene.apache ### Explicit Usage The LuceneConverter has the methods to parse a String to and `Query` and a other to the document conversion. -<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/lucene/LuceneSearchSpec.scala#lucene-parser-with-explicit +<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/lucene/LuceneSearchSuite.scala#lucene-parser-with-explicit ### Implicit Usage Like the Map to Bson conversion there is also an implicit method to convert `Query` to find Bson. 
-<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/lucene/LuceneSearchSpec.scala#lucene-parser-with-implicit +<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/lucene/LuceneSearchSuite.scala#lucene-parser-with-implicit ### Parse String to Query We have an individual parser to parse an string to Lucene Query, because the default Lucene Analyser is case-insensitive and convert all search data into lower case. So the best way to seach in MongoDb with Lucene Query is to use this code. -<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/lucene/LuceneSearchSpec.scala#lucene-parser +<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/lucene/LuceneSearchSuite.scala#lucene-parser ## Read More [Lucene Cheatsheet](https://www.lucenetutorial.com/lucene-query-syntax.html) \ No newline at end of file diff --git a/docs/documentation/database/provider.md b/docs/documentation/database/provider.md index 44af3348..5676d0f3 100644 --- a/docs/documentation/database/provider.md +++ b/docs/documentation/database/provider.md @@ -1,6 +1,6 @@ # DatabaseProvider -DatabaseProvider is the central repository for MongoClient, registries, databases and collections. +DatabaseProvider is the central repository for MongoClient, databases, collections and DocumentDAOs. 
DatabaseProvider gives access to @@ -8,30 +8,13 @@ DatabaseProvider gives access to * MongoDatabase * MongoCollection -## Registries - -::: tip ScalaDriverDocs -Additional Info for [Registries](https://mongodb.github.io/mongo-java-driver/4.0/driver-scala/getting-started/quick-start-case-class/#configuring-case-classes) -::: - -### Create Case Classes -```scala -case class Student(_id: Long, name: String, scores: List[Score]) - -case class Score(score: Double, `type`: String) - -case class Grade(_id: ObjectId, student_id: Long, class_id: Long, scores: List[Score]) -``` - -### Create Registry -```scala -val registry: CodecRegistry = fromProviders(classOf[Student], classOf[Score], classOf[Grade]) - -val providerWithRegistry: DatabaseProvider = DatabaseProvider(MongoConfig.fromPath(), registry) -``` -## Multiple databases access - - +## DocumentDAO +From a DatabaseProvider you can perform DocumentDAO initialization and caching. On this DAO you can perform CRUD operations. +<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/database/DatabaseProviderSuite.scala#document-dao +## ~~Registries~~ +::: danger +Registries are no longer supported for automatic case class conversion. For Scala 3 support we changed from MongoDB driver conversion to Circe conversion. +::: \ No newline at end of file diff --git a/docs/documentation/database/relationships.md b/docs/documentation/database/relationships.md index 8daa3cc9..534809bd 100644 --- a/docs/documentation/database/relationships.md +++ b/docs/documentation/database/relationships.md @@ -20,14 +20,16 @@ Simple Setup. 
* Friend Collection ```scala - case class User(id: Long, name: String, loginId: String) - case class Login(id: String, email: String, password: String) - case class Friend(id: Long, name: String, userId: Long) - - object UserDAO extends MongoDAO[User](database, "user") - object LoginDAO extends MongoDAO[Login](database, "login") - object FriendDAO extends MongoDAO[Friend](database, "friend") - +import dev.mongocamp.driver.mongodb.json._ +import io.circe.generic.auto._ + +case class User(id: Long, name: String, loginId: String) +case class Login(id: String, email: String, password: String) +case class Friend(id: Long, name: String, userId: Long) + +object UserDAO extends MongoDAO[User](database, "user") +object LoginDAO extends MongoDAO[Login](database, "login") +object FriendDAO extends MongoDAO[Friend](database, "friend") ``` For relationship setup we create two Relationships in the UserDAO. diff --git a/docs/documentation/gridfs-dao/crud.md b/docs/documentation/gridfs-dao/crud.md index e8c04473..ba2d549c 100644 --- a/docs/documentation/gridfs-dao/crud.md +++ b/docs/documentation/gridfs-dao/crud.md @@ -13,7 +13,7 @@ Possible Metadata types: Return Observable of ObjectId. 
```scala - ImageFilesDAO.insertOne(filename, stream, metadata) +ImageFilesDAO.insertOne(filename, stream, metadata) ``` ## Update @@ -31,8 +31,11 @@ With implicit conversion you can use for OID Parameter: * String ```scala - // for implicit conversion usage - import dev.mongocamp.driver.mongodb._ - - ImageFilesDAO.deleteOne(oid) +// for implicit conversion usage +import dev.mongocamp.driver.mongodb._ +// case class conversion only needed in the DAO Holder Class +import dev.mongocamp.driver.mongodb.json._ +import io.circe.generic.auto._ + +ImageFilesDAO.deleteOne(oid) ``` diff --git a/docs/documentation/gridfs-dao/index.md b/docs/documentation/gridfs-dao/index.md index ce47326b..2364da6b 100644 --- a/docs/documentation/gridfs-dao/index.md +++ b/docs/documentation/gridfs-dao/index.md @@ -19,15 +19,14 @@ A [MongoDatabase](http://mongodb.github.io/mongo-scala-driver/2.3/scaladoc/org/m ### Create DAO ```scala - - /** - * use bucket name fs - */ - object ImageFilesDAO extends GridFSDAO(database) - - /** - * use bucket name images - */ - object ImageFilesDAO extends GridFSDAO(database, "images") +/** +* use bucket name fs +*/ +object ImageFilesDAO extends GridFSDAO(database) + +/** +* use bucket name images +*/ +object ImageFilesDAO extends GridFSDAO(database, "images") ``` \ No newline at end of file diff --git a/docs/documentation/gridfs-dao/metadata.md b/docs/documentation/gridfs-dao/metadata.md index ad119488..198c272c 100644 --- a/docs/documentation/gridfs-dao/metadata.md +++ b/docs/documentation/gridfs-dao/metadata.md @@ -17,10 +17,9 @@ UpdateMetadataElement/s update some part of the metadata by a given filter. 
```scala - - val elements = Map("category"->"logos") - val filter = Map() // all files - ImageFilesDAO.updateMetadataElements(filter, elements) +val elements = Map("category"->"logos") +val filter = Map() // all files +ImageFilesDAO.updateMetadataElements(filter, elements) ``` diff --git a/docs/documentation/mongo-dao/base.md b/docs/documentation/mongo-dao/base.md index 1dd0f42d..45b83bfa 100644 --- a/docs/documentation/mongo-dao/base.md +++ b/docs/documentation/mongo-dao/base.md @@ -41,7 +41,7 @@ Sometimes we need Raw Support (DAO maps to Document). This is automatically included in the MongoDAO class. Simply call Raw on your DAO Object. -<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/operation/AggregationSpec.scala#agg_execute +<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/operation/AggregationSuite.scala#agg_execute ## Base Functions diff --git a/docs/documentation/mongo-dao/index.md b/docs/documentation/mongo-dao/index.md index d8d7b4ab..432500cb 100644 --- a/docs/documentation/mongo-dao/index.md +++ b/docs/documentation/mongo-dao/index.md @@ -18,26 +18,30 @@ The MongoDAO object holds a reference to a [MongoCollection](http://mongodb.gith A DatabaseProvider is needed. ```scala - -private val registry = fromProviders(classOf[Restaurant]) - -val provider = DatabaseProvider("database", registry) +val provider = DatabaseProvider.fromPath("dev.mongocamp") ``` ### Create DAO -A Type Parameter is used for automatic Document to Class conversion (case classes needs to be registered). +::: warning +Since Version 3.0.0 with Scala 3 support, we use Circe for automatic case class conversion. +In most cases, you can use the generic auto import. More information about Circe can be found [here](https://circe.github.io/circe/codecs/auto-derivation.html). +::: + +A Type Parameter is used for automatic Document to Class conversion. 
```scala - object RestaurantDAO extends MongoDAO[Restaurant](provider, "restaurants") +import dev.mongocamp.driver.mongodb.json._ +import io.circe.generic.auto._ +object RestaurantDAO extends MongoDAO[Restaurant](provider, "restaurants") ``` ### Use DAO ```scala - import dev.mongocamp.driver.mongodb._ +import dev.mongocamp.driver.mongodb._ - def restaurantsSize: Long = RestaurantDAO.count() +def restaurantsSize: Long = RestaurantDAO.count() - def findAllRestaurants:List[Restaurant] = RestaurantDAO.find() +def findAllRestaurants:List[Restaurant] = RestaurantDAO.find() ``` diff --git a/docs/documentation/sql/jdbc-driver.md b/docs/documentation/sql/jdbc-driver.md index ac6cf60c..6d7a285f 100644 --- a/docs/documentation/sql/jdbc-driver.md +++ b/docs/documentation/sql/jdbc-driver.md @@ -7,7 +7,7 @@ The JDBC driver is a way to use the SQL queries in your application and run them ### Register Driver In some environments you have to register the driver manually. This is the case for example in the tests. -<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/jdbc/BaseJdbcSpec.scala#register-driver +<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/jdbc/BaseJdbcSuite.scala#register-driver After the driver is registered you can use the driver like a normal [JDBC driver](https://www.baeldung.com/java-jdbc). diff --git a/docs/documentation/sql/queryholder.md b/docs/documentation/sql/queryholder.md index f33ebddd..0c813b55 100644 --- a/docs/documentation/sql/queryholder.md +++ b/docs/documentation/sql/queryholder.md @@ -6,19 +6,19 @@ The MongoSqlQueryHolder provides a way to convert a SQL query to a Mongo query a Initialize the query holder with the SQL query you want to analyse or execute. 
-<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/sql/SelectSqlSpec.scala#initialize-query-holder +<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/sql/SelectSqlSuite.scala#initialize-query-holder In most cases you simply want to run the query and get the result as a `Seq[Document]`. ::: tip The method run returns a classical MongoDb Observable use the implicits to convert it to a `Seq[Document]`. ::: -<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/sql/SelectSqlSpec.scala#query-holder-run +<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/sql/SelectSqlSuite.scala#query-holder-run You can also get the information about the collection and the keys that are used in the query. -<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/sql/SelectSqlSpec.scala#extract-collection -<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/sql/SelectSqlSpec.scala#select-keys +<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/sql/SelectSqlSuite.scala#extract-collection +<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/sql/SelectSqlSuite.scala#select-keys In some cases you need the information about the function calls in the query, for example in your own [jdbc driver](jdbc-driver.md) implementation. Because the difference of MongoDb and SQL for example a sql `select count(*) from empty-collection` is a list documents with one element and the MongoDb has no document in it. -<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/sql/SelectSqlSpec.scala#has-function-call +<<< @/../src/test/scala/dev/mongocamp/driver/mongodb/sql/SelectSqlSuite.scala#has-function-call diff --git a/docs/index.md b/docs/index.md index 4e3796d0..6c8b5344 100644 --- a/docs/index.md +++ b/docs/index.md @@ -17,6 +17,8 @@ hero: link: https://github.com/MongoCamp/mongodb-driver features: + - title: Scala 3 Support + details: Since Version 3.0.0 we support Scala 3. 
- title: Easy Config details: Easy Database Config with provider and MongoConfig - title: SQL Support @@ -25,8 +27,6 @@ features: details: Implement the DAO Pattern for simple MongoDB usage [MongoDAO. - title: Pagination details: Use Pagination in your MongoDB for a lower RAM needing over large responses. - - title: Enhanced BSON - details: Implicit Conversion from Scala Map to BSON - title: GridFS Support details: It provides easy upload, download and metadata handling. - title: Reactive Streams diff --git a/package.json b/package.json index fd28d891..8799952f 100644 --- a/package.json +++ b/package.json @@ -1,28 +1,28 @@ { - "name" : "mongodb-driver", - "organization" : "dev.mongocamp", - "version" : "2.8.2.snapshot", - "author" : "info@mongocamp.dev", - "license" : "Apache-2.0", - "type" : "module", - "repository" : { - "type" : "git", - "url" : "git+https://github.com/MongoCamp/mongodb-driver.git" + "name": "mongodb-driver", + "organization": "dev.mongocamp", + "version": "3.0.0", + "author": "info@mongocamp.dev", + "license": "Apache-2.0", + "type": "module", + "repository": { + "type": "git", + "url": "git+https://github.com/MongoCamp/mongodb-driver.git" }, - "bugs" : { - "url" : "https://github.com/MongoCamp/mongodb-driver/issues" + "bugs": { + "url": "https://github.com/MongoCamp/mongodb-driver/issues" }, - "homepage" : "https://mongodb-driver.mongocamp.dev/", - "scripts" : { - "docs:serve" : "vitepress serve docs --port 5555", - "docs:build" : "pnpm docs:external; vitepress build docs", - "docs:external" : "sh docs/external/fileloader.sh", - "docs:dev" : "pnpm docs:external; vitepress dev docs" + "homepage": "https://mongodb-driver.mongocamp.dev/", + "scripts": { + "docs:serve": "vitepress serve docs --port 5555", + "docs:build": "pnpm docs:external; vitepress build docs", + "docs:external": "sh docs/external/fileloader.sh", + "docs:dev": "pnpm docs:external; vitepress dev docs" }, - "devDependencies" : { - "@iconify-json/fluent-emoji" : "^1.2.1", - 
"@unocss/preset-icons" : "^0.63.4", - "unocss" : "^0.63.4", - "vitepress" : "1.4.1" + "devDependencies": { + "@iconify-json/fluent-emoji": "^1.2.3", + "@unocss/preset-icons": "^65.4.3", + "unocss": "^65.4.3", + "vitepress": "1.6.3" } } \ No newline at end of file diff --git a/project/plugins.sbt b/project/plugins.sbt index 192efc85..43e7531e 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,10 +1,10 @@ addSbtPlugin("org.scalastyle" %% "scalastyle-sbt-plugin" % "1.0.0") -addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.2") +addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.4") -addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.2.2") +addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.3.0") -addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.13.0") +addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.13.1") // updates @@ -21,6 +21,9 @@ addSbtPlugin("com.github.sbt" % "sbt-release" % "1.4.0") addSbtPlugin("dev.quadstingray" %% "sbt-json" % "0.7.1") +addSbtPlugin("ch.epfl.scala" % "sbt-scala3-migrate" % "0.7.1") + + addDependencyTreePlugin // todo remove as soon as possible diff --git a/src/main/scala-2.12/dev/mongocamp/driver/mongodb/schema/CirceProductSchema.scala b/src/main/scala-2.12/dev/mongocamp/driver/mongodb/schema/CirceProductSchema.scala deleted file mode 100644 index c6cbf2e5..00000000 --- a/src/main/scala-2.12/dev/mongocamp/driver/mongodb/schema/CirceProductSchema.scala +++ /dev/null @@ -1,18 +0,0 @@ -package dev.mongocamp.driver.mongodb.schema - -import io.circe.Decoder.Result -import io.circe.{Decoder, Encoder, HCursor, Json} -import jdk.internal.reflect.Reflection -import org.bson.types.ObjectId -import org.joda.time.DateTime -import org.mongodb.scala.Document - -import java.util.Date - -trait CirceProductSchema { - - def productElementNames(internalProduct: Product): Iterator[String] = { - (internalProduct.getClass.getDeclaredFields ++ internalProduct.getClass.getFields).map(_.getName).iterator - } - -} \ No newline at end of 
file diff --git a/src/main/scala-2.13/dev/mongocamp/driver/mongodb/schema/CirceProductSchema.scala b/src/main/scala-2.13/dev/mongocamp/driver/mongodb/schema/CirceProductSchema.scala deleted file mode 100644 index ca8ce007..00000000 --- a/src/main/scala-2.13/dev/mongocamp/driver/mongodb/schema/CirceProductSchema.scala +++ /dev/null @@ -1,17 +0,0 @@ -package dev.mongocamp.driver.mongodb.schema - -import io.circe.Decoder.Result -import io.circe.{ Decoder, Encoder, HCursor, Json } -import org.bson.types.ObjectId -import org.joda.time.DateTime -import org.mongodb.scala.Document - -import java.util.Date - -trait CirceProductSchema { - - def productElementNames(internalProduct: Product): Iterator[String] = { - internalProduct.productElementNames - } - -} diff --git a/src/main/scala/dev/mongocamp/driver/DocumentIncludes.scala b/src/main/scala/dev/mongocamp/driver/DocumentIncludes.scala index a128b40e..1caffec5 100644 --- a/src/main/scala/dev/mongocamp/driver/DocumentIncludes.scala +++ b/src/main/scala/dev/mongocamp/driver/DocumentIncludes.scala @@ -6,21 +6,20 @@ import dev.mongocamp.driver.mongodb.database.DatabaseProvider import dev.mongocamp.driver.mongodb.lucene.LuceneQueryConverter import org.apache.lucene.search.Query import org.bson.types.ObjectId -import org.mongodb.scala.Document import org.mongodb.scala.bson.conversions.Bson +import org.mongodb.scala.{ documentToUntypedDocument, Document } import scala.jdk.CollectionConverters._ import scala.language.implicitConversions + trait DocumentIncludes { implicit def mapToBson(value: Map[_, _]): Bson = Converter.toDocument(value) implicit def luceneQueryBson(query: Query): Bson = LuceneQueryConverter.toDocument(query) - implicit def documentFromJavaMap(map: java.util.Map[String, Any]): Document = - documentFromScalaMap(map.asScala.toMap) + implicit def documentFromJavaMap(map: java.util.Map[String, Any]): Document = documentFromScalaMap(map.asScala.toMap) - implicit def documentFromMutableMap(map: 
collection.mutable.Map[String, Any]): Document = - documentFromScalaMap(map.toMap) + implicit def documentFromMutableMap(map: collection.mutable.Map[String, Any]): Document = documentFromScalaMap(map.toMap) implicit def documentFromScalaMap(map: Map[String, Any]): Document = { var result = Document() @@ -43,15 +42,15 @@ trait DocumentIncludes { result } - implicit def mapFromDocument(document: Document): Map[String, Any] = - BsonConverter.asMap(document) + implicit def mapFromDocument(document: Document): Map[String, Any] = BsonConverter.asMap(document) - implicit def mapListFromDocuments(documents: List[Document]): List[Map[String, Any]] = - BsonConverter.asMapList(documents) + implicit def mapListFromDocuments(documents: List[Document]): List[Map[String, Any]] = BsonConverter.asMapList(documents) // ObjectId implicit def stringToObjectId(str: String): ObjectId = new ObjectId(str) - implicit def documentToObjectId(doc: Document): ObjectId = + implicit def documentToObjectId(doc: Document): ObjectId = { doc.getObjectId(DatabaseProvider.ObjectIdKey) + } + } diff --git a/src/main/scala/dev/mongocamp/driver/MongoImplicits.scala b/src/main/scala/dev/mongocamp/driver/MongoImplicits.scala index eb8159f3..9c703c12 100644 --- a/src/main/scala/dev/mongocamp/driver/MongoImplicits.scala +++ b/src/main/scala/dev/mongocamp/driver/MongoImplicits.scala @@ -4,16 +4,16 @@ import dev.mongocamp.driver.mongodb.operation.ObservableIncludes import org.bson.BsonValue import org.bson.types.ObjectId import org.mongodb.scala.gridfs.{ GridFSFile, GridFSFindObservable } -import org.mongodb.scala.{ FindObservable, Observable, ObservableImplicits } +import org.mongodb.scala.{ Observable, ObservableImplicits } import scala.language.implicitConversions trait MongoImplicits extends ObservableIncludes with ObservableImplicits { implicit def observableToResult[T](obs: Observable[T]): T = obs.result() - implicit def findObservableToResultList[T](obs: FindObservable[T]): List[T] = obs.resultList() + 
implicit def findObservableToResultList[T](obs: Observable[T]): List[T] = obs.resultList() - implicit def findObservableToResultOption[T](obs: FindObservable[T]): Option[T] = obs.resultOption() + implicit def findObservableToResultOption[T](obs: Observable[T]): Option[T] = obs.resultOption() // gridfs-dao diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/Converter.scala b/src/main/scala/dev/mongocamp/driver/mongodb/Converter.scala index 21f151f2..44cd5f35 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/Converter.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/Converter.scala @@ -1,7 +1,7 @@ package dev.mongocamp.driver.mongodb import dev.mongocamp.driver.mongodb.bson.BsonConverter -import org.mongodb.scala.Document +import org.mongodb.scala.{ bsonDocumentToDocument, Document } object Converter { diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/Filter.scala b/src/main/scala/dev/mongocamp/driver/mongodb/Filter.scala index 11f0ef01..bc4c8db8 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/Filter.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/Filter.scala @@ -1,12 +1,12 @@ package dev.mongocamp.driver.mongodb -import java.util.Date - import org.bson.conversions.Bson import org.mongodb.scala.bson.BsonDocument import org.mongodb.scala.bson.collection.immutable.Document import org.mongodb.scala.model.Filters._ +import java.util.Date + object Filter extends Filter trait Filter { diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/MongoDAO.scala b/src/main/scala/dev/mongocamp/driver/mongodb/MongoDAO.scala index a64b62ba..b1ca9bde 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/MongoDAO.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/MongoDAO.scala @@ -3,7 +3,9 @@ package dev.mongocamp.driver.mongodb import better.files.File import dev.mongocamp.driver.mongodb.bson.{ BsonConverter, DocumentHelper } import dev.mongocamp.driver.mongodb.database.{ ChangeObserver, CollectionStatus, CompactResult, 
DatabaseProvider } +import dev.mongocamp.driver.mongodb.json._ import dev.mongocamp.driver.mongodb.operation.Crud +import io.circe.Decoder import org.bson.json.JsonParseException import org.mongodb.scala.model.Accumulators._ import org.mongodb.scala.model.Aggregates._ @@ -18,13 +20,13 @@ import scala.reflect.ClassTag /** Created by tom on 20.01.17. */ -abstract class MongoDAO[A](provider: DatabaseProvider, collectionName: String)(implicit ct: ClassTag[A]) extends Crud[A] { +abstract class MongoDAO[A](provider: DatabaseProvider, collectionName: String)(implicit ct: ClassTag[A], decoder: Decoder[A]) extends Crud[A] { val databaseName: String = provider.guessDatabaseName(collectionName) val name: String = provider.guessName(collectionName) - val collection: MongoCollection[A] = provider.collection[A](collectionName) + val collection: MongoCollection[Document] = provider.collection(collectionName) def addChangeObserver(observer: ChangeObserver[A]): ChangeObserver[A] = { coll.watch[A]().subscribe(observer) @@ -64,7 +66,7 @@ abstract class MongoDAO[A](provider: DatabaseProvider, collectionName: String)(i BsonConverter.fromBson(aggregationResult.get("keySet").head).asInstanceOf[List[String]] } - protected def coll: MongoCollection[A] = collection + protected def coll: MongoCollection[Document] = collection // internal object for raw document access object Raw extends MongoDAO[Document](provider, collectionName) diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/bson/AbstractConverterPlugin.scala b/src/main/scala/dev/mongocamp/driver/mongodb/bson/AbstractConverterPlugin.scala new file mode 100644 index 00000000..6a9c8b55 --- /dev/null +++ b/src/main/scala/dev/mongocamp/driver/mongodb/bson/AbstractConverterPlugin.scala @@ -0,0 +1,26 @@ +package dev.mongocamp.driver.mongodb.bson + +import org.bson.BsonValue +import org.mongodb.scala.bson.BsonNull + +abstract class AbstractConverterPlugin { + + def customClassList: List[Class[_]] = { + List() + } + + def 
hasCustomClass(v: Any): Boolean = { + customClassList.exists(c => c.isAssignableFrom(v.getClass)) + } + + def objectToBson(value: AnyRef): BsonValue = { + val map: Map[String, Any] = ClassUtil.membersToMap(value) + BsonConverter.toBson(map) + } + + def toBson(value: Any): BsonValue = + value match { + case _ => + BsonNull() + } +} diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/bson/BaseConverterPlugin.scala b/src/main/scala/dev/mongocamp/driver/mongodb/bson/BaseConverterPlugin.scala new file mode 100644 index 00000000..0f4422ed --- /dev/null +++ b/src/main/scala/dev/mongocamp/driver/mongodb/bson/BaseConverterPlugin.scala @@ -0,0 +1,3 @@ +package dev.mongocamp.driver.mongodb.bson + +class BaseConverterPlugin extends AbstractConverterPlugin diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/bson/BsonConverter.scala b/src/main/scala/dev/mongocamp/driver/mongodb/bson/BsonConverter.scala index 93fc7f3d..38c693b4 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/bson/BsonConverter.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/bson/BsonConverter.scala @@ -156,9 +156,8 @@ object BsonConverter { BsonNull() } - def fromBson(value: BsonValue): Any = + def fromBson(value: BsonValue): Any = { value match { - case b: BsonBoolean => b.getValue case s: BsonString => s.getValue case bytes: BsonBinary => bytes.getData @@ -169,13 +168,14 @@ object BsonConverter { case i: BsonInt32 => i.getValue case l: BsonInt64 => l.getValue case d: BsonDouble => d.doubleValue() - case d: BsonDecimal128 => d.getValue.bigDecimalValue() + case d: BsonDecimal128 => new scala.math.BigDecimal(d.getValue.bigDecimalValue()) case doc: BsonDocument => Document(doc) case array: BsonArray => array.getValues.asScala.toList.map(v => fromBson(v)) case n: BsonNull => null case _ => value } + } def asMap(document: Document): Map[String, Any] = { val result = new mutable.HashMap[String, Any]() diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/bson/ConverterPlugin.scala 
b/src/main/scala/dev/mongocamp/driver/mongodb/bson/ClassUtil.scala similarity index 62% rename from src/main/scala/dev/mongocamp/driver/mongodb/bson/ConverterPlugin.scala rename to src/main/scala/dev/mongocamp/driver/mongodb/bson/ClassUtil.scala index e5fbff85..df06ce3d 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/bson/ConverterPlugin.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/bson/ClassUtil.scala @@ -1,33 +1,8 @@ package dev.mongocamp.driver.mongodb.bson import java.lang.reflect.Field - -import org.bson.BsonValue -import org.mongodb.scala.bson.BsonNull - import scala.collection.mutable -class BaseConverterPlugin extends AbstractConverterPlugin - -abstract class AbstractConverterPlugin { - - def customClassList: List[Class[_]] = List() - - def hasCustomClass(v: Any): Boolean = - customClassList.exists(c => c.isAssignableFrom(v.getClass)) - - def objectToBson(value: AnyRef): BsonValue = { - val map: Map[String, Any] = ClassUtil.membersToMap(value) - BsonConverter.toBson(map) - } - - def toBson(value: Any): BsonValue = - value match { - case _ => - BsonNull() - } -} - object ClassUtil { private val classRegistry = new mutable.HashMap[Class[_], Map[String, Field]]() diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/bson/DocumentHelper.scala b/src/main/scala/dev/mongocamp/driver/mongodb/bson/DocumentHelper.scala index bf37e4b9..6069db17 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/bson/DocumentHelper.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/bson/DocumentHelper.scala @@ -1,6 +1,6 @@ package dev.mongocamp.driver.mongodb.bson -import better.files.{ Scanner, StringSplitter } +import better.files.{ Scanner, StringSplitter, stringSource } import com.typesafe.scalalogging.LazyLogging import org.mongodb.scala.Document diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/bson/codecs/BigDecimalCodec.scala b/src/main/scala/dev/mongocamp/driver/mongodb/bson/codecs/BigDecimalCodec.scala deleted file mode 100644 index 
1c955d68..00000000 --- a/src/main/scala/dev/mongocamp/driver/mongodb/bson/codecs/BigDecimalCodec.scala +++ /dev/null @@ -1,17 +0,0 @@ -package dev.mongocamp.driver.mongodb.bson.codecs - -import org.bson.codecs.{ Codec, DecoderContext, EncoderContext } -import org.bson.{ BsonReader, BsonWriter } - -/** A Codec for BigDecimal instances. - */ -class BigDecimalCodec extends Codec[BigDecimal] { - - override def decode(reader: BsonReader, decoderContext: DecoderContext): BigDecimal = - BigDecimal(reader.readDouble()) - - override def encode(writer: BsonWriter, value: BigDecimal, encoderContext: EncoderContext): Unit = - writer.writeDouble(value.toDouble) - - override def getEncoderClass: Class[BigDecimal] = classOf[BigDecimal] -} diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/bson/codecs/BigIntCodec.scala b/src/main/scala/dev/mongocamp/driver/mongodb/bson/codecs/BigIntCodec.scala deleted file mode 100644 index d7081ddd..00000000 --- a/src/main/scala/dev/mongocamp/driver/mongodb/bson/codecs/BigIntCodec.scala +++ /dev/null @@ -1,20 +0,0 @@ -package dev.mongocamp.driver.mongodb.bson.codecs - -import org.bson.codecs.{ Codec, DecoderContext, EncoderContext } -import org.bson.{ BsonReader, BsonWriter } - -/** A Codec for BigInt instances. 
- * - * @since - * 3.0 - */ -class BigIntCodec extends Codec[BigInt] { - - override def decode(reader: BsonReader, decoderContext: DecoderContext): BigInt = - BigInt(reader.readInt64()) - - override def encode(writer: BsonWriter, value: BigInt, encoderContext: EncoderContext): Unit = - writer.writeInt64(value.toLong) - - override def getEncoderClass: Class[BigInt] = classOf[BigInt] -} diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/bson/codecs/CustomCodecProvider.scala b/src/main/scala/dev/mongocamp/driver/mongodb/bson/codecs/CustomCodecProvider.scala deleted file mode 100644 index fe1aa6be..00000000 --- a/src/main/scala/dev/mongocamp/driver/mongodb/bson/codecs/CustomCodecProvider.scala +++ /dev/null @@ -1,20 +0,0 @@ -package dev.mongocamp.driver.mongodb.bson.codecs - -import org.bson.codecs.Codec -import org.bson.codecs.configuration.{ CodecProvider, CodecRegistry } - -case class CustomCodecProvider() extends CodecProvider { - - val BigIntClass: Class[BigInt] = classOf[BigInt] - val BigDecimalClass: Class[BigDecimal] = classOf[BigDecimal] - - // scalastyle:off null - @SuppressWarnings(Array("unchecked")) - def get[T](clazz: Class[T], registry: CodecRegistry): Codec[T] = - clazz match { - case BigIntClass => new BigIntCodec().asInstanceOf[Codec[T]] - case BigDecimalClass => new BigDecimalCodec().asInstanceOf[Codec[T]] - case _ => null - } - // scalastyle:on null -} diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/bson/convert/JsonDateTimeConverter.scala b/src/main/scala/dev/mongocamp/driver/mongodb/bson/convert/JsonDateTimeConverter.scala index 6c0a775f..d84c67ae 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/bson/convert/JsonDateTimeConverter.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/bson/convert/JsonDateTimeConverter.scala @@ -1,11 +1,11 @@ package dev.mongocamp.driver.mongodb.bson.convert -import java.text.{ DateFormat, SimpleDateFormat } -import java.util.TimeZone - import com.typesafe.scalalogging.LazyLogging import 
org.bson.json.{ Converter, StrictJsonWriter } +import java.text.{ DateFormat, SimpleDateFormat } +import java.util.TimeZone + object JsonDateTimeConverter { val Converter = new JsonDateTimeConverter diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/database/ChangeObserver.scala b/src/main/scala/dev/mongocamp/driver/mongodb/database/ChangeObserver.scala index 38b7edd0..23a9275d 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/database/ChangeObserver.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/database/ChangeObserver.scala @@ -8,11 +8,13 @@ case class ChangeObserver[A](onChangeCallback: ChangeStreamDocument[A] => Unit) override def onSubscribe(subscription: Subscription): Unit = subscription.request(Long.MaxValue) // Request data - override def onNext(changeDocument: ChangeStreamDocument[A]): Unit = + override def onNext(changeDocument: ChangeStreamDocument[A]): Unit = { onChangeCallback(changeDocument) + } - override def onError(throwable: Throwable): Unit = + override def onError(throwable: Throwable): Unit = { logger.error(throwable.getMessage, throwable) + } override def onComplete(): Unit = {} diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/database/CollectionInfo.scala b/src/main/scala/dev/mongocamp/driver/mongodb/database/CollectionInfo.scala index e1852094..fb45a435 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/database/CollectionInfo.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/database/CollectionInfo.scala @@ -1,9 +1,10 @@ package dev.mongocamp.driver.mongodb.database -import java.util.Date - -import org.mongodb.scala.bson.Document import dev.mongocamp.driver.mongodb._ +import org.mongodb.scala.bson.Document +import org.mongodb.scala.documentToUntypedDocument + +import java.util.Date case class CollectionInfo( name: String, diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/database/CollectionStatus.scala b/src/main/scala/dev/mongocamp/driver/mongodb/database/CollectionStatus.scala index 
1539ac5e..1cee2f5e 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/database/CollectionStatus.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/database/CollectionStatus.scala @@ -1,9 +1,9 @@ package dev.mongocamp.driver.mongodb.database -import java.util.Date - -import org.mongodb.scala.bson.Document import dev.mongocamp.driver.mongodb._ +import org.mongodb.scala.bson.Document + +import java.util.Date case class CollectionStatus( ns: String, diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/database/DatabaseInfo.scala b/src/main/scala/dev/mongocamp/driver/mongodb/database/DatabaseInfo.scala index bf9d1b0a..75d28625 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/database/DatabaseInfo.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/database/DatabaseInfo.scala @@ -1,9 +1,10 @@ package dev.mongocamp.driver.mongodb.database -import java.util.Date import dev.mongocamp.driver.mongodb._ - import org.mongodb.scala.bson.Document +import org.mongodb.scala.documentToUntypedDocument + +import java.util.Date case class DatabaseInfo(name: String, sizeOnDisk: Double, empty: Boolean, fetched: Date, map: Map[String, Any]) diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/database/DatabaseProvider.scala b/src/main/scala/dev/mongocamp/driver/mongodb/database/DatabaseProvider.scala index 9e500a75..26aed4aa 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/database/DatabaseProvider.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/database/DatabaseProvider.scala @@ -1,17 +1,14 @@ package dev.mongocamp.driver.mongodb.database import dev.mongocamp.driver.mongodb._ -import dev.mongocamp.driver.mongodb.bson.codecs.CustomCodecProvider -import org.bson.codecs.configuration.CodecRegistries.{ fromProviders, fromRegistries } -import org.bson.codecs.configuration.CodecRegistry -import org.mongodb.scala.MongoClient.DEFAULT_CODEC_REGISTRY +import dev.mongocamp.driver.mongodb.json._ import org.mongodb.scala._ import 
org.mongodb.scala.gridfs.GridFSBucket import scala.collection.mutable import scala.reflect.ClassTag -class DatabaseProvider(val config: MongoConfig, val registry: CodecRegistry) extends Serializable { +class DatabaseProvider(val config: MongoConfig) extends Serializable { private val cachedDatabaseMap = new mutable.HashMap[String, MongoDatabase]() private val cachedMongoDAOMap = new mutable.HashMap[String, MongoDAO[Document]]() private var cachedClient: Option[MongoClient] = None @@ -20,7 +17,7 @@ class DatabaseProvider(val config: MongoConfig, val registry: CodecRegistry) ext def DefaultDatabaseName: String = defaultDatabaseName - def connectionString = { + def connectionString: String = { s"mongodb://${config.host}:${config.port}/${config.database}" } @@ -67,7 +64,7 @@ class DatabaseProvider(val config: MongoConfig, val registry: CodecRegistry) ext def database(databaseName: String = DefaultDatabaseName): MongoDatabase = { if (!cachedDatabaseMap.contains(databaseName)) { - cachedDatabaseMap.put(databaseName, client.getDatabase(databaseName).withCodecRegistry(registry)) + cachedDatabaseMap.put(databaseName, client.getDatabase(databaseName)) } cachedDatabaseMap(databaseName) } @@ -97,14 +94,14 @@ class DatabaseProvider(val config: MongoConfig, val registry: CodecRegistry) ext runCommand(Map("collStats" -> collectionName), databaseName).map(document => CollectionStatus(document)) } - def collection[A](collectionName: String)(implicit ct: ClassTag[A]): MongoCollection[A] = + def collection(collectionName: String): MongoCollection[Document] = if (collectionName.contains(DatabaseProvider.CollectionSeparator)) { val newDatabaseName: String = guessDatabaseName(collectionName) val newCollectionName: String = guessName(collectionName) - database(newDatabaseName).getCollection[A](newCollectionName) + database(newDatabaseName).getCollection(newCollectionName) } else { - database().getCollection[A](collectionName) + database().getCollection(collectionName) } def 
guessDatabaseName(maybeSeparatedName: String): String = { @@ -155,16 +152,12 @@ object DatabaseProvider { val ObjectIdKey = "_id" val CollectionSeparator = ":" - private val CustomRegistry = fromProviders(CustomCodecProvider()) - - private val codecRegistry: CodecRegistry = fromRegistries(CustomRegistry, DEFAULT_CODEC_REGISTRY) - - def apply(config: MongoConfig, registry: CodecRegistry = codecRegistry): DatabaseProvider = { - new DatabaseProvider(config, fromRegistries(registry, CustomRegistry, DEFAULT_CODEC_REGISTRY)) + def apply(config: MongoConfig): DatabaseProvider = { + new DatabaseProvider(config) } - def fromPath(configPath: String = MongoConfig.DefaultConfigPathPrefix, registry: CodecRegistry = codecRegistry): DatabaseProvider = { - apply(MongoConfig.fromPath(configPath), fromRegistries(registry, CustomRegistry, DEFAULT_CODEC_REGISTRY)) + def fromPath(configPath: String = MongoConfig.DefaultConfigPathPrefix): DatabaseProvider = { + apply(MongoConfig.fromPath(configPath)) } } diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/database/MongoConfig.scala b/src/main/scala/dev/mongocamp/driver/mongodb/database/MongoConfig.scala index b5597e20..34e559ff 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/database/MongoConfig.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/database/MongoConfig.scala @@ -1,10 +1,8 @@ package dev.mongocamp.driver.mongodb.database -import java.util.concurrent.TimeUnit import com.mongodb.MongoCompressor import com.mongodb.MongoCredential.createCredential import com.mongodb.event.{ CommandListener, ConnectionPoolListener } -import com.typesafe.config.{ Config, ConfigFactory } import dev.mongocamp.driver.mongodb.database.MongoConfig.{ CompressionSnappy, CompressionZlib, @@ -17,8 +15,9 @@ import dev.mongocamp.driver.mongodb.database.MongoConfig.{ import org.mongodb.scala.connection._ import org.mongodb.scala.{ MongoClientSettings, MongoCredential, ServerAddress } -import scala.jdk.CollectionConverters._ +import 
java.util.concurrent.TimeUnit import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ case class MongoConfig( database: String, diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/database/MongoIndex.scala b/src/main/scala/dev/mongocamp/driver/mongodb/database/MongoIndex.scala index 4d0f3e89..98a8cf65 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/database/MongoIndex.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/database/MongoIndex.scala @@ -1,12 +1,12 @@ package dev.mongocamp.driver.mongodb.database -import java.util.Date - -import dev.mongocamp.driver.mongodb.operation.ObservableIncludes import com.typesafe.scalalogging.LazyLogging +import dev.mongocamp.driver.mongodb.operation.ObservableIncludes import org.mongodb.scala.ListIndexesObservable import org.mongodb.scala.model.IndexOptions +import java.util.Date + case class MongoIndex( name: String, fields: List[String], diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/gridfs/Base.scala b/src/main/scala/dev/mongocamp/driver/mongodb/gridfs/Base.scala index cf361035..5384800c 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/gridfs/Base.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/gridfs/Base.scala @@ -1,16 +1,16 @@ package dev.mongocamp.driver.mongodb.gridfs -import java.io.OutputStream -import java.nio.ByteBuffer - import better.files.File import com.mongodb.client.gridfs.model.GridFSUploadOptions +import com.typesafe.scalalogging.LazyLogging import dev.mongocamp.driver.mongodb.Converter import dev.mongocamp.driver.mongodb.database.DatabaseProvider -import com.typesafe.scalalogging.LazyLogging import org.mongodb.scala.bson.ObjectId import org.mongodb.scala.gridfs.{ GridFSBucket, GridFSDownloadObservable } -import org.mongodb.scala.{ Document, Observable, ReadConcern, ReadPreference, WriteConcern } +import org.mongodb.scala.{ documentToUntypedDocument, Document, Observable, ReadConcern, ReadPreference, WriteConcern } + +import 
java.io.OutputStream +import java.nio.ByteBuffer abstract class Base extends LazyLogging { diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/gridfs/Crud.scala b/src/main/scala/dev/mongocamp/driver/mongodb/gridfs/Crud.scala index 550c3d14..676dbee5 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/gridfs/Crud.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/gridfs/Crud.scala @@ -1,10 +1,10 @@ package dev.mongocamp.driver.mongodb.gridfs -import java.io.InputStream - import org.mongodb.scala.bson.ObjectId import org.mongodb.scala.{ Document, Observable } +import java.io.InputStream + abstract class Crud extends Search { def deleteOne(id: ObjectId): Observable[Unit] = gridfsBucket.delete(id) diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/gridfs/GridFSStreamObservable.scala b/src/main/scala/dev/mongocamp/driver/mongodb/gridfs/GridFSStreamObservable.scala index 1308a3ea..b34667e1 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/gridfs/GridFSStreamObservable.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/gridfs/GridFSStreamObservable.scala @@ -1,12 +1,12 @@ package dev.mongocamp.driver.mongodb.gridfs +import com.typesafe.scalalogging.LazyLogging +import org.mongodb.scala.{ Observable, Observer, Subscription } + import java.io.InputStream import java.nio.ByteBuffer import java.util.concurrent.atomic.AtomicBoolean -import com.typesafe.scalalogging.LazyLogging -import org.mongodb.scala.{ Observable, Observer, Subscription } - case class GridFSStreamObservable(inputStream: InputStream, bufferSize: Int = 1024 * 64) extends Observable[ByteBuffer] with LazyLogging { val isPublishing = new AtomicBoolean(false) val buffer = new Array[Byte](bufferSize) diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/gridfs/GridFSStreamObserver.scala b/src/main/scala/dev/mongocamp/driver/mongodb/gridfs/GridFSStreamObserver.scala index 1140fb94..70dbbb52 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/gridfs/GridFSStreamObserver.scala 
+++ b/src/main/scala/dev/mongocamp/driver/mongodb/gridfs/GridFSStreamObserver.scala @@ -1,12 +1,12 @@ package dev.mongocamp.driver.mongodb.gridfs +import com.typesafe.scalalogging.LazyLogging +import org.mongodb.scala.Observer + import java.io.OutputStream import java.nio.{ Buffer, ByteBuffer } import java.util.concurrent.atomic.{ AtomicBoolean, AtomicLong } -import com.typesafe.scalalogging.LazyLogging -import org.mongodb.scala.Observer - case class GridFSStreamObserver(outputStream: OutputStream) extends Observer[ByteBuffer] with LazyLogging { val completed = new AtomicBoolean(false) val resultLength = new AtomicLong(0) diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/gridfs/Metadata.scala b/src/main/scala/dev/mongocamp/driver/mongodb/gridfs/Metadata.scala index 158969e2..ad067c4a 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/gridfs/Metadata.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/gridfs/Metadata.scala @@ -3,6 +3,7 @@ package dev.mongocamp.driver.mongodb.gridfs import dev.mongocamp.driver.mongodb.MongoDAO import dev.mongocamp.driver.mongodb.bson.BsonConverter import dev.mongocamp.driver.mongodb.database.DatabaseProvider +import dev.mongocamp.driver.mongodb.json._ import org.bson.types.ObjectId import org.mongodb.scala.bson.BsonValue import org.mongodb.scala.bson.conversions.Bson diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoDatabaseMetaData.scala b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoDatabaseMetaData.scala index 7654f8bf..627e081b 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoDatabaseMetaData.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoDatabaseMetaData.scala @@ -2,18 +2,19 @@ package dev.mongocamp.driver.mongodb.jdbc import com.vdurmont.semver4j.Semver import dev.mongocamp.driver.mongodb.database.DatabaseProvider.CollectionSeparator -import dev.mongocamp.driver.mongodb.{ BuildInfo, Converter, GenericObservable } import 
dev.mongocamp.driver.mongodb.jdbc.resultSet.MongoDbResultSet import dev.mongocamp.driver.mongodb.schema.SchemaExplorer -import org.mongodb.scala.bson.{ BsonNull, BsonString } +import dev.mongocamp.driver.mongodb.{ BuildInfo, Converter, GenericObservable } import org.mongodb.scala.bson.collection.immutable.Document +import org.mongodb.scala.bson.{ BsonNull, BsonString } +import org.mongodb.scala.documentToUntypedDocument import java.sql.{ Connection, DatabaseMetaData, ResultSet, RowIdLifetime, Types } import scala.collection.mutable.ArrayBuffer class MongoDatabaseMetaData(connection: MongoJdbcConnection) extends DatabaseMetaData { private lazy val semVer = new Semver(BuildInfo.version) - private lazy val jdbcSemVer = new Semver("4.2") + private lazy val jdbcSemVer = new Semver("4.2.0") private lazy val DatabaseNameKey = "mongodb" override def allProceduresAreCallable() = false diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoJdbcConnection.scala b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoJdbcConnection.scala index 29ae8ad6..4db9f1ff 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoJdbcConnection.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongoJdbcConnection.scala @@ -1,28 +1,15 @@ package dev.mongocamp.driver.mongodb.jdbc -import dev.mongocamp.driver.mongodb.Converter import dev.mongocamp.driver.mongodb.bson.BsonConverter import dev.mongocamp.driver.mongodb.database.DatabaseProvider import dev.mongocamp.driver.mongodb.jdbc.statement.MongoPreparedStatement +import dev.mongocamp.driver.mongodb.json.JsonConverter +import org.mongodb.scala.bson.collection.immutable.Document -import java.{ sql, util } -import java.sql.{ - Blob, - CallableStatement, - Clob, - Connection, - DatabaseMetaData, - NClob, - PreparedStatement, - SQLException, - SQLWarning, - SQLXML, - Savepoint, - Statement, - Struct -} +import java.sql.{Blob, CallableStatement, Clob, Connection, DatabaseMetaData, NClob, PreparedStatement, 
SQLException, SQLWarning, SQLXML, Savepoint, Statement, Struct} import java.util.Properties import java.util.concurrent.Executor +import java.{sql, util} import scala.jdk.CollectionConverters._ class MongoJdbcConnection(databaseProvider: DatabaseProvider) extends Connection with MongoJdbcCloseable { @@ -186,23 +173,19 @@ class MongoJdbcConnection(databaseProvider: DatabaseProvider) extends Connection } override def createClob(): Clob = { - checkClosed() - null + throw sqlFeatureNotSupported() } override def createBlob(): Blob = { - checkClosed() - null + throw sqlFeatureNotSupported() } override def createNClob(): NClob = { - checkClosed() - null + throw sqlFeatureNotSupported() } override def createSQLXML(): SQLXML = { - checkClosed() - null + throw sqlFeatureNotSupported() } override def isValid(timeout: Int): Boolean = { @@ -237,19 +220,17 @@ class MongoJdbcConnection(databaseProvider: DatabaseProvider) extends Connection override def getClientInfo: Properties = { val properties = new Properties() properties.setProperty("ApplicationName", databaseProvider.config.applicationName) - val document = Converter.toDocument(databaseProvider.config) + val document = Document(new JsonConverter().toJson(databaseProvider.config)) BsonConverter.asMap(document).foreach(entry => properties.setProperty(entry._1, entry._2.toString)) properties } override def createArrayOf(typeName: String, elements: Array[AnyRef]): sql.Array = { - checkClosed() - null + throw sqlFeatureNotSupported() } override def createStruct(typeName: String, attributes: Array[AnyRef]): Struct = { - checkClosed() - null + throw sqlFeatureNotSupported() } override def setSchema(schema: String): Unit = { diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongodbJdbcDriverPropertyInfoHelper.scala b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongodbJdbcDriverPropertyInfoHelper.scala index f785b034..7ddb0467 100644 --- 
a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongodbJdbcDriverPropertyInfoHelper.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/MongodbJdbcDriverPropertyInfoHelper.scala @@ -1,5 +1,5 @@ package dev.mongocamp.driver.mongodb.jdbc -import MongodbJdbcDriverPropertyInfoHelper._ +import dev.mongocamp.driver.mongodb.jdbc.MongodbJdbcDriverPropertyInfoHelper._ object MongodbJdbcDriverPropertyInfoHelper { val ApplicationName = "appName" diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/resultSet/MongoDbResultSet.scala b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/resultSet/MongoDbResultSet.scala index d0428235..a57287b7 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/resultSet/MongoDbResultSet.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/resultSet/MongoDbResultSet.scala @@ -1,27 +1,28 @@ package dev.mongocamp.driver.mongodb.jdbc.resultSet -import dev.mongocamp.driver.mongodb.MongoDAO +import dev.mongocamp.driver.mongodb._ import dev.mongocamp.driver.mongodb.bson.BsonConverter -import org.mongodb.scala.bson.{ BsonArray, BsonBoolean, BsonDateTime, BsonDouble, BsonInt32, BsonInt64, BsonNull, BsonNumber, BsonObjectId, BsonString } +import dev.mongocamp.driver.mongodb.jdbc.MongoJdbcCloseable import org.mongodb.scala.bson.collection.immutable.Document +import org.mongodb.scala.bson.{ BsonDouble, BsonInt32, BsonInt64, BsonObjectId, BsonString } +import org.mongodb.scala.documentToUntypedDocument import java.io.{ InputStream, Reader } import java.net.{ URI, URL } -import java.{ sql, util } +import java.nio.charset.StandardCharsets import java.sql.{ Blob, Clob, Date, NClob, Ref, ResultSet, ResultSetMetaData, RowId, SQLException, SQLWarning, SQLXML, Statement, Time, Timestamp } import java.util.Calendar -import dev.mongocamp.driver.mongodb._ -import dev.mongocamp.driver.mongodb.jdbc.MongoJdbcCloseable - -import java.nio.charset.StandardCharsets +import java.{ sql, util } import javax.sql.rowset.serial.SerialBlob import 
scala.util.Try -class MongoDbResultSet(collectionDao: MongoDAO[Document], data: List[Document], queryTimeOut: Int) extends ResultSet with MongoJdbcCloseable { +class MongoDbResultSet(collectionDao: MongoDAO[Document], data: List[Document], queryTimeOut: Int, keySet: List[String] = List.empty) + extends ResultSet + with MongoJdbcCloseable { private var currentRow: Document = _ private var index: Int = 0 - private lazy val metaData = new MongoDbResultSetMetaData(collectionDao, data) + private lazy val metaData = new MongoDbResultSetMetaData(collectionDao, data, keySet) def getDocument: Document = currentRow @@ -250,7 +251,7 @@ class MongoDbResultSet(collectionDao: MongoDAO[Document], data: List[Document], override def getMetaData: ResultSetMetaData = { checkClosed() - new MongoDbResultSetMetaData(collectionDao, data) + new MongoDbResultSetMetaData(collectionDao, data, keySet) } override def getObject(columnIndex: Int): AnyRef = { @@ -390,143 +391,116 @@ class MongoDbResultSet(collectionDao: MongoDAO[Document], data: List[Document], } override def updateNull(columnIndex: Int): Unit = { - checkClosed() - currentRow.updated(metaData.getColumnName(columnIndex), BsonNull()) + updateObject(columnIndex, null) } override def updateNull(columnLabel: String): Unit = { - checkClosed() - currentRow.updated(columnLabel, BsonNull()) + updateObject(columnLabel, null) } override def updateBoolean(columnIndex: Int, x: Boolean): Unit = { - checkClosed() - currentRow.updated(metaData.getColumnName(columnIndex), BsonBoolean(x)) + updateObject(columnIndex, x) } override def updateBoolean(columnLabel: String, x: Boolean): Unit = { - checkClosed() - currentRow.updated(columnLabel, BsonBoolean(x)) + updateObject(columnLabel, x) } override def updateByte(columnIndex: Int, x: Byte): Unit = { - checkClosed() - currentRow.updated(metaData.getColumnName(columnIndex), BsonNumber(x)) + updateObject(columnIndex, x) } override def updateByte(columnLabel: String, x: Byte): Unit = { - checkClosed() - 
currentRow.updated(columnLabel, BsonNumber(x)) + updateObject(columnLabel, x) } override def updateShort(columnIndex: Int, x: Short): Unit = { - checkClosed() - currentRow.updated(metaData.getColumnName(columnIndex), BsonNumber(x)) + updateObject(columnIndex, x) } override def updateShort(columnLabel: String, x: Short): Unit = { - checkClosed() - currentRow.updated(columnLabel, BsonNumber(x)) + updateObject(columnLabel, x) } override def updateInt(columnIndex: Int, x: Int): Unit = { - checkClosed() - currentRow.updated(metaData.getColumnName(columnIndex), BsonNumber(x)) + updateObject(columnIndex, x) + } override def updateInt(columnLabel: String, x: Int): Unit = { - checkClosed() - currentRow.updated(columnLabel, BsonNumber(x)) + updateObject(columnLabel, x) } override def updateLong(columnIndex: Int, x: Long): Unit = { - checkClosed() - currentRow.updated(metaData.getColumnName(columnIndex), BsonNumber(x)) + updateObject(columnIndex, x) } override def updateLong(columnLabel: String, x: Long): Unit = { - checkClosed() - currentRow.updated(columnLabel, BsonNumber(x)) + updateObject(columnLabel, x) } override def updateFloat(columnIndex: Int, x: Float): Unit = { - checkClosed() - currentRow.updated(metaData.getColumnName(columnIndex), BsonNumber(x)) + updateObject(columnIndex, x) } override def updateFloat(columnLabel: String, x: Float): Unit = { - checkClosed() - currentRow.updated(columnLabel, BsonNumber(x)) + updateObject(columnLabel, x) } override def updateDouble(columnIndex: Int, x: Double): Unit = { - checkClosed() - currentRow.updated(metaData.getColumnName(columnIndex), BsonNumber(x)) + updateObject(columnIndex, x) } override def updateDouble(columnLabel: String, x: Double): Unit = { - checkClosed() - currentRow.updated(columnLabel, BsonNumber(x)) + updateObject(columnLabel, x) } override def updateBigDecimal(columnIndex: Int, x: java.math.BigDecimal): Unit = { - checkClosed() - currentRow.updated(metaData.getColumnName(columnIndex), 
BsonNumber(x.doubleValue())) + updateObject(columnIndex, x) } override def updateBigDecimal(columnLabel: String, x: java.math.BigDecimal): Unit = { - checkClosed() - currentRow.updated(columnLabel, BsonNumber(x.doubleValue())) + updateObject(columnLabel, x) } override def updateString(columnIndex: Int, x: String): Unit = { - checkClosed() - currentRow.updated(metaData.getColumnName(columnIndex), BsonString(x)) + updateObject(columnIndex, x) } override def updateString(columnLabel: String, x: String): Unit = { - checkClosed() - currentRow.updated(columnLabel, BsonString(x)) + updateObject(columnLabel, x) } override def updateBytes(columnIndex: Int, x: Array[Byte]): Unit = { - checkClosed() - currentRow.updated(metaData.getColumnName(columnIndex), BsonArray(x)) + updateObject(columnIndex, x) } override def updateBytes(columnLabel: String, x: Array[Byte]): Unit = { - checkClosed() - currentRow.updated(columnLabel, BsonArray(x)) + updateObject(columnLabel, x) } override def updateDate(columnIndex: Int, x: Date): Unit = { - checkClosed() - currentRow.updated(metaData.getColumnName(columnIndex), BsonDateTime(x)) + updateObject(columnIndex, x) } override def updateDate(columnLabel: String, x: Date): Unit = { - checkClosed() - currentRow.updated(columnLabel, BsonDateTime(x)) + updateObject(columnLabel, x) } override def updateTime(columnIndex: Int, x: Time): Unit = { - checkClosed() - currentRow.updated(metaData.getColumnName(columnIndex), BsonDateTime(x)) + updateObject(columnIndex, x) } override def updateTime(columnLabel: String, x: Time): Unit = { - checkClosed() - currentRow.updated(columnLabel, BsonDateTime(x)) + updateObject(columnLabel, x) } override def updateTimestamp(columnIndex: Int, x: Timestamp): Unit = { - checkClosed() - currentRow.updated(metaData.getColumnName(columnIndex), BsonDateTime(x)) + updateObject(columnIndex, x) } override def updateTimestamp(columnLabel: String, x: Timestamp): Unit = { - checkClosed() - currentRow.updated(columnLabel, 
BsonDateTime(x)) + updateObject(columnLabel, x) } override def updateAsciiStream(columnIndex: Int, x: InputStream, length: Int): Unit = sqlFeatureNotSupported() @@ -542,23 +516,21 @@ class MongoDbResultSet(collectionDao: MongoDAO[Document], data: List[Document], override def updateCharacterStream(columnLabel: String, reader: Reader, length: Int): Unit = sqlFeatureNotSupported() override def updateObject(columnIndex: Int, x: Any, scaleOrLength: Int): Unit = { - checkClosed() updateObject(columnIndex, x) } override def updateObject(columnLabel: String, x: Any, scaleOrLength: Int): Unit = { - checkClosed() updateObject(columnLabel, x) } override def updateObject(columnIndex: Int, x: Any): Unit = { - checkClosed() - currentRow.updated(metaData.getColumnName(columnIndex), BsonConverter.toBson(x)) + updateObject(metaData.getColumnName(columnIndex), x) } override def updateObject(columnLabel: String, x: Any): Unit = { checkClosed() - currentRow.updated(columnLabel, BsonConverter.toBson(x)) + currentRow = currentRow.updated(columnLabel, BsonConverter.toBson(x)) + data.updated(index, currentRow) } override def insertRow(): Unit = { @@ -719,49 +691,41 @@ class MongoDbResultSet(collectionDao: MongoDAO[Document], data: List[Document], override def updateNCharacterStream(columnLabel: String, reader: Reader, length: Long): Unit = sqlFeatureNotSupported() override def updateAsciiStream(columnIndex: Int, x: InputStream, length: Long): Unit = { - checkClosed() val text = new String(x.readAllBytes, StandardCharsets.UTF_8) updateString(columnIndex, text) } override def updateAsciiStream(columnLabel: String, x: InputStream, length: Long): Unit = { - checkClosed() val text = new String(x.readAllBytes, StandardCharsets.UTF_8) updateString(columnLabel, text) } override def updateAsciiStream(columnIndex: Int, x: InputStream): Unit = { - checkClosed() val text = new String(x.readAllBytes, StandardCharsets.UTF_8) updateString(columnIndex, text) } override def updateAsciiStream(columnLabel: 
String, x: InputStream): Unit = { - checkClosed() val text = new String(x.readAllBytes, StandardCharsets.UTF_8) updateString(columnLabel, text) } override def updateBinaryStream(columnIndex: Int, x: InputStream, length: Long): Unit = { - checkClosed() val text = new String(x.readAllBytes, StandardCharsets.UTF_8) updateString(columnIndex, text) } override def updateBinaryStream(columnLabel: String, x: InputStream, length: Long): Unit = { - checkClosed() val text = new String(x.readAllBytes, StandardCharsets.UTF_8) updateString(columnLabel, text) } override def updateBinaryStream(columnIndex: Int, x: InputStream): Unit = { - checkClosed() val text = new String(x.readAllBytes, StandardCharsets.UTF_8) updateString(columnIndex, text) } override def updateBinaryStream(columnLabel: String, x: InputStream): Unit = { - checkClosed() val text = new String(x.readAllBytes, StandardCharsets.UTF_8) updateString(columnLabel, text) } @@ -775,52 +739,43 @@ class MongoDbResultSet(collectionDao: MongoDAO[Document], data: List[Document], override def updateNCharacterStream(columnLabel: String, reader: Reader): Unit = sqlFeatureNotSupported() override def updateBlob(columnIndex: Int, inputStream: InputStream, length: Long): Unit = { - checkClosed() val text = new String(inputStream.readAllBytes, StandardCharsets.UTF_8) updateString(columnIndex, text) } override def updateBlob(columnLabel: String, inputStream: InputStream, length: Long): Unit = { - checkClosed() val text = new String(inputStream.readAllBytes, StandardCharsets.UTF_8) updateString(columnLabel, text) } override def updateBlob(columnIndex: Int, inputStream: InputStream): Unit = { - checkClosed() val text = new String(inputStream.readAllBytes, StandardCharsets.UTF_8) updateString(columnIndex, text) } override def updateBlob(columnLabel: String, inputStream: InputStream): Unit = { - checkClosed() val text = new String(inputStream.readAllBytes, StandardCharsets.UTF_8) updateString(columnLabel, text) } override def 
updateClob(columnIndex: Int, reader: Reader, length: Long): Unit = { - checkClosed() val text = convertReaderToString(reader) updateString(columnIndex, text) } override def updateNClob(columnIndex: Int, reader: Reader, length: Long): Unit = { - checkClosed() val text = convertReaderToString(reader) updateString(columnIndex, text) } override def updateNClob(columnLabel: String, reader: Reader, length: Long): Unit = { - checkClosed() val text = convertReaderToString(reader) updateString(columnLabel, text) } override def updateNClob(columnIndex: Int, reader: Reader): Unit = { - checkClosed() val text = convertReaderToString(reader) updateString(columnIndex, text) } override def updateNClob(columnLabel: String, reader: Reader): Unit = { - checkClosed() val text = convertReaderToString(reader) updateString(columnLabel, text) } @@ -834,13 +789,11 @@ class MongoDbResultSet(collectionDao: MongoDAO[Document], data: List[Document], } override def updateBlob(columnIndex: Int, x: Blob): Unit = { - checkClosed() val text = new String(x.getBinaryStream.readAllBytes(), StandardCharsets.UTF_8) updateString(columnIndex, text) } override def updateBlob(columnLabel: String, x: Blob): Unit = { - checkClosed() val text = new String(x.getBinaryStream.readAllBytes(), StandardCharsets.UTF_8) updateString(columnLabel, text) } diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/resultSet/MongoDbResultSetMetaData.scala b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/resultSet/MongoDbResultSetMetaData.scala index 1baa03d7..a70b7719 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/resultSet/MongoDbResultSetMetaData.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/resultSet/MongoDbResultSetMetaData.scala @@ -1,15 +1,16 @@ package dev.mongocamp.driver.mongodb.jdbc.resultSet -import dev.mongocamp.driver.mongodb.MongoDAO +import dev.mongocamp.driver.mongodb._ +import dev.mongocamp.driver.mongodb.json._ import org.mongodb.scala.Document import 
org.mongodb.scala.bson.{ BsonBoolean, BsonInt32, BsonInt64, BsonNumber, BsonString } -import dev.mongocamp.driver.mongodb._ import java.sql.{ ResultSetMetaData, SQLException } class MongoDbResultSetMetaData extends ResultSetMetaData { private var document: Document = _ private var collectionDao: MongoDAO[Document] = _ + private var keySet: List[String] = List.empty def this(dao: MongoDAO[Document]) = { this() @@ -31,6 +32,14 @@ class MongoDbResultSetMetaData extends ResultSetMetaData { this.collectionDao = dao } + def this(dao: MongoDAO[Document], data: List[Document], keySet: List[String]) = { + this() + val row: Document = extractDocumentFromDataList(data) + this.document = row + this.collectionDao = dao + this.keySet = keySet + } + private def extractDocumentFromDataList(data: List[Document]) = { var row = data.headOption.getOrElse(throw new SQLException("No data in ResultSet")).copy() val distinctKeys = data.flatMap(_.keys).distinct @@ -45,6 +54,17 @@ class MongoDbResultSetMetaData extends ResultSetMetaData { override def getColumnCount: Int = document.size + override def getColumnLabel(column: Int): String = { + val keys: Iterable[String] = if (keySet.nonEmpty) { + keySet + } else { + document.keys + } + keys.toList(column - 1) + } + + override def getColumnName(column: Int): String = getColumnLabel(column) + override def isAutoIncrement(column: Int): Boolean = false override def isCaseSensitive(column: Int): Boolean = true @@ -59,10 +79,6 @@ class MongoDbResultSetMetaData extends ResultSetMetaData { override def getColumnDisplaySize(column: Int): Int = Int.MaxValue - override def getColumnLabel(column: Int): String = document.keys.toList(column - 1) - - override def getColumnName(column: Int): String = getColumnLabel(column) - override def getSchemaName(column: Int): String = collectionDao.databaseName override def getPrecision(column: Int): Int = 0 @@ -74,14 +90,14 @@ class MongoDbResultSetMetaData extends ResultSetMetaData { override def 
getCatalogName(column: Int): String = collectionDao.name override def getColumnType(column: Int): Int = { - document.values.toList(column - 1) match { + document(getColumnLabel(column)) match { case _: BsonInt32 => java.sql.Types.INTEGER case _: BsonInt64 => java.sql.Types.BIGINT case _: BsonNumber => java.sql.Types.DOUBLE case _: BsonString => java.sql.Types.VARCHAR case _: BsonBoolean => java.sql.Types.BOOLEAN - case _: Document => java.sql.Types.STRUCT - case _ => java.sql.Types.NULL +// case _: Document => java.sql.Types.STRUCT // todo: check if this is correct + case _ => java.sql.Types.NULL } } @@ -120,6 +136,11 @@ class MongoDbResultSetMetaData extends ResultSetMetaData { override def isWrapperFor(iface: Class[_]): Boolean = false def getColumnIndex(columnLabel: String): Int = { - document.keys.toList.indexOf(columnLabel) + val keys: List[String] = if (keySet.nonEmpty) { + keySet + } else { + document.keys.toList + } + keys.indexOf(columnLabel) + 1 } } diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/statement/MongoPreparedStatement.scala b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/statement/MongoPreparedStatement.scala index ad0a1986..f2c76515 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/statement/MongoPreparedStatement.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/jdbc/statement/MongoPreparedStatement.scala @@ -1,35 +1,19 @@ package dev.mongocamp.driver.mongodb.jdbc.statement import com.typesafe.scalalogging.LazyLogging -import dev.mongocamp.driver.mongodb.{ Converter, GenericObservable } import dev.mongocamp.driver.mongodb.exception.SqlCommandNotSupportedException -import dev.mongocamp.driver.mongodb.jdbc.{ MongoJdbcCloseable, MongoJdbcConnection } import dev.mongocamp.driver.mongodb.jdbc.resultSet.MongoDbResultSet +import dev.mongocamp.driver.mongodb.jdbc.{MongoJdbcCloseable, MongoJdbcConnection} +import dev.mongocamp.driver.mongodb.json.JsonConverter import 
dev.mongocamp.driver.mongodb.sql.MongoSqlQueryHolder +import dev.mongocamp.driver.mongodb.{Converter, GenericObservable} import org.joda.time.DateTime -import java.io.{ InputStream, Reader } +import java.io.{InputStream, Reader} import java.net.URL -import java.{ sql, util } -import java.sql.{ - Blob, - CallableStatement, - Clob, - Connection, - Date, - NClob, - ParameterMetaData, - PreparedStatement, - Ref, - ResultSet, - ResultSetMetaData, - RowId, - SQLWarning, - SQLXML, - Time, - Timestamp -} +import java.sql.{Blob, CallableStatement, Clob, Connection, Date, NClob, ParameterMetaData, Ref, ResultSet, ResultSetMetaData, RowId, SQLWarning, SQLXML, Time, Timestamp} import java.util.Calendar +import java.{sql, util} import scala.collection.mutable import scala.util.Try @@ -68,6 +52,9 @@ case class MongoPreparedStatement(connection: MongoJdbcConnection) extends Calla val queryHolder: MongoSqlQueryHolder = try MongoSqlQueryHolder(sql) catch { + case e: java.sql.SQLException => + logger.error(e.getMessage, e) + null case e: SqlCommandNotSupportedException => logger.error(e.getMessage, e) null @@ -79,7 +66,7 @@ case class MongoPreparedStatement(connection: MongoJdbcConnection) extends Calla var response = queryHolder.run(connection.getDatabaseProvider).results(getQueryTimeout) if (response.isEmpty && queryHolder.hasFunctionCallInSelect) { val emptyDocument = mutable.Map[String, Any]() - queryHolder.getKeysForEmptyDocument.foreach(key => emptyDocument.put(key, null)) + queryHolder.getKeysFromSelect.foreach(key => emptyDocument.put(key, null)) val doc = Converter.toDocument(emptyDocument.toMap) response = Seq(doc) } @@ -90,7 +77,7 @@ case class MongoPreparedStatement(connection: MongoJdbcConnection) extends Calla newDoc }) } - val resultSet = new MongoDbResultSet(collectionName.orNull, response.toList, getQueryTimeout) + val resultSet = new MongoDbResultSet(collectionName.orNull, response.toList, getQueryTimeout, queryHolder.getKeysFromSelect) _lastResultSet = resultSet 
resultSet } @@ -165,22 +152,22 @@ case class MongoPreparedStatement(connection: MongoJdbcConnection) extends Calla override def setBytes(parameterIndex: Int, x: Array[Byte]): Unit = { checkClosed() - setObject(parameterIndex, x) + setObject(parameterIndex, x.mkString("[", ",", "]")) } override def setDate(parameterIndex: Int, x: Date): Unit = { checkClosed() - setObject(parameterIndex, s"'${x.toInstant.toString}'") + setObject(parameterIndex, s"'${new DateTime(x).toInstant.toString}'") } override def setTime(parameterIndex: Int, x: Time): Unit = { checkClosed() - setObject(parameterIndex, s"'${x.toInstant.toString}'") + setObject(parameterIndex, s"'${new DateTime(x).toInstant.toString}'") } override def setTimestamp(parameterIndex: Int, x: Timestamp): Unit = { checkClosed() - setObject(parameterIndex, s"'${x.toInstant.toString}'") + setObject(parameterIndex, s"'${new DateTime(x).toInstant.toString}'") } override def setAsciiStream(parameterIndex: Int, x: InputStream, length: Int): Unit = { @@ -220,12 +207,15 @@ case class MongoPreparedStatement(connection: MongoJdbcConnection) extends Calla } override def setObject(parameterIndex: Int, x: Any, targetSqlType: Int): Unit = { + checkClosed() setObject(parameterIndex, x) } override def setObject(parameterIndex: Int, x: Any): Unit = { checkClosed() x match { + case null => + parameters.put(parameterIndex, "null") case d: Date => parameters.put(parameterIndex, s"'${d.toInstant.toString}'") case d: DateTime => @@ -233,9 +223,9 @@ case class MongoPreparedStatement(connection: MongoJdbcConnection) extends Calla case t: Time => parameters.put(parameterIndex, s"'${t.toInstant.toString}'") case a: Array[Byte] => - parameters.put(parameterIndex, a.mkString("[", ",", "]")) + parameters.put(parameterIndex, new JsonConverter().toJson(a)) case a: Iterable[_] => - parameters.put(parameterIndex, a.mkString("[", ",", "]")) + parameters.put(parameterIndex, new JsonConverter().toJson(a)) case _ => parameters.put(parameterIndex, 
x.toString) } @@ -287,7 +277,7 @@ case class MongoPreparedStatement(connection: MongoJdbcConnection) extends Calla } override def setURL(parameterIndex: Int, x: URL): Unit = { - sqlFeatureNotSupported() + setString(parameterIndex, x.toString) } override def getParameterMetaData: ParameterMetaData = { @@ -374,7 +364,7 @@ case class MongoPreparedStatement(connection: MongoJdbcConnection) extends Calla checkClosed() val updateResponse = executeQuery(sql) updateResponse.next() - val updateCount = updateResponse.getInt("matchedCount") + updateResponse.getInt("deletedCount") + updateResponse.getInt("insertedCount") + val updateCount = updateResponse.getInt("modifiedCount") + updateResponse.getInt("deletedCount") + updateResponse.getInt("insertedCount") _lastUpdateCount = updateCount updateCount } @@ -519,7 +509,6 @@ case class MongoPreparedStatement(connection: MongoJdbcConnection) extends Calla override def setPoolable(poolable: Boolean): Unit = { checkClosed() - 0 } override def isPoolable: Boolean = { @@ -535,235 +524,575 @@ case class MongoPreparedStatement(connection: MongoJdbcConnection) extends Calla checkClosed() false } -// todo - override def unwrap[T](iface: Class[T]): T = null.asInstanceOf[T] - override def isWrapperFor(iface: Class[_]): Boolean = false + override def unwrap[T](iface: Class[T]): T = { + checkClosed() + null.asInstanceOf[T] + } - override def registerOutParameter(parameterIndex: Int, sqlType: Int): Unit = ??? + override def isWrapperFor(iface: Class[_]): Boolean = { + checkClosed() + false + } - override def registerOutParameter(parameterIndex: Int, sqlType: Int, scale: Int): Unit = ??? + override def wasNull(): Boolean = { + checkClosed() + false + } - override def wasNull(): Boolean = ??? 
+ def getStringOption(parameterIndex: Int): Option[String] = { + checkClosed() + parameters.get(parameterIndex).map(_.replace("'", "")) + } - override def getString(parameterIndex: Int): String = parameters.get(parameterIndex).orNull + override def getString(parameterIndex: Int): String = { + getStringOption(parameterIndex).orNull + } - override def getBoolean(parameterIndex: Int): Boolean = parameters.get(parameterIndex).flatMap(v => Try(v.toBoolean).toOption).getOrElse(false) + override def getBoolean(parameterIndex: Int): Boolean = { + checkClosed() + getStringOption(parameterIndex).flatMap(v => Try(v.toBoolean).toOption).getOrElse(false) + } - override def getByte(parameterIndex: Int): Byte = parameters.get(parameterIndex).flatMap(v => Try(v.toByte).toOption).getOrElse(0) + override def getByte(parameterIndex: Int): Byte = { + checkClosed() + getStringOption(parameterIndex).flatMap(v => Try(v.toByte).toOption).getOrElse(Byte.MinValue) + } - override def getShort(parameterIndex: Int): Short = parameters.get(parameterIndex).flatMap(v => Try(v.toShort).toOption).getOrElse(0) + override def getShort(parameterIndex: Int): Short = { + checkClosed() + getStringOption(parameterIndex).flatMap(v => Try(v.toShort).toOption).getOrElse(Short.MinValue) + } - override def getInt(parameterIndex: Int): Int = parameters.get(parameterIndex).flatMap(v => Try(v.toInt).toOption).getOrElse(0) + override def getInt(parameterIndex: Int): Int = { + checkClosed() + getStringOption(parameterIndex).flatMap(v => Try(v.toInt).toOption).getOrElse(Int.MinValue) + } - override def getLong(parameterIndex: Int): Long = parameters.get(parameterIndex).flatMap(v => Try(v.toLong).toOption).getOrElse(0) + override def getLong(parameterIndex: Int): Long = { + checkClosed() + getStringOption(parameterIndex).flatMap(v => Try(v.toLong).toOption).getOrElse(Long.MinValue) + } - override def getFloat(parameterIndex: Int): Float = parameters.get(parameterIndex).flatMap(v => 
Try(v.toFloat).toOption).getOrElse(0.0.toFloat) + override def getFloat(parameterIndex: Int): Float = { + checkClosed() + getStringOption(parameterIndex).flatMap(v => Try(v.toFloat).toOption).getOrElse(Float.MinValue) + } - override def getDouble(parameterIndex: Int): Double = parameters.get(parameterIndex).flatMap(v => Try(v.toDouble).toOption).getOrElse(0.0) + override def getDouble(parameterIndex: Int): Double = { + checkClosed() + getStringOption(parameterIndex).flatMap(v => Try(v.toDouble).toOption).getOrElse(Double.MinValue) + } override def getBigDecimal(parameterIndex: Int, scale: Int): java.math.BigDecimal = getBigDecimal(parameterIndex) - override def getBytes(parameterIndex: Int): Array[Byte] = ??? + override def getBytes(parameterIndex: Int): Array[Byte] = { + checkClosed() + getStringOption(parameterIndex).flatMap(v => Try(new JsonConverter().toObject[Array[Byte]](v)).toOption).orNull + } - override def getDate(parameterIndex: Int): Date = ??? + override def getDate(parameterIndex: Int): Date = { + checkClosed() + getStringOption(parameterIndex).flatMap(v => Try(new Date(DateTime.parse(v).getMillis)).toOption).orNull + } - override def getTime(parameterIndex: Int): Time = ??? + override def getTime(parameterIndex: Int): Time = { + checkClosed() + getStringOption(parameterIndex).flatMap(v => Try(new Time(DateTime.parse(v).getMillis)).toOption).orNull + } - override def getTimestamp(parameterIndex: Int): Timestamp = ??? + override def getTimestamp(parameterIndex: Int): Timestamp = { + checkClosed() + getStringOption(parameterIndex).flatMap(v => Try(new Timestamp(DateTime.parse(v).getMillis)).toOption).orNull + } - override def getObject(parameterIndex: Int): AnyRef = ??? 
+ override def getObject(parameterIndex: Int): AnyRef = { + checkClosed() + getStringOption(parameterIndex).orNull + } - override def getBigDecimal(parameterIndex: Int): java.math.BigDecimal = - parameters.get(parameterIndex).flatMap(v => Try(v.toDouble).toOption).map(new java.math.BigDecimal(_)).orNull + override def getBigDecimal(parameterIndex: Int): java.math.BigDecimal = { + checkClosed() + getStringOption(parameterIndex).flatMap(v => Try(new java.math.BigDecimal(v.toDouble)).toOption).orNull + } - override def getObject(parameterIndex: Int, map: util.Map[String, Class[_]]): AnyRef = ??? + override def getObject(parameterIndex: Int, map: util.Map[String, Class[_]]): AnyRef = { + checkClosed() + getStringOption(parameterIndex).orNull + } - override def getRef(parameterIndex: Int): Ref = ??? + override def getRef(parameterIndex: Int): Ref = { + checkClosed() + sqlFeatureNotSupported() + } - override def getBlob(parameterIndex: Int): Blob = ??? + override def getBlob(parameterIndex: Int): Blob = { + checkClosed() + sqlFeatureNotSupported() + } - override def getClob(parameterIndex: Int): Clob = ??? + override def getClob(parameterIndex: Int): Clob = { + checkClosed() + sqlFeatureNotSupported() + } - override def getArray(parameterIndex: Int): sql.Array = ??? + override def getArray(parameterIndex: Int): sql.Array = { + checkClosed() + sqlFeatureNotSupported() + } - override def getDate(parameterIndex: Int, cal: Calendar): Date = ??? + override def getDate(parameterIndex: Int, cal: Calendar): Date = getDate(parameterIndex) - override def getTime(parameterIndex: Int, cal: Calendar): Time = ??? + override def getTime(parameterIndex: Int, cal: Calendar): Time = getTime(parameterIndex) - override def getTimestamp(parameterIndex: Int, cal: Calendar): Timestamp = ??? + override def getTimestamp(parameterIndex: Int, cal: Calendar): Timestamp = getTimestamp(parameterIndex) - override def registerOutParameter(parameterIndex: Int, sqlType: Int, typeName: String): Unit = ??? 
+ override def registerOutParameter(parameterIndex: Int, sqlType: Int, typeName: String): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def registerOutParameter(parameterName: String, sqlType: Int): Unit = ??? + override def registerOutParameter(parameterName: String, sqlType: Int): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def registerOutParameter(parameterName: String, sqlType: Int, scale: Int): Unit = ??? + override def registerOutParameter(parameterName: String, sqlType: Int, scale: Int): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def registerOutParameter(parameterName: String, sqlType: Int, typeName: String): Unit = ??? + override def registerOutParameter(parameterName: String, sqlType: Int, typeName: String): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def getURL(parameterIndex: Int): URL = ??? + override def registerOutParameter(parameterIndex: Int, sqlType: Int): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def setURL(parameterName: String, `val`: URL): Unit = ??? + override def registerOutParameter(parameterIndex: Int, sqlType: Int, scale: Int): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def setNull(parameterName: String, sqlType: Int): Unit = ??? + override def getURL(parameterIndex: Int): URL = { + checkClosed() + Option(getString(parameterIndex)).flatMap(v => { + val urlParser = Try(new java.net.URI(v).toURL) + urlParser.toOption + }).orNull + } - override def setBoolean(parameterName: String, x: Boolean): Unit = ??? + override def getString(parameterName: String): String = { + checkClosed() + sqlFeatureNotSupported() + } - override def setByte(parameterName: String, x: Byte): Unit = ??? + override def getBoolean(parameterName: String): Boolean = { + checkClosed() + sqlFeatureNotSupported() + } - override def setShort(parameterName: String, x: Short): Unit = ??? 
+ override def getByte(parameterName: String): Byte = { + checkClosed() + sqlFeatureNotSupported() + } - override def setInt(parameterName: String, x: Int): Unit = ??? + override def getShort(parameterName: String): Short = { + checkClosed() + sqlFeatureNotSupported() + } - override def setLong(parameterName: String, x: Long): Unit = ??? + override def getInt(parameterName: String): Int = { + checkClosed() + sqlFeatureNotSupported() + } - override def setFloat(parameterName: String, x: Float): Unit = ??? + override def getLong(parameterName: String): Long = { + checkClosed() + sqlFeatureNotSupported() + } - override def setDouble(parameterName: String, x: Double): Unit = ??? + override def getFloat(parameterName: String): Float = { + checkClosed() + sqlFeatureNotSupported() + } - override def setBigDecimal(parameterName: String, x: java.math.BigDecimal): Unit = ??? + override def getDouble(parameterName: String): Double = { + checkClosed() + sqlFeatureNotSupported() + } - override def setString(parameterName: String, x: String): Unit = ??? + override def getBytes(parameterName: String): Array[Byte] = { + checkClosed() + sqlFeatureNotSupported() + } - override def setBytes(parameterName: String, x: Array[Byte]): Unit = ??? + override def getDate(parameterName: String): Date = { + checkClosed() + sqlFeatureNotSupported() + } - override def setDate(parameterName: String, x: Date): Unit = ??? + override def getTime(parameterName: String): Time = { + checkClosed() + sqlFeatureNotSupported() + } - override def setTime(parameterName: String, x: Time): Unit = ??? + override def getTimestamp(parameterName: String): Timestamp = { + checkClosed() + sqlFeatureNotSupported() + } - override def setTimestamp(parameterName: String, x: Timestamp): Unit = ??? + override def getObject(parameterName: String): AnyRef = { + checkClosed() + sqlFeatureNotSupported() + } - override def setAsciiStream(parameterName: String, x: InputStream, length: Int): Unit = ??? 
+ override def getBigDecimal(parameterName: String): java.math.BigDecimal = { + checkClosed() + sqlFeatureNotSupported() + } - override def setBinaryStream(parameterName: String, x: InputStream, length: Int): Unit = ??? + override def getObject(parameterName: String, map: util.Map[String, Class[_]]): AnyRef = { + checkClosed() + sqlFeatureNotSupported() + } - override def setObject(parameterName: String, x: Any, targetSqlType: Int, scale: Int): Unit = ??? + override def getRef(parameterName: String): Ref = { + checkClosed() + sqlFeatureNotSupported() + } - override def setObject(parameterName: String, x: Any, targetSqlType: Int): Unit = ??? + override def getBlob(parameterName: String): Blob = { + checkClosed() + sqlFeatureNotSupported() + } - override def setObject(parameterName: String, x: Any): Unit = ??? + override def getClob(parameterName: String): Clob = { + checkClosed() + sqlFeatureNotSupported() + } - override def setCharacterStream(parameterName: String, reader: Reader, length: Int): Unit = ??? + override def getArray(parameterName: String): sql.Array = { + checkClosed() + sqlFeatureNotSupported() + } - override def setDate(parameterName: String, x: Date, cal: Calendar): Unit = ??? + override def getDate(parameterName: String, cal: Calendar): Date = { + checkClosed() + sqlFeatureNotSupported() + } - override def setTime(parameterName: String, x: Time, cal: Calendar): Unit = ??? + override def getTime(parameterName: String, cal: Calendar): Time = { + checkClosed() + sqlFeatureNotSupported() + } - override def setTimestamp(parameterName: String, x: Timestamp, cal: Calendar): Unit = ??? + override def getTimestamp(parameterName: String, cal: Calendar): Timestamp = { + checkClosed() + sqlFeatureNotSupported() + } - override def setNull(parameterName: String, sqlType: Int, typeName: String): Unit = ??? 
+ override def getURL(parameterName: String): URL = { + checkClosed() + sqlFeatureNotSupported() + } - override def getString(parameterName: String): String = ??? + override def getRowId(parameterIndex: Int): RowId = { + checkClosed() + sqlFeatureNotSupported() + } - override def getBoolean(parameterName: String): Boolean = ??? + override def getRowId(parameterName: String): RowId = { + checkClosed() + sqlFeatureNotSupported() + } - override def getByte(parameterName: String): Byte = ??? + override def setRowId(parameterName: String, x: RowId): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def getShort(parameterName: String): Short = ??? + override def setNString(parameterName: String, value: String): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def getInt(parameterName: String): Int = ??? + override def setNCharacterStream(parameterName: String, value: Reader, length: Long): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def getLong(parameterName: String): Long = ??? + override def setNClob(parameterName: String, value: NClob): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def getFloat(parameterName: String): Float = ??? + override def setClob(parameterName: String, reader: Reader, length: Long): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def getDouble(parameterName: String): Double = ??? + override def setBlob(parameterName: String, inputStream: InputStream, length: Long): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def getBytes(parameterName: String): Array[Byte] = ??? + override def setNClob(parameterName: String, reader: Reader, length: Long): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def getDate(parameterName: String): Date = ??? + override def getNClob(parameterIndex: Int): NClob = { + checkClosed() + sqlFeatureNotSupported() + } - override def getTime(parameterName: String): Time = ??? 
+ override def getNClob(parameterName: String): NClob = { + checkClosed() + sqlFeatureNotSupported() + } - override def getTimestamp(parameterName: String): Timestamp = ??? + override def setSQLXML(parameterName: String, xmlObject: SQLXML): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def getObject(parameterName: String): AnyRef = ??? + override def getSQLXML(parameterIndex: Int): SQLXML = { + checkClosed() + sqlFeatureNotSupported() + } - override def getBigDecimal(parameterName: String): java.math.BigDecimal = ??? + override def getSQLXML(parameterName: String): SQLXML = { + checkClosed() + sqlFeatureNotSupported() + } - override def getObject(parameterName: String, map: util.Map[String, Class[_]]): AnyRef = ??? + override def getNString(parameterIndex: Int): String = { + checkClosed() + sqlFeatureNotSupported() + } - override def getRef(parameterName: String): Ref = ??? + override def getNString(parameterName: String): String = { + checkClosed() + sqlFeatureNotSupported() + } - override def getBlob(parameterName: String): Blob = ??? + override def getNCharacterStream(parameterIndex: Int): Reader = { + checkClosed() + sqlFeatureNotSupported() + } - override def getClob(parameterName: String): Clob = ??? + override def getNCharacterStream(parameterName: String): Reader = { + checkClosed() + sqlFeatureNotSupported() + } - override def getArray(parameterName: String): sql.Array = ??? + override def getCharacterStream(parameterIndex: Int): Reader = { + checkClosed() + sqlFeatureNotSupported() + } - override def getDate(parameterName: String, cal: Calendar): Date = ??? + override def getCharacterStream(parameterName: String): Reader = { + checkClosed() + sqlFeatureNotSupported() + } - override def getTime(parameterName: String, cal: Calendar): Time = ??? + override def setBlob(parameterName: String, x: Blob): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def getTimestamp(parameterName: String, cal: Calendar): Timestamp = ??? 
+ override def setClob(parameterName: String, x: Clob): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def getURL(parameterName: String): URL = ??? + override def setAsciiStream(parameterName: String, x: InputStream, length: Long): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def getRowId(parameterIndex: Int): RowId = ??? + override def setBinaryStream(parameterName: String, x: InputStream, length: Long): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def getRowId(parameterName: String): RowId = ??? + override def setCharacterStream(parameterName: String, reader: Reader, length: Long): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def setRowId(parameterName: String, x: RowId): Unit = ??? + override def setAsciiStream(parameterName: String, x: InputStream): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def setNString(parameterName: String, value: String): Unit = ??? + override def setBinaryStream(parameterName: String, x: InputStream): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def setNCharacterStream(parameterName: String, value: Reader, length: Long): Unit = ??? + override def setCharacterStream(parameterName: String, reader: Reader): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def setNClob(parameterName: String, value: NClob): Unit = ??? + override def setNCharacterStream(parameterName: String, value: Reader): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def setClob(parameterName: String, reader: Reader, length: Long): Unit = ??? + override def setClob(parameterName: String, reader: Reader): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def setBlob(parameterName: String, inputStream: InputStream, length: Long): Unit = ??? 
+ override def setBlob(parameterName: String, inputStream: InputStream): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def setNClob(parameterName: String, reader: Reader, length: Long): Unit = ??? + override def setNClob(parameterName: String, reader: Reader): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def getNClob(parameterIndex: Int): NClob = ??? + override def getObject[T](parameterIndex: Int, `type`: Class[T]): T = { + checkClosed() + sqlFeatureNotSupported() + } - override def getNClob(parameterName: String): NClob = ??? + override def getObject[T](parameterName: String, `type`: Class[T]): T = { + checkClosed() + sqlFeatureNotSupported() + } - override def setSQLXML(parameterName: String, xmlObject: SQLXML): Unit = ??? + override def setURL(parameterName: String, `val`: URL): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def getSQLXML(parameterIndex: Int): SQLXML = ??? + override def setNull(parameterName: String, sqlType: Int): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def getSQLXML(parameterName: String): SQLXML = ??? + override def setBoolean(parameterName: String, x: Boolean): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def getNString(parameterIndex: Int): String = ??? + override def setByte(parameterName: String, x: Byte): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def getNString(parameterName: String): String = ??? + override def setShort(parameterName: String, x: Short): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def getNCharacterStream(parameterIndex: Int): Reader = ??? + override def setInt(parameterName: String, x: Int): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def getNCharacterStream(parameterName: String): Reader = ??? 
+ override def setLong(parameterName: String, x: Long): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def getCharacterStream(parameterIndex: Int): Reader = ??? + override def setFloat(parameterName: String, x: Float): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def getCharacterStream(parameterName: String): Reader = ??? + override def setDouble(parameterName: String, x: Double): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def setBlob(parameterName: String, x: Blob): Unit = ??? + override def setBigDecimal(parameterName: String, x: java.math.BigDecimal): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def setClob(parameterName: String, x: Clob): Unit = ??? + override def setString(parameterName: String, x: String): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def setAsciiStream(parameterName: String, x: InputStream, length: Long): Unit = ??? + override def setBytes(parameterName: String, x: Array[Byte]): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def setBinaryStream(parameterName: String, x: InputStream, length: Long): Unit = ??? + override def setDate(parameterName: String, x: Date): Unit = { + checkClosed() + sqlFeatureNotSupported() + } + + override def setTime(parameterName: String, x: Time): Unit = { + checkClosed() + sqlFeatureNotSupported() + } + + override def setTimestamp(parameterName: String, x: Timestamp): Unit = { + checkClosed() + sqlFeatureNotSupported() + } + + override def setAsciiStream(parameterName: String, x: InputStream, length: Int): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def setCharacterStream(parameterName: String, reader: Reader, length: Long): Unit = ??? + override def setBinaryStream(parameterName: String, x: InputStream, length: Int): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def setAsciiStream(parameterName: String, x: InputStream): Unit = ??? 
+ override def setObject(parameterName: String, x: Any, targetSqlType: Int, scale: Int): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def setBinaryStream(parameterName: String, x: InputStream): Unit = ??? + override def setObject(parameterName: String, x: Any, targetSqlType: Int): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def setCharacterStream(parameterName: String, reader: Reader): Unit = ??? + override def setObject(parameterName: String, x: Any): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def setNCharacterStream(parameterName: String, value: Reader): Unit = ??? + override def setCharacterStream(parameterName: String, reader: Reader, length: Int): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def setClob(parameterName: String, reader: Reader): Unit = ??? + override def setDate(parameterName: String, x: Date, cal: Calendar): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def setBlob(parameterName: String, inputStream: InputStream): Unit = ??? + override def setTime(parameterName: String, x: Time, cal: Calendar): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def setNClob(parameterName: String, reader: Reader): Unit = ??? + override def setTimestamp(parameterName: String, x: Timestamp, cal: Calendar): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def getObject[T](parameterIndex: Int, `type`: Class[T]): T = ??? + override def setNull(parameterName: String, sqlType: Int, typeName: String): Unit = { + checkClosed() + sqlFeatureNotSupported() + } - override def getObject[T](parameterName: String, `type`: Class[T]): T = ??? 
} diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/json/CirceProductSchema.scala b/src/main/scala/dev/mongocamp/driver/mongodb/json/CirceProductSchema.scala new file mode 100644 index 00000000..59566772 --- /dev/null +++ b/src/main/scala/dev/mongocamp/driver/mongodb/json/CirceProductSchema.scala @@ -0,0 +1,9 @@ +package dev.mongocamp.driver.mongodb.json + +trait CirceProductSchema { + + def productElementNames(internalProduct: Product): Iterator[String] = { + internalProduct.productElementNames + } + +} diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/schema/CirceSchema.scala b/src/main/scala/dev/mongocamp/driver/mongodb/json/CirceSchema.scala similarity index 62% rename from src/main/scala/dev/mongocamp/driver/mongodb/schema/CirceSchema.scala rename to src/main/scala/dev/mongocamp/driver/mongodb/json/CirceSchema.scala index 45f54a7c..6295f160 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/schema/CirceSchema.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/json/CirceSchema.scala @@ -1,4 +1,4 @@ -package dev.mongocamp.driver.mongodb.schema +package dev.mongocamp.driver.mongodb.json import io.circe.Decoder.Result import io.circe.{ Decoder, Encoder, HCursor, Json } @@ -10,37 +10,64 @@ import java.util.Date trait CirceSchema extends CirceProductSchema { - implicit val DateFormat: Encoder[Date] with Decoder[Date] = new Encoder[Date] with Decoder[Date] { - override def apply(a: Date): Json = Encoder.encodeString.apply(a.toInstant.toString) + implicit lazy val DocumentOneFormat: io.circe.Decoder[org.mongodb.scala.Document] = { (c: HCursor) => + // not really needed only for decoder must exists + ??? + } - override def apply(c: HCursor): Result[Date] = Decoder.decodeString - .map(s => new DateTime(s).toDate) - .apply(c) + implicit lazy val DocumentTowFormat: io.circe.Decoder[org.bson.Document] = { (c: HCursor) => + // not really needed only for decoder must exists + ??? 
} - implicit val DateTimeFormat: Encoder[DateTime] with Decoder[DateTime] = new Encoder[DateTime] with Decoder[DateTime] { - override def apply(a: DateTime): Json = Encoder.encodeString.apply(a.toInstant.toString) + implicit val DateFormat: Encoder[Date] with io.circe.Decoder[Date] = new io.circe.Encoder[Date] with io.circe.Decoder[Date] { + override def apply(a: Date): Json = { + Encoder.encodeString.apply(a.toInstant.toString) + } - override def apply(c: HCursor): Result[DateTime] = Decoder.decodeString - .map(s => new DateTime(s)) - .apply(c) + override def apply(c: HCursor): Result[Date] = { + Decoder.decodeString + .map(s => new DateTime(s).toDate) + .apply(c) + } } - implicit val ObjectIdFormat: Encoder[ObjectId] with Decoder[ObjectId] = new Encoder[ObjectId] with Decoder[ObjectId] { - override def apply(a: ObjectId): Json = Encoder.encodeString.apply(a.toHexString) + implicit val DateTimeFormat: Encoder[DateTime] with io.circe.Decoder[DateTime] = new io.circe.Encoder[DateTime] with io.circe.Decoder[DateTime] { + override def apply(a: DateTime): Json = { + Encoder.encodeString.apply(a.toInstant.toString) + } - override def apply(c: HCursor): Result[ObjectId] = Decoder.decodeString - .map(s => new ObjectId(s)) - .apply(c) + override def apply(c: HCursor): Result[DateTime] = { + Decoder.decodeString + .map(s => new DateTime(s)) + .apply(c) + } } - implicit val MapStringAnyFormat: Encoder[Map[String, Any]] with Decoder[Map[String, Any]] = new Encoder[Map[String, Any]] with Decoder[Map[String, Any]] { - override def apply(a: Map[String, Any]): Json = encodeMapStringAny(a) + implicit val ObjectIdFormat: Encoder[ObjectId] with io.circe.Decoder[ObjectId] = new io.circe.Encoder[ObjectId] with io.circe.Decoder[ObjectId] { + override def apply(a: ObjectId): Json = { + Encoder.encodeString.apply(a.toHexString) + } - override def apply(c: HCursor): Result[Map[String, Any]] = Decoder.decodeMap[String, Any].apply(c) + override def apply(c: HCursor): Result[ObjectId] = { + 
Decoder.decodeString + .map(s => new ObjectId(s)) + .apply(c) + } + } + + implicit val MapStringAnyFormat: Encoder[Map[String, Any]] with io.circe.Decoder[Map[String, Any]] = new io.circe.Encoder[Map[String, Any]] + with io.circe.Decoder[Map[String, Any]] { + override def apply(a: Map[String, Any]): Json = { + encodeMapStringAny(a) + } + + override def apply(c: HCursor): Result[Map[String, Any]] = { + Decoder.decodeMap[String, Any].apply(c) + } } - implicit val AnyFormat: Encoder[Any] with Decoder[Any] = new Encoder[Any] with Decoder[Any] { + implicit val AnyFormat: Encoder[Any] with io.circe.Decoder[Any] = new io.circe.Encoder[Any] with io.circe.Decoder[Any] { override def apply(a: Any): Json = encodeAnyToJson(a) override def apply(c: HCursor): Result[Any] = { diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/schema/JsonConverter.scala b/src/main/scala/dev/mongocamp/driver/mongodb/json/JsonConverter.scala similarity index 71% rename from src/main/scala/dev/mongocamp/driver/mongodb/schema/JsonConverter.scala rename to src/main/scala/dev/mongocamp/driver/mongodb/json/JsonConverter.scala index 16da7787..ea5c535f 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/schema/JsonConverter.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/json/JsonConverter.scala @@ -1,9 +1,10 @@ -package dev.mongocamp.driver.mongodb.schema +package dev.mongocamp.driver.mongodb.json import better.files.Resource +import io.circe.Decoder import io.circe.jawn.decode import io.circe.syntax._ -import io.circe.generic.auto._ + class JsonConverter extends CirceSchema { def toJson(s: Any): String = { @@ -20,4 +21,8 @@ class JsonConverter extends CirceSchema { readJsonMap(fileContent) } + def toObject[A](jsonString: String)(implicit decoder: Decoder[A]): A = { + decode[A](jsonString).getOrElse(null.asInstanceOf[A]) + } + } diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/json/package.scala b/src/main/scala/dev/mongocamp/driver/mongodb/json/package.scala new file mode 
100644 index 00000000..5b425f9e --- /dev/null +++ b/src/main/scala/dev/mongocamp/driver/mongodb/json/package.scala @@ -0,0 +1,3 @@ +package dev.mongocamp.driver.mongodb + +package object json extends CirceSchema diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/lucene/LuceneQueryConverter.scala b/src/main/scala/dev/mongocamp/driver/mongodb/lucene/LuceneQueryConverter.scala index f0d4766f..b8e2355e 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/lucene/LuceneQueryConverter.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/lucene/LuceneQueryConverter.scala @@ -4,8 +4,9 @@ import com.typesafe.scalalogging.LazyLogging import dev.mongocamp.driver.mongodb._ import dev.mongocamp.driver.mongodb.exception.NotSupportedException import org.apache.lucene.queryparser.classic.QueryParser -import org.apache.lucene.search.BooleanClause.Occur import org.apache.lucene.search._ +import org.apache.lucene.search.BooleanClause.Occur +import org.joda.time.DateTime import org.mongodb.scala.bson.conversions.Bson import java.text.SimpleDateFormat @@ -13,6 +14,7 @@ import java.util.Date import scala.collection.mutable import scala.collection.mutable.ArrayBuffer import scala.jdk.CollectionConverters._ +import scala.util.Try object LuceneQueryConverter extends LazyLogging { @@ -33,7 +35,7 @@ object LuceneQueryConverter extends LazyLogging { private def getMongoDbSearchMap(query: Query, negated: Boolean, searchWithValueAndString: Boolean): Map[String, Any] = { val searchMapResponse = mutable.Map[String, Any]() query match { - case booleanQuery: BooleanQuery => appendBooleanQueryToSearchMap(searchMapResponse, booleanQuery, searchWithValueAndString) + case booleanQuery: BooleanQuery => appendBooleanQueryToSearchMap(searchMapResponse, booleanQuery, searchWithValueAndString, negated) case termRangeQuery: TermRangeQuery => appendTermRangeQueryToSearchMap(negated, searchMapResponse, termRangeQuery, searchWithValueAndString) case termQuery: TermQuery => 
appendTermQueryToSearchMap(negated, searchMapResponse, termQuery, searchWithValueAndString) case query: PrefixQuery => appendPrefixQueryToSearchMap(negated, searchMapResponse, query) @@ -51,14 +53,17 @@ object LuceneQueryConverter extends LazyLogging { private def appendBooleanQueryToSearchMap( searchMapResponse: mutable.Map[String, Any], booleanQuery: BooleanQuery, - searchWithValueAndString: Boolean + searchWithValueAndString: Boolean, + negate: Boolean ): Unit = { val subQueries = booleanQuery.clauses().asScala val listOfAnd = ArrayBuffer[Map[String, Any]]() val listOfOr = ArrayBuffer[Map[String, Any]]() + val listOfNOr = ArrayBuffer[Map[String, Any]]() var nextTypeAnd = true subQueries.foreach(c => { - val queryMap = getMongoDbSearchMap(c.query(), c.isProhibited, searchWithValueAndString) + val negateSubquery = (c.occur() == Occur.MUST_NOT) + val queryMap = getMongoDbSearchMap(c.query(), negateSubquery, searchWithValueAndString) var thisTypeAnd = true if (c.occur == Occur.MUST) { @@ -85,10 +90,18 @@ object LuceneQueryConverter extends LazyLogging { }) if (listOfAnd.nonEmpty) { - searchMapResponse.put("$and", listOfAnd.toList) + if (negate) { + searchMapResponse.put("$nor", listOfAnd.toList) + } else { + searchMapResponse.put("$and", listOfAnd.toList) + } } if (listOfOr.nonEmpty) { - searchMapResponse.put("$or", listOfOr.toList) + if (negate) { + searchMapResponse.put("$nor", listOfOr.toList) + } else { + searchMapResponse.put("$or", listOfOr.toList) + } } } @@ -184,7 +197,17 @@ object LuceneQueryConverter extends LazyLogging { } private def appendPhraseQueryToSearchMap(negated: Boolean, searchMapResponse: mutable.Map[String, Any], query: PhraseQuery): Unit = { - val listOfSearches = query.getTerms.map(term => Map(term.field() -> generateRegexQuery(s"(.*?)${checkAndConvertValue(term.text())}(.*?)", "i"))).toList + val listOfSearches = query.getTerms + .map(term => { + val convertedValue = checkAndConvertValue(term.text()) + if 
(convertedValue.isInstanceOf[String]) { + Map(term.field() -> generateRegexQuery(s"(.*?)$convertedValue(.*?)", "i")) + } + else { + Map(term.field() -> Map("$eq" -> convertedValue)) + } + }) + .toList if (negated) { searchMapResponse.put("$nor", listOfSearches) } @@ -217,33 +240,35 @@ object LuceneQueryConverter extends LazyLogging { try { val convertedValue: Option[Any] = (List() ++ checkOrReturn(() => s.toDouble) ++ checkOrReturn(() => s.toLong) ++ checkOrReturn(() => s.toBoolean)).headOption - convertedValue.getOrElse({ - val parsedOptions: Option[Date] = datePatters - .map(pattern => { + val response = convertedValue.getOrElse({ + val parsedOptions: List[Date] = Try(new DateTime(s).toDate).toOption.toList ++ datePatters + .flatMap(pattern => { try { val formatter = new SimpleDateFormat(pattern) - Option(formatter.parse(s)) + val r = Option(formatter.parse(s)) + logger.info(s"parsed date $s with pattern $pattern to $r") + r } catch { - case _: Exception => + case e: Exception => None } }) - .find(_.nonEmpty) - .flatten - parsedOptions.getOrElse(s) + .distinct + parsedOptions.headOption.getOrElse(s) }) + response } catch { - case _: Exception => + case _: Throwable => s } } private lazy val datePatters = List( - "yyyyMMdd'T'HHmmssSSSZZ", - "yyyyMMdd'T'HHmmssZZ", - "yyyyMMdd'T'HHmmZZ", + "yyyyMMdd'T'HHmmssSSS'Z'", + "yyyyMMdd'T'HHmmssZ", + "yyyyMMdd'T'HHmmZ", "yyyyMMdd'T'HHmmssSSS", "yyyyMMdd'T'HHmmss", "yyyyMMdd'T'HHmm", diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/operation/Base.scala b/src/main/scala/dev/mongocamp/driver/mongodb/operation/Base.scala index 1e4f7555..df6eae06 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/operation/Base.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/operation/Base.scala @@ -1,17 +1,36 @@ package dev.mongocamp.driver.mongodb.operation import com.typesafe.scalalogging.LazyLogging +import dev.mongocamp.driver.mongodb.bson.BsonConverter import dev.mongocamp.driver.mongodb.database.MongoIndex +import 
dev.mongocamp.driver.mongodb.json._ +import io.circe.Decoder +import io.circe.syntax._ import org.mongodb.scala.bson.conversions.Bson import org.mongodb.scala.model.Sorts._ import org.mongodb.scala.model.{ CountOptions, DropIndexOptions, IndexOptions, Indexes } import org.mongodb.scala.{ Document, ListIndexesObservable, MongoCollection, Observable, SingleObservable } -import scala.concurrent.duration.Duration +import scala.concurrent.duration.{ durationToPair, Duration } +import scala.reflect.ClassTag -abstract class Base[A] extends LazyLogging { +abstract class Base[A](implicit classTag: ClassTag[A]) extends LazyLogging with CirceSchema { - protected def coll: MongoCollection[A] + def documentToObject[A](document: Document, decoder: Decoder[A]): A = { + if (classTag.runtimeClass == classOf[Document]) { + document.asInstanceOf[A] + } + else { + val helperMap = BsonConverter.asMap(document) + val response = decoder.decodeJson(helperMap.asJson) + if (response.isLeft) { + logger.error(s"Error decoding document to object: ${response.swap.getOrElse("")}") + } + response.getOrElse(null.asInstanceOf[A]) + } + } + + protected def coll: MongoCollection[Document] def count(filter: Bson = Document(), options: CountOptions = CountOptions()): Observable[Long] = { coll.countDocuments(filter, options) diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/operation/Crud.scala b/src/main/scala/dev/mongocamp/driver/mongodb/operation/Crud.scala index 2849e1c6..31d3311f 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/operation/Crud.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/operation/Crud.scala @@ -1,51 +1,58 @@ package dev.mongocamp.driver.mongodb.operation -import java.util.Date - import dev.mongocamp.driver.mongodb.database.DatabaseProvider +import dev.mongocamp.driver.mongodb.sync.MongoSyncOperation import dev.mongocamp.driver.mongodb.{ Converter, _ } +import io.circe.Decoder import org.mongodb.scala.bson.conversions.Bson +import org.mongodb.scala.model._ 
import org.mongodb.scala.model.Filters._ -import org.mongodb.scala.model.{ BulkWriteOptions, _ } +import org.mongodb.scala.model.Updates._ import org.mongodb.scala.result.{ DeleteResult, InsertManyResult, InsertOneResult, UpdateResult } -import org.mongodb.scala.{ BulkWriteResult, Observable, SingleObservable } +import org.mongodb.scala.{ BulkWriteResult, Document, Observable, SingleObservable } +import java.util.Date import scala.collection.mutable.ArrayBuffer import scala.reflect.ClassTag -import Updates._ -import dev.mongocamp.driver.mongodb.sync.MongoSyncOperation -abstract class Crud[A]()(implicit ct: ClassTag[A]) extends Search[A] { +abstract class Crud[A]()(implicit ct: ClassTag[A], decoder: Decoder[A]) extends Search[A] { // create - def insertOne(value: A): Observable[InsertOneResult] = coll.insertOne(value) + def insertOne(value: A): Observable[InsertOneResult] = { + coll.insertOne(Converter.toDocument(value)) + } - def insertOne(value: A, options: InsertOneOptions): Observable[InsertOneResult] = - coll.insertOne(value, options) + def insertOne(value: A, options: InsertOneOptions): Observable[InsertOneResult] = { + coll.insertOne(Converter.toDocument(value), options) + } - def insertMany(values: Seq[A]): Observable[InsertManyResult] = - coll.insertMany(values) + def insertMany(values: Seq[A]): Observable[InsertManyResult] = { + coll.insertMany(values.map(Converter.toDocument)) + } - def insertMany(values: Seq[A], options: InsertManyOptions): Observable[InsertManyResult] = - coll.insertMany(values, options) + def insertMany(values: Seq[A], options: InsertManyOptions): Observable[InsertManyResult] = { + coll.insertMany(values.map(Converter.toDocument), options) + } // bulk write - def bulkWrite(requests: List[WriteModel[_ <: A]], options: BulkWriteOptions): SingleObservable[BulkWriteResult] = - coll.bulkWrite(requests, options) + def bulkWrite(requests: List[WriteModel[Document]], options: BulkWriteOptions): SingleObservable[BulkWriteResult] = { + 
coll.bulkWrite(requests.map(wM => wM), options) + } - def bulkWrite(requests: List[WriteModel[_ <: A]], ordered: Boolean = true): SingleObservable[BulkWriteResult] = + def bulkWrite(requests: List[WriteModel[Document]], ordered: Boolean = true): SingleObservable[BulkWriteResult] = { bulkWrite(requests, BulkWriteOptions().ordered(ordered)) + } def bulkWriteMany(values: Seq[A], options: BulkWriteOptions): SingleObservable[BulkWriteResult] = { - val requests: ArrayBuffer[WriteModel[_ <: A]] = ArrayBuffer() - values.foreach(value => requests.append(InsertOneModel(value))) + val requests: ArrayBuffer[WriteModel[Document]] = ArrayBuffer() + values.foreach(value => requests.append(InsertOneModel(Converter.toDocument(value)))) bulkWrite(requests.toList, options) } def bulkWriteMany(values: Seq[A], ordered: Boolean = true): SingleObservable[BulkWriteResult] = { - val requests: ArrayBuffer[WriteModel[_ <: A]] = ArrayBuffer() - values.foreach(value => requests.append(InsertOneModel(value))) + val requests: ArrayBuffer[WriteModel[Document]] = ArrayBuffer() + values.foreach(value => requests.append(InsertOneModel(Converter.toDocument(value)))) bulkWrite(requests.toList, ordered) } @@ -54,57 +61,72 @@ abstract class Crud[A]()(implicit ct: ClassTag[A]) extends Search[A] { def replaceOne(value: A): Observable[UpdateResult] = { val document = Converter.toDocument(value) val oid = document.get(DatabaseProvider.ObjectIdKey).get - coll.replaceOne(equal(DatabaseProvider.ObjectIdKey, oid), value) + coll.replaceOne(equal(DatabaseProvider.ObjectIdKey, oid), document) } def replaceOne(value: A, options: ReplaceOptions): Observable[UpdateResult] = { val document = Converter.toDocument(value) val oid = document.get(DatabaseProvider.ObjectIdKey).get - coll.replaceOne(equal(DatabaseProvider.ObjectIdKey, oid), value, options) + coll.replaceOne(equal(DatabaseProvider.ObjectIdKey, oid), document, options) } - def replaceOne(filter: Bson, value: A): Observable[UpdateResult] = - 
coll.replaceOne(filter, value) + def replaceOne(filter: Bson, value: A): Observable[UpdateResult] = { + coll.replaceOne(filter, Converter.toDocument(value)) + } - def replaceOne(filter: Bson, value: A, options: ReplaceOptions): Observable[UpdateResult] = - coll.replaceOne(filter, value, options) + def replaceOne(filter: Bson, value: A, options: ReplaceOptions): Observable[UpdateResult] = { + coll.replaceOne(filter, Converter.toDocument(value), options) + } - def updateOne(filter: Bson, update: Bson): Observable[UpdateResult] = + def updateOne(filter: Bson, update: Bson): Observable[UpdateResult] = { coll.updateOne(filter, update) + } - def updateOne(filter: Bson, update: Bson, options: UpdateOptions): Observable[UpdateResult] = + def updateOne(filter: Bson, update: Bson, options: UpdateOptions): Observable[UpdateResult] = { coll.updateOne(filter, update, options) + } - def updateMany(filter: Bson, update: Bson): Observable[UpdateResult] = + def updateMany(filter: Bson, update: Bson): Observable[UpdateResult] = { coll.updateMany(filter, update) + } - def updateMany(filter: Bson, update: Bson, options: UpdateOptions): Observable[UpdateResult] = + def updateMany(filter: Bson, update: Bson, options: UpdateOptions): Observable[UpdateResult] = { coll.updateMany(filter, update, options) + } - def touchInternal(filter: Bson): Observable[UpdateResult] = + def touchInternal(filter: Bson): Observable[UpdateResult] = { updateMany(filter, set(MongoSyncOperation.SyncColumnLastUpdate, new Date())) + } // delete - def deleteOne(filter: Bson): Observable[DeleteResult] = coll.deleteOne(filter) + def deleteOne(filter: Bson): Observable[DeleteResult] = { + coll.deleteOne(filter) + } - def deleteOne(filter: Bson, options: DeleteOptions): Observable[DeleteResult] = + def deleteOne(filter: Bson, options: DeleteOptions): Observable[DeleteResult] = { coll.deleteOne(filter, options) + } def deleteOne(value: A): Observable[DeleteResult] = { val oid = 
Converter.toDocument(value).get(DatabaseProvider.ObjectIdKey).get coll.deleteOne(equal(DatabaseProvider.ObjectIdKey, oid)) } - def deleteMany(filter: Bson): Observable[DeleteResult] = + def deleteMany(filter: Bson): Observable[DeleteResult] = { coll.deleteMany(filter) + } - def deleteMany(filter: Bson, options: DeleteOptions): Observable[DeleteResult] = + def deleteMany(filter: Bson, options: DeleteOptions): Observable[DeleteResult] = { coll.deleteMany(filter, options) + } - def deleteAll(): Observable[DeleteResult] = deleteMany(Map()) + def deleteAll(): Observable[DeleteResult] = { + deleteMany(Map()) + } - def deleteAll(options: DeleteOptions): Observable[DeleteResult] = + def deleteAll(options: DeleteOptions): Observable[DeleteResult] = { deleteMany(Map(), options) + } } diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/operation/CrudObserver.scala b/src/main/scala/dev/mongocamp/driver/mongodb/operation/CrudObserver.scala index 3943865d..11ba2232 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/operation/CrudObserver.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/operation/CrudObserver.scala @@ -1,8 +1,8 @@ package dev.mongocamp.driver.mongodb.operation +import com.typesafe.scalalogging.LazyLogging import dev.mongocamp.driver.mongodb.Converter import dev.mongocamp.driver.mongodb.database.DatabaseProvider -import com.typesafe.scalalogging.LazyLogging import org.mongodb.scala.Observer import org.mongodb.scala.bson.conversions.Bson import org.mongodb.scala.model.Filters.equal diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/operation/Search.scala b/src/main/scala/dev/mongocamp/driver/mongodb/operation/Search.scala index d507ad11..21dd281c 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/operation/Search.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/operation/Search.scala @@ -3,43 +3,61 @@ package dev.mongocamp.driver.mongodb.operation import dev.mongocamp.driver.mongodb._ import 
dev.mongocamp.driver.mongodb.bson.BsonConverter._ import dev.mongocamp.driver.mongodb.database.DatabaseProvider +import io.circe.Decoder import org.bson.BsonValue import org.mongodb.scala.bson.ObjectId import org.mongodb.scala.bson.conversions.Bson import org.mongodb.scala.model.Filters._ -import org.mongodb.scala.{ AggregateObservable, DistinctObservable, Document, FindObservable, MongoCollection } +import org.mongodb.scala.{ DistinctObservable, Document, MongoCollection, Observable } import scala.reflect.ClassTag -abstract class Search[A]()(implicit ct: ClassTag[A]) extends Base[A] { +abstract class Search[A]()(implicit ct: ClassTag[A], decoder: Decoder[A]) extends Base[A] { - protected def coll: MongoCollection[A] + protected def coll: MongoCollection[Document] def find( filter: Bson = Document(), sort: Bson = Document(), projection: Bson = Document(), - limit: Int = 0 - ): FindObservable[A] = - if (limit > 0) { - coll.find(filter).sort(sort).projection(projection).limit(limit) - } - else { - coll.find(filter).sort(sort).projection(projection) + limit: Int = 0, + skip: Int = 0 + ): Observable[A] = { + val findObservable = { + if (limit > 0) { + coll.find(filter).sort(sort).projection(projection).limit(limit).skip(skip) + } + else { + coll.find(filter).sort(sort).projection(projection).skip(skip) + } } + findObservable.map(doc => documentToObject[A](doc, decoder)) + } - def findById(oid: ObjectId): FindObservable[A] = find(equal(DatabaseProvider.ObjectIdKey, oid)) + def findById(oid: ObjectId): Observable[A] = { + find(equal(DatabaseProvider.ObjectIdKey, oid)) + } - def find(name: String, value: Any): FindObservable[A] = + def find(name: String, value: Any): Observable[A] = { find(equal(name, value)) + } - def distinct[S <: Any](fieldName: String, filter: Bson = Document()): DistinctObservable[BsonValue] = + def distinct[S <: Any](fieldName: String, filter: Bson = Document()): DistinctObservable[BsonValue] = { coll.distinct[BsonValue](fieldName, filter) + } - def 
distinctResult[S <: Any](fieldName: String, filter: Bson = Document()): Seq[S] = + def distinctResult[S <: Any](fieldName: String, filter: Bson = Document()): Seq[S] = { distinct(fieldName, filter).resultList().map(v => fromBson(v).asInstanceOf[S]) - - def findAggregated(pipeline: Seq[Bson], allowDiskUse: Boolean = false): AggregateObservable[A] = - coll.aggregate(pipeline).allowDiskUse(allowDiskUse) + } + + def findAggregated(pipeline: Seq[Bson], allowDiskUse: Boolean = false): Observable[A] = { + val aggregateObservable = coll.aggregate(pipeline).allowDiskUse(allowDiskUse) + aggregateObservable.map { + case a: A => + a + case doc => + documentToObject[A](doc, decoder) + } + } } diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/package.scala b/src/main/scala/dev/mongocamp/driver/mongodb/package.scala index 6c1c287e..50e00339 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/package.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/package.scala @@ -62,6 +62,5 @@ package object mongodb extends ObservableIncludes with DocumentIncludes { def getFloatValue(key: String): Float = getDoubleValue(key).floatValue() - def updateValue(key: String, value: Any): Any = BsonConverter.updateDocumentValue(document, key, value) } } diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/pagination/MongoPaginatedAggregation.scala b/src/main/scala/dev/mongocamp/driver/mongodb/pagination/MongoPaginatedAggregation.scala index c195edc5..bf66103b 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/pagination/MongoPaginatedAggregation.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/pagination/MongoPaginatedAggregation.scala @@ -1,8 +1,9 @@ package dev.mongocamp.driver.mongodb.pagination import com.mongodb.client.model.Facet +import dev.mongocamp.driver.mongodb._ import dev.mongocamp.driver.mongodb.exception.MongoCampPaginationException -import dev.mongocamp.driver.mongodb.{ MongoDAO, _ } +import dev.mongocamp.driver.mongodb.json._ import 
org.mongodb.scala.bson.Document import org.mongodb.scala.bson.conversions.Bson import org.mongodb.scala.model.Aggregates diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/pagination/MongoPaginatedFilter.scala b/src/main/scala/dev/mongocamp/driver/mongodb/pagination/MongoPaginatedFilter.scala index 55d3aa7f..9f5d1f04 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/pagination/MongoPaginatedFilter.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/pagination/MongoPaginatedFilter.scala @@ -1,7 +1,9 @@ package dev.mongocamp.driver.mongodb.pagination import dev.mongocamp.driver.mongodb.exception.MongoCampPaginationException -import dev.mongocamp.driver.mongodb.{ MongoDAO, _ } +import dev.mongocamp.driver.mongodb.{ MongoDAO , _ } +import io.circe.Decoder +import io.circe.generic.auto._ import org.mongodb.scala.bson.conversions.Bson case class MongoPaginatedFilter[A <: Any](dao: MongoDAO[A], filter: Bson = Map(), sort: Bson = Map(), projection: Bson = Map(), maxWait: Int = DefaultMaxWait) @@ -17,7 +19,7 @@ case class MongoPaginatedFilter[A <: Any](dao: MongoDAO[A], filter: Bson = Map() } val allPages = Math.ceil(count.toDouble / rows).toInt val skip = (page - 1) * rows - val responseList = dao.find(filter, sort, projection, rows.toInt).skip(skip.toInt).resultList(maxWait) + val responseList = dao.find(filter, sort, projection, rows.toInt, skip.toInt).resultList(maxWait) PaginationResult(responseList, PaginationInfo(count, rows, page, allPages)) } diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/pagination/MongoPagination.scala b/src/main/scala/dev/mongocamp/driver/mongodb/pagination/MongoPagination.scala index 569a3c8b..9c3da5f2 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/pagination/MongoPagination.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/pagination/MongoPagination.scala @@ -10,6 +10,7 @@ trait MongoPagination[A <: Any] extends ConfigHelper { val rows = intConfig(configPath = "dev.mongocamp.mongodb.pagination", key = 
"rows") foreach(rows)(a) } + def foreach(rows: Int)(a: A => Unit): Unit = { var currentPageNumber = 1 val rowsPerPage = if (rows < 1) Int.MaxValue else rows diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/relation/OneToManyRelationship.scala b/src/main/scala/dev/mongocamp/driver/mongodb/relation/OneToManyRelationship.scala index 9b633542..b7e49d54 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/relation/OneToManyRelationship.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/relation/OneToManyRelationship.scala @@ -1,7 +1,8 @@ package dev.mongocamp.driver.mongodb.relation -import dev.mongocamp.driver.mongodb.{ GenericObservable, MongoDAO } import dev.mongocamp.driver.mongodb.relation.RelationCache.{ addCachedValue, getCachedValue, hasCachedValue } +import dev.mongocamp.driver.mongodb.{ GenericObservable, MongoDAO } +import io.circe.Decoder case class OneToManyRelationship[A](dao: MongoDAO[A], daoKey: String, useCache: Boolean = true) extends Relationship { diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/relation/OneToOneRelationship.scala b/src/main/scala/dev/mongocamp/driver/mongodb/relation/OneToOneRelationship.scala index 126c34c2..ec6bd65e 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/relation/OneToOneRelationship.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/relation/OneToOneRelationship.scala @@ -1,7 +1,8 @@ package dev.mongocamp.driver.mongodb.relation -import dev.mongocamp.driver.mongodb.{ GenericObservable, MongoDAO } import dev.mongocamp.driver.mongodb.relation.RelationCache.{ addCachedValue, getCachedValue, hasCachedValue } +import dev.mongocamp.driver.mongodb.{ GenericObservable, MongoDAO } +import io.circe.Decoder case class OneToOneRelationship[A](dao: MongoDAO[A], daoKey: String, useCache: Boolean = true) extends Relationship { diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/schema/JsonSchema.scala b/src/main/scala/dev/mongocamp/driver/mongodb/schema/JsonSchema.scala index 
9e7e651e..f5b8bfc3 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/schema/JsonSchema.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/schema/JsonSchema.scala @@ -1,5 +1,7 @@ package dev.mongocamp.driver.mongodb.schema +import dev.mongocamp.driver.mongodb.json.JsonConverter + case class JsonSchema(`$schema`: String, `$ref`: String, definitions: Map[String, JsonSchemaDefinition]) { def toJson: String = { new JsonConverter().toJson(this) diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/schema/SchemaExplorer.scala b/src/main/scala/dev/mongocamp/driver/mongodb/schema/SchemaExplorer.scala index d232bf0e..57b8ea5e 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/schema/SchemaExplorer.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/schema/SchemaExplorer.scala @@ -1,14 +1,14 @@ package dev.mongocamp.driver.mongodb.schema -import better.files.Resource import dev.mongocamp.driver.mongodb._ +import dev.mongocamp.driver.mongodb.json._ +import io.circe.generic.auto._ import org.bson.conversions.Bson -import org.mongodb.scala.Document +import org.mongodb.scala.{ documentToUntypedDocument, Document } import scala.collection.mutable import scala.collection.mutable.ArrayBuffer import scala.concurrent.duration.DurationInt -import io.circe.parser.decode class SchemaExplorer { private val NameSeparator: String = "." 
diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/server/LocalServer.scala b/src/main/scala/dev/mongocamp/driver/mongodb/server/LocalServer.scala index 19aebf62..10193028 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/server/LocalServer.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/server/LocalServer.scala @@ -1,10 +1,10 @@ package dev.mongocamp.driver.mongodb.server import better.files.File -import dev.mongocamp.driver.mongodb.server.ServerConfig.DefaultServerConfigPathPrefix import de.bwaldvogel.mongo.MongoServer import de.bwaldvogel.mongo.backend.h2.H2Backend import de.bwaldvogel.mongo.backend.memory.MemoryBackend +import dev.mongocamp.driver.mongodb.server.ServerConfig.DefaultServerConfigPathPrefix case class LocalServer(serverConfig: ServerConfig = ServerConfig()) { private var h2Path = "undefined" diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala b/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala index a4c8cee6..b4066827 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/sql/MongoSqlQueryHolder.scala @@ -6,12 +6,12 @@ import dev.mongocamp.driver.mongodb.database.DatabaseProvider import dev.mongocamp.driver.mongodb.database.DatabaseProvider.CollectionSeparator import dev.mongocamp.driver.mongodb.exception.SqlCommandNotSupportedException import dev.mongocamp.driver.mongodb.sql.SQLCommandType.SQLCommandType +import net.sf.jsqlparser.expression.operators.arithmetic.Concat import net.sf.jsqlparser.expression.operators.conditional.{ AndExpression, OrExpression } import net.sf.jsqlparser.expression.operators.relational._ import net.sf.jsqlparser.expression.{ ArrayConstructor, Expression, NotExpression, SignedExpression } import net.sf.jsqlparser.parser.{ CCJSqlParser, StreamProvider } import net.sf.jsqlparser.schema.{ Column, Table } -import net.sf.jsqlparser.statement.ShowStatement import 
net.sf.jsqlparser.statement.alter.Alter import net.sf.jsqlparser.statement.create.index.CreateIndex import net.sf.jsqlparser.statement.create.table.CreateTable @@ -23,9 +23,10 @@ import net.sf.jsqlparser.statement.select.{ FromItem, PlainSelect, Select, Selec import net.sf.jsqlparser.statement.show.ShowTablesStatement import net.sf.jsqlparser.statement.truncate.Truncate import net.sf.jsqlparser.statement.update.Update +import net.sf.jsqlparser.statement.{ ShowStatement, Statement } import org.bson.conversions.Bson import org.mongodb.scala.model.IndexOptions -import org.mongodb.scala.model.Sorts.{ ascending, metaTextScore } +import org.mongodb.scala.model.Sorts.ascending import org.mongodb.scala.{ Document, Observable, SingleObservable } import java.sql.SQLException @@ -47,7 +48,7 @@ class MongoSqlQueryHolder { private var indexOptions: Option[IndexOptions] = None private var callFunction: Option[String] = None private var keepOneDocument: Boolean = false - private val keysForEmptyDocument: mutable.Set[String] = mutable.Set.empty + private val keysFromSelect: mutable.ListBuffer[String] = mutable.ListBuffer.empty def this(statement: net.sf.jsqlparser.statement.Statement) = { this() @@ -210,7 +211,7 @@ class MongoSqlQueryHolder { } } - def getKeysForEmptyDocument: Set[String] = keysForEmptyDocument.toSet + def getKeysFromSelect: List[String] = keysFromSelect.toList def hasFunctionCallInSelect: Boolean = keepOneDocument @@ -229,10 +230,12 @@ class MongoSqlQueryHolder { else { e.getValue } + case e: net.sf.jsqlparser.expression.BooleanValue => e.getValue case e: net.sf.jsqlparser.expression.DateValue => e.getValue case e: net.sf.jsqlparser.expression.TimeValue => e.getValue case e: net.sf.jsqlparser.expression.TimestampValue => e.getValue case _: net.sf.jsqlparser.expression.NullValue => null + case e: Concat => Map("$concat" -> List(convertValue(e.getLeftExpression), convertValue(e.getRightExpression))) case t: net.sf.jsqlparser.expression.TimeKeyExpression => 
t.getStringValue.toUpperCase match { case "CURRENT_TIMESTAMP" => new Date() @@ -340,6 +343,14 @@ class MongoSqlQueryHolder { if (classOf[net.sf.jsqlparser.expression.Function].isAssignableFrom(sI.getExpression.getClass)) { keepOneDocument = maybeDistinct.isEmpty } + sI match { + case se: SelectItem[Expression] => + val expressionName = se.getExpression.toString + val keyFromSelect = Option(se.getAlias).map(_.getName).getOrElse(expressionName) + if (keyFromSelect != "*") { + keysFromSelect += keyFromSelect + } + } }) val aliasList = ArrayBuffer[String]() sqlCommandType = SQLCommandType.Select @@ -493,7 +504,6 @@ class MongoSqlQueryHolder { val expression = if (functionName.equalsIgnoreCase(espr.last)) Map("$first" -> espr.last) else Map(functionName -> espr.last) group += expressionName -> expression } - keysForEmptyDocument += Option(se.getAlias).map(_.getName).getOrElse(expressionName) } val groupMap = Map("_id" -> idGroupMap) ++ group.toMap @@ -719,7 +729,7 @@ class MongoSqlQueryHolder { object MongoSqlQueryHolder { - def stringToStatement(sql: String, charset: String = "UTF-8") = { + def stringToStatement(sql: String, charset: String = "UTF-8"): Statement = { try { val stream: java.io.InputStream = new java.io.ByteArrayInputStream(sql.getBytes(charset)) val jSqlParser = new CCJSqlParser(new StreamProvider(stream, charset)) diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/sync/MongoSyncOperation.scala b/src/main/scala/dev/mongocamp/driver/mongodb/sync/MongoSyncOperation.scala index 4662a01d..5ae88670 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/sync/MongoSyncOperation.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/sync/MongoSyncOperation.scala @@ -5,10 +5,9 @@ import dev.mongocamp.driver.mongodb._ import dev.mongocamp.driver.mongodb.database.{ ConfigHelper, DatabaseProvider } import dev.mongocamp.driver.mongodb.sync.SyncDirection.SyncDirection import dev.mongocamp.driver.mongodb.sync.SyncStrategy.SyncStrategy -import 
org.mongodb.scala.Document import org.mongodb.scala.bson.ObjectId -import org.mongodb.scala.model.Projections._ import org.mongodb.scala.model.Updates._ +import org.mongodb.scala.{ documentToUntypedDocument, Document } import java.util.Date @@ -19,12 +18,11 @@ case class MongoSyncOperation( idColumnName: String = DatabaseProvider.ObjectIdKey ) extends LazyLogging with Filter { - val includes = include(idColumnName, MongoSyncOperation.SyncColumnLastSync, MongoSyncOperation.SyncColumnLastUpdate) def excecute(source: DatabaseProvider, target: DatabaseProvider): List[MongoSyncResult] = try { - val sourceInfos: Seq[Document] = source.dao(collectionName).find().projection(includes).results(MongoSyncOperation.MaxWait) - val targetInfos: Seq[Document] = target.dao(collectionName).find().projection(includes).results(MongoSyncOperation.MaxWait) + val sourceInfos: Seq[Document] = source.dao(collectionName).find().results(MongoSyncOperation.MaxWait) + val targetInfos: Seq[Document] = target.dao(collectionName).find().results(MongoSyncOperation.MaxWait) if (SyncDirection.SourceToTarget == syncDirection) { val diff = sourceInfos.diff(targetInfos) @@ -91,6 +89,6 @@ object MongoSyncOperation extends ConfigHelper { val SyncColumnLastSync: String = stringConfig(configPath = "dev.mongocamp.mongodb.sync", key = "syncColumnLastSync", default = "_lastSync").get val SyncColumnLastUpdate: String = stringConfig(configPath = "dev.mongocamp.mongodb.sync", key = "syncColumnLastUpdate", default = "_lastUpdate").get - val WriteSyncLogOnMaster = booleanConfig(configPath = "dev.mongocamp.mongodb.sync", key = "writeSyncLogOnMaster") - val SyncLogTableName: String = stringConfig(configPath = "dev.mongocamp.mongodb.sync", key = "syncLogTableName", default = "mongodb-sync-log").get + val WriteSyncLogOnMaster: Boolean = booleanConfig(configPath = "dev.mongocamp.mongodb.sync", key = "writeSyncLogOnMaster") + val SyncLogTableName: String = stringConfig(configPath = "dev.mongocamp.mongodb.sync", key = 
"syncLogTableName", default = "mongodb-sync-log").get } diff --git a/src/main/scala/dev/mongocamp/driver/mongodb/sync/MongoSyncer.scala b/src/main/scala/dev/mongocamp/driver/mongodb/sync/MongoSyncer.scala index 6e7ae526..8fcf8ab7 100644 --- a/src/main/scala/dev/mongocamp/driver/mongodb/sync/MongoSyncer.scala +++ b/src/main/scala/dev/mongocamp/driver/mongodb/sync/MongoSyncer.scala @@ -2,20 +2,32 @@ package dev.mongocamp.driver.mongodb.sync import dev.mongocamp.driver.mongodb._ import dev.mongocamp.driver.mongodb.database.{ DatabaseProvider, MongoConfig } -import org.bson.codecs.configuration.CodecRegistries.fromProviders -import org.mongodb.scala.bson.codecs.Macros._ +import dev.mongocamp.driver.mongodb.json._ +import io.circe.HCursor +import io.circe.generic.auto._ import scala.collection.mutable - case class MongoSyncer( sourceConfig: MongoConfig, targetConfig: MongoConfig, syncOperations: List[MongoSyncOperation] = List() ) { - private val registry = fromProviders(classOf[MongoSyncResult]) + + implicit private lazy val ThrowableFormat: io.circe.Decoder[Throwable] = { (c: HCursor) => + // not really needed only for decoder must exists + ??? + } + + implicit private lazy val ExceptionFormat: io.circe.Decoder[Exception] = { (c: HCursor) => + // not really needed only for decoder must exists + ??? 
+ } + + // todo: check if this is correct +// private val registry = fromProviders(classOf[MongoSyncResult]) private val operationMap = new mutable.HashMap[String, MongoSyncOperation]() - val source: DatabaseProvider = DatabaseProvider(sourceConfig, registry) + val source: DatabaseProvider = DatabaseProvider(sourceConfig) val target: DatabaseProvider = DatabaseProvider(targetConfig) object MongoSyncResultDAO extends MongoDAO[MongoSyncResult](source, MongoSyncOperation.SyncLogTableName) diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/CompactSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/CompactSpec.scala deleted file mode 100644 index 24236478..00000000 --- a/src/test/scala/dev/mongocamp/driver/mongodb/CompactSpec.scala +++ /dev/null @@ -1,42 +0,0 @@ -package dev.mongocamp.driver.mongodb - -import better.files.{File, Resource} -import dev.mongocamp.driver.mongodb.database.CompactResult -import dev.mongocamp.driver.mongodb.test.TestDatabase -import dev.mongocamp.driver.mongodb.test.TestDatabase.BookDAO -import org.specs2.mutable.Specification -import org.specs2.specification.BeforeAll - -import java.text.SimpleDateFormat -import java.util.Date - -class CompactSpec extends Specification with BeforeAll { - val DateFormat = new SimpleDateFormat("yyyy-MM-dd") - val From: Date = DateFormat.parse("2000-01-01") - - override def beforeAll(): Unit = { - BookDAO.drop().result() - BookDAO.importJsonFile(File(Resource.getUrl("json/books.json"))).result() - val stats = BookDAO.collectionStatus.result() - stats.count mustEqual 431 - } - - "CompactSpec" should { - "compact single collection" in { - val count: Option[CompactResult] = BookDAO.compact.result() - count must beSome() - count.get.bytesFreed must beGreaterThanOrEqualTo(0L) - } - "compact complete database" in { - val count: List[CompactResult] = TestDatabase.provider.compactDatabase() - count.size must beGreaterThanOrEqualTo(1) - count.head.bytesFreed must beGreaterThanOrEqualTo(0L) - } - "compact all 
databases in scope" in { - val count: List[CompactResult] = TestDatabase.provider.compact() - count.size must beGreaterThanOrEqualTo(1) - count.head.bytesFreed must beGreaterThanOrEqualTo(0L) - } - } - -} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/CompactSuite.scala b/src/test/scala/dev/mongocamp/driver/mongodb/CompactSuite.scala new file mode 100644 index 00000000..cf1d17c5 --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/CompactSuite.scala @@ -0,0 +1,41 @@ +package dev.mongocamp.driver.mongodb + +import better.files.{ File, Resource } +import dev.mongocamp.driver.mongodb.database.CompactResult +import dev.mongocamp.driver.mongodb.test.TestDatabase +import dev.mongocamp.driver.mongodb.test.TestDatabase.BookDAO +import munit.FunSuite + +import java.text.SimpleDateFormat +import java.util.Date + +class CompactSuite extends FunSuite { + val DateFormat = new SimpleDateFormat("yyyy-MM-dd") + val From: Date = DateFormat.parse("2000-01-01") + + override def beforeAll(): Unit = { + super.beforeAll() + BookDAO.drop().result() + BookDAO.importJsonFile(File(Resource.getUrl("json/books.json"))).result() + val stats = BookDAO.collectionStatus.result() + assertEquals(stats.count, 431) + } + + test("compact single collection") { + val count: Option[CompactResult] = BookDAO.compact.result() + assertEquals(count.isDefined, true) + assertEquals(count.get.bytesFreed >= 0L, true) + } + + test("compact complete database") { + val count: List[CompactResult] = TestDatabase.provider.compactDatabase() + assertEquals(count.nonEmpty, true) + assertEquals(count.head.bytesFreed >= 0L, true) + } + + test("compact all databases in scope") { + val count: List[CompactResult] = TestDatabase.provider.compact() + assertEquals(count.nonEmpty, true) + assertEquals(count.head.bytesFreed >= 0L, true) + } +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/DocumentIncludesSuite.scala b/src/test/scala/dev/mongocamp/driver/mongodb/DocumentIncludesSuite.scala new file 
mode 100644 index 00000000..4623bcb2 --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/DocumentIncludesSuite.scala @@ -0,0 +1,86 @@ +package dev.mongocamp.driver.mongodb + +import dev.mongocamp.driver.DocumentIncludes +import dev.mongocamp.driver.mongodb.database.DatabaseProvider +import munit.FunSuite +import org.mongodb.scala.Document +import org.bson.types.ObjectId +import org.apache.lucene.search.MatchAllDocsQuery + +class DocumentIncludesSuite extends FunSuite with DocumentIncludes { + + test("mapToBson should convert Map to Bson") { + val map = Map("key" -> "value") + val bson = mapToBson(map) + assert(bson.isInstanceOf[Document]) + assertEquals(bson.toBsonDocument.toJson(), "{\"key\": \"value\"}") + assertEquals(bson.asInstanceOf[Document].getString("key"), "value") + } + + test("luceneQueryBson should convert Lucene Query to Bson") { + val query = new MatchAllDocsQuery() + val bson = luceneQueryBson(query) + assert(bson.isInstanceOf[Document]) + } + + test("documentFromJavaMap should convert java.util.Map to Document") { + val javaMap = new java.util.HashMap[String, Any]() + javaMap.put("key", "value") + val document = documentFromJavaMap(javaMap) + assert(document.isInstanceOf[Document]) + assertEquals(document.toBsonDocument.toJson(), "{\"key\": \"value\"}") + assertEquals(document.getString("key"), "value") + } + + test("documentFromMutableMap should convert mutable.Map to Document") { + val mutableMap: collection.mutable.Map[String, Any] = collection.mutable.Map("key" -> "value") + val document = documentFromMutableMap(mutableMap) + assert(document.isInstanceOf[Document]) + assertEquals(document.toBsonDocument.toJson(), "{\"key\": \"value\"}") + assertEquals(document.getString("key"), "value") + } + + test("documentFromScalaMap should convert Map to Document") { + val map = Map("key" -> "value") + val document = documentFromScalaMap(map) + assert(document.isInstanceOf[Document]) + assertEquals(document.toBsonDocument.toJson(), "{\"key\": 
\"value\"}") + assertEquals(document.getString("key"), "value") + } + + test("documentFromDocument should convert org.bson.Document to Document") { + val bsonDoc = new org.bson.Document("key", "value") + val document = documentFromDocument(bsonDoc) + assert(document.isInstanceOf[Document]) + assertEquals(document.toBsonDocument.toJson(), "{\"key\": \"value\"}") + assertEquals(document.getString("key"), "value") + } + + test("mapFromDocument should convert Document to Map") { + val document = Document("key" -> "value") + val map = mapFromDocument(document) + assert(map.isInstanceOf[Map[_, _]]) + assertEquals(map("key"), "value") + } + + test("mapListFromDocuments should convert List of Documents to List of Maps") { + val documents = List(Document("key" -> "value")) + val mapList = mapListFromDocuments(documents) + assert(mapList.isInstanceOf[List[_]]) + assertEquals(mapList.head("key"), "value") + } + + test("stringToObjectId should convert String to ObjectId") { + val str = "507f1f77bcf86cd799439011" + val objectId = stringToObjectId(str) + assert(objectId.isInstanceOf[ObjectId]) + assertEquals(objectId.toHexString, str) + } + + test("documentToObjectId should extract ObjectId from Document") { + val objectId = new ObjectId() + val document = Document(DatabaseProvider.ObjectIdKey -> objectId) + val extractedObjectId = documentToObjectId(document) + assertEquals(extractedObjectId, objectId) + } +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/bson/BsonConverterSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/bson/BsonConverterSpec.scala deleted file mode 100644 index 18120ada..00000000 --- a/src/test/scala/dev/mongocamp/driver/mongodb/bson/BsonConverterSpec.scala +++ /dev/null @@ -1,108 +0,0 @@ -package dev.mongocamp.driver.mongodb.bson - -import org.mongodb.scala.bson.collection.mutable -import org.mongodb.scala.bson.{ObjectId, _} -import org.specs2.mutable.Specification - -import scala.collection.mutable.ArrayBuffer -import 
dev.mongocamp.driver.mongodb._ - -/** Created by tom on 22.01.17. - */ -class BsonConverterSpec extends Specification { - - sequential - - "BsonConverter" should { - - "convert values to BSON" in { - BsonConverter.toBson(3) must be equalTo BsonInt32(3) - BsonConverter.toBson(3L) must be equalTo BsonInt64(3) - BsonConverter.toBson(3f) must be equalTo BsonDouble(3) - BsonConverter.toBson(3d) must be equalTo BsonDouble(3) - - BsonConverter.toBson(false) must be equalTo BsonBoolean(false) - BsonConverter.toBson(true) must be equalTo BsonBoolean(true) - - BsonConverter.toBson(java.math.BigDecimal.TEN) must be equalTo BsonDecimal128.apply(10) - BsonConverter.toBson(BigDecimal(10)) must be equalTo BsonDecimal128.apply(10) - BsonConverter.toBson(BigInt(10)) must be equalTo BsonInt64(10) - BsonConverter.toBson(java.math.BigInteger.TEN) must be equalTo BsonInt64(10) - - BsonConverter.toBson(Some(5)) must be equalTo BsonInt32(5) - - BsonConverter.toBson(Some(new ObjectId("5b61455932ac3f0015ae2e7e"))) must be equalTo BsonObjectId( - "5b61455932ac3f0015ae2e7e" - ) - - BsonConverter.toBson(None) must be equalTo BsonNull() - - BsonConverter.toBson('M') must be equalTo BsonString("M") - } - - "convert Map to BSON" in { - BsonConverter.toBson(Map(("test" -> 1))) must beAnInstanceOf[org.bson.BsonDocument] - BsonConverter.toBson(scala.collection.mutable.Map(("test" -> 1))) must beAnInstanceOf[org.bson.BsonDocument] - } - - "convert List to BSON" in { - BsonConverter.toBson(List(("test"))) must beAnInstanceOf[org.bson.BsonArray] - val buffer = new ArrayBuffer[String]() - buffer.+=("Test") - BsonConverter.toBson(buffer) must beAnInstanceOf[org.bson.BsonArray] - } - - "convert values from BSON" in { - BsonConverter.fromBson(BsonInt32(3)) must be equalTo 3 - BsonConverter.fromBson(BsonInt64(3)) must be equalTo 3L - - BsonConverter.fromBson(BsonDouble(3)) must be equalTo 3.0 - - } - - "evaluate dot notation" in { - val document: mutable.Document = mutable.Document() - val 
secondLevelDocument = mutable.Document() - secondLevelDocument.put("test", 42) - document.put("secondLevelDocument", secondLevelDocument) - - document.get("secondLevelDocument") must beSome - - document.get("secondLevelDocument.test") must beNone - - val v = BsonConverter.documentValueOption(Document(document.toJson()), "secondLevelDocument.test") - - true must beTrue - } - - "evaluate get with dot notation" in { - val document: mutable.Document = mutable.Document() - val secondLevelDocument = mutable.Document() - secondLevelDocument.put("test", 42) - document.put("secondLevelDocument", secondLevelDocument) - - document.get("secondLevelDocument") must beSome - - document.get("secondLevelDocument.test") must beNone - - val v = BsonConverter.documentValueOption(Document(document.toJson()), "secondLevelDocument.test") - - true must beTrue - } - - "evaluate put with dot notation" in { - val document = Document() - - var updated: Document = BsonConverter.updateDocumentValue(document, "test", 42) - - updated.getIntValue("test") mustEqual (42) - - updated = BsonConverter.updateDocumentValue(document, "test.test.test.test", 42) - - updated.getIntValue("test.test.test.test") mustEqual 42 - true must beTrue - } - - } - -} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/bson/BsonConverterSuite.scala b/src/test/scala/dev/mongocamp/driver/mongodb/bson/BsonConverterSuite.scala new file mode 100644 index 00000000..29daa259 --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/bson/BsonConverterSuite.scala @@ -0,0 +1,93 @@ +package dev.mongocamp.driver.mongodb.bson + +import com.typesafe.scalalogging.LazyLogging +import dev.mongocamp.driver.mongodb._ +import org.mongodb.scala.bson.collection.mutable +import org.mongodb.scala.bson.{ ObjectId, _ } + +import scala.collection.mutable.ArrayBuffer + +class BsonConverterSuite extends munit.FunSuite { + + test("BsonConverter convert values to BSON") { + assertEquals(BsonConverter.toBson(3), BsonInt32(3)) + 
assertEquals(BsonConverter.toBson(3L), BsonInt64(3)) + assertEquals(BsonConverter.toBson(3f), BsonDouble(3)) + assertEquals(BsonConverter.toBson(3d), BsonDouble(3)) + + assertEquals(BsonConverter.toBson(false), BsonBoolean(false)) + assertEquals(BsonConverter.toBson(true), BsonBoolean(true)) + + assertEquals(BsonConverter.toBson(java.math.BigDecimal.TEN), BsonDecimal128.apply(10)) + assertEquals(BsonConverter.toBson(BigDecimal(10)), BsonDecimal128.apply(10)) + assertEquals(BsonConverter.toBson(BigInt(10)), BsonInt64(10)) + assertEquals(BsonConverter.toBson(java.math.BigInteger.TEN), BsonInt64(10)) + + assertEquals(BsonConverter.toBson(Some(5)), BsonInt32(5)) + + assertEquals(BsonConverter.toBson(Some(new ObjectId("5b61455932ac3f0015ae2e7e"))), BsonObjectId("5b61455932ac3f0015ae2e7e")) + + assertEquals(BsonConverter.toBson(None), BsonNull()) + + assertEquals(BsonConverter.toBson('M'), BsonString("M")) + } + + test("convert Map to BSON") { + assertEquals(BsonConverter.toBson(Map("test" -> 1)).isInstanceOf[org.bson.BsonDocument], true) + assertEquals(BsonConverter.toBson(scala.collection.mutable.Map("test" -> 1)).isInstanceOf[org.bson.BsonDocument], true) + } + + test("convert List to BSON") { + assertEquals(BsonConverter.toBson(List("test")).isInstanceOf[org.bson.BsonArray], true) + val buffer = new ArrayBuffer[String]() + buffer.+=("Test") + assertEquals(BsonConverter.toBson(buffer).isInstanceOf[org.bson.BsonArray], true) + } + + test("convert values from BSON") { + assertEquals(BsonConverter.fromBson(BsonInt32(3)), 3) + assertEquals(BsonConverter.fromBson(BsonInt64(3)), 3L) + assertEquals(BsonConverter.fromBson(BsonDouble(3)), 3.0) + } + + test("evaluate dot notation") { + val document: mutable.Document = mutable.Document() + val secondLevelDocument = mutable.Document() + secondLevelDocument.put("test", 42) + document.put("secondLevelDocument", secondLevelDocument) + + assertEquals(document.get("secondLevelDocument").isDefined, true) + 
assertEquals(document.get("secondLevelDocument.test").isEmpty, true) + + val v = BsonConverter.documentValueOption(Document(document.toJson()), "secondLevelDocument.test") + assertEquals(v.isDefined, true) + + } + + test("evaluate get with dot notation") { + val document: mutable.Document = mutable.Document() + val secondLevelDocument = mutable.Document() + secondLevelDocument.put("test", 42) + document.put("secondLevelDocument", secondLevelDocument) + + assertEquals(document.get("secondLevelDocument").isDefined, true) + assertEquals(document.get("secondLevelDocument.test"), None) + + val v = BsonConverter.documentValueOption(Document(document.toJson()), "secondLevelDocument.test") + + assertEquals(v.isDefined, true) + } + + test("evaluate put with dot notation") { + val document = Document() + + var updated: Document = BsonConverter.updateDocumentValue(document, "test", 42) + + assertEquals(updated.getIntValue("test"), 42) + + updated = BsonConverter.updateDocumentValue(document, "test.test.test.test", 42) + + assertEquals(updated.getIntValue("test.test.test.test"), 42) + } + +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/bson/ConverterSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/bson/ConverterSpec.scala deleted file mode 100644 index bfe7924b..00000000 --- a/src/test/scala/dev/mongocamp/driver/mongodb/bson/ConverterSpec.scala +++ /dev/null @@ -1,32 +0,0 @@ -package dev.mongocamp.driver.mongodb.bson - -import dev.mongocamp.driver.mongodb.Converter -import org.specs2.mutable.Specification - -import scala.reflect.ClassTag - -/** Created by tom on 22.01.17. 
- */ -class ConverterSpec extends Specification { - - sequential - - "Converter" should { - - "support Document roundtrip" in { - - roundtrip[Base](Base()) - - true must beTrue - } - } - - def roundtrip[A <: AnyRef](value: A)(implicit ct: ClassTag[A]): Unit = { - val document = Converter.toDocument(value) - - value must not beNull - - value must haveClass[A] - } - -} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/bson/ConverterSuite.scala b/src/test/scala/dev/mongocamp/driver/mongodb/bson/ConverterSuite.scala new file mode 100644 index 00000000..ece51538 --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/bson/ConverterSuite.scala @@ -0,0 +1,29 @@ +package dev.mongocamp.driver.mongodb.bson + +import dev.mongocamp.driver.mongodb.Converter +import dev.mongocamp.driver.mongodb.model.Base + +class ConverterSuite extends munit.FunSuite { + + test("Converter support Document roundtrip") { + val base = Base() + val document = Converter.toDocument(base) + val integer: Int = document.getInteger("int") + val long: Long = document.getLong("Long") + val float : Float = document.getDouble("float").floatValue() + val double : Double = document.getDouble("double") + val maybeBsonValue = document.get("option") + + assertEquals(integer, base.int) + assertEquals(long, base.Long) + assertEquals(float, base.float) + assertEquals(double, base.double) + assertEquals(document.getString("string"), base.string) + assertEquals(document.getDate("date"), base.date) + assertEquals(maybeBsonValue.isDefined, true) + assertEquals(maybeBsonValue.get.asObjectId().getValue, base.option.get) + assertEquals(base != null, true) + assertEquals(base.isInstanceOf[Base], true) + } + +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/bson/DocumentHelperSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/bson/DocumentHelperSpec.scala deleted file mode 100644 index 62604c12..00000000 --- a/src/test/scala/dev/mongocamp/driver/mongodb/bson/DocumentHelperSpec.scala +++ 
/dev/null @@ -1,24 +0,0 @@ -package dev.mongocamp.driver.mongodb.bson - -import better.files.{File, Resource} -import org.specs2.mutable.Specification - -/** Created by tom on 22.01.17. - */ -class DocumentHelperSpec extends Specification { - - sequential - - "DocumentHelper" should { - - "create Document" in { - val lines = File(Resource.getUrl("json/people.json")).lines - - val document = DocumentHelper.documentFromJsonString(lines.head) - - document must beSome() - - } - } - -} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/bson/DocumentHelperSuite.scala b/src/test/scala/dev/mongocamp/driver/mongodb/bson/DocumentHelperSuite.scala new file mode 100644 index 00000000..9785483b --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/bson/DocumentHelperSuite.scala @@ -0,0 +1,16 @@ +package dev.mongocamp.driver.mongodb.bson + +import better.files.{ File, Resource } + +/** Created by tom on 22.01.17. + */ +class DocumentHelperSuite extends munit.FunSuite { + test("DocumentHelper should create Document") { + val lines = File(Resource.getUrl("json/people.json")).lines + + val document = DocumentHelper.documentFromJsonString(lines.head) + + assert(document.isDefined) + } + +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/bson/JodaConverterPlugin.scala b/src/test/scala/dev/mongocamp/driver/mongodb/bson/JodaConverterPlugin.scala index 8d435657..2ef66330 100644 --- a/src/test/scala/dev/mongocamp/driver/mongodb/bson/JodaConverterPlugin.scala +++ b/src/test/scala/dev/mongocamp/driver/mongodb/bson/JodaConverterPlugin.scala @@ -1,8 +1,8 @@ package dev.mongocamp.driver.mongodb.bson import org.bson.BsonValue -import org.joda.time.{DateTime, Duration} -import org.mongodb.scala.bson.{BsonDateTime, BsonNull, BsonString} +import org.joda.time.{ DateTime, Duration } +import org.mongodb.scala.bson.{ BsonDateTime, BsonNull, BsonString } class JodaConverterPlugin extends AbstractConverterPlugin { override def customClassList: List[Class[_]] = 
List(classOf[DateTime], classOf[Duration]) diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/bson/JodaConverterPluginSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/bson/JodaConverterPluginSpec.scala deleted file mode 100644 index 99210833..00000000 --- a/src/test/scala/dev/mongocamp/driver/mongodb/bson/JodaConverterPluginSpec.scala +++ /dev/null @@ -1,36 +0,0 @@ -package dev.mongocamp.driver.mongodb.bson - -import org.joda.time.DateTime -import org.specs2.mutable.Specification -import org.specs2.specification.BeforeAfterAll - -import scala.concurrent.duration.Duration - -class JodaConverterPluginSpec extends Specification with BeforeAfterAll { - - sequential - - "JodaConverterPlugin" should { - - "convert joda dates to bson dates" in { - val dateTime = new DateTime("2023-11-02") - val bsonDocument = BsonConverter.toBson(dateTime) - (bsonDocument.toString must be).equalTo("BsonDateTime{value=1698879600000}") - } - - "convert joda duration to bson string" in { - val duration = org.joda.time.Duration.standardDays(1) - val bsonDocument = BsonConverter.toBson(duration) - (bsonDocument.toString must be).equalTo("BsonString{value='86400000ms'}") - (Duration("86400000ms").toMillis must be).equalTo(duration.getMillis) - } - - } - - override def beforeAll(): Unit = { - BsonConverter.converterPlugin = new JodaConverterPlugin() - } - override def afterAll(): Unit = { - BsonConverter.converterPlugin = new BaseConverterPlugin() - } -} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/bson/JodaConverterPluginSuite.scala b/src/test/scala/dev/mongocamp/driver/mongodb/bson/JodaConverterPluginSuite.scala new file mode 100644 index 00000000..1df89707 --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/bson/JodaConverterPluginSuite.scala @@ -0,0 +1,30 @@ +package dev.mongocamp.driver.mongodb.bson + +import org.joda.time.DateTime + +import scala.concurrent.duration.Duration + +class JodaConverterPluginSuite extends munit.FunSuite { + + test("convert 
joda dates to bson dates") { + val dateTime = new DateTime("2023-11-02") + val bsonDocument = BsonConverter.toBson(dateTime) + val roundTripDate = new DateTime(bsonDocument.asDateTime().getValue) + assertEquals(roundTripDate, dateTime) + } + + test("convert joda duration to bson string") { + val duration = org.joda.time.Duration.standardDays(1) + val bsonDocument = BsonConverter.toBson(duration) + assertEquals(bsonDocument.toString, "BsonString{value='86400000ms'}") + assertEquals(Duration("86400000ms").toMillis, duration.getMillis) + } + + override def beforeAll(): Unit = { + BsonConverter.converterPlugin = new JodaConverterPlugin() + } + + override def afterAll(): Unit = { + BsonConverter.converterPlugin = new BaseConverterPlugin() + } +} \ No newline at end of file diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/dao/PersonSpecification.scala b/src/test/scala/dev/mongocamp/driver/mongodb/dao/BasePersonSuite.scala similarity index 58% rename from src/test/scala/dev/mongocamp/driver/mongodb/dao/PersonSpecification.scala rename to src/test/scala/dev/mongocamp/driver/mongodb/dao/BasePersonSuite.scala index 216015dd..485d038b 100644 --- a/src/test/scala/dev/mongocamp/driver/mongodb/dao/PersonSpecification.scala +++ b/src/test/scala/dev/mongocamp/driver/mongodb/dao/BasePersonSuite.scala @@ -1,14 +1,10 @@ package dev.mongocamp.driver.mongodb.dao -import better.files.{File, Resource} -import dev.mongocamp.driver.mongodb.test.TestDatabase._ +import better.files.{ File, Resource } import dev.mongocamp.driver.mongodb._ -import org.specs2.mutable.Specification -import org.specs2.specification.BeforeAll - -abstract class PersonSpecification extends Specification with BeforeAll { +import dev.mongocamp.driver.mongodb.test.TestDatabase._ - sequential +abstract class BasePersonSuite extends munit.FunSuite { override def beforeAll(): Unit = { PersonDAO.drop().result() diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/dao/BookDAOSpec.scala 
b/src/test/scala/dev/mongocamp/driver/mongodb/dao/BookDAOSpec.scala deleted file mode 100644 index 50256b7d..00000000 --- a/src/test/scala/dev/mongocamp/driver/mongodb/dao/BookDAOSpec.scala +++ /dev/null @@ -1,74 +0,0 @@ -package dev.mongocamp.driver.mongodb.dao - -import better.files.{File, Resource} -import dev.mongocamp.driver.mongodb.Filter._ -import dev.mongocamp.driver.mongodb._ -import dev.mongocamp.driver.mongodb.database.DatabaseProvider -import dev.mongocamp.driver.mongodb.test.TestDatabase.BookDAO -import org.mongodb.scala.bson.conversions.Bson -import org.mongodb.scala.model.Aggregates.{filter, group, project} -import org.mongodb.scala.model.Filters.and -import org.mongodb.scala.model.Projections -import org.mongodb.scala.model.Updates._ -import org.specs2.mutable.Specification -import org.specs2.specification.BeforeAll - -import java.text.SimpleDateFormat -import java.util.Date - -class BookDAOSpec extends Specification with BeforeAll { - val DateFormat = new SimpleDateFormat("yyyy-MM-dd") - val From: Date = DateFormat.parse("2000-01-01") - - override def beforeAll(): Unit = { - BookDAO.drop().result() - BookDAO.importJsonFile(File(Resource.getUrl("json/books.json"))).result() - - val stats = BookDAO.collectionStatus.result() - stats.count mustEqual 431 - - } - - "BookDAO" should { - "support count" in { - val count: Long = BookDAO.count().result() - count mustEqual 431 - } - - "support columnNames" in { - val columnNames = BookDAO.columnNames() - columnNames.size mustEqual 11 - } - } - - "BookDAO Aggregation" should { - "support filter in" in { - val projectStage = project(Projections.include("categories")) - - val categoryFilter = valueFilter("categories", "Programming") - val dateFilter = dateInRangeFilter("publishedDate", From, new Date()) - - val filterStage = filter(and(categoryFilter, dateFilter)) - val pipeline = List(filterStage, projectStage) - - val list = BookDAO.Raw.findAggregated(pipeline).resultList() - list must haveSize(8) - } - - 
"support filter in" in { - - val groupStage: Bson = - group("$categories", Field.sumField("pageCount"), Field.minField("pageCount"), Field.maxField("pageCount")) - - val pipeline = List(groupStage) - - val list = BookDAO.Raw.findAggregated(pipeline).resultList().map(doc => doc.asPlainMap) - list must haveSize(58) - } - - "update one" in { - BookDAO.updateOne(Map(DatabaseProvider.ObjectIdKey -> 10), set("title", "new title")).result() - BookDAO.find(DatabaseProvider.ObjectIdKey, 10).result().title mustEqual "new title" - } - } -} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/dao/BookDAOSuite.scala b/src/test/scala/dev/mongocamp/driver/mongodb/dao/BookDAOSuite.scala new file mode 100644 index 00000000..a270a598 --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/dao/BookDAOSuite.scala @@ -0,0 +1,66 @@ +package dev.mongocamp.driver.mongodb.dao + +import better.files.{ File, Resource } +import dev.mongocamp.driver.mongodb._ +import dev.mongocamp.driver.mongodb.Filter._ +import dev.mongocamp.driver.mongodb.database.DatabaseProvider +import dev.mongocamp.driver.mongodb.test.TestDatabase.BookDAO +import org.mongodb.scala.bson.conversions.Bson +import org.mongodb.scala.model.Aggregates.{ filter, group, project } +import org.mongodb.scala.model.Filters.and +import org.mongodb.scala.model.Projections +import org.mongodb.scala.model.Updates._ + +import java.text.SimpleDateFormat +import java.util.Date + +class BookDAOSuite extends munit.FunSuite { + val DateFormat = new SimpleDateFormat("yyyy-MM-dd") + val From: Date = DateFormat.parse("2000-01-01") + + override def beforeAll(): Unit = { + BookDAO.drop().result() + BookDAO.importJsonFile(File(Resource.getUrl("json/books.json"))).result() + + val stats = BookDAO.collectionStatus.result() + assertEquals(stats.count, 431) + } + + test("BookDAO should support count") { + val count: Long = BookDAO.count().result() + assertEquals(count, 431L) + } + + test("BookDAO should support columnNames") { + val 
columnNames = BookDAO.columnNames() + assertEquals(columnNames.size, 11) + } + + test("BookDAO Aggregation should support filter in") { + val projectStage = project(Projections.include("categories")) + + val categoryFilter = valueFilter("categories", "Programming") + val dateFilter = dateInRangeFilter("publishedDate", From, new Date()) + + val filterStage = filter(and(categoryFilter, dateFilter)) + val pipeline = List(filterStage, projectStage) + + val list = BookDAO.Raw.findAggregated(pipeline).resultList() + assertEquals(list.size, 8) + } + + test("BookDAO Aggregation should support group by categories") { + val groupStage: Bson = + group("$categories", Field.sumField("pageCount"), Field.minField("pageCount"), Field.maxField("pageCount")) + + val pipeline = List(groupStage) + + val list = BookDAO.Raw.findAggregated(pipeline).resultList().map(doc => doc.asPlainMap) + assertEquals(list.size, 58) + } + + test("BookDAO should update one") { + BookDAO.updateOne(Map(DatabaseProvider.ObjectIdKey -> 10), set("title", "new title")).result() + assertEquals(BookDAO.find(DatabaseProvider.ObjectIdKey, 10).result().title, "new title") + } +} \ No newline at end of file diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/dao/PersonDAOSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/dao/PersonDAOSpec.scala deleted file mode 100644 index 04b2fcbf..00000000 --- a/src/test/scala/dev/mongocamp/driver/mongodb/dao/PersonDAOSpec.scala +++ /dev/null @@ -1,50 +0,0 @@ -package dev.mongocamp.driver.mongodb.dao - -import dev.mongocamp.driver.MongoImplicits -import dev.mongocamp.driver.mongodb.model.Person -import dev.mongocamp.driver.mongodb.test.TestDatabase.PersonDAO - -import java.util.concurrent.TimeUnit -import scala.concurrent.ExecutionContext.Implicits.global -import scala.concurrent.duration.Duration -import scala.concurrent.{Await, Future} - -class PersonDAOSpec extends PersonSpecification with MongoImplicits { - - "PersonDAO" should { - - "support count" in { - val 
count: Long = PersonDAO.count() - count mustEqual 200 - } - - "support columnNames" in { - val columnNames = PersonDAO.columnNames(200) - columnNames.size mustEqual 20 - } - - "support results" in { - val seq: Seq[Person] = PersonDAO.find() - seq must haveSize(200) - } - - "support resultList" in { - val list: List[Person] = PersonDAO.find() - list must haveSize(200) - } - - "support resultList" in { - val option: Option[Person] = PersonDAO.find("id", 42) - option must not beEmpty - } - - "support asFuture" in { - val future: Future[Seq[Person]] = PersonDAO.find().asFuture() - val mapped: Future[Seq[String]] = future.map(personSeq => personSeq.map(p => p.name)) - val names: Seq[String] = Await.result(mapped, Duration(10, TimeUnit.SECONDS)) - names must haveSize(200) - - } - - } -} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/dao/PersonDAOSuite.scala b/src/test/scala/dev/mongocamp/driver/mongodb/dao/PersonDAOSuite.scala new file mode 100644 index 00000000..72b3b781 --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/dao/PersonDAOSuite.scala @@ -0,0 +1,45 @@ +package dev.mongocamp.driver.mongodb.dao + +import dev.mongocamp.driver.MongoImplicits +import dev.mongocamp.driver.mongodb.model.Person +import dev.mongocamp.driver.mongodb.test.TestDatabase.PersonDAO + +import java.util.concurrent.TimeUnit +import scala.concurrent.ExecutionContext.Implicits.global +import scala.concurrent.duration.Duration +import scala.concurrent.{ Await, Future } + +class PersonDAOSuite extends BasePersonSuite with MongoImplicits { + + test("support count") { + val count: Long = PersonDAO.count() + assertEquals(count, 200L) + } + + test("support columnNames") { + val columnNames = PersonDAO.columnNames(200) + assertEquals(columnNames.size, 20) + } + + test("support results") { + val seq: Seq[Person] = PersonDAO.find() + assertEquals(seq.size, 200) + } + + test("support resultList") { + val list: List[Person] = PersonDAO.find() + assertEquals(list.size, 200) + } + + 
test("support resultList with id") { + val option: Option[Person] = PersonDAO.find("id", 42) + assert(option.isDefined) + } + + test("support asFuture") { + val future: Future[Seq[Person]] = PersonDAO.find().asFuture() + val mapped: Future[Seq[String]] = future.map(personSeq => personSeq.map(p => p.name)) + val names: Seq[String] = Await.result(mapped, Duration(10, TimeUnit.SECONDS)) + assertEquals(names.size, 200) + } +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/dao/StudentDAOSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/dao/StudentDAOSpec.scala deleted file mode 100644 index 04f162a3..00000000 --- a/src/test/scala/dev/mongocamp/driver/mongodb/dao/StudentDAOSpec.scala +++ /dev/null @@ -1,48 +0,0 @@ -package dev.mongocamp.driver.mongodb.dao - -import better.files.{File, Resource} -import dev.mongocamp.driver.MongoImplicits -import dev.mongocamp.driver.mongodb.model.Student -import dev.mongocamp.driver.mongodb.server.LocalServer -import dev.mongocamp.driver.mongodb.test.UniversityDatabase -import dev.mongocamp.driver.mongodb.test.UniversityDatabase.{GradeDAO, StudentDAO} -import org.specs2.mutable.Specification -import org.specs2.specification.{AfterAll, BeforeAll} - -class StudentDAOSpec extends Specification with BeforeAll with AfterAll with MongoImplicits { - sequential - - override def beforeAll(): Unit = { - UniversityDatabase.LocalTestServer = LocalServer.fromPath("unit.test.local.mongo.server") - StudentDAO.drop().result() - StudentDAO.importJsonFile(File(Resource.getUrl("json/university/students.json"))).result() - GradeDAO.drop().result() - GradeDAO.importJsonFile(File(Resource.getUrl("json/university/grades.json"))).result() - } - - override def afterAll(): Unit = - UniversityDatabase.LocalTestServer.shutdown() - - "StudentDAO" should { - "support count" in { - StudentDAO.name mustEqual "university-students" - StudentDAO.databaseName mustEqual "mongocamp-unit-test" - } - - "support count" in { - val students: Long = 
StudentDAO.count() - students mustEqual 200 - - val grades: Long = GradeDAO.count() - grades mustEqual 280 - - } - - "support count" in { - val student: Option[Student] = StudentDAO.find("name", "Aurelia Menendez") - student.get.scores must haveSize(3) - - } - } - -} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/dao/StudentDAOSuite.scala b/src/test/scala/dev/mongocamp/driver/mongodb/dao/StudentDAOSuite.scala new file mode 100644 index 00000000..b919525e --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/dao/StudentDAOSuite.scala @@ -0,0 +1,43 @@ +package dev.mongocamp.driver.mongodb.dao + +import better.files.{ File, Resource } +import dev.mongocamp.driver.MongoImplicits +import dev.mongocamp.driver.mongodb.model.Student +import dev.mongocamp.driver.mongodb.server.LocalServer +import dev.mongocamp.driver.mongodb.test.UniversityDatabase +import dev.mongocamp.driver.mongodb.test.UniversityDatabase.{ GradeDAO, StudentDAO } +import munit.FunSuite + +class StudentDAOSuite extends FunSuite with MongoImplicits { + + override def beforeAll(): Unit = { + UniversityDatabase.LocalTestServer = LocalServer.fromPath("unit.test.local.mongo.server") + StudentDAO.drop().result() + StudentDAO.importJsonFile(File(Resource.getUrl("json/university/students.json"))).result() + GradeDAO.drop().result() + GradeDAO.importJsonFile(File(Resource.getUrl("json/university/grades.json"))).result() + } + + override def afterAll(): Unit = + UniversityDatabase.LocalTestServer.shutdown() + + test("StudentDAO should support count") { + assertEquals(StudentDAO.name, "universityStudents") + assertEquals(StudentDAO.databaseName, "mongocamp-unit-test") + } + + test("StudentDAO should support count of students and grades") { + val students: Long = StudentDAO.count() + assertEquals(students, 200L) + + val grades: Long = GradeDAO.count() + assertEquals(grades, 280L) + } + + test("StudentDAO should support finding a student by name") { + val student: Option[Student] = 
StudentDAO.find("name", "Aurelia Menendez") + assert(student.isDefined) + assertEquals(student.get.scores.size, 3) + } + +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/database/DatabaseProviderSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/database/DatabaseProviderSpec.scala deleted file mode 100644 index 804626fd..00000000 --- a/src/test/scala/dev/mongocamp/driver/mongodb/database/DatabaseProviderSpec.scala +++ /dev/null @@ -1,62 +0,0 @@ -package dev.mongocamp.driver.mongodb.database - -import better.files.{File, Resource} -import dev.mongocamp.driver.mongodb._ -import dev.mongocamp.driver.mongodb.dao.PersonSpecification -import dev.mongocamp.driver.mongodb.test.TestDatabase._ -import org.mongodb.scala.Document - -class DatabaseProviderSpec extends PersonSpecification { - - "Database provider" should { - - "must evaluate databaseNames" in { - val names = provider.databaseNames - names must contain("mongocamp-unit-test") - } - - "must evaluate collectionNames" in { - val names = provider.collectionNames() - names must contain("people") - } - - "must evaluate mongo-dao by name" in { - val dao = provider.dao("people") - val count: Long = dao.count().result() - count mustEqual 200 - } - - "must evaluate mongo-dao by name in different database" in { - val dao = provider.dao("mongocamp-unit-test-2:people") - val databaseName = dao.databaseName - databaseName mustEqual "mongocamp-unit-test-2" - dao.name mustEqual "people" - provider.dropDatabase(databaseName).result() - - dao.importJsonFile(File(Resource.getUrl("json/people.json"))).result() - val count: Long = dao.count().result() - count mustEqual 200 - } - - "must evaluate buildInfo" in { - - val result: Document = provider.runCommand(Map("buildInfo" -> 1)).result() - - result.getDouble("ok") mustEqual 1.0 - } - - "must evaluate collection status" in { - val status: Option[CollectionStatus] = provider.collectionStatus("people").resultOption() - status must beSome() - status.get.ns mustEqual 
"mongocamp-unit-test.people" - } - - "must add ChangeObserver" in { - val observer = ChangeObserver(consumeDatabaseChanges) - // todo enable change log at test machine - provider.addChangeObserver(observer) - true must beTrue - } - - } -} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/database/DatabaseProviderSuite.scala b/src/test/scala/dev/mongocamp/driver/mongodb/database/DatabaseProviderSuite.scala new file mode 100644 index 00000000..e9432fed --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/database/DatabaseProviderSuite.scala @@ -0,0 +1,58 @@ +package dev.mongocamp.driver.mongodb.database + +import better.files.{ File, Resource } +import dev.mongocamp.driver.mongodb._ +import dev.mongocamp.driver.mongodb.dao.BasePersonSuite +import dev.mongocamp.driver.mongodb.test.TestDatabase._ +import org.mongodb.scala.Document + +class DatabaseProviderSuite extends BasePersonSuite { + + test("must evaluate databaseNames") { + val names = provider.databaseNames + assert(names.contains("mongocamp-unit-test")) + } + + test("must evaluate collectionNames") { + val names = provider.collectionNames() + assert(names.contains("people")) + } + + test("must evaluate mongo-dao by name") { + // #region document-dao + val dao = provider.dao("people") + val count: Long = dao.count().result() + // #endregion document-dao + assertEquals(count, 200L) + } + + test("must evaluate mongo-dao by name in different database") { + val dao = provider.dao("mongocamp-unit-test-2:people") + val databaseName = dao.databaseName + assertEquals(databaseName, "mongocamp-unit-test-2") + assertEquals(dao.name, "people") + provider.dropDatabase(databaseName).result() + + dao.importJsonFile(File(Resource.getUrl("json/people.json"))).result() + val count: Long = dao.count().result() + assertEquals(count, 200L) + } + + test("must evaluate buildInfo") { + val result: Document = provider.runCommand(Map("buildInfo" -> 1)).result() + val double : Double = result.getDouble("ok") + 
assertEquals(double, 1.0) + } + + test("must evaluate collection status") { + val status: Option[CollectionStatus] = provider.collectionStatus("people").resultOption() + assertEquals(status.isDefined, true) + assertEquals(status.get.ns, "mongocamp-unit-test.people") + } + + test("must add ChangeObserver") { + val observer = ChangeObserver(consumeDatabaseChanges) + provider.addChangeObserver(observer) + } + +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/database/DocumentExtensionsSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/database/DocumentExtensionsSpec.scala deleted file mode 100644 index 43b79ad8..00000000 --- a/src/test/scala/dev/mongocamp/driver/mongodb/database/DocumentExtensionsSpec.scala +++ /dev/null @@ -1,28 +0,0 @@ -package dev.mongocamp.driver.mongodb.database - -import dev.mongocamp.driver.mongodb._ -import dev.mongocamp.driver.mongodb.dao.PersonSpecification -import dev.mongocamp.driver.mongodb.model.Person -import dev.mongocamp.driver.mongodb.test.TestDatabase.PersonDAO - -import org.mongodb.scala.Document - -class DocumentExtensionsSpec extends PersonSpecification { - - "Document" should { - - "be converted to plain scala map" in { - val document: Document = PersonDAO.Raw.find(Map("id" -> 11)).result() - - val map: Map[String, Any] = document.asPlainMap - map("id") mustEqual 11 - - val tags = map("tags").asInstanceOf[List[String]] - tags must haveSize(7) - - tags.head mustEqual "occaecat" - } - - } - -} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/database/DocumentExtensionsSuite.scala b/src/test/scala/dev/mongocamp/driver/mongodb/database/DocumentExtensionsSuite.scala new file mode 100644 index 00000000..ee9e2394 --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/database/DocumentExtensionsSuite.scala @@ -0,0 +1,22 @@ +package dev.mongocamp.driver.mongodb.database + +import dev.mongocamp.driver.mongodb._ +import dev.mongocamp.driver.mongodb.dao.BasePersonSuite +import 
dev.mongocamp.driver.mongodb.test.TestDatabase.PersonDAO +import org.mongodb.scala.Document + +class DocumentExtensionsSuite extends BasePersonSuite { + + test("Document should be converted to plain scala map") { + val document: Document = PersonDAO.Raw.find(Map("id" -> 11)).result() + + val map: Map[String, Any] = document.asPlainMap + assertEquals(map("id"), 11) + + val tags = map("tags").asInstanceOf[List[String]] + assertEquals(tags.size, 7) + + assertEquals(tags.head, "occaecat") + } + +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/database/MongoConfigSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/database/MongoConfigSpec.scala deleted file mode 100644 index 36bf1501..00000000 --- a/src/test/scala/dev/mongocamp/driver/mongodb/database/MongoConfigSpec.scala +++ /dev/null @@ -1,68 +0,0 @@ -package dev.mongocamp.driver.mongodb.database - -import org.specs2.mutable.Specification - -class MongoConfigSpec extends Specification { - - sequential - - "MongoConfig" should { - - "be created by database name " in { - val config = MongoConfig("config_test") - config.database must beEqualTo("config_test") - val shortDescription = - "{hosts=[127.0.0.1:27017], mode=SINGLE, requiredClusterType=UNKNOWN, serverSelectionTimeout='30000 ms'}" - config.clientSettings.getClusterSettings.getShortDescription must be equalTo shortDescription - config.clientSettings.getApplicationName must be equalTo "mongocampdb-app" - config.clientSettings.getClusterSettings.getHosts.size() must be equalTo 1 - config.clientSettings.getConnectionPoolSettings.getMinSize must be equalTo 0 - config.clientSettings.getConnectionPoolSettings.getMaxSize must be equalTo 50 - } - - "be created with Properties " in { - - val config = MongoConfig("config_test", host = "localhost", applicationName = "Awesome Application Name") - - config.database must beEqualTo("config_test") - val shortDescription = - "{hosts=[localhost:27017], mode=SINGLE, requiredClusterType=UNKNOWN, 
serverSelectionTimeout='30000 ms'}" - config.clientSettings.getClusterSettings.getShortDescription must be equalTo shortDescription - config.clientSettings.getApplicationName must be equalTo "Awesome Application Name" - config.clientSettings.getClusterSettings.getHosts.size() must be equalTo 1 - config.clientSettings.getConnectionPoolSettings.getMinSize must be equalTo 0 - config.clientSettings.getConnectionPoolSettings.getMaxSize must be equalTo 50 - } - - "be created by config " in { - val config = MongoConfig.fromPath("config.test.mongo") - config.database must beEqualTo("mongocamp-unit-test") - val shortDescription = - "{hosts=[localhost:270007], mode=SINGLE, requiredClusterType=UNKNOWN, serverSelectionTimeout='30000 ms'}" - config.clientSettings.getClusterSettings.getShortDescription must be equalTo shortDescription - config.clientSettings.getApplicationName must be equalTo "mongocamp-config-test" - config.clientSettings.getClusterSettings.getHosts.size() must be equalTo 1 - config.clientSettings.getConnectionPoolSettings.getMinSize must be equalTo 5 - config.clientSettings.getConnectionPoolSettings.getMaxSize must be equalTo 100 - config.clientSettings.getCredential must beNull - - } - - "be created by config with auth " in { - val config = MongoConfig.fromPath("config.test.auth.mongo") - config.database must beEqualTo("mongocamp-unit-test") - val shortDescription = - "{hosts=[localhost:270007], mode=SINGLE, requiredClusterType=UNKNOWN, serverSelectionTimeout='30000 ms'}" - config.clientSettings.getClusterSettings.getShortDescription must be equalTo shortDescription - config.clientSettings.getApplicationName must be equalTo "mongocamp-config-test-with-auth" - config.clientSettings.getClusterSettings.getHosts.size() must be equalTo 1 - config.clientSettings.getConnectionPoolSettings.getMinSize must be equalTo 5 - config.clientSettings.getConnectionPoolSettings.getMaxSize must be equalTo 100 - config.clientSettings.getCredential.getUserName must be equalTo 
"admin_user" - config.clientSettings.getCredential.getPassword must not beEmpty - - } - - } - -} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/database/MongoConfigSuite.scala b/src/test/scala/dev/mongocamp/driver/mongodb/database/MongoConfigSuite.scala new file mode 100644 index 00000000..2e8a2f9a --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/database/MongoConfigSuite.scala @@ -0,0 +1,54 @@ +package dev.mongocamp.driver.mongodb.database + +import munit.FunSuite + +class MongoConfigSuite extends FunSuite { + + test("MongoConfig should be created by database name") { + val config = MongoConfig("config_test") + assertEquals(config.database, "config_test") + val shortDescription = "{hosts=[127.0.0.1:27017], mode=SINGLE, requiredClusterType=UNKNOWN, serverSelectionTimeout='30000 ms'}" + assertEquals(config.clientSettings.getClusterSettings.getShortDescription, shortDescription) + assertEquals(config.clientSettings.getApplicationName, "mongocampdb-app") + assertEquals(config.clientSettings.getClusterSettings.getHosts.size(), 1) + assertEquals(config.clientSettings.getConnectionPoolSettings.getMinSize, 0) + assertEquals(config.clientSettings.getConnectionPoolSettings.getMaxSize, 50) + } + + test("MongoConfig should be created with Properties") { + val config = MongoConfig("config_test", host = "localhost", applicationName = "Awesome Application Name") + assertEquals(config.database, "config_test") + val shortDescription = "{hosts=[localhost:27017], mode=SINGLE, requiredClusterType=UNKNOWN, serverSelectionTimeout='30000 ms'}" + assertEquals(config.clientSettings.getClusterSettings.getShortDescription, shortDescription) + assertEquals(config.clientSettings.getApplicationName, "Awesome Application Name") + assertEquals(config.clientSettings.getClusterSettings.getHosts.size(), 1) + assertEquals(config.clientSettings.getConnectionPoolSettings.getMinSize, 0) + assertEquals(config.clientSettings.getConnectionPoolSettings.getMaxSize, 50) + } + + 
test("MongoConfig should be created by config") { + val config = MongoConfig.fromPath("config.test.mongo") + assertEquals(config.database, "mongocamp-unit-test") + val shortDescription = "{hosts=[localhost:270007], mode=SINGLE, requiredClusterType=UNKNOWN, serverSelectionTimeout='30000 ms'}" + assertEquals(config.clientSettings.getClusterSettings.getShortDescription, shortDescription) + assertEquals(config.clientSettings.getApplicationName, "mongocamp-config-test") + assertEquals(config.clientSettings.getClusterSettings.getHosts.size(), 1) + assertEquals(config.clientSettings.getConnectionPoolSettings.getMinSize, 5) + assertEquals(config.clientSettings.getConnectionPoolSettings.getMaxSize, 100) + assertEquals(Option(config.clientSettings.getCredential), None) + } + + test("MongoConfig should be created by config with auth") { + val config = MongoConfig.fromPath("config.test.auth.mongo") + assertEquals(config.database, "mongocamp-unit-test") + val shortDescription = "{hosts=[localhost:270007], mode=SINGLE, requiredClusterType=UNKNOWN, serverSelectionTimeout='30000 ms'}" + assertEquals(config.clientSettings.getClusterSettings.getShortDescription, shortDescription) + assertEquals(config.clientSettings.getApplicationName, "mongocamp-config-test-with-auth") + assertEquals(config.clientSettings.getClusterSettings.getHosts.size(), 1) + assertEquals(config.clientSettings.getConnectionPoolSettings.getMinSize, 5) + assertEquals(config.clientSettings.getConnectionPoolSettings.getMaxSize, 100) + assertEquals(config.clientSettings.getCredential.getUserName, "admin_user") + assertNotEquals(Option(config.clientSettings.getCredential.getPassword), None) + assertNotEquals(config.clientSettings.getCredential.getPassword.length, 0) + } +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/gridfs/GridFSDatabaseSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/gridfs/GridFSDatabaseSpec.scala deleted file mode 100644 index ec47d97f..00000000 --- 
a/src/test/scala/dev/mongocamp/driver/mongodb/gridfs/GridFSDatabaseSpec.scala +++ /dev/null @@ -1,90 +0,0 @@ -package dev.mongocamp.driver.mongodb.gridfs - -import better.files.File -import dev.mongocamp.driver.mongodb._ -import dev.mongocamp.driver.mongodb.model.ImageMetadata -import dev.mongocamp.driver.mongodb.test.TestDatabase._ -import org.bson.types.ObjectId -import org.specs2.mutable.Specification -import org.specs2.specification.BeforeAll - -class GridFSDatabaseSpec extends Specification with GridfsDatabaseFunctions with BeforeAll { - - "GridFSDatabase" should { - - "find file in" in { - val fileName = "scala-logo.jpg" - - val file = findImage("filename", fileName) - file.getFilename must be equalTo fileName - - } - - "insert file and in" in { - val fileName = "scala-logo.png" - - val filePath = ImageDAOSourcePath + fileName - val uploadBytes = File(filePath).bytes.toList - val oid: ObjectId = insertImage(filePath, ImageMetadata("template1", group = "templates")) - - val file = findImage(oid) - file.getFilename must be equalTo fileName - file.getMetadata.get("name").toString must be equalTo "template1" - - ImageFilesDAO.renameFile(oid, "test.png").result() - findImage(oid).getFilename must be equalTo "test.png" - - val downloadedFile = File.newTemporaryFile(suffix = fileName) - val result: Long = downloadImage(oid, downloadedFile.toString()) - - result must not be equalTo(-1) - - downloadedFile.exists must beTrue - - val downloadBytes = downloadedFile.bytes.toList - - downloadBytes.size must be equalTo uploadBytes.size - - downloadBytes must be equalTo uploadBytes - - } - - "update metadata in" in { - - val files = findImages("group", "logos") - files must haveSize(1) - files.head.getMetadata.get("name").toString must be equalTo "logo2" - - // update complete metadata for one file - updateMetadata(files.head, ImageMetadata("logo22")) - // update metadata entry for all files - updateMetadataElements(Map(), Map("group" -> "logos3", "newKey" -> 
"newEntryValue")) - - val file = findImage(files.head) - file.getMetadata.get("name").toString must be equalTo "logo22" - file.getMetadata.get("newKey").toString must be equalTo "newEntryValue" - - } - - "find stats in file in" in { - val fileStats = ImageFilesDAO.fileCollectionStatus.result() - val chunkStats = ImageFilesDAO.chunkCollectionStats.result() - - fileStats.count must be greaterThan 0 - chunkStats.storageSize must be greaterThan 0 - - } - - } - - override def beforeAll(): Unit = { - dropImages() - insertImage(ImageDAOSourcePath + "scala-logo.jpg", ImageMetadata("logo2", indexSet = Set(5, 6, 7))) - imagesCount must be equalTo 1 - - val file = File(ImageDAOTargetPath) - if (!file.exists) { - file.createDirectory() - } - } -} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/gridfs/GridFSDatabaseSuite.scala b/src/test/scala/dev/mongocamp/driver/mongodb/gridfs/GridFSDatabaseSuite.scala new file mode 100644 index 00000000..17dfaacb --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/gridfs/GridFSDatabaseSuite.scala @@ -0,0 +1,79 @@ +package dev.mongocamp.driver.mongodb.gridfs + +import better.files.File +import dev.mongocamp.driver.mongodb._ +import dev.mongocamp.driver.mongodb.model.ImageMetadata +import dev.mongocamp.driver.mongodb.test.TestDatabase._ +import org.bson.types.ObjectId + +class GridFSDatabaseSuite extends munit.FunSuite with GridfsDatabaseFunctions { + + test("find file") { + val fileName = "scala-logo.jpg" + + val file = findImage("filename", fileName) + assertEquals(file.getFilename, fileName) + } + + test("insert file and check") { + val fileName = "scala-logo.png" + + val filePath = ImageDAOSourcePath + fileName + val uploadBytes = File(filePath).bytes.toList + val oid: ObjectId = insertImage(filePath, ImageMetadata("template1", group = "templates")) + + val file = findImage(oid) + assertEquals(file.getFilename, fileName) + assertEquals(file.getMetadata.get("name").toString, "template1") + + 
ImageFilesDAO.renameFile(oid, "test.png").result() + assertEquals(findImage(oid).getFilename, "test.png") + + val downloadedFile = File.newTemporaryFile(suffix = fileName) + val result: Long = downloadImage(oid, downloadedFile.toString()) + + assert(result != -1) + + assert(downloadedFile.exists) + + val downloadBytes = downloadedFile.bytes.toList + + assertEquals(downloadBytes.size, uploadBytes.size) + assertEquals(downloadBytes, uploadBytes) + } + + test("update metadata") { + val files = findImages("group", "logos") + assertEquals(files.size, 1) + assertEquals(files.head.getMetadata.get("name").toString, "logo2") + + // update complete metadata for one file + updateMetadata(files.head, ImageMetadata("logo22")) + // update metadata entry for all files + updateMetadataElements(Map(), Map("group" -> "logos3", "newKey" -> "newEntryValue")) + + val file = findImage(files.head) + assertEquals(file.getMetadata.get("name").toString, "logo22") + assertEquals(file.getMetadata.get("newKey").toString, "newEntryValue") + } + + test("find stats in file") { + val fileStats = ImageFilesDAO.fileCollectionStatus.result() + val chunkStats = ImageFilesDAO.chunkCollectionStats.result() + + assert(fileStats.count > 0) + assert(chunkStats.storageSize > 0) + } + + override def beforeAll(): Unit = { + dropImages() + insertImage(ImageDAOSourcePath + "scala-logo.jpg", ImageMetadata("logo2", indexSet = Set(5, 6, 7))) + assertEquals(imagesCount, 1L) + + val file = File(ImageDAOTargetPath) + if (!file.exists) { + file.createDirectory() + } + } + +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/BaseJdbcSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/BaseJdbcSuite.scala similarity index 66% rename from src/test/scala/dev/mongocamp/driver/mongodb/jdbc/BaseJdbcSpec.scala rename to src/test/scala/dev/mongocamp/driver/mongodb/jdbc/BaseJdbcSuite.scala index 8dac6347..cbbe7f94 100644 --- a/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/BaseJdbcSpec.scala +++ 
b/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/BaseJdbcSuite.scala @@ -1,14 +1,12 @@ package dev.mongocamp.driver.mongodb.jdbc -import better.files.{File, Resource} -import dev.mongocamp.driver.mongodb.dao.PersonSpecification -import dev.mongocamp.driver.mongodb.test.TestDatabase.PersonDAO +import dev.mongocamp.driver.mongodb.dao.BasePersonSuite -import java.sql.{Connection, DriverManager} +import java.sql.{ Connection, DriverManager } import java.util.Properties -class BaseJdbcSpec extends PersonSpecification { - var connection : Connection = _ +class BaseJdbcSuite extends BasePersonSuite { + var connection: Connection = _ override def beforeAll(): Unit = { super.beforeAll() diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/ConnectionSuite.scala b/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/ConnectionSuite.scala new file mode 100644 index 00000000..895a0f05 --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/ConnectionSuite.scala @@ -0,0 +1,193 @@ +package dev.mongocamp.driver.mongodb.jdbc + +import dev.mongocamp.driver.mongodb._ +import dev.mongocamp.driver.mongodb.jdbc.statement.MongoPreparedStatement + +import java.sql.{ Connection, SQLFeatureNotSupportedException, Savepoint } +import java.util.Properties +import java.util.concurrent.Executor + +class ConnectionSuite extends BaseJdbcSuite { + + test("getPropertyInfo should return the connection property info") { + val driver = new MongoJdbcDriver() + val connectionUrl = "jdbc:mongodb://localhost:27017/mongocamp-unit-test?retryWrites=true&loadBalanced=false&serverSelectionTimeoutMS=5000&connectTimeoutMS=10000" + val propertiesInfo = driver.getPropertyInfo(connectionUrl, new Properties()) + assertEquals(propertiesInfo.length, 5) + } + + test("getDatabaseProvider should return the database provider") { + assertEquals(connection.asInstanceOf[MongoJdbcConnection].getDatabaseProvider.collections().results().isEmpty, false) + } + + test("createStatement should return a 
MongoPreparedStatement") { + assert(connection.createStatement().isInstanceOf[MongoPreparedStatement]) + assert(connection.createStatement(0, 0).isInstanceOf[MongoPreparedStatement]) + assert(connection.createStatement(0, 0, 0).isInstanceOf[MongoPreparedStatement]) + } + + test("prepareStatement should return a MongoPreparedStatement") { + assert(connection.prepareStatement("SELECT * FROM people").isInstanceOf[MongoPreparedStatement]) + assert(connection.prepareStatement("SELECT * FROM people", 0).isInstanceOf[MongoPreparedStatement]) + assert(connection.prepareStatement("SELECT * FROM people", 0, 0).isInstanceOf[MongoPreparedStatement]) + assert(connection.prepareStatement("SELECT * FROM people", 0, 0, 0).isInstanceOf[MongoPreparedStatement]) + assert(connection.prepareStatement("SELECT * FROM people", Array[Int]()).isInstanceOf[MongoPreparedStatement]) + assert(connection.prepareStatement("SELECT * FROM people", Array[String]()).isInstanceOf[MongoPreparedStatement]) + } + + test("prepareCall should return a MongoPreparedStatement") { + assert(connection.prepareCall("SELECT * FROM people").isInstanceOf[MongoPreparedStatement]) + assert(connection.prepareCall("SELECT * FROM people", 0, 0).isInstanceOf[MongoPreparedStatement]) + assert(connection.prepareCall("SELECT * FROM people", 0, 0, 0).isInstanceOf[MongoPreparedStatement]) + } + + test("nativeSQL should return the same SQL string") { + val sql = "SELECT * FROM people" + assertEquals(connection.nativeSQL(sql), sql) + } + + test("setAutoCommit should not throw an exception") { + connection.setAutoCommit(true) + } + + test("getAutoCommit should return true") { + assert(connection.getAutoCommit) + } + + test("commit should not throw an exception") { + connection.commit() + } + + test("rollback should not throw an exception") { + connection.rollback() + } + + test("getMetaData should return MongoDatabaseMetaData") { + assert(connection.getMetaData.isInstanceOf[MongoDatabaseMetaData]) + } + + test("setReadOnly should 
set the connection to read-only") { + connection.setReadOnly(true) + assert(connection.isReadOnly) + } + + test("setCatalog should not throw an exception") { + connection.setCatalog("testCatalog") + } + + test("getCatalog should return null") { + assertEquals(connection.getCatalog, null) + } + + test("intercept not implemented sql features") { + intercept[SQLFeatureNotSupportedException](connection.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED)) + intercept[SQLFeatureNotSupportedException](connection.createNClob()) + intercept[SQLFeatureNotSupportedException](connection.createBlob()) + intercept[SQLFeatureNotSupportedException](connection.createClob()) + intercept[SQLFeatureNotSupportedException](connection.createSQLXML()) + intercept[SQLFeatureNotSupportedException](connection.createStruct("", null)) + intercept[SQLFeatureNotSupportedException](connection.createArrayOf("typeName: String", null)) + } + + test("getTransactionIsolation should return TRANSACTION_NONE") { + assertEquals(connection.getTransactionIsolation, Connection.TRANSACTION_NONE) + } + + test("getWarnings should return null") { + assertEquals(connection.getWarnings, null) + } + + test("clearWarnings should not throw an exception") { + connection.clearWarnings() + } + + test("getTypeMap should return null") { + assertEquals(connection.getTypeMap, null) + } + + test("setTypeMap should not throw an exception") { + connection.setTypeMap(new java.util.HashMap[String, Class[_]]()) + } + + test("setHoldability should not throw an exception") { + connection.setHoldability(0) + } + + test("getHoldability should return 0") { + assertEquals(connection.getHoldability, 0) + } + + test("setSavepoint should return null") { + assertEquals(connection.setSavepoint(), null) + } + + test("setSavepoint with name should return null") { + assertEquals(connection.setSavepoint("savepoint"), null) + } + + test("rollback with savepoint should not throw an exception") { + 
connection.rollback(null.asInstanceOf[Savepoint]) + } + + test("releaseSavepoint should not throw an exception") { + connection.releaseSavepoint(null.asInstanceOf[Savepoint]) + } + + test("isValid should return true") { + assert(connection.isValid(0)) + } + + test("setClientInfo with name and value should not throw an exception") { + connection.setClientInfo("ApplicationName", "testApp") + } + + test("setClientInfo with properties should not throw an exception") { + val properties = new Properties() + properties.setProperty("ApplicationName", "testApp") + connection.setClientInfo(properties) + } + + test("getClientInfo with name should return the application name") { + connection.setClientInfo("ApplicationName", "testApp") + assertEquals(connection.getClientInfo("ApplicationName"), "testApp") + } + + test("getClientInfo should return properties with application name") { + connection.setClientInfo("ApplicationName", "testApp") + val properties = connection.getClientInfo + assertEquals(properties.getProperty("ApplicationName"), "testApp") + } + + test("getSchema should return the default database name") { + assertEquals(connection.getSchema, "mongocamp-unit-test") + } + + test("setSchema should not throw an exception") { + connection.setSchema("testSchema") + } + + test("abort should not throw an exception") { + connection.abort(null.asInstanceOf[Executor]) + } + + test("setNetworkTimeout should not throw an exception") { + connection.setNetworkTimeout(null.asInstanceOf[Executor], 0) + } + + test("getNetworkTimeout should return 0") { + assertEquals(connection.getNetworkTimeout, 0) + } + + test("unwrap should return null") { + assertEquals(connection.unwrap(classOf[Connection]), null) + } + + test("isWrapperFor should return false") { + assert(!connection.isWrapperFor(classOf[Connection])) + } + + test("close should close the connection") { + connection.close() + assertEquals(connection.isClosed, true) + } +} diff --git 
a/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/ExploreJdbcSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/ExploreJdbcSpec.scala deleted file mode 100644 index 38430721..00000000 --- a/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/ExploreJdbcSpec.scala +++ /dev/null @@ -1,65 +0,0 @@ -package dev.mongocamp.driver.mongodb.jdbc - -import java.sql.Types - -class ExploreJdbcSpec extends BaseJdbcSpec { - - val schemaPattern: String = "mongocamp-unit-test$" - - "Jdbc Connection" should { - - "get table names" in { - val tableNames = connection.getMetaData.getTables("%", schemaPattern, "", Array.empty) - var tables = 0 - var tablePersonFound = false - while (tableNames.next()) { - tableNames.getString("TABLE_NAME") match { - case "people" => - tablePersonFound = true - tableNames.getString("TYPE_CAT") must beEqualTo("mongodb") - tableNames.getString("REMARKS") must beEqualTo("COLLECTION") - tableNames.getString("TABLE_TYPE") must beEqualTo("TABLE") - tableNames.getString("TABLE_SCHEM") must beEqualTo("mongocamp-unit-test") - case _ => - } - tables += 1 - } - tables must beGreaterThanOrEqualTo(1) - val columnNames = connection.getMetaData.getColumns("%", schemaPattern, "people", "") - var columns = 0 - while (columnNames.next()) { - columnNames.getString("TABLE_CAT") must beEqualTo("mongodb") - columnNames.getString("TABLE_NAME") must beEqualTo("people") - columnNames.getString("TABLE_SCHEM") must beEqualTo("mongocamp-unit-test") - val KeyDataType = "DATA_TYPE" - columnNames.getString("COLUMN_NAME") match { - case "_id" => - columnNames.getInt(KeyDataType) must beEqualTo(Types.VARCHAR) - case "id" => - columnNames.getInt(KeyDataType) must beEqualTo(Types.BIGINT) - columnNames.getInt("DECIMAL_DIGITS") must beEqualTo(0) - case "guid" => - columnNames.getInt(KeyDataType) must beEqualTo(Types.LONGVARCHAR) - case "isActive" => - columnNames.getInt(KeyDataType) must beEqualTo(Types.BOOLEAN) - case "balance" => - columnNames.getInt(KeyDataType) must 
beEqualTo(Types.DOUBLE) - columnNames.getInt("DECIMAL_DIGITS") must beEqualTo(Int.MaxValue) - case "registered" => - columnNames.getInt(KeyDataType) must beEqualTo(Types.DATE) - case "tags" => - columnNames.getInt(KeyDataType) must beEqualTo(Types.ARRAY) - case "friends" => - columnNames.getInt(KeyDataType) must beEqualTo(Types.ARRAY) - case "bestFriend" => - columnNames.getInt(KeyDataType) must beEqualTo(Types.JAVA_OBJECT) - case _ => - } - columns += 1 - } - columns must beEqualTo(20) - tablePersonFound must beTrue - } - - } -} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/ExploreJdbcSuite.scala b/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/ExploreJdbcSuite.scala new file mode 100644 index 00000000..8c91bbd8 --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/ExploreJdbcSuite.scala @@ -0,0 +1,759 @@ +package dev.mongocamp.driver.mongodb.jdbc + +import dev.mongocamp.driver.mongodb.BuildInfo +import dev.mongocamp.driver.mongodb.jdbc.resultSet.MongoDbResultSet + +import java.sql.{ Connection, DatabaseMetaData, ResultSet, Types } + +class ExploreJdbcSuite extends BaseJdbcSuite { + + val schemaPattern: String = "mongocamp-unit-test$" + + test("Jdbc Connection should get table names") { + val tableNames = connection.getMetaData.getTables("%", schemaPattern, "", Array.empty) + var tables = 0 + var tablePersonFound = false + while (tableNames.next()) { + tableNames.getString("TABLE_NAME") match { + case "people" => + tablePersonFound = true + assertEquals(tableNames.getString("TYPE_CAT"), "mongodb") + assertEquals(tableNames.getString("REMARKS"), "COLLECTION") + assertEquals(tableNames.getString("TABLE_TYPE"), "TABLE") + assertEquals(tableNames.getString("TABLE_SCHEM"), "mongocamp-unit-test") + case _ => + } + tables += 1 + } + assert(tables >= 1) + val columnNames = connection.getMetaData.getColumns("%", schemaPattern, "people", "") + var columns = 0 + while (columnNames.next()) { + assertEquals(columnNames.getString("TABLE_CAT"), 
"mongodb") + assertEquals(columnNames.getString("TABLE_NAME"), "people") + assertEquals(columnNames.getString("TABLE_SCHEM"), "mongocamp-unit-test") + val KeyDataType = "DATA_TYPE" + columnNames.getString("COLUMN_NAME") match { + case "_id" => + assertEquals(columnNames.getInt(KeyDataType), Types.VARCHAR) + case "id" => + assertEquals(columnNames.getInt(KeyDataType), Types.BIGINT) + assertEquals(columnNames.getInt("DECIMAL_DIGITS"), 0) + case "guid" => + assertEquals(columnNames.getInt(KeyDataType), Types.LONGVARCHAR) + case "isActive" => + assertEquals(columnNames.getInt(KeyDataType), Types.BOOLEAN) + case "balance" => + assertEquals(columnNames.getInt(KeyDataType), Types.DOUBLE) + assertEquals(columnNames.getInt("DECIMAL_DIGITS"), Int.MaxValue) + case "registered" => + assertEquals(columnNames.getInt(KeyDataType), Types.DATE) + case "tags" => + assertEquals(columnNames.getInt(KeyDataType), Types.ARRAY) + case "friends" => + assertEquals(columnNames.getInt(KeyDataType), Types.ARRAY) + case "bestFriend" => + assertEquals(columnNames.getInt(KeyDataType), Types.JAVA_OBJECT) + case _ => + } + columns += 1 + } + assertEquals(columns, 20) + assert(tablePersonFound) + } + + test("allProceduresAreCallable should return false") { + assertEquals(connection.getMetaData.allProceduresAreCallable(), false) + } + + test("allTablesAreSelectable should return false") { + assertEquals(connection.getMetaData.allTablesAreSelectable(), false) + } + + test("getURL should return connection string") { + assertEquals(connection.getMetaData.getURL, "mongodb://127.0.0.1:27017/mongocamp-unit-test") + } + + test("getUserName should return user name") { + assertEquals(connection.getMetaData.getUserName, "not set") + } + + test("isReadOnly should return false") { + assertEquals(connection.getMetaData.isReadOnly, false) + } + + test("nullsAreSortedHigh should return false") { + assertEquals(connection.getMetaData.nullsAreSortedHigh(), false) + } + + test("nullsAreSortedLow should return false") 
{ + assertEquals(connection.getMetaData.nullsAreSortedLow(), false) + } + + test("nullsAreSortedAtStart should return false") { + assertEquals(connection.getMetaData.nullsAreSortedAtStart(), false) + } + + test("nullsAreSortedAtEnd should return false") { + assertEquals(connection.getMetaData.nullsAreSortedAtEnd(), false) + } + + test("getDatabaseProductName should return mongodb") { + assertEquals(connection.getMetaData.getDatabaseProductName, "mongodb") + } + + test("getDatabaseProductVersion should return version") { + assertNotEquals(connection.getMetaData.getDatabaseProductVersion, null) + } + + test("getDriverName should return driver name") { + assertEquals(connection.getMetaData.getDriverName, BuildInfo.name) + } + + test("getDriverVersion should return driver version") { + assertEquals(connection.getMetaData.getDriverVersion, BuildInfo.version) + } + + test("getDriverMajorVersion should return major version") { + assertEquals(connection.getMetaData.getDriverMajorVersion, BuildInfo.version.split("\\.")(0).toInt) + } + + test("getDriverMinorVersion should return minor version") { + assertEquals(connection.getMetaData.getDriverMinorVersion, BuildInfo.version.split("\\.")(1).toInt) + } + + test("usesLocalFiles should return false") { + assertEquals(connection.getMetaData.usesLocalFiles(), false) + } + + test("usesLocalFilePerTable should return false") { + assertEquals(connection.getMetaData.usesLocalFilePerTable(), false) + } + + test("supportsMixedCaseIdentifiers should return false") { + assertEquals(connection.getMetaData.supportsMixedCaseIdentifiers(), false) + } + + test("storesUpperCaseIdentifiers should return false") { + assertEquals(connection.getMetaData.storesUpperCaseIdentifiers(), false) + } + + test("storesLowerCaseIdentifiers should return false") { + assertEquals(connection.getMetaData.storesLowerCaseIdentifiers(), false) + } + + test("storesMixedCaseIdentifiers should return false") { + 
assertEquals(connection.getMetaData.storesMixedCaseIdentifiers(), false) + } + + test("supportsMixedCaseQuotedIdentifiers should return false") { + assertEquals(connection.getMetaData.supportsMixedCaseQuotedIdentifiers(), false) + } + + test("storesUpperCaseQuotedIdentifiers should return false") { + assertEquals(connection.getMetaData.storesUpperCaseQuotedIdentifiers(), false) + } + + test("storesLowerCaseQuotedIdentifiers should return false") { + assertEquals(connection.getMetaData.storesLowerCaseQuotedIdentifiers(), false) + } + + test("storesMixedCaseQuotedIdentifiers should return false") { + assertEquals(connection.getMetaData.storesMixedCaseQuotedIdentifiers(), false) + } + + test("getIdentifierQuoteString should return null") { + assertEquals(connection.getMetaData.getIdentifierQuoteString, null) + } + + test("getSQLKeywords should return empty string") { + assertEquals(connection.getMetaData.getSQLKeywords, "") + } + + test("getNumericFunctions should return null") { + assertEquals(connection.getMetaData.getNumericFunctions, null) + } + + test("getStringFunctions should return null") { + assertEquals(connection.getMetaData.getStringFunctions, null) + } + + test("getSystemFunctions should return null") { + assertEquals(connection.getMetaData.getSystemFunctions, null) + } + + test("getTimeDateFunctions should return date") { + assertEquals(connection.getMetaData.getTimeDateFunctions, "date") + } + + test("getSearchStringEscape should return \\") { + assertEquals(connection.getMetaData.getSearchStringEscape, "\\") + } + + test("getExtraNameCharacters should return null") { + assertEquals(connection.getMetaData.getExtraNameCharacters, null) + } + + test("supportsAlterTableWithAddColumn should return false") { + assertEquals(connection.getMetaData.supportsAlterTableWithAddColumn(), false) + } + + test("supportsAlterTableWithDropColumn should return false") { + assertEquals(connection.getMetaData.supportsAlterTableWithDropColumn(), false) + } + + 
test("supportsColumnAliasing should return true") { + assertEquals(connection.getMetaData.supportsColumnAliasing(), true) + } + + test("nullPlusNonNullIsNull should return false") { + assertEquals(connection.getMetaData.nullPlusNonNullIsNull(), false) + } + + test("supportsConvert should return false") { + assertEquals(connection.getMetaData.supportsConvert(), false) + } + + test("supportsConvert with parameters should return false") { + assertEquals(connection.getMetaData.supportsConvert(0, 0), false) + } + + test("supportsTableCorrelationNames should return false") { + assertEquals(connection.getMetaData.supportsTableCorrelationNames(), false) + } + + test("supportsDifferentTableCorrelationNames should return false") { + assertEquals(connection.getMetaData.supportsDifferentTableCorrelationNames(), false) + } + + test("supportsExpressionsInOrderBy should return false") { + assertEquals(connection.getMetaData.supportsExpressionsInOrderBy(), false) + } + + test("supportsOrderByUnrelated should return true") { + assertEquals(connection.getMetaData.supportsOrderByUnrelated(), true) + } + + test("supportsGroupBy should return true") { + assertEquals(connection.getMetaData.supportsGroupBy(), true) + } + + test("supportsGroupByUnrelated should return true") { + assertEquals(connection.getMetaData.supportsGroupByUnrelated(), true) + } + + test("supportsGroupByBeyondSelect should return true") { + assertEquals(connection.getMetaData.supportsGroupByBeyondSelect(), true) + } + + test("supportsLikeEscapeClause should return true") { + assertEquals(connection.getMetaData.supportsLikeEscapeClause(), true) + } + + test("supportsMultipleResultSets should return true") { + assertEquals(connection.getMetaData.supportsMultipleResultSets(), true) + } + + test("supportsMultipleTransactions should return false") { + assertEquals(connection.getMetaData.supportsMultipleTransactions(), false) + } + + test("supportsNonNullableColumns should return true") { + 
assertEquals(connection.getMetaData.supportsNonNullableColumns(), true) + } + + test("supportsMinimumSQLGrammar should return false") { + assertEquals(connection.getMetaData.supportsMinimumSQLGrammar(), false) + } + + test("supportsCoreSQLGrammar should return false") { + assertEquals(connection.getMetaData.supportsCoreSQLGrammar(), false) + } + + test("supportsExtendedSQLGrammar should return false") { + assertEquals(connection.getMetaData.supportsExtendedSQLGrammar(), false) + } + + test("supportsANSI92EntryLevelSQL should return false") { + assertEquals(connection.getMetaData.supportsANSI92EntryLevelSQL(), false) + } + + test("supportsANSI92IntermediateSQL should return false") { + assertEquals(connection.getMetaData.supportsANSI92IntermediateSQL(), false) + } + + test("supportsANSI92FullSQL should return false") { + assertEquals(connection.getMetaData.supportsANSI92FullSQL(), false) + } + + test("supportsIntegrityEnhancementFacility should return false") { + assertEquals(connection.getMetaData.supportsIntegrityEnhancementFacility(), false) + } + + test("supportsOuterJoins should return false") { + assertEquals(connection.getMetaData.supportsOuterJoins(), false) + } + + test("supportsFullOuterJoins should return false") { + assertEquals(connection.getMetaData.supportsFullOuterJoins(), false) + } + + test("supportsLimitedOuterJoins should return false") { + assertEquals(connection.getMetaData.supportsLimitedOuterJoins(), false) + } + + test("getSchemaTerm should return database") { + assertEquals(connection.getMetaData.getSchemaTerm, "database") + } + + test("getProcedureTerm should return null") { + assertEquals(connection.getMetaData.getProcedureTerm, null) + } + + test("getCatalogTerm should return database") { + assertEquals(connection.getMetaData.getCatalogTerm, "database") + } + + test("isCatalogAtStart should return true") { + assertEquals(connection.getMetaData.isCatalogAtStart, true) + } + + test("getCatalogSeparator should return .") { + 
assertEquals(connection.getMetaData.getCatalogSeparator, ".") + } + + test("supportsSchemasInDataManipulation should return false") { + assertEquals(connection.getMetaData.supportsSchemasInDataManipulation(), false) + } + + test("supportsSchemasInProcedureCalls should return false") { + assertEquals(connection.getMetaData.supportsSchemasInProcedureCalls(), false) + } + + test("supportsSchemasInTableDefinitions should return false") { + assertEquals(connection.getMetaData.supportsSchemasInTableDefinitions(), false) + } + + test("supportsSchemasInIndexDefinitions should return false") { + assertEquals(connection.getMetaData.supportsSchemasInIndexDefinitions(), false) + } + + test("supportsSchemasInPrivilegeDefinitions should return false") { + assertEquals(connection.getMetaData.supportsSchemasInPrivilegeDefinitions(), false) + } + + test("supportsCatalogsInDataManipulation should return true") { + assertEquals(connection.getMetaData.supportsCatalogsInDataManipulation(), true) + } + + test("supportsCatalogsInProcedureCalls should return false") { + assertEquals(connection.getMetaData.supportsCatalogsInProcedureCalls(), false) + } + + test("supportsCatalogsInTableDefinitions should return false") { + assertEquals(connection.getMetaData.supportsCatalogsInTableDefinitions(), false) + } + + test("supportsCatalogsInIndexDefinitions should return false") { + assertEquals(connection.getMetaData.supportsCatalogsInIndexDefinitions(), false) + } + + test("supportsCatalogsInPrivilegeDefinitions should return false") { + assertEquals(connection.getMetaData.supportsCatalogsInPrivilegeDefinitions(), false) + } + + test("supportsPositionedDelete should return false") { + assertEquals(connection.getMetaData.supportsPositionedDelete(), false) + } + + test("supportsPositionedUpdate should return false") { + assertEquals(connection.getMetaData.supportsPositionedUpdate(), false) + } + + test("supportsSelectForUpdate should return false") { + 
assertEquals(connection.getMetaData.supportsSelectForUpdate(), false) + } + + test("supportsStoredProcedures should return false") { + assertEquals(connection.getMetaData.supportsStoredProcedures(), false) + } + + test("supportsSubqueriesInComparisons should return false") { + assertEquals(connection.getMetaData.supportsSubqueriesInComparisons(), false) + } + + test("supportsSubqueriesInExists should return false") { + assertEquals(connection.getMetaData.supportsSubqueriesInExists(), false) + } + + test("supportsSubqueriesInIns should return false") { + assertEquals(connection.getMetaData.supportsSubqueriesInIns(), false) + } + + test("supportsSubqueriesInQuantifieds should return false") { + assertEquals(connection.getMetaData.supportsSubqueriesInQuantifieds(), false) + } + + test("supportsCorrelatedSubqueries should return false") { + assertEquals(connection.getMetaData.supportsCorrelatedSubqueries(), false) + } + + test("supportsUnion should return true") { + assertEquals(connection.getMetaData.supportsUnion(), true) + } + + test("supportsUnionAll should return true") { + assertEquals(connection.getMetaData.supportsUnionAll(), true) + } + + test("supportsOpenCursorsAcrossCommit should return false") { + assertEquals(connection.getMetaData.supportsOpenCursorsAcrossCommit(), false) + } + + test("supportsOpenCursorsAcrossRollback should return false") { + assertEquals(connection.getMetaData.supportsOpenCursorsAcrossRollback(), false) + } + + test("supportsOpenStatementsAcrossCommit should return false") { + assertEquals(connection.getMetaData.supportsOpenStatementsAcrossCommit(), false) + } + + test("supportsOpenStatementsAcrossRollback should return false") { + assertEquals(connection.getMetaData.supportsOpenStatementsAcrossRollback(), false) + } + + test("getMaxBinaryLiteralLength should return 0") { + assertEquals(connection.getMetaData.getMaxBinaryLiteralLength, 0) + } + + test("getMaxCharLiteralLength should return 0") { + 
assertEquals(connection.getMetaData.getMaxCharLiteralLength, 0) + } + + test("getMaxColumnNameLength should return 0") { + assertEquals(connection.getMetaData.getMaxColumnNameLength, 0) + } + + test("getMaxColumnsInGroupBy should return 0") { + assertEquals(connection.getMetaData.getMaxColumnsInGroupBy, 0) + } + + test("getMaxColumnsInIndex should return 0") { + assertEquals(connection.getMetaData.getMaxColumnsInIndex, 0) + } + + test("getMaxColumnsInOrderBy should return 0") { + assertEquals(connection.getMetaData.getMaxColumnsInOrderBy, 0) + } + + test("getMaxColumnsInSelect should return 0") { + assertEquals(connection.getMetaData.getMaxColumnsInSelect, 0) + } + + test("getMaxColumnsInTable should return 0") { + assertEquals(connection.getMetaData.getMaxColumnsInTable, 0) + } + + test("getMaxConnections should return 0") { + assertEquals(connection.getMetaData.getMaxConnections, 0) + } + + test("getMaxCursorNameLength should return 0") { + assertEquals(connection.getMetaData.getMaxCursorNameLength, 0) + } + + test("getMaxIndexLength should return 0") { + assertEquals(connection.getMetaData.getMaxIndexLength, 0) + } + + test("getMaxSchemaNameLength should return 0") { + assertEquals(connection.getMetaData.getMaxSchemaNameLength, 0) + } + + test("getMaxProcedureNameLength should return 0") { + assertEquals(connection.getMetaData.getMaxProcedureNameLength, 0) + } + + test("getMaxCatalogNameLength should return 0") { + assertEquals(connection.getMetaData.getMaxCatalogNameLength, 0) + } + + test("getMaxRowSize should return 0") { + assertEquals(connection.getMetaData.getMaxRowSize, 0) + } + + test("doesMaxRowSizeIncludeBlobs should return false") { + assertEquals(connection.getMetaData.doesMaxRowSizeIncludeBlobs(), false) + } + + test("getMaxStatementLength should return 0") { + assertEquals(connection.getMetaData.getMaxStatementLength, 0) + } + + test("getMaxStatements should return 0") { + assertEquals(connection.getMetaData.getMaxStatements, 0) + } + + 
test("getMaxTableNameLength should return 90") { + assertEquals(connection.getMetaData.getMaxTableNameLength, 90) + } + + test("getMaxTablesInSelect should return 0") { + assertEquals(connection.getMetaData.getMaxTablesInSelect, 0) + } + + test("getMaxUserNameLength should return 0") { + assertEquals(connection.getMetaData.getMaxUserNameLength, 0) + } + + test("getDefaultTransactionIsolation should return TRANSACTION_NONE") { + assertEquals(connection.getMetaData.getDefaultTransactionIsolation, Connection.TRANSACTION_NONE) + } + + test("supportsTransactions should return false") { + assertEquals(connection.getMetaData.supportsTransactions(), false) + } + + test("supportsTransactionIsolationLevel should return false") { + assertEquals(connection.getMetaData.supportsTransactionIsolationLevel(0), false) + } + + test("supportsDataDefinitionAndDataManipulationTransactions should return false") { + assertEquals(connection.getMetaData.supportsDataDefinitionAndDataManipulationTransactions(), false) + } + + test("supportsDataManipulationTransactionsOnly should return false") { + assertEquals(connection.getMetaData.supportsDataManipulationTransactionsOnly(), false) + } + + test("dataDefinitionCausesTransactionCommit should return false") { + assertEquals(connection.getMetaData.dataDefinitionCausesTransactionCommit(), false) + } + + test("dataDefinitionIgnoredInTransactions should return false") { + assertEquals(connection.getMetaData.dataDefinitionIgnoredInTransactions(), false) + } + + test("getProcedures should return empty ResultSet") { + val resultSet = connection.getMetaData.getProcedures("", "", "") + assert(resultSet.isInstanceOf[MongoDbResultSet]) + assertEquals(resultSet.next(), false) + } + + test("getProcedureColumns should return empty ResultSet") { + val resultSet = connection.getMetaData.getProcedureColumns("", "", "", "") + assert(resultSet.isInstanceOf[MongoDbResultSet]) + assertEquals(resultSet.next(), false) + } + + test("getTables should return ResultSet 
with tables") { + val resultSet = connection.getMetaData.getTables("", "", "", Array("TABLE")) + assert(resultSet.isInstanceOf[MongoDbResultSet]) + assertEquals(resultSet.next(), true) + assertEquals(resultSet.getString("TABLE_NAME"), "system.version") + } + + test("getSchemas should return ResultSet with schemas") { + val resultSet = connection.getMetaData.getSchemas + assert(resultSet.isInstanceOf[MongoDbResultSet]) + assertEquals(resultSet.next(), true) + assertEquals(resultSet.getString("TABLE_SCHEM"), "admin") + } + + test("getCatalogs should return ResultSet with catalogs") { + val resultSet = connection.getMetaData.getCatalogs + assert(resultSet.isInstanceOf[MongoDbResultSet]) + assertEquals(resultSet.next(), true) + assertEquals(resultSet.getString("TABLE_CAT"), "mongodb") + } + + test("getTableTypes should return ResultSet with table types") { + val resultSet = connection.getMetaData.getTableTypes + assert(resultSet.isInstanceOf[MongoDbResultSet]) + assertEquals(resultSet.next(), true) + assertEquals(resultSet.getString("TABLE_TYPE"), "COLLECTION") + } + + test("getColumns should return ResultSet with columns") { + val resultSet = connection.getMetaData.getColumns("", "db1", "coll1", "") + assert(resultSet.isInstanceOf[MongoDbResultSet]) + assertEquals(resultSet.next(), false) + } + + test("getColumnPrivileges should return null") { + assertEquals(connection.getMetaData.getColumnPrivileges("", "", "", ""), null) + } + + test("getTablePrivileges should return null") { + assertEquals(connection.getMetaData.getTablePrivileges("", "", ""), null) + } + + test("getBestRowIdentifier should return null") { + assertEquals(connection.getMetaData.getBestRowIdentifier("", "", "", 0, false), null) + } + + test("getVersionColumns should return null") { + assertEquals(connection.getMetaData.getVersionColumns("", "", ""), null) + } + + test("getPrimaryKeys should return ResultSet with primary keys") { + val resultSet = connection.getMetaData.getPrimaryKeys("", 
"testSchema", "testTable") + assert(resultSet.isInstanceOf[MongoDbResultSet]) + // Add more assertions based on the expected content of the ResultSet + } + + test("getImportedKeys should return null") { + assertEquals(connection.getMetaData.getImportedKeys("", "", ""), null) + } + + test("getExportedKeys should return null") { + assertEquals(connection.getMetaData.getExportedKeys("", "", ""), null) + } + + test("getCrossReference should return null") { + assertEquals(connection.getMetaData.getCrossReference("", "", "", "", "", ""), null) + } + + test("getTypeInfo should return ResultSet with type info") { + val resultSet = connection.getMetaData.getTypeInfo + assert(resultSet.isInstanceOf[MongoDbResultSet]) + // Add more assertions based on the expected content of the ResultSet + } + + test("getIndexInfo should return ResultSet with index info") { + val resultSet = connection.getMetaData.getIndexInfo("", "testSchema", "testTable", false, false) + assert(resultSet.isInstanceOf[MongoDbResultSet]) + } + + test("supportsResultSetType should return true for TYPE_FORWARD_ONLY") { + assertEquals(connection.getMetaData.supportsResultSetType(ResultSet.TYPE_FORWARD_ONLY), true) + } + + test("supportsResultSetConcurrency should return false") { + assertEquals(connection.getMetaData.supportsResultSetConcurrency(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY), false) + } + + test("ownUpdatesAreVisible should return false") { + assertEquals(connection.getMetaData.ownUpdatesAreVisible(ResultSet.TYPE_FORWARD_ONLY), false) + } + + test("ownDeletesAreVisible should return false") { + assertEquals(connection.getMetaData.ownDeletesAreVisible(ResultSet.TYPE_FORWARD_ONLY), false) + } + + test("ownInsertsAreVisible should return false") { + assertEquals(connection.getMetaData.ownInsertsAreVisible(ResultSet.TYPE_FORWARD_ONLY), false) + } + + test("othersUpdatesAreVisible should return false") { + 
assertEquals(connection.getMetaData.othersUpdatesAreVisible(ResultSet.TYPE_FORWARD_ONLY), false) + } + + test("othersDeletesAreVisible should return false") { + assertEquals(connection.getMetaData.othersDeletesAreVisible(ResultSet.TYPE_FORWARD_ONLY), false) + } + + test("othersInsertsAreVisible should return false") { + assertEquals(connection.getMetaData.othersInsertsAreVisible(ResultSet.TYPE_FORWARD_ONLY), false) + } + + test("updatesAreDetected should return false") { + assertEquals(connection.getMetaData.updatesAreDetected(ResultSet.TYPE_FORWARD_ONLY), false) + } + + test("deletesAreDetected should return false") { + assertEquals(connection.getMetaData.deletesAreDetected(ResultSet.TYPE_FORWARD_ONLY), false) + } + + test("insertsAreDetected should return false") { + assertEquals(connection.getMetaData.insertsAreDetected(ResultSet.TYPE_FORWARD_ONLY), false) + } + + test("supportsBatchUpdates should return false") { + assertEquals(connection.getMetaData.supportsBatchUpdates(), false) + } + + test("getUDTs should return empty ResultSet") { + val resultSet = connection.getMetaData.getUDTs("", "", "", Array()) + assert(resultSet.isInstanceOf[MongoDbResultSet]) + assertEquals(resultSet.next(), false) + } + + test("getConnection should return the connection") { + assertEquals(connection.getMetaData.getConnection, connection) + } + + test("supportsSavepoints should return false") { + assertEquals(connection.getMetaData.supportsSavepoints(), false) + } + + test("supportsNamedParameters should return false") { + assertEquals(connection.getMetaData.supportsNamedParameters(), false) + } + + test("supportsMultipleOpenResults should return false") { + assertEquals(connection.getMetaData.supportsMultipleOpenResults(), false) + } + + test("supportsGetGeneratedKeys should return false") { + assertEquals(connection.getMetaData.supportsGetGeneratedKeys(), false) + } + + test("getSuperTypes should return empty ResultSet") { + val resultSet = connection.getMetaData.getSuperTypes("", 
"", "") + assert(resultSet.isInstanceOf[MongoDbResultSet]) + assertEquals(resultSet.next(), false) + } + + test("getSuperTables should return empty ResultSet") { + val resultSet = connection.getMetaData.getSuperTables("", "", "") + assert(resultSet.isInstanceOf[MongoDbResultSet]) + assertEquals(resultSet.next(), false) + } + + test("getAttributes should return empty ResultSet") { + val resultSet = connection.getMetaData.getAttributes("", "", "", "") + assert(resultSet.isInstanceOf[MongoDbResultSet]) + assertEquals(resultSet.next(), false) + } + + test("supportsResultSetHoldability should return false") { + assertEquals(connection.getMetaData.supportsResultSetHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT), false) + } + + test("getResultSetHoldability should return HOLD_CURSORS_OVER_COMMIT") { + assertEquals(connection.getMetaData.getResultSetHoldability, ResultSet.HOLD_CURSORS_OVER_COMMIT) + } + + test("getDatabaseMajorVersion should return the major version") { + assertEquals(connection.getMetaData.getDatabaseMajorVersion, 3) // Replace with the actual major version + } + + test("getDatabaseMinorVersion should return the minor version") { + assertEquals(connection.getMetaData.getDatabaseMinorVersion, 0) // Replace with the actual minor version + } + + test("getJDBCMajorVersion should return the JDBC major version") { + assertEquals(connection.getMetaData.getJDBCMajorVersion, 4) + } + + test("getJDBCMinorVersion should return the JDBC minor version") { + assertEquals(connection.getMetaData.getJDBCMinorVersion, 2) + } + + test("getSQLStateType should return sqlStateXOpen") { + assertEquals(connection.getMetaData.getSQLStateType, DatabaseMetaData.sqlStateXOpen) + } + + test("locatorsUpdateCopy should return false") { + assertEquals(connection.getMetaData.locatorsUpdateCopy(), false) + } + + test("supportsStatementPooling should return false") { + assertEquals(connection.getMetaData.supportsStatementPooling(), false) + } + + test("getRowIdLifetime should return null") 
{ + assertEquals(connection.getMetaData.getRowIdLifetime, null) + } +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/LiquibaseJdbcSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/LiquibaseJdbcSpec.scala deleted file mode 100644 index eb101900..00000000 --- a/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/LiquibaseJdbcSpec.scala +++ /dev/null @@ -1,41 +0,0 @@ -package dev.mongocamp.driver.mongodb.jdbc - -import com.typesafe.scalalogging.LazyLogging -import liquibase.database.jvm.JdbcConnection -import liquibase.exception.LiquibaseException -import liquibase.resource.ClassLoaderResourceAccessor -import liquibase.{Contexts, LabelExpression, Liquibase} - -import scala.jdk.CollectionConverters._ -import scala.language.implicitConversions - - -class LiquibaseJdbcSpec extends BaseJdbcSpec with LazyLogging { - - "Jdbc Connection" should { - - "migrate database with liquibase" in { - val jdbcConnection = new JdbcConnection(connection) - val liquibase: Liquibase = new Liquibase("liquibase/changelog.xml", new ClassLoaderResourceAccessor(), jdbcConnection ) - val contexts = new Contexts() - val unrunChangesets = liquibase.listUnrunChangeSets(contexts, new LabelExpression()) - val changes = unrunChangesets.asScala.toList - if (changes.isEmpty) { - logger.info("liquibase - nothing to update") - true must beTrue - } - logger.info("liquibase - %s changesets to update".format(changes)) - try { - liquibase.update(contexts) - true must beTrue - } - catch { - case e: LiquibaseException => - logger.error(e.getMessage, e) - false must beTrue - } - - } - - } -} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/LiquibaseJdbcSuite.scala b/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/LiquibaseJdbcSuite.scala new file mode 100644 index 00000000..a80b5736 --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/LiquibaseJdbcSuite.scala @@ -0,0 +1,43 @@ +package dev.mongocamp.driver.mongodb.jdbc + +import 
com.typesafe.scalalogging.LazyLogging +import dev.mongocamp.driver.mongodb.GenericObservable +import dev.mongocamp.driver.mongodb.json._ +import dev.mongocamp.driver.mongodb.test.TestDatabase +import liquibase.database.jvm.JdbcConnection +import liquibase.exception.LiquibaseException +import liquibase.resource.ClassLoaderResourceAccessor +import liquibase.{ Contexts, LabelExpression, Liquibase } + +import scala.concurrent.Future +import scala.jdk.CollectionConverters._ +import scala.language.implicitConversions + +class LiquibaseJdbcSuite extends BaseJdbcSuite with LazyLogging { + + override def beforeAll(): Unit = { + TestDatabase.provider.dropDatabase("mongocamp-unit-test").results() + super.beforeAll() + } + + test("Jdbc Connection should migrate database with liquibase") { + val jdbcConnection = new JdbcConnection(connection) + val liquibase: Liquibase = new Liquibase("liquibase/changelog.xml", new ClassLoaderResourceAccessor(), jdbcConnection) + val contexts = new Contexts() + val unrunChangesets = liquibase.listUnrunChangeSets(contexts, new LabelExpression()) + val changes = unrunChangesets.asScala.toList + assert(changes.nonEmpty) + logger.info("liquibase - %s changesets to update".format(changes)) + try { + liquibase.update(contexts) + } + catch { + case e: LiquibaseException => + logger.error(e.getMessage, e) + assert(false) + } + val unrunChangesetsAfter = liquibase.listUnrunChangeSets(contexts, new LabelExpression()) + assert(unrunChangesetsAfter.asScala.isEmpty) + } + +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/MongoDbResultSetMetaDataSuite.scala b/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/MongoDbResultSetMetaDataSuite.scala new file mode 100644 index 00000000..e4ed75ac --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/MongoDbResultSetMetaDataSuite.scala @@ -0,0 +1,130 @@ +package dev.mongocamp.driver.mongodb.jdbc + +import dev.mongocamp.driver.mongodb.MongoDAO +import 
dev.mongocamp.driver.mongodb.jdbc.resultSet.MongoDbResultSetMetaData +import dev.mongocamp.driver.mongodb.jdbc.statement.MongoPreparedStatement +import munit.FunSuite +import org.mongodb.scala.Document + +import java.sql.ResultSetMetaData + +class MongoDbResultSetMetaDataSuite extends BaseJdbcSuite { + + var metaData: ResultSetMetaData = _ + + override def beforeAll(): Unit = { + super.beforeAll() + val preparedStatement2 = MongoPreparedStatement(connection.asInstanceOf[MongoJdbcConnection]) + preparedStatement2.executeUpdate("drop table testCollection") + preparedStatement2.executeUpdate("insert into testCollection (intField, stringField, booleanField) values (1, 'test', true)") + preparedStatement2.executeUpdate("insert into testCollection (intField, stringField, booleanField) values (2, 'test2', false)") + metaData = preparedStatement2.executeQuery("select intField, booleanField, stringField from testCollection").getMetaData + } + + test("getColumnCount should return the correct column count") { + assertEquals(metaData.getColumnCount, 3) + } + + test("getColumnLabel should return the correct column label") { + assertEquals(metaData.getColumnLabel(1), "intField") + assertEquals(metaData.getColumnLabel(2), "booleanField") + assertEquals(metaData.getColumnLabel(3), "stringField") + } + + test("getColumnName should return the correct column name") { + assertEquals(metaData.getColumnName(1), "intField") + } + + test("isAutoIncrement should return false") { + assert(!metaData.isAutoIncrement(1)) + } + + test("isCaseSensitive should return true") { + assert(metaData.isCaseSensitive(1)) + } + + test("isSearchable should return true") { + assert(metaData.isSearchable(1)) + } + + test("isCurrency should return false") { + assert(!metaData.isCurrency(1)) + } + + test("isNullable should return columnNullable") { + assertEquals(metaData.isNullable(1), java.sql.ResultSetMetaData.columnNullable) + } + + test("isSigned should return false") { + assert(!metaData.isSigned(1)) + } 
+ + test("getColumnDisplaySize should return Int.MaxValue") { + assertEquals(metaData.getColumnDisplaySize(1), Int.MaxValue) + } + + test("getSchemaName should return the database name") { + assertEquals(metaData.getSchemaName(1), "mongocamp-unit-test") + } + + test("getPrecision should return 0") { + assertEquals(metaData.getPrecision(1), 0) + } + + test("getScale should return 0") { + assertEquals(metaData.getScale(1), 0) + } + + test("getTableName should return the collection name") { + assertEquals(metaData.getTableName(1), "testCollection") + } + + test("getCatalogName should return the collection name") { + assertEquals(metaData.getCatalogName(1), "testCollection") + } + + test("getColumnType should return the correct SQL type") { + assertEquals(metaData.getColumnType(1), java.sql.Types.BIGINT) + assertEquals(metaData.getColumnType(2), java.sql.Types.BOOLEAN) + assertEquals(metaData.getColumnType(3), java.sql.Types.VARCHAR) + } + + test("getColumnTypeName should return the correct SQL type name") { + assertEquals(metaData.getColumnTypeName(1), "BIGINT") + assertEquals(metaData.getColumnTypeName(2), "BOOLEAN") + assertEquals(metaData.getColumnTypeName(3), "VARCHAR") + } + + test("isReadOnly should return false") { + assert(!metaData.isReadOnly(1)) + } + + test("isWritable should return true") { + assert(metaData.isWritable(1)) + } + + test("isDefinitelyWritable should return true") { + assert(metaData.isDefinitelyWritable(1)) + } + + test("getColumnClassName should return the correct class name") { + assertEquals(metaData.getColumnClassName(1), classOf[java.lang.Long].getName) + assertEquals(metaData.getColumnClassName(2), classOf[java.lang.Boolean].getName) + assertEquals(metaData.getColumnClassName(3), classOf[java.lang.String].getName) + } + + test("unwrap should return null") { + assertEquals(metaData.unwrap(classOf[Object]), null) + } + + test("isWrapperFor should return false") { + assert(!metaData.isWrapperFor(classOf[Object])) + } + + 
test("getColumnIndex should return the correct index") { + val metaData2 = metaData.asInstanceOf[MongoDbResultSetMetaData] + assertEquals(metaData2.getColumnIndex("intField"), 1) + assertEquals(metaData2.getColumnIndex("booleanField"), 2) + assertEquals(metaData2.getColumnIndex("stringField"), 3) + } +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/MongoDbResultSetSuite.scala b/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/MongoDbResultSetSuite.scala new file mode 100644 index 00000000..535f2180 --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/MongoDbResultSetSuite.scala @@ -0,0 +1,371 @@ +package dev.mongocamp.driver.mongodb.jdbc + +import dev.mongocamp.driver.mongodb._ +import dev.mongocamp.driver.mongodb.jdbc.resultSet.MongoDbResultSet +import org.joda.time.DateTime +import org.mongodb.scala.bson.collection.immutable.Document +import org.mongodb.scala.model.Updates + +import java.sql._ + +class MongoDbResultSetSuite extends BaseJdbcSuite { + + def initializeResultSet(): ResultSet = { + val data = List( + Document("id" -> 1, "name" -> "test_name", "active" -> true, "date" -> new DateTime("2021-01-01T00:00:00Z").toDate), + Document("id" -> 2, "name" -> "another_name", "active" -> false) + ) + val resultSet = new MongoDbResultSet(null, data, 0) + resultSet + } + + test("next() should move to the next row") { + val resultSet = initializeResultSet() + assert(resultSet.next()) + assertEquals(resultSet.getInt("id"), 1) + assert(resultSet.next()) + assertEquals(resultSet.getInt("id"), 2) + assert(!resultSet.next()) + } + + test("wasNull() should return false") { + val resultSet = initializeResultSet() + assert(!resultSet.wasNull()) + } + + test("getString() should return the correct value") { + val resultSet = initializeResultSet() + resultSet.next() + assertEquals(resultSet.getString("name"), "test_name") + } + + test("getBoolean() should return the correct value") { + val resultSet = initializeResultSet() + resultSet.next() + 
assert(resultSet.getBoolean("active")) + } + + test("getInt() should return the correct value") { + val resultSet = initializeResultSet() + resultSet.next() + assertEquals(resultSet.getInt("id"), 1) + } + + test("getByte() should return the correct value") { + val resultSet = initializeResultSet() + resultSet.next() + assertEquals(resultSet.getByte("id"), 1.toByte) + } + + test("getBytes() should return the correct value") { + val resultSet = initializeResultSet() + resultSet.next() + assertEquals(resultSet.getByte("id"), 1.toByte) + } + + test("getShort() should return the correct value") { + val resultSet = initializeResultSet() + resultSet.next() + assertEquals(resultSet.getShort("id"), 1.toShort) + } + + test("getFloat() should return the correct value") { + val resultSet = initializeResultSet() + resultSet.next() + assertEquals(resultSet.getFloat("id"), 1.toFloat) + } + + test("getDouble() should return the correct value") { + val resultSet = initializeResultSet() + resultSet.next() + assertEquals(resultSet.getDouble("id"), 1.0) + } + + test("getBigDecimal() should return the correct value") { + val resultSet = initializeResultSet() + resultSet.next() + assertEquals(resultSet.getBigDecimal("id"), new java.math.BigDecimal(1)) + assertEquals(resultSet.getBigDecimal("id", 1), new java.math.BigDecimal(1).setScale(1)) + } + + test("getDate() should return the correct value") { + val resultSet = initializeResultSet() + resultSet.next() + assertEquals(resultSet.getDate("date").toString, "2021-01-01") + } + + test("getTime() should return the correct value") { + val resultSet = initializeResultSet() + resultSet.next() + assertEquals(resultSet.getTime("date"), new Time(new DateTime("2021-01-01T00:00:00Z").toDate.getTime)) + } + + test("getTimestamp() should return the correct value") { + val resultSet = initializeResultSet() + resultSet.next() + assertEquals(resultSet.getTimestamp("date").toInstant.toString, "2021-01-01T00:00:00Z") + } + + test("isBeforeFirst() should 
return true initially") { + val resultSet = initializeResultSet() + assert(resultSet.isBeforeFirst) + } + + test("isAfterLast() should return false initially") { + val resultSet = initializeResultSet() + assert(!resultSet.isAfterLast) + } + + test("isFirst() should return true after first next()") { + val resultSet = initializeResultSet() + resultSet.next() + assert(resultSet.isFirst) + } + + test("isLast() should return true after last next()") { + val resultSet = initializeResultSet() + resultSet.next() + resultSet.next() + assert(resultSet.isLast) + } + + test("getRow() should return the correct row number") { + val resultSet = initializeResultSet() + resultSet.next() + assertEquals(resultSet.getRow, 1) + } + + test("absolute() should return false") { + val resultSet = initializeResultSet() + assert(!resultSet.absolute(1)) + } + + test("relative() should return false") { + val resultSet = initializeResultSet() + assert(!resultSet.relative(1)) + } + + test("previous() should return false") { + val resultSet = initializeResultSet() + assert(!resultSet.previous()) + } + + test("getFetchDirection() should return FETCH_FORWARD") { + val resultSet = initializeResultSet() + assertEquals(resultSet.getFetchDirection, ResultSet.FETCH_FORWARD) + } + + test("getFetchSize() should return 1") { + val resultSet = initializeResultSet() + assertEquals(resultSet.getFetchSize, 1) + } + + test("getType() should return TYPE_FORWARD_ONLY") { + val resultSet = initializeResultSet() + assertEquals(resultSet.getType, ResultSet.TYPE_FORWARD_ONLY) + } + + test("getObject() should return the correct value") { + val resultSet = initializeResultSet() + resultSet.next() + assertEquals(resultSet.getObject("id"), 1.asInstanceOf[AnyRef]) + } + + test("updateString() should update the value") { + val resultSet = initializeResultSet() + resultSet.next() + resultSet.updateString("name", "updated_name") + assertEquals(resultSet.getString("name"), "updated_name") + } + + test("insertRow() should 
insert a new row") { + intercept[SQLFeatureNotSupportedException] { + val resultSet = initializeResultSet() + resultSet.moveToInsertRow() + resultSet.updateInt("id", 3) + resultSet.updateString("name", "new_name") + resultSet.updateBoolean("active", true) + resultSet.insertRow() + } + } + + test("updateRow() should update the current row") { + val resultSet = connection.createStatement().executeQuery("select _id, id, name, age from people where age < 30 order by id asc") + resultSet.next() + resultSet.updateString("name", "updated_name") + resultSet.updateRow() + assertEquals(resultSet.getString("name"), "updated_name") + resultSet.refreshRow() + val document = connection.asInstanceOf[MongoJdbcConnection].getDatabaseProvider.dao("people").find("id", resultSet.getLong("id")).result() + assertEquals(document, resultSet.asInstanceOf[MongoDbResultSet].getDocument) + } + + test("deleteRow() should delete the current row") { + val resultSet = connection.createStatement().executeQuery("select _id, id, name, age from people where id = 10") + resultSet.next() + resultSet.deleteRow() + assert(!resultSet.next()) + val document = connection.asInstanceOf[MongoJdbcConnection].getDatabaseProvider.dao("people").find("id", 10).resultOption() + assert(document.isEmpty) + } + + test("refreshRow() should refresh the current row") { + val resultSet = connection.createStatement().executeQuery("select _id, id, name, age from people where id = 42") + resultSet.next() + assertEquals(resultSet.getString("name"), "Aisha Buckner") + connection.asInstanceOf[MongoJdbcConnection].getDatabaseProvider.dao("people").updateOne(Map("id" -> 42), Updates.set("name", "updated_name")).result() + resultSet.refreshRow() + assertEquals(resultSet.getString("name"), "updated_name") + } + + test("getMetaData() should return the correct metadata") { + val resultSet = initializeResultSet() + assert(resultSet.getMetaData != null) + } + + test("findColumn() should return the correct column index") { + val 
resultSet = initializeResultSet() + assertEquals(resultSet.findColumn("id"), 1) + } + + test("getWarnings() should return null") { + val resultSet = initializeResultSet() + assert(resultSet.getWarnings == null) + } + + test("clearWarnings() should not throw an exception") { + val resultSet = initializeResultSet() + resultSet.clearWarnings() + } + + test("getCursorName() should return null") { + val resultSet = initializeResultSet() + assert(resultSet.getCursorName == null) + } + + test("getStatement() should return null") { + val resultSet = initializeResultSet() + assert(resultSet.getStatement == null) + } + + test("unwrap() should return null") { + val resultSet = initializeResultSet() + assert(resultSet.unwrap(classOf[MongoDbResultSet]) == null) + } + + test("isWrapperFor() should return false") { + val resultSet = initializeResultSet() + assert(!resultSet.isWrapperFor(classOf[MongoDbResultSet])) + } + + test("updateNull should update the value to null") { + val resultSet = initializeResultSet() + resultSet.next() + resultSet.updateNull(1) + assert(resultSet.getObject(1) == null) + } + + test("updateBoolean should update the value") { + val resultSet = initializeResultSet() + resultSet.next() + resultSet.updateBoolean(3, false) + assert(!resultSet.getBoolean(3)) + } + + test("updateInt should update the value") { + val resultSet = initializeResultSet() + resultSet.next() + resultSet.updateInt(1, 42) + assertEquals(resultSet.getInt(1), 42) + } + + test("updateFloat should update the value") { + val resultSet = initializeResultSet() + resultSet.next() + resultSet.updateFloat(1, 42.toFloat) + assertEquals(resultSet.getFloat(1), 42.toFloat) + } + + test("updateBigDecimal should update the value") { + val resultSet = initializeResultSet() + resultSet.next() + resultSet.updateBigDecimal(1, new java.math.BigDecimal(42)) + assertEquals(resultSet.getBigDecimal(1), new java.math.BigDecimal(42)) + } + + test("updateString should update the value") { + val resultSet = 
initializeResultSet() + resultSet.next() + resultSet.updateString(2, "updated_name") + assertEquals(resultSet.getString(2), "updated_name") + } + + test("updateDate should update the value") { + val resultSet = initializeResultSet() + resultSet.next() + val newDate = new Date(1622505600000L) + resultSet.updateDate(4, newDate) + assertEquals(resultSet.getDate(4), newDate) + } + + test("updateTime should update the value") { + val resultSet = initializeResultSet() + resultSet.next() + val newTime = new Time(1622505600000L) + resultSet.updateTime(4, newTime) + assertEquals(resultSet.getTime(4), newTime) + } + + test("updateTimestamp should update the value") { + val resultSet = initializeResultSet() + resultSet.next() + val newTimestamp = new Timestamp(1622505600000L) + resultSet.updateTimestamp(4, newTimestamp) + assertEquals(resultSet.getTimestamp(4), newTimestamp) + } + + test("updateObject should update the value") { + val resultSet = initializeResultSet() + resultSet.next() + resultSet.updateObject(1, 99) + assertEquals(resultSet.getObject(1).asInstanceOf[Int], 99) + } + + test("rowUpdated should return false") { + val resultSet = initializeResultSet() + assert(!resultSet.rowUpdated()) + } + + test("rowInserted should return false") { + val resultSet = initializeResultSet() + assert(!resultSet.rowInserted()) + } + + test("rowDeleted should return false") { + val resultSet = initializeResultSet() + assert(!resultSet.rowDeleted()) + } + + test("getConcurrency should throw SQLFeatureNotSupportedException") { + val resultSet = initializeResultSet() + intercept[SQLFeatureNotSupportedException] (resultSet.getConcurrency) + intercept[SQLFeatureNotSupportedException] (resultSet.updateAsciiStream(99, null, 1)) + intercept[SQLFeatureNotSupportedException] (resultSet.updateAsciiStream("updateAsciiStream", null, 1)) + intercept[SQLFeatureNotSupportedException] (resultSet.updateBinaryStream(99, null, 1)) + intercept[SQLFeatureNotSupportedException] 
(resultSet.updateBinaryStream("updateBinaryStream", null, 1)) + intercept[SQLFeatureNotSupportedException] (resultSet.updateCharacterStream(99, null, 1)) + intercept[SQLFeatureNotSupportedException] (resultSet.updateCharacterStream("updateCharacterStream", null, 1)) + + } + + test("null values for not implemented get methods") { + val resultSet = initializeResultSet() + assertEquals(resultSet.getAsciiStream(1), null) + assertEquals(resultSet.getUnicodeStream(1), null) + assertEquals(resultSet.getBinaryStream(1), null) + assertEquals(resultSet.getAsciiStream("id"), null) + assertEquals(resultSet.getUnicodeStream("id"), null) + assertEquals(resultSet.getBinaryStream("id"), null) + } + +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/MongoPreparedStatementSuite.scala b/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/MongoPreparedStatementSuite.scala new file mode 100644 index 00000000..3ee5ba19 --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/MongoPreparedStatementSuite.scala @@ -0,0 +1,350 @@ +package dev.mongocamp.driver.mongodb.jdbc + +import dev.mongocamp.driver.mongodb.jdbc.statement.MongoPreparedStatement + +import java.io.{InputStream, Reader} +import java.net.URL +import java.sql.{Date, Time, Timestamp} +import java.util.Calendar + +class MongoPreparedStatementSuite extends BaseJdbcSuite { + var preparedStatement: MongoPreparedStatement = _ + + override def beforeAll(): Unit = { + super.beforeAll() + preparedStatement = MongoPreparedStatement(connection.asInstanceOf[MongoJdbcConnection]) + val preparedStatement2 = MongoPreparedStatement(connection.asInstanceOf[MongoJdbcConnection]) + preparedStatement2.executeUpdate("DELETE FROM table_name WHERE column2 = 123;") + } + + test("execute should return false for null SQL") { + assert(!preparedStatement.execute(null)) + } + + test("executeQuery should return empty result set for unsupported SQL") { + val resultSet = preparedStatement.executeQuery("unsupported SQL") + 
assert(resultSet != null) + assertEquals(resultSet.next(), false) + } + + test("setSql should set the SQL string") { + preparedStatement.setSql("SELECT * FROM test") + assertNotEquals(preparedStatement.executeQuery(), null) + } + + test("setNull should set parameter to null") { + preparedStatement.setNull(1, java.sql.Types.VARCHAR) + assertEquals(preparedStatement.getString(1), "null") + } + + test("setBoolean should set boolean parameter") { + preparedStatement.setBoolean(1, true) + assert(preparedStatement.getBoolean(1)) + } + + test("setByte should set byte parameter") { + preparedStatement.setByte(1, 1.toByte) + assertEquals(preparedStatement.getByte(1), 1.toByte) + } + + test("setShort should set short parameter") { + preparedStatement.setShort(1, 1.toShort) + assertEquals(preparedStatement.getShort(1), 1.toShort) + } + + test("setInt should set int parameter") { + preparedStatement.setInt(1, 1) + assertEquals(preparedStatement.getInt(1), 1) + } + + test("setLong should set long parameter") { + preparedStatement.setLong(1, 1L) + assertEquals(preparedStatement.getLong(1), 1L) + } + + test("setFloat should set float parameter") { + preparedStatement.setFloat(1, 1.0f) + assertEquals(preparedStatement.getFloat(1), 1.0f) + } + + test("setDouble should set double parameter") { + preparedStatement.setDouble(1, 1.0) + assertEquals(preparedStatement.getDouble(1), 1.0) + } + + test("setBigDecimal should set BigDecimal parameter") { + preparedStatement.setBigDecimal(1, new java.math.BigDecimal("1.0")) + assertEquals(preparedStatement.getBigDecimal(1), new java.math.BigDecimal(1.0)) + } + + test("setString should set string parameter") { + preparedStatement.setString(1, "test") + assertEquals(preparedStatement.getString(1), "test") + } + + test("setBytes should set byte array parameter") { + val bytes = Array[Byte](1.toByte, 2.toByte, 3.toByte) + preparedStatement.setBytes(1, bytes) + assertEquals(preparedStatement.getBytes(1).toList, bytes.toList) + } + + test("setDate 
should set date parameter") { + val date = new Date(System.currentTimeMillis()) + preparedStatement.setDate(1, date) + assertEquals(preparedStatement.getDate(1), date) + } + + test("setTime should set time parameter") { + val time = new Time(System.currentTimeMillis()) + preparedStatement.setTime(1, time) + assertEquals(preparedStatement.getTime(1), time) + } + + test("setTimestamp should set timestamp parameter") { + val timestamp = new Timestamp(System.currentTimeMillis()) + preparedStatement.setTimestamp(1, timestamp) + assertEquals(preparedStatement.getTimestamp(1), timestamp) + } + + test("clearParameters should clear all parameters") { + preparedStatement.setString(1, "test") + preparedStatement.clearParameters() + assertEquals(preparedStatement.getString(1), null) + } + + test("getConnection should return the connection") { + assertEquals(preparedStatement.getConnection, connection) + } + + test("getQueryTimeout should return the query timeout") { + assertEquals(preparedStatement.getQueryTimeout, 10) + } + + test("setQueryTimeout should set the query timeout") { + preparedStatement.setQueryTimeout(20) + assertEquals(preparedStatement.getQueryTimeout, 20) + } + + test("getWarnings should return null") { + assertEquals(preparedStatement.getWarnings, null) + } + + test("clearWarnings should not throw exception") { + preparedStatement.clearWarnings() + } + + test("getResultSet should return the last result set") { + assertNotEquals(preparedStatement.getResultSet, null) + } + + test("getUpdateCount should return the last update count") { + assertEquals(preparedStatement.getUpdateCount, -1) + preparedStatement.executeUpdate( + "INSERT INTO table_name (column1, column2, column3) VALUES ('value1', 123, '2022-01-01T00:00:00.000Z'), ('value2', 456, '2022-02-01T00:00:00.000Z');" + ) + assertEquals(preparedStatement.getUpdateCount, 2) + preparedStatement.executeUpdate("Update table_name SET column1 = 'value3' WHERE column2 = 123;") + 
assertEquals(preparedStatement.getUpdateCount, 1) + preparedStatement.executeUpdate("DELETE FROM table_name WHERE column2 = 123;") + assertEquals(preparedStatement.getUpdateCount, 1) + } + + test("getMoreResults should return false") { + assert(!preparedStatement.getMoreResults) + } + + test("getFetchDirection should return FETCH_FORWARD") { + assertEquals(preparedStatement.getFetchDirection, java.sql.ResultSet.FETCH_FORWARD) + } + + test("getFetchSize should return -1") { + assertEquals(preparedStatement.getFetchSize, -1) + } + + test("getResultSetType should return TYPE_FORWARD_ONLY") { + assertEquals(preparedStatement.getResultSetType, java.sql.ResultSet.TYPE_FORWARD_ONLY) + } + + test("getGeneratedKeys should return null") { + assertEquals(preparedStatement.getGeneratedKeys, null) + } + + test("getResultSetHoldability should return 0") { + assertEquals(preparedStatement.getResultSetHoldability, 0) + } + + test("isPoolable should return false") { + assert(!preparedStatement.isPoolable) + } + + test("isCloseOnCompletion should return false") { + assert(!preparedStatement.isCloseOnCompletion) + } + + test("wasNull should return false") { + assert(!preparedStatement.wasNull()) + } + + test("getObject should return the parameter value") { + preparedStatement.setString(1, "test") + assertEquals(preparedStatement.getObject(1), "test") + } + + test("getURL should return the URL parameter") { + preparedStatement.setString(1, "http://example.com") + assertEquals(preparedStatement.getURL(1), new java.net.URL("http://example.com")) + } + + test("setObject should return an string") { + preparedStatement.setObject(1, "value") + assertEquals(preparedStatement.getString(1), "value") + preparedStatement.setObject(1, "value1", java.sql.Types.VARCHAR) + assertEquals(preparedStatement.getString(1), "value1") + preparedStatement.setObject(1, "value2", java.sql.Types.VARCHAR, 0) + assertEquals(preparedStatement.getString(1), "value2") + preparedStatement.setObject(1, null) + 
assertEquals(preparedStatement.getString(1), "null") + preparedStatement.setObject(1, List(1, 2, 3)) + assertEquals(preparedStatement.getString(1), "[1,2,3]") + preparedStatement.setObject(1, List("hallo", "world")) + assertEquals(preparedStatement.getString(1), "[\"hallo\",\"world\"]") + } + + test("set URL should set the URL parameter") { + preparedStatement.setURL(1, new java.net.URL("http://example.com")) + assertEquals(preparedStatement.getURL(1), new java.net.URL("http://example.com")) + assertEquals(preparedStatement.getString(1), "http://example.com") + } + + import java.sql.SQLFeatureNotSupportedException + + test("All unsupported methods should throw SQLFeatureNotSupportedException") { + val preparedStatement = new MongoPreparedStatement(connection.asInstanceOf[MongoJdbcConnection]) + + def assertThrowsFeatureNotSupportedException(f: => Unit): Unit = { + intercept[SQLFeatureNotSupportedException](f) + } + + assertThrowsFeatureNotSupportedException(preparedStatement.getString("param")) + assertThrowsFeatureNotSupportedException(preparedStatement.getBoolean("param")) + assertThrowsFeatureNotSupportedException(preparedStatement.getByte("param")) + assertThrowsFeatureNotSupportedException(preparedStatement.getShort("param")) + assertThrowsFeatureNotSupportedException(preparedStatement.getInt("param")) + assertThrowsFeatureNotSupportedException(preparedStatement.getLong("param")) + assertThrowsFeatureNotSupportedException(preparedStatement.getFloat("param")) + assertThrowsFeatureNotSupportedException(preparedStatement.getDouble("param")) + assertThrowsFeatureNotSupportedException(preparedStatement.getBytes("param")) + assertThrowsFeatureNotSupportedException(preparedStatement.getDate("param")) + assertThrowsFeatureNotSupportedException(preparedStatement.getTime("param")) + assertThrowsFeatureNotSupportedException(preparedStatement.getTimestamp("param")) + assertThrowsFeatureNotSupportedException(preparedStatement.getObject("param")) + 
assertThrowsFeatureNotSupportedException(preparedStatement.getBigDecimal("param")) + assertThrowsFeatureNotSupportedException(preparedStatement.getObject("param", classOf[Any])) + assertThrowsFeatureNotSupportedException(preparedStatement.getRef("param")) + assertThrowsFeatureNotSupportedException(preparedStatement.getBlob("param")) + assertThrowsFeatureNotSupportedException(preparedStatement.getClob("param")) + assertThrowsFeatureNotSupportedException(preparedStatement.getArray("param")) + assertThrowsFeatureNotSupportedException(preparedStatement.getDate("param", null)) + assertThrowsFeatureNotSupportedException(preparedStatement.getTime("param", null)) + assertThrowsFeatureNotSupportedException(preparedStatement.getTimestamp("param", null)) + assertThrowsFeatureNotSupportedException(preparedStatement.getURL("param")) + assertThrowsFeatureNotSupportedException(preparedStatement.getRowId(1)) + assertThrowsFeatureNotSupportedException(preparedStatement.getRowId("param")) + assertThrowsFeatureNotSupportedException(preparedStatement.setRowId("param", null)) + assertThrowsFeatureNotSupportedException(preparedStatement.setNString("param", null)) + assertThrowsFeatureNotSupportedException(preparedStatement.setNCharacterStream("param", null, 0L)) + assertThrowsFeatureNotSupportedException(preparedStatement.setNClob("param", null.asInstanceOf[java.sql.NClob])) + assertThrowsFeatureNotSupportedException(preparedStatement.setClob("param", null, 0L)) + assertThrowsFeatureNotSupportedException(preparedStatement.setBlob("param", null, 0L)) + assertThrowsFeatureNotSupportedException(preparedStatement.setNClob("param", null, 0L)) + assertThrowsFeatureNotSupportedException(preparedStatement.getNClob(1)) + assertThrowsFeatureNotSupportedException(preparedStatement.getNClob("param")) + assertThrowsFeatureNotSupportedException(preparedStatement.setSQLXML("param", null)) + assertThrowsFeatureNotSupportedException(preparedStatement.getSQLXML(1)) + 
assertThrowsFeatureNotSupportedException(preparedStatement.getSQLXML("param")) + assertThrowsFeatureNotSupportedException(preparedStatement.getNString(1)) + assertThrowsFeatureNotSupportedException(preparedStatement.getNString("param")) + assertThrowsFeatureNotSupportedException(preparedStatement.getNCharacterStream(1)) + assertThrowsFeatureNotSupportedException(preparedStatement.getNCharacterStream("param")) + assertThrowsFeatureNotSupportedException(preparedStatement.getCharacterStream(1)) + assertThrowsFeatureNotSupportedException(preparedStatement.getCharacterStream("param")) + assertThrowsFeatureNotSupportedException(preparedStatement.setBlob("param", null.asInstanceOf[java.sql.Blob])) + assertThrowsFeatureNotSupportedException(preparedStatement.setClob("param", null.asInstanceOf[java.sql.NClob])) + assertThrowsFeatureNotSupportedException(preparedStatement.setAsciiStream("param", null, 0L)) + assertThrowsFeatureNotSupportedException(preparedStatement.setBinaryStream("param", null, 0L)) + assertThrowsFeatureNotSupportedException(preparedStatement.setCharacterStream("param", null, 0L)) + assertThrowsFeatureNotSupportedException(preparedStatement.setAsciiStream("param", null)) + assertThrowsFeatureNotSupportedException(preparedStatement.setBinaryStream("param", null)) + assertThrowsFeatureNotSupportedException(preparedStatement.setCharacterStream("param", null)) + assertThrowsFeatureNotSupportedException(preparedStatement.setNCharacterStream("param", null)) + assertThrowsFeatureNotSupportedException(preparedStatement.setClob("param", null.asInstanceOf[java.sql.NClob])) + assertThrowsFeatureNotSupportedException(preparedStatement.setBlob("param", null.asInstanceOf[java.sql.Blob])) + assertThrowsFeatureNotSupportedException(preparedStatement.setNClob("param", null.asInstanceOf[java.sql.NClob])) + assertThrowsFeatureNotSupportedException(preparedStatement.getObject(1, classOf[Any])) + assertThrowsFeatureNotSupportedException(preparedStatement.getObject("param", 
classOf[Any])) + assertThrowsFeatureNotSupportedException(preparedStatement.setURL("param", null)) + assertThrowsFeatureNotSupportedException(preparedStatement.setNull("param", 0)) + assertThrowsFeatureNotSupportedException(preparedStatement.setBoolean("param", false)) + assertThrowsFeatureNotSupportedException(preparedStatement.setByte("param", 0.toByte)) + assertThrowsFeatureNotSupportedException(preparedStatement.setShort("param", 0.toShort)) + assertThrowsFeatureNotSupportedException(preparedStatement.setInt("param", 0)) + assertThrowsFeatureNotSupportedException(preparedStatement.setLong("param", 0L)) + assertThrowsFeatureNotSupportedException(preparedStatement.setFloat("param", 0.0f)) + assertThrowsFeatureNotSupportedException(preparedStatement.setDouble("param", 0.0)) + assertThrowsFeatureNotSupportedException(preparedStatement.setBigDecimal("param", null)) + assertThrowsFeatureNotSupportedException(preparedStatement.setString("param", null)) + assertThrowsFeatureNotSupportedException(preparedStatement.setBytes("param", null)) + assertThrowsFeatureNotSupportedException(preparedStatement.setDate("param", null)) + assertThrowsFeatureNotSupportedException(preparedStatement.setTime("param", null)) + assertThrowsFeatureNotSupportedException(preparedStatement.setTimestamp("param", null)) + assertThrowsFeatureNotSupportedException(preparedStatement.setAsciiStream("param", null, 0)) + assertThrowsFeatureNotSupportedException(preparedStatement.setBinaryStream("param", null, 0)) + assertThrowsFeatureNotSupportedException(preparedStatement.setObject("param", null, 0, 0)) + assertThrowsFeatureNotSupportedException(preparedStatement.setObject("param", null, 0)) + assertThrowsFeatureNotSupportedException(preparedStatement.setObject("param", null)) + assertThrowsFeatureNotSupportedException(preparedStatement.setCharacterStream("param", null, 0)) + assertThrowsFeatureNotSupportedException(preparedStatement.setDate("param", null, null)) + 
assertThrowsFeatureNotSupportedException(preparedStatement.setTime("param", null, null)) + assertThrowsFeatureNotSupportedException(preparedStatement.setTimestamp("param", null, null)) + assertThrowsFeatureNotSupportedException(preparedStatement.setNull("param", 0, null)) + assertThrowsFeatureNotSupportedException(preparedStatement.setClob("param", null.asInstanceOf[Reader])) + assertThrowsFeatureNotSupportedException(preparedStatement.setNClob("param", null.asInstanceOf[Reader])) + assertThrowsFeatureNotSupportedException(preparedStatement.setBlob("param", null.asInstanceOf[InputStream])) + } + + test("set values should not throw exception") { + preparedStatement.addBatch() + preparedStatement.setCharacterStream(1, null, 0) + preparedStatement.setRef(1, null) + preparedStatement.setBlob(1, null.asInstanceOf[java.sql.Blob]) + preparedStatement.setClob(1, null.asInstanceOf[java.sql.Clob]) + preparedStatement.setDate(1, new Date(0), Calendar.getInstance()) + preparedStatement.setTime(1, new Time(0), Calendar.getInstance()) + preparedStatement.setTimestamp(1, new Timestamp(0), Calendar.getInstance()) + preparedStatement.setNull(1, 0, "typeName") + preparedStatement.setURL(1, new URL("http://example.com")) + preparedStatement.setRowId(1, null) + preparedStatement.setNString(1, null) + preparedStatement.setNCharacterStream(1, null, 0L) + preparedStatement.setNClob(1, null.asInstanceOf[java.sql.NClob]) + preparedStatement.setClob(1, null, 0L) + preparedStatement.setBlob(1, null, 0L) + preparedStatement.setNClob(1, null, 0L) + preparedStatement.setSQLXML(1, null) + preparedStatement.setAsciiStream(1, null, 0L) + preparedStatement.setBinaryStream(1, null, 0L) + preparedStatement.setCharacterStream(1, null, 0L) + preparedStatement.setAsciiStream(1, null) + preparedStatement.setBinaryStream(1, null) + preparedStatement.setCharacterStream(1, null) + preparedStatement.setNCharacterStream(1, null) + preparedStatement.setClob(1, null.asInstanceOf[java.sql.Clob]) + 
preparedStatement.setBlob(1, null.asInstanceOf[java.sql.Blob]) + preparedStatement.setNClob(1, null.asInstanceOf[java.sql.NClob]) + preparedStatement.setArray(1, null.asInstanceOf[java.sql.Array]) + preparedStatement.setAsciiStream(1, null.asInstanceOf[InputStream], 1) + preparedStatement.setUnicodeStream(1, null.asInstanceOf[InputStream], 1) + preparedStatement.setBinaryStream(1, null.asInstanceOf[InputStream], 1) + assertEquals(preparedStatement.getMetaData, null) + } +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/SelectJDBCSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/SelectJDBCSpec.scala deleted file mode 100644 index 40af0d36..00000000 --- a/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/SelectJDBCSpec.scala +++ /dev/null @@ -1,50 +0,0 @@ -package dev.mongocamp.driver.mongodb.jdbc - -import java.sql.ResultSet -import scala.collection.mutable.ArrayBuffer - -class SelectJDBCSpec extends BaseJdbcSpec { - - "Jdbc Connection" should { - - "execute simple select" in { - val stmt = connection.createStatement() - val result = stmt.executeQuery("select id, guid, name, age, balance from people where age < 30 order by id asc") - var i = 0 - val arrayBuffer = ArrayBuffer[ResultSet]() - while (result.next()) { - i += 1 - arrayBuffer += result - } - arrayBuffer.size must beEqualTo(99) - i must beEqualTo(99) - } - - "execute prepared statement" in { - val preparedStatement = connection.prepareStatement("select * from `mongocamp-unit-test`.people where age < ? 
order by id asc") - preparedStatement.setLong(1, 30) - val result = preparedStatement.executeQuery() - var i = 0 - val arrayBuffer = ArrayBuffer[ResultSet]() - while (result.next()) { - i += 1 - arrayBuffer += result - } - arrayBuffer.size must beEqualTo(99) - i must beEqualTo(99) - } - - "count on empty table" in { - val stmt = connection.createStatement() - val result = stmt.executeQuery("select count(*) as tmp, sum(age) from empty;") - var i = 0 - while (result.next()) { - result.getInt("tmp") must beEqualTo(0) - result.getInt("sum(age)") must beEqualTo(0) - i += 1 - } - i must beEqualTo(1) - } - - } -} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/SelectJDBCSuite.scala b/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/SelectJDBCSuite.scala new file mode 100644 index 00000000..ff8c4c01 --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/jdbc/SelectJDBCSuite.scala @@ -0,0 +1,52 @@ +package dev.mongocamp.driver.mongodb.jdbc + +import java.sql.ResultSet +import scala.collection.mutable.ArrayBuffer + +class SelectJDBCSuite extends BaseJdbcSuite { + + test("execute simple select") { + val stmt = connection.createStatement() + val result = stmt.executeQuery("select id, guid, name, age, balance from people where age < 30 order by id asc") + var i = 0 + val arrayBuffer = ArrayBuffer[ResultSet]() + while (result.next()) { + assertEquals(result.getLong(1),result.getLong("id")) + assertEquals(result.getString(2),result.getString("guid")) + assertEquals(result.getString(3),result.getString("name")) + assertEquals(result.getInt(4),result.getInt("age")) + assertEquals(result.getDouble(5),result.getDouble("balance")) + i += 1 + arrayBuffer += result + } + assertEquals(arrayBuffer.size, 99) + assertEquals(i, 99) + } + + test("execute prepared statement") { + val preparedStatement = connection.prepareStatement("select * from `mongocamp-unit-test`.people where age < ? 
order by id asc") + preparedStatement.setLong(1, 30) + val result = preparedStatement.executeQuery() + var i = 0 + val arrayBuffer = ArrayBuffer[ResultSet]() + while (result.next()) { + i += 1 + arrayBuffer += result + } + assertEquals(arrayBuffer.size, 99) + assertEquals(i, 99) + } + + test("count on empty table") { + val stmt = connection.createStatement() + val result = stmt.executeQuery("select count(*) as tmp, sum(age) from empty;") + var i = 0 + while (result.next()) { + assertEquals(result.getInt("tmp"), 0) + assertEquals(result.getInt("sum(age)"), 0) + i += 1 + } + assertEquals(i, 1) + } + +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/lucene/LuceneSearchSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/lucene/LuceneSearchSpec.scala deleted file mode 100644 index fa3f4090..00000000 --- a/src/test/scala/dev/mongocamp/driver/mongodb/lucene/LuceneSearchSpec.scala +++ /dev/null @@ -1,135 +0,0 @@ -package dev.mongocamp.driver.mongodb.lucene - -import dev.mongocamp.driver.mongodb._ -import dev.mongocamp.driver.mongodb.dao.PersonSpecification -import dev.mongocamp.driver.mongodb.test.TestDatabase._ - -class LuceneSearchSpec extends PersonSpecification { - lazy val sortByBalance = Map("balance" -> -1) - - "LuceneSearch" should { - - "search with with number in string" in { - val luceneQuery = LuceneQueryConverter.parse("stringNumber: 123", "id") - val search2 = PersonDAO.find(LuceneQueryConverter.toDocument(luceneQuery), sortByBalance).resultList() - search2 must haveSize(0) - val search = PersonDAO.find(LuceneQueryConverter.toDocument(luceneQuery, searchWithValueAndString = true), sortByBalance).resultList() - search must haveSize(1) - search.head.age mustEqual 25 - search.head.name mustEqual "Cheryl Hoffman" - } - - "search with extended query" in { - val luceneQuery = LuceneQueryConverter.parse("(favoriteFruit:\"apple\" AND age:\"25\") OR name:*Cecile* AND -active:false AND 123", "id") - // #region lucene-parser-with-explicit - val search = 
PersonDAO.find(LuceneQueryConverter.toDocument(luceneQuery), sortByBalance).resultList() - // #endregion lucene-parser-with-explicit - search must haveSize(1) - search.head.age mustEqual 25 - search.head.name mustEqual "Terra Salinas" - } - - "search with extended query use implicit" in { - // #region lucene-parser - val luceneQuery = LuceneQueryConverter.parse("(favoriteFruit:\"apple\" AND age:\"25\") OR name:*Cecile* AND -active:false AND 123", "id") - // #endregion lucene-parser - // #region lucene-parser-with-implicit - val search = PersonDAO.find(luceneQuery, sortByBalance).resultList() - // #endregion lucene-parser-with-implicit - search must haveSize(1) - search.head.age mustEqual 25 - search.head.name mustEqual "Terra Salinas" - } - - "between filter for number value" in { - val luceneQuery = LuceneQueryConverter.parse("[1010 TO 1052.3]", "balance") - val search = PersonDAO.find(LuceneQueryConverter.toDocument(luceneQuery), sortByBalance).resultList() - search must haveSize(3) - search.head.age mustEqual 28 - search.head.name mustEqual "Mason Donaldson" - search.last.name mustEqual "Nash Dunn" - } - - "between filter for number value not" in { - val luceneQuery = LuceneQueryConverter.parse("-[1010 TO 1052.3]", "balance") - val search = PersonDAO.find(LuceneQueryConverter.toDocument(luceneQuery), sortByBalance).resultList() - search must haveSize(197) - search.head.age mustEqual 29 - search.head.balance mustEqual 3996.0 - search.head.name mustEqual "Diaz Jacobs" - } - - "between filter for date value" in { - val luceneQuery = LuceneQueryConverter.parse("[2014-04-20T00:00:00Z TO 2014-04-22T23:59:59Z]", "registered") - val search = PersonDAO.find(LuceneQueryConverter.toDocument(luceneQuery), sortByBalance).resultList() - search must haveSize(10) - search.head.age mustEqual 25 - search.head.name mustEqual "Allison Turner" - search.head.balance mustEqual 3961.0 - } - - "equals Query with Date" in { - val luceneQuery = 
LuceneQueryConverter.parse("registered:20140420T004427000+0200", "unbekannt") - val search = PersonDAO.find(LuceneQueryConverter.toDocument(luceneQuery), sortByBalance).resultList() - search must haveSize(1) - search.head.age mustEqual 31 - search.head.name mustEqual "Latasha Mcmillan" - search.head.balance mustEqual 3403.0 - } - - "wildcard at the end" in { - val luceneQuery = LuceneQueryConverter.parse("Latasha*", "name") - val search = PersonDAO.find(LuceneQueryConverter.toDocument(luceneQuery), sortByBalance).resultList() - search must haveSize(1) - search.head.age mustEqual 31 - search.head.name mustEqual "Latasha Mcmillan" - search.head.balance mustEqual 3403.0 - } - - "wildcard at the start" in { - val luceneQuery = LuceneQueryConverter.parse("*Mcmillan", "name") - val search = PersonDAO.find(LuceneQueryConverter.toDocument(luceneQuery), sortByBalance).resultList() - search must haveSize(1) - search.head.age mustEqual 31 - search.head.name mustEqual "Latasha Mcmillan" - search.head.balance mustEqual 3403.0 - } - - "not wildcard at the start" in { - val luceneQuery = LuceneQueryConverter.parse("-name:*Mcmillan", "ube") - val search = PersonDAO.find(LuceneQueryConverter.toDocument(luceneQuery), sortByBalance).resultList() - search must haveSize(199) - } - - "wildcard at the start and end" in { - val luceneQuery = LuceneQueryConverter.parse("*Mcmil*", "name") - val search = PersonDAO.find(LuceneQueryConverter.toDocument(luceneQuery), sortByBalance).resultList() - search must haveSize(1) - search.head.age mustEqual 31 - search.head.name mustEqual "Latasha Mcmillan" - search.head.balance mustEqual 3403.0 - } - - "not wildcard at the start and end" in { - val luceneQuery = LuceneQueryConverter.parse("-name:*Mcmil*", "ube") - val search = PersonDAO.find(LuceneQueryConverter.toDocument(luceneQuery), sortByBalance).resultList() - search must haveSize(199) - } - - "wildcard in the middle" in { - val luceneQuery = LuceneQueryConverter.parse("\"Latasha *millan\"", 
"name") - val search = PersonDAO.find(LuceneQueryConverter.toDocument(luceneQuery), sortByBalance).resultList() - search must haveSize(1) - search.head.age mustEqual 31 - search.head.name mustEqual "Latasha Mcmillan" - search.head.balance mustEqual 3403.0 - } - - "not wildcard in the middle" in { - val luceneQuery = LuceneQueryConverter.parse("-name:\"Latasha*millan\"", "ube") - val search = PersonDAO.find(LuceneQueryConverter.toDocument(luceneQuery), sortByBalance).resultList() - search must haveSize(199) - } - } - -} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/lucene/LuceneSearchSuite.scala b/src/test/scala/dev/mongocamp/driver/mongodb/lucene/LuceneSearchSuite.scala new file mode 100644 index 00000000..7295d44c --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/lucene/LuceneSearchSuite.scala @@ -0,0 +1,148 @@ +package dev.mongocamp.driver.mongodb.lucene + +import dev.mongocamp.driver.mongodb._ +import dev.mongocamp.driver.mongodb.dao.BasePersonSuite +import dev.mongocamp.driver.mongodb.test.TestDatabase._ +import org.mongodb.scala.Document + +import java.util.TimeZone + +class LuceneSearchSuite extends BasePersonSuite { + lazy val sortByBalance: Map[String, Int] = Map("balance" -> -1) + TimeZone.setDefault(TimeZone.getTimeZone("UTC")) + + test("search with with number in string") { + val luceneQuery = LuceneQueryConverter.parse("stringNumber: 123", "id") + val search2 = PersonDAO.find(LuceneQueryConverter.toDocument(luceneQuery), sortByBalance).resultList() + assertEquals(search2.size, 0) + val search = PersonDAO.find(LuceneQueryConverter.toDocument(luceneQuery, searchWithValueAndString = true), sortByBalance).resultList() + assertEquals(search.size, 1) + assertEquals(search.head.age, 25) + assertEquals(search.head.name, "Cheryl Hoffman") + } + + test("search with extended query") { + val luceneQuery = LuceneQueryConverter.parse("(favoriteFruit:\"apple\" AND age:\"25\") OR name:*Cecile* AND -active:false AND 123", "id") + // #region 
lucene-parser-with-explicit + val search = PersonDAO.find(LuceneQueryConverter.toDocument(luceneQuery), sortByBalance).resultList() + // #endregion lucene-parser-with-explicit + assertEquals(search.size, 1) + assertEquals(search.head.age, 25) + assertEquals(search.head.name, "Terra Salinas") + } + + test("search with extended query use implicit") { + // #region lucene-parser + val luceneQuery = LuceneQueryConverter.parse("(favoriteFruit:\"apple\" AND age:\"25\") OR name:*Cecile* AND -active:false AND 123", "id") + // #endregion lucene-parser + // #region lucene-parser-with-implicit + val search = PersonDAO.find(luceneQuery, sortByBalance).resultList() + // #endregion lucene-parser-with-implicit + assertEquals(search.size, 1) + assertEquals(search.head.age, 25) + assertEquals(search.head.name, "Terra Salinas") + } + + test("between filter for number value") { + val luceneQuery = LuceneQueryConverter.parse("[1010 TO 1052.3]", "balance") + val search = PersonDAO.find(LuceneQueryConverter.toDocument(luceneQuery), sortByBalance).resultList() + assertEquals(search.size, 3) + assertEquals(search.head.age, 28) + assertEquals(search.head.name, "Mason Donaldson") + assertEquals(search.last.name, "Nash Dunn") + } + + test("between filter for number value not") { + val luceneQuery = LuceneQueryConverter.parse("-[1010 TO 1052.3]", "balance") + val search = PersonDAO.find(LuceneQueryConverter.toDocument(luceneQuery), sortByBalance).resultList() + assertEquals(search.size, 197) + assertEquals(search.head.age, 29) + assertEquals(search.head.balance, 3996.0) + assertEquals(search.head.name, "Diaz Jacobs") + } + + test("between filter for date value") { + val luceneQuery = LuceneQueryConverter.parse("[2014-04-20T00:00:00Z TO 2014-04-22T23:59:59Z]", "registered") + val luceneDocument = LuceneQueryConverter.toDocument(luceneQuery) + val expected = "Iterable((registered,{\"$lte\": {\"$date\": \"2014-04-22T23:59:59Z\"}, \"$gte\": {\"$date\": \"2014-04-20T00:00:00Z\"}}))" + 
assertEquals(luceneDocument.toString, expected) + val search = PersonDAO.find(luceneDocument, sortByBalance).resultList() + assertEquals(search.size, 7) + assertEquals(search.head.age, 25) + assertEquals(search.head.name, "Allison Turner") + assertEquals(search.head.balance, 3961.0) + } + + test("equals Query with Date") { + val luceneQuery = LuceneQueryConverter.parse("registered:20140419T224427000\\+0200", "unbekannt") + val search = PersonDAO.find(LuceneQueryConverter.toDocument(luceneQuery), sortByBalance).resultList() + assertEquals(search.size, 1) + assertEquals(search.head.age, 31) + assertEquals(search.head.name, "Latasha Mcmillan") + assertEquals(search.head.balance, 3403.0) + } + + test("wildcard at the end") { + val luceneQuery = LuceneQueryConverter.parse("Latasha*", "name") + val search = PersonDAO.find(LuceneQueryConverter.toDocument(luceneQuery), sortByBalance).resultList() + assertEquals(search.size, 1) + assertEquals(search.head.age, 31) + assertEquals(search.head.name, "Latasha Mcmillan") + assertEquals(search.head.balance, 3403.0) + } + + test("wildcard at the start") { + val luceneQuery = LuceneQueryConverter.parse("*Mcmillan", "name") + val search = PersonDAO.find(LuceneQueryConverter.toDocument(luceneQuery), sortByBalance).resultList() + assertEquals(search.size, 1) + assertEquals(search.head.age, 31) + assertEquals(search.head.name, "Latasha Mcmillan") + assertEquals(search.head.balance, 3403.0) + } + + test("not wildcard at the start") { + val luceneQuery = LuceneQueryConverter.parse("-name:*Mcmillan", "ube") + val search = PersonDAO.find(LuceneQueryConverter.toDocument(luceneQuery), sortByBalance).resultList() + assertEquals(search.size, 199) + } + + test("wildcard at the start and end") { + val luceneQuery = LuceneQueryConverter.parse("*Mcmil*", "name") + val search = PersonDAO.find(LuceneQueryConverter.toDocument(luceneQuery), sortByBalance).resultList() + assertEquals(search.size, 1) + assertEquals(search.head.age, 31) + 
assertEquals(search.head.name, "Latasha Mcmillan") + assertEquals(search.head.balance, 3403.0) + } + + test("not wildcard at the start and end") { + val luceneQuery = LuceneQueryConverter.parse("-name:*Mcmil*", "ube") + val search = PersonDAO.find(LuceneQueryConverter.toDocument(luceneQuery), sortByBalance).resultList() + assertEquals(search.size, 199) + } + + test("wildcard in the middle") { + val luceneQuery = LuceneQueryConverter.parse("\"Latasha *millan\"", "name") + val search = PersonDAO.find(LuceneQueryConverter.toDocument(luceneQuery), sortByBalance).resultList() + assertEquals(search.size, 1) + assertEquals(search.head.age, 31) + assertEquals(search.head.name, "Latasha Mcmillan") + assertEquals(search.head.balance, 3403.0) + } + + test("not wildcard in the middle") { + val luceneQuery = LuceneQueryConverter.parse("-name:\"Latasha*millan\"", "ube") + val search = PersonDAO.find(LuceneQueryConverter.toDocument(luceneQuery), sortByBalance).resultList() + assertEquals(search.size, 199) + } + + test("negate query with values in braces") { + val luceneQuery = LuceneQueryConverter.parse("NOT fieldName:('value1' OR 'value2' OR 'value2')", "ube") + val document = LuceneQueryConverter.toDocument(luceneQuery) + assertEquals("{\"$and\": [{\"$nor\": [{\"fieldName\": {\"$eq\": \"value1\"}}, {\"fieldName\": {\"$eq\": \"value2\"}}, {\"fieldName\": {\"$eq\": \"value2\"}}]}]}", document.asInstanceOf[Document].toJson()) + val luceneQuery2 = LuceneQueryConverter.parse("NOT fieldName:('value1' AND 'value2' AND 'value2')", "ube") + val document2 = LuceneQueryConverter.toDocument(luceneQuery2) + assertEquals("{\"$and\": [{\"$nor\": [{\"fieldName\": {\"$eq\": \"value1\"}}, {\"fieldName\": {\"$eq\": \"value2\"}}, {\"fieldName\": {\"$eq\": \"value2\"}}]}]}", document2.asInstanceOf[Document].toJson()) + } + +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/bson/Base.scala b/src/test/scala/dev/mongocamp/driver/mongodb/model/Base.scala similarity index 89% rename from 
src/test/scala/dev/mongocamp/driver/mongodb/bson/Base.scala rename to src/test/scala/dev/mongocamp/driver/mongodb/model/Base.scala index 9327d21a..06b5df7f 100644 --- a/src/test/scala/dev/mongocamp/driver/mongodb/bson/Base.scala +++ b/src/test/scala/dev/mongocamp/driver/mongodb/model/Base.scala @@ -1,9 +1,9 @@ -package dev.mongocamp.driver.mongodb.bson - -import java.util.Date +package dev.mongocamp.driver.mongodb.model import org.bson.types.ObjectId +import java.util.Date + /** Created by tom on 22.01.17. */ case class Base( diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/operation/AggregationSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/operation/AggregationSpec.scala deleted file mode 100644 index 0b885554..00000000 --- a/src/test/scala/dev/mongocamp/driver/mongodb/operation/AggregationSpec.scala +++ /dev/null @@ -1,57 +0,0 @@ -package dev.mongocamp.driver.mongodb.operation - -// #region agg_imports -import dev.mongocamp.driver.mongodb.Aggregate._ -import dev.mongocamp.driver.mongodb._ -import dev.mongocamp.driver.mongodb.dao.PersonSpecification -// #endregion agg_imports - -import dev.mongocamp.driver.mongodb.test.TestDatabase._ -import org.mongodb.scala.bson.conversions.Bson -import org.mongodb.scala.model.Aggregates.{filter, group, sort} -import org.mongodb.scala.model.Filters.{and, equal} - -class AggregationSpec extends PersonSpecification { - - // #region agg_stages - val filterStage: Bson = filter(and(equal("gender", "female"), notNullFilter("balance"))) - - val groupStage: Bson = group(Map("age" -> "$age"), sumField("balance"), firstField("age")) - - val sortStage: Bson = sort(sortByKey("age")) - // #endregion agg_stages - - "Search" should { - - "support aggregation filter" in { - - val pipeline = List(filterStage, sortStage) - - val aggregated = PersonDAO.findAggregated(pipeline).resultList() - - (aggregated.size must be).equalTo(98) - - } - - "support aggregation filter and group" in { - // #region agg_execute - val pipeline = 
List(filterStage, groupStage, sortStage) - - val aggregated = PersonDAO.Raw.findAggregated(pipeline).resultList() - // #endregion agg_execute - - (aggregated.size must be).equalTo(21) - - // #region agg_convert - val list: List[Map[String, Any]] = aggregated - // #endregion agg_convert - list.foreach(m => println(m("age").toString + " -> " + m("balance"))) - - (list.head("age") must be).equalTo(20) - (list.head("balance") must be).equalTo(8333.0) - - } - - } - -} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/operation/AggregationSuite.scala b/src/test/scala/dev/mongocamp/driver/mongodb/operation/AggregationSuite.scala new file mode 100644 index 00000000..2b3299af --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/operation/AggregationSuite.scala @@ -0,0 +1,52 @@ +package dev.mongocamp.driver.mongodb.operation + +// #region agg_imports +import dev.mongocamp.driver.mongodb._ +import dev.mongocamp.driver.mongodb.Aggregate._ +import dev.mongocamp.driver.mongodb.dao.BasePersonSuite +// #endregion agg_imports + +import dev.mongocamp.driver.mongodb.test.TestDatabase._ +import org.mongodb.scala.bson.conversions.Bson +import org.mongodb.scala.model.Aggregates.{ filter, group, sort } +import org.mongodb.scala.model.Filters.{ and, equal } + +class AggregationSuite extends BasePersonSuite { + + // #region agg_stages + val filterStage: Bson = filter(and(equal("gender", "female"), notNullFilter("balance"))) + + val groupStage: Bson = group(Map("age" -> "$age"), sumField("balance"), firstField("age")) + + val sortStage: Bson = sort(sortByKey("age")) + // #endregion agg_stages + + test("support aggregation filter") { + + val pipeline = List(filterStage, sortStage) + + val aggregated = PersonDAO.findAggregated(pipeline).resultList() + + assertEquals(aggregated.size, 98) + + } + + test("support aggregation filter and group") { + // #region agg_execute + val pipeline = List(filterStage, groupStage, sortStage) + + val aggregated = 
PersonDAO.Raw.findAggregated(pipeline).resultList() + // #endregion agg_execute + + assertEquals(aggregated.size, 21) + + // #region agg_convert + val list: List[Map[String, Any]] = aggregated + // #endregion agg_convert + list.foreach(m => println(m("age").toString + " -> " + m("balance"))) + + assertEquals(list.head("age"), 20) + assertEquals(list.head("balance"), 8333.0) + } + +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/operation/BaseSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/operation/BaseSpec.scala deleted file mode 100644 index bc6b90c8..00000000 --- a/src/test/scala/dev/mongocamp/driver/mongodb/operation/BaseSpec.scala +++ /dev/null @@ -1,31 +0,0 @@ -package dev.mongocamp.driver.mongodb.operation - -import dev.mongocamp.driver.mongodb.test.TestDatabase._ -import dev.mongocamp.driver.mongodb._ -import dev.mongocamp.driver.mongodb.dao.PersonSpecification - -class BaseSpec extends PersonSpecification { - - sequential - - "Base Operations" should { - - "must evaluate distinct" in { - - val genderList = PersonDAO.distinctResult("gender") - - genderList must have size 2 - } - - "must evaluate distinct with filter" in { - - val genderList = PersonDAO.distinctResult("gender", Map("gender" -> "male")) - - genderList must have size 1 - } - - } - - override def beforeAll: Unit = BookDAO.drop().result() - -} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/operation/BaseSuite.scala b/src/test/scala/dev/mongocamp/driver/mongodb/operation/BaseSuite.scala new file mode 100644 index 00000000..e9bba86a --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/operation/BaseSuite.scala @@ -0,0 +1,21 @@ +package dev.mongocamp.driver.mongodb.operation + +import dev.mongocamp.driver.mongodb._ +import dev.mongocamp.driver.mongodb.dao.BasePersonSuite +import dev.mongocamp.driver.mongodb.test.TestDatabase._ + +class BaseSuite extends BasePersonSuite { + + test("must evaluate distinct") { + val genderList = 
PersonDAO.distinctResult("gender") + assertEquals(genderList.size, 2) + } + + test("must evaluate distinct with filter") { + val genderList = PersonDAO.distinctResult("gender", Map("gender" -> "male")) + assertEquals(genderList.size, 1) + } + + override def beforeAll(): Unit = BookDAO.drop().result() + +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/operation/CrudSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/operation/CrudSpec.scala deleted file mode 100644 index 5370e5c9..00000000 --- a/src/test/scala/dev/mongocamp/driver/mongodb/operation/CrudSpec.scala +++ /dev/null @@ -1,72 +0,0 @@ -package dev.mongocamp.driver.mongodb.operation - -import dev.mongocamp.driver.mongodb._ -import dev.mongocamp.driver.mongodb.dao.PersonSpecification -import dev.mongocamp.driver.mongodb.database.DatabaseProvider -import dev.mongocamp.driver.mongodb.model.CodecTest -import dev.mongocamp.driver.mongodb.test.TestDatabase._ -import com.typesafe.scalalogging.LazyLogging -import org.bson.conversions.Bson -import org.mongodb.scala.bson.ObjectId -import org.mongodb.scala.model.Filters.equal -import org.mongodb.scala.result.DeleteResult -import org.mongodb.scala.model.Updates._ - -class CrudSpec extends PersonSpecification with LazyLogging { - - sequential - - override def beforeAll(): Unit = { - super.beforeAll() - CodecDao.drop().result() - - } - - "Crud Operations" should { - - "create Document in" in { - val result = CodecDao.insertOne(CodecTest()).result() - - val list: List[CodecTest] = CodecDao.find().resultList() - list.size mustEqual 1 - - } - - "update Document in" in { - var list: List[CodecTest] = CodecDao.find().resultList() - var codec = list.head - codec.id mustEqual 1 - CodecDao.updateOne(Map("id" -> 1), set("id", 2)).result() - list = CodecDao.find().resultList() - codec = list.head - codec.id mustEqual 2 - } - - "replace Document in" in { - var list: List[CodecTest] = CodecDao.find().resultList() - var codec = list.head - codec.id mustEqual 2 - 
CodecDao.replaceOne(codec.copy(id = 1)).result() - list = CodecDao.find().resultList() - codec = list.head - codec.id mustEqual 1 - } - - "delete Document in" in { - val hexString = CodecDao.find().result()._id.toHexString - hexString must not beEmpty - - val result: DeleteResult = - CodecDao.deleteOne(equal(DatabaseProvider.ObjectIdKey, new ObjectId(hexString))).result() - - result.wasAcknowledged() must beTrue - result.getDeletedCount must beEqualTo(1) - - val list: List[CodecTest] = CodecDao.find().resultList() - - list.size mustEqual 0 - } - - } - -} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/operation/CrudSuite.scala b/src/test/scala/dev/mongocamp/driver/mongodb/operation/CrudSuite.scala new file mode 100644 index 00000000..7628cd6a --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/operation/CrudSuite.scala @@ -0,0 +1,61 @@ +package dev.mongocamp.driver.mongodb.operation + +import com.typesafe.scalalogging.LazyLogging +import dev.mongocamp.driver.mongodb._ +import dev.mongocamp.driver.mongodb.dao.BasePersonSuite +import dev.mongocamp.driver.mongodb.database.DatabaseProvider +import dev.mongocamp.driver.mongodb.model.CodecTest +import dev.mongocamp.driver.mongodb.test.TestDatabase._ +import org.mongodb.scala.bson.ObjectId +import org.mongodb.scala.model.Filters.equal +import org.mongodb.scala.model.Updates._ +import org.mongodb.scala.result.DeleteResult + +class CrudSuite extends BasePersonSuite with LazyLogging { + + override def beforeAll(): Unit = { + super.beforeAll() + CodecDao.drop().result() + } + + test("create Document") { + CodecDao.insertOne(CodecTest()).result() + val list: List[CodecTest] = CodecDao.find().resultList() + assertEquals(list.size, 1) + } + + test("update Document") { + var list: List[CodecTest] = CodecDao.find().resultList() + var codec = list.head + assertEquals(codec.id, 1L) + CodecDao.updateOne(Map("id" -> 1), set("id", 2)).result() + list = CodecDao.find().resultList() + codec = list.head + 
assertEquals(codec.id, 2L) + } + + test("replace Document") { + var list: List[CodecTest] = CodecDao.find().resultList() + var codec = list.head + assertEquals(codec.id, 2L) + CodecDao.replaceOne(codec.copy(id = 1)).result() + list = CodecDao.find().resultList() + codec = list.head + assertEquals(codec.id, 1L) + } + + test("delete Document") { + val hexString = CodecDao.find().result()._id.toHexString + assert(hexString != null) + assert(hexString != "") + + val result: DeleteResult = CodecDao.deleteOne(equal(DatabaseProvider.ObjectIdKey, new ObjectId(hexString))).result() + + assert(result.wasAcknowledged()) + assertEquals(result.getDeletedCount, 1L) + + val list: List[CodecTest] = CodecDao.find().resultList() + assertEquals(list.size, 0) + } + +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/operation/IndexSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/operation/IndexSpec.scala deleted file mode 100644 index 60d0bd5d..00000000 --- a/src/test/scala/dev/mongocamp/driver/mongodb/operation/IndexSpec.scala +++ /dev/null @@ -1,127 +0,0 @@ -package dev.mongocamp.driver.mongodb.operation - -import java.util.concurrent.TimeUnit - -import dev.mongocamp.driver.mongodb.test.TestDatabase._ -import dev.mongocamp.driver.mongodb._ -import dev.mongocamp.driver.mongodb.dao.PersonSpecification -import dev.mongocamp.driver.mongodb.database.{DatabaseProvider, MongoIndex} - -import scala.concurrent.duration.Duration - -class IndexSpec extends PersonSpecification { - - "Base Index Operations" should { - - "create / drop indexes for key" in { - - val createIndexResult: String = PersonDAO.createIndexForField("name").result() - - createIndexResult mustEqual "name_1" - - PersonDAO.indexList() must haveSize(2) - val index: MongoIndex = PersonDAO.indexForName("name_1").get - index.expire must beFalse - - PersonDAO.dropIndexForName(createIndexResult).result() - - PersonDAO.indexList() must haveSize(1) - } - - "evaluate has index" in { - - 
PersonDAO.hasIndexForField(DatabaseProvider.ObjectIdKey) must beTrue - - PersonDAO.hasIndexForField("unknown") must beFalse - } - - "create descending index for key" in { - - val createIndexResult: String = PersonDAO.createIndexForFieldWithName("name", sortAscending = false, "myIndex").result() - - createIndexResult mustEqual "myIndex" - - PersonDAO.indexList() must haveSize(2) - PersonDAO.indexForName("myIndex").get - - PersonDAO.dropIndexForName(createIndexResult).result() - - PersonDAO.indexList() must haveSize(1) - } - - "create unique index for key" in { - - val createIndexResult: String = PersonDAO.createUniqueIndexForField("id", sortAscending = false, Some("myUniqueIndex")).result() - - createIndexResult mustEqual "myUniqueIndex" - - PersonDAO.indexList() must haveSize(2) - PersonDAO.indexForName("myUniqueIndex").get - - PersonDAO.dropIndexForName(createIndexResult).result() - - PersonDAO.indexList() must haveSize(1) - } - - "create text index for key" in { - - val createIndexResult: String = PersonDAO.createTextIndexForField("email").result() - - createIndexResult mustEqual "email_text" - - PersonDAO.indexList() must haveSize(2) - PersonDAO.indexForName("email_text").get - - PersonDAO.dropIndexForName(createIndexResult).result() - - PersonDAO.indexList() must haveSize(1) - } - - "create hashed index for key" in { - - val createIndexResult: String = PersonDAO.createHashedIndexForField("email").result() - - createIndexResult mustEqual "email_hashed" - - PersonDAO.indexList() must haveSize(2) - PersonDAO.indexForName("email_hashed").get - - PersonDAO.dropIndexForName(createIndexResult).result() - - PersonDAO.indexList() must haveSize(1) - } - - "create expiring index for key" in { - - val createIndexResult: String = PersonDAO.createExpiringIndexForField("email", Duration(1, TimeUnit.SECONDS)).result() - - createIndexResult mustEqual "email_1" - - PersonDAO.indexList() must haveSize(2) - - val index: MongoIndex = PersonDAO.indexForName("email_1").get - 
index.expire must beTrue - - PersonDAO.dropIndexForName(createIndexResult).result() - - PersonDAO.indexList() must haveSize(1) - } - - "return an index list" in { - - val list = PersonDAO.indexList() - list must haveSize(1) - - val mongoIndex: MongoIndex = list.head - mongoIndex.name mustEqual "_id_" - mongoIndex.fields must contain(DatabaseProvider.ObjectIdKey) - mongoIndex.version mustEqual 2 - mongoIndex.keys must haveSize(1) - mongoIndex.keys.head._1 mustEqual DatabaseProvider.ObjectIdKey - mongoIndex.keys.head._2 mustEqual 1 - - } - - } - -} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/operation/IndexSuite.scala b/src/test/scala/dev/mongocamp/driver/mongodb/operation/IndexSuite.scala new file mode 100644 index 00000000..a91c809c --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/operation/IndexSuite.scala @@ -0,0 +1,77 @@ +package dev.mongocamp.driver.mongodb.operation + +import dev.mongocamp.driver.mongodb._ +import dev.mongocamp.driver.mongodb.dao.BasePersonSuite +import dev.mongocamp.driver.mongodb.database.{ DatabaseProvider, MongoIndex } +import dev.mongocamp.driver.mongodb.test.TestDatabase._ + +import java.util.concurrent.TimeUnit +import scala.concurrent.duration.Duration + +class IndexSuite extends BasePersonSuite { + + test("create / drop indexes for key") { + val createIndexResult: String = PersonDAO.createIndexForField("name").result() + assertEquals(createIndexResult, "name_1") + assertEquals(PersonDAO.indexList().size, 2) + val index: MongoIndex = PersonDAO.indexForName("name_1").get + assert(!index.expire) + PersonDAO.dropIndexForName(createIndexResult).result() + assertEquals(PersonDAO.indexList().size, 1) + } + + test("evaluate has index") { + assert(PersonDAO.hasIndexForField(DatabaseProvider.ObjectIdKey)) + assert(!PersonDAO.hasIndexForField("unknown")) + } + + test("create descending index for key") { + val createIndexResult: String = PersonDAO.createIndexForFieldWithName("name", sortAscending = false, 
"myIndex").result() + assertEquals(createIndexResult, "myIndex") + assertEquals(PersonDAO.indexList().size, 2) + PersonDAO.indexForName("myIndex").get + PersonDAO.dropIndexForName(createIndexResult).result() + assertEquals(PersonDAO.indexList().size, 1) + } + + test("create unique index for key") { + val createIndexResult: String = PersonDAO.createUniqueIndexForField("id", sortAscending = false, Some("myUniqueIndex")).result() + assertEquals(createIndexResult, "myUniqueIndex") + assertEquals(PersonDAO.indexList().size, 2) + PersonDAO.indexForName("myUniqueIndex").get + PersonDAO.dropIndexForName(createIndexResult).result() + assertEquals(PersonDAO.indexList().size, 1) + } + + test("create text index for key") { + val createIndexResult: String = PersonDAO.createTextIndexForField("email").result() + assertEquals(createIndexResult, "email_text") + assertEquals(PersonDAO.indexList().size, 2) + PersonDAO.indexForName("email_text").get + PersonDAO.dropIndexForName(createIndexResult).result() + assertEquals(PersonDAO.indexList().size, 1) + } + + test("create expiring index for key") { + val createIndexResult: String = PersonDAO.createExpiringIndexForField("email", Duration(1, TimeUnit.SECONDS)).result() + assertEquals(createIndexResult, "email_1") + assertEquals(PersonDAO.indexList().size, 2) + val index: MongoIndex = PersonDAO.indexForName("email_1").get + assert(index.expire) + PersonDAO.dropIndexForName(createIndexResult).result() + assertEquals(PersonDAO.indexList().size, 1) + } + + test("return an index list") { + val list = PersonDAO.indexList() + assertEquals(list.size, 1) + val mongoIndex: MongoIndex = list.head + assertEquals(mongoIndex.name, "_id_") + assert(mongoIndex.fields.contains(DatabaseProvider.ObjectIdKey)) + assertEquals(mongoIndex.version, 2) + assertEquals(mongoIndex.keys.size, 1) + assertEquals(mongoIndex.keys.head._1, DatabaseProvider.ObjectIdKey) + assertEquals(mongoIndex.keys.head._2, 1) + } + +} diff --git 
a/src/test/scala/dev/mongocamp/driver/mongodb/operation/SearchSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/operation/SearchSpec.scala deleted file mode 100644 index eb83a710..00000000 --- a/src/test/scala/dev/mongocamp/driver/mongodb/operation/SearchSpec.scala +++ /dev/null @@ -1,72 +0,0 @@ -package dev.mongocamp.driver.mongodb.operation -import dev.mongocamp.driver.MongoImplicits -import dev.mongocamp.driver.mongodb.Sort._ -import dev.mongocamp.driver.mongodb._ -import dev.mongocamp.driver.mongodb.dao.PersonSpecification -import dev.mongocamp.driver.mongodb.model.Person -import dev.mongocamp.driver.mongodb.test.TestDatabase._ - -class SearchSpec extends PersonSpecification with MongoImplicits { - - "Search Operations" should { - - "support findAll" in { - - val findAllResult: List[Person] = PersonDAO.find() - - findAllResult.size must be equalTo PersonDAO.count().result().toInt - - findAllResult.head.name must not beEmpty - - findAllResult.head._id.toString must not beEmpty - - } - - "support findOneById" in { - - val findAllResult: List[Person] = PersonDAO.find() - - val findOneByIdResult: Option[Person] = PersonDAO.findById(findAllResult.head._id) - - findOneByIdResult must beSome[Person] - - findOneByIdResult.get must be equalTo findAllResult.head - } - - "support findOne with Filter" in { - - val findOneResult = PersonDAO.find(Map("id" -> 11)).resultOption() - - findOneResult must beSome[Person] - - findOneResult.get.name must be equalTo "Dyer Mayer" - - PersonDAO.find(Map("id" -> 125)).result().name must be equalTo "Gaines Valentine" - } - - "support findOne with field name and value" in { - - val findOneResult = PersonDAO.find("id", 11).resultOption() - - findOneResult must beSome[Person] - - findOneResult.get.name must be equalTo "Dyer Mayer" - - PersonDAO.find("name", "Gaines Valentine").result().name must be equalTo "Gaines Valentine" - } - - "support findOne with Filter" in { - - val females = PersonDAO.find(Map("gender" -> "female"), 
sortByKey("name")).resultList() - - females.size must be equalTo 98 - - val males = PersonDAO.find(Map("gender" -> "male")).resultList() - - males.size must be equalTo 102 - - } - - } - -} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/operation/SearchSuite.scala b/src/test/scala/dev/mongocamp/driver/mongodb/operation/SearchSuite.scala new file mode 100644 index 00000000..5a5837e5 --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/operation/SearchSuite.scala @@ -0,0 +1,48 @@ +package dev.mongocamp.driver.mongodb.operation +import dev.mongocamp.driver.MongoImplicits +import dev.mongocamp.driver.mongodb._ +import dev.mongocamp.driver.mongodb.Sort._ +import dev.mongocamp.driver.mongodb.dao.BasePersonSuite +import dev.mongocamp.driver.mongodb.model.Person +import dev.mongocamp.driver.mongodb.test.TestDatabase._ + +class SearchSuite extends BasePersonSuite with MongoImplicits { + + test("support findAll") { + val findAllResult: List[Person] = PersonDAO.find() + assertEquals(findAllResult.size, PersonDAO.count().result().toInt) + assert(findAllResult.head.name != null) + assert(findAllResult.head.name != "") + assert(findAllResult.head._id.toString != null) + assert(findAllResult.head._id.toString != "") + } + + test("support findOneById") { + val findAllResult: List[Person] = PersonDAO.find() + val findOneByIdResult: Option[Person] = PersonDAO.findById(findAllResult.head._id) + assert(findOneByIdResult.isDefined) + assertEquals(findOneByIdResult.get, findAllResult.head) + } + + test("support findOne with Filter") { + val findOneResult = PersonDAO.find(Map("id" -> 11)).resultOption() + assert(findOneResult.isDefined) + assertEquals(findOneResult.get.name, "Dyer Mayer") + assertEquals(PersonDAO.find(Map("id" -> 125)).result().name, "Gaines Valentine") + } + + test("support findOne with field name and value") { + val findOneResult = PersonDAO.find("id", 11).resultOption() + assert(findOneResult.isDefined) + assertEquals(findOneResult.get.name, "Dyer 
Mayer") + assertEquals(PersonDAO.find("name", "Gaines Valentine").result().name, "Gaines Valentine") + } + + test("support many with Filter") { + val females = PersonDAO.find(Map("gender" -> "female"), sortByKey("name")).resultList() + assertEquals(females.size, 98) + val males = PersonDAO.find(Map("gender" -> "male")).resultList() + assertEquals(males.size, 102) + } + +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/pagination/PaginationAggregationSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/pagination/PaginationAggregationSpec.scala deleted file mode 100644 index d5337250..00000000 --- a/src/test/scala/dev/mongocamp/driver/mongodb/pagination/PaginationAggregationSpec.scala +++ /dev/null @@ -1,88 +0,0 @@ -package dev.mongocamp.driver.mongodb.pagination - -import dev.mongocamp.driver.mongodb.Aggregate._ -import dev.mongocamp.driver.mongodb.bson.BsonConverter -import dev.mongocamp.driver.mongodb.dao.PersonSpecification -// #agg_imports - -import dev.mongocamp.driver.mongodb.test.TestDatabase._ -import org.mongodb.scala.bson.conversions.Bson -import org.mongodb.scala.model.Aggregates.{ filter, group, sort } -import org.mongodb.scala.model.Filters.{ and, equal } - -class PaginationAggregationSpec extends PersonSpecification { - - val filterStage: Bson = filter(and(equal("gender", "female"), notNullFilter("balance"))) - - val groupStage: Bson = group(Map("age" -> "$age"), sumField("balance"), firstField("age")) - - val sortStage: Bson = sort(sortByKey("age")) - - "Search" should { - - "support aggregation filter" in { - - // #region aggregation-pagination - val pipeline = List(filterStage, sortStage) - - val pagination = MongoPaginatedAggregation(PersonDAO.Raw, pipeline, allowDiskUse = true) - - val page = pagination.paginate(1, 10) - // #endregion aggregation-pagination - - (pagination.countResult must be).equalTo(98) - - (page.paginationInfo.allCount must be).equalTo(98) - - (page.paginationInfo.pagesCount must be).equalTo(10) - - 
(page.databaseObjects.size must be).equalTo(10) - } - - "support aggregation filter and group" in { - // #agg_execute - val pipeline = List(filterStage, groupStage, sortStage) - - val pagination = MongoPaginatedAggregation(PersonDAO, pipeline, allowDiskUse = true) - - val page = pagination.paginate(1, 10) - - (pagination.countResult must be).equalTo(21) - - (page.paginationInfo.allCount must be).equalTo(21) - - (page.paginationInfo.pagesCount must be).equalTo(3) - - (page.databaseObjects.size must be).equalTo(10) - - // #agg_convert - val list: List[Map[String, Any]] = page.databaseObjects.map(d => BsonConverter.asMap(d)) - // #agg_convert - list.foreach(m => println(m("age").toString + " -> " + m("balance"))) - - (list.head("age") must be).equalTo(20) - (list.head("balance") must be).equalTo(8333.0) - - } - - "aggregation with empty response" in { - val pipeline = List(filter(and(equal("unknown", "filter"))), groupStage, sortStage) - - val pagination = MongoPaginatedAggregation(PersonDAO, pipeline, allowDiskUse = true) - - val page = pagination.paginate(1, 10) - - (pagination.countResult must be).equalTo(0) - - (page.paginationInfo.allCount must be).equalTo(0) - - (page.paginationInfo.pagesCount must be).equalTo(0) - - (page.databaseObjects.size must be).equalTo(0) - - - } - - } - -} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/pagination/PaginationAggregationSuite.scala b/src/test/scala/dev/mongocamp/driver/mongodb/pagination/PaginationAggregationSuite.scala new file mode 100644 index 00000000..08343e8c --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/pagination/PaginationAggregationSuite.scala @@ -0,0 +1,79 @@ +package dev.mongocamp.driver.mongodb.pagination + +import dev.mongocamp.driver.mongodb.Aggregate._ +import dev.mongocamp.driver.mongodb.bson.BsonConverter +import dev.mongocamp.driver.mongodb.dao.BasePersonSuite +import dev.mongocamp.driver.mongodb.test.TestDatabase._ +import org.mongodb.scala.bson.conversions.Bson +import 
org.mongodb.scala.model.Aggregates.{ filter, group, sort } +import org.mongodb.scala.model.Filters.{ and, equal } + +class PaginationAggregationSuite extends BasePersonSuite { + + val filterStage: Bson = filter(and(equal("gender", "female"), notNullFilter("balance"))) + + val groupStage: Bson = group(Map("age" -> "$age"), sumField("balance"), firstField("age")) + + val sortStage: Bson = sort(sortByKey("age")) + + test("support aggregation filter") { + + // #region aggregation-pagination + val pipeline = List(filterStage, sortStage) + + val pagination = MongoPaginatedAggregation(PersonDAO.Raw, pipeline, allowDiskUse = true) + + val page = pagination.paginate(1, 10) + // #endregion aggregation-pagination + + assertEquals(pagination.countResult, 98L) + + assertEquals(page.paginationInfo.allCount, 98L) + + assertEquals(page.paginationInfo.pagesCount, 10L) + + assertEquals(page.databaseObjects.size, 10) + } + + test("support aggregation filter and group") { + // #agg_execute + val pipeline = List(filterStage, groupStage, sortStage) + + val pagination = MongoPaginatedAggregation(PersonDAO, pipeline, allowDiskUse = true) + + val page = pagination.paginate(1, 10) + + assertEquals(pagination.countResult, 21L) + + assertEquals(page.paginationInfo.allCount, 21L) + + assertEquals(page.paginationInfo.pagesCount, 3L) + + assertEquals(page.databaseObjects.size, 10) + + // #agg_convert + val list: List[Map[String, Any]] = page.databaseObjects.map(d => BsonConverter.asMap(d)) + // #agg_convert + list.foreach(m => println(m("age").toString + " -> " + m("balance"))) + + assertEquals(list.head("age"), 20) + assertEquals(list.head("balance"), 8333.0) + } + + test("aggregation with empty response") { + val pipeline = List(filter(and(equal("unknown", "filter"))), groupStage, sortStage) + + val pagination = MongoPaginatedAggregation(PersonDAO, pipeline, allowDiskUse = true) + + val page = pagination.paginate(1, 10) + + assertEquals(pagination.countResult, 0L) + + 
assertEquals(page.paginationInfo.allCount, 0L) + + assertEquals(page.paginationInfo.pagesCount, 0L) + + assertEquals(page.databaseObjects.size, 0) + } + +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/pagination/PaginationFilterSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/pagination/PaginationFilterSpec.scala deleted file mode 100644 index 3118c3a5..00000000 --- a/src/test/scala/dev/mongocamp/driver/mongodb/pagination/PaginationFilterSpec.scala +++ /dev/null @@ -1,62 +0,0 @@ -package dev.mongocamp.driver.mongodb.pagination -import dev.mongocamp.driver.MongoImplicits -import dev.mongocamp.driver.mongodb.Sort._ -import dev.mongocamp.driver.mongodb._ -import dev.mongocamp.driver.mongodb.dao.PersonSpecification -import dev.mongocamp.driver.mongodb.test.TestDatabase._ - -class PaginationFilterSpec extends PersonSpecification with MongoImplicits { - - "Search Operations" should { - - "support findAll" in { - - val pagination = MongoPaginatedFilter(PersonDAO) - - val page = pagination.paginate(1, 10) - - val personCollectionCount = PersonDAO.count().result().toInt - page.paginationInfo.allCount mustEqual personCollectionCount - pagination.countResult mustEqual personCollectionCount - - page.databaseObjects.size must beEqualTo(10) - - page.databaseObjects.head.name must not beEmpty - - page.databaseObjects.head._id.toString must not beEmpty - - } - - "support with Filter" in { - // #region filter-pagination - val paginationFemale = MongoPaginatedFilter(PersonDAO, Map("gender" -> "female"), sortByKey("name")) - - val pageFemale = paginationFemale.paginate(1, 10) - // #endregion filter-pagination - - paginationFemale.countResult mustEqual 98 - pageFemale.paginationInfo.pagesCount mustEqual 10 - pageFemale.paginationInfo.allCount mustEqual 98 - pageFemale.paginationInfo.page mustEqual 1 - pageFemale.paginationInfo.perPage mustEqual 10 - - pageFemale.databaseObjects.size mustEqual 10 - pageFemale.databaseObjects.head.name mustEqual "Adele Melton" - - 
val paginationMales = MongoPaginatedFilter(PersonDAO, Map("gender" -> "male")) - val pageMale = paginationMales.paginate(1, 10) - - paginationMales.countResult mustEqual 102 - pageMale.paginationInfo.pagesCount mustEqual 11 - pageMale.paginationInfo.allCount mustEqual 102 - pageMale.paginationInfo.page mustEqual 1 - pageMale.paginationInfo.perPage mustEqual 10 - - pageMale.databaseObjects.size mustEqual 10 - pageMale.databaseObjects.head.name mustEqual "Bowen Leon" - - } - - } - -} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/pagination/PaginationFilterSuite.scala b/src/test/scala/dev/mongocamp/driver/mongodb/pagination/PaginationFilterSuite.scala new file mode 100644 index 00000000..e493b4ec --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/pagination/PaginationFilterSuite.scala @@ -0,0 +1,60 @@ +package dev.mongocamp.driver.mongodb.pagination +import dev.mongocamp.driver.MongoImplicits +import dev.mongocamp.driver.mongodb._ +import dev.mongocamp.driver.mongodb.Sort._ +import dev.mongocamp.driver.mongodb.dao.BasePersonSuite +import dev.mongocamp.driver.mongodb.test.TestDatabase._ + +class PaginationFilterSuite extends BasePersonSuite with MongoImplicits { + + test("support findAll") { + + val pagination = MongoPaginatedFilter(PersonDAO) + + val page = pagination.paginate(1, 10) + + val personCollectionCount = PersonDAO.count().result().toInt + assertEquals(page.paginationInfo.allCount, personCollectionCount.toLong) + assertEquals(pagination.countResult, personCollectionCount.toLong) + + assertEquals(page.databaseObjects.size, 10) + + assertNotEquals(page.databaseObjects.head.name, null) + assertNotEquals(page.databaseObjects.head.name, "") + + assertNotEquals(page.databaseObjects.head._id, null) + assertNotEquals(page.databaseObjects.head._id.toHexString, "") + + } + + test("support with Filter") { + // #region filter-pagination + val paginationFemale = MongoPaginatedFilter(PersonDAO, Map("gender" -> "female"), sortByKey("name")) + + val 
pageFemale = paginationFemale.paginate(1, 10) + // #endregion filter-pagination + + assertEquals(paginationFemale.countResult, 98L) + assertEquals(pageFemale.paginationInfo.pagesCount, 10L) + assertEquals(pageFemale.paginationInfo.allCount, 98L) + assertEquals(pageFemale.paginationInfo.page, 1L) + assertEquals(pageFemale.paginationInfo.perPage, 10L) + + assertEquals(pageFemale.databaseObjects.size, 10) + assertEquals(pageFemale.databaseObjects.head.name, "Adele Melton") + + val paginationMales = MongoPaginatedFilter(PersonDAO, Map("gender" -> "male")) + val pageMale = paginationMales.paginate(1, 10) + + assertEquals(paginationMales.countResult, 102L) + assertEquals(pageMale.paginationInfo.pagesCount, 11L) + assertEquals(pageMale.paginationInfo.allCount, 102L) + assertEquals(pageMale.paginationInfo.page, 1L) + assertEquals(pageMale.paginationInfo.perPage, 10L) + + assertEquals(pageMale.databaseObjects.size, 10) + assertEquals(pageMale.databaseObjects.head.name, "Bowen Leon") + + } + +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/pagination/PaginationIterationSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/pagination/PaginationIterationSpec.scala deleted file mode 100644 index 93a187cc..00000000 --- a/src/test/scala/dev/mongocamp/driver/mongodb/pagination/PaginationIterationSpec.scala +++ /dev/null @@ -1,65 +0,0 @@ -package dev.mongocamp.driver.mongodb.pagination - -import com.typesafe.scalalogging.LazyLogging -import dev.mongocamp.driver.MongoImplicits -import dev.mongocamp.driver.mongodb.Aggregate._ -import dev.mongocamp.driver.mongodb._ -import dev.mongocamp.driver.mongodb.dao.PersonSpecification -import dev.mongocamp.driver.mongodb.test.TestDatabase._ -import org.mongodb.scala.bson.conversions.Bson -import org.mongodb.scala.model.Aggregates.{filter, sort} -import org.mongodb.scala.model.Filters.{and, equal} -class PaginationIterationSpec extends PersonSpecification with MongoImplicits with LazyLogging { - - "Pagination Iteration" should { - 
- "support with Filter" in { - val paginationFemale = MongoPaginatedFilter(PersonDAO, Map("gender" -> "female"), sortByKey("name")) - - val pageFemale = paginationFemale.paginate(1, 10) - - pageFemale.paginationInfo.allCount mustEqual 98 - - var i = 0 - - // #region foreach-with-rows - paginationFemale.foreach(5) { person => - { - logger.trace(person.toString) - i = i + 1 - } - } - i mustEqual 98 - // #endregion foreach-with-rows - - } - - "support with aggregation" in { - val filterStage: Bson = filter(and(equal("gender", "female"), notNullFilter("balance"))) - - val sortStage: Bson = sort(sortByKey("age")) - - val pipeline = List(filterStage, sortStage) - - val pagination = MongoPaginatedAggregation(PersonDAO, pipeline, allowDiskUse = true) - - val page = pagination.paginate(1, 10) - - page.paginationInfo.allCount mustEqual 98 - - // #region foreach-default-rows - var i = 0 - pagination.foreach { element => - { - logger.trace(element.toJson()) - i = i + 1 - } - } - i mustEqual 98 - // #endregion foreach-default-rows - - } - - } - -} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/pagination/PaginationIterationSuite.scala b/src/test/scala/dev/mongocamp/driver/mongodb/pagination/PaginationIterationSuite.scala new file mode 100644 index 00000000..1251cb18 --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/pagination/PaginationIterationSuite.scala @@ -0,0 +1,64 @@ +package dev.mongocamp.driver.mongodb.pagination + +import com.typesafe.scalalogging.LazyLogging +import dev.mongocamp.driver.MongoImplicits +import dev.mongocamp.driver.mongodb._ +import dev.mongocamp.driver.mongodb.Aggregate._ +import dev.mongocamp.driver.mongodb.dao.BasePersonSuite +import dev.mongocamp.driver.mongodb.model.Person +import dev.mongocamp.driver.mongodb.test.TestDatabase._ +import org.mongodb.scala.bson.conversions.Bson +import org.mongodb.scala.model.Aggregates.{ filter, sort } +import org.mongodb.scala.model.Filters.{ and, equal } + +import 
scala.collection.mutable.ArrayBuffer + +class PaginationIterationSuite extends BasePersonSuite with MongoImplicits with LazyLogging { + + test("support with Filter") { + val paginationFemale = MongoPaginatedFilter(PersonDAO, Map("gender" -> "female"), sortByKey("name")) + + val pageFemale = paginationFemale.paginate(1, 10) + + assertEquals(pageFemale.paginationInfo.allCount, 98L) + + var i = 0 + val personArray = new ArrayBuffer[Person]() + // #region foreach-with-rows + paginationFemale.foreach(5) { person => + { + logger.trace(person.toString) + personArray += person + i = i + 1 + } + } + assertEquals(i, 98) + // #endregion foreach-with-rows + } + + test("support with aggregation") { + val filterStage: Bson = filter(and(equal("gender", "female"), notNullFilter("balance"))) + + val sortStage: Bson = sort(sortByKey("age")) + + val pipeline = List(filterStage, sortStage) + + val pagination = MongoPaginatedAggregation(PersonDAO, pipeline, allowDiskUse = true) + + val page = pagination.paginate(1, 10) + + assertEquals(page.paginationInfo.allCount, 98L) + + // #region foreach-default-rows + var i = 0 + pagination.foreach { element => + { + logger.trace(element.toJson()) + i = i + 1 + } + } + assertEquals(i, 98) + // #endregion foreach-default-rows + } + +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/relation/RelationDemoApp.scala b/src/test/scala/dev/mongocamp/driver/mongodb/relation/RelationDemoApp.scala index a100dc74..e4854ee6 100644 --- a/src/test/scala/dev/mongocamp/driver/mongodb/relation/RelationDemoApp.scala +++ b/src/test/scala/dev/mongocamp/driver/mongodb/relation/RelationDemoApp.scala @@ -12,7 +12,7 @@ object RelationDemoApp extends App { NodeDAO.insertMany(List(Node(1, 0), Node(2, 1), Node(3, 1))).result() - val node2 = NodeDAO.find("id", 2).result() + val node2: Node = NodeDAO.find("id", 2).result() println(node2) @@ -20,12 +20,12 @@ object RelationDemoApp extends App { println(node2Parent) - var node1 = NodeDAO.find("id", 1).result() + var 
node1: Node = NodeDAO.find("id", 1).result() println(node1.children) println(node1.children) - val node3 = NodeDAO.find("id", 3).result() + val node3: Node = NodeDAO.find("id", 3).result() node3.setParent(node2) println(node1.children) diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/relation/RelationDemoDatabase.scala b/src/test/scala/dev/mongocamp/driver/mongodb/relation/RelationDemoDatabase.scala index 6b112a41..0285cc18 100644 --- a/src/test/scala/dev/mongocamp/driver/mongodb/relation/RelationDemoDatabase.scala +++ b/src/test/scala/dev/mongocamp/driver/mongodb/relation/RelationDemoDatabase.scala @@ -1,10 +1,10 @@ package dev.mongocamp.driver.mongodb.relation +import dev.mongocamp.driver.mongodb._ import dev.mongocamp.driver.mongodb.database.DatabaseProvider -import dev.mongocamp.driver.mongodb.{MongoDAO, _} -import org.bson.codecs.configuration.CodecRegistries.fromProviders +import dev.mongocamp.driver.mongodb.json._ +import io.circe.generic.auto._ import org.mongodb.scala.bson.ObjectId -import org.mongodb.scala.bson.codecs.Macros._ import org.mongodb.scala.result.UpdateResult object RelationDemoDatabase { @@ -41,8 +41,8 @@ object RelationDemoDatabase { // #region user_dao object UserDAO extends MongoDAO[User](provider, "user") { - lazy val loginRelation = OneToOneRelationship(LoginDAO, "id") - lazy val friendsRelation = OneToManyRelationship(SimplePersonDAO, "userId") + lazy val loginRelation: OneToOneRelationship[Login] = OneToOneRelationship(LoginDAO, "id") + lazy val friendsRelation: OneToManyRelationship[SimplePerson] = OneToManyRelationship(SimplePersonDAO, "userId") } // #endregion user_dao @@ -50,15 +50,11 @@ object RelationDemoDatabase { object SimplePersonDAO extends MongoDAO[SimplePerson](provider, "friend") - // #region registry - private val registry = fromProviders(classOf[Node], classOf[User], classOf[Login], classOf[SimplePerson]) - // #endregion registry - - val provider = DatabaseProvider.fromPath("unit.test.mongo", registry) + val 
provider: DatabaseProvider = DatabaseProvider.fromPath("unit.test.mongo") object NodeDAO extends MongoDAO[Node](provider, "nodes") { - lazy val parentRelation = OneToOneRelationship(this, "id") - lazy val childrenRelation = OneToManyRelationship(this, "parentId") + lazy val parentRelation: OneToOneRelationship[Node] = OneToOneRelationship(this, "id") + lazy val childrenRelation: OneToManyRelationship[Node] = OneToManyRelationship(this, "parentId") } } diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/relation/RelationDemoSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/relation/RelationDemoSpec.scala deleted file mode 100644 index 4f09c6cf..00000000 --- a/src/test/scala/dev/mongocamp/driver/mongodb/relation/RelationDemoSpec.scala +++ /dev/null @@ -1,70 +0,0 @@ -package dev.mongocamp.driver.mongodb.relation - -import dev.mongocamp.driver.mongodb.test.TestDatabase.PersonDAO -import dev.mongocamp.driver.mongodb._ -import dev.mongocamp.driver.mongodb.relation.RelationDemoDatabase._ -import org.specs2.mutable.{Before, Specification} - -class RelationDemoSpec extends Specification with Before { - - sequential - - "Relations" should { - - "support OneToOne" in { - - val list: List[User] = UserDAO.find("name", "Massey Sears").resultList() - - list.size must be equalTo 1 - - val user = list.head - val login = user.login - - login must beSome[Login] - - login.get.email must be equalTo "masseysears@kog.com" - - } - - "support OneToMany" in { - - val list: List[User] = UserDAO.find("name", "Massey Sears").resultList() - - list.size must be equalTo 1 - - val user = list.head - - val friends = user.friends - - friends.size must be equalTo 5 - - friends.head.userId must be equalTo user.id - - friends.head.name must be equalTo "Katie Holden" - - } - - } - - override def before: Any = { - - try { - UserDAO.drop().result() - LoginDAO.drop().result() - SimplePersonDAO.drop().result() - } - catch { - case e: Exception => - } - - val personList = 
PersonDAO.find().resultList() - personList.foreach { person => - UserDAO.insertOne(User(person.id, person.name, person.guid)).result() - LoginDAO.insertOne(Login(person.guid, person.email, person.email.reverse)).result() - person.friends.foreach { f => - SimplePersonDAO.insertOne(SimplePerson((person.id + 11) * (f.id + 3), f.name, person.id)).result() - } - } - - } -} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/relation/RelationDemoSuite.scala b/src/test/scala/dev/mongocamp/driver/mongodb/relation/RelationDemoSuite.scala new file mode 100644 index 00000000..d8e76677 --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/relation/RelationDemoSuite.scala @@ -0,0 +1,52 @@ +package dev.mongocamp.driver.mongodb.relation + +import dev.mongocamp.driver.mongodb._ +import dev.mongocamp.driver.mongodb.relation.RelationDemoDatabase._ +import dev.mongocamp.driver.mongodb.test.TestDatabase.PersonDAO + +class RelationDemoSuite extends munit.FunSuite { + + test("support OneToOne") { + val list: List[User] = UserDAO.find("name", "Massey Sears").resultList() + assertEquals(list.size, 1) + + val user = list.head + val login = user.login + + assert(login.isDefined) + assertEquals(login.get.email, "masseysears@kog.com") + } + + test("support OneToMany") { + val list: List[User] = UserDAO.find("name", "Massey Sears").resultList() + assertEquals(list.size, 1) + + val user = list.head + val friends = user.friends + + assertEquals(friends.size, 5) + assertEquals(friends.head.userId, user.id) + assertEquals(friends.head.name, "Katie Holden") + } + + override def beforeAll(): Unit = { + super.beforeAll() + try { + UserDAO.drop().result() + LoginDAO.drop().result() + SimplePersonDAO.drop().result() + } + catch { + case e: Exception => + } + + val personList = PersonDAO.find().resultList() + personList.foreach { person => + UserDAO.insertOne(User(person.id, person.name, person.guid)).result() + LoginDAO.insertOne(Login(person.guid, person.email, 
person.email.reverse)).result() + person.friends.foreach { f => + SimplePersonDAO.insertOne(SimplePerson((person.id + 11) * (f.id + 3), f.name, person.id)).result() + } + } + } +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/schema/SchemaSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/schema/SchemaSpec.scala deleted file mode 100644 index e548be1c..00000000 --- a/src/test/scala/dev/mongocamp/driver/mongodb/schema/SchemaSpec.scala +++ /dev/null @@ -1,66 +0,0 @@ -package dev.mongocamp.driver.mongodb.schema - -import dev.mongocamp.driver.mongodb.test.TestDatabase.{ PersonDAO, PersonDocumentDAO } -import dev.mongocamp.driver.mongodb._ -import dev.mongocamp.driver.mongodb.relation.RelationDemoDatabase._ -import org.specs2.mutable.{ Before, Specification } - -import scala.util.Try - -class SchemaSpec extends Specification with Before { - - sequential - - "Schema" should { - "analyse Json Schema from document dao" in { - // #region schema-analysis - val schemaExplorer = new SchemaExplorer() - val schemaAnalysis = schemaExplorer.analyzeSchema(PersonDocumentDAO) - // #endregion schema-analysis - schemaAnalysis.count must beEqualTo(200) - schemaAnalysis.sample must beEqualTo(200) - schemaAnalysis.percentageOfAnalysed must beEqualTo(1.0) - schemaAnalysis.fields.size must beEqualTo(20) - val idField = schemaAnalysis.fields.head - idField.name must beEqualTo("_id") - idField.fieldTypes.head.fieldType must beEqualTo("objectId") - idField.fieldTypes.head.count must beEqualTo(200) - idField.fieldTypes.head.percentageOfParent must beEqualTo(1.0) - } - - "detect Json Schema from document dao" in { - // #region schema-explorer - val schemaExplorer = new SchemaExplorer() - val schema = schemaExplorer.detectSchema(PersonDocumentDAO) - val schemaJson = schema.toJson - // #endregion schema-explorer - schemaJson.contains("\"$schema\":\"https://json-schema.org/draft/2020-12/schema\"") must beTrue - schemaJson.contains("\"Friends\":") must beTrue - 
schemaJson.contains("\"title\":\"Friends\"") must beTrue - schemaJson.contains("\"People\":") must beTrue - schemaJson.contains("\"title\":\"People\"") must beTrue - val idPattern1 = schemaJson.contains("\"_id\":{\"pattern\":\"^([a-fA-F0-9]{2})+$\",\"type\":\"string\"}") - val idPattern2 = schemaJson.contains("\"_id\":{\"type\":\"string\",\"pattern\":\"^([a-fA-F0-9]{2})+$\"}") - (idPattern1 || idPattern2) must beTrue - schemaJson.contains("\"isActive\":{\"type\":\"boolean\"}") must beTrue - } - - } - - override def before: Any = { - Try { - UserDAO.drop().result() - LoginDAO.drop().result() - SimplePersonDAO.drop().result() - } - val personList = PersonDAO.find().resultList() - personList.foreach { person => - UserDAO.insertOne(User(person.id, person.name, person.guid)).result() - LoginDAO.insertOne(Login(person.guid, person.email, person.email.reverse)).result() - person.friends.foreach { f => - SimplePersonDAO.insertOne(SimplePerson((person.id + 11) * (f.id + 3), f.name, person.id)).result() - } - } - - } -} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/schema/SchemaSuite.scala b/src/test/scala/dev/mongocamp/driver/mongodb/schema/SchemaSuite.scala new file mode 100644 index 00000000..ff059105 --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/schema/SchemaSuite.scala @@ -0,0 +1,61 @@ +package dev.mongocamp.driver.mongodb.schema + +import dev.mongocamp.driver.mongodb._ +import dev.mongocamp.driver.mongodb.dao.BasePersonSuite +import dev.mongocamp.driver.mongodb.relation.RelationDemoDatabase._ +import dev.mongocamp.driver.mongodb.test.TestDatabase.{ PersonDAO, PersonDocumentDAO } + +import scala.util.Try + +class SchemaSuite extends BasePersonSuite { + + override def beforeAll(): Unit = { + super.beforeAll() + Try { + UserDAO.drop().result() + LoginDAO.drop().result() + SimplePersonDAO.drop().result() + } + val personList = PersonDAO.find().resultList() + personList.foreach { person => + UserDAO.insertOne(User(person.id, person.name, 
person.guid)).result() + LoginDAO.insertOne(Login(person.guid, person.email, person.email.reverse)).result() + person.friends.foreach { f => + SimplePersonDAO.insertOne(SimplePerson((person.id + 11) * (f.id + 3), f.name, person.id)).result() + } + } + } + + test("analyse Json Schema from document dao") { + // #region schema-analysis + val schemaExplorer = new SchemaExplorer() + val schemaAnalysis = schemaExplorer.analyzeSchema(PersonDocumentDAO) + // #endregion schema-analysis + assertEquals(schemaAnalysis.count, 200L) + assertEquals(schemaAnalysis.sample, 200L) + assertEquals(schemaAnalysis.percentageOfAnalysed, 1.0) + assertEquals(schemaAnalysis.fields.size, 20) + val idField = schemaAnalysis.fields.head + assertEquals(idField.name, "_id") + assertEquals(idField.fieldTypes.head.fieldType, "objectId") + assertEquals(idField.fieldTypes.head.count, 200L) + assertEquals(idField.fieldTypes.head.percentageOfParent, 1.0) + } + + test("detect Json Schema from document dao") { + // #region schema-explorer + val schemaExplorer = new SchemaExplorer() + val schema = schemaExplorer.detectSchema(PersonDocumentDAO) + val schemaJson = schema.toJson + // #endregion schema-explorer + assert(schemaJson.contains("\"$schema\":\"https://json-schema.org/draft/2020-12/schema\"")) + assert(schemaJson.contains("\"Friends\":")) + assert(schemaJson.contains("\"title\":\"Friends\"")) + assert(schemaJson.contains("\"People\":")) + assert(schemaJson.contains("\"title\":\"People\"")) + val idPattern1 = schemaJson.contains("\"_id\":{\"pattern\":\"^([a-fA-F0-9]{2})+$\",\"type\":\"string\"}") + val idPattern2 = schemaJson.contains("\"_id\":{\"type\":\"string\",\"pattern\":\"^([a-fA-F0-9]{2})+$\"}") + assert(idPattern1 || idPattern2) + assert(schemaJson.contains("\"isActive\":{\"type\":\"boolean\"}")) + } +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/server/ServerConfigSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/server/ServerConfigSpec.scala deleted file mode 100644 index 
794f5f32..00000000 --- a/src/test/scala/dev/mongocamp/driver/mongodb/server/ServerConfigSpec.scala +++ /dev/null @@ -1,27 +0,0 @@ -package dev.mongocamp.driver.mongodb.server - -import org.specs2.mutable.Specification - -class ServerConfigSpec extends Specification { - - sequential - - "ServerConfig" should { - - "be created" in { - val config = ServerConfig() - config.host mustEqual "localhost" - config.port mustEqual 28018 - config.serverName mustEqual "local-mongodb-server" - } - - "be created from config path" in { - val config = ServerConfig.fromPath("unit.test.local.mongo.server") - config.host mustEqual "localhost" - config.port mustEqual 28028 - config.serverName mustEqual "local-unit-test-server" - config.h2BackendConfig.get.inMemory must beFalse - config.h2BackendConfig.get.path must beNone - } - } -} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/server/ServerConfigSuite.scala b/src/test/scala/dev/mongocamp/driver/mongodb/server/ServerConfigSuite.scala new file mode 100644 index 00000000..5836b1e9 --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/server/ServerConfigSuite.scala @@ -0,0 +1,23 @@ +package dev.mongocamp.driver.mongodb.server + +import munit.FunSuite + +class ServerConfigSuite extends FunSuite { + + test("ServerConfig should be created") { + val config = ServerConfig() + assertEquals(config.host, "localhost") + assertEquals(config.port, 28018) + assertEquals(config.serverName, "local-mongodb-server") + } + + test("ServerConfig should be created from config path") { + val config = ServerConfig.fromPath("unit.test.local.mongo.server") + assertEquals(config.host, "localhost") + assertEquals(config.port, 28028) + assertEquals(config.serverName, "local-unit-test-server") + assert(!config.h2BackendConfig.get.inMemory) + assert(config.h2BackendConfig.get.path.isEmpty) + } + +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/sql/DeleteSqlSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/sql/DeleteSqlSpec.scala 
deleted file mode 100644 index f92ce1f6..00000000 --- a/src/test/scala/dev/mongocamp/driver/mongodb/sql/DeleteSqlSpec.scala +++ /dev/null @@ -1,61 +0,0 @@ -package dev.mongocamp.driver.mongodb.sql - -import dev.mongocamp.driver.mongodb.model.{Grade, Score} -import dev.mongocamp.driver.mongodb.test.TestDatabase -import dev.mongocamp.driver.mongodb.{GenericObservable, MongoDAO} -import org.bson.types.ObjectId -import org.specs2.mutable.Specification -import org.specs2.specification.BeforeEach - -class DeleteSqlSpec extends Specification with BeforeEach { - sequential - - object GradeDAO extends MongoDAO[Grade](TestDatabase.provider, "universityGrades") - - override def before(): Unit = { - this.GradeDAO.drop().result() - this.GradeDAO - .insertMany( - List( - Grade(new ObjectId(), 1, 2, List(Score(1.20, "test"), Score(120, "test1"))), - Grade(new ObjectId(), 2, 4, List(Score(10, "test2"), Score(20, "test3"))), - Grade(new ObjectId(), 3, 7, List(Score(10, "test4"), Score(20, "test5"))) - ) - ) - .result() - } - - "MongoSqlQueryHolder" should { - - "delete with where" in { - val queryConverter = MongoSqlQueryHolder("DELETE FROM universityGrades WHERE studentId = 1;") - val selectResponse = queryConverter.run(TestDatabase.provider).resultList() - selectResponse.size mustEqual 1 - selectResponse.head.getBoolean("wasAcknowledged") mustEqual true - selectResponse.head.getLong("deletedCount") mustEqual 1 - val documents = GradeDAO.count().result() - documents mustEqual 2 - } - - "delete all" in { - val queryConverter = MongoSqlQueryHolder("DELETE FROM universityGrades;") - val selectResponse = queryConverter.run(TestDatabase.provider).resultList() - selectResponse.size mustEqual 1 - selectResponse.head.getBoolean("wasAcknowledged") mustEqual true - selectResponse.head.getLong("deletedCount") mustEqual 3 - val documents = GradeDAO.count().result() - documents mustEqual 0 - } - - "delete all with or" in { - val queryConverter = MongoSqlQueryHolder("DELETE FROM 
universityGrades WHERE classId = 4 or classId = 7;") - val selectResponse = queryConverter.run(TestDatabase.provider).resultList() - selectResponse.size mustEqual 1 - selectResponse.head.getBoolean("wasAcknowledged") mustEqual true - selectResponse.head.getLong("deletedCount") mustEqual 2 - val documents = GradeDAO.count().result() - documents mustEqual 1 - } - - } -} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/sql/DeleteSqlSuite.scala b/src/test/scala/dev/mongocamp/driver/mongodb/sql/DeleteSqlSuite.scala new file mode 100644 index 00000000..b1e3f112 --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/sql/DeleteSqlSuite.scala @@ -0,0 +1,57 @@ +package dev.mongocamp.driver.mongodb.sql + +import dev.mongocamp.driver.mongodb.GenericObservable +import dev.mongocamp.driver.mongodb.model.{ Grade, Score } +import dev.mongocamp.driver.mongodb.test.TestDatabase +import dev.mongocamp.driver.mongodb.test.UniversityDatabase.GradeDAO +import org.bson.types.ObjectId + +class DeleteSqlSuite extends munit.FunSuite { + + def prepareDatabase(): Unit = { + GradeDAO.drop().result() + GradeDAO + .insertMany( + List( + Grade(new ObjectId(), 1, 2, List(Score(1.20, "test"), Score(120, "test1"))), + Grade(new ObjectId(), 2, 4, List(Score(10, "test2"), Score(20, "test3"))), + Grade(new ObjectId(), 3, 7, List(Score(10, "test4"), Score(20, "test5"))) + ) + ) + .result() + } + + test("delete with where") { + prepareDatabase() + val queryConverter = MongoSqlQueryHolder("DELETE FROM universityGrades WHERE studentId = 1;") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + assertEquals(selectResponse.size, 1) + assert(selectResponse.head.getBoolean("wasAcknowledged")) + assertEquals(selectResponse.head.getLong("deletedCount").toLong, 1L) + val documents = GradeDAO.count().result() + assertEquals(documents, 2L) + } + + test("delete all") { + prepareDatabase() + val queryConverter = MongoSqlQueryHolder("DELETE FROM universityGrades;") + val 
selectResponse = queryConverter.run(TestDatabase.provider).resultList() + assertEquals(selectResponse.size, 1) + assert(selectResponse.head.getBoolean("wasAcknowledged")) + assertEquals(selectResponse.head.getLong("deletedCount").toLong, 3L) + val documents = GradeDAO.count().result() + assertEquals(documents, 0L) + } + + test("delete all with or") { + prepareDatabase() + val queryConverter = MongoSqlQueryHolder("DELETE FROM universityGrades WHERE classId = 4 or classId = 7;") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + assertEquals(selectResponse.size, 1) + assert(selectResponse.head.getBoolean("wasAcknowledged"), true) + assertEquals(selectResponse.head.getLong("deletedCount").toLong, 2L) + val documents = GradeDAO.count().result() + assertEquals(documents, 1L) + } + +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/sql/InsertSqlSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/sql/InsertSqlSpec.scala deleted file mode 100644 index d674a8db..00000000 --- a/src/test/scala/dev/mongocamp/driver/mongodb/sql/InsertSqlSpec.scala +++ /dev/null @@ -1,58 +0,0 @@ -package dev.mongocamp.driver.mongodb.sql - -import dev.mongocamp.driver.mongodb.GenericObservable -import dev.mongocamp.driver.mongodb.dao.PersonSpecification -import dev.mongocamp.driver.mongodb.test.TestDatabase -import org.mongodb.scala.Document - -class InsertSqlSpec extends PersonSpecification { - - "MongoSqlQueryHolder" should { - - "insert" in { - val dao = TestDatabase.provider.dao("table_name") - dao.drop().resultList() - val queryConverter = MongoSqlQueryHolder("INSERT INTO table_name (column1, column2, column3) VALUES ('value1', 123, '2022-01-01T00:00:00.000Z');") - val selectResponse = queryConverter.run(TestDatabase.provider).resultList() - selectResponse.size mustEqual 1 - selectResponse.head.getBoolean("wasAcknowledged") mustEqual true - selectResponse.head.get("insertedIds") mustNotEqual null - val documents = dao.find().resultList() - 
documents.size mustEqual 1 - documents.head.getString("column1") mustEqual "value1" - documents.head.getLong("column2") mustEqual 123 - } - - "insert 2 rows" in { - val dao = TestDatabase.provider.dao("table_name") - dao.drop().resultList() - val queryConverter = MongoSqlQueryHolder( - "INSERT INTO table_name (column1, column2, column3) VALUES ('value1', 123, '2022-01-01T00:00:00.000Z'), ('value2', 456, '2022-02-01T00:00:00.000Z');" - ) - val selectResponse = queryConverter.run(TestDatabase.provider).resultList() - selectResponse.size mustEqual 1 - selectResponse.head.getBoolean("wasAcknowledged") mustEqual true - selectResponse.head.get("insertedIds") mustNotEqual null - val documents = dao.find().resultList() - documents.size mustEqual 2 - documents.head.getString("column1") mustEqual "value1" - documents.head.getLong("column2") mustEqual 123 - } - - "insert not named" in { - var errorCaught = false - try { - val dao = TestDatabase.provider.dao("table_name") - dao.drop().resultList() - MongoSqlQueryHolder("INSERT INTO table_name VALUES ('value1', 123, '2022-01-01T00:00:00.000Z');") - } - catch { - case e: Exception => - e.getMessage mustEqual "column names must be specified" - errorCaught = true - } - errorCaught mustEqual true - } - - } -} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/sql/InsertSqlSuite.scala b/src/test/scala/dev/mongocamp/driver/mongodb/sql/InsertSqlSuite.scala new file mode 100644 index 00000000..a389ba89 --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/sql/InsertSqlSuite.scala @@ -0,0 +1,54 @@ +package dev.mongocamp.driver.mongodb.sql + +import dev.mongocamp.driver.mongodb.GenericObservable +import dev.mongocamp.driver.mongodb.dao.BasePersonSuite +import dev.mongocamp.driver.mongodb.test.TestDatabase + +class InsertSqlSuite extends BasePersonSuite { + + test("insert") { + val dao = TestDatabase.provider.dao("table_name") + dao.drop().resultList() + val queryConverter = MongoSqlQueryHolder("INSERT INTO table_name 
(column1, column2, column3) VALUES ('value1', 123, '2022-01-01T00:00:00.000Z');") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + assertEquals(selectResponse.size, 1) + assert(selectResponse.head.getBoolean("wasAcknowledged")) + assertNotEquals(selectResponse.head.get("insertedIds"), null) + val documents = dao.find().resultList() + assertEquals(documents.size, 1) + assertEquals(documents.head.getString("column1"), "value1") + assertEquals(documents.head.getLong("column2").toLong, 123L) + } + + test("insert 2 rows") { + val dao = TestDatabase.provider.dao("table_name") + dao.drop().resultList() + val queryConverter = MongoSqlQueryHolder( + "INSERT INTO table_name (column1, column2, column3) VALUES ('value1', 123, '2022-01-01T00:00:00.000Z'), ('value2', 456, '2022-02-01T00:00:00.000Z');" + ) + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + assertEquals(selectResponse.size, 1) + assert(selectResponse.head.getBoolean("wasAcknowledged")) + assertNotEquals(selectResponse.head.get("insertedIds"), null) + val documents = dao.find().resultList() + assertEquals(documents.size, 2) + assertEquals(documents.head.getString("column1"), "value1") + assertEquals(documents.head.getLong("column2").toLong, 123L) + } + + test("insert not named") { + var errorCaught = false + try { + val dao = TestDatabase.provider.dao("table_name") + dao.drop().resultList() + MongoSqlQueryHolder("INSERT INTO table_name VALUES ('value1', 123, '2022-01-01T00:00:00.000Z');") + } + catch { + case e: Exception => + assertEquals(e.getMessage, "column names must be specified") + errorCaught = true + } + assertEquals(errorCaught, true) + } + +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/sql/OtherSqlSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/sql/OtherSqlSpec.scala deleted file mode 100644 index 2926b9f3..00000000 --- a/src/test/scala/dev/mongocamp/driver/mongodb/sql/OtherSqlSpec.scala +++ /dev/null @@ -1,143 +0,0 @@ 
-package dev.mongocamp.driver.mongodb.sql - -import com.mongodb.client.model.IndexOptions -import dev.mongocamp.driver.mongodb.model.{Grade, Score} -import dev.mongocamp.driver.mongodb.test.TestDatabase -import dev.mongocamp.driver.mongodb._ -import dev.mongocamp.driver.mongodb.dao.PersonSpecification -import org.bson.types.ObjectId -import org.mongodb.scala.model.Sorts.ascending -import org.specs2.mutable.Specification -import org.specs2.specification.BeforeEach - -import java.sql.SQLException -import scala.concurrent.duration.DurationInt - -class OtherSqlSpec extends PersonSpecification with BeforeEach{ - sequential - - object GradeDAO extends MongoDAO[Grade](TestDatabase.provider, "universityGrades") - - override protected def before: Any = { - this.GradeDAO.drop().result() - this.GradeDAO - .insertMany( - List( - Grade(new ObjectId(), 1, 2, List(Score(1.20, "test"), Score(120, "test1"))), - Grade(new ObjectId(), 2, 4, List(Score(10, "test2"), Score(20, "test3"))), - Grade(new ObjectId(), 3, 7, List(Score(10, "test4"), Score(20, "test5"))) - ) - ) - .result() - this.GradeDAO.createIndex(ascending("studentId"), new IndexOptions().name("student_idx")).result() - } - - override def beforeAll(): Unit = { - super.beforeAll() - } - - "MongoSqlQueryHolder" should { - - "drop collection" in { - val queryConverter = MongoSqlQueryHolder("Drop table universityGrades;") - val selectResponse = queryConverter.run(TestDatabase.provider).resultList() - selectResponse.size mustEqual 1 - selectResponse.head.getBoolean("wasAcknowledged") mustEqual true - val grade = GradeDAO.count(Map("studentId" -> 1)).result() - grade mustEqual 0 - val collections = TestDatabase.provider.collectionNames() - collections must not contain "universityGrades" - } - - "catch sql error on converting sql" in { - var errorCaught = false - try { - MongoSqlQueryHolder("blub from universityGrades;") - } catch { - case _: SQLException => - errorCaught = true - } - errorCaught mustEqual true - } - - "truncate 
collection" in { - val queryConverter = MongoSqlQueryHolder("TRUNCATE TABLE universityGrades;") - val selectResponse = queryConverter.run(TestDatabase.provider).resultList() - selectResponse.size mustEqual 1 - selectResponse.head.getBoolean("wasAcknowledged") mustEqual true - val grade = GradeDAO.count(Map("studentId" -> 1)).result() - grade mustEqual 0 - val collections = TestDatabase.provider.collectionNames() - collections.contains("universityGrades") must beTrue - } - - "create index " in { - val queryConverter = MongoSqlQueryHolder("CREATE INDEX idx_name ON people (name);") - val selectResponse = queryConverter.run(TestDatabase.provider).resultList() - selectResponse.size mustEqual 1 - selectResponse.head.getStringValue("indexName") mustEqual "idx_name" - val indices = TestDatabase.provider.collection("people").listIndexes().resultList() - indices.find(_.getString("name") == "idx_name") must beSome - } - - "create unique index " in { - val queryConverter = MongoSqlQueryHolder("CREATE unique INDEX uidx_name ON people (email);") - val selectResponse = queryConverter.run(TestDatabase.provider).resultList() - selectResponse.size mustEqual 1 - selectResponse.head.getStringValue("indexName") mustEqual "uidx_name" - val indices = TestDatabase.provider.collection("people").listIndexes().resultList() - indices.find(_.getString("name") == "uidx_name") must beSome - } - - "create index with multi" in { - val queryConverter = MongoSqlQueryHolder("CREATE INDEX idx_multiname ON people (name, gender);") - val selectResponse = queryConverter.run(TestDatabase.provider).resultList() - selectResponse.size mustEqual 1 - selectResponse.head.getStringValue("indexName") mustEqual "idx_multiname" - val indices = TestDatabase.provider.collection("people").listIndexes().resultList() - indices.find(_.getString("name") == "idx_multiname") must beSome - } - - "drop index " in { - val queryConverter = MongoSqlQueryHolder("DROP INDEX universityGrades.student_idx;") - val selectResponse = 
queryConverter.run(TestDatabase.provider).resultList() - selectResponse.size mustEqual 1 - selectResponse.head.getStringValue("indexName") mustEqual "student_idx" - val indices = TestDatabase.provider.collection("universityGrades").listIndexes().resultList() - indices.find(_.getString("name") == "student_idx") must beNone - } - - "show tables" in { - val queryConverter = MongoSqlQueryHolder("show tables;") - val selectResponse = queryConverter.run(TestDatabase.provider).resultList() - selectResponse.size must be greaterThanOrEqualTo(1) - val filteredDocuments = selectResponse.filter(d => d.getStringValue("name").equalsIgnoreCase("people")) - filteredDocuments.head.getStringValue("name") mustEqual "people" - } - - "show databases" in { - val queryConverter = MongoSqlQueryHolder("SHOW DATABASES;") - val selectResponse = queryConverter.run(TestDatabase.provider).resultList() - selectResponse.size must be greaterThanOrEqualTo(1) - } - - "show schemas" in { - val queryConverter = MongoSqlQueryHolder("SHOW SCHEMAS;") - val selectResponse = queryConverter.run(TestDatabase.provider).resultList() - selectResponse.size must be greaterThanOrEqualTo(1) - } - - "show databases" in { - val queryConverter = MongoSqlQueryHolder("SHOW databases;") - val selectResponse = queryConverter.run(TestDatabase.provider).resultList() - selectResponse.size must be greaterThanOrEqualTo(1) - } - - "show schemas" in { - val queryConverter = MongoSqlQueryHolder("show SCHEMAS;") - val selectResponse = queryConverter.run(TestDatabase.provider).resultList() - selectResponse.size must be greaterThanOrEqualTo(1) - } - } - -} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/sql/OtherSqlSuite.scala b/src/test/scala/dev/mongocamp/driver/mongodb/sql/OtherSqlSuite.scala new file mode 100644 index 00000000..f551f090 --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/sql/OtherSqlSuite.scala @@ -0,0 +1,142 @@ +package dev.mongocamp.driver.mongodb.sql + +import 
com.mongodb.client.model.IndexOptions +import dev.mongocamp.driver.mongodb._ +import dev.mongocamp.driver.mongodb.dao.BasePersonSuite +import dev.mongocamp.driver.mongodb.model.{ Grade, Score } +import dev.mongocamp.driver.mongodb.test.TestDatabase +import dev.mongocamp.driver.mongodb.test.UniversityDatabase.GradeDAO +import org.bson.types.ObjectId +import org.mongodb.scala.model.Sorts.ascending + +import java.sql.SQLException + +class OtherSqlSuite extends BasePersonSuite { + + def prepareDatabase(): Unit = { + GradeDAO.drop().result() + GradeDAO + .insertMany( + List( + Grade(new ObjectId(), 1, 2, List(Score(1.20, "test"), Score(120, "test1"))), + Grade(new ObjectId(), 2, 4, List(Score(10, "test2"), Score(20, "test3"))), + Grade(new ObjectId(), 3, 7, List(Score(10, "test4"), Score(20, "test5"))) + ) + ) + .result() + GradeDAO.createIndex(ascending("studentId"), new IndexOptions().name("student_idx")).result() + } + + test("drop collection") { + prepareDatabase() + val queryConverter = MongoSqlQueryHolder("Drop table universityGrades;") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + assertEquals(selectResponse.size, 1) + assert(selectResponse.head.getBoolean("wasAcknowledged")) + val grade = GradeDAO.count(Map("studentId" -> 1)).result() + assertEquals(grade, 0L) + val collections = TestDatabase.provider.collectionNames() + assert(!collections.contains("universityGrades")) + } + + test("catch sql error on converting sql") { + prepareDatabase() + var errorCaught = false + try MongoSqlQueryHolder("blub from universityGrades;") + catch { + case _: SQLException => + errorCaught = true + } + assertEquals(errorCaught, true) + } + + test("truncate collection") { + prepareDatabase() + val queryConverter = MongoSqlQueryHolder("TRUNCATE TABLE universityGrades;") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + assertEquals(selectResponse.size, 1) + assert(selectResponse.head.getBoolean("wasAcknowledged")) + val 
grade = GradeDAO.count(Map("studentId" -> 1)).result() + assertEquals(grade, 0L) + val collections = TestDatabase.provider.collectionNames() + assert(collections.contains("universityGrades")) + } + + test("create index") { + prepareDatabase() + val queryConverter = MongoSqlQueryHolder("CREATE INDEX idx_name ON people (name);") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + assertEquals(selectResponse.size, 1) + assertEquals(selectResponse.head.getStringValue("indexName"), "idx_name") + val indices = TestDatabase.provider.collection("people").listIndexes().resultList() + assert(indices.exists(_.getString("name") == "idx_name")) + } + + test("create unique index") { + prepareDatabase() + val queryConverter = MongoSqlQueryHolder("CREATE unique INDEX uidx_name ON people (email);") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + assertEquals(selectResponse.size, 1) + assertEquals(selectResponse.head.getStringValue("indexName"), "uidx_name") + val indices = TestDatabase.provider.collection("people").listIndexes().resultList() + assert(indices.exists(_.getString("name") == "uidx_name")) + } + + test("create index with multi") { + prepareDatabase() + val queryConverter = MongoSqlQueryHolder("CREATE INDEX idx_multiname ON people (name, gender);") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + assertEquals(selectResponse.size, 1) + assertEquals(selectResponse.head.getStringValue("indexName"), "idx_multiname") + val indices = TestDatabase.provider.collection("people").listIndexes().resultList() + assert(indices.exists(_.getString("name") == "idx_multiname")) + } + + test("drop index") { + prepareDatabase() + val queryConverter = MongoSqlQueryHolder("DROP INDEX universityGrades.student_idx;") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + assertEquals(selectResponse.size, 1) + assertEquals(selectResponse.head.getStringValue("indexName"), 
"student_idx") + val indices = TestDatabase.provider.collection("universityGrades").listIndexes().resultList() + assertEquals(indices.find(_.getString("name") == "student_idx"), None) + } + + test("show tables") { + prepareDatabase() + val queryConverter = MongoSqlQueryHolder("show tables;") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + assertEquals(selectResponse.nonEmpty, true) + val filteredDocuments = selectResponse.filter(d => d.getStringValue("name").equalsIgnoreCase("people")) + assertEquals(filteredDocuments.head.getStringValue("name"), "people") + } + + test("show databases") { + prepareDatabase() + val queryConverter = MongoSqlQueryHolder("SHOW DATABASES;") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + assert(selectResponse.nonEmpty) + } + + test("show schemas") { + prepareDatabase() + val queryConverter = MongoSqlQueryHolder("SHOW SCHEMAS;") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + assertEquals(selectResponse.nonEmpty, true) + } + + test("show databases again") { + prepareDatabase() + val queryConverter = MongoSqlQueryHolder("SHOW databases;") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + assertEquals(selectResponse.nonEmpty, true) + } + + test("show schemas again") { + prepareDatabase() + val queryConverter = MongoSqlQueryHolder("show SCHEMAS;") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + assertEquals(selectResponse.nonEmpty, true) + } + +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/sql/SelectSqlSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/sql/SelectSqlSpec.scala deleted file mode 100644 index 2274c3c2..00000000 --- a/src/test/scala/dev/mongocamp/driver/mongodb/sql/SelectSqlSpec.scala +++ /dev/null @@ -1,194 +0,0 @@ -package dev.mongocamp.driver.mongodb.sql - -import dev.mongocamp.driver.mongodb.GenericObservable -import 
dev.mongocamp.driver.mongodb.dao.PersonSpecification -import dev.mongocamp.driver.mongodb.lucene.LuceneQueryConverter -import dev.mongocamp.driver.mongodb.test.TestDatabase -import dev.mongocamp.driver.mongodb.test.TestDatabase.PersonDAO -import org.mongodb.scala.bson.BsonDocument - -class SelectSqlSpec extends PersonSpecification { - - "MongoSqlQueryHolder Select" should { - "simple sql" in { - val queryConverter = MongoSqlQueryHolder("select id, guid, name, age, balance from people where age < 30 order by id asc") - val selectResponse = queryConverter.run(TestDatabase.provider).resultList() - selectResponse.size mustEqual 99 - selectResponse.head.getInteger("age") mustEqual 25 - selectResponse.head.getString("guid") mustEqual "a17be99a-8913-4bb6-8f14-16d4fa1b3559" - } - - "simple sql with schema" in { - val queryConverter = MongoSqlQueryHolder("select * from `mongocamp-unit-test`.`people`") - val selectResponse = queryConverter.run(TestDatabase.provider).resultList() - queryConverter.getCollection mustEqual "mongocamp-unit-test:people" - selectResponse.size mustEqual 200 - selectResponse.head.getString("name") mustEqual "Cheryl Hoffman" - selectResponse.head.getLong("id") mustEqual 0 - } - - "sql with in query" in { - val queryConverter = MongoSqlQueryHolder("select id, guid, name, age, balance from people where age in (30, 18, 25, 22) order by id asc") - val selectResponse = queryConverter.run(TestDatabase.provider).resultList() - selectResponse.size mustEqual 41 - selectResponse.head.getInteger("age") mustEqual 25 - selectResponse.head.getString("guid") mustEqual "a17be99a-8913-4bb6-8f14-16d4fa1b3559" - } - - "sql with not in query" in { - val queryConverter = MongoSqlQueryHolder("select id, guid, name, age, balance from people where age not in (30, 18, 25, 22) order by id asc") - val selectResponse = queryConverter.run(TestDatabase.provider).resultList() - selectResponse.size mustEqual 159 - selectResponse.head.getInteger("age") mustEqual 40 - 
selectResponse.head.getString("guid") mustEqual "6ee53e07-2e61-48cd-9bc9-b3505a0438f3" - } - - "and sql" in { - val queryConverter = MongoSqlQueryHolder("select id, guid, name, age, balance from people where age < 30 and (age < 30 or age > 30) order by id asc") - val selectResponse = queryConverter.run(TestDatabase.provider).resultList() - selectResponse.size mustEqual 99 - selectResponse.head.getInteger("age") mustEqual 25 - selectResponse.head.getString("guid") mustEqual "a17be99a-8913-4bb6-8f14-16d4fa1b3559" - } - - "and with count" in { - // #region initialize-query-holder - val queryConverter = MongoSqlQueryHolder("select count(*) as anz from people where age < 30 and (age < 30 or age > 30) order by id asc") - // #endregion initialize-query-holder - // #region query-holder-run - val selectResponse = queryConverter.run(TestDatabase.provider).resultList() - // #endregion query-holder-run - selectResponse.head.getInteger("anz") mustEqual 99 - // #region extract-collection - queryConverter.getCollection mustEqual "people" - // #endregion extract-collection - // #region select-keys - queryConverter.getKeysForEmptyDocument mustEqual Set("anz") - // #endregion select-keys - // #region has-function-call - queryConverter.hasFunctionCallInSelect mustEqual true - // #endregion has-function-call - } - - "simple select all sql" in { - val queryConverter = MongoSqlQueryHolder("select * from people where age < 30 order by id asc") - val selectResponse = queryConverter.run(TestDatabase.provider).resultList() - selectResponse.size mustEqual 99 - selectResponse.head.getInteger("age") mustEqual 25 - selectResponse.head.getString("guid") mustEqual "a17be99a-8913-4bb6-8f14-16d4fa1b3559" - } - - "simple select between" in { - val queryConverter = MongoSqlQueryHolder("select age, guid as internal from people where balance BETWEEN 1500 AND 2000") - val selectResponse = queryConverter.run(TestDatabase.provider).resultList() - selectResponse.size mustEqual 41 - 
selectResponse.head.getInteger("age") mustEqual 40 - selectResponse.head.getString("internal") mustEqual "6ee53e07-2e61-48cd-9bc9-b3505a0438f3" - } - - "simple select not between" in { - val queryConverter = MongoSqlQueryHolder("select age, guid as internal from people where balance not BETWEEN 1500 AND 2000") - val selectResponse = queryConverter.run(TestDatabase.provider).resultList() - selectResponse.size mustEqual 159 - selectResponse.head.getInteger("age") mustEqual 25 - selectResponse.head.getString("internal") mustEqual "a17be99a-8913-4bb6-8f14-16d4fa1b3559" - } - - "search with with number in string" in { - val queryConverter = MongoSqlQueryHolder( - "select p1.id, p1.guid, p1.name, p2.age, p2.balance from people as p1 join people as p2 on p1.id = p2.id where p2.age < 30 order by p2.id asc" - ) - val selectResponse = queryConverter.run(TestDatabase.provider).resultList() - selectResponse.size mustEqual 99 - val document = selectResponse.head - document.getString("guid") mustEqual "a17be99a-8913-4bb6-8f14-16d4fa1b3559" - document.getInteger("age") mustEqual 25 - } - - "search on join without on expression" in { - val queryConverter = MongoSqlQueryHolder( - "select p1.id, p1.guid, p1.name, p2.age, p2.balance from people as p1, people as p2 where p1.id = p2.id and p2.age < 30 order by p2.id asc" - ) - val selectResponse = queryConverter.run(TestDatabase.provider).resultList() - selectResponse.size mustEqual 99 - val document = selectResponse.head - document.getString("guid") mustEqual "a17be99a-8913-4bb6-8f14-16d4fa1b3559" - document("p2").asInstanceOf[BsonDocument].getInt32("age").getValue mustEqual 25 - } - - "is not null" in { - val queryConverter = MongoSqlQueryHolder("select * from people where age is not null") - val selectResponse = queryConverter.run(TestDatabase.provider).resultList() - selectResponse.size mustEqual 200 - val document = selectResponse.head - document.getString("guid") mustEqual "a17be99a-8913-4bb6-8f14-16d4fa1b3559" - 
document.getInteger("age") mustEqual 25 - } - - "is null" in { - val queryConverter = MongoSqlQueryHolder("select * from people where blubber is null") - val selectResponse = queryConverter.run(TestDatabase.provider).resultList() - selectResponse.size mustEqual 200 - val document = selectResponse.head - document.getString("guid") mustEqual "a17be99a-8913-4bb6-8f14-16d4fa1b3559" - document.getInteger("age") mustEqual 25 - } - - "only count" in { - val queryConverter = MongoSqlQueryHolder("select count(*) as tmp, sum(age) from people;") - val selectResponse = queryConverter.run(TestDatabase.provider).resultList() - selectResponse.size mustEqual 1 - val document = selectResponse.head - document.getInteger("tmp") mustEqual 200 - document.getInteger("sum(age)") mustEqual 5961 - } - - "group by with count" in { - val queryConverter = MongoSqlQueryHolder("select age, count(*) as tmp, sum(age) from people group by age order by age;") - val selectResponse = queryConverter.run(TestDatabase.provider).resultList() - selectResponse.size mustEqual 21 - val document = selectResponse.head - document.getInteger("age") mustEqual 20 - document.getInteger("tmp") mustEqual 4 - } - - "having filter" in { - val queryConverter = MongoSqlQueryHolder("select age, count(*) as tmp, sum(age) from people group by age having count(*) > 10 order by age;") - val selectResponse = queryConverter.run(TestDatabase.provider).resultList() - selectResponse.size mustEqual 9 - val document = selectResponse.head - document.getInteger("age") mustEqual 21 - document.getInteger("tmp") mustEqual 11 - } - - "with limit 5" in { - val queryConverter = MongoSqlQueryHolder("select age, count(*) as tmp, sum(age) from people group by age having count(*) > 10 order by age limit 5;") - val selectResponse = queryConverter.run(TestDatabase.provider).resultList() - selectResponse.size mustEqual 5 - val document = selectResponse.head - document.getInteger("age") mustEqual 21 - document.getInteger("tmp") mustEqual 11 - } - - 
"with limit 5 and offset 10" in { - val queryConverter = MongoSqlQueryHolder("select age, count(*) as tmp, sum(age) from people group by age having count(*) > 10 order by age limit 5 offset 5;") - val selectResponse = queryConverter.run(TestDatabase.provider).resultList() - selectResponse.size mustEqual 4 - val document = selectResponse.head - document.getInteger("age") mustEqual 27 - document.getInteger("tmp") mustEqual 12 - document.getInteger("sum(age)") mustEqual 324 - } - - "destinct" in { - val queryConverter = MongoSqlQueryHolder("select distinct favoriteFruit, count(*) from people order by count(*) desc;") - val selectResponse = queryConverter.run(TestDatabase.provider).resultList() - selectResponse.size mustEqual 3 - val document = selectResponse.head - document.getString("favoriteFruit") mustEqual "strawberry" - document.getInteger("count(*)") mustEqual 71 - selectResponse.map(_.getString("favoriteFruit")) mustEqual List("strawberry", "apple", "banana") - } - - } -} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/sql/SelectSqlSuite.scala b/src/test/scala/dev/mongocamp/driver/mongodb/sql/SelectSqlSuite.scala new file mode 100644 index 00000000..db92379b --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/sql/SelectSqlSuite.scala @@ -0,0 +1,181 @@ +package dev.mongocamp.driver.mongodb.sql + +import dev.mongocamp.driver.mongodb.GenericObservable +import dev.mongocamp.driver.mongodb.dao.BasePersonSuite +import dev.mongocamp.driver.mongodb.test.TestDatabase +import org.mongodb.scala.bson.BsonDocument + +class SelectSqlSuite extends BasePersonSuite { + + test("simple sql") { + val queryConverter = MongoSqlQueryHolder("select id, guid, name, age, balance from people where age < 30 order by id asc") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + assertEquals(selectResponse.size, 99) + assertEquals(selectResponse.head.getInteger("age").toInt, 25) + assertEquals(selectResponse.head.getString("guid"), 
"a17be99a-8913-4bb6-8f14-16d4fa1b3559") + } + + test("simple sql with schema") { + val queryConverter = MongoSqlQueryHolder("select * from `mongocamp-unit-test`.`people`") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + assertEquals(queryConverter.getCollection, "mongocamp-unit-test:people") + assertEquals(selectResponse.size, 200) + assertEquals(selectResponse.head.getString("name"), "Cheryl Hoffman") + assertEquals(selectResponse.head.getLong("id").toInt, 0) + } + + test("sql with in query") { + val queryConverter = MongoSqlQueryHolder("select id, guid, name, age, balance from people where age in (30, 18, 25, 22) order by id asc") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + assertEquals(selectResponse.size, 41) + assertEquals(selectResponse.head.getInteger("age").toInt, 25) + assertEquals(selectResponse.head.getString("guid"), "a17be99a-8913-4bb6-8f14-16d4fa1b3559") + } + + test("sql with not in query") { + val queryConverter = MongoSqlQueryHolder("select id, guid, name, age, balance from people where age not in (30, 18, 25, 22) order by id asc") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + assertEquals(selectResponse.size, 159) + assertEquals(selectResponse.head.getInteger("age").toInt, 40) + assertEquals(selectResponse.head.getString("guid"), "6ee53e07-2e61-48cd-9bc9-b3505a0438f3") + } + + test("and sql") { + val queryConverter = MongoSqlQueryHolder("select id, guid, name, age, balance from people where age < 30 and (age < 30 or age > 30) order by id asc") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + assertEquals(selectResponse.size, 99) + assertEquals(selectResponse.head.getInteger("age").toInt, 25) + assertEquals(selectResponse.head.getString("guid"), "a17be99a-8913-4bb6-8f14-16d4fa1b3559") + } + + test("and with count") { + val queryConverter = MongoSqlQueryHolder("select count(*) as anz from people where age < 30 and (age 
< 30 or age > 30) order by id asc") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + assertEquals(selectResponse.head.getInteger("anz").toInt, 99) + assertEquals(queryConverter.getCollection, "people") + assertEquals(queryConverter.getKeysFromSelect, List("anz")) + assertEquals(queryConverter.hasFunctionCallInSelect, true) + } + + test("simple select all sql") { + val queryConverter = MongoSqlQueryHolder("select * from people where age < 30 order by id asc") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + assertEquals(selectResponse.size, 99) + assertEquals(selectResponse.head.getInteger("age").toInt, 25) + assertEquals(selectResponse.head.getString("guid"), "a17be99a-8913-4bb6-8f14-16d4fa1b3559") + } + + test("simple select between") { + val queryConverter = MongoSqlQueryHolder("select age, guid as internal from people where balance BETWEEN 1500 AND 2000") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + assertEquals(selectResponse.size, 41) + assertEquals(selectResponse.head.getInteger("age").toInt, 40) + assertEquals(selectResponse.head.getString("internal"), "6ee53e07-2e61-48cd-9bc9-b3505a0438f3") + } + + test("simple select not between") { + val queryConverter = MongoSqlQueryHolder("select age, guid as internal from people where balance not BETWEEN 1500 AND 2000") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + assertEquals(selectResponse.size, 159) + assertEquals(selectResponse.head.getInteger("age").toInt, 25) + assertEquals(selectResponse.head.getString("internal"), "a17be99a-8913-4bb6-8f14-16d4fa1b3559") + } + + test("search with with number in string") { + val queryConverter = MongoSqlQueryHolder( + "select p1.id, p1.guid, p1.name, p2.age, p2.balance from people as p1 join people as p2 on p1.id = p2.id where p2.age < 30 order by p2.id asc" + ) + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + 
assertEquals(selectResponse.size, 99) + val document = selectResponse.head + assertEquals(document.getString("guid"), "a17be99a-8913-4bb6-8f14-16d4fa1b3559") + assertEquals(document.getInteger("age").toInt, 25) + } + + test("search on join without on expression") { + val queryConverter = MongoSqlQueryHolder( + "select p1.id, p1.guid, p1.name, p2.age, p2.balance from people as p1, people as p2 where p1.id = p2.id and p2.age < 30 order by p2.id asc" + ) + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + assertEquals(selectResponse.size, 99) + val document = selectResponse.head + assertEquals(document.getString("guid"), "a17be99a-8913-4bb6-8f14-16d4fa1b3559") + assertEquals(document("p2").asInstanceOf[BsonDocument].getInt32("age").getValue, 25) + } + + test("is not null") { + val queryConverter = MongoSqlQueryHolder("select * from people where age is not null") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + assertEquals(selectResponse.size, 200) + val document = selectResponse.head + assertEquals(document.getString("guid"), "a17be99a-8913-4bb6-8f14-16d4fa1b3559") + assertEquals(document.getInteger("age").toInt, 25) + } + + test("is null") { + val queryConverter = MongoSqlQueryHolder("select * from people where blubber is null") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + assertEquals(selectResponse.size, 200) + val document = selectResponse.head + assertEquals(document.getString("guid"), "a17be99a-8913-4bb6-8f14-16d4fa1b3559") + assertEquals(document.getInteger("age").toInt, 25) + } + + test("only count") { + val queryConverter = MongoSqlQueryHolder("select count(*) as tmp, sum(age) from people;") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + assertEquals(selectResponse.size, 1) + val document = selectResponse.head + assertEquals(document.getInteger("tmp").toInt, 200) + assertEquals(document.getInteger("sum(age)").toInt, 5961) + } + + 
test("group by with count") { + val queryConverter = MongoSqlQueryHolder("select age, count(*) as tmp, sum(age) from people group by age order by age;") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + assertEquals(selectResponse.size, 21) + val document = selectResponse.head + assertEquals(document.getInteger("age").toInt, 20) + assertEquals(document.getInteger("tmp").toInt, 4) + } + + test("having filter") { + val queryConverter = MongoSqlQueryHolder("select age, count(*) as tmp, sum(age) from people group by age having count(*) > 10 order by age;") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + assertEquals(selectResponse.size, 9) + val document = selectResponse.head + assertEquals(document.getInteger("age").toInt, 21) + assertEquals(document.getInteger("tmp").toInt, 11) + } + + test("with limit 5") { + val queryConverter = MongoSqlQueryHolder("select age, count(*) as tmp, sum(age) from people group by age having count(*) > 10 order by age limit 5;") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + assertEquals(selectResponse.size, 5) + val document = selectResponse.head + assertEquals(document.getInteger("age").toInt, 21) + assertEquals(document.getInteger("tmp").toInt, 11) + } + + test("with limit 5 and offset 10") { + val queryConverter = + MongoSqlQueryHolder("select age, count(*) as tmp, sum(age) from people group by age having count(*) > 10 order by age limit 5 offset 5;") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + assertEquals(selectResponse.size, 4) + val document = selectResponse.head + assertEquals(document.getInteger("age").toInt, 27) + assertEquals(document.getInteger("tmp").toInt, 12) + assertEquals(document.getInteger("sum(age)").toInt, 324) + } + + test("destinct") { + val queryConverter = MongoSqlQueryHolder("select distinct favoriteFruit, count(*) from people order by count(*) desc;") + val selectResponse = 
queryConverter.run(TestDatabase.provider).resultList() + assertEquals(selectResponse.size, 3) + val document = selectResponse.head + assertEquals(document.getString("favoriteFruit"), "strawberry") + assertEquals(document.getInteger("count(*)").toInt, 71) + assertEquals(selectResponse.map(_.getString("favoriteFruit")), List("strawberry", "apple", "banana")) + } + +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/sql/UpdateSqlSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/sql/UpdateSqlSpec.scala deleted file mode 100644 index 7f0257ea..00000000 --- a/src/test/scala/dev/mongocamp/driver/mongodb/sql/UpdateSqlSpec.scala +++ /dev/null @@ -1,73 +0,0 @@ -package dev.mongocamp.driver.mongodb.sql - -import dev.mongocamp.driver.mongodb.model.{Grade, Score} -import dev.mongocamp.driver.mongodb.test.TestDatabase -import dev.mongocamp.driver.mongodb.{GenericObservable, MongoDAO} -import org.bson.types.ObjectId -import org.specs2.mutable.Specification -import org.specs2.specification.BeforeEach -import dev.mongocamp.driver.mongodb._ -class UpdateSqlSpec extends Specification with BeforeEach { - sequential - - object GradeDAO extends MongoDAO[Grade](TestDatabase.provider, "universityGrades") - - override def before(): Unit = { - this.GradeDAO.drop().result() - this.GradeDAO - .insertMany( - List( - Grade(new ObjectId(), 1, 2, List(Score(1.20, "test"), Score(120, "test1"))), - Grade(new ObjectId(), 2, 4, List(Score(10, "test2"), Score(20, "test3"))), - Grade(new ObjectId(), 3, 7, List(Score(10, "test4"), Score(20, "test5"))) - ) - ) - .result() - } - - "MongoSqlQueryHolder" should { - - "update single document" in { - val queryConverter = MongoSqlQueryHolder("UPDATE universityGrades SET column1 = 'hello', classId = 47 WHERE studentId = 1;") - val selectResponse = queryConverter.run(TestDatabase.provider).resultList() - selectResponse.size mustEqual 1 - selectResponse.head.getBoolean("wasAcknowledged") mustEqual true - selectResponse.head.getLong("modifiedCount") 
mustEqual 1 - selectResponse.head.getLong("matchedCount") mustEqual 1 - val grade = GradeDAO.find(Map("studentId" -> 1)).result() - grade.classId mustEqual 47 - val documents = TestDatabase.provider.dao("universityGrades").find(Map("studentId" -> 1)).result() - documents.getLong("classId") mustEqual 47 - documents.getStringValue("column1") mustEqual "hello" - } - - "update all" in { - val queryConverter = MongoSqlQueryHolder("UPDATE universityGrades SET column1 = 'hello', classId = 47;") - val selectResponse = queryConverter.run(TestDatabase.provider).resultList() - selectResponse.size mustEqual 1 - selectResponse.head.getBoolean("wasAcknowledged") mustEqual true - selectResponse.head.getLong("modifiedCount") mustEqual 3 - selectResponse.head.getLong("matchedCount") mustEqual 3 - val grade = GradeDAO.find(Map("studentId" -> 1)).result() - grade.classId mustEqual 47 - val documents = TestDatabase.provider.dao("universityGrades").find(Map("studentId" -> 1)).result() - documents.getLong("classId") mustEqual 47 - documents.getStringValue("column1") mustEqual "hello" - } - - "update multiple with or" in { - val queryConverter = MongoSqlQueryHolder("UPDATE universityGrades SET column1 = 'hello', classId = 47 WHERE classId = 4 or classId = 7;") - val selectResponse = queryConverter.run(TestDatabase.provider).resultList() - selectResponse.size mustEqual 1 - selectResponse.head.getBoolean("wasAcknowledged") mustEqual true - selectResponse.head.getLong("modifiedCount") mustEqual 2 - selectResponse.head.getLong("matchedCount") mustEqual 2 - val grade = GradeDAO.find(Map("studentId" -> 2)).result() - grade.classId mustEqual 47 - val documents = TestDatabase.provider.dao("universityGrades").find(Map("studentId" -> 2)).result() - documents.getLong("classId") mustEqual 47 - documents.getStringValue("column1") mustEqual "hello" - } - - } -} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/sql/UpdateSqlSuite.scala 
b/src/test/scala/dev/mongocamp/driver/mongodb/sql/UpdateSqlSuite.scala new file mode 100644 index 00000000..172ef869 --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/sql/UpdateSqlSuite.scala @@ -0,0 +1,67 @@ +package dev.mongocamp.driver.mongodb.sql + +import dev.mongocamp.driver.mongodb._ +import dev.mongocamp.driver.mongodb.model.{ Grade, Score } +import dev.mongocamp.driver.mongodb.test.TestDatabase +import dev.mongocamp.driver.mongodb.test.UniversityDatabase.GradeDAO +import munit.FunSuite +import org.bson.types.ObjectId + +class UpdateSqlSuite extends FunSuite { + + override def beforeEach(context: BeforeEach): Unit = { + GradeDAO.drop().result() + GradeDAO + .insertMany( + List( + Grade(new ObjectId(), 1, 2, List(Score(1.20, "test"), Score(120, "test1"))), + Grade(new ObjectId(), 2, 4, List(Score(10, "test2"), Score(20, "test3"))), + Grade(new ObjectId(), 3, 7, List(Score(10, "test4"), Score(20, "test5"))) + ) + ) + .result() + } + + test("update single document") { + val queryConverter = MongoSqlQueryHolder("UPDATE universityGrades SET column1 = 'hello', classId = 47 WHERE studentId = 1;") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + assertEquals(selectResponse.size, 1) + assert(selectResponse.head.getBoolean("wasAcknowledged")) + assertEquals(selectResponse.head.getLong("modifiedCount").toInt, 1) + assertEquals(selectResponse.head.getLong("matchedCount").toInt, 1) + val grade = GradeDAO.find(Map("studentId" -> 1)).result() + assertEquals(grade.classId.toInt, 47) + val documents = TestDatabase.provider.dao("universityGrades").find(Map("studentId" -> 1)).result() + assertEquals(documents.getLong("classId").toInt, 47) + assertEquals(documents.getStringValue("column1"), "hello") + } + + test("update all") { + val queryConverter = MongoSqlQueryHolder("UPDATE universityGrades SET column1 = 'hello', classId = 47;") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + 
assertEquals(selectResponse.size, 1) + assert(selectResponse.head.getBoolean("wasAcknowledged")) + assertEquals(selectResponse.head.getLong("modifiedCount").toInt, 3) + assertEquals(selectResponse.head.getLong("matchedCount").toInt, 3) + val grade = GradeDAO.find(Map("studentId" -> 1)).result() + assertEquals(grade.classId.toInt, 47) + val documents = TestDatabase.provider.dao("universityGrades").find(Map("studentId" -> 1)).result() + assertEquals(documents.getLong("classId").toInt, 47) + assertEquals(documents.getStringValue("column1"), "hello") + } + + test("update multiple with or") { + val queryConverter = MongoSqlQueryHolder("UPDATE universityGrades SET column1 = 'hello', classId = 47 WHERE classId = 4 or classId = 7;") + val selectResponse = queryConverter.run(TestDatabase.provider).resultList() + assertEquals(selectResponse.size, 1) + assert(selectResponse.head.getBoolean("wasAcknowledged")) + assertEquals(selectResponse.head.getLong("modifiedCount").toInt, 2) + assertEquals(selectResponse.head.getLong("matchedCount").toInt, 2) + val grade = GradeDAO.find(Map("studentId" -> 2)).result() + assertEquals(grade.classId.toInt, 47) + val documents = TestDatabase.provider.dao("universityGrades").find(Map("studentId" -> 2)).result() + assertEquals(documents.getLong("classId").toInt, 47) + assertEquals(documents.getStringValue("column1"), "hello") + } + +} \ No newline at end of file diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/sync/SyncSpec.scala b/src/test/scala/dev/mongocamp/driver/mongodb/sync/SyncSpec.scala deleted file mode 100644 index 3cee7fc7..00000000 --- a/src/test/scala/dev/mongocamp/driver/mongodb/sync/SyncSpec.scala +++ /dev/null @@ -1,93 +0,0 @@ -package dev.mongocamp.driver.mongodb.sync - -import dev.mongocamp.driver.mongodb.server.LocalServer -import dev.mongocamp.driver.mongodb.sync.TestSync._ -import dev.mongocamp.driver.mongodb.test.UniversityDatabase -import org.specs2.mutable.Specification -import org.specs2.specification.{AfterAll, 
BeforeAll} - -class SyncSpec extends Specification with BeforeAll with AfterAll { - - val CountSmall = 5 - val CountMedium = 500 - - sequential - - override def beforeAll(): Unit = - UniversityDatabase.LocalTestServer = LocalServer.fromPath("unit.test.local.mongo.server") - - override def afterAll(): Unit = { - TestSync.mongoSyncer.terminate() - UniversityDatabase.LocalTestServer.shutdown() - } - - "Collection" should { - - "be synced from source to target" in { - var result: MongoSyncResult = TestSync.mongoSyncer.sync(TestCollectionSourceTargetName).head - result.acknowleged must beTrue - TestSync.insertIntoSource(CountMedium, TestCollectionSourceTargetName) - result = TestSync.mongoSyncer.sync(TestCollectionSourceTargetName).head - result.acknowleged must beTrue - result.synced mustEqual 500 - result.countBefore mustEqual 0 - result.countAfter mustEqual 500 - TestSync.targetCount(TestCollectionSourceTargetName) mustEqual 500 - result = TestSync.mongoSyncer.sync(TestCollectionSourceTargetName).head - result.acknowleged must beTrue - result.countBefore mustEqual 500 - result.synced mustEqual 0 - - TestSync.insertIntoSource(CountSmall, TestCollectionSourceTargetName) - result = TestSync.mongoSyncer.sync(TestCollectionSourceTargetName).head - result.acknowleged must beTrue - result.countBefore mustEqual 500 - result.countAfter mustEqual 505 - result.synced mustEqual 5 - TestSync.targetCount(TestCollectionSourceTargetName) mustEqual 505 - - TestSync.insertIntoTarget(CountSmall, TestCollectionSourceTargetName) - TestSync.targetCount(TestCollectionSourceTargetName) mustEqual 510 - val resultList = TestSync.mongoSyncer.sync(TestCollectionSourceTargetName) - result = resultList.head - result.acknowleged must beTrue - result.countBefore mustEqual 510 - result.synced mustEqual 0 - TestSync.sourceCount(TestCollectionSourceTargetName) mustEqual 505 - } - - "be synced two way" in { - var result: MongoSyncResult = TestSync.mongoSyncer.sync(TestCollectionTwoWayName).head - 
result.acknowleged must beTrue - TestSync.insertIntoSource(CountMedium, TestCollectionTwoWayName) - result = TestSync.mongoSyncer.sync(TestCollectionTwoWayName).head - result.acknowleged must beTrue - result.synced mustEqual 500 - result.countBefore mustEqual 0 - result.countAfter mustEqual 500 - TestSync.targetCount(TestCollectionTwoWayName) mustEqual 500 - result = TestSync.mongoSyncer.sync(TestCollectionTwoWayName).head - result.acknowleged must beTrue - result.countBefore mustEqual 500 - result.synced mustEqual 0 - - TestSync.insertIntoSource(CountSmall, TestCollectionTwoWayName) - result = TestSync.mongoSyncer.sync(TestCollectionTwoWayName).head - result.acknowleged must beTrue - result.countBefore mustEqual 500 - result.countAfter mustEqual 505 - result.synced mustEqual 5 - TestSync.targetCount(TestCollectionTwoWayName) mustEqual 505 - - TestSync.insertIntoTarget(CountSmall, TestCollectionTwoWayName) - TestSync.targetCount(TestCollectionTwoWayName) mustEqual 510 - val resultList = TestSync.mongoSyncer.sync(TestCollectionTwoWayName) - result = resultList.head - result.acknowleged must beTrue - result.countBefore mustEqual 510 - result.synced mustEqual 0 - TestSync.sourceCount(TestCollectionTwoWayName) mustEqual 510 - } - - } -} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/sync/SyncSuite.scala b/src/test/scala/dev/mongocamp/driver/mongodb/sync/SyncSuite.scala new file mode 100644 index 00000000..aa607503 --- /dev/null +++ b/src/test/scala/dev/mongocamp/driver/mongodb/sync/SyncSuite.scala @@ -0,0 +1,86 @@ +package dev.mongocamp.driver.mongodb.sync + +import dev.mongocamp.driver.mongodb.server.LocalServer +import dev.mongocamp.driver.mongodb.sync.TestSync._ +import dev.mongocamp.driver.mongodb.test.UniversityDatabase +import munit.FunSuite + +class SyncSuite extends FunSuite { + + val CountSmall = 5 + val CountMedium = 500 + + override def beforeAll(): Unit = + UniversityDatabase.LocalTestServer = LocalServer.fromPath("unit.test.local.mongo.server") + 
+ override def afterAll(): Unit = { + TestSync.mongoSyncer.terminate() + UniversityDatabase.LocalTestServer.shutdown() + } + + test("Collection should be synced from source to target") { + var result: MongoSyncResult = TestSync.mongoSyncer.sync(TestCollectionSourceTargetName).head + assert(result.acknowleged) + TestSync.insertIntoSource(CountMedium, TestCollectionSourceTargetName) + result = TestSync.mongoSyncer.sync(TestCollectionSourceTargetName).head + assert(result.acknowleged) + assertEquals(result.synced, 500) + assertEquals(result.countBefore, 0) + assertEquals(result.countAfter, 500) + assertEquals(TestSync.targetCount(TestCollectionSourceTargetName), 500L) + result = TestSync.mongoSyncer.sync(TestCollectionSourceTargetName).head + assert(result.acknowleged) + assertEquals(result.countBefore, 500) + assertEquals(result.synced, 0) + + TestSync.insertIntoSource(CountSmall, TestCollectionSourceTargetName) + result = TestSync.mongoSyncer.sync(TestCollectionSourceTargetName).head + assert(result.acknowleged) + assertEquals(result.countBefore, 500) + assertEquals(result.countAfter, 505) + assertEquals(result.synced, 5) + assertEquals(TestSync.targetCount(TestCollectionSourceTargetName), 505L) + + TestSync.insertIntoTarget(CountSmall, TestCollectionSourceTargetName) + assertEquals(TestSync.targetCount(TestCollectionSourceTargetName), 510L) + val resultList = TestSync.mongoSyncer.sync(TestCollectionSourceTargetName) + result = resultList.head + assert(result.acknowleged) + assertEquals(result.countBefore, 510) + assertEquals(result.synced, 0) + assertEquals(TestSync.sourceCount(TestCollectionSourceTargetName), 505L) + } + + test("Collection should be synced two way") { + var result: MongoSyncResult = TestSync.mongoSyncer.sync(TestCollectionTwoWayName).head + assert(result.acknowleged) + TestSync.insertIntoSource(CountMedium, TestCollectionTwoWayName) + result = TestSync.mongoSyncer.sync(TestCollectionTwoWayName).head + assert(result.acknowleged) + 
assertEquals(result.synced, 500) + assertEquals(result.countBefore, 0) + assertEquals(result.countAfter, 500) + assertEquals(TestSync.targetCount(TestCollectionTwoWayName), 500L) + result = TestSync.mongoSyncer.sync(TestCollectionTwoWayName).head + assert(result.acknowleged) + assertEquals(result.countBefore, 500) + assertEquals(result.synced, 0) + + TestSync.insertIntoSource(CountSmall, TestCollectionTwoWayName) + result = TestSync.mongoSyncer.sync(TestCollectionTwoWayName).head + assert(result.acknowleged) + assertEquals(result.countBefore, 500) + assertEquals(result.countAfter, 505) + assertEquals(result.synced, 5) + assertEquals(TestSync.targetCount(TestCollectionTwoWayName), 505L) + + TestSync.insertIntoTarget(CountSmall, TestCollectionTwoWayName) + assertEquals(TestSync.targetCount(TestCollectionTwoWayName), 510L) + val resultList = TestSync.mongoSyncer.sync(TestCollectionTwoWayName) + result = resultList.head + assert(result.acknowleged) + assertEquals(result.countBefore, 510) + assertEquals(result.synced, 0) + assertEquals(TestSync.sourceCount(TestCollectionTwoWayName), 510L) + } +} diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/test/TestDatabase.scala b/src/test/scala/dev/mongocamp/driver/mongodb/test/TestDatabase.scala index fe16b240..22b5b290 100644 --- a/src/test/scala/dev/mongocamp/driver/mongodb/test/TestDatabase.scala +++ b/src/test/scala/dev/mongocamp/driver/mongodb/test/TestDatabase.scala @@ -4,11 +4,11 @@ import better.files.File import com.mongodb.client.model.changestream.OperationType import com.typesafe.scalalogging.LazyLogging import dev.mongocamp.driver.mongodb.database.DatabaseProvider +import dev.mongocamp.driver.mongodb.json._ import dev.mongocamp.driver.mongodb.model._ -import dev.mongocamp.driver.mongodb.{GridFSDAO, MongoDAO} -import org.bson.codecs.configuration.CodecRegistries.{fromProviders, fromRegistries} +import dev.mongocamp.driver.mongodb.{ GridFSDAO, MongoDAO } +import io.circe.generic.auto._ import 
org.mongodb.scala.Document -import org.mongodb.scala.bson.codecs.Macros._ import org.mongodb.scala.model.changestream.ChangeStreamDocument object TestDatabase extends LazyLogging { @@ -17,9 +17,8 @@ object TestDatabase extends LazyLogging { File(ImageDAOTargetPath).createIfNotExists() - private val registry = fromProviders(classOf[Person], classOf[Friend], classOf[CodecTest], classOf[Book], classOf[Grade], classOf[Score]) - val provider: DatabaseProvider = DatabaseProvider.fromPath(configPath = "unit.test.mongo", registry = fromRegistries(registry)) + val provider: DatabaseProvider = DatabaseProvider.fromPath(configPath = "unit.test.mongo") def consumeDatabaseChanges(changeStreamDocument: ChangeStreamDocument[Document]): Unit = { if (changeStreamDocument.getOperationType != OperationType.INSERT) { diff --git a/src/test/scala/dev/mongocamp/driver/mongodb/test/UniversityDatabase.scala b/src/test/scala/dev/mongocamp/driver/mongodb/test/UniversityDatabase.scala index 07bda511..2abbb11f 100644 --- a/src/test/scala/dev/mongocamp/driver/mongodb/test/UniversityDatabase.scala +++ b/src/test/scala/dev/mongocamp/driver/mongodb/test/UniversityDatabase.scala @@ -1,31 +1,28 @@ package dev.mongocamp.driver.mongodb.test import dev.mongocamp.driver.mongodb.database.DatabaseProvider +import dev.mongocamp.driver.mongodb.json._ import dev.mongocamp.driver.mongodb.model._ import dev.mongocamp.driver.mongodb.server.LocalServer -import dev.mongocamp.driver.mongodb.{GridFSDAO, MongoDAO} -import org.bson.codecs.configuration.CodecRegistries.{fromProviders, fromRegistries} -import org.bson.codecs.configuration.CodecRegistry -import org.mongodb.scala.bson.codecs.Macros._ +import dev.mongocamp.driver.mongodb.{ GridFSDAO, MongoDAO } +import io.circe.generic.auto._ + object UniversityDatabase { // create local test server (mongodb-java-server) var LocalTestServer: LocalServer = _ // create codecs for custom classes - private val universityRegistry: CodecRegistry = 
fromProviders(classOf[Student], classOf[Score], classOf[Grade]) - - private val registry: CodecRegistry = fromRegistries(universityRegistry) // create provider - val provider: DatabaseProvider = DatabaseProvider.fromPath(configPath = "unit.test.mongo.local", registry = registry) + val provider: DatabaseProvider = DatabaseProvider.fromPath(configPath = "unit.test.mongo.local") // setup DAO objects with mongodb collection names - object StudentDAO extends MongoDAO[Student](provider, "university-students") + object StudentDAO extends MongoDAO[Student](provider, "universityStudents") - object GradeDAO extends MongoDAO[Book](provider, "university-grades") + object GradeDAO extends MongoDAO[Grade](TestDatabase.provider, "universityGrades") - object SudentImagesDAO extends GridFSDAO(provider, "university-images") + object SudentImagesDAO extends GridFSDAO(provider, "universityImages") }