[0.4.1] - small api refactoring (#111)
* [CORE][ISSUE-110] - updating api

* [AKKA][ISSUE-110] - updating api

* [AHC][ISSUE-110] - updating api

* [URL][ISSUE-110] - update api
fsanaulla authored Nov 13, 2018
1 parent fccfb84 commit 9a6f997
Showing 36 changed files with 184 additions and 156 deletions.
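The change repeated across these files is that the `consistency`, `precision`, and `epoch` parameters lose their hard-coded defaults (`Consistencies.ONE`, `Precisions.NANOSECONDS`, `Epochs.NANOSECONDS`) and become `Option[...] = None`, so a value is only sent to InfluxDB when the caller supplies one. Below is a minimal caller-side sketch of the new signatures; it is not part of the commit, and the package locations of `Consistencies`, `Precisions`, and `WriteResult` are assumptions based on the imports visible in this diff.

// Sketch only -- illustrates call sites against the refactored 0.4.1 API.
import scala.concurrent.Future
import com.github.fsanaulla.chronicler.ahc.io.api.Database
import com.github.fsanaulla.chronicler.core.enums.{Consistencies, Precisions}
import com.github.fsanaulla.chronicler.core.model.WriteResult

object ApiUsageSketch {

  // Pre-0.4.1, consistency/precision defaulted to ONE/NANOSECONDS and were always sent;
  // now nothing is appended to the request unless the caller opts in.
  def writeUsingServerDefaults(db: Database, line: String): Future[WriteResult] =
    db.writeNative(line)

  // Explicitly restoring the old behaviour under the new Option-based signature.
  def writeExplicit(db: Database, line: String): Future[WriteResult] =
    db.writeNative(
      line,
      consistency = Some(Consistencies.ONE),
      precision   = Some(Precisions.NANOSECONDS)
    )
}

The integration specs below compensate in the same way, passing `epoch = Some(Epochs.NANOSECONDS)` explicitly where they previously relied on the removed default.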
@@ -4,6 +4,7 @@ import com.github.fsanaulla.chronicler.ahc.io.api.Database
import com.github.fsanaulla.chronicler.ahc.io.models.InfluxConfig
import com.github.fsanaulla.chronicler.ahc.io.{AhcIOClient, InfluxIO}
import com.github.fsanaulla.chronicler.ahc.management.{AsyncManagementClient, InfluxMng}
import com.github.fsanaulla.chronicler.core.enums.Epochs
import com.github.fsanaulla.chronicler.core.model.Point
import com.github.fsanaulla.chronicler.core.utils.Extensions.RichJValue
import com.github.fsanaulla.chronicler.testing.it.ResultMatchers._
@@ -119,7 +120,7 @@ class DatabaseApiSpec extends FlatSpecWithMatchers with Futures with DockerizedI
.futureValue shouldEqual NoContentResult

db
.readJs("SELECT SUM(\"age\") FROM \"test5\" GROUP BY \"sex\"")
.readJs("SELECT SUM(\"age\") FROM \"test5\" GROUP BY \"sex\"", epoch = Some(Epochs.NANOSECONDS))
.futureValue
.groupedResult
.map { case (k, v) => k.toSeq -> v } shouldEqual Array(Seq("Male") -> JArray(Array(JNum(0), JNum(49))))
@@ -4,6 +4,7 @@ import com.github.fsanaulla.chronicler.ahc.io.api.Database
import com.github.fsanaulla.chronicler.ahc.io.models.InfluxConfig
import com.github.fsanaulla.chronicler.ahc.io.{AhcIOClient, InfluxIO}
import com.github.fsanaulla.chronicler.ahc.management.{AsyncManagementClient, InfluxMng}
import com.github.fsanaulla.chronicler.core.enums.Epochs
import com.github.fsanaulla.chronicler.core.model.Point
import com.github.fsanaulla.chronicler.core.utils.Extensions.RichJValue
import com.github.fsanaulla.chronicler.testing.it.ResultMatchers._
@@ -119,7 +120,7 @@ class GzippedDatabaseApiSpec extends FlatSpecWithMatchers with Futures with Dock
.futureValue shouldEqual NoContentResult

db
.readJs("SELECT SUM(\"age\") FROM \"test5\" GROUP BY \"sex\"")
.readJs("SELECT SUM(\"age\") FROM \"test5\" GROUP BY \"sex\"", epoch = Some(Epochs.NANOSECONDS))
.futureValue
.groupedResult
.map { case (k, v) => k.toSeq -> v } shouldEqual Array(Seq("Male") -> JArray(Array(JNum(0), JNum(49))))
@@ -41,44 +41,44 @@ final class Database(private[ahc] val host: String,
with AhcReader {

def writeFromFile(filePath: String,
consistency: Consistency = Consistencies.ONE,
precision: Precision = Precisions.NANOSECONDS,
consistency: Option[Consistency] = None,
precision: Option[Precision] = None,
retentionPolicy: Option[String] = None): Future[WriteResult] =
writeFromFile(dbName, filePath, consistency, precision, retentionPolicy, gzipped)


def writeNative(point: String,
consistency: Consistency = Consistencies.ONE,
precision: Precision = Precisions.NANOSECONDS,
consistency: Option[Consistency] = None,
precision: Option[Precision] = None,
retentionPolicy: Option[String] = None): Future[WriteResult] =
writeTo(dbName, point, consistency, precision, retentionPolicy, gzipped)


def bulkWriteNative(points: Seq[String],
consistency: Consistency = Consistencies.ONE,
precision: Precision = Precisions.NANOSECONDS,
consistency: Option[Consistency] = None,
precision: Option[Precision] = None,
retentionPolicy: Option[String] = None): Future[WriteResult] =
writeTo(dbName, points, consistency, precision, retentionPolicy, gzipped)


def writePoint(point: Point,
consistency: Consistency = Consistencies.ONE,
precision: Precision = Precisions.NANOSECONDS,
consistency: Option[Consistency] = None,
precision: Option[Precision] = None,
retentionPolicy: Option[String] = None): Future[WriteResult] =
writeTo(dbName, point, consistency, precision, retentionPolicy, gzipped)


def bulkWritePoints(points: Seq[Point],
consistency: Consistency = Consistencies.ONE,
precision: Precision = Precisions.NANOSECONDS,
consistency: Option[Consistency] = None,
precision: Option[Precision] = None,
retentionPolicy: Option[String] = None): Future[WriteResult] =
writeTo(dbName, points, consistency, precision, retentionPolicy, gzipped)


override def read[A: ClassTag](query: String,
epoch: Epoch,
pretty: Boolean,
chunked: Boolean)
epoch: Option[Epoch] = None,
pretty: Boolean = false,
chunked: Boolean = false)
(implicit reader: InfluxReader[A]): Future[ReadResult[A]] = {
readJs(query, epoch, pretty, chunked) map {
case qr: QueryResult[JArray] => qr.map(reader.read)
@@ -40,8 +40,8 @@ final class Measurement[E: ClassTag](private[ahc] val host: String,
with AhcReader {

def write(entity: E,
consistency: Consistency = Consistencies.ONE,
precision: Precision = Precisions.NANOSECONDS,
consistency: Option[Consistency] = None,
precision: Option[Precision] = None,
retentionPolicy: Option[String] = None)
(implicit wr: InfluxWriter[E]): Future[WriteResult] =
writeTo(
@@ -54,8 +54,8 @@ final class Measurement[E: ClassTag](private[ahc] val host: String,
)

def bulkWrite(entitys: Seq[E],
consistency: Consistency = Consistencies.ONE,
precision: Precision = Precisions.NANOSECONDS,
consistency: Option[Consistency] = None,
precision: Option[Precision] = None,
retentionPolicy: Option[String] = None)
(implicit wr: InfluxWriter[E]): Future[WriteResult] =
writeTo(
@@ -68,7 +68,7 @@ final class Measurement[E: ClassTag](private[ahc] val host: String,
)

def read(query: String,
epoch: Epoch = Epochs.NANOSECONDS,
epoch: Option[Epoch] = None,
pretty: Boolean = false,
chunked: Boolean = false)
(implicit rd: InfluxReader[E]): Future[ReadResult[E]] = {
@@ -34,10 +34,10 @@ private[ahc] trait AhcReader
with ReadOperations[Future] { self: HasCredentials =>

private[chronicler] override def readJs(dbName: String,
query: String,
epoch: Epoch,
pretty: Boolean,
chunked: Boolean): Future[ReadResult[JArray]] = {
query: String,
epoch: Option[Epoch],
pretty: Boolean,
chunked: Boolean): Future[ReadResult[JArray]] = {
val uri = readFromInfluxSingleQuery(dbName, query, epoch, pretty, chunked)
val executionResult = execute(uri)
query match {
@@ -47,10 +47,10 @@ private[ahc] trait AhcReader
}

private[chronicler] override def bulkReadJs(dbName: String,
queries: Seq[String],
epoch: Epoch,
pretty: Boolean,
chunked: Boolean): Future[QueryResult[Array[JArray]]] = {
queries: Seq[String],
epoch: Option[Epoch],
pretty: Boolean,
chunked: Boolean): Future[QueryResult[Array[JArray]]] = {
val uri = readFromInfluxBulkQuery(dbName, queries, epoch, pretty, chunked)
execute(uri).flatMap(toBulkQueryJsResult)
}
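`readJs` and `bulkReadJs` now take `Option[Epoch]`; when it stays `None`, the `epoch` query parameter is presumably omitted and InfluxDB answers with RFC3339 timestamp strings, which is why the integration specs above now ask for `Some(Epochs.NANOSECONDS)` explicitly. A standalone sketch of that parameter handling follows, using InfluxDB's documented `/query` parameter names rather than the library's internal URI builder.

// Standalone sketch, not the chronicler implementation: Option-valued read settings
// plausibly map onto InfluxDB /query parameters only when they are defined.
object ReadQuerySketch {
  def readParams(db: String,
                 query: String,
                 epoch: Option[String], // e.g. Some("ns"); None falls back to RFC3339 timestamps
                 pretty: Boolean): Map[String, String] = {
    val base = Map("db" -> db, "q" -> query) ++ epoch.map("epoch" -> _)
    if (pretty) base + ("pretty" -> "true") else base
  }

  def main(args: Array[String]): Unit = {
    println(readParams("db", "SELECT * FROM cpu", Some("ns"), pretty = false))
    // Map(db -> db, q -> SELECT * FROM cpu, epoch -> ns)
    println(readParams("db", "SELECT * FROM cpu", None, pretty = false))
    // Map(db -> db, q -> SELECT * FROM cpu)
  }
}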
@@ -37,11 +37,11 @@ private[ahc] trait AhcWriter
with PointTransformer { self: HasCredentials =>

private[chronicler] override def writeTo(dbName: String,
entity: String,
consistency: Consistency,
precision: Precision,
retentionPolicy: Option[String],
gzipped: Boolean): Future[WriteResult] = {
entity: String,
consistency: Option[Consistency],
precision: Option[Precision],
retentionPolicy: Option[String],
gzipped: Boolean): Future[WriteResult] = {

val uri = writeToInfluxQuery(dbName, consistency, precision, retentionPolicy)
val req = sttp
@@ -54,11 +54,11 @@
}

private[chronicler] override def writeFromFile(dbName: String,
filePath: String,
consistency: Consistency,
precision: Precision,
retentionPolicy: Option[String],
gzipped: Boolean): Future[WriteResult] = {
filePath: String,
consistency: Option[Consistency],
precision: Option[Precision],
retentionPolicy: Option[String],
gzipped: Boolean): Future[WriteResult] = {

val uri = writeToInfluxQuery(dbName, consistency, precision, retentionPolicy)
val req = sttp
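On the write side the same pattern applies: `writeTo` and `writeFromFile` pass the `Option`-valued settings straight to `writeToInfluxQuery`. The operation-query spec further down still supplies both values (lifted to `Some` via an implicit) and expects matching `consistency` and `precision` entries in the `/write` query string; when a value stays `None` the parameter is presumably omitted. A standalone sketch of that mapping, not the library's builder (`rp` is InfluxDB's documented retention-policy parameter):

// Standalone sketch: Option-valued write settings become /write query parameters
// only when defined.
object WriteQuerySketch {
  def writeParams(db: String,
                  consistency: Option[String],
                  precision: Option[String],
                  retentionPolicy: Option[String]): Map[String, String] =
    Map("db" -> db) ++
      consistency.map("consistency" -> _) ++
      precision.map("precision" -> _) ++
      retentionPolicy.map("rp" -> _)

  def main(args: Array[String]): Unit = {
    println(writeParams("db", Some("one"), Some("ns"), None))
    // Map(db -> db, consistency -> one, precision -> ns)
    println(writeParams("db", None, None, None))
    // Map(db -> db) -- the pre-0.4.1 API would always have sent consistency and precision
  }
}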
@@ -22,6 +22,8 @@ import com.github.fsanaulla.chronicler.core.query.DatabaseOperationQuery
import com.github.fsanaulla.chronicler.testing.unit.{EmptyCredentials, FlatSpecWithMatchers, NonEmptyCredentials}
import com.softwaremill.sttp.Uri

import scala.language.implicitConversions

/**
* Created by
* Author: fayaz.sanaulla@gmail.com
@@ -39,6 +41,8 @@ class DatabaseApiOperationQuerySpec extends FlatSpecWithMatchers {
val testDB = "db"
val testQuery = "SELECT * FROM test"

implicit def a2Opt[A](a: A): Option[A] = Some(a)


it should "return correct write query" in new AuthEnv {

@@ -66,10 +70,10 @@ class DatabaseApiOperationQuerySpec extends FlatSpecWithMatchers {

it should "return correct write query without auth " in new NonAuthEnv {
writeToInfluxQuery(testDB, Consistencies.ONE, Precisions.NANOSECONDS, None).toString() shouldEqual
queryTester("/write", Map("db" -> testDB, "precision" -> "ns", "consistency" -> "one"))
queryTester("/write", Map("db" -> testDB, "consistency" -> "one", "precision" -> "ns"))

writeToInfluxQuery(testDB, Consistencies.ONE, Precisions.MICROSECONDS, None).toString() shouldEqual
queryTester("/write", Map("db" -> testDB, "precision" -> "u", "consistency" -> "one"))
queryTester("/write", Map("db" -> testDB, "consistency" -> "one", "precision" -> "u"))
}

it should "return correct single read query" in new AuthEnv {
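The `implicit def a2Opt` added in this spec lifts a bare value to `Some(value)`, so test call sites written against the old non-optional signatures (for example, passing `Consistencies.ONE` directly) keep compiling after the `Option[...]` refactoring. A minimal, self-contained illustration of the same trick (`greet` is made up; only the `a2Opt` pattern comes from the diff):

// Self-contained illustration of the implicit lifting used in the spec above.
import scala.language.implicitConversions

object ImplicitLiftSketch {
  implicit def a2Opt[A](a: A): Option[A] = Some(a)

  def greet(name: Option[String] = None): String =
    name.fold("hello")(n => s"hello, $n")

  def main(args: Array[String]): Unit = {
    println(greet())         // "hello" -- default None
    println(greet("influx")) // "hello, influx" -- String lifted to Some("influx") by a2Opt
  }
}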
@@ -1,3 +1,19 @@
/*
* Copyright 2017-2018 Faiaz Sanaulla
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.github.fsanaulla.chronicler.ahc.io

import java.net.URLEncoder
@@ -1,3 +1,19 @@
/*
* Copyright 2017-2018 Faiaz Sanaulla
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.github.fsanaulla.chronicler.ahc.shared

import com.softwaremill.sttp.Response
@@ -7,6 +7,7 @@ import com.github.fsanaulla.chronicler.akka.io.api.Database
import com.github.fsanaulla.chronicler.akka.io.models.InfluxConfig
import com.github.fsanaulla.chronicler.akka.io.{AkkaIOClient, InfluxIO}
import com.github.fsanaulla.chronicler.akka.management.{AkkaManagementClient, InfluxMng}
import com.github.fsanaulla.chronicler.core.enums.Epochs
import com.github.fsanaulla.chronicler.core.model.Point
import com.github.fsanaulla.chronicler.core.utils.Extensions.RichJValue
import com.github.fsanaulla.chronicler.testing.it.ResultMatchers._
@@ -126,7 +127,7 @@ class DatabaseApiSpec
.futureValue shouldEqual NoContentResult

db
.readJs("SELECT SUM(\"age\") FROM \"test5\" GROUP BY \"sex\"")
.readJs("SELECT SUM(\"age\") FROM \"test5\" GROUP BY \"sex\"", epoch = Some(Epochs.NANOSECONDS))
.futureValue
.groupedResult
.map { case (k, v) => k.toSeq -> v } shouldEqual Array(Seq("Male") -> JArray(Array(JNum(0), JNum(49))))
@@ -7,6 +7,7 @@ import com.github.fsanaulla.chronicler.akka.io.api.Database
import com.github.fsanaulla.chronicler.akka.io.models.InfluxConfig
import com.github.fsanaulla.chronicler.akka.io.{AkkaIOClient, InfluxIO}
import com.github.fsanaulla.chronicler.akka.management.{AkkaManagementClient, InfluxMng}
import com.github.fsanaulla.chronicler.core.enums.Epochs
import com.github.fsanaulla.chronicler.core.model.Point
import com.github.fsanaulla.chronicler.core.utils.Extensions.RichJValue
import com.github.fsanaulla.chronicler.testing.it.ResultMatchers._
@@ -121,7 +122,7 @@ class GzippedDatabaseApiSpec
.futureValue shouldEqual NoContentResult

db
.readJs("SELECT SUM(\"age\") FROM \"test5\" GROUP BY \"sex\"")
.readJs("SELECT SUM(\"age\") FROM \"test5\" GROUP BY \"sex\"", epoch = Some(Epochs.NANOSECONDS))
.futureValue
.groupedResult
.map { case (k, v) => k.toSeq -> v } shouldEqual Array(Seq("Male") -> JArray(Array(JNum(0), JNum(49))))
@@ -50,37 +50,37 @@ final class Database(dbName: String,
with HasCredentials {

def writeFromFile(filePath: String,
consistency: Consistency = Consistencies.ONE,
precision: Precision = Precisions.NANOSECONDS,
consistency: Option[Consistency] = None,
precision: Option[Precision] = None,
retentionPolicy: Option[String] = None): Future[WriteResult] =
writeFromFile(dbName, filePath, consistency, precision, retentionPolicy, gzipped)

def writeNative(point: String,
consistency: Consistency = Consistencies.ONE,
precision: Precision = Precisions.NANOSECONDS,
consistency: Option[Consistency] = None,
precision: Option[Precision] = None,
retentionPolicy: Option[String] = None): Future[WriteResult] =
writeTo(dbName, point, consistency, precision, retentionPolicy, gzipped)

def bulkWriteNative(points: Seq[String],
consistency: Consistency = Consistencies.ONE,
precision: Precision = Precisions.NANOSECONDS,
consistency: Option[Consistency] = None,
precision: Option[Precision] = None,
retentionPolicy: Option[String] = None): Future[WriteResult] =
writeTo(dbName, points, consistency, precision, retentionPolicy, gzipped)

def writePoint(point: Point,
consistency: Consistency = Consistencies.ONE,
precision: Precision = Precisions.NANOSECONDS,
consistency: Option[Consistency] = None,
precision: Option[Precision] = None,
retentionPolicy: Option[String] = None): Future[WriteResult] =
writeTo(dbName, point, consistency, precision, retentionPolicy, gzipped)

def bulkWritePoints(points: Seq[Point],
consistency: Consistency = Consistencies.ONE,
precision: Precision = Precisions.NANOSECONDS,
retentionPolicy: Option[String] = None): Future[WriteResult] =
consistency: Option[Consistency] = None,
precision: Option[Precision] = None,
retentionPolicy: Option[String] = None): Future[WriteResult] =
writeTo(dbName, points, consistency, precision, retentionPolicy, gzipped)

def read[A: ClassTag](query: String,
epoch: Epoch = Epochs.NANOSECONDS,
epoch: Option[Epoch] = None,
pretty: Boolean = false,
chunked: Boolean = false)
(implicit reader: InfluxReader[A]): Future[ReadResult[A]] =
@@ -49,8 +49,8 @@ final class Measurement[E: ClassTag](dbName: String,
with PointTransformer {

def write(entity: E,
consistency: Consistency = Consistencies.ONE,
precision: Precision = Precisions.NANOSECONDS,
consistency: Option[Consistency] = None,
precision: Option[Precision] = None,
retentionPolicy: Option[String] = None)(implicit wr: InfluxWriter[E]): Future[WriteResult] =
writeTo(
dbName,
@@ -63,8 +63,8 @@ final class Measurement[E: ClassTag](dbName: String,


def bulkWrite(entitys: Seq[E],
consistency: Consistency = Consistencies.ONE,
precision: Precision = Precisions.NANOSECONDS,
consistency: Option[Consistency] = None,
precision: Option[Precision] = None,
retentionPolicy: Option[String] = None)(implicit wr: InfluxWriter[E]): Future[WriteResult] =
writeTo(
dbName,
@@ -77,7 +77,7 @@ final class Measurement[E: ClassTag](dbName: String,


def read(query: String,
epoch: Epoch = Epochs.NANOSECONDS,
epoch: Option[Epoch] = None,
pretty: Boolean = false,
chunked: Boolean = false)(implicit reader: InfluxReader[E]): Future[ReadResult[E]] =
readJs(dbName, query, epoch, pretty, chunked) map {
@@ -40,7 +40,7 @@ private[akka] trait AkkaReader

private[chronicler] override def readJs(dbName: String,
query: String,
epoch: Epoch,
epoch: Option[Epoch],
pretty: Boolean,
chunked: Boolean): Future[ReadResult[JArray]] = {

@@ -56,7 +56,7 @@

private[chronicler] override def bulkReadJs(dbName: String,
queries: Seq[String],
epoch: Epoch,
epoch: Option[Epoch],
pretty: Boolean,
chunked: Boolean): Future[QueryResult[Array[JArray]]] = {
val uri = readFromInfluxBulkQuery(dbName, queries, epoch, pretty, chunked)