Dataset columns:
  repo_name: string (lengths 6 to 97)
  path: string (lengths 3 to 341)
  text: string (lengths 8 to 1.02M)
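
Each record in the listing below pairs a repository name and a file path with the raw file text. A minimal Scala sketch of that record shape, assuming the field names in the header above; the case class and the example value are illustrative only and are not part of the dataset:

// Minimal sketch, assuming the three columns above; illustrative only.
case class CodeRecord(
  repoName: String, // e.g. "karateboy/slcems" (6 to 97 chars)
  path: String,     // e.g. "project/plugins.sbt" (3 to 341 chars)
  text: String      // raw file contents (8 chars up to about 1.02 MB)
)

object CodeRecordExample extends App {
  // One record built from a short row in the listing below.
  val row = CodeRecord(
    repoName = "karateboy/slcems",
    path = "project/plugins.sbt",
    text = """addSbtPlugin("com.typesafe.play" % "sbt-plugin" % "2.8.11")"""
  )
  println(s"${row.repoName}:${row.path} (${row.text.length} chars)")
}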
dorely103/Raphtory
mainproject/src/main/scala/com/raphtory/core/actors/Router/RouterWorker.scala
<reponame>dorely103/Raphtory<gh_stars>0 package com.raphtory.core.actors.Router import akka.actor.{Actor, ActorLogging} import akka.cluster.pubsub.{DistributedPubSub, DistributedPubSubMediator} import com.raphtory.core.actors.RaphtoryActor import com.raphtory.core.actors.Router.RouterWorker.CommonMessage.TimeBroadcast...
dorely103/Raphtory
mainproject/src/main/scala/com/raphtory/core/model/communication/VertexMutliQueue.scala
<reponame>dorely103/Raphtory<filename>mainproject/src/main/scala/com/raphtory/core/model/communication/VertexMutliQueue.scala package com.raphtory.core.model.communication import com.raphtory.core.actors.PartitionManager.Workers.ViewJob import scala.collection.mutable import scala.collection.parallel.mutable.ParTrieM...
dorely103/Raphtory
mainproject/src/main/scala/com/raphtory/core/model/communication/AnalysisType.scala
package com.raphtory.core.model.communication object AnalysisType extends Enumeration { val live: Value = Value("live") val view: Value = Value("view") val range: Value = Value("range") }
karateboy/slcems
app/models/Record.scala
package models import play.api.Logging import scalikejdbc.{AutoSession, DBSession, SQLSyntax, scalikejdbcSQLInterpolationImplicitDef} import java.sql.Timestamp import java.time.LocalDateTime case class Stat( avg: Option[Double], min: Option[Double], max: ...
karateboy/slcems
app/controllers/JWTAuthentication.scala
package controllers import com.auth0.jwt.JWT import com.auth0.jwt.algorithms.Algorithm import com.auth0.jwt.interfaces.DecodedJWT import play.api.Logging import play.api.mvc.Results.Unauthorized import play.api.mvc.Security.AuthenticatedBuilder import play.api.mvc.{ActionBuilder, Request, RequestHeader, Result...
karateboy/slcems
app/controllers/DataLogger.scala
<reponame>karateboy/slcems<filename>app/controllers/DataLogger.scala package controllers import models._ import play.api.Logging import play.api.libs.json._ import play.api.mvc.{AbstractController, Action, AnyContent, ControllerComponents} import java.time.ZoneId import java.util.Date import javax.inject.{Inje...
karateboy/slcems
app/models/User.scala
<filename>app/models/User.scala package models import scalikejdbc._ case class User(username: String, password: String) object User { implicit val session: DBSession = AutoSession def get(username: String) = { sql""" Select * From User Where username = ${username} ...
karateboy/slcems
build.sbt
<reponame>karateboy/slcems lazy val root = (project in file(".")) .enablePlugins(PlayScala, SwaggerPlugin, LauncherJarPlugin) .settings( name := """slcems""", organization := "com.wecc", version := "1.0-3", scalaVersion := "2.13.6", libraryDependencies ++= Seq( guice, "org.scalatestp...
karateboy/slcems
project/plugins.sbt
<filename>project/plugins.sbt addSbtPlugin("com.typesafe.play" % "sbt-plugin" % "2.8.11") addSbtPlugin("com.iheart" % "sbt-play-swagger" % "0.10.6-PLAY2.8")
karateboy/slcems
app/models/RdCenterCollector.scala
<filename>app/models/RdCenterCollector.scala package models import akka.actor.{Actor, Cancellable, Props} import play.api.Logging import play.api.libs.json.{JsError, Json} import play.api.libs.ws.WSClient import java.time.{LocalDateTime, LocalTime} import javax.inject.Inject import scala.concurrent.Execution...
karateboy/slcems
app/models/MonitorType.scala
package models case class MonitorType(id:String, desp:String, unit:String) object MonitorType { val list: List[MonitorType] = List( MonitorType("generating", "發電量", "KW"), MonitorType("storing", "儲能量", "KW"), MonitorType("consuming", "用電量", "KW"), MonitorType("consumingPercent", "契約容量占比", "%"...
karateboy/slcems
app/models/Monitor.scala
package models import scalikejdbc.{AutoSession, DBSession, scalikejdbcSQLInterpolationImplicitDef} case class Monitor(id:Int, name:String, displayName:String, contractCapacity:Double) object Monitor { implicit val session: DBSession = AutoSession def getList: List[Monitor] = { sql""" Select...
karateboy/slcems
app/models/ITRIhandler.scala
<reponame>karateboy/slcems package models import controllers.RecordList import java.time.{LocalDateTime, ZoneId} object ITRIhandler { def dataHandler(tableType: TableType.Value, dataList:Seq[RecordList]) = { dataList.foreach(data=>{ def getGenerating(): Double = { val generatingMT=Seq("...
karateboy/slcems
app/models/TableType.scala
package models object TableType extends Enumeration { val Min = Value("Min") val Hour = Value("Hour") val defaultMap = Map((Min -> "分鐘資料"), (Hour -> "小時資料")) }
karateboy/slcems
app/controllers/HomeController.scala
<reponame>karateboy/slcems package controllers import akka.actor.ActorSystem import models._ import play.api.Logging import play.api.libs.json._ import play.api.libs.ws._ import play.api.mvc._ import java.time.{Duration, Instant, LocalDateTime, Period, ZoneId} import javax.inject._ import scala.concurrent....
karateboy/slcems
app/controllers/JwtAuthenticator.scala
package controllers import com.auth0.jwt.JWT import com.auth0.jwt.algorithms.Algorithm import play.api.Logging import java.time.Instant import java.util.Date class JwtAuthenticator extends Logging { val secrets = "<KEY>" val algorithm = Algorithm.HMAC256(secrets) val issuer = "WECC" def getT...
rluta/metorikku
src/test/scala/com/yotpo/metorikku/code/steps/test/SelectiveMergeTests.scala
package com.yotpo.metorikku.code.steps.test import com.yotpo.metorikku.code.steps.SelectiveMerge import com.yotpo.metorikku.code.steps.SelectiveMerge.merge import com.yotpo.metorikku.exceptions.MetorikkuException import org.apache.log4j.{Level, LogManager, Logger} import org.apache.spark.sql.types.StructField import o...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/output/writers/kafka/KafkaOutputWriter.scala
<reponame>rluta/metorikku package com.yotpo.metorikku.output.writers.kafka import com.yotpo.metorikku.configuration.job.Streaming import com.yotpo.metorikku.configuration.job.output.Kafka import com.yotpo.metorikku.exceptions.MetorikkuException import com.yotpo.metorikku.output.Writer import org.apache.log4j.{LogManag...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/output/writers/file/CSVOutputWriter.scala
package com.yotpo.metorikku.output.writers.file import com.yotpo.metorikku.configuration.job.Streaming import com.yotpo.metorikku.configuration.job.output.File import com.yotpo.metorikku.output.Writer import org.apache.spark.sql.DataFrame class CSVOutputWriter(var props: Map[String, Any], outputFile: Option[File]) ex...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/input/readers/kafka/KafkaInput.scala
package com.yotpo.metorikku.input.readers.kafka import java.util.Properties import com.yotpo.metorikku.input.Reader import org.apache.kafka.clients.consumer.KafkaConsumer import org.apache.spark.sql.{DataFrame, SparkSession} import za.co.absa.abris.avro.read.confluent.SchemaManager import za.co.absa.abris.avro.functi...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/code/steps/RemoveDuplicates.scala
package com.yotpo.metorikku.code.steps import com.yotpo.metorikku.exceptions.MetorikkuException import org.apache.log4j.LogManager object RemoveDuplicates { val tableParameterName = "table" val columnsParameterName = "columns" val message = "You need to send a 'table' parameter containing the table name to cha...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/metric/stepActions/dataQuality/operators/HasSize.scala
<gh_stars>100-1000 package com.yotpo.metorikku.metric.stepActions.dataQuality.operators import com.amazon.deequ.checks.Check import com.yotpo.metorikku.metric.stepActions.dataQuality.Operator class HasSize(level: Option[String], size: String, operator: String) extends Operator(level = level) { override def getChec...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/instrumentation/NullInstrumentation.scala
<reponame>rluta/metorikku package com.yotpo.metorikku.instrumentation class NullInstrumentationFactory extends InstrumentationFactory { override def create(): InstrumentationProvider = { new NullInstrumentation() } } class NullInstrumentation extends InstrumentationProvider { override def count(name: String...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/code/steps/SelectiveMerge.scala
<gh_stars>0 package com.yotpo.metorikku.code.steps import com.yotpo.metorikku.exceptions.MetorikkuException import org.apache.log4j.{LogManager, Logger} import org.apache.spark.sql.catalyst.expressions.NamedExpression import org.apache.spark.sql.{Column, DataFrame} import org.apache.spark.sql.functions._ object Selec...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/metric/StepAction.scala
package com.yotpo.metorikku.metric import org.apache.spark.sql.SparkSession trait StepAction[A] { def dataFrameName: String def run(sparkSession: SparkSession): A }
rluta/metorikku
src/main/scala/com/yotpo/metorikku/configuration/job/Output.scala
package com.yotpo.metorikku.configuration.job import com.yotpo.metorikku.configuration.job.output._ case class Output(cassandra: Option[Cassandra] = None, redshift: Option[Redshift] = None, redis: Option[Redis] = None, segment: Option[Segment] = None, ...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/code/steps/DropColumns.scala
package com.yotpo.metorikku.code.steps import com.yotpo.metorikku.exceptions.MetorikkuException object DropColumns { val message = "You need to send 2 parameters with the names of the dropped columns and the table to change: columns, table" def run(ss: org.apache.spark.sql.SparkSession, metricName: String, dataF...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/configuration/job/output/Kafka.scala
package com.yotpo.metorikku.configuration.job.output case class Kafka(servers: Seq[String], checkpointLocation: Option[String], compressionType: Option[String] ) { require(Option(servers).isDefined, "Kafka connection: servers are mandatory.") }
rluta/metorikku
src/main/scala/com/yotpo/metorikku/configuration/metric/Output.scala
<reponame>rluta/metorikku<gh_stars>100-1000 package com.yotpo.metorikku.configuration.metric import com.fasterxml.jackson.core.`type`.TypeReference import com.fasterxml.jackson.module.scala.JsonScalaEnumeration case class Output(name: Option[String], dataFrameName: String, @JsonSca...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/configuration/job/input/Elasticsearch.scala
package com.yotpo.metorikku.configuration.job.input import com.yotpo.metorikku.configuration.job.InputConfig import com.yotpo.metorikku.input.Reader import com.yotpo.metorikku.input.readers.elasticsearch.ElasticsearchInput case class Elasticsearch(nodes: String, user: Option[String], ...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/configuration/job/input/File.scala
package com.yotpo.metorikku.configuration.job.input import com.yotpo.metorikku.configuration.job.InputConfig import com.yotpo.metorikku.input.Reader import com.yotpo.metorikku.input.readers.file.{FileInput, FileStreamInput} case class File(path: String, options: Option[Map[String, String]], ...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/output/writers/redshift/RedshiftOutputWriter.scala
<reponame>rluta/metorikku<filename>src/main/scala/com/yotpo/metorikku/output/writers/redshift/RedshiftOutputWriter.scala package com.yotpo.metorikku.output.writers.redshift import com.yotpo.metorikku.configuration.job.output.Redshift import com.yotpo.metorikku.output.Writer import org.apache.log4j.LogManager import or...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/exceptions/MetorikkuException.scala
package com.yotpo.metorikku.exceptions case class MetorikkuException(private val message: String = "", private val cause: Throwable = None.orNull) extends Exception(message, cause)
rluta/metorikku
src/test/scala/com/yotpo/metorikku/code/steps/test/AlignTablesTests.scala
package com.yotpo.metorikku.code.steps.test import com.holdenkarau.spark.testing.DataFrameSuiteBase import com.yotpo.metorikku.code.steps.AlignTables import org.apache.spark.sql.SparkSession import org.scalatest.{FunSuite, _} //noinspection ScalaStyle class AlignTablesTests extends FunSuite with DataFrameSuiteBase wi...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/configuration/job/Instrumentation.scala
<filename>src/main/scala/com/yotpo/metorikku/configuration/job/Instrumentation.scala package com.yotpo.metorikku.configuration.job import com.yotpo.metorikku.configuration.job.instrumentation.InfluxDBConfig case class Instrumentation(influxdb: Option[InfluxDBConfig])
rluta/metorikku
src/main/scala/com/yotpo/metorikku/configuration/test/ConfigurationParser.scala
package com.yotpo.metorikku.configuration.test import java.io.File import java.nio.file.{Files, Paths} import com.fasterxml.jackson.module.scala.DefaultScalaModule import com.yotpo.metorikku.exceptions.{MetorikkuException, MetorikkuInvalidMetricFileException} import com.yotpo.metorikku.utils.FileUtils import org.apac...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/code/steps/LoadIfExists.scala
<reponame>rluta/metorikku<filename>src/main/scala/com/yotpo/metorikku/code/steps/LoadIfExists.scala package com.yotpo.metorikku.code.steps import com.yotpo.metorikku.exceptions.MetorikkuException import org.apache.log4j.{LogManager, Logger} import org.apache.spark.sql.{Row} object LoadIfExists { val message = "You...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/input/readers/file/FileInput.scala
package com.yotpo.metorikku.input.readers.file import com.yotpo.metorikku.input.Reader import org.apache.spark.sql.{DataFrame, SparkSession} case class FileInput(val name: String, path: String, options: Option[Map[String, String]], schemaPath: Option[Stri...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/configuration/job/input/Cassandra.scala
package com.yotpo.metorikku.configuration.job.input import com.yotpo.metorikku.configuration.job.InputConfig import com.yotpo.metorikku.input.Reader import com.yotpo.metorikku.input.readers.cassandra.CassandraInput case class Cassandra(host: String, user: Option[String], password: Op...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/metric/stepActions/dataQuality/Operator.scala
package com.yotpo.metorikku.metric.stepActions.dataQuality import com.amazon.deequ.checks.{Check, CheckLevel} abstract case class Operator(level: Option[String]) { def getCheck(level: String): Check def getLevel(level: String): CheckLevel.Value = { level match { case "error" => CheckLevel.Error c...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/configuration/job/Streaming.scala
package com.yotpo.metorikku.configuration.job import com.yotpo.metorikku.exceptions.MetorikkuWriteFailedException import org.apache.spark.sql.streaming.{DataStreamWriter, Trigger} case class Streaming(triggerMode: Option[String], triggerDuration: Option[String], outputMode: O...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/code/steps/Watermark.scala
<filename>src/main/scala/com/yotpo/metorikku/code/steps/Watermark.scala<gh_stars>100-1000 package com.yotpo.metorikku.code.steps import com.yotpo.metorikku.exceptions.MetorikkuException object Watermark { val message = "You need to send 3 parameters: table, eventTime, delayThreshold" def run(ss: org.apache.spark...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/utils/TableUtils.scala
package com.yotpo.metorikku.utils import com.yotpo.metorikku.exceptions.MetorikkuException import org.apache.spark.sql.catalog.Catalog case class TableInfo(database: String, tableName: String) object TableUtils { def getTableInfo(tableFullName: String, catalog: Catalog): TableInfo = { tableFullName.count(_ == ...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/configuration/job/input/MongoDB.scala
package com.yotpo.metorikku.configuration.job.input import com.yotpo.metorikku.configuration.job.InputConfig import com.yotpo.metorikku.input.Reader import com.yotpo.metorikku.input.readers.elasticsearch.{ElasticsearchInput, MongoDBInput} case class MongoDB(uri: String, database: String, ...
rluta/metorikku
src/test/scala/com/yotpo/metorikku/code/steps/functions/test/UserDefinedFunctionsTests.scala
package com.yotpo.metorikku.code.steps.functions.test import java.sql.Timestamp import org.scalatest.{FunSuite, Suites} import com.yotpo.metorikku.code.steps.functions.UserDefinedFunctions._ class SerDefinedFunctionsTests extends Suites ( new EpochMilliToTimestampTest ) class EpochMilliToTimestampTest extends Fun...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/metric/stepActions/dataQuality/operators/HasUniqueness.scala
<filename>src/main/scala/com/yotpo/metorikku/metric/stepActions/dataQuality/operators/HasUniqueness.scala package com.yotpo.metorikku.metric.stepActions.dataQuality.operators import com.amazon.deequ.checks.Check import com.yotpo.metorikku.metric.stepActions.dataQuality.Operator class HasUniqueness(level: Option[Strin...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/instrumentation/influxdb/InfluxDBInstrumentation.scala
package com.yotpo.metorikku.instrumentation.influxdb import java.util.concurrent.TimeUnit import com.yotpo.metorikku.configuration.job.instrumentation.InfluxDBConfig import com.yotpo.metorikku.instrumentation.{InstrumentationFactory, InstrumentationProvider} import org.influxdb.dto.Point import org.influxdb.{BatchOpt...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/output/WriterSessionRegistration.scala
package com.yotpo.metorikku.output import org.apache.spark.sql.SparkSession trait WriterSessionRegistration { def addToSparkSession(sparkSession: SparkSession): Unit = {} }
rluta/metorikku
src/test/scala/com/yotpo/metorikku/code/steps/test/RemoveDuplicatesTests.scala
<filename>src/test/scala/com/yotpo/metorikku/code/steps/test/RemoveDuplicatesTests.scala package com.yotpo.metorikku.code.steps.test import com.holdenkarau.spark.testing.DataFrameSuiteBase import com.yotpo.metorikku.code.steps.RemoveDuplicates import com.yotpo.metorikku.exceptions.MetorikkuException import org.apache....
rluta/metorikku
src/main/scala/com/yotpo/metorikku/test/StreamMockInput.scala
<reponame>rluta/metorikku package com.yotpo.metorikku.test import com.yotpo.metorikku.configuration.job.input.File import com.yotpo.metorikku.input.Reader import org.apache.spark.sql.catalyst.encoders.RowEncoder import org.apache.spark.sql.execution.streaming.MemoryStream import org.apache.spark.sql.{DataFrame, Row, S...
rluta/metorikku
build.sbt
<reponame>rluta/metorikku name := "metorikku" organization := "com.yotpo" homepage := Some(url("https://github.com/YotpoLtd/metorikku")) licenses := Seq("MIT License" -> url("http://www.opensource.org/licenses/mit-license.html")) scmInfo := Some( ScmInfo(url("https://github.com/YotpoLtd/metorikku"), "scm:git:<EMA...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/metric/stepActions/dataQuality/operators/IsContainedIn.scala
<reponame>rluta/metorikku package com.yotpo.metorikku.metric.stepActions.dataQuality.operators import com.amazon.deequ.checks.Check import com.yotpo.metorikku.metric.stepActions.dataQuality.Operator class IsContainedIn(level: Option[String], column: String, allowedValues: Array[String]) extends Operator(level = level...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/code/steps/Registrator.scala
<reponame>rluta/metorikku<gh_stars>100-1000 package com.yotpo.metorikku.code.steps import java.sql.Timestamp import com.yotpo.metorikku.code.steps.functions.UserDefinedFunctions import org.apache.spark.sql.functions.udf object Registrator { def run(ss: org.apache.spark.sql.SparkSession, metricName: String, dataF...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/output/writers/file/JSONOutputWriter.scala
<filename>src/main/scala/com/yotpo/metorikku/output/writers/file/JSONOutputWriter.scala package com.yotpo.metorikku.output.writers.file import com.yotpo.metorikku.configuration.job.output.File class JSONOutputWriter(props: Map[String, String], outputFile: Option[File]) extends FileOutputWriter(Option(props).getOrEl...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/configuration/job/output/Redshift.scala
<gh_stars>100-1000 package com.yotpo.metorikku.configuration.job.output case class Redshift(jdbcURL: String, tempS3Dir: String) { require(Option(jdbcURL).isDefined, "Redshift Database arguments: jdbcURL is mandatory.") require(Option(tempS3Dir).isDefined, "Redshift Database arguments: tempS3Dir...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/metric/stepActions/dataQuality/ValidationRunner.scala
<filename>src/main/scala/com/yotpo/metorikku/metric/stepActions/dataQuality/ValidationRunner.scala package com.yotpo.metorikku.metric.stepActions.dataQuality import com.amazon.deequ.checks.{CheckResult, CheckStatus} import com.amazon.deequ.metrics.DoubleMetric import com.amazon.deequ.{VerificationResult, VerificationS...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/test/TesterSortData.scala
package com.yotpo.metorikku.test case class TesterSortData(keys: List[String]) { def sortEnrichedRows(a: EnrichedRow, b: EnrichedRow): Boolean = { for (colName <- keys) { if (a.row.get(colName) != b.row.get(colName)) { return a.row.getOrElse(colName, 0).toString().hashCode() < b.row.getOrElse(colN...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/configuration/test/Params.scala
<reponame>rluta/metorikku<filename>src/main/scala/com/yotpo/metorikku/configuration/test/Params.scala package com.yotpo.metorikku.configuration.test case class Params(variables: Option[Map[String, String]])
rluta/metorikku
src/test/scala/com/yotpo/metorikku/metric/stepActions/dataQuality/FailedDFHandlerTest.scala
<reponame>rluta/metorikku package com.yotpo.metorikku.metric.stepActions.dataQuality import com.yotpo.metorikku.metric.stepActions.dataQuality.operators.HasUniqueness import org.apache.log4j.{Level, Logger} import org.apache.spark.sql.SparkSession import org.scalatest.{BeforeAndAfterEach, FunSuite} class FailedDFHan...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/configuration/job/Catalog.scala
package com.yotpo.metorikku.configuration.job case class Catalog(database: Option[String])
rluta/metorikku
src/main/scala/com/yotpo/metorikku/test/TestUtil.scala
<filename>src/main/scala/com/yotpo/metorikku/test/TestUtil.scala package com.yotpo.metorikku.test import org.apache.log4j.LogManager import org.apache.spark.sql.DataFrame import org.apache.spark.sql.functions.{col, when} import scala.collection.mutable.ArrayBuffer object TestUtil { val log = LogManager.getLogger(t...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/output/WriterFactory.scala
package com.yotpo.metorikku.output import com.yotpo.metorikku.Job import com.yotpo.metorikku.configuration.job.Configuration import com.yotpo.metorikku.configuration.metric.{Output, OutputType} import com.yotpo.metorikku.exceptions.MetorikkuException import com.yotpo.metorikku.output.writers.cassandra.CassandraOutputW...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/input/readers/file/FilesInput.scala
package com.yotpo.metorikku.input.readers.file import com.yotpo.metorikku.input.Reader import org.apache.spark.sql.{DataFrame, SparkSession} case class FilesInput(name: String, paths: Seq[String], options: Option[Map[String, String]], schemaPath: Option...
rluta/metorikku
version.sbt
version in ThisBuild := "0.0.127-SNAPSHOT"
rluta/metorikku
src/main/scala/com/yotpo/metorikku/configuration/metric/Configuration.scala
<gh_stars>100-1000 package com.yotpo.metorikku.configuration.metric case class Configuration(steps: List[Step], output: Option[List[Output]])
rluta/metorikku
src/main/scala/com/yotpo/metorikku/metric/stepActions/dataQuality/DataQualityCheck.scala
<reponame>rluta/metorikku package com.yotpo.metorikku.metric.stepActions.dataQuality import com.amazon.deequ.checks.Check import com.yotpo.metorikku.metric.stepActions.dataQuality.operators.{HasSize, HasUniqueness, IsComplete, IsUnique, IsContainedIn} import org.apache.log4j.LogManager case class DataQualityCheck( ...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/configuration/job/output/Elasticsearch.scala
package com.yotpo.metorikku.configuration.job.output case class Elasticsearch(nodes: String, user: Option[String], password: Option[String]) { require(Option(nodes).isDefined, "Elasticsearch connection: nodes is mandatory.") }
rluta/metorikku
src/main/scala/com/yotpo/metorikku/MetorikkuTester.scala
package com.yotpo.metorikku import com.yotpo.metorikku.configuration.test.ConfigurationParser import com.yotpo.metorikku.test.Tester import org.apache.log4j.LogManager object MetorikkuTester extends App { lazy val log = LogManager.getLogger(this.getClass) val configs = ConfigurationParser.parse(args) configs.f...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/code/steps/functions/UserDefinedFunctions.scala
<filename>src/main/scala/com/yotpo/metorikku/code/steps/functions/UserDefinedFunctions.scala package com.yotpo.metorikku.code.steps.functions import java.sql.Timestamp import java.time.Instant object UserDefinedFunctions { def epochMilliToTimestamp(timestamp_epoch: Long): Timestamp = { val instant: Instant =...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/metric/stepActions/dataQuality/operators/IsUnique.scala
package com.yotpo.metorikku.metric.stepActions.dataQuality.operators import com.amazon.deequ.checks.Check import com.yotpo.metorikku.metric.stepActions.dataQuality.Operator class IsUnique(level: Option[String], column: String) extends Operator(level = level) { override def getCheck(level: String): Check = { new...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/configuration/job/output/Segment.scala
package com.yotpo.metorikku.configuration.job.output case class Segment(apiKey: String) { require(Option(apiKey).isDefined, "Segment API Key is mandatory.") }
rluta/metorikku
src/main/scala/com/yotpo/metorikku/test/ErrorMessage.scala
<reponame>rluta/metorikku package com.yotpo.metorikku.test import org.apache.log4j.LogManager import org.apache.spark.sql.SparkSession object ResultsType extends Enumeration { val expected = Value("Expected") val actual = Value("Actual") } case class MismatchData(expectedIndex: Int, actualIndex: Int, ...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/code/steps/ToAvro.scala
package com.yotpo.metorikku.code.steps import com.yotpo.metorikku.exceptions.MetorikkuException import org.apache.spark.sql.DataFrame import org.apache.spark.sql.functions.col import org.apache.spark.sql.functions.struct import za.co.absa.abris.avro.functions.to_confluent_avro import za.co.absa.abris.avro.read.conflue...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/code/steps/CamelCaseColumnNames.scala
package com.yotpo.metorikku.code.steps import com.yotpo.metorikku.exceptions.MetorikkuException object CamelCaseColumnNames { val message = "You need to send 1 parameters with the names of the table to change: table" def run(ss: org.apache.spark.sql.SparkSession, metricName: String, dataFrameName: String, params...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/input/readers/elasticsearch/ElasticsearchInput.scala
package com.yotpo.metorikku.input.readers.elasticsearch import com.yotpo.metorikku.input.Reader import org.apache.spark.sql.{DataFrame, SparkSession} case class ElasticsearchInput(name: String, nodes: String, user: Option[String], password: Option[String], index: String, opti...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/input/readers/jdbc/JDBCInput.scala
package com.yotpo.metorikku.input.readers.jdbc import com.yotpo.metorikku.input.Reader import org.apache.spark.sql.{DataFrame, SparkSession} case class JDBCInput(val name: String, connectionUrl: String, user: String, password: String, table: String, options: Option[Map[String...
rluta/metorikku
src/test/scala/com/yotpo/metorikku/metric/test/MetricReporterTester.scala
<filename>src/test/scala/com/yotpo/metorikku/metric/test/MetricReporterTester.scala package com.yotpo.metorikku.metric.test import com.yotpo.metorikku.metric.MetricReporting import org.apache.log4j.{Level, LogManager, Logger} import org.apache.spark.sql.SparkSession import org.scalatest.{FunSuite, _} import org.apache....
rluta/metorikku
src/test/scala/com/yotpo/metorikku/tags/UnsupportedInCurrentVersion.scala
package com.yotpo.metorikku.tags import org.scalatest.Tag object UnsupportedInCurrentVersion extends Tag("com.yotpo.metorikku.tags.UnsupportedInCurrentVersion")
rluta/metorikku
src/main/scala/com/yotpo/metorikku/metric/stepActions/dataQuality/operators/Evaluator.scala
package com.yotpo.metorikku.metric.stepActions.dataQuality.operators case class Evaluator() { def dqAssertion[N<%Ordered[N]](operator: String, evaluatee: N): N => Boolean = operator match { case "==" => {_ == evaluatee} case "!=" => {_ != evaluatee} case ">=" => {_ >= evaluatee} ca...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/metric/stepActions/Sql.scala
package com.yotpo.metorikku.metric.stepActions import com.yotpo.metorikku.metric.StepAction import com.yotpo.metorikku.metric.stepActions.dataQuality.DataQualityCheckList import org.apache.log4j.LogManager import org.apache.spark.sql.{DataFrame, SparkSession} /** * Represents the SQL query to run */ case class Sq...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/configuration/job/instrumentation/InfluxDBConfig.scala
<filename>src/main/scala/com/yotpo/metorikku/configuration/job/instrumentation/InfluxDBConfig.scala<gh_stars>100-1000 package com.yotpo.metorikku.configuration.job.instrumentation case class InfluxDBConfig(url: String, username: Option[String], password: Option[Strin...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/code/steps/AlignTables.scala
package com.yotpo.metorikku.code.steps import com.yotpo.metorikku.exceptions.MetorikkuException import org.apache.spark.sql.Column import org.apache.spark.sql.functions.{col, lit} object AlignTables { val message = "You need to send 2 parameters with the names of the dataframes to align: from, to" private def al...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/configuration/job/output/Hudi.scala
package com.yotpo.metorikku.configuration.job.output case class Hudi(dir: String, parallelism: Option[String], maxFileSize: Option[String], operation: Option[String], storageType: Option[String], maxVersions: Option[String], ...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/output/writers/redis/RedisOutputWriter.scala
<reponame>rluta/metorikku package com.yotpo.metorikku.output.writers.redis import com.redislabs.provider.redis._ import com.yotpo.metorikku.Job import com.yotpo.metorikku.configuration.job.output.Redis import com.yotpo.metorikku.output.{WriterSessionRegistration, Writer} import org.apache.log4j.LogManager import org.a...
rluta/metorikku
src/test/scala/com/yotpo/metorikku/metric/stepActions/dataQuality/IsCompleteTest.scala
package com.yotpo.metorikku.metric.stepActions.dataQuality import com.yotpo.metorikku.metric.stepActions.dataQuality.operators.IsComplete import com.yotpo.metorikku.tags.UnsupportedInCurrentVersion import org.apache.log4j.{Level, Logger} import org.apache.spark.sql.SparkSession import org.scalatest.{BeforeAndAfterEach...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/configuration/job/output/Cassandra.scala
package com.yotpo.metorikku.configuration.job.output case class Cassandra(host: String, username: Option[String], password: Option[String]) { require(Option(host).isDefined, "Cassandra database connection: host is mandatory.") }
rluta/metorikku
src/main/scala/com/yotpo/metorikku/configuration/metric/ConfigurationParser.scala
<reponame>rluta/metorikku package com.yotpo.metorikku.configuration.metric import java.io.{File, FileNotFoundException} import com.fasterxml.jackson.module.scala.DefaultScalaModule import com.yotpo.metorikku.exceptions.MetorikkuInvalidMetricFileException import com.yotpo.metorikku.metric.Metric import com.yotpo.metor...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/configuration/test/Mock.scala
package com.yotpo.metorikku.configuration.test case class Mock(name: String, path: String, var streaming: Option[Boolean]) { streaming = Option(streaming.getOrElse(false)) }
rluta/metorikku
src/main/scala/com/yotpo/metorikku/configuration/job/input/FileDateRange.scala
<reponame>rluta/metorikku package com.yotpo.metorikku.configuration.job.input import com.fasterxml.jackson.annotation.JsonProperty import com.yotpo.metorikku.configuration.job.InputConfig import com.yotpo.metorikku.input.Reader import com.yotpo.metorikku.input.readers.file.FilesInput import org.joda.time.format.{DateT...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/configuration/job/output/Redis.scala
<reponame>rluta/metorikku package com.yotpo.metorikku.configuration.job.output case class Redis(host: String, port: Option[String], auth: Option[String], db: Option[String]) { require(Option(host).isDefined, "Redis database connection: host is manda...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/configuration/job/output/File.scala
package com.yotpo.metorikku.configuration.job.output case class File(dir: String, checkpointLocation: Option[String]) { require(Option(dir).isDefined, "Output file directory: dir is mandatory.") }
rluta/metorikku
src/main/scala/com/yotpo/metorikku/configuration/job/Periodic.scala
package com.yotpo.metorikku.configuration.job import com.yotpo.metorikku.exceptions.MetorikkuException import scala.concurrent.duration.Duration case class Periodic(triggerDuration: Option[String]) { def getTriggerDurationInSeconds(): Long = { try { Duration(triggerDuration.get).toSeconds } catch { ...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/metric/DeequFactory.scala
package com.yotpo.metorikku.metric import com.yotpo.metorikku.metric.stepActions.dataQuality.DataQualityCheckList import org.apache.log4j.Logger class DeequFactory(log: Logger, failedDFLocation: Option[String] = None, ignoreDeequeValidations: Option[Boolean] = None) { def gen...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/input/readers/cassandra/CassandraInput.scala
<filename>src/main/scala/com/yotpo/metorikku/input/readers/cassandra/CassandraInput.scala package com.yotpo.metorikku.input.readers.cassandra import com.yotpo.metorikku.input.Reader import org.apache.spark.sql.{DataFrame, SparkSession} import org.apache.spark.sql.cassandra._ case class CassandraInput(name: String, ho...
rluta/metorikku
examples/udf/Example.scala
<filename>examples/udf/Example.scala package com.yotpo.udf.test import org.apache.spark.sql.SparkSession import org.apache.spark.sql.functions.udf object TestUDF { def addZPrefix(s: String): String = { "Z" + s } def run(ss: org.apache.spark.sql.SparkSession, metricName: String, dataFrameName: String, param...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/output/writers/jdbc/JDBCQueryWriter.scala
<filename>src/main/scala/com/yotpo/metorikku/output/writers/jdbc/JDBCQueryWriter.scala package com.yotpo.metorikku.output.writers.jdbc import java.sql.{Date, DriverManager, PreparedStatement, Timestamp} import com.yotpo.metorikku.configuration.job.output.JDBC import com.yotpo.metorikku.output.Writer import org.apach...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/test/EnrichedRow.scala
<filename>src/main/scala/com/yotpo/metorikku/test/EnrichedRow.scala package com.yotpo.metorikku.test import com.yotpo.metorikku.test.TestUtil.log import org.apache.spark import org.apache.spark.sql.{DataFrame, Row, SparkSession} import org.apache.spark.sql.types.{StringType, StructField} import scala.collection.JavaCo...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/metric/MetricReporting.scala
package com.yotpo.metorikku.metric import java.util.concurrent.TimeUnit import com.yotpo.metorikku.exceptions.{MetorikkuException, MetorikkuWriteFailedException} import com.yotpo.metorikku.instrumentation.InstrumentationProvider import org.apache.log4j.LogManager import org.apache.spark.sql.DataFrame class MetricRep...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/instrumentation/StreamingQueryMetricsListener.scala
<filename>src/main/scala/com/yotpo/metorikku/instrumentation/StreamingQueryMetricsListener.scala package com.yotpo.metorikku.instrumentation import org.apache.log4j.{LogManager, Logger} import org.apache.spark.sql.SparkSession import org.apache.spark.sql.streaming.StreamingQueryListener import org.apache.spark.sql.str...
rluta/metorikku
src/main/scala/com/yotpo/metorikku/utils/HudiUtils.scala
package com.yotpo.metorikku.utils import org.apache.hudi.avro.model.HoodieCompactionPlan import org.apache.hudi.common.table.timeline.HoodieInstant import org.apache.hudi.common.table.timeline.HoodieInstant.State import org.apache.hudi.common.table.{HoodieTableMetaClient, HoodieTimeline} import org.apache.hudi.common....