import play.api.libs.json._  // JsValue/JsObject/JsArray, assumed to be Play JSON types

// Walks a JSON document and collects every key path, prefixing each key
// with its parent path (e.g. "parent.child.grandchild").
object KeysTracker {

  def allKeys(json: JsValue, parent: String): Seq[String] = json match {
    // Object: emit each field's path, then recurse into its value
    case JsObject(fields) =>
      fields.toSeq.map { case (name, _) => parent + "." + name } ++
        fields.toSeq.flatMap { case (name, value) => allKeys(value, parent + "." + name) }
    // Array: recurse into each element under the same parent path
    case JsArray(values) => values.flatMap(allKeys(_, parent))
    // Leaf values (strings, numbers, booleans, null) contribute no keys
    case _ => Seq.empty[String]
  }
}
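A quick check of the traversal above (a sketch assuming Play JSON; the sample document is illustrative):

val doc = Json.parse("""{"user": {"name": "ada", "tags": ["a", "b"]}}""")
// Prints: List(".user", ".user.name", ".user.tags") -- note the leading "."
// produced when the initial parent prefix is the empty string
println(KeysTracker.allKeys(doc, ""))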
# After stopping all application instances, reset the application
$ bin/kafka-streams-application-reset --application-id my-streams-app \
      --input-topics my-input-topic \
      --intermediate-topics rekeyed-topic \
      --bootstrap-servers brokerHost:9092 \
      --zookeeper zookeeperHost:2181
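Note that the reset tool only touches topics and committed offsets (input topics are rewound, intermediate topics are skipped to their end, and the application's internal topics are deleted); each instance's local state directory still has to be wiped separately, e.g. with KafkaStreams#cleanUp() before restarting.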
import scala.annotation.tailrec

// Tail-recursive list reversal: each step moves the head of the input onto
// the accumulator, which therefore holds the reversed prefix at all times.
@tailrec
def reverseList[T](inputList: List[T], rList: List[T]): List[T] =
  inputList match {
    case Nil    => rList                      // input exhausted: accumulator is the result
    case h :: t => reverseList(t, h :: rList)
  }
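Called with an empty accumulator as the starting point:

println(reverseList(List(1, 2, 3), Nil))   // prints List(3, 2, 1)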
from pyspark import SparkContext

# Close to the approach randomSplit takes internally:
# https://github.com/apache/spark/blob/master/python/pyspark/rdd.py#L429
class SplitByKey:
    def __init__(self, key):
        self.key = key
        self.split = None

    def func(self, split, iterator):
        # The original snippet is truncated here; this body is a reconstruction:
        # keep only the pairs whose key matches, mirroring how randomSplit's
        # samplers filter each partition via mapPartitionsWithIndex.
        for obj in iterator:
            if obj[0] == self.key:
                yield obj
// Appends a value to the list stored under `key`, returning the updated map.
def updateMap[A, B](map: Map[A, List[B]], key: A, value: B): Map[A, List[B]] =
  map + ((key, map.getOrElse(key, List()) ::: List(value)))

// Group the Kafka offset ranges by topic as "partition:untilOffset" strings.
var topicsMap = offsetsRanges.map(o => (o.topic, List[String]())).toMap
offsetsRanges.foreach { o =>
  topicsMap = updateMap[String, String](topicsMap, o.topic, o.partition + ":" + o.untilOffset)
}
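The grouping can be checked in isolation; a sketch assuming offsetsRanges comes from spark-streaming-kafka's OffsetRange (topic names and offsets below are illustrative):

import org.apache.spark.streaming.kafka.OffsetRange

val offsetsRanges = Array(
  OffsetRange.create("events", 0, 0L, 100L),
  OffsetRange.create("events", 1, 0L, 42L)
)
// After the loop above: Map(events -> List(0:100, 1:42))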
from pyspark.sql.functions import col

# Allow dynamic partition inserts through the Hive metastore
hive_context.setConf("spark.sql.hive.convertMetastoreOrc", "false")
hive_context.setConf("hive.exec.dynamic.partition", "true")
hive_context.setConf("hive.exec.dynamic.partition.mode", "nonstrict")

# Select the columns in the target table's declared order before appending
my_df.select(*[col(c).alias(c) for c in columns])\
    .write.mode("append")\
    .partitionBy('year-month')\
    .insertInto("my_table_patitioned")
import com.ibm.mq.jms.MQQueueConnectionFactory
import com.ibm.mq.jms.JMSC
import scala.io.Source
import javax.jms.DeliveryMode
import javax.jms.Session

def toRun(): Unit = {
  // Local file whose contents will be pushed onto the MQ queue
  val fileName = "/home/my_home/path/local_file"
  val cf = new MQQueueConnectionFactory()
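  // The original snippet is truncated above; what follows is a hedged
  // reconstruction of a typical client-mode setup and send loop. The host,
  // port, channel, queue manager and queue name below are all assumptions.
  cf.setHostName("mqHost")
  cf.setPort(1414)
  cf.setQueueManager("QM1")
  cf.setChannel("DEV.APP.SVRCONN")
  cf.setTransportType(JMSC.MQJMS_TP_CLIENT_MQ_TCPIP)

  val connection = cf.createQueueConnection()
  val session = connection.createQueueSession(false, Session.AUTO_ACKNOWLEDGE)
  val sender = session.createSender(session.createQueue("queue:///DEV.QUEUE.1"))
  sender.setDeliveryMode(DeliveryMode.NON_PERSISTENT)

  // Send each line of the local file as one JMS text message
  connection.start()
  Source.fromFile(fileName).getLines().foreach { line =>
    sender.send(session.createTextMessage(line))
  }
  connection.close()
}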
from pyspark.serializers import PickleSerializer, AutoBatchedSerializer

# Re-serialize the RDD with pickle batching so the JVM side can read it,
# then convert it to a Java RDD (note: the reserialized RDD is the one that
# must be handed over, not the original)
reserialized_rdd = rdd._reserialize(AutoBatchedSerializer(PickleSerializer()))
rdd_java = rdd.ctx._jvm.SerDe.pythonToJava(reserialized_rdd._jrdd, True)

# Call a JVM-side helper directly through the Py4J gateway
_jvm = sc._jvm
_jvm.myclass.apps.mine\
    .pyKafka\
    .sendMessageByRDD("host:6667",