Using perf:
$ perf record -g binary
$ perf script | stackcollapse-perf.pl | rust-unmangle | flamegraph.pl > flame.svg
NOTE: See @GabrielMajeri's comments below about the -g option.
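For context, a minimal sketch of a Rust program one might profile this way (the workload below is hypothetical); release builds need debug info, e.g. debug = true under [profile.release] in Cargo.toml, so perf and stackcollapse-perf.pl can symbolize the frames:

# Cargo.toml (excerpt)
[profile.release]
debug = true                  # keep symbols/debug info in the release build

// src/main.rs -- deliberately CPU-bound so the flame graph shows useful stacks
fn busy_work(n: u64) -> u64 {
    (0..n).map(|i| i.wrapping_mul(i) % 1_000_003).sum()
}

fn main() {
    let total: u64 = (0..10_000u64).map(|i| busy_work(i * 100)).sum();
    println!("{}", total);
}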
// Colors reference
// You can use the following as so:
// console.log(colorCode, data);
// console.log(`${colorCode}some colorful text string${resetCode} rest of string in normal color`);
//
// ... and so on.
export const reset = "\x1b[0m"
export const bright = "\x1b[1m"
export const dim = "\x1b[2m"
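A short usage sketch based on the comments above (the "./colors" module path is an assumption, and only the three constants shown here are used):

// usage.ts -- hypothetical consumer of the exported codes
import { reset, bright, dim } from "./colors";

// Emit a code, then reset so the rest of the terminal output stays in the normal color.
console.log(`${bright}important${reset} plain text ${dim}de-emphasized${reset}`);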
Byobu Commands
==============
byobu            Screen manager
Level 0 Commands (Quick Start)
------------------------------
<F2>             Create a new window
import kafka.server.KafkaConfig;
import kafka.server.KafkaServer;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
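These imports suggest an embedded broker is being configured in code. A hedged sketch of that setup (the property values and log directory are placeholders, and it assumes ZooKeeper on localhost:2181; actually starting KafkaServer needs extra Scala-specific constructor arguments that differ between Kafka versions, so it is only indicated in a comment):

import kafka.server.KafkaConfig;

import java.io.File;
import java.util.Properties;

public class EmbeddedBrokerConfig {
    public static void main(String[] args) {
        // Minimal broker properties; the values here are placeholders.
        Properties props = new Properties();
        props.put("broker.id", "0");
        props.put("zookeeper.connect", "localhost:2181");
        props.put("log.dirs", new File(System.getProperty("java.io.tmpdir"), "kafka-logs").getAbsolutePath());
        props.put("offsets.topic.replication.factor", "1");

        // KafkaConfig accepts a java.util.Map, which Properties satisfies.
        KafkaConfig config = new KafkaConfig(props);
        System.out.println("Configured broker id: " + config.brokerId());

        // new KafkaServer(config, ...) would start the broker, but its remaining
        // constructor arguments (Time, Option, ...) vary by Kafka version.
    }
}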
registered table thread_459   <-- register table gets called
dropped table thread_259
dropped table thread_759
registered table thread_860
dropped table thread_860
registered table thread_560
registered table thread_060
registered table thread_960
registered table thread_160
dropped table thread_560
wget http://www.eng.lsu.edu/mirrors/apache/spark/spark-1.2.1/spark-1.2.1-bin-hadoop2.3.tgz
tar -xf spark-1.2.1-bin-hadoop2.3.tgz
cd spark-1.2.1-bin-hadoop2.3/bin/
wget https://oss.sonatype.org/content/repositories/snapshots/org/elasticsearch/elasticsearch-hadoop/2.1.0.BUILD-SNAPSHOT/elasticsearch-hadoop-2.1.0.BUILD-20150324.023417-341.jar
./spark-shell --jars elasticsearch-hadoop-2.1.0.BUILD-20150324.023417-341.jar
import org.apache.spark.sql.SQLContext
case class KeyValue(key: Int, value: String)
val sqlContext = new org.apache.spark.sql.SQLContext(sc)
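The shell session appears to stop mid-example; a hedged continuation in the same spark-shell (the index name "spark/kv" and an Elasticsearch node on localhost are assumptions) would write a few KeyValue records through the elasticsearch-hadoop RDD API:

import org.elasticsearch.spark._   // adds saveToEs to RDDs via implicits

// Build a small RDD of the case class defined above and index it into Elasticsearch.
val kv = sc.makeRDD(Seq(KeyValue(1, "one"), KeyValue(2, "two"), KeyValue(3, "three")))
kv.saveToEs("spark/kv", Map("es.nodes" -> "localhost"))

Reading the index back with sc.esRDD("spark/kv") should also be available from the same import.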