Homebrew is a great little package manager for OS X. If you haven't already, installing it is pretty easy:
ruby -e "$(curl -fsSL https://raw.github.com/Homebrew/homebrew/go/install)"

package main
| import ( | |
| "io" | |
| "log" | |
| "net/http" | |
| ) | |
// httpClient captures the single HTTP operation this program needs
// (a GET returning the response or an error), so any type with a
// matching Get method — including *http.Client — satisfies it.
// Fix: the extracted snippet lost the interface's closing brace,
// leaving the declaration unterminated; it is restored here.
type httpClient interface {
	Get(string) (*http.Response, error)
}
| func main() { | |
| ctx := context.Background() | |
| // trap Ctrl+C and call cancel on the context | |
| ctx, cancel := context.WithCancel(ctx) | |
| c := make(chan os.Signal, 1) | |
| signal.Notify(c, os.Interrupt) | |
| defer func() { | |
| signal.Stop(c) |
| package main | |
| import ( | |
| "log" | |
| "myserver" | |
| "net/http" | |
| ) | |
// addr is the host:port used by this example server.
const addr = "localhost:12345"
# Start consuming and processing batches from the stream.
spark_streaming_context.start()
# Block the driver until the streaming job terminates or is stopped.
spark_streaming_context.awaitTermination()
# Tear the streaming context down once awaitTermination returns.
spark_streaming_context.stop()
| def toRedshift(time, rdd): | |
| try: | |
| sqlContext = getSqlContextInstance(rdd.context) | |
| schema = StructType([ | |
| StructField('user_id', StringType(), True), | |
| StructField('device_id', StringType(), True), | |
| StructField('steps', IntegerType(), True), | |
| StructField('battery_level', IntegerType(), True), | |
| StructField('calories_spent', IntegerType(), True), |
# Write the RDD's elements to S3 as plain-text part files.
# NOTE(review): "s3n://" is the legacy Hadoop S3 connector; modern
# Hadoop/Spark deployments generally use "s3a://" — confirm against the
# cluster's Hadoop version before changing the scheme.
spark_context.saveAsTextFile("s3n://parents/activity_log/01010101.txt")
# Wire up the Spark driver: one SparkContext shared by the streaming and
# SQL contexts, then attach a DStream fed from the configured Kinesis stream.
spark_context = SparkContext(appName=kinesis_app_name)
spark_streaming_context = StreamingContext(spark_context, spark_batch_interval)
sql_context = SQLContext(spark_context)
# NOTE(review): this rebinds `kinesis_stream` from the stream *name* to the
# resulting DStream object. The name argument is evaluated before the
# assignment, so the call itself is correct, but the shadowing is easy to
# misread; left in place because later code outside this excerpt may use
# either binding.
kinesis_stream = KinesisUtils.createStream(
    spark_streaming_context, kinesis_app_name, kinesis_stream, kinesis_endpoint,
    aws_region, kinesis_initial_position, kinesis_checkpoint_interval)
# Each Kinesis record arrives as a JSON string; decode it into Python
# objects. json.loads is passed directly — the original
# `lambda x: json.loads(x)` wrapper was redundant and behaves identically.
py_rdd = kinesis_stream.map(json.loads)
| from __future__ import print_function | |
| from pyspark import SparkContext | |
| from pyspark.streaming import StreamingContext | |
| from pyspark.streaming.kinesis import KinesisUtils, InitialPositionInStream | |
| import datetime | |
| import json | |
| from pyspark.sql import SQLContext, Row | |
| from pyspark.sql.types import * | |
# AWS region passed to KinesisUtils.createStream.
aws_region = 'us-east-1'