Tested with Apache Spark 2.1.0, Python 2.7.13 and Java 1.8.0_112
For older versions of Spark and IPython, please see the previous version of this text.
| from collections import deque | |
| from concurrent.futures import ThreadPoolExecutor as _ThreadPoolExecutor | |
| class ThreadPoolExecutor(_ThreadPoolExecutor): | |
| """Subclass with a lazy consuming imap method.""" | |
| def imap(self, fn, *iterables, timeout=None, queued_tasks_per_worker=2): | |
| """Ordered imap that consumes iterables just-in-time ref https://gist.github.com/ddelange/c98b05437f80e4b16bf4fc20fde9c999. |
| import sys | |
| sys.path.append('../../facenet/src') | |
| import facenet | |
| import argparse | |
| import os | |
| import importlib | |
| import tensorflow as tf | |
| from tqdm import tqdm | |
| import tensorflow.contrib.slim as slim |
| from __future__ import with_statement | |
| from alembic import context | |
| from sqlalchemy import engine_from_config, pool | |
| from logging.config import fileConfig | |
| from models import Base | |
| config = context.config | |
| fileConfig(config.config_file_name) |
| # | |
| # Original solution via StackOverflow: | |
| # http://stackoverflow.com/questions/35802939/install-only-available-packages-using-conda-install-yes-file-requirements-t | |
| # | |
| # | |
| # Install via `conda` directly. | |
| # This will fail to install all | |
| # dependencies. If one fails, | |
| # all dependencies will fail to install. |
| stage "preparation" | |
| node { | |
| parallel ( | |
| phase1: { sh "echo p1; echo phase1" }, | |
| phase2: { sh "echo p2; echo phase2" } | |
| ) | |
| sh "echo 42 > data" | |
| stash includes: '*', name: 'binary' | |
Tested with Apache Spark 2.1.0, Python 2.7.13 and Java 1.8.0_112
For older versions of Spark and IPython, please see the previous version of this text.
| #include <curl/curl.h> | |
| #include <string> | |
// libcurl CURLOPT_WRITEFUNCTION callback: appends the received chunk
// (size * nmemb bytes at ptr) to the std::string supplied as userdata,
// and returns the number of bytes consumed as libcurl requires.
size_t writeFunction(void *ptr, size_t size, size_t nmemb, std::string* data) {
    const size_t total = size * nmemb;
    data->append(static_cast<char*>(ptr), total);
    return total;
}
| int main(int argc, char** argv) { | |
| auto curl = curl_easy_init(); |
| #!/usr/bin/env python | |
| # This example shows how to use multiprocessing with an initializer function. | |
| # We take advantage of that to make the workers each have a custom initial | |
| # load. And in particular example, we will make the workers sleep. | |
| # Because we make them sleep different amounts, one of them is going to be | |
| # ready much before the others, and thus we can guess easily which worker | |
| # will do most of the work. | |
| # | |
| # see https://stackoverflow.com/questions/26693797/python-multiprocessing-process-number |
| #!/usr/bin/env python | |
| # -*- coding: utf-8 -*- | |
| # | |
| # @file gevent_zmq_redis_demo.py | |
| # @author kaka_ace <[email protected]> | |
| # @date Tue Oct 14 2014 | |
| """ | |
| features: there are two concurrent tasks | |
| 1. From Redis server, Getting the notify msg with BLPOP operation, |