This is a Node.js CLI utility that converts a file containing one JSON object per line into CSV.
The CSV is written to stdout, so the output can be redirected or piped anywhere you want.
node json2csv.js records.txt > records.csv
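The core of the conversion is easy to sketch. The snippet below is only an illustration of the approach, not the utility's exact code: it assumes one flat JSON object per line, takes the header row from the first record's keys, and uses naive quoting.

const fs = require('fs');
const readline = require('readline');

const rl = readline.createInterface({input: fs.createReadStream(process.argv[2])});
const quote = (value) => `"${String(value == null ? '' : value).replace(/"/g, '""')}"`;
let keys = null;

rl.on('line', (line) => {
    const record = JSON.parse(line);

    // Emit the header row once, using the first record's keys.
    if (!keys) {
        keys = Object.keys(record);
        process.stdout.write(keys.map(quote).join(',') + '\n');
    }

    process.stdout.write(keys.map((key) => quote(record[key])).join(',') + '\n');
});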
When restoring a PostgreSQL database from a dump, the process is simple: feed PostgreSQL the dump file and it executes the SQL statements inside.
CockroachDB seems to take a different approach so that it can restore the database more efficiently. Unfortunately, it lacks support for things that often appear in a PostgreSQL dump, such as sequences and computed indexes. Fortunately, these can usually be removed from the dump before importing and then executed as DDL afterwards.
It's also not easy for a new CockroachDB user to follow the import process as documented, so I wanted to make it easier.
Move the SEQUENCEs, INDEXes and VIEWs into a separate SQL file. You'll need this later. Note that client_min_messages is set to debug5 in the PostgreSQL dump file.

import {fromBinder} from 'baconjs'
/**
 * Create a Bacon.js EventStream that fetches a resource specified by the fetch
 * parameters.
 *
 * If the EventStream loses all subscriptions while the underlying request is pending,
 * the request will be aborted using <code>AbortController</code>. NOTE: If
 * <code>init.signal</code> is provided, it will be ignored in favour of an internally
 * provided signal.
 */
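The full implementation isn't reproduced here, but a minimal sketch of the idea looks something like the following. fetchToEventStream and everything inside it are illustrative only (and assume Bacon.js 3's named exports); the gist's actual code may differ.

import {fromBinder, End, Error as BaconError} from 'baconjs';

function fetchToEventStream(input, init = {}) {
    return fromBinder((sink) => {
        const controller = new AbortController();

        // Replace any caller-supplied init.signal with our own so the stream
        // can abort the request when it loses all subscribers.
        fetch(input, {...init, signal: controller.signal})
            .then((response) => response.json())
            .then((value) => {
                sink(value);      // emit the parsed body
                sink(new End());  // then end the stream
            })
            .catch((error) => {
                sink(new BaconError(error));
                sink(new End());
            });

        // Called by Bacon.js when the last subscriber goes away.
        return () => controller.abort();
    });
}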
<!doctype html>
<html>
<head>
<style>
html, body {
    font-family: -apple-system, BlinkMacSystemFont, 'Helvetica Neue', Roboto, 'Segoe UI', sans-serif;
}
.color-grid {
    display: grid;
#!/usr/bin/env node
const fs = require('fs');
const util = require('util');
const readline = require('readline');
const crypto = require('crypto');
const readFile = util.promisify(fs.readFile);
const rl = readline.createInterface({
# RancherOS cloud-config.yml options I find useful.

# Enable 4GB swap (bs=4K × count=1M = 4 GiB).
runcmd:
- sudo dd if=/dev/zero of=/swapfile bs=4K count=1M
- sudo chmod 600 /swapfile
- sudo mkswap /swapfile
- sudo swapon /swapfile
mounts:
- [ /swapfile, none, swap, sw, 0, 0 ]
#!/usr/bin/env bash
export REGISTRY_DOMAIN=docker.example.com
export [email protected]
export REGISTRY_USER=docker
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta http-equiv="content-type" content="text/html; charset=UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>Lua editor with Syntax highlighting</title>
<script src="https://cdnjs.cloudflare.com/ajax/libs/babel-standalone/6.26.0/babel.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/react/16.2.0/umd/react.production.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/react-dom/16.2.0/umd/react-dom.production.min.js"></script>
// Range-like generator: sequence(end), sequence(start, end) or sequence(start, end, step).
function *sequence(...args) {
    const start = args.length < 2 ? 0 : args[0];     // with one argument, start defaults to 0
    const end = args[args.length < 2 ? 0 : 1] || 0;  // the sole argument, otherwise the second
    const step = args.length < 3 ? 1 : args[2];      // step defaults to 1

    for (let i = start; i < end; i += step) {
        yield i;
    }
}
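A couple of usage examples, inferred from the argument handling above:

console.log([...sequence(5)]);        // 0, 1, 2, 3, 4
console.log([...sequence(2, 8, 2)]);  // 2, 4, 6
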
function *parseBif(buffer) {
    const data = new DataView(buffer);

    // Ensure this is a BIF v0: the magic number "\x89BIF\r\n\x1a\n" read as two
    // little-endian uint32s, followed by a version field of 0.
    if (data.getUint32(0, true) !== 0x46494289 || data.getUint32(4, true) !== 0x0a1a0a0d || data.getUint32(8, true) !== 0) {
        return;
    }

    // Framewise separation (timestamp multiplier); a value of 0 means 1000 ms.
    const separation = data.getUint32(16, true) || 1000;

    // The BIF index starts at byte 64.
    const start = 64;