Skip to content

Commit

Permalink
init
Browse files Browse the repository at this point in the history
  • Loading branch information
Contra committed Jun 29, 2016
0 parents commit 6b643d5
Show file tree
Hide file tree
Showing 15 changed files with 611 additions and 0 deletions.
7 changes: 7 additions & 0 deletions .babelrc
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
{
"presets": ["es2015-loose", "stage-0"],
"plugins": [
"transform-runtime",
"add-module-exports"
]
}
24 changes: 24 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
.DS_Store
build
lib-cov
*.seed
*.log
*.csv
*.dat
*.out
*.pid
*.gz
_book

pids
logs
results

npm-debug.log
node_modules
*.sublime*
*.node
coverage
*.orig
.idea
sandbox
11 changes: 11 additions & 0 deletions .travis.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
language: node_js
node_js:
- "4"
env:
- CXX=g++-4.8
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- g++-4.8
20 changes: 20 additions & 0 deletions LICENSE
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
Copyright (c) 2016 Contra <[email protected]>

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
47 changes: 47 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
<p align='center'>
<img src='https://i.imgur.com/JXSMT0k.png' width='400'/>
</p>

# census-boundaries [![NPM version][npm-image]][npm-url] [![Downloads][downloads-image]][npm-url]

Downloads and converts US Census TIGER data representing all boundaries in the United States.

You define the processing logic, so you can put it into any DB you like.

By default, this imports the boundaries of every state and incorporated place (~30K boundaries). Takes quite a bit of time depending on your internet speed.

## Install

```
npm install census-boundaries
```

## Example

```js
import census from 'census-boundaries'

// specify your own options if you want
const overrides = {
objects: [
'STATE',
'PLACE'
]
}

census(overrides, {
// this function is called every time a record is parsed
onBoundary: (objectType, doc, cb) => {
cb() // make sure to call the cb
},

// this function is called when all records are parsed and processed
onFinish: (err) => {

}
})
```

[downloads-image]: http://img.shields.io/npm/dm/census-boundaries.svg
[npm-url]: https://npmjs.org/package/census-boundaries
[npm-image]: http://img.shields.io/npm/v/census-boundaries.svg
23 changes: 23 additions & 0 deletions book.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
{
"gitbook": "2.4.3",
"structure": {
"summary": "docs/Introduction.md"
},
"plugins": [
"edit-link",
"prism",
"-highlight",
"github",
"anchors",
"collapsible-menu"
],
"pluginsConfig": {
"edit-link": {
"base": "https://github.com/contra/census-boundaries/tree/master",
"label": "Edit This Page"
},
"github": {
"url": "https://github.com/contra/census-boundaries/"
}
}
}
12 changes: 12 additions & 0 deletions dist/defaultConfig.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
'use strict';

exports.__esModule = true;
// Default run configuration: the Census Bureau FTP endpoint, the TIGER2015
// data root, and the boundary object types imported when the caller supplies
// no overrides (see index.js, which deep-defaults against this object).
var defaultConfig = {
  ftp: {
    host: 'ftp2.census.gov',
    port: 21
  },
  base: '/geo/tiger/TIGER2015/',
  objects: ['STATE', 'PLACE']
};

exports.default = defaultConfig;
module.exports = exports['default'];
35 changes: 35 additions & 0 deletions dist/getFTP.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
'use strict';

exports.__esModule = true;

var _ftp = require('ftp');

var _ftp2 = _interopRequireDefault(_ftp);

var _once = require('once');

var _once2 = _interopRequireDefault(_once);

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

// Opens an FTP connection, retrying if the handshake hangs for more than
// 2 seconds. Calls cb(err) on a connection error or cb(null, client) once
// the client is ready. cb is once-wrapped, so it fires at most one time
// even across retries.
var makeConnection = function makeConnection(opt, cb) {
  cb = (0, _once2.default)(cb);
  var client = new _ftp2.default();

  // If neither 'ready' nor 'error' arrives in time, assume the handshake
  // hung: tear down this client and start over with a fresh connection.
  var retry = setTimeout(function () {
    console.log('Trying FTP again...');
    client.end();
    makeConnection(opt, cb);
  }, 2000);

  // BUG FIX: the retry timer must be cancelled on error too. Previously the
  // error path left the timer armed, so after cb(err) had already reported
  // failure the timer fired and spawned a new connection attempt — forever.
  var onError = function (err) {
    clearTimeout(retry);
    cb(err);
  };

  client.once('ready', function () {
    // Stop routing later errors into cb; the caller owns the client now.
    client.removeListener('error', onError);
    clearTimeout(retry);
    cb(null, client);
  });
  client.once('error', onError);

  client.connect(opt);
};

exports.default = makeConnection;
module.exports = exports['default'];
130 changes: 130 additions & 0 deletions dist/index.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,130 @@
'use strict';

exports.__esModule = true;

var _async = require('async');

var _async2 = _interopRequireDefault(_async);

var _buffer = require('buffer');

var _path = require('path');

var _path2 = _interopRequireDefault(_path);

var _chalk = require('chalk');

var _chalk2 = _interopRequireDefault(_chalk);

var _shp2json = require('shp2json');

var _shp2json2 = _interopRequireDefault(_shp2json);

var _plural = require('plural');

var _plural2 = _interopRequireDefault(_plural);

var _lodash = require('lodash.defaultsdeep');

var _lodash2 = _interopRequireDefault(_lodash);

var _once = require('once');

var _once2 = _interopRequireDefault(_once);

var _debug2 = require('debug');

var _debug3 = _interopRequireDefault(_debug2);

var _defaultConfig = require('./defaultConfig');

var _defaultConfig2 = _interopRequireDefault(_defaultConfig);

var _getFTP = require('./getFTP');

var _getFTP2 = _interopRequireDefault(_getFTP);

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

var debug = (0, _debug3.default)('census'); /*eslint no-console: 0 */

exports.default = function (overrides, _ref) {
var onBoundary = _ref.onBoundary;
var onFinish = _ref.onFinish;

if (!onBoundary) throw new Error('Missing onBoundary!');
if (!onFinish) throw new Error('Missing onFinish!');
onFinish = (0, _once2.default)(onFinish);
var options = (0, _lodash2.default)({}, overrides, _defaultConfig2.default);

debug(_chalk2.default.bold('Establishing connection:'));
debug(' -- ' + _chalk2.default.cyan('US Census Bureau @ ' + options.ftp.host));

(0, _getFTP2.default)(options, function (err, ftp) {
if (err) return onFinish(err);
var context = {
ftp: ftp,
options: options,
onBoundary: onBoundary
};

_async2.default.forEachSeries(options.objects, processObject.bind(null, context), onFinish);
});
};

// Fetches the FTP file listing for one TIGER object type (e.g. 'STATE')
// and runs every boundary file through processFilePath, one at a time.
function processObject(context, object, cb) {
  cb = (0, _once2.default)(cb);
  fetchObjectFiles(context, object, function (err, files) {
    if (err) return cb(err);
    var noun = (0, _plural2.default)('file', files.length);
    debug(_chalk2.default.bold('Processing ' + files.length + ' boundary ' + noun + ' for ' + object));
    _async2.default.forEachSeries(files, processFilePath.bind(null, context), cb);
  });
}

// Downloads one shapefile over FTP, converts it to GeoJSON, and feeds each
// parsed feature to the caller's onBoundary handler in series.
// `file` is a { type, path } descriptor produced by fetchObjectFiles.
// cb fires once: with a stream/parse error, or after all features finish.
function processFilePath(context, file, cb) {
  cb = (0, _once2.default)(cb); // several stream events below can call cb; deliver only the first
  var ftp = context.ftp;

  ftp.get(file.path, function (err, stream) {
    if (err) return cb(err);

    // shp2json transforms the zipped shapefile stream into a GeoJSON stream.
    var srcStream = (0, _shp2json2.default)(stream);
    var chunks = [];

    // Buffer the whole JSON document; it is parsed in one go on 'end'.
    srcStream.on('data', function (data) {
      chunks.push(data);
    });

    srcStream.once('error', function (err) {
      return cb(err);
    });
    srcStream.once('end', function () {
      var docs = JSON.parse(_buffer.Buffer.concat(chunks)).features;
      debug(' -- ' + _chalk2.default.cyan('Parsed ' + file.path + ', inserting ' + docs.length + ' boundaries now...'));
      // onBoundary(objectType, doc, done) is invoked once per feature, in order.
      _async2.default.forEachSeries(docs, context.onBoundary.bind(null, file.type), cb);
    });

    // The FTP stream starts paused; begin flowing only after handlers are attached.
    stream.resume();
  });
}

// Lists the FTP directory for one object type and hands back an array of
// { type, path } descriptors for every regular file found in it.
function fetchObjectFiles(context, object, cb) {
  var ftp = context.ftp;
  var options = context.options;

  cb = (0, _once2.default)(cb);
  var folderName = _path2.default.join(options.base, object);
  ftp.list(folderName, function (err, list) {
    if (err) return cb(err);
    var files = [];
    list.forEach(function (entry) {
      // FTP list entries of type '-' are regular files; skip dirs/links.
      if (entry.type !== '-') return;
      files.push({
        type: object,
        path: _path2.default.join(folderName, entry.name)
      });
    });
    cb(null, files);
  });
}
module.exports = exports['default'];
55 changes: 55 additions & 0 deletions dist/saveBoundary.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
'use strict';

exports.__esModule = true;

var _once = require('once');

var _once2 = _interopRequireDefault(_once);

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

// Persists one GeoJSON feature as a Boundary row: id from GEOID, a type
// inferred from the feature's properties, and its geometry converted to
// ReQL geojson polygons. Existing rows with the same id are replaced.
// cb fires exactly once, with the insert error if the write fails.
function saveBoundary(_ref, json, cb) {
  var rethink = _ref.rethink;

  cb = (0, _once2.default)(cb);
  var polygons = getPolygons(json.geometry);
  var data = {
    id: json.properties.GEOID,
    type: inferType(json),
    // ZCTA features carry GEOID10 instead of NAME; prefer it when present.
    name: json.properties.GEOID10 || json.properties.NAME,
    geo: polygons.map(function (p) {
      return rethink.r.geojson(p);
    })
  };

  // BUG FIX: the original chained .catch(cb).then(cb), so a rejected insert
  // invoked cb twice (the second call only masked by once), and a throw from
  // the catch handler became an unhandled rejection. A single two-handler
  // .then settles cb exactly once per outcome.
  rethink.Boundary.insert(data, { conflict: 'replace' }).run().then(function () {
    cb();
  }, function (err) {
    cb(err);
  });
}

// Normalizes a GeoJSON geometry to a flat array of Polygon geometries.
// A MultiPolygon is split into one Polygon per coordinate ring set (each
// carrying the parent's `properties`); anything else passes through as a
// single-element array.
function getPolygons(geometry) {
  if (geometry.type !== 'MultiPolygon') return [geometry];

  var polygons = [];
  for (var i = 0; i < geometry.coordinates.length; i++) {
    polygons.push({
      type: 'Polygon',
      coordinates: geometry.coordinates[i],
      properties: geometry.properties
    });
  }
  return polygons;
}

// Maps a TIGER feature to a boundary category from its marker properties,
// checked in priority order: STATENS -> 'state', GEOID10 (ZCTA) -> 'zip',
// PLACENS -> 'place'. Throws when no marker is recognized.
function inferType(feature) {
  var props = feature.properties;
  if (props.STATENS) return 'state';
  if (props.GEOID10) return 'zip';
  if (props.PLACENS) return 'place';
  throw new Error('Unknown feature type for ' + props.NAME);
}

exports.default = saveBoundary;
module.exports = exports['default'];
Loading

0 comments on commit 6b643d5

Please sign in to comment.