Skip to content

Commit

Permalink
decaffeinate: Convert Gulpfile.coffee and 29 other files to JS
Browse files Browse the repository at this point in the history
  • Loading branch information
austinhallock committed Jun 11, 2020
1 parent 3955e8f commit 66c6002
Show file tree
Hide file tree
Showing 30 changed files with 2,418 additions and 2,094 deletions.
53 changes: 29 additions & 24 deletions Gulpfile.js
Original file line number Diff line number Diff line change
@@ -1,33 +1,38 @@
gulp = require 'gulp'
shell = require 'gulp-shell'
coffeelint = require 'gulp-coffeelint'
spawn = require('child_process').spawn
import gulp from 'gulp';
import shell from 'gulp-shell';
import coffeelint from 'gulp-coffeelint';
import { spawn } from 'child_process';

// Paths used by the build tasks below.
// NOTE(review): this commit decaffeinates the repo, yet serverBin still points
// at ./bin/server.coffee and the glob watches *.coffee — presumably the sources
// are still authored in CoffeeScript; confirm before changing to .js.
const paths = {
  serverBin: './bin/server.coffee',
  coffee: [
    './**/*.coffee',
    '!./node_modules/**/*' // never lint/watch dependencies
  ]
};

// gulp 4 removed string-alias tasks (gulp.task('a', 'b') throws "Task function
// must be specified"). Wrapping gulp.series(...) in a callback also defers the
// task-name lookup to run time, so these aliases may be registered before the
// tasks they reference.
gulp.task('default', (done) => gulp.series('dev')(done));
gulp.task('dev', (done) => gulp.series('watch:dev')(done));

// Run the server once, then re-run 'dev:server' whenever a .coffee file
// changes. gulp 4's watch() takes a task function/composition, not an array
// of task names (['dev:server'] was the gulp 3 form).
gulp.task('watch:dev', (done) =>
  gulp.series('dev:server', function watchCoffee() {
    return gulp.watch(paths.coffee, gulp.series('dev:server'));
  })(done));

// 'dev:server' (re)spawns the CoffeeScript server as a child process. The IIFE
// closes over `devServer` so each run can kill the previous child first.
// NOTE(review): still spawns `coffee` on ./bin/server.coffee even though this
// commit converts bin/server.js to node — confirm which binary should run.
gulp.task('dev:server', (function() {
  let devServer = null;
  // Don't let the child outlive the gulp process itself.
  process.on('exit', () => devServer?.kill());
  return function startServer(done) {
    devServer?.kill();
    devServer = spawn('coffee', [paths.serverBin], { stdio: 'inherit' });
    devServer.on('close', (code) => {
      if (code === 8) {
        // gulp has no `log` method (that API lived in gulp-util); calling
        // gulp.log here was a TypeError. Log via console instead.
        console.error('Error detected, waiting for changes');
      }
    });
    // Signal completion to gulp 4 — the spawn is fire-and-forget for the task.
    done();
  };
})());

// Lint every CoffeeScript source matched by paths.coffee and print the
// results. Returning the stream lets gulp 4 know when the task finishes.
gulp.task('lint', () =>
  gulp.src(paths.coffee)
    .pipe(coffeelint())
    .pipe(coffeelint.reporter()));
56 changes: 26 additions & 30 deletions bin/server.js
Original file line number Diff line number Diff line change
@@ -1,32 +1,28 @@
#!/usr/bin/env coffee
_ = require 'lodash'
log = require 'loga'
cluster = require 'cluster'
os = require 'os'
#!/usr/bin/env nodeimport _ from 'lodash';
import log from 'loga';
import cluster from 'cluster';
import os from 'os';
import { setup, childSetup, server } from '../';
import config from '../config';

{setup, childSetup, server} = require '../'
config = require '../config'
if (config.ENV === config.ENVS.PROD) {
const cpus = config.MAX_CPU || os.cpus().length;
if (cluster.isMaster) {
setup().then(function() {
console.log('setup done', cpus);
_.map(_.range(cpus), function() {
console.log('forking...');
return cluster.fork();
});

if config.ENV is config.ENVS.PROD
cpus = config.MAX_CPU or os.cpus().length
if cluster.isMaster
setup().then ->
console.log 'setup done', cpus
_.map _.range(cpus), ->
console.log 'forking...'
cluster.fork()

cluster.on 'exit', (worker) ->
log "Worker #{worker.id} died, respawning"
cluster.fork()
.catch log.error
else
childSetup().then ->
server.listen config.PORT, ->
log.info 'Worker %d, listening on %d', cluster.worker.id, config.PORT
else
console.log 'Setting up'
setup().then ->
server.listen config.PORT, ->
log.info 'Server listening on port %d', config.PORT
.catch log.error
return cluster.on('exit', function(worker) {
log(`Worker ${worker.id} died, respawning`);
return cluster.fork();
});}).catch(log.error);
} else {
childSetup().then(() => server.listen(config.PORT, () => log.info('Worker %d, listening on %d', cluster.worker.id, config.PORT)));
}
} else {
console.log('Setting up');
setup().then(() => server.listen(config.PORT, () => log.info('Server listening on port %d', config.PORT))).catch(log.error);
}
69 changes: 38 additions & 31 deletions config.js
Original file line number Diff line number Diff line change
@@ -1,42 +1,49 @@
import _ from 'lodash';
import assertNoneMissing from 'assert-none-missing';

// All configuration comes from the process environment with local-dev
// defaults. NOTE(review): env values are strings — PORT/MAX_CPU stay strings
// when the env vars are set; confirm downstream consumers accept that.
const { env } = process;

const config = {
  CURRENT_IMPORT_VERSION: 20, // increment any time you want to repull all data
  VALID_RETURN_VERSIONS: [
    // https://github.com/techbyorg/990-xml-reader/blob/master/irs_reader/settings.py#L36
    '2013v3.0', '2013v3.1', '2013v4.0', '2014v5.0', '2014v6.0',
    '2015v2.0', '2015v2.1', '2015v3.0', '2016v3.0', '2016v3.1',
    '2017v2.0', '2017v2.1', '2017v2.2', '2017v2.3', '2018v3.0',
    '2018v3.1'
  ],
  PORT: env.IRS_990_PORT || 3000,
  ENV: env.DEBUG_ENV || env.NODE_ENV,
  MAX_CPU: env.IRS_990_API_MAX_CPU || 1,
  IRSX_CACHE_DIRECTORY: '/tmp',
  IRSX_XML_HTTP_BASE: env.IRSX_XML_HTTP_BASE || 'https://s3.amazonaws.com/irs-form-990',
  NTEE_CSV: 'https://nccs-data.urban.org/data/bmf/2019/bmf.bm1908.csv',
  REDIS: {
    PREFIX: 'irs_990_api',
    PUB_SUB_PREFIX: 'irs_990_api_pub_sub',
    PORT: 6379,
    CACHE_HOST: env.REDIS_CACHE_HOST || 'localhost',
    PUB_SUB_HOST: env.REDIS_PUB_SUB_HOST || 'localhost'
  },
  SCYLLA: {
    KEYSPACE: 'irs_990_api',
    PORT: 9042,
    CONTACT_POINTS: (env.SCYLLA_CONTACT_POINTS || 'localhost').split(',')
  },
  ELASTICSEARCH: {
    PORT: 9200,
    HOST: env.ELASTICSEARCH_HOST || 'localhost'
  },
  ENVS: {
    DEV: 'development',
    PROD: 'production',
    TEST: 'test'
  },
  // Keys forwarded to the shared phil-helpers layer.
  SHARED_WITH_PHIL_HELPERS: ['REDIS', 'SCYLLA', 'ELASTICSEARCH', 'ENVS', 'ENV']
};

// Fail fast if any key resolved to undefined (e.g. ENV unset).
assertNoneMissing(config);

export default config;
46 changes: 25 additions & 21 deletions graphql/directives.js
Original file line number Diff line number Diff line change
@@ -1,23 +1,27 @@
import router from 'exoid-router';
import { Format } from 'backend-shared';
import { defaultFieldResolver } from 'graphql';
import { SchemaDirectiveVisitor } from 'graphql-tools';

// GraphQL schema directives that post-process a field's resolved string.
// (The decaffeinate output pre-declared `let NameCase, SentenceCase;` and
// exported mutable `let` bindings via a double assignment; replaced with
// plain class declarations and `const` exports.)

// Wraps the field resolver so its string result is passed through
// Format.nameCase — presumably proper-name casing; confirm in backend-shared.
export class NameCase extends SchemaDirectiveVisitor {
  visitFieldDefinition(field) {
    const { resolve = defaultFieldResolver } = field;
    field.resolve = function(...args) {
      const str = resolve.apply(this, args);
      return Format.nameCase(str);
    };
  }
}
export const nameCase = NameCase;

// Same wrapping, but through Format.sentenceCase.
export class SentenceCase extends SchemaDirectiveVisitor {
  visitFieldDefinition(field) {
    const { resolve = defaultFieldResolver } = field;
    field.resolve = function(...args) {
      const str = resolve.apply(this, args);
      return Format.sentenceCase(str);
    };
  }
}
export const sentenceCase = SentenceCase;
Loading

0 comments on commit 66c6002

Please sign in to comment.