From 28a74b05f232a60102390cef5848c3992dcfa165 Mon Sep 17 00:00:00 2001 From: maxgrossman Date: Tue, 19 Sep 2017 05:34:25 -0400 Subject: [PATCH] include fill holes and run pipeline pipeline is now running with the fill holes step. --- .DS_Store | Bin 10244 -> 0 bytes db/local/index.js | 2 +- docker/clean-geometries/Dockerfile | 2 +- docker/clean-geometries/clean-geom.py | 5 +- docker/clean-geometries/{run.sh => main.sh} | 2 +- docker/delete-holes/Dockerfile | 5 +- docker/delete-holes/main.sh | 6 ++ docker/delete-holes/run.sh | 7 -- js/insert-into-table.js | 97 ++++++++++++++++++ js/simplify-props.js | 106 ++++++++++++++++++++ package.json | 1 + processing/b-dissolve.sh | 8 +- processing/c-delete-holes.sh | 5 +- processing/d-update-geojson-spec.sh | 2 +- processing/e-simplify-props.js | 81 --------------- processing/e-simplify-props.sh | 9 ++ processing/f-insert-into-table.sh | 9 ++ processing/f-insert-tables.js | 85 ---------------- qgis-processing/a-clean-geom.sh | 16 +++ yarn.lock | 78 ++++++++++++-- 20 files changed, 330 insertions(+), 196 deletions(-) delete mode 100644 .DS_Store rename docker/clean-geometries/{run.sh => main.sh} (70%) create mode 100644 docker/delete-holes/main.sh delete mode 100644 docker/delete-holes/run.sh create mode 100755 js/insert-into-table.js create mode 100755 js/simplify-props.js delete mode 100755 processing/e-simplify-props.js create mode 100755 processing/e-simplify-props.sh create mode 100755 processing/f-insert-into-table.sh delete mode 100755 processing/f-insert-tables.js create mode 100644 qgis-processing/a-clean-geom.sh diff --git a/.DS_Store b/.DS_Store deleted file mode 100644 index a0a49ecae3d37fdee799408ab86d301932bb6b63..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 10244 zcmeHMYiu0V6+YkENoFULcpQ>=?M>`WjPt@tY{xh;d9Z#Y*s-Ie*ul<&IQtlq5$heZ zvulSKBUkjPLMk3@LxDd)RnepcRg|_AsZxP9rBy4$wgsf3s;$(jfBO7`P#@>sJG-nm z4uT4`X*IK&Idjjs=g!`(D2 zcOss0;#|ITS#4k8N5GGO9|1oCegymoTn!Om&K8HZSV{ltN5GGOAAw>7#P|^5Q^=?% zXO)yg2Vdk8fMq4C<-zx+xM3ObWYm+hO0tCyC|8!s72Xm9%ANHQuU^!Xvq~y=g17j9 z_hfi06v(G@`w>~45LME@`VsIWaCrp8+P#vJ6r(J~O4sl4iInYTTUvgBKt<((g^RRc zPzx^6ZXG)9ObumY*__)Q%N&y>J88we`(ejP74Kv5TWo7;%v|2%q_Z(QZDmTTA0M&P zNoT6t$)%I7Z0ArcdzyXagT~C7>FM^?=16DTOmk$qwXJJq$RfvU>7_;6|el5Qdu=Qk9Ui=9rnlRawFBJfn9({C0#tuHo~g!=mX zBF4a&xondAb8sSSJ8Ab8E90^pGpk&VNPogfAIez5K9X=!x$(3+W>zO+sYEUn%UWHj z)R2AFVqKM+$T+FgA;+~vGe<_V)|srZU1l0R>N=Thk8EtrG;^+Xv&C)MlU6Uc=#JX_ zNa1KAcwuSHsx?iUwzhZf>B(1_)ys_K#>$aN+qL5<>+qDF%$`0Ha}!oNX{S#e<+#x3a;sttuTvr!w(#qB$CcJ@xyR(&WEG{2!8Px50AI7#7|RQ2ma z*J|1^q3_99d{t^=eQ0CQjA_FzznyBL+1L^^-Qe);{LPhXtFcWplflEOSlmh}Rfn-d zGY5hrlPa=GxzpIKRiimLnsCO)`Mq7Gj)uB5^OAN#JbN(aW)DtS>3831pQarVmab&d zN@|S-NHj(_sM}y;w8c|uw3^zemj>x1WoVl2qw{ouo~7sLoAe_6nDX={{g(bnf1i6I#Zq#g@m{7kaT5`*08+!4QUVn2~uBF~o7ZM5lu@IE%Y5gS+ubd>Wr&gx<#pJ{Jo zE!W?0V{d*5r>4G4UtUO&^3<81PRx}xB8S4|wF|8jDTe7r%bQE9meeEYsl zqlN&Xj&_B^Wy6mg4@B?w@>`XVjHVUYMGiP~D5jRS@B*rjjx*RN=pMS4&e3P-5qgp? z(%0z)`T_r4@*^gjSD9>nMSq~b&^z>Z`Um|J8dji2Qq5Xy!B(`O6ML9yy3maQ9Ac^w zDSayvuy7krG1Z7-O%``~Ex7eZvrVl~hvSxZypCq!d!`$sKc0nODHpR~L|6j&MaV za?WBdQne#>YXy&(nkqKct=AabWsGy0OpMG0%G$1Xh?$dFsjQv) zPBBL^7v^hgLMwOg(RGhKRPClZ=8+GM#K(Ec^E`c@G59L|oZh0}@O0;G0EU763)oW<7q5iCGBTO#=9l~k_z`#?5vWjgB4Yo4@0I`me;+ORRs0C}5%^z? 
z04jTjdb|1U3)>Cl9T9u&5k61yDGpI@R!Ql?7r7nJQf|j{L2Sp%-4@mr_F9Brl=VW+ fD#>?z{Kx+cD4( new RegExp(process.argv[3]).test(admin)); +// base name mirrors admin name +var basename = admin.split('-')[1] +// here's the path to the current admin file +var adminFile = path.join(adminInPath, admin) +// stream of this admin file +var adminFileStream = createReadStream(adminFile) +// pipe split for the lines needed to send along to the geojson parser +.pipe(split()) +// the geojson parser for parsing the feature collection +.pipe(parser) +.on('data', (feature) => { +// for each feature, insert it into the table using the insertIntoTable function + if (feature.properties) { + console.log(feature.properties); + insertIntoTable(feature, basename, st, db) + } +}) +.on('end', () => { + db.destroy(); +}) + +/** + * transforms feature into postgis table row and inserts it into the proper admin table + * + * @param {object} feature geojson feature + * @param {string} admin admin name + * @param {object} st spatial type object (generated by knex postgis extension) that allows for making st/postgis statements + * @param {object} db kenx object for connecting to the database + * + */ +function insertIntoTable (feature, admin, st, db) { + // generate properties and geometry objects from feature object + const properties = feature.properties; + const geometry = feature.geometry; + if (admin === 'communes') { + admin = 'commune'; + } + if (!properties.en_name) { + properties.en_name = '...' + } + return db.transaction((t) => { + return db('admin_boundaries') + .transacting(t) + .insert({ + // shared identifier for each row in admin table + type: admin, + // numeric id for current admin unit + id: properties.id, + // numeric id for currrent admin unit's parent (for instance a commune's parent district) + // this is helpful for future spatial analysis + parent_id: properties.p_id, + // admin unit geometry + geom: st.geomFromGeoJSON(geometry), + // english name of admin unit + name_en: properties.en_name, + // vietnamese name of admin unit + name_vn: '' + }) + .then(t.commit) + .catch((e) => { + t.rollback(); + throw e; + }) + }); +} diff --git a/js/simplify-props.js b/js/simplify-props.js new file mode 100755 index 0000000..c020323 --- /dev/null +++ b/js/simplify-props.js @@ -0,0 +1,106 @@ +/** + * @file reads streaming admin geojson and reduces properties to match the schema of the table to which it is going to be written + */ + +// these modules are needed for streaming geojsons +var createReadStream = require('fs').createReadStream; +var createWriteStream = require('fs').createWriteStream; +var readdirSync = require('fs').readdirSync; +var geojsonStream = require('geojson-stream'); +var parser = geojsonStream.parse(); +var stringifier = geojsonStream.stringify(); +// module to read path +var path = require('path'); +// parallel allows for reading each admin geojson stream asynchronously +var parallel = require('async').parallel; +// since the output of `c-update-geojson-spec.sh` writes geojsons to a single line, the stream needs to be broken up into lines, otherwise it will not work +// split is a module that does just this. 
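+// (split() is a through stream that re-chunks the incoming text on newlines before it reaches the geojson parser)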
+var split = require('split'); +// directory for geojson input and output +var baseDir = process.argv[2]; +var adminInPath = `${baseDir}/tmp`; +var adminOutPath = `${baseDir}/output` +// read in files as a list usable in the parallel function +var admin = readdirSync(adminInPath).find((admin) => new RegExp(process.argv[3]).test(admin)); +writeSimplifiedProps(admin); + +/** + * simplifies input properties to spec needed to make admin postgis tables + * + * @param {object} properties original properties from streaming geojson + * @param {string} admin admin unit name, like 'commune', 'district,' + * @return {object} newProperties simplified properties generated from properties + */ +function makeNewProperties (properties, admin) { + const newProperties = {}; + if (new RegExp(/commune/).test(admin)) { + newProperties.en_name = properties.EN_name + newProperties.id = properties.COMCODE02; + newProperties.p_id = properties.DISTCODE02 + } else if (new RegExp(/district/).test(admin)) { + newProperties.en_name = properties.D_EName + newProperties.id = properties.DISTCODE02 + newProperties.p_id = properties.PROCODE02 + } else if (new RegExp(/province/).test(admin)) { + newProperties.en_name = properties.P_EName + newProperties.id = properties.PROCODE02 + } + newProperties.en_name = cleanName(newProperties.en_name, admin); + return newProperties; +} + +/** + * reads in raw geojson and writes out simplified geojson for provided admin level + * @param {string} admin string representation of admin type + * + */ +function writeSimplifiedProps(adminPath) { + // the basename, really the admin level name, of the current admin + var basename = admin.split('-')[1]; + // the relative path to the current admin file + var adminInFile = path.join(adminInPath, admin) + // a read stream of admin file + createReadStream(adminInFile) + // piping split makes the new lines mentioned to be neccessary above + .pipe(split()) + // parser is a transform stream that parses geojson feature collections (the form of the input geojson) + .pipe(parser) + .on('data', (feature) => { + // make and pass feature's properties to the make makeNewProperties function that correctly transforms + // the properties to uniform spec needed to insert into the postgis tables + const properties = feature.properties; + // reset the feature properties as the returj from makeNewProperties + feature.properties = makeNewProperties(properties, basename) + }) + // stringify the geojson to send to createWriteStream, then write it to fiel + .pipe(stringifier) + .pipe(createWriteStream(`${adminOutPath}/vietnam-${basename}-simplified.geojson`)) +} + +/** + * returns cleaned version of place name + * @param {string} name admin unit name + * @return {string} cleaned admin unit name + */ +function cleanName(name, admin) { + let cleanName = name; + if (name) { + if (new RegExp(/X. /).test(name)) { + cleanName = name.replace('X. ',''); + } else if (new RegExp(/P. /).test(name)) { + cleanName = name.replace('P. ', '') + } else if (new RegExp(/Tt. /).test(name)) { + cleanName = name.replace('Tt. ', '') + } else if (new RegExp(/P. /).test(name)) { + cleanName = name.replace('P. 
', '') + } else if (new RegExp(/ D./).test(name)){ + cleanName = name.replace(' D.', '') + } else if (new RegExp(/\\?/).test(name)) { + cleanName = name.replace('?', 'ỉ') + } + if (Boolean(Number(cleanName))) { + cleanName = `${admin} ${cleanName}` + } + } + return cleanName +} diff --git a/package.json b/package.json index 68d6e6c..bde478a 100644 --- a/package.json +++ b/package.json @@ -16,6 +16,7 @@ "iconv-lite": "^0.4.19", "knex": "^0.13.0", "knex-postgis": "^0.2.2", + "pg": "^7.3.0", "split": "^1.0.1" } } diff --git a/processing/b-dissolve.sh b/processing/b-dissolve.sh index 8bdfd3f..cf10bc1 100755 --- a/processing/b-dissolve.sh +++ b/processing/b-dissolve.sh @@ -7,7 +7,7 @@ INPUT=${1}/tmp/${INPUT_NAME}.shp # for both the district and province, create a new geojson that dissolves features on the unique field id supplied # on the right hand side of the semi-colon -for ADMIN in 'district;DISTCODE02' 'province;PROCODE02' +for ADMIN in 'communes;COMCODE02' 'district;DISTCODE02' 'province;PROCODE02' do # split ${ADMIN} string on the semi-colon to grab the admin name and field id ADMIN_ARRAY=(${ADMIN//;/ }) @@ -24,8 +24,4 @@ do # http://spatialreference.org/ref/epsg/wgs-84/ ogr2ogr -t_srs EPSG:4326 -f 'GeoJSON' "${OUTPUT}" "${INPUT}" -dialect sqlite -sql $'SELECT ST_Union(geometry), * FROM "'"$INPUT_NAME"$'" GROUP BY '"$DISSOLVE_FIELD" done -# name of geojson output file -OUT_GJSN=${1}/output/${INPUT_NAME}.geojson -# since communes don't need to be dissolved, do a simple shp->geojson conversion -# make sure also to reproject -ogr2ogr -t_srs EPSG:4326 -f 'GeoJSON' "${OUT_GJSN}" "${INPUT}" +# ç diff --git a/processing/c-delete-holes.sh b/processing/c-delete-holes.sh index 666ec89..860d914 100755 --- a/processing/c-delete-holes.sh +++ b/processing/c-delete-holes.sh @@ -9,8 +9,9 @@ docker run -it qgis_headless # get admin areas from container add copy them over to the output folder of the process # using the docker cp command, copying from the most recently built container to the output folder # `docker ps --latest -q` grabs the most recent container -docker cp `docker ps --latest -q`:workspace/vietnam-communes-filled-holes.geojson ${1}/output/vietnam-communes-filled-holes.geojson -docker cp `docker ps --latest -q`:workspace/vietnam-district-filled-holes.geojson ${1}/output/vietnam-district-filled-holes.geojson docker cp `docker ps --latest -q`:workspace/vietnam-province-filled-holes.geojson ${1}/output/vietnam-province-filled-holes.geojson +docker cp `docker ps --latest -q`:workspace/vietnam-district-filled-holes.geojson ${1}/output/vietnam-district-filled-holes.geojson +# communes do not need filled holes. so they are just copied directly from the tmp to output folderope +cp ${1}/tmp/vietnam-communes.geojson ${1}/output/vietnam-communes-filled-holes.geojson # clean up the docker directory rm -f ./docker/delete-holes/vietnam* diff --git a/processing/d-update-geojson-spec.sh b/processing/d-update-geojson-spec.sh index 577667c..6342f2f 100755 --- a/processing/d-update-geojson-spec.sh +++ b/processing/d-update-geojson-spec.sh @@ -6,7 +6,7 @@ for ADMIN in communes district province do # generate unique input and output files as it has been done in previous examples - INPUT_FILE=${1}/tmp/vietnam-${ADMIN}-wgs84.geojson + INPUT_FILE=${1}/tmp/vietnam-${ADMIN}-filled-holes.geojson OUTPUT_FILE=${1}/output/vietnam-${ADMIN}-cleaned.geojson # remove crs object to match current GeoJSON spec using sed. 
# the below command was found in following place diff --git a/processing/e-simplify-props.js b/processing/e-simplify-props.js deleted file mode 100755 index 34ce5be..0000000 --- a/processing/e-simplify-props.js +++ /dev/null @@ -1,81 +0,0 @@ -/** - * @file reads streaming admin geojson and reduces properties to match the schema of the table to which it is going to be written - */ - -// these modules are needed for streaming geojsons -var createReadStream = require('fs').createReadStream; -var createWriteStream = require('fs').createWriteStream; -var readdirSync = require('fs').readdirSync; -var geojsonStream = require('geojson-stream'); -var parser = geojsonStream.parse(); -var stringifier = geojsonStream.stringify(); -// module to read path -var path = require('path'); -// parallel allows for reading each admin geojson stream asynchronously -var parallel = require('async').parallel; -// since the output of `c-update-geojson-spec.sh` writes geojsons to a single line, the stream needs to be broken up into lines, otherwise it will not work -// split is a module that does just this. -var split = require('split'); - -// directory with geojson files -var adminPath = 'data/processing/d-simplify-props/tmp' -// read in files as a list usable in the parallel function -var admins = readdirSync(adminPath) - -// create that list of async functions to pass to parallel -const adminTasks = admins.map((admin) => { - return function(cb) { - // the basename, really the admin level name, of the current admin - var basename = admin.split('-')[1] - // the relative path to the current admin file - var adminFile = path.join(adminPath, admin) - // a read stream of admin file - var adminFileStream = createReadStream(adminFile) - // piping split makes the new lines mentioned to be neccessary above - .pipe(split()) - // parser is a transform stream that parses geojson feature collections (the form of the input geojson) - .pipe(parser) - .on('data', (feature) => { - // make and pass feature's properties to the make makeNewProperties function that correctly transforms - // the properties to uniform spec needed to insert into the postgis tables - const properties = feature.properties; - // reset the feature properties as the returj from makeNewProperties - feature.properties = makeNewProperties(properties, basename) - }) - // stringify the geojson to send to createWriteStream, then write it to fiel - .pipe(stringifier) - .pipe(createWriteStream(`${baseDir}/output/vietnam-${basename}-simplified.geojson`)) - // when createWriteStream is closed, fire a callback. 
- .on('close', () => { cb(null, null) }) - } -}); - -/** - * simplifies input properties to spec needed to make admin postgis tables - * - * @param {object} properties original properties from streaming geojson - * @param {string} admin admin unit name, like 'commune', 'district,' - * @return {object} newProperties simplified properties generated from properties - */ -function makeNewProperties (properties, admin) { - const newProperties = {}; - if (RegExp(/commune/).test(admin)) { - newProperties.en_name = properties.EN_name - newProperties.id = properties.COMCODE02; - newProperties.p_id = properties.DISTCODE02 - } else if (RegExp(/district/).test(admin)) { - newProperties.en_name = properties.D_EName - newProperties.id = properties.DISTCODE02 - newProperties.p_id = properties.PROCODE02 - } else { - newProperties.en_name = properties.P_EName - newProperties.id = properties.PROCODE02 - } - return newProperties; -} - -// run adminTasks in parallel -parallel(adminTasks, (err, res) => { - // do nothing when the are all finished - if (!err) {} -}); diff --git a/processing/e-simplify-props.sh b/processing/e-simplify-props.sh new file mode 100755 index 0000000..2361c4a --- /dev/null +++ b/processing/e-simplify-props.sh @@ -0,0 +1,9 @@ +# Synopsys: reduces geojson properties to a uniform spec used to insert data into postgis tables + +# javascipt code that does the props simplifying +SIMPLIFY_PROPS=./js/simplify-props.js +# loop over each admin level, simplifying each's props. +for ADMIN in province district commune +do + node ${SIMPLIFY_PROPS} ${1} ${ADMIN} +done \ No newline at end of file diff --git a/processing/f-insert-into-table.sh b/processing/f-insert-into-table.sh new file mode 100755 index 0000000..ae8082d --- /dev/null +++ b/processing/f-insert-into-table.sh @@ -0,0 +1,9 @@ +# Synopsis: Insert each feature as a row into admin_boundaries table + +# javascipt code that does the props simplifying +INSERT_INTO_TABLE=./js/insert-into-table.js +# loop over each admin level, simplifying each's props. 
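+# each pass hands the data directory and admin name to insert-into-table.js, which finds that admin's simplified geojson and streams its features into the admin_boundaries table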
+for ADMIN in province district commune +do + node ${INSERT_INTO_TABLE} ${1} ${ADMIN} +done \ No newline at end of file diff --git a/processing/f-insert-tables.js b/processing/f-insert-tables.js deleted file mode 100755 index 854048f..0000000 --- a/processing/f-insert-tables.js +++ /dev/null @@ -1,85 +0,0 @@ -/** - * @file reads streaming admin geojson and 'inserts' each feature 'into' matching admin postgis table - */ - -// // these modules are needed for streaming geojsons -// var createReadStream = require('fs').createReadStream; -// var createWriteStream = require('fs').createWriteStream; -// var readdirSync = require('fs').readdirSync; -// var geojsonStream = require('geojson-stream'); -// var parser = geojsonStream.parse(); -// var stringifier = geojsonStream.stringify(); -// // module to read path -// var path = require('path'); -// // parallel allows for reading each admin geojson stream asynchronously -// var parallel = require('async').parallel; -// // knex creates a knex obj that links to the current environmnets database -// var knex = require('./db/connection/.js') -// // postgis is a knex extension to generate postgis statements -// var postgis = require('knex-postgis'); -// // helps split single-line json into chunked-by-line geojson as mentinoed in d-simplify-props.js -// var split = require('split'); -// // directory with geojsons -// var adminPath = `data/processing/d-simplify-props/tmp`; -// // array including elements with each file in that directory -// var admins = readdirSync(adminPath) -// // st is short for spatial type. spatial type is the prefix for postgis functions that allow for spatial sql statements -// // see https://postgis.net/docs/reference.html -// var st = postgis(knex); -// -// // create list of async functions to pass to parallel -// const adminTasks = admins.map((admin) => { -// return function(cb) { -// // base name mirrors admin name -// var basename = admin.split('-')[1] -// // here's the path to the current admin file -// var adminFile = path.join(adminPath, admin) -// // stream of this admin file -// var adminFileStream = createReadStream(adminFile) -// // pipe split for the lines needed to send along to the geojson parser -// .pipe(split()) -// // the geojson parser for parsing the feature collection -// .pipe(parser) -// .on('data', (feature) => { -// // for each feature, insert it into the table using the insertIntoTable function -// insertIntoTable(feature, basename) -// }) -// // fire a callback on end event -// .on('end', () => { cb(null, null) }) -// } -// }); -// -// /** -// * transforms feature into postgis table row and inserts it into the proper admin table -// * -// * @param {object} feature geojson feature -// * @param {string} admin admin name -// */ -// function insertIntoTable (feature, admin) { -// // generate properties and geometry objects from feature object -// const properties = feature.properties; -// const geometry = feature.geometry; -// const statement = db.insert({ -// // shared identifier for each row in admin table -// type: admin, -// // numeric id for current admin unit -// id: properties.id, -// // numeric id for currrent admin unit's parent (for instance a commune's parent district) -// // this is helpful for future spatial analysis -// parent_id: properties.p_id, -// // admin unit geometry -// geo: st.geomFromGeoJSON(geometry), -// // english name of admin unit -// name_en: properties.en_name, -// // vietnamese name of admin unit -// name_vn: '' -// }) -// // method that inserts the insert statement into its 
correct table -// .into(`${admin}-table`).toString(); -// } -// -// // run tasks in parallel -// parallel(adminTasks, (err, res) => { -// // do nothing on result -// if (!err) {} -// }); diff --git a/qgis-processing/a-clean-geom.sh b/qgis-processing/a-clean-geom.sh new file mode 100644 index 0000000..8bd6521 --- /dev/null +++ b/qgis-processing/a-clean-geom.sh @@ -0,0 +1,16 @@ +# Synopysis: cleans commune admin geometries so that dissolving in step b works properly + +# take contents of tmp directory add add it to the docker directory +cp ${1}/tmp/* ./docker/delete-holes +# build clean-geom container +docker build -t 'qgis_headless' ./docker/clean-geometries +# run the docker contianer, entering at run.sh +docker run -it qgis_headless +# get admin areas from container add copy them over to the output folder of the process +# using the docker cp command, copying from the most recently built container to the output folder +# `docker ps --latest -q` grabs the most recent container +docker cp `docker ps --latest -q`:workspace/vietnam-communes-cleaned.geojson ${1}/output/vietnam-communes-cleaned.geojson +docker cp `docker ps --latest -q`:workspace/vietnam-districts-cleaned.geojson ${1}/output/vietnam-districts-cleaned.geojson +docker cp `docker ps --latest -q`:workspace/vietnam-province-cleaned.geojson ${1}/output/vietnam-province-cleaned.geojson +# clean up the docker directory +rm -f ./docker/clean-geometries/vietnam* diff --git a/yarn.lock b/yarn.lock index 1baaaf5..1dea220 100644 --- a/yarn.lock +++ b/yarn.lock @@ -85,6 +85,10 @@ braces@^1.8.2: preserve "^0.2.0" repeat-element "^1.1.2" +buffer-writer@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/buffer-writer/-/buffer-writer-1.0.1.tgz#22a936901e3029afcd7547eb4487ceb697a3bf08" + chalk@^1.0.0, chalk@^1.1.0: version "1.1.3" resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" @@ -214,10 +218,6 @@ fs-exists-sync@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/fs-exists-sync/-/fs-exists-sync-0.1.0.tgz#982d6893af918e72d08dec9e8673ff2b5a8d6add" -fs@^0.0.1-security: - version "0.0.1-security" - resolved "https://registry.yarnpkg.com/fs/-/fs-0.0.1-security.tgz#8a7bd37186b6dddf3813f23858b57ecaaf5e41d4" - generic-pool@^2.4.2: version "2.5.4" resolved "https://registry.yarnpkg.com/generic-pool/-/generic-pool-2.5.4.tgz#38c6188513e14030948ec6e5cf65523d9779299b" @@ -374,6 +374,10 @@ isobject@^2.0.0: dependencies: isarray "1.0.0" +js-string-escape@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/js-string-escape/-/js-string-escape-1.0.1.tgz#e2625badbc0d67c7533e9edc1068c587ae4137ef" + jsonlint-lines@1.7.1: version "1.7.1" resolved "https://registry.yarnpkg.com/jsonlint-lines/-/jsonlint-lines-1.7.1.tgz#507de680d3fb8c4be1641cc57d6f679f29f178ff" @@ -508,6 +512,10 @@ os-homedir@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3" +packet-reader@0.3.1: + version "0.3.1" + resolved "https://registry.yarnpkg.com/packet-reader/-/packet-reader-0.3.1.tgz#cd62e60af8d7fea8a705ec4ff990871c46871f27" + parse-glob@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/parse-glob/-/parse-glob-3.0.4.tgz#b2c376cfb11f35513badd173ef0bb6e3a388391c" @@ -525,10 +533,60 @@ path-parse@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.5.tgz#3c1adf871ea9cd6c9431b6ea2bd74a0ff055c4c1" -pg-connection-string@^0.1.3: +pg-connection-string@0.1.3, 
pg-connection-string@^0.1.3: version "0.1.3" resolved "https://registry.yarnpkg.com/pg-connection-string/-/pg-connection-string-0.1.3.tgz#da1847b20940e42ee1492beaf65d49d91b245df7" +pg-pool@~2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/pg-pool/-/pg-pool-2.0.3.tgz#c022032c8949f312a4f91fb6409ce04076be3257" + +pg-types@~1.12.1: + version "1.12.1" + resolved "https://registry.yarnpkg.com/pg-types/-/pg-types-1.12.1.tgz#d64087e3903b58ffaad279e7595c52208a14c3d2" + dependencies: + postgres-array "~1.0.0" + postgres-bytea "~1.0.0" + postgres-date "~1.0.0" + postgres-interval "^1.1.0" + +pg@^7.3.0: + version "7.3.0" + resolved "https://registry.yarnpkg.com/pg/-/pg-7.3.0.tgz#275e27466e54a645f6b4a16f6acadf6b849ad83b" + dependencies: + buffer-writer "1.0.1" + js-string-escape "1.0.1" + packet-reader "0.3.1" + pg-connection-string "0.1.3" + pg-pool "~2.0.3" + pg-types "~1.12.1" + pgpass "1.x" + semver "4.3.2" + +pgpass@1.x: + version "1.0.2" + resolved "https://registry.yarnpkg.com/pgpass/-/pgpass-1.0.2.tgz#2a7bb41b6065b67907e91da1b07c1847c877b306" + dependencies: + split "^1.0.0" + +postgres-array@~1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/postgres-array/-/postgres-array-1.0.2.tgz#8e0b32eb03bf77a5c0a7851e0441c169a256a238" + +postgres-bytea@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/postgres-bytea/-/postgres-bytea-1.0.0.tgz#027b533c0aa890e26d172d47cf9ccecc521acd35" + +postgres-date@~1.0.0: + version "1.0.3" + resolved "https://registry.yarnpkg.com/postgres-date/-/postgres-date-1.0.3.tgz#e2d89702efdb258ff9d9cee0fe91bd06975257a8" + +postgres-interval@^1.1.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/postgres-interval/-/postgres-interval-1.1.1.tgz#acdb0f897b4b1c6e496d9d4e0a853e1c428f06f0" + dependencies: + xtend "^4.0.0" + preserve@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/preserve/-/preserve-0.2.0.tgz#815ed1f6ebc65926f865b310c0713bcb3315ce4b" @@ -594,7 +652,11 @@ safe-buffer@^5.0.1: version "5.1.1" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.1.tgz#893312af69b2123def71f57889001671eeb2c853" -split@^1.0.1: +semver@4.3.2: + version "4.3.2" + resolved "https://registry.yarnpkg.com/semver/-/semver-4.3.2.tgz#c7a07158a80bedd052355b770d82d6640f803be7" + +split@^1.0.0, split@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/split/-/split-1.0.1.tgz#605bd9be303aa59fb35f9229fbea0ddec9ea07d9" dependencies: @@ -667,3 +729,7 @@ which@^1.2.12: resolved "https://registry.yarnpkg.com/which/-/which-1.3.0.tgz#ff04bdfc010ee547d780bec38e1ac1c2777d253a" dependencies: isexe "^2.0.0" + +xtend@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.1.tgz#a5c6d532be656e23db820efb943a1f04998d63af"
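
For reference, a minimal driver sketch showing how the processing steps touched by this patch might be chained end to end. The step order and the single data-directory argument (${1} inside each script) come from the scripts in this patch; the "data" directory name, the bash invocation, and the driver script itself are illustrative assumptions, not part of the patch.

#!/usr/bin/env bash
# hypothetical pipeline driver (not part of this patch)
set -e
DATA_DIR=data   # assumed layout: ${DATA_DIR}/tmp holds inputs, ${DATA_DIR}/output collects results
bash qgis-processing/a-clean-geom.sh     "${DATA_DIR}"   # clean commune geometries so the dissolve in step b works
bash processing/b-dissolve.sh            "${DATA_DIR}"   # dissolve communes, districts, provinces on their code fields
bash processing/c-delete-holes.sh        "${DATA_DIR}"   # fill holes for districts/provinces; communes are copied through
bash processing/d-update-geojson-spec.sh "${DATA_DIR}"   # strip crs objects to match the current GeoJSON spec
bash processing/e-simplify-props.sh      "${DATA_DIR}"   # reduce properties to the admin_boundaries schema
bash processing/f-insert-into-table.sh   "${DATA_DIR}"   # stream each feature into the admin_boundaries table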