Skip to content

Commit

Permalink
Merged main, bumped to latest globe view version, added some pitch/ro…
Browse files Browse the repository at this point in the history
…tate controls back for globe view and adjusted minZoom.
  • Loading branch information
sdl60660 committed Mar 26, 2022
2 parents 4007cb7 + 37137f8 commit 7b434cb
Show file tree
Hide file tree
Showing 17 changed files with 491 additions and 82 deletions.
5 changes: 4 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,8 @@
/public/data/rivers_full_detail.json
/public/data/detailed_rivers_streams.json
/public/data/flowrates.json
/public/data/global_stopping_features_backup.json
/public/data/global_stopping_features_update.json

# Data files too large for github storage
# Original VAA .parquet file can be found here: https://www.hydroshare.org/resource/6092c8a62fac45be97a09bfd0b0bf726/
Expand All @@ -22,4 +24,5 @@
# Original shapefile can be found here: https://hub.arcgis.com/datasets/esri::usa-detailed-water-bodies?geometry=167.943%2C22.299%2C-35.172%2C47.182
# /data_processing/data/water_bodies.geojson
package-lock.json
name_server/package-lock.json
name_server/package-lock.json
name_server/data_processing/data
4 changes: 3 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,8 @@ Here's part of the path from Southwest Arizona down to the Mexican border:

![Screenshot of the river runner in progress from Southwest Arizona to Mexican border. Mountain features, desert, and river are visible.](https://github.com/sdl60660/river-runner/blob/main/public/images/example-2-az.png?raw=true)

You can look at a heatmap of previous searches [here](https://river-runner-query-heatmap.vercel.app/) or find a list of some of our favorite paths [here](https://docs.google.com/document/d/1EqRNDvvCwJdfNvejHzw-0zCd6Ax-0i7nyHkU4h0M9Kg/edit?usp=sharing).

## Running this on your own

If you'd like to run this locally and play around with it, just run the following commands in your terminal (assuming you have [npm](https://www.npmjs.com/get-npm) installed):
Expand All @@ -37,6 +39,6 @@ Thank you to [Mapbox](https://www.mapbox.com/) for sponsoring this project!
<img src="https://user-images.githubusercontent.com/12772904/129089126-5c528d47-961f-427f-820f-df58974d15c3.png" alt="mapbox-logo-black" width="300"/>

## Updates
* **January 2021**: The [global version](https://river-runner-global.samlearner.com/) of this tool is now released and in beta! While some lingering issues are resolved and it remains in beta, it can be found on this branch, while the original, US-only version is preserved [here](https://github.com/sdl60660/river-runner/tree/us-only) in Github, and at its original URL: https://river-runner.samlearner.com/. This is to avoid any breaking changes to existing share links/paths due to any discrepancies and because minor US issues persist on the global version, mainly when paths involve dams, canals, or conduits.
* **January 2022**: The [global version](https://river-runner-global.samlearner.com/) of this tool is now released and in beta! While some lingering issues are resolved and it remains in beta, it can be found on this branch, while the original, US-only version is preserved [here](https://github.com/sdl60660/river-runner/tree/us-only) in Github, and at its original URL: https://river-runner.samlearner.com/. This is to avoid any breaking changes to existing share links/paths due to any discrepancies and because minor US issues persist on the global version, mainly when paths involve dams, canals, or conduits.

If you'd like to be notified about major updates to the tool, you can sign up for an email list [here](https://tinyletter.com/samlearner)
130 changes: 130 additions & 0 deletions name_server/data_processing/aggregate_name_suggestions.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,130 @@
const dotenv = require("dotenv");
const path = require("path");
const assert = require("assert");
const MongoClient = require("mongodb").MongoClient;

const fs = require("fs");
const csv = require("fast-csv");

dotenv.config({ path: path.join(__dirname, "../.env") });

// Aggregation pipeline: tallies unnamed-feature reports per levelpathid,
// keeping one sample route URL per group, sorted most-reported first.
const unnamedFeatureCounts = [
  {
    $group: {
      _id: "$levelpathid",
      count: { $sum: 1 },
      route_url: { $first: "$route_url" },
    },
  },
  { $sort: { count: -1 } },
];

// Aggregation pipeline: groups name suggestions by nameid, collecting every
// suggested name and route URL in the group, sorted most-suggested first.
const suggestionCounts = [
  {
    $group: {
      _id: "$nameid",
      count: { $sum: 1 },
      route_url: { $push: "$route_url" },
      suggestions: { $push: "$suggested_name" },
    },
  },
  { $sort: { count: -1 } },
];

/**
 * Reads the existing name-override CSV into memory.
 *
 * @param {string} filepath - CSV path relative to this script's directory.
 * @returns {Promise<Object[]>} Resolves with one plain object per row, keyed
 *   by the CSV headers. Rejects if the file cannot be read or parsed.
 */
const getExistingOverrides = async (filepath) => {
  const rows = [];

  return new Promise((resolve, reject) => {
    fs.createReadStream(path.join(__dirname, filepath))
      .pipe(csv.parse({ headers: true }))
      // The original only logged here, leaving the promise pending forever on
      // a missing/unreadable file; reject so callers see the failure.
      .on("error", (error) => reject(error))
      .on("data", (row) => rows.push(row))
      .on("end", () => resolve(rows));
  });
};

/**
 * Runs an aggregation pipeline against a collection in the river_runner DB.
 *
 * @param {Object[]} aggregationPipeline - MongoDB aggregation pipeline stages.
 * @param {string} collection - Collection name within the river_runner database.
 * @returns {Promise<Object[]>} Resolves with the full aggregation result set.
 *   Rejects on connection or command failure (the original used
 *   assert.equal(null, err), which crashed the process instead of rejecting).
 */
const runAggregation = async (aggregationPipeline, collection) => {
  return new Promise((resolve, reject) => {
    MongoClient.connect(
      process.env.MONGODB_URL,
      { useNewUrlParser: true, useUnifiedTopology: true },
      function (connectErr, client) {
        if (connectErr) {
          reject(connectErr);
          return;
        }

        const coll = client.db("river_runner").collection(collection);

        coll.aggregate(
          aggregationPipeline,
          { allowDiskUse: true, cursor: { batchSize: 1000 } },
          async (cmdErr, result) => {
            if (cmdErr) {
              // Close the connection on the error path too (original leaked it).
              client.close();
              reject(cmdErr);
              return;
            }

            try {
              const aggregationData = await result.toArray();
              resolve(aggregationData);
            } catch (err) {
              reject(err);
            } finally {
              client.close();
            }
          }
        );
      }
    );
  });
};

/**
 * Entry point: loads the manual name-override CSV, pulls grouped name
 * suggestions from MongoDB, and prints the suggestions that (a) have more
 * than three votes and (b) are not already covered by an override row.
 */
const main = async () => {
  const csvFilepath = "../../public/data/name_overrides.csv";

  const existingOverrides = await getExistingOverrides(csvFilepath);
  // const groupedOccurences = await runAggregation(unnamedFeatureCounts, "unnamed_features");
  const groupedSuggestions = await runAggregation(suggestionCounts, "suggestions");

  // CSV fields are strings while the aggregation _id is numeric, so normalize
  // the override IDs to numbers before comparing.
  const overriddenIDs = existingOverrides.map((d) => Number(d.levelpathid));
  const unhandledSuggestions = groupedSuggestions
    .filter((d) => d.count > 3)
    .filter((d) => !overriddenIDs.includes(d._id));

  console.log(unhandledSuggestions);

  // TODO(review): the join/CSV-export below is parked, not dead — re-enable
  // once the unnamed_features aggregation above is switched back on.
  // const joinedData = groupedOccurences.map((d) => {
  //   const suggestions = groupedSuggestions.find((a) => a._id === d._id) || {
  //     count: 0,
  //     route_url: [],
  //     suggestions: null,
  //   };

  //   const override = existingOverrides.find((a) => Number(a.levelpathid) === d._id);

  //   return {
  //     levelpathid: d._id,
  //     sample_route_url: [...suggestions.route_url, d.route_url].filter((d) => d)[0],
  //     num_suggestions: suggestions.count,
  //     num_occurences: d.count,
  //     overriden: override ? true : false,
  //     overriden_val: override ? override.feature_name : null,
  //     suggestions: suggestions.suggestions,
  //   };
  // });

  // const outputFile = fs.createWriteStream("data/aggregated_suggestions.csv");

  // csv
  //   .write(joinedData, { headers: true })
  //   .on("finish", function () {
  //     console.log("Write to CSV successfully!");
  //   })
  //   .pipe(outputFile);
};

// Surface failures instead of leaving a floating promise whose rejection
// would be unhandled.
main().catch((err) => {
  console.error(err);
  process.exitCode = 1;
});
3 changes: 3 additions & 0 deletions name_server/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -19,14 +19,17 @@
"compression": "^1.7.4",
"cors": "^2.8.5",
"cross-env": "^7.0.0",
"csv-parse": "^5.0.4",
"dotenv": "^10.0.0",
"express": "^4.17.2",
"express-mongo-sanitize": "^2.1.0",
"express-rate-limit": "^5.5.1",
"fast-csv": "^4.3.6",
"helmet": "^4.1.0",
"http-status": "^1.5.0",
"joi": "^17.5.0",
"mongoose": "^5.13.13",
"node-csv": "^0.1.2",
"nodemailer": "^6.3.1",
"passport": "^0.5.2",
"pm2": "^5.1.2",
Expand Down
21 changes: 21 additions & 0 deletions name_server/src/models/unnamedFeature.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
const mongoose = require("mongoose");

// One document per report of a river feature that is missing a proper name,
// stored in the "unnamed_features" collection. (Schema variable was
// previously misspelled "unamedFeatureSchema".)
const unnamedFeatureSchema = new mongoose.Schema(
  {
    // Identifier of the flowline the report refers to.
    levelpathid: {
      type: Number,
      required: true,
    },
    // Name currently displayed for the feature when the report was filed.
    current_name: {
      type: String,
      required: true,
    },
    // Date.now() at ingestion time; set by the route handler, not the client.
    timestamp: Number,
    // JSON-encoded start coordinates — presumably { lat, lng }; the route
    // handler parses .lat/.lng out of it. TODO confirm against frontend.
    route_start: String,
    // Shareable URL reproducing the route, derived from route_start.
    route_url: String,
  },
  { collection: "unnamed_features" }
);

const UnnamedFeature = mongoose.model("UnnamedFeature", unnamedFeatureSchema);
module.exports = UnnamedFeature;
21 changes: 20 additions & 1 deletion name_server/src/routes/index.js
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
const express = require("express");
const Suggestion = require("../models/suggestion");
const Query = require("../models/query");
const UnnamedFeature = require("../models/unnamedFeature");

const router = express.Router();

Expand All @@ -16,7 +17,7 @@ router.post("/suggestions", async (req, res) => {
const suggestions = req.body.map((item) => ({
...item,
timestamp: Date.now(),
route_url: `https://river-runner-global.samlearner.com/?lat=${JSON.parse(item.route_start).lat}&lng=${JSON.parse(item.route_start).lng}`
route_url: `https://river-runner-global.samlearner.com/?lng=${JSON.parse(item.route_start).lng}&lat=${JSON.parse(item.route_start).lat}`
}));

suggestions.forEach(async (item) => {
Expand All @@ -27,6 +28,24 @@ router.post("/suggestions", async (req, res) => {
res.status(201).json(suggestions);
});

// Accepts a batch of unnamed-feature reports and persists one document each.
// Responds 201 with the normalized records once every save has completed.
router.post("/unnamed_features", async (req, res) => {
  try {
    const unnamedFeatures = req.body.map((item) => {
      // route_start is a JSON-encoded point; parse once instead of twice.
      const { lng, lat } = JSON.parse(item.route_start);

      return {
        // Corrects for an earlier frontend mistake
        levelpathid: item.levelpathid || Number(item.name_id),
        current_name: item.current_name,
        timestamp: Date.now(),
        route_start: item.route_start,
        route_url: `https://river-runner-global.samlearner.com/?lng=${lng}&lat=${lat}`,
      };
    });

    // The original used forEach(async ...), which sent the 201 before any
    // save finished and left save failures as unhandled rejections.
    await Promise.all(unnamedFeatures.map((item) => new UnnamedFeature(item).save()));

    res.status(201).json(unnamedFeatures);
  } catch (err) {
    console.error(err);
    res.status(500).json({ error: "Failed to record unnamed features" });
  }
});

// stash (completely anonymized) user queries to better understand where people are looking
router.post("/query", async (req, res) => {
const queryData = { ...req.body, timestamp: Date.now() };
Expand Down
5 changes: 2 additions & 3 deletions package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "river-runner",
"version": "2.0.2",
"version": "2.2.1",
"private": true,
"author": "Sam Learner",
"repository": {
Expand Down Expand Up @@ -53,8 +53,7 @@
"d3-fetch": "^2.0.0",
"dateformat": "^4.5.1",
"fast-xml-parser": "^3.19.0",
"mapbox-gl": "2.7.0-alpha.1",
"node-fetch-polyfill": "^2.0.6",
"mapbox-gl": "^2.8.0-alpha.4",
"node-html-parser": "^3.3.5",
"scrollama": "^2.2.2",
"sirv-cli": "^1.0.0",
Expand Down
2 changes: 1 addition & 1 deletion public/data/active_nwis_sites.json

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion public/data/global_stopping_features.json

Large diffs are not rendered by default.

Loading

0 comments on commit 7b434cb

Please sign in to comment.