Dead code elimination: Remove Unused HTTP Endpoints #859

Open. Wants to merge 7 commits into base: dev. Showing changes from all commits.
AggregationAreaController.java
@@ -5,41 +5,24 @@
import com.conveyal.analysis.datasource.derivation.AggregationAreaDerivation;
import com.conveyal.analysis.datasource.derivation.DataDerivation;
import com.conveyal.analysis.models.AggregationArea;
import com.conveyal.analysis.models.DataSource;
import com.conveyal.analysis.persistence.AnalysisCollection;
import com.conveyal.analysis.persistence.AnalysisDB;
import com.conveyal.analysis.util.JsonUtil;
import com.conveyal.file.FileStorage;
import com.conveyal.r5.analyst.progress.Task;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.bson.conversions.Bson;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import spark.Request;
import spark.Response;

import java.lang.invoke.MethodHandles;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

import static com.conveyal.analysis.util.JsonUtil.toJson;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.mongodb.client.model.Filters.and;
import static com.mongodb.client.model.Filters.eq;

/**
* Stores vector aggregationAreas (used to define the region of a weighted average accessibility metric).
*/
public class AggregationAreaController implements HttpController {

private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

private final FileStorage fileStorage;
private final AnalysisDB analysisDb;
private final TaskScheduler taskScheduler;

private final AnalysisCollection<DataSource> dataSourceCollection;
private final AnalysisCollection<AggregationArea> aggregationAreaCollection;

public AggregationAreaController (
@@ -50,7 +33,6 @@ public AggregationAreaController (
this.fileStorage = fileStorage;
this.analysisDb = database;
this.taskScheduler = taskScheduler;
dataSourceCollection = database.getAnalysisCollection("dataSources", DataSource.class);
aggregationAreaCollection = database.getAnalysisCollection("aggregationAreas", AggregationArea.class);
}

@@ -65,7 +47,7 @@ public AggregationAreaController (
* @return the ID of the Task representing the enqueued background action that will create the aggregation areas.
*/
private String createAggregationAreas (Request req, Response res) throws Exception {
// Create and enqueue an asynchronous background action to derive aggreagation areas from spatial data source.
// Create and enqueue an asynchronous background action to derive aggregation areas from spatial data source.
// The constructor will extract query parameters and range check them (not ideal separation, but it works).
DataDerivation derivation = AggregationAreaDerivation.fromRequest(req, fileStorage, analysisDb);
Task backgroundTask = Task.create("Aggregation area creation: " + derivation.dataSource().name)
@@ -77,26 +59,6 @@ private String createAggregationAreas (Request req, Response res) throws Excepti
return backgroundTask.id.toString();
}
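
Note: the handler above follows an enqueue-and-return-an-id pattern: the derivation runs in the background via the TaskScheduler while the HTTP response carries only the task id for later polling. A generic, self-contained sketch of that pattern using only the JDK (ExecutorService, a status map, and UUID here are illustrative stand-ins, not the project's Task or TaskScheduler classes):

import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class BackgroundTaskSketch {
    private static final ExecutorService executor = Executors.newSingleThreadExecutor();
    private static final ConcurrentHashMap<String, String> status = new ConcurrentHashMap<>();

    // Enqueue a background action and immediately return an id the caller can poll.
    static String enqueue (Runnable action) {
        String id = UUID.randomUUID().toString();
        status.put(id, "queued");
        executor.submit(() -> {
            status.put(id, "running");
            action.run();
            status.put(id, "done");
        });
        return id;
    }

    public static void main (String[] args) throws InterruptedException {
        String id = enqueue(() -> System.out.println("deriving aggregation areas..."));
        System.out.println("task id: " + id);
        Thread.sleep(200);
        System.out.println("status: " + status.get(id));
        executor.shutdown();
    }
}
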

/**
* Get all aggregation area documents meeting the supplied criteria.
* The request must contain a query parameter for the regionId or the dataGroupId or both.
*/
private Collection<AggregationArea> getAggregationAreas (Request req, Response res) {
List<Bson> filters = new ArrayList<>();
String regionId = req.queryParams("regionId");
if (regionId != null) {
filters.add(eq("regionId", regionId));
}
String dataGroupId = req.queryParams("dataGroupId");
if (dataGroupId != null) {
filters.add(eq("dataGroupId", dataGroupId));
}
if (filters.isEmpty()) {
throw new IllegalArgumentException("You must supply either a regionId or a dataGroupId or both.");
}
return aggregationAreaCollection.findPermitted(and(filters), UserPermissions.from(req));
}

/** Returns a JSON-wrapped URL for the mask grid of the aggregation area whose id matches the path parameter. */
private ObjectNode getAggregationAreaGridUrl (Request req, Response res) {
AggregationArea aggregationArea = aggregationAreaCollection.findPermittedByRequestParamId(req);
@@ -106,9 +68,7 @@ private ObjectNode getAggregationAreaGridUrl (Request req, Response res) {

@Override
public void registerEndpoints (spark.Service sparkService) {
sparkService.get("/api/aggregationArea", this::getAggregationAreas, toJson);
sparkService.get("/api/aggregationArea/:_id", this::getAggregationAreaGridUrl, toJson);
sparkService.post("/api/aggregationArea", this::createAggregationAreas, toJson);
}

}
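
Note: this controller loses GET /api/aggregationArea, which listed aggregation areas filtered by regionId and/or dataGroupId. Anyone who still needs that query can run it directly against Mongo; a standalone sketch with the MongoDB Java driver follows. The connection string, database name, and the two id values are placeholders; only the collection name ("aggregationAreas") comes from the constructor above.

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCollection;
import org.bson.Document;
import org.bson.conversions.Bson;

import java.util.ArrayList;
import java.util.List;

import static com.mongodb.client.model.Filters.and;
import static com.mongodb.client.model.Filters.eq;

public class AggregationAreaQuerySketch {
    public static void main (String[] args) {
        try (MongoClient client = MongoClients.create("mongodb://localhost:27017")) {
            MongoCollection<Document> areas = client
                    .getDatabase("analysis")
                    .getCollection("aggregationAreas");
            // Same filter shape the removed handler built: regionId and/or dataGroupId.
            List<Bson> filters = new ArrayList<>();
            filters.add(eq("regionId", "some-region-id"));
            filters.add(eq("dataGroupId", "some-data-group-id"));
            areas.find(and(filters)).forEach(doc -> System.out.println(doc.toJson()));
        }
    }
}

Unlike the removed handler, this raw query bypasses the permission filtering that findPermitted applied.
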
BundleController.java
@@ -13,14 +13,13 @@
import com.conveyal.file.FileUtils;
import com.conveyal.gtfs.GTFSCache;
import com.conveyal.gtfs.GTFSFeed;
import com.conveyal.gtfs.error.GTFSError;
import com.conveyal.gtfs.error.GeneralError;
import com.conveyal.gtfs.model.Stop;
import com.conveyal.gtfs.validator.PostLoadValidator;
import com.conveyal.osmlib.Node;
import com.conveyal.osmlib.OSM;
import com.conveyal.r5.analyst.progress.ProgressInputStream;
import com.conveyal.r5.analyst.cluster.TransportNetworkConfig;
import com.conveyal.r5.analyst.progress.ProgressInputStream;
import com.conveyal.r5.analyst.progress.Task;
import com.conveyal.r5.streets.OSMCache;
import com.conveyal.r5.util.ExceptionUtils;
@@ -41,7 +40,6 @@
import java.io.Writer;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
@@ -80,10 +78,7 @@ public BundleController (BackendComponents components) {
@Override
public void registerEndpoints (Service sparkService) {
sparkService.path("/api/bundle", () -> {
sparkService.get("", this::getBundles, toJson);
sparkService.get("/:_id", this::getBundle, toJson);
sparkService.post("", this::create, toJson);
sparkService.put("/:_id", this::update, toJson);
sparkService.delete("/:_id", this::deleteBundle, toJson);
});
}
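
Note: after this change the /api/bundle group keeps only the POST (create) and DELETE registrations; the GET and PUT routes disappear along with their handlers further down. For reviewers less familiar with Spark's grouped registration, a minimal standalone sketch of the path()/RouteGroup shape; the port, the inline lambda handlers, and the trivial transformer standing in for JsonUtil.toJson are all placeholders:

import spark.Service;

public class RouteGroupSketch {
    public static void main (String[] args) {
        Service sparkService = Service.ignite().port(7070);
        // Same registration shape as registerEndpoints above, with lambdas instead of method references.
        sparkService.path("/api/bundle", () -> {
            sparkService.post("", (req, res) -> "created", Object::toString);
            sparkService.delete("/:_id", (req, res) -> "deleted " + req.params("_id"), Object::toString);
        });
    }
}
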
@@ -295,27 +290,6 @@ private Bundle deleteBundle (Request req, Response res) throws IOException {
return bundle;
}

private Bundle update (Request req, Response res) throws IOException {
return Persistence.bundles.updateFromJSONRequest(req);
}

private Bundle getBundle (Request req, Response res) {
Bundle bundle = Persistence.bundles.findByIdFromRequestIfPermitted(req);

// Progressively update older bundles with service start and end dates on retrieval
try {
setBundleServiceDates(bundle, gtfsCache);
} catch (Exception e) {
throw AnalysisServerException.unknown(e);
}

return bundle;
}

private Collection<Bundle> getBundles (Request req, Response res) {
return Persistence.bundles.findPermittedForQuery(req);
}

// UTILITY METHODS

/**
DataSourceController.java
@@ -20,14 +20,11 @@
import org.apache.commons.fileupload.FileItem;
import org.geotools.data.geojson.GeoJSONWriter;
import org.opengis.feature.simple.SimpleFeature;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import spark.Request;
import spark.Response;

import java.io.File;
import java.io.OutputStream;
import java.lang.invoke.MethodHandles;
import java.nio.file.Files;
import java.util.List;
import java.util.Map;
@@ -36,17 +33,13 @@
import static com.conveyal.file.FileCategory.DATASOURCES;
import static com.conveyal.file.FileStorageFormat.SHP;
import static com.conveyal.r5.analyst.WebMercatorGridPointSet.parseZoom;
import static com.mongodb.client.model.Filters.eq;

/**
* Controller that handles CRUD of DataSources, which are Mongo metadata about user-uploaded files.
* Unlike some Mongo documents, these are mostly created and updated by backend validation and processing methods.
* Currently this handles only one subtype: SpatialDataSource, which represents GIS-like vector geospatial data.
*/
public class DataSourceController implements HttpController {

private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

// Component Dependencies
private final FileStorage fileStorage;
private final TaskScheduler taskScheduler;
@@ -72,13 +65,6 @@ public DataSourceController (
);
}

/** HTTP GET: Retrieve all DataSource records, filtered by the (required) regionId query parameter. */
private List<DataSource> getAllDataSourcesForRegion (Request req, Response res) {
return dataSourceCollection.findPermitted(
eq("regionId", req.queryParams("regionId")), UserPermissions.from(req)
);
}
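
Note: with the region-wide listing removed, API consumers are left with the by-id lookup just below. A hypothetical client-side sketch using the JDK HTTP client; the host, port, and object id are invented, and any authentication headers the backend expects are omitted:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class DataSourceFetchSketch {
    public static void main (String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        // GET /api/dataSource/:_id returns the single DataSource record as JSON.
        HttpRequest request = HttpRequest.newBuilder(
                URI.create("http://localhost:7070/api/dataSource/6400f0a1c1f9ab0001234567"))
                .GET()
                .build();
        HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode());
        System.out.println(response.body());
    }
}
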

/** HTTP GET: Retrieve a single DataSource record by the ID supplied in the URL path parameter. */
private DataSource getOneDataSourceById (Request req, Response res) {
return dataSourceCollection.findPermittedByRequestParamId(req);
@@ -180,8 +166,6 @@ private String handleUpload (Request req, Response res) {
@Override
public void registerEndpoints (spark.Service sparkService) {
sparkService.path("/api/dataSource", () -> {
sparkService.get("/", this::getAllDataSourcesForRegion, toJson);
sparkService.get("/:_id", this::getOneDataSourceById, toJson);
sparkService.delete("/:_id", this::deleteOneDataSourceById, toJson);
sparkService.get("/:_id/preview", this::getDataSourcePreview, toJson);
sparkService.post("", this::handleUpload, toJson);
RegionalAnalysisController.java
@@ -20,7 +20,6 @@
import com.conveyal.r5.analyst.PointSet;
import com.conveyal.r5.analyst.PointSetCache;
import com.conveyal.r5.analyst.cluster.RegionalTask;
import com.fasterxml.jackson.databind.JsonNode;
import com.google.common.primitives.Ints;
import com.mongodb.QueryBuilder;
import gnu.trove.list.array.TIntArrayList;
@@ -42,9 +41,7 @@
import java.util.zip.GZIPOutputStream;

import static com.conveyal.analysis.util.JsonUtil.toJson;
import static com.conveyal.file.FileCategory.BUNDLES;
import static com.conveyal.file.FileCategory.RESULTS;
import static com.conveyal.r5.transit.TransportNetworkCache.getScenarioFilename;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
@@ -85,15 +82,6 @@ private Collection<RegionalAnalysis> getRegionalAnalysesForRegion(String regionId
);
}

private Collection<RegionalAnalysis> getRegionalAnalysesForRegion(Request req, Response res) {
return getRegionalAnalysesForRegion(req.params("regionId"), UserPermissions.from(req));
}

// Note: this includes the modifications object which can be very large
private RegionalAnalysis getRegionalAnalysis(Request req, Response res) {
return Persistence.regionalAnalyses.findByIdIfPermitted(req.params("_id"), UserPermissions.from(req));
}

/**
* Looks up all regional analyses for a region and checks the broker for jobs associated with them. If a JobStatus
* exists it adds it to the list of running analyses.
@@ -517,43 +505,17 @@ private RegionalAnalysis createRegionalAnalysis (Request req, Response res) thro
return regionalAnalysis;
}

private RegionalAnalysis updateRegionalAnalysis (Request request, Response response) throws IOException {
RegionalAnalysis regionalAnalysis = JsonUtil.objectMapper.readValue(request.body(), RegionalAnalysis.class);
return Persistence.regionalAnalyses.updateByUserIfPermitted(regionalAnalysis, UserPermissions.from(request));
}

/**
* Return a JSON-wrapped URL for the file in FileStorage containing the JSON representation of the scenario for
* the given regional analysis.
*/
private JsonNode getScenarioJsonUrl (Request request, Response response) {
RegionalAnalysis regionalAnalysis = Persistence.regionalAnalyses
.findByIdIfPermitted(request.params("_id"), UserPermissions.from(request));
// In the persisted objects, regionalAnalysis.scenarioId seems to be null. Get it from the embedded request.
final String networkId = regionalAnalysis.bundleId;
final String scenarioId = regionalAnalysis.request.scenarioId;
checkNotNull(networkId, "RegionalAnalysis did not contain a network ID.");
checkNotNull(scenarioId, "RegionalAnalysis did not contain an embedded request with scenario ID.");
String scenarioUrl = fileStorage.getURL(
new FileStorageKey(BUNDLES, getScenarioFilename(regionalAnalysis.bundleId, scenarioId)));
return JsonUtil.objectNode().put("url", scenarioUrl);
}

@Override
public void registerEndpoints (spark.Service sparkService) {
sparkService.path("/api/region", () -> {
sparkService.get("/:regionId/regional", this::getRegionalAnalysesForRegion, toJson);
sparkService.get("/:regionId/regional/running", this::getRunningAnalyses, toJson);
});
sparkService.path("/api/regional", () -> {
// For grids, no transformer is supplied: render raw bytes or input stream rather than transforming to JSON.
sparkService.get("/:_id", this::getRegionalAnalysis);
sparkService.get("/:_id/grid/:format", this::getRegionalResults);
sparkService.get("/:_id/csv/:resultType", this::getCsvResults);
sparkService.get("/:_id/scenarioJsonUrl", this::getScenarioJsonUrl);
sparkService.delete("/:_id", this::deleteRegionalAnalysis, toJson);
sparkService.post("", this::createRegionalAnalysis, toJson);
sparkService.put("/:_id", this::updateRegionalAnalysis, toJson);
});
}
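
Note: the removed scenarioJsonUrl route responded with a single-field JSON wrapper, {"url": "..."}, where the URL pointed at the scenario file in FileStorage. A tiny Jackson sketch of that response shape, with a placeholder string in place of the FileStorage lookup:

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class JsonUrlSketch {
    public static void main (String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        // Equivalent shape to JsonUtil.objectNode().put("url", scenarioUrl) in the removed handler.
        ObjectNode wrapper = mapper.createObjectNode()
                .put("url", "https://example.com/bundles/bundleId_scenarioId.json");
        System.out.println(mapper.writeValueAsString(wrapper));
    }
}
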
