Commit 0a6f9b1

Move createResultWriters method
Into the RegionalAnalysisController
1 parent ca1ba48 commit 0a6f9b1

4 files changed: +71 -70 lines changed
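
In effect, the commit turns writer creation from an instance method on the RegionalAnalysis model into a private static helper on RegionalAnalysisController. A minimal before/after sketch of the call site in createRegionalAnalysis, condensed from the diffs below (names are exactly as they appear there):

    // Before: the model built its own writers and filled in its resultStorage map as a side effect.
    var assembler = new MultiOriginAssembler(regionalJob, regionalAnalysis.createResultWriters(task));

    // After: the controller builds the writers, passing the analysis and task in explicitly.
    var writers = createResultWriters(regionalAnalysis, task);
    var assembler = new MultiOriginAssembler(regionalJob, writers);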

src/main/java/com/conveyal/analysis/components/broker/Broker.java

+1 -1
@@ -455,7 +455,7 @@ public void handleRegionalWorkResult(RegionalWorkResult workResult) {
 
             // Store all result files permanently.
             for (var resultFile : resultFiles.entrySet()) {
-                this.fileStorage.moveIntoStorage(resultFile.getKey(), resultFile.getValue());
+                fileStorage.moveIntoStorage(resultFile.getKey(), resultFile.getValue());
             }
         } catch (Throwable t) {
             recordJobError(job, ExceptionUtils.stackTraceString(t));

src/main/java/com/conveyal/analysis/controllers/RegionalAnalysisController.java

+64 -3
@@ -11,8 +11,14 @@
 import com.conveyal.analysis.models.OpportunityDataset;
 import com.conveyal.analysis.models.RegionalAnalysis;
 import com.conveyal.analysis.persistence.Persistence;
+import com.conveyal.analysis.results.AccessCsvResultWriter;
 import com.conveyal.analysis.results.CsvResultType;
+import com.conveyal.analysis.results.GridResultWriter;
 import com.conveyal.analysis.results.MultiOriginAssembler;
+import com.conveyal.analysis.results.PathCsvResultWriter;
+import com.conveyal.analysis.results.RegionalResultWriter;
+import com.conveyal.analysis.results.TemporalDensityCsvResultWriter;
+import com.conveyal.analysis.results.TimeCsvResultWriter;
 import com.conveyal.analysis.util.JsonUtil;
 import com.conveyal.file.FileStorage;
 import com.conveyal.file.FileStorageFormat;
@@ -47,6 +53,7 @@
 import static com.conveyal.analysis.util.JsonUtil.toJson;
 import static com.conveyal.file.FileCategory.BUNDLES;
 import static com.conveyal.file.FileCategory.RESULTS;
+import static com.conveyal.r5.common.Util.notNullOrEmpty;
 import static com.conveyal.r5.transit.TransportNetworkCache.getScenarioFilename;
 import static com.google.common.base.Preconditions.checkArgument;
 import static com.google.common.base.Preconditions.checkNotNull;
@@ -459,7 +466,7 @@ private RegionalAnalysis createRegionalAnalysis (Request req, Response res) thro
         // In fact, there are three separate classes all containing almost the same info:
         // AnalysisRequest (from UI to backend), RegionalTask (template sent to worker), RegionalAnalysis (in Mongo).
         // And for regional analyses, two instances of the worker task: the one with the scenario, and the templateTask.
-        RegionalAnalysis regionalAnalysis = new RegionalAnalysis();
+        final RegionalAnalysis regionalAnalysis = new RegionalAnalysis();
         regionalAnalysis.request = task;
         regionalAnalysis.height = task.height;
         regionalAnalysis.north = task.north;
@@ -508,13 +515,16 @@ private RegionalAnalysis createRegionalAnalysis (Request req, Response res) thro
 
         // Persist this newly created RegionalAnalysis to Mongo.
         // This assigns it creation/update time stamps and an ID, which is needed to name any output CSV files.
-        regionalAnalysis = Persistence.regionalAnalyses.create(regionalAnalysis);
+        Persistence.regionalAnalyses.create(regionalAnalysis);
 
         // Create the regional job
         var regionalJob = new Job(task, WorkerTags.fromRegionalAnalysis(regionalAnalysis));
 
+        // Create the result writers
+        var writers = createResultWriters(regionalAnalysis, task);
+
         // Create the multi-origin assembler with the writers.
-        var assembler = new MultiOriginAssembler(regionalJob, regionalAnalysis.createResultWriters(task));
+        var assembler = new MultiOriginAssembler(regionalJob, writers);
 
         // Stored scenario is needed by workers. Must be done ahead of enqueueing the job.
         storeRegionalAnalysisScenarioJson(task);
@@ -530,6 +540,57 @@ private RegionalAnalysis createRegionalAnalysis (Request req, Response res) thro
         return regionalAnalysis;
     }
 
+    /**
+     * Create results writers for this regional analysis and a task. Stores the result paths that are created by the
+     * writers.
+     */
+    private static List<RegionalResultWriter> createResultWriters(RegionalAnalysis analysis, RegionalTask task) {
+        // Create the result writers. Store their result file paths in the database.
+        var resultWriters = new ArrayList<RegionalResultWriter>();
+        if (!task.makeTauiSite) {
+            if (task.recordAccessibility) {
+                if (task.originPointSet != null) {
+                    // Freeform origins - create CSV regional analysis results
+                    var accessWriter = new AccessCsvResultWriter(task);
+                    resultWriters.add(accessWriter);
+                    analysis.resultStorage.put(accessWriter.resultType(), accessWriter.getFileName());
+                } else {
+                    // Gridded origins - create gridded regional analysis results
+                    resultWriters.addAll(GridResultWriter.createWritersFromTask(analysis.destinationPointSetIds, task));
+                }
+            }
+
+            if (task.recordTimes) {
+                var timesWriter = new TimeCsvResultWriter(task);
+                resultWriters.add(timesWriter);
+                analysis.resultStorage.put(timesWriter.resultType(), timesWriter.getFileName());
+            }
+
+            if (task.includePathResults) {
+                var pathsWriter = new PathCsvResultWriter(task);
+                resultWriters.add(pathsWriter);
+                analysis.resultStorage.put(pathsWriter.resultType(), pathsWriter.getFileName());
+            }
+
+            if (task.includeTemporalDensity) {
+                if (task.originPointSet == null) {
+                    // Gridded origins. The full temporal density information is probably too voluminous to be useful.
+                    // We might want to record a grid of dual accessibility values, but this will require some serious
+                    // refactoring of the GridResultWriter.
+                    // if (job.templateTask.dualAccessibilityThreshold > 0) { ... }
+                    throw new RuntimeException("Temporal density of opportunities cannot be recorded for gridded origin points.");
+                } else {
+                    var tDensityWriter = new TemporalDensityCsvResultWriter(task);
+                    resultWriters.add(tDensityWriter);
+                    analysis.resultStorage.put(tDensityWriter.resultType(), tDensityWriter.getFileName());
+                }
+            }
+
+            checkArgument(notNullOrEmpty(resultWriters), "A regional analysis should always create at least one grid or CSV file.");
+        }
+        return resultWriters;
+    }
+
     /**
      * Store the regional analysis scenario as JSON for retrieval by the workers.
      */

src/main/java/com/conveyal/analysis/models/RegionalAnalysis.java

-61
@@ -1,23 +1,12 @@
 package com.conveyal.analysis.models;
 
-import com.conveyal.analysis.results.AccessCsvResultWriter;
 import com.conveyal.analysis.results.CsvResultType;
-import com.conveyal.analysis.results.GridResultWriter;
-import com.conveyal.analysis.results.PathCsvResultWriter;
-import com.conveyal.analysis.results.RegionalResultWriter;
-import com.conveyal.analysis.results.TemporalDensityCsvResultWriter;
-import com.conveyal.analysis.results.TimeCsvResultWriter;
 import com.conveyal.r5.analyst.cluster.RegionalTask;
 import org.locationtech.jts.geom.Geometry;
 
-import java.util.ArrayList;
 import java.util.HashMap;
-import java.util.List;
 import java.util.Map;
 
-import static com.conveyal.r5.common.Util.notNullOrEmpty;
-import static com.google.common.base.Preconditions.checkArgument;
-
 /**
  * Represents a single regional (multi-origin) accessibility analysis,
  * which may have more than one percentile and cutoff.
@@ -112,56 +101,6 @@ public class RegionalAnalysis extends Model implements Cloneable {
      */
     public Map<CsvResultType, String> resultStorage = new HashMap<>();
 
-    /**
-     * Create results writers for this regional analysis and a task. Stores the result paths that are created by the
-     * writers.
-     */
-    public List<RegionalResultWriter> createResultWriters(RegionalTask task) {
-        // Create the result writers. Store their result file paths in the database.
-        var resultWriters = new ArrayList<RegionalResultWriter>();
-        if (!task.makeTauiSite) {
-            if (task.recordAccessibility) {
-                if (task.originPointSet != null) {
-                    // Freeform origins - create CSV regional analysis results
-                    var accessWriter = new AccessCsvResultWriter(task);
-                    resultWriters.add(accessWriter);
-                    resultStorage.put(accessWriter.resultType(), accessWriter.getFileName());
-                } else {
-                    // Gridded origins - create gridded regional analysis results
-                    resultWriters.addAll(GridResultWriter.createWritersFromTask(this, task));
-                }
-            }
-
-            if (task.recordTimes) {
-                var timesWriter = new TimeCsvResultWriter(task);
-                resultWriters.add(timesWriter);
-                resultStorage.put(timesWriter.resultType(), timesWriter.getFileName());
-            }
-
-            if (task.includePathResults) {
-                var pathsWriter = new PathCsvResultWriter(task);
-                resultWriters.add(pathsWriter);
-                resultStorage.put(pathsWriter.resultType(), pathsWriter.getFileName());
-            }
-
-            if (task.includeTemporalDensity) {
-                if (task.originPointSet == null) {
-                    // Gridded origins. The full temporal density information is probably too voluminous to be useful.
-                    // We might want to record a grid of dual accessibility values, but this will require some serious
-                    // refactoring of the GridResultWriter.
-                    // if (job.templateTask.dualAccessibilityThreshold > 0) { ... }
-                    throw new RuntimeException("Temporal density of opportunities cannot be recorded for gridded origin points.");
-                } else {
-                    var tDensityWriter = new TemporalDensityCsvResultWriter(task);
-                    resultWriters.add(tDensityWriter);
-                    resultStorage.put(tDensityWriter.resultType(), tDensityWriter.getFileName());
-                }
-            }
-            checkArgument(notNullOrEmpty(resultWriters), "A regional analysis should always create at least one grid or CSV file.");
-        }
-        return resultWriters;
-    }
-
     public RegionalAnalysis clone () {
         try {
             return (RegionalAnalysis) super.clone();

src/main/java/com/conveyal/analysis/results/GridResultWriter.java

+6 -5
@@ -1,6 +1,5 @@
 package com.conveyal.analysis.results;
 
-import com.conveyal.analysis.models.RegionalAnalysis;
 import com.conveyal.file.FileCategory;
 import com.conveyal.file.FileStorageKey;
 import com.conveyal.file.FileUtils;
@@ -76,14 +75,14 @@ public class GridResultWriter implements RegionalResultWriter {
      * We create one GridResultWriter for each destination pointset and percentile.
      * Each of those output files contains data for all specified travel time cutoffs at each origin.
      */
-    public static List<GridResultWriter> createWritersFromTask(RegionalAnalysis regionalAnalysis, RegionalTask task) {
+    public static List<GridResultWriter> createWritersFromTask(String[] destinationPointSetIds, RegionalTask task) {
         int nPercentiles = task.percentiles.length;
-        int nDestinationPointSets = task.makeTauiSite ? 0 : task.destinationPointSetKeys.length;
+        int nDestinationPointSets = destinationPointSetIds.length;
         // Create one grid writer per percentile and destination pointset.
         var gridWriters = new ArrayList<GridResultWriter>();
         for (int destinationIndex = 0; destinationIndex < nDestinationPointSets; destinationIndex++) {
             for (int percentileIndex = 0; percentileIndex < nPercentiles; percentileIndex++) {
-                String destinationPointSetId = regionalAnalysis.destinationPointSetIds[destinationIndex];
+                String destinationPointSetId = destinationPointSetIds[destinationIndex];
                 gridWriters.add(new GridResultWriter(
                         task,
                         percentileIndex,
@@ -148,7 +147,9 @@ public static List<GridResultWriter> createWritersFromTask(RegionalAnalysis regi
         }
     }
 
-    /** Gzip the access grid and upload it to file storage (such as AWS S3). */
+    /**
+     * Gzip the access grid and return the files.
+     */
     @Override
     public synchronized Map.Entry<FileStorageKey, File> finish () throws IOException {
         randomAccessFile.close();
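
With the narrower parameter, GridResultWriter no longer depends on the RegionalAnalysis model class at all: callers hand it only the destination pointset IDs. A minimal usage sketch of the new signature, mirroring the controller change above (the local variable names are illustrative):

    // Gridded origins: one grid writer per destination pointset and percentile.
    List<GridResultWriter> gridWriters =
            GridResultWriter.createWritersFromTask(analysis.destinationPointSetIds, task);
    resultWriters.addAll(gridWriters);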
