diff --git a/hoot-services/src/main/java/hoot/services/controllers/grail/GrailResource.java b/hoot-services/src/main/java/hoot/services/controllers/grail/GrailResource.java
index 797cd0c..7bbab14 100644
--- a/hoot-services/src/main/java/hoot/services/controllers/grail/GrailResource.java
+++ b/hoot-services/src/main/java/hoot/services/controllers/grail/GrailResource.java
@@ -51,6 +51,7 @@ import java.io.UnsupportedEncodingException;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLEncoder;
+import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
@@ -430,13 +431,12 @@ public class GrailResource {
}
}
- // The parents parents of the current job is the conflate job which contains the resource id of the merged layer
- String grandparentJobId = DbUtils.getParentId(reqParams.getParentId());
- Long resourceId = DbUtils.getMapIdByJobId(grandparentJobId); // the merged layer
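+ // The parent (derive changeset) job records the reference layer's map id in its "input1" tag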
+ Map<String, String> tags = DbUtils.getJobTags(reqParams.getParentId());
+ String resourceId = tags.get("input1");
if(resourceId != null) {
// Setup workflow to refresh rails data after the push
- long referenceId = DbUtils.getMergedReference(resourceId);
+ long referenceId = Long.parseLong(resourceId);
Long parentFolderId = DbUtils.getParentFolder(referenceId);
Map<String, String> mapTags = DbUtils.getMapsTableTags(referenceId);
@@ -455,7 +455,11 @@ public class GrailResource {
}
}
- jobProcessor.submitAsync(new Job(jobId, user.getId(), workflow.toArray(new Command[workflow.size()]), JobType.UPLOAD_CHANGESET, reqParams.getParentId()));
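+ // Attach the changeset bounds and parent job id as tags on the upload job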
+ Map<String, Object> jobStatusTags = new HashMap<>();
+ jobStatusTags.put("bbox", reqParams.getBounds());
+ jobStatusTags.put("parentId", reqParams.getParentId());
+
+ jobProcessor.submitAsync(new Job(jobId, user.getId(), workflow.toArray(new Command[workflow.size()]), JobType.UPLOAD_CHANGESET, jobStatusTags));
}
catch (WebApplicationException wae) {
throw wae;
@@ -497,6 +501,7 @@ public class GrailResource {
@Produces(MediaType.APPLICATION_JSON)
public Response deriveChangeset(@Context HttpServletRequest request,
GrailParams reqParams,
+ @QueryParam("replacement") @DefaultValue("false") Boolean replacement,
@QueryParam("DEBUG_LEVEL") @DefaultValue("info") String debugLevel) {
Users user = Users.fromRequest(request);
@@ -520,7 +525,7 @@ public class GrailResource {
return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(ioe.getMessage()).build();
}
- GrailParams params = new GrailParams();
+ GrailParams params = new GrailParams(reqParams);
params.setUser(user);
try {
@@ -533,11 +538,17 @@ public class GrailResource {
params.setOutput(changeSet.getAbsolutePath());
// Run changeset-derive
- ExternalCommand makeChangeset = grailCommandFactory.build(mainJobId, params, debugLevel, DeriveChangesetCommand.class, this.getClass());
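+ // The replacement flag switches from the standard derive command to the changeset replacement derive command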
+ ExternalCommand makeChangeset = grailCommandFactory.build(mainJobId, params, debugLevel, (replacement) ? DeriveChangesetReplacementCommand.class : DeriveChangesetCommand.class, this.getClass());
workflow.add(makeChangeset);
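+ // Tag the derive job with its inputs so follow-on jobs (e.g. the changeset upload) can look them up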
+ Map<String, Object> jobStatusTags = new HashMap<>();
+ jobStatusTags.put("bbox", reqParams.getBounds());
+ jobStatusTags.put("input1", input1);
+ jobStatusTags.put("input2", input2);
+ jobStatusTags.put("parentId", reqParams.getParentId());
+
// Now roll the dice and run everything.....
- jobProcessor.submitAsync(new Job(mainJobId, user.getId(), workflow.toArray(new Command[workflow.size()]), JobType.DERIVE_CHANGESET, reqParams.getParentId()));
+ jobProcessor.submitAsync(new Job(mainJobId, user.getId(), workflow.toArray(new Command[workflow.size()]), JobType.DERIVE_CHANGESET, jobStatusTags));
}
catch (WebApplicationException wae) {
throw wae;
@@ -546,7 +557,7 @@ public class GrailResource {
throw new WebApplicationException(iae, Response.status(Response.Status.BAD_REQUEST).entity(iae.getMessage()).build());
}
catch (Exception e) {
- String msg = "Error during conflate differential! Params: " + params;
+ String msg = "Error during derive changeset! Params: " + params;
throw new WebApplicationException(e, Response.serverError().entity(msg).build());
}
@@ -597,7 +608,7 @@ public class GrailResource {
List<Command> workflow = new LinkedList<>();
// Write the data to the hoot db
- GrailParams params = new GrailParams();
+ GrailParams params = new GrailParams(reqParams);
params.setUser(user);
params.setPullUrl(PUBLIC_OVERPASS_URL);
@@ -619,11 +630,20 @@ public class GrailResource {
ExternalCommand importOverpass = grailCommandFactory.build(jobId, params, "info", PushToDbCommand.class, this.getClass());
workflow.add(importOverpass);
+ // Set map tags marking dataset as eligible for derive changeset
+ Map<String, String> tags = new HashMap<>();
+ tags.put("bbox", params.getBounds());
+ InternalCommand setMapTags = setMapTagsCommandFactory.build(tags, jobId);
+ workflow.add(setMapTags);
+
// Move the data to the folder
InternalCommand setFolder = updateParentCommandFactory.build(jobId, folderId, layerName, user, this.getClass());
workflow.add(setFolder);
- jobProcessor.submitAsync(new Job(jobId, user.getId(), workflow.toArray(new Command[workflow.size()]), JobType.IMPORT));
+ Map<String, Object> jobStatusTags = new HashMap<>();
+ jobStatusTags.put("bbox", bbox);
+
+ jobProcessor.submitAsync(new Job(jobId, user.getId(), workflow.toArray(new Command[workflow.size()]), JobType.IMPORT, jobStatusTags));
ResponseBuilder responseBuilder = Response.ok(json.toJSONString());
response = responseBuilder.build();
@@ -681,8 +701,8 @@ public class GrailResource {
// first line that lists columns which are counts for each feature type
overpassQuery = overpassQuery.replace("[out:json]", "[out:csv(::count, ::\"count:nodes\", ::\"count:ways\", ::\"count:relations\")]");
- // last row that lists output format
- overpassQuery = overpassQuery.replace("out meta", "out count");
+ // The overpass query can contain multiple "out ..." statements (e.g. "out meta;", "out body;"), so replace all of them with "out count;"
+ overpassQuery = overpassQuery.replaceAll("out [\\s\\w]+;", "out count;");
}
@@ -692,22 +712,64 @@ public class GrailResource {
overpassQuery = URLEncoder.encode(overpassQuery, "UTF-8").replace("+", "%20"); // need to encode url for the get
} catch (UnsupportedEncodingException ignored) {} // Can be safely ignored because UTF-8 is always supported
+
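+ // Build one stats row per element type plus a total row; each overpass source that returns counts adds a column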
+ List<String> columns = new ArrayList<>();
+ List<JSONObject> data = new ArrayList<>();
+ JSONObject nodeObj = new JSONObject();
+ nodeObj.put("label", "node");
+ JSONObject wayObj = new JSONObject();
+ wayObj.put("label", "way");
+ JSONObject relationObj = new JSONObject();
+ relationObj.put("label", "relation");
+ JSONObject totalObj = new JSONObject();
+ totalObj.put("label", "total");
+
// Get public overpass data
String publicUrl = replaceSensitiveData(PUBLIC_OVERPASS_URL) + "?data=" + overpassQuery;
- String publicStats = retrieveOverpassStats(publicUrl, false);
+ ArrayList<Double> publicStats = retrieveOverpassStats(publicUrl, false);
+ if(publicStats.size() != 0) {
+ columns.add(GRAIL_OVERPASS_LABEL);
+ totalObj.put(GRAIL_OVERPASS_LABEL, publicStats.get(0));
+ nodeObj.put(GRAIL_OVERPASS_LABEL, publicStats.get(1));
+ wayObj.put(GRAIL_OVERPASS_LABEL, publicStats.get(2));
+ relationObj.put(GRAIL_OVERPASS_LABEL, publicStats.get(3));
+ }
// Get private overpass data if private overpass url was provided
- String privateStats = null;
if (!replaceSensitiveData(PRIVATE_OVERPASS_URL).equals(PRIVATE_OVERPASS_URL)) {
String privateUrl = replaceSensitiveData(PRIVATE_OVERPASS_URL) + "?data=" + overpassQuery;
- privateStats = retrieveOverpassStats(privateUrl, true);
+ ArrayList<Double> privateStats = retrieveOverpassStats(privateUrl, true);
+ if(privateStats.size() != 0) {
+ columns.add(GRAIL_RAILS_LABEL);
+ totalObj.put(GRAIL_RAILS_LABEL, privateStats.get(0));
+ nodeObj.put(GRAIL_RAILS_LABEL, privateStats.get(1));
+ wayObj.put(GRAIL_RAILS_LABEL, privateStats.get(2));
+ relationObj.put(GRAIL_RAILS_LABEL, privateStats.get(3));
+ }
}
- JSONObject jobInfo = new JSONObject();
- jobInfo.put("publicStats", publicStats);
- jobInfo.put("privateStats", privateStats);
+ data.add(nodeObj);
+ data.add(wayObj);
+ data.add(relationObj);
+ data.add(totalObj);
+ /*
+ * Example response object
+ {
+ columns: ["OSM", "NOME"],
+ data: [
+ {label: "node", NOME: 3, OSM: 5},
+ {label: "way", NOME: 1, OSM: 2},
+ {label: "relation", NOME: 0, OSM: 1},
+ {label: "total", NOME: 4, OSM: 7}
+ ]
+ }
+ *
+ */
+ JSONObject stats = new JSONObject();
+ stats.put("columns", columns);
+ stats.put("data", data);
- return Response.ok(jobInfo.toJSONString()).build();
+ return Response.ok().entity(stats).build();
}
/**
@@ -739,7 +801,6 @@ public class GrailResource {
Users user = Users.fromRequest(request);
advancedUserCheck(user);
- String bbox = reqParams.getBounds();
String layerName = reqParams.getInput1();
String jobId = UUID.randomUUID().toString().replace("-", "");
File workDir = new File(TEMP_OUTPUT_PATH, "grail_" + jobId);
@@ -748,16 +809,13 @@ public class GrailResource {
throw new BadRequestException("Record with name : " + layerName + " already exists. Please try a different name.");
}
- Response response;
JSONObject json = new JSONObject();
json.put("jobid", jobId);
- GrailParams params = new GrailParams();
+ GrailParams params = new GrailParams(reqParams);
params.setUser(user);
params.setWorkDir(workDir);
params.setOutput(layerName);
- params.setBounds(bbox);
- params.setCustomQuery(reqParams.getCustomQuery());
List<Command> workflow;
try {
@@ -767,12 +825,12 @@ public class GrailResource {
return Response.status(Response.Status.SERVICE_UNAVAILABLE).entity(exc.getMessage()).build();
}
- jobProcessor.submitAsync(new Job(jobId, user.getId(), workflow.toArray(new Command[workflow.size()]), JobType.IMPORT));
+ Map<String, Object> jobStatusTags = new HashMap<>();
+ jobStatusTags.put("bbox", reqParams.getBounds());
- ResponseBuilder responseBuilder = Response.ok(json.toJSONString());
- response = responseBuilder.build();
+ jobProcessor.submitAsync(new Job(jobId, user.getId(), workflow.toArray(new Command[workflow.size()]), JobType.IMPORT, jobStatusTags));
- return response;
+ return Response.ok(json.toJSONString()).build();
}
private List<Command> setupRailsPull(String jobId, GrailParams params, Long parentFolderId) throws UnavailableException {
@@ -802,7 +860,7 @@ public class GrailResource {
// Set map tags marking dataset as eligible for derive changeset
Map<String, String> tags = new HashMap<>();
- tags.put("source", "rails");
+ tags.put("grailReference", "true");
tags.put("bbox", params.getBounds());
InternalCommand setMapTags = setMapTagsCommandFactory.build(tags, jobId);
workflow.add(setMapTags);
@@ -893,8 +951,8 @@ public class GrailResource {
* If false then no cert will need to be used for the request
* @return
*/
- private static String retrieveOverpassStats(String url, boolean usePrivateOverpass) {
- StringBuilder statsInfo = new StringBuilder();
+ private static ArrayList<Double> retrieveOverpassStats(String url, boolean usePrivateOverpass) {
+ ArrayList<Double> statCounts = new ArrayList<>();
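+ // One running count per csv column, in query column order: total, nodes, ways, relations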
try {
InputStream inputStream;
@@ -912,8 +970,24 @@ public class GrailResource {
BufferedReader br = new BufferedReader(new InputStreamReader(inputStream));
String inputLine;
+
+ boolean firstLine = true;
while ((inputLine = br.readLine()) != null) {
- statsInfo.append(inputLine + "\n");
+ // Every line after the header contains tab-separated stat counts; accumulate them per column
+ if(!firstLine){
+ String[] rowCounts = inputLine.split("\t");
+ for(int i = 0; i < rowCounts.length; i++) {
+ statCounts.set(i, statCounts.get(i) + Double.parseDouble(rowCounts[i]));
+ }
+ } else {
+ // The first line holds the column names, so initialize the list with one zeroed entry per column
+ int numColumns = inputLine.split("\t").length;
+ for(int i = 0; i < numColumns; i++) {
+ statCounts.add(0.0);
+ }
+ }
+
+ firstLine = false;
}
br.close();
@@ -923,7 +997,7 @@ public class GrailResource {
throw new WebApplicationException(exc, Response.status(Response.Status.NOT_FOUND).entity(msg).build());
}
- return statsInfo.toString();
+ return statCounts;
}
}