Refactor rendering types. Remove polyline/point/polygon concept

parent c9b9466f2e
commit 2612e665ad

4 changed files with 783 additions and 783 deletions
@@ -215,7 +215,7 @@ public class BinaryMapIndexWriter {
 int highestTargetId = types.size();
 // 1. prepare map rule type to write
 for (MapRulType t : types.values()) {
-if (!t.isMapIndexed() || t.getTargetTagValue() != null || t.getFreq() == 0) {
+if (t.getTargetTagValue() != null || t.getFreq() == 0) {
 t.setTargetId(highestTargetId++);
 } else {
 out.add(t);
@@ -348,7 +348,6 @@ public class BinaryMapIndexWriter {
 byte[] additionalTypes, Map<MapRulType, String> names, Map<String, Integer> stringTable,
 MapDataBlock.Builder dataBlock) throws IOException{

-Bounds bounds = stackBounds.peek();

 MapData.Builder data = MapData.newBuilder();
 // calculate size
@@ -359,7 +358,7 @@ public class BinaryMapIndexWriter {
 int x = Algoritms.parseIntFromBytes(coordinates, i * 8);
 int y = Algoritms.parseIntFromBytes(coordinates, i * 8 + 4);
 int tx = (x - pcalcx) >> SHIFT_COORDINATES;
-int ty = (x - pcalcy) >> SHIFT_COORDINATES;
+int ty = (y - pcalcy) >> SHIFT_COORDINATES;

 writeRawVarint32(mapDataBuf, tx);
 writeRawVarint32(mapDataBuf, ty);
@@ -389,7 +388,7 @@ public class BinaryMapIndexWriter {
 pcalcy = ptop;
 } else {
 int tx = (x - pcalcx) >> SHIFT_COORDINATES;
-int ty = (x - pcalcy) >> SHIFT_COORDINATES;
+int ty = (y - pcalcy) >> SHIFT_COORDINATES;

 writeRawVarint32(mapDataBuf, tx);
 writeRawVarint32(mapDataBuf, ty);
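The two ty corrections above fix a copy-paste slip: the vertical delta was computed from x instead of y before being shifted and written as a varint. The sketch below only illustrates the general delta-plus-varint idea these hunks rely on; the shift constant, the zig-zag step and the method names are stand-ins, not the actual OsmAnd writer, which goes through writeRawVarint32 and the protobuf MapDataBlock builder.

import java.io.ByteArrayOutputStream;

// Simplified sketch of delta + varint coordinate packing (not the project's code).
// Only the difference from the previous point, right-shifted by a precision
// constant, is written for each coordinate.
public class CoordinateDeltaSketch {

    static final int SHIFT = 5; // stand-in for SHIFT_COORDINATES

    // Zig-zag maps signed deltas to unsigned values so small negative deltas stay short.
    static void writeVarint(ByteArrayOutputStream out, int value) {
        int v = (value << 1) ^ (value >> 31);
        while ((v & ~0x7F) != 0) {
            out.write((v & 0x7F) | 0x80);
            v >>>= 7;
        }
        out.write(v);
    }

    public static byte[] encode(int[] xs, int[] ys) {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        int px = 0, py = 0;
        for (int i = 0; i < xs.length; i++) {
            writeVarint(buf, (xs[i] - px) >> SHIFT); // delta of x
            writeVarint(buf, (ys[i] - py) >> SHIFT); // delta of y, not x
            px = xs[i];
            py = ys[i];
        }
        return buf.toByteArray();
    }

    public static void main(String[] args) {
        byte[] b = encode(new int[] { 1 << 20, (1 << 20) + 640 },
                new int[] { 1 << 21, (1 << 21) - 320 });
        System.out.println("encoded bytes: " + b.length);
    }
}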
@@ -36,6 +36,7 @@ import net.osmand.osm.Relation;
 import net.osmand.osm.Way;
 import net.osmand.osm.Entity.EntityId;
 import net.osmand.osm.Entity.EntityType;
+import net.osmand.osm.MapRenderingTypes.MapRulType;
 import net.osmand.osm.OSMSettings.OSMTagKey;

 import org.apache.commons.logging.Log;
@@ -57,7 +58,6 @@ public class IndexVectorMapCreator extends AbstractIndexPartCreator {
 private MapRenderingTypes renderingTypes;
 private MapZooms mapZooms;

-
 // MEMORY map : save it in memory while that is allowed
 private Map<Long, Set<Integer>>[] multiPolygonsWays;
 private Map<Long, String> multiPolygonsNames = new LinkedHashMap<Long, String>();
@@ -68,8 +68,6 @@ public class IndexVectorMapCreator extends AbstractIndexPartCreator {
 TIntArrayList typeUse = new TIntArrayList(8);
 List<Long> restrictionsUse = new ArrayList<Long>(8);

-
-
 private PreparedStatement mapBinaryStat;
 private PreparedStatement mapLowLevelBinaryStat;
 private int lowLevelWays = -1;
@@ -79,7 +77,6 @@ public class IndexVectorMapCreator extends AbstractIndexPartCreator {
 private int zoomWaySmothness = 0;
 private final Log logMapDataWarn;

-
 public IndexVectorMapCreator(Log logMapDataWarn, MapZooms mapZooms, MapRenderingTypes renderingTypes, int zoomWaySmothness) {
 this.logMapDataWarn = logMapDataWarn;
 this.mapZooms = mapZooms;
@@ -116,7 +113,7 @@ public class IndexVectorMapCreator extends AbstractIndexPartCreator {
 }
 }
 }
-if(!outerFound){
+if (!outerFound) {
 logMapDataWarn.warn("Probably map bug: Multipoligon id=" + e.getId() + " contains only inner ways : "); //$NON-NLS-1$ //$NON-NLS-2$
 return;
 }
@@ -147,15 +144,18 @@ public class IndexVectorMapCreator extends AbstractIndexPartCreator {
 for (Way way : l) {
 boolean inner = "inner".equals(entities.get(way)); //$NON-NLS-1$
 if (innerType != inner) {
-logMapDataWarn.warn("Probably map bug: Multipoligon contains outer and inner ways.\n" + //$NON-NLS-1$
-"Way:" + way.getId() + " is strange part of completed ring. InnerType:" + innerType + " way inner: " + inner + " way inner string:" + entities.get(way)); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$
+logMapDataWarn
+.warn("Probably map bug: Multipoligon contains outer and inner ways.\n" + //$NON-NLS-1$
+"Way:"
++ way.getId()
++ " is strange part of completed ring. InnerType:" + innerType + " way inner: " + inner + " way inner string:" + entities.get(way)); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$
 return;
 }
 }
 }

 Node nodeOut = checkOuterWaysEncloseInnerWays(completedRings, entities);
-if(nodeOut != null){
+if (nodeOut != null) {
 logMapDataWarn.warn("Map bug: Multipoligon contains 'inner' way point outside of 'outer' border.\n" + //$NON-NLS-1$
 "Multipolygon id : " + e.getId() + ", inner node out id : " + nodeOut.getId()); //$NON-NLS-1$
 }
@@ -185,15 +185,13 @@ public class IndexVectorMapCreator extends AbstractIndexPartCreator {
 }
 }

-
-
 private Node checkOuterWaysEncloseInnerWays(List<List<Way>> completedRings, Map<Entity, String> entities) {
 List<List<Way>> innerWays = new ArrayList<List<Way>>();
 Boundary outerBoundary = new Boundary(true);
 Node toReturn = null;
-for(List<Way> ring : completedRings){
+for (List<Way> ring : completedRings) {
 boolean innerType = "inner".equals(entities.get(ring.get(0))); //$NON-NLS-1$
-if(!innerType){
+if (!innerType) {
 outerBoundary.getOuterWays().addAll(ring);
 } else {
 innerWays.add(ring);
@@ -216,7 +214,6 @@ public class IndexVectorMapCreator extends AbstractIndexPartCreator {
 return toReturn;
 }

-
 private void putMultipolygonType(Map<Long, Set<Integer>> multiPolygonsWays, long baseId, int mtType, boolean inverse) {
 if (mtType == 0) {
 return;
@ -275,8 +272,244 @@ public class IndexVectorMapCreator extends AbstractIndexPartCreator {
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
private long encodeTypesToOneLong(int mainType) {
|
||||||
|
long i = 0;
|
||||||
|
int ind = 0;
|
||||||
|
int sh = 0;
|
||||||
|
if (typeUse.size() > 3) {
|
||||||
|
logMapDataWarn.error("Types for low index way more than 4"); //$NON-NLS-1$
|
||||||
|
}
|
||||||
|
i |= (mainType << sh);
|
||||||
|
if (typeUse.size() > ind) {
|
||||||
|
sh += 16;
|
||||||
|
i |= ((long) typeUse.get(ind++) << sh);
|
||||||
|
if (typeUse.size() > ind) {
|
||||||
|
sh += 16;
|
||||||
|
i |= ((long) typeUse.get(ind++) << sh);
|
||||||
|
if (typeUse.size() > ind) {
|
||||||
|
sh += 16;
|
||||||
|
i |= ((long) typeUse.get(ind++) << sh);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return i;
|
||||||
|
}
|
||||||
|
|
||||||
private void writeBinaryEntityToMapDatabase(Entity e, long baseId, boolean inverse, int level) throws SQLException {
|
protected int decodeTypesFromOneLong(long i) {
|
||||||
|
typeUse.clear();
|
||||||
|
int mask = (1 << 16) - 1;
|
||||||
|
int k = (int) (i & mask);
|
||||||
|
int r = 0;
|
||||||
|
if (k > 0) {
|
||||||
|
r = k;
|
||||||
|
i >>= 16;
|
||||||
|
k = (int) (i & mask);
|
||||||
|
if (k > 0) {
|
||||||
|
typeUse.add(k);
|
||||||
|
i >>= 16;
|
||||||
|
k = (int) (i & mask);
|
||||||
|
if (k > 0) {
|
||||||
|
typeUse.add(k);
|
||||||
|
i >>= 16;
|
||||||
|
k = (int) (i & mask);
|
||||||
|
if (k > 0) {
|
||||||
|
typeUse.add(k);
|
||||||
|
i >>= 16;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return r;
|
||||||
|
}
|
||||||
|
|
||||||
|
protected Way simplifyWay(Way originalE, long id, boolean hasMulti, int zoom, String name, int type, int level) throws SQLException {
|
||||||
|
List<Node> nodes = originalE.getNodes();
|
||||||
|
Way way = new Way(id);
|
||||||
|
for (String t : originalE.getTagKeySet()) {
|
||||||
|
way.putTag(t, originalE.getTag(t));
|
||||||
|
}
|
||||||
|
boolean cycle = originalE.getNodeIds().get(0).longValue() == originalE.getNodeIds().get(nodes.size() - 1).longValue();
|
||||||
|
long longType = encodeTypesToOneLong(type);
|
||||||
|
|
||||||
|
if (cycle) {
|
||||||
|
if (checkForSmallAreas(nodes, zoom + Math.min(zoomWaySmothness / 2, 3), 1, 4)) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
MapAlgorithms.simplifyDouglasPeucker(nodes, zoom + 8 + zoomWaySmothness, 3, way);
|
||||||
|
if (way.getNodes().size() < 2) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
if (cycle) {
|
||||||
|
// nothing to do
|
||||||
|
return way;
|
||||||
|
} else {
|
||||||
|
lowLevelWays++;
|
||||||
|
insertLowLevelMapBinaryObject(level, longType, id, way.getNodes(), name);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
public int getLowLevelWays() {
|
||||||
|
return lowLevelWays;
|
||||||
|
}
|
||||||
|
|
||||||
|
private void loadNodes(byte[] nodes, List<Float> toPut) {
|
||||||
|
toPut.clear();
|
||||||
|
for (int i = 0; i < nodes.length;) {
|
||||||
|
int lat = Algoritms.parseIntFromBytes(nodes, i);
|
||||||
|
i += 4;
|
||||||
|
int lon = Algoritms.parseIntFromBytes(nodes, i);
|
||||||
|
i += 4;
|
||||||
|
toPut.add(Float.intBitsToFloat(lat));
|
||||||
|
toPut.add(Float.intBitsToFloat(lon));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void processingLowLevelWays(IProgress progress) throws SQLException {
|
||||||
|
restrictionsUse.clear();
|
||||||
|
mapLowLevelBinaryStat.executeBatch();
|
||||||
|
mapLowLevelBinaryStat.close();
|
||||||
|
pStatements.remove(mapLowLevelBinaryStat);
|
||||||
|
mapLowLevelBinaryStat = null;
|
||||||
|
mapConnection.commit();
|
||||||
|
|
||||||
|
PreparedStatement startStat = mapConnection.prepareStatement("SELECT id, start_node, end_node, nodes FROM low_level_map_objects"
|
||||||
|
+ " WHERE start_node = ? AND type=? AND level = ? AND name=?");
|
||||||
|
PreparedStatement endStat = mapConnection.prepareStatement("SELECT id, start_node, end_node, nodes FROM low_level_map_objects"
|
||||||
|
+ " WHERE end_node = ? AND type=? AND level = ? AND name=?");
|
||||||
|
Statement selectStatement = mapConnection.createStatement();
|
||||||
|
ResultSet rs = selectStatement.executeQuery("SELECT id, start_node, end_node, name, nodes, type, level FROM low_level_map_objects");
|
||||||
|
Set<Long> visitedWays = new LinkedHashSet<Long>();
|
||||||
|
ArrayList<Float> list = new ArrayList<Float>(100);
|
||||||
|
while (rs.next()) {
|
||||||
|
if (lowLevelWays != -1) {
|
||||||
|
progress.progress(1);
|
||||||
|
}
|
||||||
|
long id = rs.getLong(1);
|
||||||
|
if (visitedWays.contains(id)) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
visitedWays.add(id);
|
||||||
|
int level = rs.getInt(7);
|
||||||
|
int zoom = mapZooms.getLevel(level).getMaxZoom();
|
||||||
|
|
||||||
|
long startNode = rs.getLong(2);
|
||||||
|
long endNode = rs.getLong(3);
|
||||||
|
|
||||||
|
String name = rs.getString(4);
|
||||||
|
long ltype = rs.getLong(6);
|
||||||
|
loadNodes(rs.getBytes(5), list);
|
||||||
|
ArrayList<Float> wayNodes = new ArrayList<Float>(list);
|
||||||
|
|
||||||
|
// combine startPoint with EndPoint
|
||||||
|
boolean combined = true;
|
||||||
|
while (combined) {
|
||||||
|
combined = false;
|
||||||
|
endStat.setLong(1, startNode);
|
||||||
|
endStat.setLong(2, ltype);
|
||||||
|
endStat.setShort(3, (short) level);
|
||||||
|
endStat.setString(4, name);
|
||||||
|
ResultSet fs = endStat.executeQuery();
|
||||||
|
while (fs.next()) {
|
||||||
|
if (!visitedWays.contains(fs.getLong(1))) {
|
||||||
|
combined = true;
|
||||||
|
long lid = fs.getLong(1);
|
||||||
|
startNode = fs.getLong(2);
|
||||||
|
visitedWays.add(lid);
|
||||||
|
loadNodes(fs.getBytes(4), list);
|
||||||
|
ArrayList<Float> li = new ArrayList<Float>(list);
|
||||||
|
// remove first lat/lon point
|
||||||
|
wayNodes.remove(0);
|
||||||
|
wayNodes.remove(0);
|
||||||
|
li.addAll(wayNodes);
|
||||||
|
wayNodes = li;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
fs.close();
|
||||||
|
}
|
||||||
|
|
||||||
|
// combined end point
|
||||||
|
combined = true;
|
||||||
|
while (combined) {
|
||||||
|
combined = false;
|
||||||
|
startStat.setLong(1, endNode);
|
||||||
|
startStat.setLong(2, ltype);
|
||||||
|
startStat.setShort(3, (short) level);
|
||||||
|
startStat.setString(4, name);
|
||||||
|
ResultSet fs = startStat.executeQuery();
|
||||||
|
while (fs.next()) {
|
||||||
|
if (!visitedWays.contains(fs.getLong(1))) {
|
||||||
|
combined = true;
|
||||||
|
long lid = fs.getLong(1);
|
||||||
|
endNode = fs.getLong(3);
|
||||||
|
visitedWays.add(lid);
|
||||||
|
loadNodes(fs.getBytes(4), list);
|
||||||
|
for (int i = 2; i < list.size(); i++) {
|
||||||
|
wayNodes.add(list.get(i));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
fs.close();
|
||||||
|
}
|
||||||
|
List<Node> wNodes = new ArrayList<Node>();
|
||||||
|
int wNsize = wayNodes.size();
|
||||||
|
for (int i = 0; i < wNsize; i += 2) {
|
||||||
|
wNodes.add(new Node(wayNodes.get(i), wayNodes.get(i + 1), i == 0 ? startNode : endNode));
|
||||||
|
}
|
||||||
|
boolean skip = false;
|
||||||
|
boolean cycle = startNode == endNode;
|
||||||
|
boolean hasMulti = multiPolygonsWays[level].containsKey(id >> 3);
|
||||||
|
if (cycle || !hasMulti) {
|
||||||
|
skip = checkForSmallAreas(wNodes, zoom - 1 + Math.min(zoomWaySmothness / 2, 3), 1, 4);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!skip) {
|
||||||
|
Way newWs = new Way(id);
|
||||||
|
MapAlgorithms.simplifyDouglasPeucker(wNodes, zoom - 1 + 8 + zoomWaySmothness, 3, newWs);
|
||||||
|
|
||||||
|
int type = decodeTypesFromOneLong(ltype);
|
||||||
|
insertBinaryMapRenderObjectIndex(mapTree[level], newWs, name, id, type, typeUse, 0, restrictionsUse, false, false, false);
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
private boolean checkForSmallAreas(List<Node> nodes, int zoom, int minz, int maxz) {
|
||||||
|
int minX = Integer.MAX_VALUE;
|
||||||
|
int maxX = Integer.MIN_VALUE;
|
||||||
|
int minY = Integer.MAX_VALUE;
|
||||||
|
int maxY = Integer.MIN_VALUE;
|
||||||
|
int c = 0;
|
||||||
|
int nsize = nodes.size();
|
||||||
|
for (int i = 0; i < nsize; i++) {
|
||||||
|
if (nodes.get(i) != null) {
|
||||||
|
c++;
|
||||||
|
int x = (int) (MapUtils.getTileNumberX(zoom, nodes.get(i).getLongitude()) * 256d);
|
||||||
|
int y = (int) (MapUtils.getTileNumberY(zoom, nodes.get(i).getLatitude()) * 256d);
|
||||||
|
minX = Math.min(minX, x);
|
||||||
|
maxX = Math.max(maxX, x);
|
||||||
|
minY = Math.min(minY, y);
|
||||||
|
maxY = Math.max(maxY, y);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (c < 2) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
return ((maxX - minX) <= minz && (maxY - minY) <= maxz) || ((maxX - minX) <= maxz && (maxY - minY) <= minz);
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
public void iterateMainEntity(Entity e, OsmDbAccessorContext ctx) throws SQLException {
|
||||||
|
if (e instanceof Way || e instanceof Node) {
|
||||||
|
// manipulate what kind of way to load
|
||||||
|
ctx.loadEntityData(e);
|
||||||
|
for (int i = 0; i < mapZooms.size(); i++) {
|
||||||
|
boolean inverse = i == 0 ? oneway : false;
|
||||||
int type = renderingTypes.encodeEntityWithType(e, mapZooms.getLevel(level).getMaxZoom(), false, typeUse);
|
int type = renderingTypes.encodeEntityWithType(e, mapZooms.getLevel(level).getMaxZoom(), false, typeUse);
|
||||||
Map<Long, Set<Integer>> multiPolygonsWays = this.multiPolygonsWays[level];
|
Map<Long, Set<Integer>> multiPolygonsWays = this.multiPolygonsWays[level];
|
||||||
boolean hasMulti = e instanceof Way && multiPolygonsWays.containsKey(e.getId());
|
boolean hasMulti = e instanceof Way && multiPolygonsWays.containsKey(e.getId());
|
||||||
|
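The encodeTypesToOneLong and decodeTypesFromOneLong helpers introduced in the hunk above pack a main type plus up to three further type ids into the single 64-bit type column of low_level_map_objects, 16 bits per slot, with an empty slot acting as the terminator. A standalone round-trip sketch of the same packing scheme follows; the class and method names are illustrative and are not part of the project.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Round-trip sketch of the 16-bits-per-type packing used for low-level ways.
public class TypePackingSketch {

    // Packs the main type plus at most three extra types, 16 bits each, low bits first.
    static long pack(int mainType, List<Integer> extraTypes) {
        if (extraTypes.size() > 3) {
            throw new IllegalArgumentException("at most a main type plus three extra types fit into one long");
        }
        long packed = mainType & 0xFFFFL;
        int shift = 16;
        for (int t : extraTypes) {
            packed |= ((long) (t & 0xFFFF)) << shift;
            shift += 16;
        }
        return packed;
    }

    // Returns the main type (lowest 16 bits) and appends the remaining non-empty
    // 16-bit slots to extraTypesOut, stopping once nothing is left.
    static int unpack(long packed, List<Integer> extraTypesOut) {
        extraTypesOut.clear();
        int main = (int) (packed & 0xFFFFL);
        packed >>>= 16;
        while (packed != 0) {
            extraTypesOut.add((int) (packed & 0xFFFFL));
            packed >>>= 16;
        }
        return main;
    }

    public static void main(String[] args) {
        long packed = pack(3, Arrays.asList(7, 42));
        List<Integer> extras = new ArrayList<>();
        int main = unpack(packed, extras);
        System.out.println(main + " " + extras); // prints: 3 [7, 42]
    }
}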
@ -346,254 +579,6 @@ public class IndexVectorMapCreator extends AbstractIndexPartCreator {
|
||||||
insertBinaryMapRenderObjectIndex(rtree, e, eName, id, type, typeUse, highwayAttributes, restrictionsUse, inverse, point, true);
|
insertBinaryMapRenderObjectIndex(rtree, e, eName, id, type, typeUse, highwayAttributes, restrictionsUse, inverse, point, true);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
private long encodeTypesToOneLong(int mainType) {
|
|
||||||
long i = 0;
|
|
||||||
int ind = 0;
|
|
||||||
int sh = 0;
|
|
||||||
if(typeUse.size() > 3){
|
|
||||||
logMapDataWarn.error("Types for low index way more than 4"); //$NON-NLS-1$
|
|
||||||
}
|
|
||||||
i |= (mainType << sh);
|
|
||||||
if (typeUse.size() > ind) {
|
|
||||||
sh += 16;
|
|
||||||
i |= ((long)typeUse.get(ind++) << sh );
|
|
||||||
if (typeUse.size() > ind) {
|
|
||||||
sh += 16;
|
|
||||||
i |= ((long)typeUse.get(ind++) << sh );
|
|
||||||
if (typeUse.size() > ind) {
|
|
||||||
sh += 16;
|
|
||||||
i |= ((long)typeUse.get(ind++) << sh);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return i;
|
|
||||||
}
|
|
||||||
|
|
||||||
protected int decodeTypesFromOneLong(long i) {
|
|
||||||
typeUse.clear();
|
|
||||||
int mask = (1 << 16) - 1;
|
|
||||||
int k = (int) (i & mask);
|
|
||||||
int r = 0;
|
|
||||||
if (k > 0) {
|
|
||||||
r = k;
|
|
||||||
i >>= 16;
|
|
||||||
k = (int) (i & mask);
|
|
||||||
if (k > 0) {
|
|
||||||
typeUse.add(k);
|
|
||||||
i >>= 16;
|
|
||||||
k = (int) (i & mask);
|
|
||||||
if (k > 0) {
|
|
||||||
typeUse.add(k);
|
|
||||||
i >>= 16;
|
|
||||||
k = (int) (i & mask);
|
|
||||||
if (k > 0) {
|
|
||||||
typeUse.add(k);
|
|
||||||
i >>= 16;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return r;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
protected Way simplifyWay(Way originalE, long id, boolean hasMulti, int zoom, String name, int type, int level) throws SQLException {
|
|
||||||
List<Node> nodes = originalE.getNodes();
|
|
||||||
Way way = new Way(id);
|
|
||||||
for (String t : originalE.getTagKeySet()) {
|
|
||||||
way.putTag(t, originalE.getTag(t));
|
|
||||||
}
|
|
||||||
boolean cycle = originalE.getNodeIds().get(0).longValue() == originalE.getNodeIds().get(nodes.size() - 1).longValue();
|
|
||||||
long longType = encodeTypesToOneLong(type);
|
|
||||||
|
|
||||||
if (cycle) {
|
|
||||||
if(checkForSmallAreas(nodes, zoom + Math.min(zoomWaySmothness / 2, 3), 1, 4)){
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
MapAlgorithms.simplifyDouglasPeucker(nodes, zoom + 8 + zoomWaySmothness, 3, way);
|
|
||||||
if (way.getNodes().size() < 2) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
if (cycle) {
|
|
||||||
// nothing to do
|
|
||||||
return way;
|
|
||||||
} else {
|
|
||||||
lowLevelWays ++;
|
|
||||||
insertLowLevelMapBinaryObject(level, longType, id, way.getNodes(), name);
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
public int getLowLevelWays() {
|
|
||||||
return lowLevelWays;
|
|
||||||
}
|
|
||||||
|
|
||||||
private void loadNodes(byte[] nodes, List<Float> toPut){
|
|
||||||
toPut.clear();
|
|
||||||
for (int i = 0; i < nodes.length;) {
|
|
||||||
int lat = Algoritms.parseIntFromBytes(nodes, i);
|
|
||||||
i += 4;
|
|
||||||
int lon = Algoritms.parseIntFromBytes(nodes, i);
|
|
||||||
i += 4;
|
|
||||||
toPut.add(Float.intBitsToFloat(lat));
|
|
||||||
toPut.add(Float.intBitsToFloat(lon));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
public void processingLowLevelWays(IProgress progress) throws SQLException {
|
|
||||||
restrictionsUse.clear();
|
|
||||||
mapLowLevelBinaryStat.executeBatch();
|
|
||||||
mapLowLevelBinaryStat.close();
|
|
||||||
pStatements.remove(mapLowLevelBinaryStat);
|
|
||||||
mapLowLevelBinaryStat = null;
|
|
||||||
mapConnection.commit();
|
|
||||||
|
|
||||||
PreparedStatement startStat = mapConnection.prepareStatement("SELECT id, start_node, end_node, nodes FROM low_level_map_objects"
|
|
||||||
+ " WHERE start_node = ? AND type=? AND level = ? AND name=?");
|
|
||||||
PreparedStatement endStat = mapConnection.prepareStatement("SELECT id, start_node, end_node, nodes FROM low_level_map_objects"
|
|
||||||
+ " WHERE end_node = ? AND type=? AND level = ? AND name=?");
|
|
||||||
Statement selectStatement = mapConnection.createStatement();
|
|
||||||
ResultSet rs = selectStatement.executeQuery("SELECT id, start_node, end_node, name, nodes, type, level FROM low_level_map_objects");
|
|
||||||
Set<Long> visitedWays = new LinkedHashSet<Long>();
|
|
||||||
ArrayList<Float> list = new ArrayList<Float>(100);
|
|
||||||
while(rs.next()){
|
|
||||||
if(lowLevelWays != -1){
|
|
||||||
progress.progress(1);
|
|
||||||
}
|
|
||||||
long id = rs.getLong(1);
|
|
||||||
if(visitedWays.contains(id)){
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
visitedWays.add(id);
|
|
||||||
int level = rs.getInt(7);
|
|
||||||
int zoom = mapZooms.getLevel(level).getMaxZoom();
|
|
||||||
|
|
||||||
long startNode = rs.getLong(2);
|
|
||||||
long endNode = rs.getLong(3);
|
|
||||||
|
|
||||||
String name = rs.getString(4);
|
|
||||||
long ltype = rs.getLong(6);
|
|
||||||
loadNodes(rs.getBytes(5), list);
|
|
||||||
ArrayList<Float> wayNodes = new ArrayList<Float>(list);
|
|
||||||
|
|
||||||
|
|
||||||
// combine startPoint with EndPoint
|
|
||||||
boolean combined = true;
|
|
||||||
while (combined) {
|
|
||||||
combined = false;
|
|
||||||
endStat.setLong(1, startNode);
|
|
||||||
endStat.setLong(2, ltype);
|
|
||||||
endStat.setShort(3, (short) level);
|
|
||||||
endStat.setString(4, name);
|
|
||||||
ResultSet fs = endStat.executeQuery();
|
|
||||||
while (fs.next()) {
|
|
||||||
if (!visitedWays.contains(fs.getLong(1))) {
|
|
||||||
combined = true;
|
|
||||||
long lid = fs.getLong(1);
|
|
||||||
startNode = fs.getLong(2);
|
|
||||||
visitedWays.add(lid);
|
|
||||||
loadNodes(fs.getBytes(4), list);
|
|
||||||
ArrayList<Float> li = new ArrayList<Float>(list);
|
|
||||||
// remove first lat/lon point
|
|
||||||
wayNodes.remove(0);
|
|
||||||
wayNodes.remove(0);
|
|
||||||
li.addAll(wayNodes);
|
|
||||||
wayNodes = li;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
fs.close();
|
|
||||||
}
|
|
||||||
|
|
||||||
// combined end point
|
|
||||||
combined = true;
|
|
||||||
while (combined) {
|
|
||||||
combined = false;
|
|
||||||
startStat.setLong(1, endNode);
|
|
||||||
startStat.setLong(2, ltype);
|
|
||||||
startStat.setShort(3, (short) level);
|
|
||||||
startStat.setString(4, name);
|
|
||||||
ResultSet fs = startStat.executeQuery();
|
|
||||||
while (fs.next()) {
|
|
||||||
if (!visitedWays.contains(fs.getLong(1))) {
|
|
||||||
combined = true;
|
|
||||||
long lid = fs.getLong(1);
|
|
||||||
endNode = fs.getLong(3);
|
|
||||||
visitedWays.add(lid);
|
|
||||||
loadNodes(fs.getBytes(4), list);
|
|
||||||
for (int i = 2; i < list.size(); i++) {
|
|
||||||
wayNodes.add(list.get(i));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
fs.close();
|
|
||||||
}
|
|
||||||
List<Node> wNodes = new ArrayList<Node>();
|
|
||||||
int wNsize = wayNodes.size();
|
|
||||||
for (int i = 0; i < wNsize; i += 2) {
|
|
||||||
wNodes.add(new Node(wayNodes.get(i), wayNodes.get(i + 1), i == 0 ? startNode : endNode));
|
|
||||||
}
|
|
||||||
boolean skip = false;
|
|
||||||
boolean cycle = startNode == endNode;
|
|
||||||
boolean hasMulti = multiPolygonsWays[level].containsKey(id >> 3);
|
|
||||||
if(cycle || !hasMulti){
|
|
||||||
skip = checkForSmallAreas(wNodes, zoom - 1 + Math.min(zoomWaySmothness / 2, 3), 1, 4);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!skip) {
|
|
||||||
Way newWs = new Way(id);
|
|
||||||
MapAlgorithms.simplifyDouglasPeucker(wNodes, zoom - 1 + 8 + zoomWaySmothness, 3, newWs);
|
|
||||||
|
|
||||||
int type = decodeTypesFromOneLong(ltype);
|
|
||||||
insertBinaryMapRenderObjectIndex(mapTree[level], newWs, name,
|
|
||||||
id, type, typeUse, 0, restrictionsUse, false, false, false);
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
private boolean checkForSmallAreas(List<Node> nodes, int zoom, int minz, int maxz) {
|
|
||||||
int minX = Integer.MAX_VALUE;
|
|
||||||
int maxX = Integer.MIN_VALUE;
|
|
||||||
int minY = Integer.MAX_VALUE;
|
|
||||||
int maxY = Integer.MIN_VALUE;
|
|
||||||
int c = 0;
|
|
||||||
int nsize = nodes.size();
|
|
||||||
for (int i = 0; i < nsize; i++) {
|
|
||||||
if (nodes.get(i) != null) {
|
|
||||||
c++;
|
|
||||||
int x = (int) (MapUtils.getTileNumberX(zoom, nodes.get(i).getLongitude()) * 256d);
|
|
||||||
int y = (int) (MapUtils.getTileNumberY(zoom, nodes.get(i).getLatitude()) * 256d);
|
|
||||||
minX = Math.min(minX, x);
|
|
||||||
maxX = Math.max(maxX, x);
|
|
||||||
minY = Math.min(minY, y);
|
|
||||||
maxY = Math.max(maxY, y);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (c < 2) {
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
return ((maxX - minX) <= minz && (maxY - minY) <= maxz) ||
|
|
||||||
((maxX - minX) <= maxz && (maxY - minY) <= minz);
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
public void iterateMainEntity(Entity e, OsmDbAccessorContext ctx) throws SQLException {
|
|
||||||
if (e instanceof Way || e instanceof Node) {
|
|
||||||
// manipulate what kind of way to load
|
|
||||||
ctx.loadEntityData(e);
|
|
||||||
boolean oneway = "-1".equals(e.getTag(OSMTagKey.ONEWAY)); //$NON-NLS-1$
|
|
||||||
for (int i = 0; i < mapZooms.size(); i++) {
|
|
||||||
boolean inverse = i == 0 ? oneway : false;
|
|
||||||
writeBinaryEntityToMapDatabase(e, e.getId(), inverse, i);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -614,10 +599,11 @@ public class IndexVectorMapCreator extends AbstractIndexPartCreator {
 Rect rootBounds = calcBounds(root);
 if (rootBounds != null) {
 boolean last = nodeIsLastSubTree(rtree, rootIndex);
-writer.startWriteMapLevelIndex(mapZooms.getLevel(i).getMinZoom(), mapZooms.getLevel(i).getMaxZoom(), rootBounds
-.getMinX(), rootBounds.getMaxX(), rootBounds.getMinY(), rootBounds.getMaxY());
+writer.startWriteMapLevelIndex(mapZooms.getLevel(i).getMinZoom(), mapZooms.getLevel(i).getMaxZoom(),
+rootBounds.getMinX(), rootBounds.getMaxX(), rootBounds.getMinY(), rootBounds.getMaxY());
 if (last) {
-BinaryFileReference ref = writer.startMapTreeElement(rootBounds.getMinX(), rootBounds.getMaxX(), rootBounds.getMinY(), rootBounds.getMaxY(), true);
+BinaryFileReference ref = writer.startMapTreeElement(rootBounds.getMinX(), rootBounds.getMaxX(),
+rootBounds.getMinY(), rootBounds.getMaxY(), true);
 bounds.put(rootIndex, ref);
 }
 writeBinaryMapTree(root, rtree, writer, bounds);
@@ -629,18 +615,20 @@ public class IndexVectorMapCreator extends AbstractIndexPartCreator {
 }
 }
 // write map data blocks
-PreparedStatement selectData = mapConnection.prepareStatement("SELECT nodes, types, name FROM binary_map_objects WHERE id = ?"); //$NON-NLS-1$
+PreparedStatement selectData = mapConnection
+.prepareStatement("SELECT area, coordinates, innerPolygons, types, additionalTypes, name FROM binary_map_objects WHERE id = ?");
 for (int i = 0; i < mapZooms.size(); i++) {
 RTree rtree = mapTree[i];
 long rootIndex = rtree.getFileHdr().getRootIndex();
 rtree.Node root = rtree.getReadNode(rootIndex);
 Rect rootBounds = calcBounds(root);
 if (rootBounds != null) {
-writeBinaryMapBlock(root, rtree, writer, selectData, bounds);
+writeBinaryMapBlock(root, rtree, writer, selectData, bounds, new LinkedHashMap<String, Integer>(),
+new LinkedHashMap<MapRenderingTypes.MapRulType, String>());
 }
 }

 selectData.close();

 writer.endWriteMapIndex();
@@ -651,55 +639,72 @@ public class IndexVectorMapCreator extends AbstractIndexPartCreator {
 }

 private long convertBaseIdToGeneratedId(long baseId, int level) {
-if(level >= MAP_LEVELS_MAX){
+if (level >= MAP_LEVELS_MAX) {
 throw new IllegalArgumentException("Number of zoom levels " + level + " exceeds allowed maximum : " + MAP_LEVELS_MAX);
 }
 return ((baseId << MAP_LEVELS_POWER) | level) << 1;
 }

-public long convertGeneratedIdToObfWrite(long id){
+public long convertGeneratedIdToObfWrite(long id) {
 return (id >> (MAP_LEVELS_POWER)) + (id & 1);
 }

-public void writeBinaryMapBlock(rtree.Node parent, RTree r, BinaryMapIndexWriter writer, PreparedStatement selectData, TLongObjectHashMap<BinaryFileReference> bounds) throws IOException, RTreeException, SQLException {
+private static final char SPECIAL_CHAR = ((char) 0x60000);
+
+private void decodeNames(String name, Map<MapRulType, String> tempNames) {
+int i = name.indexOf(SPECIAL_CHAR);
+while (i != -1) {
+int n = name.indexOf(SPECIAL_CHAR, i + 2);
+char ch = name.charAt(i + 1);
+MapRulType rt = renderingTypes.getTypeByInternalId(ch);
+if (n == -1) {
+tempNames.put(rt, name.substring(i + 2));
+} else {
+tempNames.put(rt, name.substring(i + 2, n));
+}
+i = n;
+}
+}
+
+public void writeBinaryMapBlock(rtree.Node parent, RTree r, BinaryMapIndexWriter writer, PreparedStatement selectData,
+TLongObjectHashMap<BinaryFileReference> bounds, Map<String, Integer> tempStringTable, Map<MapRulType, String> tempNames)
+throws IOException, RTreeException, SQLException {
 Element[] e = parent.getAllElements();
-Map<String, Integer> stringTable = null;
 MapDataBlock.Builder dataBlock = null;
 BinaryFileReference ref = bounds.get(parent.getNodeIndex());
 long baseId = 0;
 for (int i = 0; i < parent.getTotalElements(); i++) {
 Rect re = e[i].getRect();
 if (e[i].getElementType() == rtree.Node.LEAF_NODE) {
 long id = ((LeafElement) e[i]).getPtr();
 selectData.setLong(1, id);
 ResultSet rs = selectData.executeQuery();
 if (rs.next()) {
 long cid = convertGeneratedIdToObfWrite(id);
-if(dataBlock == null){
+if (dataBlock == null) {
 baseId = cid;
 dataBlock = writer.createWriteMapDataBlock(baseId);
+tempStringTable.clear();
 }
-renderingTypes.getEncodingRuleTypes()
-// mapConnection.prepareStatement("SELECT nodes, types, name FROM binary_map_objects WHERE id = ?");
-writer.writeMapData(cid - id, rs.getBytes(1),
-rs.getBytes(2), rs.getString(3));
+tempNames.clear();
+decodeNames(rs.getString(6), tempNames);
+writer.writeMapData(cid - baseId, re.getMinX(), re.getMinY(), rs.getBoolean(1), rs.getBytes(2), rs.getBytes(3),
+rs.getBytes(4), rs.getBytes(5), tempNames, tempStringTable, dataBlock);
 } else {
 logMapDataWarn.error("Something goes wrong with id = " + id); //$NON-NLS-1$
 }
 }
 }
-if(dataBlock != null){
-writer.writeMapDataBlock(dataBlock, stringTable, ref);
+if (dataBlock != null) {
+writer.writeMapDataBlock(dataBlock, tempStringTable, ref);
 }
 for (int i = 0; i < parent.getTotalElements(); i++) {
-Rect re = e[i].getRect();
 if (e[i].getElementType() != rtree.Node.LEAF_NODE) {
 long ptr = ((NonLeafElement) e[i]).getPtr();
 rtree.Node ns = r.getReadNode(ptr);
-writeBinaryMapBlock(ns, r, writer, selectData, bounds);
+writeBinaryMapBlock(ns, r, writer, selectData, bounds, tempStringTable, tempNames);
 writer.endWriteMapTreeElement();
 }
 }
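decodeNames above reverses a simple multiplexing scheme: several name-tag values are stored in one string column, each value prefixed by a marker character followed by a character whose code is the internal id of the corresponding rule (looked up via getTypeByInternalId). A self-contained sketch of that encode/decode pair is shown below; the separator value and the integer-keyed map are stand-ins, since the real code keys the map by MapRulType and registers the ids in MapRenderingTypes. It only works as long as neither the ids nor the values contain the separator character, and it assumes each id fits into a single char.

import java.util.LinkedHashMap;
import java.util.Map;

// Sketch of packing several name values into one string column and back.
public class NameEncodingSketch {

    static final char SEPARATOR = '\u0001'; // stand-in for the special marker char

    static String encode(Map<Integer, String> namesByRuleId) {
        StringBuilder sb = new StringBuilder();
        for (Map.Entry<Integer, String> e : namesByRuleId.entrySet()) {
            // marker, then the rule id encoded as a single char, then the value
            sb.append(SEPARATOR).append((char) e.getKey().intValue()).append(e.getValue());
        }
        return sb.toString();
    }

    static Map<Integer, String> decode(String packed) {
        Map<Integer, String> out = new LinkedHashMap<>();
        int i = packed.indexOf(SEPARATOR);
        while (i != -1) {
            int next = packed.indexOf(SEPARATOR, i + 2);
            int ruleId = packed.charAt(i + 1);
            String value = next == -1 ? packed.substring(i + 2) : packed.substring(i + 2, next);
            out.put(ruleId, value);
            i = next;
        }
        return out;
    }

    public static void main(String[] args) {
        Map<Integer, String> names = new LinkedHashMap<>();
        names.put(1, "Main Street");
        names.put(2, "Old Road");
        System.out.println(decode(encode(names))); // {1=Main Street, 2=Old Road}
    }
}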
@@ -746,8 +751,8 @@ public class IndexVectorMapCreator extends AbstractIndexPartCreator {
 return r;
 }

-public void createDatabaseStructure(Connection mapConnection, DBDialect dialect,
-String rtreeMapIndexNonPackFileName) throws SQLException, IOException {
+public void createDatabaseStructure(Connection mapConnection, DBDialect dialect, String rtreeMapIndexNonPackFileName)
+throws SQLException, IOException {
 createMapIndexStructure(mapConnection);
 this.mapConnection = mapConnection;
 mapBinaryStat = createStatementMapBinaryInsert(mapConnection);
@@ -770,31 +775,31 @@ public class IndexVectorMapCreator extends AbstractIndexPartCreator {
 pStatements.put(mapLowLevelBinaryStat, 0);
 }

-private void createMapIndexStructure(Connection conn) throws SQLException{
+private void createMapIndexStructure(Connection conn) throws SQLException {
 Statement stat = conn.createStatement();
-stat.executeUpdate("create table binary_map_objects (id bigint primary key, name varchar(1024), " +
-"types binary, restrictions binary, nodes binary, highway int)");
+stat.executeUpdate("create table binary_map_objects (id bigint primary key, name varchar(4096), "
++ "area smallint, types binary, additionalTypes binary, coordinates binary, innerPolygons binary)");
 stat.executeUpdate("create index binary_map_objects_ind on binary_map_objects (id)");

-stat.executeUpdate("create table low_level_map_objects (id bigint primary key, start_node bigint, " +
-"end_node bigint, name varchar(1024), nodes binary, type bigint, level smallint)");
+stat.executeUpdate("create table low_level_map_objects (id bigint primary key, start_node bigint, "
++ "end_node bigint, name varchar(1024), nodes binary, type bigint, level smallint)");
 stat.executeUpdate("create index low_level_map_objects_ind on low_level_map_objects (id)");
 stat.executeUpdate("create index low_level_map_objects_ind_st on low_level_map_objects (start_node, type)");
 stat.executeUpdate("create index low_level_map_objects_ind_end on low_level_map_objects (end_node, type)");
 stat.close();
 }

-private PreparedStatement createStatementMapBinaryInsert(Connection conn) throws SQLException{
-return conn.prepareStatement("insert into binary_map_objects(id, name, types, restrictions, nodes, highway) values(?, ?, ?, ?, ?, ?)");
+private PreparedStatement createStatementMapBinaryInsert(Connection conn) throws SQLException {
+return conn
+.prepareStatement("insert into binary_map_objects(id, area, coordinates, innerPolygons, types, additionalTypes, name) values(?, ?, ?, ?, ?, ?, ?)");
 }

-private PreparedStatement createStatementLowLevelMapBinaryInsert(Connection conn) throws SQLException{
-return conn.prepareStatement("insert into low_level_map_objects(id, start_node, end_node, name, nodes, type, level) values(?, ?, ?, ?, ?, ?, ?)");
+private PreparedStatement createStatementLowLevelMapBinaryInsert(Connection conn) throws SQLException {
+return conn
+.prepareStatement("insert into low_level_map_objects(id, start_node, end_node, name, nodes, type, level) values(?, ?, ?, ?, ?, ?, ?)");
 }

-private void insertLowLevelMapBinaryObject(int level,long types, long id, List<Node> nodes, String name) throws SQLException{
+private void insertLowLevelMapBinaryObject(int level, long types, long id, List<Node> nodes, String name) throws SQLException {
 boolean first = true;
 long firstId = -1;
 long lastId = -1;
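For reference, the reworked binary_map_objects layout and the matching insert and select statements above can be exercised in isolation with plain JDBC. The snippet below is a minimal sketch, assuming an SQLite JDBC driver on the classpath and an in-memory database; the real creator goes through its DBDialect abstraction and binds real serialized coordinate, type and name data rather than the dummy byte arrays used here.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Statement;

// Minimal round trip over the new column layout (illustrative only).
public class BinaryMapObjectsSchemaSketch {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:sqlite::memory:")) {
            try (Statement stat = conn.createStatement()) {
                stat.executeUpdate("create table binary_map_objects (id bigint primary key, name varchar(4096), "
                        + "area smallint, types binary, additionalTypes binary, coordinates binary, innerPolygons binary)");
                stat.executeUpdate("create index binary_map_objects_ind on binary_map_objects (id)");
            }
            // Parameter order mirrors createStatementMapBinaryInsert in the diff.
            try (PreparedStatement insert = conn.prepareStatement(
                    "insert into binary_map_objects(id, area, coordinates, innerPolygons, types, additionalTypes, name) "
                            + "values(?, ?, ?, ?, ?, ?, ?)")) {
                insert.setLong(1, 42L);
                insert.setBoolean(2, false);                 // area flag
                insert.setBytes(3, new byte[] { 1, 2, 3, 4 }); // packed coordinates (dummy)
                insert.setBytes(4, new byte[0]);             // inner polygons (none)
                insert.setBytes(5, new byte[] { 0, 7 });     // types (dummy)
                insert.setBytes(6, new byte[0]);             // additional types
                insert.setString(7, "example");
                insert.executeUpdate();
            }
            // Column order mirrors what writeBinaryMapBlock reads back.
            try (PreparedStatement select = conn.prepareStatement(
                    "SELECT area, coordinates, innerPolygons, types, additionalTypes, name FROM binary_map_objects WHERE id = ?")) {
                select.setLong(1, 42L);
                try (ResultSet rs = select.executeQuery()) {
                    while (rs.next()) {
                        System.out.println(rs.getString(6) + " area=" + rs.getBoolean(1));
                    }
                }
            }
        }
    }
}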
@@ -814,7 +819,7 @@ public class IndexVectorMapCreator extends AbstractIndexPartCreator {
 } catch (IOException e) {
 throw new IllegalStateException(e);
 }
-if(firstId == -1){
+if (firstId == -1) {
 return;
 }
 // conn.prepareStatement("insert into binary_map_objects(id, name, types, restrictions, nodes, highway) values(?, ?, ?, ?, ?, ?)");
@@ -886,7 +891,8 @@ public class IndexVectorMapCreator extends AbstractIndexPartCreator {
 throw new IllegalStateException(es);
 }
 if (init) {
-// conn.prepareStatement("insert into binary_map_objects(id, name, types, restrictions, nodes, highway) values(?, ?, ?, ?, ?, ?)");
+TODO;
+// conn.prepareStatement("insert into binary_map_objects(id, area, coordinates, innerPolygons, types, additionalTypes, name) values(?, ?, ?, ?, ?, ?, ?)");
 mapBinaryStat.setLong(1, id);
 mapBinaryStat.setString(2, name);
 mapBinaryStat.setBytes(3, btypes.toByteArray());
@@ -919,7 +925,8 @@ public class IndexVectorMapCreator extends AbstractIndexPartCreator {
 }
 }

-public void commitAndCloseFiles(String rTreeMapIndexNonPackFileName, String rTreeMapIndexPackFileName, boolean deleteDatabaseIndexes) throws IOException, SQLException {
+public void commitAndCloseFiles(String rTreeMapIndexNonPackFileName, String rTreeMapIndexPackFileName, boolean deleteDatabaseIndexes)
+throws IOException, SQLException {

 // delete map rtree files
 if (mapTree != null) {
@@ -5,6 +5,7 @@ import gnu.trove.list.array.TIntArrayList;
 import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStream;
+import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Iterator;
 import java.util.LinkedHashMap;
@@ -58,6 +59,7 @@ public class MapRenderingTypes {

 // stored information to convert from osm tags to int type
 private Map<String, MapRulType> types = null;
+private List<MapRulType> typeList = new ArrayList<MapRenderingTypes.MapRulType>();

 private Map<AmenityType, Map<String, String>> amenityTypeNameToTagVal = null;
 private Map<String, AmenityType> amenityNameToType = null;
@@ -81,67 +83,75 @@ public class MapRenderingTypes {
 public Map<String, MapRulType> getEncodingRuleTypes(){
 if (types == null) {
 types = new LinkedHashMap<String, MapRulType>();
-nameRuleType = new MapRulType(types.size());
+typeList.clear();
+nameRuleType = new MapRulType();
 nameRuleType.tag = "name";
 nameRuleType.additional = true;
-types.put(constructRuleKey("name", null), nameRuleType);
+registerRuleType("name", null, nameRuleType);

 init();
 }
 return types;
 }

+public MapRulType getTypeByInternalId(int id) {
+return typeList.get(id);
+}
+
+private void registerRuleType(String tag, String val, MapRulType rt){
+rt.id = types.size();
+String keyVal = constructRuleKey(tag, val);
+if(types.containsKey(keyVal)){
+throw new RuntimeException("Duplicate " + keyVal);
+}
+types.put(keyVal, rt);
+typeList.add(rt);
+}
+
 // if type equals 0 no need to save that point
-public int encodeEntityWithType(Entity e, int zoom, boolean multipolygon, TIntArrayList types) {
-Map<String, MapRulType> rules = getEncodingRuleTypes();
-types.clear();
-if ("coastline".equals(e.getTag(OSMTagKey.NATURAL))) { //$NON-NLS-1$
-multipolygon = false;
-}
-
-boolean point = e instanceof Node;
-boolean polygon = multipolygon || "yes".equals(e.getTag("area"));
-if (!point && !polygon) {
-// determining area or path
-boolean highway = e.getTag("highway") != null; //$NON-NLS-1$
-if (!highway) {
-List<Long> ids = ((Way) e).getNodeIds();
-if (ids.size() > 1) {
-polygon = ((long) ids.get(0) == (long) ids.get(ids.size() - 1));
-}
-}
-}
+public boolean encodeEntityWithType(Entity e, int zoom, boolean multipolygon, TIntArrayList outTypes,
+TIntArrayList outaddTypes, Map<MapRulType, String> namesToEncode, List<MapRulType> tempList) {
+Map<String, MapRulType> types = getEncodingRuleTypes();
+outTypes.clear();
+outaddTypes.clear();
+namesToEncode.clear();
+tempList.clear();
+tempList.add(nameRuleType);
+
+boolean area = multipolygon || "yes".equals(e.getTag("area"));

 Collection<String> tagKeySet = e.getTagKeySet();
-int type = -1;
 for (String tag : tagKeySet) {
 String val = e.getTag(tag);
-MapRulType rType = rules.get(constructRuleKey(tag, val));
+MapRulType rType = types.get(constructRuleKey(tag, val));
 if (rType == null) {
-rType = rules.get(constructRuleKey(tag, null));
+rType = types.get(constructRuleKey(tag, null));
 }
 if (rType != null) {
 if (rType.minzoom > zoom) {
 continue;
 }
-boolean accept;
-if (point) {
-accept = rType.point;
-} else if (polygon) {
-accept = rType.point || rType.polygon;
-} else {
-accept = rType.polyline;
-}
-if (accept) {
 rType.freq++;
-types.add(rType.id);
-type = point ? POINT_TYPE : (polygon ? POLYGON_TYPE : POLYLINE_TYPE);
-}
+if (rType.names != null) {
+for (int i = 0; i < rType.names.length; i++) {
+tempList.add(rType.names[i]);
+}
+}
+
+if (rType.additional) {
+outaddTypes.add(rType.id);
+} else {
+outTypes.add(rType.id);
+}
 }
 }
-return type;
+for(MapRulType mt : tempList){
+String val = e.getTag(mt.tag);
+if(val != null && val.length() > 0){
+namesToEncode.put(mt, val);
+}
+}
+return area;
 }
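The registerRuleType/getTypeByInternalId pair added above gives every (tag, value) rule a sequential internal id, rejects duplicates, and lets an id be mapped back to its rule, which is what the name encoding on the writer side relies on. A stripped-down, standalone sketch of that registry follows; the class names and the key format are assumptions for illustration, since constructRuleKey's exact format is not shown in this diff.

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

// Standalone sketch of a (tag, value) rule registry with sequential internal ids.
public class RuleRegistrySketch {

    static class Rule {
        final String tag;
        final String value; // null means "any value for this tag"
        int id;
        Rule(String tag, String value) { this.tag = tag; this.value = value; }
    }

    private final Map<String, Rule> rulesByKey = new LinkedHashMap<>();
    private final List<Rule> rulesById = new ArrayList<>();

    // Hypothetical key format; the real constructRuleKey may differ.
    private static String key(String tag, String value) {
        return value == null ? tag : tag + "$" + value;
    }

    public void register(Rule rule) {
        String k = key(rule.tag, rule.value);
        if (rulesByKey.containsKey(k)) {
            throw new IllegalStateException("Duplicate " + k);
        }
        rule.id = rulesById.size();
        rulesByKey.put(k, rule);
        rulesById.add(rule);
    }

    public Rule byInternalId(int id) {
        return rulesById.get(id);
    }

    public static void main(String[] args) {
        RuleRegistrySketch registry = new RuleRegistrySketch();
        registry.register(new Rule("name", null));
        registry.register(new Rule("highway", "residential"));
        System.out.println(registry.byInternalId(1).tag); // highway
    }
}

With the point/polyline/polygon flags removed from the rules, encodeEntityWithType now only reports type ids, additional type ids, names, and a single area flag, which matches the commit title.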
@@ -237,15 +247,15 @@ public class MapRenderingTypes {
 poiParentPrefix = attributes.getValue("poi:prefix");
 String tag = attributes.getValue("poi:tag");
 if (tag != null) {
-MapRulType rtype = new MapRulType(types.size());
+MapRulType rtype = new MapRulType();
 rtype.poiCategory = AmenityType.valueOf(poiParentCategory.toUpperCase());
 rtype.poiSpecified = true;
 rtype.poiPrefix = poiParentPrefix;
 rtype.tag = tag;
-types.put(constructRuleKey(tag, null), rtype);
+registerRuleType(tag, null, rtype);
 }
 } else if (name.equals("type")) { //$NON-NLS-1$
-MapRulType rtype = new MapRulType(types.size());
+MapRulType rtype = new MapRulType();
 String val = attributes.getValue("minzoom"); //$NON-NLS-1$
 rtype.minzoom = 15;
 if (val != null) {
@@ -256,15 +266,7 @@ public class MapRenderingTypes {
 if (rtype.value != null && rtype.value.length() == 0) { //$NON-NLS-1$
 rtype.value = null;
 }
-String keyVal = constructRuleKey(rtype.tag, rtype.value);
-if(types.containsKey(keyVal)){
-throw new RuntimeException("Duplicate " + keyVal);
-}
-types.put(keyVal, rtype);
-
-rtype.polygon = Boolean.parseBoolean(attributes.getValue("polygon")); //$NON-NLS-1$
-rtype.polyline= Boolean.parseBoolean(attributes.getValue("polyline")); //$NON-NLS-1$
-rtype.point = Boolean.parseBoolean(attributes.getValue("point")); //$NON-NLS-1$
+registerRuleType(rtype.tag, rtype.value, rtype);
 rtype.additional = Boolean.parseBoolean(attributes.getValue("additional")); //$NON-NLS-1$
 String v = attributes.getValue("nameTags");
 if(v != null) {
@@ -273,10 +275,10 @@ public class MapRenderingTypes {
 for(int i=0; i<names.length; i++){
 MapRulType mt = types.get(constructRuleKey(names[i], null));
 if(mt == null){
-mt = new MapRulType(types.size());
+mt = new MapRulType();
 mt.tag = names[i];
 mt.additional = true;
-types.put(constructRuleKey(names[i], null), mt);
+registerRuleType(names[i], null, mt);
 }
 rtype.names[i] = mt;
 }
@@ -406,7 +408,8 @@ public class MapRenderingTypes {
 boolean additional;
 MapRulType targetTagValue;

-final int id;
+// inner id
+private int id;
 int freq;
 int targetId;
@@ -414,16 +417,8 @@ public class MapRenderingTypes {
 AmenityType poiCategory;
 boolean poiSpecified;

-boolean polyline;
-boolean point;
-boolean polygon;
-
-public MapRulType(int id){
-this.id = id;
-}
-
-public boolean isMapIndexed(){
-return polygon || polyline || polygon;
+public MapRulType(){
 }

 public String poiPrefix(){
File diff suppressed because it is too large