Generate indexes in memory

parent 00213e62ce
commit 5f35a11893

9 changed files with 82 additions and 63 deletions
DataTileManager.java

@@ -1,26 +1,22 @@
 package net.osmand.data;
 
+import gnu.trove.map.hash.TLongObjectHashMap;
 
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
 import net.osmand.osm.MapUtils;
 
 /**
  *
  * @param <T> - object to store in that manager
  */
 public class DataTileManager<T> {
 
-    private int zoom = 15;
+    private final int zoom;
 
     /**
      * map for objects stores as 'xTile_yTile' -> List<T>
      */
-    private Map<String, List<T>> objects = new HashMap<String, List<T>>();
+    private TLongObjectHashMap<List<T>> objects = new TLongObjectHashMap<List<T>>();
 
+    public DataTileManager(){
+        zoom = 15;
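The key change above: tile buckets used to live in a java.util HashMap keyed by "xTile_yTile" strings, and now live in Trove's TLongObjectHashMap keyed by a primitive long, so no String key is built (and no Long is boxed) for every lookup while the index is held in memory. Below is a minimal sketch, not part of the commit, using only the calls that also appear in this diff (put, get, containsKey, valueCollection) and assuming GNU Trove is on the classpath; the class name TileKeyExample is illustrative.

// Minimal sketch (not part of the commit): primitive-keyed tile map vs. string keys.
// Assumes GNU Trove (gnu.trove.map.hash.TLongObjectHashMap) is on the classpath.
import gnu.trove.map.hash.TLongObjectHashMap;

import java.util.ArrayList;
import java.util.List;

public class TileKeyExample {
    public static void main(String[] args) {
        // Old approach: every lookup builds an "xTile_yTile" String key.
        // New approach: the key is a primitive long, so no per-lookup allocation.
        TLongObjectHashMap<List<String>> tiles = new TLongObjectHashMap<List<String>>();
        long key = (5L << 15) + 7L; // tileX=5, tileY=7 at zoom 15, packed as in evTile()
        if (!tiles.containsKey(key)) {
            tiles.put(key, new ArrayList<String>());
        }
        tiles.get(key).add("some object");
        int count = 0;
        for (List<String> l : tiles.valueCollection()) {
            count += l.size();
        }
        System.out.println(count); // prints 1
    }
}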
@@ -33,30 +29,18 @@ public class DataTileManager<T> {
     public int getZoom() {
         return zoom;
     }
 
-    public void setZoom(int zoom) {
-        // it is required to reindex all stored objects
-        if(!isEmpty()){
-            throw new UnsupportedOperationException();
-        }
-        this.zoom = zoom;
-    }
 
     public boolean isEmpty(){
-        for(String s : objects.keySet()){
-            if(!objects.get(s).isEmpty()){
-                return false;
-            }
-        }
-        return true;
+        return getObjectsCount() == 0;
     }
 
+    @SuppressWarnings("rawtypes")
     public int getObjectsCount(){
         int x = 0;
-        for(String s : objects.keySet()){
-            x += objects.get(s).size();
+        for(List s : objects.valueCollection()){
+            x += s.size();
         }
 
         return x;
     }
@@ -66,10 +50,11 @@ public class DataTileManager<T> {
         }
     }
 
+    @SuppressWarnings({ "rawtypes", "unchecked" })
     public List<T> getAllObjects(){
         List<T> l = new ArrayList<T>();
-        for(String s : objects.keySet()){
-            l.addAll(objects.get(s));
+        for(List s : objects.valueCollection()){
+            l.addAll(s);
         }
         return l;
     }
@@ -146,27 +131,29 @@ public class DataTileManager<T> {
         return result;
     }
 
-    private String evTile(int tileX, int tileY){
-        return tileX +"_"+tileY; //$NON-NLS-1$
+    private long evTile(int tileX, int tileY){
+        long tx = tileX;
+        long ty = tileY;
+        return ((tx) << zoom) + ty;
     }
 
-    public String evaluateTile(double latitude, double longitude){
+    public long evaluateTile(double latitude, double longitude){
         int tileX = (int) MapUtils.getTileNumberX(zoom, longitude);
         int tileY = (int) MapUtils.getTileNumberY(zoom, latitude);
         return evTile(tileX, tileY);
     }
 
     public void unregisterObject(double latitude, double longitude, T object){
-        String tile = evaluateTile(latitude, longitude);
+        long tile = evaluateTile(latitude, longitude);
         if(objects.containsKey(tile)){
             objects.get(tile).remove(object);
         }
     }
 
-    public String registerObject(double latitude, double longitude, T object){
-        String tile = evaluateTile(latitude, longitude);
+    public long registerObject(double latitude, double longitude, T object){
+        long tile = evaluateTile(latitude, longitude);
         if(!objects.containsKey(tile)){
             objects.put(tile, new ArrayList<T>());
         }
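evTile() now packs the two tile coordinates into one long: tileX is shifted left by zoom bits and tileY is added. Since tile numbers at zoom z lie in [0, 2^z), the low zoom bits hold tileY and the high bits hold tileX, so the key is unique and reversible. The small sketch below is not part of the commit; pack, unpackX and unpackY are illustrative names.

// Illustrative sketch of the packed tile key used above (not part of the commit).
public class PackedTileKey {
    static long pack(int tileX, int tileY, int zoom) {
        // same shape as evTile(): high bits = x, low zoom bits = y
        return ((long) tileX << zoom) + tileY;
    }

    static int unpackX(long key, int zoom) {
        return (int) (key >> zoom);
    }

    static int unpackY(long key, int zoom) {
        return (int) (key & ((1L << zoom) - 1));
    }

    public static void main(String[] args) {
        int zoom = 15;
        long key = pack(18432, 10753, zoom);
        System.out.println(unpackX(key, zoom) + " " + unpackY(key, zoom)); // 18432 10753
    }
}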
DBDialect.java

@@ -20,7 +20,8 @@ public enum DBDialect {
     DERBY,
     H2,
     NOSQL,
-    SQLITE;
+    SQLITE,
+    SQLITE_IN_MEMORY;
 
     public void deleteTableIfExists(String table, Statement stat) throws SQLException {
         if(this == DERBY){
@@ -87,14 +88,15 @@ public enum DBDialect {
                 throw new SQLException(status.ToString());
             }
             return dbAccessor;
-        } else if (DBDialect.SQLITE == this) {
+        } else if (DBDialect.SQLITE == this || DBDialect.SQLITE_IN_MEMORY == this) {
             try {
                 Class.forName("org.sqlite.JDBC");
             } catch (ClassNotFoundException e) {
                 log.error("Illegal configuration", e);
                 throw new IllegalStateException(e);
             }
-            Connection connection = DriverManager.getConnection("jdbc:sqlite:" + fileName);
+            Connection connection = DriverManager.getConnection("jdbc:sqlite:" + (DBDialect.SQLITE_IN_MEMORY == this? ":memory:":
+                    fileName));
             Statement statement = connection.createStatement();
             statement.executeUpdate("PRAGMA synchronous = 0");
             //no journaling, saves some I/O access, but database can go corrupt
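With SQLITE_IN_MEMORY the JDBC URL becomes "jdbc:sqlite::memory:", so the whole working database lives in RAM and disappears when the connection is closed. The standalone sketch below shows what that branch effectively does; it assumes the same org.sqlite.JDBC driver the project already loads, and the table and values are dummies, not the real OsmAnd schema.

// Standalone sketch of an in-memory SQLite connection (assumption: org.sqlite.JDBC
// driver is on the classpath, as in the code above).
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;

public class InMemorySqliteExample {
    public static void main(String[] args) throws SQLException, ClassNotFoundException {
        Class.forName("org.sqlite.JDBC");
        // "jdbc:sqlite::memory:" keeps the database entirely in RAM; it is discarded
        // when this connection closes, so results must be written out before that.
        Connection connection = DriverManager.getConnection("jdbc:sqlite::memory:");
        Statement statement = connection.createStatement();
        statement.executeUpdate("PRAGMA synchronous = 0"); // same tuning as the on-disk path
        statement.executeUpdate("CREATE TABLE demo (id INTEGER PRIMARY KEY, lat DOUBLE, lon DOUBLE)"); // dummy table
        statement.executeUpdate("INSERT INTO demo VALUES (1, 53.9, 27.56)");
        statement.close();
        connection.close();
    }
}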
IndexCreator.java

@@ -692,7 +692,7 @@ public class IndexCreator {
                 deleteDatabaseIndexes);
         indexMapCreator.commitAndCloseFiles(getRTreeMapIndexNonPackFileName(), getRTreeMapIndexPackFileName(),
                 deleteDatabaseIndexes);
-        indexRouteCreator.commitAndCloseFiles(getRTreeRouteIndexPackFileName(), getRTreeRouteIndexPackFileName(),
+        indexRouteCreator.commitAndCloseFiles(getRTreeRouteIndexNonPackFileName(), getRTreeRouteIndexPackFileName(),
                 deleteDatabaseIndexes);
 
         if (mapConnection != null) {
IndexRouteCreator.java

@@ -238,17 +238,17 @@ public class IndexRouteCreator extends AbstractIndexPartCreator {
         if (routeTree != null) {
             RandomAccessFile file = routeTree.getFileHdr().getFile();
             file.close();
-            if (rTreeMapIndexNonPackFileName != null) {
-                File f = new File(rTreeMapIndexNonPackFileName);
-                if (f.exists() && deleteDatabaseIndexes) {
-                    f.delete();
-                }
-            }
+            if (rTreeMapIndexNonPackFileName != null) {
+                File f = new File(rTreeMapIndexNonPackFileName);
+                if (f.exists() && deleteDatabaseIndexes) {
+                    f.delete();
+                }
-            if (rTreeMapIndexPackFileName != null) {
-                File f = new File(rTreeMapIndexPackFileName);
-                if (f.exists() && deleteDatabaseIndexes) {
-                    f.delete();
-                }
-            }
+            if (rTreeMapIndexPackFileName != null) {
+                File f = new File(rTreeMapIndexPackFileName);
+                if (f.exists() && deleteDatabaseIndexes) {
+                    f.delete();
+                }
+            }
         closeAllPreparedStatements();
MinskTransReader.java

@@ -224,8 +224,7 @@ public class MinskTransReader {
         OsmBaseStorage storage = new OsmBaseStorage();
 
         final Map<String, Relation> definedRoutes = new HashMap<String, Relation>();
-        final DataTileManager<Node> busStops = new DataTileManager<Node>();
-        busStops.setZoom(17);
+        final DataTileManager<Node> busStops = new DataTileManager<Node>(17);
         storage.getFilters().add(new IOsmStorageFilter(){
 
             @Override
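Because setZoom() was removed and zoom is now final, callers such as MinskTransReader pass the zoom to the constructor instead. A hedged usage sketch follows, built only from methods visible in this diff (registerObject, unregisterObject, getObjectsCount, getAllObjects, isEmpty); the coordinates and stop names are made up for illustration.

// Usage sketch for the constructor-based API (zoom fixed at construction time).
import net.osmand.data.DataTileManager;

public class BusStopIndexExample {
    public static void main(String[] args) {
        // zoom 17 means small tiles, suitable for dense point data such as bus stops
        DataTileManager<String> busStops = new DataTileManager<String>(17);
        busStops.registerObject(53.9045, 27.5615, "stop A");
        busStops.registerObject(53.9060, 27.5550, "stop B");
        System.out.println(busStops.getObjectsCount()); // 2
        System.out.println(busStops.getAllObjects());   // [stop A, stop B] (order not guaranteed)
        busStops.unregisterObject(53.9045, 27.5615, "stop A");
        System.out.println(busStops.isEmpty());         // false, one stop left
    }
}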
MapClusterLayer.java

@@ -87,8 +87,7 @@ public class MapClusterLayer implements MapPanelLayer {
         try {
             List<RouteSegment> ways = clustering(latitude, longitude);
             if (!ANIMATE_CLUSTERING) {
-                DataTileManager<Way> points = new DataTileManager<Way>();
-                points.setZoom(11);
+                DataTileManager<Way> points = new DataTileManager<Way>(11);
                 for (RouteSegment s : ways) {
                     Way w = new Way(-1);
                     for (int i = 0; i < s.getRoad().getPointsLength(); i++) {
@@ -132,8 +131,7 @@ public class MapClusterLayer implements MapPanelLayer {
                 log.info("ROAD TO START " + highway + " " + //road.getName() + " "
                         + road.id);
             }
-            final DataTileManager<Way> points = new DataTileManager<Way>();
-            points.setZoom(11);
+            final DataTileManager<Way> points = new DataTileManager<Way>(11);
             map.setPoints(points);
 
             ctx.setVisitor(new RouteSegmentVisitor() {
MapRouterLayer.java

@@ -176,8 +176,7 @@ public class MapRouterLayer implements MapPanelLayer {
             public void run() {
                 List<Way> ways = selfRoute(startRoute, endRoute, intermediates, null);
                 if (ways != null) {
-                    DataTileManager<Way> points = new DataTileManager<Way>();
-                    points.setZoom(11);
+                    DataTileManager<Way> points = new DataTileManager<Way>(11);
                     for (Way w : ways) {
                         LatLon n = w.getLatLon();
                         points.registerObject(n.getLatitude(), n.getLongitude(), w);
@@ -205,8 +204,7 @@ public class MapRouterLayer implements MapPanelLayer {
             public void run() {
                 List<Way> ways = selfRoute(startRoute, endRoute, intermediates, previousRoute);
                 if (ways != null) {
-                    DataTileManager<Way> points = new DataTileManager<Way>();
-                    points.setZoom(11);
+                    DataTileManager<Way> points = new DataTileManager<Way>(11);
                     for (Way w : ways) {
                         LatLon n = w.getLatLon();
                         points.registerObject(n.getLatitude(), n.getLongitude(), w);
@@ -229,8 +227,7 @@ public class MapRouterLayer implements MapPanelLayer {
             @Override
             public void run() {
                 List<Way> ways = route_YOURS(startRoute, endRoute);
-                DataTileManager<Way> points = new DataTileManager<Way>();
-                points.setZoom(11);
+                DataTileManager<Way> points = new DataTileManager<Way>(11);
                 for(Way w : ways){
                     LatLon n = w.getLatLon();
                     points.registerObject(n.getLatitude(), n.getLongitude(), w);
@@ -250,8 +247,7 @@ public class MapRouterLayer implements MapPanelLayer {
             @Override
             public void run() {
                 List<Way> ways = route_CloudMate(startRoute, endRoute);
-                DataTileManager<Way> points = new DataTileManager<Way>();
-                points.setZoom(11);
+                DataTileManager<Way> points = new DataTileManager<Way>(11);
                 for (Way w : ways) {
                     LatLon n = w.getLatLon();
                     points.registerObject(n.getLatitude(), n.getLongitude(), w);
@@ -644,8 +640,7 @@ public class MapRouterLayer implements MapPanelLayer {
                 }
             }
 
-            final DataTileManager<Entity> points = new DataTileManager<Entity>();
-            points.setZoom(11);
+            final DataTileManager<Entity> points = new DataTileManager<Entity>(11);
             map.setPoints(points);
             ctx.setVisitor(new RouteSegmentVisitor() {
 
build-scripts/indexes-batch-generate-inmem.xml (new file, 24 lines)

@@ -0,0 +1,24 @@
+<?xml version="1.0" encoding="utf-8"?>
+<batch_process>
+    <process_attributes mapZooms="" renderingTypesFile="" zoomWaySmoothness="2"
+        osmDbDialect="sqlite_in_memory" mapDbDialect="sqlite_in_memory"/>
+
+    <!-- There are 3 subprocesses:
+        1. Download fresh osm files from servers to 'directory_for_osm_files' (overriding existing ones).
+        2. Generate index files from all files in 'directory_for_osm_files' and put all indexes into 'directory_for_index_files'.
+        3. Upload index files from 'directory_for_index_files' to googlecode.
+           If the directory 'directory_for_uploaded_files' is specified, all uploaded files will be moved to it.
+        All these subprocesses can be run independently! So you can create some files, check them, and after that try to upload them to googlecode,
+        or you can upload any file you have to googlecode (just put it into 'directory_for_index_files').
+    -->
+    <!-- zoomWaySmoothness - 1-4, typical mapZooms - 8-10;11-12;13-14;15 -->
+    <process directory_for_osm_files=".work/osm" directory_for_index_files="/var/lib/jenkins/indexes" directory_for_generation=".work"
+        skipExistingIndexesAt="/var/lib/jenkins/indexes/uploaded" indexPOI="true" indexMap="true" indexRouting="true" indexTransport="true" indexAddress="true">
+        <!-- Add wget="C:/Program Files/GNUWin32/bin/wget.exe" to process, to use wget for download.
+            On Linux systems, if wget is in your path it can be wget="wget", or you can make your own script with a wget command:
+            wget="/path/to/script/wget.sh"
+            The wget parameter enabled by default is &-&-read-timeout=5, which prevents downloads from the cloudmade/geofabrik servers from hanging.
+        -->
+
+    </process>
+</batch_process>
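The batch file selects the new dialect purely by name (osmDbDialect/mapDbDialect = "sqlite_in_memory"). How IndexBatchCreator turns that string into the DBDialect enum is not part of this diff; the sketch below assumes a simple case-insensitive valueOf lookup and mirrors the enum locally, for illustration only.

// Assumption: dialect names from the XML ("sqlite", "sqlite_in_memory", ...) are
// resolved case-insensitively to the enum extended above. The real IndexBatchCreator
// parsing is not shown in this diff; this only illustrates the mapping.
public class DialectLookupExample {
    enum DBDialect { DERBY, H2, NOSQL, SQLITE, SQLITE_IN_MEMORY } // local mirror of the enum

    static DBDialect parseDialect(String value, DBDialect defaultDialect) {
        if (value == null || value.length() == 0) {
            return defaultDialect;
        }
        return DBDialect.valueOf(value.toUpperCase());
    }

    public static void main(String[] args) {
        System.out.println(parseDialect("sqlite_in_memory", DBDialect.SQLITE)); // SQLITE_IN_MEMORY
    }
}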
build-scripts/indexes-generate-inmem.sh (new executable file, 14 lines)

@@ -0,0 +1,14 @@
+# remove backup and create new backup
+# we should not rm, just do incremental updates for now! rm -rf backup
+
+# remove all previous files
+mkdir ~/indexes
+mkdir ~/indexes/uploaded
+
+rm -rf .work
+mkdir .work
+mkdir .work/osm
+if [ -z $INDEXES_FILE ]; then INDEXES_FILE="build-scripts/regions/indexes.xml"; echo "$INDEXES_FILE"; fi
+
+echo 'Running java net.osmand.data.index.IndexBatchCreator with $INDEXES_FILE'
+java -XX:+UseParallelGC -Xmx8096M -Xmn512M -Djava.util.logging.config.file=build-scripts/batch-logging.properties -cp "DataExtractionOSM/OsmAndMapCreator.jar:DataExtractionOSM/lib/*.jar" net.osmand.data.index.IndexBatchCreator build-scripts/indexes-batch-generate-inmem.xml "$INDEXES_FILE"