//$HeadURL: svn+ssh://rbezema@svn.wald.intevation.org/deegree/base/branches/2.2_testing/src/org/deegree/io/datastore/shape/ShapeDatastore.java $
/*----------------    FILE HEADER  ------------------------------------------

 This file is part of deegree.
 Copyright (C) 2001-2008 by:
 EXSE, Department of Geography, University of Bonn
 http://www.giub.uni-bonn.de/deegree/
 lat/lon GmbH
 http://www.lat-lon.de

 This library is free software; you can redistribute it and/or
 modify it under the terms of the GNU Lesser General Public
 License as published by the Free Software Foundation; either
 version 2.1 of the License, or (at your option) any later version.

 This library is distributed in the hope that it will be useful,
 but WITHOUT ANY WARRANTY; without even the implied warranty of
 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 Lesser General Public License for more details.

 You should have received a copy of the GNU Lesser General Public
 License along with this library; if not, write to the Free Software
 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

 Contact:

 Andreas Poth
 lat/lon GmbH
 Aennchenstr. 19
 53115 Bonn
 Germany
 E-Mail: poth@lat-lon.de

 Prof. Dr. Klaus Greve
 Department of Geography
 University of Bonn
 Meckenheimer Allee 166
 53115 Bonn
 Germany
 E-Mail: greve@giub.uni-bonn.de

 ---------------------------------------------------------------------------*/

package org.deegree.io.datastore.shape;

import java.io.IOException;
import java.net.URL;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import org.deegree.datatypes.QualifiedName;
import org.deegree.framework.log.ILogger;
import org.deegree.framework.log.LoggerFactory;
import org.deegree.framework.util.IDGenerator;
import org.deegree.framework.util.StringTools;
import org.deegree.framework.xml.NamespaceContext;
import org.deegree.framework.xml.XMLTools;
import org.deegree.i18n.Messages;
import org.deegree.io.datastore.AnnotationDocument;
import org.deegree.io.datastore.Datastore;
import org.deegree.io.datastore.DatastoreException;
import org.deegree.io.datastore.DatastoreTransaction;
import org.deegree.io.datastore.PropertyPathResolver;
import org.deegree.io.datastore.PropertyPathResolvingException;
import org.deegree.io.datastore.schema.MappedFeatureType;
import org.deegree.io.datastore.schema.MappedGMLSchema;
import org.deegree.io.datastore.schema.MappedGeometryPropertyType;
import org.deegree.io.datastore.schema.MappedPropertyType;
import org.deegree.io.datastore.schema.MappedSimplePropertyType;
import org.deegree.io.datastore.schema.content.MappingField;
import org.deegree.io.datastore.schema.content.SimpleContent;
import org.deegree.io.dbaseapi.DBaseException;
import org.deegree.io.dbaseapi.DBaseFile;
import org.deegree.io.shpapi.HasNoDBaseFileException;
import org.deegree.io.shpapi.ShapeFile;
import org.deegree.model.crs.CRSFactory;
import org.deegree.model.crs.CoordinateSystem;
import org.deegree.model.feature.Feature;
import org.deegree.model.feature.FeatureCollection;
import org.deegree.model.feature.FeatureFactory;
import org.deegree.model.feature.schema.FeatureType;
import org.deegree.model.feature.schema.PropertyType;
import org.deegree.model.filterencoding.ComparisonOperation;
import org.deegree.model.filterencoding.ComplexFilter;
import org.deegree.model.filterencoding.Expression;
import org.deegree.model.filterencoding.Filter;
import org.deegree.model.filterencoding.FilterEvaluationException;
import org.deegree.model.filterencoding.FilterTools;
import org.deegree.model.filterencoding.LogicalOperation;
import org.deegree.model.filterencoding.Operation;
import org.deegree.model.filterencoding.PropertyIsBetweenOperation;
import org.deegree.model.filterencoding.PropertyIsCOMPOperation;
import org.deegree.model.filterencoding.PropertyIsInstanceOfOperation;
import org.deegree.model.filterencoding.PropertyIsLikeOperation;
import org.deegree.model.filterencoding.PropertyIsNullOperation;
import org.deegree.model.filterencoding.PropertyName;
import org.deegree.model.filterencoding.SpatialOperation;
import org.deegree.model.spatialschema.Envelope;
import org.deegree.model.spatialschema.GeometryImpl;
import org.deegree.ogcbase.CommonNamespaces;
import org.deegree.ogcbase.PropertyPath;
import org.deegree.ogcwebservices.wfs.operation.Query;
import org.w3c.dom.Element;

/**
 * {@link Datastore} implementation that allows (read-only) access to ESRI shape files.
 *
 * @author <a href="mailto:deshmukh@lat-lon.de">Anup Deshmukh</a>
 * @author <a href="mailto:schneider@lat-lon.de">Markus Schneider </a>
 * @author last edited by: $Author: apoth $
 *
 * @version $Revision: 9342 $, $Date: 2007-12-27 13:32:57 +0100 (Do, 27 Dez 2007) $
 */
public class ShapeDatastore extends Datastore {

    private static final ILogger LOG = LoggerFactory.getLogger( ShapeDatastore.class );

    private static final NamespaceContext nsContext = CommonNamespaces.getNamespaceContext();

    // keys: FeatureTypes, values: Property-To-column mappings
    private Map<FeatureType, Map<PropertyType, String>> ftMappings = new HashMap<FeatureType, Map<PropertyType, String>>();

    // NOTE: this is equal for all bound schemas
    private URL shapeFileURL;

    private String srsName;

    @Override
    public AnnotationDocument getAnnotationParser() {
        return new ShapeAnnotationDocument();
    }

    /**
     * Binds the given schema to this datastore, validates the schema's property mappings against
     * the columns of the referenced dbf file and stores the schema's default SRS.
     *
     * @param schema
     *            schema to bind
     * @throws DatastoreException
     *             if the schema references columns that the dbf file does not provide
     */
    @Override
    public void bindSchema( MappedGMLSchema schema )
                            throws DatastoreException {
        super.bindSchema( schema );
        validate( schema );
        srsName = schema.getDefaultSRS().toString();
    }

    /**
     * Performs the given {@link Query} against the shape file.
     * <p>
     * Joins (more than one root feature type) are not supported by this datastore.
     *
     * @param query
     *            query to be performed
     * @param rootFts
     *            requested root feature types (only a single feature type is supported)
     * @return collection of matched features
     * @throws DatastoreException
     *             on any error accessing the shape / dbf file or evaluating the filter
     */
    @Override
    public FeatureCollection performQuery( Query query, MappedFeatureType[] rootFts )
                            throws DatastoreException {

        if ( rootFts.length > 1 ) {
            String msg = Messages.getMessage( "DATASTORE_SHAPE_DOES_NOT_SUPPORT_JOINS" );
            throw new DatastoreException( msg );
        }

        MappedFeatureType ft = rootFts[0];

        // perform CRS transformation (if necessary)
        query = transformQuery( query );

        // determine which properties must be contained in the returned features
        Map<PropertyType, String> fetchPropsToColumns = determineSelectedProps( ft, query );
        if ( LOG.getLevel() == ILogger.LOG_DEBUG ) {
            LOG.logDebug( "Selected properties / columns from the shapefile:" );
            for ( PropertyType pt : fetchPropsToColumns.keySet() ) {
                LOG.logDebug( "- " + pt.getName() + " / " + fetchPropsToColumns.get( pt ) );
            }
        }

        // determine the properties that have to be removed after filter evaluation (because they
        // are not requested, but used inside the filter expression)
        Set<PropertyType> filterProps = determineFilterProps( ft, query.getFilter() );
        Set<PropertyType> removeProps = new HashSet<PropertyType>( filterProps );
        for ( PropertyType pt : fetchPropsToColumns.keySet() ) {
            removeProps.remove( pt );
        }

        // add all property-to-column mappings for properties needed for the filter evaluation
        Map<PropertyType, String> allPropsToCols = this.ftMappings.get( ft );
        for ( PropertyType pt : filterProps ) {
            fetchPropsToColumns.put( pt, allPropsToCols.get( pt ) );
        }

        FeatureCollection result = null;
        ShapeFile shapeFile = null;
        int startPosition = -1;
        int maxFeatures = -1;

        // record number currently being read (used in error messages)
        int record = -1;
        try {
            LOG.logDebug( "Opening shapefile '" + shapeFileURL.getFile() + ".shp'." );
            shapeFile = new ShapeFile( shapeFileURL.getFile() );
            startPosition = query.getStartPosition();
            maxFeatures = query.getMaxFeatures();
            Filter filter = query.getFilter();
            Envelope bbox = null;
            if ( filter instanceof ComplexFilter ) {
                // extract a BBOX constraint (if any) for the spatial index lookup; the remaining
                // filter is evaluated in memory, feature by feature
                Object[] objects = null;
                try {
                    objects = FilterTools.extractFirstBBOX( (ComplexFilter) filter );
                } catch ( Exception e ) {
                    LOG.logError( e.getMessage(), e );
                    String msg = Messages.getMessage( "DATASTORE_EXTRACTBBOX", record );
                    throw new DatastoreException( msg, e );
                }
                bbox = (Envelope) objects[0];
                filter = (Filter) objects[1];
            }
            if ( bbox == null ) {
                bbox = shapeFile.getFileMBR();
            }

            shapeFile.setFeatureType( ft, fetchPropsToColumns );

            int[] idx = shapeFile.getGeoNumbersByRect( bbox );
            // id=identity required
            IDGenerator idg = IDGenerator.getInstance();
            String id = ft.getName().getLocalName();
            id += idg.generateUniqueID();
            if ( idx != null ) {
                // check parameters for sanity
                if ( startPosition < 1 ) {
                    startPosition = 1;
                }
                if ( ( maxFeatures < 0 ) || ( maxFeatures >= idx.length ) ) {
                    maxFeatures = idx.length;
                }
                LOG.logDebug( "Generating ID '" + id + "' for the FeatureCollection." );
                result = FeatureFactory.createFeatureCollection( id, idx.length );

                // TODO: respect startposition

                CoordinateSystem crs = CRSFactory.create( srsName );
                for ( int i = 0; i < maxFeatures; i++ ) {
                    record = idx[i];
                    Feature feat = shapeFile.getFeatureByRecNo( idx[i] );
                    if ( filter == null || filter.evaluate( feat ) ) {
                        String msg = StringTools.concat( 200, "Adding feature '", feat.getId(),
                                                         "' to FeatureCollection (with CRS ", srsName, ")." );
                        LOG.logDebug( msg );
                        // strip properties that were only fetched for filter evaluation
                        for ( PropertyType unrequestedPt : removeProps ) {
                            msg = StringTools.concat( 200, "Removing unrequested property '", unrequestedPt.getName(),
                                                      "' from feature: filter expression used it." );
                            LOG.logDebug( msg );
                            feat.removeProperty( unrequestedPt.getName() );
                        }
                        GeometryImpl geom = (GeometryImpl) feat.getDefaultGeometryPropertyValue();
                        geom.setCoordinateSystem( crs );
                        feat.setEnvelopesUpdated();
                        result.add( feat );
                    }
                }

                // update the envelopes
                result.setEnvelopesUpdated();
                result.getBoundedBy();
            } else {
                // no records intersect the bbox -> empty collection
                result = FeatureFactory.createFeatureCollection( id, 1 );
            }
        } catch ( IOException e ) {
            LOG.logError( e.getMessage(), e );
            String msg = Messages.getMessage( "DATASTORE_READINGFROMDBF", record );
            throw new DatastoreException( msg, e );
        } catch ( DBaseException e ) {
            LOG.logError( e.getMessage(), e );
            String msg = Messages.getMessage( "DATASTORE_READINGFROMDBF", record );
            throw new DatastoreException( msg, e );
        } catch ( HasNoDBaseFileException e ) {
            LOG.logError( e.getMessage(), e );
            String msg = Messages.getMessage( "DATASTORE_NODBASEFILE", record );
            throw new DatastoreException( msg, e );
        } catch ( FilterEvaluationException e ) {
            throw new DatastoreException( e.getMessage(), e );
        } catch ( Exception e ) {
            LOG.logError( e.getMessage(), e );
            String msg = Messages.getMessage( "DATASTORE_READINGFROMDBF", record );
            throw new DatastoreException( msg, e );
        } finally {
            LOG.logDebug( "Closing shapefile." );
            // guard against NPE: opening the shape file may have failed, in which case
            // shapeFile is still null and the original exception must not be masked
            if ( shapeFile != null ) {
                try {
                    shapeFile.close();
                } catch ( Exception e ) {
                    String msg = Messages.getMessage( "DATASTORE_ERROR_CLOSING_SHAPEFILE",
                                                      this.shapeFileURL.getFile() );
                    throw new DatastoreException( msg, e );
                }
            }
        }

        // transform result to queried srs if necessary
        String targetSrsName = query.getSrsName();
        if ( targetSrsName != null && !targetSrsName.equals( this.srsName ) ) {
            result = transformResult( result, query.getSrsName() );
        }

        return result;
    }

    /**
     * Determines the {@link PropertyType}s of the given feature type that are selected by the given {@link Query}
     * implicitly and explicitly, i.e. that are either listed or that have a <code>minOccurs</code> value greater than
     * zero.
     *
     * @param ft
     *            feature type
     * @param query
     * @return all properties that need to be fetched, mapped to the shapefile columns that store them
     * @throws PropertyPathResolvingException
     *             if a selected property does not denote a property of the feature type
     */
    private Map<PropertyType, String> determineSelectedProps( MappedFeatureType ft, Query query )
                            throws PropertyPathResolvingException {

        Map<PropertyType, String> allPropsToCols = this.ftMappings.get( ft );
        Map<PropertyType, String> fetchPropsToCols = new HashMap<PropertyType, String>();
        // TODO: respect aliases
        PropertyPath[] selectedPaths = PropertyPathResolver.normalizePropertyPaths( ft, null, query.getPropertyNames() );
        // TODO respect alias
        Map<MappedPropertyType, Collection<PropertyPath>> fetchProps = PropertyPathResolver.determineFetchProperties(
                                                                                                                      ft,
                                                                                                                      null,
                                                                                                                      selectedPaths );
        for ( MappedPropertyType pt : fetchProps.keySet() ) {
            fetchPropsToCols.put( pt, allPropsToCols.get( pt ) );
        }
        return fetchPropsToCols;
    }

    /**
     * Determines the {@link PropertyType}s that are necessary to apply the given {@link Filter} expression, i.e. the
     * <code>PropertyNames</code> that occur in it.
     *
     * @see PropertyPathResolver#determineFetchProperties(MappedFeatureType, PropertyPath[])
     * @param ft
     *            feature type on which the filter shall be applicable
     * @param filter
     *            filter expression
     * @return all <code>PropertyType</code>s that are referenced inside the filter
     * @throws PropertyPathResolvingException
     */
    private Set<PropertyType> determineFilterProps( MappedFeatureType ft, Filter filter )
                            throws PropertyPathResolvingException {

        Set<PropertyType> filterPts = new HashSet<PropertyType>();
        if ( filter instanceof ComplexFilter ) {
            ComplexFilter complexFilter = (ComplexFilter) filter;
            Operation operation = complexFilter.getOperation();
            addFilterProps( ft, filterPts, operation );
        }
        return filterPts;
    }

    /**
     * Recursively collects the {@link PropertyType}s referenced by the given filter {@link Operation}
     * (and all of its sub-operations) into <code>filterPts</code>.
     *
     * @param ft
     *            feature type on which the filter shall be applicable
     * @param filterPts
     *            set that collects the referenced property types
     * @param operation
     *            filter operation to inspect
     * @throws PropertyPathResolvingException
     *             if a referenced property does not denote a property of the feature type
     */
    private void addFilterProps( MappedFeatureType ft, Set<PropertyType> filterPts, Operation operation )
                            throws PropertyPathResolvingException {

        if ( operation instanceof ComparisonOperation ) {
            if ( operation instanceof PropertyIsBetweenOperation ) {
                PropertyIsBetweenOperation betweenOperation = (PropertyIsBetweenOperation) operation;
                filterPts.add( getFilterProperty( ft, betweenOperation.getPropertyName() ) );
            } else if ( operation instanceof PropertyIsCOMPOperation ) {
                PropertyIsCOMPOperation compOperation = (PropertyIsCOMPOperation) operation;
                Expression firstExpression = compOperation.getFirstExpression();
                Expression secondExpression = compOperation.getSecondExpression();
                if ( firstExpression instanceof PropertyName ) {
                    filterPts.add( getFilterProperty( ft, (PropertyName) firstExpression ) );
                }
                if ( secondExpression instanceof PropertyName ) {
                    filterPts.add( getFilterProperty( ft, (PropertyName) secondExpression ) );
                }
            } else if ( operation instanceof PropertyIsInstanceOfOperation ) {
                PropertyIsInstanceOfOperation instanceOfOperation = (PropertyIsInstanceOfOperation) operation;
                filterPts.add( getFilterProperty( ft, instanceOfOperation.getPropertyName() ) );
            } else if ( operation instanceof PropertyIsLikeOperation ) {
                PropertyIsLikeOperation likeOperation = (PropertyIsLikeOperation) operation;
                filterPts.add( getFilterProperty( ft, likeOperation.getPropertyName() ) );
            } else if ( operation instanceof PropertyIsNullOperation ) {
                PropertyIsNullOperation nullOperation = (PropertyIsNullOperation) operation;
                filterPts.add( getFilterProperty( ft, nullOperation.getPropertyName() ) );
            } else {
                // unknown ComparisonOperation subtype -- would indicate a programming error
                assert false;
            }
        } else if ( operation instanceof LogicalOperation ) {
            LogicalOperation logicalOperation = (LogicalOperation) operation;
            for ( Operation subOperation : logicalOperation.getArguments() ) {
                addFilterProps( ft, filterPts, subOperation );
            }
        } else if ( operation instanceof SpatialOperation ) {
            SpatialOperation spatialOperation = (SpatialOperation) operation;
            filterPts.add( getFilterProperty( ft, spatialOperation.getPropertyName() ) );
        } else {
            // unknown Operation subtype -- would indicate a programming error
            assert false;
        }
    }

    /**
     * Resolves the given <code>PropertyName</code> against the feature type.
     *
     * @param ft
     *            feature type
     * @param propName
     *            property name used inside a filter expression
     * @return the denoted property type
     * @throws PropertyPathResolvingException
     *             if the name does not denote a property of the feature type
     */
    private PropertyType getFilterProperty( MappedFeatureType ft, PropertyName propName )
                            throws PropertyPathResolvingException {

        // TODO respect aliases
        PropertyPath path = PropertyPathResolver.normalizePropertyPath( ft, null, propName.getValue() );

        QualifiedName propStep = path.getStep( 1 ).getPropertyName();
        PropertyType pt = ft.getProperty( propStep );
        if ( pt == null ) {
            String msg = Messages.getMessage( "DATASTORE_PROPERTY_PATH_RESOLVE4", path, 2, propStep, ft.getName(),
                                              propName );
            throw new PropertyPathResolvingException( msg );
        }
        return pt;
    }

    /**
     * Performs the given {@link Query} inside a transaction context; this read-only datastore
     * simply delegates to {@link #performQuery(Query, MappedFeatureType[])}.
     *
     * @param query
     *            query to be performed
     * @param rootFts
     *            requested root feature types
     * @param context
     *            transaction context (ignored)
     * @return collection of matched features
     * @throws DatastoreException
     */
    @Override
    public FeatureCollection performQuery( final Query query, final MappedFeatureType[] rootFts,
                                           final DatastoreTransaction context )
                            throws DatastoreException {
        return performQuery( query, rootFts );
    }

    /**
     * Validates the given {@link MappedGMLSchema} against the available columns in the referenced shape file.
     *
     * @param schema
     * @throws DatastoreException
     */
    private void validate( MappedGMLSchema schema )
                            throws DatastoreException {

        Set<String> columnNames = determineShapeFileColumns( schema );

        FeatureType[] featureTypes = schema.getFeatureTypes();
        for ( int i = 0; i < featureTypes.length; i++ ) {
            Map<PropertyType, String> ftMapping = getFTMapping( featureTypes[i], columnNames );
            ftMappings.put( featureTypes[i], ftMapping );
        }
    }

    /**
     * Builds the property-to-column mapping for a feature type, checking that every mapped simple
     * property refers to an existing dbf column.
     *
     * @param ft
     *            feature type to map
     * @param columnNames
     *            column names available in the dbf file
     * @return mapping from each simple property to its dbf column
     * @throws DatastoreException
     *             if a property has unsupported content, refers to a missing column or is a
     *             (forbidden) nested feature type
     */
    private Map<PropertyType, String> getFTMapping( FeatureType ft, Set<String> columnNames )
                            throws DatastoreException {
        Map<PropertyType, String> ftMapping = new HashMap<PropertyType, String>();
        PropertyType[] properties = ft.getProperties();
        for ( int i = 0; i < properties.length; i++ ) {
            MappedPropertyType pt = (MappedPropertyType) properties[i];
            if ( pt instanceof MappedSimplePropertyType ) {
                SimpleContent content = ( (MappedSimplePropertyType) pt ).getContent();
                if ( !( content instanceof MappingField ) ) {
                    String msg = Messages.getMessage( "DATASTORE_UNSUPPORTED_CONTENT", pt.getName() );
                    throw new DatastoreException( msg );
                }
                String field = ( (MappingField) content ).getField();
                if ( !columnNames.contains( field ) ) {
                    String msg = Messages.getMessage( "DATASTORE_FIELDNOTFOUND", field, pt.getName(),
                                                      shapeFileURL.getFile(), columnNames );
                    throw new DatastoreException( msg );
                }
                ftMapping.put( pt, field );
            } else if ( pt instanceof MappedGeometryPropertyType ) {
                // nothing to do
            } else {
                String msg = Messages.getMessage( "DATASTORE_NO_NESTED_FEATURE_TYPES", pt.getName() );
                throw new DatastoreException( msg );
            }
        }
        return ftMapping;
    }

    /**
     * Opens the dbf file referenced by the schema annotation and collects its column names.
     * <p>
     * Also initializes {@link #shapeFileURL} as a side effect.
     *
     * @param schema
     *            schema whose annotation references the shape file
     * @return names of all columns in the dbf file
     * @throws DatastoreException
     *             if the dbf file cannot be accessed
     */
    private Set<String> determineShapeFileColumns( MappedGMLSchema schema )
                            throws DatastoreException {

        Set<String> columnNames = new HashSet<String>();
        DBaseFile dbfFile = null;

        try {
            Element schemaRoot = schema.getDocument().getRootElement();
            String shapePath = XMLTools.getNodeAsString( schemaRoot, "xs:annotation/xs:appinfo/deegreewfs:File/text()",
                                                         nsContext, null );
            shapeFileURL = schema.getDocument().resolve( shapePath );
            LOG.logDebug( "Opening dbf file '" + shapeFileURL + "'." );
            dbfFile = new DBaseFile( shapeFileURL.getFile() );
            String[] columns = dbfFile.getProperties();
            for ( int i = 0; i < columns.length; i++ ) {
                columnNames.add( columns[i] );
            }
            String s = "Successfully opened dbf file '" + shapeFileURL.getFile()
                       + "' and retrieved the property columns.";
            LOG.logDebug( s );
        } catch ( Exception e ) {
            LOG.logError( e.getMessage(), e );
            throw new DatastoreException( Messages.getMessage( "DATASTORE_DBACCESSERROR" ) );
        } finally {
            if ( dbfFile != null ) {
                dbfFile.close();
            }
        }

        return columnNames;
    }

    /**
     * Closes the datastore so it can free dependent resources.
     *
     * @throws DatastoreException
     */
    @Override
    public void close()
                            throws DatastoreException {
        // TODO
    }
}