001 //$HeadURL: https://svn.wald.intevation.org/svn/deegree/base/branches/2.3_testing/src/org/deegree/io/datastore/shape/ShapeDatastore.java $
002 /*----------------------------------------------------------------------------
003 This file is part of deegree, http://deegree.org/
004 Copyright (C) 2001-2009 by:
005 Department of Geography, University of Bonn
006 and
007 lat/lon GmbH
008
009 This library is free software; you can redistribute it and/or modify it under
010 the terms of the GNU Lesser General Public License as published by the Free
011 Software Foundation; either version 2.1 of the License, or (at your option)
012 any later version.
013 This library is distributed in the hope that it will be useful, but WITHOUT
014 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
015 FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
016 details.
017 You should have received a copy of the GNU Lesser General Public License
018 along with this library; if not, write to the Free Software Foundation, Inc.,
019 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
020
021 Contact information:
022
023 lat/lon GmbH
024 Aennchenstr. 19, 53177 Bonn
025 Germany
026 http://lat-lon.de/
027
028 Department of Geography, University of Bonn
029 Prof. Dr. Klaus Greve
030 Postfach 1147, 53001 Bonn
031 Germany
032 http://www.geographie.uni-bonn.de/deegree/
033
034 e-mail: info@deegree.org
035 ----------------------------------------------------------------------------*/
036
037 package org.deegree.io.datastore.shape;
038
039 import java.io.IOException;
040 import java.net.URL;
041 import java.util.Collection;
042 import java.util.HashMap;
043 import java.util.HashSet;
044 import java.util.Map;
045 import java.util.Set;
046
047 import org.deegree.datatypes.QualifiedName;
048 import org.deegree.framework.log.ILogger;
049 import org.deegree.framework.log.LoggerFactory;
050 import org.deegree.framework.util.IDGenerator;
051 import org.deegree.framework.util.StringTools;
052 import org.deegree.framework.xml.NamespaceContext;
053 import org.deegree.framework.xml.XMLTools;
054 import org.deegree.i18n.Messages;
055 import org.deegree.io.datastore.AnnotationDocument;
056 import org.deegree.io.datastore.Datastore;
057 import org.deegree.io.datastore.DatastoreException;
058 import org.deegree.io.datastore.DatastoreTransaction;
059 import org.deegree.io.datastore.PropertyPathResolver;
060 import org.deegree.io.datastore.PropertyPathResolvingException;
061 import org.deegree.io.datastore.schema.MappedFeatureType;
062 import org.deegree.io.datastore.schema.MappedGMLSchema;
063 import org.deegree.io.datastore.schema.MappedGeometryPropertyType;
064 import org.deegree.io.datastore.schema.MappedPropertyType;
065 import org.deegree.io.datastore.schema.MappedSimplePropertyType;
066 import org.deegree.io.datastore.schema.content.MappingField;
067 import org.deegree.io.datastore.schema.content.SimpleContent;
068 import org.deegree.io.dbaseapi.DBaseException;
069 import org.deegree.io.dbaseapi.DBaseFile;
070 import org.deegree.io.shpapi.HasNoDBaseFileException;
071 import org.deegree.io.shpapi.ShapeFile;
072 import org.deegree.model.crs.CRSFactory;
073 import org.deegree.model.crs.CoordinateSystem;
074 import org.deegree.model.feature.Feature;
075 import org.deegree.model.feature.FeatureCollection;
076 import org.deegree.model.feature.FeatureFactory;
077 import org.deegree.model.feature.schema.FeatureType;
078 import org.deegree.model.feature.schema.PropertyType;
079 import org.deegree.model.filterencoding.ComparisonOperation;
080 import org.deegree.model.filterencoding.ComplexFilter;
081 import org.deegree.model.filterencoding.Expression;
082 import org.deegree.model.filterencoding.Filter;
083 import org.deegree.model.filterencoding.FilterEvaluationException;
084 import org.deegree.model.filterencoding.FilterTools;
085 import org.deegree.model.filterencoding.LogicalOperation;
086 import org.deegree.model.filterencoding.Operation;
087 import org.deegree.model.filterencoding.PropertyIsBetweenOperation;
088 import org.deegree.model.filterencoding.PropertyIsCOMPOperation;
089 import org.deegree.model.filterencoding.PropertyIsInstanceOfOperation;
090 import org.deegree.model.filterencoding.PropertyIsLikeOperation;
091 import org.deegree.model.filterencoding.PropertyIsNullOperation;
092 import org.deegree.model.filterencoding.PropertyName;
093 import org.deegree.model.filterencoding.SpatialOperation;
094 import org.deegree.model.spatialschema.Envelope;
095 import org.deegree.model.spatialschema.GeometryImpl;
096 import org.deegree.ogcbase.CommonNamespaces;
097 import org.deegree.ogcbase.PropertyPath;
098 import org.deegree.ogcwebservices.wfs.operation.Query;
099 import org.w3c.dom.Element;
100
101 /**
102 * {@link Datastore} implementation that allows (read-only) access to ESRI shape files.
103 *
104 * @author <a href="mailto:deshmukh@lat-lon.de">Anup Deshmukh</a>
105 * @author <a href="mailto:schneider@lat-lon.de">Markus Schneider </a>
106 * @author last edited by: $Author: mschneider $
107 *
108 * @version $Revision: 18195 $, $Date: 2009-06-18 17:55:39 +0200 (Do, 18. Jun 2009) $
109 */
110 public class ShapeDatastore extends Datastore {
111
112 private static final ILogger LOG = LoggerFactory.getLogger( ShapeDatastore.class );
113
114 private static final NamespaceContext nsContext = CommonNamespaces.getNamespaceContext();
115
116 // keys: FeatureTypes, values: Property-To-column mappings
117 private Map<FeatureType, Map<PropertyType, String>> ftMappings = new HashMap<FeatureType, Map<PropertyType, String>>();
118
119 // NOTE: this is equal for all bound schemas
120 private URL shapeFileURL;
121
122 private String srsName;
123
124 @Override
125 public AnnotationDocument getAnnotationParser() {
126 return new ShapeAnnotationDocument();
127 }
128
    /**
     * Binds the given schema to this datastore.
     * <p>
     * The schema is validated against the columns of the referenced shape file (see {@link #validate}), and the
     * schema's default SRS is remembered as the native SRS of all features served by this datastore.
     * NOTE(review): the order matters here -- {@code validate} relies on state set up by the superclass binding.
     *
     * @param schema
     *            mapped GML application schema to bind
     * @throws DatastoreException
     *             if the schema's mapped columns do not match the shape file's columns
     */
    @Override
    public void bindSchema( MappedGMLSchema schema )
                            throws DatastoreException {
        super.bindSchema( schema );
        validate( schema );
        srsName = schema.getDefaultSRS().toString();
    }
136
137 @Override
138 public FeatureCollection performQuery( Query query, MappedFeatureType[] rootFts )
139 throws DatastoreException {
140
141 if ( rootFts.length > 1 ) {
142 String msg = Messages.getMessage( "DATASTORE_SHAPE_DOES_NOT_SUPPORT_JOINS" );
143 throw new DatastoreException( msg );
144 }
145
146 MappedFeatureType ft = rootFts[0];
147
148 // perform CRS transformation (if necessary)
149 Query transformedQuery = transformQuery( query );
150
151 // determine which properties must be contained in the returned features
152 Map<PropertyType, String> fetchPropsToColumns = determineSelectedProps( ft, transformedQuery );
153 if ( LOG.getLevel() == ILogger.LOG_DEBUG ) {
154 LOG.logDebug( "Selected properties / columns from the shapefile:" );
155 for ( PropertyType pt : fetchPropsToColumns.keySet() ) {
156 LOG.logDebug( "- " + pt.getName() + " / " + fetchPropsToColumns.get( pt ) );
157 }
158 }
159
160 // determine the properties that have to be removed after filter evaluation (because they
161 // are not requested, but used inside the filter expression)
162 Set<PropertyType> filterProps = determineFilterProps( ft, transformedQuery.getFilter() );
163 Set<PropertyType> removeProps = new HashSet<PropertyType>( filterProps );
164 for ( PropertyType pt : fetchPropsToColumns.keySet() ) {
165 removeProps.remove( pt );
166 }
167
168 // add all property-to-column mappings for properties needed for the filter evaluation
169 Map<PropertyType, String> allPropsToCols = this.ftMappings.get( ft );
170 for ( PropertyType pt : filterProps ) {
171 fetchPropsToColumns.put( pt, allPropsToCols.get( pt ) );
172 }
173
174 FeatureCollection result = null;
175 ShapeFile shapeFile = null;
176 int startPosition = -1;
177 int maxFeatures = -1;
178
179 int record = -1;
180 try {
181 LOG.logDebug( "Opening shapefile '" + shapeFileURL.getFile() + ".shp'." );
182 shapeFile = new ShapeFile( shapeFileURL.getFile() );
183 startPosition = transformedQuery.getStartPosition()-1;
184 maxFeatures = transformedQuery.getMaxFeatures();
185 Filter filter = transformedQuery.getFilter();
186 Envelope bbox = null;
187 if ( filter instanceof ComplexFilter ) {
188 bbox = FilterTools.firstBBOX( (ComplexFilter) filter );
189 }
190 if ( bbox == null ) {
191 bbox = shapeFile.getFileMBR();
192 }
193
194 shapeFile.setFeatureType( ft, fetchPropsToColumns );
195
196 int[] idx = shapeFile.getGeoNumbersByRect( bbox );
197 // id=identity required
198 IDGenerator idg = IDGenerator.getInstance();
199 String id = ft.getName().getLocalName();
200 id += idg.generateUniqueID();
201 if ( idx != null ) {
202 // check parameters for sanity
203 if ( startPosition < 0 ) {
204 startPosition = 0;
205 }
206 maxFeatures = maxFeatures + startPosition;
207 if ( ( maxFeatures < 0 ) || ( maxFeatures >= idx.length ) ) {
208 maxFeatures = idx.length;
209 }
210 LOG.logDebug( "Generating ID '" + id + "' for the FeatureCollection." );
211 result = FeatureFactory.createFeatureCollection( id, idx.length );
212
213 // TODO: respect startposition
214
215 CoordinateSystem crs = CRSFactory.create( srsName );
216 for ( int i = startPosition; i < maxFeatures; i++ ) {
217 record = idx[i];
218 Feature feat = shapeFile.getFeatureByRecNo( idx[i] );
219 if ( filter == null || filter.evaluate( feat ) ) {
220 String msg = StringTools.concat( 200, "Adding feature '", feat.getId(),
221 "' to FeatureCollection (with CRS ", srsName, ")." );
222 LOG.logDebug( msg );
223 for ( PropertyType unrequestedPt : removeProps ) {
224 msg = StringTools.concat( 200, "Removing unrequested property '", unrequestedPt.getName(),
225 "' from feature: filter expression used it." );
226 LOG.logDebug( msg );
227 feat.removeProperty( unrequestedPt.getName() );
228 }
229 GeometryImpl geom = (GeometryImpl) feat.getDefaultGeometryPropertyValue();
230 geom.setCoordinateSystem( crs );
231 feat.setEnvelopesUpdated();
232 result.add( feat );
233 }
234 }
235
236 // update the envelopes
237 result.setEnvelopesUpdated();
238 result.getBoundedBy();
239 } else {
240 result = FeatureFactory.createFeatureCollection( id, 1 );
241 }
242 } catch ( IOException e ) {
243 LOG.logError( e.getMessage(), e );
244 String msg = Messages.getMessage( "DATASTORE_READINGFROMDBF", record );
245 throw new DatastoreException( msg, e );
246 } catch ( DBaseException e ) {
247 LOG.logError( e.getMessage(), e );
248 String msg = Messages.getMessage( "DATASTORE_READINGFROMDBF", record );
249 throw new DatastoreException( msg, e );
250 } catch ( HasNoDBaseFileException e ) {
251 LOG.logError( e.getMessage(), e );
252 String msg = Messages.getMessage( "DATASTORE_NODBASEFILE", record );
253 throw new DatastoreException( msg, e );
254 } catch ( FilterEvaluationException e ) {
255 throw new DatastoreException( e.getMessage(), e );
256 } catch ( Exception e ) {
257 LOG.logError( e.getMessage(), e );
258 String msg = Messages.getMessage( "DATASTORE_READINGFROMDBF", record );
259 throw new DatastoreException( msg, e );
260 } finally {
261 LOG.logDebug( "Closing shapefile." );
262 try {
263 shapeFile.close();
264 } catch ( Exception e ) {
265 String msg = Messages.getMessage( "DATASTORE_ERROR_CLOSING_SHAPEFILE", this.shapeFileURL.getFile() );
266 throw new DatastoreException( msg );
267 }
268 }
269
270 // transform result to queried srs if necessary
271 String targetSrsName = transformedQuery.getSrsName();
272 if ( targetSrsName != null && !targetSrsName.equals( this.srsName ) ) {
273 result = transformResult( result, transformedQuery.getSrsName() );
274 }
275
276 return result;
277 }
278
279 /**
280 * Determines the {@link PropertyType}s of the given feature type that are selected by the given {@link Query}
281 * implicitly and explicitly, i.e that are either listed or that have a <code>minOccurs</code> value greater than
282 * one. *
283 *
284 * @param ft
285 * feature type
286 * @param query
287 * @return all properties that need to be fetched, mapped to the shapefile columns that store them
288 * @throws PropertyPathResolvingException
289 * if a selected property does not denote a property of the feature type
290 */
291 private Map<PropertyType, String> determineSelectedProps( MappedFeatureType ft, Query query )
292 throws PropertyPathResolvingException {
293
294 Map<PropertyType, String> allPropsToCols = this.ftMappings.get( ft );
295 Map<PropertyType, String> fetchPropsToCols = new HashMap<PropertyType, String>();
296 // TODO: respect aliases
297 PropertyPath[] selectedPaths = PropertyPathResolver.normalizePropertyPaths( ft, null, query.getPropertyNames() );
298 // TODO respect alias
299 Map<MappedPropertyType, Collection<PropertyPath>> fetchProps = PropertyPathResolver.determineFetchProperties(
300 ft,
301 null,
302 selectedPaths );
303 for ( MappedPropertyType pt : fetchProps.keySet() ) {
304 fetchPropsToCols.put( pt, allPropsToCols.get( pt ) );
305 }
306 return fetchPropsToCols;
307 }
308
309 /**
310 * Determines the {@link PropertyType}s that are necessary to apply the given {@link Filter} expression, i.e. the
311 * <code>PropertyNames</code> that occur in it.
312 *
313 * @see PropertyPathResolver#determineFetchProperties(MappedFeatureType, String, PropertyPath[])
314 * @param ft
315 * feature type on which the filter shall be applicable
316 * @param filter
317 * filter expression
318 * @return all <code>PropertyType</code>s that are referenced inside the filter
319 * @throws PropertyPathResolvingException
320 */
321 private Set<PropertyType> determineFilterProps( MappedFeatureType ft, Filter filter )
322 throws PropertyPathResolvingException {
323
324 Set<PropertyType> filterPts = new HashSet<PropertyType>();
325 if ( filter != null && filter instanceof ComplexFilter ) {
326 ComplexFilter complexFilter = (ComplexFilter) filter;
327 Operation operation = complexFilter.getOperation();
328 addFilterProps( ft, filterPts, operation );
329 }
330 return filterPts;
331 }
332
333 private void addFilterProps( MappedFeatureType ft, Set<PropertyType> filterPts, Operation operation )
334 throws PropertyPathResolvingException {
335
336 if ( operation instanceof ComparisonOperation ) {
337 if ( operation instanceof PropertyIsBetweenOperation ) {
338 PropertyIsBetweenOperation betweenOperation = (PropertyIsBetweenOperation) operation;
339 filterPts.add( getFilterProperty( ft, betweenOperation.getPropertyName() ) );
340 } else if ( operation instanceof PropertyIsCOMPOperation ) {
341 PropertyIsCOMPOperation compOperation = (PropertyIsCOMPOperation) operation;
342 Expression firstExpression = compOperation.getFirstExpression();
343 Expression secondExpression = compOperation.getSecondExpression();
344 if ( firstExpression instanceof PropertyName ) {
345 filterPts.add( getFilterProperty( ft, (PropertyName) firstExpression ) );
346 }
347 if ( secondExpression instanceof PropertyName ) {
348 filterPts.add( getFilterProperty( ft, (PropertyName) secondExpression ) );
349 }
350 } else if ( operation instanceof PropertyIsInstanceOfOperation ) {
351 PropertyIsInstanceOfOperation instanceOfOperation = (PropertyIsInstanceOfOperation) operation;
352 filterPts.add( getFilterProperty( ft, instanceOfOperation.getPropertyName() ) );
353 } else if ( operation instanceof PropertyIsLikeOperation ) {
354 PropertyIsLikeOperation likeOperation = (PropertyIsLikeOperation) operation;
355 filterPts.add( getFilterProperty( ft, likeOperation.getPropertyName() ) );
356 } else if ( operation instanceof PropertyIsNullOperation ) {
357 PropertyIsNullOperation nullOperation = (PropertyIsNullOperation) operation;
358 filterPts.add( getFilterProperty( ft, nullOperation.getPropertyName() ) );
359 } else {
360 assert false;
361 }
362 } else if ( operation instanceof LogicalOperation ) {
363 LogicalOperation logicalOperation = (LogicalOperation) operation;
364 for ( Operation subOperation : logicalOperation.getArguments() ) {
365 addFilterProps( ft, filterPts, subOperation );
366 }
367 } else if ( operation instanceof SpatialOperation ) {
368 SpatialOperation spatialOperation = (SpatialOperation) operation;
369 filterPts.add( getFilterProperty( ft, spatialOperation.getPropertyName() ) );
370 } else {
371 assert false;
372 }
373 }
374
375 private PropertyType getFilterProperty( MappedFeatureType ft, PropertyName propName )
376 throws PropertyPathResolvingException {
377
378 // TODO respect aliases
379 PropertyPath path = PropertyPathResolver.normalizePropertyPath( ft, null, propName.getValue() );
380
381 QualifiedName propStep = path.getStep( 1 ).getPropertyName();
382 PropertyType pt = ft.getProperty( propStep );
383 if ( pt == null ) {
384 String msg = Messages.getMessage( "DATASTORE_PROPERTY_PATH_RESOLVE4", path, 2, propStep, ft.getName(),
385 propName );
386 throw new PropertyPathResolvingException( msg );
387 }
388 return pt;
389 }
390
    /**
     * Performs the given {@link Query} inside a transaction context.
     * <p>
     * This datastore is read-only, so the transaction context is ignored and the call is delegated to
     * {@link #performQuery(Query, MappedFeatureType[])}.
     *
     * @param query
     *            query to be performed
     * @param rootFts
     *            queried feature types
     * @param context
     *            transaction context (ignored)
     * @return collection of all features that match the query
     * @throws DatastoreException
     */
    @Override
    public FeatureCollection performQuery( final Query query, final MappedFeatureType[] rootFts,
                                           final DatastoreTransaction context )
                            throws DatastoreException {
        return performQuery( query, rootFts );
    }
397
398 /**
399 * Validates the given {@link MappedGMLSchema} against the available columns in the referenced shape file.
400 *
401 * @param schema
402 * @throws DatastoreException
403 */
404 private void validate( MappedGMLSchema schema )
405 throws DatastoreException {
406
407 Set<String> columnNames = determineShapeFileColumns( schema );
408
409 FeatureType[] featureTypes = schema.getFeatureTypes();
410 for ( int i = 0; i < featureTypes.length; i++ ) {
411 Map<PropertyType, String> ftMapping = getFTMapping( featureTypes[i], columnNames );
412 ftMappings.put( featureTypes[i], ftMapping );
413 }
414 }
415
416 private Map<PropertyType, String> getFTMapping( FeatureType ft, Set<String> columnNames )
417 throws DatastoreException {
418 Map<PropertyType, String> ftMapping = new HashMap<PropertyType, String>();
419 PropertyType[] properties = ft.getProperties();
420 for ( int i = 0; i < properties.length; i++ ) {
421 MappedPropertyType pt = (MappedPropertyType) properties[i];
422 if ( pt instanceof MappedSimplePropertyType ) {
423 SimpleContent content = ( (MappedSimplePropertyType) pt ).getContent();
424 if ( !( content instanceof MappingField ) ) {
425 String msg = Messages.getMessage( "DATASTORE_UNSUPPORTED_CONTENT", pt.getName() );
426 throw new DatastoreException( msg );
427 }
428 // ensure that field name is in uppercase
429 String field = ( (MappingField) content ).getField().toUpperCase();
430 if ( !columnNames.contains( field ) ) {
431 String msg = Messages.getMessage( "DATASTORE_FIELDNOTFOUND", field, pt.getName(),
432 shapeFileURL.getFile(), columnNames );
433 throw new DatastoreException( msg );
434 }
435 ftMapping.put( pt, field );
436 } else if ( pt instanceof MappedGeometryPropertyType ) {
437 // nothing to do
438 } else {
439 String msg = Messages.getMessage( "DATASTORE_NO_NESTED_FEATURE_TYPES", pt.getName() );
440 throw new DatastoreException( msg );
441 }
442 }
443 return ftMapping;
444 }
445
446 /**
447 * Determines the column names (in uppercase) of the shape file that is referenced by the given schema.
448 *
449 * @param schema
450 * @return column names (in uppercase)
451 * @throws DatastoreException
452 */
453 private Set<String> determineShapeFileColumns( MappedGMLSchema schema )
454 throws DatastoreException {
455
456 Set<String> columnNames = new HashSet<String>();
457 DBaseFile dbfFile = null;
458
459 try {
460 Element schemaRoot = schema.getDocument().getRootElement();
461 String shapePath = XMLTools.getNodeAsString( schemaRoot, "xs:annotation/xs:appinfo/deegreewfs:File/text()",
462 nsContext, null );
463 shapeFileURL = schema.getDocument().resolve( shapePath );
464 LOG.logDebug( "Opening dbf file '" + shapeFileURL + "'." );
465 dbfFile = new DBaseFile( shapeFileURL.getFile() );
466 String[] columns = dbfFile.getProperties();
467 for ( int i = 0; i < columns.length; i++ ) {
468 columnNames.add( columns[i].toUpperCase() );
469 }
470 String s = "Successfully opened dbf file '" + shapeFileURL.getFile()
471 + "' and retrieved the property columns.";
472 LOG.logDebug( s );
473 } catch ( Exception e ) {
474 LOG.logError( e.getMessage(), e );
475 throw new DatastoreException( Messages.getMessage( "DATASTORE_DBACCESSERROR" ) );
476 } finally {
477 if ( dbfFile != null ) {
478 dbfFile.close();
479 }
480 }
481
482 return columnNames;
483 }
484
    /**
     * Closes the datastore so it can free dependent resources.
     * <p>
     * NOTE(review): currently a no-op -- the shape and dbf files are opened and closed per call in this class, so
     * there appears to be nothing to release here; confirm before implementing the TODO.
     *
     * @throws DatastoreException
     */
    @Override
    public void close()
                            throws DatastoreException {
        // TODO
    }
495 }