//$HeadURL: svn+ssh://rbezema@svn.wald.intevation.org/deegree/base/branches/2.2_testing/src/org/deegree/io/datastore/sql/transaction/delete/DeleteHandler.java $
/*----------------    FILE HEADER  ------------------------------------------

 This file is part of deegree.
 Copyright (C) 2001-2008 by:
 EXSE, Department of Geography, University of Bonn
 http://www.giub.uni-bonn.de/deegree/
 lat/lon GmbH
 http://www.lat-lon.de

 This library is free software; you can redistribute it and/or
 modify it under the terms of the GNU Lesser General Public
 License as published by the Free Software Foundation; either
 version 2.1 of the License, or (at your option) any later version.

 This library is distributed in the hope that it will be useful,
 but WITHOUT ANY WARRANTY; without even the implied warranty of
 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 Lesser General Public License for more details.

 You should have received a copy of the GNU Lesser General Public
 License along with this library; if not, write to the Free Software
 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

 Contact:

 Andreas Poth
 lat/lon GmbH
 Aennchenstraße 19
 53177 Bonn
 Germany
 E-Mail: poth@lat-lon.de

 Prof. Dr. Klaus Greve
 Department of Geography
 University of Bonn
 Meckenheimer Allee 166
 53115 Bonn
 Germany
 E-Mail: greve@giub.uni-bonn.de

 ---------------------------------------------------------------------------*/
package org.deegree.io.datastore.sql.transaction.delete;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

import org.deegree.datatypes.Types;
import org.deegree.framework.log.ILogger;
import org.deegree.framework.log.LoggerFactory;
import org.deegree.i18n.Messages;
import org.deegree.io.datastore.Datastore;
import org.deegree.io.datastore.DatastoreException;
import org.deegree.io.datastore.FeatureId;
import org.deegree.io.datastore.schema.MappedFeaturePropertyType;
import org.deegree.io.datastore.schema.MappedFeatureType;
import org.deegree.io.datastore.schema.MappedGMLSchema;
import org.deegree.io.datastore.schema.MappedPropertyType;
import org.deegree.io.datastore.schema.TableRelation;
import org.deegree.io.datastore.schema.content.MappingField;
import org.deegree.io.datastore.sql.AbstractRequestHandler;
import org.deegree.io.datastore.sql.StatementBuffer;
import org.deegree.io.datastore.sql.TableAliasGenerator;
import org.deegree.io.datastore.sql.transaction.SQLTransaction;
import org.deegree.io.datastore.sql.transaction.UpdateHandler;
import org.deegree.model.feature.schema.FeatureType;
import org.deegree.model.feature.schema.PropertyType;
import org.deegree.model.filterencoding.Filter;
import org.deegree.ogcwebservices.wfs.operation.transaction.Delete;
import org.deegree.ogcwebservices.wfs.operation.transaction.Transaction;

/**
 * Handler for {@link Delete} operations (which usually occur as parts of {@link Transaction}
 * requests).
 * <p>
 * When a {@link Delete} operation is performed, the following actions are taken:
 * <ul>
 * <li>the {@link FeatureId}s of all (root) feature instances that match the associated
 * {@link Filter} are determined</li>
 * <li>the {@link FeatureGraph} is built in order to determine which features may be deleted
 * without removing subfeatures of independent features</li>
 * <li>the {@link TableGraph} is built that contains explicit information on all table rows that
 * have to be deleted (and their dependencies)</li>
 * <li>the {@link TableNode}s of the {@link TableGraph} are sorted in topological order, i.e. they
 * may be deleted in that order without violating any foreign key constraints</li>
 * </ul>
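 * <p>
 * Typical usage (sketch): the handler is created inside a running {@link SQLTransaction} and is
 * handed the transaction, an alias generator, the JDBC connection and an optional lock id; the
 * variable names below are only illustrative:
 * 
 * <pre>
 * DeleteHandler handler = new DeleteHandler( dsTa, aliasGenerator, conn, lockId );
 * int deletedFeatures = handler.performDelete( ft, filter );
 * </pre>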
 * 
 * @see FeatureGraph
 * @see TableGraph
 * 
 * @author <a href="mailto:schneider@lat-lon.de">Markus Schneider</a>
 * @author last edited by: $Author: apoth $
 * 
 * @version $Revision: 9342 $, $Date: 2007-12-27 13:32:57 +0100 (Do, 27 Dez 2007) $
 */
public class DeleteHandler extends AbstractRequestHandler {

    private static final ILogger LOG = LoggerFactory.getLogger( DeleteHandler.class );

    private String lockId;

    /**
     * Creates a new <code>DeleteHandler</code> from the given parameters.
     * 
     * @param dsTa
     *            datastore transaction (provides the {@link Datastore} instance)
     * @param aliasGenerator
     *            generator for unique table aliases
     * @param conn
     *            JDBC connection to use
     * @param lockId
     *            optional id of associated lock (may be null)
     */
    public DeleteHandler( SQLTransaction dsTa, TableAliasGenerator aliasGenerator, Connection conn,
                          String lockId ) {
        super( dsTa.getDatastore(), aliasGenerator, conn );
        this.lockId = lockId;
    }

    /**
     * Deletes the features from the {@link Datastore} that have a certain type and are matched by
     * the given filter.
     * 
     * @param ft
     *            non-abstract feature type of the features to be deleted
     * @param filter
     *            constrains the feature instances to be deleted
     * @return number of deleted feature instances
     * @throws DatastoreException
     */
    public int performDelete( MappedFeatureType ft, Filter filter )
                            throws DatastoreException {

        assert !ft.isAbstract();

        if ( !ft.isDeletable() ) {
            String msg = Messages.getMessage( "DATASTORE_FT_NOT_DELETABLE", ft.getName() );
            throw new DatastoreException( msg );
        }

        List<FeatureId> fids = determineAffectedAndModifiableFIDs( ft, filter, this.lockId );

        if ( LOG.getLevel() == ILogger.LOG_DEBUG ) {
            LOG.logDebug( "Affected fids:" );
            for ( FeatureId fid : fids ) {
                LOG.logDebug( "" + fid );
            }
        }

        FeatureGraph featureGraph = new FeatureGraph( fids, this );
        TableGraph tableGraph = new TableGraph( featureGraph, this );

        if ( LOG.getLevel() == ILogger.LOG_DEBUG ) {
            LOG.logDebug( "FeatureGraph: " + featureGraph );
            LOG.logDebug( "TableGraph: " + tableGraph );
        }

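        // delete the rows in topological order, so that no foreign key constraints are violated;
        // rows with a possible delete veto are kept if other rows still reference them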
        List<TableNode> sortedNodes = tableGraph.getNodesInTopologicalOrder();
        for ( TableNode node : sortedNodes ) {
            boolean delete = true;
            if ( node.isDeleteVetoPossible() ) {
                List<TableNode> referencingRows = getReferencingRows( node );
                if ( referencingRows.size() > 0 ) {
                    delete = false;
                    LOG.logDebug( "Skipping delete of " + node + ": " + referencingRows.size()
                                  + " reference(s) exist." );
                    for ( TableNode referencingNode : referencingRows ) {
                        LOG.logDebug( "Referenced by: " + referencingNode );
                    }
                }
            }
            if ( delete ) {
                performDelete( node );
            }
        }

        int deletedFeatures = tableGraph.getDeletableRootFeatureCount();

        if ( deletedFeatures != fids.size() ) {
            String msg = Messages.getMessage( "DATASTORE_COULD_NOT_DELETE_ALL" );
            LOG.logInfo( msg );
        }

        // return count of actually deleted (root) features
        return deletedFeatures;
    }

    /**
     * Deletes the table entry from the SQL database that is represented by the given
     * {@link TableNode}.
     * 
     * @param node
     * @throws DatastoreException
     */
    private void performDelete( TableNode node )
                            throws DatastoreException {

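        // build a DELETE statement that identifies the row by all of its key columns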
        StatementBuffer query = new StatementBuffer();
        query.append( "DELETE FROM " );
        query.append( node.getTable() );
        query.append( " WHERE " );
        boolean first = true;
        for ( KeyColumn column : node.getKeyColumns() ) {
            if ( first ) {
                first = false;
            } else {
                query.append( " AND " );
            }
            query.append( column.getName() );
            query.append( "=?" );
            query.addArgument( column.getValue(), column.getTypeCode() );
        }

        PreparedStatement stmt = null;
        try {
            stmt = this.datastore.prepareStatement( conn, query );
            LOG.logDebug( "Deleting row: " + query );
            stmt.execute();
        } catch ( SQLException e ) {
            String msg = "Error performing delete '" + query + "': " + e.getMessage();
            LOG.logInfo( msg, e );
            throw new DatastoreException( msg );
        } finally {
            if ( stmt != null ) {
                try {
                    stmt.close();
                } catch ( SQLException e ) {
                    String msg = "Error closing statement: " + e.getMessage();
                    LOG.logError( msg, e );
                }
            }
        }
    }

    /**
     * Determines the {@link TableNode}s that represent the simple/geometry properties of the given
     * feature in the property table attached by the given {@link TableRelation}.
     * 
     * @param fid
     *            id of the feature that owns the properties
     * @param relation
     *            describes how the property table is joined to the feature table
     * @return nodes that represent the matching property table rows
     * @throws DatastoreException
     */
    List<TableNode> determinePropNodes( FeatureId fid, TableRelation relation )
                            throws DatastoreException {

        List<TableNode> propEntries = new ArrayList<TableNode>();

        this.aliasGenerator.reset();
        String fromAlias = this.aliasGenerator.generateUniqueAlias();
        String toAlias = this.aliasGenerator.generateUniqueAlias();
        MappingField[] fromFields = relation.getFromFields();
        MappingField[] toFields = relation.getToFields();

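        // select the key values of all property table rows that are joined to the feature's row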
        StatementBuffer query = new StatementBuffer();
        query.append( "SELECT DISTINCT " );
        for ( int i = 0; i < toFields.length; i++ ) {
            query.append( toAlias );
            query.append( "." );
            query.append( toFields[i].getField() );
            if ( i != toFields.length - 1 ) {
                query.append( ',' );
            }
        }
        query.append( " FROM " );
        query.append( fid.getFeatureType().getTable() );
        query.append( " " );
        query.append( fromAlias );
        query.append( " INNER JOIN " );
        query.append( relation.getToTable() );
        query.append( " " );
        query.append( toAlias );
        query.append( " ON " );
        for ( int j = 0; j < fromFields.length; j++ ) {
            if ( j != 0 ) {
                query.append( " AND " );
            }
            query.append( fromAlias );
            query.append( '.' );
            query.append( fromFields[j].getField() );
            query.append( '=' );
            query.append( toAlias );
            query.append( '.' );
            query.append( toFields[j].getField() );
        }
        query.append( " WHERE " );
        appendFeatureIdConstraint( query, fid, fromAlias );

        PreparedStatement stmt = null;
        ResultSet rs = null;
        try {
            stmt = this.datastore.prepareStatement( conn, query );
            LOG.logDebug( "Performing: " + query );
            rs = stmt.executeQuery();
            while ( rs.next() ) {
                Collection<KeyColumn> keyColumns = new ArrayList<KeyColumn>();
                for ( int i = 0; i < toFields.length; i++ ) {
                    KeyColumn column = new KeyColumn( toFields[i].getField(),
                                                      toFields[i].getType(), rs.getObject( i + 1 ) );
                    keyColumns.add( column );
                }
                TableNode propEntry = new TableNode( relation.getToTable(), keyColumns );
                propEntries.add( propEntry );
            }
        } catch ( SQLException e ) {
            LOG.logInfo( e.getMessage(), e );
            throw new DatastoreException( "Error in determinePropNodes(): " + e.getMessage() );
        } finally {
            try {
                if ( rs != null ) {
                    try {
                        rs.close();
                    } catch ( SQLException e ) {
                        LOG.logError( "Error closing result set: '" + e.getMessage() + "'.", e );
                    }
                }
            } finally {
                if ( stmt != null ) {
                    try {
                        stmt.close();
                    } catch ( SQLException e ) {
                        LOG.logError( "Error closing statement: '" + e.getMessage() + "'.", e );
                    }
                }
            }
        }
        return propEntries;
    }

    /**
     * Determines the row in the join table that connects a certain feature with a subfeature.
     * 
     * @param fid
     *            id of the (super-) feature
     * @param subFid
     *            id of the subfeature
     * @param relation1
     *            describes how the join table is attached
     * @param relation2
     *            describes how the subfeature table is joined
     * @return the join table row that connects the feature with the subfeature
     * @throws DatastoreException
     */
    TableNode determineJTNode( FeatureId fid, FeatureId subFid, TableRelation relation1,
                               TableRelation relation2 )
                            throws DatastoreException {

        LOG.logDebug( "Determining join table entry for feature " + fid + " and subfeature "
                      + subFid );
        TableNode jtEntry = null;

        this.aliasGenerator.reset();

        String featureTableAlias = this.aliasGenerator.generateUniqueAlias();
        String joinTableAlias = this.aliasGenerator.generateUniqueAlias();
        String subFeatureTableAlias = this.aliasGenerator.generateUniqueAlias();

        MappingField[] fromFields = relation1.getFromFields();
        MappingField[] fromFields2 = relation2.getFromFields();
        MappingField[] toFields = relation1.getToFields();
        MappingField[] toFields2 = relation2.getToFields();

        // need to select 'from' fields of second relation element as well
        MappingField[] selectFields = new MappingField[toFields.length + fromFields2.length];
        for ( int i = 0; i < toFields.length; i++ ) {
            selectFields[i] = toFields[i];
        }
        for ( int i = 0; i < fromFields2.length; i++ ) {
            selectFields[i + toFields.length] = fromFields2[i];
        }

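        // join feature table -> join table -> subfeature table and select the join table row
        // that links the feature with the subfeature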
        StatementBuffer query = new StatementBuffer();
        query.append( "SELECT DISTINCT " );
        for ( int i = 0; i < selectFields.length; i++ ) {
            query.append( joinTableAlias );
            query.append( "." );
            query.append( selectFields[i].getField() );
            if ( i != selectFields.length - 1 ) {
                query.append( ',' );
            }
        }
        query.append( " FROM " );
        query.append( fid.getFeatureType().getTable() );
        query.append( " " );
        query.append( featureTableAlias );
        query.append( " INNER JOIN " );
        query.append( relation1.getToTable() );
        query.append( " " );
        query.append( joinTableAlias );
        query.append( " ON " );
        for ( int j = 0; j < fromFields.length; j++ ) {
            if ( j != 0 ) {
                query.append( " AND " );
            }
            query.append( featureTableAlias );
            query.append( '.' );
            query.append( fromFields[j].getField() );
            query.append( '=' );
            query.append( joinTableAlias );
            query.append( '.' );
            query.append( toFields[j].getField() );
        }
        query.append( " INNER JOIN " );
        query.append( subFid.getFeatureType().getTable() );
        query.append( " " );
        query.append( subFeatureTableAlias );
        query.append( " ON " );
        for ( int j = 0; j < fromFields2.length; j++ ) {
            if ( j != 0 ) {
                query.append( " AND " );
            }
            query.append( joinTableAlias );
            query.append( '.' );
            query.append( fromFields2[j].getField() );
            query.append( '=' );
            query.append( subFeatureTableAlias );
            query.append( '.' );
            query.append( toFields2[j].getField() );
        }

        query.append( " WHERE " );
        appendFeatureIdConstraint( query, fid, featureTableAlias );
        query.append( " AND " );
        appendFeatureIdConstraint( query, subFid, subFeatureTableAlias );

        PreparedStatement stmt = null;
        ResultSet rs = null;
        try {
            stmt = this.datastore.prepareStatement( conn, query );
            LOG.logDebug( "Determining join table row: " + query );
            rs = stmt.executeQuery();
            if ( rs.next() ) {
                Collection<KeyColumn> keyColumns = new ArrayList<KeyColumn>( selectFields.length );
                for ( int i = 0; i < selectFields.length; i++ ) {
                    KeyColumn column = new KeyColumn( selectFields[i].getField(),
                                                      selectFields[i].getType(),
                                                      rs.getObject( i + 1 ) );
                    keyColumns.add( column );
                }

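                // if the subfeature type has several concrete implementations, the feature type
                // column is part of the key as well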
                if ( subFid.getFeatureType().hasSeveralImplementations() ) {
                    String localSubFtName = subFid.getFeatureType().getName().getLocalName();
                    KeyColumn column = new KeyColumn( FT_COLUMN, Types.VARCHAR, localSubFtName );
                    keyColumns.add( column );
                }
                jtEntry = new TableNode( relation1.getToTable(), keyColumns );
            } else {
                String msg = "Internal error: no join table row found that connects feature '" + fid
                             + "' and subfeature '" + subFid + "'.";
                throw new DatastoreException( msg );
            }
        } catch ( SQLException e ) {
            LOG.logInfo( e.getMessage(), e );
            throw new DatastoreException( "Error in determineJTNode(): " + e.getMessage() );
        } finally {
            try {
                if ( rs != null ) {
                    try {
                        rs.close();
                    } catch ( SQLException e ) {
                        LOG.logError( "Error closing result set: '" + e.getMessage() + "'.", e );
                    }
                }
            } finally {
                if ( stmt != null ) {
                    try {
                        stmt.close();
                    } catch ( SQLException e ) {
                        LOG.logError( "Error closing statement: '" + e.getMessage() + "'.", e );
                    }
                }
            }
        }
        return jtEntry;
    }

    /**
     * Delete orphaned rows in the specified property table (target table of the given
     * {@link TableRelation}).
     * <p>
     * Only used by the {@link UpdateHandler}.
     * 
     * @param relation
     * @param keyValues
     * @throws DatastoreException
     */
    public void deleteOrphanedPropertyRows( TableRelation relation, Object[] keyValues )
                            throws DatastoreException {
        Collection<KeyColumn> keyColumns = new ArrayList<KeyColumn>( keyValues.length );
        for ( int i = 0; i < keyValues.length; i++ ) {
            KeyColumn keyColumn = new KeyColumn( relation.getToFields()[i].getField(),
                                                 relation.getToFields()[i].getType(), keyValues[i] );
            keyColumns.add( keyColumn );
        }
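        // delete the property row only if no other rows reference it anymore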
        TableNode node = new TableNode( relation.getToTable(), keyColumns );
        if ( getReferencingRows( node ).size() == 0 ) {
            performDelete( node );
        }
    }

    /**
     * Returns all table rows that reference the given table row ({@link TableNode}).
     * 
     * @param node
     * @return all table rows that reference the given table row
     * @throws DatastoreException
     */
    private List<TableNode> getReferencingRows( TableNode node )
                            throws DatastoreException {

        List<TableNode> rows = new ArrayList<TableNode>();
        for ( TableReference tableReference : getReferencingTables( node.getTable() ) ) {
            rows.addAll( getReferencingRows( node, tableReference ) );
        }
        return rows;
    }

    /**
     * Returns all stored rows (as {@link TableNode}s) that reference the given row ({@link TableNode})
     * via the given reference relation.
     * 
     * @param node
     * @param ref
     * @return all stored rows that reference the given row
     * @throws DatastoreException
     */
    private List<TableNode> getReferencingRows( TableNode node, TableReference ref )
                            throws DatastoreException {

        List<TableNode> referencingRows = new ArrayList<TableNode>();
        this.aliasGenerator.reset();
        String fromAlias = this.aliasGenerator.generateUniqueAlias();
        String toAlias = this.aliasGenerator.generateUniqueAlias();
        MappingField[] fromFields = ref.getFkColumns();
        MappingField[] toFields = ref.getKeyColumns();

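        // select the foreign key values of all rows in the referencing table that point to the
        // given row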
        StatementBuffer query = new StatementBuffer();
        query.append( "SELECT DISTINCT " );
        for ( int i = 0; i < fromFields.length; i++ ) {
            query.append( fromAlias );
            query.append( "." );
            query.append( fromFields[i].getField() );
            if ( i != fromFields.length - 1 ) {
                query.append( ',' );
            }
        }
        query.append( " FROM " );
        query.append( ref.getFromTable() );
        query.append( " " );
        query.append( fromAlias );
        query.append( " INNER JOIN " );
        query.append( ref.getToTable() );
        query.append( " " );
        query.append( toAlias );
        query.append( " ON " );
        for ( int j = 0; j < fromFields.length; j++ ) {
            if ( j != 0 ) {
                query.append( " AND " );
            }
            query.append( fromAlias );
            query.append( '.' );
            query.append( fromFields[j].getField() );
            query.append( '=' );
            query.append( toAlias );
            query.append( '.' );
            query.append( toFields[j].getField() );
        }
        query.append( " WHERE " );
        int i = node.getKeyColumns().size();
        for ( KeyColumn column : node.getKeyColumns() ) {
            query.append( toAlias );
            query.append( '.' );
            query.append( column.getName() );
            query.append( "=?" );
            query.addArgument( column.getValue(), column.getTypeCode() );
            if ( --i != 0 ) {
                query.append( " AND " );
            }
        }

        PreparedStatement stmt = null;
        ResultSet rs = null;
        try {
            stmt = this.datastore.prepareStatement( conn, query );
            LOG.logDebug( "Performing: " + query );
            rs = stmt.executeQuery();
            while ( rs.next() ) {
                Collection<KeyColumn> keyColumns = new ArrayList<KeyColumn>( fromFields.length );
                for ( i = 0; i < fromFields.length; i++ ) {
                    KeyColumn column = new KeyColumn( fromFields[i].getField(),
                                                      fromFields[i].getType(), rs.getObject( i + 1 ) );
                    keyColumns.add( column );
                }
                TableNode referencingRow = new TableNode( ref.getFromTable(), keyColumns );
                referencingRows.add( referencingRow );
            }
        } catch ( SQLException e ) {
            LOG.logInfo( e.getMessage(), e );
            throw new DatastoreException( "Error in getReferencingRows(): " + e.getMessage() );
        } finally {
            try {
                if ( rs != null ) {
                    try {
                        rs.close();
                    } catch ( SQLException e ) {
                        LOG.logError( "Error closing result set: '" + e.getMessage() + "'.", e );
                    }
                }
            } finally {
                if ( stmt != null ) {
                    try {
                        stmt.close();
                    } catch ( SQLException e ) {
                        LOG.logError( "Error closing statement: '" + e.getMessage() + "'.", e );
                    }
                }
            }
        }
        return referencingRows;
    }

    /**
     * Returns all tables that reference the given table.
     * 
     * TODO cache search
     * 
     * @param table
     * @return all tables that reference the given table
     */
    private List<TableReference> getReferencingTables( String table ) {

        List<TableReference> tables = new ArrayList<TableReference>();
        MappedGMLSchema[] schemas = this.datastore.getSchemas();
        for ( int i = 0; i < schemas.length; i++ ) {
            MappedGMLSchema schema = schemas[i];
            FeatureType[] fts = schema.getFeatureTypes();
            for ( int j = 0; j < fts.length; j++ ) {
                MappedFeatureType ft = (MappedFeatureType) fts[j];
                if ( !ft.isAbstract() ) {
                    PropertyType[] props = ft.getProperties();
                    for ( int k = 0; k < props.length; k++ ) {
                        tables.addAll( getReferencingTables( (MappedPropertyType) props[k], table ) );
                    }
                }
            }
        }
        return tables;
    }

    /**
     * Returns all tables that reference the given table and that are defined in the mapping of the
     * given property type.
     * 
     * @param property
     * @param table
     * @return all tables that reference the given table
     */
    private List<TableReference> getReferencingTables( MappedPropertyType property, String table ) {

        List<TableReference> tables = new ArrayList<TableReference>();
        if ( property instanceof MappedFeaturePropertyType
             && ( (MappedFeaturePropertyType) property ).getFeatureTypeReference().getFeatureType().isAbstract() ) {
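            // the property points to an abstract feature type: the last relation step has to be
            // checked against the tables of all concrete substitutions instead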
            TableRelation[] relations = property.getTableRelations();
            for ( int j = 0; j < relations.length - 1; j++ ) {
                TableReference ref = new TableReference( relations[j] );
                if ( ref.getToTable().equals( table ) ) {
                    tables.add( ref );
                }
            }
            MappedFeaturePropertyType pt = (MappedFeaturePropertyType) property;
            MappedFeatureType abstractFt = pt.getFeatureTypeReference().getFeatureType();
            MappedFeatureType[] substitutions = abstractFt.getConcreteSubstitutions();
            for ( MappedFeatureType concreteType : substitutions ) {
                TableRelation finalStep = relations[relations.length - 1];
                TableReference ref = new TableReference( getTableRelation( finalStep,
                                                                           concreteType.getTable() ) );
                if ( ref.getToTable().equals( table ) ) {
                    tables.add( ref );
                }
            }

        } else {
            TableRelation[] relations = property.getTableRelations();
            for ( int j = 0; j < relations.length; j++ ) {
                TableReference ref = new TableReference( relations[j] );
                if ( ref.getToTable().equals( table ) ) {
                    tables.add( ref );
                }
            }
        }
        return tables;
    }

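    /**
     * Returns a variant of the given {@link TableRelation} with the 'to' fields bound to the given
     * (concrete feature type) table instead of the table of the abstract feature type.
     * 
     * @param toAbstractSubFt
     *            relation that targets the table of the abstract feature type
     * @param table
     *            name of the concrete substitution feature type's table
     * @return relation that targets the given table
     */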
    private TableRelation getTableRelation( TableRelation toAbstractSubFt, String table ) {
        MappingField[] toConcreteFields = new MappingField[toAbstractSubFt.getToFields().length];
        for ( int i = 0; i < toConcreteFields.length; i++ ) {
            MappingField toAbstractField = toAbstractSubFt.getToFields()[i];
            toConcreteFields[i] = new MappingField( table, toAbstractField.getField(),
                                                    toAbstractField.getType() );
        }
        TableRelation toConcreteSubFt = new TableRelation( toAbstractSubFt.getFromFields(),
                                                           toConcreteFields,
                                                           toAbstractSubFt.getFKInfo(),
                                                           toAbstractSubFt.getIdGenerator() );
        return toConcreteSubFt;
    }
}