//$HeadURL: svn+ssh://jwilden@svn.wald.intevation.org/deegree/base/branches/2.5_testing/src/org/deegree/io/datastore/sql/transaction/delete/DeleteHandler.java $
/*----------------------------------------------------------------------------
 This file is part of deegree, http://deegree.org/
 Copyright (C) 2001-2009 by:
   Department of Geography, University of Bonn
 and
   lat/lon GmbH

 This library is free software; you can redistribute it and/or modify it under
 the terms of the GNU Lesser General Public License as published by the Free
 Software Foundation; either version 2.1 of the License, or (at your option)
 any later version.
 This library is distributed in the hope that it will be useful, but WITHOUT
 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
 FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
 details.
 You should have received a copy of the GNU Lesser General Public License
 along with this library; if not, write to the Free Software Foundation, Inc.,
 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

 Contact information:

 lat/lon GmbH
 Aennchenstr. 19, 53177 Bonn
 Germany
 http://lat-lon.de/

 Department of Geography, University of Bonn
 Prof. Dr. Klaus Greve
 Postfach 1147, 53001 Bonn
 Germany
 http://www.geographie.uni-bonn.de/deegree/

 e-mail: info@deegree.org
----------------------------------------------------------------------------*/
package org.deegree.io.datastore.sql.transaction.delete;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

import org.deegree.datatypes.Types;
import org.deegree.framework.log.ILogger;
import org.deegree.framework.log.LoggerFactory;
import org.deegree.i18n.Messages;
import org.deegree.io.datastore.Datastore;
import org.deegree.io.datastore.DatastoreException;
import org.deegree.io.datastore.FeatureId;
import org.deegree.io.datastore.schema.MappedFeaturePropertyType;
import org.deegree.io.datastore.schema.MappedFeatureType;
import org.deegree.io.datastore.schema.MappedGMLSchema;
import org.deegree.io.datastore.schema.MappedPropertyType;
import org.deegree.io.datastore.schema.TableRelation;
import org.deegree.io.datastore.schema.content.MappingField;
import org.deegree.io.datastore.sql.AbstractRequestHandler;
import org.deegree.io.datastore.sql.StatementBuffer;
import org.deegree.io.datastore.sql.TableAliasGenerator;
import org.deegree.io.datastore.sql.transaction.SQLTransaction;
import org.deegree.io.datastore.sql.transaction.UpdateHandler;
import org.deegree.model.feature.schema.FeatureType;
import org.deegree.model.feature.schema.PropertyType;
import org.deegree.model.filterencoding.Filter;
import org.deegree.ogcwebservices.wfs.operation.transaction.Delete;
import org.deegree.ogcwebservices.wfs.operation.transaction.Transaction;

/**
 * Handler for {@link Delete} operations (which usually occur as parts of {@link Transaction} requests).
 * <p>
 * When a {@link Delete} operation is performed, the following actions are taken:
 * <ul>
 * <li>the {@link FeatureId}s of all (root) feature instances that match the associated {@link Filter} are determined</li>
 * <li>the {@link FeatureGraph} is built in order to determine which features may be deleted without removing
 * subfeatures of independent features</li>
 * <li>the {@link TableGraph} is built that contains explicit information on all table rows that have to be deleted (and
 * their dependencies)</li>
 * <li>the {@link TableNode}s of the {@link TableGraph} are sorted in topological order, i.e. they may be deleted in
 * that order without violating any foreign key constraints</li>
 * </ul>
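 * <p>
 * A minimal usage sketch (hypothetical variable names; the surrounding {@link SQLTransaction} <code>dsTa</code>, the
 * {@link TableAliasGenerator} <code>aliasGenerator</code>, the open JDBC connection <code>conn</code> as well as the
 * {@link MappedFeatureType} <code>ft</code> and the {@link Filter} <code>filter</code> are assumed to be given):
 *
 * <pre>
 * DeleteHandler handler = new DeleteHandler( dsTa, aliasGenerator, conn, null );
 * int deletedFeatures = handler.performDelete( ft, filter );
 * </pre>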
 *
 * @see FeatureGraph
 * @see TableGraph
 *
 * @author <a href="mailto:schneider@lat-lon.de">Markus Schneider</a>
 * @author last edited by: $Author: mschneider $
 *
 * @version $Revision: 18195 $, $Date: 2009-06-18 17:55:39 +0200 (Do, 18 Jun 2009) $
 */
public class DeleteHandler extends AbstractRequestHandler {

    private static final ILogger LOG = LoggerFactory.getLogger( DeleteHandler.class );

    private String lockId;

    /**
     * Creates a new <code>DeleteHandler</code> from the given parameters.
     *
     * @param dsTa
     *            associated transaction (provides the {@link Datastore} that is operated on)
     * @param aliasGenerator
     *            used to generate unique table aliases in SQL statements
     * @param conn
     *            JDBC connection to be used for the delete operations
     * @param lockId
     *            optional id of associated lock (may be null)
     */
    public DeleteHandler( SQLTransaction dsTa, TableAliasGenerator aliasGenerator, Connection conn, String lockId ) {
        super( dsTa.getDatastore(), aliasGenerator, conn );
        this.lockId = lockId;
    }

    /**
     * Deletes the features from the {@link Datastore} that have a certain type and are matched by the given filter.
     *
     * @param ft
     *            non-abstract feature type of the features to be deleted
     * @param filter
     *            constrains the feature instances to be deleted
     * @return number of deleted feature instances
     * @throws DatastoreException
     */
    public int performDelete( MappedFeatureType ft, Filter filter )
                            throws DatastoreException {

        assert !ft.isAbstract();

        if ( !ft.isDeletable() ) {
            String msg = Messages.getMessage( "DATASTORE_FT_NOT_DELETABLE", ft.getName() );
            throw new DatastoreException( msg );
        }

        List<FeatureId> fids = determineAffectedAndModifiableFIDs( ft, filter, this.lockId );

        if ( LOG.getLevel() == ILogger.LOG_DEBUG ) {
            LOG.logDebug( "Affected fids:" );
            for ( FeatureId fid : fids ) {
                LOG.logDebug( "" + fid );
            }
        }

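        // build the feature graph (feature-level dependencies) first, then derive the table graph, which holds the
        // concrete table rows to be deleted along with their foreign key dependencies (see class documentation)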
        FeatureGraph featureGraph = new FeatureGraph( fids, this );
        TableGraph tableGraph = new TableGraph( featureGraph, this );

        if ( LOG.getLevel() == ILogger.LOG_DEBUG ) {
            LOG.logDebug( "FeatureGraph: " + featureGraph );
            LOG.logDebug( "TableGraph: " + tableGraph );
        }

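        // delete the rows in topological order; rows for which a delete veto is possible are only removed if no
        // referencing rows remain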
        List<TableNode> sortedNodes = tableGraph.getDeletionOrder();
        for ( TableNode node : sortedNodes ) {
            boolean delete = true;
            if ( node.isDeleteVetoPossible() ) {
                List<TableNode> referencingRows = getReferencingRows( node );
                if ( referencingRows.size() > 0 ) {
                    delete = false;
                    LOG.logDebug( "Skipping delete of " + node + ": " + referencingRows.size() + " reference(s) exist." );
                    for ( TableNode referencingNode : referencingRows ) {
                        LOG.logDebug( "Referenced by: " + referencingNode );
                    }
                }
            }
            if ( delete ) {
                performDelete( node );
            }
        }

        int deletedFeatures = tableGraph.getDeletableRootFeatureCount();

        if ( deletedFeatures != fids.size() ) {
            String msg = Messages.getMessage( "DATASTORE_COULD_NOT_DELETE_ALL" );
            LOG.logInfo( msg );
        }

        // return count of actually deleted (root) features
        return deletedFeatures;
    }

    /**
     * Deletes the table entry from the SQL database that is represented by the given {@link TableNode}.
     *
     * @param node
     * @throws DatastoreException
     */
    private void performDelete( TableNode node )
                            throws DatastoreException {

        StatementBuffer query = new StatementBuffer();
        query.append( "DELETE FROM " );
        query.append( node.getTable() );
        query.append( " WHERE " );
        boolean first = true;
        for ( KeyColumn column : node.getKeyColumns() ) {
            if ( first ) {
                first = false;
            } else {
                query.append( " AND " );
            }
            query.append( column.getName() );
            query.append( "=?" );
            query.addArgument( column.getValue(), column.getTypeCode() );
        }
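        // the assembled statement has the form: DELETE FROM <table> WHERE <key1>=? AND <key2>=? ...
        // (the key values are bound as statement arguments)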

        PreparedStatement stmt = null;
        try {
            stmt = this.datastore.prepareStatement( conn, query );
            LOG.logDebug( "Deleting row: " + query );
            stmt.execute();
        } catch ( SQLException e ) {
            String msg = "Error performing delete '" + query + "': " + e.getMessage();
            LOG.logInfo( msg, e );
            throw new DatastoreException( msg );
        } finally {
            if ( stmt != null ) {
                try {
                    stmt.close();
                } catch ( SQLException e ) {
                    String msg = "Error closing statement: " + e.getMessage();
                    LOG.logError( msg, e );
                }
            }
        }
    }

    /**
     * Determines the {@link TableNode}s that represent the simple/geometry properties in the property table attached
     * via the given {@link TableRelation}.
     *
     * @param fid
     *            id of the feature that owns the properties
     * @param relation
     *            describes how the property table is joined to the feature table
     * @return the simple/geometry properties in the related property table
     * @throws DatastoreException
     */
    List<TableNode> determinePropNodes( FeatureId fid, TableRelation relation )
                            throws DatastoreException {

        List<TableNode> propEntries = new ArrayList<TableNode>();

        this.aliasGenerator.reset();
        String fromAlias = this.aliasGenerator.generateUniqueAlias();
        String toAlias = this.aliasGenerator.generateUniqueAlias();
        MappingField[] fromFields = relation.getFromFields();
        MappingField[] toFields = relation.getToFields();

        StatementBuffer query = new StatementBuffer();
        query.append( "SELECT DISTINCT " );
        for ( int i = 0; i < toFields.length; i++ ) {
            query.append( toAlias );
            query.append( "." );
            query.append( toFields[i].getField() );
            if ( i != toFields.length - 1 ) {
                query.append( ',' );
            }
        }
        query.append( " FROM " );
        query.append( fid.getFeatureType().getTable() );
        query.append( " " );
        query.append( fromAlias );
        query.append( " INNER JOIN " );
        query.append( relation.getToTable() );
        query.append( " " );
        query.append( toAlias );
        query.append( " ON " );
        for ( int j = 0; j < fromFields.length; j++ ) {
            if ( j > 0 ) {
                // separate the join conditions of compound keys
                query.append( " AND " );
            }
            query.append( fromAlias );
            query.append( '.' );
            query.append( fromFields[j].getField() );
            query.append( '=' );
            query.append( toAlias );
            query.append( '.' );
            query.append( toFields[j].getField() );
        }
274            query.append( " WHERE " );
275            appendFeatureIdConstraint( query, fid, fromAlias );
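        // the assembled query has the form:
        // SELECT DISTINCT t2.<toField>,... FROM <featureTable> t1 INNER JOIN <propertyTable> t2
        //   ON t1.<fromField>=t2.<toField> ... WHERE <fid constraint on t1>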

        PreparedStatement stmt = null;
        ResultSet rs = null;
        try {
            stmt = this.datastore.prepareStatement( conn, query );
            LOG.logDebug( "Performing: " + query );
            rs = stmt.executeQuery();
            while ( rs.next() ) {
                Collection<KeyColumn> keyColumns = new ArrayList<KeyColumn>();
                for ( int i = 0; i < toFields.length; i++ ) {
                    KeyColumn column = new KeyColumn( toFields[i].getField(), toFields[i].getType(),
                                                      rs.getObject( i + 1 ) );
                    keyColumns.add( column );
                }
                TableNode propEntry = new TableNode( relation.getToTable(), keyColumns );
                propEntries.add( propEntry );
            }
        } catch ( SQLException e ) {
            LOG.logInfo( e.getMessage(), e );
295                throw new DatastoreException( "Error in addPropertyNodes(): " + e.getMessage() );
        } finally {
            try {
                if ( rs != null ) {
                    try {
                        rs.close();
                    } catch ( SQLException e ) {
                        LOG.logError( "Error closing result set: '" + e.getMessage() + "'.", e );
                    }
                }
            } finally {
                if ( stmt != null ) {
                    try {
                        stmt.close();
                    } catch ( SQLException e ) {
                        LOG.logError( "Error closing statement: '" + e.getMessage() + "'.", e );
                    }
                }
            }
        }
        return propEntries;
    }

    /**
     * Determines the row in the join table that connects a certain feature with a subfeature.
     *
     * @param fid
     *            id of the (super-) feature
     * @param subFid
     *            id of the subfeature
     * @param relation1
     *            describes how the join table is attached
     * @param relation2
     *            describes how the subfeature table is joined
     * @return join table row (as a {@link TableNode})
     * @throws DatastoreException
     */
    TableNode determineJTNode( FeatureId fid, FeatureId subFid, TableRelation relation1, TableRelation relation2 )
                            throws DatastoreException {

        LOG.logDebug( "Determining join table entry for feature " + fid + " and subfeature " + subFid );
        TableNode jtEntry = null;

        this.aliasGenerator.reset();

        String featureTableAlias = this.aliasGenerator.generateUniqueAlias();
        String joinTableAlias = this.aliasGenerator.generateUniqueAlias();
        String subFeatureTableAlias = this.aliasGenerator.generateUniqueAlias();

        MappingField[] fromFields = relation1.getFromFields();
        MappingField[] fromFields2 = relation2.getFromFields();
        MappingField[] toFields = relation1.getToFields();
        MappingField[] toFields2 = relation2.getToFields();

        // need to select 'from' fields of second relation element as well
        MappingField[] selectFields = new MappingField[toFields.length + fromFields2.length];
        for ( int i = 0; i < toFields.length; i++ ) {
            selectFields[i] = toFields[i];
        }
        for ( int i = 0; i < fromFields2.length; i++ ) {
            selectFields[i + toFields.length] = fromFields2[i];
        }

        StatementBuffer query = new StatementBuffer();
        query.append( "SELECT DISTINCT " );
        for ( int i = 0; i < selectFields.length; i++ ) {
            query.append( joinTableAlias );
            query.append( "." );
            query.append( selectFields[i].getField() );
            if ( i != selectFields.length - 1 ) {
                query.append( ',' );
            }
        }
        query.append( " FROM " );
        query.append( fid.getFeatureType().getTable() );
        query.append( " " );
        query.append( featureTableAlias );
        query.append( " INNER JOIN " );
        query.append( relation1.getToTable() );
        query.append( " " );
        query.append( joinTableAlias );
        query.append( " ON " );
        for ( int j = 0; j < fromFields.length; j++ ) {
            if ( j > 0 ) {
                // separate the join conditions of compound keys
                query.append( " AND " );
            }
            query.append( featureTableAlias );
            query.append( '.' );
            query.append( fromFields[j].getField() );
            query.append( '=' );
            query.append( joinTableAlias );
            query.append( '.' );
            query.append( toFields[j].getField() );
        }
        query.append( " INNER JOIN " );
        query.append( subFid.getFeatureType().getTable() );
        query.append( " " );
        query.append( subFeatureTableAlias );
        query.append( " ON " );
        for ( int j = 0; j < fromFields2.length; j++ ) {
            if ( j > 0 ) {
                // separate the join conditions of compound keys
                query.append( " AND " );
            }
            query.append( joinTableAlias );
            query.append( '.' );
            query.append( fromFields2[j].getField() );
            query.append( '=' );
            query.append( subFeatureTableAlias );
            query.append( '.' );
            query.append( toFields2[j].getField() );
        }

        query.append( " WHERE " );
        appendFeatureIdConstraint( query, fid, featureTableAlias );
        query.append( " AND " );
        appendFeatureIdConstraint( query, subFid, subFeatureTableAlias );
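        // the assembled query has the form:
        // SELECT DISTINCT t2.<keyField>,... FROM <featureTable> t1 INNER JOIN <joinTable> t2 ON ...
        //   INNER JOIN <subFeatureTable> t3 ON ... WHERE <fid constraint on t1> AND <subFid constraint on t3>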

        PreparedStatement stmt = null;
        ResultSet rs = null;
        try {
            stmt = this.datastore.prepareStatement( conn, query );
            LOG.logDebug( "Determining join table row: " + query );
            rs = stmt.executeQuery();
            if ( rs.next() ) {
                Collection<KeyColumn> keyColumns = new ArrayList<KeyColumn>( selectFields.length );
                for ( int i = 0; i < selectFields.length; i++ ) {
                    KeyColumn column = new KeyColumn( selectFields[i].getField(), selectFields[i].getType(),
                                                      rs.getObject( i + 1 ) );
                    keyColumns.add( column );
                }

                if ( subFid.getFeatureType().hasSeveralImplementations() ) {
                    String localSubFtName = subFid.getFeatureType().getName().getLocalName();
                    KeyColumn column = new KeyColumn( FT_COLUMN, Types.VARCHAR, localSubFtName );
                    keyColumns.add( column );
                }
                jtEntry = new TableNode( relation1.getToTable(), keyColumns );
            } else {
427                    String msg = "This is impossible: No join table row between feature and subfeature!?";
                throw new DatastoreException( msg );
            }
        } catch ( SQLException e ) {
            LOG.logInfo( e.getMessage(), e );
            throw new DatastoreException( "Error in determineJTNode(): " + e.getMessage() );
        } finally {
            try {
                if ( rs != null ) {
                    try {
                        rs.close();
                    } catch ( SQLException e ) {
                        LOG.logError( "Error closing result set: '" + e.getMessage() + "'.", e );
                    }
                }
            } finally {
                if ( stmt != null ) {
                    try {
                        stmt.close();
                    } catch ( SQLException e ) {
                        LOG.logError( "Error closing statement: '" + e.getMessage() + "'.", e );
                    }
                }
            }
        }
        return jtEntry;
    }

    /**
     * Deletes orphaned rows in the specified property table (target table of the given {@link TableRelation}).
     * <p>
     * Only used by the {@link UpdateHandler}.
     *
     * @param relation
     * @param keyValues
     * @throws DatastoreException
     */
    public void deleteOrphanedPropertyRows( TableRelation relation, Object[] keyValues )
                            throws DatastoreException {
        Collection<KeyColumn> keyColumns = new ArrayList<KeyColumn>( keyValues.length );
        for ( int i = 0; i < keyValues.length; i++ ) {
            KeyColumn keyColumn = new KeyColumn( relation.getToFields()[i].getField(),
                                                 relation.getToFields()[i].getType(), keyValues[i] );
            keyColumns.add( keyColumn );
        }
        TableNode node = new TableNode( relation.getToTable(), keyColumns );
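        // only delete the property row if no other rows reference it anymore (i.e. it is really orphaned)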
        if ( getReferencingRows( node ).size() == 0 ) {
            performDelete( node );
        }
    }

    /**
     * Returns all table rows that reference the given table row ({@link TableNode}).
     *
     * @param node
     * @return all table rows that reference the given table row
     * @throws DatastoreException
     */
    private List<TableNode> getReferencingRows( TableNode node )
                            throws DatastoreException {

        List<TableNode> rows = new ArrayList<TableNode>();
        for ( TableReference tableReference : getReferencingTables( node.getTable() ) ) {
            rows.addAll( getReferencingRows( node, tableReference ) );
        }
        return rows;
    }

    /**
     * Returns all stored rows (as {@link TableNode}s) that reference the given row ({@link TableNode}) via the given
     * reference relation.
     *
     * @param node
     * @param ref
     * @return all stored rows that reference the given row
     * @throws DatastoreException
     */
    private List<TableNode> getReferencingRows( TableNode node, TableReference ref )
                            throws DatastoreException {

        List<TableNode> referencingRows = new ArrayList<TableNode>();
        this.aliasGenerator.reset();
        String fromAlias = this.aliasGenerator.generateUniqueAlias();
        String toAlias = this.aliasGenerator.generateUniqueAlias();
        MappingField[] fromFields = ref.getFkColumns();
        MappingField[] toFields = ref.getKeyColumns();

        StatementBuffer query = new StatementBuffer();
        query.append( "SELECT DISTINCT " );
        for ( int i = 0; i < fromFields.length; i++ ) {
            query.append( fromAlias );
            query.append( "." );
            query.append( fromFields[i].getField() );
            if ( i != fromFields.length - 1 ) {
                query.append( ',' );
            }
        }
        query.append( " FROM " );
        query.append( ref.getFromTable() );
        query.append( " " );
        query.append( fromAlias );
        query.append( " INNER JOIN " );
        query.append( ref.getToTable() );
        query.append( " " );
        query.append( toAlias );
        query.append( " ON " );
        for ( int j = 0; j < fromFields.length; j++ ) {
            if ( j > 0 ) {
                // separate the join conditions of compound keys
                query.append( " AND " );
            }
            query.append( fromAlias );
            query.append( '.' );
            query.append( fromFields[j].getField() );
            query.append( '=' );
            query.append( toAlias );
            query.append( '.' );
            query.append( toFields[j].getField() );
        }
542            query.append( " WHERE " );
543            int i = node.getKeyColumns().size();
544            for ( KeyColumn column : node.getKeyColumns() ) {
545                query.append( toAlias );
546                query.append( '.' );
547                query.append( column.getName() );
548                query.append( "=?" );
549                query.addArgument( column.getValue(), column.getTypeCode() );
550                if ( --i != 0 ) {
551                    query.append( " AND " );
552                }
553            }
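        // the assembled query has the form:
        // SELECT DISTINCT t1.<fkColumn>,... FROM <referencingTable> t1 INNER JOIN <referencedTable> t2
        //   ON t1.<fkColumn>=t2.<keyColumn> ... WHERE t2.<keyColumn1>=? AND t2.<keyColumn2>=? ...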

        PreparedStatement stmt = null;
        ResultSet rs = null;
        try {
            stmt = this.datastore.prepareStatement( conn, query );
            LOG.logDebug( "Performing: " + query );
            rs = stmt.executeQuery();
            while ( rs.next() ) {
                Collection<KeyColumn> keyColumns = new ArrayList<KeyColumn>( fromFields.length );
                for ( i = 0; i < fromFields.length; i++ ) {
                    KeyColumn column = new KeyColumn( fromFields[i].getField(), fromFields[i].getType(),
                                                      rs.getObject( i + 1 ) );
                    keyColumns.add( column );
                }
                TableNode referencingRow = new TableNode( ref.getFromTable(), keyColumns );
                referencingRows.add( referencingRow );
            }
        } catch ( SQLException e ) {
            throw new DatastoreException( "Error in getReferencingRows(): " + e.getMessage() );
        } finally {
            try {
                if ( rs != null ) {
                    try {
                        rs.close();
                    } catch ( SQLException e ) {
                        LOG.logError( "Error closing result set: '" + e.getMessage() + "'.", e );
                    }
                }
            } finally {
                if ( stmt != null ) {
                    try {
                        stmt.close();
                    } catch ( SQLException e ) {
                        LOG.logError( "Error closing statement: '" + e.getMessage() + "'.", e );
                    }
                }
            }
        }
        return referencingRows;
    }

    /**
     * Returns all tables that reference the given table.
     *
     * TODO cache search
     *
     * @param table
     * @return all tables that reference the given table
     */
    private List<TableReference> getReferencingTables( String table ) {

        List<TableReference> tables = new ArrayList<TableReference>();
        MappedGMLSchema[] schemas = this.datastore.getSchemas();
        for ( int i = 0; i < schemas.length; i++ ) {
            MappedGMLSchema schema = schemas[i];
            FeatureType[] fts = schema.getFeatureTypes();
            for ( int j = 0; j < fts.length; j++ ) {
                MappedFeatureType ft = (MappedFeatureType) fts[j];
                if ( !ft.isAbstract() ) {
                    PropertyType[] props = ft.getProperties();
                    for ( int k = 0; k < props.length; k++ ) {
                        tables.addAll( getReferencingTables( (MappedPropertyType) props[k], table ) );
                    }
                }
            }
        }
        return tables;
    }

    /**
     * Returns all tables that reference the given table and that are defined in the mapping of the given property type.
     *
     * @param property
     * @param table
     * @return all tables that reference the given table
     */
    private List<TableReference> getReferencingTables( MappedPropertyType property, String table ) {

        List<TableReference> tables = new ArrayList<TableReference>();
        if ( property instanceof MappedFeaturePropertyType
             && ( (MappedFeaturePropertyType) property ).getFeatureTypeReference().getFeatureType().isAbstract() ) {
            TableRelation[] relations = property.getTableRelations();
            for ( int j = 0; j < relations.length - 1; j++ ) {
                TableReference ref = new TableReference( relations[j] );
                if ( ref.getToTable().equals( table ) ) {
                    tables.add( ref );
                }
            }
            MappedFeaturePropertyType pt = (MappedFeaturePropertyType) property;
            MappedFeatureType abstractFt = pt.getFeatureTypeReference().getFeatureType();
            MappedFeatureType[] substitutions = abstractFt.getConcreteSubstitutions();
            for ( MappedFeatureType concreteType : substitutions ) {
                TableRelation finalStep = relations[relations.length - 1];
                TableReference ref = new TableReference( getTableRelation( finalStep, concreteType.getTable() ) );
                if ( ref.getToTable().equals( table ) ) {
                    tables.add( ref );
                }
            }

        } else {
            TableRelation[] relations = property.getTableRelations();
            for ( int j = 0; j < relations.length; j++ ) {
                TableReference ref = new TableReference( relations[j] );
                if ( ref.getToTable().equals( table ) ) {
                    tables.add( ref );
                }
            }
        }
        return tables;
    }

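    /**
     * Returns a variant of the given {@link TableRelation} whose 'to' fields are bound to the given table (the table
     * of a concrete substitution feature type) instead of the table of the abstract feature type.
     *
     * @param toAbstractSubFt
     *            relation that targets the table of an abstract subfeature type
     * @param table
     *            table of a concrete substitution feature type
     * @return copy of the relation that targets the given table
     */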
    private TableRelation getTableRelation( TableRelation toAbstractSubFt, String table ) {
        MappingField[] toConcreteFields = new MappingField[toAbstractSubFt.getToFields().length];
        for ( int i = 0; i < toConcreteFields.length; i++ ) {
            MappingField toAbstractField = toAbstractSubFt.getToFields()[i];
            toConcreteFields[i] = new MappingField( table, toAbstractField.getField(), toAbstractField.getType() );
        }
        TableRelation toConcreteSubFt = new TableRelation( toAbstractSubFt.getFromFields(), toConcreteFields,
                                                           toAbstractSubFt.getFKInfo(),
                                                           toAbstractSubFt.getIdGenerator() );
        return toConcreteSubFt;
    }
}