001    //$HeadURL: svn+ssh://jwilden@svn.wald.intevation.org/deegree/base/branches/2.5_testing/src/org/deegree/tools/datastore/DDLGenerator.java $
002    /*----------------------------------------------------------------------------
003     This file is part of deegree, http://deegree.org/
004     Copyright (C) 2001-2009 by:
005       Department of Geography, University of Bonn
006     and
007       lat/lon GmbH
008    
009     This library is free software; you can redistribute it and/or modify it under
010     the terms of the GNU Lesser General Public License as published by the Free
011     Software Foundation; either version 2.1 of the License, or (at your option)
012     any later version.
013     This library is distributed in the hope that it will be useful, but WITHOUT
014     ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
015     FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
016     details.
017     You should have received a copy of the GNU Lesser General Public License
018     along with this library; if not, write to the Free Software Foundation, Inc.,
019     59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
020    
021     Contact information:
022    
023     lat/lon GmbH
024     Aennchenstr. 19, 53177 Bonn
025     Germany
026     http://lat-lon.de/
027    
028     Department of Geography, University of Bonn
029     Prof. Dr. Klaus Greve
030     Postfach 1147, 53001 Bonn
031     Germany
032     http://www.geographie.uni-bonn.de/deegree/
033    
034     e-mail: info@deegree.org
035    ----------------------------------------------------------------------------*/
036    package org.deegree.tools.datastore;
037    
038    import java.io.File;
039    import java.io.FileOutputStream;
040    import java.io.IOException;
041    import java.io.OutputStreamWriter;
042    import java.io.PrintWriter;
043    import java.net.MalformedURLException;
044    import java.net.URL;
045    import java.util.ArrayList;
046    import java.util.Collection;
047    import java.util.HashMap;
048    import java.util.HashSet;
049    import java.util.Iterator;
050    import java.util.LinkedHashMap;
051    import java.util.Map;
052    import java.util.Set;
053    
054    import org.deegree.datatypes.Types;
055    import org.deegree.datatypes.UnknownTypeException;
056    import org.deegree.framework.xml.XMLParsingException;
057    import org.deegree.framework.xml.schema.XMLSchemaException;
058    import org.deegree.io.datastore.schema.MappedFeaturePropertyType;
059    import org.deegree.io.datastore.schema.MappedFeatureType;
060    import org.deegree.io.datastore.schema.MappedGMLId;
061    import org.deegree.io.datastore.schema.MappedGMLSchema;
062    import org.deegree.io.datastore.schema.MappedGMLSchemaDocument;
063    import org.deegree.io.datastore.schema.MappedGeometryPropertyType;
064    import org.deegree.io.datastore.schema.MappedPropertyType;
065    import org.deegree.io.datastore.schema.MappedSimplePropertyType;
066    import org.deegree.io.datastore.schema.TableRelation;
067    import org.deegree.io.datastore.schema.content.MappingField;
068    import org.deegree.io.datastore.schema.content.MappingGeometryField;
069    import org.deegree.io.datastore.schema.content.SimpleContent;
070    import org.deegree.io.datastore.sql.idgenerator.DBSeqIdGenerator;
071    import org.deegree.model.crs.UnknownCRSException;
072    import org.deegree.model.feature.schema.FeatureType;
073    import org.deegree.model.feature.schema.PropertyType;
074    import org.xml.sax.SAXException;
075    
076    /**
077     * Abstract base class for DDL generation from annotated GML schema files.
078     * <p>
079     * This abstract base class implements only the functionality needed to collect the tables and columns used in an
080     * annotated GML schema. The parts of the DDL generation that depend on the specific SQL backend are implemented in
081     * concrete extensions of this class.
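        *
        * <p>
        * A minimal usage sketch (exception handling omitted; the schema URL, file names and db schema name are
        * illustrative; <code>PostGISDDLGenerator</code> is one of the concrete implementations used by the
        * <code>main</code> method):
        *
        * <pre>
        * URL schemaURL = new File( "example.xsd" ).toURI().toURL();
        * DDLGenerator generator = new PostGISDDLGenerator( schemaURL );
        * generator.generateCreateScript( "create.sql", "myschema" );
        * generator.generateDropScript( "drop.sql", "myschema" );
        * </pre>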
082     *
083     * @author <a href="mailto:schneider@lat-lon.de">Markus Schneider</a>
084     * @author last edited by: $Author: mschneider $
085     *
086     * @version $Revision: 18195 $, $Date: 2009-06-18 17:55:39 +0200 (Do, 18 Jun 2009) $
087     */
088    public abstract class DDLGenerator {
089    
090        protected static final String FT_PREFIX = "FT_";
091    
092        protected static final int FEATURE_TYPE_TABLE = 0;
093    
094        protected static final int JOIN_TABLE = 1;
095    
096        protected static final int MULTI_PROPERTY_TABLE = 2;
097    
098        protected MappedGMLSchema schema;
099    
100        // key type: String (table names), value type: TableDefinition
101        protected Map<String, TableDefinition> tables = new HashMap<String, TableDefinition>();
102    
103        // names of sequences (for id generation)
104        protected Set<String> sequences = new HashSet<String>();
105    
106        /**
107     * Generates the SQL statements necessary for setting the schema search path. Must be overridden by the concrete
108         * implementation.
109         *
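        * <p>
        * Illustrative sketch of what a PostgreSQL-flavoured implementation might return (not necessarily the actual
        * PostGISDDLGenerator code; the required statement is backend-specific):
        *
        * <pre>
        * protected StringBuffer generateSetSchemaStmt( String dbSchemaName ) {
        *     return new StringBuffer( "SET search_path TO " ).append( dbSchemaName ).append( ", public;\n" );
        * }
        * </pre>
        *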
110         * @param dbSchemaName
111         * @return the SQL statements necessary for setting the schema search path accordingly
112         */
113        protected abstract StringBuffer generateSetSchemaStmt( String dbSchemaName );
114    
115        /**
116     * Generates the DDL statements necessary for the creation of the given schema. May be overridden by a concrete
117         * implementation.
118         *
119         * @param dbSchemaName
120         * @return the DDL statements necessary for the creation of the given db schema
121         */
122        protected StringBuffer generateCreateSchemaStmts( String dbSchemaName ) {
123            StringBuffer sb = new StringBuffer( "CREATE SCHEMA " );
124            sb.append( dbSchemaName );
125            sb.append( ";\n" );
126            return sb;
127        }
128    
129        /**
130     * Generates the DDL statements necessary for the creation of the given table definition. Must be overridden by the
131         * concrete implementation.
132         *
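        * <p>
        * The generated DDL typically resembles the following (illustrative only; the actual column names, types and
        * constraints are derived from the given {@link TableDefinition}):
        *
        * <pre>
        * CREATE TABLE FT_EXAMPLE ( ID INTEGER NOT NULL, NAME VARCHAR(255), PRIMARY KEY (ID) );
        * </pre>
        *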
133         * @param table
134         * @return the DDL statements necessary for the creation of the given table definition
135         */
136        protected abstract StringBuffer generateCreateTableStmt( TableDefinition table );
137    
138        /**
139         * Generates the DDL statements necessary for the creation of standard indexes for the given table definition. Must
140     * be overridden by the concrete implementation.
141         *
142         * @param table
143         * @return the DDL statements necessary for the creation of standard indexes for the given table definition
144         */
145        protected abstract StringBuffer generateCreateIndexStmts( TableDefinition table );
146    
147        /**
148     * Generates the DDL statements necessary for the creation of the given sequence. May be overridden by a concrete
149         * implementation.
150         *
151         * @param sequenceName
152         * @return the DDL statements necessary for the creation of the given sequence definition
153         */
154        protected StringBuffer generateCreateSequenceStmt( String sequenceName ) {
155            StringBuffer sb = new StringBuffer( "CREATE SEQUENCE " );
156            sb.append( sequenceName );
157            sb.append( ";\n" );
158            return sb;
159        }
160    
161        /**
162     * Generates the DDL statements necessary for the removal of the given schema. May be overridden by a concrete
163         * implementation.
164         *
165         * @param dbSchemaName
166         * @return the DDL statements necessary for the removal of the given db schema
167         */
168        protected StringBuffer generateDropSchemaStmt( String dbSchemaName ) {
169            StringBuffer sb = new StringBuffer();
170            sb.append( "DROP SCHEMA " );
171            sb.append( dbSchemaName );
172            sb.append( " CASCADE;\n" );
173            return sb;
174        }
175    
176        /**
177     * Generates the DDL statements necessary for the removal of the given table definition. May be overridden by a
178         * concrete implementation.
179         *
180         * @param table
181         * @return the DDL statements necessary for the removal of the given table definition
182         */
183        protected StringBuffer generateDropTableStmt( TableDefinition table ) {
184            StringBuffer sb = new StringBuffer();
185            sb.append( "DROP TABLE " );
186            sb.append( table.getName() );
187            sb.append( " CASCADE;\n" );
188            return sb;
189        }
190    
191        /**
192         * Generates the DDL statements necessary for the dropping of standard indexes for the given table definition. May
193     * be overridden by a concrete implementation.
194         *
195         * @param table
196         * @return the DDL statements necessary for the dropping of standard indexes for the given table definition
197         */
198        protected StringBuffer generateDropIndexStmts( TableDefinition table ) {
199            StringBuffer sb = new StringBuffer();
200    
201            // build drop statements for geometry indexes
202            Collection<ColumnDefinition> geometryColumns = new ArrayList<ColumnDefinition>();
203            for ( ColumnDefinition column : table.getColumns() ) {
204                if ( column.isGeometry() ) {
205                    geometryColumns.add( column );
206                }
207            }
208    
209            Iterator<ColumnDefinition> iter = geometryColumns.iterator();
210            int spatialIdxCnt = 1;
211            while ( iter.hasNext() ) {
212                iter.next();
213                sb.append( "DROP INDEX " );
214                sb.append( table.getName() + ( spatialIdxCnt++ ) );
215                sb.append( "_SPATIAL_IDX;" );
216                sb.append( '\n' );
217            }
218    
219            // build table type specific drop index statements
220            switch ( table.getType() ) {
221            case JOIN_TABLE: {
222                // create an index on every column
223                ColumnDefinition[] columns = table.getColumns();
224                for ( int i = 0; i < columns.length; i++ ) {
225                    if ( columns[i].isFK() ) {
226                        sb.append( "DROP INDEX " );
227                        sb.append( table.getName().toUpperCase() );
228                        sb.append( "_" );
229                        sb.append( columns[i].getName() + "_IDX" );
230                        sb.append( ';' );
231                        sb.append( '\n' );
232                    }
233                }
234                break;
235            }
236            default: {
237                break;
238            }
239            }
240            return sb;
241        }
242    
243        /**
244     * Generates the DDL statements necessary for the removal of the given sequence. May be overridden by a concrete
245         * implementation.
246         *
247         * @param sequenceName
248         * @return the DDL statements necessary for the removal of the given sequence definition
249         */
250        protected StringBuffer generateDropSequenceStmt( String sequenceName ) {
251            StringBuffer sb = new StringBuffer( "DROP SEQUENCE " );
252            sb.append( sequenceName );
253            sb.append( ";\n" );
254            return sb;
255        }
256    
257        /**
258         * Creates a new instance of <code>DDLGenerator</code> from the given parameters.
259         *
260         * @param schemaURL
261         * @throws MalformedURLException
262         * @throws IOException
263         * @throws SAXException
264         * @throws XMLParsingException
265         * @throws XMLSchemaException
266         * @throws UnknownCRSException
267         */
268        protected DDLGenerator( URL schemaURL ) throws MalformedURLException, IOException, SAXException,
269                                XMLParsingException, XMLSchemaException, UnknownCRSException {
270    
271            System.out.println( Messages.format( "LOADING_SCHEMA_FILE", schemaURL ) );
272            MappedGMLSchemaDocument schemaDoc = new MappedGMLSchemaDocument();
273            schemaDoc.load( schemaURL );
274            schema = schemaDoc.parseMappedGMLSchema();
275            FeatureType[] featureTypes = schema.getFeatureTypes();
276            int concreteCount = 0;
277            for ( int i = 0; i < featureTypes.length; i++ ) {
278                if ( !featureTypes[i].isAbstract() ) {
279                    concreteCount++;
280                }
281            }
282            System.out.println( Messages.format( "SCHEMA_INFO", new Integer( featureTypes.length ),
283                                                 new Integer( featureTypes.length - concreteCount ),
284                                                 new Integer( concreteCount ) ) );
285            System.out.println( Messages.getString( "RETRIEVING_TABLES" ) );
286            buildTableMap();
287        }
288    
289        /**
290         * Returns all table definitions of the given type.
291         *
292         * @param type
293         *            FEATURE_TYPE_TABLE, JOIN_TABLE or MULTI_PROPERTY_TABLE
294         * @return all table definitions of the given type.
295         */
296        protected TableDefinition[] getTables( int type ) {
297            Collection<TableDefinition> tableList = new ArrayList<TableDefinition>();
298            Iterator<String> iter = this.tables.keySet().iterator();
299            while ( iter.hasNext() ) {
300                String tableName = iter.next();
301                TableDefinition table = this.tables.get( tableName );
302                if ( table.getType() == type ) {
303                    tableList.add( table );
304                }
305            }
306            return tableList.toArray( new TableDefinition[tableList.size()] );
307        }
308    
309        /**
310         * Returns the table definition for the table with the given name. If no such definition exists, a new table
311         * definition is created and added to the internal <code>tables</code> map.
312         *
313         * @param tableName
314     *            name of the table whose definition should be looked up
315     * @param type
316     *            type of the table (only respected if a new TableDefinition instance is created)
317         * @return the table definition for the table with the given name.
318         */
319        private TableDefinition lookupTableDefinition( String tableName, int type ) {
320            TableDefinition table = this.tables.get( tableName );
321            if ( table == null ) {
322                table = new TableDefinition( tableName, type );
323                this.tables.put( tableName, table );
324            }
325            return table;
326        }
327    
328        /**
329         * Collects the referenced tables and their columns from the input schema. Builds the member map <code>tables</code>
330         * from this data.
331         */
332        private void buildTableMap() {
333            FeatureType[] featureTypes = schema.getFeatureTypes();
334            for ( int i = 0; i < featureTypes.length; i++ ) {
335                if ( !featureTypes[i].isAbstract() ) {
336                    buildTableMap( (MappedFeatureType) featureTypes[i] );
337                }
338            }
339        }
340    
341        /**
342         * Collects the tables and their columns used in the annotation of the given feature type. Builds the member map
343         * <code>tables</code> from this data.
344         *
345         * @param ft
346         *            feature type to process
347         */
348        private void buildTableMap( MappedFeatureType ft ) {
349            TableDefinition table = lookupTableDefinition( ft.getTable(), FEATURE_TYPE_TABLE );
350    
351            MappedGMLId gmlId = ft.getGMLId();
352            addGMLIdColumns( gmlId, table );
353    
354            if ( gmlId.getIdGenerator() instanceof DBSeqIdGenerator ) {
355                extractSequence( (DBSeqIdGenerator) ft.getGMLId().getIdGenerator() );
356            }
357    
358            PropertyType[] properties = ft.getProperties();
359            for ( int i = 0; i < properties.length; i++ ) {
360                MappedPropertyType property = (MappedPropertyType) properties[i];
361                if ( property instanceof MappedSimplePropertyType ) {
362                    buildTableMap( (MappedSimplePropertyType) property, table );
363                } else if ( property instanceof MappedGeometryPropertyType ) {
364                    buildTableMap( (MappedGeometryPropertyType) property, table );
365                } else if ( property instanceof MappedFeaturePropertyType ) {
366                    buildTableMap( (MappedFeaturePropertyType) property, table );
367                } else {
368                    throw new RuntimeException( Messages.format( "ERROR_UNEXPECTED_PROPERTY_TYPE",
369                                                                 property.getClass().getName() ) );
370                }
371            }
372        }
373    
374        /**
375     * Registers the name of the sequence that the given {@link DBSeqIdGenerator} refers to in the <code>sequences</code> member set.
376         *
377         * @param idGenerator
378         *            generator instance
379         */
380        private void extractSequence( DBSeqIdGenerator idGenerator ) {
381            this.sequences.add( idGenerator.getSequenceName() );
382        }
383    
384        /**
385     * Adds the columns used in the given <code>MappedGMLId</code> to the given <code>TableDefinition</code>.
386         *
387         * @param gmlId
388         *            columns are taken from this gmlId mapping
389         * @param table
390         *            columns are added to this table definition
391         */
392        private void addGMLIdColumns( MappedGMLId gmlId, TableDefinition table ) {
393            MappingField[] idFields = gmlId.getIdFields();
394            for ( int i = 0; i < idFields.length; i++ ) {
395                ColumnDefinition column = new ColumnDefinition( idFields[i].getField(), idFields[i].getType(), false, true,
396                                                                false, -1, false );
397                table.addColumn( column );
398            }
399        }
400    
401        /**
402         * Collects the tables and their columns used in the annotation of the given simple property type. Builds the
403     * <code>tables</code> member map from this data.
404         * <p>
405         * If the data for the property is stored in a related table, the table and column information used on the path to
406         * this table is also added to the <code>tables</code> member map.
407         *
408         * @param simpleProperty
409         *            simple property type to process
410         * @param table
411         *            table definition associated with the property definition
412         */
413        private void buildTableMap( MappedSimplePropertyType simpleProperty, TableDefinition table ) {
414            Collection<ColumnDefinition> newColumns = new ArrayList<ColumnDefinition>();
415            // relation path to the table that stores the property's data (may be empty)
416            TableRelation[] relations = simpleProperty.getTableRelations();
417            if ( simpleProperty.getMaxOccurs() != 1 && ( relations == null || relations.length < 1 ) ) {
418                throw new RuntimeException( Messages.format( "ERROR_INVALID_PROPERTY_DEFINITION", simpleProperty.getName() ) );
419            }
420    
421            SimpleContent content = simpleProperty.getContent();
422            if ( content instanceof MappingField ) {
423                MappingField mf = (MappingField) content;
424                if ( relations == null || relations.length == 0 ) {
425                    newColumns.add( new ColumnDefinition( mf.getField(), mf.getType(), simpleProperty.getMinOccurs() == 0,
426                                                          false, -1, false ) );
427                } else {
428                    TableRelation firstRelation = relations[0];
429                    MappingField[] fromFields = firstRelation.getFromFields();
430                    for ( int i = 0; i < fromFields.length; i++ ) {
431                        MappingField fromField = fromFields[i];
432                        newColumns.add( new ColumnDefinition( fromField.getField(), fromField.getType(), false, false, -1,
433                                                              true ) );
434                    }
435                    buildTableMap( relations, mf );
436                }
437            } else {
438                String msg = "Ignoring property '" + simpleProperty + "' - has virtual content.";
439                System.out.println( msg );
440            }
441            table.addColumns( newColumns );
442        }
443    
444        /**
445         * Collects the tables and their columns used in the annotation of the given geometry property type. Builds the
446     * <code>tables</code> member map from this data.
447         * <p>
448         * If the geometry for the property is stored in a related table, the table and column information used on the path
449         * to this table is also added to the <code>tables</code> member map.
450         *
451         * @param geometryProperty
452     *            geometry property type to process
453         * @param table
454         *            table definition associated with the property definition
455         */
456        private void buildTableMap( MappedGeometryPropertyType geometryProperty, TableDefinition table ) {
457            Collection<ColumnDefinition> newColumns = new ArrayList<ColumnDefinition>();
458            TableRelation[] relations = geometryProperty.getTableRelations();
459            if ( geometryProperty.getMaxOccurs() != 1 && ( relations == null || relations.length < 1 ) ) {
460                throw new RuntimeException( Messages.format( "ERROR_INVALID_PROPERTY_DEFINITION",
461                                                             geometryProperty.getName() ) );
462            }
463            if ( relations == null || relations.length == 0 ) {
464                newColumns.add( new ColumnDefinition( geometryProperty.getMappingField().getField(),
465                                                      geometryProperty.getMappingField().getType(),
466                                                      geometryProperty.getMinOccurs() == 0, true,
467                                                      geometryProperty.getMappingField().getSRS(), false ) );
468            } else {
469                TableRelation firstRelation = relations[0];
470                MappingField[] fromFields = firstRelation.getFromFields();
471                for ( int i = 0; i < fromFields.length; i++ ) {
472                    MappingField fromField = fromFields[i];
473                    newColumns.add( new ColumnDefinition( fromField.getField(), fromField.getType(), false, true,
474                                                          geometryProperty.getMappingField().getSRS(), true ) );
475                }
476                buildTableMap( relations, geometryProperty.getMappingField() );
477            }
478            table.addColumns( newColumns );
479        }
480    
481        /**
482         * Collects the tables and their columns used in the annotation of the given feature property type. Builds the
483     * <code>tables</code> member map from this data.
484         * <p>
485         * The table and column information used on the path to the table of the feature type is also added to the
486         * <code>tables</code> member map.
487         *
488         * @param featureProperty
489         *            feature property type to process
490         * @param table
491         *            table definition associated with the property definition
492         */
493        private void buildTableMap( MappedFeaturePropertyType featureProperty, TableDefinition table ) {
494            Collection<ColumnDefinition> newColumns = new ArrayList<ColumnDefinition>();
495    
496            // relation path from this feature type's table to the target feature type's table
497            TableRelation[] relations = featureProperty.getTableRelations();
498    
499            // target feature type table must always be accessed via 'Relation'-elements
500            if ( relations == null || relations.length < 1 ) {
501                throw new RuntimeException( Messages.format( "ERROR_INVALID_FEATURE_PROPERTY_DEFINITION_1",
502                                                             featureProperty.getName() ) );
503            }
504    
505            // maxOccurs > 1: target feature type table must be accessed via join table
506            if ( featureProperty.getMaxOccurs() != 1 && ( relations.length < 2 ) ) {
507                throw new RuntimeException( Messages.format( "ERROR_INVALID_FEATURE_PROPERTY_DEFINITION_2",
508                                                             featureProperty.getName() ) );
509            }
510    
511            // add this feature type's key columns to current table
512            TableRelation firstRelation = relations[0];
513            MappingField[] fromFields = firstRelation.getFromFields();
514            boolean isNullable = featureProperty.getMinOccurs() == 0 && relations.length == 1;
515            for ( int i = 0; i < fromFields.length; i++ ) {
516                MappingField fromField = fromFields[i];
517                if ( featureProperty.externalLinksAllowed() ) {
518                    newColumns.add( new ColumnDefinition( fromField.getField(), fromField.getType(), true, false, -1, true ) );
519                    newColumns.add( new ColumnDefinition( fromField.getField() + "_external", fromField.getType(), true,
520                                                          false, -1, true ) );
521                } else {
522                    newColumns.add( new ColumnDefinition( fromField.getField(), fromField.getType(), isNullable, false, -1,
523                                                          true ) );
524                }
525            }
526            table.addColumns( newColumns );
527    
528            MappedFeatureType contentType = featureProperty.getFeatureTypeReference().getFeatureType();
529            buildTableMap( relations, featureProperty, contentType );
530        }
531    
532        /**
533     * Collects the tables and their columns used in the relation tables from a simple/geometry property to its content
534     * table. Builds the <code>tables</code> member map from this data.
535         *
536         * @param relations
537         *            relation tables from annotation of property type
538         * @param targetField
539     *            holds the property's data
540         */
541        private void buildTableMap( TableRelation[] relations, MappingField targetField ) {
542    
543            // process tables used in 'To'-element of each 'Relation'-element
544            for ( int i = 0; i < relations.length; i++ ) {
545                String tableName = relations[i].getToTable();
546                TableDefinition table = lookupTableDefinition( tableName, MULTI_PROPERTY_TABLE );
547                MappingField[] toFields = relations[i].getToFields();
548                for ( int j = 0; j < toFields.length; j++ ) {
549                    boolean toIsFK = relations[i].getFKInfo() == TableRelation.FK_INFO.fkIsToField;
550                    ColumnDefinition column = new ColumnDefinition( toFields[j].getField(), toFields[j].getType(), false,
551                                                                    !toIsFK, false, -1, toIsFK );
552                    // the 'To'-field becomes part of the PK unless it is the FK side of the relation
553                    table.addColumn( column );
554                }
555            }
556    
557            // process table used in 'To'-element of last 'Relation'-element (targetField refers to
558            // this)
559            ColumnDefinition column = null;
560            if ( targetField instanceof MappingGeometryField ) {
561                column = new ColumnDefinition( targetField.getField(), targetField.getType(), false, true,
562                                               ( (MappingGeometryField) targetField ).getSRS(), false );
563            } else {
564                column = new ColumnDefinition( targetField.getField(), targetField.getType(), false, false, -1, false );
565            }
566    
567            TableDefinition table = lookupTableDefinition( relations[relations.length - 1].getToTable(),
568                                                           MULTI_PROPERTY_TABLE );
569            table.addColumn( column );
570        }
571    
572        /**
573     * Collects the tables and their columns used in the relation tables from a feature property to its content feature
574     * type. Builds the <code>tables</code> member map from this data.
575         *
576         * @param relations
577         *            relation tables from annotation of feature property type
578         * @param property
579         * @param targetType
580         *            type contained in the feature property
581         */
582        private void buildTableMap( TableRelation[] relations, MappedPropertyType property, MappedFeatureType targetType ) {
583    
584            TableDefinition table = lookupTableDefinition( relations[0].getFromTable(), FEATURE_TYPE_TABLE );
585    
586            // process tables used in 'To'-element of each 'Relation'-element (except the last)
587            for ( int i = 0; i < relations.length - 1; i++ ) {
588                String tableName = relations[i].getToTable();
589                table = lookupTableDefinition( tableName, JOIN_TABLE );
590                MappingField[] toFields = relations[i].getToFields();
591                for ( int j = 0; j < toFields.length; j++ ) {
592                    boolean toIsFK = relations[i].getFKInfo() == TableRelation.FK_INFO.fkIsToField;
593                    ColumnDefinition column = new ColumnDefinition( toFields[j].getField(), toFields[j].getType(), false,
594                                                                    true, false, -1, toIsFK );
595                    // the 'To'-fields of the join table are treated as part of the PK here
596                    table.addColumn( column );
597                }
598            }
599    
600            // process table used in 'To'-element of last 'Relation'-element
601            MappedFeatureType[] concreteTypes = targetType.getConcreteSubstitutions();
602            MappingField[] toFields = relations[relations.length - 1].getToFields();
603    
604            // if it refers to several target tables (target feature type is abstract), an additional
605            // column is needed (which determines the target feature type)
606            if ( concreteTypes.length > 1 ) {
607                String typeColumn = "featuretype";
608                if ( relations.length == 1 ) {
609                    typeColumn = FT_PREFIX + property.getTableRelations()[0].getFromFields()[0].getField();
610                }
611                ColumnDefinition column = new ColumnDefinition( typeColumn, Types.VARCHAR, property.getMinOccurs() == 0,
612                                                                false, -1, false );
613                table.addColumn( column );
614            }
615            for ( int i = 0; i < concreteTypes.length; i++ ) {
616                MappedFeatureType concreteType = concreteTypes[i];
617                String tableName = concreteType.getTable();
618                table = lookupTableDefinition( tableName, FEATURE_TYPE_TABLE );
619                for ( int j = 0; j < toFields.length; j++ ) {
620                    ColumnDefinition column = new ColumnDefinition( toFields[j].getField(), toFields[j].getType(), false,
621                                                                    false, -1, false );
622                    table.addColumn( column );
623                }
624            }
625    
626            // process tables used in 'From'-element of each 'Relation'-element (except the first)
627            for ( int i = 1; i < relations.length; i++ ) {
628                String tableName = relations[i].getFromTable();
629                if ( i != relations.length - 1 ) {
630                    table = lookupTableDefinition( tableName, JOIN_TABLE );
631                } else {
632                    table = lookupTableDefinition( tableName, FEATURE_TYPE_TABLE );
633                }
634                MappingField[] fromFields = relations[i].getFromFields();
635                for ( int j = 0; j < fromFields.length; j++ ) {
636                    boolean fromIsFK = relations[i].getFKInfo() == TableRelation.FK_INFO.fkIsFromField;
637                    ColumnDefinition column = new ColumnDefinition( fromFields[j].getField(), fromFields[j].getType(),
638                                                                    false, true, false, -1, fromIsFK );
639                    table.addColumn( column );
640                }
641            }
642        }
643    
644        /**
            * Convenience variant of {@link #generateCreateScript(String, String)} that does not set a db schema.
            *
645         * @param outputFile
646         * @throws IOException
647         */
648        public void generateCreateScript( String outputFile )
649                                throws IOException {
650            generateCreateScript( outputFile, null );
651        }
652    
653        /**
654         * Generates the DDL statements to create a relational schema that backs the GML schema.
655         *
656         * @param outputFile
657         * @param dbSchema
658         *            (may be null)
659         * @throws IOException
660         */
661        public void generateCreateScript( String outputFile, String dbSchema )
662                                throws IOException {
663            PrintWriter writer = new PrintWriter( new OutputStreamWriter( new FileOutputStream( outputFile ), "UTF-8" ) );
664    
665            if ( dbSchema != null ) {
666                writer.print( "/* CREATE DB SCHEMA (" + dbSchema + ") */\n\n" );
667                writer.print( generateCreateSchemaStmts( dbSchema ) );
668                writer.print( generateSetSchemaStmt( dbSchema ) );
669                writer.println();
670                writer.println();
671            }
672    
673            System.out.println( Messages.format( "CREATE_SEQUENCES", new Integer( sequences.size() ) ) );
674            if ( sequences.size() > 0 ) {
675                writer.print( "/* CREATE SEQUENCES (" + sequences.size() + ") */\n" );
676                for ( String sequenceName : sequences ) {
677                    writer.print( '\n' );
678                    writer.print( generateCreateSequenceStmt( sequenceName ) );
679                }
680            }
681    
682            TableDefinition[] tables = getTables( FEATURE_TYPE_TABLE );
683            System.out.println( Messages.format( "CREATE_FEATURE_TYPE", new Integer( tables.length ) ) );
684            writer.print( "\n\n/* CREATE FEATURE TABLES (" + tables.length + ") */\n" );
685            for ( int i = 0; i < tables.length; i++ ) {
686                System.out.println( tables[i].tableName );
687                writer.print( '\n' );
688                writer.print( generateCreateTableStmt( tables[i] ) );
689                writer.print( generateCreateIndexStmts( tables[i] ) );
690            }
691    
692            tables = getTables( JOIN_TABLE );
693            if ( tables.length != 0 ) {
694                writer.print( "\n\n/* CREATE JOIN TABLES (" + tables.length + ") */\n" );
695            }
696            System.out.println( Messages.format( "CREATE_JOIN_TABLES", new Integer( tables.length ) ) );
697            for ( int i = 0; i < tables.length; i++ ) {
698                System.out.println( tables[i].tableName );
699                writer.print( '\n' );
700                writer.print( generateCreateTableStmt( tables[i] ) );
701                writer.print( generateCreateIndexStmts( tables[i] ) );
702            }
703    
704            tables = getTables( MULTI_PROPERTY_TABLE );
705            if ( tables.length != 0 ) {
706                writer.print( "\n\n/* CREATE PROPERTY TABLES (" + tables.length + ") */\n" );
707            }
708            System.out.println( Messages.format( "CREATE_PROPERTY_TABLES", new Integer( tables.length ) ) );
709            for ( int i = 0; i < tables.length; i++ ) {
710                System.out.println( tables[i].tableName );
711                writer.print( '\n' );
712                writer.print( generateCreateTableStmt( tables[i] ) );
713                writer.print( generateCreateIndexStmts( tables[i] ) );
714            }
715            writer.close();
716        }
717    
718        /**
719         * Generates the DDL statements that can be used to remove the relational schema again.
720         *
721         * @param outputFile
722         * @param dbSchema
723         *            (may be null)
724         * @throws IOException
725         */
726        public void generateDropScript( String outputFile, String dbSchema )
727                                throws IOException {
728            PrintWriter writer = new PrintWriter( new OutputStreamWriter( new FileOutputStream( outputFile ), "UTF-8" ) );
729    
730            if ( dbSchema != null ) {
731                writer.println( generateSetSchemaStmt( dbSchema ) );
732                writer.println();
733                writer.println();
734            }
735    
736            TableDefinition[] tables = getTables( FEATURE_TYPE_TABLE );
737            System.out.println( Messages.format( "DROP_FEATURE_TYPE", new Integer( tables.length ) ) );
738            writer.print( "/* DROP FEATURE TABLES (" + tables.length + ") */\n" );
739            for ( int i = 0; i < tables.length; i++ ) {
740                writer.print( '\n' );
741                writer.print( generateDropIndexStmts( tables[i] ) );
742                writer.print( generateDropTableStmt( tables[i] ) );
743            }
744    
745            tables = getTables( JOIN_TABLE );
746            writer.print( "\n\n/* DROP JOIN TABLES (" + tables.length + ") */\n" );
747            System.out.println( Messages.format( "DROP_JOIN_TABLES", new Integer( tables.length ) ) );
748            for ( int i = 0; i < tables.length; i++ ) {
749                writer.print( '\n' );
750                writer.print( generateDropIndexStmts( tables[i] ) );
751                writer.print( generateDropTableStmt( tables[i] ) );
752            }
753    
754            tables = getTables( MULTI_PROPERTY_TABLE );
755            writer.print( "\n\n/* DROP PROPERTY TABLES (" + tables.length + ") */\n" );
756            System.out.println( Messages.format( "DROP_PROPERTY_TABLES", new Integer( tables.length ) ) );
757            for ( int i = 0; i < tables.length; i++ ) {
758                writer.print( '\n' );
759                writer.print( generateDropIndexStmts( tables[i] ) );
760                writer.print( generateDropTableStmt( tables[i] ) );
761            }
762    
763            System.out.println( Messages.format( "DROP_SEQUENCES", new Integer( sequences.size() ) ) );
764            if ( sequences.size() > 0 ) {
765                writer.print( "\n\n/* DROP SEQUENCES (" + sequences.size() + ") */\n" );
766                for ( String sequenceName : sequences ) {
767                    writer.print( '\n' );
768                    writer.print( generateDropSequenceStmt( sequenceName ) );
769                }
770            }
771    
772            if ( dbSchema != null ) {
773                writer.print( "\n\n/* DROP DB SCHEMA (" + dbSchema + ") */\n" );
774                writer.print( generateDropSchemaStmt( dbSchema ) );
775                writer.println();
776            }
777    
778            writer.close();
779        }
780    
781        /**
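            * Command-line entry point: generates a create script and a drop script for the given annotated GML schema.
            * Example invocation (the file and schema names are illustrative):
            *
            * <pre>
            * java org.deegree.tools.datastore.DDLGenerator POSTGIS example.xsd create.sql drop.sql myschema
            * </pre>
            *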
782         * @param args
783         * @throws IOException
784         * @throws SAXException
785         * @throws XMLParsingException
786         * @throws XMLSchemaException
787         * @throws UnknownCRSException
788         */
789        public static void main( String[] args )
790                                throws IOException, SAXException, XMLParsingException, XMLSchemaException,
791                                UnknownCRSException {
792    
793            if ( args.length < 4 || args.length > 5 ) {
794                System.out.println( "Usage: DDLGenerator <FLAVOUR> <input.xsd> <create.sql> <drop.sql> [DB_SCHEMA]" );
795                System.exit( 0 );
796            }
797    
798            String flavour = args[0];
799            String schemaFile = args[1];
800            String createFile = args[2];
801            String dropFile = args[3];
802            String dbSchema = args.length == 4 ? null : args[4];
803    
804            DDLGenerator generator = null;
805            if ( "POSTGIS".equals( flavour ) ) {
806                generator = new PostGISDDLGenerator( new File( schemaFile ).toURI().toURL() );
807            } else if ( "ORACLE".equals( flavour ) ) {
808                generator = new OracleDDLGenerator( new File( schemaFile ).toURI().toURL() );
809        } else {
810            System.out.println( Messages.format( "ERROR_UNSUPPORTED_FLAVOUR", flavour ) );
               System.exit( 1 );
811        }
812    
813            generator.generateCreateScript( createFile, dbSchema );
814            generator.generateDropScript( dropFile, dbSchema );
815        }
816    
817        /**
818         * Returns a string representation of the object.
819         *
820         * @return a string representation of the object.
821         */
822        @Override
823        public String toString() {
824            StringBuffer sb = new StringBuffer( Messages.getString( "RELATIONAL_SCHEMA" ) );
825            sb.append( '\n' );
826    
827            TableDefinition[] tables = getTables( FEATURE_TYPE_TABLE );
828            sb.append( '\n' );
829            sb.append( tables.length );
830            sb.append( " feature type tables\n\n" );
831            for ( int i = 0; i < tables.length; i++ ) {
832                sb.append( tables[i] );
833                sb.append( '\n' );
834            }
835    
836            sb.append( '\n' );
837            tables = getTables( JOIN_TABLE );
838            sb.append( tables.length );
839            sb.append( " join tables\n\n" );
840            for ( int i = 0; i < tables.length; i++ ) {
841                sb.append( tables[i] );
842                sb.append( '\n' );
843            }
844    
845            sb.append( '\n' );
846            tables = getTables( MULTI_PROPERTY_TABLE );
847            sb.append( tables.length );
848            sb.append( " property tables\n\n" );
849            for ( int i = 0; i < tables.length; i++ ) {
850                sb.append( tables[i] );
851                sb.append( '\n' );
852            }
853            return sb.toString();
854        }
855    
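           /**
            * Definition of a relational table (name, table type and columns) derived from the annotated GML schema.
            */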
856        class TableDefinition {
857    
858            private int type;
859    
860            String tableName;
861    
862            private Map<String, ColumnDefinition> columnsMap = new LinkedHashMap<String, ColumnDefinition>();
863    
864            TableDefinition( String tableName, int type ) {
865                this.type = type;
866                this.tableName = tableName;
867            }
868    
869            String getName() {
870                return this.tableName;
871            }
872    
873            int getType() {
874                return this.type;
875            }
876    
877            ColumnDefinition[] getColumns() {
878                Collection<ColumnDefinition> columns = new ArrayList<ColumnDefinition>();
879                Iterator<String> iter = columnsMap.keySet().iterator();
880                while ( iter.hasNext() ) {
881                    String columnName = iter.next();
882                    columns.add( columnsMap.get( columnName ) );
883                }
884                return columns.toArray( new ColumnDefinition[columns.size()] );
885            }
886    
887            ColumnDefinition[] getPKColumns() {
888                Collection<ColumnDefinition> columns = new ArrayList<ColumnDefinition>();
889                Iterator<String> iter = columnsMap.keySet().iterator();
890                while ( iter.hasNext() ) {
891                    String columnName = iter.next();
892                    ColumnDefinition column = columnsMap.get( columnName );
893                    if ( column.isPartOfPK() ) {
894                        columns.add( columnsMap.get( columnName ) );
895                    }
896                }
897                return columns.toArray( new ColumnDefinition[columns.size()] );
898            }
899    
900            ColumnDefinition getColumn( String name ) {
901                return columnsMap.get( name );
902            }
903    
904            void addColumn( ColumnDefinition column ) {
905                ColumnDefinition oldColumn = columnsMap.get( column.getName() );
906                if ( oldColumn != null ) {
907                    if ( !( column.getType() == oldColumn.getType() ) ) {
908                        String msg = null;
909                        try {
910                            msg = Messages.format( "ERROR_COLUMN_DEFINITION_TYPES", column.getName(),
911                                                   Types.getTypeNameForSQLTypeCode( oldColumn.getType() ),
912                                                   Types.getTypeNameForSQLTypeCode( column.getType() ) );
913                        } catch ( UnknownTypeException e ) {
914                            msg = e.getMessage();
915                            e.printStackTrace();
916                        }
917                        throw new RuntimeException( msg );
918    
919                    }
920                    if ( oldColumn.isPartOfPK() ) {
921                        column = oldColumn;
922                    }
923                }
924                columnsMap.put( column.getName(), column );
925            }
926    
927            void addColumns( Collection<ColumnDefinition> columns ) {
928                Iterator<ColumnDefinition> iter = columns.iterator();
929                while ( iter.hasNext() ) {
930                    ColumnDefinition column = iter.next();
931                    addColumn( column );
932                }
933            }
934    
935            @Override
936            public String toString() {
937                StringBuffer sb = new StringBuffer();
938                sb.append( Messages.format( "TABLE", this.tableName ) );
939                sb.append( Messages.getString( "PRIMARY_KEY" ) );
940                ColumnDefinition[] pkColumns = getPKColumns();
941                for ( int i = 0; i < pkColumns.length; i++ ) {
942                    sb.append( '"' );
943                    sb.append( pkColumns[i].getName() );
944                    sb.append( '"' );
945                    if ( i != pkColumns.length - 1 ) {
946                        sb.append( ", " );
947                    }
948                }
949                sb.append( '\n' );
950                Iterator<String> columnNameIter = this.columnsMap.keySet().iterator();
951                while ( columnNameIter.hasNext() ) {
952                    String columnName = columnNameIter.next();
953                    ColumnDefinition column = this.columnsMap.get( columnName );
954                    try {
955                        sb.append( Messages.format( "COLUMN", columnName,
956                                                    Types.getTypeNameForSQLTypeCode( column.getType() ) + ":"
957                                                                            + column.getType(),
958                                                    new Boolean( column.isNullable() ) ) );
959                    } catch ( UnknownTypeException e ) {
960                        // unknown SQL type code: dump the stack trace and continue with the next column
961                        e.printStackTrace();
962                    }
963                    sb.append( '\n' );
964                }
965                return sb.toString();
966            }
967        }
968    
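           /**
            * Definition of a single table column: name, SQL type code (see {@link Types}), nullability, geometry/SRS
            * information and primary/foreign key flags.
            */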
969        class ColumnDefinition {
970    
971            private String columnName;
972    
973            private int type;
974    
975            private boolean isNullable;
976    
977            private boolean isGeometryColumn;
978    
979            private int srsCode;
980    
981            private boolean isPartOfPK;
982    
983            private boolean isFK;
984    
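           /**
            * Creates a <code>ColumnDefinition</code> that is not part of the table's primary key.
            *
            * @param columnName
            *            name of the column
            * @param type
            *            SQL type code of the column (see {@link Types})
            * @param isNullable
            *            true, if the column may contain NULL values
            * @param isGeometryColumn
            *            true, if the column stores a geometry
            * @param srsCode
            *            SRS code for geometry columns (-1 if unknown or not applicable)
            * @param isFK
            *            true, if the column is a foreign key
            */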
985            ColumnDefinition( String columnName, int type, boolean isNullable, boolean isGeometryColumn, int srsCode,
986                              boolean isFK ) {
987                this.columnName = columnName;
988                this.type = type;
989                this.isNullable = isNullable;
990                this.isGeometryColumn = isGeometryColumn;
991                this.srsCode = srsCode;
992                this.isFK = isFK;
993            }
994    
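           /**
            * Creates a <code>ColumnDefinition</code>, additionally specifying whether the column is part of the
            * table's primary key.
            *
            * @param isPartOfPK
            *            true, if the column is part of the table's primary key
            */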
995            ColumnDefinition( String columnName, int type, boolean isNullable, boolean isPartOfPK,
996                              boolean isGeometryColumn, int srsCode, boolean isFK ) {
997                this( columnName, type, isNullable, isGeometryColumn, srsCode, isFK );
998                this.isPartOfPK = isPartOfPK;
999            }
1000    
1001            String getName() {
1002                return this.columnName;
1003            }
1004    
1005            int getType() {
1006                return this.type;
1007            }
1008    
1009            boolean isNullable() {
1010                return this.isNullable;
1011            }
1012    
1013            boolean isGeometry() {
1014                return this.isGeometryColumn;
1015            }
1016    
1017            int getSRS() {
1018                return this.srsCode;
1019            }
1020    
1021            boolean isPartOfPK() {
1022                return this.isPartOfPK;
1023            }
1024    
1025            boolean isFK() {
1026                return isFK;
1027            }
1028        }
1029    }