//$HeadURL: svn+ssh://rbezema@svn.wald.intevation.org/deegree/base/branches/2.2_testing/src/org/deegree/tools/datastore/DDLGenerator.java $
/*----------------    FILE HEADER  ------------------------------------------

 This file is part of deegree.
 Copyright (C) 2001-2008 by:
 EXSE, Department of Geography, University of Bonn
 http://www.giub.uni-bonn.de/deegree/
 lat/lon GmbH
 http://www.lat-lon.de

 This library is free software; you can redistribute it and/or
 modify it under the terms of the GNU Lesser General Public
 License as published by the Free Software Foundation; either
 version 2.1 of the License, or (at your option) any later version.

 This library is distributed in the hope that it will be useful,
 but WITHOUT ANY WARRANTY; without even the implied warranty of
 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 Lesser General Public License for more details.

 You should have received a copy of the GNU Lesser General Public
 License along with this library; if not, write to the Free Software
 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

 Contact:

 Andreas Poth
 lat/lon GmbH
 Aennchenstraße 19
 53177 Bonn
 Germany
 E-Mail: poth@lat-lon.de

 Prof. Dr. Klaus Greve
 Department of Geography
 University of Bonn
 Meckenheimer Allee 166
 53115 Bonn
 Germany
 E-Mail: greve@giub.uni-bonn.de

 ---------------------------------------------------------------------------*/
package org.deegree.tools.datastore;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;

import org.deegree.datatypes.Types;
import org.deegree.datatypes.UnknownTypeException;
import org.deegree.framework.xml.XMLParsingException;
import org.deegree.framework.xml.schema.XMLSchemaException;
import org.deegree.io.datastore.schema.MappedFeaturePropertyType;
import org.deegree.io.datastore.schema.MappedFeatureType;
import org.deegree.io.datastore.schema.MappedGMLId;
import org.deegree.io.datastore.schema.MappedGMLSchema;
import org.deegree.io.datastore.schema.MappedGMLSchemaDocument;
import org.deegree.io.datastore.schema.MappedGeometryPropertyType;
import org.deegree.io.datastore.schema.MappedPropertyType;
import org.deegree.io.datastore.schema.MappedSimplePropertyType;
import org.deegree.io.datastore.schema.TableRelation;
import org.deegree.io.datastore.schema.content.MappingField;
import org.deegree.io.datastore.schema.content.MappingGeometryField;
import org.deegree.io.datastore.schema.content.SimpleContent;
import org.deegree.io.datastore.sql.idgenerator.DBSeqIdGenerator;
import org.deegree.model.crs.UnknownCRSException;
import org.deegree.model.feature.schema.FeatureType;
import org.deegree.model.feature.schema.PropertyType;
import org.xml.sax.SAXException;

/**
 * Abstract base class for DDL generation from annotated GML schema files.
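 * <p>
 * Typical command-line usage (a sketch; <code>myschema.xsd</code> is just a placeholder for an
 * annotated GML application schema, and the deegree libraries must be on the classpath):
 *
 * <pre>
 * java org.deegree.tools.datastore.DDLGenerator POSTGIS myschema.xsd create.sql drop.sql
 * </pre>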
 * <p>
 * This abstract base class only implements the functionality needed to retrieve the necessary
 * tables and columns used in an annotated GML schema. Parts of the DDL generation depend on the
 * specific SQL backend, so these parts are implemented in concrete extensions of this class.
 *
 * @author <a href="mailto:schneider@lat-lon.de">Markus Schneider</a>
 * @author last edited by: $Author: apoth $
 *
 * @version $Revision: 9346 $, $Date: 2007-12-27 17:39:07 +0100 (Do, 27 Dez 2007) $
 */
public abstract class DDLGenerator {

    protected static final String FT_PREFIX = "FT_";

    protected static final int FEATURE_TYPE_TABLE = 0;

    protected static final int JOIN_TABLE = 1;

    protected static final int MULTI_PROPERTY_TABLE = 2;

    protected MappedGMLSchema schema;

    // key type: String (table names), value type: TableDefinition
    protected Map<String, TableDefinition> tables = new HashMap<String, TableDefinition>();

    // names of sequences (for id generation)
    protected Set<String> sequences = new HashSet<String>();

    /**
     * Generates the DDL statements necessary for the creation of the given table definition. Must
     * be overwritten by the concrete implementation.
     *
     * @param table
     * @return the DDL statements necessary for the creation of the given table definition
     */
    protected abstract StringBuffer generateCreateTableStmt( TableDefinition table );

    /**
     * Generates the DDL statements necessary for the creation of standard indexes for the given
     * table definition. Must be overwritten by the concrete implementation.
     *
     * @param table
     * @return the DDL statements necessary for the creation of standard indexes for the given
     *         table definition
     */
    protected abstract StringBuffer generateCreateIndexStmts( TableDefinition table );

    /**
     * Generates the DDL statements necessary for the creation of the given sequence. May be
     * overwritten by a concrete implementation.
     *
     * @param sequenceName
     * @return the DDL statements necessary for the creation of the given sequence definition
     */
    protected StringBuffer generateCreateSequenceStmt( String sequenceName ) {
        StringBuffer sb = new StringBuffer( "CREATE SEQUENCE " );
        sb.append( sequenceName );
        sb.append( ";\n" );
        return sb;
    }

    /**
     * Generates the DDL statements necessary for the removal of the given table definition. May be
     * overwritten by a concrete implementation.
     *
     * @param table
     * @return the DDL statements necessary for the removal of the given table definition
     */
    protected StringBuffer generateDropTableStmt( TableDefinition table ) {
        StringBuffer sb = new StringBuffer();
        sb.append( "DROP TABLE " );
        sb.append( table.getName() );
        sb.append( " CASCADE;\n" );
        return sb;
    }

    /**
     * Generates the DDL statements necessary for dropping the standard indexes for the given
     * table definition. May be overwritten by a concrete implementation.
     *
     * @param table
     * @return the DDL statements necessary for dropping the standard indexes for the given
     *         table definition
     */
    protected StringBuffer generateDropIndexStmts( TableDefinition table ) {
        StringBuffer sb = new StringBuffer();

        // build drop statements for geometry indexes
        Collection<ColumnDefinition> geometryColumns = new ArrayList<ColumnDefinition>();
        for ( ColumnDefinition column : table.getColumns() ) {
            if ( column.isGeometry() ) {
                geometryColumns.add( column );
            }
        }

        Iterator<ColumnDefinition> iter = geometryColumns.iterator();
        int spatialIdxCnt = 1;
        while ( iter.hasNext() ) {
            iter.next();
            sb.append( "DROP INDEX " );
            sb.append( table.getName() + ( spatialIdxCnt++ ) );
            sb.append( "_SPATIAL_IDX;" );
            sb.append( '\n' );
        }

        // build table type specific drop index statements
        switch ( table.getType() ) {
        case JOIN_TABLE: {
            // drop the index on every column
            ColumnDefinition[] columns = table.getColumns();
            for ( int i = 0; i < columns.length; i++ ) {
                sb.append( "DROP INDEX " );
                sb.append( table.getName().toUpperCase() );
                sb.append( '_' );
                sb.append( columns[i].getName() );
                sb.append( ';' );
                sb.append( '\n' );
            }
            break;
        }
        default: {
            break;
        }
        }
        return sb;
    }

    /**
     * Generates the DDL statements necessary for the removal of the given sequence. May be
     * overwritten by a concrete implementation.
     *
     * @param sequenceName
     * @return the DDL statements necessary for the removal of the given sequence definition
     */
    protected StringBuffer generateDropSequenceStmt( String sequenceName ) {
        StringBuffer sb = new StringBuffer( "DROP SEQUENCE " );
        sb.append( sequenceName );
        sb.append( ";\n" );
        return sb;
    }

    /**
     * Creates a new instance of <code>DDLGenerator</code> from the given parameters.
     *
     * @param schemaURL
     * @throws MalformedURLException
     * @throws IOException
     * @throws SAXException
     * @throws XMLParsingException
     * @throws XMLSchemaException
     * @throws UnknownCRSException
     */
    protected DDLGenerator( URL schemaURL ) throws MalformedURLException, IOException, SAXException,
                            XMLParsingException, XMLSchemaException, UnknownCRSException {

        System.out.println( Messages.format( "LOADING_SCHEMA_FILE", schemaURL ) );
        MappedGMLSchemaDocument schemaDoc = new MappedGMLSchemaDocument();
        schemaDoc.load( schemaURL );
        schema = schemaDoc.parseMappedGMLSchema();
        FeatureType[] featureTypes = schema.getFeatureTypes();
        int concreteCount = 0;
        for ( int i = 0; i < featureTypes.length; i++ ) {
            if ( !featureTypes[i].isAbstract() ) {
                concreteCount++;
            }
        }
        System.out.println( Messages.format( "SCHEMA_INFO", new Integer( featureTypes.length ),
                                             new Integer( featureTypes.length - concreteCount ),
                                             new Integer( concreteCount ) ) );
        System.out.println( Messages.getString( "RETRIEVING_TABLES" ) );
        buildTableMap();
    }

    /**
     * Returns all table definitions of the given type.
     *
     * @param type
     *            FEATURE_TYPE_TABLE, JOIN_TABLE or MULTI_PROPERTY_TABLE
     * @return all table definitions of the given type
     */
    protected TableDefinition[] getTables( int type ) {
        Collection<TableDefinition> tableList = new ArrayList<TableDefinition>();
        Iterator<String> iter = this.tables.keySet().iterator();
        while ( iter.hasNext() ) {
            String tableName = iter.next();
            TableDefinition table = this.tables.get( tableName );
            if ( table.getType() == type ) {
                tableList.add( table );
            }
        }
        return tableList.toArray( new TableDefinition[tableList.size()] );
    }

    /**
     * Returns the table definition for the table with the given name. If no such definition
     * exists, a new table definition is created and added to the internal <code>tables</code> map.
     *
     * @param tableName
     *            name of the table definition to look up
     * @param type
     *            type of the table (only used if a new TableDefinition instance is created)
     * @return the table definition for the table with the given name
     */
    private TableDefinition lookupTableDefinition( String tableName, int type ) {
        TableDefinition table = this.tables.get( tableName );
        if ( table == null ) {
            table = new TableDefinition( tableName, type );
            this.tables.put( tableName, table );
        }
        return table;
    }

    /**
     * Collects the referenced tables and their columns from the input schema. Builds the member
     * map <code>tables</code> from this data.
     */
    private void buildTableMap() {
        FeatureType[] featureTypes = schema.getFeatureTypes();
        for ( int i = 0; i < featureTypes.length; i++ ) {
            if ( !featureTypes[i].isAbstract() ) {
                buildTableMap( (MappedFeatureType) featureTypes[i] );
            }
        }
    }

    /**
     * Collects the tables and their columns used in the annotation of the given feature type.
     * Builds the member map <code>tables</code> from this data.
     *
     * @param featureType
     *            feature type to process
     */
    private void buildTableMap( MappedFeatureType featureType ) {
        TableDefinition table = lookupTableDefinition( featureType.getTable(), FEATURE_TYPE_TABLE );

        MappedGMLId gmlId = featureType.getGMLId();
        addGMLIdColumns( gmlId, table );

        if ( gmlId.getIdGenerator() instanceof DBSeqIdGenerator ) {
            extractSequence( (DBSeqIdGenerator) featureType.getGMLId().getIdGenerator() );
        }

        PropertyType[] properties = featureType.getProperties();
        for ( int i = 0; i < properties.length; i++ ) {
            MappedPropertyType property = (MappedPropertyType) properties[i];
            if ( property instanceof MappedSimplePropertyType ) {
                buildTableMap( (MappedSimplePropertyType) property, table );
            } else if ( property instanceof MappedGeometryPropertyType ) {
                buildTableMap( (MappedGeometryPropertyType) property, table );
            } else if ( property instanceof MappedFeaturePropertyType ) {
                buildTableMap( (MappedFeaturePropertyType) property, table );
            } else {
                throw new RuntimeException( Messages.format( "ERROR_UNEXPECTED_PROPERTY_TYPE",
                                                             property.getClass().getName() ) );
            }
        }
    }

    /**
     * Adds the name of the sequence that the given {@link DBSeqIdGenerator} refers to.
     *
     * @param idGenerator
     *            generator instance
     */
    private void extractSequence( DBSeqIdGenerator idGenerator ) {
        this.sequences.add( idGenerator.getSequenceName() );
    }

    /**
     * Adds the columns used in the given <code>MappedGMLId</code> to the given
     * <code>TableDefinition</code>.
     *
     * @param gmlId
     *            columns are taken from this gmlId mapping
     * @param table
     *            columns are added to this table definition
     */
    private void addGMLIdColumns( MappedGMLId gmlId, TableDefinition table ) {
        MappingField[] idFields = gmlId.getIdFields();
        for ( int i = 0; i < idFields.length; i++ ) {
            ColumnDefinition column = new ColumnDefinition( idFields[i].getField(), idFields[i].getType(), false,
                                                            true, false, -1 );
            table.addColumn( column );
        }
    }

    /**
     * Collects the tables and their columns used in the annotation of the given simple property
     * type. Builds the <code>tables</code> member map from this data.
     * <p>
     * If the data for the property is stored in a related table, the table and column information
     * used on the path to this table is also added to the <code>tables</code> member map.
     *
     * @param simpleProperty
     *            simple property type to process
     * @param table
     *            table definition associated with the property definition
     */
    private void buildTableMap( MappedSimplePropertyType simpleProperty, TableDefinition table ) {
        Collection<ColumnDefinition> newColumns = new ArrayList<ColumnDefinition>();
        // array must always have length 1
        TableRelation[] relations = simpleProperty.getTableRelations();
        if ( simpleProperty.getMaxOccurs() != 1 && ( relations == null || relations.length < 1 ) ) {
            throw new RuntimeException( Messages.format( "ERROR_INVALID_PROPERTY_DEFINITION",
                                                         simpleProperty.getName() ) );
        }

        SimpleContent content = simpleProperty.getContent();
        if ( content instanceof MappingField ) {
            MappingField mf = (MappingField) content;
            if ( relations == null || relations.length == 0 ) {
                newColumns.add( new ColumnDefinition( mf.getField(), mf.getType(),
                                                      simpleProperty.getMinOccurs() == 0, false, -1 ) );
            } else {
                TableRelation firstRelation = relations[0];
                MappingField[] fromFields = firstRelation.getFromFields();
                for ( int i = 0; i < fromFields.length; i++ ) {
                    MappingField fromField = fromFields[i];
                    newColumns.add( new ColumnDefinition( fromField.getField(), fromField.getType(), false, false,
                                                          -1 ) );
                }
                buildTableMap( relations, mf );
            }
        } else {
            String msg = "Ignoring property '" + simpleProperty + "' - has virtual content.";
            System.out.println( msg );
        }
        table.addColumns( newColumns );
    }

    /**
     * Collects the tables and their columns used in the annotation of the given geometry property
     * type. Builds the <code>tables</code> member map from this data.
     * <p>
     * If the geometry for the property is stored in a related table, the table and column
     * information used on the path to this table is also added to the <code>tables</code> member
     * map.
     *
     * @param geometryProperty
     *            geometry property type to process
     * @param table
     *            table definition associated with the property definition
     */
    private void buildTableMap( MappedGeometryPropertyType geometryProperty, TableDefinition table ) {
        Collection<ColumnDefinition> newColumns = new ArrayList<ColumnDefinition>();
        TableRelation[] relations = geometryProperty.getTableRelations();
        if ( geometryProperty.getMaxOccurs() != 1 && ( relations == null || relations.length < 1 ) ) {
            throw new RuntimeException( Messages.format( "ERROR_INVALID_PROPERTY_DEFINITION",
                                                         geometryProperty.getName() ) );
        }
        if ( relations == null || relations.length == 0 ) {
            newColumns.add( new ColumnDefinition( geometryProperty.getMappingField().getField(),
                                                  geometryProperty.getMappingField().getType(),
                                                  geometryProperty.getMinOccurs() == 0, true,
                                                  geometryProperty.getMappingField().getSRS() ) );
        } else {
            TableRelation firstRelation = relations[0];
            MappingField[] fromFields = firstRelation.getFromFields();
            for ( int i = 0; i < fromFields.length; i++ ) {
                MappingField fromField = fromFields[i];
                newColumns.add( new ColumnDefinition( fromField.getField(), fromField.getType(), false, true,
                                                      geometryProperty.getMappingField().getSRS() ) );
            }
            buildTableMap( relations, geometryProperty.getMappingField() );
        }
        table.addColumns( newColumns );
    }

    /**
     * Collects the tables and their columns used in the annotation of the given feature property
     * type. Builds the <code>tables</code> member map from this data.
     * <p>
     * The table and column information used on the path to the table of the feature type is also
     * added to the <code>tables</code> member map.
     *
     * @param featureProperty
     *            feature property type to process
     * @param table
     *            table definition associated with the property definition
     */
    private void buildTableMap( MappedFeaturePropertyType featureProperty, TableDefinition table ) {
        Collection<ColumnDefinition> newColumns = new ArrayList<ColumnDefinition>();

        // array must always have length 1
        TableRelation[] relations = featureProperty.getTableRelations();

        // target feature type table must always be accessed via 'Relation'-elements
        if ( relations == null || relations.length < 1 ) {
            throw new RuntimeException( Messages.format( "ERROR_INVALID_FEATURE_PROPERTY_DEFINITION_1",
                                                         featureProperty.getName() ) );
        }

        // maxOccurs > 1: target feature type table must be accessed via a join table
        if ( featureProperty.getMaxOccurs() != 1 && ( relations.length < 2 ) ) {
            throw new RuntimeException( Messages.format( "ERROR_INVALID_FEATURE_PROPERTY_DEFINITION_2",
                                                         featureProperty.getName() ) );
        }

        // add this feature type's key columns to the current table
        TableRelation firstRelation = relations[0];
        MappingField[] fromFields = firstRelation.getFromFields();
        boolean isNullable = featureProperty.getMinOccurs() == 0 && relations.length == 1;
        for ( int i = 0; i < fromFields.length; i++ ) {
            MappingField fromField = fromFields[i];
            newColumns.add( new ColumnDefinition( fromField.getField(), fromField.getType(), isNullable, false, -1 ) );
        }
        table.addColumns( newColumns );

        MappedFeatureType contentType = featureProperty.getFeatureTypeReference().getFeatureType();
        buildTableMap( relations, featureProperty, contentType );
    }

    /**
     * Collects the tables and their columns used in the relation tables from a simple/geometry
     * property to its content table. Builds the <code>tables</code> member map from this data.
     *
     * @param relations
     *            relation tables from the annotation of the property type
     * @param targetField
     *            holds the property's data
     */
    private void buildTableMap( TableRelation[] relations, MappingField targetField ) {

        // process tables used in the 'To'-element of each 'Relation'-element
        for ( int i = 0; i < relations.length; i++ ) {
            String tableName = relations[i].getToTable();
            TableDefinition table = lookupTableDefinition( tableName, MULTI_PROPERTY_TABLE );
            MappingField[] toFields = relations[i].getToFields();
            for ( int j = 0; j < toFields.length; j++ ) {
                boolean toIsFK = relations[i].getFKInfo() == TableRelation.FK_INFO.fkIsToField;
                ColumnDefinition column = new ColumnDefinition( toFields[j].getField(), toFields[j].getType(), false,
                                                                !toIsFK, false, -1 );
                // schmitz: assuming not part of PK
                table.addColumn( column );
            }
        }

        // process the table used in the 'To'-element of the last 'Relation'-element (targetField
        // refers to this table)
        ColumnDefinition column = null;
        if ( targetField instanceof MappingGeometryField ) {
            column = new ColumnDefinition( targetField.getField(), targetField.getType(), false, true,
                                           ( (MappingGeometryField) targetField ).getSRS() );
        } else {
            column = new ColumnDefinition( targetField.getField(), targetField.getType(), false, false, -1 );
        }

        TableDefinition table = lookupTableDefinition( relations[relations.length - 1].getToTable(),
                                                       MULTI_PROPERTY_TABLE );
        table.addColumn( column );
    }

    /**
     * Collects the tables and their columns used in the relation tables from a feature property to
     * its content feature type. Builds the <code>tables</code> member map from this data.
     *
     * @param relations
     *            relation tables from the annotation of the feature property type
     * @param property
     * @param targetType
     *            feature type contained in the feature property
     */
    private void buildTableMap( TableRelation[] relations, MappedPropertyType property, MappedFeatureType targetType ) {

        TableDefinition table = lookupTableDefinition( relations[0].getFromTable(), FEATURE_TYPE_TABLE );

        // process tables used in the 'To'-element of each 'Relation'-element (except the last)
        for ( int i = 0; i < relations.length - 1; i++ ) {
            String tableName = relations[i].getToTable();
            table = lookupTableDefinition( tableName, JOIN_TABLE );
            MappingField[] toFields = relations[i].getToFields();
            for ( int j = 0; j < toFields.length; j++ ) {
                // boolean toIsFK = relations[i].getFKInfo() == TableRelation.FK_INFO.fkIsToField;
                ColumnDefinition column = new ColumnDefinition( toFields[j].getField(), toFields[j].getType(), false,
                                                                true, false, -1 );
                // schmitz: assuming NOT part of PK
                table.addColumn( column );
            }
        }

        // process the table used in the 'To'-element of the last 'Relation'-element
        MappedFeatureType[] concreteTypes = targetType.getConcreteSubstitutions();
        MappingField[] toFields = relations[relations.length - 1].getToFields();

        // if it refers to several target tables (target feature type is abstract), an additional
        // column is needed (which determines the target feature type)
        if ( concreteTypes.length > 1 ) {
            String typeColumn = "featuretype";
            if ( relations.length == 1 ) {
                typeColumn = FT_PREFIX + property.getTableRelations()[0].getFromFields()[0].getField();
            }
            ColumnDefinition column = new ColumnDefinition( typeColumn, Types.VARCHAR, property.getMinOccurs() == 0,
                                                            false, -1 );
            table.addColumn( column );
        }
        for ( int i = 0; i < concreteTypes.length; i++ ) {
            MappedFeatureType concreteType = concreteTypes[i];
            String tableName = concreteType.getTable();
            table = lookupTableDefinition( tableName, FEATURE_TYPE_TABLE );
            for ( int j = 0; j < toFields.length; j++ ) {
                ColumnDefinition column = new ColumnDefinition( toFields[j].getField(), toFields[j].getType(), false,
                                                                false, -1 );
                table.addColumn( column );
            }
        }

        // process tables used in the 'From'-element of each 'Relation'-element (except the first)
        for ( int i = 1; i < relations.length; i++ ) {
            String tableName = relations[i].getFromTable();
            if ( i != relations.length - 1 ) {
                table = lookupTableDefinition( tableName, JOIN_TABLE );
            } else {
                table = lookupTableDefinition( tableName, FEATURE_TYPE_TABLE );
            }
            MappingField[] fromFields = relations[i].getFromFields();
            for ( int j = 0; j < fromFields.length; j++ ) {
                ColumnDefinition column = new ColumnDefinition( fromFields[j].getField(), fromFields[j].getType(),
                                                                false, true, false, -1 );
                table.addColumn( column );
            }
        }
    }

    /**
     * Generates the DDL statements that can be used to build a relational schema that backs the
     * GML schema.
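     * <p>
     * The generated script is structured roughly like this (a sketch only; the table and sequence
     * names come from the input schema, and the concrete SQL is produced by the subclass):
     *
     * <pre>
     *   CREATE SEQUENCE ...;                    (one statement per id generation sequence)
     *   CREATE TABLE ...;  CREATE INDEX ...;    (feature type tables)
     *   CREATE TABLE ...;  CREATE INDEX ...;    (join tables)
     *   CREATE TABLE ...;  CREATE INDEX ...;    (property tables)
     * </pre>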
     *
     * @param outputFile
     * @throws IOException
     */
    public void generateCreateScript( String outputFile )
                            throws IOException {
        PrintWriter writer = new PrintWriter( new FileWriter( outputFile ) );

        System.out.println( Messages.format( "CREATE_SEQUENCES", new Integer( sequences.size() ) ) );
        if ( sequences.size() > 0 ) {
            writer.print( "/* CREATE SEQUENCES (" + sequences.size() + ") */\n" );
            for ( String sequenceName : sequences ) {
                writer.print( '\n' );
                writer.print( generateCreateSequenceStmt( sequenceName ) );
            }
        }

        TableDefinition[] tables = getTables( FEATURE_TYPE_TABLE );
        System.out.println( Messages.format( "CREATE_FEATURE_TYPE", new Integer( tables.length ) ) );
        writer.print( "\n\n/* CREATE FEATURE TABLES (" + tables.length + ") */\n" );
        for ( int i = 0; i < tables.length; i++ ) {
            System.out.println( tables[i].tableName );
            writer.print( '\n' );
            writer.print( generateCreateTableStmt( tables[i] ) );
            writer.print( generateCreateIndexStmts( tables[i] ) );
        }

        tables = getTables( JOIN_TABLE );
        if ( tables.length != 0 ) {
            writer.print( "\n\n/* CREATE JOIN TABLES (" + tables.length + ") */\n" );
        }
        System.out.println( Messages.format( "CREATE_JOIN_TABLES", new Integer( tables.length ) ) );
        for ( int i = 0; i < tables.length; i++ ) {
            System.out.println( tables[i].tableName );
            writer.print( '\n' );
            writer.print( generateCreateTableStmt( tables[i] ) );
            writer.print( generateCreateIndexStmts( tables[i] ) );
        }

        tables = getTables( MULTI_PROPERTY_TABLE );
        if ( tables.length != 0 ) {
            writer.print( "\n\n/* CREATE PROPERTY TABLES (" + tables.length + ") */\n" );
        }
        System.out.println( Messages.format( "CREATE_PROPERTY_TABLES", new Integer( tables.length ) ) );
        for ( int i = 0; i < tables.length; i++ ) {
            System.out.println( tables[i].tableName );
            writer.print( '\n' );
            writer.print( generateCreateTableStmt( tables[i] ) );
            writer.print( generateCreateIndexStmts( tables[i] ) );
        }
        writer.close();
    }

    /**
     * Generates the DDL statements that can be used to remove the relational schema again.
     *
     * @param outputFile
     * @throws IOException
     */
    public void generateDropScript( String outputFile )
                            throws IOException {
        PrintWriter writer = new PrintWriter( new FileWriter( outputFile ) );

        TableDefinition[] tables = getTables( FEATURE_TYPE_TABLE );
        System.out.println( Messages.format( "DROP_FEATURE_TYPE", new Integer( tables.length ) ) );
        writer.print( "/* DROP FEATURE TABLES (" + tables.length + ") */\n" );
        for ( int i = 0; i < tables.length; i++ ) {
            writer.print( '\n' );
            writer.print( generateDropIndexStmts( tables[i] ) );
            writer.print( generateDropTableStmt( tables[i] ) );
        }

        tables = getTables( JOIN_TABLE );
        writer.print( "\n\n/* DROP JOIN TABLES (" + tables.length + ") */\n" );
        System.out.println( Messages.format( "DROP_JOIN_TABLES", new Integer( tables.length ) ) );
        for ( int i = 0; i < tables.length; i++ ) {
            writer.print( '\n' );
            writer.print( generateDropIndexStmts( tables[i] ) );
            writer.print( generateDropTableStmt( tables[i] ) );
        }

        tables = getTables( MULTI_PROPERTY_TABLE );
        writer.print( "\n\n/* DROP PROPERTY TABLES (" + tables.length + ") */\n" );
        System.out.println( Messages.format( "DROP_PROPERTY_TABLES", new Integer( tables.length ) ) );
        for ( int i = 0; i < tables.length; i++ ) {
            writer.print( '\n' );
            writer.print( generateDropIndexStmts( tables[i] ) );
            writer.print( generateDropTableStmt( tables[i] ) );
        }

        System.out.println( Messages.format( "DROP_SEQUENCES", new Integer( sequences.size() ) ) );
        if ( sequences.size() > 0 ) {
            writer.print( "\n\n/* DROP SEQUENCES (" + sequences.size() + ") */\n" );
            for ( String sequenceName : sequences ) {
                writer.print( '\n' );
                writer.print( generateDropSequenceStmt( sequenceName ) );
            }
        }

        writer.close();
    }

    /**
     * @param args
     * @throws IOException
     * @throws SAXException
     * @throws XMLParsingException
     * @throws XMLSchemaException
     * @throws UnknownCRSException
     */
    public static void main( String[] args )
                            throws IOException, SAXException, XMLParsingException, XMLSchemaException,
                            UnknownCRSException {

        if ( args.length != 4 ) {
            System.out.println( "Usage: DDLGenerator <FLAVOUR> <input.xsd> <create.sql> <drop.sql>" );
            System.exit( 0 );
        }

        String flavour = args[0];
        String schemaFile = args[1];
        String createFile = args[2];
        String dropFile = args[3];

        DDLGenerator generator = null;
        if ( "POSTGIS".equals( flavour ) ) {
            generator = new PostGISDDLGenerator( new File( schemaFile ).toURI().toURL() );
        } else if ( "ORACLE".equals( flavour ) ) {
            generator = new OracleDDLGenerator( new File( schemaFile ).toURI().toURL() );
        } else {
            System.out.println( Messages.format( "ERROR_UNSUPPORTED_FLAVOUR", flavour ) );
            // abort here, otherwise the (null) generator would cause a NullPointerException below
            return;
        }

        generator.generateCreateScript( createFile );
        generator.generateDropScript( dropFile );
    }

    /**
     * Returns a string representation of the object.
     *
     * @return a string representation of the object
     */
    @Override
    public String toString() {
        StringBuffer sb = new StringBuffer( Messages.getString( "RELATIONAL_SCHEMA" ) );
        sb.append( '\n' );

        TableDefinition[] tables = getTables( FEATURE_TYPE_TABLE );
        sb.append( '\n' );
        sb.append( tables.length );
        sb.append( " feature type tables\n\n" );
        for ( int i = 0; i < tables.length; i++ ) {
            sb.append( tables[i] );
            sb.append( '\n' );
        }

        sb.append( '\n' );
        tables = getTables( JOIN_TABLE );
        sb.append( tables.length );
        sb.append( " join tables\n\n" );
        for ( int i = 0; i < tables.length; i++ ) {
            sb.append( tables[i] );
            sb.append( '\n' );
        }

        sb.append( '\n' );
        tables = getTables( MULTI_PROPERTY_TABLE );
        sb.append( tables.length );
        sb.append( " property tables\n\n" );
        for ( int i = 0; i < tables.length; i++ ) {
            sb.append( tables[i] );
            sb.append( '\n' );
        }
        return sb.toString();
    }

    class TableDefinition {

        private int type;

        String tableName;

        private Map<String, ColumnDefinition> columnsMap = new LinkedHashMap<String, ColumnDefinition>();

        TableDefinition( String tableName, int type ) {
            this.type = type;
            this.tableName = tableName;
        }

        String getName() {
            return this.tableName;
        }

        int getType() {
            return this.type;
        }

        ColumnDefinition[] getColumns() {
            Collection<ColumnDefinition> columns = new ArrayList<ColumnDefinition>();
            Iterator<String> iter = columnsMap.keySet().iterator();
            while ( iter.hasNext() ) {
                String columnName = iter.next();
                columns.add( columnsMap.get( columnName ) );
            }
            return columns.toArray( new ColumnDefinition[columns.size()] );
        }

        ColumnDefinition[] getPKColumns() {
            Collection<ColumnDefinition> columns = new ArrayList<ColumnDefinition>();
            Iterator<String> iter = columnsMap.keySet().iterator();
            while ( iter.hasNext() ) {
                String columnName = iter.next();
                ColumnDefinition column = columnsMap.get( columnName );
                if ( column.isPartOfPK() ) {
                    columns.add( columnsMap.get( columnName ) );
                }
            }
            return columns.toArray( new ColumnDefinition[columns.size()] );
        }

        ColumnDefinition getColumn( String name ) {
            return columnsMap.get( name );
        }

        void addColumn( ColumnDefinition column ) {
            ColumnDefinition oldColumn = columnsMap.get( column.getName() );
            if ( oldColumn != null ) {
                if ( !( column.getType() == oldColumn.getType() ) ) {
                    String msg = Messages.format( "ERROR_COLUMN_DEFINITION_TYPES", column.getName(),
                                                  oldColumn.isNullable() ? "NULLABLE" : "NOT NULLABLE",
"NULLABLE" : "NOT NULLABLE" ); 840 throw new RuntimeException( msg ); 841 842 } 843 if ( oldColumn.isPartOfPK() ) { 844 column = oldColumn; 845 } 846 } 847 columnsMap.put( column.getName(), column ); 848 } 849 850 void addColumns( Collection<ColumnDefinition> columns ) { 851 Iterator<ColumnDefinition> iter = columns.iterator(); 852 while ( iter.hasNext() ) { 853 ColumnDefinition column = iter.next(); 854 addColumn( column ); 855 } 856 } 857 858 @Override 859 public String toString() { 860 StringBuffer sb = new StringBuffer(); 861 sb.append( Messages.format( "TABLE", this.tableName ) ); 862 sb.append( Messages.getString( "PRIMARY_KEY" ) ); 863 ColumnDefinition[] pkColumns = getPKColumns(); 864 for ( int i = 0; i < pkColumns.length; i++ ) { 865 sb.append( '"' ); 866 sb.append( pkColumns[i].getName() ); 867 sb.append( '"' ); 868 if ( i != pkColumns.length - 1 ) { 869 sb.append( ", " ); 870 } 871 } 872 sb.append( '\n' ); 873 Iterator<String> columnNameIter = this.columnsMap.keySet().iterator(); 874 while ( columnNameIter.hasNext() ) { 875 String columnName = columnNameIter.next(); 876 ColumnDefinition column = this.columnsMap.get( columnName ); 877 try { 878 sb.append( Messages.format( "COLUMN", columnName, 879 Types.getTypeNameForSQLTypeCode( column.getType() ) + ":" 880 + column.getType(), 881 new Boolean( column.isNullable() ) ) ); 882 } catch ( UnknownTypeException e ) { 883 // TODO Auto-generated catch block 884 e.printStackTrace(); 885 } 886 sb.append( '\n' ); 887 } 888 return sb.toString(); 889 } 890 } 891 892 class ColumnDefinition { 893 894 private String columnName; 895 896 private int type; 897 898 private boolean isNullable; 899 900 private boolean isGeometryColumn; 901 902 private int srsCode; 903 904 private boolean isPartOfPK; 905 906 ColumnDefinition( String columnName, int type, boolean isNullable, boolean isGeometryColumn, int srsCode ) { 907 this.columnName = columnName; 908 this.type = type; 909 this.isNullable = isNullable; 910 this.isGeometryColumn = isGeometryColumn; 911 this.srsCode = srsCode; 912 } 913 914 ColumnDefinition( String columnName, int type, boolean isNullable, boolean isPartOfPK, 915 boolean isGeometryColumn, int srsCode ) { 916 this( columnName, type, isNullable, isGeometryColumn, srsCode ); 917 this.isPartOfPK = isPartOfPK; 918 } 919 920 String getName() { 921 return this.columnName; 922 } 923 924 int getType() { 925 return this.type; 926 } 927 928 boolean isNullable() { 929 return this.isNullable; 930 } 931 932 boolean isGeometry() { 933 return this.isGeometryColumn; 934 } 935 936 int getSRS() { 937 return this.srsCode; 938 } 939 940 boolean isPartOfPK() { 941 return this.isPartOfPK; 942 } 943 } 944 }