001 //$HeadURL: svn+ssh://jwilden@svn.wald.intevation.org/deegree/base/branches/2.5_testing/src/org/deegree/io/dbaseapi/DBaseFile.java $ 002 /*---------------------------------------------------------------------------- 003 This file is part of deegree, http://deegree.org/ 004 Copyright (C) 2001-2009 by: 005 Department of Geography, University of Bonn 006 and 007 lat/lon GmbH 008 009 This library is free software; you can redistribute it and/or modify it under 010 the terms of the GNU Lesser General Public License as published by the Free 011 Software Foundation; either version 2.1 of the License, or (at your option) 012 any later version. 013 This library is distributed in the hope that it will be useful, but WITHOUT 014 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS 015 FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more 016 details. 017 You should have received a copy of the GNU Lesser General Public License 018 along with this library; if not, write to the Free Software Foundation, Inc., 019 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA 020 021 Contact information: 022 023 lat/lon GmbH 024 Aennchenstr. 19, 53177 Bonn 025 Germany 026 http://lat-lon.de/ 027 028 Department of Geography, University of Bonn 029 Prof. Dr. 
Klaus Greve 030 Postfach 1147, 53001 Bonn 031 Germany 032 http://www.geographie.uni-bonn.de/deegree/ 033 034 e-mail: info@deegree.org 035 ----------------------------------------------------------------------------*/ 036 package org.deegree.io.dbaseapi; 037 038 import java.io.ByteArrayOutputStream; 039 import java.io.File; 040 import java.io.FileOutputStream; 041 import java.io.IOException; 042 import java.io.RandomAccessFile; 043 import java.net.URI; 044 import java.util.ArrayList; 045 import java.util.HashMap; 046 import java.util.List; 047 import java.util.Map; 048 049 import org.deegree.datatypes.QualifiedName; 050 import org.deegree.datatypes.Types; 051 import org.deegree.framework.util.TimeTools; 052 import org.deegree.model.feature.Feature; 053 import org.deegree.model.feature.FeatureFactory; 054 import org.deegree.model.feature.FeatureProperty; 055 import org.deegree.model.feature.schema.FeatureType; 056 import org.deegree.model.feature.schema.GeometryPropertyType; 057 import org.deegree.model.feature.schema.PropertyType; 058 import org.deegree.model.spatialschema.ByteUtils; 059 import org.deegree.ogcbase.CommonNamespaces; 060 061 /** 062 * the datatypes of the dBase file and their representation as java types: 063 * 064 * dBase-type dBase-type-ID java-type 065 * 066 * character "C" String float "F" Float number "N" Double logical "L" String memo "M" String date "D" Date binary "B" 067 * ByteArrayOutputStream 068 * 069 * @author <a href="mailto:poth@lat-lon.de">Andreas Poth</a> 070 * @author last edited by: $Author: apoth $ 071 * 072 * @version $Revision: 24227 $, $Date: 2010-05-07 11:57:54 +0200 (Fr, 07 Mai 2010) $ 073 */ 074 public class DBaseFile { 075 076 private static final URI DEEGREEAPP = CommonNamespaces.buildNSURI( "http://www.deegree.org/app" ); 077 078 private static final String APP_PREFIX = "app"; 079 080 private ArrayList<String> colHeader = new ArrayList<String>(); 081 082 // representing the datasection of the dBase file 083 // only needed 
for writing a dBase file 084 private DBFDataSection dataSection = null; 085 086 // feature type of generated features 087 private FeatureType ft; 088 089 // keys: property types, values: column (in dbase file) 090 private Map<PropertyType, String> ftMapping = new HashMap<PropertyType, String>( 100 ); 091 092 // Hashtable to contain info abouts in the table 093 private Map<String, dbfCol> column_info = new HashMap<String, dbfCol>(); 094 095 // references to the dbase file 096 private RandomAccessFile rafDbf; 097 098 // represents the dBase file header 099 // only needed for writing the dBase file 100 private DBFHeader header = null; 101 102 // representing the name of the dBase file 103 // only needed for writing the dBase file 104 private String fname = null; 105 106 private String ftName = null; 107 108 // number of records in the table 109 private double file_numrecs; 110 111 // data start position, and length of the data 112 private int file_datalength; 113 114 // data start position, and length of the data 115 private int file_datap; 116 117 // flag which indicates if a dBase file should be 118 // read or writed. 
119 // filemode = 0 : read only 120 // filemode = 1 : write only 121 private int filemode = 0; 122 123 // number of columns 124 private int num_fields; 125 126 // current record 127 private long record_number = 0; 128 129 // size of the cache used for reading data from the dbase table 130 private long cacheSize = 1000000; 131 132 // array containing the data of the cache 133 private byte[] dataArray = null; 134 135 // file position the caches starts 136 private long startIndex = 0; 137 138 private List<FeatureProperty> props = new ArrayList<FeatureProperty>(50); 139 140 /** 141 * constructor<BR> 142 * only for reading a dBase file<BR> 143 * 144 * @param url 145 * @throws IOException 146 */ 147 public DBaseFile( String url ) throws IOException { 148 fname = url; 149 150 // creates rafDbf 151 File file = new File( url + ".dbf" ); 152 if ( !file.exists() ) { 153 file = new File( url + ".DBF" ); 154 } 155 rafDbf = new RandomAccessFile( file, "r" ); 156 157 // dataArray = new byte[(int)rafDbf.length()]; 158 if ( cacheSize > rafDbf.length() ) { 159 cacheSize = rafDbf.length(); 160 } 161 162 dataArray = new byte[(int) cacheSize]; 163 rafDbf.read( dataArray ); 164 rafDbf.seek( 0 ); 165 166 // initialize dbase file 167 initDBaseFile(); 168 169 filemode = 0; 170 } 171 172 /** 173 * constructor<BR> 174 * only for writing a dBase file<BR> 175 * 176 * @param url 177 * @param fieldDesc 178 * @throws DBaseException 179 * 180 */ 181 public DBaseFile( String url, FieldDescriptor[] fieldDesc ) throws DBaseException { 182 fname = url; 183 184 // create header 185 header = new DBFHeader( fieldDesc ); 186 187 // create data section 188 dataSection = new DBFDataSection( fieldDesc ); 189 190 filemode = 1; 191 } 192 193 /** 194 * 195 */ 196 public void close() { 197 try { 198 if ( rafDbf != null ) { 199 // just true for reading access 200 rafDbf.close(); 201 } 202 } catch ( Exception ex ) { 203 // should never happen 204 ex.printStackTrace(); 205 } 206 } 207 208 /** 209 * method: 
initDBaseFile(); inits a DBF file. This is based on Pratap Pereira's Xbase.pm perl module.
     * Parses the DBF header (record count, data offset, record length) and the 32-byte field
     * descriptors, filling column_info/colHeader, then derives the canonical feature type.
     * NOTE: the reads below are strictly sequential — their order mirrors the DBF header layout
     * and must not be changed.
     */
    private void initDBaseFile()
                            throws IOException {
        // position the record pointer at 0
        rafDbf.seek( 0 );

        // skip the first four header bytes (file type byte plus the
        // last-update date: year, month, day) — values are not used
        fixByte( rafDbf.readByte() );
        fixByte( rafDbf.readByte() );
        fixByte( rafDbf.readByte() );
        fixByte( rafDbf.readByte() );

        // a byte array to hold little-endian long data
        byte[] b = new byte[4];

        // bytes 4-7: number of records in the table
        rafDbf.readFully( b );

        // convert the byte array into a long (stored as a double)
        file_numrecs = ByteUtils.readLEInt( b, 0 );

        b = null;

        // a byte array to hold little-endian short data
        b = new byte[2];

        // bytes 8-9: offset where the data section starts in the file
        rafDbf.readFully( b );
        file_datap = ByteUtils.readLEShort( b, 0 );

        // bytes 10-11: length in bytes of one data record
        rafDbf.readFully( b );
        file_datalength = ByteUtils.readLEShort( b, 0 );

        // number of fields: header is 32 bytes + n * 32-byte field
        // descriptors + 1 terminator byte, hence (file_datap - 33) / 32
        num_fields = ( file_datap - 33 ) / 32;

        // read in the column data
        int locn = 0; // running byte offset of the current column within a record

        // process each field descriptor
        for ( int i = 1; i <= num_fields; i++ ) {
            // seek the position of the field definition data.
            // This information appears after the first 32 byte
            // table information, and lives in 32 byte chunks.
            rafDbf.seek( ( ( i - 1 ) * 32 ) + 32 );

            b = null;

            // bytes 0-10 of the descriptor: column name (NUL-padded)
            b = new byte[11];
            rafDbf.readFully( b );

            // convert the byte array to a String; upper-cased for case-insensitive lookup
            String col_name = new String( b ).trim().toUpperCase();
            while ( colHeader.contains( col_name ) ) {
                col_name = col_name + "__" + i; // do it like shp2pgsql to avoid same-column names all over
            }

            // byte 11: the column type character (C, F, N, L, M, D, B)
            char[] c = new char[1];
            c[0] = (char) rafDbf.readByte();

            // skip four reserved bytes
            rafDbf.skipBytes( 4 );

            // bytes 16-17: field length and decimal precision (unsigned)
            short flen = fixByte( rafDbf.readByte() );
            short fdec = fixByte( rafDbf.readByte() );

            // set the field position to the current
            // value of locn
            int fpos = locn;

            // increment locn by the length of this field.
            locn += flen;

            // create a new dbfCol object and assign it the
            // attributes of the current field
            dbfCol column = new dbfCol( col_name );
            column.type = new String( c );
            column.size = flen;
            column.position = fpos + 1;
            column.prec = fdec;

            // to be done: get the name of dbf-table via method in ShapeFile
            column.table = "NOT";

            column_info.put( col_name, column );
            colHeader.add( col_name );
        } // end for

        ft = createCanonicalFeatureType();

    } // end of initDBaseFile

    /**
     * Overrides the default feature type (which is generated from all columns in the dbase file) to allow customized
     * naming and ordering of properties.
     *
     * @param ft
     *            feature type to use instead of the canonical one
     * @param ftMapping
     *            maps each property type of ft to the dbase column name it is read from
     */
    public void setFeatureType( FeatureType ft, Map<PropertyType, String> ftMapping ) {
        this.ft = ft;
        this.ftMapping = ftMapping;
    }

    /**
     * Creates a canonical {@link FeatureType} from all fields of the <code>DBaseFile</code>.
     *
     * @return feature type that contains all fields as property types
     */
    private FeatureType createCanonicalFeatureType() {
        dbfCol column = null;

        // one property per dbase column plus one trailing geometry property
        PropertyType[] ftp = new PropertyType[colHeader.size() + 1];

        for ( int i = 0; i < colHeader.size(); i++ ) {
            // retrieve the dbfCol object which corresponds to this column.
            column = column_info.get( colHeader.get( i ) );

            QualifiedName name = new QualifiedName( APP_PREFIX, column.name, DEEGREEAPP );

            // map the dbase column type to an SQL type:
            // C/M/L/D -> VARCHAR, F/N -> INTEGER/BIGINT/FLOAT/DOUBLE
            // depending on size and precision, B -> BLOB
            if ( column.type.equalsIgnoreCase( "C" ) ) {
                ftp[i] = FeatureFactory.createSimplePropertyType( name, Types.VARCHAR, true );
            } else if ( column.type.equalsIgnoreCase( "F" ) || column.type.equalsIgnoreCase( "N" ) ) {
                if ( column.prec == 0 ) {
                    // no decimals: integral type; < 10 digits fits an int
                    if ( column.size < 10 ) {
                        ftp[i] = FeatureFactory.createSimplePropertyType( name, Types.INTEGER, true );
                    } else {
                        ftp[i] = FeatureFactory.createSimplePropertyType( name, Types.BIGINT, true );
                    }
                } else {
                    if ( column.size < 8 ) {
                        ftp[i] = FeatureFactory.createSimplePropertyType( name, Types.FLOAT, true );
                    } else {
                        ftp[i] = FeatureFactory.createSimplePropertyType( name, Types.DOUBLE, true );
                    }
                }
            } else if ( column.type.equalsIgnoreCase( "M" ) ) {
                ftp[i] = FeatureFactory.createSimplePropertyType( name, Types.VARCHAR, true );
            } else if ( column.type.equalsIgnoreCase( "L" ) ) {
                ftp[i] = FeatureFactory.createSimplePropertyType( name, Types.VARCHAR, true );
            } else if ( column.type.equalsIgnoreCase( "D" ) ) {
                ftp[i] = FeatureFactory.createSimplePropertyType( name, Types.VARCHAR, true );
            } else if ( column.type.equalsIgnoreCase( "B" ) ) {
                ftp[i] = FeatureFactory.createSimplePropertyType( name, Types.BLOB, true );
            }
            // NOTE(review): an unrecognized type character leaves ftp[i] null here
            // and maps a null key below — confirm all DBF type codes are covered

            this.ftMapping.put( ftp[i], column.name );
        }

        // derive the feature type name from the file name (strip the
        // directory part, trying '/' first, then '\')
        int index = fname.lastIndexOf( "/" );
        ftName = fname;
        if ( index >= 0 ) {
            ftName = fname.substring( index + 1 );
        } else {
            index = fname.lastIndexOf( "\\" );
            if ( index >= 0 ) {
                ftName = fname.substring( index + 1 );
            }
        }

        QualifiedName featureTypeName = new QualifiedName( APP_PREFIX, ftName, DEEGREEAPP );

        // trailing dummy geometry property (filled in by the shapefile reader)
        QualifiedName name = new QualifiedName( APP_PREFIX, "GEOM", DEEGREEAPP );
        ftp[ftp.length - 1] = FeatureFactory.createGeometryPropertyType( name, Types.GEOMETRY_PROPERTY_NAME, 1, 1 );

        return FeatureFactory.createFeatureType( featureTypeName, false, ftp );
    }

    /**
     *
     * @return number of records in the table
     * @throws DBaseException
     *             if the instance was opened in write-only mode
     */
    public int getRecordNum()
                            throws DBaseException {
        if ( filemode == 1 ) {
            throw new DBaseException( "class is initialized in write-only mode" );
        }

        return (int) file_numrecs;
    }

    /**
     * Positions the record pointer at the top of the table.
     *
     * @throws DBaseException
     *             if the instance was opened in write-only mode
     */
    public void goTop()
                            throws DBaseException {
        if ( filemode == 1 ) {
            throw new DBaseException( "class is initialized in write-only mode" );
        }

        record_number = 0;
    }

    /**
     * Advance the record pointer to the next record.
     *
     * @return true if pointer has been increased
     * @throws DBaseException
     *             if the instance was opened in write-only mode
     */
    public boolean nextRecord()
                            throws DBaseException {
        if ( filemode == 1 ) {
            throw new DBaseException( "class is initialized in write-only mode" );
        }

        if ( record_number < file_numrecs ) {
            record_number++;
            return true;
        }
        return false;

    }

    /**
     * Returns a column's raw string value from the current row (the record addressed by
     * record_number), served from the in-memory cache when possible.
     *
     * @param col_name
     * @return column's string value from the current row.
     * @throws DBaseException
     *             if the instance was opened in write-only mode
     */
    public String getColumn( String col_name )
                            throws DBaseException {
        if ( filemode == 1 ) {
            throw new DBaseException( "class is initialized in write-only mode" );
        }

        try {
            // retrieve the dbfCol object which corresponds
            // to this column.
            dbfCol column = column_info.get( col_name );

            // seek the starting offset of the current record,
            // as indicated by record_number
            long pos = file_datap + ( ( record_number - 1 ) * file_datalength );

            // read data from cache if the requested part of the dbase file is
            // within it
            if ( ( pos >= startIndex ) && ( ( pos + column.position + column.size ) < ( startIndex + cacheSize ) ) ) {
                // cache hit: translate the file offset to a cache offset
                pos = pos - startIndex;
            } else {
                // cache miss: refill the cache starting at the current record
                // NOTE(review): read() may fill the buffer only partially near EOF — the
                // window check above is presumably meant to prevent reading past it; verify
                rafDbf.seek( pos );
                rafDbf.read( dataArray );
                startIndex = pos;
                pos = 0;
            }
            int ff = (int) ( pos + column.position );
            // NOTE(review): platform default charset is used to decode field bytes — confirm
            return new String( dataArray, ff, column.size ).trim();
        } catch ( Exception e ) {
            // legacy behavior: never propagate, return the exception text instead
            e.printStackTrace();
            return e.toString();
        }
    }

    /**
     * @return properties (column headers) of the dBase-file<BR>
     * @throws DBaseException
     *             if the instance was opened in write-only mode
     */
    public String[] getProperties()
                            throws DBaseException {
        if ( filemode == 1 ) {
            throw new DBaseException( "class is initialized in write-only mode" );
        }

        return colHeader.toArray( new String[colHeader.size()] );
    }

    /**
     * @return datatype (dBase type character) of each column of the database<BR>
     * @throws DBaseException
     *             if the instance was opened in write-only mode
     */
    public String[] getDataTypes()
                            throws DBaseException {
        if ( filemode == 1 ) {
            throw new DBaseException( "class is initialized in write-only mode" );
        }

        String[] datatypes = new String[colHeader.size()];
        dbfCol column;

        for ( int i = 0; i < colHeader.size(); i++ ) {
            // retrieve the dbfCol object which corresponds
            // to this column.
            column = column_info.get( colHeader.get( i ) );

            datatypes[i] = column.type.trim();
        }

        return datatypes;
    }

    /**
     * @param container
     * @param element
     * @return true if the container string array contains element<BR>
     */
    private boolean contains( String[] container, String element ) {
        for ( int i = 0; i < container.length; i++ )

            if ( container[i].equals( element ) ) {
                return true;
            }

        return false;
    }

    /**
     * @param field
     * @return the size of a column
     * @throws DBaseException
     *             if the field is not a column of this file
     */
    public int getDataLength( String field )
                            throws DBaseException {
        dbfCol col = column_info.get( field );
        if ( col == null )
            throw new DBaseException( "Field " + field + " not found" );

        return col.size;
    }

    /**
     * @param fields
     * @return the datatype of each column of the database specified by fields<BR>
     * @throws DBaseException
     *             if the instance was opened in write-only mode
     */
    public String[] getDataTypes( String[] fields )
                            throws DBaseException {
        if ( filemode == 1 ) {
            throw new DBaseException( "class is initialized in write-only mode" );
        }

        ArrayList<String> vec = new ArrayList<String>();
        dbfCol column;

        for ( int i = 0; i < colHeader.size(); i++ ) {
            // check if the current (i'th) column (string) is
            // within the array of specified columns
            if ( contains( fields, colHeader.get( i ) ) ) {
                // retrieve the dbfCol object which corresponds
                // to this column.
                column = column_info.get( colHeader.get( i ) );

                vec.add( column.type.trim() );
            }
        }

        return vec.toArray( new String[vec.size()] );
    }

    /**
     * Returns a row of the dBase file as a {@link Feature} instance.
578 * 579 * @param rowNo 580 * @return a row of the dBase file as a Feature instance 581 * @throws DBaseException 582 */ 583 public Feature getFRow( int rowNo ) 584 throws DBaseException { 585 586 Map<String, Object> columnValues = getRow( rowNo ); 587 588 PropertyType[] propTypes = this.ft.getProperties(); 589 590 props.clear(); 591 for ( int i = 0; i < propTypes.length; i++ ) { 592 PropertyType pt = propTypes[i]; 593 if ( pt instanceof GeometryPropertyType ) { 594 // insert dummy property for geometry 595 FeatureProperty prop = FeatureFactory.createFeatureProperty( pt.getName(), null ); 596 props.add( prop ); 597 } else { 598 String columnName = this.ftMapping.get( pt ); 599 Object columnValue = columnValues.get( columnName ); 600 if ( columnValue != null ) { 601 FeatureProperty prop = FeatureFactory.createFeatureProperty( pt.getName(), columnValue ); 602 props.add( prop ); 603 } 604 } 605 } 606 FeatureProperty[] fp = props.toArray( new FeatureProperty[props.size()] ); 607 return FeatureFactory.createFeature( ftName + rowNo, ft, fp ); 608 } 609 610 /** 611 * 612 * @param rowNo 613 * @return a row of the dbase file 614 * @throws DBaseException 615 */ 616 private Map<String, Object> getRow( int rowNo ) 617 throws DBaseException { 618 619 Map<String, Object> columnValues = new HashMap<String, Object>(); 620 621 goTop(); 622 record_number += rowNo; 623 624 for ( int i = 0; i < colHeader.size(); i++ ) { 625 626 // retrieve the dbfCol object which corresponds to this column. 
627 dbfCol column = column_info.get( colHeader.get( i ) ); 628 629 String value = getColumn( column.name ); 630 Object columnValue = value; 631 632 if ( value != null ) { 633 // cast the value of the i'th column to corresponding datatype 634 if ( column.type.equalsIgnoreCase( "C" ) ) { 635 // nothing to do 636 } else if ( column.type.equalsIgnoreCase( "F" ) || column.type.equalsIgnoreCase( "N" ) ) { 637 try { 638 if ( column.prec == 0 ) { 639 if ( column.size < 10 ) { 640 columnValue = new Integer( value ); 641 } else { 642 columnValue = new Long( value ); 643 } 644 } else { 645 if ( column.size < 8 ) { 646 columnValue = new Float( value ); 647 } else { 648 columnValue = new Double( value ); 649 } 650 } 651 } catch ( Exception ex ) { 652 columnValue = new Double( "0" ); 653 } 654 } else if ( column.type.equalsIgnoreCase( "M" ) ) { 655 // nothing to do 656 } else if ( column.type.equalsIgnoreCase( "L" ) ) { 657 // nothing to do 658 } else if ( column.type.equalsIgnoreCase( "D" ) ) { 659 if ( value.equals( "" ) ) { 660 columnValue = null; 661 } else { 662 String s = value.substring( 0, 4 ) + '-' + value.substring( 4, 6 ) + '-' 663 + value.substring( 6, 8 ); 664 columnValue = TimeTools.createCalendar( s ).getTime(); 665 } 666 } else if ( column.type.equalsIgnoreCase( "B" ) ) { 667 ByteArrayOutputStream os = new ByteArrayOutputStream( 10000 ); 668 try { 669 os.write( value.getBytes() ); 670 } catch ( IOException e ) { 671 e.printStackTrace(); 672 } 673 columnValue = os; 674 } 675 } else { 676 columnValue = ""; 677 } 678 columnValues.put( column.name, columnValue ); 679 } 680 681 return columnValues; 682 } 683 684 /** 685 * bytes are signed; let's fix them... 686 * 687 * @param b 688 * @return unsigned byte as short 689 */ 690 private static short fixByte( byte b ) { 691 if ( b < 0 ) { 692 return (short) ( b + 256 ); 693 } 694 695 return b; 696 } 697 698 /** 699 * creates the dbase file and writes all data to it if the file specified by fname (s.o.) 
exists it will be deleted! 700 * 701 * @throws IOException 702 * @throws DBaseException 703 */ 704 public void writeAllToFile() 705 throws IOException, DBaseException { 706 if ( filemode == 0 ) { 707 throw new DBaseException( "class is initialized in read-only mode" ); 708 } 709 710 // if a file with the retrieved filename exists, delete it! 711 File file = new File( fname + ".dbf" ); 712 713 if ( file.exists() ) { 714 file.delete(); 715 } 716 717 // create a new file 718 // RandomAccessFile rdbf = new RandomAccessFile( fname + ".dbf", "rw" ); 719 FileOutputStream fos = new FileOutputStream( fname + ".dbf" ); 720 try { 721 byte[] b = header.getHeader(); 722 int nRecords = dataSection.getNoOfRecords(); 723 // write number of records 724 ByteUtils.writeLEInt( b, 4, nRecords ); 725 // write header to the file 726 // rdbf.write( b ); 727 fos.write( b ); 728 // b = dataSection.getDataSection(fos); 729 dataSection.getDataSection( fos ); 730 // write datasection to the file 731 // rdbf.write( b ); 732 } catch ( IOException e ) { 733 throw e; 734 } finally { 735 // rdbf.close(); 736 fos.close(); 737 } 738 } 739 740 /** 741 * writes a data record to byte array representing the data section of the dBase file. The method gets the data type 742 * of each field in recData from fieldDesc wich has been set at the constructor. 743 * 744 * @param recData 745 * @throws DBaseException 746 */ 747 public void setRecord( List<?> recData ) 748 throws DBaseException { 749 if ( filemode == 0 ) { 750 throw new DBaseException( "class is initialized in read-only mode" ); 751 } 752 753 dataSection.setRecord( recData ); 754 } 755 756 /** 757 * writes a data record to byte array representing the data section of the dBase file. The method gets the data type 758 * of each field in recData from fieldDesc wich has been set at the constructor. index specifies the location of the 759 * retrieved record in the datasection. 
if an invalid index is used an exception will be thrown 760 * 761 * @param index 762 * @param recData 763 * @throws DBaseException 764 */ 765 public void setRecord( int index, List<?> recData ) 766 throws DBaseException { 767 if ( filemode == 0 ) { 768 throw new DBaseException( "class is initialized in read-only mode" ); 769 } 770 771 dataSection.setRecord( index, recData ); 772 } 773 774 /** 775 * @return the feature type of the generated features 776 */ 777 public FeatureType getFeatureType() { 778 return ft; 779 } 780 781 } // end of class DBaseFile 782 783 /** 784 * 785 * 786 * @version $Revision: 24227 $ 787 * @author <a href="mailto:poth@lat-lon.de">Andreas Poth</a> 788 */ 789 class tsColumn { 790 public String name = null; // the column's name 791 792 public String table = null; // the table which "owns" the column 793 794 public String type = null; // the column's type 795 796 public int prec = 0; // the column's precision 797 798 public int size = 0; // the column's size 799 800 /** 801 * 802 * Constructs a tsColumn object. 803 * 804 * @param s 805 * the column name 806 */ 807 tsColumn( String s ) { 808 name = s; 809 } 810 } // end of class tsColumn 811 812 /** 813 * 814 * 815 * @version $Revision: 24227 $ 816 * @author <a href="mailto:poth@lat-lon.de">Andreas Poth</a> 817 */ 818 class dbfCol extends tsColumn { 819 int position = 0; 820 821 /** 822 * Creates a new dbfCol object. 823 * 824 * @param c 825 */ 826 public dbfCol( String c ) { 827 super( c ); 828 } 829 }