001    //$HeadURL: https://svn.wald.intevation.org/svn/deegree/base/branches/2.3_testing/src/org/deegree/io/dbaseapi/DBaseFile.java $
002    /*----------------------------------------------------------------------------
003     This file is part of deegree, http://deegree.org/
004     Copyright (C) 2001-2009 by:
005     Department of Geography, University of Bonn
006     and
007     lat/lon GmbH
008    
009     This library is free software; you can redistribute it and/or modify it under
010     the terms of the GNU Lesser General Public License as published by the Free
011     Software Foundation; either version 2.1 of the License, or (at your option)
012     any later version.
013     This library is distributed in the hope that it will be useful, but WITHOUT
014     ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
015     FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
016     details.
017     You should have received a copy of the GNU Lesser General Public License
018     along with this library; if not, write to the Free Software Foundation, Inc.,
019     59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
020    
021     Contact information:
022    
023     lat/lon GmbH
024     Aennchenstr. 19, 53177 Bonn
025     Germany
026     http://lat-lon.de/
027    
028     Department of Geography, University of Bonn
029     Prof. Dr. Klaus Greve
030     Postfach 1147, 53001 Bonn
031     Germany
032     http://www.geographie.uni-bonn.de/deegree/
033    
034     e-mail: info@deegree.org
035     ----------------------------------------------------------------------------*/
036    package org.deegree.io.dbaseapi;
037    
038    import java.io.ByteArrayOutputStream;
039    import java.io.File;
040    import java.io.IOException;
041    import java.io.RandomAccessFile;
042    import java.net.URI;
043    import java.util.ArrayList;
044    import java.util.HashMap;
045    import java.util.List;
046    import java.util.Map;
047    
048    import org.deegree.datatypes.QualifiedName;
049    import org.deegree.datatypes.Types;
050    import org.deegree.framework.util.TimeTools;
051    import org.deegree.model.feature.Feature;
052    import org.deegree.model.feature.FeatureFactory;
053    import org.deegree.model.feature.FeatureProperty;
054    import org.deegree.model.feature.schema.FeatureType;
055    import org.deegree.model.feature.schema.GeometryPropertyType;
056    import org.deegree.model.feature.schema.PropertyType;
057    import org.deegree.model.spatialschema.ByteUtils;
058    import org.deegree.ogcbase.CommonNamespaces;
059    
060    /**
 * The datatypes of the dBase file and their representation as Java types:
 * 
 * <pre>
 * dBase type   dBase type ID   Java type
 * character    "C"             String
 * float        "F"             Float
 * number       "N"             Double
 * logical      "L"             String
 * memo         "M"             String
 * date         "D"             Date
 * binary       "B"             ByteArrayOutputStream
 * </pre>
067     * 
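 * A minimal read-access sketch (the file path is illustrative and is given without the ".dbf" extension, as expected
 * by the constructor):
 * 
 * <pre>
 * DBaseFile dbf = new DBaseFile( "/tmp/example" ); // opens /tmp/example.dbf
 * int rows = dbf.getRecordNum();
 * for ( int i = 1; i &lt;= rows; i++ ) {
 *     Feature feature = dbf.getFRow( i );
 *     // ... process the feature
 * }
 * dbf.close();
 * </pre>
 * 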
068     * @author <a href="mailto:poth@lat-lon.de">Andreas Poth</a>
069     * @author last edited by: $Author: aschmitz $
070     * 
071     * @version $Revision: 19488 $, $Date: 2009-09-03 15:05:04 +0200 (Do, 03. Sep 2009) $
072     */
073    public class DBaseFile {
074    
075        private static final URI DEEGREEAPP = CommonNamespaces.buildNSURI( "http://www.deegree.org/app" );
076    
077        private static final String APP_PREFIX = "app";
078    
079        private ArrayList<String> colHeader = new ArrayList<String>();
080    
081        // representing the datasection of the dBase file
082        // only needed for writing a dBase file
083        private DBFDataSection dataSection = null;
084    
085        // feature type of generated features
086        private FeatureType ft;
087    
088        // keys: property types, values: column (in dbase file)
089        private Map<PropertyType, String> ftMapping = new HashMap<PropertyType, String>( 100 );
090    
    // map containing information about the columns of the table
092        private Map<String, dbfCol> column_info = new HashMap<String, dbfCol>();
093    
094        // references to the dbase file
095        private RandomAccessFile rafDbf;
096    
097        // represents the dBase file header
098        // only needed for writing the dBase file
099        private DBFHeader header = null;
100    
101        // representing the name of the dBase file
102        // only needed for writing the dBase file
103        private String fname = null;
104    
105        private String ftName = null;
106    
107        // number of records in the table
108        private double file_numrecs;
109    
    // length of a single data record (in bytes)
    private int file_datalength;

    // position in the file at which the data section starts
    private int file_datap;
115    
    // flag which indicates if the dBase file should be
    // read or written.
118        // filemode = 0 : read only
119        // filemode = 1 : write only
120        private int filemode = 0;
121    
122        // number of columns
123        private int num_fields;
124    
125        // current record
126        private long record_number = 0;
127    
128        // size of the cache used for reading data from the dbase table
129        private long cacheSize = 1000000;
130    
131        // array containing the data of the cache
132        private byte[] dataArray = null;
133    
    // file position at which the cache starts
135        private long startIndex = 0;
136    
    /**
     * Constructor for read access to a dBase file.
     * 
     * @param url
     *            name of the dBase file without the ".dbf" extension
     * @throws IOException
     */
144        public DBaseFile( String url ) throws IOException {
145            fname = url;
146    
147            // creates rafDbf
148            File file = new File( url + ".dbf" );
149            if ( !file.exists() ) {
150                file = new File( url + ".DBF" );
151            }
152            rafDbf = new RandomAccessFile( file, "r" );
153    
154            // dataArray = new byte[(int)rafDbf.length()];
155            if ( cacheSize > rafDbf.length() ) {
156                cacheSize = rafDbf.length();
157            }
158    
159            dataArray = new byte[(int) cacheSize];
160            rafDbf.read( dataArray );
161            rafDbf.seek( 0 );
162    
163            // initialize dbase file
164            initDBaseFile();
165    
166            filemode = 0;
167        }
168    
169        /**
     * Constructor for write access to a dBase file.
172         * 
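     * A rough sketch of the write workflow (the construction of the FieldDescriptor array is omitted, as it depends
     * on the columns to be written; the file name is illustrative):
     * 
     * <pre>
     * FieldDescriptor[] fields = ...; // one descriptor per dBase column
     * DBaseFile dbf = new DBaseFile( "/tmp/example", fields ); // will create /tmp/example.dbf
     * ArrayList record = ...; // one value per column, in column order
     * dbf.setRecord( record );
     * dbf.writeAllToFile();
     * </pre>
     * 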
     * @param url
     *            name of the dBase file to be written (without the ".dbf" extension)
     * @param fieldDesc
     *            field descriptors defining the columns of the file
     * @throws DBaseException
177         */
178        public DBaseFile( String url, FieldDescriptor[] fieldDesc ) throws DBaseException {
179            fname = url;
180    
181            // create header
182            header = new DBFHeader( fieldDesc );
183    
184            // create data section
185            dataSection = new DBFDataSection( fieldDesc );
186    
187            filemode = 1;
188        }
189    
    /**
     * Closes the underlying dBase file. This only has an effect if the file has been opened for reading.
     */
193        public void close() {
194            try {
195                if ( rafDbf != null ) {
196                    // just true for reading access
197                    rafDbf.close();
198                }
199            } catch ( Exception ex ) {
200                // should never happen
201                ex.printStackTrace();
202            }
203        }
204    
205        /**
     * Initializes access to the dBase file by reading its header. This is based on Pratap Pereira's Xbase.pm Perl
     * module.
207         * 
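     * The header fields are read as little-endian values: byte 0 holds the file type, bytes 1-3 the date of the last
     * update, bytes 4-7 the number of records, bytes 8-9 the position at which the data section starts and bytes
     * 10-11 the length of a single record. The column definitions follow in 32-byte blocks starting at offset 32.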
208         */
209        private void initDBaseFile()
210                                throws IOException {
211            // position the record pointer at 0
212            rafDbf.seek( 0 );
213    
        /*
         * // read the file type
         * file_type = fixByte( rafDbf.readByte() );
         * // get the last update date
         * file_update_year = fixByte( rafDbf.readByte() );
         * file_update_month = fixByte( rafDbf.readByte() );
         * file_update_day = fixByte( rafDbf.readByte() );
         */
219    
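        // skip the file type byte and the three bytes of the last update date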
220            fixByte( rafDbf.readByte() );
221            fixByte( rafDbf.readByte() );
222            fixByte( rafDbf.readByte() );
223            fixByte( rafDbf.readByte() );
224    
225            // a byte array to hold little-endian long data
226            byte[] b = new byte[4];
227    
228            // read that baby in...
229            rafDbf.readFully( b );
230    
        // convert the byte array into the number of records (stored as a double)
232            file_numrecs = ByteUtils.readLEInt( b, 0 );
233    
234            b = null;
235    
236            // a byte array to hold little-endian short data
237            b = new byte[2];
238    
239            // get the data position (where it starts in the file)
240            rafDbf.readFully( b );
241            file_datap = ByteUtils.readLEShort( b, 0 );
242    
243            // find out the length of the data portion
244            rafDbf.readFully( b );
245            file_datalength = ByteUtils.readLEShort( b, 0 );
246    
247            // calculate the number of fields
248            num_fields = ( file_datap - 33 ) / 32;
249    
250            // read in the column data
251            int locn = 0; // offset of the current column
252    
253            // process each field
254            for ( int i = 1; i <= num_fields; i++ ) {
255                // seek the position of the field definition data.
256                // This information appears after the first 32 byte
257                // table information, and lives in 32 byte chunks.
258                rafDbf.seek( ( ( i - 1 ) * 32 ) + 32 );
259    
260                b = null;
261    
262                // get the column name into a byte array
263                b = new byte[11];
264                rafDbf.readFully( b );
265    
266                // convert the byte array to a String
267                String col_name = new String( b ).trim().toUpperCase();
268                while ( colHeader.contains( col_name ) ) {
269                    col_name = col_name + "__" + i; // do it like shp2pgsql to avoid same-column names all over
270                }
271    
272                // read in the column type
273                char[] c = new char[1];
274                c[0] = (char) rafDbf.readByte();
275    
276                // String ftyp = new String( c );
277    
278                // skip four bytes
279                rafDbf.skipBytes( 4 );
280    
281                // get field length and precision
282                short flen = fixByte( rafDbf.readByte() );
283                short fdec = fixByte( rafDbf.readByte() );
284    
285                // set the field position to the current
286                // value of locn
287                int fpos = locn;
288    
289                // increment locn by the length of this field.
290                locn += flen;
291    
292                // create a new dbfCol object and assign it the
293                // attributes of the current field
294                dbfCol column = new dbfCol( col_name );
295                column.type = new String( c );
296                column.size = flen;
297                column.position = fpos + 1;
298                column.prec = fdec;
299    
300                // to be done: get the name of dbf-table via method in ShapeFile
301                column.table = "NOT";
302    
303                column_info.put( col_name, column );
304                colHeader.add( col_name );
305            } // end for
306    
307            ft = createCanonicalFeatureType();
308    
309        } // end of initDBaseFile
310    
311        /**
312         * Overrides the default feature type (which is generated from all columns in the dbase file) to allow customized
313         * naming and ordering of properties.
314         * 
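     * A minimal sketch (property and column names are illustrative; the factory calls mirror those used internally by
     * this class):
     * 
     * <pre>
     * URI appNs = CommonNamespaces.buildNSURI( "http://www.deegree.org/app" );
     * PropertyType pt = FeatureFactory.createSimplePropertyType( new QualifiedName( "app", "NAME", appNs ),
     *                                                            Types.VARCHAR, true );
     * FeatureType myFt = FeatureFactory.createFeatureType( new QualifiedName( "app", "MyFeatureType", appNs ), false,
     *                                                      new PropertyType[] { pt } );
     * Map&lt;PropertyType, String&gt; mapping = new HashMap&lt;PropertyType, String&gt;();
     * mapping.put( pt, "NAME" ); // name of the dBase column that backs the property
     * dbf.setFeatureType( myFt, mapping );
     * </pre>
     * 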
     * @param ft
     *            feature type to use for the generated features
     * @param ftMapping
     *            keys: property types of ft, values: names of the corresponding dBase columns
     */
318        public void setFeatureType( FeatureType ft, Map<PropertyType, String> ftMapping ) {
319            this.ft = ft;
320            this.ftMapping = ftMapping;
321        }
322    
323        /**
324         * Creates a canonical {@link FeatureType} from all fields of the <code>DBaseFile</code>.
325         * 
326         * @return feature type that contains all fields as property types
327         */
328        private FeatureType createCanonicalFeatureType() {
329            dbfCol column = null;
330    
331            PropertyType[] ftp = new PropertyType[colHeader.size() + 1];
332    
333            for ( int i = 0; i < colHeader.size(); i++ ) {
            // retrieve the dbfCol object which corresponds to this column.
335                column = column_info.get( colHeader.get( i ) );
336    
337                QualifiedName name = new QualifiedName( APP_PREFIX, column.name, DEEGREEAPP );
338    
339                if ( column.type.equalsIgnoreCase( "C" ) ) {
340                    ftp[i] = FeatureFactory.createSimplePropertyType( name, Types.VARCHAR, true );
341                } else if ( column.type.equalsIgnoreCase( "F" ) || column.type.equalsIgnoreCase( "N" ) ) {
342                    if ( column.prec == 0 ) {
343                        if ( column.size < 10 ) {
344                            ftp[i] = FeatureFactory.createSimplePropertyType( name, Types.INTEGER, true );
345                        } else {
346                            ftp[i] = FeatureFactory.createSimplePropertyType( name, Types.BIGINT, true );
347                        }
348                    } else {
349                        if ( column.size < 8 ) {
350                            ftp[i] = FeatureFactory.createSimplePropertyType( name, Types.FLOAT, true );
351                        } else {
352                            ftp[i] = FeatureFactory.createSimplePropertyType( name, Types.DOUBLE, true );
353                        }
354                    }
355                } else if ( column.type.equalsIgnoreCase( "M" ) ) {
356                    ftp[i] = FeatureFactory.createSimplePropertyType( name, Types.VARCHAR, true );
357                } else if ( column.type.equalsIgnoreCase( "L" ) ) {
358                    ftp[i] = FeatureFactory.createSimplePropertyType( name, Types.VARCHAR, true );
359                } else if ( column.type.equalsIgnoreCase( "D" ) ) {
360                    ftp[i] = FeatureFactory.createSimplePropertyType( name, Types.VARCHAR, true );
361                } else if ( column.type.equalsIgnoreCase( "B" ) ) {
362                    ftp[i] = FeatureFactory.createSimplePropertyType( name, Types.BLOB, true );
363                }
364    
365                this.ftMapping.put( ftp[i], column.name );
366            }
367    
368            int index = fname.lastIndexOf( "/" );
369            ftName = fname;
370            if ( index >= 0 ) {
371                ftName = fname.substring( index + 1 );
372            } else {
373                index = fname.lastIndexOf( "\\" );
374                if ( index >= 0 ) {
375                    ftName = fname.substring( index + 1 );
376                }
377            }
378    
379            QualifiedName featureTypeName = new QualifiedName( APP_PREFIX, ftName, DEEGREEAPP );
380    
381            QualifiedName name = new QualifiedName( APP_PREFIX, "GEOM", DEEGREEAPP );
382            ftp[ftp.length - 1] = FeatureFactory.createGeometryPropertyType( name, Types.GEOMETRY_PROPERTY_NAME, 1, 1 );
383    
384            return FeatureFactory.createFeatureType( featureTypeName, false, ftp );
385        }
386    
387        /**
388         * 
389         * @return number of records in the table
390         * @throws DBaseException
391         */
392        public int getRecordNum()
393                                throws DBaseException {
394            if ( filemode == 1 ) {
395                throw new DBaseException( "class is initialized in write-only mode" );
396            }
397    
398            return (int) file_numrecs;
399        }
400    
401        /**
402         * 
403         * Positions the record pointer at the top of the table.
404         * 
405         * @throws DBaseException
406         */
407        public void goTop()
408                                throws DBaseException {
409            if ( filemode == 1 ) {
410                throw new DBaseException( "class is initialized in write-only mode" );
411            }
412    
413            record_number = 0;
414        }
415    
416        /**
417         * Advance the record pointer to the next record.
418         * 
419         * @return true if pointer has been increased
420         * @throws DBaseException
421         */
422        public boolean nextRecord()
423                                throws DBaseException {
424            if ( filemode == 1 ) {
425                throw new DBaseException( "class is initialized in write-only mode" );
426            }
427    
428            if ( record_number < file_numrecs ) {
429                record_number++;
430                return true;
431            }
432            return false;
433    
434        }
435    
436        /**
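     * Returns the value of the given column for the record the record pointer currently points to. A typical
     * iteration sketch (the column name is illustrative):
     * 
     * <pre>
     * dbf.goTop();
     * while ( dbf.nextRecord() ) {
     *     String value = dbf.getColumn( "NAME" );
     *     // ... use value
     * }
     * </pre>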
437         * 
438         * @param col_name
439         * @return column's string value from the current row.
440         * @throws DBaseException
441         */
442        public String getColumn( String col_name )
443                                throws DBaseException {
444            if ( filemode == 1 ) {
445                throw new DBaseException( "class is initialized in write-only mode" );
446            }
447    
448            try {
449                // retrieve the dbfCol object which corresponds
450                // to this column.
451                // System.out.println( columnNames.get( col_name ) + "/" + col_name );
452                dbfCol column = column_info.get( col_name );
453    
454                // seek the starting offset of the current record,
455                // as indicated by record_number
456                long pos = file_datap + ( ( record_number - 1 ) * file_datalength );
457    
458                // read data from cache if the requested part of the dbase file is
459                // within it
460                if ( ( pos >= startIndex ) && ( ( pos + column.position + column.size ) < ( startIndex + cacheSize ) ) ) {
461                    pos = pos - startIndex;
462                } else {
                // refresh the cache starting at the current cursor position
                // and, if necessary, correct the cursor position
465                    rafDbf.seek( pos );
466                    rafDbf.read( dataArray );
467                    startIndex = pos;
468                    pos = 0;
469                }
470                int ff = (int) ( pos + column.position );
471                return new String( dataArray, ff, column.size ).trim();
472            } catch ( Exception e ) {
473                e.printStackTrace();
474                return e.toString();
475            }
476        }
477    
478        /**
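     * Returns the column names (headers) of the dBase file. Together with {@link #getDataTypes()} this allows a
     * simple introspection of the table structure, as in this sketch:
     * 
     * <pre>
     * String[] cols = dbf.getProperties();
     * String[] types = dbf.getDataTypes();
     * // cols[i] is stored in a dBase field of type types[i]
     * </pre>
     * 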
479         * @return properties (column headers) of the dBase-file<BR>
480         * @throws DBaseException
481         */
482        public String[] getProperties()
483                                throws DBaseException {
484            if ( filemode == 1 ) {
485                throw new DBaseException( "class is initialized in write-only mode" );
486            }
487    
488            return colHeader.toArray( new String[colHeader.size()] );
489        }
490    
491        /**
     * @return datatype of each column of the dBase file<BR>
493         * @throws DBaseException
494         */
495        public String[] getDataTypes()
496                                throws DBaseException {
497            if ( filemode == 1 ) {
498                throw new DBaseException( "class is initialized in write-only mode" );
499            }
500    
501            String[] datatypes = new String[colHeader.size()];
502            dbfCol column;
503    
504            for ( int i = 0; i < colHeader.size(); i++ ) {
505                // retrieve the dbfCol object which corresponds
506                // to this column.
507                column = column_info.get( colHeader.get( i ) );
508    
509                datatypes[i] = column.type.trim();
510            }
511    
512            return datatypes;
513        }
514    
515        /**
516         * @param container
517         * @param element
     * @return true if the container string array contains element<BR>
519         */
520        private boolean contains( String[] container, String element ) {
        for ( int i = 0; i < container.length; i++ ) {
            if ( container[i].equals( element ) ) {
                return true;
            }
        }
526    
527            return false;
528        }
529    
530        /**
531         * @param field
532         * @return the size of a column
533         * @throws DBaseException
534         */
535        public int getDataLength( String field )
536                                throws DBaseException {
537            dbfCol col = column_info.get( field );
538            if ( col == null )
539                throw new DBaseException( "Field " + field + " not found" );
540    
541            return col.size;
542        }
543    
544        /**
545         * @param fields
     * @return the datatype of each column of the dBase file specified by fields<BR>
547         * @throws DBaseException
548         */
549        public String[] getDataTypes( String[] fields )
550                                throws DBaseException {
551            if ( filemode == 1 ) {
552                throw new DBaseException( "class is initialized in write-only mode" );
553            }
554    
555            ArrayList<String> vec = new ArrayList<String>();
556            dbfCol column;
557    
558            for ( int i = 0; i < colHeader.size(); i++ ) {
559                // check if the current (i'th) column (string) is
560                // within the array of specified columns
561                if ( contains( fields, colHeader.get( i ) ) ) {
562                    // retrieve the dbfCol object which corresponds
563                    // to this column.
564                    column = column_info.get( colHeader.get( i ) );
565    
566                    vec.add( column.type.trim() );
567                }
568            }
569    
570            return vec.toArray( new String[vec.size()] );
571        }
572    
573        /**
574         * Returns a row of the dBase file as a {@link Feature} instance.
575         * 
576         * @param rowNo
577         * @return a row of the dBase file as a Feature instance
578         * @throws DBaseException
579         */
580        public Feature getFRow( int rowNo )
581                                throws DBaseException {
582    
583            Map<String, Object> columnValues = getRow( rowNo );
584    
585            PropertyType[] propTypes = this.ft.getProperties();
586            List<FeatureProperty> props = new ArrayList<FeatureProperty>();
587    
588            for ( int i = 0; i < propTypes.length; i++ ) {
589                PropertyType pt = propTypes[i];
590                if ( pt instanceof GeometryPropertyType ) {
591                    // insert dummy property for geometry
592                    FeatureProperty prop = FeatureFactory.createFeatureProperty( pt.getName(), null );
593                    props.add( prop );
594                } else {
595                    String columnName = this.ftMapping.get( pt );
596                    Object columnValue = columnValues.get( columnName );
597                    if ( columnValue != null ) {
598                        FeatureProperty prop = FeatureFactory.createFeatureProperty( pt.getName(), columnValue );
599                        props.add( prop );
600                    }
601                }
602            }
603            FeatureProperty[] fp = props.toArray( new FeatureProperty[props.size()] );
604            return FeatureFactory.createFeature( ftName + rowNo, ft, fp );
605        }
606    
607        /**
608         * 
609         * @param rowNo
610         * @return a row of the dbase file
611         * @throws DBaseException
612         */
613        private Map<String, Object> getRow( int rowNo )
614                                throws DBaseException {
615    
616            Map<String, Object> columnValues = new HashMap<String, Object>();
617    
618            goTop();
619            record_number += rowNo;
620    
621            for ( int i = 0; i < colHeader.size(); i++ ) {
622    
623                // retrieve the dbfCol object which corresponds to this column.
624                dbfCol column = column_info.get( colHeader.get( i ) );
625    
626                String value = getColumn( column.name );
627                Object columnValue = value;
628    
629                if ( value != null ) {
630                    // cast the value of the i'th column to corresponding datatype
631                    if ( column.type.equalsIgnoreCase( "C" ) ) {
632                        // nothing to do
633                    } else if ( column.type.equalsIgnoreCase( "F" ) || column.type.equalsIgnoreCase( "N" ) ) {
634                        try {
635                            if ( column.prec == 0 ) {
636                                if ( column.size < 10 ) {
637                                    columnValue = new Integer( value );
638                                } else {
639                                    columnValue = new Long( value );
640                                }
641                            } else {
642                                if ( column.size < 8 ) {
643                                    columnValue = new Float( value );
644                                } else {
645                                    columnValue = new Double( value );
646                                }
647                            }
648                        } catch ( Exception ex ) {
649                            columnValue = new Double( "0" );
650                        }
651                    } else if ( column.type.equalsIgnoreCase( "M" ) ) {
652                        // nothing to do
653                    } else if ( column.type.equalsIgnoreCase( "L" ) ) {
654                        // nothing to do
655                    } else if ( column.type.equalsIgnoreCase( "D" ) ) {
656                        if ( value.equals( "" ) ) {
657                            columnValue = null;
658                        } else {
659                            String s = value.substring( 0, 4 ) + '-' + value.substring( 4, 6 ) + '-'
660                                       + value.substring( 6, 8 );
661                            columnValue = TimeTools.createCalendar( s ).getTime();
662                        }
663                    } else if ( column.type.equalsIgnoreCase( "B" ) ) {
664                        ByteArrayOutputStream os = new ByteArrayOutputStream( 10000 );
665                        try {
666                            os.write( value.getBytes() );
667                        } catch ( IOException e ) {
668                            e.printStackTrace();
669                        }
670                        columnValue = os;
671                    }
672                } else {
673                    columnValue = "";
674                }
675                columnValues.put( column.name, columnValue );
676            }
677    
678            return columnValues;
679        }
680    
681        /**
682         * bytes are signed; let's fix them...
683         * 
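     * For example, fixByte( (byte) 0xff ) yields 255.
     * 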
684         * @param b
685         * @return unsigned byte as short
686         */
687        private static short fixByte( byte b ) {
688            if ( b < 0 ) {
689                return (short) ( b + 256 );
690            }
691    
692            return b;
693        }
694    
695        /**
     * Creates the dBase file and writes all data to it. If a file with the specified name (fname, see above) already
     * exists, it will be deleted!
697         * 
698         * @throws IOException
699         * @throws DBaseException
700         */
701        public void writeAllToFile()
702                                throws IOException, DBaseException {
703            if ( filemode == 0 ) {
704                throw new DBaseException( "class is initialized in read-only mode" );
705            }
706    
707            // if a file with the retrieved filename exists, delete it!
708            File file = new File( fname + ".dbf" );
709    
710            if ( file.exists() ) {
711                file.delete();
712            }
713    
714            // create a new file
715            RandomAccessFile rdbf = new RandomAccessFile( fname + ".dbf", "rw" );
716    
717            try {
718                byte[] b = header.getHeader();
719                int nRecords = dataSection.getNoOfRecords();
720                // write number of records
721                ByteUtils.writeLEInt( b, 4, nRecords );
722                // write header to the file
723                rdbf.write( b );
724                b = dataSection.getDataSection();
725                // write datasection to the file
726                rdbf.write( b );
727            } catch ( IOException e ) {
728                throw e;
729            } finally {
730                rdbf.close();
731            }
732        }
733    
734        /**
     * Writes a data record to the byte array representing the data section of the dBase file. The method gets the
     * data type of each field in recData from the fieldDesc array that has been passed to the constructor.
737         * 
738         * @param recData
739         * @throws DBaseException
740         */
741        public void setRecord( ArrayList recData )
742                                throws DBaseException {
743            if ( filemode == 0 ) {
744                throw new DBaseException( "class is initialized in read-only mode" );
745            }
746    
747            dataSection.setRecord( recData );
748        }
749    
750        /**
     * Writes a data record to the byte array representing the data section of the dBase file. The method gets the
     * data type of each field in recData from the fieldDesc array that has been passed to the constructor. index
     * specifies the location of the record in the data section. If an invalid index is used, an exception will be
     * thrown.
754         * 
755         * @param index
756         * @param recData
757         * @throws DBaseException
758         */
759        public void setRecord( int index, ArrayList recData )
760                                throws DBaseException {
761            if ( filemode == 0 ) {
762                throw new DBaseException( "class is initialized in read-only mode" );
763            }
764    
765            dataSection.setRecord( index, recData );
766        }
767    
768        /**
769         * @return the feature type of the generated features
770         */
771        public FeatureType getFeatureType() {
772            return ft;
773        }
774    
775    } // end of class DBaseFile
776    
777    /**
 * Describes a column of a table: its name, owning table, type, precision and size.
780     * @version $Revision: 19488 $
781     * @author <a href="mailto:poth@lat-lon.de">Andreas Poth</a>
782     */
783    class tsColumn {
784        public String name = null; // the column's name
785    
786        public String table = null; // the table which "owns" the column
787    
788        public String type = null; // the column's type
789    
790        public int prec = 0; // the column's precision
791    
792        public int size = 0; // the column's size
793    
794        /**
795         * 
796         * Constructs a tsColumn object.
797         * 
798         * @param s
799         *            the column name
800         */
801        tsColumn( String s ) {
802            name = s;
803        }
804    } // end of class tsColumn
805    
806    /**
 * Describes a column of a dBase file; extends tsColumn by the position of the column within a record.
809     * @version $Revision: 19488 $
810     * @author <a href="mailto:poth@lat-lon.de">Andreas Poth</a>
811     */
812    class dbfCol extends tsColumn {
813        int position = 0;
814    
815        /**
816         * Creates a new dbfCol object.
817         * 
818         * @param c
819         */
820        public dbfCol( String c ) {
821            super( c );
822        }
823    }