//$HeadURL: svn+ssh://rbezema@svn.wald.intevation.org/deegree/base/branches/2.2_testing/src/org/deegree/io/dbaseapi/DBaseFile.java $
/*----------------    FILE HEADER  ------------------------------------------

 This file is part of deegree.
 Copyright (C) 2001-2008 by:
 EXSE, Department of Geography, University of Bonn
 http://www.giub.uni-bonn.de/deegree/
 lat/lon GmbH
 http://www.lat-lon.de

 This library is free software; you can redistribute it and/or
 modify it under the terms of the GNU Lesser General Public
 License as published by the Free Software Foundation; either
 version 2.1 of the License, or (at your option) any later version.

 This library is distributed in the hope that it will be useful,
 but WITHOUT ANY WARRANTY; without even the implied warranty of
 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 Lesser General Public License for more details.

 You should have received a copy of the GNU Lesser General Public
 License along with this library; if not, write to the Free Software
 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

 Contact:

 Andreas Poth
 lat/lon GmbH
 Aennchenstr. 19
 53177 Bonn
 Germany
 E-Mail: poth@lat-lon.de

 Prof. Dr. Klaus Greve
 Department of Geography
 University of Bonn
 Meckenheimer Allee 166
 53115 Bonn
 Germany
 E-Mail: greve@giub.uni-bonn.de


 ---------------------------------------------------------------------------*/
package org.deegree.io.dbaseapi;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.net.URI;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.deegree.datatypes.QualifiedName;
import org.deegree.datatypes.Types;
import org.deegree.framework.util.TimeTools;
import org.deegree.model.feature.Feature;
import org.deegree.model.feature.FeatureFactory;
import org.deegree.model.feature.FeatureProperty;
import org.deegree.model.feature.schema.FeatureType;
import org.deegree.model.feature.schema.GeometryPropertyType;
import org.deegree.model.feature.schema.PropertyType;
import org.deegree.model.spatialschema.ByteUtils;
import org.deegree.ogcbase.CommonNamespaces;

/**
 * the datatypes of the dBase file and their representation as java types:
 *
 * <pre>
 * dBase-type    dBase-type-ID    java-type
 * character     "C"              String
 * float         "F"              Float
 * number        "N"              Double
 * logical       "L"              String
 * memo          "M"              String
 * date          "D"              Date
 * binary        "B"              ByteArrayOutputStream
 * </pre>
 *
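 * A minimal read sketch (assuming an existing dBase file; "roads" is a hypothetical base name
 * referring to "roads.dbf" and is passed without the suffix):
 *
 * <pre>
 * DBaseFile dbf = new DBaseFile( "roads" );
 * for ( int i = 1; i &lt;= dbf.getRecordNum(); i++ ) {
 *     Feature feature = dbf.getFRow( i );
 *     // process the feature ...
 * }
 * dbf.close();
 * </pre>
 *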
 * @author <a href="mailto:poth@lat-lon.de">Andreas Poth</a>
 * @author last edited by: $Author: apoth $
 *
 * @version $Revision: 9342 $, $Date: 2007-12-27 13:32:57 +0100 (Do, 27 Dez 2007) $
 */
public class DBaseFile {

    private static final URI DEEGREEAPP = CommonNamespaces.buildNSURI( "http://www.deegree.org/app" );

    private static final String APP_PREFIX = "app";

    private ArrayList<String> colHeader = new ArrayList<String>();

    // representing the datasection of the dBase file
    // only needed for writing a dBase file
    private DBFDataSection dataSection = null;

    // feature type of generated features
    private FeatureType ft;

    // keys: property types, values: column (in dbase file)
    private Map<PropertyType, String> ftMapping = new HashMap<PropertyType, String>( 100 );

    // map containing information about the columns of the table
    private Map<String, dbfCol> column_info = new HashMap<String, dbfCol>();

    // reference to the dbase file
    private RandomAccessFile rafDbf;

    // file suffix for dbf
    private final String _dbf = ".dbf";

    // represents the dBase file header
    // only needed for writing the dBase file
    private DBFHeader header = null;

    // representing the name of the dBase file
    // only needed for writing the dBase file
    private String fname = null;

    private String ftName = null;

    // number of records in the table
    private double file_numrecs;

    // length of a single data record (in bytes)
    private int file_datalength;

    // position in the file at which the data section starts
    private int file_datap;

    // flag which indicates if a dBase file should be
    // read or written.
    // filemode = 0 : read only
    // filemode = 1 : write only
    private int filemode = 0;

    // number of columns
    private int num_fields;

    // current record
    private long record_number = 0;

    // size of the cache used for reading data from the dbase table
    private long cacheSize = 1000000;

    // array containing the data of the cache
    private byte[] dataArray = null;

    // file position at which the cache starts
    private long startIndex = 0;

    /**
     * Constructor<BR>
     * only for reading a dBase file<BR>
     *
     * @param url
     *            name of the dBase file without the ".dbf" suffix
     * @throws IOException
     *             if the file cannot be opened or read
     */
    public DBaseFile( String url ) throws IOException {
        fname = url;

        // creates rafDbf
        rafDbf = new RandomAccessFile( url + _dbf, "r" );

        // dataArray = new byte[(int)rafDbf.length()];
        if ( cacheSize > rafDbf.length() ) {
            cacheSize = rafDbf.length();
        }

        dataArray = new byte[(int) cacheSize];
        rafDbf.read( dataArray );
        rafDbf.seek( 0 );

        // initialize dbase file
        initDBaseFile();

        filemode = 0;
    }

    /**
     * Constructor<BR>
     * only for writing a dBase file<BR>
     *
     * @param url
     *            name of the dBase file without the ".dbf" suffix
     * @param fieldDesc
     *            field descriptors defining the columns of the table
     *
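     * A minimal write sketch (assuming a suitable <code>FieldDescriptor[] fields</code>, a record
     * list <code>rec</code> matching those descriptors, and a hypothetical target base name
     * "output"):
     *
     * <pre>
     * DBaseFile dbf = new DBaseFile( "output", fields );
     * dbf.setRecord( rec );
     * dbf.writeAllToFile();
     * </pre>
     *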
     */
    public DBaseFile( String url, FieldDescriptor[] fieldDesc ) throws DBaseException {
        fname = url;

        // create header
        header = new DBFHeader( fieldDesc );

        // create data section
        dataSection = new DBFDataSection( fieldDesc );

        filemode = 1;
    }

    /**
     * Closes the underlying <code>RandomAccessFile</code>; only relevant in reading mode.
     */
    public void close() {
        try {
            if ( rafDbf != null ) {
                // only non-null for reading access
                rafDbf.close();
            }
        } catch ( Exception ex ) {
            // should never happen
            ex.printStackTrace();
        }
    }

    /**
     * Initializes the dBase file by parsing its header and field descriptors. This is based on
     * Pratap Pereira's Xbase.pm Perl module.
     *
     * @throws IOException
     *             if the header cannot be read
     */
    private void initDBaseFile()
                            throws IOException {
        // position the record pointer at 0
        rafDbf.seek( 0 );
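
        // dBase header layout, as parsed below:
        // byte 0      : file type / version
        // bytes 1-3   : date of last update (YY MM DD)
        // bytes 4-7   : number of records (little-endian int)
        // bytes 8-9   : header size in bytes, i.e. start of the data section (little-endian short)
        // bytes 10-11 : length of a single record in bytes (little-endian short)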

        /*
         * // read the file type
         * file_type = fixByte( rafDbf.readByte() );
         * // get the last update date
         * file_update_year = fixByte( rafDbf.readByte() );
         * file_update_month = fixByte( rafDbf.readByte() );
         * file_update_day = fixByte( rafDbf.readByte() );
         */

        fixByte( rafDbf.readByte() );
        fixByte( rafDbf.readByte() );
        fixByte( rafDbf.readByte() );
        fixByte( rafDbf.readByte() );

        // a byte array to hold little-endian long data
        byte[] b = new byte[4];

        // read that baby in...
        rafDbf.readFully( b );

        // convert the little-endian byte array into the number of records
        file_numrecs = ByteUtils.readLEInt( b, 0 );

        b = null;

        // a byte array to hold little-endian short data
        b = new byte[2];

        // get the data position (where the data section starts in the file)
        rafDbf.readFully( b );
        file_datap = ByteUtils.readLEShort( b, 0 );

        // read the length of a single data record
        rafDbf.readFully( b );
        file_datalength = ByteUtils.readLEShort( b, 0 );

        // calculate the number of fields
        num_fields = ( file_datap - 33 ) / 32;

        // read in the column data
        int locn = 0; // offset of the current column

        // process each field
        for ( int i = 1; i <= num_fields; i++ ) {
            // seek the position of the field definition data.
            // This information appears after the first 32 byte
            // table information, and lives in 32 byte chunks.
            rafDbf.seek( ( ( i - 1 ) * 32 ) + 32 );
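
            // field descriptor layout, as parsed below:
            // bytes 0-10  : field name (ASCII, zero-padded)
            // byte 11     : field type ("C", "N", "F", "L", "M", "D", "B", ...)
            // bytes 12-15 : reserved (skipped)
            // byte 16     : field length
            // byte 17     : decimal count (precision)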

            b = null;

            // get the column name into a byte array
            b = new byte[11];
            rafDbf.readFully( b );

            // convert the byte array to a String
            String col_name = new String( b ).trim().toUpperCase();

            // read in the column type
            char[] c = new char[1];
            c[0] = (char) rafDbf.readByte();

            // String ftyp = new String( c );

            // skip four bytes
            rafDbf.skipBytes( 4 );

            // get field length and precision
            short flen = fixByte( rafDbf.readByte() );
            short fdec = fixByte( rafDbf.readByte() );

            // set the field position to the current
            // value of locn
            int fpos = locn;

            // increment locn by the length of this field.
            locn += flen;

            // create a new dbfCol object and assign it the
            // attributes of the current field
            dbfCol column = new dbfCol( col_name );
            column.type = new String( c );
            column.size = flen;
            column.position = fpos + 1;
            column.prec = fdec;

            // to be done: get the name of dbf-table via method in ShapeFile
            column.table = "NOT";

            column_info.put( col_name, column );
            colHeader.add( col_name );
        } // end for

        ft = createCanonicalFeatureType();

    } // end of initDBaseFile


    /**
     * Overrides the default feature type (which is generated from all columns in the dbase file)
     * to allow customized naming and ordering of properties.
     *
     * @param ft
     *            feature type to use for generated features
     * @param ftMapping
     *            maps each property type of the feature type to the corresponding dBase column name
     */
    public void setFeatureType( FeatureType ft, Map<PropertyType, String> ftMapping ) {
        this.ft = ft;
        this.ftMapping = ftMapping;
    }

    /**
     * Creates a canonical {@link FeatureType} from all fields of the <code>DBaseFile</code>.
     *
     * @return feature type that contains all fields as property types
     */
    private FeatureType createCanonicalFeatureType() {
        dbfCol column = null;

        PropertyType[] ftp = new PropertyType[colHeader.size() + 1];

        for ( int i = 0; i < colHeader.size(); i++ ) {
            // retrieve the dbfCol object which corresponds to this column.
            column = column_info.get( colHeader.get( i ) );

            QualifiedName name = new QualifiedName( APP_PREFIX, column.name, DEEGREEAPP );

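            // map the dBase column type to a property type; numeric columns ("F"/"N") are
            // mapped by precision and size: integral values become INTEGER (size < 10) or
            // BIGINT, decimal values become FLOAT (size < 8) or DOUBLE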
            if ( column.type.equalsIgnoreCase( "C" ) ) {
                ftp[i] = FeatureFactory.createSimplePropertyType( name, Types.VARCHAR, true );
            } else if ( column.type.equalsIgnoreCase( "F" ) || column.type.equalsIgnoreCase( "N" ) ) {
                if ( column.prec == 0 ) {
                    if ( column.size < 10 ) {
                        ftp[i] = FeatureFactory.createSimplePropertyType( name, Types.INTEGER, true );
                    } else {
                        ftp[i] = FeatureFactory.createSimplePropertyType( name, Types.BIGINT, true );
                    }
                } else {
                    if ( column.size < 8 ) {
                        ftp[i] = FeatureFactory.createSimplePropertyType( name, Types.FLOAT, true );
                    } else {
                        ftp[i] = FeatureFactory.createSimplePropertyType( name, Types.DOUBLE, true );
                    }
                }
            } else if ( column.type.equalsIgnoreCase( "M" ) ) {
                ftp[i] = FeatureFactory.createSimplePropertyType( name, Types.VARCHAR, true );
            } else if ( column.type.equalsIgnoreCase( "L" ) ) {
                ftp[i] = FeatureFactory.createSimplePropertyType( name, Types.VARCHAR, true );
            } else if ( column.type.equalsIgnoreCase( "D" ) ) {
                ftp[i] = FeatureFactory.createSimplePropertyType( name, Types.VARCHAR, true );
            } else if ( column.type.equalsIgnoreCase( "B" ) ) {
                ftp[i] = FeatureFactory.createSimplePropertyType( name, Types.BLOB, true );
            }

            this.ftMapping.put( ftp[i], column.name );
        }

        int index = fname.lastIndexOf( "/" );
        ftName = fname;
        if ( index >= 0 ) {
            ftName = fname.substring( index + 1 );
        } else {
            index = fname.lastIndexOf( "\\" );
            if ( index >= 0 ) {
                ftName = fname.substring( index + 1 );
            }
        }

        QualifiedName featureTypeName = new QualifiedName( APP_PREFIX, ftName, DEEGREEAPP );

        QualifiedName name = new QualifiedName( APP_PREFIX, "GEOM", DEEGREEAPP );
        ftp[ftp.length - 1] = FeatureFactory.createGeometryPropertyType( name, Types.GEOMETRY_PROPERTY_NAME, 1, 1 );

        return FeatureFactory.createFeatureType( featureTypeName, false, ftp );
    }

    /**
     *
     * @return number of records in the table
     */
    public int getRecordNum()
                            throws DBaseException {
        if ( filemode == 1 ) {
            throw new DBaseException( "class is initialized in write-only mode" );
        }

        return (int) file_numrecs;
    }

    /**
     *
     * Positions the record pointer at the top of the table.
     */
    public void goTop()
                            throws DBaseException {
        if ( filemode == 1 ) {
            throw new DBaseException( "class is initialized in write-only mode" );
        }

        record_number = 0;
    }

    /**
     * Advance the record pointer to the next record.
     *
     * @return true if pointer has been increased
     */
    public boolean nextRecord()
                            throws DBaseException {
        if ( filemode == 1 ) {
            throw new DBaseException( "class is initialized in write-only mode" );
        }

        if ( record_number < file_numrecs ) {
            record_number++;
            return true;
        }
        return false;

    }

    /**
     * Returns the given column's value from the current row, i.e. the row selected via
     * {@link #goTop()} and {@link #nextRecord()}.
     *
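     * A minimal sketch of sequential access (assuming a hypothetical column named "NAME"):
     *
     * <pre>
     * dbf.goTop();
     * while ( dbf.nextRecord() ) {
     *     String name = dbf.getColumn( "NAME" );
     *     // ...
     * }
     * </pre>
     *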
     * @param col_name
     *            name of the column to read
     * @return column's string value from the current row.
     */
    public String getColumn( String col_name )
                            throws DBaseException {
        if ( filemode == 1 ) {
            throw new DBaseException( "class is initialized in write-only mode" );
        }

        try {
            // retrieve the dbfCol object which corresponds
            // to this column.
            dbfCol column = column_info.get( col_name );

            // seek the starting offset of the current record,
            // as indicated by record_number
            long pos = file_datap + ( ( record_number - 1 ) * file_datalength );

            // read data from cache if the requested part of the dbase file is
            // within it
            if ( ( pos >= startIndex ) && ( ( pos + column.position + column.size ) < ( startIndex + cacheSize ) ) ) {
                pos = pos - startIndex;
            } else {
                // refresh the cache, starting at the current cursor position,
                // and correct the cursor position if necessary
                rafDbf.seek( pos );
                rafDbf.read( dataArray );
                startIndex = pos;
                pos = 0;
            }
            int ff = (int) ( pos + column.position );
            return new String( dataArray, ff, column.size ).trim();
        } catch ( Exception e ) {
            e.printStackTrace();
            return e.toString();
        }
    }

    /**
     * @return properties (column headers) of the dBase-file<BR>
     */
    public String[] getProperties()
                            throws DBaseException {
        if ( filemode == 1 ) {
            throw new DBaseException( "class is initialized in write-only mode" );
        }

        return colHeader.toArray( new String[colHeader.size()] );
    }

    /**
     * @return datatype of each column of the dBase file<BR>
     */
    public String[] getDataTypes()
                            throws DBaseException {
        if ( filemode == 1 ) {
            throw new DBaseException( "class is initialized in write-only mode" );
        }

        String[] datatypes = new String[colHeader.size()];
        dbfCol column;

        for ( int i = 0; i < colHeader.size(); i++ ) {
            // retrieve the dbfCol object which corresponds
            // to this column.
            column = column_info.get( colHeader.get( i ) );

            datatypes[i] = column.type.trim();
        }

        return datatypes;
    }

    /**
     * @param container
     * @param element
     * @return true if the container string array contains element<BR>
     */
    private boolean contains( String[] container, String element ) {
        for ( int i = 0; i < container.length; i++ ) {
            if ( container[i].equals( element ) ) {
                return true;
            }
        }

        return false;
    }

    /**
     * @param field
     * @return the size of a column
     */
    public int getDataLength( String field )
                            throws DBaseException {
        dbfCol col = column_info.get( field );
        if ( col == null )
            throw new DBaseException( "Field " + field + " not found" );

        return col.size;
    }

    /**
     * @param fields
     * @return the datatype of each column of the dBase file specified by fields<BR>
     */
    public String[] getDataTypes( String[] fields )
                            throws DBaseException {
        if ( filemode == 1 ) {
            throw new DBaseException( "class is initialized in write-only mode" );
        }

        ArrayList<String> vec = new ArrayList<String>();
        dbfCol column;

        for ( int i = 0; i < colHeader.size(); i++ ) {
            // check if the current (i'th) column (string) is
            // within the array of specified columns
            if ( contains( fields, colHeader.get( i ) ) ) {
                // retrieve the dbfCol object which corresponds
                // to this column.
                column = column_info.get( colHeader.get( i ) );

                vec.add( column.type.trim() );
            }
        }

        return vec.toArray( new String[vec.size()] );
    }

    /**
     * Returns a row of the dBase file as a {@link Feature} instance.
     *
     * @param rowNo
     * @return a row of the dBase file as a Feature instance
     * @throws DBaseException
     */
    public Feature getFRow( int rowNo )
                            throws DBaseException {

        Map<String, Object> columnValues = getRow( rowNo );

        PropertyType[] propTypes = this.ft.getProperties();
        List<FeatureProperty> props = new ArrayList<FeatureProperty>();

        for ( int i = 0; i < propTypes.length; i++ ) {
            PropertyType pt = propTypes[i];
            if ( pt instanceof GeometryPropertyType ) {
                // insert dummy property for geometry
                FeatureProperty prop = FeatureFactory.createFeatureProperty( pt.getName(), null );
                props.add( prop );
            } else {
                String columnName = this.ftMapping.get( pt );
                Object columnValue = columnValues.get( columnName );
                if ( columnValue != null ) {
                    FeatureProperty prop = FeatureFactory.createFeatureProperty( pt.getName(), columnValue );
                    props.add( prop );
                }
            }
        }
        FeatureProperty[] fp = props.toArray( new FeatureProperty[props.size()] );
        return FeatureFactory.createFeature( ftName + rowNo, ft, fp );
    }

    /**
     *
     * @param rowNo
     * @return a row of the dbase file
     * @throws DBaseException
     */
    private Map<String, Object> getRow( int rowNo )
                            throws DBaseException {

        Map<String, Object> columnValues = new HashMap<String, Object>();

        goTop();
        record_number += rowNo;

        for ( int i = 0; i < colHeader.size(); i++ ) {

            // retrieve the dbfCol object which corresponds to this column.
            dbfCol column = column_info.get( colHeader.get( i ) );

            String value = getColumn( column.name );
            Object columnValue = value;

            if ( value != null ) {
                // cast the value of the i'th column to corresponding datatype
                if ( column.type.equalsIgnoreCase( "C" ) ) {
                    // nothing to do
                } else if ( column.type.equalsIgnoreCase( "F" ) || column.type.equalsIgnoreCase( "N" ) ) {
                    try {
                        if ( column.prec == 0 ) {
                            if ( column.size < 10 ) {
                                columnValue = new Integer( value );
                            } else {
                                columnValue = new Long( value );
                            }
                        } else {
                            if ( column.size < 8 ) {
                                columnValue = new Float( value );
                            } else {
                                columnValue = new Double( value );
                            }
                        }
                    } catch ( Exception ex ) {
                        columnValue = new Double( "0" );
                    }
                } else if ( column.type.equalsIgnoreCase( "M" ) ) {
                    // nothing to do
                } else if ( column.type.equalsIgnoreCase( "L" ) ) {
                    // nothing to do
                } else if ( column.type.equalsIgnoreCase( "D" ) ) {
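                    // dBase stores dates as "YYYYMMDD"; convert to ISO format (YYYY-MM-DD)
                    // before handing the string to TimeTools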
                    if ( value.equals( "" ) ) {
                        columnValue = null;
                    } else {
                        String s = value.substring( 0, 4 ) + '-' + value.substring( 4, 6 ) + '-'
                                   + value.substring( 6, 8 );
                        columnValue = TimeTools.createCalendar( s ).getTime();
                    }
                } else if ( column.type.equalsIgnoreCase( "B" ) ) {
                    ByteArrayOutputStream os = new ByteArrayOutputStream( 10000 );
                    try {
                        os.write( value.getBytes() );
                    } catch ( IOException e ) {
                        e.printStackTrace();
                    }
                    columnValue = os;
                }
            } else {
                columnValue = "";
            }
            columnValues.put( column.name, columnValue );
        }

        return columnValues;
    }

    /**
     * Bytes are signed in Java; converts the given byte to its unsigned value.
     *
     * @param b
     * @return unsigned byte as short
     */
    private static short fixByte( byte b ) {
        if ( b < 0 ) {
            return (short) ( b + 256 );
        }

        return b;
    }

    /**
     * Creates the dBase file and writes all data to it. If the file specified by fname (see
     * above) already exists, it will be deleted!
     *
     * @throws IOException
     * @throws DBaseException
     */
    public void writeAllToFile()
                            throws IOException, DBaseException {
        if ( filemode == 0 ) {
            throw new DBaseException( "class is initialized in read-only mode" );
        }

        // if a file with the retrieved filename exists, delete it!
        File file = new File( fname + ".dbf" );

        if ( file.exists() ) {
            file.delete();
        }

        // create a new file
        RandomAccessFile rdbf = new RandomAccessFile( fname + ".dbf", "rw" );

        try {
            byte[] b = header.getHeader();
            int nRecords = dataSection.getNoOfRecords();
            // write number of records
            ByteUtils.writeLEInt( b, 4, nRecords );
            // write header to the file
            rdbf.write( b );
            b = dataSection.getDataSection();
            // write datasection to the file
            rdbf.write( b );
        } catch ( IOException e ) {
            throw e;
        } finally {
            rdbf.close();
        }
    }

    /**
     * Writes a data record to the byte array representing the data section of the dBase file. The
     * method gets the data type of each field in recData from the fieldDesc which has been set in
     * the constructor.
     *
     * @param recData
     * @throws DBaseException
     */
    public void setRecord( ArrayList recData )
                            throws DBaseException {
        if ( filemode == 0 ) {
            throw new DBaseException( "class is initialized in read-only mode" );
        }

        dataSection.setRecord( recData );
    }

    /**
     * Writes a data record to the byte array representing the data section of the dBase file. The
     * method gets the data type of each field in recData from the fieldDesc which has been set in
     * the constructor. index specifies the location of the record in the data section. If an
     * invalid index is used, an exception will be thrown.
     *
     * @param index
     * @param recData
     * @throws DBaseException
     */
    public void setRecord( int index, ArrayList recData )
                            throws DBaseException {
        if ( filemode == 0 ) {
            throw new DBaseException( "class is initialized in read-only mode" );
        }

        dataSection.setRecord( index, recData );
    }

    /**
     * @return the feature type of the generated features
     */
    public FeatureType getFeatureType() {
        return ft;
    }

} // end of class DBaseFile

/**
 * A single column of a table: its name, owning table, type, precision and size.
 *
 * @version $Revision: 9342 $
 * @author <a href="mailto:poth@lat-lon.de">Andreas Poth</a>
 */
class tsColumn {
    public String name = null; // the column's name

    public String table = null; // the table which "owns" the column

    public String type = null; // the column's type

    public int prec = 0; // the column's precision

    public int size = 0; // the column's size

    /**
     *
     * Constructs a tsColumn object.
     *
     * @param s
     *            the column name
     */
    tsColumn( String s ) {
        name = s;
    }
} // end of class tsColumn

/**
 * A column of a dBase file, additionally carrying the column's byte position within a record.
 *
 * @version $Revision: 9342 $
 * @author <a href="mailto:poth@lat-lon.de">Andreas Poth</a>
 */
class dbfCol extends tsColumn {
    int position = 0;

    /**
     * Creates a new dbfCol object.
     *
     * @param c
     */
    public dbfCol( String c ) {
        super( c );
    }
}