Source Code Cross Referenced for JDBC1DataStore.java in  » GIS » GeoTools-2.4.1 » org » geotools » data » jdbc » Java Source Code / Java DocumentationJava Source Code and Java Documentation

Java Source Code / Java Documentation
1. 6.0 JDK Core
2. 6.0 JDK Modules
3. 6.0 JDK Modules com.sun
4. 6.0 JDK Modules com.sun.java
5. 6.0 JDK Modules sun
6. 6.0 JDK Platform
7. Ajax
8. Apache Harmony Java SE
9. Aspect oriented
10. Authentication Authorization
11. Blogger System
12. Build
13. Byte Code
14. Cache
15. Chart
16. Chat
17. Code Analyzer
18. Collaboration
19. Content Management System
20. Database Client
21. Database DBMS
22. Database JDBC Connection Pool
23. Database ORM
24. Development
25. EJB Server geronimo
26. EJB Server GlassFish
27. EJB Server JBoss 4.2.1
28. EJB Server resin 3.1.5
29. ERP CRM Financial
30. ESB
31. Forum
32. GIS
33. Graphic Library
34. Groupware
35. HTML Parser
36. IDE
37. IDE Eclipse
38. IDE Netbeans
39. Installer
40. Internationalization Localization
41. Inversion of Control
42. Issue Tracking
43. J2EE
44. JBoss
45. JMS
46. JMX
47. Library
48. Mail Clients
49. Net
50. Parser
51. PDF
52. Portal
53. Profiler
54. Project Management
55. Report
56. RSS RDF
57. Rule Engine
58. Science
59. Scripting
60. Search Engine
61. Security
62. Servlet Container
63. Source Control
64. Swing Library
65. Template Engine
66. Test Coverage
67. Testing
68. UML
69. Web Crawler
70. Web Framework
71. Web Mail
72. Web Server
73. Web Services
74. Web Services apache cxf 2.0.1
75. Web Services AXIS2
76. Wiki Engine
77. Workflow Engines
78. XML
79. XML UI
Java
Java Tutorial
Java Open Source
Jar File Download
Java Articles
Java Products
Java by API
Photoshop Tutorials
Maya Tutorials
Flash Tutorials
3ds-Max Tutorials
Illustrator Tutorials
GIMP Tutorials
C# / C Sharp
C# / CSharp Tutorial
C# / CSharp Open Source
ASP.Net
ASP.NET Tutorial
JavaScript DHTML
JavaScript Tutorial
JavaScript Reference
HTML / CSS
HTML CSS Reference
C / ANSI-C
C Tutorial
C++
C++ Tutorial
Ruby
PHP
Python
Python Tutorial
Python Open Source
SQL Server / T-SQL
SQL Server / T-SQL Tutorial
Oracle PL / SQL
Oracle PL/SQL Tutorial
PostgreSQL
SQL / MySQL
MySQL Tutorial
VB.Net
VB.Net Tutorial
Flash / Flex / ActionScript
VBA / Excel / Access / Word
XML
XML Tutorial
Microsoft Office PowerPoint 2007 Tutorial
Microsoft Office Excel 2007 Tutorial
Microsoft Office Word 2007 Tutorial
Java Source Code / Java Documentation » GIS » GeoTools 2.4.1 » org.geotools.data.jdbc 
Source Cross Referenced  Class Diagram Java Document (Java Doc) 


0001:        /*
0002:         *    GeoTools - OpenSource mapping toolkit
0003:         *    http://geotools.org
0004:         *    (C) 2003-2006, GeoTools Project Management Committee (PMC)
0005:         *    
0006:         *    This library is free software; you can redistribute it and/or
0007:         *    modify it under the terms of the GNU Lesser General Public
0008:         *    License as published by the Free Software Foundation;
0009:         *    version 2.1 of the License.
0010:         *
0011:         *    This library is distributed in the hope that it will be useful,
0012:         *    but WITHOUT ANY WARRANTY; without even the implied warranty of
0013:         *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
0014:         *    Lesser General Public License for more details.
0015:         */
0016:        package org.geotools.data.jdbc;
0017:
0018:        import java.io.IOException;
0019:        import java.math.BigDecimal;
0020:        import java.net.URI;
0021:        import java.net.URISyntaxException;
0022:        import java.sql.Connection;
0023:        import java.sql.DatabaseMetaData;
0024:        import java.sql.ResultSet;
0025:        import java.sql.SQLException;
0026:        import java.sql.Statement;
0027:        import java.sql.Types;
0028:        import java.util.ArrayList;
0029:        import java.util.Arrays;
0030:        import java.util.Collections;
0031:        import java.util.HashMap;
0032:        import java.util.HashSet;
0033:        import java.util.List;
0034:        import java.util.Map;
0035:        import java.util.Set;
0036:        import java.util.logging.Level;
0037:        import java.util.logging.Logger;
0038:
0039:        import org.geotools.data.DataSourceException;
0040:        import org.geotools.data.DataStore;
0041:        import org.geotools.data.DataUtilities;
0042:        import org.geotools.data.DefaultQuery;
0043:        import org.geotools.data.EmptyFeatureReader;
0044:        import org.geotools.data.FeatureListenerManager;
0045:        import org.geotools.data.FeatureReader;
0046:        import org.geotools.data.FeatureSource;
0047:        import org.geotools.data.FeatureWriter;
0048:        import org.geotools.data.FilteringFeatureReader;
0049:        import org.geotools.data.InProcessLockingManager;
0050:        import org.geotools.data.LockingManager;
0051:        import org.geotools.data.Query;
0052:        import org.geotools.data.ReTypeFeatureReader;
0053:        import org.geotools.data.SchemaNotFoundException;
0054:        import org.geotools.data.Transaction;
0055:        import org.geotools.data.jdbc.attributeio.AttributeIO;
0056:        import org.geotools.data.jdbc.attributeio.BasicAttributeIO;
0057:        import org.geotools.data.jdbc.fidmapper.DefaultFIDMapperFactory;
0058:        import org.geotools.data.jdbc.fidmapper.FIDMapper;
0059:        import org.geotools.data.jdbc.fidmapper.FIDMapperFactory;
0060:        import org.geotools.data.view.DefaultView;
0061:        import org.geotools.factory.FactoryRegistryException;
0062:        import org.geotools.factory.Hints;
0063:        import org.geotools.feature.AttributeType;
0064:        import org.geotools.feature.AttributeTypeFactory;
0065:        import org.geotools.feature.FeatureType;
0066:        import org.geotools.feature.FeatureTypeBuilder;
0067:        import org.geotools.feature.SchemaException;
0068:        import org.opengis.filter.Filter;
0069:        import org.geotools.filter.SQLEncoder;
0070:        import org.geotools.filter.SQLEncoderException;
0071:
0072:        import com.vividsolutions.jts.geom.Envelope;
0073:
0074:        /**
0075:         * Abstract class for JDBC based DataStore implementations.
0076:         *
0077:         * <p>
0078:         * This class provides a default implementation of a JDBC data store. Support
0079:         * for vendor specific JDBC data stores can be easily added to Geotools by
0080:         * subclassing this class and overriding the hooks provided.
0081:         * </p>
0082:         *
0083:         * <p>
0084:         * At a minimum subclasses should implement the following methods:
0085:         *
0086:         * <ul>
0087:         * <li> {@link #buildAttributeType(ResultSet) buildAttributeType(ResultSet)} -
0088:         * This should be overridden to construct an attribute type that represents any
0089:         * column types not supported by the default implementation, such as geometry
0090:         * columns. </li>
0091:         * <li> {@link #getGeometryAttributeIO(AttributeType, QueryData)
0092:         * getGeometryAttributeIO(AttributeType, QueryData)} - Should be overridden to
0093:         * provide a way to read/write geometries into the format of the database </li>
0094:         * </ul>
0095:         * </p>
0096:         *
0097:         * <p>
0098:         * Additionally subclasses can optionally override the following:
0099:         *
0100:         * <ul>
0101:         * <li> Use a specific FIDMapperFactory by overriding the {@link
0102:         * #buildFIDMapperFactory(JDBCDataStoreConfig)
0103:         * buildFIDMapperFactory(JDBCDataStoreConfig)} method, and eventually disallow
0104:         * user overrides by throwing an {@link java.lang.UnsupportedOperationException
0105:         * UnsupportedOperationException} in the
0106:         * {@link #setFIDMapperFactory(FIDMapperFactory) setFidMapperFactory()} method.
0107:         * </li>
0108:         * <li> {@link #allowTable(String) allowTable} - Used to determine whether a
0109:         * table name should be exposed as a feature type. </li>
0110:         * <li> {@link #determineSRID(String,String) determineSRID} - Used to determine
0111:         * the SpatialReference ID of a geometry column in a table. </li>
0112:         * <li> {@link #buildSQLQuery(String,AttributeType[],Filter,boolean)
0113:         * buildSQLQuery()} - Sub classes can override this to build a custom SQL query.
0114:         * </li>
0115:         * <li> {@link #getResultSetType(boolean) getResultSetType} if the standard
0116:         * result set type is not satisfactory/does not work with a normal FORWARD_ONLY
0117:         * resultset type </li>
0118:         * <li> {@link #getConcurrency(boolean) getConcurrency} to set the level of
0119:         * concurrency for the result set used to read/write the database </li>
0120:         * </ul>
0121:         * </p>
0122:         *
0123:         * <p>
0124:         * Additionally subclasses may want to set the value of:
0125:         *
0126:         * <ul>
0127:         * <li> sqlNameEscape - character (String) to surround names of SQL objects to
0128:         * support mixed-case and non-English names. </li>
0129:         * </ul>
0130:         * </p>
0131:         *
0132:         * @author Amr Alam, Refractions Research
0133:         * @author Sean Geoghegan, Defence Science and Technology Organisation
0134:         * @author Chris Holmes, TOPP
0135:         * @author Andrea Aime
0136:         * @source $URL: http://svn.geotools.org/geotools/tags/2.4.1/modules/library/jdbc/src/main/java/org/geotools/data/jdbc/JDBC1DataStore.java $
0137:         * @version $Id: JDBC1DataStore.java 29442 2008-02-25 09:36:19Z jgarnett $
0138:         */
0139:        public abstract class JDBC1DataStore implements  DataStore {
0140:
    /** Shared logger for the JDBC datastore module ("org.geotools.data.jdbc"). */
    protected static final Logger LOGGER = org.geotools.util.logging.Logging
            .getLogger("org.geotools.data.jdbc");
0144:
    /**
     * Maps {@link java.sql.Types} constants to the Java classes used for the
     * corresponding attribute values.
     *
     * <p>
     * Complex types such as ARRAY, BLOB and CLOB are deliberately not mapped.
     * The map is public (and mutable), so subclasses can extend or adjust the
     * mappings for vendor specific types.
     * </p>
     *
     * <p>
     * These mappings were taken from
     * http://java.sun.com/j2se/1.3/docs/guide/jdbc/getstart/mapping.html#997737
     * </p>
     */
    public static final Map TYPE_MAPPINGS = new HashMap();

    static {
        // all SQL character types surface as plain Strings
        TYPE_MAPPINGS.put(new Integer(Types.VARCHAR), String.class);
        TYPE_MAPPINGS.put(new Integer(Types.CHAR), String.class);
        TYPE_MAPPINGS.put(new Integer(Types.LONGVARCHAR), String.class);

        TYPE_MAPPINGS.put(new Integer(Types.BIT), Boolean.class);
        TYPE_MAPPINGS.put(new Integer(Types.BOOLEAN), Boolean.class);

        // note: TINYINT is widened to Short here (not Byte)
        TYPE_MAPPINGS.put(new Integer(Types.TINYINT), Short.class);
        TYPE_MAPPINGS.put(new Integer(Types.SMALLINT), Short.class);

        TYPE_MAPPINGS.put(new Integer(Types.INTEGER), Integer.class);
        TYPE_MAPPINGS.put(new Integer(Types.BIGINT), Long.class);

        // REAL maps to Float; FLOAT and DOUBLE both map to Double,
        // following the JDBC getting-started conversion table linked above
        TYPE_MAPPINGS.put(new Integer(Types.REAL), Float.class);
        TYPE_MAPPINGS.put(new Integer(Types.FLOAT), Double.class);
        TYPE_MAPPINGS.put(new Integer(Types.DOUBLE), Double.class);

        // exact numerics keep full precision via BigDecimal
        TYPE_MAPPINGS.put(new Integer(Types.DECIMAL), BigDecimal.class);
        TYPE_MAPPINGS.put(new Integer(Types.NUMERIC), BigDecimal.class);

        TYPE_MAPPINGS.put(new Integer(Types.DATE), java.sql.Date.class);
        TYPE_MAPPINGS.put(new Integer(Types.TIME), java.sql.Time.class);
        TYPE_MAPPINGS.put(new Integer(Types.TIMESTAMP),
                java.sql.Timestamp.class);
    }
0183:
    // I/O helper for plain (non-geometry) attribute values.
    // NOTE(review): initialised elsewhere in this class — appears to be
    // created lazily; confirm against its accessor before relying on it.
    private BasicAttributeIO basicAttributeIO;

    /** Manages listener lists for FeatureSource implementations */
    public FeatureListenerManager listenerManager = new FeatureListenerManager();

    // created once via the createLockingManager() hook below
    private LockingManager lockingManager = createLockingManager();

    /** Connection/schema configuration supplied at construction time. */
    protected final JDBCDataStoreConfig config;

    /** Caches feature type metadata and FID mappers; set in the constructor. */
    protected FeatureTypeHandler typeHandler = null;

    /**
     * The character(s) to surround schema, table and column names an SQL query
     * to support mixed-case and non-English names
     */
    protected String sqlNameEscape = "";

    /**
     * When true, writes are allowed also on tables with volatile FID mappers.
     * False by default
     *
     * @see FIDMapper#isVolatile()
     */
    protected boolean allowWriteOnVolatileFIDs;

    /**
     * The transaction isolation level to use in a transaction.  One of
     * Connection.TRANSACTION_READ_UNCOMMITTED, TRANSACTION_READ_COMMITTED,
     * TRANSACTION_REPEATABLE_READ, or SERIALIZABLE.
     *
     * Connection.TRANSACTION_NONE may also be used to indicate "use default".
     */
    protected int transactionIsolation = Connection.TRANSACTION_NONE;
0217:
0218:            /**
0219:             * Construct a JDBCDataStore with ConnectionPool and associated
0220:             * configuration.
0221:             * 
0222:             * @param config
0223:             *
0224:             * @throws IOException
0225:             */
0226:            public JDBC1DataStore(JDBCDataStoreConfig config)
0227:                    throws IOException {
0228:                this .config = config;
0229:                this .typeHandler = getFeatureTypeHandler(config);
0230:            }
0231:
0232:            /**
0233:             * Gets the SQL name escape string.
0234:             *
0235:             * <p>
0236:             * The value of this string is prefixed and appended to table schema names,
0237:             * table names and column names in an SQL statement to support mixed-case
0238:             * and non-English names.
0239:             * </p>
0240:             *
0241:             * @return the value of the SQL name escape string.
0242:             */
0243:            public String getSqlNameEscape() {
0244:                return sqlNameEscape;
0245:            }
0246:
0247:            /**
0248:             * Sets the SQL name escape string.
0249:             *
0250:             * <p>
0251:             * The value of this string is prefixed and appended to table schema names,
0252:             * table names and column names in an SQL statement to support mixed-case
0253:             * and non-English names.
0254:             * </p>
0255:             *
0256:             * <p>
0257:             * This value is typically only set once when the DataStore implementation
0258:             * class is constructed.
0259:             * </p>
0260:             *
0261:             * @param sqlNameEscape
0262:             *            the name escape character
0263:             */
0264:            protected void setSqlNameEscape(String sqlNameEscape) {
0265:                this .sqlNameEscape = sqlNameEscape;
0266:            }
0267:
0268:            /**
0269:             * DOCUMENT ME!
0270:             *
0271:             * @return DOCUMENT ME!
0272:             *
0273:             * @deprecated This is deprecated in favour of the JDBCDataStoreConfig
0274:             *             object. public JDBCDataStore(ConnectionPool connectionPool)
0275:             *             throws IOException { this(connectionPool, null, new
0276:             *             HashMap(), ""); }
0277:             */
0278:
0279:            /**
0280:             * DOCUMENT ME!
0281:             *
0282:             * @param config
0283:             *
0284:             * @return DOCUMENT ME!
0285:             *
0286:             * @throws IOException
0287:             *             DOCUMENT ME!
0288:             */
0289:            protected FeatureTypeHandler getFeatureTypeHandler(
0290:                    JDBCDataStoreConfig config) throws IOException {
0291:                return new FeatureTypeHandler(this ,
0292:                        buildFIDMapperFactory(config), config
0293:                                .getTypeHandlerTimeout());
0294:            }
0295:
0296:            protected FIDMapperFactory buildFIDMapperFactory(
0297:                    JDBCDataStoreConfig config) {
0298:                return new DefaultFIDMapperFactory();
0299:            }
0300:
0301:            public FIDMapper getFIDMapper(String tableName) throws IOException {
0302:                return typeHandler.getFIDMapper(tableName);
0303:            }
0304:
0305:            /**
0306:             * Allows subclass to create LockingManager to support their needs.
0307:             *
0308:             */
0309:            protected LockingManager createLockingManager() {
0310:                return new InProcessLockingManager();
0311:            }
0312:
0313:            /**
0314:             * @see org.geotools.data.DataStore#getFeatureTypes()
0315:             */
0316:            public String[] getTypeNames() throws IOException {
0317:                return typeHandler.getTypeNames();
0318:            }
0319:
0320:            /**
0321:             * @see org.geotools.data.DataStore#getSchema(java.lang.String)
0322:             */
0323:            public FeatureType getSchema(String typeName) throws IOException {
0324:                return typeHandler.getSchema(typeName);
0325:            }
0326:
0327:            /**
0328:             * Create a new featureType.
0329:             *
0330:             * <p>
0331:             * Not currently supported - subclass may implement.
0332:             * </p>
0333:             *
0334:             * @param featureType
0335:             *
0336:             * @throws IOException
0337:             * @throws UnsupportedOperationException
0338:             *             Creating new schemas is not supported.
0339:             *
0340:             * @see org.geotools.data.DataStore#createSchema(org.geotools.feature.FeatureType)
0341:             */
0342:            public void createSchema(FeatureType featureType)
0343:                    throws IOException {
0344:                throw new UnsupportedOperationException(
0345:                        "Table creation not implemented");
0346:            }
0347:
0348:            /**
0349:             * Used to provide support for changing the DataStore Schema.
0350:             *
0351:             * <p>
0352:             * Specifically this is intended to address updating the metadata Coordinate
0353:             * System information.
0354:             * </p>
0355:             *
0356:             * <p>
0357:             * If we can figure out the Catalog API for metadata we will not have to use
0358:             * such a heavy handed approach.
0359:             * </p>
0360:             *
0361:             * <p>
0362:             * Subclasses are free to implement various levels of support:
0363:             * </p>
0364:             *
0365:             * <ul>
0366:             * <li> None - table modification is not supported </li>
0367:             * <li> CS change - ensure that the attribtue types match and only update
0368:             * metadata but not table structure. </li>
0369:             * <li> Allow table change opperations </li>
0370:             * </ul>
0371:             *
0372:             *
0373:             * @see org.geotools.data.DataStore#updateSchema(java.lang.String,
0374:             *      org.geotools.feature.FeatureType)
0375:             */
0376:            public void updateSchema(String typeName, FeatureType featureType)
0377:                    throws IOException {
0378:                throw new UnsupportedOperationException(
0379:                        "Table modification not supported");
0380:            }
0381:
0382:            // This is the *better* implementation of getview from AbstractDataStore
0383:            public FeatureSource getView(final Query query) throws IOException,
0384:                    SchemaException {
0385:                return new DefaultView(this .getFeatureSource(query
0386:                        .getTypeName()), query);
0387:            }
0388:
0389:            /*
0390:             * // Jody - This is my recomendation for DataStore // in order to support
0391:             * CS reprojection and override public FeatureSource getView(final Query
0392:             * query) throws IOException, SchemaException { String typeName =
0393:             * query.getTypeName(); FeatureType origionalType = getSchema(typeName);
0394:             * //CoordinateSystem cs = query.getCoordinateSystem(); //final FeatureType
0395:             * featureType = DataUtilities.createSubType( origionalType,
0396:             * query.getPropertyNames(), cs ); final FeatureType featureType =
0397:             * DataUtilities.createSubType(origionalType, query.getPropertyNames());
0398:             * return new AbstractFeatureSource() { public DataStore getDataStore() {
0399:             * return JDBCDataStore.this; } public void
0400:             * addFeatureListener(FeatureListener listener) {
0401:             * listenerManager.addFeatureListener(this, listener); } public void
0402:             * removeFeatureListener(FeatureListener listener) {
0403:             * listenerManager.removeFeatureListener(this, listener); } public
0404:             * FeatureType getSchema() { return featureType; } }; }
0405:             */
0406:
0407:            /**
0408:             * Default implementation based on getFeatureReader and getFeatureWriter.
0409:             *
0410:             * <p>
0411:             * We should be able to optimize this to only get the RowSet once
0412:             * </p>
0413:             *
0414:             * @see org.geotools.data.DataStore#getFeatureSource(java.lang.String)
0415:             */
0416:            public FeatureSource getFeatureSource(String typeName)
0417:                    throws IOException {
0418:                if (!typeHandler.getFIDMapper(typeName).isVolatile()
0419:                        || allowWriteOnVolatileFIDs) {
0420:                    if (getLockingManager() != null) {
0421:                        // Use default JDBCFeatureLocking that delegates all locking
0422:                        // the getLockingManager
0423:                        //
0424:                        return new JDBCFeatureLocking(this , getSchema(typeName));
0425:                    } else {
0426:                        // subclass should provide a FeatureLocking implementation
0427:                        // but for now we will simply forgo all locking
0428:                        return new JDBCFeatureStore(this , getSchema(typeName));
0429:                    }
0430:                } else {
0431:                    return new JDBCFeatureSource(this , getSchema(typeName));
0432:                }
0433:            }
0434:
0435:            /**
0436:             * This is a public entry point to the DataStore.
0437:             *
0438:             * <p>
0439:             * We have given some though to changing this api to be based on query.
0440:             * </p>
0441:             *
0442:             * <p>
0443:             * Currently the is is the only way to retype your features to different
0444:             * name spaces.
0445:             * </p>
0446:             * (non-Javadoc)
0447:             */
0448:            public FeatureReader getFeatureReader(
0449:                    final FeatureType requestType, final Filter filter,
0450:                    final Transaction transaction) throws IOException {
0451:                String typeName = requestType.getTypeName();
0452:                FeatureType schemaType = getSchema(typeName);
0453:
0454:                int compare = DataUtilities.compare(requestType, schemaType);
0455:
0456:                Query query;
0457:
0458:                if (compare == 0) {
0459:                    // they are the same type
0460:                    //
0461:                    query = new DefaultQuery(typeName, filter);
0462:                } else if (compare == 1) {
0463:                    // featureType is a proper subset and will require reTyping
0464:                    //
0465:                    String[] names = attributeNames(requestType, filter);
0466:                    query = new DefaultQuery(typeName, filter,
0467:                            Query.DEFAULT_MAX, names, "getFeatureReader");
0468:                } else {
0469:                    // featureType is not compatiable
0470:                    //
0471:                    throw new IOException("Type " + typeName
0472:                            + " does match request");
0473:                }
0474:
0475:                if ((filter == Filter.EXCLUDE) || filter.equals(Filter.EXCLUDE)) {
0476:                    return new EmptyFeatureReader(requestType);
0477:                }
0478:
0479:                FeatureReader reader = getFeatureReader(query, transaction);
0480:
0481:                if (compare == 1) {
0482:                    reader = new ReTypeFeatureReader(reader, requestType, false);
0483:                }
0484:
0485:                return reader;
0486:            }
0487:
0488:            /**
0489:             * Gets the list of attribute names required for both featureType and filter
0490:             *
0491:             * @param featureType
0492:             *            The FeatureType to get attribute names for.
0493:             * @param filter
0494:             *            The filter which needs attributes to filter.
0495:             *
0496:             * @return The list of attribute names required by a filter.
0497:             *
0498:             * @throws IOException
0499:             *             If we can't get the schema.
0500:             */
0501:            protected String[] attributeNames(FeatureType featureType,
0502:                    Filter filter) throws IOException {
0503:                String typeName = featureType.getTypeName();
0504:                FeatureType origional = getSchema(typeName);
0505:                SQLBuilder sqlBuilder = getSqlBuilder(typeName);
0506:
0507:                if (featureType.getAttributeCount() == origional
0508:                        .getAttributeCount()) {
0509:                    // featureType is complete (so filter must require subset
0510:                    return DataUtilities.attributeNames(featureType);
0511:                }
0512:
0513:                String[] typeAttributes = DataUtilities
0514:                        .attributeNames(featureType);
0515:                String[] filterAttributes = DataUtilities
0516:                        .attributeNames(sqlBuilder.getPostQueryFilter(filter));
0517:
0518:                if ((filterAttributes == null)
0519:                        || (filterAttributes.length == 0)) {
0520:                    // no filter attributes required
0521:                    return typeAttributes;
0522:                }
0523:
0524:                Set set = new HashSet();
0525:                set.addAll(Arrays.asList(typeAttributes));
0526:                set.addAll(Arrays.asList(filterAttributes));
0527:
0528:                if (set.size() == typeAttributes.length) {
0529:                    // filter required a subset of featureType attributes
0530:                    return typeAttributes;
0531:                } else {
0532:                    return (String[]) set.toArray(new String[set.size()]);
0533:                }
0534:            }
0535:
0536:            /**
0537:             * The top level method for getting a FeatureReader.
0538:             *
0539:             * <p>
0540:             * Chris- I've gone with the Query object aswell. It just seems to make more
0541:             * sense. This is pretty well split up across methods. The hooks for DB
0542:             * specific AttributeReaders are createResultSetReader and
0543:             * createGeometryReader.
0544:             * </p>
0545:             *
0546:             * <p>
0547:             * JG- I have implemented getFeatureReader( FeatureType, Filter,
0548:             * Transasction) ontop of this method, it will Retype as required
0549:             * </p>
0550:             *
0551:             * @param query
0552:             *            The Query to get a FeatureReader for.
0553:             * @param trans
0554:             *            The transaction this read operation is being performed in.
0555:             *
0556:             * @return A FeatureReader that contains features defined by the query.
0557:             *
0558:             * @throws IOException
0559:             *             If an error occurs executing the query.
0560:             * @throws DataSourceException
0561:             */
0562:            public FeatureReader getFeatureReader(Query query, Transaction trans)
0563:                    throws IOException {
0564:                String typeName = query.getTypeName();
0565:                FeatureType featureType = getSchema(typeName);
0566:                FeatureTypeInfo typeInfo = typeHandler
0567:                        .getFeatureTypeInfo(typeName);
0568:
0569:                SQLBuilder sqlBuilder = getSqlBuilder(typeName);
0570:
0571:                Filter preFilter = (Filter) sqlBuilder.getPreQueryFilter(query
0572:                        .getFilter()); //process in DB
0573:                Filter postFilter = (Filter) sqlBuilder
0574:                        .getPostQueryFilter(query.getFilter()); //process after DB
0575:
0576:                //JD: This is bad, we should not assume we have the right to change the query object
0577:                Filter originalFilter = (Filter) query.getFilter();
0578:                ((DefaultQuery) query).setFilter(preFilter);
0579:
0580:                String[] requestedNames = propertyNames(query);
0581:                String[] propertyNames;
0582:
0583:                // DJB: changed to account for miss-ordered queries
0584:                if (allSameOrder(requestedNames, featureType)) {
0585:                    // because we have everything, the filter can run
0586:                    propertyNames = requestedNames;
0587:                } else if (requestedNames.length <= featureType
0588:                        .getAttributeCount()) {
0589:                    // we will need to reType this :-)
0590:                    //
0591:                    // check to make sure we have enough for the post filter
0592:                    //
0593:                    String[] filterNames = DataUtilities.attributeNames(
0594:                            postFilter, featureType);
0595:
0596:                    //JD: using a list here to maintain order
0597:                    List list = new ArrayList();
0598:                    list.addAll(Arrays.asList(requestedNames));
0599:                    for (int i = 0; i < filterNames.length; i++) {
0600:                        if (!list.contains(filterNames[i])) {
0601:                            list.add(filterNames[i]);
0602:                        }
0603:                    }
0604:
0605:                    if (list.size() == requestedNames.length) {
0606:                        propertyNames = requestedNames;
0607:                    } else {
0608:                        propertyNames = (String[]) list.toArray(new String[list
0609:                                .size()]);
0610:                    }
0611:
0612:                    try {
0613:                        typeInfo = new FeatureTypeInfo(typeInfo
0614:                                .getFeatureTypeName(), DataUtilities
0615:                                .createSubType(typeInfo.getSchema(),
0616:                                        propertyNames), typeInfo.getFIDMapper());
0617:                    } catch (SchemaException e1) {
0618:                        throw new DataSourceException(
0619:                                "Could not create subtype", e1);
0620:                    }
0621:                } else { // too many requested (duplicates?)
0622:                    throw new DataSourceException(typeName
0623:                            + " does not contain requested properties:" + query);
0624:                }
0625:
0626:                AttributeType[] attrTypes = null;
0627:
0628:                try {
0629:                    attrTypes = getAttributeTypes(typeName, propertyNames);
0630:                } catch (SchemaException schemaException) {
0631:                    throw new DataSourceException(
0632:                            "Some Attribute Names were specified that"
0633:                                    + " do not exist in the FeatureType "
0634:                                    + typeName + ". " + "Requested names: "
0635:                                    + Arrays.asList(propertyNames) + ", "
0636:                                    + "FeatureType: " + featureType,
0637:                            schemaException);
0638:                }
0639:
0640:                String sqlQuery = constructQuery(query, attrTypes);
0641:                LOGGER.fine(sqlQuery);
0642:
0643:                //JD: This is bad, we should not assume we have the right to change the query object
0644:                ((DefaultQuery) query).setFilter(originalFilter);
0645:
0646:                QueryData queryData = executeQuery(typeInfo, typeName,
0647:                        sqlQuery, trans, false, query.getHints());
0648:
0649:                FeatureType schema;
0650:
0651:                try {
0652:                    schema = FeatureTypeBuilder.newFeatureType(attrTypes,
0653:                            typeName, getNameSpace());
0654:                } catch (FactoryRegistryException e) {
0655:                    throw new DataSourceException(
0656:                            "Schema Factory Error when creating schema for FeatureReader",
0657:                            e);
0658:                } catch (SchemaException e) {
0659:                    throw new DataSourceException(
0660:                            "Schema Error when creating schema for FeatureReader",
0661:                            e);
0662:                }
0663:
0664:                FeatureReader reader;
0665:                reader = createFeatureReader(schema, postFilter, queryData);
0666:
0667:                if (requestedNames.length < propertyNames.length) {
0668:                    // need to scale back to what the user asked for
0669:                    // (remove the attribtues only used for postFilter)
0670:                    //
0671:                    try {
0672:                        FeatureType requestType = DataUtilities.createSubType(
0673:                                schema, requestedNames);
0674:                        if (!requestType.equals(reader.getFeatureType())) {
0675:                            reader = new ReTypeFeatureReader(reader,
0676:                                    requestType, false);
0677:                        }
0678:                    } catch (SchemaException schemaException) {
0679:                        throw new DataSourceException("Could not handle query",
0680:                                schemaException);
0681:                    }
0682:                }
0683:
0684:                // chorner: this is redundant, since we've already created the reader with the post filter attached		
0685:                // if (postFilter != null && !postFilter.equals(Filter.INCLUDE)) {
0686:                //     reader = new FilteringFeatureReader(reader, postFilter);
0687:                // }
0688:
0689:                return reader;
0690:            }
0691:
0692:            /**
0693:             * Used internally to call the subclass hooks that construct the SQL query.
0694:             *
0695:             * @param query
0696:             * @param attrTypes
0697:             *
0698:             *
0699:             * @throws IOException
0700:             * @throws DataSourceException
0701:             */
0702:            private String constructQuery(Query query, AttributeType[] attrTypes)
0703:                    throws IOException, DataSourceException {
0704:                String typeName = query.getTypeName();
0705:
0706:                SQLBuilder sqlBuilder = getSqlBuilder(query.getTypeName());
0707:                sqlBuilder.setHints(query.getHints()); // hints will control FEATURE_2D etc...
0708:
0709:                org.opengis.filter.Filter preFilter = sqlBuilder
0710:                        .getPreQueryFilter(query.getFilter()); //dupe?
0711:                //Filter postFilter = sqlBuilder.getPostQueryFilter(query.getFilter());
0712:
0713:                FIDMapper mapper = getFIDMapper(typeName);
0714:
0715:                String sqlQuery;
0716:                //FeatureTypeInfo info = typeHandler.getFeatureTypeInfo(typeName);
0717:                //boolean useMax = (postFilter == null); // not used yet
0718:
0719:                try {
0720:                    LOGGER.fine("calling sql builder with filter " + preFilter);
0721:
0722:                    if (query.getFilter() == Filter.EXCLUDE) {
0723:                        StringBuffer buf = new StringBuffer("SELECT ");
0724:                        sqlBuilder.sqlColumns(buf, mapper, attrTypes);
0725:                        sqlBuilder.sqlFrom(buf, typeName);
0726:                        buf.append(" WHERE '1' = '0'"); // NO-OP it
0727:                        sqlQuery = buf.toString();
0728:                    } else {
0729:                        sqlQuery = sqlBuilder.buildSQLQuery(typeName, mapper,
0730:                                attrTypes, preFilter);
0731:                    }
0732:
0733:                    //order by clause
0734:                    if (query.getSortBy() != null) {
0735:                        //encode the sortBy clause
0736:                        StringBuffer buf = new StringBuffer();
0737:                        buf.append(sqlQuery);
0738:                        sqlBuilder.sqlOrderBy(buf, query.getSortBy());
0739:
0740:                        sqlQuery = buf.toString();
0741:                    }
0742:
0743:                    LOGGER.fine("sql is " + sqlQuery);
0744:                } catch (SQLEncoderException e) {
0745:                    throw new DataSourceException("Error building SQL Query", e);
0746:                }
0747:
0748:                return sqlQuery;
0749:            }
0750:
0751:            /**
0752:             * Create a new FeatureReader based on attributeReaders.
0753:             *
0754:             * <p>
0755:             * The provided <code>schema</code> describes the attributes in the
0756:             * queryData ResultSet. This schema should cover the requirements of
0757:             * <code>filter</code>.
0758:             * </p>
0759:             *
0760:             * <p>
0761:             * Retyping to the users requested Schema will not happen in this method.
0762:             * </p>
0763:             *
0764:             * @param schema
0765:             * @param postFilter
0766:             *            Filter for post processing, or <code>null</code> if not
0767:             *            required.
0768:             * @param queryData
0769:             *            Holds a ResultSet for attribute Readers
0770:             *
0771:             *
0772:             * @throws IOException
0773:             */
0774:            protected FeatureReader createFeatureReader(FeatureType schema,
0775:                    org.opengis.filter.Filter postFilter, QueryData queryData)
0776:                    throws IOException {
0777:
0778:                // Thanks Shaun Forbes moving excludes check earlier
0779:                if (postFilter == Filter.EXCLUDE) {
0780:                    return new EmptyFeatureReader(schema);
0781:                }
0782:
0783:                FeatureReader fReader = getJDBCFeatureReader(queryData);
0784:
0785:                if ((postFilter != null) && (postFilter != Filter.INCLUDE)) {
0786:                    fReader = new FilteringFeatureReader(fReader, postFilter);
0787:                }
0788:
0789:                return fReader;
0790:            }
0791:
            /**
             * Hook allowing subclasses to supply a specialised JDBCFeatureReader.
             *
             * @param queryData holds the open ResultSet the reader will consume
             *
             * @return a new JDBCFeatureReader over <code>queryData</code>
             *
             * @throws IOException if the reader cannot be created
             */
            protected JDBCFeatureReader getJDBCFeatureReader(QueryData queryData)
                    throws IOException {
                return new JDBCFeatureReader(queryData);
            }
0796:
0797:            // protected final AttributeReader createAttributeReader(AttributeType[]
0798:            // attrTypes, int fidColumnsCount, ResultSet rs) {
0799:            // AttributeIO[] attributeIO = new AttributeIO[attrTypes.length];
0800:            // for(int i = 0; i < attributeIO.length; i++) {
0801:            // if(attrTypes[i].isGeometry()) {
0802:            // attributeIO[i] = getGeometryAttributeIO(attrTypes[i]);
0803:            // } else {
0804:            // attributeIO[i] = getAttributeIO(attrTypes[i]);
0805:            // }
0806:            //
0807:            // }
0808:            // return new JDBCFeatureReader(attrTypes, attributeIO, fidColumnsCount,
0809:            // rs);
0810:            // }
0811:
0812:            /**
0813:             * Returns the basic AttributeIO that can read and write all of the simple
0814:             * data types
0815:             *
0816:             * @param type
0817:             *
0818:             */
0819:            protected AttributeIO getAttributeIO(AttributeType type) {
0820:                if (basicAttributeIO == null) {
0821:                    basicAttributeIO = new BasicAttributeIO();
0822:                }
0823:
0824:                return basicAttributeIO;
0825:            }
0826:
            /**
             * Hook to create the geometry attribute IO for a vendor specific data
             * source.
             *
             * @param type
             *            The AttributeType to read.
             * @param queryData
             *            The connection holder
             *
             * @return The AttributeIO that will read and write the geometry from the
             *         results.
             *
             * @throws IOException
             *             if the vendor specific geometry handler cannot be created.
             */
            protected abstract AttributeIO getGeometryAttributeIO(
                    AttributeType type, QueryData queryData) throws IOException;
0844:
            /**
             * Convenience overload of
             * {@link #executeQuery(FeatureTypeInfo, String, String, Transaction, boolean, Hints)}
             * that passes no query hints.
             *
             * @param featureTypeInfo metadata for the feature type being queried
             * @param tableName the table the query runs against
             * @param sqlQuery the SQL statement to execute
             * @param transaction the transaction to obtain the connection from
             * @param forWrite true if the ResultSet is intended for writing
             * @return the QueryData holding the open connection, statement and ResultSet
             * @throws IOException if the query fails
             */
            protected QueryData executeQuery(FeatureTypeInfo featureTypeInfo,
                    String tableName, String sqlQuery, Transaction transaction,
                    boolean forWrite) throws IOException {
                return executeQuery(featureTypeInfo, tableName, sqlQuery,
                        transaction, forWrite, null);
            }
0861:
            /**
             * Executes the SQL Query.
             *
             * <p>
             * This is private in the expectation that subclasses should not need to
             * change this behaviour.
             * </p>
             *
             * <p>
             * Jody with a question here - I have stopped this method from closing
             * connection shared by a Transaction. It sill seems like we are leaving
             * connections open by using this method. I have also stopped QueryData from
             * doing the same thing.
             * </p>
             *
             * <p>
             * Answer from Sean: Resources for successful queries are closed when close
             * is called on the AttributeReaders constructed with the QueryData. We
             * can't close them here since they need to be open to read from the
             * ResultSet.
             * </p>
             *
             * <p>
             * Jody AttributeReader question: I looked at the code and Attribute Readers
             * do not close with respect to Transactions (they need to as we can issue a
             * Reader against a Transaction. I have changed the JDBCDataStore.close
             * method to force us to keep track of these things.
             * </p>
             *
             * <p>
             * SG: I've marked this as final since I don't think it shoudl be overriden,
             * but Im not sure
             * </p>
             *
             * @param featureTypeInfo metadata for the feature type being queried
             * @param tableName the table the query runs against
             * @param sqlQuery
             *            The SQL query to execute.
             * @param transaction
             *            The Transaction is included here for handling transaction
             *            connections at a later stage. It is not currently used.
             * @param forWrite true if the ResultSet should be updatable
             * @param hints the {@link Query} hints
             *
             * @return The QueryData object that contains the resources for the query.
             *
             * @throws IOException
             * @throws DataSourceException
             *             If an error occurs performing the query.
             *
             * @task HACK: This is just protected for postgis FeatureWriter purposes.
             *       Should move back to private when that stuff moves more abstract
             *       here.
             */
            protected QueryData executeQuery(FeatureTypeInfo featureTypeInfo,
                    String tableName, String sqlQuery, Transaction transaction,
                    boolean forWrite, Hints hints) throws IOException {
                LOGGER.fine("About to execute query: " + sqlQuery);

                Connection conn = null;
                Statement statement = null;
                ResultSet rs = null;

                try {
                    conn = getConnection(transaction);

                    // subclasses may veto the autoCommit change (see setAutoCommit)
                    setAutoCommit(forWrite, conn);
                    statement = conn.createStatement(
                            getResultSetType(forWrite),
                            getConcurrency(forWrite));
                    // fetch rows in batches instead of materialising the whole result
                    statement.setFetchSize(1000);
                    rs = statement.executeQuery(sqlQuery);

                    // on success the QueryData takes ownership of conn/statement/rs;
                    // they are closed when the readers built on it are closed
                    return new QueryData(featureTypeInfo, this, conn,
                            statement, rs, transaction, hints);
                } catch (SQLException e) {
                    // if an error occurred we close the resources ourselves,
                    // innermost first, before propagating the failure
                    String msg = "Error Performing SQL query: " + sqlQuery;
                    LOGGER.log(Level.SEVERE, msg, e);
                    JDBCUtils.close(rs);
                    JDBCUtils.close(statement);
                    JDBCUtils.close(conn, transaction, e);
                    throw new DataSourceException(msg, e);
                }
            }
0948:
0949:            /**
0950:             * This method should be overridden to do nothing by DataStores where
0951:             * setting autoCommit causes funky behaviour (ie. anytime autoCommit is
0952:             * changed, every thing up to that point is committed...this isn't good at
0953:             * this stage)
0954:             *
0955:             * @param forWrite
0956:             * @param conn
0957:             * @throws SQLException
0958:             */
0959:            protected void setAutoCommit(boolean forWrite, Connection conn)
0960:                    throws SQLException {
0961:                if (!forWrite) {
0962:                    // for postgis streaming, but I don't believe it hurts anyone.
0963:                    conn.setAutoCommit(false);
0964:                }
0965:            }
0966:
            /**
             * The ResultSet type requested from the driver; forward-only is all that
             * streaming access requires.
             *
             * @param forWrite true when the result set will be used for writing
             *            (ignored here; subclasses may differentiate)
             *
             * @return ResultSet.TYPE_FORWARD_ONLY
             */
            protected int getResultSetType(boolean forWrite) {
                return ResultSet.TYPE_FORWARD_ONLY;
            }
0970:
0971:            protected int getConcurrency(boolean forWrite) {
0972:                if (forWrite) {
0973:                    return ResultSet.CONCUR_UPDATABLE;
0974:                } else {
0975:                    return ResultSet.CONCUR_READ_ONLY;
0976:                }
0977:            }
0978:
0979:            /**
0980:             * Hook for subclass to return a different sql builder.
0981:             * <p>
0982:             * Subclasses requiring a ClientTransactionAccessor should override
0983:             * and instantiate an SQLBuilder with one in the constructor. 
0984:             * 
0985:             * @param typeName
0986:             *            The typename for the sql builder.
0987:             *
0988:             * @return A new sql builder.
0989:             *
0990:             * @throws IOException
0991:             *             if anything goes wrong.
0992:             */
0993:            public SQLBuilder getSqlBuilder(String typeName) throws IOException {
0994:                FeatureType schema = getSchema(typeName);
0995:                SQLEncoder encoder = new SQLEncoder();
0996:                encoder.setFeatureType(schema);
0997:                encoder.setFIDMapper(getFIDMapper(typeName));
0998:
0999:                return new DefaultSQLBuilder(encoder, schema, null);
1000:            }
1001:
            /**
             * Gets a connection for the provided transaction.
             *
             * <p>
             * For {@link Transaction#AUTO_COMMIT} a fresh, single use connection is
             * created on every call. For any other transaction the connection is
             * cached in a JDBCTransactionState attached to the transaction, so all
             * callers working against that transaction share one connection.
             * </p>
             *
             * @param transaction
             * @return A single use connection, or the transaction's shared one.
             *
             * @throws IOException
             * @throws DataSourceException
             *             If the connection can not be obtained.
             */
            public Connection getConnection(Transaction transaction)
                    throws IOException {
                if (transaction != Transaction.AUTO_COMMIT) {
                    // we will need to save a JDBC connection is
                    // transaction.putState( connectionPool, JDBCState )
                    // throw new UnsupportedOperationException("Transactions not
                    // supported yet");

                    JDBCTransactionState state;
                    // synchronize so two threads sharing the transaction cannot
                    // both create a state (and thus two connections) for it
                    synchronized (transaction) {

                        state = (JDBCTransactionState) transaction
                                .getState(this);

                        if (state == null) {
                            try {
                                Connection conn = createConnection();
                                // autoCommit is only left on when the subclass
                                // handles transactions client side (requireAutoCommit)
                                conn.setAutoCommit(requireAutoCommit());
                                if (getTransactionIsolation() != Connection.TRANSACTION_NONE) {
                                    // for us, NONE means use the default, which is
                                    // usually READ_COMMITTED
                                    conn.setTransactionIsolation(getTransactionIsolation());
                                }
                                state = new JDBCTransactionState(conn);
                                // cache on the transaction for subsequent calls
                                transaction.putState(this, state);
                            } catch (SQLException eep) {
                                throw new DataSourceException(
                                        "Connection failed:" + eep, eep);
                            }
                        }
                    }
                    return state.getConnection();
                }

                // AUTO_COMMIT: hand out a brand new connection each time
                try {
                    return createConnection();
                } catch (SQLException sqle) {
                    throw new DataSourceException("Connection failed:" + sqle,
                            sqle);
                }
            }
1054:
            /**
             * Obtain the transaction isolation level for connections.
             *
             * @return Connection.TRANSACTION_* value
             * @since 2.2.0
             * @see #setTransactionIsolation(int)
             * @see <a href="http://www.postgresql.org/docs/7.4/static/transaction-iso.html">This web page</a>
             */
            public int getTransactionIsolation() {
                return transactionIsolation;
            }
1066:
            /**
             * Sets the transaction isolation level for connections.
             *
             * @param value
             *            Connection.TRANSACTION_READ_UNCOMMITTED,
             *            Connection.TRANSACTION_READ_COMMITTED,
             *            Connection.TRANSACTION_REPEATABLE_READ,
             *            Connection.TRANSACTION_SERIALIZABLE, or
             *            Connection.TRANSACTION_NONE (to use the driver default /
             *            not set an explicit level)
             * @since 2.2.0
             * @see #getTransactionIsolation()
             * @see <a href="http://www.postgresql.org/docs/7.4/static/transaction-iso.html">This web page</a>
             */
            public void setTransactionIsolation(int value) {
                transactionIsolation = value;
            }
1082:
            /**
             * Return true if the transaction is handled on the client, i.e. the
             * connection obtained for a Transaction should be left in autoCommit
             * mode (see getConnection). Usually this will not have to be overridden.
             *
             * @return false by default
             */
            protected boolean requireAutoCommit() {
                return false;
            }
1090:
            /**
             * Create a connection for your JDBC1 database.
             *
             * @return a new, open connection to the underlying database
             *
             * @throws SQLException if the connection cannot be established
             */
            protected abstract Connection createConnection()
                    throws SQLException;
1096:
            /**
             * Provides a hook for sub classes to filter out specific tables in the data
             * store that are not to be used as geospatial tables. The default
             * implementation of this method is to allow all tables.
             *
             * @param tablename
             *            A table name to check.
             *
             * @return True if the table should be exposed as a FeatureType, false if it
             *         should be ignored. Always true in this base implementation.
             */
            protected boolean allowTable(String tablename) {
                return true;
            }
1111:
1112:            /**
1113:             * Builds the appropriate FID mapper given a table name and a FID mapper
1114:             * factory
1115:             *
1116:             * @param typeName
1117:             * @param factory
1118:             *
1119:             *
1120:             * @throws IOException
1121:             */
1122:            protected FIDMapper buildFIDMapper(String typeName,
1123:                    FIDMapperFactory factory) throws IOException {
1124:                Connection conn = null;
1125:
1126:                try {
1127:                    conn = getConnection(Transaction.AUTO_COMMIT);
1128:
1129:                    FIDMapper mapper = factory.getMapper(null, config
1130:                            .getDatabaseSchemaName(), typeName, conn);
1131:
1132:                    return mapper;
1133:                } finally {
1134:                    JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null);
1135:                }
1136:            }
1137:
            /**
             * Builds the schema for a table in the database.
             *
             * <p>
             * This works by retrieving the column information for the table from the
             * DatabaseMetaData object. It then iterates over the information for each
             * column, calling buildAttributeType(ResultSet) to construct an
             * AttributeType for each column. The list of attribute types is then turned
             * into a FeatureType that defines the schema.
             * </p>
             *
             * <p>
             * It is not intended that this method is overriden. It should provide the
             * required functionality for most sub-classes. To add AttributeType
             * construction for vendor specific SQL types, such as geometries, override
             * the buildAttributeType(ResultSet) method.
             * </p>
             *
             * <p>
             * This may become final later. In fact Ill make it private because I don't
             * think It will need to be overriden.
             * </p>
             *
             * @param typeName
             *            The name of the table to construct a feature type for.
             * @param mapper
             *            The name of the column holding the fid.
             *
             * @return The FeatureType for the table.
             *
             * @throws IOException
             * @throws DataSourceException
             *             This can occur if there is an SQL error or an error
             *             constructing the FeatureType.
             *
             * @see JDBC1DataStore#buildAttributeType(ResultSet)
             */
            protected FeatureType buildSchema(String typeName, FIDMapper mapper)
                    throws IOException {
                // 1-based indexes into the DatabaseMetaData.getColumns() result set:
                // 4 = COLUMN_NAME, 6 = TYPE_NAME (per the java.sql.DatabaseMetaData spec)
                final int NAME_COLUMN = 4;
                final int TYPE_NAME = 6;
                Connection conn = null;
                ResultSet tableInfo = null;

                try {
                    conn = getConnection(Transaction.AUTO_COMMIT);

                    DatabaseMetaData dbMetaData = conn.getMetaData();

                    List attributeTypes = new ArrayList();

                    // "%" requests every column of the table
                    tableInfo = dbMetaData.getColumns(null, config
                            .getDatabaseSchemaName(), typeName, "%");

                    boolean tableInfoFound = false;

                    while (tableInfo.next()) {
                        tableInfoFound = true;

                        try {
                            String columnName = tableInfo
                                    .getString(NAME_COLUMN);

                            // skip columns that only exist to build the feature id
                            // when the mapper does not expose them as attributes
                            if (!mapper.returnFIDColumnsAsAttributes()) {
                                boolean isPresent = false;

                                for (int i = 0; i < mapper.getColumnCount(); i++) {
                                    if (columnName.equalsIgnoreCase(mapper
                                            .getColumnName(i))) {
                                        isPresent = true;

                                        break;
                                    }
                                }

                                if (isPresent) {
                                    continue;
                                }
                            }

                            // subclass hook; returns null for SQL types it cannot map
                            AttributeType attributeType = buildAttributeType(tableInfo);

                            if (attributeType != null) {
                                attributeTypes.add(attributeType);
                            } else {
                                LOGGER.finest("Unknown SQL Type: "
                                        + tableInfo.getString(TYPE_NAME));
                            }
                        } catch (DataSourceException dse) {
                            // a single bad column should not sink the whole schema
                            String msg = "Error building attribute type. The column will be ignored";
                            LOGGER.log(Level.WARNING, msg, dse);
                        }
                    }

                    // no rows at all means the table itself was not found
                    if (!tableInfoFound) {
                        throw new SchemaNotFoundException(typeName);
                    }

                    AttributeType[] types = (AttributeType[]) attributeTypes
                            .toArray(new AttributeType[0]);

                    return FeatureTypeBuilder.newFeatureType(types, typeName,
                            getNameSpace());
                } catch (SQLException sqlException) {
                    // close here so the SQLException is reported to the transaction
                    JDBCUtils
                            .close(conn, Transaction.AUTO_COMMIT, sqlException);
                    conn = null; // prevent finally block from reclosing
                    throw new DataSourceException(
                            "SQL Error building FeatureType for " + typeName
                                    + " " + sqlException.getMessage(),
                            sqlException);
                } catch (FactoryRegistryException e) {
                    throw new DataSourceException("Error creating FeatureType "
                            + typeName, e);
                } catch (SchemaException e) {
                    throw new DataSourceException(
                            "Error creating FeatureType for " + typeName, e);
                } finally {
                    JDBCUtils.close(tableInfo);
                    JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null);
                }
            }
1260:
1261:            /**
1262:             * Constructs an AttributeType from a row in a ResultSet. The ResultSet
1263:             * contains the information retrieved by a call to getColumns() on the
1264:             * DatabaseMetaData object. This information can be used to construct an
1265:             * Attribute Type.
1266:             *
1267:             * <p>
1268:             * The default implementation constructs an AttributeType using the default
1269:             * JDBC type mappings defined in JDBCDataStore. These type mappings only
1270:             * handle native Java classes and SQL standard column types, so to handle
1271:             * Geometry columns, sub classes should override this to check if a column
1272:             * is a geometry column, if it is a geometry column the appropriate
1273:             * determination of the geometry type can be performed. Otherwise,
1274:             * overriding methods should call super.buildAttributeType.
1275:             * </p>
1276:             *
1277:             * <p>
1278:             * Note: Overriding methods must never move the current row pointer in the
1279:             * result set.
1280:             * </p>
1281:             *
1282:             * @param rs
1283:             *            The ResultSet containing the result of a
1284:             *            DatabaseMetaData.getColumns call.
1285:             *
1286:             * @return The AttributeType built from the ResultSet or null if the column
1287:             *         should be excluded from the schema.
1288:             *
1289:             * @throws IOException
1290:             *             If an error occurs processing the ResultSet.
1291:             */
1292:            protected AttributeType buildAttributeType(ResultSet rs)
1293:                    throws IOException {
1294:                try {
1295:                    final int COLUMN_NAME = 4;
1296:                    final int DATA_TYPE = 5;
1297:                    final int NULLABLE = 11;
1298:
1299:                    String columnName = rs.getString(COLUMN_NAME);
1300:                    int dataType = rs.getInt(DATA_TYPE);
1301:                    Class type = (Class) TYPE_MAPPINGS
1302:                            .get(new Integer(dataType));
1303:
1304:                    //check for nullability
1305:                    int nullCode = rs.getInt(NULLABLE);
1306:                    boolean nillable = true;
1307:                    switch (nullCode) {
1308:                    case DatabaseMetaData.columnNoNulls:
1309:                        nillable = false;
1310:                        break;
1311:
1312:                    case DatabaseMetaData.columnNullable:
1313:                        nillable = true;
1314:                        break;
1315:
1316:                    case DatabaseMetaData.columnNullableUnknown:
1317:                        nillable = true;
1318:                        break;
1319:                    }
1320:
1321:                    if (type == null) {
1322:                        return null;
1323:                    } else {
1324:                        int min = nillable ? 0 : 1;
1325:                        //JD: We would like to set the nillable flag properly here, but there is a lot of 
1326:                        // code that sets the value of an attribute to be null while building a feature, 
1327:                        // think of feature readers that have to copy content, so we always set it to true,
1328:                        // perhaps with the new feature model things like this will be fished out
1329:                        //return AttributeTypeFactory.newAttributeType(columnName, type, nillable, null, null, null, min, 1 );
1330:                        return AttributeTypeFactory.newAttributeType(
1331:                                columnName, type, true, null, null, null, min,
1332:                                1);
1333:                    }
1334:                } catch (SQLException e) {
1335:                    throw new IOException("SQL exception occurred: "
1336:                            + e.getMessage());
1337:                }
1338:            }
1339:
1340:            /**
1341:             * Provides a hook for subclasses to determine the SRID of a geometry
1342:             * column.
1343:             *
1344:             * <p>
1345:             * This allows SRIDs to be determined in a Vendor specific way and to be
1346:             * cached by the default implementation. To retreive these srids, get the
1347:             * FeatureTypeInfo object for the table and call
1348:             * getSRID(geometryColumnName). This will allow storage of SRIDs for
1349:             * multiple geometry columns in each table.
1350:             * </p>
1351:             *
1352:             * <p>
1353:             * If no SRID can be found, subclasses should return -1. The default
1354:             * implementation always returns -1.
1355:             * </p>
1356:             *
1357:             * @param tableName
1358:             *            The name of the table to get the SRID for.
1359:             * @param geometryColumnName
1360:             *            The name of the geometry column within the table to get SRID
1361:             *            for.
1362:             *
1363:             * @return The SRID for the geometry column in the table or -1.
1364:             *
1365:             * @throws IOException
1366:             */
1367:            protected int determineSRID(String tableName,
1368:                    String geometryColumnName) throws IOException {
1369:                return -1;
1370:            }
1371:
1372:            /**
1373:             * Provides the default implementation of determining the FID column.
1374:             *
1375:             * <p>
1376:             * The default implementation of determining the FID column name is to use
1377:             * the primary key as the FID column. If no primary key is present, null
1378:             * will be returned. Sub classes can override this behaviour to define
1379:             * primary keys for vendor specific cases.
1380:             * </p>
1381:             *
1382:             * <p>
1383:             * There is an unresolved issue as to what to do when there are multiple
1384:             * primary keys. Maybe a restriction that table much have a single column
1385:             * primary key is appropriate.
1386:             * </p>
1387:             *
1388:             * <p>
1389:             * This should not be called by subclasses to retreive the FID column name.
1390:             * Instead, subclasses should call getFeatureTypeInfo(String) to get the
1391:             * FeatureTypeInfo for a feature type and get the fidColumn name from the
1392:             * fidColumn name memeber.
1393:             * </p>
1394:             *
1395:             * @param typeName
1396:             *            The name of the table to get a primary key for.
1397:             *
1398:             * @return The name of the primay key column or null if one does not exist.
1399:             *
1400:             * @throws IOException
1401:             *             This will only occur if there is an error getting a
1402:             *             connection to the Database.
1403:             */
1404:            protected String determineFidColumnName(String typeName)
1405:                    throws IOException {
1406:                final int NAME_COLUMN = 4;
1407:                String fidColumnName = null;
1408:                ResultSet rs = null;
1409:                Connection conn = null;
1410:
1411:                try {
1412:                    conn = getConnection(Transaction.AUTO_COMMIT);
1413:
1414:                    DatabaseMetaData dbMetadata = conn.getMetaData();
1415:                    rs = dbMetadata.getPrimaryKeys(null, null, typeName);
1416:
1417:                    if (rs.next()) {
1418:                        fidColumnName = rs.getString(NAME_COLUMN);
1419:                    }
1420:                } catch (SQLException sqlException) {
1421:                    JDBCUtils
1422:                            .close(conn, Transaction.AUTO_COMMIT, sqlException);
1423:                    conn = null; // prevent finally block from reclosing
1424:                    LOGGER
1425:                            .warning("Could not find the primary key - using the default");
1426:                } finally {
1427:                    JDBCUtils.close(rs);
1428:                    JDBCUtils.close(conn, Transaction.AUTO_COMMIT, null);
1429:                }
1430:
1431:                return fidColumnName;
1432:            }
1433:
1434:            /**
1435:             * Gets the namespace of the data store. TODO: change config over to use URI
1436:             *
1437:             * @return The namespace.
1438:             */
1439:            public URI getNameSpace() {
1440:                try {
1441:                    if (config.getNamespace() != null) {
1442:                        return new URI(config.getNamespace());
1443:                    }
1444:                } catch (URISyntaxException e) {
1445:                    LOGGER.warning("Could not use namespace "
1446:                            + config.getNamespace() + " - " + e.getMessage());
1447:
1448:                    return null;
1449:                }
1450:
1451:                return null;
1452:            }
1453:
1454:            /**
1455:             * Retrieve a FeatureWriter over entire dataset.
1456:             *
1457:             * <p>
1458:             * Quick notes: This FeatureWriter is often used to add new content, or
1459:             * perform summary calculations over the entire dataset.
1460:             * </p>
1461:             *
1462:             * <p>
1463:             * Subclass may wish to implement an optimized featureWriter for these
1464:             * operations.
1465:             * </p>
1466:             *
1467:             * <p>
1468:             * It should provide Feature for next() even when hasNext() is
1469:             * <code>false</code>.
1470:             * </p>
1471:             *
1472:             * <p>
1473:             * Subclasses are responsible for checking with the lockingManger unless
1474:             * they are providing their own locking support.
1475:             * </p>
1476:             *
1477:             * @param typeName
1478:             * @param transaction
1479:             *
1480:             *
1481:             * @throws IOException
1482:             *
1483:             * @see org.geotools.data.DataStore#getFeatureWriter(java.lang.String,
1484:             *      boolean, org.geotools.data.Transaction)
1485:             */
1486:            public FeatureWriter getFeatureWriter(String typeName,
1487:                    Transaction transaction) throws IOException {
1488:                return getFeatureWriter(typeName, Filter.INCLUDE, transaction);
1489:            }
1490:
1491:            /**
1492:             * Retrieve a FeatureWriter for creating new content.
1493:             *
1494:             * <p>
1495:             * Subclass may wish to implement an optimized featureWriter for this
1496:             * operation. One based on prepaired statemnts is a possibility, as we do
1497:             * not require a ResultSet.
1498:             * </p>
1499:             *
1500:             * <p>
1501:             * To allow new content the FeatureWriter should provide Feature for next()
1502:             * even when hasNext() is <code>false</code>.
1503:             * </p>
1504:             *
1505:             * <p>
1506:             * Subclasses are responsible for checking with the lockingManger unless
1507:             * they are providing their own locking support.
1508:             * </p>
1509:             *
1510:             * @param typeName
1511:             * @param transaction
1512:             *
1513:             *
1514:             * @throws IOException
1515:             *
1516:             * @see org.geotools.data.DataStore#getFeatureWriter(java.lang.String,
1517:             *      boolean, org.geotools.data.Transaction)
1518:             */
1519:            public FeatureWriter getFeatureWriterAppend(String typeName,
1520:                    Transaction transaction) throws IOException {
1521:                FeatureWriter writer = getFeatureWriter(typeName,
1522:                        Filter.EXCLUDE, transaction);
1523:
1524:                while (writer.hasNext()) {
1525:                    writer.next(); // this would be a use for skip then :-)
1526:                }
1527:
1528:                return writer;
1529:            }
1530:
            /**
             * Acquire FeatureWriter for modification of contents specifed by filter.
             *
             * <p>
             * Quick notes: This FeatureWriter is often used to remove contents
             * specified by the provided filter, or perform summary calculations.
             * </p>
             *
             * <p>
             * The filter is split by the SQLBuilder into a pre-filter (encoded
             * into the SQL query) and a post-filter (evaluated in memory); only
             * the pre-filter restricts the rows fetched here.
             * </p>
             *
             * <p>
             * Subclasses are responsible for checking with the lockingManager unless
             * they are providing their own locking support.
             * </p>
             *
             * @param typeName
             * @param filter
             * @param transaction
             *
             *
             * @throws IOException
             *             If typeName could not be located
             * @throws NullPointerException
             *             If the provided filter is null
             * @throws DataSourceException
             */
            public FeatureWriter getFeatureWriter(String typeName,
                    org.opengis.filter.Filter filter, Transaction transaction)
                    throws IOException {
                // Fail fast on null arguments with hints at the intended sentinels.
                if (filter == null) {
                    throw new NullPointerException(
                            "getFeatureReader requires Filter: "
                                    + "did you mean Filter.INCLUDE?");
                }

                if (transaction == null) {
                    throw new NullPointerException(
                            "getFeatureReader requires Transaction: "
                                    + "did you mean Transaction.AUTO_COMMIT");
                }

                FeatureType featureType = getSchema(typeName);
                FeatureTypeInfo info = typeHandler.getFeatureTypeInfo(typeName);
                LOGGER.fine("getting feature writer for " + typeName + ": "
                        + info);

                // Split the filter: preFilter is pushed down into SQL, postFilter
                // would have to be evaluated client-side (but see note below —
                // it is intentionally NOT wrapped around the writer).
                SQLBuilder sqlBuilder = getSqlBuilder(typeName);
                org.opengis.filter.Filter preFilter = sqlBuilder
                        .getPreQueryFilter(filter);
                org.opengis.filter.Filter postFilter = sqlBuilder
                        .getPostQueryFilter(filter);
                Query query = new DefaultQuery(typeName, preFilter);
                String sqlQuery;

                try {
                    sqlQuery = constructQuery(query, getAttributeTypes(
                            typeName, propertyNames(query)));
                } catch (SchemaException e) {
                    throw new DataSourceException(
                            "Some Attribute Names were specified that"
                                    + " do not exist in the FeatureType "
                                    + typeName + ". " + "Requested names: "
                                    + Arrays.asList(query.getPropertyNames())
                                    + ", " + "FeatureType: " + featureType, e);
                }

                // Execute with forWrite=true so the underlying ResultSet is updatable.
                QueryData queryData = executeQuery(typeHandler
                        .getFeatureTypeInfo(typeName), typeName, sqlQuery,
                        transaction, true, null);
                FeatureReader reader = createFeatureReader(info.getSchema(),
                        postFilter, queryData);
                FeatureWriter writer = createFeatureWriter(reader, queryData);

                // Respect in-process locks when the default locking manager is in use.
                if ((getLockingManager() != null)
                        && getLockingManager() instanceof  InProcessLockingManager) {
                    InProcessLockingManager inProcess = (InProcessLockingManager) getLockingManager();
                    writer = inProcess.checkedWriter(writer, transaction);
                }

                // chorner: writer shouldn't have a wrapped post filter, otherwise one can't add features.
                // if ((postFilter != null) && (postFilter != Filter.INCLUDE)) {
                //     writer = new FilteringFeatureWriter(writer, postFilter);
                // }

                return writer;
            }
1615:
1616:            protected JDBCFeatureWriter createFeatureWriter(
1617:                    FeatureReader reader, QueryData queryData)
1618:                    throws IOException {
1619:                LOGGER.fine("returning jdbc feature writer");
1620:
1621:                JDBCFeatureWriter featureWriter = new JDBCFeatureWriter(reader,
1622:                        queryData);
1623:                return featureWriter;
1624:            }
1625:
1626:            /**
1627:             * Get propertyNames in a safe manner.
1628:             *
1629:             * <p>
1630:             * Method will figure out names from the schema for query.getTypeName(), if
1631:             * query getPropertyNames() is <code>null</code>, or
1632:             * query.retrieveAllProperties is <code>true</code>.
1633:             * </p>
1634:             *
1635:             * @param query
1636:             *
1637:             *
1638:             * @throws IOException
1639:             */
1640:            protected String[] propertyNames(Query query) throws IOException {
1641:                String[] names = query.getPropertyNames();
1642:
1643:                if ((names == null) || query.retrieveAllProperties()) {
1644:                    String typeName = query.getTypeName();
1645:                    FeatureType schema = getSchema(typeName);
1646:
1647:                    names = new String[schema.getAttributeCount()];
1648:
1649:                    for (int i = 0; i < schema.getAttributeCount(); i++) {
1650:                        names[i] = schema.getAttributeType(i).getName();
1651:                    }
1652:                }
1653:
1654:                return names;
1655:            }
1656:
1657:            /**
1658:             * Gets the attribute types from from a given type.
1659:             *
1660:             * @param typeName
1661:             *            The name of the feature type to get the AttributeTypes for.
1662:             * @param propertyNames
1663:             *            The list of propertyNames to get AttributeTypes for.
1664:             *
1665:             * @return the array of attribute types from the schema which match
1666:             *         propertyNames.
1667:             *
1668:             * @throws IOException
1669:             *             If we can't get the schema.
1670:             * @throws SchemaException
1671:             *             if query contains a propertyName that is not a part of this
1672:             *             type's schema.
1673:             */
1674:            protected final AttributeType[] getAttributeTypes(String typeName,
1675:                    String[] propertyNames) throws IOException, SchemaException {
1676:                FeatureType schema = getSchema(typeName);
1677:                AttributeType[] types = new AttributeType[propertyNames.length];
1678:
1679:                for (int i = 0; i < propertyNames.length; i++) {
1680:                    types[i] = schema.getAttributeType(propertyNames[i]);
1681:
1682:                    if (types[i] == null) {
1683:                        throw new SchemaException(typeName
1684:                                + " does not contain requested "
1685:                                + propertyNames[i] + " attribute");
1686:                    }
1687:                }
1688:
1689:                return types;
1690:            }
1691:
1692:            /**
1693:             * Locking manager used for this DataStore.
1694:             *
1695:             * <p>
1696:             * By default AbstractDataStore makes use of InProcessLockingManager.
1697:             * </p>
1698:             *
1699:             *
1700:             * @see org.geotools.data.DataStore#getLockingManager()
1701:             */
1702:            public LockingManager getLockingManager() {
1703:                return lockingManager;
1704:            }
1705:
1706:            /**
1707:             * Sets the FIDMapper for a specific type name
1708:             *
1709:             * @param featureTypeName
1710:             * @param fidMapper
1711:             */
1712:            public void setFIDMapper(String featureTypeName, FIDMapper fidMapper) {
1713:                typeHandler.setFIDMapper(featureTypeName, fidMapper);
1714:            }
1715:
1716:            /**
1717:             * Returns the FIDMapperFactory used for this data store
1718:             *
1719:             */
1720:            public FIDMapperFactory getFIDMapperFactory() {
1721:                return typeHandler.getFIDMapperFactory();
1722:            }
1723:
1724:            /**
1725:             * Allows to override the default FIDMapperFactory.
1726:             *
1727:             * <p>
1728:             * Warning: the ovveride may not be supported by all data stores, in this
1729:             * case an exception will be thrown
1730:             * </p>
1731:             *
1732:             * @param fmFactory
1733:             *
1734:             * @throws UnsupportedOperationException -
1735:             *             if the datastore does not allow the factory override
1736:             */
1737:            public void setFIDMapperFactory(FIDMapperFactory fmFactory)
1738:                    throws UnsupportedOperationException {
1739:                typeHandler.setFIDMapperFactory(fmFactory);
1740:            }
1741:
1742:            /**
1743:             * returns true if the requested names list all the attributes in the
1744:             * correct order.
1745:             *
1746:             * @param requestedNames
1747:             * @param ft
1748:             */
1749:            public boolean allSameOrder(String[] requestedNames, FeatureType ft) {
1750:                if (requestedNames.length != ft.getAttributeCount())
1751:                    return false; // incorrect # of attribute
1752:                for (int t = 0; t < requestedNames.length; t++) {
1753:                    if (!(requestedNames[t].equals(ft.getAttributeType(t)
1754:                            .getName())))
1755:                        return false; // name doesnt match
1756:                }
1757:                return true;
1758:            }
1759:
1760:            /**
1761:             * Retrieve approx bounds of all Features.
1762:             * <p>
1763:             * This result is suitable for a quick map display, illustrating the data.
1764:             * This value is often stored as metadata in databases such as oraclespatial.
1765:             * </p>
1766:             * @return null as a generic implementation is not provided.
1767:             */
1768:            public Envelope getEnvelope(String typeName) {
1769:                return null;
1770:            }
1771:
1772:            private static final Set BASE_HINTS = Collections
1773:                    .unmodifiableSet(new HashSet(Arrays
1774:                            .asList(new Object[] { Hints.FEATURE_DETACHED })));
1775:
1776:            public Set getSupportedHints() {
1777:                return BASE_HINTS;
1778:            }
1779:        }
www.java2java.com | Contact Us
Copyright 2009 - 12 Demo Source and Support. All rights reserved.
All other trademarks are property of their respective owners.