001/*
002 * ModeShape (http://www.modeshape.org)
003 *
004 * Licensed under the Apache License, Version 2.0 (the "License");
005 * you may not use this file except in compliance with the License.
006 * You may obtain a copy of the License at
007 *
008 *       http://www.apache.org/licenses/LICENSE-2.0
009 *
010 * Unless required by applicable law or agreed to in writing, software
011 * distributed under the License is distributed on an "AS IS" BASIS,
012 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
013 * See the License for the specific language governing permissions and
014 * limitations under the License.
015 */
016package org.modeshape.sequencer.ddl.dialect.postgres;
017
018import static org.modeshape.sequencer.ddl.StandardDdlLexicon.ALL_PRIVILEGES;
019import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DDL_EXPRESSION;
020import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DDL_LENGTH;
021import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DDL_ORIGINAL_EXPRESSION;
022import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DDL_START_CHAR_INDEX;
023import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DDL_START_COLUMN_NUMBER;
024import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DDL_START_LINE_NUMBER;
025import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DEFAULT_ID_CURRENT_USER;
026import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DEFAULT_ID_DATETIME;
027import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DEFAULT_ID_LITERAL;
028import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DEFAULT_ID_NULL;
029import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DEFAULT_ID_SESSION_USER;
030import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DEFAULT_ID_SYSTEM_USER;
031import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DEFAULT_ID_USER;
032import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DEFAULT_OPTION;
033import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DEFAULT_PRECISION;
034import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DEFAULT_VALUE;
035import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DROP_BEHAVIOR;
036import static org.modeshape.sequencer.ddl.StandardDdlLexicon.GRANTEE;
037import static org.modeshape.sequencer.ddl.StandardDdlLexicon.GRANT_PRIVILEGE;
038import static org.modeshape.sequencer.ddl.StandardDdlLexicon.NEW_NAME;
039import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE;
040import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_ALTER_COLUMN_DEFINITION;
041import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_COLUMN_DEFINITION;
042import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_COLUMN_REFERENCE;
043import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_TABLE_STATEMENT;
044import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_COLUMN_DEFINITION;
045import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_DOMAIN_STATEMENT;
046import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_SCHEMA_STATEMENT;
047import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_TABLE_CONSTRAINT_DEFINITION;
048import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_TABLE_STATEMENT;
049import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_VIEW_STATEMENT;
050import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_GRANT_ON_TABLE_STATEMENT;
051import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_MISSING_TERMINATOR;
052import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_STATEMENT_OPTION;
053import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_UNKNOWN_STATEMENT;
054import static org.modeshape.sequencer.ddl.StandardDdlLexicon.VALUE;
055import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.FUNCTION_PARAMETER_MODE;
056import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.ROLE;
057import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.SCHEMA_NAME;
058import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_ABORT_STATEMENT;
059import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_ALTER_AGGREGATE_STATEMENT;
060import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_ALTER_CONVERSION_STATEMENT;
061import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_ALTER_DATABASE_STATEMENT;
062import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_ALTER_FOREIGN_DATA_WRAPPER_STATEMENT;
063import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_ALTER_FUNCTION_STATEMENT;
064import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_ALTER_GROUP_STATEMENT;
065import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_ALTER_INDEX_STATEMENT;
066import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_ALTER_LANGUAGE_STATEMENT;
067import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_ALTER_OPERATOR_STATEMENT;
068import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_ALTER_ROLE_STATEMENT;
069import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_ALTER_SCHEMA_STATEMENT;
070import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_ALTER_SEQUENCE_STATEMENT;
071import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_ALTER_SERVER_STATEMENT;
072import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_ALTER_TABLESPACE_STATEMENT;
073import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_ALTER_TABLE_STATEMENT_POSTGRES;
074import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_ALTER_TEXT_SEARCH_STATEMENT;
075import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_ALTER_TRIGGER_STATEMENT;
076import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_ALTER_TYPE_STATEMENT;
077import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_ALTER_USER_MAPPING_STATEMENT;
078import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_ALTER_USER_STATEMENT;
079import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_ALTER_VIEW_STATEMENT;
080import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_ANALYZE_STATEMENT;
081import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_CLUSTER_STATEMENT;
082import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_COMMENT_ON_STATEMENT;
083import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_COMMIT_STATEMENT;
084import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_COPY_STATEMENT;
085import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_CREATE_AGGREGATE_STATEMENT;
086import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_CREATE_CAST_STATEMENT;
087import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_CREATE_CONSTRAINT_TRIGGER_STATEMENT;
088import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_CREATE_CONVERSION_STATEMENT;
089import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_CREATE_DATABASE_STATEMENT;
090import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_CREATE_FOREIGN_DATA_WRAPPER_STATEMENT;
091import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_CREATE_FUNCTION_STATEMENT;
092import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_CREATE_GROUP_STATEMENT;
093import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_CREATE_INDEX_STATEMENT;
094import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_CREATE_LANGUAGE_STATEMENT;
095import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_CREATE_OPERATOR_STATEMENT;
096import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_CREATE_ROLE_STATEMENT;
097import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_CREATE_RULE_STATEMENT;
098import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_CREATE_SEQUENCE_STATEMENT;
099import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_CREATE_SERVER_STATEMENT;
100import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_CREATE_TABLESPACE_STATEMENT;
101import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_CREATE_TEXT_SEARCH_STATEMENT;
102import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_CREATE_TRIGGER_STATEMENT;
103import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_CREATE_TYPE_STATEMENT;
104import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_CREATE_USER_MAPPING_STATEMENT;
105import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_CREATE_USER_STATEMENT;
106import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_DEALLOCATE_STATEMENT;
107import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_DECLARE_STATEMENT;
108import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_DROP_AGGREGATE_STATEMENT;
109import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_DROP_CAST_STATEMENT;
110import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_DROP_CONSTRAINT_TRIGGER_STATEMENT;
111import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_DROP_CONVERSION_STATEMENT;
112import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_DROP_DATABASE_STATEMENT;
113import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_DROP_FOREIGN_DATA_WRAPPER_STATEMENT;
114import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_DROP_FUNCTION_STATEMENT;
115import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_DROP_GROUP_STATEMENT;
116import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_DROP_INDEX_STATEMENT;
117import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_DROP_LANGUAGE_STATEMENT;
118import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_DROP_OPERATOR_STATEMENT;
119import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_DROP_OWNED_BY_STATEMENT;
120import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_DROP_ROLE_STATEMENT;
121import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_DROP_RULE_STATEMENT;
122import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_DROP_SEQUENCE_STATEMENT;
123import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_DROP_SERVER_STATEMENT;
124import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_DROP_TABLESPACE_STATEMENT;
125import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_DROP_TEXT_SEARCH_STATEMENT;
126import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_DROP_TRIGGER_STATEMENT;
127import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_DROP_TYPE_STATEMENT;
128import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_DROP_USER_MAPPING_STATEMENT;
129import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_DROP_USER_STATEMENT;
130import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_EXPLAIN_STATEMENT;
131import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_FETCH_STATEMENT;
132import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_FUNCTION_PARAMETER;
133import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_GRANT_ON_DATABASE_STATEMENT;
134import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_GRANT_ON_FOREIGN_DATA_WRAPPER_STATEMENT;
135import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_GRANT_ON_FOREIGN_SERVER_STATEMENT;
136import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_GRANT_ON_FUNCTION_STATEMENT;
137import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_GRANT_ON_LANGUAGE_STATEMENT;
138import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_GRANT_ON_SCHEMA_STATEMENT;
139import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_GRANT_ON_SEQUENCE_STATEMENT;
140import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_GRANT_ON_TABLESPACE_STATEMENT;
141import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_GRANT_ROLES_STATEMENT;
142import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_LISTEN_STATEMENT;
143import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_LOAD_STATEMENT;
144import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_LOCK_TABLE_STATEMENT;
145import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_MOVE_STATEMENT;
146import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_NOTIFY_STATEMENT;
147import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_PREPARE_STATEMENT;
148import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_REASSIGN_OWNED_STATEMENT;
149import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_REINDEX_STATEMENT;
150import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_RELEASE_SAVEPOINT_STATEMENT;
151import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_RENAME_COLUMN;
152import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_ROLLBACK_STATEMENT;
153import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_SELECT_INTO_STATEMENT;
154import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_SHOW_STATEMENT;
155import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_TRUNCATE_STATEMENT;
156import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_UNLISTEN_STATEMENT;
157import static org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlLexicon.TYPE_VACUUM_STATEMENT;
158import java.util.ArrayList;
159import java.util.List;
160import org.modeshape.common.logging.Logger;
161import org.modeshape.common.text.ParsingException;
162import org.modeshape.sequencer.ddl.DdlParserProblem;
163import org.modeshape.sequencer.ddl.DdlSequencerI18n;
164import org.modeshape.sequencer.ddl.DdlTokenStream;
165import org.modeshape.sequencer.ddl.DdlTokenStream.DdlTokenizer;
166import org.modeshape.sequencer.ddl.StandardDdlParser;
167import org.modeshape.sequencer.ddl.datatype.DataType;
168import org.modeshape.sequencer.ddl.datatype.DataTypeParser;
169import org.modeshape.sequencer.ddl.node.AstNode;
170
171/**
172 * Postgres-specific DDL Parser. Includes custom data types as well as custom DDL statements.
173 */
174public class PostgresDdlParser extends StandardDdlParser
175    implements PostgresDdlConstants, PostgresDdlConstants.PostgresStatementStartPhrases {
176
    private static final Logger LOGGER = Logger.getLogger(PostgresDdlParser.class);

    /**
     * The Postgres parser identifier.
     */
    public static final String ID = "POSTGRES";

    // Shared, mutable static list of custom data type start phrases. It is populated from the per-instance
    // initialize() method, so it is effectively filled in when the first parser instance is constructed.
    static List<String[]> postgresDataTypeStrings = new ArrayList<String[]>();

    // SQL COMMANDS FOUND @ http://www.postgresql.org/docs/8.4/static/sql-commands.html

    // Statement terminator used by the Postgres dialect.
    private static final String TERMINATOR = ";";
189
    /**
     * Creates a Postgres DDL parser that uses the Postgres-specific {@link PostgresDataTypeParser} and the Postgres
     * statement terminator.
     */
    public PostgresDdlParser() {
        setDatatypeParser(new PostgresDataTypeParser());
        // NOTE(review): initialize() is a private method calling inherited setters; keep it non-overridable since it
        // runs during construction.
        initialize();
    }
194
195    private void initialize() {
196
197        setDoUseTerminator(true);
198
199        setTerminator(TERMINATOR);
200
201        postgresDataTypeStrings.addAll(PostgresDataTypes.CUSTOM_DATATYPE_START_PHRASES);
202    }
203
204    @Override
205    public String getId() {
206        return ID;
207    }
208
    /**
     * {@inheritDoc}
     * <p>
     * In addition to the standard registrations performed by the superclass, registers the Postgres custom keywords,
     * the custom data type start words, and the ALTER/CREATE/DROP/SET/MISC statement start phrases.
     */
    @Override
    protected void initializeTokenStream( DdlTokenStream tokens ) {
        super.initializeTokenStream(tokens);
        tokens.registerKeyWords(CUSTOM_KEYWORDS);
        tokens.registerKeyWords(PostgresDataTypes.CUSTOM_DATATYPE_START_WORDS);
        tokens.registerStatementStartPhrase(ALTER_PHRASES);
        tokens.registerStatementStartPhrase(CREATE_PHRASES);
        tokens.registerStatementStartPhrase(DROP_PHRASES);
        tokens.registerStatementStartPhrase(SET_PHRASES);
        tokens.registerStatementStartPhrase(MISC_PHRASES);
    }
220
    /**
     * {@inheritDoc}
     * <p>
     * Performs two Postgres-specific merges on the children of the root node: (1) when a PREPARE statement is
     * immediately followed by a "missing terminator" node, the next statement is merged into the PREPARE node; and
     * (2) after the standard rewrite, "unknown statement" nodes that directly follow a "complex" statement (one that
     * may contain nested statements, e.g. CREATE FUNCTION) are merged into that complex node.
     */
    @Override
    protected void rewrite( DdlTokenStream tokens,
                            AstNode rootNode ) {
        assert tokens != null;
        assert rootNode != null;

        // We may have a prepare statement that is followed by a missing terminator node

        List<AstNode> copyOfNodes = new ArrayList<AstNode>(rootNode.getChildren());
        AstNode prepareNode = null;
        boolean mergeNextStatement = false;
        for (AstNode child : copyOfNodes) {
            // A previous iteration flagged a merge: fold this statement into the PREPARE node and detach it.
            if (prepareNode != null && mergeNextStatement) {
                mergeNodes(tokens, prepareNode, child);
                rootNode.removeChild(child);
                prepareNode = null;
            }
            // A missing-terminator node right after a PREPARE statement means the next child belongs to the PREPARE.
            if (prepareNode != null && nodeFactory().hasMixinType(child, TYPE_MISSING_TERMINATOR)) {
                mergeNextStatement = true;
            } else {
                mergeNextStatement = false;
            }
            if (nodeFactory().hasMixinType(child, TYPE_PREPARE_STATEMENT)) {
                prepareNode = child;
            }
        }

        super.rewrite(tokens, rootNode); // Removes all extra "missing terminator" nodes

        // Now we need to walk the tree again looking for unknown nodes under the root
        // and attach them to the previous node, assuming the node can contain multiple nested statements.
        // CREATE FUNCTION is one of those types

        copyOfNodes = new ArrayList<AstNode>(rootNode.getChildren());
        boolean foundComplexNode = false;
        AstNode complexNode = null;
        for (AstNode child : copyOfNodes) {
            if (matchesComplexNode(child)) {
                foundComplexNode = true;
                complexNode = child;
            } else if (foundComplexNode) {
                if (complexNode != null && nodeFactory().hasMixinType(child, TYPE_UNKNOWN_STATEMENT)) {
                    mergeNodes(tokens, complexNode, child);
                    rootNode.removeChild(child);
                } else {
                    // The run of unknown statements ended; stop merging until the next complex node is found.
                    foundComplexNode = false;
                    complexNode = null;
                }
            }
        }
    }
272
273    private boolean matchesComplexNode( AstNode node ) {
274        for (String mixin : COMPLEX_STMT_TYPES) {
275            if (nodeFactory().hasMixinType(node, mixin)) {
276                return true;
277            }
278        }
279
280        return false;
281    }
282
    /**
     * {@inheritDoc}
     * <p>
     * Dispatches the Postgres-specific ALTER statements (AGGREGATE, CONVERSION, DATABASE, FOREIGN DATA WRAPPER,
     * FUNCTION, GROUP, INDEX, LANGUAGE, OPERATOR, ROLE, SCHEMA, SEQUENCE, SERVER, TABLESPACE, TEXT SEARCH, TRIGGER,
     * TYPE, USER MAPPING, USER, and VIEW) to {@code parseStatement}, and delegates everything else (e.g. ALTER TABLE)
     * to the standard parser.
     * <p>
     * NOTE: the order of checks matters where one start phrase is a prefix of another — ALTER USER MAPPING is tested
     * before ALTER USER.
     */
    @Override
    protected AstNode parseAlterStatement( DdlTokenStream tokens,
                                           AstNode parentNode ) throws ParsingException {
        assert tokens != null;
        assert parentNode != null;

        if (tokens.matches(STMT_ALTER_AGGREGATE)) {
            return parseStatement(tokens, STMT_ALTER_AGGREGATE, parentNode, TYPE_ALTER_AGGREGATE_STATEMENT);
        } else if (tokens.matches(STMT_ALTER_CONVERSION)) {
            return parseStatement(tokens, STMT_ALTER_CONVERSION, parentNode, TYPE_ALTER_CONVERSION_STATEMENT);
        } else if (tokens.matches(STMT_ALTER_DATABASE)) {
            return parseStatement(tokens, STMT_ALTER_DATABASE, parentNode, TYPE_ALTER_DATABASE_STATEMENT);
        } else if (tokens.matches(STMT_ALTER_FOREIGN_DATA_WRAPPER)) {
            return parseStatement(tokens, STMT_ALTER_FOREIGN_DATA_WRAPPER, parentNode, TYPE_ALTER_FOREIGN_DATA_WRAPPER_STATEMENT);
        } else if (tokens.matches(STMT_ALTER_FUNCTION)) {
            return parseStatement(tokens, STMT_ALTER_FUNCTION, parentNode, TYPE_ALTER_FUNCTION_STATEMENT);
        } else if (tokens.matches(STMT_ALTER_GROUP)) {
            return parseStatement(tokens, STMT_ALTER_GROUP, parentNode, TYPE_ALTER_GROUP_STATEMENT);
        } else if (tokens.matches(STMT_ALTER_INDEX)) {
            return parseStatement(tokens, STMT_ALTER_INDEX, parentNode, TYPE_ALTER_INDEX_STATEMENT);
        } else if (tokens.matches(STMT_ALTER_LANGUAGE)) {
            return parseStatement(tokens, STMT_ALTER_LANGUAGE, parentNode, TYPE_ALTER_LANGUAGE_STATEMENT);
        } else if (tokens.matches(STMT_ALTER_OPERATOR)) {
            return parseStatement(tokens, STMT_ALTER_OPERATOR, parentNode, TYPE_ALTER_OPERATOR_STATEMENT);
        } else if (tokens.matches(STMT_ALTER_ROLE)) {
            return parseStatement(tokens, STMT_ALTER_ROLE, parentNode, TYPE_ALTER_ROLE_STATEMENT);
        } else if (tokens.matches(STMT_ALTER_SCHEMA)) {
            return parseStatement(tokens, STMT_ALTER_SCHEMA, parentNode, TYPE_ALTER_SCHEMA_STATEMENT);
        } else if (tokens.matches(STMT_ALTER_SEQUENCE)) {
            return parseStatement(tokens, STMT_ALTER_SEQUENCE, parentNode, TYPE_ALTER_SEQUENCE_STATEMENT);
        } else if (tokens.matches(STMT_ALTER_SERVER)) {
            return parseStatement(tokens, STMT_ALTER_SERVER, parentNode, TYPE_ALTER_SERVER_STATEMENT);
        } else if (tokens.matches(STMT_ALTER_TABLESPACE)) {
            return parseStatement(tokens, STMT_ALTER_TABLESPACE, parentNode, TYPE_ALTER_TABLESPACE_STATEMENT);
        } else if (tokens.matches(STMT_ALTER_TEXT_SEARCH)) {
            return parseStatement(tokens, STMT_ALTER_TEXT_SEARCH, parentNode, TYPE_ALTER_TEXT_SEARCH_STATEMENT);
        } else if (tokens.matches(STMT_ALTER_TRIGGER)) {
            return parseStatement(tokens, STMT_ALTER_TRIGGER, parentNode, TYPE_ALTER_TRIGGER_STATEMENT);
        } else if (tokens.matches(STMT_ALTER_TYPE)) {
            return parseStatement(tokens, STMT_ALTER_TYPE, parentNode, TYPE_ALTER_TYPE_STATEMENT);
        } else if (tokens.matches(STMT_ALTER_USER_MAPPING)) { // must precede the ALTER USER check (prefix phrase)
            return parseStatement(tokens, STMT_ALTER_USER_MAPPING, parentNode, TYPE_ALTER_USER_MAPPING_STATEMENT);
        } else if (tokens.matches(STMT_ALTER_USER)) {
            return parseStatement(tokens, STMT_ALTER_USER, parentNode, TYPE_ALTER_USER_STATEMENT);
        } else if (tokens.matches(STMT_ALTER_VIEW)) {
            return parseStatement(tokens, STMT_ALTER_VIEW, parentNode, TYPE_ALTER_VIEW_STATEMENT);
        }

        // Not a Postgres-specific ALTER statement; let the standard parser handle it (e.g. ALTER TABLE, ALTER DOMAIN).
        return super.parseAlterStatement(tokens, parentNode);

    }
334
    /**
     * {@inheritDoc}
     * <p>
     * Parses a Postgres ALTER TABLE statement of the form {@code ALTER TABLE [ ONLY ] name [ * ] action [, ... ]}:
     * one node typed {@code TYPE_ALTER_TABLE_STATEMENT_POSTGRES} is created under the parent, named after the table,
     * and each comma-separated action clause is parsed into it by {@code parseAlterTableAction}.
     */
    @Override
    protected AstNode parseAlterTableStatement( DdlTokenStream tokens,
                                                AstNode parentNode ) throws ParsingException {
        assert tokens != null;
        assert parentNode != null;

        markStartOfStatement(tokens);
        // TODO: Need to flesh out and store more info on alterTableStatement properties

        // NOTE: Not sure the rules of Postgres here. It appears that you can have comma separated clauses
        // but can't find many examples. Also don't know if you can mix clause types
        // EXAMPLE:
        //
        // ALTER TABLE distributors
        // ALTER COLUMN address TYPE varchar(80),
        // DROP COLUMN name RESTRICTED;

        // --ALTER TABLE [ ONLY ] name [ * ]
        // -- action [, ... ]

        // --where action is one of:
        // -- ADD [ COLUMN ] column type [ column_constraint [ ... ] ]
        // -- DROP [ COLUMN ] column [ RESTRICT | CASCADE ]
        // -- ALTER [ COLUMN ] column [ SET DATA ] TYPE type [ USING expression ]
        // -- ALTER [ COLUMN ] column SET DEFAULT expression
        // -- ALTER [ COLUMN ] column DROP DEFAULT
        // -- ALTER [ COLUMN ] column { SET | DROP } NOT NULL
        // -- ALTER [ COLUMN ] column SET STATISTICS integer
        // -- ALTER [ COLUMN ] column SET STORAGE { PLAIN | EXTERNAL | EXTENDED | MAIN }
        // -- ADD table_constraint
        // -- DROP CONSTRAINT constraint_name [ RESTRICT | CASCADE ]
        // -- DISABLE TRIGGER [ trigger_name | ALL | USER ]
        // -- ENABLE TRIGGER [ trigger_name | ALL | USER ]
        // -- ENABLE REPLICA TRIGGER trigger_name
        // -- ENABLE ALWAYS TRIGGER trigger_name
        // -- DISABLE RULE rewrite_rule_name
        // -- ENABLE RULE rewrite_rule_name
        // -- ENABLE REPLICA RULE rewrite_rule_name
        // -- ENABLE ALWAYS RULE rewrite_rule_name
        // -- CLUSTER ON index_name
        // -- SET WITHOUT CLUSTER
        // -- SET WITH OIDS
        // -- SET WITHOUT OIDS
        // -- SET ( storage_parameter = value [, ... ] )
        // -- RESET ( storage_parameter [, ... ] )
        // -- INHERIT parent_table
        // -- NO INHERIT parent_table
        // -- OWNER TO new_owner
        // -- SET TABLESPACE new_tablespace
        // =========== MISC.............
        // --ALTER TABLE [ ONLY ] name [ * ]
        // -- RENAME [ COLUMN ] column TO new_column
        // --ALTER TABLE name
        // -- RENAME TO new_name
        // --ALTER TABLE name new_tablespace
        // -- SET SCHEMA new_schema

        tokens.consume(); // consumes 'ALTER'
        tokens.consume("TABLE");

        tokens.canConsume("ONLY"); // optional ONLY keyword
        String tableName = parseName(tokens);
        tokens.canConsume("*"); // optional '*' after the table name

        AstNode alterTableNode = nodeFactory().node(tableName, parentNode, TYPE_ALTER_TABLE_STATEMENT_POSTGRES);

        // Each comma-separated clause becomes one action parsed under the ALTER TABLE node.
        do {
            parseAlterTableAction(tokens, alterTableNode);
        } while (tokens.canConsume(COMMA));

        markEndOfStatement(tokens, alterTableNode);

        return alterTableNode;
    }
410
411    private void parseAlterTableAction( DdlTokenStream tokens,
412                                        AstNode alterTableNode ) throws ParsingException {
413        assert tokens != null;
414        assert alterTableNode != null;
415
416        if (tokens.canConsume("ADD")) { // ADD COLUMN
417            if (isTableConstraint(tokens)) {
418                parseTableConstraint(tokens, alterTableNode, true);
419            } else {
420                parseSingleCommaTerminatedColumnDefinition(tokens, alterTableNode, true);
421            }
422
423        } else if (tokens.canConsume("DROP")) { // DROP CONSTRAINT & DROP COLUMN
424            if (tokens.canConsume("CONSTRAINT")) {
425                String constraintName = parseName(tokens); // constraint name
426
427                AstNode constraintNode = nodeFactory().node(constraintName, alterTableNode, TYPE_DROP_TABLE_CONSTRAINT_DEFINITION);
428
429                if (tokens.canConsume(DropBehavior.CASCADE)) {
430                    constraintNode.setProperty(DROP_BEHAVIOR, DropBehavior.CASCADE);
431                } else if (tokens.canConsume(DropBehavior.RESTRICT)) {
432                    constraintNode.setProperty(DROP_BEHAVIOR, DropBehavior.RESTRICT);
433                }
434            } else {
435                // ALTER TABLE supplier
436                // DROP COLUMN supplier_name;
437                tokens.canConsume("COLUMN"); // "COLUMN" is optional
438
439                String columnName = parseName(tokens);
440
441                AstNode columnNode = nodeFactory().node(columnName, alterTableNode, TYPE_DROP_COLUMN_DEFINITION);
442
443                if (tokens.canConsume(DropBehavior.CASCADE)) {
444                    columnNode.setProperty(DROP_BEHAVIOR, DropBehavior.CASCADE);
445                } else if (tokens.canConsume(DropBehavior.RESTRICT)) {
446                    columnNode.setProperty(DROP_BEHAVIOR, DropBehavior.RESTRICT);
447                }
448            }
449        } else if (tokens.matches("ALTER")) {
450            // -- ALTER [ COLUMN ] column SET STORAGE { PLAIN | EXTERNAL | EXTENDED | MAIN }
451            // -- ALTER [ COLUMN ] columnnew_tablespace SET STATISTICS integer
452            // -- ALTER [ COLUMN ] column DROP DEFAULT
453            // -- ALTER [ COLUMN ] column [ SET DATA ] TYPE type [ USING expression ]
454            // -- ALTER [ COLUMN ] column SET DEFAULT expression
455            // -- ALTER [ COLUMN ] column { SET | DROP } NOT NULL
456
457            tokens.consume("ALTER");
458            tokens.canConsume("COLUMN");
459            String columnName = parseName(tokens);
460
461            AstNode columnNode = nodeFactory().node(columnName, alterTableNode, TYPE_ALTER_COLUMN_DEFINITION);
462
463            if (tokens.canConsume("SET", "STORAGE")) {
464                tokens.consume(); // { PLAIN | EXTERNAL | EXTENDED | MAIN }
465            } else if (tokens.canConsume("SET", "STATISTICS")) {
466                tokens.consume(); // integer
467            } else if (tokens.canConsume("DROP", "DEFAULT")) {
468
469            } else if (tokens.canConsume("SET", "DATA")) {
470                tokens.consume("TYPE");
471                DataType datatype = getDatatypeParser().parse(tokens);
472
473                getDatatypeParser().setPropertiesOnNode(columnNode, datatype);
474
475                if (tokens.canConsume("USING")) {
476                    // TODO: Not storing the following expression in properties.
477                    parseUntilCommaOrTerminator(tokens);
478                }
479            } else if (tokens.canConsume("TYPE")) {
480                DataType datatype = getDatatypeParser().parse(tokens);
481
482                getDatatypeParser().setPropertiesOnNode(columnNode, datatype);
483
484                if (tokens.canConsume("USING")) {
485                    // TODO: Not storing the following expression in properties.
486                    parseUntilCommaOrTerminator(tokens);
487                }
488            } else if (tokens.matches("SET", "DEFAULT")) {
489                tokens.consume("SET");
490                parseDefaultClause(tokens, columnNode);
491            } else if (tokens.matches("SET") || tokens.matches("DROP")) {
492                tokens.consume(); // { SET | DROP }
493                tokens.canConsume("NOT", "NULL");
494                tokens.canConsume("NULL");
495            } else {
496                LOGGER.debug("  WARNING:  Option not found for ALTER TABLE - ALTER COLUMN. Check your DDL for incomplete statement.");
497            }
498
499        } else if (tokens.canConsume("ENABLE")) {
500            AstNode optionNode = nodeFactory().node("action", alterTableNode, TYPE_STATEMENT_OPTION);
501            StringBuilder sb = new StringBuilder("ENABLE");
502            // -- ENABLE TRIGGER [ trigger_name | ALL | USER ]
503            // -- ENABLE REPLICA TRIGGER trigger_name
504            // -- ENABLE REPLICA RULE rewrite_rule_name
505            // -- ENABLE ALWAYS TRIGGER trigger_name
506            // -- ENABLE ALWAYS RULE rewrite_rule_name
507            // -- ENABLE RULE rewrite_rule_name
508            if (tokens.canConsume("TRIGGER")) {
509                sb.append(SPACE).append("TRIGGER");
510                if (!tokens.matches(getTerminator())) {
511                    sb.append(SPACE).append(parseName(tokens)); // [ trigger_name | ALL | USER ]
512                }
513            } else if (tokens.canConsume("REPLICA", "TRIGGER")) {
514                sb.append(SPACE).append("REPLICA TRIGGER");
515                sb.append(SPACE).append(parseName(tokens)); // trigger_name
516            } else if (tokens.canConsume("REPLICA", "RULE")) {
517                sb.append(SPACE).append("REPLICA RULE");
518                sb.append(SPACE).append(parseName(tokens)); // rewrite_rule_name
519            } else if (tokens.canConsume("ALWAYS", "TRIGGER")) {
520                sb.append(SPACE).append("ALWAYS TRIGGER");
521                sb.append(SPACE).append(parseName(tokens)); // trigger_name
522            } else if (tokens.canConsume("ALWAYS", "RULE")) {
523                sb.append(SPACE).append("ALWAYS RULE");
524                sb.append(SPACE).append(parseName(tokens)); // rewrite_rule_name
525            } else if (tokens.canConsume("RULE")) {
526                sb.append(SPACE).append("RULE");
527                sb.append(SPACE).append(parseName(tokens)); // rewrite_rule_name
528            } else {
529                LOGGER.debug("  WARNING:  Option not found for ALTER TABLE - ENABLE XXXX. Check your DDL for incomplete statement.");
530            }
531            optionNode.setProperty(VALUE, sb.toString());
532        } else if (tokens.canConsume("DISABLE")) {
533            AstNode optionNode = nodeFactory().node("action", alterTableNode, TYPE_STATEMENT_OPTION);
534            StringBuilder sb = new StringBuilder("DISABLE");
535            // -- DISABLE TRIGGER [ trigger_name | ALL | USER ]
536            // -- DISABLE RULE rewrite_rule_name
537            if (tokens.canConsume("TRIGGER")) {
538                sb.append(SPACE).append("TRIGGER");
539                if (!tokens.matches(getTerminator())) {
540                    sb.append(SPACE).append(parseName(tokens)); // [ trigger_name | ALL | USER ]
541                }
542            } else if (tokens.canConsume("RULE")) {
543                sb.append(SPACE).append("RULE");
544                sb.append(SPACE).append(parseName(tokens)); // rewrite_rule_name
545            } else {
546                LOGGER.debug("  WARNING:  Option not found for ALTER TABLE - DISABLE XXXX. Check your DDL for incomplete statement.");
547            }
548            optionNode.setProperty(VALUE, sb.toString());
549        } else if (tokens.canConsume("CLUSTER", "ON")) {
550            AstNode optionNode = nodeFactory().node("action", alterTableNode, TYPE_STATEMENT_OPTION);
551            // -- CLUSTER ON index_name
552            String indexName = parseName(tokens); // index_name
553            optionNode.setProperty(VALUE, "CLUSTER ON" + SPACE + indexName);
554        } else if (tokens.canConsume("OWNER", "TO")) {
555            AstNode optionNode = nodeFactory().node("action", alterTableNode, TYPE_STATEMENT_OPTION);
556            // -- OWNER TO new_owner
557            optionNode.setProperty(VALUE, "OWNER TO" + SPACE + parseName(tokens));
558        } else if (tokens.canConsume("INHERIT")) {
559            AstNode optionNode = nodeFactory().node("action", alterTableNode, TYPE_STATEMENT_OPTION);
560            // -- INHERIT parent_table
561            optionNode.setProperty(VALUE, "INHERIT" + SPACE + parseName(tokens));
562        } else if (tokens.canConsume("NO", "INHERIT")) {
563            AstNode optionNode = nodeFactory().node("action", alterTableNode, TYPE_STATEMENT_OPTION);
564            // -- NO INHERIT parent_table
565            optionNode.setProperty(VALUE, "NO INHERIT" + SPACE + parseName(tokens));
566        } else if (tokens.canConsume("SET", "TABLESPACE")) {
567            AstNode optionNode = nodeFactory().node("action", alterTableNode, TYPE_STATEMENT_OPTION);
568            // -- SET TABLESPACE new_tablespace
569            optionNode.setProperty(VALUE, "SET TABLESPACE" + SPACE + parseName(tokens));
570        } else if (tokens.canConsume("SET", "WITHOUT", "CLUSTER")) {
571            AstNode optionNode = nodeFactory().node("action", alterTableNode, TYPE_STATEMENT_OPTION);
572            optionNode.setProperty(VALUE, "SET WITHOUT CLUSTER");
573        } else if (tokens.canConsume("SET", "WITHOUT", "OIDS")) {
574            AstNode optionNode = nodeFactory().node("action", alterTableNode, TYPE_STATEMENT_OPTION);
575            optionNode.setProperty(VALUE, "SET WITHOUT OIDS");
576        } else if (tokens.canConsume("SET", "WITH", "OIDS")) {
577            AstNode optionNode = nodeFactory().node("action", alterTableNode, TYPE_STATEMENT_OPTION);
578            optionNode.setProperty(VALUE, "SET WITH OIDS");
579        } else if (tokens.canConsume("RENAME", "TO")) {
580            // --ALTER TABLE name
581            // -- RENAME TO new_name
582            String newTableName = parseName(tokens);
583            alterTableNode.setProperty(NEW_NAME, newTableName);
584
585        } else if (tokens.canConsume("RENAME")) {
586            // --ALTER TABLE [ ONLY ] name [ * ]
587            // -- RENAME [ COLUMN ] column TO new_column
588            tokens.canConsume("COLUMN");
589            String oldColumnName = parseName(tokens); // OLD COLUMN NAME
590            tokens.consume("TO");
591            String newColumnName = parseName(tokens); // NEW COLUMN NAME
592            AstNode renameColumnNode = nodeFactory().node(oldColumnName, alterTableNode, TYPE_RENAME_COLUMN);
593            renameColumnNode.setProperty(NEW_NAME, newColumnName);
594        } else if (tokens.canConsume("SET", "SCHEMA")) {
595            // ALTER TABLE myschema.distributors SET SCHEMA your schema;
596            String schemaName = parseName(tokens);
597            alterTableNode.setProperty(SCHEMA_NAME, schemaName);
598        } else {
599            LOGGER.debug("  WARNING:  Option not found for ALTER TABLE. Check your DDL for incomplete statement.");
600        }
601    }
602
603    private void parseSingleCommaTerminatedColumnDefinition( DdlTokenStream tokens,
604                                                             AstNode tableNode,
605                                                             boolean isAlterTable ) throws ParsingException {
606        assert tokens != null;
607        assert tableNode != null;
608
609        tokens.canConsume("COLUMN");
610        String columnName = parseName(tokens);
611        DataType datatype = getDatatypeParser().parse(tokens);
612
613        AstNode columnNode = nodeFactory().node(columnName, tableNode, TYPE_COLUMN_DEFINITION);
614
615        getDatatypeParser().setPropertiesOnNode(columnNode, datatype);
616        // Now clauses and constraints can be defined in any order, so we need to keep parsing until we get to a comma, a
617        // terminator
618        // or a new statement
619
620        while (tokens.hasNext() && !tokens.matches(getTerminator()) && !tokens.matches(DdlTokenizer.STATEMENT_KEY)) {
621            boolean parsedDefaultClause = parseDefaultClause(tokens, columnNode);
622            if (!parsedDefaultClause) {
623                parseCollateClause(tokens, columnNode);
624                parseColumnConstraint(tokens, columnNode, isAlterTable);
625            }
626            consumeComment(tokens);
627            if (tokens.matches(COMMA)) {
628                break;
629            }
630        }
631    }
632
633    /**
634     * Currently, only CREATE TABLE, CREATE VIEW, CREATE INDEX, CREATE SEQUENCE, CREATE TRIGGER and GRANT are accepted as clauses
635     * within CREATE SCHEMA. {@inheritDoc}
636     * 
637     * @see org.modeshape.sequencer.ddl.StandardDdlParser#parseCreateSchemaStatement(org.modeshape.sequencer.ddl.DdlTokenStream,
638     *      org.modeshape.sequencer.ddl.node.AstNode)
639     */
640    @Override
641    protected AstNode parseCreateSchemaStatement( DdlTokenStream tokens,
642                                                  AstNode parentNode ) throws ParsingException {
643        assert tokens != null;
644        assert parentNode != null;
645
646        return super.parseCreateSchemaStatement(tokens, parentNode);
647    }
648
    /**
     * Parses the PostgreSQL-specific CREATE statements (temporary tables, aggregates, casts, constraint triggers,
     * conversions, databases, foreign data wrappers, functions, groups, indexes, languages, operators, roles, rules,
     * sequences, servers, tablespaces, text search objects, triggers, types, user mappings, and users), delegating to the
     * superclass when none of the PostgreSQL start phrases match.
     * <p>
     * NOTE: the order of the checks matters — longer start phrases must be tested before their shorter prefixes (e.g.
     * CREATE OR REPLACE FUNCTION relative to CREATE FUNCTION, and CREATE USER MAPPING before CREATE USER).
     * 
     * @param tokens the token stream; may not be null
     * @param parentNode the parent node; may not be null
     * @return the statement node created for the matched CREATE statement, or the superclass result
     * @throws ParsingException if the tokens cannot be parsed
     */
    @Override
    protected AstNode parseCreateStatement( DdlTokenStream tokens,
                                            AstNode parentNode ) throws ParsingException {
        assert tokens != null;
        assert parentNode != null;

        // Temporary-table variants get the full table parsing treatment rather than a generic statement node.
        if (tokens.matches(STMT_CREATE_TEMP_TABLE) || tokens.matches(STMT_CREATE_GLOBAL_TEMP_TABLE)
            || tokens.matches(STMT_CREATE_LOCAL_TEMP_TABLE)) {
            return parseCreateTableStatement(tokens, parentNode);
        } else if (tokens.matches(STMT_CREATE_AGGREGATE)) {
            return parseStatement(tokens, STMT_CREATE_AGGREGATE, parentNode, TYPE_CREATE_AGGREGATE_STATEMENT);
        } else if (tokens.matches(STMT_CREATE_CAST)) {
            return parseStatement(tokens, STMT_CREATE_CAST, parentNode, TYPE_CREATE_CAST_STATEMENT);
        } else if (tokens.matches(STMT_CREATE_CONSTRAINT_TRIGGER)) {
            return parseStatement(tokens, STMT_CREATE_CONSTRAINT_TRIGGER, parentNode, TYPE_CREATE_CONSTRAINT_TRIGGER_STATEMENT);
        } else if (tokens.matches(STMT_CREATE_CONVERSION)) {
            return parseStatement(tokens, STMT_CREATE_CONVERSION, parentNode, TYPE_CREATE_CONVERSION_STATEMENT);
        } else if (tokens.matches(STMT_CREATE_DATABASE)) {
            return parseStatement(tokens, STMT_CREATE_DATABASE, parentNode, TYPE_CREATE_DATABASE_STATEMENT);
        } else if (tokens.matches(STMT_CREATE_FOREIGN_DATA_WRAPPER)) {
            return parseStatement(tokens,
                                  STMT_CREATE_FOREIGN_DATA_WRAPPER,
                                  parentNode,
                                  TYPE_CREATE_FOREIGN_DATA_WRAPPER_STATEMENT);
        } else if (tokens.matches(STMT_CREATE_FUNCTION)) {
            return parseCreateFunctionStatement(tokens, parentNode);
        } else if (tokens.matches(STMT_CREATE_OR_REPLACE_FUNCTION)) {
            return parseCreateFunctionStatement(tokens, parentNode);
        } else if (tokens.matches(STMT_CREATE_GROUP)) {
            return parseStatement(tokens, STMT_CREATE_GROUP, parentNode, TYPE_CREATE_GROUP_STATEMENT);
        } else if (tokens.matches(STMT_CREATE_INDEX)) {
            return parseStatement(tokens, STMT_CREATE_INDEX, parentNode, TYPE_CREATE_INDEX_STATEMENT);
        } else if (tokens.matches(STMT_CREATE_UNIQUE_INDEX)) {
            return parseStatement(tokens, STMT_CREATE_UNIQUE_INDEX, parentNode, TYPE_CREATE_INDEX_STATEMENT);
        } else if (tokens.matches(STMT_CREATE_LANGUAGE)) {
            return parseStatement(tokens, STMT_CREATE_LANGUAGE, parentNode, TYPE_CREATE_LANGUAGE_STATEMENT);
        } else if (tokens.matches(STMT_CREATE_TRUSTED_PROCEDURAL_LANGUAGE)) {
            return parseStatement(tokens, STMT_CREATE_TRUSTED_PROCEDURAL_LANGUAGE, parentNode, TYPE_CREATE_LANGUAGE_STATEMENT);
        } else if (tokens.matches(STMT_CREATE_PROCEDURAL_LANGUAGE)) {
            return parseStatement(tokens, STMT_CREATE_PROCEDURAL_LANGUAGE, parentNode, TYPE_CREATE_LANGUAGE_STATEMENT);
        } else if (tokens.matches(STMT_CREATE_OPERATOR)) {
            return parseStatement(tokens, STMT_CREATE_OPERATOR, parentNode, TYPE_CREATE_OPERATOR_STATEMENT);
        } else if (tokens.matches(STMT_CREATE_ROLE)) {
            return parseStatement(tokens, STMT_CREATE_ROLE, parentNode, TYPE_CREATE_ROLE_STATEMENT);
        } else if (tokens.matches(STMT_CREATE_RULE) || tokens.matches(STMT_CREATE_OR_REPLACE_RULE)) {
            return parseCreateRuleStatement(tokens, parentNode);
        } else if (tokens.matches(STMT_CREATE_SEQUENCE)) {
            return parseStatement(tokens, STMT_CREATE_SEQUENCE, parentNode, TYPE_CREATE_SEQUENCE_STATEMENT);
        } else if (tokens.matches(STMT_CREATE_SERVER)) {
            return parseStatement(tokens, STMT_CREATE_SERVER, parentNode, TYPE_CREATE_SERVER_STATEMENT);
        } else if (tokens.matches(STMT_CREATE_TABLESPACE)) {
            return parseStatement(tokens, STMT_CREATE_TABLESPACE, parentNode, TYPE_CREATE_TABLESPACE_STATEMENT);
        } else if (tokens.matches(STMT_CREATE_TEXT_SEARCH)) {
            return parseStatement(tokens, STMT_CREATE_TEXT_SEARCH, parentNode, TYPE_CREATE_TEXT_SEARCH_STATEMENT);
        } else if (tokens.matches(STMT_CREATE_TRIGGER)) {
            return parseStatement(tokens, STMT_CREATE_TRIGGER, parentNode, TYPE_CREATE_TRIGGER_STATEMENT);
        } else if (tokens.matches(STMT_CREATE_TYPE)) {
            return parseStatement(tokens, STMT_CREATE_TYPE, parentNode, TYPE_CREATE_TYPE_STATEMENT);
        } else if (tokens.matches(STMT_CREATE_USER_MAPPING)) {
            // Must precede STMT_CREATE_USER, which is a prefix of this phrase.
            return parseStatement(tokens, STMT_CREATE_USER_MAPPING, parentNode, TYPE_CREATE_USER_MAPPING_STATEMENT);
        } else if (tokens.matches(STMT_CREATE_USER)) {
            return parseStatement(tokens, STMT_CREATE_USER, parentNode, TYPE_CREATE_USER_STATEMENT);
        }

        // Not a PostgreSQL-specific CREATE statement; fall back to the standard parser.
        return super.parseCreateStatement(tokens, parentNode);
    }
715
716    @Override
717    protected AstNode parseCreateTableStatement( DdlTokenStream tokens,
718                                                 AstNode parentNode ) throws ParsingException {
719        assert tokens != null;
720        assert parentNode != null;
721
722        markStartOfStatement(tokens);
723
724        tokens.consume("CREATE"); // CREATE
725
726        tokens.canConsumeAnyOf("LOCAL", "GLOBAL");
727        tokens.canConsumeAnyOf("TEMP", "TEMPORARY");
728
729        tokens.consume("TABLE"); // TABLE
730
731        String tableName = parseName(tokens);
732        AstNode tableNode = nodeFactory().node(tableName, parentNode, TYPE_CREATE_TABLE_STATEMENT);
733
734        // //System.out.println("  >> PARSING CREATE TABLE >>  Name = " + tableName);
735        // if( tokens.canConsume("AS") ) {
736        // parseUntilTerminator(tokens);
737        // } else if( tokens.matches(L_PAREN)){
738        parseColumnsAndConstraints(tokens, tableNode);
739        // }
740        // // [ ON COMMIT { PRESERVE ROWS | DELETE ROWS | DROP } ]
741        // // [ TABLESPACE tablespace ]
742        // // [ WITH ( storage_parameter [= value] [, ... ] ) | WITH OIDS | WITHOUT OIDS ]
743        // // [ WITH [ NO ] DATA ]
744        // // AS query (SEE ABOVE)
745        // if( tokens.canConsume("ON", "COMMIT") ) {
746        // // PRESERVE ROWS | DELETE ROWS | DROP
747        // tokens.canConsume("PRESERVE", "ROWS");
748        // tokens.canConsume("DELETE", "ROWS");
749        // tokens.canConsume("DROP");
750        // } else if( tokens.canConsume("TABLESPACE") ) {
751        // tokens.consume(); // tablespace name
752        // } else if( tokens.canConsume("WITH", "OIDS") ||
753        // tokens.canConsume("WITHOUT", "OUDS")) {
754        // } else if( tokens.canConsume("WITH")) {
755        // if( tokens.matches(L_PAREN) ) {
756        // consumeParenBoundedTokens(tokens, true);
757        // } else {
758        // tokens.canConsume("NO");
759        // tokens.canConsume("DATA");
760        // }
761        // }
762
763        parseCreateTableOptions(tokens, tableNode);
764
765        markEndOfStatement(tokens, tableNode);
766
767        return tableNode;
768    }
769
770    @Override
771    protected void parseNextCreateTableOption( DdlTokenStream tokens,
772                                               AstNode parentNode ) throws ParsingException {
773        assert tokens != null;
774        assert parentNode != null;
775
776        if (tokens.canConsume("ON", "COMMIT")) {
777            // PRESERVE ROWS | DELETE ROWS | DROP
778            tokens.canConsume("PRESERVE", "ROWS");
779            tokens.canConsume("DELETE", "ROWS");
780            tokens.canConsume("DROP");
781        } else if (tokens.canConsume("TABLESPACE")) {
782            tokens.consume(); // tablespace name
783        } else if (tokens.canConsume("WITH", "OIDS") || tokens.canConsume("WITHOUT", "OUDS")) {
784        } else if (tokens.canConsume("WITH")) {
785            if (tokens.matches(L_PAREN)) {
786                consumeParenBoundedTokens(tokens, true);
787            } else {
788                tokens.canConsume("NO");
789                tokens.canConsume("DATA");
790            }
791        } else if (tokens.canConsume("AS")) {
792            parseUntilTerminator(tokens);
793        }
794    }
795
796    @Override
797    protected boolean areNextTokensCreateTableOptions( DdlTokenStream tokens ) throws ParsingException {
798        assert tokens != null;
799
800        boolean result = false;
801
802        if (tokens.matches("ON", "COMMIT") || tokens.matches("TABLESPACE") || tokens.matches("WITH") || tokens.matches("WITHOUT")
803            || tokens.matches("AS")) {
804            result = true;
805        }
806
807        return result;
808    }
809
810    @Override
811    protected AstNode parseCreateViewStatement( DdlTokenStream tokens,
812                                                AstNode parentNode ) throws ParsingException {
813        assert tokens != null;
814        assert parentNode != null;
815
816        return super.parseCreateViewStatement(tokens, parentNode);
817    }
818
819    @Override
820    protected boolean parseDefaultClause( DdlTokenStream tokens,
821                                          AstNode columnNode ) {
822        assert tokens != null;
823        assert columnNode != null;
824
825        /*
826            } else if( tokens.matches("NOW")){
827                tokens.consume("NOW");
828                tokens.consume('(');
829                tokens.consume(')');
830                defaultValue = "NOW()";
831            } else if( tokens.matches("NEXTVAL")){
832                defaultValue = tokens.consume() + consumeParenBoundedTokens(tokens, true);
833            } 
834         * 
835         */
836        // defaultClause
837        // : 'WITH'? 'DEFAULT' defaultOption
838        // ;
839        // defaultOption : ('('? literal ')'?) | datetimeValueFunction
840        // | 'SYSDATE' | 'USER' | 'CURRENT_USER' | 'SESSION_USER' | 'SYSTEM_USER' | 'NULL' | nowOption;
841        String defaultValue = "";
842
843        if (tokens.matchesAnyOf("WITH", "DEFAULT")) {
844            if (tokens.matches("WITH")) {
845                tokens.consume();
846            }
847            tokens.consume("DEFAULT");
848            String optionID;
849            int precision = -1;
850
851            if (tokens.canConsume("CURRENT_DATE")) {
852
853                optionID = DEFAULT_ID_DATETIME;
854                defaultValue = "CURRENT_DATE";
855            } else if (tokens.canConsume("CURRENT_TIME")) {
856                optionID = DEFAULT_ID_DATETIME;
857                defaultValue = "CURRENT_TIME";
858                if (tokens.canConsume(L_PAREN)) {
859                    // EXPECT INTEGER
860                    precision = integer(tokens.consume());
861                    tokens.canConsume(R_PAREN);
862                }
863            } else if (tokens.canConsume("CURRENT_TIMESTAMP")) {
864                optionID = DEFAULT_ID_DATETIME;
865                defaultValue = "CURRENT_TIMESTAMP";
866                if (tokens.canConsume(L_PAREN)) {
867                    // EXPECT INTEGER
868                    precision = integer(tokens.consume());
869                    tokens.canConsume(R_PAREN);
870                }
871            } else if (tokens.canConsume("USER")) {
872                optionID = DEFAULT_ID_USER;
873                defaultValue = "USER";
874            } else if (tokens.canConsume("CURRENT_USER")) {
875                optionID = DEFAULT_ID_CURRENT_USER;
876                defaultValue = "CURRENT_USER";
877            } else if (tokens.canConsume("SESSION_USER")) {
878                optionID = DEFAULT_ID_SESSION_USER;
879                defaultValue = "SESSION_USER";
880            } else if (tokens.canConsume("SYSTEM_USER")) {
881                optionID = DEFAULT_ID_SYSTEM_USER;
882                defaultValue = "SYSTEM_USER";
883            } else if (tokens.canConsume("NULL")) {
884                optionID = DEFAULT_ID_NULL;
885                defaultValue = "NULL";
886            } else if (tokens.canConsume(L_PAREN)) {
887                optionID = DEFAULT_ID_LITERAL;
888                while (!tokens.canConsume(R_PAREN)) {
889                    defaultValue = defaultValue + tokens.consume();
890                }
891            } else if (tokens.matches("NOW")) {
892                optionID = DEFAULT_ID_LITERAL;
893                tokens.consume("NOW");
894                tokens.consume('(');
895                tokens.consume(')');
896                defaultValue = "NOW()";
897            } else if (tokens.matches("NEXTVAL")) {
898                optionID = DEFAULT_ID_LITERAL;
899                defaultValue = tokens.consume() + consumeParenBoundedTokens(tokens, true);
900            } else {
901                optionID = DEFAULT_ID_LITERAL;
902                // Assume default was EMPTY or ''
903                defaultValue = tokens.consume();
904                // NOTE: default value could be a Real number as well as an integer, so
905                // 1000.00 is valid
906                if (tokens.canConsume(".")) {
907                    defaultValue = defaultValue + '.' + tokens.consume();
908                }
909            }
910
911            columnNode.setProperty(DEFAULT_OPTION, optionID);
912            columnNode.setProperty(DEFAULT_VALUE, defaultValue);
913            if (precision > -1) {
914                columnNode.setProperty(DEFAULT_PRECISION, precision);
915            }
916            return true;
917        }
918
919        return false;
920    }
921
    /**
     * Parses PostgreSQL-specific statements that fall outside the standard grammar (COMMENT ON, ABORT, ANALYZE, CLUSTER,
     * COPY, DEALLOCATE [PREPARE], DECLARE, EXPLAIN [ANALYZE], FETCH, LISTEN, LOAD, LOCK TABLE, MOVE, NOTIFY, PREPARE,
     * REASSIGN OWNED, REINDEX, RELEASE SAVEPOINT, ROLLBACK, SELECT INTO, SHOW, TRUNCATE, UNLISTEN, VACUUM, COMMIT),
     * delegating to the superclass when none of these start phrases match.
     * <p>
     * NOTE: the order of the checks matters — longer phrases must be tested before their shorter prefixes
     * (DEALLOCATE PREPARE before DEALLOCATE, EXPLAIN ANALYZE before EXPLAIN).
     * 
     * @param tokens the token stream; may not be null
     * @param parentNode the parent node; may not be null
     * @return the statement node for the matched statement, or the superclass result
     * @throws ParsingException if the tokens cannot be parsed
     */
    @Override
    protected AstNode parseCustomStatement( DdlTokenStream tokens,
                                            AstNode parentNode ) throws ParsingException {
        assert tokens != null;
        assert parentNode != null;

        if (tokens.matches(STMT_COMMENT_ON)) {
            return parseCommentStatement(tokens, parentNode);
        } else if (tokens.matches(STMT_ABORT)) {
            return parseStatement(tokens, STMT_ABORT, parentNode, TYPE_ABORT_STATEMENT);
        } else if (tokens.matches(STMT_ANALYZE)) {
            return parseStatement(tokens, STMT_ANALYZE, parentNode, TYPE_ANALYZE_STATEMENT);
        } else if (tokens.matches(STMT_CLUSTER)) {
            return parseStatement(tokens, STMT_CLUSTER, parentNode, TYPE_CLUSTER_STATEMENT);
        } else if (tokens.matches(STMT_COPY)) {
            return parseStatement(tokens, STMT_COPY, parentNode, TYPE_COPY_STATEMENT);
        } else if (tokens.matches(STMT_DEALLOCATE_PREPARE)) {
            // Must precede STMT_DEALLOCATE, which is a prefix of this phrase.
            return parseStatement(tokens, STMT_DEALLOCATE_PREPARE, parentNode, TYPE_DEALLOCATE_STATEMENT);
        } else if (tokens.matches(STMT_DEALLOCATE)) {
            return parseStatement(tokens, STMT_DEALLOCATE, parentNode, TYPE_DEALLOCATE_STATEMENT);
        } else if (tokens.matches(STMT_DECLARE)) {
            return parseStatement(tokens, STMT_DECLARE, parentNode, TYPE_DECLARE_STATEMENT);
        } else if (tokens.matches(STMT_EXPLAIN_ANALYZE)) {
            // Must precede STMT_EXPLAIN, which is a prefix of this phrase.
            return parseStatement(tokens, STMT_EXPLAIN_ANALYZE, parentNode, TYPE_EXPLAIN_STATEMENT);
        } else if (tokens.matches(STMT_EXPLAIN)) {
            return parseStatement(tokens, STMT_EXPLAIN, parentNode, TYPE_EXPLAIN_STATEMENT);
        } else if (tokens.matches(STMT_FETCH)) {
            return parseStatement(tokens, STMT_FETCH, parentNode, TYPE_FETCH_STATEMENT);
        } else if (tokens.matches(STMT_LISTEN)) {
            return parseStatement(tokens, STMT_LISTEN, parentNode, TYPE_LISTEN_STATEMENT);
        } else if (tokens.matches(STMT_LOAD)) {
            return parseStatement(tokens, STMT_LOAD, parentNode, TYPE_LOAD_STATEMENT);
        } else if (tokens.matches(STMT_LOCK_TABLE)) {
            return parseStatement(tokens, STMT_LOCK_TABLE, parentNode, TYPE_LOCK_TABLE_STATEMENT);
        } else if (tokens.matches(STMT_MOVE)) {
            return parseStatement(tokens, STMT_MOVE, parentNode, TYPE_MOVE_STATEMENT);
        } else if (tokens.matches(STMT_NOTIFY)) {
            return parseStatement(tokens, STMT_NOTIFY, parentNode, TYPE_NOTIFY_STATEMENT);
        } else if (tokens.matches(STMT_PREPARE)) {
            return parseStatement(tokens, STMT_PREPARE, parentNode, TYPE_PREPARE_STATEMENT);
        } else if (tokens.matches(STMT_REASSIGN_OWNED)) {
            return parseStatement(tokens, STMT_REASSIGN_OWNED, parentNode, TYPE_REASSIGN_OWNED_STATEMENT);
        } else if (tokens.matches(STMT_REINDEX)) {
            return parseStatement(tokens, STMT_REINDEX, parentNode, TYPE_REINDEX_STATEMENT);
        } else if (tokens.matches(STMT_RELEASE_SAVEPOINT)) {
            return parseStatement(tokens, STMT_RELEASE_SAVEPOINT, parentNode, TYPE_RELEASE_SAVEPOINT_STATEMENT);
        } else if (tokens.matches(STMT_ROLLBACK)) {
            return parseStatement(tokens, STMT_ROLLBACK, parentNode, TYPE_ROLLBACK_STATEMENT);
        } else if (tokens.matches(STMT_SELECT_INTO)) {
            return parseStatement(tokens, STMT_SELECT_INTO, parentNode, TYPE_SELECT_INTO_STATEMENT);
        } else if (tokens.matches(STMT_SHOW)) {
            return parseStatement(tokens, STMT_SHOW, parentNode, TYPE_SHOW_STATEMENT);
        } else if (tokens.matches(STMT_TRUNCATE)) {
            return parseStatement(tokens, STMT_TRUNCATE, parentNode, TYPE_TRUNCATE_STATEMENT);
        } else if (tokens.matches(STMT_UNLISTEN)) {
            return parseStatement(tokens, STMT_UNLISTEN, parentNode, TYPE_UNLISTEN_STATEMENT);
        } else if (tokens.matches(STMT_VACUUM)) {
            return parseStatement(tokens, STMT_VACUUM, parentNode, TYPE_VACUUM_STATEMENT);
        } else if (tokens.matches(STMT_COMMIT)) {
            return parseStatement(tokens, STMT_COMMIT, parentNode, TYPE_COMMIT_STATEMENT);
        }

        // Not a PostgreSQL-specific statement; fall back to the standard parser.
        return super.parseCustomStatement(tokens, parentNode);
    }
986
    /**
     * Parses the PostgreSQL-specific DROP statements (aggregates, casts, constraint triggers, conversions, databases,
     * foreign data wrappers, functions, groups, indexes, languages, operators, OWNED BY, roles, rules, sequences, servers,
     * tablespaces, text search objects, triggers, types, user mappings, users, domains, tables, views, and schemas),
     * delegating to the superclass when none of these start phrases match.
     * <p>
     * NOTE: the order of the checks matters — longer phrases must be tested before their shorter prefixes (e.g.
     * DROP USER MAPPING before DROP USER).
     * 
     * @param tokens the token stream; may not be null
     * @param parentNode the parent node; may not be null
     * @return the statement node for the matched DROP statement, or the superclass result
     * @throws ParsingException if the tokens cannot be parsed
     */
    @Override
    protected AstNode parseDropStatement( DdlTokenStream tokens,
                                          AstNode parentNode ) throws ParsingException {
        assert tokens != null;
        assert parentNode != null;

        if (tokens.matches(STMT_DROP_AGGREGATE)) {
            return parseStatement(tokens, STMT_DROP_AGGREGATE, parentNode, TYPE_DROP_AGGREGATE_STATEMENT);
        } else if (tokens.matches(STMT_DROP_CAST)) {
            return parseStatement(tokens, STMT_DROP_CAST, parentNode, TYPE_DROP_CAST_STATEMENT);
        } else if (tokens.matches(STMT_DROP_CONSTRAINT_TRIGGER)) {
            return parseStatement(tokens, STMT_DROP_CONSTRAINT_TRIGGER, parentNode, TYPE_DROP_CONSTRAINT_TRIGGER_STATEMENT);
        } else if (tokens.matches(STMT_DROP_CONVERSION)) {
            return parseSimpleDropStatement(tokens, STMT_DROP_CONVERSION, parentNode, TYPE_DROP_CONVERSION_STATEMENT);
        } else if (tokens.matches(STMT_DROP_DATABASE)) {
            return parseSimpleDropStatement(tokens, STMT_DROP_DATABASE, parentNode, TYPE_DROP_DATABASE_STATEMENT);
        } else if (tokens.matches(STMT_DROP_FOREIGN_DATA_WRAPPER)) {
            return parseSimpleDropStatement(tokens,
                                            STMT_DROP_FOREIGN_DATA_WRAPPER,
                                            parentNode,
                                            TYPE_DROP_FOREIGN_DATA_WRAPPER_STATEMENT);
        } else if (tokens.matches(STMT_DROP_FUNCTION)) {
            return parseStatement(tokens, STMT_DROP_FUNCTION, parentNode, TYPE_DROP_FUNCTION_STATEMENT);
        } else if (tokens.matches(STMT_DROP_GROUP)) {
            return parseSimpleDropStatement(tokens, STMT_DROP_GROUP, parentNode, TYPE_DROP_GROUP_STATEMENT);
        } else if (tokens.matches(STMT_DROP_INDEX)) {
            return parseSimpleDropStatement(tokens, STMT_DROP_INDEX, parentNode, TYPE_DROP_INDEX_STATEMENT);
        } else if (tokens.matches(STMT_DROP_LANGUAGE)) {
            return parseSimpleDropStatement(tokens, STMT_DROP_LANGUAGE, parentNode, TYPE_DROP_LANGUAGE_STATEMENT);
        } else if (tokens.matches(STMT_DROP_PROCEDURAL_LANGUAGE)) {
            return parseSimpleDropStatement(tokens, STMT_DROP_PROCEDURAL_LANGUAGE, parentNode, TYPE_DROP_LANGUAGE_STATEMENT);
        } else if (tokens.matches(STMT_DROP_OPERATOR)) {
            return parseStatement(tokens, STMT_DROP_OPERATOR, parentNode, TYPE_DROP_OPERATOR_STATEMENT);
        } else if (tokens.matches(STMT_DROP_OWNED_BY)) {
            return parseSimpleDropStatement(tokens, STMT_DROP_OWNED_BY, parentNode, TYPE_DROP_OWNED_BY_STATEMENT);
        } else if (tokens.matches(STMT_DROP_ROLE)) {
            return parseSimpleDropStatement(tokens, STMT_DROP_ROLE, parentNode, TYPE_DROP_ROLE_STATEMENT);
        } else if (tokens.matches(STMT_DROP_RULE)) {
            return parseStatement(tokens, STMT_DROP_RULE, parentNode, TYPE_DROP_RULE_STATEMENT);
        } else if (tokens.matches(STMT_DROP_SEQUENCE)) {
            return parseSimpleDropStatement(tokens, STMT_DROP_SEQUENCE, parentNode, TYPE_DROP_SEQUENCE_STATEMENT);
        } else if (tokens.matches(STMT_DROP_SERVER)) {
            return parseSimpleDropStatement(tokens, STMT_DROP_SERVER, parentNode, TYPE_DROP_SERVER_STATEMENT);
        } else if (tokens.matches(STMT_DROP_TABLESPACE)) {
            return parseSimpleDropStatement(tokens, STMT_DROP_TABLESPACE, parentNode, TYPE_DROP_TABLESPACE_STATEMENT);
        } else if (tokens.matches(STMT_DROP_TEXT_SEARCH_CONFIGURATION)) {
            return parseSimpleDropStatement(tokens,
                                            STMT_DROP_TEXT_SEARCH_CONFIGURATION,
                                            parentNode,
                                            TYPE_DROP_TEXT_SEARCH_STATEMENT);
        } else if (tokens.matches(STMT_DROP_TEXT_SEARCH_DICTIONARY)) {
            return parseSimpleDropStatement(tokens, STMT_DROP_TEXT_SEARCH_DICTIONARY, parentNode, TYPE_DROP_TEXT_SEARCH_STATEMENT);
        } else if (tokens.matches(STMT_DROP_TEXT_SEARCH_PARSER)) {
            return parseSimpleDropStatement(tokens, STMT_DROP_TEXT_SEARCH_PARSER, parentNode, TYPE_DROP_TEXT_SEARCH_STATEMENT);
        } else if (tokens.matches(STMT_DROP_TEXT_SEARCH_TEMPLATE)) {
            return parseSimpleDropStatement(tokens, STMT_DROP_TEXT_SEARCH_TEMPLATE, parentNode, TYPE_DROP_TEXT_SEARCH_STATEMENT);
        } else if (tokens.matches(STMT_DROP_TRIGGER)) {
            return parseStatement(tokens, STMT_DROP_TRIGGER, parentNode, TYPE_DROP_TRIGGER_STATEMENT);
        } else if (tokens.matches(STMT_DROP_TYPE)) {
            return parseSimpleDropStatement(tokens, STMT_DROP_TYPE, parentNode, TYPE_DROP_TYPE_STATEMENT);
        } else if (tokens.matches(STMT_DROP_USER_MAPPING)) {
            // Must precede STMT_DROP_USER, which is a prefix of this phrase.
            return parseStatement(tokens, STMT_DROP_USER_MAPPING, parentNode, TYPE_DROP_USER_MAPPING_STATEMENT);
        } else if (tokens.matches(STMT_DROP_USER)) {
            return parseSimpleDropStatement(tokens, STMT_DROP_USER, parentNode, TYPE_DROP_USER_STATEMENT);
        } else if (tokens.matches(StatementStartPhrases.STMT_DROP_DOMAIN)) {
            // -- DROP DOMAIN [ IF EXISTS ] name [, ...] [ CASCADE | RESTRICT ]
            return parseSimpleDropStatement(tokens,
                                            StatementStartPhrases.STMT_DROP_DOMAIN,
                                            parentNode,
                                            TYPE_DROP_DOMAIN_STATEMENT);
        } else if (tokens.matches(StatementStartPhrases.STMT_DROP_TABLE)) {
            // -- DROP TABLE [ IF EXISTS ] name [, ...] [ CASCADE | RESTRICT ]
            return parseSimpleDropStatement(tokens, StatementStartPhrases.STMT_DROP_TABLE, parentNode, TYPE_DROP_TABLE_STATEMENT);
        } else if (tokens.matches(StatementStartPhrases.STMT_DROP_VIEW)) {
            // -- DROP VIEW [ IF EXISTS ] name [, ...] [ CASCADE | RESTRICT ]
            return parseSimpleDropStatement(tokens, StatementStartPhrases.STMT_DROP_VIEW, parentNode, TYPE_DROP_VIEW_STATEMENT);
        } else if (tokens.matches(StatementStartPhrases.STMT_DROP_SCHEMA)) {
            // -- DROP SCHEMA [ IF EXISTS ] name [, ...] [ CASCADE | RESTRICT ]
            return parseSimpleDropStatement(tokens,
                                            StatementStartPhrases.STMT_DROP_SCHEMA,
                                            parentNode,
                                            TYPE_DROP_SCHEMA_STATEMENT);
        }

        // Not a PostgreSQL-specific DROP statement; fall back to the standard parser.
        return super.parseDropStatement(tokens, parentNode);
    }
1073
1074    private AstNode parseSimpleDropStatement( DdlTokenStream tokens,
1075                                              String[] startPhrase,
1076                                              AstNode parentNode,
1077                                              String stmtType ) throws ParsingException {
1078        assert tokens != null;
1079        assert startPhrase != null && startPhrase.length > 0;
1080        assert parentNode != null;
1081
1082        markStartOfStatement(tokens);
1083
1084        String behavior = null;
1085        tokens.consume(startPhrase);
1086        boolean usesIfExists = tokens.canConsume("IF", "EXISTS"); // SUPER CLASS does not include "IF EXISTS"
1087
1088        List<String> nameList = new ArrayList<String>();
1089        nameList.add(parseName(tokens));
1090        while (tokens.matches(COMMA)) {
1091            tokens.consume(COMMA);
1092            nameList.add(parseName(tokens));
1093        }
1094
1095        if (tokens.canConsume("CASCADE")) {
1096            behavior = "CASCADE";
1097        } else if (tokens.canConsume("RESTRICT")) {
1098            behavior = "RESTRICT";
1099        }
1100
1101        AstNode dropNode = nodeFactory().node(nameList.get(0), parentNode, stmtType);
1102
1103        if (behavior != null) {
1104            dropNode.setProperty(DROP_BEHAVIOR, behavior);
1105        }
1106
1107        markEndOfStatement(tokens, dropNode);
1108
1109        // If there is only ONE name, then the EXPRESSION property is the whole expression and we don't need to set the
1110        // ORIGINAL EXPRESSION
1111        String originalExpression = (String)dropNode.getProperty(DDL_EXPRESSION);
1112        Object startLineNumber = dropNode.getProperty(DDL_START_LINE_NUMBER);
1113        Object startColumnNumber = dropNode.getProperty(DDL_START_COLUMN_NUMBER);
1114        Object startCharIndex = dropNode.getProperty(DDL_START_CHAR_INDEX);
1115
1116        if (nameList.size() > 1) {
1117            for (int i = 1; i < nameList.size(); i++) {
1118                String nextName = nameList.get(i);
1119                AstNode newNode = createSingleDropNode(nextName,
1120                                                       startPhrase,
1121                                                       originalExpression,
1122                                                       usesIfExists,
1123                                                       behavior,
1124                                                       stmtType,
1125                                                       parentNode);
1126                newNode.setProperty(DDL_START_LINE_NUMBER, startLineNumber);
1127                newNode.setProperty(DDL_START_COLUMN_NUMBER, startColumnNumber);
1128                newNode.setProperty(DDL_START_CHAR_INDEX, startCharIndex);
1129            }
1130
1131            // Since there is more than ONE name, then the EXPRESSION property of the first node's expression needs to be reset to
1132            // the first name and the ORIGINAL EXPRESSION property set to the entire statement.
1133            StringBuilder sb = new StringBuilder().append(getStatementTypeName(startPhrase));
1134            if (usesIfExists) {
1135                sb.append(SPACE).append("IF EXISTS");
1136            }
1137            sb.append(SPACE).append(nameList.get(0));
1138            if (behavior != null) {
1139                sb.append(SPACE).append(behavior);
1140            }
1141            sb.append(SEMICOLON);
1142            dropNode.setProperty(DDL_EXPRESSION, sb.toString());
1143            dropNode.setProperty(DDL_LENGTH, sb.length());
1144            dropNode.setProperty(DDL_ORIGINAL_EXPRESSION, originalExpression);
1145        }
1146
1147        return dropNode;
1148    }
1149
1150    private AstNode createSingleDropNode( String name,
1151                                          String[] startPhrase,
1152                                          String originalExpression,
1153                                          boolean usesIfExists,
1154                                          String behavior,
1155                                          String nodeType,
1156                                          AstNode parentNode ) {
1157        assert name != null;
1158        assert startPhrase != null && startPhrase.length > 0;
1159        assert nodeType != null;
1160        assert parentNode != null;
1161
1162        AstNode newNode = nodeFactory().node(name, parentNode, nodeType);
1163        StringBuilder sb = new StringBuilder().append(getStatementTypeName(startPhrase));
1164        if (usesIfExists) {
1165            sb.append(SPACE).append("IF EXISTS");
1166        }
1167        sb.append(SPACE).append(name);
1168        if (behavior != null) {
1169            sb.append(SPACE).append(behavior);
1170        }
1171        sb.append(SEMICOLON);
1172
1173        newNode.setProperty(DDL_EXPRESSION, sb.toString());
1174        newNode.setProperty(DDL_LENGTH, sb.length());
1175        newNode.setProperty(DDL_ORIGINAL_EXPRESSION, originalExpression);
1176
1177        return newNode;
1178    }
1179
1180    /**
1181     * {@inheritDoc}
1182     * 
1183     * @see org.modeshape.sequencer.ddl.StandardDdlParser#parseGrantStatement(org.modeshape.sequencer.ddl.DdlTokenStream,
1184     *      org.modeshape.sequencer.ddl.node.AstNode)
1185     */
1186    @Override
1187    protected AstNode parseGrantStatement( DdlTokenStream tokens,
1188                                           AstNode parentNode ) throws ParsingException {
1189        assert tokens != null;
1190        assert parentNode != null;
1191        assert tokens.matches(GRANT);
1192
1193        markStartOfStatement(tokens);
1194
1195        // NOTE: The first wack at this does not take into account the apparent potential repeating name elements after each type
1196        // declaration. Example:
1197        // GRANT { { SELECT | INSERT | UPDATE | DELETE | TRUNCATE | REFERENCES | TRIGGER }
1198        // [,...] | ALL [ PRIVILEGES ] }
1199        // ON [ TABLE ] tablename [, ...]
1200        // TO { [ GROUP ] rolename | PUBLIC } [, ...] [ WITH GRANT OPTION ]
1201        //
1202        // the "ON [ TABLE ] tablename [, ...]" seems to indicate that you can grant privileges on multiple tables at once, which
1203        // is
1204        // different thatn the SQL 92 standard. So this pass ONLY allows one and an parsing error will probably occur if multiple.
1205        //
1206        // Syntax for tables
1207        //
1208        // GRANT <privileges> ON <object name>
1209        // TO <grantee> [ { <comma> <grantee> }... ]
1210        // [ WITH GRANT OPTION ]
1211        //
1212        // <object name> ::=
1213        // [ TABLE ] <table name>
1214        // | SEQUENCE <sequence name>
1215        // | DATABASE <db name>
1216        // | FOREIGN DATA WRAPPER <fdw name>
1217        // | FOREIGN SERVER <server name>
1218        // | FUNCTION <function name>
1219        // | LANGUAGE <language name>
1220        // | SCHEMA <schema name>
1221        // | TABLESPACE <tablespace name>
1222
1223        //
1224        // Syntax for roles
1225        //
1226        // GRANT roleName [ {, roleName }* ] TO grantees
1227
1228        // privilege-types
1229        //
1230        // ALL PRIVILEGES | privilege-list
1231        //
1232        List<AstNode> grantNodes = new ArrayList<AstNode>();
1233        boolean allPrivileges = false;
1234
1235        List<AstNode> privileges = new ArrayList<AstNode>();
1236
1237        tokens.consume("GRANT");
1238
1239        if (tokens.canConsume("ALL", "PRIVILEGES")) {
1240            allPrivileges = true;
1241        } else {
1242            parseGrantPrivileges(tokens, privileges);
1243        }
1244
1245        if (allPrivileges || !privileges.isEmpty()) {
1246
1247            tokens.consume("ON");
1248
1249            if (tokens.canConsume("SCHEMA")) {
1250                grantNodes = parseMultipleGrantTargets(tokens, parentNode, TYPE_GRANT_ON_SCHEMA_STATEMENT);
1251            } else if (tokens.canConsume("SEQUENCE")) {
1252                grantNodes = parseMultipleGrantTargets(tokens, parentNode, TYPE_GRANT_ON_SEQUENCE_STATEMENT);
1253            } else if (tokens.canConsume("TABLESPACE")) {
1254                grantNodes = parseMultipleGrantTargets(tokens, parentNode, TYPE_GRANT_ON_TABLESPACE_STATEMENT);
1255            } else if (tokens.canConsume("DATABASE")) {
1256                grantNodes = parseMultipleGrantTargets(tokens, parentNode, TYPE_GRANT_ON_DATABASE_STATEMENT);
1257            } else if (tokens.canConsume("FUNCTION")) {
1258                grantNodes = parseFunctionAndParameters(tokens, parentNode);
1259            } else if (tokens.canConsume("LANGUAGE")) {
1260                grantNodes = parseMultipleGrantTargets(tokens, parentNode, TYPE_GRANT_ON_LANGUAGE_STATEMENT);
1261            } else if (tokens.canConsume("FOREIGN", "DATA", "WRAPPER")) {
1262                grantNodes = parseMultipleGrantTargets(tokens, parentNode, TYPE_GRANT_ON_FOREIGN_DATA_WRAPPER_STATEMENT);
1263            } else if (tokens.canConsume("FOREIGN", "SERVER")) {
1264                grantNodes = parseMultipleGrantTargets(tokens, parentNode, TYPE_GRANT_ON_FOREIGN_SERVER_STATEMENT);
1265            } else {
1266                tokens.canConsume(TABLE); // OPTIONAL
1267                String name = parseName(tokens);
1268                AstNode grantNode = nodeFactory().node(name, parentNode, TYPE_GRANT_ON_TABLE_STATEMENT);
1269                grantNodes.add(grantNode);
1270                while (tokens.canConsume(COMMA)) {
1271                    // Assume more names here
1272                    name = parseName(tokens);
1273                    grantNode = nodeFactory().node(name, parentNode, TYPE_GRANT_ON_TABLE_STATEMENT);
1274                    grantNodes.add(grantNode);
1275                }
1276            }
1277        } else {
1278            // Assume ROLES here
1279            // role [, ...]
1280            AstNode grantNode = nodeFactory().node("roles", parentNode, TYPE_GRANT_ROLES_STATEMENT);
1281            grantNodes.add(grantNode);
1282            do {
1283                String role = parseName(tokens);
1284                nodeFactory().node(role, grantNode, ROLE);
1285            } while (tokens.canConsume(COMMA));
1286        }
1287
1288        tokens.consume("TO");
1289        List<String> grantees = new ArrayList<String>();
1290
1291        do {
1292            String grantee = parseName(tokens);
1293            grantees.add(grantee);
1294        } while (tokens.canConsume(COMMA));
1295
1296        boolean withGrantOption = false;
1297        if (tokens.canConsume("WITH", "GRANT", "OPTION")) {
1298            withGrantOption = true;
1299        }
1300
1301        // Set all properties and children on Grant Nodes
1302        for (AstNode grantNode : grantNodes) {
1303            List<AstNode> copyOfPrivileges = copyOfPrivileges(privileges);
1304            // Attach privileges to grant node
1305            for (AstNode node : copyOfPrivileges) {
1306                node.setParent(grantNode);
1307            }
1308            if (allPrivileges) {
1309                grantNode.setProperty(ALL_PRIVILEGES, allPrivileges);
1310            }
1311            for (String grantee : grantees) {
1312                nodeFactory().node(grantee, grantNode, GRANTEE);
1313            }
1314
1315            if (withGrantOption) {
1316                AstNode optionNode = nodeFactory().node("withGrant", grantNode, TYPE_STATEMENT_OPTION);
1317                optionNode.setProperty(VALUE, "WITH GRANT OPTION");
1318            }
1319        }
1320        AstNode firstGrantNode = grantNodes.get(0);
1321
1322        markEndOfStatement(tokens, firstGrantNode);
1323
1324        // Update additional grant nodes with statement info
1325
1326        for (int i = 1; i < grantNodes.size(); i++) {
1327            AstNode grantNode = grantNodes.get(i);
1328            grantNode.setProperty(DDL_EXPRESSION, firstGrantNode.getProperty(DDL_EXPRESSION));
1329            grantNode.setProperty(DDL_LENGTH, firstGrantNode.getProperty(DDL_LENGTH));
1330            grantNode.setProperty(DDL_START_LINE_NUMBER, firstGrantNode.getProperty(DDL_START_LINE_NUMBER));
1331            grantNode.setProperty(DDL_START_CHAR_INDEX, firstGrantNode.getProperty(DDL_START_CHAR_INDEX));
1332            grantNode.setProperty(DDL_START_COLUMN_NUMBER, firstGrantNode.getProperty(DDL_START_COLUMN_NUMBER));
1333        }
1334
1335        return grantNodes.get(0);
1336    }
1337
1338    /**
1339     * {@inheritDoc}
1340     * 
1341     * @see org.modeshape.sequencer.ddl.StandardDdlParser#parseGrantPrivileges(org.modeshape.sequencer.ddl.DdlTokenStream,
1342     *      java.util.List)
1343     */
1344    @Override
1345    protected void parseGrantPrivileges( DdlTokenStream tokens,
1346                                         List<AstNode> privileges ) throws ParsingException {
1347        // privilege-types
1348        //
1349        // ALL PRIVILEGES | privilege-list
1350        //
1351        // privilege-list
1352        //
1353        // table-privilege {, table-privilege }*
1354        //
1355        // table-privilege
1356        // SELECT [ <left paren> <privilege column list> <right paren> ]
1357        // | DELETE
1358        // | INSERT [ <left paren> <privilege column list> <right paren> ]
1359        // | UPDATE [ <left paren> <privilege column list> <right paren> ]
1360        // | REFERENCES [ <left paren> <privilege column list> <right paren> ]
1361        // | USAGE
1362        // | TRIGGER
1363        // | TRUNCATE
1364        // | CREATE
1365        // | CONNECT
1366        // | TEMPORARY
1367        // | TEMP
1368        // | EXECUTE
1369
1370        // POSTGRES has the following Privileges:
1371        // GRANT { { SELECT | INSERT | UPDATE | DELETE | TRUNCATE | REFERENCES | TRIGGER }
1372
1373        do {
1374            AstNode node = null;
1375
1376            if (tokens.canConsume(DELETE)) {
1377                node = nodeFactory().node("privilege");
1378                node.setProperty(TYPE, DELETE);
1379            } else if (tokens.canConsume(INSERT)) {
1380                node = nodeFactory().node("privilege");
1381                node.setProperty(TYPE, INSERT);
1382                parseColumnNameList(tokens, node, TYPE_COLUMN_REFERENCE);
1383            } else if (tokens.canConsume("REFERENCES")) {
1384                node = nodeFactory().node("privilege");
1385                node.setProperty(TYPE, "REFERENCES");
1386                parseColumnNameList(tokens, node, TYPE_COLUMN_REFERENCE);
1387            } else if (tokens.canConsume(SELECT)) {
1388                node = nodeFactory().node("privilege");
1389                node.setProperty(TYPE, SELECT);
1390                // Could have columns here
1391                // GRANT SELECT (col1), UPDATE (col1) ON mytable TO miriam_rw;
1392
1393                // Let's just swallow the column data.
1394
1395                consumeParenBoundedTokens(tokens, true);
1396            } else if (tokens.canConsume("USAGE")) {
1397                node = nodeFactory().node("privilege");
1398                node.setProperty(TYPE, "USAGE");
1399            } else if (tokens.canConsume(UPDATE)) {
1400                node = nodeFactory().node("privilege");
1401                node.setProperty(TYPE, UPDATE);
1402                parseColumnNameList(tokens, node, TYPE_COLUMN_REFERENCE);
1403            } else if (tokens.canConsume("TRIGGER")) {
1404                node = nodeFactory().node("privilege");
1405                node.setProperty(TYPE, "TRIGGER");
1406            } else if (tokens.canConsume("TRUNCATE")) {
1407                node = nodeFactory().node("privilege");
1408                node.setProperty(TYPE, "TRUNCATE");
1409            } else if (tokens.canConsume("CREATE")) {
1410                node = nodeFactory().node("privilege");
1411                node.setProperty(TYPE, "CREATE");
1412            } else if (tokens.canConsume("CONNECT")) {
1413                node = nodeFactory().node("privilege");
1414                node.setProperty(TYPE, "CONNECT");
1415            } else if (tokens.canConsume("TEMPORARY")) {
1416                node = nodeFactory().node("privilege");
1417                node.setProperty(TYPE, "TEMPORARY");
1418            } else if (tokens.canConsume("TEMP")) {
1419                node = nodeFactory().node("privilege");
1420                node.setProperty(TYPE, "TEMP");
1421            } else if (tokens.canConsume("EXECUTE")) {
1422                node = nodeFactory().node("privilege");
1423                node.setProperty(TYPE, "EXECUTE");
1424            }
1425
1426            if (node == null) {
1427                break;
1428            }
1429            nodeFactory().setType(node, GRANT_PRIVILEGE);
1430            privileges.add(node);
1431
1432        } while (tokens.canConsume(COMMA));
1433
1434    }
1435
1436    private List<AstNode> parseMultipleGrantTargets( DdlTokenStream tokens,
1437                                                     AstNode parentNode,
1438                                                     String nodeType ) throws ParsingException {
1439        List<AstNode> grantNodes = new ArrayList<AstNode>();
1440        String name = parseName(tokens);
1441        AstNode grantNode = nodeFactory().node(name, parentNode, nodeType);
1442        grantNodes.add(grantNode);
1443        while (tokens.canConsume(COMMA)) {
1444            // Assume more names here
1445            name = parseName(tokens);
1446            grantNode = nodeFactory().node(name, parentNode, nodeType);
1447            grantNodes.add(grantNode);
1448        }
1449
1450        return grantNodes;
1451    }
1452
1453    private List<AstNode> copyOfPrivileges( List<AstNode> privileges ) {
1454        List<AstNode> copyOfPrivileges = new ArrayList<AstNode>();
1455        for (AstNode node : privileges) {
1456            copyOfPrivileges.add(node.clone());
1457        }
1458
1459        return copyOfPrivileges;
1460    }
1461
1462    private List<AstNode> parseFunctionAndParameters( DdlTokenStream tokens,
1463                                                      AstNode parentNode ) throws ParsingException {
1464        boolean isFirstFunction = true;
1465        List<AstNode> grantNodes = new ArrayList<AstNode>();
1466
1467        // FUNCTION funcname ( [ [ argmode ] [ argname ] argtype [, ...] ] ) [, ...]
1468
1469        // argmode = [ IN, OUT, INOUT, or VARIADIC ]
1470
1471        // p(a int, b TEXT), q(integer, double)
1472
1473        // [postgresddl:grantOnFunctionStatement] > ddl:grantStatement, postgresddl:functionOperand mixin
1474        // + * (postgresddl:functionParameter) = postgresddl:functionParameter multiple
1475
1476        do {
1477            String name = parseName(tokens);
1478            AstNode grantFunctionNode = nodeFactory().node(name, parentNode, TYPE_GRANT_ON_FUNCTION_STATEMENT);
1479
1480            grantNodes.add(grantFunctionNode);
1481
1482            // Parse Parameter Data
1483            if (tokens.matches(L_PAREN)) {
1484                tokens.consume(L_PAREN);
1485
1486                if (!tokens.canConsume(R_PAREN)) {
1487                    // check for datatype
1488                    do {
1489                        String mode = null;
1490
1491                        if (tokens.matchesAnyOf("IN", "OUT", "INOUT", "VARIADIC")) {
1492                            mode = tokens.consume();
1493                        }
1494                        AstNode paramNode = null;
1495
1496                        DataType dType = getDatatypeParser().parse(tokens);
1497                        if (dType != null) {
1498                            // NO Parameter Name, only DataType
1499                            paramNode = nodeFactory().node("parameter", grantFunctionNode, TYPE_FUNCTION_PARAMETER);
1500                            if (mode != null) {
1501                                paramNode.setProperty(FUNCTION_PARAMETER_MODE, mode);
1502                            }
1503                            getDatatypeParser().setPropertiesOnNode(paramNode, dType);
1504                        } else {
1505                            String paramName = parseName(tokens);
1506                            dType = getDatatypeParser().parse(tokens);
1507                            assert paramName != null;
1508
1509                            paramNode = nodeFactory().node(paramName, grantFunctionNode, TYPE_FUNCTION_PARAMETER);
1510                            if (mode != null) {
1511                                paramNode.setProperty(FUNCTION_PARAMETER_MODE, mode);
1512                            }
1513                            if (dType != null) {
1514                                getDatatypeParser().setPropertiesOnNode(paramNode, dType);
1515                            }
1516                        }
1517                    } while (tokens.canConsume(COMMA));
1518
1519                    tokens.consume(R_PAREN);
1520                }
1521            }
1522
1523            // RESET first parameter flag
1524            if (isFirstFunction) {
1525                isFirstFunction = false;
1526            }
1527        } while (tokens.canConsume(COMMA));
1528
1529        return grantNodes;
1530    }
1531
    /**
     * {@inheritDoc}
     * <p>
     * Delegates directly to the standard parser; overridden only as an explicit extension point for PostgreSQL.
     * 
     * @see org.modeshape.sequencer.ddl.StandardDdlParser#parseSetStatement(org.modeshape.sequencer.ddl.DdlTokenStream,
     *      org.modeshape.sequencer.ddl.node.AstNode)
     */
    @Override
    protected AstNode parseSetStatement( DdlTokenStream tokens,
                                         AstNode parentNode ) throws ParsingException {
        assert tokens != null;
        assert parentNode != null;

        return super.parseSetStatement(tokens, parentNode);
    }
1540
    /**
     * Parses a PostgreSQL COMMENT ON statement, creating a comment node that records the target object's type, its
     * name (when one is parseable), and the comment text.
     * 
     * @param tokens the tokenized {@link DdlTokenStream} of the DDL input content; may not be null
     * @param parentNode the parent {@link AstNode}; may not be null
     * @return the comment statement node; never null
     * @throws ParsingException
     */
    private AstNode parseCommentStatement( DdlTokenStream tokens,
                                           AstNode parentNode ) throws ParsingException {
        assert tokens != null;
        assert parentNode != null;

        markStartOfStatement(tokens);

        /*
        -- COMMENT ON {
        --  TABLE object_name |
        --  COLUMN table_name.column_name |
        --  AGGREGATE agg_name (agg_type [, ...] ) |
        --  CAST (sourcetype AS targettype) |
        --  CONSTRAINT constraint_name ON table_name |
        --  CONVERSION object_name |
        --  DATABASE object_name |
        --  DOMAIN object_name |
        --  FUNCTION func_name ( [ [ argmode ] [ argname ] argtype [, ...] ] ) |
        --  INDEX object_name |
        --  LARGE OBJECT large_object_oid |
        --  OPERATOR op (leftoperand_type, rightoperand_type) |
        --  OPERATOR CLASS object_name USING index_method |
        --  OPERATOR FAMILY object_name USING index_method |
        --  [ PROCEDURAL ] LANGUAGE object_name |
        --  ROLE object_name |
        --  RULE rule_name ON table_name |
        --  SCHEMA object_name |
        --  SEQUENCE object_name |
        --  TABLESPACE object_name |
        --  TEXT SEARCH CONFIGURATION object_name |
        --  TEXT SEARCH DICTIONARY object_name |
        --  TEXT SEARCH PARSER object_name |
        --  TEXT SEARCH TEMPLATE object_name |
        --  TRIGGER trigger_name ON table_name |
        --  TYPE object_name |
        --  VIEW object_name
        --} IS 'text'
         */
        tokens.consume("COMMENT", "ON"); // consumes 'COMMENT' 'ON'

        String objectType = null;
        String objectName = null;

        // First-match dispatch on the target object type. NOTE: branch order matters — multi-word prefixes must be
        // checked before their single-word counterparts (e.g. "OPERATOR FAMILY"/"OPERATOR CLASS" before "OPERATOR",
        // "PROCEDURAL LANGUAGE" before "LANGUAGE").
        if (tokens.matches(TABLE)) {
            objectType = tokens.consume();
            objectName = parseName(tokens);
        } else if (tokens.matches("COLUMN")) {
            objectType = tokens.consume();
            objectName = parseName(tokens);
        } else if (tokens.matches("AGGREGATE")) {
            objectType = tokens.consume();
            objectName = parseName(tokens);
            // (agg_type [, ...] )
            consumeParenBoundedTokens(tokens, true);
        } else if (tokens.matches("CAST")) {
            objectType = tokens.consume();
            // (sourcetype AS targettype) — no object name is recorded for casts
            consumeParenBoundedTokens(tokens, true);
        } else if (tokens.matches("CONSTRAINT")) {
            objectType = tokens.consume();
            objectName = parseName(tokens);
            tokens.consume("ON");
            tokens.consume(); // table_name
        } else if (tokens.matches("CONVERSION")) {
            objectType = tokens.consume();
            objectName = parseName(tokens);
        } else if (tokens.matches("DATABASE")) {
            objectType = tokens.consume();
            objectName = parseName(tokens);
        } else if (tokens.matches("DOMAIN")) {
            objectType = tokens.consume();
            objectName = parseName(tokens);
        } else if (tokens.matches("FUNCTION")) {
            objectType = tokens.consume();
            objectName = parseName(tokens);
            // ( [ [ argmode ] [ argname ] argtype [, ...] ] ) — the argument list is swallowed
            consumeParenBoundedTokens(tokens, true);
        } else if (tokens.matches("INDEX")) {
            objectType = tokens.consume();
            objectName = parseName(tokens);
        } else if (tokens.matches("LARGE", "OBJECT")) {
            tokens.consume("LARGE", "OBJECT");
            objectType = "LARGE OBJECT";
            objectName = parseName(tokens);
        } else if (tokens.matches("OPERATOR", "FAMILY")) {
            tokens.consume("OPERATOR", "FAMILY");
            objectType = "OPERATOR FAMILY";
            objectName = parseName(tokens);
            tokens.consume("USING");
            tokens.consume(); // index_method
        } else if (tokens.matches("OPERATOR", "CLASS")) {
            tokens.consume("OPERATOR", "CLASS");
            objectType = "OPERATOR CLASS";
            objectName = parseName(tokens);
            tokens.consume("USING");
            tokens.consume(); // index_method
        } else if (tokens.matches("OPERATOR")) {
            objectType = tokens.consume();
            objectName = parseName(tokens);
            // (leftoperand_type, rightoperand_type)
            consumeParenBoundedTokens(tokens, true);
        } else if (tokens.matches("PROCEDURAL", "LANGUAGE")) {
            tokens.consume("PROCEDURAL", "LANGUAGE");
            objectType = "PROCEDURAL LANGUAGE";
            objectName = parseName(tokens);
        } else if (tokens.matches("LANGUAGE")) {
            objectType = tokens.consume();
            objectName = parseName(tokens);
        } else if (tokens.matches("ROLE")) {
            objectType = tokens.consume();
            objectName = parseName(tokens);
        } else if (tokens.matches("RULE")) {
            objectType = tokens.consume();
            objectName = parseName(tokens);
            tokens.consume("ON");
            tokens.consume(); // table_name
        } else if (tokens.matches("SCHEMA")) {
            objectType = tokens.consume();
            objectName = parseName(tokens);
        } else if (tokens.matches("SEQUENCE")) {
            objectType = tokens.consume();
            objectName = parseName(tokens);
        } else if (tokens.matches("TABLESPACE")) {
            objectType = tokens.consume();
            objectName = parseName(tokens);
        } else if (tokens.matches("TEXT", "SEARCH", "CONFIGURATION")) {
            tokens.consume("TEXT", "SEARCH", "CONFIGURATION");
            objectType = "TEXT SEARCH CONFIGURATION";
            objectName = parseName(tokens);
        } else if (tokens.matches("TEXT", "SEARCH", "DICTIONARY")) {
            tokens.consume("TEXT", "SEARCH", "DICTIONARY");
            objectType = "TEXT SEARCH DICTIONARY";
            objectName = parseName(tokens);
        } else if (tokens.matches("TEXT", "SEARCH", "PARSER")) {
            tokens.consume("TEXT", "SEARCH", "PARSER");
            objectType = "TEXT SEARCH PARSER";
            objectName = parseName(tokens);
        } else if (tokens.matches("TEXT", "SEARCH", "TEMPLATE")) {
            tokens.consume("TEXT", "SEARCH", "TEMPLATE");
            objectType = "TEXT SEARCH TEMPLATE";
            objectName = parseName(tokens);
        } else if (tokens.matches("TRIGGER")) {
            objectType = tokens.consume();
            objectName = parseName(tokens);
            tokens.consume("ON");
            tokens.consume(); // table_name
        } else if (tokens.matches("TYPE")) {
            objectType = tokens.consume();
            objectName = parseName(tokens);
        } else if (tokens.matches("VIEW")) {
            objectType = tokens.consume();
            objectName = parseName(tokens);
        }

        String commentString = null;

        // The comment text is everything after IS up to the statement terminator, or the literal "NULL".
        tokens.consume("IS");
        if (tokens.matches("NULL")) {
            tokens.consume("NULL");
            commentString = "NULL";
        } else {
            commentString = parseUntilTerminator(tokens).trim();
        }

        AstNode commentNode = null;

        // Targets with no parseable name (e.g. CAST) get a generic "commentOn" node name instead.
        if (objectName != null) {
            commentNode = nodeFactory().node(objectName, parentNode, TYPE_COMMENT_ON_STATEMENT);
            commentNode.setProperty(PostgresDdlLexicon.TARGET_OBJECT_NAME, objectName);
        } else {
            commentNode = nodeFactory().node("commentOn", parentNode, TYPE_COMMENT_ON_STATEMENT);
        }
        commentNode.setProperty(PostgresDdlLexicon.COMMENT, commentString);
        commentNode.setProperty(PostgresDdlLexicon.TARGET_OBJECT_TYPE, objectType);

        markEndOfStatement(tokens, commentNode);

        return commentNode;
    }
1717
1718    /**
1719     * Utility method designed to parse columns within an ALTER TABLE ADD statement.
1720     * 
1721     * @param tokens the tokenized {@link DdlTokenStream} of the DDL input content; may not be null
1722     * @param tableNode the parent {@link AstNode} node; may not be null
1723     * @param isAlterTable
1724     * @throws ParsingException
1725     */
1726    protected void parseColumns( DdlTokenStream tokens,
1727                                 AstNode tableNode,
1728                                 boolean isAlterTable ) throws ParsingException {
1729        assert tokens != null;
1730        assert tableNode != null;
1731
1732        String tableElementString = getTableElementsString(tokens, false);
1733
1734        DdlTokenStream localTokens = new DdlTokenStream(tableElementString, DdlTokenStream.ddlTokenizer(false), false);
1735
1736        localTokens.start();
1737
1738        StringBuilder unusedTokensSB = new StringBuilder();
1739
1740        do {
1741            if (isColumnDefinitionStart(localTokens)) {
1742                parseColumnDefinition(localTokens, tableNode, isAlterTable);
1743            } else {
1744                // THIS IS AN ERROR. NOTHING FOUND.
1745                // NEED TO absorb tokens
1746                unusedTokensSB.append(SPACE).append(localTokens.consume());
1747            }
1748        } while (localTokens.canConsume(COMMA));
1749
1750        if (unusedTokensSB.length() > 0) {
1751            String msg = DdlSequencerI18n.unusedTokensParsingColumnDefinition.text(tableNode.getName());
1752            DdlParserProblem problem = new DdlParserProblem(Problems.WARNING, getCurrentMarkedPosition(), msg);
1753            problem.setUnusedSource(unusedTokensSB.toString());
1754            addProblem(problem, tableNode);
1755        }
1756    }
1757
1758    private AstNode parseCreateRuleStatement( DdlTokenStream tokens,
1759                                              AstNode parentNode ) throws ParsingException {
1760        assert tokens != null;
1761        assert parentNode != null;
1762
1763        // CREATE [ OR REPLACE ] RULE name AS ON event
1764        // TO table [ WHERE condition ]
1765        // DO [ ALSO | INSTEAD ] { NOTHING | command | ( command ; command ... ) }
1766        //
1767        // EXAMPLE: CREATE RULE notify_me AS ON UPDATE TO mytable DO ALSO NOTIFY mytable;
1768        // parseStatement(tokens, STMT_CREATE_RULE, parentNode, TYPE_CREATE_RULE_STATEMENT);
1769
1770        markStartOfStatement(tokens);
1771
1772        boolean isReplace = tokens.canConsume(STMT_CREATE_OR_REPLACE_RULE);
1773        tokens.canConsume(STMT_CREATE_RULE);
1774
1775        String name = parseName(tokens);
1776
1777        AstNode node = nodeFactory().node(name, parentNode, TYPE_CREATE_RULE_STATEMENT);
1778        if (isReplace) {
1779            // TODO: SET isReplace = TRUE to node (possibly a cnd mixin of "replaceable"
1780        }
1781        parseUntilTerminatorIgnoreEmbeddedStatements(tokens);
1782
1783        markEndOfStatement(tokens, node);
1784
1785        return node;
1786    }
1787
1788    private AstNode parseCreateFunctionStatement( DdlTokenStream tokens,
1789                                                  AstNode parentNode ) throws ParsingException {
1790        assert tokens != null;
1791        assert parentNode != null;
1792
1793        markStartOfStatement(tokens);
1794
1795        boolean isReplace = tokens.canConsume(STMT_CREATE_OR_REPLACE_FUNCTION);
1796
1797        tokens.canConsume(STMT_CREATE_FUNCTION);
1798
1799        String name = parseName(tokens);
1800
1801        AstNode node = nodeFactory().node(name, parentNode, TYPE_CREATE_FUNCTION_STATEMENT);
1802
1803        if (isReplace) {
1804            // TODO: SET isReplace = TRUE to node (possibly a cnd mixin of "replaceable"
1805        }
1806
1807        parseUntilTerminator(tokens);
1808
1809        markEndOfStatement(tokens, node);
1810
1811        return node;
1812    }
1813
1814    /**
1815     * {@inheritDoc}
1816     * 
1817     * @see org.modeshape.sequencer.ddl.StandardDdlParser#getValidSchemaChildTypes()
1818     */
1819    @Override
1820    protected String[] getValidSchemaChildTypes() {
1821        return PostgresStatementStartPhrases.VALID_SCHEMA_CHILD_STMTS;
1822    }
1823
1824    /**
1825     * {@inheritDoc}
1826     * 
1827     * @see org.modeshape.sequencer.ddl.StandardDdlParser#getDataTypeStartWords()
1828     */
1829    @Override
1830    protected List<String> getCustomDataTypeStartWords() {
1831        return PostgresDataTypes.CUSTOM_DATATYPE_START_WORDS;
1832    }
1833
1834    class PostgresDataTypeParser extends DataTypeParser {
1835
1836        /**
1837         * {@inheritDoc}
1838         * 
1839         * @see org.modeshape.sequencer.ddl.datatype.DataTypeParser#isCustomDataType(org.modeshape.sequencer.ddl.DdlTokenStream)
1840         */
1841        @Override
1842        protected boolean isCustomDataType( DdlTokenStream tokens ) throws ParsingException {
1843            // Loop through the registered statement start string arrays and look for exact matches.
1844
1845            for (String[] stmts : postgresDataTypeStrings) {
1846                if (tokens.matches(stmts)) return true;
1847            }
1848            return super.isCustomDataType(tokens);
1849        }
1850
1851        /**
1852         * {@inheritDoc}
1853         * 
1854         * @see org.modeshape.sequencer.ddl.datatype.DataTypeParser#parseApproxNumericType(org.modeshape.sequencer.ddl.DdlTokenStream)
1855         */
1856        @Override
1857        protected DataType parseApproxNumericType( DdlTokenStream tokens ) throws ParsingException {
1858            DataType result = null;
1859            String typeName = null;
1860
1861            if (tokens.matches(PostgresDataTypes.DTYPE_FLOAT4) || tokens.matches(PostgresDataTypes.DTYPE_FLOAT8)) {
1862                typeName = tokens.consume();
1863                result = new DataType(typeName);
1864                int precision = 0;
1865                if (tokens.matches('(')) {
1866                    precision = (int)parseBracketedLong(tokens, result);
1867                }
1868                result.setPrecision(precision);
1869            }
1870
1871            if (result == null) {
1872                result = super.parseApproxNumericType(tokens);
1873            }
1874
1875            return result;
1876        }
1877
1878        /**
1879         * {@inheritDoc}
1880         * 
1881         * @see org.modeshape.sequencer.ddl.datatype.DataTypeParser#parseBitStringType(org.modeshape.sequencer.ddl.DdlTokenStream)
1882         */
1883        @Override
1884        protected DataType parseBitStringType( DdlTokenStream tokens ) throws ParsingException {
1885            return super.parseBitStringType(tokens);
1886        }
1887
1888        /**
1889         * {@inheritDoc}
1890         * 
1891         * @see org.modeshape.sequencer.ddl.datatype.DataTypeParser#parseCharStringType(org.modeshape.sequencer.ddl.DdlTokenStream)
1892         */
1893        @Override
1894        protected DataType parseCharStringType( DdlTokenStream tokens ) throws ParsingException {
1895            DataType result = super.parseCharStringType(tokens);
1896
1897            tokens.canConsume("FOR", "BIT", "DATA");
1898
1899            return result;
1900        }
1901
1902        /**
1903         * {@inheritDoc}
1904         * 
1905         * @see org.modeshape.sequencer.ddl.datatype.DataTypeParser#parseCustomType(org.modeshape.sequencer.ddl.DdlTokenStream)
1906         */
1907        @Override
1908        protected DataType parseCustomType( DdlTokenStream tokens ) throws ParsingException {
1909            DataType result = null;
1910            String typeName = null;
1911
1912            if (tokens.matches(PostgresDataTypes.DTYPE_BIGSERIAL) || tokens.matches(PostgresDataTypes.DTYPE_SERIAL)
1913                || tokens.matches(PostgresDataTypes.DTYPE_SERIAL4) || tokens.matches(PostgresDataTypes.DTYPE_SERIAL8)
1914                || tokens.matches(PostgresDataTypes.DTYPE_INT2) || tokens.matches(PostgresDataTypes.DTYPE_INT4)
1915                || tokens.matches(PostgresDataTypes.DTYPE_INT8) || tokens.matches(PostgresDataTypes.DTYPE_BOX)
1916                || tokens.matches(PostgresDataTypes.DTYPE_BOOL) || tokens.matches(PostgresDataTypes.DTYPE_BOOLEAN)
1917                || tokens.matches(PostgresDataTypes.DTYPE_BYTEA) || tokens.matches(PostgresDataTypes.DTYPE_CIDR)
1918                || tokens.matches(PostgresDataTypes.DTYPE_CIRCLE) || tokens.matches(PostgresDataTypes.DTYPE_INET)
1919                || tokens.matches(PostgresDataTypes.DTYPE_LINE) || tokens.matches(PostgresDataTypes.DTYPE_LSEG)
1920                || tokens.matches(PostgresDataTypes.DTYPE_MACADDR) || tokens.matches(PostgresDataTypes.DTYPE_MONEY)
1921                || tokens.matches(PostgresDataTypes.DTYPE_PATH) || tokens.matches(PostgresDataTypes.DTYPE_POINT)
1922                || tokens.matches(PostgresDataTypes.DTYPE_POLYGON) || tokens.matches(PostgresDataTypes.DTYPE_TEXT)
1923                || tokens.matches(PostgresDataTypes.DTYPE_TSQUERY) || tokens.matches(PostgresDataTypes.DTYPE_TSVECTOR)
1924                || tokens.matches(PostgresDataTypes.DTYPE_TXID_SNAPSHOT) || tokens.matches(PostgresDataTypes.DTYPE_UUID)
1925                || tokens.matches(PostgresDataTypes.DTYPE_VARBIT) || tokens.matches(PostgresDataTypes.DTYPE_XML)) {
1926                typeName = tokens.consume();
1927                result = new DataType(typeName);
1928            }
1929
1930            if (result == null) {
1931                super.parseCustomType(tokens);
1932            }
1933            return result;
1934        }
1935
1936        /**
1937         * {@inheritDoc}
1938         * 
1939         * @see org.modeshape.sequencer.ddl.datatype.DataTypeParser#parseDateTimeType(org.modeshape.sequencer.ddl.DdlTokenStream)
1940         */
1941        @Override
1942        protected DataType parseDateTimeType( DdlTokenStream tokens ) throws ParsingException {
1943            DataType dtype = super.parseDateTimeType(tokens);
1944
1945            tokens.canConsume("WITHOUT", "TIME", "ZONE");
1946
1947            return dtype;
1948        }
1949
1950        /**
1951         * {@inheritDoc}
1952         * 
1953         * @see org.modeshape.sequencer.ddl.datatype.DataTypeParser#parseExactNumericType(org.modeshape.sequencer.ddl.DdlTokenStream)
1954         */
1955        @Override
1956        protected DataType parseExactNumericType( DdlTokenStream tokens ) throws ParsingException {
1957            return super.parseExactNumericType(tokens);
1958        }
1959
1960    }
1961
1962}