001/*
002 * ModeShape (http://www.modeshape.org)
003 *
004 * Licensed under the Apache License, Version 2.0 (the "License");
005 * you may not use this file except in compliance with the License.
006 * You may obtain a copy of the License at
007 *
008 *       http://www.apache.org/licenses/LICENSE-2.0
009 *
010 * Unless required by applicable law or agreed to in writing, software
011 * distributed under the License is distributed on an "AS IS" BASIS,
012 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
013 * See the License for the specific language governing permissions and
014 * limitations under the License.
015 */
016/**
017 * This class provides basic parsing of SQL-92 based DDL files.  The initial implementation does NOT handle generic SQL query
018 * statements, but rather database schema manipulation (i.e. CREATE, DROP, ALTER, etc...)
019 * 
020 */
021package org.modeshape.sequencer.ddl;
022
023import static org.modeshape.sequencer.ddl.StandardDdlLexicon.ALL_PRIVILEGES;
024import static org.modeshape.sequencer.ddl.StandardDdlLexicon.CHECK_SEARCH_CONDITION;
025import static org.modeshape.sequencer.ddl.StandardDdlLexicon.COLLATION_CHARACTER_SET_NAME;
026import static org.modeshape.sequencer.ddl.StandardDdlLexicon.COLLATION_NAME;
027import static org.modeshape.sequencer.ddl.StandardDdlLexicon.COLLATION_SOURCE;
028import static org.modeshape.sequencer.ddl.StandardDdlLexicon.CONSTRAINT_TYPE;
029import static org.modeshape.sequencer.ddl.StandardDdlLexicon.CREATE_VIEW_QUERY_EXPRESSION;
030import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DDL_EXPRESSION;
031import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DDL_LENGTH;
032import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DDL_PROBLEM;
033import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DDL_START_CHAR_INDEX;
034import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DDL_START_COLUMN_NUMBER;
035import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DDL_START_LINE_NUMBER;
036import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DEFAULT_ID_CURRENT_USER;
037import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DEFAULT_ID_DATETIME;
038import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DEFAULT_ID_LITERAL;
039import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DEFAULT_ID_NULL;
040import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DEFAULT_ID_SESSION_USER;
041import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DEFAULT_ID_SYSTEM_USER;
042import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DEFAULT_ID_USER;
043import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DEFAULT_OPTION;
044import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DEFAULT_PRECISION;
045import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DEFAULT_VALUE;
046import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DROP_BEHAVIOR;
047import static org.modeshape.sequencer.ddl.StandardDdlLexicon.EXISTING_NAME;
048import static org.modeshape.sequencer.ddl.StandardDdlLexicon.GRANTEE;
049import static org.modeshape.sequencer.ddl.StandardDdlLexicon.GRANT_PRIVILEGE;
050import static org.modeshape.sequencer.ddl.StandardDdlLexicon.MESSAGE;
051import static org.modeshape.sequencer.ddl.StandardDdlLexicon.NULLABLE;
052import static org.modeshape.sequencer.ddl.StandardDdlLexicon.PAD_ATTRIBUTE;
053import static org.modeshape.sequencer.ddl.StandardDdlLexicon.PAD_ATTRIBUTE_NO_PAD;
054import static org.modeshape.sequencer.ddl.StandardDdlLexicon.PAD_ATTRIBUTE_PAD;
055import static org.modeshape.sequencer.ddl.StandardDdlLexicon.PROBLEM_LEVEL;
056import static org.modeshape.sequencer.ddl.StandardDdlLexicon.PROPERTY_VALUE;
057import static org.modeshape.sequencer.ddl.StandardDdlLexicon.SOURCE_CHARACTER_SET_NAME;
058import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TARGET_CHARACTER_SET_NAME;
059import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TEMPORARY;
060import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE;
061import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_ADD_TABLE_CONSTRAINT_DEFINITION;
062import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_ALTER_COLUMN_DEFINITION;
063import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_ALTER_DOMAIN_STATEMENT;
064import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_ALTER_TABLE_STATEMENT;
065import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_COLUMN_DEFINITION;
066import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_COLUMN_REFERENCE;
067import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CONSTRAINT_ATTRIBUTE;
068import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_ASSERTION_STATEMENT;
069import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_CHARACTER_SET_STATEMENT;
070import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_COLLATION_STATEMENT;
071import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_DOMAIN_STATEMENT;
072import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_SCHEMA_STATEMENT;
073import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_TABLE_STATEMENT;
074import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_TRANSLATION_STATEMENT;
075import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_VIEW_STATEMENT;
076import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_ASSERTION_STATEMENT;
077import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_CHARACTER_SET_STATEMENT;
078import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_COLLATION_STATEMENT;
079import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_COLUMN_DEFINITION;
080import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_DOMAIN_STATEMENT;
081import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_SCHEMA_STATEMENT;
082import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_TABLE_CONSTRAINT_DEFINITION;
083import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_TABLE_STATEMENT;
084import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_TRANSLATION_STATEMENT;
085import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_VIEW_STATEMENT;
086import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_FK_COLUMN_REFERENCE;
087import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_GRANT_ON_CHARACTER_SET_STATEMENT;
088import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_GRANT_ON_COLLATION_STATEMENT;
089import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_GRANT_ON_DOMAIN_STATEMENT;
090import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_GRANT_ON_TABLE_STATEMENT;
091import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_GRANT_ON_TRANSLATION_STATEMENT;
092import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_INSERT_STATEMENT;
093import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_MISSING_TERMINATOR;
094import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_PROBLEM;
095import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_REVOKE_ON_CHARACTER_SET_STATEMENT;
096import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_REVOKE_ON_COLLATION_STATEMENT;
097import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_REVOKE_ON_DOMAIN_STATEMENT;
098import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_REVOKE_ON_TABLE_STATEMENT;
099import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_REVOKE_ON_TRANSLATION_STATEMENT;
100import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_SET_STATEMENT;
101import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_STATEMENT;
102import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_STATEMENT_OPTION;
103import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_TABLE_CONSTRAINT;
104import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_TABLE_REFERENCE;
105import static org.modeshape.sequencer.ddl.StandardDdlLexicon.VALUE;
106import static org.modeshape.sequencer.ddl.StandardDdlLexicon.WITH_GRANT_OPTION;
107import java.math.BigInteger;
108import java.util.ArrayList;
109import java.util.Arrays;
110import java.util.Collections;
111import java.util.LinkedList;
112import java.util.List;
113import org.modeshape.common.annotation.NotThreadSafe;
114import org.modeshape.common.logging.Logger;
115import org.modeshape.common.text.ParsingException;
116import org.modeshape.common.text.Position;
117import org.modeshape.common.text.TokenStream;
118import org.modeshape.common.util.CheckArg;
119import org.modeshape.sequencer.ddl.DdlTokenStream.DdlTokenizer;
120import org.modeshape.sequencer.ddl.datatype.DataType;
121import org.modeshape.sequencer.ddl.datatype.DataTypeParser;
122import org.modeshape.sequencer.ddl.node.AstNode;
123import org.modeshape.sequencer.ddl.node.AstNodeFactory;
124
125/**
126 * Standard SQL 92 DDL file content parser.
127 */
128@NotThreadSafe
129public class StandardDdlParser implements DdlParser, DdlConstants, DdlConstants.StatementStartPhrases {
130
131    /**
132     * The Standard DDL parser identifier.
133     */
134    public static final String ID = "SQL92";
135
136    private static final Logger LOGGER = Logger.getLogger(StandardDdlParser.class);
137
138    private boolean testMode = false;
139    private final List<DdlParserProblem> problems;
140    private final AstNodeFactory nodeFactory;
141    private AstNode rootNode;
142    private List<String> allDataTypeStartWords = null;
143    private DataTypeParser datatypeParser = null;
144    private String terminator = DEFAULT_TERMINATOR;
145    private boolean useTerminator = false;
146    private Position currentMarkedPosition;
147
    /**
     * Constructs a parser that uses the statement terminator by default and delegates data-type parsing to a standard
     * {@link DataTypeParser}.
     */
    public StandardDdlParser() {
        super();
        setDoUseTerminator(true);
        setDatatypeParser(new DataTypeParser());
        nodeFactory = new AstNodeFactory();
        problems = new ArrayList<DdlParserProblem>();
    }
155
156    /**
157     * Returns the data type parser instance.
158     * 
159     * @return the {@link DataTypeParser}
160     */
161    public DataTypeParser getDatatypeParser() {
162        return datatypeParser;
163    }
164
165    /**
166     * @param datatypeParser
167     */
168    public void setDatatypeParser( DataTypeParser datatypeParser ) {
169        this.datatypeParser = datatypeParser;
170    }
171
172    /**
173     * Method to access the node utility class.
174     * 
175     * @return the instance of the {@link AstNodeFactory} node utility class
176     */
177    public AstNodeFactory nodeFactory() {
178        return this.nodeFactory;
179    }
180
181    /**
182     * @return rootNode
183     */
184    public AstNode getRootNode() {
185        return rootNode;
186    }
187
188    /**
189     * @param rootNode Sets rootNode to the specified value.
190     */
191    public void setRootNode( AstNode rootNode ) {
192        this.rootNode = rootNode;
193    }
194
195    /**
196     * {@inheritDoc}
197     * 
198     * @see org.modeshape.sequencer.ddl.DdlParser#score(java.lang.String, java.lang.String,
199     *      org.modeshape.sequencer.ddl.DdlParserScorer)
200     */
201    @Override
202    public Object score( String ddl,
203                         String fileName,
204                         DdlParserScorer scorer ) throws ParsingException {
205        CheckArg.isNotNull(ddl, "ddl");
206        CheckArg.isNotNull(scorer, "scorer");
207
208        if (fileName != null) {
209            // Score the filename using the identifier only ...
210            scorer.scoreText(fileName, 2, getIdentifyingKeywords());
211        }
212
213        // Create the state of this parser ...
214        problems.clear();
215        boolean includeComments = true;
216        DdlTokenStream tokens = new DdlTokenStream(ddl, DdlTokenStream.ddlTokenizer(includeComments), false);
217        initializeTokenStream(tokens);
218        tokens.start();
219
220        testPrint("\n== >> StandardDdlParser.parse() PARSING STARTED: ");
221
222        // Consume the first block of comments ...
223        while (tokens.matches(DdlTokenizer.COMMENT)) {
224            // Consume the comment ...
225            String comment = tokens.consume();
226            scorer.scoreText(comment, 2, getIdentifyingKeywords());
227        }
228
229        // Compute the score for the rest of this content ...
230        computeScore(tokens, scorer);
231
232        // Return the tokens so parse(...) won't have to re-tokenize ...
233        return tokens;
234    }
235
236    protected void computeScore( DdlTokenStream tokens,
237                                 DdlParserScorer scorer ) {
238        while (tokens.hasNext()) {
239            int score = tokens.computeNextStatementStartKeywordCount();
240
241            if ((score == 0) && tokens.isNextKeyWord()) {
242                score = 1;
243            }
244
245            if (score != 0) {
246                scorer.scoreStatements(score);
247            }
248
249            tokens.consume();
250        }
251    }
252
253    public String[] getIdentifyingKeywords() {
254        return new String[] {getId()};
255    }
256
257    /**
258     * {@inheritDoc}
259     * 
260     * @see org.modeshape.sequencer.ddl.DdlParser#parse(java.lang.String, org.modeshape.sequencer.ddl.node.AstNode,
261     *      java.lang.Object)
262     */
263    @Override
264    public void parse( String ddl,
265                       AstNode rootNode,
266                       Object scoreReturnObject ) throws ParsingException {
267        CheckArg.isNotNull(ddl, "ddl");
268        CheckArg.isNotNull(rootNode, "rootNode");
269        problems.clear();
270        setRootNode(rootNode);
271
272        DdlTokenStream tokens = null;
273        if (scoreReturnObject instanceof DdlTokenStream) {
274            tokens = (DdlTokenStream)scoreReturnObject;
275            tokens.rewind();
276        } else {
277            // Need to create the token stream ...
278            boolean includeComments = false;
279            tokens = new DdlTokenStream(ddl, DdlTokenStream.ddlTokenizer(includeComments), false);
280            initializeTokenStream(tokens);
281            tokens.start();
282        }
283
284        testPrint("\n== >> StandardDdlParser.parse() PARSING STARTED: ");
285
286        // Simply move to the next statement start (registered prior to tokenizing).
287        while (moveToNextStatementStart(tokens)) {
288
289            // It is assumed that if a statement is registered, the registering dialect will handle the parsing of that object
290            // and successfully create a statement {@link AstNode}
291            AstNode stmtNode = parseNextStatement(tokens, rootNode);
292            if (stmtNode == null) {
293                markStartOfStatement(tokens);
294                String stmtName = tokens.consume();
295                stmtNode = parseIgnorableStatement(tokens, stmtName, rootNode);
296                markEndOfStatement(tokens, stmtNode);
297            }
298            // testPrint("== >> Found Statement" + "(" + (++count) + "):\n" + stmtNode);
299        }
300
301        postProcess(rootNode);
302
303        rewrite(tokens, rootNode);
304
305        for (DdlParserProblem problem : problems) {
306            attachNewProblem(problem, rootNode);
307        }
308
309        // // Compute the score based upon the number of AST nodes ...
310        // // System.out.println("\n\n " + getId() + " (score=" + (getScore(rootNode) - 1 - (problems.size() * 2)) + ")\n" +
311        // // rootNode);
312        // int score = getScore(rootNode) - 1; // exclude the root, since we didn't create it
313        // score -= (problems.size() * 2); // remove double the # of problems
314        // scorer.scoreStatements(score);
315
316        if (testMode) {
317            // testPrint("== >> StandardDdlParser.parse() PARSING COMPLETE: " + statements.size() + " statements parsed.\n\n");
318            int count = 0;
319            for (AstNode child : rootNode.getChildren()) {
320                testPrint("== >> Found Statement" + "(" + (++count) + "):\n" + child);
321            }
322        }
323    }
324
325    /**
326     * Method called by {@link #score(String, String, DdlParserScorer)} and {@link #parse(String, AstNode, Object)} to initialize
327     * the {@link DdlTokenStream token stream}, giving subclasses a chance to {@link DdlTokenStream#registeredKeyWords register
328     * key words} and {@link DdlTokenStream#registerStatementStartPhrase(String[]) statement start phrases}.
329     * 
330     * @param tokens the stream of tokens
331     */
332    protected void initializeTokenStream( DdlTokenStream tokens ) {
333        tokens.registerKeyWords(SQL_92_RESERVED_WORDS);
334        tokens.registerStatementStartPhrase(SQL_92_ALL_PHRASES);
335    }
336
337    /**
338     * Performs token match checks for initial statement type and delegates to specific parser methods. If no specific statement
339     * is found, then a call is made to parse a custom statement type. Subclasses may override this method, but the
340     * {@link StandardDdlParser}.parseCustomStatement() method is designed to allow for parsing db-specific statement types.
341     * 
342     * @param tokens the tokenized {@link DdlTokenStream} of the DDL input content; may not be null
343     * @param node the top level {@link AstNode}; may not be null
344     * @return node the new statement node
345     */
346    protected AstNode parseNextStatement( DdlTokenStream tokens,
347                                          AstNode node ) {
348        assert tokens != null;
349        assert node != null;
350
351        AstNode stmtNode = null;
352
353        if (tokens.matches(CREATE)) {
354            stmtNode = parseCreateStatement(tokens, node);
355        } else if (tokens.matches(ALTER)) {
356            stmtNode = parseAlterStatement(tokens, node);
357        } else if (tokens.matches(DROP)) {
358            stmtNode = parseDropStatement(tokens, node);
359        } else if (tokens.matches(INSERT)) {
360            stmtNode = parseInsertStatement(tokens, node);
361        } else if (tokens.matches(SET)) {
362            stmtNode = parseSetStatement(tokens, node);
363        } else if (tokens.matches(GRANT)) {
364            stmtNode = parseGrantStatement(tokens, node);
365        } else if (tokens.matches(REVOKE)) {
366            stmtNode = parseRevokeStatement(tokens, node);
367        }
368
369        if (stmtNode == null) {
370            stmtNode = parseCustomStatement(tokens, node);
371        }
372
373        return stmtNode;
374    }
375
    /**
     * Advances the token stream to the next registered statement-start phrase, consuming any intervening tokens.
     * Consumed tokens that cannot be recognized are accumulated and reported as a WARNING problem, unless they form a
     * terminated-but-unknown statement or a subclass claims them via {@link #handleUnknownToken}.
     * 
     * @param tokens the stream of tokens; may not be null
     * @return true if the stream is now positioned at a statement start, or false if the end of the content was reached
     * @throws ParsingException
     */
    private boolean moveToNextStatementStart( DdlTokenStream tokens ) throws ParsingException {
        assert tokens != null;

        StringBuilder sb = new StringBuilder();
        DdlParserProblem problem = null;

        // Check to see if any more tokens exists
        if (tokens.hasNext()) {
            while (tokens.hasNext()) {
                if (tokens.canConsume(DdlTokenizer.COMMENT)) continue;

                // If the next token is a STATEMENT_KEY, then stop
                if (!tokens.matches(DdlTokenizer.STATEMENT_KEY)) {
                    // If the next token is NOT a statement, create a problem statement in case it can't be fully recognized as
                    // a statement.
                    if (problem == null) {
                        markStartOfStatement(tokens);

                        String msg = DdlSequencerI18n.unusedTokensDiscovered.text(tokens.nextPosition().getLine(),
                                                                                  tokens.nextPosition().getColumn());
                        problem = new DdlParserProblem(DdlConstants.Problems.WARNING, tokens.nextPosition(), msg);
                    }

                    String nextTokenValue = null;

                    // For known, parsed statements, the terminator is consumed in the markEndOfStatement() method. So if we get
                    // here, then we know we've got an unknown statement.
                    if (tokens.matches(getTerminator()) && sb.length() > 0) {
                        nextTokenValue = getTerminator();
                        // Let's call this a statement up until now
                        AstNode unknownNode = unknownTerminatedNode(getRootNode());
                        markEndOfStatement(tokens, unknownNode);
                        // We've determined that it's just an unknown node, which we determine is not a problem node.
                        problem = null;
                    } else {
                        // Just keep consuming, but check each token value and allow sub-classes to handle the token if they wish.
                        // ORACLE, for instance, can terminate a complex statement with a slash, '/'. Calling
                        // handleUnknownToken() allows that dialect to create its own statement node that can be assessed and
                        // used during the rewrite() call at the end of parsing.
                        nextTokenValue = tokens.consume();
                        AstNode unknownNode = handleUnknownToken(tokens, nextTokenValue);
                        if (unknownNode != null) {
                            markEndOfStatement(tokens, unknownNode);
                            // We've determined that it's just an unknown node, which we determine is not a problem node.
                            problem = null;
                        }
                    }
                    sb.append(SPACE).append(nextTokenValue);

                } else {
                    // If we have a problem, add it.
                    if (problem != null && sb.length() > 0) {
                        problem.setUnusedSource(sb.toString());
                        addProblem(problem);
                    }
                    return true;
                }
            }

            // If we still have a problem, add it.
            if (problem != null && sb.length() > 0) {
                problem.setUnusedSource(sb.toString());
                addProblem(problem);
            }
        }
        return false;
    }
443
    /**
     * Records the problem in this parser's problem list and immediately attaches a problem node under the given node.
     * 
     * @param problem the problem to record; may not be null
     * @param node the node under which the problem node is attached; may not be null
     */
    public final void addProblem( DdlParserProblem problem,
                                  AstNode node ) {
        addProblem(problem);
        attachNewProblem(problem, node);
    }
449
    /**
     * Records the problem in this parser's problem list; problems are attached to the AST at the end of parsing.
     * 
     * @param problem the problem to record; may not be null
     */
    public final void addProblem( DdlParserProblem problem ) {
        problems.add(problem);
    }
453
    /**
     * Returns the live (mutable) list of problems recorded so far during scoring/parsing.
     * 
     * @return the problem list; never null
     */
    public final List<DdlParserProblem> getProblems() {
        return this.problems;
    }
457
458    public final void attachNewProblem( DdlParserProblem problem,
459                                        AstNode parentNode ) {
460        assert problem != null;
461        assert parentNode != null;
462
463        AstNode problemNode = nodeFactory().node(DDL_PROBLEM, parentNode, TYPE_PROBLEM);
464        problemNode.setProperty(PROBLEM_LEVEL, problem.getLevel());
465        problemNode.setProperty(MESSAGE, problem.toString() + "[" + problem.getUnusedSource() + "]");
466
467        testPrint(problem.toString());
468    }
469
    /**
     * Hook called at the end of {@link #parse} allowing subclasses to restructure the AST once all statements have been
     * parsed. The default implementation removes any "missing terminator" nodes from the tree.
     * 
     * @param tokens the token stream that was parsed; may not be null
     * @param rootNode the root of the AST; may not be null
     */
    protected void rewrite( DdlTokenStream tokens,
                            AstNode rootNode ) {
        assert tokens != null;
        assert rootNode != null;
        // Walk the tree and remove any missing-terminator nodes

        removeMissingTerminatorNodes(rootNode);
    }
478
479    protected void removeMissingTerminatorNodes( AstNode parentNode ) {
480        assert parentNode != null;
481        // Walk the tree and remove any missing missing terminator nodes
482        List<AstNode> copyOfNodes = new ArrayList<AstNode>(parentNode.getChildren());
483
484        for (AstNode child : copyOfNodes) {
485            if (nodeFactory().hasMixinType(child, TYPE_MISSING_TERMINATOR)) {
486                parentNode.removeChild(child);
487            } else {
488                removeMissingTerminatorNodes(child);
489            }
490        }
491    }
492
493    /**
494     * Merges second node into first node by re-setting expression source and length.
495     * 
496     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
497     * @param firstNode the node to merge into; may not be null
498     * @param secondNode the node to merge into first node; may not be null
499     */
500    public void mergeNodes( DdlTokenStream tokens,
501                            AstNode firstNode,
502                            AstNode secondNode ) {
503        assert tokens != null;
504        assert firstNode != null;
505        assert secondNode != null;
506
507        int firstStartIndex = (Integer)firstNode.getProperty(DDL_START_CHAR_INDEX);
508        int secondStartIndex = (Integer)secondNode.getProperty(DDL_START_CHAR_INDEX);
509        int deltaLength = ((String)secondNode.getProperty(DDL_EXPRESSION)).length();
510        Position startPosition = new Position(firstStartIndex, 1, 0);
511        Position endPosition = new Position((secondStartIndex + deltaLength), 1, 0);
512        String source = tokens.getContentBetween(startPosition, endPosition);
513        firstNode.setProperty(DDL_EXPRESSION, source);
514        firstNode.setProperty(DDL_LENGTH, source.length());
515    }
516
517    /**
518     * Utility method subclasses can override to check unknown tokens and perform additional node manipulation. Example would be
519     * in Oracle dialect for CREATE FUNCTION statements that can end with an '/' character because statement can contain multiple
520     * statements.
521     * 
522     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
523     * @param tokenValue the string value of the unknown token; never null
524     * @return the new node
525     * @throws ParsingException
526     */
527    public AstNode handleUnknownToken( DdlTokenStream tokens,
528                                       String tokenValue ) throws ParsingException {
529        assert tokens != null;
530        assert tokenValue != null;
531        // DEFAULT IMPLEMENTATION DOES NOTHING
532        return null;
533    }
534
535    /**
536     * Parses DDL CREATE statement based on SQL 92 specifications.
537     * 
538     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
539     * @param parentNode the parent {@link AstNode} node; may not be null
540     * @return the parsed CREATE {@link AstNode}
541     * @throws ParsingException
542     */
543    protected AstNode parseCreateStatement( DdlTokenStream tokens,
544                                            AstNode parentNode ) throws ParsingException {
545        assert tokens != null;
546        assert parentNode != null;
547
548        AstNode stmtNode = null;
549
550        // DEFAULT DOES NOTHING
551        // Subclasses can implement additional parsing
552        // System.out.println(" >>> FOUND [CREATE] STATEMENT: TOKEN = " + tokens.consume() + " " + tokens.consume() + " " +
553        // tokens.consume());
554        // SQL 92 CREATE OPTIONS:
555        // CREATE SCHEMA
556        // CREATE DOMAIN
557        // CREATE [ { GLOBAL | LOCAL } TEMPORARY ] TABLE
558        // CREATE VIEW
559        // CREATE ASSERTION
560        // CREATE CHARACTER SET
561        // CREATE COLLATION
562        // CREATE TRANSLATION
563
564        if (tokens.matches(STMT_CREATE_SCHEMA)) {
565            stmtNode = parseCreateSchemaStatement(tokens, parentNode);
566        } else if (tokens.matches(STMT_CREATE_TABLE) || tokens.matches(STMT_CREATE_GLOBAL_TEMPORARY_TABLE)
567                   || tokens.matches(STMT_CREATE_LOCAL_TEMPORARY_TABLE)) {
568            stmtNode = parseCreateTableStatement(tokens, parentNode);
569        } else if (tokens.matches(STMT_CREATE_VIEW) || tokens.matches(STMT_CREATE_OR_REPLACE_VIEW)) {
570            stmtNode = parseCreateViewStatement(tokens, parentNode);
571        } else if (tokens.matches(STMT_CREATE_ASSERTION)) {
572            stmtNode = parseCreateAssertionStatement(tokens, parentNode);
573        } else if (tokens.matches(STMT_CREATE_CHARACTER_SET)) {
574            stmtNode = parseCreateCharacterSetStatement(tokens, parentNode);
575        } else if (tokens.matches(STMT_CREATE_COLLATION)) {
576            stmtNode = parseCreateCollationStatement(tokens, parentNode);
577        } else if (tokens.matches(STMT_CREATE_TRANSLATION)) {
578            stmtNode = parseCreateTranslationStatement(tokens, parentNode);
579        } else if (tokens.matches(STMT_CREATE_DOMAIN)) {
580            stmtNode = parseCreateDomainStatement(tokens, parentNode);
581        } else {
582            markStartOfStatement(tokens);
583
584            stmtNode = parseIgnorableStatement(tokens, "CREATE UNKNOWN", parentNode);
585            Position position = getCurrentMarkedPosition();
586            String msg = DdlSequencerI18n.unknownCreateStatement.text(position.getLine(), position.getColumn());
587            DdlParserProblem problem = new DdlParserProblem(DdlConstants.Problems.WARNING, position, msg);
588
589            stmtNode.setProperty(DDL_PROBLEM, problem.toString());
590
591            markEndOfStatement(tokens, stmtNode);
592        }
593
594        return stmtNode;
595    }
596
597    /**
598     * Parses DDL ALTER statement based on SQL 92 specifications.
599     * 
600     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
601     * @param parentNode the parent {@link AstNode} node; may not be null
602     * @return the parsed ALTER {@link AstNode}
603     * @throws ParsingException
604     */
605    protected AstNode parseAlterStatement( DdlTokenStream tokens,
606                                           AstNode parentNode ) throws ParsingException {
607        assert tokens != null;
608        assert parentNode != null;
609
610        if (tokens.matches(ALTER, TABLE)) {
611            return parseAlterTableStatement(tokens, parentNode);
612        } else if (tokens.matches("ALTER", "DOMAIN")) {
613            markStartOfStatement(tokens);
614            tokens.consume("ALTER", "DOMAIN");
615            String domainName = parseName(tokens);
616            AstNode alterNode = nodeFactory().node(domainName, parentNode, TYPE_ALTER_DOMAIN_STATEMENT);
617            parseUntilTerminator(tokens);
618            markEndOfStatement(tokens, alterNode);
619            return alterNode;
620        }
621        return null;
622    }
623
624    /**
625     * Parses DDL ALTER TABLE {@link AstNode} based on SQL 92 specifications.
626     * 
627     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
628     * @param parentNode the parent {@link AstNode} node; may not be null
629     * @return the parsed ALTER TABLE {@link AstNode}
630     * @throws ParsingException
631     */
632    protected AstNode parseAlterTableStatement( DdlTokenStream tokens,
633                                                AstNode parentNode ) throws ParsingException {
634        assert tokens != null;
635        assert parentNode != null;
636
637        markStartOfStatement(tokens);
638
639        // <alter table statement> ::=
640        // ALTER TABLE <table name> <alter table action>
641        //
642        // <alter table action> ::=
643        // <add column definition>
644        // | <alter column definition>
645        // | <drop column definition>
646        // | <add table constraint definition>
647        // | <drop table constraint definition>
648
649        tokens.consume("ALTER", "TABLE"); // consumes 'ALTER'
650        String tableName = parseName(tokens);
651
652        AstNode alterTableNode = nodeFactory().node(tableName, parentNode, TYPE_ALTER_TABLE_STATEMENT);
653
654        if (tokens.canConsume("ADD")) {
655            if (isTableConstraint(tokens)) {
656                parseTableConstraint(tokens, alterTableNode, true);
657            } else {
658                parseSingleTerminatedColumnDefinition(tokens, alterTableNode, true);
659            }
660        } else if (tokens.canConsume("DROP")) {
661            if (tokens.canConsume(CONSTRAINT)) {
662                String constraintName = parseName(tokens); // constraint name
663                AstNode constraintNode = nodeFactory().node(constraintName, alterTableNode, TYPE_DROP_TABLE_CONSTRAINT_DEFINITION);
664                if (tokens.canConsume(DropBehavior.CASCADE)) {
665                    constraintNode.setProperty(DROP_BEHAVIOR, DropBehavior.CASCADE);
666                } else if (tokens.canConsume(DropBehavior.RESTRICT)) {
667                    constraintNode.setProperty(DROP_BEHAVIOR, DropBehavior.RESTRICT);
668                }
669            } else {
670                // ALTER TABLE supplier
671                // DROP COLUMN supplier_name;
672
673                // DROP [ COLUMN ] <column name> <drop behavior>
674                tokens.canConsume("COLUMN"); // "COLUMN" is optional
675                String columnName = parseName(tokens);
676                AstNode columnNode = nodeFactory().node(columnName, alterTableNode, TYPE_DROP_COLUMN_DEFINITION);
677                if (tokens.canConsume(DropBehavior.CASCADE)) {
678                    columnNode.setProperty(DROP_BEHAVIOR, DropBehavior.CASCADE);
679                } else if (tokens.canConsume(DropBehavior.RESTRICT)) {
680                    columnNode.setProperty(DROP_BEHAVIOR, DropBehavior.RESTRICT);
681                }
682            }
683        } else if (tokens.canConsume("ALTER")) {
684            // EXAMPLE: ALTER TABLE table_name [ ALTER column_name SET DEFAULT (0) ]
685            //
686            // ALTER [ COLUMN ] <column name> {SET <default clause> | DROP DEFAULT}
687
688            tokens.canConsume("COLUMN");
689            String alterColumnName = parseName(tokens);
690            AstNode columnNode = nodeFactory().node(alterColumnName, alterTableNode, TYPE_ALTER_COLUMN_DEFINITION);
691            if (tokens.canConsume("SET")) {
692                parseDefaultClause(tokens, columnNode);
693            } else if (tokens.canConsume("DROP", "DEFAULT")) {
694                columnNode.setProperty(DROP_BEHAVIOR, "DROP DEFAULT");
695            }
696        } else {
697            parseUntilTerminator(tokens); // COULD BE "NESTED TABLE xxxxxxxx" option clause
698        }
699
700        markEndOfStatement(tokens, alterTableNode);
701        return alterTableNode;
702    }
703
704    /**
705     * Parses DDL DROP {@link AstNode} based on SQL 92 specifications.
706     * 
707     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
708     * @param parentNode the parent {@link AstNode} node; may not be null
709     * @return the parsed DROP {@link AstNode}
710     * @throws ParsingException
711     */
712    protected AstNode parseDropStatement( DdlTokenStream tokens,
713                                          AstNode parentNode ) throws ParsingException {
714        assert tokens != null;
715        assert parentNode != null;
716
717        if (tokens.matches(STMT_DROP_TABLE)) {
718            // <drop table statement> ::=
719            // DROP TABLE <table name> <drop behavior>
720            //
721            // <drop behavior> ::= CASCADE | RESTRICT
722            return parseSimpleDropStatement(tokens, STMT_DROP_TABLE, parentNode, TYPE_DROP_TABLE_STATEMENT);
723        } else if (tokens.matches(STMT_DROP_VIEW)) {
724            return parseSimpleDropStatement(tokens, STMT_DROP_VIEW, parentNode, TYPE_DROP_VIEW_STATEMENT);
725        } else if (tokens.matches(STMT_DROP_SCHEMA)) {
726            return parseSimpleDropStatement(tokens, STMT_DROP_SCHEMA, parentNode, TYPE_DROP_SCHEMA_STATEMENT);
727        } else if (tokens.matches(STMT_DROP_DOMAIN)) {
728            return parseSimpleDropStatement(tokens, STMT_DROP_DOMAIN, parentNode, TYPE_DROP_DOMAIN_STATEMENT);
729        } else if (tokens.matches(STMT_DROP_TRANSLATION)) {
730            return parseSimpleDropStatement(tokens, STMT_DROP_TRANSLATION, parentNode, TYPE_DROP_TRANSLATION_STATEMENT);
731        } else if (tokens.matches(STMT_DROP_CHARACTER_SET)) {
732            return parseSimpleDropStatement(tokens, STMT_DROP_CHARACTER_SET, parentNode, TYPE_DROP_CHARACTER_SET_STATEMENT);
733        } else if (tokens.matches(STMT_DROP_ASSERTION)) {
734            return parseSimpleDropStatement(tokens, STMT_DROP_ASSERTION, parentNode, TYPE_DROP_ASSERTION_STATEMENT);
735        } else if (tokens.matches(STMT_DROP_COLLATION)) {
736            return parseSimpleDropStatement(tokens, STMT_DROP_COLLATION, parentNode, TYPE_DROP_COLLATION_STATEMENT);
737        }
738
739        return null;
740    }
741
742    private AstNode parseSimpleDropStatement( DdlTokenStream tokens,
743                                              String[] startPhrase,
744                                              AstNode parentNode,
745                                              String stmtType ) throws ParsingException {
746        assert tokens != null;
747        assert startPhrase != null && startPhrase.length > 0;
748        assert parentNode != null;
749
750        markStartOfStatement(tokens);
751        String behavior = null;
752        tokens.consume(startPhrase);
753        List<String> nameList = new ArrayList<String>();
754        nameList.add(parseName(tokens));
755        while (tokens.matches(COMMA)) {
756            tokens.consume(COMMA);
757            nameList.add(parseName(tokens));
758        }
759
760        if (tokens.canConsume("CASCADE")) {
761            behavior = "CASCADE";
762        } else if (tokens.canConsume("RESTRICT")) {
763            behavior = "RESTRICT";
764        }
765
766        AstNode dropNode = nodeFactory().node(nameList.get(0), parentNode, stmtType);
767        if (behavior != null) {
768            dropNode.setProperty(DROP_BEHAVIOR, behavior);
769        }
770        markEndOfStatement(tokens, dropNode);
771
772        return dropNode;
773    }
774
775    /**
776     * Parses DDL INSERT {@link AstNode} based on SQL 92 specifications.
777     * 
778     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
779     * @param parentNode the parent {@link AstNode} node; may not be null
780     * @return the {@link AstNode}
781     * @throws ParsingException
782     */
783    protected AstNode parseInsertStatement( DdlTokenStream tokens,
784                                            AstNode parentNode ) throws ParsingException {
785        assert tokens != null;
786        assert parentNode != null;
787
788        // Original implementation does NOT parse Insert statement, but just returns a generic TypedStatement
789        if (tokens.matches(STMT_INSERT_INTO)) {
790            markStartOfStatement(tokens);
791            tokens.consume(STMT_INSERT_INTO);
792            String prefix = getStatementTypeName(STMT_INSERT_INTO);
793            AstNode node = nodeFactory().node(prefix, parentNode, TYPE_INSERT_STATEMENT);
794            parseUntilTerminator(tokens);
795            markEndOfStatement(tokens, node);
796            return node;
797        }
798        return null;
799    }
800
801    /**
802     * Parses DDL SET {@link AstNode} based on SQL 92 specifications.
803     * 
804     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
805     * @param parentNode the parent {@link AstNode} node; may not be null
806     * @return the {@link AstNode}
807     * @throws ParsingException
808     */
809    protected AstNode parseSetStatement( DdlTokenStream tokens,
810                                         AstNode parentNode ) throws ParsingException {
811        assert tokens != null;
812        assert parentNode != null;
813
814        // Original implementation does NOT parse Insert statement, but just returns a generic TypedStatement
815        if (tokens.matches(SET)) {
816            markStartOfStatement(tokens);
817            tokens.consume(SET);
818            AstNode node = nodeFactory().node("SET", parentNode, TYPE_SET_STATEMENT);
819            parseUntilTerminator(tokens);
820            markEndOfStatement(tokens, node);
821            return node;
822        }
823        return null;
824    }
825
826    /**
827     * Parses DDL GRANT statement {@link AstNode} based on SQL 92 specifications.
828     * 
829     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
830     * @param parentNode the parent {@link AstNode} node; may not be null
831     * @return the {@link AstNode}
832     * @throws ParsingException
833     */
834    protected AstNode parseGrantStatement( DdlTokenStream tokens,
835                                           AstNode parentNode ) throws ParsingException {
836        assert tokens != null;
837        assert parentNode != null;
838        assert tokens.matches(GRANT);
839
840        markStartOfStatement(tokens);
841
842        // Syntax for tables
843        //
844        // GRANT <privileges> ON <object name>
845        // TO <grantee> [ { <comma> <grantee> }... ]
846        // [ WITH GRANT OPTION ]
847        //
848        // <object name> ::=
849        // [ TABLE ] <table name>
850        // | DOMAIN <domain name>
851        // | COLLATION <collation name>
852        // | CHARACTER SET <character set name>
853        // | TRANSLATION <translation name>
854        //
855        // Syntax for roles
856        //
857        // GRANT roleName [ {, roleName }* ] TO grantees
858
859        // privilege-types
860        //
861        // ALL PRIVILEGES | privilege-list
862        //
863        AstNode grantNode = null;
864        boolean allPrivileges = false;
865
866        List<AstNode> privileges = new ArrayList<AstNode>();
867
868        tokens.consume("GRANT");
869
870        if (tokens.canConsume("ALL", "PRIVILEGES")) {
871            allPrivileges = true;
872        } else {
873            parseGrantPrivileges(tokens, privileges);
874        }
875        tokens.consume("ON");
876
877        if (tokens.canConsume("DOMAIN")) {
878            String name = parseName(tokens);
879            grantNode = nodeFactory().node(name, parentNode, TYPE_GRANT_ON_DOMAIN_STATEMENT);
880        } else if (tokens.canConsume("COLLATION")) {
881            String name = parseName(tokens);
882            grantNode = nodeFactory().node(name, parentNode, TYPE_GRANT_ON_COLLATION_STATEMENT);
883        } else if (tokens.canConsume("CHARACTER", "SET")) {
884            String name = parseName(tokens);
885            grantNode = nodeFactory().node(name, parentNode, TYPE_GRANT_ON_CHARACTER_SET_STATEMENT);
886        } else if (tokens.canConsume("TRANSLATION")) {
887            String name = parseName(tokens);
888            grantNode = nodeFactory().node(name, parentNode, TYPE_GRANT_ON_TRANSLATION_STATEMENT);
889        } else {
890            tokens.canConsume(TABLE); // OPTIONAL
891            String name = parseName(tokens);
892            grantNode = nodeFactory().node(name, parentNode, TYPE_GRANT_ON_TABLE_STATEMENT);
893        }
894
895        // Attach privileges to grant node
896        for (AstNode node : privileges) {
897            node.setParent(grantNode);
898        }
899        if (allPrivileges) {
900            grantNode.setProperty(ALL_PRIVILEGES, allPrivileges);
901        }
902
903        tokens.consume("TO");
904
905        do {
906            String grantee = parseName(tokens);
907            nodeFactory().node(grantee, grantNode, GRANTEE);
908        } while (tokens.canConsume(COMMA));
909
910        if (tokens.canConsume("WITH", "GRANT", "OPTION")) {
911            grantNode.setProperty(WITH_GRANT_OPTION, "WITH GRANT OPTION");
912        }
913
914        markEndOfStatement(tokens, grantNode);
915
916        return grantNode;
917    }
918
919    protected void parseGrantPrivileges( DdlTokenStream tokens,
920                                         List<AstNode> privileges ) throws ParsingException {
921        // privilege-types
922        //
923        // ALL PRIVILEGES | privilege-list
924        //
925        // privilege-list
926        //
927        // table-privilege {, table-privilege }*
928        //
929        // table-privilege
930        // SELECT
931        // | DELETE
932        // | INSERT [ <left paren> <privilege column list> <right paren> ]
933        // | UPDATE [ <left paren> <privilege column list> <right paren> ]
934        // | REFERENCES [ <left paren> <privilege column list> <right paren> ]
935        // | USAGE
936
937        do {
938            AstNode node = null;
939
940            if (tokens.canConsume(DELETE)) {
941                node = nodeFactory().node("privilege");
942                node.setProperty(TYPE, DELETE);
943            } else if (tokens.canConsume(INSERT)) {
944                node = nodeFactory().node("privilege");
945                node.setProperty(TYPE, INSERT);
946                parseColumnNameList(tokens, node, TYPE_COLUMN_REFERENCE);
947            } else if (tokens.canConsume("REFERENCES")) {
948                node = nodeFactory().node("privilege");
949                node.setProperty(TYPE, "REFERENCES");
950                parseColumnNameList(tokens, node, TYPE_COLUMN_REFERENCE);
951            } else if (tokens.canConsume(SELECT)) {
952                node = nodeFactory().node("privilege");
953                node.setProperty(TYPE, SELECT);
954            } else if (tokens.canConsume("USAGE")) {
955                node = nodeFactory().node("privilege");
956                node.setProperty(TYPE, "USAGE");
957            } else if (tokens.canConsume(UPDATE)) {
958                node = nodeFactory().node("privilege");
959                node.setProperty(TYPE, UPDATE);
960                parseColumnNameList(tokens, node, TYPE_COLUMN_REFERENCE);
961            }
962            if (node == null) {
963                break;
964            }
965            nodeFactory().setType(node, GRANT_PRIVILEGE);
966            privileges.add(node);
967
968        } while (tokens.canConsume(COMMA));
969
970    }
971
972    protected AstNode parseRevokeStatement( DdlTokenStream tokens,
973                                            AstNode parentNode ) throws ParsingException {
974        assert tokens != null;
975        assert parentNode != null;
976        assert tokens.matches(REVOKE);
977
978        markStartOfStatement(tokens);
979
980        // <revoke statement> ::=
981        // REVOKE [ GRANT OPTION FOR ]
982        // <privileges>
983        // ON <object name>
984        // FROM <grantee> [ { <comma> <grantee> }... ] <drop behavior>
985
986        AstNode revokeNode = null;
987        boolean allPrivileges = false;
988        boolean withGrantOption = false;
989
990        List<AstNode> privileges = new ArrayList<AstNode>();
991
992        tokens.consume("REVOKE");
993
994        withGrantOption = tokens.canConsume("WITH", "GRANT", "OPTION");
995
996        if (tokens.canConsume("ALL", "PRIVILEGES")) {
997            allPrivileges = true;
998        } else {
999            parseGrantPrivileges(tokens, privileges);
1000        }
1001        tokens.consume("ON");
1002
1003        if (tokens.canConsume("DOMAIN")) {
1004            String name = parseName(tokens);
1005            revokeNode = nodeFactory().node(name, parentNode, TYPE_REVOKE_ON_DOMAIN_STATEMENT);
1006        } else if (tokens.canConsume("COLLATION")) {
1007            String name = parseName(tokens);
1008            revokeNode = nodeFactory().node(name, parentNode, TYPE_REVOKE_ON_COLLATION_STATEMENT);
1009        } else if (tokens.canConsume("CHARACTER", "SET")) {
1010            String name = parseName(tokens);
1011            revokeNode = nodeFactory().node(name, parentNode, TYPE_REVOKE_ON_CHARACTER_SET_STATEMENT);
1012        } else if (tokens.canConsume("TRANSLATION")) {
1013            String name = parseName(tokens);
1014            revokeNode = nodeFactory().node(name, parentNode, TYPE_REVOKE_ON_TRANSLATION_STATEMENT);
1015        } else {
1016            tokens.canConsume(TABLE); // OPTIONAL
1017            String name = parseName(tokens);
1018            revokeNode = nodeFactory().node(name, parentNode, TYPE_REVOKE_ON_TABLE_STATEMENT);
1019        }
1020
1021        // Attach privileges to grant node
1022        for (AstNode node : privileges) {
1023            node.setParent(revokeNode);
1024        }
1025
1026        if (allPrivileges) {
1027            revokeNode.setProperty(ALL_PRIVILEGES, allPrivileges);
1028        }
1029
1030        tokens.consume("FROM");
1031
1032        do {
1033            String grantee = parseName(tokens);
1034            nodeFactory().node(grantee, revokeNode, GRANTEE);
1035        } while (tokens.canConsume(COMMA));
1036
1037        String behavior = null;
1038
1039        if (tokens.canConsume("CASCADE")) {
1040            behavior = "CASCADE";
1041        } else if (tokens.canConsume("RESTRICT")) {
1042            behavior = "RESTRICT";
1043        }
1044
1045        if (behavior != null) {
1046            revokeNode.setProperty(DROP_BEHAVIOR, behavior);
1047        }
1048
1049        if (withGrantOption) {
1050            revokeNode.setProperty(WITH_GRANT_OPTION, "WITH GRANT OPTION");
1051        }
1052
1053        markEndOfStatement(tokens, revokeNode);
1054
1055        return revokeNode;
1056    }
1057
1058    /**
1059     * Parses DDL CREATE DOMAIN {@link AstNode} based on SQL 92 specifications.
1060     * 
1061     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1062     * @param parentNode the parent {@link AstNode} node; may not be null
1063     * @return the parsed statement node {@link AstNode}
1064     * @throws ParsingException
1065     */
1066    protected AstNode parseCreateDomainStatement( DdlTokenStream tokens,
1067                                                  AstNode parentNode ) throws ParsingException {
1068        assert tokens != null;
1069        assert parentNode != null;
1070
1071        // <domain definition> ::=
1072        // CREATE DOMAIN <domain name>
1073        // [ AS ] <data type>
1074        // [ <default clause> ]
1075        // [ <domain constraint>... ]
1076        // [ <collate clause> ]
1077
1078        markStartOfStatement(tokens);
1079
1080        tokens.consume(STMT_CREATE_DOMAIN);
1081
1082        String name = parseName(tokens);
1083        AstNode node = nodeFactory().node(name, parentNode, TYPE_CREATE_DOMAIN_STATEMENT);
1084
1085        tokens.canConsume("AS");
1086        DataType datatype = getDatatypeParser().parse(tokens);
1087        if (datatype != null) {
1088            getDatatypeParser().setPropertiesOnNode(node, datatype);
1089            parseDefaultClause(tokens, node);
1090        }
1091
1092        parseUntilTerminator(tokens);
1093
1094        markEndOfStatement(tokens, node);
1095
1096        return node;
1097    }
1098
1099    /**
1100     * Parses DDL CREATE COLLATION {@link AstNode} based on SQL 92 specifications.
1101     * 
1102     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1103     * @param parentNode the parent {@link AstNode} node; may not be null
1104     * @return the parsed statement node {@link AstNode}
1105     * @throws ParsingException
1106     */
1107    protected AstNode parseCreateCollationStatement( DdlTokenStream tokens,
1108                                                     AstNode parentNode ) throws ParsingException {
1109        assert tokens != null;
1110        assert parentNode != null;
1111
1112        markStartOfStatement(tokens);
1113
1114        tokens.consume(STMT_CREATE_COLLATION);
1115
1116        String name = parseName(tokens);
1117
1118        AstNode node = nodeFactory().node(name, parentNode, TYPE_CREATE_COLLATION_STATEMENT);
1119
1120        // character set attribute
1121        tokens.consume("FOR");
1122        String charSetName = parseName(tokens);
1123        node.setProperty(COLLATION_CHARACTER_SET_NAME, charSetName);
1124
1125        // collation source
1126        // TODO author=Horia Chiorean date=1/4/12 description=Only parsing a string atm (should probably be some nested nodes -
1127        // see StandardDdl.cnd
1128        tokens.consume("FROM");
1129        String collationSource = null;
1130        if (tokens.canConsume("EXTERNAL") || tokens.canConsume("DESC")) {
1131            collationSource = consumeParenBoundedTokens(tokens, false);
1132        } else if (tokens.canConsume("TRANSLATION")) {
1133            StringBuilder translationCollation = new StringBuilder("TRANSLATION ").append(tokens.consume());
1134            if (tokens.canConsume("THEN", "COLLATION")) {
1135                translationCollation.append(" THEN COLLATION ");
1136                translationCollation.append(parseName(tokens));
1137            }
1138            collationSource = translationCollation.toString();
1139        } else {
1140            collationSource = parseName(tokens);
1141        }
1142        node.setProperty(COLLATION_SOURCE, collationSource);
1143
1144        // pad attribute
1145        if (tokens.canConsume("PAD", "SPACE")) {
1146            node.setProperty(PAD_ATTRIBUTE, PAD_ATTRIBUTE_PAD);
1147        } else if (tokens.canConsume("NO", "PAD")) {
1148            node.setProperty(PAD_ATTRIBUTE, PAD_ATTRIBUTE_NO_PAD);
1149        }
1150
1151        parseUntilTerminator(tokens);
1152        markEndOfStatement(tokens, node);
1153        return node;
1154    }
1155
1156    /**
1157     * Parses DDL CREATE TRANSLATION {@link AstNode} based on SQL 92 specifications.
1158     * 
1159     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1160     * @param parentNode the parent {@link AstNode} node; may not be null
1161     * @return the parsed statement node {@link AstNode}
1162     * @throws ParsingException
1163     */
1164    protected AstNode parseCreateTranslationStatement( DdlTokenStream tokens,
1165                                                       AstNode parentNode ) throws ParsingException {
1166        assert tokens != null;
1167        assert parentNode != null;
1168
1169        markStartOfStatement(tokens);
1170
1171        tokens.consume(STMT_CREATE_TRANSLATION);
1172
1173        String name = parseName(tokens);
1174
1175        AstNode node = nodeFactory().node(name, parentNode, TYPE_CREATE_TRANSLATION_STATEMENT);
1176        tokens.consume("FOR");
1177        node.setProperty(SOURCE_CHARACTER_SET_NAME, parseName(tokens));
1178        tokens.consume("TO");
1179        node.setProperty(TARGET_CHARACTER_SET_NAME, parseName(tokens));
1180
1181        parseUntilTerminator(tokens);
1182
1183        markEndOfStatement(tokens, node);
1184
1185        return node;
1186    }
1187
1188    /**
1189     * Parses DDL CREATE CHARACTER SET {@link AstNode} based on SQL 92 specifications.
1190     * 
1191     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1192     * @param parentNode the parent {@link AstNode} node; may not be null
1193     * @return the parsed statement node {@link AstNode}
1194     * @throws ParsingException
1195     */
1196    protected AstNode parseCreateCharacterSetStatement( DdlTokenStream tokens,
1197                                                        AstNode parentNode ) throws ParsingException {
1198        assert tokens != null;
1199        assert parentNode != null;
1200
1201        markStartOfStatement(tokens);
1202
1203        tokens.consume(STMT_CREATE_CHARACTER_SET);
1204
1205        String name = parseName(tokens);
1206
1207        AstNode node = nodeFactory().node(name, parentNode, TYPE_CREATE_CHARACTER_SET_STATEMENT);
1208        // TODO author=Horia Chiorean date=1/4/12 description=Some of the optional attributes from the CND are not implemented yet
1209        node.setProperty(EXISTING_NAME, consumeIdentifier(tokens));
1210
1211        parseUntilTerminator(tokens);
1212
1213        markEndOfStatement(tokens, node);
1214
1215        return node;
1216    }
1217
1218    /**
1219     * Catch-all method to parse unknown (not registered or handled by sub-classes) statements.
1220     * 
1221     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1222     * @param parentNode the parent {@link AstNode} node; may not be null
1223     * @return the {@link AstNode}
1224     * @throws ParsingException
1225     */
1226    protected AstNode parseCustomStatement( DdlTokenStream tokens,
1227                                            AstNode parentNode ) throws ParsingException {
1228        assert tokens != null;
1229        assert parentNode != null;
1230
1231        // DEFAULT DOES NOTHING
1232        // Subclasses can implement additional parsing
1233
1234        return null;
1235    }
1236
1237    // ===========================================================================================================================
1238    // PARSING CREATE TABLE
1239    // ===========================================================================================================================
1240
1241    /**
1242     * Parses DDL CREATE TABLE {@link AstNode} based on SQL 92 specifications.
1243     * 
1244     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1245     * @param parentNode the parent {@link AstNode} node; may not be null
1246     * @return the parsed CREATE TABLE {@link AstNode}
1247     * @throws ParsingException
1248     */
1249    protected AstNode parseCreateTableStatement( DdlTokenStream tokens,
1250                                                 AstNode parentNode ) throws ParsingException {
1251        assert tokens != null;
1252        assert parentNode != null;
1253
1254        markStartOfStatement(tokens);
1255
1256        tokens.consume(CREATE); // CREATE
1257        String temporaryValue = null;
1258        if (tokens.canConsume("LOCAL")) {
1259            tokens.consume("TEMPORARY");
1260            temporaryValue = "LOCAL";
1261        } else if (tokens.canConsume("GLOBAL")) {
1262            tokens.consume("TEMPORARY");
1263            temporaryValue = "GLOBAL";
1264        }
1265
1266        tokens.consume(TABLE);
1267
1268        String tableName = parseName(tokens);
1269
1270        AstNode tableNode = nodeFactory().node(tableName, parentNode, TYPE_CREATE_TABLE_STATEMENT);
1271
1272        if (temporaryValue != null) {
1273            tableNode.setProperty(TEMPORARY, temporaryValue);
1274        }
1275
1276        // System.out.println("  >> PARSING CREATE TABLE >>  Name = " + tableName);
1277        parseColumnsAndConstraints(tokens, tableNode);
1278
1279        parseCreateTableOptions(tokens, tableNode);
1280
1281        markEndOfStatement(tokens, tableNode);
1282
1283        return tableNode;
1284    }
1285
1286    protected void parseCreateTableOptions( DdlTokenStream tokens,
1287                                            AstNode tableNode ) throws ParsingException {
1288        assert tokens != null;
1289        assert tableNode != null;
1290
1291        // [ ON COMMIT { PRESERVE ROWS | DELETE ROWS | DROP } ]
1292        while (areNextTokensCreateTableOptions(tokens)) {
1293            parseNextCreateTableOption(tokens, tableNode);
1294        }
1295
1296    }
1297
1298    protected void parseNextCreateTableOption( DdlTokenStream tokens,
1299                                               AstNode tableNode ) throws ParsingException {
1300        assert tokens != null;
1301        assert tableNode != null;
1302
1303        if (tokens.canConsume("ON", "COMMIT")) {
1304            String option = "";
1305            // PRESERVE ROWS | DELETE ROWS | DROP
1306            if (tokens.canConsume("PRESERVE", "ROWS")) {
1307                option = option + "ON COMMIT PRESERVE ROWS";
1308            } else if (tokens.canConsume("DELETE", "ROWS")) {
1309                option = option + "ON COMMIT DELETE ROWS";
1310            } else if (tokens.canConsume("DROP")) {
1311                option = option + "ON COMMIT DROP";
1312            }
1313
1314            if (option.length() > 0) {
1315                AstNode tableOption = nodeFactory().node("option", tableNode, TYPE_STATEMENT_OPTION);
1316                tableOption.setProperty(VALUE, option);
1317            }
1318        }
1319    }
1320
1321    protected boolean areNextTokensCreateTableOptions( DdlTokenStream tokens ) throws ParsingException {
1322        assert tokens != null;
1323
1324        boolean result = false;
1325
1326        // [ ON COMMIT { PRESERVE ROWS | DELETE ROWS | DROP } ]
1327        if (tokens.matches("ON", "COMMIT")) {
1328            result = true;
1329        }
1330
1331        return result;
1332    }
1333
1334    /**
1335     * Utility method to parse columns and table constraints within either a CREATE TABLE statement. Method first parses and
1336     * copies the text enclosed within the bracketed "( xxxx  )" statement. Then the individual column definition or table
1337     * constraint definition sub-statements are parsed assuming they are comma delimited.
1338     * 
1339     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1340     * @param tableNode
1341     * @throws ParsingException
1342     */
1343    protected void parseColumnsAndConstraints( DdlTokenStream tokens,
1344                                               AstNode tableNode ) throws ParsingException {
1345        assert tokens != null;
1346        assert tableNode != null;
1347
1348        if (!tokens.matches(L_PAREN)) {
1349            return;
1350        }
1351
1352        String tableElementString = getTableElementsString(tokens, false);
1353
1354        DdlTokenStream localTokens = new DdlTokenStream(tableElementString, DdlTokenStream.ddlTokenizer(false), false);
1355
1356        localTokens.start();
1357
1358        StringBuilder unusedTokensSB = new StringBuilder();
1359        do {
1360            if (isTableConstraint(localTokens)) {
1361                parseTableConstraint(localTokens, tableNode, false);
1362            } else if (isColumnDefinitionStart(localTokens)) {
1363                parseColumnDefinition(localTokens, tableNode, false);
1364            } else {
1365                unusedTokensSB.append(SPACE).append(localTokens.consume());
1366            }
1367        } while (localTokens.canConsume(COMMA));
1368
1369        if (unusedTokensSB.length() > 0) {
1370            String msg = DdlSequencerI18n.unusedTokensParsingColumnsAndConstraints.text(tableNode.getName());
1371            DdlParserProblem problem = new DdlParserProblem(DdlConstants.Problems.WARNING, Position.EMPTY_CONTENT_POSITION, msg);
1372            problem.setUnusedSource(unusedTokensSB.toString());
1373            addProblem(problem, tableNode);
1374        }
1375
1376    }
1377
1378    /**
1379     * Utility method to parse the actual column definition. SQL-92 Structural Specification <column definition> ::= <column name>
1380     * { <data type> | <domain name> } [ <default clause> ] [ <column constraint definition>... ] [ <collate clause> ]
1381     * 
1382     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1383     * @param tableNode
1384     * @param isAlterTable true if in-line constraint is part of add column in alter table statement
1385     * @throws ParsingException
1386     */
1387    protected void parseColumnDefinition( DdlTokenStream tokens,
1388                                          AstNode tableNode,
1389                                          boolean isAlterTable ) throws ParsingException {
1390        assert tokens != null;
1391        assert tableNode != null;
1392
1393        tokens.canConsume("COLUMN");
1394        String columnName = parseName(tokens);
1395        DataType datatype = getDatatypeParser().parse(tokens);
1396
1397        AstNode columnNode = nodeFactory().node(columnName, tableNode, TYPE_COLUMN_DEFINITION);
1398
1399        getDatatypeParser().setPropertiesOnNode(columnNode, datatype);
1400
1401        // Now clauses and constraints can be defined in any order, so we need to keep parsing until we get to a comma
1402        StringBuilder unusedTokensSB = new StringBuilder();
1403
1404        while (tokens.hasNext() && !tokens.matches(COMMA)) {
1405            boolean parsedDefaultClause = parseDefaultClause(tokens, columnNode);
1406            if (!parsedDefaultClause) {
1407                boolean parsedCollate = parseCollateClause(tokens, columnNode);
1408                boolean parsedConstraint = parseColumnConstraint(tokens, columnNode, isAlterTable);
1409                if (!parsedCollate && !parsedConstraint) {
1410                    // THIS IS AN ERROR. NOTHING FOUND.
1411                    // NEED TO absorb tokens
1412                    unusedTokensSB.append(SPACE).append(tokens.consume());
1413                }
1414            }
1415            tokens.canConsume(DdlTokenizer.COMMENT);
1416        }
1417
1418        if (unusedTokensSB.length() > 0) {
1419            String msg = DdlSequencerI18n.unusedTokensParsingColumnDefinition.text(tableNode.getName());
1420            DdlParserProblem problem = new DdlParserProblem(Problems.WARNING, Position.EMPTY_CONTENT_POSITION, msg);
1421            problem.setUnusedSource(unusedTokensSB.toString());
1422            addProblem(problem, tableNode);
1423        }
1424    }
1425
1426    /**
1427     * Utility method to parse the actual column definition. SQL-92 Structural Specification <column definition> ::= <column name>
1428     * { <data type> | <domain name> } [ <default clause> ] [ <column constraint definition>... ] [ <collate clause> ]
1429     * 
1430     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1431     * @param tableNode the alter or create table statement node; may not be null
1432     * @param isAlterTable true if in-line constraint is part of add column in alter table statement
1433     * @throws ParsingException
1434     */
1435    protected void parseSingleTerminatedColumnDefinition( DdlTokenStream tokens,
1436                                                          AstNode tableNode,
1437                                                          boolean isAlterTable ) throws ParsingException {
1438        assert tokens != null;
1439        assert tableNode != null;
1440
1441        tokens.canConsume("COLUMN");
1442        String columnName = parseName(tokens);
1443        DataType datatype = getDatatypeParser().parse(tokens);
1444
1445        AstNode columnNode = nodeFactory().node(columnName, tableNode, TYPE_COLUMN_DEFINITION);
1446
1447        getDatatypeParser().setPropertiesOnNode(columnNode, datatype);
1448        // Now clauses and constraints can be defined in any order, so we need to keep parsing until we get to a comma, a
1449        // terminator
1450        // or a new statement
1451
1452        while (tokens.hasNext() && !tokens.matches(getTerminator()) && !tokens.matches(DdlTokenizer.STATEMENT_KEY)) {
1453            boolean parsedDefaultClause = parseDefaultClause(tokens, columnNode);
1454            boolean foundSomething = parsedDefaultClause;
1455            if (!parsedDefaultClause) {
1456                foundSomething |= parseCollateClause(tokens, columnNode);
1457                foundSomething |= parseColumnConstraint(tokens, columnNode, isAlterTable);
1458            }
1459            foundSomething |= consumeComment(tokens);
1460            if (tokens.canConsume(COMMA) || !foundSomething) break;
1461        }
1462    }
1463
1464    /**
1465     * Method which extracts the table element string from a CREATE TABLE statement.
1466     * 
1467     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1468     * @param useTerminator
1469     * @return the parsed table elements String.
1470     * @throws ParsingException
1471     */
1472    protected String getTableElementsString( DdlTokenStream tokens,
1473                                             boolean useTerminator ) throws ParsingException {
1474        assert tokens != null;
1475
1476        StringBuilder sb = new StringBuilder(100);
1477
1478        if (useTerminator) {
1479            while (!isTerminator(tokens)) {
1480                sb.append(SPACE).append(tokens.consume());
1481            }
1482        } else {
1483            // Assume we start with open parenthesis '(', then we can count on walking through ALL tokens until we find the close
1484            // parenthesis ')'. If there are intermediate parenthesis, we can count on them being pairs.
1485            tokens.consume(L_PAREN); // EXPECTED
1486
1487            int iParen = 0;
1488            while (tokens.hasNext()) {
1489                if (tokens.matches(L_PAREN)) {
1490                    iParen++;
1491                } else if (tokens.matches(R_PAREN)) {
1492                    if (iParen == 0) {
1493                        tokens.consume(R_PAREN);
1494                        break;
1495                    }
1496                    iParen--;
1497                }
1498                if (isComment(tokens)) {
1499                    tokens.consume();
1500                } else {
1501                    sb.append(SPACE).append(tokens.consume());
1502                }
1503            }
1504        }
1505
1506        return sb.toString();
1507
1508    }
1509
1510    /**
1511     * Simple method which parses, consumes and returns a string representing text found between parenthesis (i.e. '()') If
1512     * parents don't exist, method returns NULL;
1513     * 
1514     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1515     * @param includeParens
1516     * @return the parenthesis bounded text or null if no parens.
1517     * @throws ParsingException
1518     */
1519    protected String consumeParenBoundedTokens( DdlTokenStream tokens,
1520                                                boolean includeParens ) throws ParsingException {
1521        assert tokens != null;
1522
1523        // Assume we start with open parenthesis '(', then we can count on walking through ALL tokens until we find the close
1524        // parenthesis ')'. If there are intermediate parenthesis, we can count on them being pairs.
1525        if (tokens.canConsume(L_PAREN)) { // EXPECTED
1526            StringBuilder sb = new StringBuilder(100);
1527            if (includeParens) {
1528                sb.append(L_PAREN);
1529            }
1530            int iParen = 0;
1531            while (tokens.hasNext()) {
1532                if (tokens.matches(L_PAREN)) {
1533                    iParen++;
1534                } else if (tokens.matches(R_PAREN)) {
1535                    if (iParen == 0) {
1536                        tokens.consume(R_PAREN);
1537                        if (includeParens) {
1538                            sb.append(SPACE).append(R_PAREN);
1539                        }
1540                        break;
1541                    }
1542                    iParen--;
1543                }
1544                if (isComment(tokens)) {
1545                    tokens.consume();
1546                } else {
1547                    sb.append(SPACE).append(tokens.consume());
1548                }
1549            }
1550            return sb.toString();
1551        }
1552
1553        return null;
1554    }
1555
1556    /**
1557     * Parses an in-line column constraint including NULLABLE value, UNIQUE, PRIMARY KEY and REFERENCES to a Foreign Key. The
1558     * values for the constraint are set as properties on the input columnNode.
1559     * 
1560     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1561     * @param columnNode the column definition being created; may not be null
1562     * @param isAlterTable true if in-line constraint is part of add column in alter table statement
1563     * @return true if parsed a constraint, else false.
1564     * @throws ParsingException
1565     */
1566    protected boolean parseColumnConstraint( DdlTokenStream tokens,
1567                                             AstNode columnNode,
1568                                             boolean isAlterTable ) throws ParsingException {
1569        assert tokens != null;
1570        assert columnNode != null;
1571
1572        String mixinType = isAlterTable ? TYPE_ADD_TABLE_CONSTRAINT_DEFINITION : TYPE_TABLE_CONSTRAINT;
1573
1574        boolean result = false;
1575
1576        // : [ CONSTRAINT <constraint name> ] <column constraint> [ <constraint attributes> ]
1577        // <column constraint> ::= NOT NULL | <unique specification> | <references specification> | <check constraint definition>
1578        // <unique specification> ::= UNIQUE | PRIMARY KEY
1579        // <references specification> ::= REFERENCES <referenced table and columns> [ MATCH <match type> ] [ <referential
1580        // triggered action> ]
1581        // <check constraint definition> ::= CHECK <left paren> <search condition> <right paren>
1582        String colName = columnNode.getName();
1583
1584        if (tokens.canConsume("NULL")) {
1585            columnNode.setProperty(NULLABLE, "NULL");
1586            result = true;
1587        } else if (tokens.canConsume("NOT", "NULL")) {
1588            columnNode.setProperty(NULLABLE, "NOT NULL");
1589            result = true;
1590        } else if (tokens.matches(CONSTRAINT)) {
1591            result = true;
1592            tokens.consume(CONSTRAINT);
1593            String constraintName = parseName(tokens);
1594            AstNode constraintNode = nodeFactory().node(constraintName, columnNode.getParent(), mixinType);
1595
1596            if (tokens.matches("UNIQUE")) {
1597                // CONSTRAINT P_KEY_2a UNIQUE (PERMISSIONUID)
1598                tokens.consume("UNIQUE"); // UNIQUE
1599
1600                constraintNode.setProperty(CONSTRAINT_TYPE, UNIQUE);
1601
1602                // CONSUME COLUMNS
1603                boolean columnsAdded = parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);
1604
1605                if (!columnsAdded) {
1606                    nodeFactory().node(colName, constraintNode, TYPE_COLUMN_REFERENCE);
1607                }
1608
1609                parseConstraintAttributes(tokens, constraintNode);
1610            } else if (tokens.matches("PRIMARY", "KEY")) {
1611                // CONSTRAINT U_KEY_2a PRIMARY KEY (PERMISSIONUID)
1612                tokens.consume("PRIMARY"); // PRIMARY
1613                tokens.consume("KEY"); // KEY
1614
1615                constraintNode.setProperty(CONSTRAINT_TYPE, PRIMARY_KEY);
1616
1617                // CONSUME COLUMNS
1618                boolean columnsAdded = parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);
1619
1620                if (!columnsAdded) {
1621                    // add the current column as the PK reference
1622                    nodeFactory().node(colName, constraintNode, TYPE_COLUMN_REFERENCE);
1623                }
1624
1625                parseConstraintAttributes(tokens, constraintNode);
1626            } else if (tokens.matches("REFERENCES")) {
1627                // References in an in-line constraint is really a foreign key definition
1628                // EXAMPLE:
1629                // COLUMN_NAME DATATYPE NOT NULL DEFAULT (0) CONSTRAINT SOME_FK_NAME REFERENCES SOME_TABLE_NAME (SOME_COLUMN_NAME,
1630                // ...)
1631
1632                constraintNode.setProperty(CONSTRAINT_TYPE, FOREIGN_KEY);
1633
1634                nodeFactory().node(colName, constraintNode, TYPE_COLUMN_REFERENCE);
1635
1636                parseReferences(tokens, constraintNode);
1637
1638                parseConstraintAttributes(tokens, constraintNode);
1639            }
1640        } else if (tokens.matches("UNIQUE")) {
1641            result = true;
1642            tokens.consume("UNIQUE");
1643            // Unique constraint for this particular column
1644            String uc_name = "UC_1"; // UNIQUE CONSTRAINT NAME
1645
1646            AstNode constraintNode = nodeFactory().node(uc_name, columnNode.getParent(), mixinType);
1647
1648            constraintNode.setProperty(CONSTRAINT_TYPE, UNIQUE);
1649
1650            nodeFactory().node(colName, constraintNode, TYPE_COLUMN_REFERENCE);
1651
1652        } else if (tokens.matches("PRIMARY", "KEY")) {
1653            result = true;
1654            tokens.consume("PRIMARY", "KEY");
1655            // PRIMARY KEY for this particular column
1656            String pk_name = "PK_1"; // PRIMARY KEY NAME
1657
1658            AstNode constraintNode = nodeFactory().node(pk_name, columnNode.getParent(), mixinType);
1659
1660            constraintNode.setProperty(CONSTRAINT_TYPE, PRIMARY_KEY);
1661
1662            nodeFactory().node(colName, constraintNode, TYPE_COLUMN_REFERENCE);
1663
1664        } else if (tokens.matches("FOREIGN", "KEY")) {
1665            result = true;
1666            tokens.consume("FOREIGN", "KEY");
1667            // This is an auto-named FK
1668            // References in an in-line constraint is really a foreign key definition
1669            // EXAMPLE:
1670            // COLUMN_NAME DATATYPE NOT NULL DEFAULT (0) FOREIGN KEY MY_FK_NAME REFERENCES SOME_TABLE_NAME (SOME_COLUMN_NAME, ...)
1671
1672            String constraintName = parseName(tokens);
1673
1674            AstNode constraintNode = nodeFactory().node(constraintName, columnNode.getParent(), mixinType);
1675
1676            constraintNode.setProperty(CONSTRAINT_TYPE, FOREIGN_KEY);
1677
1678            nodeFactory().node(colName, constraintNode, TYPE_COLUMN_REFERENCE);
1679
1680            parseReferences(tokens, constraintNode);
1681            parseConstraintAttributes(tokens, constraintNode);
1682        } else if (tokens.matches("REFERENCES")) {
1683            result = true;
1684            // This is an auto-named FK
1685            // References in an in-line constraint is really a foreign key definition
1686            // EXAMPLE:
1687            // COLUMN_NAME DATATYPE NOT NULL DEFAULT (0) REFERENCES SOME_TABLE_NAME (SOME_COLUMN_NAME, ...)
1688
1689            String constraintName = "FK_1";
1690
1691            AstNode constraintNode = nodeFactory().node(constraintName, columnNode.getParent(), mixinType);
1692
1693            constraintNode.setProperty(CONSTRAINT_TYPE, FOREIGN_KEY);
1694
1695            nodeFactory().node(colName, constraintNode, TYPE_COLUMN_REFERENCE);
1696
1697            parseReferences(tokens, constraintNode);
1698            parseConstraintAttributes(tokens, constraintNode);
1699        } else if (tokens.matches(CHECK)) {
1700            result = true;
1701            tokens.consume(CHECK); // CHECK
1702
1703            String ck_name = UNNAMED_CHECK_NODE_NAME;
1704
1705            AstNode constraintNode = nodeFactory().node(ck_name, columnNode.getParent(), mixinType);
1706            constraintNode.setProperty(CONSTRAINT_TYPE, CHECK);
1707
1708            String clause = consumeParenBoundedTokens(tokens, true);
1709            constraintNode.setProperty(CHECK_SEARCH_CONDITION, clause);
1710        }
1711
1712        return result;
1713    }
1714
1715    /**
1716     * Parses full table constraint definition including the "CONSTRAINT" token Examples: CONSTRAINT P_KEY_2a UNIQUE
1717     * (PERMISSIONUID)
1718     * 
1719     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1720     * @param tableNode
1721     * @param isAlterTable true if in-line constraint is part of add column in alter table statement
1722     * @throws ParsingException
1723     */
1724    protected void parseTableConstraint( DdlTokenStream tokens,
1725                                         AstNode tableNode,
1726                                         boolean isAlterTable ) throws ParsingException {
1727        assert tokens != null;
1728        assert tableNode != null;
1729
1730        String mixinType = isAlterTable ? TYPE_ADD_TABLE_CONSTRAINT_DEFINITION : TYPE_TABLE_CONSTRAINT;
1731
1732        /*
1733        <table constraint definition> ::=
1734            [ <constraint name definition> ]
1735            <table constraint> [ <constraint attributes> ]
1736        
1737        <table constraint> ::=
1738              <unique constraint definition>
1739            | <referential constraint definition>
1740            | <check constraint definition>
1741            
1742        <constraint attributes> ::=
1743              <constraint check time> [ [ NOT ] DEFERRABLE ]
1744            | [ NOT ] DEFERRABLE [ <constraint check time> ]
1745        
1746        <unique constraint definition> ::=
1747                    <unique specification> even in SQL3)
1748            <unique specification>
1749              <left paren> <unique column list> <right paren>
1750        
1751        <unique column list> ::= <column name list>
1752        
1753        <referential constraint definition> ::=
1754            FOREIGN KEY
1755                <left paren> <referencing columns> <right paren>
1756              <references specification>
1757        
1758        <referencing columns> ::=
1759            <reference column list>
1760            
1761        <constraint attributes> ::=
1762              <constraint check time> [ [ NOT ] DEFERRABLE ]
1763            | [ NOT ] DEFERRABLE [ <constraint check time> ]
1764        
1765        <constraint check time> ::=
1766              INITIALLY DEFERRED
1767            | INITIALLY IMMEDIATE
1768            
1769        <check constraint definition> ::=
1770            CHECK
1771                <left paren> <search condition> <right paren>
1772         */
1773        consumeComment(tokens);
1774
1775        if ((tokens.matches("PRIMARY", "KEY")) || (tokens.matches("FOREIGN", "KEY")) || (tokens.matches("UNIQUE"))) {
1776
1777            // This is the case where the PK/FK/UK is NOT NAMED
1778            if (tokens.matches("UNIQUE")) {
1779                String uc_name = "UC_1"; // UNIQUE CONSTRAINT NAME
1780                tokens.consume(); // UNIQUE
1781
1782                AstNode constraintNode = nodeFactory().node(uc_name, tableNode, mixinType);
1783                constraintNode.setProperty(CONSTRAINT_TYPE, UNIQUE);
1784
1785                // CONSUME COLUMNS
1786                parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);
1787
1788                parseConstraintAttributes(tokens, constraintNode);
1789
1790                consumeComment(tokens);
1791            } else if (tokens.matches("PRIMARY", "KEY")) {
1792                String pk_name = "PK_1"; // PRIMARY KEY NAME
1793                tokens.consume("PRIMARY", "KEY"); // PRIMARY KEY
1794
1795                AstNode constraintNode = nodeFactory().node(pk_name, tableNode, mixinType);
1796                constraintNode.setProperty(CONSTRAINT_TYPE, PRIMARY_KEY);
1797
1798                // CONSUME COLUMNS
1799                parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);
1800
1801                parseConstraintAttributes(tokens, constraintNode);
1802
1803                consumeComment(tokens);
1804            } else if (tokens.matches("FOREIGN", "KEY")) {
1805                String fk_name = "FK_1"; // FOREIGN KEY NAME
1806                tokens.consume("FOREIGN", "KEY"); // FOREIGN KEY
1807
1808                if (!tokens.matches(L_PAREN)) {
1809                    // Assume the FK is Named here
1810                    fk_name = tokens.consume();
1811                }
1812
1813                AstNode constraintNode = nodeFactory().node(fk_name, tableNode, mixinType);
1814                constraintNode.setProperty(CONSTRAINT_TYPE, FOREIGN_KEY);
1815
1816                // CONSUME COLUMNS
1817                parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);
1818
1819                // Parse the references to table and columns
1820                parseReferences(tokens, constraintNode);
1821
1822                parseConstraintAttributes(tokens, constraintNode);
1823
1824                consumeComment(tokens);
1825            }
1826        } else if (tokens.matches(CONSTRAINT, TokenStream.ANY_VALUE, "UNIQUE")) {
1827            // CONSTRAINT P_KEY_2a UNIQUE (PERMISSIONUID)
1828            tokens.consume(); // CONSTRAINT
1829            String uc_name = parseName(tokens); // UNIQUE CONSTRAINT NAME
1830            tokens.consume("UNIQUE"); // UNIQUE
1831
1832            AstNode constraintNode = nodeFactory().node(uc_name, tableNode, mixinType);
1833            constraintNode.setProperty(CONSTRAINT_TYPE, UNIQUE);
1834
1835            // CONSUME COLUMNS
1836            parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);
1837
1838            parseConstraintAttributes(tokens, constraintNode);
1839
1840            consumeComment(tokens);
1841        } else if (tokens.matches(CONSTRAINT, TokenStream.ANY_VALUE, "PRIMARY", "KEY")) {
1842            // CONSTRAINT U_KEY_2a PRIMARY KEY (PERMISSIONUID)
1843            tokens.consume(CONSTRAINT); // CONSTRAINT
1844            String pk_name = parseName(tokens); // PRIMARY KEY NAME
1845            tokens.consume("PRIMARY", "KEY"); // PRIMARY KEY
1846
1847            AstNode constraintNode = nodeFactory().node(pk_name, tableNode, mixinType);
1848            constraintNode.setProperty(CONSTRAINT_TYPE, PRIMARY_KEY);
1849
1850            // CONSUME COLUMNS
1851            parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);
1852
1853            parseConstraintAttributes(tokens, constraintNode);
1854
1855            consumeComment(tokens);
1856
1857        } else if (tokens.matches(CONSTRAINT, TokenStream.ANY_VALUE, "FOREIGN", "KEY")) {
1858            // CONSTRAINT F_KEY_2a FOREIGN KEY (PERMISSIONUID)
1859            tokens.consume(CONSTRAINT); // CONSTRAINT
1860            String fk_name = parseName(tokens); // FOREIGN KEY NAME
1861            tokens.consume("FOREIGN", "KEY"); // FOREIGN KEY
1862
1863            AstNode constraintNode = nodeFactory().node(fk_name, tableNode, mixinType);
1864
1865            constraintNode.setProperty(CONSTRAINT_TYPE, FOREIGN_KEY);
1866
1867            // CONSUME COLUMNS
1868            parseColumnNameList(tokens, constraintNode, TYPE_COLUMN_REFERENCE);
1869
1870            // Parse the references to table and columns
1871            parseReferences(tokens, constraintNode);
1872
1873            parseConstraintAttributes(tokens, constraintNode);
1874
1875            consumeComment(tokens);
1876
1877        } else if (tokens.matches(CONSTRAINT, TokenStream.ANY_VALUE, CHECK)) {
1878            // CONSTRAINT zipchk CHECK (char_length(zipcode) = 5);
1879            tokens.consume(CONSTRAINT); // CONSTRAINT
1880            String ck_name = parseName(tokens); // NAME
1881            parseCheckConstraint(tokens, tableNode, mixinType, ck_name);
1882        }  else if (tokens.matches(CHECK)) {
1883             parseCheckConstraint(tokens, tableNode, mixinType, UNNAMED_CHECK_NODE_NAME);
1884        }
1885    }
1886
1887    protected void parseCheckConstraint(DdlTokenStream tokens, AstNode tableNode, String mixinType, String constraintName) {
1888        tokens.consume(CHECK); // CHECK
1889
1890        AstNode constraintNode = nodeFactory().node(constraintName, tableNode, mixinType);
1891        constraintNode.setProperty(CONSTRAINT_TYPE, CHECK);
1892
1893        String clause = consumeParenBoundedTokens(tokens, true);
1894        constraintNode.setProperty(CHECK_SEARCH_CONDITION, clause);
1895    }
1896
1897    /**
1898     * Parses the attributes associated with any in-line column constraint definition or a table constrain definition.
1899     * 
1900     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
1901     * @param constraintNode
1902     * @throws ParsingException
1903     */
1904    protected void parseConstraintAttributes( DdlTokenStream tokens,
1905                                              AstNode constraintNode ) throws ParsingException {
1906        assert tokens != null;
1907        assert constraintNode != null;
1908
1909        // Now we need to check for constraint attributes:
1910
1911        // <constraint attributes> ::=
1912        // <constraint check time> [ [ NOT ] DEFERRABLE ]
1913        // | [ NOT ] DEFERRABLE [ <constraint check time> ]
1914        //
1915        // <constraint check time> ::=
1916        // INITIALLY DEFERRED
1917        // | INITIALLY IMMEDIATE
1918
1919        // EXAMPLE : foreign key (contact_id) references contact (contact_id) on delete cascade INITIALLY DEFERRED,
1920        if (tokens.canConsume("INITIALLY", "DEFERRED")) {
1921            AstNode attrNode = nodeFactory().node("CONSTRAINT_ATTRIBUTE", constraintNode, TYPE_CONSTRAINT_ATTRIBUTE);
1922            attrNode.setProperty(PROPERTY_VALUE, "INITIALLY DEFERRED");
1923        }
1924        if (tokens.canConsume("INITIALLY", "IMMEDIATE")) {
1925            AstNode attrNode = nodeFactory().node("CONSTRAINT_ATTRIBUTE", constraintNode, TYPE_CONSTRAINT_ATTRIBUTE);
1926            attrNode.setProperty(PROPERTY_VALUE, "INITIALLY IMMEDIATE");
1927        }
1928        if (tokens.canConsume("NOT", "DEFERRABLE")) {
1929            AstNode attrNode = nodeFactory().node("CONSTRAINT_ATTRIBUTE", constraintNode, TYPE_CONSTRAINT_ATTRIBUTE);
1930            attrNode.setProperty(PROPERTY_VALUE, "NOT DEFERRABLE");
1931        }
1932        if (tokens.canConsume("DEFERRABLE")) {
1933            AstNode attrNode = nodeFactory().node("CONSTRAINT_ATTRIBUTE", constraintNode, TYPE_CONSTRAINT_ATTRIBUTE);
1934            attrNode.setProperty(PROPERTY_VALUE, "DEFERRABLE");
1935        }
1936        if (tokens.canConsume("INITIALLY", "DEFERRED")) {
1937            AstNode attrNode = nodeFactory().node("CONSTRAINT_ATTRIBUTE", constraintNode, TYPE_CONSTRAINT_ATTRIBUTE);
1938            attrNode.setProperty(PROPERTY_VALUE, "INITIALLY DEFERRED");
1939        }
1940        if (tokens.canConsume("INITIALLY", "IMMEDIATE")) {
1941            AstNode attrNode = nodeFactory().node("CONSTRAINT_ATTRIBUTE", constraintNode, TYPE_CONSTRAINT_ATTRIBUTE);
1942            attrNode.setProperty(PROPERTY_VALUE, "INITIALLY IMMEDIATE");
1943        }
1944    }
1945
1946    protected void parseReferences( DdlTokenStream tokens,
1947                                    AstNode constraintNode ) throws ParsingException {
1948        assert tokens != null;
1949        assert constraintNode != null;
1950
1951        if (tokens.matches("REFERENCES")) {
1952            tokens.consume("REFERENCES");
1953            // 'REFERENCES' referencedTableAndColumns matchType? referentialTriggeredAction?;
1954            String tableName = parseName(tokens);
1955
1956            nodeFactory().node(tableName, constraintNode, TYPE_TABLE_REFERENCE);
1957
1958            parseColumnNameList(tokens, constraintNode, TYPE_FK_COLUMN_REFERENCE);
1959
1960            tokens.canConsume("MATCH", "FULL");
1961            tokens.canConsume("MATCH", "PARTIAL");
1962
1963            //
1964            // referentialTriggeredAction : (updateRule deleteRule?) | (deleteRule updateRule?);
1965            //
1966            // deleteRule : 'ON' 'DELETE' referencialAction;
1967            //
1968            // updateRule : 'ON' 'UPDATE' referencialAction;
1969            //
1970            // referencialAction
1971            // : cascadeOption | setNullOption | setDefaultOption | noActionOption
1972            // ;
1973            //
1974            // cascadeOption : 'CASCADE';
1975            // setNullOption : 'SET' 'NULL';
1976            // setDefaultOption : 'SET' 'DEFAULT';
1977            // noActionOption : 'NO' 'ACTION';
1978            // nowOption : 'NOW' '(' ')' ;
1979
1980            // Could be one or both, so check more than once.
1981            while (tokens.canConsume("ON", "UPDATE") || tokens.canConsume("ON", "DELETE")) {
1982
1983                if (tokens.matches("CASCADE") || tokens.matches("NOW()")) {
1984                    tokens.consume();
1985                } else if (tokens.matches("SET", "NULL")) {
1986                    tokens.consume("SET", "NULL");
1987                } else if (tokens.matches("SET", "DEFAULT")) {
1988                    tokens.consume("SET", "DEFAULT");
1989                } else if (tokens.matches("NO", "ACTION")) {
1990                    tokens.consume("NO", "ACTION");
1991                } else {
1992                    LOGGER.debug(" ERROR:   ColumnDefinition REFERENCES has NO REFERENCIAL ACTION.");
1993                }
1994            }
1995        }
1996    }
1997
1998    // ===========================================================================================================================
1999    // PARSING CREATE VIEW
2000    // ===========================================================================================================================
2001
2002    /**
2003     * Parses DDL CREATE VIEW {@link AstNode} basedregisterStatementStartPhrase on SQL 92 specifications. Initial implementation
2004     * here does not parse the statement in detail.
2005     * 
2006     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2007     * @param parentNode the parent {@link AstNode} node; may not be null
2008     * @return the Create View node
2009     * @throws ParsingException
2010     */
2011    protected AstNode parseCreateViewStatement( DdlTokenStream tokens,
2012                                                AstNode parentNode ) throws ParsingException {
2013        assert tokens != null;
2014        assert parentNode != null;
2015
2016        markStartOfStatement(tokens);
2017        // <view definition> ::=
2018        // CREATE VIEW <table name> [ <left paren> <view column list><right paren> ]
2019        // AS <query expression>
2020        // [ WITH [ <levels clause> ] CHECK OPTION ]
2021        // <levels clause> ::=
2022        // CASCADED | LOCAL
2023
2024        // NOTE: the query expression along with the CHECK OPTION clause require no SQL statement terminator.
2025        // So the CHECK OPTION clause will NOT
2026
2027        String stmtType = "CREATE";
2028        tokens.consume("CREATE");
2029        if (tokens.canConsume("OR", "REPLACE")) {
2030            stmtType = stmtType + SPACE + "OR REPLACE";
2031        }
2032        tokens.consume("VIEW");
2033        stmtType = stmtType + SPACE + "VIEW";
2034
2035        String name = parseName(tokens);
2036
2037        AstNode createViewNode = nodeFactory().node(name, parentNode, TYPE_CREATE_VIEW_STATEMENT);
2038
2039        // CONSUME COLUMNS
2040        parseColumnNameList(tokens, createViewNode, TYPE_COLUMN_REFERENCE);
2041
2042        tokens.consume("AS");
2043
2044        String queryExpression = parseUntilTerminator(tokens);
2045
2046        createViewNode.setProperty(CREATE_VIEW_QUERY_EXPRESSION, queryExpression);
2047
2048        markEndOfStatement(tokens, createViewNode);
2049
2050        return createViewNode;
2051    }
2052
2053    // ===========================================================================================================================
2054    // PARSING CREATE SCHEMA
2055    // ===========================================================================================================================
2056
2057    /**
2058     * Parses DDL CREATE SCHEMA {@link AstNode} based on SQL 92 specifications. Initial implementation here does not parse the
2059     * statement in detail.
2060     * 
2061     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2062     * @param parentNode the parent {@link AstNode} node; may not be null
2063     * @return the parsed schema node
2064     * @throws ParsingException
2065     */
2066    protected AstNode parseCreateSchemaStatement( DdlTokenStream tokens,
2067                                                  AstNode parentNode ) throws ParsingException {
2068        markStartOfStatement(tokens);
2069
2070        AstNode schemaNode = null;
2071
2072        String authorizationIdentifier = null;
2073        String schemaName = null;
2074
2075        tokens.consume("CREATE", "SCHEMA");
2076
2077        if (tokens.canConsume("AUTHORIZATION")) {
2078            authorizationIdentifier = tokens.consume();
2079        } else {
2080            schemaName = parseName(tokens);
2081            if (tokens.canConsume("AUTHORIZATION")) {
2082                authorizationIdentifier = parseName(tokens);
2083            }
2084        }
2085        // Must have one or the other or both
2086        assert authorizationIdentifier != null || schemaName != null;
2087
2088        if (schemaName != null) {
2089            schemaNode = nodeFactory().node(schemaName, parentNode, TYPE_CREATE_SCHEMA_STATEMENT);
2090        } else {
2091            schemaNode = nodeFactory().node(authorizationIdentifier, parentNode, TYPE_CREATE_SCHEMA_STATEMENT);
2092        }
2093
2094        if (tokens.canConsume("DEFAULT", "CHARACTER", "SET")) {
2095            // consume name
2096            parseName(tokens);
2097        }
2098
2099        markEndOfStatement(tokens, schemaNode);
2100
2101        return schemaNode;
2102    }
2103
2104    /**
2105     * Parses DDL CREATE ASSERTION {@link AstNode} based on SQL 92 specifications. Initial implementation here does not parse the
2106     * statement's search condition in detail.
2107     * 
2108     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2109     * @param parentNode the parent {@link AstNode} node; may not be null
2110     * @return the parsed schema node
2111     * @throws ParsingException
2112     */
2113    protected AstNode parseCreateAssertionStatement( DdlTokenStream tokens,
2114                                                     AstNode parentNode ) throws ParsingException {
2115        markStartOfStatement(tokens);
2116
2117        // <assertion definition> ::=
2118        // CREATE ASSERTION <constraint name> CHECK <left paren> <search condition> <right paren> [ <constraint attributes> ]
2119
2120        AstNode node = null;
2121
2122        tokens.consume("CREATE", "ASSERTION");
2123
2124        String name = parseName(tokens);
2125
2126        // Must have one or the other or both
2127
2128        node = nodeFactory().node(name, parentNode, TYPE_CREATE_ASSERTION_STATEMENT);
2129
2130        tokens.consume(CHECK);
2131
2132        String searchCondition = consumeParenBoundedTokens(tokens, false);
2133
2134        node.setProperty(CHECK_SEARCH_CONDITION, searchCondition);
2135
2136        parseConstraintAttributes(tokens, node);
2137
2138        markEndOfStatement(tokens, node);
2139
2140        return node;
2141    }
2142
2143    // ===========================================================================================================================
2144    // PARSING CREATE XXXXX (Typed Statements)
2145    // ===========================================================================================================================
2146
2147    /**
2148     * Utility method to parse a statement that can be ignored. The value returned in the generic {@link AstNode} will contain all
2149     * text between starting token and either the terminator (if defined) or the next statement start token. NOTE: This method
2150     * does NOT mark and add consumed fragment to parent node.
2151     * 
2152     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2153     * @param name
2154     * @param parentNode the parent {@link AstNode} node; may not be null
2155     * @return the parsed generic {@link AstNode}
2156     * @throws ParsingException
2157     */
2158    protected AstNode parseIgnorableStatement( DdlTokenStream tokens,
2159                                               String name,
2160                                               AstNode parentNode ) {
2161
2162        AstNode node = nodeFactory().node(name, parentNode, TYPE_STATEMENT);
2163
2164        parseUntilTerminator(tokens);
2165
2166        // System.out.println(" >>> FOUND [" + stmt.getType() +"] STATEMENT TOKEN. IGNORING");
2167        return node;
2168    }
2169
2170    /**
2171     * Utility method to parse a statement that can be ignored. The value returned in the generic {@link AstNode} will contain all
2172     * text between starting token and either the terminator (if defined) or the next statement start token. NOTE: This method
2173     * does NOT mark and add consumed fragment to parent node.
2174     * 
2175     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2176     * @param name
2177     * @param parentNode the parent {@link AstNode} node; may not be null
2178     * @param mixinType
2179     * @return the parsed generic {@link AstNode}
2180     * @throws ParsingException
2181     */
2182    protected AstNode parseIgnorableStatement( DdlTokenStream tokens,
2183                                               String name,
2184                                               AstNode parentNode,
2185                                               String mixinType ) {
2186        CheckArg.isNotNull(tokens, "tokens");
2187        CheckArg.isNotNull(name, "name");
2188        CheckArg.isNotNull(parentNode, "parentNode");
2189        CheckArg.isNotNull(mixinType, "mixinType");
2190
2191        AstNode node = nodeFactory().node(name, parentNode, mixinType);
2192
2193        parseUntilTerminator(tokens);
2194
2195        return node;
2196    }
2197
2198    /**
2199     * Utility method to parse a generic statement given a start phrase and statement mixin type.
2200     * 
2201     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2202     * @param stmt_start_phrase the string array statement start phrase
2203     * @param parentNode the parent of the newly created node.
2204     * @param mixinType the mixin type of the newly created statement node
2205     * @return the new node
2206     */
2207    protected AstNode parseStatement( DdlTokenStream tokens,
2208                                      String[] stmt_start_phrase,
2209                                      AstNode parentNode,
2210                                      String mixinType ) {
2211        CheckArg.isNotNull(tokens, "tokens");
2212        CheckArg.isNotNull(stmt_start_phrase, "stmt_start_phrase");
2213        CheckArg.isNotNull(parentNode, "parentNode");
2214        CheckArg.isNotNull(mixinType, "mixinType");
2215
2216        markStartOfStatement(tokens);
2217        tokens.consume(stmt_start_phrase);
2218        AstNode result = parseIgnorableStatement(tokens, getStatementTypeName(stmt_start_phrase), parentNode, mixinType);
2219        markEndOfStatement(tokens, result);
2220
2221        return result;
2222    }
2223
2224    /**
2225     * Constructs a terminator AstNode as child of root node
2226     * 
2227     * @param parentNode the parent {@link AstNode} node; may not be null
2228     * @return terminator node
2229     */
2230    public final AstNode unknownTerminatedNode( AstNode parentNode ) {
2231        return nodeFactory.node("unknownStatement", parentNode, StandardDdlLexicon.TYPE_UNKNOWN_STATEMENT);
2232    }
2233
2234    /**
2235     * Constructs a terminator AstNode as child of root node
2236     * 
2237     * @param parentNode the parent {@link AstNode} node; may not be null
2238     * @return terminator node
2239     */
2240    public final AstNode missingTerminatorNode( AstNode parentNode ) {
2241        return nodeFactory.node("missingTerminator", parentNode, StandardDdlLexicon.TYPE_MISSING_TERMINATOR);
2242    }
2243
2244    public final boolean isMissingTerminatorNode( AstNode node ) {
2245        return node.getName().equals(MISSING_TERMINATOR_NODE_LITERAL)
2246               && nodeFactory().hasMixinType(node, TYPE_MISSING_TERMINATOR);
2247    }
2248
2249    public final boolean isValidSchemaChild( AstNode node ) {
2250        List<String> schemaChildMixins = Arrays.asList(getValidSchemaChildTypes());
2251
2252        for (String mixin : node.getMixins()) {
2253            if (schemaChildMixins.contains(mixin)) {
2254                return true;
2255            }
2256        }
2257
2258        return false;
2259    }
2260
    /**
     * Attempts to reparent the supplied statement node under the most recently parsed CREATE SCHEMA node, if the root's
     * recent children indicate the parse is still inside that schema.
     * 
     * @param statementNode the statement node to possibly reparent; may not be null
     * @param stmtIsMissingTerminator true if the statement was not ended with a terminator
     * @return true if the node was made a child of the schema node, otherwise false
     */
    public final boolean setAsSchemaChildNode( AstNode statementNode,
                                               boolean stmtIsMissingTerminator ) {

        if (!isValidSchemaChild(statementNode)) {
            return false;
        }

        // Because we are setting the schema children on the fly we can assume that if we are under a schema with children, then
        // the schema should be followed by a missing terminator node. So we just check the previous 2 nodes.

        List<AstNode> children = getRootNode().getChildren();

        if (children.size() > 2) {
            // NOTE(review): this indexing presumes the last root child is the statement just parsed, so the node two back
            // is the candidate schema — verify against the caller's sequencing.
            AstNode previousNode = children.get(children.size() - 2);
            if (nodeFactory().hasMixinType(previousNode, TYPE_MISSING_TERMINATOR)) {
                AstNode theSchemaNode = children.get(children.size() - 3);

                // If the last child of a schema is missing terminator, then the schema isn't complete.
                // If it is NOT a missing terminator, we aren't under a schema node anymore.
                if (theSchemaNode.getChildCount() == 0
                    || nodeFactory().hasMixinType(theSchemaNode.getLastChild(), TYPE_MISSING_TERMINATOR)) {
                    if (nodeFactory().hasMixinType(theSchemaNode, TYPE_CREATE_SCHEMA_STATEMENT)) {
                        statementNode.setParent(theSchemaNode);
                        if (stmtIsMissingTerminator) {
                            // Mirror the missing terminator under the schema so later statements can chain the same check.
                            missingTerminatorNode(theSchemaNode);
                        }
                        return true;
                    }
                }
            }
        }

        return false;
    }
2295
2296    /**
2297     * Returns current terminator
2298     * 
2299     * @return terminator string value
2300     */
2301    protected String getTerminator() {
2302        return this.terminator;
2303    }
2304
2305    /**
2306     * @param terminator the string value used as the statement terminator for the ddl dialect
2307     * @return if terminator was changed or not
2308     */
2309    protected boolean setTerminator( String terminator ) {
2310        CheckArg.isNotNull(terminator, "terminator");
2311        if (this.terminator.equalsIgnoreCase(terminator)) {
2312            return false;
2313        }
2314        this.terminator = terminator;
2315        return true;
2316    }
2317
    /**
     * Returns the mixin types that are valid for children of a CREATE SCHEMA node.
     * 
     * @return array of valid schema child mixin types
     */
    protected String[] getValidSchemaChildTypes() {
        return VALID_SCHEMA_CHILD_TYPES;
    }
2321
2322    /**
2323     * Checks if next token is of type comment.
2324     * 
2325     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2326     * @return true if next token is a comment.
2327     * @throws ParsingException
2328     */
2329    protected boolean isComment( DdlTokenStream tokens ) throws ParsingException {
2330        return tokens.matches(DdlTokenizer.COMMENT);
2331    }
2332
2333    /**
2334     * Consumes an an end-of-line comment or in-line comment
2335     * 
2336     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2337     * @return true if a comment was found and consumed
2338     * @throws ParsingException
2339     */
2340    protected boolean consumeComment( DdlTokenStream tokens ) throws ParsingException {
2341        return tokens.canConsume(DdlTokenizer.COMMENT);
2342    }
2343
2344    /**
2345     * This utility method provides this parser the ability to distinguish between a CreateTable Constraint and a ColumnDefinition
2346     * Definition which are the only two statement segment types allowed within the CREATE TABLE parenthesis ( xxxxxx );
2347     * 
2348     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2349     * @return is table constraint
2350     * @throws ParsingException
2351     */
2352    protected boolean isTableConstraint( DdlTokenStream tokens ) throws ParsingException {
2353        boolean result = false;
2354
2355        if ((tokens.matches("PRIMARY", "KEY")) || (tokens.matches("FOREIGN", "KEY")) || (tokens.matches("UNIQUE")) || tokens.matches(
2356                CHECK)) {
2357            result = true;
2358        } else if (tokens.matches(CONSTRAINT)) {
2359            if (tokens.matches(CONSTRAINT, TokenStream.ANY_VALUE, "UNIQUE")
2360                || tokens.matches(CONSTRAINT, TokenStream.ANY_VALUE, "PRIMARY", "KEY")
2361                || tokens.matches(CONSTRAINT, TokenStream.ANY_VALUE, "FOREIGN", "KEY")
2362                || tokens.matches(CONSTRAINT, TokenStream.ANY_VALUE, CHECK)) {
2363                result = true;
2364            }
2365        }
2366
2367        return result;
2368    }
2369
2370    /**
2371     * This utility method provides this parser the ability to distinguish between a CreateTable Constrain and a ColumnDefinition
2372     * Definition which are the only two statement segment types allowed within the CREATE TABLE parenthesis ( xxxxxx );
2373     * 
2374     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2375     * @return is column definition start phrase
2376     * @throws ParsingException
2377     */
2378    protected boolean isColumnDefinitionStart( DdlTokenStream tokens ) throws ParsingException {
2379        boolean result = false;
2380
2381        if (isTableConstraint(tokens)) {
2382            result = false;
2383        } else {
2384            for (String dTypeStartWord : getDataTypeStartWords()) {
2385                result = (tokens.matches(TokenStream.ANY_VALUE, dTypeStartWord) || tokens.matches("COLUMN",
2386                                                                                                  TokenStream.ANY_VALUE,
2387                                                                                                  dTypeStartWord));
2388                if (result) {
2389                    break;
2390                }
2391            }
2392
2393        }
2394
2395        return result;
2396    }
2397
2398    /**
2399     * Returns a list of data type start words which can be used to help identify a column definition sub-statement.
2400     * 
2401     * @return list of data type start words
2402     */
2403    protected List<String> getDataTypeStartWords() {
2404        if (allDataTypeStartWords == null) {
2405            allDataTypeStartWords = new ArrayList<String>();
2406            allDataTypeStartWords.addAll(DataTypes.DATATYPE_START_WORDS);
2407            allDataTypeStartWords.addAll(getCustomDataTypeStartWords());
2408        }
2409        return allDataTypeStartWords;
2410    }
2411
2412    /**
2413     * Returns a list of custom data type start words which can be used to help identify a column definition sub-statement.
2414     * Sub-classes should override this method to contribute DB-specific data types.
2415     * 
2416     * @return list of data type start words
2417     */
2418    protected List<String> getCustomDataTypeStartWords() {
2419        return Collections.emptyList();
2420    }
2421
2422    /**
2423     * Method to parse fully qualified schema, table and column names that are defined with '.' separator and optionally bracketed
2424     * with square brackets Example: partsSchema.supplier Example: [partsSchema].[supplier]
2425     * 
2426     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2427     * @return the parsed name
2428     */
2429    protected String parseName( DdlTokenStream tokens ) {
2430        // Basically we want to construct a name that could have the form:
2431        // [schemaName].[tableName].[columnName]
2432        // NOTE: "[]" brackets are optional
2433        StringBuilder sb = new StringBuilder();
2434
2435        if (tokens.matches('[')) {
2436            // We have the bracketed case, so assume all brackets
2437            while (true) {
2438
2439                tokens.consume('['); // [ bracket
2440                sb.append(consumeIdentifier(tokens)); // name
2441                tokens.consume(']'); // ] bracket
2442                if (tokens.matches('.')) {
2443                    sb.append(tokens.consume()); // '.'
2444                } else {
2445                    break;
2446                }
2447            }
2448        } else {
2449
2450            // We have the NON-bracketed case, so assume all brackets
2451            while (true) {
2452
2453                sb.append(consumeIdentifier(tokens)); // name
2454
2455                if (tokens.matches('.')) {
2456                    sb.append(tokens.consume()); // '.'
2457                } else {
2458                    break;
2459                }
2460
2461            }
2462        }
2463
2464        return sb.toString();
2465    }
2466
2467    /**
2468     * Consumes an token identifier which can be of the form of a simple string or a double-quoted string.
2469     * 
2470     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2471     * @return the identifier
2472     * @throws ParsingException
2473     */
2474    protected String consumeIdentifier( DdlTokenStream tokens ) throws ParsingException {
2475        String value = tokens.consume();
2476        // This may surrounded by quotes, so remove them ...
2477        if (value.charAt(0) == '"') {
2478            int length = value.length();
2479            // Check for the end quote ...
2480            value = value.substring(1, length - 1); // not complete!!
2481        }
2482        // TODO: Handle warnings elegantly
2483        // else {
2484        // // Not quoted, so check for reserved words ...
2485        // if (tokens.isKeyWord(value)) {
2486        // // Record warning ...
2487        // System.out.println("  WARNING:  Identifier [" + value + "] is a SQL 92 Reserved Word");
2488        // }
2489        // }
2490        return value;
2491    }
2492
2493    /**
2494     * Utility method to determine if next token is a terminator.
2495     * 
2496     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2497     * @return is terminator token
2498     * @throws ParsingException
2499     */
2500    protected boolean isTerminator( DdlTokenStream tokens ) throws ParsingException {
2501        boolean result = tokens.matches(getTerminator());
2502
2503        return result;
2504    }
2505
2506    /**
2507     * Adds column reference nodes to a parent node. Returns true if column references added, false if not.
2508     * 
2509     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2510     * @param parentNode the parent node
2511     * @param referenceType the type of the reference node to create
2512     * @return true if the column references were found and added to the node, or false if there were no column references found
2513     *         in the stream
2514     */
2515    protected boolean parseColumnNameList( DdlTokenStream tokens,
2516                                           AstNode parentNode,
2517                                           String referenceType ) {
2518        boolean parsedColumns = false;
2519        // CONSUME COLUMNS
2520        List<String> columnNameList = new ArrayList<String>();
2521        if (tokens.matches(L_PAREN)) {
2522            tokens.consume(L_PAREN);
2523            columnNameList = parseNameList(tokens);
2524
2525            if (!columnNameList.isEmpty()) {
2526                parsedColumns = true;
2527            }
2528
2529            tokens.consume(R_PAREN);
2530        }
2531
2532        for (String columnName : columnNameList) {
2533            nodeFactory().node(columnName, parentNode, referenceType);
2534        }
2535
2536        return parsedColumns;
2537    }
2538
2539    /**
2540     * Parses a comma separated list of names.
2541     * 
2542     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2543     * @return list of names (never <code>null</code>)
2544     * @throws ParsingException
2545     */
2546    protected List<String> parseNameList( DdlTokenStream tokens ) throws ParsingException {
2547        List<String> names = new LinkedList<String>();
2548
2549        while (true) {
2550            names.add(parseName(tokens));
2551
2552            if (!tokens.canConsume(COMMA)) {
2553                break;
2554            }
2555        }
2556
2557        return names;
2558    }
2559
2560    /**
2561     * Utility method which parses tokens until a terminator is found, another statement is identified or there are no more
2562     * tokens.
2563     * 
2564     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2565     * @return the parsed string
2566     * @throws ParsingException
2567     */
2568    protected String parseUntilTerminator( DdlTokenStream tokens ) throws ParsingException {
2569        final StringBuilder sb = new StringBuilder();
2570        boolean lastTokenWasPeriod = false;
2571        Position prevPosition = (tokens.hasNext() ? tokens.nextPosition() : Position.EMPTY_CONTENT_POSITION);
2572        String prevToken = "";
2573
2574        while (tokens.hasNext() && !tokens.matches(DdlTokenizer.STATEMENT_KEY)
2575               && ((doUseTerminator() && !isTerminator(tokens)) || !doUseTerminator())) {
2576            final Position currPosition = tokens.nextPosition();
2577            final String thisToken = tokens.consume();
2578            final boolean thisTokenIsPeriod = thisToken.equals(PERIOD);
2579            final boolean thisTokenIsComma = thisToken.equals(COMMA);
2580
2581            if (lastTokenWasPeriod || thisTokenIsPeriod || thisTokenIsComma) {
2582                sb.append(thisToken);
2583            } else if ((currPosition.getIndexInContent() - prevPosition.getIndexInContent() - prevToken.length()) > 0) {
2584                sb.append(SPACE).append(thisToken);
2585            } else {
2586                sb.append(thisToken);
2587            }
2588
2589            if (thisTokenIsPeriod) {
2590                lastTokenWasPeriod = true;
2591            } else {
2592                lastTokenWasPeriod = false;
2593            }
2594
2595            prevToken = thisToken;
2596            prevPosition = currPosition;
2597        }
2598
2599        return sb.toString();
2600    }
2601
2602    /**
2603     * Utility method which parses tokens until a terminator is found or there are no more tokens. This method differs from
2604     * parseUntilTermintor() in that it ignores embedded statements. This method can be used for parsers that have statements
2605     * which can embed statements that should not be parsed.
2606     * 
2607     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2608     * @return the parsed string
2609     * @throws ParsingException
2610     */
2611    protected String parseUntilTerminatorIgnoreEmbeddedStatements( DdlTokenStream tokens ) throws ParsingException {
2612        StringBuilder sb = new StringBuilder();
2613        boolean lastTokenWasPeriod = false;
2614        Position prevPosition = Position.EMPTY_CONTENT_POSITION;
2615        String prevToken = "";
2616
2617        while (tokens.hasNext() && !isTerminator(tokens)) {
2618            final Position currPosition = tokens.nextPosition();
2619            String thisToken = tokens.consume();
2620            boolean thisTokenIsPeriod = thisToken.equals(PERIOD);
2621            boolean thisTokenIsComma = thisToken.equals(COMMA);
2622
2623            if (lastTokenWasPeriod || thisTokenIsPeriod || thisTokenIsComma) {
2624                sb.append(thisToken);
2625            } else if ((currPosition.getIndexInContent() - prevPosition.getIndexInContent() - prevToken.length()) > 0) {
2626                sb.append(SPACE).append(thisToken);
2627            } else {
2628                sb.append(thisToken);
2629            }
2630
2631            if (thisTokenIsPeriod) {
2632                lastTokenWasPeriod = true;
2633            } else {
2634                lastTokenWasPeriod = false;
2635            }
2636
2637            prevToken = thisToken;
2638            prevPosition = currPosition;
2639        }
2640
2641        return sb.toString();
2642    }
2643
2644    /**
2645     * Utility method which parses tokens until a semicolon is found or there are no more tokens.
2646     * 
2647     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2648     * @return the parsed string
2649     * @throws ParsingException
2650     */
2651    protected String parseUntilSemiColon( DdlTokenStream tokens ) throws ParsingException {
2652        StringBuilder sb = new StringBuilder();
2653
2654        boolean lastTokenWasPeriod = false;
2655        while (tokens.hasNext() && !tokens.matches(SEMICOLON)) {
2656            String thisToken = tokens.consume();
2657            boolean thisTokenIsPeriod = thisToken.equals(PERIOD);
2658            boolean thisTokenIsComma = thisToken.equals(COMMA);
2659            if (lastTokenWasPeriod || thisTokenIsPeriod || thisTokenIsComma) {
2660                sb.append(thisToken);
2661            } else {
2662                sb.append(SPACE).append(thisToken);
2663            }
2664            if (thisTokenIsPeriod) {
2665                lastTokenWasPeriod = true;
2666            } else {
2667                lastTokenWasPeriod = false;
2668            }
2669        }
2670
2671        return sb.toString();
2672    }
2673
2674    protected String parseUntilCommaOrTerminator( DdlTokenStream tokens ) throws ParsingException {
2675        StringBuilder sb = new StringBuilder();
2676        if (doUseTerminator()) {
2677            while (tokens.hasNext() && !tokens.matches(DdlTokenizer.STATEMENT_KEY) && !isTerminator(tokens)
2678                   && !tokens.matches(COMMA)) {
2679                sb.append(SPACE).append(tokens.consume());
2680            }
2681        } else {
2682            // parse until next statement
2683            while (tokens.hasNext() && !tokens.matches(DdlTokenizer.STATEMENT_KEY) && !tokens.matches(COMMA)) {
2684                sb.append(SPACE).append(tokens.consume());
2685            }
2686        }
2687
2688        return sb.toString();
2689    }
2690
2691    /**
2692     * Returns if parser is using statement terminator or not.
2693     * 
2694     * @return value of useTerminator flag.
2695     */
2696    public boolean doUseTerminator() {
2697        return useTerminator;
2698    }
2699
2700    /**
2701     * Sets the value of the use terminator flag for the parser. If TRUE, then all statements are expected to be terminated by a
2702     * terminator. The default terminator ";" can be overridden by setting the value using setTerminator() method.
2703     * 
2704     * @param useTerminator
2705     */
2706    public void setDoUseTerminator( boolean useTerminator ) {
2707        this.useTerminator = useTerminator;
2708    }
2709
2710    public String getStatementTypeName( String[] stmtPhrase ) {
2711        StringBuilder sb = new StringBuilder(100);
2712        for (int i = 0; i < stmtPhrase.length; i++) {
2713            if (i == 0) {
2714                sb.append(stmtPhrase[0]);
2715            } else {
2716                sb.append(SPACE).append(stmtPhrase[i]);
2717            }
2718        }
2719
2720        return sb.toString();
2721    }
2722
2723    /**
2724     * Parses the default clause for a column and sets appropriate properties on the column node.
2725     * 
2726     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2727     * @param columnNode the column node which may contain a default clause; may not be null
2728     * @return true if default clause was found and parsed, otherwise false
2729     * @throws ParsingException
2730     */
2731    protected boolean parseDefaultClause( DdlTokenStream tokens,
2732                                          AstNode columnNode ) throws ParsingException {
2733
2734        assert tokens != null;
2735        assert columnNode != null;
2736
2737        // defaultClause
2738        // : defaultOption
2739        // ;
2740        // defaultOption : <literal> | datetimeValueFunction
2741        // | USER | CURRENT_USER | SESSION_USER | SYSTEM_USER | NULL;
2742        //
2743        // <datetime value function> ::=
2744        // <current date value function>
2745        // | <current time value function>
2746        // | <current timestamp value function>
2747        //
2748        // <current date value function> ::= CURRENT_DATE
2749        //
2750        // <current time value function> ::=
2751        // CURRENT_TIME [ <left paren> <time precision> <right paren> ]
2752        //
2753        // <current timestamp value function> ::=
2754        // CURRENT_TIMESTAMP [ <left paren> <timestamp precision> <right paren> ]
2755
2756        String defaultValue = "";
2757
2758        if (tokens.canConsume("DEFAULT")) {
2759
2760            String optionID;
2761            int precision = -1;
2762
2763            if (tokens.canConsume("CURRENT_DATE") || tokens.canConsume("'CURRENT_DATE'")) {
2764                optionID = DEFAULT_ID_DATETIME;
2765                defaultValue = "CURRENT_DATE";
2766            } else if (tokens.canConsume("CURRENT_TIME") || tokens.canConsume("'CURRENT_TIME'")) {
2767                optionID = DEFAULT_ID_DATETIME;
2768                defaultValue = "CURRENT_TIME";
2769                if (tokens.canConsume(L_PAREN)) {
2770                    // EXPECT INTEGER
2771                    precision = integer(tokens.consume());
2772                    tokens.canConsume(R_PAREN);
2773                }
2774            } else if (tokens.canConsume("CURRENT_TIMESTAMP") || tokens.canConsume("'CURRENT_TIMESTAMP'")) {
2775                optionID = DEFAULT_ID_DATETIME;
2776                defaultValue = "CURRENT_TIMESTAMP";
2777                if (tokens.canConsume(L_PAREN)) {
2778                    // EXPECT INTEGER
2779                    precision = integer(tokens.consume());
2780                    tokens.canConsume(R_PAREN);
2781                }
2782            } else if (tokens.canConsume("USER") || tokens.canConsume("'USER'")) {
2783                optionID = DEFAULT_ID_USER;
2784                defaultValue = "USER";
2785            } else if (tokens.canConsume("CURRENT_USER") || tokens.canConsume("'CURRENT_USER'")) {
2786                optionID = DEFAULT_ID_CURRENT_USER;
2787                defaultValue = "CURRENT_USER";
2788            } else if (tokens.canConsume("SESSION_USER") || tokens.canConsume("'SESSION_USER'")) {
2789                optionID = DEFAULT_ID_SESSION_USER;
2790                defaultValue = "SESSION_USER";
2791            } else if (tokens.canConsume("SYSTEM_USER") || tokens.canConsume("'SYSTEM_USER'")) {
2792                optionID = DEFAULT_ID_SYSTEM_USER;
2793                defaultValue = "SYSTEM_USER";
2794            } else if (tokens.canConsume("NULL") || tokens.canConsume("NULL")) {
2795                optionID = DEFAULT_ID_NULL;
2796                defaultValue = "NULL";
2797            } else if (tokens.canConsume(L_PAREN)) {
2798                optionID = DEFAULT_ID_LITERAL;
2799                while (!tokens.canConsume(R_PAREN)) {
2800                    defaultValue = defaultValue + tokens.consume();
2801                }
2802            } else {
2803                optionID = DEFAULT_ID_LITERAL;
2804                // Assume default was EMPTY or ''
2805                defaultValue = tokens.consume();
2806
2807                // strip quotes if necessary
2808                if (defaultValue.startsWith("'") && defaultValue.endsWith("'")) {
2809                    if (defaultValue.length() > 2) {
2810                        defaultValue = defaultValue.substring(1, defaultValue.lastIndexOf('\''));
2811                    } else {
2812                        defaultValue = "";
2813                    }
2814                }
2815
2816                // NOTE: default value could be a Real number as well as an integer, so
2817                // 1000.00 is valid
2818                if (tokens.canConsume(".")) {
2819                    defaultValue = defaultValue + '.' + tokens.consume();
2820                }
2821            }
2822
2823            columnNode.setProperty(DEFAULT_OPTION, optionID);
2824            columnNode.setProperty(DEFAULT_VALUE, defaultValue);
2825            if (precision > -1) {
2826                columnNode.setProperty(DEFAULT_PRECISION, precision);
2827            }
2828            return true;
2829        }
2830
2831        return false;
2832    }
2833
2834    /**
2835     * Parses the default clause for a column and sets appropriate properties on the column node.
2836     * 
2837     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2838     * @param columnNode the column node which may contain a collate clause; may not be null
2839     * @return true if collate clause was found and parsed else return false.
2840     * @throws ParsingException
2841     */
2842    protected boolean parseCollateClause( DdlTokenStream tokens,
2843                                          AstNode columnNode ) throws ParsingException {
2844        assert tokens != null;
2845        assert columnNode != null;
2846
2847        // an option in the CREATE DOMAIN definition
2848        //
2849        // <collate clause> ::= COLLATE <collation name>
2850
2851        if (tokens.matches("COLLATE")) {
2852            tokens.consume("COLLATE");
2853            String collationName = parseName(tokens);
2854            columnNode.setProperty(COLLATION_NAME, collationName);
2855            return true;
2856        }
2857
2858        return false;
2859    }
2860
2861    /**
2862     * Returns the integer value of the input string. Handles both straight integer string or complex KMG (CLOB or BLOB) value.
2863     * Throws {@link NumberFormatException} if a valid integer is not found.
2864     * 
2865     * @param value the string to be parsed; may not be null and length must be > 0;
2866     * @return integer value
2867     */
2868    protected int integer( String value ) {
2869        assert value != null;
2870        assert value.length() > 0;
2871
2872        return new BigInteger(value).intValue();
2873    }
2874
    /**
     * Returns the position recorded by the most recent call to {@link #markStartOfStatement(DdlTokenStream)}.
     * 
     * @return the marked starting position of the current statement; presumably null until a statement is first marked
     */
    public final Position getCurrentMarkedPosition() {
        return currentMarkedPosition;
    }
2878
2879    /**
2880     * Marks the token stream with the current position to help track statement scope within the original input string.
2881     * 
2882     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2883     */
2884    public final void markStartOfStatement( DdlTokenStream tokens ) {
2885        tokens.mark();
2886        currentMarkedPosition = tokens.nextPosition();
2887    }
2888
2889    /**
2890     * Marks the end of a statement by consuming the terminator (if exists). If it does not exist, a missing terminator node may
2891     * be added. If the resulting statement node is a valid child node type for a schema, the child node may be re-parented to the
2892     * schema if the schema is still parentable. Each resulting statement node is tagged with the enclosing source expression,
2893     * starting line number and column number from the file content as well as a starting character index from that same content.
2894     * 
2895     * @param tokens the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
2896     * @param statementNode
2897     */
2898    public final void markEndOfStatement( DdlTokenStream tokens,
2899                                          AstNode statementNode ) {
2900        if (!tokens.canConsume(getTerminator())) {
2901            // System.out.println("  WARNING:  Terminator NOT FOUND");
2902
2903            // Check previous until
2904            // 1) find two sequential nodes that are not missing terminator nodes
2905            // 2) the node before the missing terminator is a valid schema child and
2906            // 3) we find a schema node that is ALSO missing a terminator BEFORE we find an invalid schema child OR a terminated
2907            // node.
2908
2909            if (!setAsSchemaChildNode(statementNode, true)) {
2910                missingTerminatorNode(getRootNode()); // Construct missing terminator node
2911            }
2912        } else {
2913            setAsSchemaChildNode(statementNode, false);
2914        }
2915
2916        String source = tokens.getMarkedContent().trim();
2917        statementNode.setProperty(DDL_EXPRESSION, source);
2918        statementNode.setProperty(DDL_LENGTH, source.length());
2919        statementNode.setProperty(DDL_START_LINE_NUMBER, currentMarkedPosition.getLine());
2920        statementNode.setProperty(DDL_START_CHAR_INDEX, currentMarkedPosition.getIndexInContent());
2921        statementNode.setProperty(DDL_START_COLUMN_NUMBER, currentMarkedPosition.getColumn());
2922
2923        testPrint("== >> SOURCE:\n" + source + "\n");
2924    }
2925
2926    /**
2927     * {@inheritDoc}
2928     * 
2929     * @see org.modeshape.sequencer.ddl.DdlParser#postProcess(org.modeshape.sequencer.ddl.node.AstNode)
2930     */
2931    @Override
2932    public void postProcess( AstNode rootNode ) {
2933        // Default behavior is no post processing
2934        // Subclasses will need to override this method
2935    }
2936
2937    protected void testPrint( String str ) {
2938        if (isTestMode()) {
2939            // CHECKSTYLE IGNORE check FOR NEXT 1 LINES
2940            System.out.println(str);
2941        }
2942    }
2943
2944    /**
2945     * @return testMode
2946     */
2947    public boolean isTestMode() {
2948        return testMode;
2949    }
2950
2951    /**
2952     * @param testMode Sets testMode to the specified value.
2953     */
2954    public void setTestMode( boolean testMode ) {
2955        this.testMode = testMode;
2956    }
2957
2958    /**
2959     * {@inheritDoc}
2960     * 
2961     * @see org.modeshape.sequencer.ddl.DdlParser#getId()
2962     */
2963    @Override
2964    public String getId() {
2965        return ID;
2966    }
2967
2968    /**
2969     * {@inheritDoc}
2970     * 
2971     * @see java.lang.Object#hashCode()
2972     */
2973    @Override
2974    public int hashCode() {
2975        return getId().hashCode();
2976    }
2977
2978    /**
2979     * {@inheritDoc}
2980     * 
2981     * @see java.lang.Object#equals(java.lang.Object)
2982     */
2983    @Override
2984    public boolean equals( Object obj ) {
2985        if (obj == this) return true;
2986        if (obj instanceof DdlParser) {
2987            return ((DdlParser)obj).getId().equals(this.getId());
2988        }
2989        return false;
2990    }
2991}