001/* 002 * ModeShape (http://www.modeshape.org) 003 * 004 * Licensed under the Apache License, Version 2.0 (the "License"); 005 * you may not use this file except in compliance with the License. 006 * You may obtain a copy of the License at 007 * 008 * http://www.apache.org/licenses/LICENSE-2.0 009 * 010 * Unless required by applicable law or agreed to in writing, software 011 * distributed under the License is distributed on an "AS IS" BASIS, 012 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 013 * See the License for the specific language governing permissions and 014 * limitations under the License. 015 */ 016package org.modeshape.sequencer.ddl.dialect.derby; 017 018import static org.modeshape.sequencer.ddl.StandardDdlLexicon.ALL_PRIVILEGES; 019import static org.modeshape.sequencer.ddl.StandardDdlLexicon.COLUMN_ATTRIBUTE; 020import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DDL_EXPRESSION; 021import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DDL_LENGTH; 022import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DDL_START_CHAR_INDEX; 023import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DDL_START_COLUMN_NUMBER; 024import static org.modeshape.sequencer.ddl.StandardDdlLexicon.DDL_START_LINE_NUMBER; 025import static org.modeshape.sequencer.ddl.StandardDdlLexicon.GRANTEE; 026import static org.modeshape.sequencer.ddl.StandardDdlLexicon.GRANT_PRIVILEGE; 027import static org.modeshape.sequencer.ddl.StandardDdlLexicon.NEW_NAME; 028import static org.modeshape.sequencer.ddl.StandardDdlLexicon.PROPERTY_VALUE; 029import static org.modeshape.sequencer.ddl.StandardDdlLexicon.SQL; 030import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE; 031import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_ALTER_COLUMN_DEFINITION; 032import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_ALTER_TABLE_STATEMENT; 033import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_COLUMN_DEFINITION; 034import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_COLUMN_REFERENCE; 035import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_CREATE_TABLE_STATEMENT; 036import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_COLUMN_DEFINITION; 037import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_DROP_TABLE_CONSTRAINT_DEFINITION; 038import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_GRANT_ON_TABLE_STATEMENT; 039import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_SIMPLE_PROPERTY; 040import static org.modeshape.sequencer.ddl.StandardDdlLexicon.TYPE_STATEMENT_OPTION; 041import static org.modeshape.sequencer.ddl.StandardDdlLexicon.VALUE; 042import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.IS_TABLE_TYPE; 043import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.ORDER; 044import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.PARAMETER_STYLE; 045import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.ROLE_NAME; 046import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TABLE_NAME; 047import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_CREATE_FUNCTION_STATEMENT; 048import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_CREATE_INDEX_STATEMENT; 049import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_CREATE_PROCEDURE_STATEMENT; 050import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_CREATE_ROLE_STATEMENT; 051import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_CREATE_SYNONYM_STATEMENT; 052import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_CREATE_TRIGGER_STATEMENT; 053import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_DECLARE_GLOBAL_TEMPORARY_TABLE_STATEMENT; 054import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_DROP_FUNCTION_STATEMENT; 055import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_DROP_INDEX_STATEMENT; 056import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_DROP_PROCEDURE_STATEMENT; 057import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_DROP_ROLE_STATEMENT; 058import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_DROP_SYNONYM_STATEMENT; 059import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_DROP_TRIGGER_STATEMENT; 060import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_FUNCTION_PARAMETER; 061import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_GRANT_ON_FUNCTION_STATEMENT; 062import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_GRANT_ON_PROCEDURE_STATEMENT; 063import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_GRANT_ROLES_STATEMENT; 064import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_INDEX_COLUMN_REFERENCE; 065import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_LOCK_TABLE_STATEMENT; 066import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_RENAME_INDEX_STATEMENT; 067import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.TYPE_RENAME_TABLE_STATEMENT; 068import static org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlLexicon.UNIQUE_INDEX; 069import java.util.ArrayList; 070import java.util.List; 071import org.modeshape.common.text.ParsingException; 072import org.modeshape.common.text.TokenStream; 073import org.modeshape.sequencer.ddl.DdlParserProblem; 074import org.modeshape.sequencer.ddl.DdlSequencerI18n; 075import org.modeshape.sequencer.ddl.DdlTokenStream; 076import org.modeshape.sequencer.ddl.DdlTokenStream.DdlTokenizer; 077import org.modeshape.sequencer.ddl.StandardDdlLexicon; 078import org.modeshape.sequencer.ddl.StandardDdlParser; 079import org.modeshape.sequencer.ddl.datatype.DataType; 080import org.modeshape.sequencer.ddl.datatype.DataTypeParser; 081import org.modeshape.sequencer.ddl.node.AstNode; 082 083/** 084 * Derby-specific DDL Parser. Includes custom data types as well as custom DDL statements. 085 */ 086public class DerbyDdlParser extends StandardDdlParser implements DerbyDdlConstants, DerbyDdlConstants.DerbyStatementStartPhrases { 087 088 /** 089 * The Derby parser identifier. 090 */ 091 public static final String ID = "DERBY"; 092 093 protected static final List<String[]> derbyDataTypeStrings = new ArrayList<String[]>( 094 DerbyDataTypes.CUSTOM_DATATYPE_START_PHRASES); 095 096 private static final String TERMINATOR = DEFAULT_TERMINATOR; 097 098 public DerbyDdlParser() { 099 setDatatypeParser(new DerbyDataTypeParser()); 100 setDoUseTerminator(true); 101 setTerminator(TERMINATOR); 102 } 103 104 /** 105 * {@inheritDoc} 106 * 107 * @see org.modeshape.sequencer.ddl.StandardDdlParser#initializeTokenStream(org.modeshape.sequencer.ddl.DdlTokenStream) 108 */ 109 @Override 110 protected void initializeTokenStream( DdlTokenStream tokens ) { 111 super.initializeTokenStream(tokens); 112 tokens.registerKeyWords(CUSTOM_KEYWORDS); 113 tokens.registerStatementStartPhrase(ALTER_PHRASES); 114 tokens.registerStatementStartPhrase(CREATE_PHRASES); 115 tokens.registerStatementStartPhrase(DROP_PHRASES); 116 tokens.registerStatementStartPhrase(SET_PHRASES); 117 tokens.registerStatementStartPhrase(MISC_PHRASES); 118 } 119 120 /** 121 * {@inheritDoc} 122 * 123 * @see org.modeshape.sequencer.ddl.StandardDdlParser#getId() 124 */ 125 @Override 126 public String getId() { 127 return ID; 128 } 129 130 /** 131 * {@inheritDoc} 132 * 133 * @see org.modeshape.sequencer.ddl.StandardDdlParser#getValidSchemaChildTypes() 134 */ 135 @Override 136 protected String[] getValidSchemaChildTypes() { 137 return VALID_SCHEMA_CHILD_STMTS; 138 } 139 140 /** 141 * {@inheritDoc} 142 * 143 * @see org.modeshape.sequencer.ddl.StandardDdlParser#parseCustomStatement(org.modeshape.sequencer.ddl.DdlTokenStream, 144 * org.modeshape.sequencer.ddl.node.AstNode) 145 */ 146 @Override 147 protected AstNode parseCustomStatement( DdlTokenStream tokens, 148 AstNode parentNode ) throws ParsingException { 149 assert tokens != null; 150 assert parentNode != null; 151 152 AstNode result = super.parseCustomStatement(tokens, parentNode); 153 if (result == null) { 154 if (tokens.matches(STMT_LOCK_TABLE)) { 155 result = parseLockTable(tokens, parentNode); 156 } else if (tokens.matches(STMT_RENAME_TABLE)) { 157 result = parseRenameTable(tokens, parentNode); 158 } else if (tokens.matches(STMT_RENAME_INDEX)) { 159 result = parseRenameIndex(tokens, parentNode); 160 } else if (tokens.matches(STMT_DECLARE_GLOBAL_TEMP_TABLE)) { 161 result = parseDeclareGlobalTempTable(tokens, parentNode); 162 } 163 } 164 return result; 165 } 166 167 /** 168 * {@inheritDoc} 169 * 170 * @see org.modeshape.sequencer.ddl.StandardDdlParser#parseCreateStatement(org.modeshape.sequencer.ddl.DdlTokenStream, 171 * org.modeshape.sequencer.ddl.node.AstNode) 172 */ 173 @Override 174 protected AstNode parseCreateStatement( DdlTokenStream tokens, 175 AstNode parentNode ) throws ParsingException { 176 assert tokens != null; 177 assert parentNode != null; 178 179 if (tokens.matches(STMT_CREATE_INDEX) || tokens.matches(STMT_CREATE_UNIQUE_INDEX)) { 180 return parseCreateIndex(tokens, parentNode); 181 } else if (tokens.matches(STMT_CREATE_FUNCTION)) { 182 return parseCreateFunction(tokens, parentNode); 183 } else if (tokens.matches(STMT_CREATE_PROCEDURE)) { 184 return parseCreateProcedure(tokens, parentNode); 185 } else if (tokens.matches(STMT_CREATE_ROLE)) { 186 return parseCreateRole(tokens, parentNode); 187 } else if (tokens.matches(STMT_CREATE_SYNONYM)) { 188 return parseCreateSynonym(tokens, parentNode); 189 } else if (tokens.matches(STMT_CREATE_TRIGGER)) { 190 return parseCreateTrigger(tokens, parentNode); 191 } 192 193 return super.parseCreateStatement(tokens, parentNode); 194 195 } 196 197 /** 198 * Parses DDL CREATE INDEX 199 * 200 * @param tokens the tokenized {@link DdlTokenStream} of the DDL input content; may not be null 201 * @param parentNode the parent {@link AstNode} node; may not be null 202 * @return the parsed CREATE INDEX 203 * @throws ParsingException 204 */ 205 protected AstNode parseCreateIndex( DdlTokenStream tokens, 206 AstNode parentNode ) throws ParsingException { 207 assert tokens != null; 208 assert parentNode != null; 209 210 markStartOfStatement(tokens); 211 // CREATE [UNIQUE] INDEX index-Name 212 // ON table-Name ( Simple-column-Name [ ASC | DESC ] [ , Simple-column-Name [ ASC | DESC ]] * ) 213 tokens.consume(CREATE); // CREATE 214 215 boolean isUnique = tokens.canConsume("UNIQUE"); 216 217 tokens.consume("INDEX"); 218 String indexName = parseName(tokens); 219 tokens.consume("ON"); 220 String tableName = parseName(tokens); 221 222 AstNode indexNode = nodeFactory().node(indexName, parentNode, TYPE_CREATE_INDEX_STATEMENT); 223 224 indexNode.setProperty(UNIQUE_INDEX, isUnique); 225 indexNode.setProperty(TABLE_NAME, tableName); 226 227 parseIndexTableColumns(tokens, indexNode); 228 229 parseUntilTerminator(tokens); 230 231 markEndOfStatement(tokens, indexNode); 232 233 return indexNode; 234 } 235 236 private void parseIndexTableColumns( DdlTokenStream tokens, 237 AstNode indexNode ) throws ParsingException { 238 assert tokens != null; 239 assert indexNode != null; 240 241 // Assume we start with open parenthesis '(', then we parse comma separated list of column names followed by optional 242 // ASC or DESC 243 244 tokens.consume(L_PAREN); // EXPECTED 245 246 while (!tokens.canConsume(R_PAREN)) { 247 String colName = parseName(tokens); 248 AstNode colRefNode = nodeFactory().node(colName, indexNode, TYPE_INDEX_COLUMN_REFERENCE); 249 if (tokens.canConsume("ASC")) { 250 colRefNode.setProperty(ORDER, "ASC"); 251 } else if (tokens.canConsume("DESC")) { 252 colRefNode.setProperty(ORDER, "DESC"); 253 } 254 tokens.canConsume(COMMA); 255 } 256 } 257 258 /** 259 * Parses DDL CREATE FUNCTION statement 260 * 261 * @param tokens the tokenized {@link DdlTokenStream} of the DDL input content; may not be null 262 * @param parentNode the parent {@link AstNode} node; may not be null 263 * @return the parsed CREATE FUNCTION statement node 264 * @throws ParsingException 265 */ 266 protected AstNode parseCreateFunction( DdlTokenStream tokens, 267 AstNode parentNode ) throws ParsingException { 268 assert tokens != null; 269 assert parentNode != null; 270 271 markStartOfStatement(tokens); 272 // CREATE FUNCTION function-name ( [ FunctionParameter [, FunctionParameter] ] * ) 273 // RETURNS ReturnDataType [ FunctionElement ] * 274 275 // FunctionElement 276 // { 277 // | LANGUAGE { JAVA } 278 // | {DETERMINISTIC | NOT DETERMINISTIC} 279 // | EXTERNAL NAME string 280 // | PARAMETER STYLE {JAVA | DERBY_JDBC_RESULT_SET} 281 // | { NO SQL | CONTAINS SQL | READS SQL DATA } 282 // | { RETURNS NULL ON NULL INPUT | CALLED ON NULL INPUT } 283 // } 284 tokens.consume(CREATE, "FUNCTION"); 285 286 String functionName = parseName(tokens); 287 288 AstNode functionNode = nodeFactory().node(functionName, parentNode, TYPE_CREATE_FUNCTION_STATEMENT); 289 290 parseFunctionParameters(tokens, functionNode); 291 292 tokens.consume("RETURNS"); 293 294 if (tokens.canConsume("TABLE")) { 295 AstNode tableNode = nodeFactory().node("TABLE", functionNode, TYPE_CREATE_TABLE_STATEMENT); 296 tableNode.setProperty(DDL_START_LINE_NUMBER, getCurrentMarkedPosition().getLine()); 297 tableNode.setProperty(DDL_START_CHAR_INDEX, getCurrentMarkedPosition().getIndexInContent()); 298 tableNode.setProperty(DDL_START_COLUMN_NUMBER, getCurrentMarkedPosition().getColumn()); 299 parseColumnsAndConstraints(tokens, tableNode); 300 String expressionSource = "TABLE " + tokens.getMarkedContent(); 301 tableNode.setProperty(DDL_EXPRESSION, expressionSource); 302 tableNode.setProperty(DDL_LENGTH, expressionSource.length()); 303 304 functionNode.setProperty(IS_TABLE_TYPE, true); 305 } else { 306 // Assume DataType 307 DataType datatype = getDatatypeParser().parse(tokens); 308 if (datatype != null) { 309 getDatatypeParser().setPropertiesOnNode(functionNode, datatype); 310 } else { 311 String msg = DdlSequencerI18n.missingReturnTypeForFunction.text(functionName); 312 DdlParserProblem problem = new DdlParserProblem(Problems.WARNING, getCurrentMarkedPosition(), msg); 313 addProblem(problem, functionNode); 314 } 315 } 316 317 while (!isTerminator(tokens)) { 318 if (tokens.matches("LANGUAGE")) { 319 AstNode optionNode = nodeFactory().node("language", functionNode, TYPE_STATEMENT_OPTION); 320 if (tokens.canConsume("LANGUAGE", "JAVA")) { 321 optionNode.setProperty(VALUE, "LANGUAGE JAVA"); 322 } else { 323 tokens.consume("LANGUAGE"); 324 optionNode.setProperty(VALUE, "LANGUAGE"); 325 } 326 } else if (tokens.canConsume("DETERMINISTIC")) { 327 AstNode optionNode = nodeFactory().node("deterministic", functionNode, TYPE_STATEMENT_OPTION); 328 optionNode.setProperty(VALUE, "DETERMINISTIC"); 329 } else if (tokens.canConsume("NOT", "DETERMINISTIC")) { 330 AstNode optionNode = nodeFactory().node("deterministic", functionNode, TYPE_STATEMENT_OPTION); 331 optionNode.setProperty(VALUE, "NOT DETERMINISTIC"); 332 } else if (tokens.canConsume("EXTERNAL", "NAME")) { 333 String extName = parseName(tokens); 334 AstNode optionNode = nodeFactory().node("externalName", functionNode, TYPE_STATEMENT_OPTION); 335 optionNode.setProperty(VALUE, "EXTERNAL NAME" + SPACE + extName); 336 } else if (tokens.canConsume("PARAMETER", "STYLE")) { 337 if (tokens.canConsume("JAVA")) { 338 functionNode.setProperty(PARAMETER_STYLE, "PARAMETER STYLE" + SPACE + "JAVA"); 339 } else { 340 tokens.consume("DERBY_JDBC_RESULT_SET"); 341 functionNode.setProperty(PARAMETER_STYLE, "PARAMETER STYLE" + SPACE + "DERBY_JDBC_RESULT_SET"); 342 } 343 } else if (tokens.canConsume("NO", "SQL")) { 344 AstNode optionNode = nodeFactory().node("sqlStatus", functionNode, TYPE_STATEMENT_OPTION); 345 optionNode.setProperty(VALUE, "NO SQL"); 346 } else if (tokens.canConsume("CONTAINS", "SQL")) { 347 AstNode optionNode = nodeFactory().node("sqlStatus", functionNode, TYPE_STATEMENT_OPTION); 348 optionNode.setProperty(VALUE, "CONTAINS SQL"); 349 } else if (tokens.canConsume("READS", "SQL", "DATA")) { 350 AstNode optionNode = nodeFactory().node("sqlStatus", functionNode, TYPE_STATEMENT_OPTION); 351 optionNode.setProperty(VALUE, "READS SQL DATA"); 352 } else if (tokens.canConsume("RETURNS", "NULL", "ON", "NULL", "INPUT")) { 353 AstNode optionNode = nodeFactory().node("nullInput", functionNode, TYPE_STATEMENT_OPTION); 354 optionNode.setProperty(VALUE, "RETURNS NULL ON NULL INPUT"); 355 } else if (tokens.canConsume("CALLED", "ON", "NULL", "INPUT")) { 356 AstNode optionNode = nodeFactory().node("nullInput", functionNode, TYPE_STATEMENT_OPTION); 357 optionNode.setProperty(VALUE, "CALLED ON NULL INPUT"); 358 } else { 359 String msg = DdlSequencerI18n.errorParsingDdlContent.text(functionName); 360 DdlParserProblem problem = new DdlParserProblem(Problems.ERROR, getCurrentMarkedPosition(), msg); 361 addProblem(problem, functionNode); 362 break; 363 } 364 } 365 366 markEndOfStatement(tokens, functionNode); 367 368 return functionNode; 369 } 370 371 private void parseFunctionParameters( DdlTokenStream tokens, 372 AstNode functionNode ) throws ParsingException { 373 assert tokens != null; 374 assert functionNode != null; 375 376 // Assume we start with open parenthesis '(', then we parse comma separated list of function parameters 377 // which have the form: [ parameter-Name ] DataType 378 // So, try getting datatype, if datatype == NULL, then parseName() & parse datatype, then repeat as long as next token is 379 // "," 380 381 tokens.consume(L_PAREN); // EXPECTED 382 383 while (!tokens.canConsume(R_PAREN)) { 384 DataType datatype = getDatatypeParser().parse(tokens); 385 if (datatype == null) { 386 String paramName = parseName(tokens); 387 datatype = getDatatypeParser().parse(tokens); 388 AstNode paramNode = nodeFactory().node(paramName, functionNode, TYPE_FUNCTION_PARAMETER); 389 getDatatypeParser().setPropertiesOnNode(paramNode, datatype); 390 } else { 391 AstNode paramNode = nodeFactory().node("functionParameter", functionNode, TYPE_FUNCTION_PARAMETER); 392 getDatatypeParser().setPropertiesOnNode(paramNode, datatype); 393 } 394 tokens.canConsume(COMMA); 395 } 396 } 397 398 /** 399 * Parses DDL CREATE PROCEDURE statement 400 * 401 * @param tokens the tokenized {@link DdlTokenStream} of the DDL input content; may not be null 402 * @param parentNode the parent {@link AstNode} node; may not be null 403 * @return the parsed CREATE PROCEDURE statement node 404 * @throws ParsingException 405 */ 406 protected AstNode parseCreateProcedure( DdlTokenStream tokens, 407 AstNode parentNode ) throws ParsingException { 408 assert tokens != null; 409 assert parentNode != null; 410 411 markStartOfStatement(tokens); 412 413 tokens.consume(CREATE, "PROCEDURE"); 414 415 String functionName = parseName(tokens); 416 417 AstNode functionNode = nodeFactory().node(functionName, parentNode, TYPE_CREATE_PROCEDURE_STATEMENT); 418 419 parseUntilTerminator(tokens); 420 markEndOfStatement(tokens, functionNode); 421 422 return functionNode; 423 } 424 425 /** 426 * Parses DDL CREATE ROLE statement 427 * 428 * @param tokens the tokenized {@link DdlTokenStream} of the DDL input content; may not be null 429 * @param parentNode the parent {@link AstNode} node; may not be null 430 * @return the parsed CREATE ROLE statement node 431 * @throws ParsingException 432 */ 433 protected AstNode parseCreateRole( DdlTokenStream tokens, 434 AstNode parentNode ) throws ParsingException { 435 assert tokens != null; 436 assert parentNode != null; 437 438 markStartOfStatement(tokens); 439 440 tokens.consume(CREATE, "ROLE"); 441 442 String functionName = parseName(tokens); 443 444 AstNode functionNode = nodeFactory().node(functionName, parentNode, TYPE_CREATE_ROLE_STATEMENT); 445 446 markEndOfStatement(tokens, functionNode); 447 448 return functionNode; 449 } 450 451 /** 452 * {@inheritDoc} 453 * 454 * @see org.modeshape.sequencer.ddl.StandardDdlParser#parseDropStatement(org.modeshape.sequencer.ddl.DdlTokenStream, 455 * org.modeshape.sequencer.ddl.node.AstNode) 456 */ 457 @Override 458 protected AstNode parseDropStatement( DdlTokenStream tokens, 459 AstNode parentNode ) throws ParsingException { 460 assert tokens != null; 461 assert parentNode != null; 462 463 AstNode dropNode = null; 464 465 String name = null; 466 467 if (tokens.matches(STMT_DROP_FUNCTION)) { 468 markStartOfStatement(tokens); 469 tokens.consume(STMT_DROP_FUNCTION); 470 name = parseName(tokens); 471 dropNode = nodeFactory().node(name, parentNode, TYPE_DROP_FUNCTION_STATEMENT); 472 } else if (tokens.matches(STMT_DROP_INDEX)) { 473 markStartOfStatement(tokens); 474 tokens.consume(STMT_DROP_INDEX); 475 name = parseName(tokens); 476 dropNode = nodeFactory().node(name, parentNode, TYPE_DROP_INDEX_STATEMENT); 477 } else if (tokens.matches(STMT_DROP_PROCEDURE)) { 478 markStartOfStatement(tokens); 479 tokens.consume(STMT_DROP_PROCEDURE); 480 name = parseName(tokens); 481 dropNode = nodeFactory().node(name, parentNode, TYPE_DROP_PROCEDURE_STATEMENT); 482 } else if (tokens.matches(STMT_DROP_ROLE)) { 483 markStartOfStatement(tokens); 484 tokens.consume(STMT_DROP_ROLE); 485 name = parseName(tokens); 486 dropNode = nodeFactory().node(name, parentNode, TYPE_DROP_ROLE_STATEMENT); 487 } else if (tokens.matches(STMT_DROP_SYNONYM)) { 488 markStartOfStatement(tokens); 489 tokens.consume(STMT_DROP_SYNONYM); 490 name = parseName(tokens); 491 dropNode = nodeFactory().node(name, parentNode, TYPE_DROP_SYNONYM_STATEMENT); 492 } else if (tokens.matches(STMT_DROP_TRIGGER)) { 493 markStartOfStatement(tokens); 494 tokens.consume(STMT_DROP_TRIGGER); 495 name = parseName(tokens); 496 dropNode = nodeFactory().node(name, parentNode, TYPE_DROP_TRIGGER_STATEMENT); 497 } 498 499 if (dropNode != null) { 500 markEndOfStatement(tokens, dropNode); 501 } 502 503 if (dropNode == null) { 504 dropNode = super.parseDropStatement(tokens, parentNode); 505 } 506 507 return dropNode; 508 } 509 510 /** 511 * {@inheritDoc} Syntax for tables GRANT privilege-type ON [TABLE] { table-Name | view-Name } TO grantees Syntax for routines 512 * GRANT EXECUTE ON { FUNCTION | PROCEDURE } routine-designator TO grantees Syntax for roles GRANT roleName [ {, roleName }* ] 513 * TO grantees privilege-types ALL PRIVILEGES | privilege-list privilege-list table-privilege {, table-privilege }* 514 * table-privilege DELETE | INSERT | REFERENCES [column list] | SELECT [column list] | TRIGGER | UPDATE [column list] column 515 * list ( column-identifier {, column-identifier}* ) GRANT 516 * 517 * @see org.modeshape.sequencer.ddl.StandardDdlParser#parseGrantStatement(org.modeshape.sequencer.ddl.DdlTokenStream, 518 * org.modeshape.sequencer.ddl.node.AstNode) 519 */ 520 @Override 521 protected AstNode parseGrantStatement( DdlTokenStream tokens, 522 AstNode parentNode ) throws ParsingException { 523 assert tokens != null; 524 assert parentNode != null; 525 assert tokens.matches(GRANT); 526 527 markStartOfStatement(tokens); 528 529 // Syntax for tables 530 // 531 // GRANT privilege-type ON [TABLE] { table-Name | view-Name } TO grantees 532 // 533 // Syntax for routines 534 // 535 // GRANT EXECUTE ON { FUNCTION | PROCEDURE } {function-name | procedure-name} TO grantees 536 // 537 // Syntax for roles 538 // 539 // GRANT roleName [ {, roleName }* ] TO grantees 540 541 // privilege-types 542 // 543 // ALL PRIVILEGES | privilege-list 544 // 545 AstNode grantNode = null; 546 boolean allPrivileges = false; 547 548 List<AstNode> privileges = new ArrayList<AstNode>(); 549 550 tokens.consume("GRANT"); 551 if (tokens.canConsume("EXECUTE", "ON")) { 552 AstNode node = nodeFactory().node("privilege"); 553 nodeFactory().setType(node, GRANT_PRIVILEGE); 554 node.setProperty(TYPE, "EXECUTE"); 555 privileges = new ArrayList<AstNode>(); 556 privileges.add(node); 557 if (tokens.canConsume("FUNCTION")) { 558 String name = parseName(tokens); 559 grantNode = nodeFactory().node(name, parentNode, TYPE_GRANT_ON_FUNCTION_STATEMENT); 560 } else { 561 tokens.consume("PROCEDURE"); 562 String name = parseName(tokens); 563 grantNode = nodeFactory().node(name, parentNode, TYPE_GRANT_ON_PROCEDURE_STATEMENT); 564 } 565 } else { 566 567 if (tokens.canConsume("ALL", "PRIVILEGES")) { 568 allPrivileges = true; 569 } else { 570 parseGrantPrivileges(tokens, privileges); 571 572 if (privileges.isEmpty()) { 573 // ASSUME: GRANT roleName [ {, roleName }* ] TO grantees 574 grantNode = nodeFactory().node("grantRoles", parentNode, TYPE_GRANT_ROLES_STATEMENT); 575 do { 576 String roleName = parseName(tokens); 577 nodeFactory().node(roleName, grantNode, ROLE_NAME); 578 } while (tokens.canConsume(COMMA)); 579 } 580 } 581 if (grantNode == null) { 582 tokens.consume("ON"); 583 tokens.canConsume(TABLE); // OPTIONAL 584 String name = parseName(tokens); 585 grantNode = nodeFactory().node(name, parentNode, TYPE_GRANT_ON_TABLE_STATEMENT); 586 // Attach privileges to grant node 587 for (AstNode node : privileges) { 588 node.setParent(grantNode); 589 } 590 if (allPrivileges) { 591 grantNode.setProperty(ALL_PRIVILEGES, allPrivileges); 592 } 593 } 594 595 } 596 597 tokens.consume("TO"); 598 599 do { 600 String grantee = parseName(tokens); 601 nodeFactory().node(grantee, grantNode, GRANTEE); 602 } while (tokens.canConsume(COMMA)); 603 604 markEndOfStatement(tokens, grantNode); 605 606 return grantNode; 607 } 608 609 /** 610 * {@inheritDoc} 611 * 612 * @see org.modeshape.sequencer.ddl.StandardDdlParser#parseGrantPrivileges(org.modeshape.sequencer.ddl.DdlTokenStream, 613 * java.util.List) 614 */ 615 @Override 616 protected void parseGrantPrivileges( DdlTokenStream tokens, 617 List<AstNode> privileges ) throws ParsingException { 618 // privilege-types 619 // 620 // ALL PRIVILEGES | privilege-list 621 // 622 // privilege-list 623 // 624 // table-privilege {, table-privilege }* 625 // 626 // table-privilege 627 // DELETE | 628 // INSERT | 629 // REFERENCES [column list] | 630 // SELECT [column list] | 631 // TRIGGER | 632 // UPDATE [column list] 633 // column list 634 // ( column-identifier {, column-identifier}* ) 635 636 do { 637 AstNode node = null; 638 639 if (tokens.canConsume(DELETE)) { 640 node = nodeFactory().node("privilege"); 641 node.setProperty(TYPE, DELETE); 642 } else if (tokens.canConsume(INSERT)) { 643 node = nodeFactory().node("privilege"); 644 node.setProperty(TYPE, INSERT); 645 } else if (tokens.canConsume("REFERENCES")) { 646 node = nodeFactory().node("privilege"); 647 node.setProperty(TYPE, "REFERENCES"); 648 parseColumnNameList(tokens, node, TYPE_COLUMN_REFERENCE); 649 } else if (tokens.canConsume(SELECT)) { 650 node = nodeFactory().node("privilege"); 651 node.setProperty(TYPE, SELECT); 652 parseColumnNameList(tokens, node, TYPE_COLUMN_REFERENCE); 653 } else if (tokens.canConsume("TRIGGER")) { 654 node = nodeFactory().node("privilege"); 655 node.setProperty(TYPE, "TRIGGER"); 656 } else if (tokens.canConsume(UPDATE)) { 657 node = nodeFactory().node("privilege"); 658 node.setProperty(TYPE, UPDATE); 659 parseColumnNameList(tokens, node, TYPE_COLUMN_REFERENCE); 660 } 661 if (node == null) { 662 break; 663 } 664 nodeFactory().setType(node, GRANT_PRIVILEGE); 665 privileges.add(node); 666 667 } while (tokens.canConsume(COMMA)); 668 669 } 670 671 /** 672 * {@inheritDoc} 673 * 674 * @see org.modeshape.sequencer.ddl.StandardDdlParser#parseAlterTableStatement(org.modeshape.sequencer.ddl.DdlTokenStream, 675 * org.modeshape.sequencer.ddl.node.AstNode) 676 */ 677 @Override 678 protected AstNode parseAlterTableStatement( DdlTokenStream tokens, 679 AstNode parentNode ) throws ParsingException { 680 assert tokens != null; 681 assert parentNode != null; 682 683 markStartOfStatement(tokens); 684 685 // ALTER TABLE table-Name 686 // { 687 // ADD COLUMN column-definition | 688 // ADD CONSTRAINT clause | 689 // DROP [ COLUMN ] column-name [ CASCADE | RESTRICT ] | 690 // DROP { PRIMARY KEY | FOREIGN KEY constraint-name | UNIQUE constraint-name | CHECK constraint-name | CONSTRAINT 691 // constraint-name } | 692 // ALTER [ COLUMN ] column-alteration | 693 // LOCKSIZE { ROW | TABLE } 694 // } 695 696 tokens.consume(ALTER, TABLE); // consumes 'ALTER TABLE' 697 String tableName = parseName(tokens); 698 699 AstNode alterTableNode = nodeFactory().node(tableName, parentNode, TYPE_ALTER_TABLE_STATEMENT); 700 701 // System.out.println(" >> PARSIN ALTER STATEMENT >> TABLE Name = " + tableName); 702 703 if (tokens.canConsume("ADD")) { 704 if (isTableConstraint(tokens)) { 705 parseTableConstraint(tokens, alterTableNode, true); 706 } else { 707 // This segment can also be enclosed in "()" brackets to handle multiple ColumnDefinition ADDs 708 if (tokens.matches(L_PAREN)) { 709 parseColumns(tokens, alterTableNode, true); 710 } else { 711 parseSingleTerminatedColumnDefinition(tokens, alterTableNode, true); 712 } 713 } 714 715 } else if (tokens.canConsume("DROP")) { 716 // DROP { PRIMARY KEY | FOREIGN KEY constraint-name | UNIQUE constraint-name | CHECK constraint-name | CONSTRAINT 717 // constraint-name } 718 if (tokens.canConsume("PRIMARY", "KEY")) { 719 String name = parseName(tokens); // constraint name 720 nodeFactory().node(name, alterTableNode, TYPE_DROP_TABLE_CONSTRAINT_DEFINITION); 721 } else if (tokens.canConsume("FOREIGN", "KEY")) { 722 String name = parseName(tokens); // constraint name 723 nodeFactory().node(name, alterTableNode, TYPE_DROP_TABLE_CONSTRAINT_DEFINITION); 724 } else if (tokens.canConsume("UNIQUE")) { 725 String name = parseName(tokens); // constraint name 726 nodeFactory().node(name, alterTableNode, TYPE_DROP_TABLE_CONSTRAINT_DEFINITION); 727 } else if (tokens.canConsume("CHECK")) { 728 String name = parseName(tokens); // constraint name 729 nodeFactory().node(name, alterTableNode, TYPE_DROP_TABLE_CONSTRAINT_DEFINITION); 730 } else if (tokens.canConsume("CONSTRAINT")) { 731 String name = parseName(tokens); // constraint name 732 nodeFactory().node(name, alterTableNode, TYPE_DROP_TABLE_CONSTRAINT_DEFINITION); 733 } else { 734 // DROP [ COLUMN ] column-name [ CASCADE | RESTRICT ] 735 tokens.canConsume("COLUMN"); // "COLUMN" is optional 736 737 String columnName = parseName(tokens); 738 739 AstNode columnNode = nodeFactory().node(columnName, alterTableNode, TYPE_DROP_COLUMN_DEFINITION); 740 741 if (tokens.canConsume(DropBehavior.CASCADE)) { 742 columnNode.setProperty(StandardDdlLexicon.DROP_BEHAVIOR, DropBehavior.CASCADE); 743 } else if (tokens.canConsume(DropBehavior.RESTRICT)) { 744 columnNode.setProperty(StandardDdlLexicon.DROP_BEHAVIOR, DropBehavior.RESTRICT); 745 } 746 } 747 } else if (tokens.canConsume("ALTER")) { 748 // column-alteration 749 // 750 // ALTER [ COLUMN ] column-Name SET DATA TYPE VARCHAR(integer) | 751 // ALTER [ COLUMN ] column-Name SET DATA TYPE VARCHAR FOR BIT DATA(integer) | 752 // ALTER [ COLUMN ] column-name SET INCREMENT BY integer-constant | 753 // ALTER [ COLUMN ] column-name RESTART WITH integer-constant | 754 // ALTER [ COLUMN ] column-name [ NOT ] NULL | 755 // ALTER [ COLUMN ] column-name [ WITH | SET ] DEFAULT default-value | 756 // ALTER [ COLUMN ] column-name DROP DEFAULT 757 758 tokens.canConsume("COLUMN"); 759 String alterColumnName = parseName(tokens); 760 761 AstNode columnNode = nodeFactory().node(alterColumnName, alterTableNode, TYPE_ALTER_COLUMN_DEFINITION); 762 763 if (tokens.matches("DEFAULT")) { 764 parseDefaultClause(tokens, columnNode); 765 } else if (tokens.canConsume("SET")) { 766 if (tokens.canConsume("DATA", "TYPE")) { 767 DataType datatype = getDatatypeParser().parse(tokens); 768 769 columnNode.setProperty(StandardDdlLexicon.DATATYPE_NAME, datatype.getName()); 770 if (datatype.getLength() >= 0) { 771 columnNode.setProperty(StandardDdlLexicon.DATATYPE_LENGTH, datatype.getLength()); 772 } 773 if (datatype.getPrecision() >= 0) { 774 columnNode.setProperty(StandardDdlLexicon.DATATYPE_PRECISION, datatype.getPrecision()); 775 } 776 if (datatype.getScale() >= 0) { 777 columnNode.setProperty(StandardDdlLexicon.DATATYPE_SCALE, datatype.getScale()); 778 } 779 780 } else if (tokens.canConsume("INCREMENT")) { 781 tokens.consume("BY", TokenStream.ANY_VALUE); 782 } 783 if (tokens.matches("DEFAULT")) { 784 parseDefaultClause(tokens, columnNode); 785 } 786 } else if (tokens.canConsume("WITH")) { 787 parseDefaultClause(tokens, columnNode); 788 } else { 789 tokens.canConsume("RESTART", "WITH", TokenStream.ANY_VALUE); 790 tokens.canConsume("DROP", "DEFAULT"); 791 792 if (tokens.canConsume("NOT", "NULL")) { 793 columnNode.setProperty(StandardDdlLexicon.NULLABLE, "NOT NULL"); 794 } else if (tokens.canConsume("NULL")) { 795 columnNode.setProperty(StandardDdlLexicon.NULLABLE, "NULL"); 796 } 797 } 798 799 } else if (tokens.canConsume("LOCKSIZE")) { 800 tokens.canConsume("ROWS"); 801 tokens.canConsume("TABLE"); 802 } 803 804 markEndOfStatement(tokens, alterTableNode); 805 806 return alterTableNode; 807 } 808 809 /** 810 * {@inheritDoc} 811 * 812 * @see org.modeshape.sequencer.ddl.StandardDdlParser#parseColumnDefinition(org.modeshape.sequencer.ddl.DdlTokenStream, 813 * org.modeshape.sequencer.ddl.node.AstNode, boolean) 814 */ 815 @Override 816 protected void parseColumnDefinition( DdlTokenStream tokens, 817 AstNode tableNode, 818 boolean isAlterTable ) throws ParsingException { 819 // column-definition 820 // 821 // Simple-column-Name DataType 822 // [ ColumnDefinition-level-constraint ]* 823 // [ [ WITH ] DEFAULT { ConstantExpression | NULL } |generated-column-spec ] 824 // [ ColumnDefinition-level-constraint ]* 825 826 // generated-column-spec 827 // 828 // [ GENERATED { ALWAYS | BY DEFAULT } AS IDENTITY [ ( START WITH IntegerConstant [ ,INCREMENT BY IntegerConstant] ) ] ] ] 829 830 // EXAMPLE COLUMNS 831 // (i INT GENERATED BY DEFAULT AS IDENTITY (START WITH 2, INCREMENT BY 1), 832 // ch CHAR(50)); 833 834 tokens.canConsume("COLUMN"); // FOR ALTER TABLE ADD [COLUMN] case 835 String columnName = parseName(tokens); 836 DataType datatype = getDatatypeParser().parse(tokens); 837 838 AstNode columnNode = nodeFactory().node(columnName, tableNode, TYPE_COLUMN_DEFINITION); 839 840 columnNode.setProperty(StandardDdlLexicon.DATATYPE_NAME, datatype.getName()); 841 if (datatype.getLength() >= 0) { 842 columnNode.setProperty(StandardDdlLexicon.DATATYPE_LENGTH, datatype.getLength()); 843 } 844 if (datatype.getPrecision() >= 0) { 845 columnNode.setProperty(StandardDdlLexicon.DATATYPE_PRECISION, datatype.getPrecision()); 846 } 847 if (datatype.getScale() >= 0) { 848 columnNode.setProperty(StandardDdlLexicon.DATATYPE_SCALE, datatype.getScale()); 849 } 850 851 // Now clauses and constraints can be defined in any order, so we need to keep parsing until we get to a comma 852 // Now clauses and constraints can be defined in any order, so we need to keep parsing until we get to a comma 853 StringBuilder unusedTokensSB = new StringBuilder(); 854 855 while (tokens.hasNext() && !tokens.matches(COMMA)) { 856 boolean parsedDefaultClause = parseDefaultClause(tokens, columnNode); 857 if (!parsedDefaultClause) { 858 boolean parsedCollate = parseCollateClause(tokens, columnNode); 859 boolean parsedConstraint = parseColumnConstraint(tokens, columnNode, isAlterTable); 860 boolean parsedGeneratedColumn = parseGeneratedColumnSpecClause(tokens, columnNode); 861 if (!parsedCollate && !parsedConstraint && !parsedGeneratedColumn) { 862 // THIS IS AN ERROR. NOTHING FOUND. 863 // NEED TO absorb tokens 864 unusedTokensSB.append(SPACE).append(tokens.consume()); 865 } 866 } 867 tokens.canConsume(DdlTokenizer.COMMENT); 868 } 869 870 if (unusedTokensSB.length() > 0) { 871 String msg = DdlSequencerI18n.unusedTokensParsingColumnDefinition.text(tableNode.getName()); 872 DdlParserProblem problem = new DdlParserProblem(Problems.WARNING, getCurrentMarkedPosition(), msg); 873 problem.setUnusedSource(unusedTokensSB.toString()); 874 addProblem(problem, tableNode); 875 } 876 877 } 878 879 /** 880 * Utility method designed to parse columns within an ALTER TABLE ADD statement. 881 * 882 * @param tokens the tokenized {@link DdlTokenStream} of the DDL input content; may not be null 883 * @param tableNode 884 * @param isAlterTable 885 * @throws ParsingException 886 */ 887 protected void parseColumns( DdlTokenStream tokens, 888 AstNode tableNode, 889 boolean isAlterTable ) throws ParsingException { 890 String tableElementString = getTableElementsString(tokens, false); 891 892 DdlTokenStream localTokens = new DdlTokenStream(tableElementString, DdlTokenStream.ddlTokenizer(false), false); 893 894 localTokens.start(); 895 896 StringBuilder unusedTokensSB = new StringBuilder(); 897 898 do { 899 if (isColumnDefinitionStart(localTokens)) { 900 parseColumnDefinition(localTokens, tableNode, isAlterTable); 901 } else { 902 // THIS IS AN ERROR. NOTHING FOUND. 903 // NEED TO absorb tokens 904 unusedTokensSB.append(SPACE).append(localTokens.consume()); 905 } 906 } while (localTokens.canConsume(COMMA)); 907 908 if (unusedTokensSB.length() > 0) { 909 String msg = DdlSequencerI18n.unusedTokensParsingColumnDefinition.text(tableNode.getName()); 910 DdlParserProblem problem = new DdlParserProblem(Problems.WARNING, getCurrentMarkedPosition(), msg); 911 problem.setUnusedSource(unusedTokensSB.toString()); 912 addProblem(problem, tableNode); 913 } 914 } 915 916 private boolean parseGeneratedColumnSpecClause( DdlTokenStream tokens, 917 AstNode columnNode ) throws ParsingException { 918 assert tokens != null; 919 assert columnNode != null; 920 // generated-column-spec 921 // 922 // [ GENERATED { ALWAYS | BY DEFAULT } AS IDENTITY [ ( START WITH IntegerConstant [ ,INCREMENT BY IntegerConstant] ) ] ] ] 923 if (tokens.canConsume("GENERATED")) { 924 StringBuilder sb = new StringBuilder("GENERATED"); 925 926 if (tokens.canConsume("ALWAYS")) { 927 sb.append(SPACE).append("ALWAYS"); 928 } else { 929 tokens.consume("BY", "DEFAULT"); 930 sb.append(SPACE).append("BY DEFAULT"); 931 } 932 933 tokens.consume("AS", "IDENTITY"); 934 sb.append(SPACE).append("AS IDENTITY"); 935 936 if (tokens.canConsume(L_PAREN, "START", "WITH")) { 937 String value = tokens.consume(); // integer constant 938 sb.append(SPACE).append(L_PAREN).append(SPACE).append("START WITH").append(SPACE).append(value); 939 if (tokens.canConsume(COMMA, "INCREMENT", "BY")) { 940 value = tokens.consume();// integer constant 941 sb.append(COMMA).append("INCREMENT BY").append(SPACE).append(value); 942 } 943 tokens.consume(R_PAREN); 944 sb.append(SPACE).append(R_PAREN); 945 } 946 AstNode propNode = nodeFactory().node(COLUMN_ATTRIBUTE, columnNode, TYPE_SIMPLE_PROPERTY); 947 propNode.setProperty(PROPERTY_VALUE, sb.toString()); 948 949 return true; 950 } 951 952 return false; 953 } 954 955 private AstNode parseDeclareGlobalTempTable( DdlTokenStream tokens, 956 AstNode parentNode ) throws ParsingException { 957 assert tokens != null; 958 assert parentNode != null; 959 960 markStartOfStatement(tokens); 961 962 // DECLARE GLOBAL TEMPORARY TABLE table-Name 963 // { column-definition [ , column-definition ] * } 964 // [ ON COMMIT {DELETE | PRESERVE} ROWS ] 965 // NOT LOGGED [ON ROLLBACK DELETE ROWS] 966 967 tokens.consume(STMT_DECLARE_GLOBAL_TEMP_TABLE); 968 String name = parseName(tokens); 969 970 AstNode node = nodeFactory().node(name, parentNode, TYPE_DECLARE_GLOBAL_TEMPORARY_TABLE_STATEMENT); 971 972 parseColumnsAndConstraints(tokens, node); 973 974 if (tokens.canConsume("ON", "COMMIT")) { 975 AstNode optionNode = nodeFactory().node("onCommit", node, TYPE_STATEMENT_OPTION); 976 if (tokens.canConsume("DELETE", "ROWS")) { 977 optionNode.setProperty(VALUE, "ON COMMIT DELETE ROWS"); 978 } else { 979 tokens.consume("PRESERVE", "ROWS"); 980 optionNode.setProperty(VALUE, "ON COMMIT PRESERVE ROWS"); 981 } 982 } 983 tokens.consume("NOT", "LOGGED"); 984 985 if (tokens.canConsume("ON", "ROLLBACK", "DELETE", "ROWS")) { 986 AstNode optionNode = nodeFactory().node("onRollback", node, TYPE_STATEMENT_OPTION); 987 optionNode.setProperty(VALUE, "ON ROLLBACK DELETE ROWS"); 988 } 989 990 markEndOfStatement(tokens, node); 991 992 return node; 993 } 994 995 private AstNode parseLockTable( DdlTokenStream tokens, 996 AstNode parentNode ) throws ParsingException { 997 assert tokens != null; 998 assert parentNode != null; 999 1000 markStartOfStatement(tokens); 1001 1002 // LOCK TABLE table-Name IN { SHARE | EXCLUSIVE } MODE; 1003 1004 tokens.consume(STMT_LOCK_TABLE); 1005 1006 String name = parseName(tokens); 1007 1008 AstNode node = nodeFactory().node(name, parentNode, TYPE_LOCK_TABLE_STATEMENT); 1009 1010 tokens.consume("IN"); 1011 1012 if (tokens.canConsume("SHARE")) { 1013 AstNode propNode = nodeFactory().node("lockMode", node, TYPE_STATEMENT_OPTION); 1014 propNode.setProperty(VALUE, "SHARE"); 1015 } else { 1016 tokens.consume("EXCLUSIVE"); 1017 AstNode propNode = nodeFactory().node("lockMode", node, TYPE_STATEMENT_OPTION); 1018 propNode.setProperty(VALUE, "EXCLUSIVE"); 1019 } 1020 tokens.consume("MODE"); 1021 1022 markEndOfStatement(tokens, node); 1023 1024 return node; 1025 } 1026 1027 private AstNode parseRenameTable( DdlTokenStream tokens, 1028 AstNode parentNode ) throws ParsingException { 1029 assert tokens != null; 1030 assert parentNode != null; 1031 1032 markStartOfStatement(tokens); 1033 1034 // RENAME TABLE SAMP.EMP_ACT TO EMPLOYEE_ACT; 1035 1036 tokens.consume(STMT_RENAME_TABLE); 1037 1038 String oldName = parseName(tokens); 1039 1040 AstNode node = nodeFactory().node(oldName, parentNode, TYPE_RENAME_TABLE_STATEMENT); 1041 1042 tokens.consume("TO"); 1043 1044 String newName = parseName(tokens); 1045 1046 node.setProperty(NEW_NAME, newName); 1047 1048 markEndOfStatement(tokens, node); 1049 1050 return node; 1051 } 1052 1053 private AstNode parseRenameIndex( DdlTokenStream tokens, 1054 AstNode parentNode ) throws ParsingException { 1055 assert tokens != null; 1056 assert parentNode != null; 1057 1058 markStartOfStatement(tokens); 1059 1060 // RENAME TABLE SAMP.EMP_ACT TO EMPLOYEE_ACT; 1061 1062 tokens.consume(STMT_RENAME_INDEX); 1063 1064 String oldName = parseName(tokens); 1065 1066 AstNode node = nodeFactory().node(oldName, parentNode, TYPE_RENAME_INDEX_STATEMENT); 1067 1068 tokens.consume("TO"); 1069 1070 String newName = parseName(tokens); 1071 1072 node.setProperty(NEW_NAME, newName); 1073 1074 markEndOfStatement(tokens, node); 1075 1076 return node; 1077 } 1078 1079 private AstNode parseCreateSynonym( DdlTokenStream tokens, 1080 AstNode parentNode ) throws ParsingException { 1081 assert tokens != null; 1082 assert parentNode != null; 1083 1084 markStartOfStatement(tokens); 1085 // CREATE SYNONYM synonym-Name FOR { view-Name | table-Name } 1086 1087 tokens.consume(STMT_CREATE_SYNONYM); 1088 1089 String name = parseName(tokens); 1090 1091 AstNode node = nodeFactory().node(name, parentNode, TYPE_CREATE_SYNONYM_STATEMENT); 1092 1093 tokens.consume("FOR"); 1094 1095 String tableOrViewName = parseName(tokens); 1096 1097 node.setProperty(TABLE_NAME, tableOrViewName); 1098 1099 markEndOfStatement(tokens, node); 1100 1101 return node; 1102 } 1103 1104 private AstNode parseCreateTrigger( DdlTokenStream tokens, 1105 AstNode parentNode ) throws ParsingException { 1106 assert tokens != null; 1107 assert parentNode != null; 1108 1109 markStartOfStatement(tokens); 1110 // CREATE TRIGGER TriggerName 1111 // { AFTER | NO CASCADE BEFORE } 1112 // { INSERT | DELETE | UPDATE [ OF column-Name [, column-Name]* ] } 1113 // ON table-Name 1114 // [ ReferencingClause ] 1115 // [ FOR EACH { ROW | STATEMENT } ] [ MODE DB2SQL ] 1116 // Triggered-SQL-statement 1117 1118 // ReferencingClause 1119 // REFERENCING 1120 // { 1121 // { OLD | NEW } [ ROW ] [ AS ] correlation-Name [ { OLD | NEW } [ ROW ] [ AS ] correlation-Name ] | 1122 // { OLD TABLE | NEW TABLE } [ AS ] Identifier [ { OLD TABLE | NEW TABLE } [AS] Identifier ] | 1123 // { OLD_TABLE | NEW_TABLE } [ AS ] Identifier [ { OLD_TABLE | NEW_TABLE } [AS] Identifier ] 1124 // } 1125 1126 // EXAMPLE: 1127 // CREATE TRIGGER t1 NO CASCADE BEFORE UPDATE ON x 1128 // FOR EACH ROW MODE DB2SQL 1129 // values app.notifyEmail('Jerry', 'Table x is about to be updated'); 1130 1131 tokens.consume(STMT_CREATE_TRIGGER); 1132 1133 String name = parseName(tokens); 1134 1135 AstNode node = nodeFactory().node(name, parentNode, TYPE_CREATE_TRIGGER_STATEMENT); 1136 1137 String type = null; 1138 1139 if (tokens.canConsume("AFTER")) { 1140 AstNode optionNode = nodeFactory().node("beforeOrAfter", node, TYPE_STATEMENT_OPTION); 1141 optionNode.setProperty(VALUE, "AFTER"); 1142 } else { 1143 tokens.consume("NO", "CASCADE", "BEFORE"); 1144 AstNode optionNode = nodeFactory().node("beforeOrAfter", node, TYPE_STATEMENT_OPTION); 1145 optionNode.setProperty(VALUE, "NO CASCADE BEFORE"); 1146 } 1147 1148 if (tokens.canConsume(INSERT)) { 1149 AstNode optionNode = nodeFactory().node("eventType", node, TYPE_STATEMENT_OPTION); 1150 optionNode.setProperty(VALUE, INSERT); 1151 type = INSERT; 1152 } else if (tokens.canConsume(DELETE)) { 1153 AstNode optionNode = nodeFactory().node("eventType", node, TYPE_STATEMENT_OPTION); 1154 optionNode.setProperty(VALUE, DELETE); 1155 type = DELETE; 1156 } else { 1157 tokens.consume(UPDATE); 1158 AstNode optionNode = nodeFactory().node("eventType", node, TYPE_STATEMENT_OPTION); 1159 optionNode.setProperty(VALUE, UPDATE); 1160 type = UPDATE; 1161 } 1162 1163 if (tokens.canConsume("OF")) { 1164 // Parse comma separated column names 1165 String colName = parseName(tokens); 1166 nodeFactory().node(colName, node, TYPE_COLUMN_REFERENCE); 1167 1168 while (tokens.canConsume(COMMA)) { 1169 colName = parseName(tokens); 1170 nodeFactory().node(colName, node, TYPE_COLUMN_REFERENCE); 1171 } 1172 } 1173 tokens.consume("ON"); 1174 1175 String tableName = parseName(tokens); 1176 1177 node.setProperty(TABLE_NAME, tableName); 1178 1179 if (tokens.canConsume("REFERENCING")) { 1180 // ReferencingClause 1181 // REFERENCING 1182 // { 1183 // { OLD | NEW } [ ROW ] [ AS ] correlation-Name [ { OLD | NEW } [ ROW ] [ AS ] correlation-Name ] | 1184 // { OLD TABLE | NEW TABLE } [ AS ] Identifier [ { OLD TABLE | NEW TABLE } [AS] Identifier ] | 1185 // { OLD_TABLE | NEW_TABLE } [ AS ] Identifier [ { OLD_TABLE | NEW_TABLE } [AS] Identifier ] 1186 // } 1187 1188 StringBuilder sb = new StringBuilder(); 1189 if (tokens.matchesAnyOf("OLD", "NEW")) { 1190 if (tokens.canConsume("OLD")) { 1191 sb.append("OLD"); 1192 } else { 1193 tokens.consume("NEW"); 1194 sb.append("NEW"); 1195 } 1196 if (tokens.canConsume("ROW")) { 1197 sb.append(SPACE).append("ROW"); 1198 } 1199 if (tokens.canConsume("AS")) { 1200 sb.append(SPACE).append("AS"); 1201 } 1202 if (tokens.matchesAnyOf("OLD", "NEW")) { 1203 if (tokens.canConsume("OLD")) { 1204 sb.append(SPACE).append("OLD"); 1205 } else { 1206 tokens.consume("NEW"); 1207 sb.append(SPACE).append("NEW"); 1208 } 1209 1210 if (tokens.canConsume("ROW")) { 1211 sb.append(SPACE).append("ROW"); 1212 } 1213 if (tokens.canConsume("AS")) { 1214 sb.append(SPACE).append("AS"); 1215 } 1216 if (!tokens.matchesAnyOf("FOR", "MODE", type)) { 1217 String corrName = parseName(tokens); 1218 sb.append(SPACE).append(corrName); 1219 } 1220 } else { 1221 String corrName = parseName(tokens); 1222 sb.append(SPACE).append(corrName); 1223 1224 if (tokens.matchesAnyOf("OLD", "NEW")) { 1225 if (tokens.canConsume("OLD")) { 1226 sb.append(SPACE).append("OLD"); 1227 } else { 1228 tokens.consume("NEW"); 1229 sb.append(SPACE).append("NEW"); 1230 } 1231 1232 if (tokens.canConsume("ROW")) { 1233 sb.append(SPACE).append("ROW"); 1234 } 1235 if (tokens.canConsume("AS")) { 1236 sb.append(SPACE).append("AS"); 1237 } 1238 if (!tokens.matchesAnyOf("FOR", "MODE", type)) { 1239 corrName = parseName(tokens); 1240 sb.append(SPACE).append(corrName); 1241 } 1242 } 1243 } 1244 } 1245 } 1246 // [ FOR EACH { ROW | STATEMENT } ] [ MODE DB2SQL ] 1247 if (tokens.canConsume("FOR", "EACH")) { 1248 if (tokens.canConsume("ROW")) { 1249 AstNode optionNode = nodeFactory().node("forEach", node, TYPE_STATEMENT_OPTION); 1250 optionNode.setProperty(VALUE, "FOR EACH ROW"); 1251 } else { 1252 tokens.consume("STATEMENT"); 1253 AstNode optionNode = nodeFactory().node("forEach", node, TYPE_STATEMENT_OPTION); 1254 optionNode.setProperty(VALUE, "FOR EACH STATEMENT"); 1255 } 1256 } 1257 if (tokens.canConsume("MODE")) { 1258 tokens.consume("DB2SQL"); 1259 AstNode optionNode = nodeFactory().node("mode", node, TYPE_STATEMENT_OPTION); 1260 optionNode.setProperty(VALUE, "MODE DB2SQL"); 1261 } 1262 1263 String sql = parseUntilTerminatorIgnoreEmbeddedStatements(tokens); 1264 node.setProperty(SQL, sql); 1265 1266 markEndOfStatement(tokens, node); 1267 1268 return node; 1269 } 1270 1271 /** 1272 * {@inheritDoc} 1273 * 1274 * @see org.modeshape.sequencer.ddl.StandardDdlParser#getDataTypeStartWords() 1275 */ 1276 @Override 1277 protected List<String> getCustomDataTypeStartWords() { 1278 return DerbyDataTypes.CUSTOM_DATATYPE_START_WORDS; 1279 } 1280 1281 class DerbyDataTypeParser extends DataTypeParser { 1282 1283 /** 1284 * {@inheritDoc} 1285 * 1286 * @see org.modeshape.sequencer.ddl.datatype.DataTypeParser#isCustomDataType(org.modeshape.sequencer.ddl.DdlTokenStream) 1287 */ 1288 @Override 1289 protected boolean isCustomDataType( DdlTokenStream tokens ) throws ParsingException { 1290 // Loop through the registered statement start string arrays and look for exact matches. 1291 1292 for (String[] stmts : derbyDataTypeStrings) { 1293 if (tokens.matches(stmts)) return true; 1294 } 1295 return super.isCustomDataType(tokens); 1296 } 1297 1298 /** 1299 * {@inheritDoc} 1300 * 1301 * @see org.modeshape.sequencer.ddl.datatype.DataTypeParser#parseApproxNumericType(org.modeshape.sequencer.ddl.DdlTokenStream) 1302 */ 1303 @Override 1304 protected DataType parseApproxNumericType( DdlTokenStream tokens ) throws ParsingException { 1305 return super.parseApproxNumericType(tokens); 1306 } 1307 1308 /** 1309 * {@inheritDoc} 1310 * 1311 * @see org.modeshape.sequencer.ddl.datatype.DataTypeParser#parseBitStringType(org.modeshape.sequencer.ddl.DdlTokenStream) 1312 */ 1313 @Override 1314 protected DataType parseBitStringType( DdlTokenStream tokens ) throws ParsingException { 1315 return super.parseBitStringType(tokens); 1316 } 1317 1318 /** 1319 * {@inheritDoc} 1320 * 1321 * @see org.modeshape.sequencer.ddl.datatype.DataTypeParser#parseCharStringType(org.modeshape.sequencer.ddl.DdlTokenStream) 1322 */ 1323 @Override 1324 protected DataType parseCharStringType( DdlTokenStream tokens ) throws ParsingException { 1325 DataType result = super.parseCharStringType(tokens); 1326 1327 canConsume(tokens, result, true, "FOR", "BIT", "DATA"); 1328 1329 return result; 1330 } 1331 1332 /** 1333 * {@inheritDoc} 1334 * 1335 * @see org.modeshape.sequencer.ddl.datatype.DataTypeParser#parseCustomType(org.modeshape.sequencer.ddl.DdlTokenStream) 1336 */ 1337 @Override 1338 protected DataType parseCustomType( DdlTokenStream tokens ) throws ParsingException { 1339 DataType dataType = null; 1340 String typeName = null; 1341 long length = 0; 1342 1343 if (tokens.matches(DerbyDataTypes.DTYPE_BINARY_LARGE_OBJECT) 1344 || tokens.matches(DerbyDataTypes.DTYPE_CHARACTER_LARGE_OBJECT)) { 1345 dataType = new DataType(); 1346 typeName = consume(tokens, dataType, true) + SPACE + consume(tokens, dataType, true) + SPACE 1347 + consume(tokens, dataType, true); 1348 if (canConsume(tokens, dataType, true, L_PAREN)) { 1349 String lengthValue = consume(tokens, dataType, false); 1350 length = parseLong(lengthValue); 1351 consume(tokens, dataType, true, R_PAREN); 1352 } 1353 dataType.setName(typeName); 1354 dataType.setLength(length); 1355 } else if (tokens.matches(DerbyDataTypes.DTYPE_CLOB) || tokens.matches(DerbyDataTypes.DTYPE_BLOB)) { 1356 dataType = new DataType(); 1357 typeName = consume(tokens, dataType, true); 1358 if (canConsume(tokens, dataType, true, L_PAREN)) { 1359 String lengthValue = consume(tokens, dataType, false); 1360 length = parseLong(lengthValue); 1361 consume(tokens, dataType, true, R_PAREN); 1362 } 1363 dataType.setName(typeName); 1364 dataType.setLength(length); 1365 } else if (tokens.matches(DerbyDataTypes.DTYPE_BIGINT)) { 1366 dataType = new DataType(); 1367 typeName = consume(tokens, dataType, true); 1368 dataType.setName(typeName); 1369 } else if (tokens.matches(DerbyDataTypes.DTYPE_LONG_VARCHAR_FBD)) { 1370 dataType = new DataType(); 1371 typeName = consume(tokens, dataType, true) + SPACE + consume(tokens, dataType, true) + SPACE 1372 + consume(tokens, dataType, true) + SPACE + consume(tokens, dataType, true) + SPACE 1373 + consume(tokens, dataType, true); 1374 dataType.setName(typeName); 1375 } else if (tokens.matches(DerbyDataTypes.DTYPE_LONG_VARCHAR)) { 1376 dataType = new DataType(); 1377 typeName = consume(tokens, dataType, true) + SPACE + consume(tokens, dataType, true); 1378 typeName = consume(tokens, dataType, true); 1379 dataType.setName(typeName); 1380 } else if (tokens.matches(DerbyDataTypes.DTYPE_DOUBLE)) { 1381 dataType = new DataType(); 1382 typeName = consume(tokens, dataType, true); 1383 dataType.setName(typeName); 1384 } else if (tokens.matches(DerbyDataTypes.DTYPE_XML)) { 1385 dataType = new DataType(); 1386 typeName = consume(tokens, dataType, true); 1387 dataType.setName(typeName); 1388 } 1389 1390 if (dataType == null) { 1391 super.parseCustomType(tokens); 1392 } 1393 return dataType; 1394 } 1395 1396 /** 1397 * {@inheritDoc} 1398 * 1399 * @see org.modeshape.sequencer.ddl.datatype.DataTypeParser#parseDateTimeType(org.modeshape.sequencer.ddl.DdlTokenStream) 1400 */ 1401 @Override 1402 protected DataType parseDateTimeType( DdlTokenStream tokens ) throws ParsingException { 1403 return super.parseDateTimeType(tokens); 1404 } 1405 1406 /** 1407 * {@inheritDoc} 1408 * 1409 * @see org.modeshape.sequencer.ddl.datatype.DataTypeParser#parseExactNumericType(org.modeshape.sequencer.ddl.DdlTokenStream) 1410 */ 1411 @Override 1412 protected DataType parseExactNumericType( DdlTokenStream tokens ) throws ParsingException { 1413 return super.parseExactNumericType(tokens); 1414 } 1415 1416 } 1417 1418}