001: /*
002: * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
003: *
004: *
005: * The contents of this file are subject to the terms of either the GNU
006: * General Public License Version 2 only ("GPL") or the Common
007: * Development and Distribution License("CDDL") (collectively, the
008: * "License"). You may not use this file except in compliance with the
009: * License. You can obtain a copy of the License at
010: * http://www.netbeans.org/cddl-gplv2.html
011: * or nbbuild/licenses/CDDL-GPL-2-CP. See the License for the
012: * specific language governing permissions and limitations under the
013: * License. When distributing the software, include this License Header
014: * Notice in each file and include the License file at
015: * nbbuild/licenses/CDDL-GPL-2-CP. Sun designates this
016: * particular file as subject to the "Classpath" exception as provided
017: * by Sun in the GPL Version 2 section of the License file that
018: * accompanied this code. If applicable, add the following below the
019: * License Header, with the fields enclosed by brackets [] replaced by
020: * your own identifying information:
021: * "Portions Copyrighted [year] [name of copyright owner]"
022: *
023: * Contributor(s):
024: *
025: * The Original Software is NetBeans. The Initial Developer of the Original
026: * Software is Sun Microsystems, Inc. Portions Copyright 1997-2006 Sun
027: * Microsystems, Inc. All Rights Reserved.
028: *
029: * If you wish your version of this file to be governed by only the CDDL
030: * or only the GPL Version 2, indicate your decision by adding
031: * "[Contributor] elects to include this software in this distribution
032: * under the [CDDL or GPL Version 2] license." If you do not indicate a
033: * single choice of license, a recipient has the option to distribute
034: * your version of this file under either the CDDL, the GPL Version 2 or
035: * to extend the choice of license to its licensees as provided above.
036: * However, if you add GPL Version 2 code and therefore, elected the GPL
037: * Version 2 license, then the option applies only if the new code is
038: * made subject to such option by the copyright holder.
039: */
040:
041: package org.netbeans.modules.languages.parser;
042:
043: import java.text.MessageFormat;
044: import java.util.ArrayList;
045: import java.util.Collections;
046: import java.util.HashMap;
047: import java.util.Iterator;
048: import java.util.List;
049: import java.util.ListIterator;
050: import java.util.Map;
051: import java.util.Set;
052: import java.util.Set;
053: import java.util.Stack;
054:
055: import org.netbeans.api.languages.ASTItem;
056: import org.netbeans.api.languages.ASTItem;
057: import org.netbeans.api.languages.ASTPath;
058: import org.netbeans.api.languages.ParseException;
059: import org.netbeans.api.languages.SyntaxContext;
060: import org.netbeans.api.languages.TokenInput;
061: import org.netbeans.modules.languages.Feature;
062: import org.netbeans.modules.languages.Language;
063: import org.netbeans.modules.languages.LanguagesManager;
064: import org.netbeans.api.languages.TokenInput;
065: import org.netbeans.api.languages.ASTNode;
066: import org.netbeans.api.languages.ASTToken;
067: import org.netbeans.modules.languages.Language;
068: import org.netbeans.modules.languages.LanguagesManager;
069: import org.netbeans.modules.languages.Rule;
070: import org.openide.util.NbBundle;
071:
072: /**
073: *
074: * @author Jan Jancura
075: */
public class LLSyntaxAnalyser {

    /** Token type name used to mark a gap between stitched runs of embedded-language tokens. */
    public static final String GAP_TOKEN_TYPE_NAME = "GAP";

    private Language language;           // language this analyser parses
    private List<Rule> grammarRules;     // LL grammar productions, indexed by rule number
    private First first;                 // FIRST-set tables used to pick the next production
    private Set<Integer> skipTokenTypes; // token type ids treated as whitespace/skippable
    private int traceSteps = -1;         // "traceSteps" from PROPERTIES; -2 when unparsable
    private boolean printFirst = false;  // "printFirst" debug flag from PROPERTIES
086:
087: private LLSyntaxAnalyser(Language language,
088: List<Rule> grammarRules, Set<Integer> skipTokenTypes) {
089: this .language = language;
090: this .grammarRules = grammarRules;
091: this .skipTokenTypes = skipTokenTypes;
092: }
093:
    // public methods ..........................................................

    /** Returns the grammar rules this analyser was created with. */
    public List<Rule> getRules() {
        return grammarRules;
    }

    /** Returns the token type ids skipped (treated as whitespace) during parsing. */
    public Set<Integer> getSkipTokenTypes() {
        return skipTokenTypes;
    }

    /** Returns the FIRST-set tables (package-private; used by analysis tooling). */
    First getFirst() {
        return first;
    }
107:
108: public static LLSyntaxAnalyser create(Language language,
109: List<Rule> grammarRules, Set<Integer> skipTokenTypes)
110: throws ParseException {
111: LLSyntaxAnalyser a = new LLSyntaxAnalyser(language,
112: grammarRules, skipTokenTypes);
113: a.initTracing();
114: a.first = First.create(a.grammarRules, language);
115: // boolean hasConflicts = AnalyserAnalyser.printConflicts (a.first, null);
116: // if (hasConflicts)
117: // AnalyserAnalyser.printRules (a.grammarRules, null);
118: //if (a.printFirst)
119: // AnalyserAnalyser.printF (a.first, null, language);
120: // System.out.println(a.first);
121: // AnalyserAnalyser.printUndefinedNTs (a.grammarRules, null);
122: return a;
123: }
124:
125: public static LLSyntaxAnalyser createEmpty(Language language) {
126: LLSyntaxAnalyser a = new LLSyntaxAnalyser(language, Collections
127: .<Rule> emptyList(), Collections.<Integer> emptySet());
128: try {
129: a.first = First.create(Collections.<Rule> emptyList(),
130: language);
131: } catch (ParseException ex) {
132: ex.printStackTrace();
133: }
134: return a;
135: }
136:
    /**
     * Parses the whole token input and returns the AST root, or null when
     * cancelled. Embedded-language token runs that were deferred ("skipped")
     * during the main parse are collected in {@code embeddings} per mime type
     * and parsed afterwards by that language's own analyser; their roots are
     * attached next to the main root under a compound "Root" node.
     *
     * @param input        token stream to parse
     * @param skipErrors   if true, recover from syntax errors (recording them
     *                     in {@code syntaxErrors}) instead of throwing
     * @param syntaxErrors collector for recovered syntax errors
     * @param cancel       setting cancel[0] to true aborts parsing
     * @return the AST root, or null when parsing was cancelled
     * @throws ParseException on a syntax error when skipErrors is false
     */
    public ASTNode read(TokenInput input, boolean skipErrors,
            List<SyntaxError> syntaxErrors, boolean[] cancel)
            throws ParseException {
        Map<String, List<ASTItem>> embeddings = new HashMap<String, List<ASTItem>>();
        ASTNode root;
        try {
            // Empty grammar or empty input: produce a flat node instead.
            if (grammarRules.isEmpty() || input.eof()) {
                root = readNoGrammar(input, skipErrors, embeddings,
                        syntaxErrors, cancel);
            } else {
                root = read2(input, skipErrors, embeddings,
                        syntaxErrors, cancel);
            }
        } catch (CancelledException ex) {
            return null; // cancellation is reported as a null result
        }
        if (embeddings.isEmpty()) {
            inspect(root);
            return root;
        }
        // Parse each deferred embedded-language token run with its own analyser.
        List<ASTItem> roots = new ArrayList<ASTItem>();
        Iterator<String> it = embeddings.keySet().iterator();
        while (it.hasNext()) {
            String mimeType = it.next();
            List<ASTItem> tokens = embeddings.get(mimeType);
            Language language = LanguagesManager.getDefault()
                    .getLanguage(mimeType); // NOTE: shadows the field; inner language
            TokenInput in = TokenInputUtils.create(tokens);
            ASTNode r = language.getAnalyser().read(in, skipErrors,
                    syntaxErrors, cancel);
            if (r == null) {
                continue; // embedded parse was cancelled
            }
            // Optional AST "process" hook may transform/replace the embedded root.
            Feature astProperties = language.getFeatureList()
                    .getFeature("AST");
            if (astProperties != null) {
                String process_embedded = (String) astProperties
                        .getValue("process_embedded");
                if (process_embedded == null
                        || Boolean.valueOf(process_embedded)) {
                    ASTNode newRoot = (ASTNode) astProperties.getValue(
                            "process", SyntaxContext.create(null,
                                    ASTPath.create(r)));
                    if (newRoot != null)
                        r = newRoot;
                }
            }
            roots.add(r);
        }
        // Main-language root goes last, after all embedded roots.
        roots.add(root);
        ASTNode result = ASTNode.createCompoundASTNode(language,
                "Root", roots, 0);
        inspect(result);
        return result;
    }
192:
    /**
     * Post-parse cleanup pass: removes empty nodes flagged by
     * {@link #removeNode} and collapses single-child chains via
     * {@link #replaceNode}, then recurses into every remaining child.
     * Iterates a snapshot of the child list because the live list is mutated
     * (removeChildren/setChildren) during the walk.
     */
    private void inspect(ASTItem item) {
        Iterator<ASTItem> it = new ArrayList<ASTItem>(item
                .getChildren()).iterator();
        int i = 0; // index of 'child' within item's live child list
        while (it.hasNext()) {
            ASTItem child = it.next();
            if (child instanceof ASTNode && item instanceof ASTNode) {
                ASTNode n = (ASTNode) child;
                if (removeNode(n)) {
                    ((ASTNode) item).removeChildren(n);
                    continue; // removal shifts later children left: do not advance i
                }
                ASTItem r = replaceNode(n);
                if (r != null) {
                    ((ASTNode) item).setChildren(i, r);
                    child = r; // recurse into the replacement, not the original
                }
            }
            i++;
            inspect(child);
        }
    }
215:
216: // helper methods ..........................................................
217:
218: private ASTNode read(TokenInput input, boolean skipErrors,
219: Map<String, List<ASTItem>> embeddings,
220: List<SyntaxError> syntaxErrors, boolean[] cancel)
221: throws ParseException, CancelledException {
222: if (grammarRules.isEmpty() || input.eof())
223: return readNoGrammar(input, skipErrors, embeddings,
224: syntaxErrors, cancel);
225: return read2(input, skipErrors, embeddings, syntaxErrors,
226: cancel);
227: }
228:
229: private ASTNode read2(TokenInput input, boolean skipErrors,
230: Map<String, List<ASTItem>> embeddings,
231: List<SyntaxError> syntaxErrors, boolean[] cancel)
232: throws ParseException, CancelledException {
233: Stack<Object> stack = new Stack<Object>();
234: ASTNode root = null, node = null;
235: ListIterator it = Collections.singletonList("S").listIterator();
236: List<ASTItem> whitespaces;
237: boolean firstLine = true;
238: do {
239: if (cancel[0])
240: throw new CancelledException();
241: int offset = input.getOffset();
242: whitespaces = readWhitespaces(node, input, skipErrors,
243: embeddings, syntaxErrors, cancel);
244: if (firstLine && input.eof() && whitespaces != null) {
245: return readNoGrammar(whitespaces, offset, skipErrors,
246: embeddings, syntaxErrors, cancel);
247: }
248: if (node != null)
249: offset = input.getOffset();
250: while (!it.hasNext()) {
251: if (stack.empty())
252: break;
253: node = (ASTNode) stack.pop();
254: it = (ListIterator) stack.pop();
255: }
256: if (!it.hasNext())
257: break;
258: Object current = it.next();
259: if (current instanceof String) {
260: String nt = (String) current;
261: int newRule = first.getRule(language.getNTID(nt),
262: input, skipTokenTypes);
263: if (newRule < 0) {
264: if (!skipErrors) {
265: if (node == null)
266: root = node = ASTNode.create(language,
267: "Root", whitespaces, offset);
268: throw new ParseException("Syntax error (nt: "
269: + nt + ", tokens: " + input.next(1)
270: + " " + input.next(2) + ".", root);
271: }
272: if (input.eof()) {
273: if (node == null)
274: root = node = ASTNode.create(language,
275: "Root", whitespaces, offset);
276: it.previous();
277: it = readError(node, root, input, null, it,
278: stack, embeddings, syntaxErrors,
279: whitespaces, cancel);
280: return root;
281: }
282: it.previous();
283: it = readError(node, root, input, null, it, stack,
284: embeddings, syntaxErrors, whitespaces,
285: cancel);
286: } else {
287: Rule rule = grammarRules.get(newRule);
288: Feature parse = language.getFeatureList()
289: .getFeature("PARSE", rule.getNT());
290: if (parse != null) {
291: stack.push(it);
292: stack.push(node);
293: it = Collections.EMPTY_LIST.listIterator();
294: ASTNode nast = (ASTNode) parse
295: .getValue(new Object[] { input, stack });
296: if (nast != null)
297: node.addChildren(nast);
298: } else {
299: if (node == null || it.hasNext()
300: || !nt.equals(node.getNT())) {
301: if (nt.indexOf('$') > 0
302: || nt.indexOf('#') > 0) {
303: stack.push(it);
304: stack.push(node);
305: } else {
306: if (rule.getRight().isEmpty()
307: && removeEmpty(language, rule
308: .getNT()))
309: continue;
310: ASTNode nnode = ASTNode.create(
311: language, rule.getNT(),
312: whitespaces, offset);
313: if (node != null) {
314: node.addChildren(nnode);
315: stack.push(it);
316: stack.push(node);
317: } else {
318: root = nnode;
319: }
320: node = nnode;
321: }
322: }
323: //S ystem.out.println(input.getIndex () + ": " + rule);
324: it = rule.getRight().listIterator();
325: }
326: }
327: } else {
328: ASTToken token = (ASTToken) current;
329: if (input.eof()) {
330: if (!skipErrors)
331: throw new ParseException(
332: "Unexpected end of file.", root);
333: it.previous();
334: it = readError(node, root, input, token, it, stack,
335: embeddings, syntaxErrors, whitespaces,
336: cancel);
337: return root;
338: } else if (!isCompatible(token, input.next(1))) {
339: if (input.next(1).getTypeName().equals(
340: GAP_TOKEN_TYPE_NAME)) {
341: input.read();
342: } else {
343: if (!skipErrors)
344: throw new ParseException(
345: "Unexpected token " + input.next(1)
346: + ". Expecting " + token,
347: root);
348: it.previous();
349: it = readError(node, root, input, token, it,
350: stack, embeddings, syntaxErrors,
351: whitespaces, cancel);
352: }
353: } else {
354: node.addChildren(readEmbeddings(input.read(),
355: skipErrors, embeddings, syntaxErrors,
356: cancel));
357: }
358: }
359: } while (true);
360: if (!skipErrors && !input.eof())
361: throw new ParseException("Unexpected token "
362: + input.next(1) + ".", root);
363: while (!input.eof())
364: it = readError(node, root, input, null, it, stack,
365: embeddings, syntaxErrors, whitespaces, cancel);
366: if (root == null) {
367: root = ASTNode.create(language, "Root", whitespaces, input
368: .getOffset());
369: }
370: return root;
371: }
372:
373: private static boolean isCompatible(ASTToken t1, ASTToken t2) {
374: if (t1.getTypeID() == -1) {
375: return t1.getIdentifier().equals(t2.getIdentifier());
376: } else {
377: if (t1.getIdentifier() == null)
378: return t1.getTypeID() == t2.getTypeID();
379: else
380: return t1.getTypeID() == t2.getTypeID()
381: && t1.getIdentifier()
382: .equals(t2.getIdentifier());
383: }
384: }
385:
    /**
     * Consumes a run of skip-type tokens (whitespace, comments, ...),
     * resolving any embeddings inside them. When {@code node} is non-null
     * the tokens are appended to it and null is returned; otherwise they are
     * collected into a fresh list so the caller can attach them to the node
     * it creates later. Also returns null when node == null and no skip
     * tokens were pending.
     */
    private List<ASTItem> readWhitespaces(ASTNode node,
            TokenInput input, boolean skipErrors,
            Map<String, List<ASTItem>> embeddings,
            List<SyntaxError> syntaxErrors, boolean[] cancel)
            throws ParseException, CancelledException {
        List<ASTItem> result = null;
        while (!input.eof()
                && skipTokenTypes.contains(input.next(1).getTypeID())) {
            if (cancel[0])
                throw new CancelledException();
            ASTToken token = input.read();
            if (node != null)
                node.addChildren(readEmbeddings(token, skipErrors,
                        embeddings, syntaxErrors, cancel));
            else {
                if (result == null)
                    result = new ArrayList<ASTItem>();
                result.add(readEmbeddings(token, skipErrors,
                        embeddings, syntaxErrors, cancel));
            }
        }
        return result;
    }
409:
    /**
     * Resolves a token that carries embedded-language children: parses the
     * children with the embedded language's analyser and returns a copy of
     * the token whose single child is the embedded AST root. Tokens without
     * children are returned unchanged. Depending on language features the
     * embedding may instead be deferred ("skipped") into {@code embeddings}
     * so continual fragments can be stitched together and parsed later.
     */
    private ASTItem readEmbeddings(ASTToken token, boolean skipErrors,
            Map<String, List<ASTItem>> embeddings,
            List<SyntaxError> syntaxErrors, boolean[] cancel)
            throws ParseException, CancelledException {
        List<ASTItem> children = token.getChildren();
        if (children.isEmpty())
            return token;

        TokenInput in = TokenInputUtils.create(children);
        String mimeType = children.get(0).getMimeType();
        // NOTE: shadows the analyser's own field — this is the inner
        // (embedded) language of the token's children.
        Language language = (Language) children.get(0).getLanguage();
        if (language == null)
            return readNoGrammar(in, skipErrors, embeddings,
                    syntaxErrors, cancel);

        //HACK should be deleted - inner language should not define its embedding to other languages...
        Feature astp = language.getFeatureList().getFeature("AST");
        if (astp != null) {
            String skip_embedded = (String) astp
                    .getValue("skip_embedded");
            if (skip_embedded != null && Boolean.valueOf(skip_embedded)) {
                return skipEmbedding(token, embeddings, children,
                        mimeType);
            }
        }
        //HACK END

        // "Continual" imports: fragments of the same embedded language that
        // span multiple host tokens are deferred and stitched together.
        Language outerLanguage = (Language) token.getLanguage();
        if (outerLanguage != null) {
            Feature f = outerLanguage.getPreprocessorImport();
            if (f != null && f.getValue("mimeType").equals(mimeType)
                    && f.getBoolean("continual", false))
                return skipEmbedding(token, embeddings, children,
                        mimeType);
            f = outerLanguage.getTokenImports()
                    .get(token.getTypeName());
            if (f != null && f.getValue("mimeType").equals(mimeType)
                    && f.getBoolean("continual", false))
                return skipEmbedding(token, embeddings, children,
                        mimeType);
        }

        Feature astProperties = language.getFeatureList().getFeature(
                "AST");
        // Parse the embedded content with its own analyser (shares the
        // embeddings map and error list with this parse).
        ASTNode root = language.getAnalyser().read(in, skipErrors,
                embeddings, syntaxErrors, cancel);
        if (astProperties != null) {
            // Optional "process" hook may transform/replace the embedded root.
            String process_embedded = (String) astProperties
                    .getValue("process_embedded");
            if (process_embedded == null
                    || Boolean.valueOf(process_embedded)) {
                ASTNode newRoot = (ASTNode) astProperties.getValue(
                        "process", SyntaxContext.create(null, ASTPath
                                .create(root)));
                if (newRoot != null)
                    root = newRoot;
            }
        }
        // Return a copy of the host token whose only child is the embedded AST.
        return ASTToken.create(outerLanguage, token.getTypeID(), token
                .getIdentifier(), token.getOffset(), token.getLength(),
                Collections.<ASTItem> singletonList(root));
    }
472:
    /**
     * Defers an embedding instead of parsing it now: appends the token's
     * children to the per-mime-type buffer in {@code embeddings}, joining
     * the buffer's last token with the first new one when their types match
     * (a fragment split across host tokens), and terminating the run with a
     * GAP marker via {@link #appendGap}. Returns a childless copy of
     * {@code token} to stand in for the embedding in the host tree.
     */
    private ASTToken skipEmbedding(ASTToken token,
            Map<String, List<ASTItem>> embeddings,
            List<ASTItem> children, String mimeType) {
        List<ASTItem> l = embeddings.get(mimeType);
        if (l == null) {
            // First run for this mime type: copy all children over.
            l = new ArrayList<ASTItem>();
            embeddings.put(mimeType, l);
            l.addAll(children.subList(0, children.size()));
            appendGap(l);
        } else {
            // Join the boundary tokens of consecutive runs when same-typed.
            ASTToken token1 = (ASTToken) l.get(l.size() - 1);
            ASTToken token2 = (ASTToken) children.get(0);
            if (token1.getTypeID() == token2.getTypeID()) {
                l.remove(l.size() - 1);
                ASTToken joinedToken = join(token1, token2);
                l.add(joinedToken);
                l.addAll(children.subList(1, children.size()));
            } else
                l.addAll(children);
            appendGap(l);
        }
        // Stand-in token: same identity/extent, but no children.
        return ASTToken
                .create(token.getLanguage(), token.getTypeID(), token
                        .getIdentifier(), token.getOffset(), token
                        .getLength(), Collections.<ASTItem> emptyList());
    }
499:
    /**
     * Joins two same-typed boundary tokens from consecutive embedded runs
     * into one token spanning both. Special case: when the next-to-last
     * child of {@code token1} and the first child of {@code token2} are both
     * "js_string" or both "css_string" tokens (a string literal split by the
     * host language), their identifiers are concatenated into a single
     * token; otherwise the child lists are simply concatenated.
     * The joined token's identifier is "" — callers rely on offsets/children.
     */
    private static ASTToken join(ASTToken token1, ASTToken token2) {
        List<ASTItem> token1Children = token1.getChildren();
        List<ASTItem> token2Children = token2.getChildren();
        List<ASTItem> joinedChildren = new ArrayList<ASTItem>();
        // size() > 1 because the last child of token1 is the GAP marker
        // appended by appendGap; the real boundary token precedes it.
        if (token1Children.size() > 1 && token2Children.size() > 0) {
            ASTToken t1 = (ASTToken) token1Children.get(token1Children
                    .size() - 2);
            ASTToken t2 = (ASTToken) token2Children.get(0);
            if (("js_string".equals(t1.getTypeName()) && "js_string"
                    .equals(t2.getTypeName()))
                    || ("css_string".equals(t1.getTypeName()) && "css_string"
                            .equals(t2.getTypeName()))) {
                // Merge the split string literal (drops the GAP marker).
                joinedChildren.addAll(token1Children.subList(0,
                        token1Children.size() - 2));
                joinedChildren.add(ASTToken.create(t1.getLanguage(), t1
                        .getTypeID(), t1.getIdentifier()
                        + t2.getIdentifier(), t1.getOffset()));
                joinedChildren.addAll(token2Children.subList(1,
                        token2Children.size()));
            } else {
                joinedChildren.addAll(token1Children);
                joinedChildren.addAll(token2Children);
            }
        } else {
            joinedChildren.addAll(token1Children);
            joinedChildren.addAll(token2Children);
        }
        return ASTToken.create(token1.getLanguage(),
                token1.getTypeID(), "", token1.getOffset(), token2
                        .getEndOffset()
                        - token1.getOffset(), joinedChildren);
    }
532:
    /**
     * Terminates an embedded run: replaces the last token in
     * {@code children} with a copy whose child list ends in a zero-length
     * GAP token, marking where host-language content interrupted the
     * embedded stream. No-op when the last token has no children.
     */
    private static void appendGap(List<ASTItem> children) {
        ASTToken lastToken = (ASTToken) children
                .get(children.size() - 1);
        if (lastToken.getChildren().isEmpty())
            return;
        List<ASTItem> lastTokenChildren = new ArrayList<ASTItem>(
                lastToken.getChildren());
        // Zero-length GAP token positioned at the end of the last child.
        lastTokenChildren.add(ASTToken.create(lastTokenChildren.get(0)
                .getLanguage(), GAP_TOKEN_TYPE_NAME, "",
                lastTokenChildren.get(lastTokenChildren.size() - 1)
                        .getEndOffset(), 0, null));
        // Swap in a copy of the last token carrying the extended child list.
        children.remove(children.size() - 1);
        children
                .add(ASTToken.create(lastToken.getLanguage(), lastToken
                        .getTypeID(), lastToken.getIdentifier(),
                        lastToken.getOffset(), lastToken.getLength(),
                        lastTokenChildren));
    }
551:
    /**
     * Grammar-less fallback: builds a flat "S" node containing every
     * remaining input token, with embeddings resolved.
     * NOTE(review): the root offset here comes from input.getIndex(),
     * whereas every other root uses input.getOffset() — confirm getIndex()
     * is offset-like for this TokenInput, or this is a latent bug.
     */
    private ASTNode readNoGrammar(TokenInput input, boolean skipErrors,
            Map<String, List<ASTItem>> embeddings,
            List<SyntaxError> syntaxErrors, boolean[] cancel)
            throws ParseException, CancelledException {
        ASTNode root = ASTNode.create(language, "S", input.getIndex());
        while (!input.eof()) {
            if (cancel[0])
                throw new CancelledException();
            ASTToken token = input.read();
            root.addChildren(readEmbeddings(token, skipErrors,
                    embeddings, syntaxErrors, cancel));
        }
        return root;
    }
566:
567: private ASTNode readNoGrammar(List tokens, int offset,
568: boolean skipErrors, Map<String, List<ASTItem>> embeddings,
569: List<SyntaxError> syntaxErrors, boolean[] cancel)
570: throws ParseException, CancelledException {
571: ASTNode root = ASTNode.create(language, "S", offset);
572: for (Iterator iter = tokens.iterator(); iter.hasNext();) {
573: if (cancel[0])
574: throw new CancelledException();
575: ASTToken token = (ASTToken) iter.next();
576: root.addChildren(readEmbeddings(token, skipErrors,
577: embeddings, syntaxErrors, cancel));
578: }
579: return root;
580: }
581:
    /**
     * Error recovery entry point. First asks {@link #findError} whether a
     * SYNTAX_ERROR feature matches at {@code parentNode}; failing that,
     * walks the rightmost spine of the tree from {@code root} trying each
     * node. If no feature handles the error, records a generic syntax error
     * and consumes one token (attached to the parent node, or to
     * {@code whitespaces} when no node exists yet) so the parser makes
     * progress. Returns the (possibly replaced) rule iterator.
     * NOTE(review): findError is first invoked with (parentNode, parentNode)
     * for both the node and parent arguments — looks intentional but verify.
     */
    private ListIterator readError(ASTNode parentNode, ASTNode root,
            TokenInput input, ASTToken expectedToken,
            ListIterator iterator, Stack stack,
            Map<String, List<ASTItem>> embeddings,
            List<SyntaxError> syntaxErrors, List<ASTItem> whitespaces,
            boolean[] cancel) throws ParseException, CancelledException {
        ListIterator newIterator = findError(parentNode, parentNode,
                input, expectedToken, iterator, stack, embeddings,
                syntaxErrors, cancel);
        if (newIterator != null)
            return newIterator;
        if (root != parentNode) {
            // Walk the rightmost spine from the root down to parentNode,
            // giving each ancestor's NT a chance to handle the error.
            ASTNode n = root;
            while (n != null) {
                newIterator = findError(n, parentNode, input,
                        expectedToken, iterator, stack, embeddings,
                        syntaxErrors, cancel);
                if (newIterator != null)
                    return newIterator;
                List<ASTItem> children = n.getChildren();
                if (children.isEmpty())
                    break;
                ASTItem item = children.get(children.size() - 1);
                if (item instanceof ASTNode && item != parentNode)
                    n = (ASTNode) item;
                else
                    break;
            }
        }
        // Unrecognized error: record it and skip one token to make progress.
        createError(input, expectedToken, syntaxErrors, null);
        if (!input.eof()) {
            if (parentNode != null)
                parentNode.addChildren(readEmbeddings(input.read(),
                        true, embeddings, syntaxErrors, cancel));
            else {
                if (whitespaces == null)
                    whitespaces = new ArrayList<ASTItem>();
                whitespaces.add(readEmbeddings(input.read(), true,
                        embeddings, syntaxErrors, cancel));
            }
        }
        return iterator;
    }
626:
    /**
     * Consults the language's SYNTAX_ERROR features registered for the
     * current nonterminal ({@code node}'s NT, or "S" at top level). A
     * feature can match on eof, the next token's type name, its identifier,
     * or unconditionally; a matching feature may supply an error message
     * and a recovery mapping (token_identifier -&gt; nt). When recovery
     * mappings exist, input is skipped up to the first configured recovery
     * token and the parse stack is unwound to the corresponding
     * nonterminal. Returns the (possibly replaced) rule iterator, or null
     * when no SYNTAX_ERROR feature exists for this NT at all.
     */
    private ListIterator findError(ASTNode node, ASTNode parentNode,
            TokenInput input, ASTToken expectedToken,
            ListIterator iterator, Stack stack,
            Map<String, List<ASTItem>> embeddings,
            List<SyntaxError> syntaxErrors, boolean[] cancel)
            throws ParseException, CancelledException {
        String id = node == null ? "S" : node.getNT();
        List<Feature> features = language.getFeatureList().getFeatures(
                "SYNTAX_ERROR", id);
        if (features.isEmpty())
            return null; // no handler configured: caller keeps searching
        boolean errorCreated = false;
        Map<String, String> tokenIdentifierToNt = new HashMap<String, String>();
        Iterator<Feature> it = features.iterator();
        while (it.hasNext()) {
            Feature feature = it.next();
            boolean eof = feature.getBoolean("eof", false);
            String nextTokenTypeName = (String) feature
                    .getValue("next_token_type_name");
            String nextTokenIdentifier = (String) feature
                    .getValue("next_token_identifier");
            // Feature matches on eof, next token's type/identifier, or
            // unconditionally when no condition is configured.
            if ((eof && input.eof())
                    || (!input.eof() && nextTokenTypeName != null && nextTokenTypeName
                            .equals(input.next(1).getTypeName()))
                    || (!input.eof() && nextTokenIdentifier != null && nextTokenIdentifier
                            .equals(input.next(1).getIdentifier()))
                    || (!eof && nextTokenIdentifier == null && nextTokenTypeName == null)) {
                String message = (String) feature.getValue("message");
                String tokenIdentifier = (String) feature
                        .getValue("token_identifier");
                String nt = (String) feature.getValue("nt");
                if (tokenIdentifier != null)
                    tokenIdentifierToNt.put(tokenIdentifier, nt);
                // Only the first matching feature's message is reported.
                if (message != null && !errorCreated) {
                    createError(input, expectedToken, syntaxErrors,
                            message);
                    errorCreated = true;
                }
            }
        }
        if (!errorCreated)
            createError(input, expectedToken, syntaxErrors, null);
        if (tokenIdentifierToNt.isEmpty()) {
            // No recovery token configured: consume one token and resume.
            if (!input.eof())
                parentNode.addChildren(readEmbeddings(input.read(),
                        true, embeddings, syntaxErrors, cancel));
            return iterator;
        }
        // Skip forward to the nearest configured recovery token.
        while (!input.eof()
                && !tokenIdentifierToNt.containsKey(input.next(1)
                        .getIdentifier()))
            parentNode.addChildren(readEmbeddings(input.read(), true,
                    embeddings, syntaxErrors, cancel));
        if (input.eof())
            return iterator;
        String nt = tokenIdentifierToNt.get(input.next(1)
                .getIdentifier());
        if (nt != null) {
            // Unwind the parse stack until we are back at the target NT.
            String cnt = parentNode.getNT();
            while (!cnt.equals(nt) && !stack.isEmpty()) {
                cnt = ((ASTNode) stack.pop()).getNT();
                iterator = (ListIterator) stack.pop();
            }
        }
        return iterator;
    }
694:
    /**
     * Records a {@link SyntaxError} at the current input position. When no
     * explicit {@code message} is given, derives a localized default from
     * the expected token (by identifier, or by a type-based
     * "... expected" bundle message), or from end-of-file / the next input
     * token when nothing specific was expected. At eof a synthetic empty
     * token marks the error position.
     */
    private void createError(TokenInput input, ASTToken expectedToken,
            List<SyntaxError> syntaxErrors, String message) {
        ASTItem item = input.eof() ? ASTToken.create(null, 0, "", input
                .getOffset()) : input.next(1);
        if (message == null) {
            if (expectedToken != null) {
                if (expectedToken.getIdentifier() != null)
                    message = MessageFormat.format(NbBundle.getMessage(
                            LLSyntaxAnalyser.class, "CTL_ID_expected"),
                            expectedToken.getIdentifier());
                else {
                    // Pick a bundle message by the expected token's type name.
                    String type = expectedToken.getTypeName();
                    if (type.contains("identifier"))
                        message = NbBundle.getMessage(
                                LLSyntaxAnalyser.class,
                                "CTL_Identifier_expected");
                    else if (type.contains("string"))
                        message = NbBundle.getMessage(
                                LLSyntaxAnalyser.class,
                                "CTL_String_expected");
                    else if (type.contains("keyword"))
                        message = NbBundle.getMessage(
                                LLSyntaxAnalyser.class,
                                "CTL_Keyword_expected");
                    else
                        message = MessageFormat.format(NbBundle
                                .getMessage(LLSyntaxAnalyser.class,
                                        "CTL_Type_expected"), type);
                }
            } else if (input.eof())
                message = NbBundle.getMessage(LLSyntaxAnalyser.class,
                        "CTL_Unexpected_end_of_file");
            else
                message = MessageFormat.format(NbBundle.getMessage(
                        LLSyntaxAnalyser.class,
                        "CTL_Unexpected_token_ID"), input.next(1)
                        .getIdentifier());
        }
        syntaxErrors.add(new SyntaxError(item, message));
    }
735:
736: private void initTracing() {
737: Feature properties = language.getFeatureList().getFeature(
738: "PROPERTIES");
739: if (properties == null)
740: return;
741: try {
742: traceSteps = Integer.parseInt((String) properties
743: .getValue("traceSteps"));
744: } catch (NumberFormatException ex) {
745: traceSteps = -2;
746: }
747: if (properties.getBoolean("printRules", false))
748: AnalyserAnalyser.printRules(grammarRules, null);
749: printFirst = properties.getBoolean("printFirst", false);
750: }
751:
752: private boolean removeNode(ASTNode node) {
753: List l = node.getChildren();
754: if (!l.isEmpty())
755: return false;
756: ASTFeatures astFeatures = ASTFeatures.get((Language) node
757: .getLanguage());
758: return astFeatures.removeEmpty
759: || (astFeatures.removeEmptyN == astFeatures.empty
760: .contains(node.getNT()));
761: }
762:
    /**
     * Collapses chains of single-child nodes: while the current node has
     * exactly one child and the language's AST features allow removing
     * "simple" nodes (globally via removeSimple, or per-NT via the
     * removeSimpleN polarity flag and the "simple" set), descends into that
     * child. Returns the deepest replacement item, or null when no
     * collapsing applied at all.
     */
    private ASTItem replaceNode(ASTNode node) {
        ASTFeatures astFeatures = ASTFeatures.get((Language) node
                .getLanguage());
        ASTItem result = null;
        do {
            List<ASTItem> l = node.getChildren();
            if (l.size() != 1)
                return result;
            // removeSimpleN != contains(nt) means "this NT must not be collapsed".
            if (!astFeatures.removeSimple
                    && (astFeatures.removeSimpleN != astFeatures.simple
                            .contains(node.getNT())))
                return result;
            result = l.get(0);
            if (!(result instanceof ASTNode))
                return result; // reached a token: cannot descend further
            node = (ASTNode) result;
        } while (true);
    }
781:
782: private boolean removeEmpty(Language language, String nt) {
783: ASTFeatures astFeatures = ASTFeatures.get(language);
784: return astFeatures.removeEmpty
785: || (astFeatures.removeEmptyN == astFeatures.empty
786: .contains(nt));
787: }
788:
789: // innerclasses ............................................................
790:
791: public static class T {
792: int type;
793: String identifier;
794:
795: T(ASTToken t) {
796: type = t.getTypeID();
797: identifier = t.getIdentifier();
798: }
799:
800: public boolean equals(Object o) {
801: if (!(o instanceof T))
802: return false;
803: return (((T) o).type == -1 || ((T) o).type == type)
804: && (((T) o).identifier == null || ((T) o).identifier
805: .equals(identifier));
806: }
807:
808: public int hashCode() {
809: return (type + 1)
810: * (identifier == null ? -1 : identifier.hashCode());
811: }
812:
813: public String toString() {
814: if (type == -1)
815: return "\"" + identifier + "\"";
816: if (identifier == null)
817: return "<" + type + ">";
818: return "[" + type + "," + identifier + "]";
819: }
820:
821: public String toString(Language language) {
822: if (type == -1)
823: return "\"" + identifier + "\"";
824: String typeName = language.getTokenType(type);
825: if (identifier == null)
826: return "<" + typeName + ">";
827: return "[" + typeName + "," + identifier + "]";
828: }
829: }
830:
831: private class CancelledException extends Exception {
832:
833: }
834: }
|