C# LexicalState Class Code Examples

This article collects typical usage examples of the C# LexicalState class. If you are wondering what LexicalState is for, how it is used, or what real-world code that uses it looks like, the curated examples below should help.



A total of 20 code examples of the LexicalState class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C# code examples.
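
The article never shows the definition of LexicalState itself. As a rough orientation, the following is a minimal sketch of what the type plausibly looks like, reconstructed purely from the member names referenced in the examples below; the actual declaration in the ironruby project may differ in kind, ordering, and members, and the per-member comments describe the conventional meaning of these states in Ruby lexers rather than documented ironruby semantics.

    // Minimal sketch, not the actual ironruby declaration.
    internal enum LexicalState {
        EXPR_BEG,      // at the beginning of an expression; '/' or '%' may start a literal
        EXPR_MID,      // after keywords such as return/break; behaves much like EXPR_BEG
        EXPR_ARG,      // expecting a method argument
        EXPR_CMDARG,   // expecting an argument of a parenthesis-less method call
        EXPR_END,      // a complete expression just ended; operators are read as binary
        EXPR_ENDARG,   // an argument list just ended
        EXPR_FNAME,    // expecting a method name (e.g. after def)
        EXPR_DOT,      // expecting a method name after '.' or '::'
        EXPR_CLASS     // after the class keyword
    }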

Example 1: ReadEquals

        // Assignment: =
        // Operators: == === =~ =>
        private Tokens ReadEquals() {
            switch (_lexicalState) {
                case LexicalState.EXPR_FNAME:
                case LexicalState.EXPR_DOT:
                    _lexicalState = LexicalState.EXPR_ARG; 
                    break;

                default:
                    _lexicalState = LexicalState.EXPR_BEG; 
                    break;
            }

            switch (Peek()) {
                case '=':
                    Skip('=');
                    return Read('=') ? Tokens.Eqq : Tokens.Eq;

                case '~':
                    Skip('~');
                    return Tokens.Match;

                case '>':
                    Skip('>');
                    return Tokens.Assoc;

                default:
                    return (Tokens)'=';
            }
        }
Developer: bclubb, Project: ironruby, Lines: 31, Source: Tokenizer.cs


Example 2: Tokenize


//......... some of the code is omitted here .........

                case '+':
                    return MarkSingleLineTokenEnd(ReadPlus(whitespaceSeen));

                case '-':
                    return MarkSingleLineTokenEnd(ReadMinus(whitespaceSeen));

                case '.':
                    return MarkSingleLineTokenEnd(ReadDot());

                case '0':
                case '1':
                case '2':
                case '3':
                case '4':
                case '5':
                case '6':
                case '7':
                case '8':
                case '9':
                    return MarkSingleLineTokenEnd(ReadUnsignedNumber(c));

                case ':':
                    return MarkSingleLineTokenEnd(ReadColon(whitespaceSeen));

                case '/':
                    return MarkSingleLineTokenEnd(ReadSlash(whitespaceSeen));

                case '^':
                    return MarkSingleLineTokenEnd(ReadCaret());

                case ';':
                    _commaStart = true;
                    _lexicalState = LexicalState.EXPR_BEG;
                    MarkSingleLineTokenEnd();
                    return (Tokens)';';

                case ',':
                    _lexicalState = LexicalState.EXPR_BEG;
                    MarkSingleLineTokenEnd();
                    return (Tokens)',';

                case '~':
                    return MarkSingleLineTokenEnd(ReadTilde());

                case '(':
                    _commaStart = true;
                    return MarkSingleLineTokenEnd(ReadLeftParenthesis(whitespaceSeen));

                case '[':
                    return MarkSingleLineTokenEnd(ReadLeftBracket(whitespaceSeen));

                case '{':
                    return MarkSingleLineTokenEnd(ReadLeftBrace());

                case ')':
                case ']':
                case '}':
                    COND_LEXPOP();
                    CMDARG_LEXPOP();
                    _lexicalState = LexicalState.EXPR_END;
                    MarkSingleLineTokenEnd();
                    return (Tokens)c;

                case '%':
                    return TokenizePercent(whitespaceSeen);
Developer: bclubb, Project: ironruby, Lines: 67, Source: Tokenizer.cs


Example 3: ReadIdentifier

        // Identifiers:
        //   [:alpha:_][:identifier:]+
        // Method names:
        //   [:alpha:_][:identifier:]+[?][^=]
        //   [:alpha:_][:identifier:]+[!][^=]
        //   [:alpha:_][:identifier:]+[=][^=~>]
        //   [:alpha:_][:identifier:]+[=] immediately followed by =>
        // Keywords
        private Tokens ReadIdentifier(int firstCharacter, bool cmdState) {
            // the first character already read:
            int start = _bufferPos - 1;
            SkipVariableName();

            // reads the token suffix (!, ?, =) and returns the token kind based upon the suffix:
            Tokens result = ReadIdentifierSuffix(firstCharacter);

            // TODO: possible optimization: ~15% are keywords, ~15% are existing local variables -> we can save allocations
            string identifier = new String(_lineBuffer, start, _bufferPos - start);
            
            if (_lexicalState != LexicalState.EXPR_DOT) {
                if (_lexicalState == LexicalState.EXPR_FNAME) {
                    SetStringToken(identifier);
                }

                Tokens keyword = StringToKeyword(identifier);
                if (keyword != Tokens.None) {
                    return keyword;
                }
            }

            if (_lexicalState == LexicalState.EXPR_BEG ||
                _lexicalState == LexicalState.EXPR_MID ||
                _lexicalState == LexicalState.EXPR_DOT ||
                _lexicalState == LexicalState.EXPR_ARG ||
                _lexicalState == LexicalState.EXPR_CMDARG) {

                if (_localVariableResolver.IsLocalVariable(identifier)) {
                    _lexicalState = LexicalState.EXPR_END;
                } else if (cmdState) {
                    _lexicalState = LexicalState.EXPR_CMDARG;
                } else {
                    _lexicalState = LexicalState.EXPR_ARG;
                }
            } else {
                _lexicalState = LexicalState.EXPR_END;
            }

            SetStringToken(identifier);
            return result;
        }
Developer: bclubb, Project: ironruby, Lines: 50, Source: Tokenizer.cs


Example 4: SetState

 internal void SetState(LexicalState state) {
     _lexicalState = state;
 }
Developer: bclubb, Project: ironruby, Lines: 3, Source: Tokenizer.cs
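
Example 4 is only a setter; the hypothetical snippet below (the _tokenizer field and the surrounding scenario are assumptions, not taken from the ironruby sources) illustrates the kind of call site it enables: a parser forcing the tokenizer back into the "beginning of expression" state so that the next token is interpreted accordingly.

    // Hypothetical call site, not from the ironruby sources: after reducing a
    // grammar rule, the parser resets the tokenizer to EXPR_BEG so that a
    // following '/' is lexed as the start of a regexp literal rather than as
    // a division operator.
    _tokenizer.SetState(LexicalState.EXPR_BEG);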


Example 5: ReadUnsignedNumber

        // INTEGER:
        // [1-9]([0-9_]*[1-9])?
        // 0([0-7_]*[0-7])?
        // 0[xX][0-9a-fA-F]([0-9a-fA-F_]*[0-9a-fA-F])?
        // 0[dD][0-9]([0-9_]*[0-9])?
        // 0[bB][01]([01_]*[01])?
        // 0[oO][0-7]([0-7_]*[0-7])?
        //
        // FLOAT:
        // (0|[1-9]([0-9_]*[0-9])?)[.][0-9_]*[0-9]([eE][+-]?[0-9]([0-9_]*[0-9])?)
        //
        // Takes the first decimal digit of the number.
        //
        private Tokens ReadUnsignedNumber(int c) {
            _lexicalState = LexicalState.EXPR_END;
           
            if (c == '0') {
                switch (Peek()) {
                    case 'x':
                    case 'X':
                        Skip();
                        return ReadInteger(16, NumericCharKind.None);

                    case 'b':
                    case 'B':
                        Skip();
                        return ReadInteger(2, NumericCharKind.None);

                    case 'o':
                    case 'O':
                        Skip();
                        return ReadInteger(8, NumericCharKind.None);

                    case 'd':
                    case 'D':
                        Skip();
                        return ReadInteger(10, NumericCharKind.None);

                    case 'e':
                    case 'E': {
                            // 0e[+-]...    
                            int sign;
                            int start = _bufferPos - 1;

                            if (TryReadExponentSign(1, out sign)) {
                                return ReadDoubleExponent(start, sign);
                            }

                            _tokenValue.SetInteger(0);
                            return Tokens.Integer;
                        }

                    case '.':
                        // 0.
                        if (IsDecimalDigit(Peek(1))) {
                            Skip('.');
                            return ReadDouble(_bufferPos - 2);
                        }

                        _tokenValue.SetInteger(0);
                        return Tokens.Integer;

                    case '0':
                    case '1':
                    case '2':
                    case '3':
                    case '4':
                    case '5':
                    case '6':
                    case '7':
                    case '_':
                        // the previous character is '0' digit:
                        return ReadInteger(8, NumericCharKind.Digit);

                    case '8':
                    case '9':
                        ReportError(Errors.IllegalOctalDigit);
                        // treat the number as decimal
                        return ReadInteger(10, NumericCharKind.Digit);

                    default:
                        _tokenValue.SetInteger(0);
                        return Tokens.Integer;
                }
            }

            return ReadDecimalNumber(c);
        }
Developer: bclubb, Project: ironruby, Lines: 88, Source: Tokenizer.cs
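
The prefix handling in ReadUnsignedNumber maps 0x/0X to base 16, 0b/0B to base 2, 0o/0O to base 8, 0d/0D to base 10, and a bare leading 0 to octal. The standalone sketch below is not tokenizer code; its names are made up for illustration, and it ignores details such as '_' digit separators, floats, and error reporting. It only demonstrates the same radix mapping using plain .NET parsing.

    using System;

    // Standalone illustration of the radix mapping applied by ReadUnsignedNumber.
    // Not IronRuby code; valid input is assumed and '_' separators are not handled.
    static class RubyIntegerPrefixDemo {
        static int ParsePrefixed(string s) {
            if (s.Length > 1 && s[0] == '0') {
                switch (char.ToLowerInvariant(s[1])) {
                    case 'x': return Convert.ToInt32(s.Substring(2), 16); // 0x.. hexadecimal
                    case 'b': return Convert.ToInt32(s.Substring(2), 2);  // 0b.. binary
                    case 'o': return Convert.ToInt32(s.Substring(2), 8);  // 0o.. octal
                    case 'd': return Convert.ToInt32(s.Substring(2), 10); // 0d.. decimal
                    default:  return Convert.ToInt32(s, 8);               // 0..  octal
                }
            }
            return Convert.ToInt32(s, 10);
        }

        static void Main() {
            Console.WriteLine(ParsePrefixed("0x1F"));  // 31
            Console.WriteLine(ParsePrefixed("0b101")); // 5
            Console.WriteLine(ParsePrefixed("017"));   // 15
            Console.WriteLine(ParsePrefixed("0d42"));  // 42
        }
    }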


Example 6: ReadAmpersand

        // Operators: & &&
        // Assignments: &=
        private Tokens ReadAmpersand(bool whitespaceSeen) {
            int c = Peek();
            
            if (c == '&') {
                Skip(c);
                _lexicalState = LexicalState.EXPR_BEG;
                
                if (Read('=')) {
                    SetAsciiStringToken(Symbols.And);
                    return Tokens.Assignment;
                }

                return Tokens.LogicalAnd;
            } 
            
            if (c == '=') {
                Skip(c);
                _lexicalState = LexicalState.EXPR_BEG;
                SetAsciiStringToken(Symbols.BitwiseAnd);
                return Tokens.Assignment;
            }

            Tokens result;
            if (IS_ARG() && whitespaceSeen && !IsWhiteSpace(c)) {
                // we are in command argument and there is a whitespace between ampersand: "foo &bar"
                ReportWarning(Errors.AmpersandInterpretedAsProcArgument);
                result = Tokens.Ampersand;
            } else if (_lexicalState == LexicalState.EXPR_BEG || _lexicalState == LexicalState.EXPR_MID) {
                result = Tokens.Ampersand;
            } else {
                result = (Tokens)'&';
            }

            switch (_lexicalState) {
                case LexicalState.EXPR_FNAME:
                case LexicalState.EXPR_DOT:
                    _lexicalState = LexicalState.EXPR_ARG;
                    break;

                default:
                    _lexicalState = LexicalState.EXPR_BEG;
                    break;
            }

            return result;
        }
Developer: bclubb, Project: ironruby, Lines: 48, Source: Tokenizer.cs


Example 7: ReadDot

        // Operators: . .. ...
        // Errors: .[:digit:]
        private Tokens ReadDot() {
            _lexicalState = LexicalState.EXPR_BEG;
            
            int c = Peek();
            if (c == '.') {
                Skip(c);
                return Read('.') ? Tokens.Dot3 : Tokens.Dot2;
            }

            if (IsDecimalDigit(c)) {
                ReportError(Errors.NoFloatingLiteral);
            }

            _lexicalState = LexicalState.EXPR_DOT;
            return (Tokens)'.';
        }
Developer: bclubb, Project: ironruby, Lines: 18, Source: Tokenizer.cs


Example 8: ReadLeftBracket

        // Brackets: [
        // Operators: [] []=
        private Tokens ReadLeftBracket(bool whitespaceSeen) {
            if (_lexicalState == LexicalState.EXPR_FNAME || _lexicalState == LexicalState.EXPR_DOT) {
                _lexicalState = LexicalState.EXPR_ARG;
                
                return Read(']') ? (Read('=') ? Tokens.Aset : Tokens.Aref) : (Tokens)'[';
            }

            Tokens result;
            if (_lexicalState == LexicalState.EXPR_BEG || _lexicalState == LexicalState.EXPR_MID) {
                result = Tokens.Lbrack;
            } else if (IS_ARG() && whitespaceSeen) {
                result = Tokens.Lbrack;
            } else {
                result = (Tokens)'[';
            }

            _lexicalState = LexicalState.EXPR_BEG;
            COND_PUSH(0);
            CMDARG_PUSH(0);
            return result;
        }
Developer: bclubb, Project: ironruby, Lines: 23, Source: Tokenizer.cs


Example 9: ReadTilde

        // Operators: ~ ~@
        private Tokens ReadTilde() {
            if (_lexicalState == LexicalState.EXPR_FNAME || _lexicalState == LexicalState.EXPR_DOT) {
                // ~@
                Read('@');
            }

            switch (_lexicalState) {
                case LexicalState.EXPR_FNAME:
                case LexicalState.EXPR_DOT:
                    _lexicalState = LexicalState.EXPR_ARG; 
                    break;

                default:
                    _lexicalState = LexicalState.EXPR_BEG; 
                    break;
            }

            return (Tokens)'~';
        }
Developer: bclubb, Project: ironruby, Lines: 20, Source: Tokenizer.cs


Example 10: TokenizePercent

        // Assignments: %=
        // Operators: % 
        // Literals: %{... (quotation start)
        private Tokens TokenizePercent(bool whitespaceSeen) {
            if (_lexicalState == LexicalState.EXPR_BEG || _lexicalState == LexicalState.EXPR_MID) {
                return TokenizeQuotationStart();
            }

            int c = Peek();
            if (c == '=') {
                Skip(c);
                SetAsciiStringToken(Symbols.Mod);
                _lexicalState = LexicalState.EXPR_BEG;
                MarkSingleLineTokenEnd();
                return Tokens.Assignment;
            }

            if (IS_ARG() && whitespaceSeen && !IsWhiteSpace(c)) {
                return TokenizeQuotationStart();
            }

            switch (_lexicalState) {
                case LexicalState.EXPR_FNAME:
                case LexicalState.EXPR_DOT:
                    _lexicalState = LexicalState.EXPR_ARG; 
                    break;

                default:
                    _lexicalState = LexicalState.EXPR_BEG; 
                    break;
            }

            MarkSingleLineTokenEnd();
            return (Tokens)'%';
        }
Developer: bclubb, Project: ironruby, Lines: 35, Source: Tokenizer.cs


Example 11: ReadLeftBrace

        // Brackets: {
        private Tokens ReadLeftBrace() {
            Tokens result;

            if (IS_ARG() || _lexicalState == LexicalState.EXPR_END) {
                result = (Tokens)'{';        // block (primary)
            } else if (_lexicalState == LexicalState.EXPR_ENDARG) {
                result = Tokens.LbraceArg;   // block (expr)
            } else {
                result = Tokens.Lbrace;      // hash
            }

            COND_PUSH(0);
            CMDARG_PUSH(0);
            _lexicalState = LexicalState.EXPR_BEG;
            return result;
        }
Developer: bclubb, Project: ironruby, Lines: 17, Source: Tokenizer.cs


Example 12: ReadGlobalVariable

        // Global variables: 
        //   $[_~*$?!@/\;,.=:<>"] 
        //   $-[:identifier:] 
        //   $[:identifier:]
        // Match references: 
        //   $[&`'+] 
        //   $[1-9][0-9]+
        // Dollar:
        //   $
        private Tokens ReadGlobalVariable() {
            _lexicalState = LexicalState.EXPR_END;

            // start right after $, the resulting symbol doesn't contain $
            int start = _bufferPos;
            
            int c = Read();
            switch (c) {
                case '_':
                    if (IsIdentifier(Peek())) {
                        SkipVariableName();
                        SetStringToken(start, _bufferPos - start);
                        return Tokens.GlobalVariable;
                    }
                    return GlobalVariableToken(Symbols.LastInputLine);

                // exceptions:
                case '!': return GlobalVariableToken(Symbols.CurrentException);
                case '@': return GlobalVariableToken(Symbols.CurrentExceptionBacktrace);

                // options:
                case '-':
                    if (IsIdentifier(Peek())) {
                        Read();
                        SetStringToken(start, 2);
                    } else {
                        SetAsciiStringToken("-");
                    }
                    return Tokens.GlobalVariable;

                // others:
                case ',': return GlobalVariableToken(Symbols.ItemSeparator);
                case ';': return GlobalVariableToken(Symbols.StringSeparator);
                case '/': return GlobalVariableToken(Symbols.InputSeparator);
                case '\\': return GlobalVariableToken(Symbols.OutputSeparator);
                case '*': return GlobalVariableToken(Symbols.CommandLineArguments);
                case '$': return GlobalVariableToken(Symbols.CurrentProcessId);
                case '?': return GlobalVariableToken(Symbols.ChildProcessExitStatus);
                case '=': return GlobalVariableToken(Symbols.IgnoreCaseComparator);
                case ':': return GlobalVariableToken(Symbols.LoadPath);
                case '"': return GlobalVariableToken(Symbols.LoadedFiles);
                case '<': return GlobalVariableToken(Symbols.InputContent);
                case '>': return GlobalVariableToken(Symbols.OutputStream);
                case '.': return GlobalVariableToken(Symbols.LastInputLineNumber);

                // regex:
                case '~': 
                    return GlobalVariableToken(Symbols.MatchData);
                
                case '&':
                    _tokenValue.SetInteger(RegexMatchReference.EntireMatch);
                    return Tokens.MatchReference;

                case '`':
                    _tokenValue.SetInteger(RegexMatchReference.MatchPrefix);
                    return Tokens.MatchReference;

                case '\'':		
                    _tokenValue.SetInteger(RegexMatchReference.MatchSuffix);
                    return Tokens.MatchReference;

                case '+':
                    _tokenValue.SetInteger(RegexMatchReference.MatchLastGroup);
                    return Tokens.MatchReference;

                case '0':
                    if (IsIdentifier(Peek())) {
                        // $0[A-Za-z0-9_] are invalid:
                        SkipVariableName();
                        ReportError(Errors.InvalidGlobalVariableName, new String(_lineBuffer, start - 1, _bufferPos - start));
                        SetAsciiStringToken(Symbols.ErrorVariable);
                        return Tokens.GlobalVariable;
                    }

                    return GlobalVariableToken(Symbols.CommandLineProgramPath);

                default:
                    if (IsDecimalDigit(c)) {
                        return ReadMatchGroupReferenceVariable(c);
                    }

                    if (IsIdentifier(c)) {
                        SkipVariableName();
                        SetStringToken(start, _bufferPos - start);
                        return Tokens.GlobalVariable;
                    }

                    Back(c);
                    return (Tokens)'$';
            }
        }
Developer: bclubb, Project: ironruby, Lines: 100, Source: Tokenizer.cs


Example 13: ReadInstanceOrClassVariable

        // Instance variables:
        //   @[:alpha:_][:identifier:]*
        // Class variables:
        //   @@[:alpha:_][:identifier:]*
        // At:
        //   @
        private Tokens ReadInstanceOrClassVariable() {
            Tokens result;

            // start right before @/@@, the resulting symbol starts with @/@@
            int start = _bufferPos - 1;

            int c = Peek(0);
            if (c == '@') {
                c = Peek(1);
                result = Tokens.ClassVariable;
            } else {
                result = Tokens.InstanceVariable;
            }

            // c follows @ or @@
            if (IsDecimalDigit(c)) {
                ReportError(result == Tokens.InstanceVariable ? Errors.InvalidInstanceVariableName : Errors.InvalidClassVariableName, (char)c);
            } else if (IsIdentifierInitial(c)) {
                if (result == Tokens.ClassVariable) {
                    Skip('@');
                }
                Skip(c);

                SkipVariableName();
                SetStringToken(start, _bufferPos - start);
                _lexicalState = LexicalState.EXPR_END;
                return result;
            }

            return (Tokens)'@';
        }
Developer: bclubb, Project: ironruby, Lines: 37, Source: Tokenizer.cs


Example 14: ReadLeftParenthesis

        // Brackets: (
        private Tokens ReadLeftParenthesis(bool whitespaceSeen) {
            Tokens result = (Tokens)'(';
            
            if (_lexicalState == LexicalState.EXPR_BEG || _lexicalState == LexicalState.EXPR_MID) {
                result = Tokens.LeftParen;
            } else if (whitespaceSeen) {
                if (_lexicalState == LexicalState.EXPR_CMDARG) {
                    result = Tokens.LparenArg;
                } else if (_lexicalState == LexicalState.EXPR_ARG) {
                    ReportWarning(Errors.WhitespaceBeforeArgumentParentheses);
                }
            }

            COND_PUSH(0);
            CMDARG_PUSH(0);
            _lexicalState = LexicalState.EXPR_BEG;
            return result;
        }
Developer: bclubb, Project: ironruby, Lines: 19, Source: Tokenizer.cs


Example 15: ReadBacktick

        // String: `...
        // Operator: `
        private Tokens ReadBacktick(bool cmdState) {
            if (_lexicalState == LexicalState.EXPR_FNAME) {
                _lexicalState = LexicalState.EXPR_END;
                return (Tokens)'`';
            }

            if (_lexicalState == LexicalState.EXPR_DOT) {
                _lexicalState = (cmdState) ? LexicalState.EXPR_CMDARG : LexicalState.EXPR_ARG;
                return (Tokens)'`';
            }

            _currentString = new StringContentTokenizer(StringType.ExpandsEmbedded, '`');
            _tokenValue.SetStringTokenizer(_currentString);
            return Tokens.ShellStringBegin;
        }
Developer: bclubb, Project: ironruby, Lines: 17, Source: Tokenizer.cs


Example 16: ReadCaret

        // Assignments: ^=
        // Operators: ^
        private Tokens ReadCaret() {
            if (Read('=')) {
                SetAsciiStringToken(Symbols.Xor);
                _lexicalState = LexicalState.EXPR_BEG;
                return Tokens.Assignment;
            }

            switch (_lexicalState) {
                case LexicalState.EXPR_FNAME:
                case LexicalState.EXPR_DOT:
                    _lexicalState = LexicalState.EXPR_ARG;
                    break;

                default:
                    _lexicalState = LexicalState.EXPR_BEG; 
                    break;
            }

            return (Tokens)'^';
        }
Developer: bclubb, Project: ironruby, Lines: 22, Source: Tokenizer.cs


Example 17: TokenizeQuestionmark

        // Operators: ? (conditional)
        // Literals: ?[:char:] ?{escape}
        // Errors: ?[:EOF:]
        private Tokens TokenizeQuestionmark() {
            if (_lexicalState == LexicalState.EXPR_END || _lexicalState == LexicalState.EXPR_ENDARG) {
                _lexicalState = LexicalState.EXPR_BEG;
                MarkSingleLineTokenEnd();
                return (Tokens)'?';
            }

            // ?[:EOF:]
            int c = Peek();
            if (c == -1) {
                _unterminatedToken = true;
                MarkSingleLineTokenEnd();
                ReportError(Errors.IncompleteCharacter);
                return Tokens.EndOfFile;
            }

            // TODO: ?x, ?\u1234, ?\u{123456} -> string in 1.9
            // ?[:whitespace:]
            if (IsWhiteSpace(c)) {
                if (!IS_ARG()) {
                    int c2 = 0;
                    switch (c) {
                        case ' ': c2 = 's'; break;
                        case '\n': c2 = 'n'; break;
                        case '\t': c2 = 't'; break;
                        case '\v': c2 = 'v'; break;
                        case '\r': c2 = (Peek(1) == '\n') ? 'n' : 'r'; break;
                        case '\f': c2 = 'f'; break;
                    }

                    if (c2 != 0) {
                        ReportWarning(Errors.InvalidCharacterSyntax, (char)c2);
                    }
                }
                _lexicalState = LexicalState.EXPR_BEG;
                MarkSingleLineTokenEnd();
                return (Tokens)'?';
            } 
            
            // ?{identifier}
            if ((IsLetterOrDigit(c) || c == '_') && IsIdentifier(Peek(1))) {
                _lexicalState = LexicalState.EXPR_BEG;
                MarkSingleLineTokenEnd();
                return (Tokens)'?';
            }

            Skip(c);
            
            // ?\{escape}
            if (c == '\\') {
                // TODO: ?\xx, ?\u1234, ?\u{123456} -> string in 1.9
                c = ReadEscape();

                // \M-{eoln} eats the eoln:
                MarkMultiLineTokenEnd();
            } else {
                MarkSingleLineTokenEnd();
            }

            // TODO: ?x -> string in 1.9
            c &= 0xff;
            _lexicalState = LexicalState.EXPR_END;
            _tokenValue.SetInteger(c);

            return Tokens.Integer;
        }
Developer: bclubb, Project: ironruby, Lines: 69, Source: Tokenizer.cs


Example 18: ReadSlash

        // Operators: /
        // Assignments: /=
        // Literals: /... (regex start)
        private Tokens ReadSlash(bool whitespaceSeen) {
            if (_lexicalState == LexicalState.EXPR_BEG || _lexicalState == LexicalState.EXPR_MID) {
                _currentString = new StringContentTokenizer(StringType.RegularExpression | StringType.ExpandsEmbedded, '/');
                _tokenValue.SetStringTokenizer(_currentString);
                return Tokens.RegexpBegin;
            }

            int c = Peek();
            if (c == '=') {
                Skip(c);
                SetAsciiStringToken(Symbols.Divide);
                _lexicalState = LexicalState.EXPR_BEG;
                return Tokens.Assignment;
            }

            if (IS_ARG() && whitespaceSeen) {
                if (!IsWhiteSpace(c)) {
                    ReportWarning(Errors.AmbiguousFirstArgument);
                    _currentString = new StringContentTokenizer(StringType.RegularExpression | StringType.ExpandsEmbedded, '/');
                    _tokenValue.SetStringTokenizer(_currentString);
                    return Tokens.RegexpBegin;
                }
            }

            switch (_lexicalState) {
                case LexicalState.EXPR_FNAME:
                case LexicalState.EXPR_DOT:
                    _lexicalState = LexicalState.EXPR_ARG;
                    break;

                default:
                    _lexicalState = LexicalState.EXPR_BEG; 
                    break;
            }

            return (Tokens)'/';
        }
Developer: bclubb, Project: ironruby, Lines: 40, Source: Tokenizer.cs


Example 19: ReadPipe

        // Operators: | ||
        // Assignments: |= ||=
        private Tokens ReadPipe() {
            int c = Peek();

            if (c == '|') {
                Skip(c);
                _lexicalState = LexicalState.EXPR_BEG;

                if (Read('=')) {
                    SetAsciiStringToken(Symbols.Or);
                    _lexicalState = LexicalState.EXPR_BEG;
                    return Tokens.Assignment;
                }
                return Tokens.LogicalOr;
            }

            if (c == '=') {
                Skip(c);
                SetAsciiStringToken(Symbols.BitwiseOr);
                _lexicalState = LexicalState.EXPR_BEG;
                return Tokens.Assignment;
            }

            if (_lexicalState == LexicalState.EXPR_FNAME || _lexicalState == LexicalState.EXPR_DOT) {
                _lexicalState = LexicalState.EXPR_ARG;
            } else {
                _lexicalState = LexicalState.EXPR_BEG;
            }

            return (Tokens)'|';
        }
Developer: bclubb, Project: ironruby, Lines: 32, Source: Tokenizer.cs


Example 20: ReadColon

        // Operators: :: : 
        // Literals: :... (symbol start)
        private Tokens ReadColon(bool whitespaceSeen) {
            int c = Peek();
            if (c == ':') {
                Skip(c);
                if (_lexicalState == LexicalState.EXPR_BEG || _lexicalState == LexicalState.EXPR_MID ||
                    _lexicalState == LexicalState.EXPR_CLASS || (IS_ARG() && whitespaceSeen)) {
                    
                    _lexicalState = LexicalState.EXPR_BEG;
                    return Tokens.LeadingDoubleColon;
                }

                _lexicalState = LexicalState.EXPR_DOT;
                return Tokens.SeparatingDoubleColon;
            }

            if (_lexicalState == LexicalState.EXPR_END || _lexicalState == LexicalState.EXPR_ENDARG || IsWhiteSpace(c)) {
                _lexicalState = LexicalState.EXPR_BEG;
                return (Tokens)':';
            }

            switch (c) {
                case '\'':
                    Skip(c);
                    _currentString = new StringContentTokenizer(StringType.Symbol, '\'');
                    break;

                case '"':
                    Skip(c);
                    _currentString = new StringContentTokenizer(StringType.Symbol | StringType.ExpandsEmbedded, '"');
                    break;

                default:
                    Debug.Assert(_currentString == null);
                    break;
            }

            _lexicalState = LexicalState.EXPR_FNAME;
            _tokenValue.SetStringTokenizer(_currentString);
            return Tokens.SymbolBegin;
        }
Developer: bclubb, Project: ironruby, Lines: 42, Source: Tokenizer.cs



Note: The LexicalState examples in this article were collected from source-code and documentation platforms such as GitHub and MSDocs, and the snippets are taken from open-source projects contributed by their respective developers. Copyright remains with the original authors; please consult each project's license before using or redistributing the code. Do not repost without permission.

