Results 1 - 10 of 14 for Tokenize (0.06 sec)

  1. src/cmd/asm/internal/asm/pseudo_test.go

    package asm
    
    import (
    	"strings"
    	"testing"
    
    	"cmd/asm/internal/lex"
    )
    
    func tokenize(s string) [][]lex.Token {
    	res := [][]lex.Token{}
    	if len(s) == 0 {
    		return res
    	}
    	for _, o := range strings.Split(s, ",") {
    		res = append(res, lex.Tokenize(o))
    	}
    	return res
    }
    
    func TestErroneous(t *testing.T) {
    
    	type errtest struct {
    		pseudo   string
    		operands string
    Registered: Tue Sep 09 11:13:09 UTC 2025
    - Last Modified: Tue Aug 29 07:48:38 UTC 2023
    - 3.1K bytes
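The tokenize helper above splits the operand list on commas and tokenizes each operand separately, so every operand gets its own token slice. Because cmd/asm/internal/lex is an internal package and cannot be imported outside the Go source tree, here is a minimal standalone sketch of the same shape, with a plain whitespace split standing in for lex.Tokenize:

    package main

    import (
    	"fmt"
    	"strings"
    )

    // tokenizeOperands mirrors the shape of the tokenize test helper: split the
    // operand list on commas, then tokenize each operand independently. A plain
    // strings.Fields stands in for lex.Tokenize, which lives in the internal
    // cmd/asm/internal/lex package.
    func tokenizeOperands(s string) [][]string {
    	res := [][]string{}
    	if len(s) == 0 {
    		return res
    	}
    	for _, o := range strings.Split(s, ",") {
    		res = append(res, strings.Fields(o))
    	}
    	return res
    }

    func main() {
    	fmt.Println(tokenizeOperands("$1, R2<<3")) // [[$1] [R2<<3]]
    }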
  2. src/cmd/asm/internal/asm/expr_test.go

    }
    
    func TestExpr(t *testing.T) {
    	p := NewParser(nil, nil, nil) // Expression evaluation uses none of these fields of the parser.
    	for i, test := range exprTests {
    		p.start(lex.Tokenize(test.input))
    		result := int64(p.expr())
    		if result != test.output {
    			t.Errorf("%d: %q evaluated to %d; expected %d", i, test.input, result, test.output)
    		}
    		tok := p.next()
    Registered: Tue Sep 09 11:13:09 UTC 2025
    - Last Modified: Tue Aug 29 07:48:38 UTC 2023
    - 3.2K bytes
  3. src/cmd/asm/internal/lex/lex.go

    type Macro struct {
    	name   string   // The #define name.
    	args   []string // Formal arguments.
    	tokens []Token  // Body of macro.
    }
    
    // Tokenize turns a string into a list of Tokens; used to parse the -D flag and in tests.
    func Tokenize(str string) []Token {
    	t := NewTokenizer("command line", strings.NewReader(str), nil)
    	var tokens []Token
    	for {
    		tok := t.Next()
    		if tok == scanner.EOF {
    			break
    Registered: Tue Sep 09 11:13:09 UTC 2025
    - Last Modified: Tue Aug 29 18:31:05 UTC 2023
    - 4.1K bytes
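Tokenize wraps a Tokenizer around the input string and drains it until the underlying scanner reports EOF, collecting one Token per lexeme; the scanner.EOF sentinel shows that the lexer is built on the standard library's text/scanner. A minimal standalone sketch of the same drain-until-EOF loop using text/scanner directly, since the internal lex package cannot be imported from ordinary programs:

    package main

    import (
    	"fmt"
    	"strings"
    	"text/scanner"
    )

    func main() {
    	var s scanner.Scanner
    	s.Init(strings.NewReader("MOVW R1, R2<<3"))

    	// Drain the scanner until EOF, the same loop shape used by lex.Tokenize.
    	var tokens []string
    	for tok := s.Scan(); tok != scanner.EOF; tok = s.Scan() {
    		tokens = append(tokens, s.TokenText())
    	}
    	fmt.Println(tokens) // [MOVW R1 , R2 < < 3]
    }

Note how the raw scanner delivers "<<" as two separate '<' tokens; re-spelling such multi-rune operators is what the Text() method in the next result handles.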
  4. src/cmd/asm/internal/lex/tokenizer.go

    	return i > 0 && unicode.IsDigit(ch)
    }
    
    func (t *Tokenizer) Text() string {
    	switch t.tok {
    	case LSH:
    		return "<<"
    	case RSH:
    		return ">>"
    	case ARR:
    		return "->"
    	case ROT:
    		return "@>"
    	}
    	return t.s.TokenText()
    }
    
    func (t *Tokenizer) File() string {
    	return t.base.Filename()
    }
    
    func (t *Tokenizer) Base() *src.PosBase {
    	return t.base
    }
    
    Registered: Tue Sep 09 11:13:09 UTC 2025
    - Last Modified: Thu Aug 04 20:35:21 UTC 2022
    - 3K bytes
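Text() re-spells the multi-rune assembler operators (<<, >>, ->, @>) that the underlying scanner would otherwise deliver one rune at a time. A small standalone sketch of that idea, using text/scanner's Peek to merge a doubled '<' or '>' back into a single token; this is only an illustration of the technique, not the internal Tokenizer itself:

    package main

    import (
    	"fmt"
    	"strings"
    	"text/scanner"
    )

    // scanAsm merges a doubled '<' or '>' into "<<" or ">>", roughly the kind of
    // lookahead the assembler's Tokenizer performs so that Text() can report the
    // operator as a single token.
    func scanAsm(src string) []string {
    	var s scanner.Scanner
    	s.Init(strings.NewReader(src))
    	var out []string
    	for tok := s.Scan(); tok != scanner.EOF; tok = s.Scan() {
    		text := s.TokenText()
    		if (tok == '<' || tok == '>') && s.Peek() == tok {
    			s.Scan() // consume the second rune of "<<" or ">>"
    			text += s.TokenText()
    		}
    		out = append(out, text)
    	}
    	return out
    }

    func main() {
    	fmt.Println(scanAsm("R2<<3 | R4>>1")) // [R2 << 3 | R4 >> 1]
    }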
  5. src/main/java/org/codelibs/core/text/Tokenizer.java

        static {
            setup(defaultCtype);
        }
    
        /**
         * Creates a {@link Tokenizer}.
         *
         * @param str
         *            The string. Must not be {@literal null}.
         */
        public Tokenizer(final String str) {
            this(str, defaultCtype);
        }
    
        /**
         * Creates a {@link Tokenizer}.
         *
         * @param str
         *            The string. Must not be {@literal null}.
    Registered: Fri Sep 05 20:58:11 UTC 2025
    - Last Modified: Sat Jul 05 00:11:05 UTC 2025
    - 8.8K bytes
  6. src/main/java/jcifs/smb1/http/Handler.java

                if (handler == null) {
                    final String path = System.getProperty(HANDLER_PKGS_PROPERTY);
                    final StringTokenizer tokenizer = new StringTokenizer(path, "|");
                    while (tokenizer.hasMoreTokens()) {
                        final String provider = tokenizer.nextToken().trim();
                        if (provider.equals("jcifs.smb1")) {
                            continue;
                        }
    Registered: Sun Sep 07 00:10:21 UTC 2025
    - Last Modified: Sat Aug 16 01:32:48 UTC 2025
    - 6.1K bytes
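For readers who don't know java.util.StringTokenizer: the loop above walks a "|"-separated list of protocol-handler packages, trims each entry, and skips jcifs.smb1's own package so it never resolves itself. A rough Go equivalent of the same scan (strings.Split keeps empty entries, unlike StringTokenizer, so they are filtered explicitly):

    package main

    import (
    	"fmt"
    	"strings"
    )

    // providerPackages trims each "|"-separated entry and skips jcifs.smb1's own
    // package, mirroring the StringTokenizer loop above.
    func providerPackages(path string) []string {
    	var out []string
    	for _, p := range strings.Split(path, "|") {
    		p = strings.TrimSpace(p)
    		if p == "" || p == "jcifs.smb1" {
    			continue
    		}
    		out = append(out, p)
    	}
    	return out
    }

    func main() {
    	fmt.Println(providerPackages("sun.net.www.protocol|jcifs.smb1"))
    	// [sun.net.www.protocol]
    }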
  7. src/test/java/org/codelibs/core/text/TokenizerTest.java

        public void testEOF() throws Exception {
            final Tokenizer tokenizer = new Tokenizer("");
            assertThat(tokenizer.nextToken(), is(Tokenizer.TT_EOF));
            assertThat(tokenizer.nextToken(), is(Tokenizer.TT_EOF));
        }
    
        /**
         * @throws Exception
         */
        @Test
        public void testWhitespace() throws Exception {
            final Tokenizer tokenizer = new Tokenizer("\t       \n");
    Registered: Fri Sep 05 20:58:11 UTC 2025
    - Last Modified: Sat May 10 01:32:17 UTC 2025
    - 2K bytes
  8. src/main/java/jcifs/http/Handler.java

                if (handler == null) {
                    final String path = System.getProperty(HANDLER_PKGS_PROPERTY);
                    final StringTokenizer tokenizer = new StringTokenizer(path, "|");
                    while (tokenizer.hasMoreTokens()) {
                        final String provider = tokenizer.nextToken().trim();
                        if (provider.equals("jcifs")) {
                            continue;
                        }
    Registered: Sun Sep 07 00:10:21 UTC 2025
    - Last Modified: Sat Aug 16 01:32:48 UTC 2025
    - 6.9K bytes
  9. okhttp/src/commonJvmAndroid/kotlin/okhttp3/internal/http/HttpHeaders.kt

     * [TOKEN_DELIMITERS]. Returns null if the buffer is empty or prefixed with a delimiter.
     */
    private fun Buffer.readToken(): String? {
      var tokenSize = indexOfElement(TOKEN_DELIMITERS)
      if (tokenSize == -1L) tokenSize = size
    
      return when {
        tokenSize != 0L -> readUtf8(tokenSize)
        else -> null
      }
    }
    
    fun CookieJar.receiveHeaders(
      url: HttpUrl,
      headers: Headers,
    ) {
    Registered: Fri Sep 05 11:42:10 UTC 2025
    - Last Modified: Mon May 05 16:01:00 UTC 2025
    - 7.2K bytes
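readToken consumes everything up to the first byte in TOKEN_DELIMITERS, treats "no delimiter found" as "take the rest of the buffer", and returns null when the buffer is empty or starts with a delimiter. A rough Go rendering of the same logic, with okio's Buffer replaced by a plain string and a stand-in delimiter set (the real TOKEN_DELIMITERS constant is not shown in this excerpt):

    package main

    import (
    	"fmt"
    	"strings"
    )

    // tokenDelimiters is a stand-in; the actual TOKEN_DELIMITERS value is not
    // visible in the excerpt above.
    const tokenDelimiters = "\t ,=\""

    // readToken returns the text up to the first delimiter, or ok=false if the
    // input is empty or starts with a delimiter, mirroring the Kotlin function.
    func readToken(s string) (token string, ok bool) {
    	size := strings.IndexAny(s, tokenDelimiters)
    	if size == -1 {
    		size = len(s) // no delimiter: consume the whole input
    	}
    	if size == 0 {
    		return "", false // empty input or leading delimiter
    	}
    	return s[:size], true
    }

    func main() {
    	fmt.Println(readToken("Basic realm=example")) // "Basic", true
    	fmt.Println(readToken(",next"))               // "", false (leading delimiter)
    }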
  10. src/cmd/asm/internal/asm/line_test.go

    	})
    }
    
    func testBadInstParser(t *testing.T, goarch string, tests []badInstTest) {
    	for i, test := range tests {
    		arch, ctxt := setArch(goarch)
    		tokenizer := lex.NewTokenizer("", strings.NewReader(test.input+"\n"), nil)
    		parser := NewParser(ctxt, arch, tokenizer)
    
    		err := tryParse(t, func() {
    			parser.Parse()
    		})
    
    		switch {
    		case err == nil:
    Registered: Tue Sep 09 11:13:09 UTC 2025
    - Last Modified: Tue Aug 29 07:48:38 UTC 2023
    - 1.9K bytes