Results 11 - 20 of 501 for toTokens (2.96 sec)

  1. src/cmd/asm/internal/lex/slice.go

    package lex
    
    import (
    	"text/scanner"
    
    	"cmd/internal/src"
    )
    
    // A Slice reads from a slice of Tokens.
    type Slice struct {
    	tokens []Token
    	base   *src.PosBase
    	line   int
    	pos    int
    }
    
    func NewSlice(base *src.PosBase, line int, tokens []Token) *Slice {
    	return &Slice{
    		tokens: tokens,
    		base:   base,
    		line:   line,
    		pos:    -1, // Next will advance to zero.
    	}
    }
    
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Thu Jun 29 22:49:50 UTC 2023
    - 1.6K bytes
    - Viewed (0)
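    The pos field starting at -1 pairs with a Next method that advances before reading. A hypothetical sketch of that accessor pattern, with an invented signature rather than the file's actual method:

    // next is an illustrative accessor for a slice-backed token reader: it
    // pre-increments pos (hence the -1 start) and reports whether a token
    // remains. Not the real Slice.Next from cmd/asm/internal/lex.
    func (s *Slice) next() (Token, bool) {
    	s.pos++
    	if s.pos >= len(s.tokens) {
    		return Token{}, false
    	}
    	return s.tokens[s.pos], true
    }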
  2. internal/config/lambda/event/arn.go

    		return nil, &ErrInvalidARN{s}
    	}
    
    	tokens := strings.Split(s, ":")
    	if len(tokens) != 6 {
    		return nil, &ErrInvalidARN{s}
    	}
    
    	if tokens[4] == "" || tokens[5] == "" {
    		return nil, &ErrInvalidARN{s}
    	}
    
    	return &ARN{
    		region: tokens[3],
    		TargetID: TargetID{
    			ID:   tokens[4],
    			Name: tokens[5],
    		},
    	}, nil
    Registered: Sun Jun 16 00:44:34 UTC 2024
    - Last Modified: Tue Mar 07 16:12:41 UTC 2023
    - 1.6K bytes
    - Viewed (0)
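    The excerpt validates a colon-separated ARN by splitting on ":" and checking the field count and the last two fields. The same pattern as a self-contained sketch, with plain return values standing in for the package's ErrInvalidARN and TargetID types:

    package main

    import (
    	"fmt"
    	"strings"
    )

    // parseARN mirrors the validation in the excerpt: six colon-separated
    // fields, with a non-empty ID and name in the last two. Names and the
    // return shape here are illustrative, not the package's API.
    func parseARN(s string) (region, id, name string, err error) {
    	tokens := strings.Split(s, ":")
    	if len(tokens) != 6 || tokens[4] == "" || tokens[5] == "" {
    		return "", "", "", fmt.Errorf("invalid ARN: %q", s)
    	}
    	return tokens[3], tokens[4], tokens[5], nil
    }

    func main() {
    	fmt.Println(parseARN("arn:minio:sqs:us-east-1:1:webhook"))
    }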
  3. src/go/token/token.go

    // Package token defines constants representing the lexical tokens of the Go
    // programming language and basic operations on tokens (printing, predicates).
    package token
    
    import (
    	"strconv"
    	"unicode"
    	"unicode/utf8"
    )
    
    // Token is the set of lexical tokens of the Go programming language.
    type Token int
    
    // The list of tokens.
    const (
    	// Special tokens
    	ILLEGAL Token = iota
    	EOF
    	COMMENT
    
    	literal_beg
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Thu Oct 19 12:02:03 UTC 2023
    - 6.4K bytes
    - Viewed (0)
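    Unlike the internal packages elsewhere in these results, go/token is an importable standard-library package, so the constants above can be exercised directly; a small example using its Lookup function and predicates:

    package main

    import (
    	"fmt"
    	"go/token"
    )

    func main() {
    	// Lookup maps an identifier to its keyword token, or IDENT otherwise.
    	fmt.Println(token.Lookup("func"), token.Lookup("tokens")) // func IDENT

    	// Predicates classify tokens into the groups laid out in the const block.
    	fmt.Println(token.ADD.IsOperator())    // true
    	fmt.Println(token.IsKeyword("return")) // true
    	fmt.Println(token.EOF, token.COMMENT)  // EOF COMMENT
    }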
  4. src/main/java/org/codelibs/fess/suggest/index/contents/DefaultContentsParser.java

            if (langs == null || langs.length == 0) {
                final List<AnalyzeToken> tokens = analyzer.analyze(searchWord, "", null);
                return tokens == null || tokens.size() == 0;
            }
            for (final String lang : langs) {
                final List<AnalyzeToken> tokens = analyzer.analyze(searchWord, field, lang);
                if (tokens != null && tokens.size() > 0) {
                    return false;
                }
            }
    Registered: Wed Jun 12 15:38:08 UTC 2024
    - Last Modified: Thu Feb 22 01:36:54 UTC 2024
    - 13.7K bytes
    - Viewed (0)
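    The excerpt decides whether a search word yields no tokens for any configured language: with no languages it analyzes the word once, otherwise it returns false as soon as any language produces tokens. A hedged Go sketch of the same check, where the Analyzer interface and its Analyze signature are assumptions rather than the Fess suggest API:

    // Analyzer stands in for the suggester's analyzer used in the excerpt.
    type Analyzer interface {
    	Analyze(word, field, lang string) []string
    }

    // hasNoTokens mirrors the excerpt's logic (illustrative only).
    func hasNoTokens(a Analyzer, word, field string, langs []string) bool {
    	if len(langs) == 0 {
    		return len(a.Analyze(word, "", "")) == 0
    	}
    	for _, lang := range langs {
    		if len(a.Analyze(word, field, lang)) > 0 {
    			return false
    		}
    	}
    	return true
    }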
  5. platforms/documentation/docs/src/samples/templates/java-application/src/main/java/org/gradle/sample/app/Main.java

    import static org.gradle.sample.app.MessageUtils.getMessage;
    
    public class Main {
        public static void main(String[] args) {
            LinkedList tokens;
            tokens = split(getMessage());
            System.out.println(join(tokens));
        }
    Registered: Wed Jun 12 18:38:38 UTC 2024
    - Last Modified: Mon Nov 27 17:53:42 UTC 2023
    - 438 bytes
    - Viewed (0)
  6. platforms/native/language-native/src/test/groovy/org/gradle/language/nativeplatform/internal/incremental/sourceparser/RegexBackedCSourceParserTest.groovy

            'a(,)'                                             | 'a'     | [tokens(''), tokens('')]
            'a((a,b))'                                         | 'a'     | [tokens('(a,b)')]
            'a((a,b,(c, d)))'                                  | 'a'     | [tokens('(a,b,(c,d))')]
            'a( ( a ,,, b ), c)'                               | 'a'     | [tokens('(a,,,b)'), token('c')]
    Registered: Wed Jun 12 18:38:38 UTC 2024
    - Last Modified: Thu Nov 16 20:20:03 UTC 2023
    - 34.3K bytes
    - Viewed (0)
  7. src/compress/flate/huffman_bit_writer.go

    	// Write the tokens.
    	w.writeTokens(tokens, w.literalEncoding.codes, w.offsetEncoding.codes)
    }
    
    // indexTokens indexes a slice of tokens, and updates
    // literalFreq and offsetFreq, and generates literalEncoding
    // and offsetEncoding.
    // The number of literal and offset tokens is returned.
    func (w *huffmanBitWriter) indexTokens(tokens []token) (numLiterals, numOffsets int) {
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Thu Sep 29 22:59:14 UTC 2022
    - 18.4K bytes
    - Viewed (0)
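    indexTokens and writeTokens are unexported internals of compress/flate, so they cannot be called from outside the package; the exported path that exercises this token writer is flate.NewWriter. A minimal usage sketch:

    package main

    import (
    	"bytes"
    	"compress/flate"
    	"fmt"
    	"log"
    )

    func main() {
    	var buf bytes.Buffer
    	// The DEFLATE encoder turns input into literal/offset tokens which the
    	// huffmanBitWriter above indexes and writes out as Huffman codes.
    	w, err := flate.NewWriter(&buf, flate.BestCompression)
    	if err != nil {
    		log.Fatal(err)
    	}
    	if _, err := w.Write(bytes.Repeat([]byte("tokens "), 100)); err != nil {
    		log.Fatal(err)
    	}
    	if err := w.Close(); err != nil {
    		log.Fatal(err)
    	}
    	fmt.Println("compressed size:", buf.Len())
    }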
  8. platforms/software/dependency-management/src/main/java/org/gradle/api/internal/artifacts/repositories/PatternHelper.java

            Map<String, Object> tokens = new HashMap<>(attributes);
            if (tokens.containsKey(ORGANISATION_KEY) && !tokens.containsKey(ORGANISATION_KEY2)) {
                tokens.put(ORGANISATION_KEY2, tokens.get(ORGANISATION_KEY));
            }
            if (tokens.containsKey(ORGANISATION_KEY)
                && !tokens.containsKey(ORGANISATION_PATH_KEY)) {
                String org = (String) tokens.get(ORGANISATION_KEY);
    Registered: Wed Jun 12 18:38:38 UTC 2024
    - Last Modified: Tue Oct 10 21:10:11 UTC 2023
    - 6.1K bytes
    - Viewed (0)
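    The excerpt copies the attribute map and backfills alternate organisation keys before pattern substitution. The same defaulting step sketched in Go, with invented key names in place of ORGANISATION_KEY and its variants:

    // fillFallbackKeys copies attrs and, when "organisation" is set but an
    // alternate spelling is not, carries the value across. Key names are
    // illustrative, not Gradle's actual constants.
    func fillFallbackKeys(attrs map[string]string) map[string]string {
    	tokens := make(map[string]string, len(attrs))
    	for k, v := range attrs {
    		tokens[k] = v
    	}
    	if org, ok := tokens["organisation"]; ok {
    		if _, present := tokens["organization"]; !present {
    			tokens["organization"] = org
    		}
    	}
    	return tokens
    }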
  9. platforms/software/build-init/src/main/resources/org/gradle/buildinit/tasks/templates/groovyapplication/multi/app/App.groovy.template

    import static ${basePackagePrefix.raw}app.MessageUtils.getMessage
    
    import org.apache.commons.text.WordUtils
    
    class App {
        static void main(String[] args) {
            LinkedList tokens
            tokens = split(getMessage())
            String result = join(tokens)
            println(WordUtils.capitalize(result))
        }
    Registered: Wed Jun 12 18:38:38 UTC 2024
    - Last Modified: Tue Dec 26 19:39:09 UTC 2023
    - 548 bytes
    - Viewed (0)
  10. src/cmd/asm/internal/lex/lex.go

    	args   []string // Formal arguments.
    	tokens []Token  // Body of macro.
    }
    
    // Tokenize turns a string into a list of Tokens; used to parse the -D flag and in tests.
    func Tokenize(str string) []Token {
    	t := NewTokenizer("command line", strings.NewReader(str), nil)
    	var tokens []Token
    	for {
    		tok := t.Next()
    		if tok == scanner.EOF {
    			break
    		}
    		tokens = append(tokens, Make(tok, t.Text()))
    	}
    	return tokens
    Registered: Wed Jun 12 16:32:35 UTC 2024
    - Last Modified: Tue Aug 29 18:31:05 UTC 2023
    - 4.1K bytes
    - Viewed (0)
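    The lex package is internal to cmd/asm, but the loop above is built on the standard text/scanner package, which ordinary code can use the same way; a small sketch of the same scan-until-EOF shape:

    package main

    import (
    	"fmt"
    	"strings"
    	"text/scanner"
    )

    func main() {
    	var s scanner.Scanner
    	s.Init(strings.NewReader("MOVQ $1, AX"))
    	// Collect token text until EOF, as Tokenize does, minus the
    	// assembler-specific Token and Make helpers.
    	var tokens []string
    	for tok := s.Scan(); tok != scanner.EOF; tok = s.Scan() {
    		tokens = append(tokens, s.TokenText())
    	}
    	fmt.Println(tokens)
    }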