TokenizerRepeater.java

/*
 * Copyright (c) 2015-2020, Stein Eldar Johnsen
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package net.morimekta.lexer;

import java.io.IOException;
import java.util.ArrayDeque;
import java.util.List;
import java.util.Objects;
import java.util.Queue;
import java.util.stream.Collectors;

import static java.util.Objects.requireNonNull;
import static net.morimekta.strings.EscapeUtil.javaEscape;

/**
 * The tokenizer repeater replays a given list of tokens in the order they
 * were provided.
 *
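 * <p>A minimal usage sketch: wrap a list of tokens and read them back with
 * {@link #parseNextToken()}. The {@code MyType} and {@code MyToken} names
 * below are hypothetical application types (a token type enum and a
 * {@link Token} implementation), not part of this library.
 *
 * <pre>{@code
 * // 'alreadyRead' holds tokens previously produced by another tokenizer.
 * TokenizerRepeater<MyType, MyToken> repeater = new TokenizerRepeater<>(alreadyRead);
 *
 * MyToken token;
 * while ((token = repeater.parseNextToken()) != null) {
 *     // Each token comes back once, in the original order.
 * }
 * }</pre>
 *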
 * @param <TT> TokenType type.
 * @param <T> Token type.
 */
public class TokenizerRepeater<TT, T extends Token<TT>> implements Tokenizer<TT, T> {
    private final Queue<T> unread;
    private       T        lastToken;

    /**
     * Create a repeater over the given list of tokens.
     *
     * @param tokens List of tokens to be replayed. Must not be empty.
     * @throws IllegalArgumentException If the token list is empty.
     */
    public TokenizerRepeater(List<T> tokens) {
        if (requireNonNull(tokens, "tokens == null").isEmpty()) {
            throw new IllegalArgumentException("Empty token list cannot be repeated");
        }

        unread = new ArrayDeque<>();
        unread.addAll(tokens);
    }

    @Override
    public T parseNextToken() {
        if (!unread.isEmpty()) {
            lastToken = unread.poll();
            return lastToken;
        }
        return null;
    }

    @Override
    public T readUntil(CharSequence terminator, TT type, boolean allowEof) throws IOException {
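        // The repeater only holds already parsed tokens, so there is no raw input
        // to scan for the terminator; it just verifies that the next buffered token
        // has the expected type and returns it.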
        if (!unread.isEmpty()) {
            if (!Objects.equals(unread.peek().type(), type)) {
                throw new LexerException("type mismatch: " + type + " != " + unread.peek().type());
            }
            lastToken = requireNonNull(unread.poll());
            return lastToken;
        }
        if (allowEof) return null;
        throw new LexerException(currentLine(), currentLineNo(), currentLinePos(), 1,
                                 "End of stream reading until '" + terminator + "'");
    }

    @Override
    public int currentLineNo() {
        if (lastToken != null) {
            return lastToken.lineNo();
        }
        T next = requireNonNull(unread.peek());
        return next.lineNo();
    }

    @Override
    public int currentLinePos() {
        if (lastToken != null) {
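            // Report the position just past the token most recently returned.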
            return lastToken.linePos() + lastToken.length();
        }
        T next = requireNonNull(unread.peek());
        return next.linePos();
    }

    @Override
    public CharSequence currentLine() {
        if (lastToken != null) {
            return lastToken.line();
        }
        T next = requireNonNull(unread.peek());
        return next.line();
    }

    @Override
    public String toString() {
        return getClass().getSimpleName() + "{" +
               unread.stream()
                     .map(TokenizerRepeater::quote)
                     .collect(Collectors.joining(", ")) + "}";
    }

    private static String quote(Token<?> t) {
        return "'" + javaEscape(t) + "'";
    }
}