/**
 * Logback: the reliable, generic, fast and flexible logging framework.
 * Copyright (C) 1999-2015, QOS.ch. All rights reserved.
 *
 * This program and the accompanying materials are dual-licensed under
 * either the terms of the Eclipse Public License v1.0 as published by
 * the Eclipse Foundation
 *
 *   or (per the licensee's choosing)
 *
 * under the terms of the GNU Lesser General Public License version 2.1
 * as published by the Free Software Foundation.
 */
package ch.qos.logback.core.subst;

import ch.qos.logback.core.spi.ScanException;
import org.junit.Test;

import java.util.ArrayList;
import java.util.List;

import static org.junit.Assert.assertEquals;

public class TokenizerTest {

    List<Token> witnessList = new ArrayList<Token>();

    @Test
    public void literalOnly() throws ScanException {
        String input = "abc";
        Tokenizer tokenizer = new Tokenizer(input);
        List<Token> tokenList = tokenizer.tokenize();
        witnessList.add(new Token(Token.Type.LITERAL, input));
        assertEquals(witnessList, tokenList);
    }

    @Test
    public void literalWithAccolades() throws ScanException {
        String input0 = "%logger";
        String input1 = "24";
        String input2 = " - %m";
        String input = input0 + "{" + input1 + "}" + input2;
        Tokenizer tokenizer = new Tokenizer(input);
        List<Token> tokenList = tokenizer.tokenize();
        witnessList.add(new Token(Token.Type.LITERAL, input0));
        witnessList.add(Token.CURLY_LEFT_TOKEN);
        witnessList.add(new Token(Token.Type.LITERAL, input1));
        witnessList.add(Token.CURLY_RIGHT_TOKEN);
        witnessList.add(new Token(Token.Type.LITERAL, input2));
        assertEquals(witnessList, tokenList);
    }

    @Test
    public void simpleVariable() throws ScanException {
        String input = "${abc}";
        Tokenizer tokenizer = new Tokenizer(input);
        List<Token> tokenList = tokenizer.tokenize();
        witnessList.add(Token.START_TOKEN);
        witnessList.add(new Token(Token.Type.LITERAL, "abc"));
        witnessList.add(Token.CURLY_RIGHT_TOKEN);
        assertEquals(witnessList, tokenList);
    }

    @Test
    public void mix() throws ScanException {
        String input = "a${b}c";
        Tokenizer tokenizer = new Tokenizer(input);
        List<Token> tokenList = tokenizer.tokenize();
        witnessList.add(new Token(Token.Type.LITERAL, "a"));
        witnessList.add(Token.START_TOKEN);
        witnessList.add(new Token(Token.Type.LITERAL, "b"));
        witnessList.add(Token.CURLY_RIGHT_TOKEN);
        witnessList.add(new Token(Token.Type.LITERAL, "c"));
        assertEquals(witnessList, tokenList);
    }

    @Test
    public void nested() throws ScanException {
        String input = "a${b${c}}";
        Tokenizer tokenizer = new Tokenizer(input);
        List<Token> tokenList = tokenizer.tokenize();
        witnessList.add(new Token(Token.Type.LITERAL, "a"));
        witnessList.add(Token.START_TOKEN);
        witnessList.add(new Token(Token.Type.LITERAL, "b"));
        witnessList.add(Token.START_TOKEN);
        witnessList.add(new Token(Token.Type.LITERAL, "c"));
        witnessList.add(Token.CURLY_RIGHT_TOKEN);
        witnessList.add(Token.CURLY_RIGHT_TOKEN);
        assertEquals(witnessList, tokenList);
    }
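
    // A sketch, not part of the original suite: two adjacent variable references.
    // The expected token sequence assumes the tokenizer simply emits another
    // START_TOKEN when the next "${" is encountered, mirroring the cases above.
    @Test
    public void adjacentVariablesSketch() throws ScanException {
        String input = "${a}${b}";
        Tokenizer tokenizer = new Tokenizer(input);
        List<Token> tokenList = tokenizer.tokenize();
        witnessList.add(Token.START_TOKEN);
        witnessList.add(new Token(Token.Type.LITERAL, "a"));
        witnessList.add(Token.CURLY_RIGHT_TOKEN);
        witnessList.add(Token.START_TOKEN);
        witnessList.add(new Token(Token.Type.LITERAL, "b"));
        witnessList.add(Token.CURLY_RIGHT_TOKEN);
        assertEquals(witnessList, tokenList);
    }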

    @Test
    public void basicDefaultSeparator() throws ScanException {
        String input = "${a:-b}";
        Tokenizer tokenizer = new Tokenizer(input);
        List<Token> tokenList = tokenizer.tokenize();
        witnessList.add(Token.START_TOKEN);
        witnessList.add(new Token(Token.Type.LITERAL, "a"));
        witnessList.add(Token.DEFAULT_SEP_TOKEN);
        witnessList.add(new Token(Token.Type.LITERAL, "b"));
        witnessList.add(Token.CURLY_RIGHT_TOKEN);
        assertEquals(witnessList, tokenList);
    }
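
    // A sketch, not part of the original suite: a default value that is itself a
    // variable reference. The expected token sequence is an assumption derived
    // from the basicDefaultSeparator and nested cases; it is not asserted
    // anywhere else in this file.
    @Test
    public void defaultValueIsVariableSketch() throws ScanException {
        String input = "${a:-${b}}";
        Tokenizer tokenizer = new Tokenizer(input);
        List<Token> tokenList = tokenizer.tokenize();
        witnessList.add(Token.START_TOKEN);
        witnessList.add(new Token(Token.Type.LITERAL, "a"));
        witnessList.add(Token.DEFAULT_SEP_TOKEN);
        witnessList.add(Token.START_TOKEN);
        witnessList.add(new Token(Token.Type.LITERAL, "b"));
        witnessList.add(Token.CURLY_RIGHT_TOKEN);
        witnessList.add(Token.CURLY_RIGHT_TOKEN);
        assertEquals(witnessList, tokenList);
    }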

    @Test
    public void colon() throws ScanException {
        String input = "a:b";
        Tokenizer tokenizer = new Tokenizer(input);
        List<Token> tokenList = tokenizer.tokenize();
        witnessList.add(new Token(Token.Type.LITERAL, "a"));
        witnessList.add(new Token(Token.Type.LITERAL, ":b"));
        assertEquals(witnessList, tokenList);
    }

    // see LOGBACK-744
    @Test
    public void colonFollowedByDollar() throws ScanException {
        String input = "a:${b}";
        Tokenizer tokenizer = new Tokenizer(input);
        List<Token> tokenList = tokenizer.tokenize();
        witnessList.add(new Token(Token.Type.LITERAL, "a"));
        witnessList.add(new Token(Token.Type.LITERAL, ":"));
        witnessList.add(Token.START_TOKEN);
        witnessList.add(new Token(Token.Type.LITERAL, "b"));
        witnessList.add(Token.CURLY_RIGHT_TOKEN);
        assertEquals(witnessList, tokenList);
    }

    @Test
    public void defaultSeparatorOutsideVariable() throws ScanException {
        String input = "{a:-b}";
        Tokenizer tokenizer = new Tokenizer(input);
        List<Token> tokenList = tokenizer.tokenize();
        witnessList.add(Token.CURLY_LEFT_TOKEN);
        witnessList.add(new Token(Token.Type.LITERAL, "a"));
        witnessList.add(Token.DEFAULT_SEP_TOKEN);
        witnessList.add(new Token(Token.Type.LITERAL, "b"));
        witnessList.add(Token.CURLY_RIGHT_TOKEN);
        assertEquals(witnessList, tokenList);
    }

    @Test
    public void literalContainingColon() throws ScanException {
        String input = "a:b";
        Tokenizer tokenizer = new Tokenizer(input);
        List<Token> tokenList = tokenizer.tokenize();
        witnessList.add(new Token(Token.Type.LITERAL, "a"));
        witnessList.add(new Token(Token.Type.LITERAL, ":b"));
        assertEquals(witnessList, tokenList);
    }

    @Test
    public void literalEndingWithColon_LOGBACK_1140() throws ScanException {
        String input = "a:";
        Tokenizer tokenizer = new Tokenizer(input);
        List<Token> tokenList = tokenizer.tokenize();
        witnessList.add(new Token(Token.Type.LITERAL, "a"));
        witnessList.add(new Token(Token.Type.LITERAL, ":"));
        assertEquals(witnessList, tokenList);
    }

    @Test
    public void literalEndingWithDollar_LOGBACK_1149() throws ScanException {
        String input = "a$";
        Tokenizer tokenizer = new Tokenizer(input);
        List<Token> tokenList = tokenizer.tokenize();
        witnessList.add(new Token(Token.Type.LITERAL, "a"));
        witnessList.add(new Token(Token.Type.LITERAL, "$"));
        assertEquals(witnessList, tokenList);
    }

    @Test
    public void LOGBACK_1101() throws ScanException {
        String input = "a:{y}";
        Tokenizer tokenizer = new Tokenizer(input);
        List<Token> tokenList = tokenizer.tokenize();
        witnessList.add(new Token(Token.Type.LITERAL, "a"));
        witnessList.add(new Token(Token.Type.LITERAL, ":"));
        witnessList.add(Token.CURLY_LEFT_TOKEN);
        witnessList.add(new Token(Token.Type.LITERAL, "y"));
        witnessList.add(Token.CURLY_RIGHT_TOKEN);
        assertEquals(witnessList, tokenList);
    }

}