@@ -1,7 +1,7 @@
 import dbg from "../../debug/debug.js";
-import { Token } from "../../ast/mod.ts";
+import type { Token } from "../../ast/mod.ts";
 
-let debug = dbg("lex");
+const debug = dbg("lex");
 
 /**
  * Convert a CSS string into an array of lexical tokens.
@@ -10,21 +10,21 @@ let debug = dbg("lex");
  * @returns {Array} lexical tokens
  */
 export function lex(css: string): Token[] {
-  var start = 0; // Debug timer start.
-
-  var buffer = ""; // Character accumulator
-  var ch; // Current character
-  var column = 0; // Current source column number
-  var cursor = -1; // Current source cursor position
-  var depth = 0; // Current nesting depth
-  var line = 1; // Current source line number
-  var state = "before-selector"; // Current state
-  var stack = [state]; // State stack
-  var token: Token = {}; // Current token
-  var tokens: Token[] = []; // Token accumulator
+  let start = 0; // Debug timer start.
+
+  let buffer = ""; // Character accumulator
+  let ch: string; // Current character
+  let column = 0; // Current source column number
+  let cursor = -1; // Current source cursor position
+  let depth = 0; // Current nesting depth
+  let line = 1; // Current source line number
+  let state = "before-selector"; // Current state
+  const stack = [state]; // State stack
+  let token: Token = {}; // Current token
+  const tokens: Token[] = []; // Token accumulator
 
   // Supported @-rules, in roughly descending order of usage probability.
-  var atRules: any = [
+  const atRules: any = [
     "media",
     "keyframes",
     { name: "-webkit-keyframes", type: "keyframes", prefix: "-webkit-" },
@@ -49,7 +49,7 @@ export function lex(css: string): Token[] {
    *
    * @returns {String} The next character.
    */
-  function getCh() {
+  function getCh(): string {
     skip();
     return css[cursor];
   }
@@ -73,8 +73,8 @@ export function lex(css: string): Token[] {
    * @returns {Boolean} Whether the string was found.
    */
   function isNextString(str: string): boolean {
-    var start = cursor + 1;
-    return (str === css.slice(start, start + str.length));
+    let start = cursor + 1;
+    return str === css.slice(start, start + str.length);
   }
 
   /**
@@ -85,7 +85,7 @@ export function lex(css: string): Token[] {
    * @returns {Number|false} The position, or `false` if not found.
    */
   function find(str: string): number | boolean {
-    var pos = css.slice(cursor).indexOf(str);
+    let pos = css.slice(cursor).indexOf(str);
 
     return pos > 0 ? pos : false;
   }
@@ -117,7 +117,7 @@ export function lex(css: string): Token[] {
    * @returns {String} The removed state.
    */
   function popState(): string | undefined {
-    var removed = stack.pop();
+    let removed = stack.pop();
     state = stack[stack.length - 1];
 
     return removed;
@@ -143,7 +143,7 @@ export function lex(css: string): Token[] {
    * @returns {String} The replaced state.
    */
   function replaceState(newState: string): string {
-    var previousState = state;
+    let previousState = state;
     stack[stack.length - 1] = state = newState;
 
     return previousState;
@@ -165,7 +165,7 @@ export function lex(css: string): Token[] {
       }
       cursor++;
     } else {
-      var skipStr = css.slice(cursor, cursor + (n || 0)).split("\n");
+      let skipStr = css.slice(cursor, cursor + (n || 0)).split("\n");
       if (skipStr.length > 1) {
         line += skipStr.length - 1;
         column = 1;
@@ -221,7 +221,7 @@ export function lex(css: string): Token[] {
 
   start = Date.now();
 
-  while (ch = getCh()) {
+  while ((ch = getCh())) {
     debug(ch, getState());
 
     // column += 1;
@@ -311,7 +311,7 @@ export function lex(css: string): Token[] {
         // Tokenize a declaration
         // if value is empty skip the declaration
         if (buffer.trim().length > 0) {
-          token.value = buffer.trim(), addToken();
+          (token.value = buffer.trim()), addToken();
         }
         replaceState("before-name");
         break;
@@ -524,7 +524,7 @@ export function lex(css: string): Token[] {
         if (isNextChar("*")) {
           // Ignore comments in selectors, properties and values. They are
           // difficult to represent in the AST.
-          var pos = find("*/");
+          let pos = find("*/");
 
           if (pos && typeof pos !== "boolean") {
             skip(pos + 1);
@@ -596,11 +596,11 @@ export function lex(css: string): Token[] {
 
       default:
         // Iterate over the supported @-rules and attempt to tokenize one.
-        var tokenized = false;
-        var name;
-        var rule;
+        let tokenized = false;
+        let name;
+        let rule;
 
-        for (var j = 0, len = atRules.length; !tokenized && j < len; ++j) {
+        for (let j = 0, len = atRules.length; !tokenized && j < len; ++j) {
           rule = atRules[j];
           name = rule.name || rule;
 
@@ -683,7 +683,7 @@ export function lex(css: string): Token[] {
     }
   }
 
-  debug("ran in", (Date.now() - start) + "ms");
+  debug("ran in", Date.now() - start + "ms");
 
   return tokens;
 }
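
Usage note: a minimal sketch of how the lex() entry point changed above can be called. The import path is hypothetical; substitute the actual location of this lexer module in the repo.

    // Assumed path to the lexer file shown in the diff above.
    import { lex } from "./lex.ts";

    // Tokenize a small stylesheet and inspect the resulting Token array.
    const tokens = lex("a { color: red; }");
    for (const token of tokens) {
      console.log(token);
    }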