@@ -5,14 +5,14 @@ var TIMER = false; // `true` to time calls to `parse()` and print the results.
 import dbg from './debug.js'
 let debug = dbg('parse');
 
-import lex from './lexer.js';
+import lex from './lexer.ts';
 
 export default parse;
 
-var _comments; // Whether comments are allowed.
-var _depth;    // Current block nesting depth.
-var _position; // Whether to include line/column position.
-var _tokens;   // Array of lexical tokens.
+var _comments: boolean; // Whether comments are allowed.
+var _depth: number;     // Current block nesting depth.
+var _position: any;     // Whether to include line/column position.
+var _tokens: any[];     // Array of lexical tokens.
 
 /**
  * Convert a CSS string or array of lexical tokens into a `stringify`-able AST.
@@ -22,8 +22,8 @@ var _tokens; // Array of lexical tokens.
  * @param {Boolean} [options.comments=false] allow comment nodes in the AST
  * @returns {Object} `stringify`-able AST
  */
-function parse(css, options) {
-  var start; // Debug timer start.
+function parse(css: string | any[], options: any) {
+  var start = 0; // Debug timer start.
 
   options || (options = {});
   _comments = !!options.comments;
@@ -65,12 +65,12 @@ function parse(css, options) {
  * already in the token, or that will be added to the token.
  * @returns {Object} AST node
  */
-function astNode(token, override) {
-  override || (override = {});
+function astNode(token: any, overrd?: any): any {
+  let override: any = overrd || {};
 
   var key;
   var keys = ['type', 'name', 'value'];
-  var node = {};
+  var node: any = {};
 
   // Avoiding [].forEach for performance reasons.
   for (var i = 0; i < keys.length; ++i) {
@@ -122,12 +122,12 @@ function next() {
  * @param {Object} token @-group lexical token
  * @returns {Object} @-group AST node
  */
-function parseAtGroup(token) {
+function parseAtGroup(token: any): any {
   _depth = _depth + 1;
 
   // As the @-group token is assembled, relevant token values are captured here
   // temporarily. They will later be used as `tokenize()` overrides.
-  var overrides = {};
+  var overrides: any = {};
 
   switch (token.type) {
     case 'font-face':
@@ -154,7 +154,7 @@ function parseAtGroup(token) {
  * @param {Object} token @import lexical token
  * @returns {Object} @import AST node
  */
-function parseAtImport(token) {
+function parseAtImport(token: any): any {
   return astNode(token);
 }
 
@@ -164,7 +164,7 @@ function parseAtImport(token) {
  * @param {Object} token @charset lexical token
  * @returns {Object} @charset node
  */
-function parseCharset(token) {
+function parseCharset(token: any): any {
   return astNode(token);
 }
 
@@ -174,11 +174,11 @@ function parseCharset(token) {
  * @param {Object} token comment lexical token
  * @returns {Object} comment node
  */
-function parseComment(token) {
+function parseComment(token: any): any {
   return astNode(token, { text: token.text });
 }
 
-function parseNamespace(token) {
+function parseNamespace(token: any): any {
   return astNode(token);
 }
 
@@ -187,7 +187,7 @@ function parseNamespace(token) {
  *
  * @returns {Object} property node
  */
-function parseProperty(token) {
+function parseProperty(token: any): any {
   return astNode(token);
 }
 
@@ -197,15 +197,16 @@ function parseProperty(token) {
  * @param {Object} token selector lexical token
  * @returns {Object} selector node
  */
-function parseSelector(token) {
-  function trim(str) {
+function parseSelector(token: any): any {
+  function trim(str: string) {
     return str.trim();
   }
 
   return astNode(token, {
     type: 'rule',
     selectors: token.text.split(',').map(trim),
-    declarations: parseDeclarations(token)
+    // parseDeclarations(token)
+    declarations: parseDeclarations()
   });
 }
 
@@ -214,7 +215,7 @@ function parseSelector(token) {
  *
  * @returns {Object|undefined} AST node
  */
-function parseToken(token) {
+function parseToken(token: any): any {
   switch (token.type) {
     // Cases are listed in roughly descending order of probability.
     case 'property': return parseProperty(token);
@@ -255,7 +256,7 @@ function parseToken(token) {
  * @returns {Boolean} `true` if the token should be parsed, `false` otherwise
  * @return {Array} AST nodes
  */
-function parseTokensWhile(conditionFn) {
+function parseTokensWhile(conditionFn: (token: any) => boolean | number): any {
   var node;
   var nodes = [];
   var token;
@@ -278,7 +279,7 @@ function parseTokensWhile(conditionFn) {
  *
  * @returns {Array} declaration nodes
  */
-function parseDeclarations() {
+function parseDeclarations(): any {
   return parseTokensWhile(function (token) {
     return (token.type === 'property' || token.type === 'comment');
   });
@@ -289,6 +290,6 @@ function parseDeclarations() {
  *
  * @returns {Array} rule nodes
  */
-function parseRules() {
+function parseRules(): any {
   return parseTokensWhile(function () { return _depth; });
 }
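
For reference, a minimal usage sketch of the converted entry point. Only the `parse(css, options)` signature, the module's default export, and the `comments` option are taken from the diff above; the `./parse.js` import path and the sample CSS are illustrative assumptions.

import parse from './parse.js'; // illustrative path; the module default-exports `parse`

// Parse a CSS string into a `stringify`-able AST, keeping comment nodes.
const ast = parse('a { color: red; } /* note */', { comments: true });
console.log(JSON.stringify(ast, null, 2));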