Skip to content

Commit 6ac7375

Browse files
authored
Lots of fixes/chores (#1)
* fix: use `const` & `let` instead of `var`
* fix: use `const` for unreassigned immutables
* fix: set default value of options instead of `||`
* fix: explicitly state `any` types for the `isolatedModules` flag
* fix: reduce code duplication
* fix: use `import type` / `export type` for types
* fix: remove unused imports
* feat: use the `Token` type
* fix: add an actual `mod.ts` entry point for using the module
1 parent 5589cd5 commit 6ac7375

File tree

9 files changed

+73
-74
lines changed

9 files changed

+73
-74
lines changed

ast/ast.ts

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
import { Rule } from "./node.ts";
1+
import type { Rule } from "./node.ts";
22

33
type Type = "stylesheet";
44

ast/mod.ts

+3-3
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,3 @@
1-
export * from "./ast.ts";
2-
export * from "./node.ts";
3-
export * from "./token.ts";
1+
export type { AST } from "./ast.ts";
2+
export type { Rule, Decl } from "./node.ts";
3+
export type { Position, Token } from "./token.ts";

ast/node.ts

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
import { Position } from "./token.ts";
1+
import type { Position } from "./token.ts";
22

33
export interface Rule {
44
type: string;

core/lexer/lexer.ts

+30-30
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
import dbg from "../../debug/debug.js";
2-
import { Token } from "../../ast/mod.ts";
2+
import type { Token } from "../../ast/mod.ts";
33

4-
let debug = dbg("lex");
4+
const debug = dbg("lex");
55

66
/**
77
* Convert a CSS string into an array of lexical tokens.
@@ -10,21 +10,21 @@ let debug = dbg("lex");
1010
* @returns {Array} lexical tokens
1111
*/
1212
export function lex(css: string): Token[] {
13-
var start = 0; // Debug timer start.
14-
15-
var buffer = ""; // Character accumulator
16-
var ch; // Current character
17-
var column = 0; // Current source column number
18-
var cursor = -1; // Current source cursor position
19-
var depth = 0; // Current nesting depth
20-
var line = 1; // Current source line number
21-
var state = "before-selector"; // Current state
22-
var stack = [state]; // State stack
23-
var token: Token = {}; // Current token
24-
var tokens: Token[] = []; // Token accumulator
13+
let start = 0; // Debug timer start.
14+
15+
let buffer = ""; // Character accumulator
16+
let ch: string; // Current character
17+
let column = 0; // Current source column number
18+
let cursor = -1; // Current source cursor position
19+
let depth = 0; // Current nesting depth
20+
let line = 1; // Current source line number
21+
let state = "before-selector"; // Current state
22+
const stack = [state]; // State stack
23+
let token: Token = {}; // Current token
24+
const tokens: Token[] = []; // Token accumulator
2525

2626
// Supported @-rules, in roughly descending order of usage probability.
27-
var atRules: any = [
27+
const atRules: any = [
2828
"media",
2929
"keyframes",
3030
{ name: "-webkit-keyframes", type: "keyframes", prefix: "-webkit-" },
@@ -49,7 +49,7 @@ export function lex(css: string): Token[] {
4949
*
5050
* @returns {String} The next character.
5151
*/
52-
function getCh() {
52+
function getCh(): string {
5353
skip();
5454
return css[cursor];
5555
}
@@ -73,8 +73,8 @@ export function lex(css: string): Token[] {
7373
* @returns {Boolean} Whether the string was found.
7474
*/
7575
function isNextString(str: string): boolean {
76-
var start = cursor + 1;
77-
return (str === css.slice(start, start + str.length));
76+
let start = cursor + 1;
77+
return str === css.slice(start, start + str.length);
7878
}
7979

8080
/**
@@ -85,7 +85,7 @@ export function lex(css: string): Token[] {
8585
* @returns {Number|false} The position, or `false` if not found.
8686
*/
8787
function find(str: string): number | boolean {
88-
var pos = css.slice(cursor).indexOf(str);
88+
let pos = css.slice(cursor).indexOf(str);
8989

9090
return pos > 0 ? pos : false;
9191
}
@@ -117,7 +117,7 @@ export function lex(css: string): Token[] {
117117
* @returns {String} The removed state.
118118
*/
119119
function popState(): string | undefined {
120-
var removed = stack.pop();
120+
let removed = stack.pop();
121121
state = stack[stack.length - 1];
122122

123123
return removed;
@@ -143,7 +143,7 @@ export function lex(css: string): Token[] {
143143
* @returns {String} The replaced state.
144144
*/
145145
function replaceState(newState: string): string {
146-
var previousState = state;
146+
let previousState = state;
147147
stack[stack.length - 1] = state = newState;
148148

149149
return previousState;
@@ -165,7 +165,7 @@ export function lex(css: string): Token[] {
165165
}
166166
cursor++;
167167
} else {
168-
var skipStr = css.slice(cursor, cursor + (n || 0)).split("\n");
168+
let skipStr = css.slice(cursor, cursor + (n || 0)).split("\n");
169169
if (skipStr.length > 1) {
170170
line += skipStr.length - 1;
171171
column = 1;
@@ -221,7 +221,7 @@ export function lex(css: string): Token[] {
221221

222222
start = Date.now();
223223

224-
while (ch = getCh()) {
224+
while ((ch = getCh())) {
225225
debug(ch, getState());
226226

227227
// column += 1;
@@ -311,7 +311,7 @@ export function lex(css: string): Token[] {
311311
// Tokenize a declaration
312312
// if value is empty skip the declaration
313313
if (buffer.trim().length > 0) {
314-
token.value = buffer.trim(), addToken();
314+
(token.value = buffer.trim()), addToken();
315315
}
316316
replaceState("before-name");
317317
break;
@@ -524,7 +524,7 @@ export function lex(css: string): Token[] {
524524
if (isNextChar("*")) {
525525
// Ignore comments in selectors, properties and values. They are
526526
// difficult to represent in the AST.
527-
var pos = find("*/");
527+
let pos = find("*/");
528528

529529
if (pos && typeof pos !== "boolean") {
530530
skip(pos + 1);
@@ -596,11 +596,11 @@ export function lex(css: string): Token[] {
596596

597597
default:
598598
// Iterate over the supported @-rules and attempt to tokenize one.
599-
var tokenized = false;
600-
var name;
601-
var rule;
599+
let tokenized = false;
600+
let name;
601+
let rule;
602602

603-
for (var j = 0, len = atRules.length; !tokenized && j < len; ++j) {
603+
for (let j = 0, len = atRules.length; !tokenized && j < len; ++j) {
604604
rule = atRules[j];
605605
name = rule.name || rule;
606606

@@ -683,7 +683,7 @@ export function lex(css: string): Token[] {
683683
}
684684
}
685685

686-
debug("ran in", (Date.now() - start) + "ms");
686+
debug("ran in", Date.now() - start + "ms");
687687

688688
return tokens;
689689
}

core/parser/parser.ts

+22-23
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,13 @@
11
import dbg from "../../debug/debug.js";
2-
let debug = dbg("parse");
2+
const debug = dbg("parse");
33

44
import { lex } from "../lexer/lexer.ts";
5-
import { AST, Token } from "../../ast/mod.ts";
5+
import type { AST, Token } from "../../ast/mod.ts";
66

7-
var _comments: boolean; // Whether comments are allowed.
8-
var _depth: number; // Current block nesting depth.
9-
var _position: any; // Whether to include line/column position.
10-
var _tokens: Token[]; // Array of lexical tokens.
7+
let _comments: boolean; // Whether comments are allowed.
8+
let _depth: number; // Current block nesting depth.
9+
let _position: any; // Whether to include line/column position.
10+
let _tokens: Token[]; // Array of lexical tokens.
1111

1212
/**
1313
* Convert a CSS string or array of lexical tokens into a `stringify`-able AST.
@@ -17,10 +17,9 @@ var _tokens: Token[]; // Array of lexical tokens.
1717
* @param {Boolean} [options.comments=false] allow comment nodes in the AST
1818
* @returns {Object} `stringify`-able AST
1919
*/
20-
export function parse(css: string | any[], options: any): AST {
21-
var start = 0; // Debug timer start.
20+
export function parse(css: string | any[], options: any = {}): AST {
21+
let start = 0; // Debug timer start.
2222

23-
options || (options = {});
2423
_comments = !!options.comments;
2524
_position = !!options.position;
2625

@@ -29,9 +28,9 @@ export function parse(css: string | any[], options: any): AST {
2928
// Operate on a copy of the given tokens, or the lex()'d CSS string.
3029
_tokens = Array.isArray(css) ? css.slice() : lex(css);
3130

32-
var rule;
33-
var rules = [];
34-
var token;
31+
let rule: any;
32+
let rules = [];
33+
let token: Token;
3534

3635
start = Date.now();
3736

@@ -40,7 +39,7 @@ export function parse(css: string | any[], options: any): AST {
4039
rule && rules.push(rule);
4140
}
4241

43-
debug("ran in", (Date.now() - start) + "ms");
42+
debug("ran in", Date.now() - start + "ms");
4443

4544
return {
4645
type: "stylesheet",
@@ -63,7 +62,7 @@ export function parse(css: string | any[], options: any): AST {
6362
function astNode(token: Token, overrd?: any): Token {
6463
let override: any = overrd || {};
6564

66-
var node: Token = {};
65+
let node: Token = {};
6766

6867
if (token.type) {
6968
node.type = override.type || token.type;
@@ -76,7 +75,7 @@ function astNode(token: Token, overrd?: any): Token {
7675
}
7776

7877
let keys = Object.keys(override);
79-
let key;
78+
let key: any;
8079
for (let i = 0; i < keys.length; ++i) {
8180
key = keys[i];
8281
let n = node as Record<string, any>;
@@ -102,8 +101,8 @@ function astNode(token: Token, overrd?: any): Token {
102101
*
103102
* @returns {Object} lexical token
104103
*/
105-
function next() {
106-
var token = _tokens.shift();
104+
function next(): any {
105+
let token = _tokens.shift();
107106
debug("next:", JSON.stringify(token, null, 2));
108107
return token;
109108
}
@@ -121,7 +120,7 @@ function parseAtGroup(token: Token): any {
121120

122121
// As the @-group token is assembled, relevant token values are captured here
123122
// temporarily. They will later be used as `tokenize()` overrides.
124-
var overrides: any = {};
123+
let overrides: any = {};
125124

126125
switch (token.type) {
127126
case "font-face":
@@ -262,11 +261,11 @@ function parseToken(token: any): any {
262261
* @return {Array} AST nodes
263262
*/
264263
function parseTokensWhile(conditionFn: (token: any) => boolean | number): any {
265-
var node;
266-
var nodes = [];
267-
var token;
264+
let node: any;
265+
let nodes = [];
266+
let token: any;
268267

269-
while ((token = next()) && (conditionFn && conditionFn(token))) {
268+
while ((token = next()) && conditionFn && conditionFn(token)) {
270269
node = parseToken(token);
271270
node && nodes.push(node);
272271
}
@@ -286,7 +285,7 @@ function parseTokensWhile(conditionFn: (token: any) => boolean | number): any {
286285
*/
287286
function parseDeclarations(): any {
288287
return parseTokensWhile(function (token) {
289-
return (token.type === "property" || token.type === "comment");
288+
return token.type === "property" || token.type === "comment";
290289
});
291290
}
292291

example.ts

+4-4
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import { parse } from "./core/parser/parser.ts";
2-
var ast = parse(
3-
"/* comment */ p { color: black; }",
4-
{ position: true, comments: true },
5-
);
2+
const ast = parse("/* comment */ p { color: black; }", {
3+
position: true,
4+
comments: true,
5+
});
66
console.log(ast);

mod.ts

+3
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
export type { AST, Rule, Decl, Position, Token } from "./ast/mod.ts";
2+
export { lex } from "./core/lexer/lexer.ts";
3+
export { parse } from "./core/parser/parser.ts";

testing/deps.ts

+1-5
Original file line numberDiff line numberDiff line change
@@ -1,5 +1 @@
1-
export {
2-
assert,
3-
assertEquals,
4-
assertObjectMatch
5-
} from "https://deno.land/std@0.75.0/testing/asserts.ts";
1+
export { assertEquals } from "https://deno.land/std@0.75.0/testing/asserts.ts";

testing/parse_test.ts

+8-7
Original file line numberDiff line numberDiff line change
@@ -1,21 +1,22 @@
1-
import { parse } from "../core/parser/parser.ts";
1+
import { parse } from "../mod.ts";
22
import { comment } from "./fixtures.js";
3-
import { assertEquals, assertObjectMatch } from "./deps.ts";
3+
import { assertEquals } from "./deps.ts";
44

55
for (let i = 0; i < comment.length; i++) {
6+
const node = comment[i];
67
Deno.test(`[parse] - comments (${i})`, function () {
7-
let ast = parse(comment[i].css, { comments: true });
8-
assertEquals(ast, comment[i].ast);
8+
let ast = parse(node.css, { comments: true });
9+
assertEquals(ast, node.ast);
910
});
1011
}
1112

1213
Deno.test(`[parse] bootstrap.css`, async function () {
1314
const source = await Deno.readTextFile(
14-
"./testing/bootstrap_3.3.7/bootstrap.css",
15+
"./testing/bootstrap_3.3.7/bootstrap.css"
1516
);
16-
const expected_ast = await import("./bootstrap_3.3.7/bootstrap.css.ast.js")
17+
const expected_ast = await import("./bootstrap_3.3.7/bootstrap.css.ast.js");
1718
let ast = parse(source, {});
18-
19+
1920
// For dumping new AST for bootstrap.css
2021
// await Deno.writeTextFile("./testing/bootstrap_3.3.7/bootstrap.css.ast.js", JSON.stringify(ast));
2122
//

0 commit comments

Comments (0)