@@ -5,7 +5,7 @@ describe("parser", () => {
 
     it("schema", () => {
 
-        const tokens = Parser.tokenize("a[a*][/a*]")
+        const tokens = Parser.tagTokens("a[a*][/a*]")
         assert.strictEqual(tokens instanceof Array, true)
         assert.strictEqual(tokens.length, 2)
         assert.strictEqual(Object.keys(tokens[0]).length, 3)
@@ -42,59 +42,59 @@ describe("parser", () => {
 
     it("text", () => {
 
-        assert.strictEqual(Parser.tokenize("This is some text")[0].buffer, "This is some text")
+        assert.strictEqual(Parser.tagTokens("This is some text")[0].buffer, "This is some text")
 
     })
 
 
     it("invalidTags", () => {
 
-        assert.strictEqual(Parser.tokenize("[u]")[0].name, Token.NAME.TEXT)
-        assert.strictEqual(Parser.tokenize("[u][u]")[0].name, Token.NAME.TEXT)
-        assert.strictEqual(Parser.tokenize("[/u]")[0].name, Token.NAME.TEXT)
-        assert.strictEqual(Parser.tokenize("[/u][/u]")[0].name, Token.NAME.TEXT)
-        assert.strictEqual(Parser.tokenize("[test={][/test]")[0].name, Token.NAME.TEXT)
-        assert.strictEqual(Parser.tokenize("[@][/@]")[0].name, Token.NAME.TEXT)
-        assert.strictEqual(Parser.tokenize("[test test=1 testt=\"2\"][/test]")[0].name, Token.NAME.TEXT)
+        assert.strictEqual(Parser.tagTokens("[u]")[0].name, Token.NAME.TEXT)
+        assert.strictEqual(Parser.tagTokens("[u][u]")[0].name, Token.NAME.TEXT)
+        assert.strictEqual(Parser.tagTokens("[/u]")[0].name, Token.NAME.TEXT)
+        assert.strictEqual(Parser.tagTokens("[/u][/u]")[0].name, Token.NAME.TEXT)
+        assert.strictEqual(Parser.tagTokens("[test={][/test]")[0].name, Token.NAME.TEXT)
+        assert.strictEqual(Parser.tagTokens("[@][/@]")[0].name, Token.NAME.TEXT)
+        assert.strictEqual(Parser.tagTokens("[test test=1 testt=\"2\"][/test]")[0].name, Token.NAME.TEXT)
 
     })
 
     it("keys", () => {
 
-        assert.strictEqual(Parser.tokenize("[color=#:/.rEd][/color]")[0].openingTag.keys[0].value, "#:/.rEd")
-        assert.strictEqual(Parser.tokenize("[list=1][/list]")[0].openingTag.keys[0].value, "1")
-        assert.strictEqual(Parser.tokenize("[url=http://localhost][/url]")[0].openingTag.keys[0].value, "http://localhost")
-        assert.strictEqual(Parser.tokenize("[url=\"http://localhost\"][/url]")[0].openingTag.keys[0].value, "http://localhost")
-        assert.strictEqual(Parser.tokenize("[url=\"http://localhost\"][/url]")[0].openingTag.keys[0].value, "http://localhost")
-        assert.strictEqual(Parser.tokenize("[test test=1][/test]")[0].openingTag.keys[1].value, "1")
-        assert.strictEqual(Parser.tokenize("[test test=1 testt=\"2\"]x[test]cvv[/test]xc[/test]")[0].openingTag.keys[2].name, "testt")
-        assert.strictEqual(Parser.tokenize("[test test=1 testt=\"2\"]test[test][test][/test][/test]sfd[/test]")[0].openingTag.keys[2].value, "2")
+        assert.strictEqual(Parser.tagTokens("[color=#:/.rEd][/color]")[0].openingTag.keys[0].value, "#:/.rEd")
+        assert.strictEqual(Parser.tagTokens("[list=1][/list]")[0].openingTag.keys[0].value, "1")
+        assert.strictEqual(Parser.tagTokens("[url=http://localhost][/url]")[0].openingTag.keys[0].value, "http://localhost")
+        assert.strictEqual(Parser.tagTokens("[url=\"http://localhost\"][/url]")[0].openingTag.keys[0].value, "http://localhost")
+        assert.strictEqual(Parser.tagTokens("[url=\"http://localhost\"][/url]")[0].openingTag.keys[0].value, "http://localhost")
+        assert.strictEqual(Parser.tagTokens("[test test=1][/test]")[0].openingTag.keys[1].value, "1")
+        assert.strictEqual(Parser.tagTokens("[test test=1 testt=\"2\"]x[test]cvv[/test]xc[/test]")[0].openingTag.keys[2].name, "testt")
+        assert.strictEqual(Parser.tagTokens("[test test=1 testt=\"2\"]test[test][test][/test][/test]sfd[/test]")[0].openingTag.keys[2].value, "2")
 
     })
 
     it("buffer", () => {
 
-        assert.strictEqual(Parser.tokenize("[b][i]Test[/b][/i]")[0].openingTag.buffer, "[b]")
-        assert.strictEqual(Parser.tokenize("[b][i test=1]Hello World[/i][/b]")[1].openingTag.buffer, "[i test=1]")
-        assert.strictEqual(Parser.tokenize("[b][i=2]Test[/b][/i]")[1].openingTag.buffer, "[i=2]")
-        assert.strictEqual(Parser.tokenize("[b=dsadsa test=1][i]Test[/b][/i]")[1].openingTag.buffer, "[i]")
-        assert.strictEqual(Parser.tokenize("[b][i]Test[/b][/i]")[0].closingTag.buffer, "[/b]")
-        assert.strictEqual(Parser.tokenize("[b][i]Test[/b][/i]")[1].closingTag.buffer, "[/i]")
+        assert.strictEqual(Parser.tagTokens("[b][i]Test[/b][/i]")[0].openingTag.buffer, "[b]")
+        assert.strictEqual(Parser.tagTokens("[b][i test=1]Hello World[/i][/b]")[1].openingTag.buffer, "[i test=1]")
+        assert.strictEqual(Parser.tagTokens("[b][i=2]Test[/b][/i]")[1].openingTag.buffer, "[i=2]")
+        assert.strictEqual(Parser.tagTokens("[b=dsadsa test=1][i]Test[/b][/i]")[1].openingTag.buffer, "[i]")
+        assert.strictEqual(Parser.tagTokens("[b][i]Test[/b][/i]")[0].closingTag.buffer, "[/b]")
+        assert.strictEqual(Parser.tagTokens("[b][i]Test[/b][/i]")[1].closingTag.buffer, "[/i]")
 
     })
 
     it("bufferIndex", () => {
 
-        assert.strictEqual(Parser.tokenize("test[b][/b]")[1].openingTag.bufferIndex, 4)
-        assert.strictEqual(Parser.tokenize("test[b][/b][test][/test]xoxo")[2].openingTag.bufferIndex, 11)
-        assert.strictEqual(Parser.tokenize("[b][i]Test[/b][/i]")[0].closingTag.bufferIndex, 10)
-        assert.strictEqual(Parser.tokenize("[b][i][e]Hello World[/e][/i][/b]")[2].closingTag.bufferIndex, 20)
+        assert.strictEqual(Parser.tagTokens("test[b][/b]")[1].openingTag.bufferIndex, 4)
+        assert.strictEqual(Parser.tagTokens("test[b][/b][test][/test]xoxo")[2].openingTag.bufferIndex, 11)
+        assert.strictEqual(Parser.tagTokens("[b][i]Test[/b][/i]")[0].closingTag.bufferIndex, 10)
+        assert.strictEqual(Parser.tagTokens("[b][i][e]Hello World[/e][/i][/b]")[2].closingTag.bufferIndex, 20)
 
     })
 
     it("code", () => {
 
-        const tokens = Parser.tokenize("[code][b][/b][i][/i]Test[code]test[/code][/code][b][/b]")
+        const tokens = Parser.tagTokens("[code][b][/b][i][/i]Test[code]test[/code][/code][b][/b]")
         assert.strictEqual(tokens.length, 3)
         assert.strictEqual(tokens[0].name, "bbcode")
        assert.strictEqual(tokens[1].name, Token.NAME.TEXT)
@@ -104,7 +104,7 @@ describe("parser", () => {
 
     it("list", () => {
 
-        const tokens = Parser.tokenize("[list][*]test[*][/list]")
+        const tokens = Parser.tagTokens("[list][*]test[*][/list]")
 
         assert.strictEqual(tokens.length, 4)
         assert.strictEqual(tokens[0].name, "bbcode")