Isolate tokenizer, create head module
Remove the tokenizer's dependency on the default grammar.
Set up an entry module and export the primary classes/functions.
patgrasso committed Aug 2, 2016
1 parent 2ab70b3 commit 7c7c858
Showing 6 changed files with 32 additions and 28 deletions.
4 changes: 4 additions & 0 deletions .travis.yml
@@ -4,7 +4,11 @@ node_js:
- "5.1"
- "6"
- "stable"

script:
- npm run lint
after_script:
- npm run coveralls

notifications:
email: false
6 changes: 4 additions & 2 deletions lib/parser.js
@@ -6,7 +6,7 @@ const tokenize = require('./tokenizer');


function parse(sent, grammar, tokenizer) {
let tokens = (tokenizer || tokenize)(sent);
let tokens = (tokenizer || tokenize)(sent, grammar || rules.rules);
let states = earley(tokens, grammar || rules.rules);
return dfs(states, tokens);
}
@@ -220,4 +220,6 @@ let tree = parse(sentence, rules.rules);

//console.log(JSON.stringify(tree, null, 2));

module.exports.parse = parse;
module.exports.parse = parse;
module.exports.earley = earley;
module.exports.dfs = dfs;
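
With this change, parse() forwards the grammar to the tokenizer, and the Earley recognizer (earley) and the tree builder (dfs) are exported alongside parse. A minimal usage sketch of the new exports (the require paths and the input sentence are illustrative; lib/rules is assumed to still export the default rules array shown in the next file):

// Sketch: calling the newly exported functions from lib/parser.js.
// Assumes the default grammar exported by lib/rules.
const { parse, earley, dfs } = require('./lib/parser');
const { rules } = require('./lib/rules');

// parse() tokenizes the sentence against the grammar, runs the Earley
// recognizer, then walks the resulting state sets with dfs().
let tree = parse('1 + 2 * 3', rules);
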
15 changes: 3 additions & 12 deletions lib/rules.js
@@ -82,18 +82,9 @@ let rules = [
new Rule(factor , [/\d+/] , (n) => parseFloat(n))
];

function getTokens() {
return rules.reduce(
(tokens, rule) => tokens.concat(
rule.filter((sym) => typeof sym === 'string' || sym instanceof RegExp)
)
, []);
}

module.exports = {
rules : rules,
Rule : Rule,
Sym : Sym,
getTokens : getTokens
rules : rules,
Rule : Rule,
Sym : Sym
};

25 changes: 12 additions & 13 deletions lib/tokenizer.js
@@ -1,17 +1,16 @@

let terminals = require('./rules').getTokens();


module.exports = (sent, terms) => {
terms = terms || terminals;

let tokens = terms
.map((token) => {
if (typeof token === 'string') {
return token.replace(/[\-\[\]\/\{\}\(\)\*\+\?\.\\\^\$\|]/g, '\\$&');
}
return token.source;
})
module.exports = (sent, grammar) => {
let terms = grammar.reduce(
(tokens, rule) => tokens.concat(
rule.filter((sym) => typeof sym === 'string' || sym instanceof RegExp)
), [])
, tokens = terms
.map((token) => {
if (typeof token === 'string') {
return token.replace(/[\-\[\]\/\{\}\(\)\*\+\?\.\\\^\$\|]/g, '\\$&');
}
return token.source;
})
, delims = RegExp('(' + tokens.join('|') + ')');

return sent
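
The tokenizer no longer requires './rules' for its terminals; it derives them from whatever grammar it is handed, keeping each rule's string and RegExp symbols and turning them into splitting delimiters. A minimal sketch of calling it directly (the input sentence is illustrative, and the truncated remainder of the module is assumed to split the sentence on those delimiters):

// Sketch: the grammar must now be passed in explicitly.
const tokenize = require('./lib/tokenizer');
const { rules } = require('./lib/rules');

// String and RegExp symbols in the rules become the token delimiters.
let tokens = tokenize('1 + 2 * 3', rules);
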
2 changes: 1 addition & 1 deletion package.json
@@ -2,7 +2,7 @@
"name": "parsey",
"version": "0.0.0",
"description": "Parser for context-free grammars",
"main": "parser.js",
"main": "parsey.js",
"scripts": {
"lint": "eslint . --ignore-pattern coverage",
"test": "jasmine",
8 changes: 8 additions & 0 deletions parsey.js
@@ -0,0 +1,8 @@

module.exports = {
parse : require('./lib/parser').parse,
tokenize : require('./lib/tokenizer'),
Rule : require('./lib/rules').Rule,
Sym : require('./lib/rules').Sym
};
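
The new head module matches the "main": "parsey.js" entry added to package.json, so a consumer can pull everything from one require. A hypothetical consumer sketch (the npm package name and the Sym constructor signature are assumptions; Rule usage mirrors lib/rules.js above):

// Sketch only: Sym's constructor arguments and the semantic-action
// arguments are assumed, not confirmed by this diff.
const { parse, Rule, Sym } = require('parsey');

const num = new Sym('num');
const sum = new Sym('sum');
const grammar = [
  new Rule(sum, [num, '+', num], (a, _, b) => a + b),
  new Rule(num, [/\d+/], (n) => parseFloat(n))
];

let tree = parse('1 + 2', grammar);
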
