Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

init darabonba2.0 #63

Open
wants to merge 1 commit into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
27 changes: 4 additions & 23 deletions index.js
Original file line number Diff line number Diff line change
@@ -1,25 +1,6 @@
'use strict';

const {
analyze, getChecker
} = require('./lib/semantic');
const util = require('./lib/util');
const Tag = require('./lib/tag');
const builtin = require('./lib/builtin');
const comment = require('./lib/comment');
const pkg = require('./package.json');

// Parse Darabonba source text into an AST, stamping it with the
// version of this parser package before handing it back.
function parse(source, filePath) {
  return Object.assign(analyze(source, filePath), {
    parserVersion: pkg.version
  });
}

// Public API of the 1.x parser: the parse() entry point plus the
// supporting helpers (token tags, utilities, builtin definitions,
// comment handling, and the semantic checker factory).
module.exports = {
parse,
Tag,
util,
builtin,
comment,
getChecker
};
// NOTE(review): the 2.0 entry point narrows the public surface to the
// token tags, the package-level analyser and the comment utilities —
// confirm downstream consumers no longer rely on parse()/util/builtin
// from the 1.x API before merging.
const { Tag } = require('./lib/tag');
exports.Tag = Tag;
exports.Package = require('./lib/package');
exports.comment = require('./lib/comment');
86 changes: 86 additions & 0 deletions lib/analyser.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,86 @@
'use strict';

const { Tag } = require('./tag');

/**
 * Semantic analyser for a single Darabonba source file within a package.
 * Tracks imported dependencies and which types/components/packages the
 * file actually uses.
 */
class Analyser {
  constructor(file, pkg) {
    // file-level context
    this.filename = file.filename;
    this.source = file.source;
    // package-level context (Darafile metadata: libraries, components)
    this.pkg = pkg;
    // alias id -> resolved imported package
    this.dependencies = new Map();
    // usage counters for later checks / code generation
    this.usedTypes = new Map();
    this.usedPackages = new Map();
    this.usedComponents = new Map();
    // used to flag $dara
    this.usedFeatures = new Map();
  }

  /**
   * Report a semantic error. When a token is supplied, print a
   * caret-annotated source location to stderr first.
   * @throws {SyntaxError} always
   */
  error(message, token) {
    if (token) {
      const { loc } = token;
      console.error(`${this.filename}:${loc.start.line}:${loc.start.column}`);
      console.error(`${this.source.split('\n')[loc.start.line - 1]}`);
      console.error(`${' '.repeat(loc.start.column - 1)}^`);
    }

    throw new SyntaxError(message);
  }

  /**
   * Validate the file's import list: every alias must be declared in
   * the Darafile and may be imported at most once. Successful imports
   * are registered in `dependencies` and `usedPackages`.
   */
  checkImports(ast) {
    for (const item of ast.imports) {
      const aliasId = item.aliasId.lexeme;

      // must be declared as an external library in the Darafile
      if (!this.pkg.libraries.has(aliasId)) {
        this.error(`the package '${aliasId}' not defined in Darafile`, item.aliasId);
      }

      // duplicate imports are rejected
      if (this.dependencies.has(aliasId)) {
        this.error(`the package id '${aliasId}' has been imported`, item.aliasId);
      }

      this.dependencies.set(aliasId, this.pkg.libraries.get(aliasId));
      this.usedPackages.set(aliasId, new Map());
    }
  }

  /**
   * Recursively validate a type node and record its usage.
   * Handles builtin types, package-local components, arrays, maps and
   * external components ($Pkg.Component).
   */
  checkType(ast) {
    if (ast.tag === Tag.TYPE) {
      // builtin type
      this.usedTypes.set(ast.lexeme, true);
      return;
    }

    if (ast.tag === Tag.ID) {
      // bare id: must be a model/module of the current package
      if (!this.pkg.components.has(ast.lexeme)) {
        this.error(`the type '${ast.lexeme}' is undefined`, ast);
      }
      this.usedComponents.set(ast.lexeme, this.pkg.components.get(ast.lexeme));
      return;
    }

    if (ast.type === 'array') {
      this.checkType(ast.itemType);
      return;
    }

    if (ast.type === 'map') {
      this.checkType(ast.keyType);
      this.checkType(ast.valueType);
      return;
    }

    if (ast.type === 'extern_component') {
      const alias = ast.aliasId.lexeme;
      // the owning package must have been imported
      if (!this.dependencies.has(alias)) {
        this.error(`the package '${alias}' is un-imported`, ast.aliasId);
      }
      const extern = this.dependencies.get(alias);
      // the component must exist in that package
      if (!extern.components.has(ast.component.lexeme)) {
        this.error(`'${ast.component.lexeme}' is undefined in '${alias}'`, ast.component);
      }
      this.usedPackages.get(alias).set(ast.component.lexeme, true);
      return;
    }

    throw new Error('unimplemented');
  }
}

module.exports = Analyser;
55 changes: 55 additions & 0 deletions lib/base_lexer.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
'use strict';

/**
 * Character-level cursor over a source string, shared by concrete lexers.
 * Maintains 1-based line and column counters alongside the raw index.
 */
class BaseLexer {
  constructor(source, filename, offset = {}) {
    this.source = source;
    this.filename = filename;

    // current read position; -1 means "before the first character"
    this.index = offset.index || -1;
    // character under the cursor; seeded with a space so the first
    // getch() advances onto index 0 without bumping the line counter
    this.peek = ' ';
    // reserved-word table
    this.words = new Map();
    this.line = offset.line || 1;
    this.column = offset.column || 0;
  }

  // Advance the cursor by one character and load it into `peek`.
  getch() {
    // crossing a newline moves us to the start of the next line
    if (this.peek === '\n') {
      this.line += 1;
      this.column = 0;
    }
    this.index += 1;
    this.column += 1;
    this.peek = this.source[this.index];
  }

  // Look at the character `i` positions ahead without consuming it.
  readch(i = 0) {
    return this.source[this.index + i];
  }

  // Step the cursor back one character.
  // NOTE(review): does not restore `line` when backing over a newline —
  // confirm callers never unget across a line boundary.
  ungetch() {
    this.index -= 1;
    this.column -= 1;
    this.peek = this.source[this.index];
  }

  // Register a reserved word; registering the same lexeme twice is a
  // programming error.
  reserve(word) {
    if (this.words.has(word.lexeme)) {
      throw new Error(`duplicate reserved word: ${word.lexeme}`);
    }
    this.words.set(word.lexeme, word);
  }

  // Consume spaces, tabs, carriage returns and newlines.
  skipWhitespaces() {
    const blanks = ' \t\n\r';
    while (blanks.includes(this.peek)) {
      this.getch();
    }
  }
}

module.exports = BaseLexer;
158 changes: 158 additions & 0 deletions lib/base_parser.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,158 @@
'use strict';

const { Tag, tip } = require('./tag');

/**
 * Base recursive-descent parser over a token stream.
 * Keeps a single lookahead token in `this.look`.
 */
class Parser {
  /**
   * @param {Object} lexer - token source; must expose scan(), filename, source
   */
  constructor(lexer) {
    this.lexer = lexer;
    this.look = null; // current lookahead token
  }

  // Advance to the next significant token, skipping comment tokens.
  move() {
    do {
      this.look = this.lexer.scan();
    } while (this.look.tag === Tag.COMMENT);
  }

  // Human-readable description of a tag for error messages.
  tagTip(tag) {
    return tip(tag);
  }

  // Index of the current lookahead token (used for tokenRange).
  getIndex() {
    return this.look.index;
  }

  // imports := ('import' PACK_ID ';')*
  imports() {
    const imports = [];

    while (this.is(Tag.IMPORT)) {
      const begin = this.getIndex();
      this.move();
      const aliasId = this.look;
      this.match(Tag.PACK_ID);
      this.match(';');
      const end = this.getIndex();

      imports.push({
        type: 'import',
        aliasId: aliasId,
        tokenRange: [begin, end]
      });
    }

    return imports;
  }

  // externComponent := PACK_ID '.' ID   (e.g. $A.B)
  externComponent() {
    const begin = this.getIndex();
    const t = this.look;
    this.move();
    // for $A.B
    this.match('.');
    const id = this.look;
    this.match(Tag.ID);
    const end = this.getIndex();
    return {
      type: 'extern_component',
      aliasId: t,
      component: id,
      loc: {
        start: t.loc.start,
        end: id.loc.end
      },
      tokenRange: [begin, end]
    };
  }

  // baseType := '[' baseType ']' | 'map' '[' baseType ']' baseType
  //           | ID | externComponent | TYPE
  baseType() {
    if (this.look.tag === '[') {
      this.move();
      const t = this.baseType();
      this.match(']');
      return {
        type: 'array',
        itemType: t
      };
    }

    if (this.isWord(Tag.TYPE, 'map')) {
      const t = this.look;
      this.move();
      this.match('[');
      const keyType = this.baseType();
      this.match(']');
      const valueType = this.baseType();
      return {
        loc: {
          start: t.loc.start,
          end: valueType.loc.end
        },
        type: 'map',
        keyType: keyType,
        valueType: valueType
      };
    }

    if (this.is(Tag.ID)) {
      const t = this.look;
      this.move();
      return t;
    }

    if (this.is(Tag.PACK_ID)) {
      return this.externComponent();
    }

    if (this.is(Tag.TYPE)) {
      const t = this.look;
      this.move();
      return t;
    }

    this.error(`expect base type, model id or array form`);
  }

  // Consume the expected tag or raise a syntax error.
  match(tag) {
    if (this.look.tag === tag) {
      this.move();
    } else {
      this.error(`Expect ${this.tagTip(tag)}, but ${this.tokenTip(this.look)}`);
    }
  }

  // Consume the expected tag+lexeme pair or raise a syntax error.
  matchWord(tag, lexeme) {
    if (this.look.tag === tag && this.look.lexeme === lexeme) {
      this.move();
    } else {
      this.error(`Expect ${this.tagTip(tag)} ${lexeme}, but ${this.tokenTip(this.look)}`);
    }
  }

  is(tag) {
    return this.look.tag === tag;
  }

  isWord(tag, lexeme) {
    return this.look.tag === tag && this.look.lexeme === lexeme;
  }

  // Describe a token for error messages.
  // Fixed: previously returned `this.look` regardless of the `token`
  // argument; now describes the token that was actually passed in
  // (identical behavior at existing call sites, which all pass this.look).
  tokenTip(token) {
    if (!token.tag) {
      return 'EOF';
    }

    return token;
  }

  /**
   * Print a caret-annotated location for the lookahead token and abort.
   * Uses console.error for consistency with Analyser.error (diagnostics
   * belong on stderr).
   * @throws {SyntaxError} always
   */
  error(message) {
    const lexer = this.lexer;
    const token = this.look;
    console.error(`${lexer.filename}:${token.loc.start.line}:${token.loc.start.column}`);
    console.error(`${lexer.source.split('\n')[token.loc.start.line - 1]}`);
    console.error(`${' '.repeat(token.loc.start.column - 1)}^`);
    const prefix = `Unexpected token: ${this.tokenTip(token)}.`;
    throw new SyntaxError(`${prefix} ${message}`);
  }
}

module.exports = Parser;
Loading