Make C++ implementation the main implementation

parent 88134802fa
commit c907885420

2805 changed files with 1986 additions and 12174 deletions
18 .gitignore (vendored)

@@ -1,11 +1,19 @@

# populated by npm
node_modules/
# default build directory for VSCode
/build/

# compiled TypeScript code from src/
/lib/
# used by clangd and other programs
.cache/

# local development helpers
# created by pyenv
.python-version

# used for local development
Makefile

# Used by IDEs to get correct error messages
compile_commands.json

# quick test scripts in the main directory
/*.bolt
/*.c

14 .vscode/launch.json (vendored)

@@ -5,15 +5,13 @@
"version": "0.2.0",
"configurations": [
{
"type": "node",
"type": "lldb",
"request": "launch",
"name": "Launch Program",
"skipFiles": [
"<node_internals>/**"
],
"program": "${workspaceFolder}/compiler/lib/bin/bolt.js",
"args": [ "compiler/test.bolt" ],
"outputCapture": "std"
"name": "Debug",
"program": "${workspaceFolder}/build/bolt",
"args": [ "--direct-diagnostics", "verify", "${workspaceFolder}/test.bolt" ],
"cwd": "${workspaceFolder}",
"preLaunchTask": "CMake: build"
}
]
}

18 bootstrap/cxx/.gitignore (vendored)

@@ -1,18 +0,0 @@

# default build directory for VSCode
/build/

# used by clangd and other programs
.cache/

# created by pyenv
.python-version

# used for local development
Makefile

# Used by IDEs to get correct error messages
compile_commands.json

# quick test scripts in the main directory
/*.bolt

17 bootstrap/cxx/.vscode/launch.json (vendored)

@@ -1,17 +0,0 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"type": "lldb",
"request": "launch",
"name": "Debug",
"program": "${workspaceFolder}/build/bolt",
"args": [ "--direct-diagnostics", "verify", "${workspaceFolder}/test.bolt" ],
"cwd": "${workspaceFolder}",
"preLaunchTask": "CMake: build"
}
]
}

@@ -1 +0,0 @@
../../../../zen

11 bootstrap/js/compiler/.gitignore (vendored)

@@ -1,11 +0,0 @@

# populated by npm
node_modules/

# compiled TypeScript code from src/
/lib/

# local development helpers
Makefile
/*.bolt
/*.c

169 bootstrap/js/compiler/package-lock.json (generated)

@@ -1,169 +0,0 @@
|
|||
{
|
||||
"name": "@samvv/bolt",
|
||||
"version": "0.0.1",
|
||||
"lockfileVersion": 2,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "@samvv/bolt",
|
||||
"version": "0.0.1",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/which": "^3.0.0",
|
||||
"commander": "^10.0.0",
|
||||
"reflect-metadata": "^0.1.13",
|
||||
"source-map-support": "^0.5.21",
|
||||
"tslib": "^2.5.0",
|
||||
"which": "^3.0.0",
|
||||
"yagl": "^0.5.1"
|
||||
},
|
||||
"bin": {
|
||||
"bolt": "lib/bin/bolt.js",
|
||||
"bolt-self": "lib/bin/bolt-self.js"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^18.15.11"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/node": {
|
||||
"version": "18.15.11",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.11.tgz",
|
||||
"integrity": "sha512-E5Kwq2n4SbMzQOn6wnmBjuK9ouqlURrcZDVfbo9ftDDTFt3nk7ZKK4GMOzoYgnpQJKcxwQw+lGaBvvlMo0qN/Q==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@types/which": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@types/which/-/which-3.0.0.tgz",
|
||||
"integrity": "sha512-ASCxdbsrwNfSMXALlC3Decif9rwDMu+80KGp5zI2RLRotfMsTv7fHL8W8VDp24wymzDyIFudhUeSCugrgRFfHQ=="
|
||||
},
|
||||
"node_modules/buffer-from": {
|
||||
"version": "1.1.2",
|
||||
"resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz",
|
||||
"integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ=="
|
||||
},
|
||||
"node_modules/commander": {
|
||||
"version": "10.0.0",
|
||||
"resolved": "https://registry.npmjs.org/commander/-/commander-10.0.0.tgz",
|
||||
"integrity": "sha512-zS5PnTI22FIRM6ylNW8G4Ap0IEOyk62fhLSD0+uHRT9McRCLGpkVNvao4bjimpK/GShynyQkFFxHhwMcETmduA==",
|
||||
"engines": {
|
||||
"node": ">=14"
|
||||
}
|
||||
},
|
||||
"node_modules/isexe": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
|
||||
"integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="
|
||||
},
|
||||
"node_modules/reflect-metadata": {
|
||||
"version": "0.1.13",
|
||||
"resolved": "https://registry.npmjs.org/reflect-metadata/-/reflect-metadata-0.1.13.tgz",
|
||||
"integrity": "sha512-Ts1Y/anZELhSsjMcU605fU9RE4Oi3p5ORujwbIKXfWa+0Zxs510Qrmrce5/Jowq3cHSZSJqBjypxmHarc+vEWg=="
|
||||
},
|
||||
"node_modules/source-map": {
|
||||
"version": "0.6.1",
|
||||
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
|
||||
"integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==",
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/source-map-support": {
|
||||
"version": "0.5.21",
|
||||
"resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz",
|
||||
"integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==",
|
||||
"dependencies": {
|
||||
"buffer-from": "^1.0.0",
|
||||
"source-map": "^0.6.0"
|
||||
}
|
||||
},
|
||||
"node_modules/tslib": {
|
||||
"version": "2.5.0",
|
||||
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.0.tgz",
|
||||
"integrity": "sha512-336iVw3rtn2BUK7ORdIAHTyxHGRIHVReokCR3XjbckJMK7ms8FysBfhLR8IXnAgy7T0PTPNBWKiH514FOW/WSg=="
|
||||
},
|
||||
"node_modules/which": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/which/-/which-3.0.0.tgz",
|
||||
"integrity": "sha512-nla//68K9NU6yRiwDY/Q8aU6siKlSs64aEC7+IV56QoAuyQT2ovsJcgGYGyqMOmI/CGN1BOR6mM5EN0FBO+zyQ==",
|
||||
"dependencies": {
|
||||
"isexe": "^2.0.0"
|
||||
},
|
||||
"bin": {
|
||||
"node-which": "bin/which.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^14.17.0 || ^16.13.0 || >=18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/yagl": {
|
||||
"version": "0.5.1",
|
||||
"resolved": "https://registry.npmjs.org/yagl/-/yagl-0.5.1.tgz",
|
||||
"integrity": "sha512-DfJygWCefAq5eEOmwvVkiMFBUEQJs9aijGdhaYGSdj1TM2OqSbe/Vp37e/nMGXsgmWiryZapKMOtpYx3ECUrJQ=="
|
||||
}
|
||||
},
|
||||
"dependencies": {
|
||||
"@types/node": {
|
||||
"version": "18.15.11",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.11.tgz",
|
||||
"integrity": "sha512-E5Kwq2n4SbMzQOn6wnmBjuK9ouqlURrcZDVfbo9ftDDTFt3nk7ZKK4GMOzoYgnpQJKcxwQw+lGaBvvlMo0qN/Q==",
|
||||
"dev": true
|
||||
},
|
||||
"@types/which": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@types/which/-/which-3.0.0.tgz",
|
||||
"integrity": "sha512-ASCxdbsrwNfSMXALlC3Decif9rwDMu+80KGp5zI2RLRotfMsTv7fHL8W8VDp24wymzDyIFudhUeSCugrgRFfHQ=="
|
||||
},
|
||||
"buffer-from": {
|
||||
"version": "1.1.2",
|
||||
"resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz",
|
||||
"integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ=="
|
||||
},
|
||||
"commander": {
|
||||
"version": "10.0.0",
|
||||
"resolved": "https://registry.npmjs.org/commander/-/commander-10.0.0.tgz",
|
||||
"integrity": "sha512-zS5PnTI22FIRM6ylNW8G4Ap0IEOyk62fhLSD0+uHRT9McRCLGpkVNvao4bjimpK/GShynyQkFFxHhwMcETmduA=="
|
||||
},
|
||||
"isexe": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
|
||||
"integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="
|
||||
},
|
||||
"reflect-metadata": {
|
||||
"version": "0.1.13",
|
||||
"resolved": "https://registry.npmjs.org/reflect-metadata/-/reflect-metadata-0.1.13.tgz",
|
||||
"integrity": "sha512-Ts1Y/anZELhSsjMcU605fU9RE4Oi3p5ORujwbIKXfWa+0Zxs510Qrmrce5/Jowq3cHSZSJqBjypxmHarc+vEWg=="
|
||||
},
|
||||
"source-map": {
|
||||
"version": "0.6.1",
|
||||
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
|
||||
"integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="
|
||||
},
|
||||
"source-map-support": {
|
||||
"version": "0.5.21",
|
||||
"resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz",
|
||||
"integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==",
|
||||
"requires": {
|
||||
"buffer-from": "^1.0.0",
|
||||
"source-map": "^0.6.0"
|
||||
}
|
||||
},
|
||||
"tslib": {
|
||||
"version": "2.5.0",
|
||||
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.0.tgz",
|
||||
"integrity": "sha512-336iVw3rtn2BUK7ORdIAHTyxHGRIHVReokCR3XjbckJMK7ms8FysBfhLR8IXnAgy7T0PTPNBWKiH514FOW/WSg=="
|
||||
},
|
||||
"which": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/which/-/which-3.0.0.tgz",
|
||||
"integrity": "sha512-nla//68K9NU6yRiwDY/Q8aU6siKlSs64aEC7+IV56QoAuyQT2ovsJcgGYGyqMOmI/CGN1BOR6mM5EN0FBO+zyQ==",
|
||||
"requires": {
|
||||
"isexe": "^2.0.0"
|
||||
}
|
||||
},
|
||||
"yagl": {
|
||||
"version": "0.5.1",
|
||||
"resolved": "https://registry.npmjs.org/yagl/-/yagl-0.5.1.tgz",
|
||||
"integrity": "sha512-DfJygWCefAq5eEOmwvVkiMFBUEQJs9aijGdhaYGSdj1TM2OqSbe/Vp37e/nMGXsgmWiryZapKMOtpYx3ECUrJQ=="
|
||||
}
|
||||
}
|
||||
}
|
@@ -1,33 +0,0 @@
{
"name": "@boltlang/bolt",
"version": "0.0.1",
"description": "A new programming language for the web",
"main": "lib/index.js",
"bin": {
"bolt": "lib/bin/bolt.js"
},
"repository": {
"type": "git",
"url": "https://github.com/samvv/BoltJS"
},
"keywords": [
"programming-language",
"productivity",
"development",
"performance"
],
"author": "Sam Vervaeck",
"license": "MIT",
"dependencies": {
"@types/which": "^3.0.0",
"commander": "^11.0.0",
"reflect-metadata": "^0.1.13",
"source-map-support": "^0.5.21",
"tslib": "^2.5.3",
"which": "^3.0.1",
"yagl": "^0.5.1"
},
"devDependencies": {
"@types/node": "^20.3.1"
}
}

@@ -1,201 +0,0 @@
|
|||
import { DirectedHashGraph, strongconnect } from "yagl";
|
||||
import { assert } from "./util";
|
||||
import { Syntax, LetDeclaration, SourceFile, SyntaxKind } from "./cst";
|
||||
import type { Scope } from "./scope"
|
||||
|
||||
export class Analyser {
|
||||
|
||||
private referenceGraph = new DirectedHashGraph<LetDeclaration>();
|
||||
|
||||
public addSourceFile(node: SourceFile): void {
|
||||
|
||||
const visit = (node: Syntax, source: Syntax | null) => {
|
||||
|
||||
const addReference = (scope: Scope, name: string) => {
|
||||
const target = scope.lookup(name);
|
||||
if (source === null || target === null || isParam(target.kind)) {
|
||||
return;
|
||||
}
|
||||
assert(source.kind === SyntaxKind.LetDeclaration);
|
||||
assert(target.kind === SyntaxKind.LetDeclaration);
|
||||
this.referenceGraph.addEdge(source, target);
|
||||
}
|
||||
|
||||
switch (node.kind) {
|
||||
|
||||
case SyntaxKind.ConstantExpression:
|
||||
break;
|
||||
|
||||
case SyntaxKind.MatchExpression:
|
||||
{
|
||||
for (const arm of node.arms) {
|
||||
visit(arm.expression, source);
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case SyntaxKind.InstanceDeclaration:
|
||||
case SyntaxKind.ClassDeclaration:
|
||||
case SyntaxKind.SourceFile:
|
||||
case SyntaxKind.ModuleDeclaration:
|
||||
{
|
||||
for (const element of node.elements) {
|
||||
visit(element, source);
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case SyntaxKind.ReferenceExpression:
|
||||
{
|
||||
if (node.name.kind === SyntaxKind.Identifier) {
|
||||
assert(node.modulePath.length === 0);
|
||||
addReference(node.getScope(), node.name.text);
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case SyntaxKind.MemberExpression:
|
||||
{
|
||||
visit(node.expression, source);
|
||||
break;
|
||||
}
|
||||
|
||||
case SyntaxKind.TupleExpression:
|
||||
{
|
||||
for (const element of node.elements) {
|
||||
visit(element, source);
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case SyntaxKind.StructExpression:
|
||||
{
|
||||
for (const member of node.members) {
|
||||
switch (member.kind) {
|
||||
case SyntaxKind.PunnedStructExpressionField:
|
||||
{
|
||||
addReference(node.getScope(), member.name.text);
|
||||
break;
|
||||
}
|
||||
case SyntaxKind.StructExpressionField:
|
||||
{
|
||||
visit(member.expression, source);
|
||||
break;
|
||||
};
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case SyntaxKind.NestedExpression:
|
||||
{
|
||||
visit(node.expression, source);
|
||||
break;
|
||||
}
|
||||
|
||||
case SyntaxKind.InfixExpression:
|
||||
{
|
||||
visit(node.left, source);
|
||||
visit(node.right, source);
|
||||
break;
|
||||
}
|
||||
|
||||
case SyntaxKind.CallExpression:
|
||||
{
|
||||
visit(node.func, source);
|
||||
for (const arg of node.args) {
|
||||
visit(arg, source);
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case SyntaxKind.IfStatement:
|
||||
{
|
||||
for (const cs of node.cases) {
|
||||
if (cs.test !== null) {
|
||||
visit(cs.test, source);
|
||||
}
|
||||
for (const element of cs.elements) {
|
||||
visit(element, source);
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case SyntaxKind.ExpressionStatement:
|
||||
{
|
||||
visit(node.expression, source);
|
||||
break;
|
||||
}
|
||||
|
||||
case SyntaxKind.ReturnStatement:
|
||||
{
|
||||
if (node.expression !== null) {
|
||||
visit(node.expression, source);
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case SyntaxKind.LetDeclaration:
|
||||
{
|
||||
this.referenceGraph.addVertex(node);
|
||||
if (node.body !== null) {
|
||||
switch (node.body.kind) {
|
||||
case SyntaxKind.ExprBody:
|
||||
{
|
||||
visit(node.body.expression, node);
|
||||
break;
|
||||
}
|
||||
case SyntaxKind.BlockBody:
|
||||
{
|
||||
for (const element of node.body.elements) {
|
||||
visit(element, node);
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case SyntaxKind.TypeDeclaration:
|
||||
case SyntaxKind.EnumDeclaration:
|
||||
case SyntaxKind.StructDeclaration:
|
||||
break;
|
||||
|
||||
default:
|
||||
throw new Error(`Unexpected ${node.constructor.name}`);
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
visit(node, null);
|
||||
|
||||
}
|
||||
|
||||
public isReferencedInParentScope(node: LetDeclaration): boolean {
|
||||
const maxDepth = node.getScope().depth;
|
||||
for (const other of this.referenceGraph.getSourceVertices(node)) {
|
||||
if (other.getScope().depth < maxDepth) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets a sorted list of collections where each collection contains
|
||||
* let-declarations that reference each other in some way or another.
|
||||
*
|
||||
* The declarations are sorted in such a way that declarations that reference
|
||||
* nothing come before declarations that reference another declaration. When
|
||||
* a let-declaration is not recursive, it will simply show up as a collection
|
||||
* with only one element.
|
||||
*/
|
||||
public getSortedDeclarations(): Iterable<LetDeclaration[]> {
|
||||
return strongconnect(this.referenceGraph);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@@ -1,227 +0,0 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
import "source-map-support/register"
|
||||
import "reflect-metadata"
|
||||
|
||||
import fs from "fs"
|
||||
import util from "util"
|
||||
import path from "path"
|
||||
import { Command } from "commander"
|
||||
|
||||
import { PassManager, Program, TargetType } from "../program"
|
||||
import { TypeclassDictPassing } from "../passes/TypeclassDictPass"
|
||||
import BoltToC from "../passes/BoltToC"
|
||||
import BoltToJS from "../passes/BoltToJS"
|
||||
import { stripExtension } from "../util"
|
||||
import { sync as which } from "which"
|
||||
import { spawnSync } from "child_process"
|
||||
import { ConsoleDiagnostics, DiagnosticKind, DiagnosticStore, TypeMismatchDiagnostic } from "../diagnostics"
|
||||
import { Syntax, SyntaxKind, TextFile, isExpression, visitEachChild } from "../cst"
|
||||
import { Analyser, Checker, parseSourceFile } from ".."
|
||||
import { typesEqual } from "../types"
|
||||
|
||||
function debug(value: any) {
|
||||
console.error(util.inspect(value, { colors: true, depth: Infinity }));
|
||||
}
|
||||
|
||||
// The positions of all program arguments which are not flags will be parsed
|
||||
// into this structure.
|
||||
const commandIndices = [];
|
||||
|
||||
for (let i = 2; i < process.argv.length; i++) {
|
||||
const arg = process.argv[i];
|
||||
if (!arg.startsWith('-')) {
|
||||
commandIndices.push(i);
|
||||
}
|
||||
}
|
||||
|
||||
// Iterate in reverse over the command indices, such that bolt-self-test-run
|
||||
// gets precedence over bolt-self-test
|
||||
for (let i = commandIndices.length-1; i >= 0; i--) {
|
||||
|
||||
const argvIndex = commandIndices[i];
|
||||
|
||||
// Construct the binary name from the parts of which we stored the locations in `commandIndices`.
|
||||
// Build from the first command up until the command at index `i`
|
||||
const binaryName = 'bolt-' + commandIndices.slice(0, i+1).map(index => process.argv[index]).join('-');
|
||||
|
||||
const binaryPath = which(binaryName, { nothrow: true });
|
||||
|
||||
// Reconstruct the args list without the commands in `binaryName`
|
||||
const argv = [];
|
||||
for (let i = 2; i < argvIndex; i++) {
|
||||
const arg = process.argv[i];
|
||||
if (arg.startsWith('-')) {
|
||||
argv.push(arg);
|
||||
}
|
||||
}
|
||||
for (let i = argvIndex+1; i < process.argv.length; i++) {
|
||||
argv.push(process.argv[i]);
|
||||
}
|
||||
|
||||
// Only execute and return if the command was actually found. Otherwise, try
|
||||
// the other possible commands or execute the default program if this was the
|
||||
// last iteration.
|
||||
if (binaryPath) {
|
||||
const exitCode = spawnSync(binaryPath, argv, { stdio: 'inherit' }).status;
|
||||
process.exit(exitCode || 0);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
const program = new Command();
|
||||
|
||||
program
|
||||
.name('bolt')
|
||||
.description('The official Bolt language compiler')
|
||||
.version('0.0.1')
|
||||
.option('-C, --work-dir', 'Act as if run from this directory', '.');
|
||||
|
||||
program.command('build', { isDefault: true })
|
||||
.description('Build a set of Bolt sources')
|
||||
.argument('<file>', 'Path to the Bolt program to compile')
|
||||
.option('-C, --work-dir', 'Act as if run from this directory', '.')
|
||||
.option('--no-typecheck', 'Skip type-checking')
|
||||
.option('--no-emit', 'Do not output compiled files')
|
||||
.option('-t, --target <target-id>', 'What to compile to', 'c')
|
||||
.action((fileName, opts) => {
|
||||
|
||||
const cwd = opts.workDir;
|
||||
const filePath = path.resolve(cwd, fileName);
|
||||
const shouldTypecheck = opts.typecheck;
|
||||
const shouldEmit = opts.emit;
|
||||
|
||||
let targetType: TargetType;
|
||||
switch (opts.target) {
|
||||
case 'bolt':
|
||||
targetType = TargetType.Bolt;
|
||||
break;
|
||||
case 'js':
|
||||
targetType = TargetType.JS;
|
||||
break;
|
||||
case 'c':
|
||||
targetType = TargetType.C;
|
||||
break;
|
||||
default:
|
||||
console.error(`Invalid target '${opts.target}' provided.`);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const program = new Program([ filePath ]);
|
||||
if (program.diagnostics.hasError) {
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
if (shouldTypecheck) {
|
||||
program.check();
|
||||
if (program.diagnostics.hasError) {
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
if (shouldEmit) {
|
||||
|
||||
const passes = new PassManager();
|
||||
passes.add(TypeclassDictPassing);
|
||||
|
||||
let suffix;
|
||||
switch (targetType) {
|
||||
case TargetType.Bolt:
|
||||
suffix = '.gen.bolt';
|
||||
break;
|
||||
case TargetType.C:
|
||||
suffix = '.c';
|
||||
passes.add(BoltToC);
|
||||
break;
|
||||
case TargetType.JS:
|
||||
suffix = '.js'
|
||||
passes.add(BoltToJS);
|
||||
break;
|
||||
}
|
||||
|
||||
for (const sourceFile of program.getSourceFiles()) {
|
||||
const code = passes.apply(sourceFile);
|
||||
const targetFilePath = stripExtension(sourceFile.getFile().getFullPath()) + suffix;
|
||||
const file = fs.createWriteStream(targetFilePath, 'utf-8');
|
||||
code.emit(file);
|
||||
}
|
||||
|
||||
if (program.diagnostics.hasError) {
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
});
|
||||
|
||||
program.command('verify', { hidden: true })
|
||||
.description('Run verification tests')
|
||||
.argument('<file>', 'File with verification source')
|
||||
.action((fileName, _opts) => {
|
||||
|
||||
const diagnostics = new DiagnosticStore();
|
||||
const realPath = path.resolve(fileName);
|
||||
const text = fs.readFileSync(realPath, 'utf-8');
|
||||
const file = new TextFile(fileName, text);
|
||||
|
||||
const sourceFile = parseSourceFile(file, diagnostics);
|
||||
if (!sourceFile) {
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const analyser = new Analyser();
|
||||
analyser.addSourceFile(sourceFile);
|
||||
const checker = new Checker(analyser, diagnostics);
|
||||
checker.check(sourceFile);
|
||||
|
||||
const realDiagnostics = new ConsoleDiagnostics();
|
||||
|
||||
let annotationTotalCount = 0;
|
||||
let annotationErrorCount = 0;
|
||||
let diagnosticTotalCount = diagnostics.size;
|
||||
let diagnosticErrorCount = 0;
|
||||
|
||||
const visit = (node: Syntax) => {
|
||||
if (isExpression(node)) {
|
||||
for (const annotation of node.annotations) {
|
||||
if (annotation.kind === SyntaxKind.TypeAnnotation) {
|
||||
const actual = checker.getTypeOfNode(node);
|
||||
const expected = checker.getTypeOfNode(annotation.typeExpr);
|
||||
if (!typesEqual(actual, expected)) {
|
||||
realDiagnostics.add(new TypeMismatchDiagnostic(actual, expected, [ node ], []));
|
||||
annotationErrorCount++;
|
||||
}
|
||||
annotationTotalCount++;
|
||||
}
|
||||
}
|
||||
}
|
||||
visitEachChild(node, visit);
|
||||
}
|
||||
visit(sourceFile);
|
||||
|
||||
const uncaughtDiagnostics = new Set(diagnostics);
|
||||
|
||||
// TODO check comments that did not match any diagnostic
|
||||
for (const [line, comment] of file.comments) {
|
||||
if (comment[0].kind === SyntaxKind.At && comment[1].kind === SyntaxKind.Identifier && comment[1].text === 'expect_diagnostic' && comment[2].kind === SyntaxKind.StringLiteral) {
|
||||
for (const diagnostic of uncaughtDiagnostics) {
|
||||
if (diagnostic.position && diagnostic.position.line === line+1 && DiagnosticKind[diagnostic.kind] === comment[2].contents) {
|
||||
uncaughtDiagnostics.delete(diagnostic);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (const diagnostic of uncaughtDiagnostics) {
|
||||
realDiagnostics.add(diagnostic);
|
||||
}
|
||||
|
||||
console.log(`${annotationTotalCount} type annotation(s) verified, ${annotationErrorCount} error(s).`);
|
||||
console.log(`${diagnosticTotalCount} diagnostic(s) generated, ${uncaughtDiagnostics.size} unexpected.`);
|
||||
if (realDiagnostics.hasError) {
|
||||
process.exit(1);
|
||||
}
|
||||
});
|
||||
|
||||
program.parse();
|
||||
|
|
@@ -1,387 +0,0 @@
|
|||
|
||||
import type stream from "stream"
|
||||
import { IndentWriter } from "./util";
|
||||
|
||||
export const enum CNodeKind {
|
||||
|
||||
// Types
|
||||
BuiltinType,
|
||||
|
||||
// Statements
|
||||
ExprStmt,
|
||||
RetStmt,
|
||||
|
||||
// Expressions
|
||||
CallExpr,
|
||||
RefExpr,
|
||||
ConstExpr,
|
||||
|
||||
// Declarations
|
||||
TypeDecl,
|
||||
VarDecl,
|
||||
FuncDecl,
|
||||
|
||||
// Directives
|
||||
IncDir,
|
||||
|
||||
// Other nodes
|
||||
Program,
|
||||
|
||||
}
|
||||
|
||||
export const enum CBuiltinTypeKind {
|
||||
Char,
|
||||
Short,
|
||||
Int,
|
||||
Long,
|
||||
LongLong,
|
||||
UnsignedChar,
|
||||
UnsignedShort,
|
||||
UnsignedInt,
|
||||
UnsignedLong,
|
||||
UnsignedLongLong,
|
||||
}
|
||||
|
||||
abstract class CNodeBase {
|
||||
|
||||
public abstract readonly kind: CNodeKind;
|
||||
|
||||
public emit(file: stream.Writable): void {
|
||||
const emitter = new CEmitter(file);
|
||||
emitter.emit(this as any);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export class CBuiltinType extends CNodeBase {
|
||||
|
||||
public readonly kind = CNodeKind.BuiltinType;
|
||||
|
||||
public constructor(
|
||||
public typeKind: CBuiltinTypeKind,
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export type CType
|
||||
= CBuiltinType
|
||||
|
||||
export class CRefExpr extends CNodeBase {
|
||||
|
||||
public readonly kind = CNodeKind.RefExpr;
|
||||
|
||||
public constructor(
|
||||
public name: string
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export class CCallExpr extends CNodeBase {
|
||||
|
||||
public readonly kind = CNodeKind.CallExpr;
|
||||
|
||||
public constructor(
|
||||
public operator: CExpr,
|
||||
public args: CExpr[],
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export class CConstExpr extends CNodeBase {
|
||||
|
||||
public readonly kind = CNodeKind.ConstExpr;
|
||||
|
||||
public constructor(
|
||||
public value: bigint | string | boolean,
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export type CExpr
|
||||
= CRefExpr
|
||||
| CCallExpr
|
||||
| CConstExpr
|
||||
;
|
||||
|
||||
export class CRetStmt extends CNodeBase {
|
||||
|
||||
public readonly kind = CNodeKind.RetStmt;
|
||||
|
||||
public constructor(
|
||||
public value: CExpr | null,
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export class CExprStmt extends CNodeBase {
|
||||
|
||||
public readonly kind = CNodeKind.ExprStmt;
|
||||
|
||||
public constructor(
|
||||
public expr: CExpr,
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export type CStmt
|
||||
= CExprStmt
|
||||
| CRetStmt;
|
||||
|
||||
export class CTypeDecl extends CNodeBase {
|
||||
|
||||
public readonly kind = CNodeKind.TypeDecl;
|
||||
|
||||
public constructor(
|
||||
public name: string,
|
||||
public type: CType,
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export class CFuncDecl extends CNodeBase {
|
||||
|
||||
public readonly kind = CNodeKind.FuncDecl;
|
||||
|
||||
public constructor(
|
||||
public returnType: CType,
|
||||
public name: string,
|
||||
public params: Array<[CType, string]>,
|
||||
public body: CStmt[] | null,
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export class CVarDecl extends CNodeBase {
|
||||
|
||||
public readonly kind = CNodeKind.VarDecl;
|
||||
|
||||
public constructor(
|
||||
public isExtern: boolean,
|
||||
public type: CType,
|
||||
public name: string,
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export type CDecl
|
||||
= CTypeDecl
|
||||
| CVarDecl
|
||||
| CFuncDecl
|
||||
|
||||
export class CIncDir extends CNodeBase {
|
||||
|
||||
public readonly kind = CNodeKind.IncDir;
|
||||
|
||||
public constructor(
|
||||
public filePath: string,
|
||||
public isSystem = false,
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export type CDir
|
||||
= CIncDir;
|
||||
|
||||
export class CProgram extends CNodeBase {
|
||||
|
||||
public readonly kind = CNodeKind.Program;
|
||||
|
||||
public constructor(
|
||||
public elements: (CDecl | CDir)[],
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export type CNode
|
||||
= CDecl
|
||||
| CDir
|
||||
| CStmt
|
||||
| CExpr
|
||||
| CType
|
||||
| CProgram
|
||||
|
||||
export class CEmitter {
|
||||
|
||||
private writer: IndentWriter;
|
||||
|
||||
public constructor(
|
||||
public stream: stream.Writable,
|
||||
) {
|
||||
this.writer = new IndentWriter(stream);
|
||||
}
|
||||
|
||||
public emit(node: CNode): void {
|
||||
|
||||
switch (node.kind) {
|
||||
|
||||
case CNodeKind.Program:
|
||||
{
|
||||
for (const element of node.elements) {
|
||||
this.emit(element);
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case CNodeKind.IncDir:
|
||||
{
|
||||
this.writer.write('#include ');
|
||||
this.writer.write(node.isSystem ? '<' : '"');
|
||||
this.writer.write(node.filePath);
|
||||
this.writer.write(node.isSystem ? '>' : '"');
|
||||
this.writer.write('\n\n');
|
||||
break;
|
||||
}
|
||||
|
||||
case CNodeKind.BuiltinType:
|
||||
{
|
||||
switch (node.typeKind) {
|
||||
case CBuiltinTypeKind.Char:
|
||||
this.writer.write('char');
|
||||
break;
|
||||
case CBuiltinTypeKind.Short:
|
||||
this.writer.write('short');
|
||||
break;
|
||||
case CBuiltinTypeKind.Int:
|
||||
this.writer.write('int');
|
||||
break;
|
||||
case CBuiltinTypeKind.Long:
|
||||
this.writer.write('long');
|
||||
break;
|
||||
case CBuiltinTypeKind.LongLong:
|
||||
this.writer.write('long long');
|
||||
break;
|
||||
case CBuiltinTypeKind.UnsignedChar:
|
||||
this.writer.write('unsigned char');
|
||||
break;
|
||||
case CBuiltinTypeKind.UnsignedShort:
|
||||
this.writer.write('unsigned short');
|
||||
break;
|
||||
case CBuiltinTypeKind.UnsignedInt:
|
||||
this.writer.write('unsigned int');
|
||||
break;
|
||||
case CBuiltinTypeKind.UnsignedLong:
|
||||
this.writer.write('unsigned long');
|
||||
break;
|
||||
case CBuiltinTypeKind.UnsignedLongLong:
|
||||
this.writer.write('unsigned long long');
|
||||
break;
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case CNodeKind.FuncDecl:
|
||||
{
|
||||
this.emit(node.returnType);
|
||||
this.writer.write(' ' + node.name + '(');
|
||||
let count = 0;
|
||||
for (const [type, name] of node.params) {
|
||||
this.emit(type);
|
||||
this.writer.write(' ' + name);
|
||||
if (count++ > 0) {
|
||||
this.writer.write(', ');
|
||||
}
|
||||
}
|
||||
this.writer.write(') {\n');
|
||||
this.writer.indent();
|
||||
if (node.body !== null) {
|
||||
for (const element of node.body) {
|
||||
this.emit(element);
|
||||
}
|
||||
}
|
||||
this.writer.dedent();
|
||||
this.writer.write('}\n\n');
|
||||
break;
|
||||
}
|
||||
|
||||
case CNodeKind.ExprStmt:
|
||||
this.emit(node.expr);
|
||||
this.writer.write(';\n');
|
||||
break;
|
||||
|
||||
case CNodeKind.RetStmt:
|
||||
{
|
||||
this.writer.write('return');
|
||||
if (node.value !== null) {
|
||||
this.writer.write(' ');
|
||||
this.emit(node.value);
|
||||
}
|
||||
this.writer.write(';\n');
|
||||
break;
|
||||
}
|
||||
|
||||
case CNodeKind.RefExpr:
|
||||
this.writer.write(node.name);
|
||||
break;
|
||||
|
||||
case CNodeKind.CallExpr:
|
||||
{
|
||||
this.emit(node.operator);
|
||||
this.writer.write('(');
|
||||
let count = 0;
|
||||
for (const arg of node.args) {
|
||||
this.emit(arg);
|
||||
if (count++ > 0) {
|
||||
this.writer.write(', ');
|
||||
}
|
||||
}
|
||||
this.writer.write(')');
|
||||
break;
|
||||
}
|
||||
|
||||
case CNodeKind.ConstExpr:
|
||||
{
|
||||
if (typeof(node.value) === 'string') {
|
||||
this.writer.write('"');
|
||||
for (const ch of node.value) {
|
||||
switch (ch) {
|
||||
case '\b': this.writer.write('\\b'); break;
|
||||
case '\f': this.writer.write('\\f'); break;
|
||||
case '\n': this.writer.write('\\n'); break;
|
||||
case '\r': this.writer.write('\\r'); break;
|
||||
case '\t': this.writer.write('\\t'); break;
|
||||
case '\v': this.writer.write('\\v'); break;
|
||||
case '\0': this.writer.write('\\0'); break;
|
||||
case '\'': this.writer.write('\\\''); break;
|
||||
case '"': this.writer.write('\\"'); break;
|
||||
default: this.writer.write(ch); break;
|
||||
}
|
||||
}
|
||||
this.writer.write('"');
|
||||
} else if (typeof(node.value) === 'bigint') {
|
||||
this.writer.write(node.value.toString());
|
||||
} else {
|
||||
throw new Error(`Unexpected type of value in CConstExpr`);
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
default:
|
||||
throw new Error(`Unexpected ${node.constructor.name}`);
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
File diff suppressed because it is too large

@@ -1,171 +0,0 @@
|
|||
|
||||
import { InspectOptions } from "util";
|
||||
import { Syntax } from "./cst"
|
||||
import { TVSub, TVar, Type } from "./types";
|
||||
import { first, InspectFn, last, toStringTag } from "./util";
|
||||
|
||||
export const enum ConstraintKind {
|
||||
Equal,
|
||||
// Class,
|
||||
Many,
|
||||
Empty,
|
||||
}
|
||||
|
||||
abstract class ConstraintBase {
|
||||
|
||||
public constructor(
|
||||
public node: Syntax | null = null
|
||||
) {
|
||||
|
||||
}
|
||||
|
||||
public prevInstantiation: Constraint | null = null;
|
||||
|
||||
public *getNodes(): Iterable<Syntax> {
|
||||
let curr: Constraint | null = this as any;
|
||||
while (curr !== null) {
|
||||
if (curr.node !== null) {
|
||||
yield curr.node;
|
||||
}
|
||||
curr = curr.prevInstantiation;
|
||||
}
|
||||
}
|
||||
|
||||
public get lastNode(): Syntax | null {
|
||||
return last(this.getNodes()[Symbol.iterator]()) ?? null;
|
||||
}
|
||||
|
||||
public get firstNode(): Syntax | null {
|
||||
return first(this.getNodes()[Symbol.iterator]()) ?? null;
|
||||
}
|
||||
|
||||
public abstract freeTypeVars(): Iterable<TVar>;
|
||||
|
||||
public abstract substitute(sub: TVSub): Constraint;
|
||||
|
||||
}
|
||||
|
||||
export class CEqual extends ConstraintBase {
|
||||
|
||||
public readonly kind = ConstraintKind.Equal;
|
||||
|
||||
public constructor(
|
||||
public left: Type,
|
||||
public right: Type,
|
||||
public node: Syntax | null,
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
public substitute(sub: TVSub): CEqual {
|
||||
return new CEqual(
|
||||
this.left.substitute(sub),
|
||||
this.right.substitute(sub),
|
||||
this.node,
|
||||
);
|
||||
}
|
||||
|
||||
public *freeTypeVars(): Iterable<TVar> {
|
||||
yield* this.left.getTypeVars();
|
||||
yield* this.right.getTypeVars();
|
||||
}
|
||||
|
||||
public [toStringTag](_currentDepth: number, options: InspectOptions, inspect: InspectFn): string {
|
||||
return inspect(this.left, options) + ' ~ ' + inspect(this.right, options);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export class CMany extends ConstraintBase {
|
||||
|
||||
public readonly kind = ConstraintKind.Many;
|
||||
|
||||
public constructor(
|
||||
public elements: Constraint[]
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
public substitute(sub: TVSub): CMany {
|
||||
const newElements = [];
|
||||
for (const element of this.elements) {
|
||||
newElements.push(element.substitute(sub));
|
||||
}
|
||||
return new CMany(newElements);
|
||||
}
|
||||
|
||||
public *freeTypeVars(): Iterable<TVar> {
|
||||
for (const element of this.elements) {
|
||||
yield* element.freeTypeVars();
|
||||
}
|
||||
}
|
||||
|
||||
public [toStringTag](_depth: number, opts: InspectOptions, inspect: InspectFn): string {
|
||||
if (this.elements.length === 0) {
|
||||
return '[]';
|
||||
}
|
||||
let out = '[\n';
|
||||
out += this.elements.map(constraint => ' ' + inspect(constraint, opts)).join('\n');
|
||||
out += '\n]';
|
||||
return out;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// export class CClass extends ConstraintBase {
|
||||
|
||||
// public readonly kind = ConstraintKind.Class;
|
||||
|
||||
// public constructor(
|
||||
// public className: string,
|
||||
// public type: Type,
|
||||
// public node: Syntax,
|
||||
// ) {
|
||||
// super();
|
||||
// }
|
||||
|
||||
// public substitute(sub: TVSub): Constraint {
|
||||
// return new CClass(
|
||||
// this.className,
|
||||
// this.type.substitute(sub),
|
||||
// this.node,
|
||||
// );
|
||||
// }
|
||||
|
||||
// public *freeTypeVars(): Iterable<TVar> {
|
||||
// yield* this.type.getTypeVars();
|
||||
// }
|
||||
|
||||
// public [toStringTag](_depth: number, opts: InspectOptions, inspect: InspectFn): string {
|
||||
// return this.className + ' => ' + inspect(this.type, opts);
|
||||
// }
|
||||
|
||||
// }
|
||||
|
||||
export class CEmpty extends ConstraintBase {
|
||||
|
||||
public readonly kind = ConstraintKind.Empty;
|
||||
|
||||
public substitute(_sub: TVSub): CEmpty {
|
||||
return this;
|
||||
}
|
||||
|
||||
public *freeTypeVars(): Iterable<TVar> {
|
||||
|
||||
}
|
||||
|
||||
public [toStringTag]() {
|
||||
return 'ε';
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export type Constraint
|
||||
= CEqual
|
||||
// | CClass
|
||||
| CMany
|
||||
| CEmpty
|
||||
|
||||
export class ConstraintSet extends Array<Constraint> {
|
||||
|
||||
}
|
File diff suppressed because it is too large

@@ -1,667 +0,0 @@
|
|||
|
||||
import { Kind, KindType } from "./checker";
|
||||
import { type Type, TypeKind, labelTag } from "./types"
|
||||
import { ClassConstraint, ClassDeclaration, IdentifierAlt, InstanceDeclaration, Syntax, SyntaxKind, TextFile, TextPosition, TextRange, Token } from "./cst";
|
||||
import { assert, assertNever, countDigits, IndentWriter } from "./util";
|
||||
import { unwatchFile } from "fs";
|
||||
import { warn } from "console";
|
||||
|
||||
const ANSI_RESET = "\u001b[0m"
|
||||
const ANSI_BOLD = "\u001b[1m"
|
||||
const ANSI_UNDERLINE = "\u001b[4m"
|
||||
const ANSI_REVERSED = "\u001b[7m"
|
||||
|
||||
const ANSI_FG_BLACK = "\u001b[30m"
|
||||
const ANSI_FG_RED = "\u001b[31m"
|
||||
const ANSI_FG_GREEN = "\u001b[32m"
|
||||
const ANSI_FG_YELLOW = "\u001b[33m"
|
||||
const ANSI_FG_BLUE = "\u001b[34m"
|
||||
const ANSI_FG_CYAN = "\u001b[35m"
|
||||
const ANSI_FG_MAGENTA = "\u001b[36m"
|
||||
const ANSI_FG_WHITE = "\u001b[37m"
|
||||
|
||||
const ANSI_BG_BLACK = "\u001b[40m"
|
||||
const ANSI_BG_RED = "\u001b[41m"
|
||||
const ANSI_BG_GREEN = "\u001b[42m"
|
||||
const ANSI_BG_YELLOW = "\u001b[43m"
|
||||
const ANSI_BG_BLUE = "\u001b[44m"
|
||||
const ANSI_BG_CYAN = "\u001b[45m"
|
||||
const ANSI_BG_MAGENTA = "\u001b[46m"
|
||||
const ANSI_BG_WHITE = "\u001b[47m"
|
||||
|
||||
const enum Level {
|
||||
Debug,
|
||||
Verbose,
|
||||
Info,
|
||||
Warning,
|
||||
Error,
|
||||
Fatal,
|
||||
}
|
||||
|
||||
export enum DiagnosticKind {
|
||||
UnexpectedChar,
|
||||
UnexpectedToken,
|
||||
KindMismatch,
|
||||
TypeMismatch,
|
||||
TupleIndexOutOfRange,
|
||||
TypeclassNotFound,
|
||||
TypeclassDecaredTwice,
|
||||
TypeclassNotImplemented,
|
||||
BindingNotFound,
|
||||
ModuleNotFound,
|
||||
FieldNotFound,
|
||||
}
|
||||
|
||||
abstract class DiagnosticBase {
|
||||
|
||||
public abstract readonly kind: DiagnosticKind;
|
||||
|
||||
public abstract level: Level;
|
||||
|
||||
public abstract position: TextPosition | undefined;
|
||||
|
||||
}
|
||||
|
||||
export class UnexpectedCharDiagnostic extends DiagnosticBase {
|
||||
|
||||
public readonly kind = DiagnosticKind.UnexpectedChar;
|
||||
|
||||
public level = Level.Error;
|
||||
|
||||
public constructor(
|
||||
public file: TextFile,
|
||||
public position: TextPosition,
|
||||
public actual: string,
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
export class UnexpectedTokenDiagnostic extends DiagnosticBase {
|
||||
|
||||
public readonly kind = DiagnosticKind.UnexpectedToken;
|
||||
|
||||
public level = Level.Error;
|
||||
|
||||
public constructor(
|
||||
public file: TextFile,
|
||||
public actual: Token,
|
||||
public expected: SyntaxKind[],
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
public get position(): TextPosition {
|
||||
return this.actual.getStartPosition();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export class TypeclassDeclaredTwiceDiagnostic extends DiagnosticBase {
|
||||
|
||||
public readonly kind = DiagnosticKind.TypeclassDecaredTwice;
|
||||
|
||||
public level = Level.Error;
|
||||
|
||||
public constructor(
|
||||
public name: IdentifierAlt,
|
||||
public origDecl: ClassDeclaration,
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
public get position(): TextPosition {
|
||||
return this.name.getStartPosition();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export class TypeclassNotFoundDiagnostic extends DiagnosticBase {
|
||||
|
||||
public readonly kind = DiagnosticKind.TypeclassNotFound;
|
||||
|
||||
public level = Level.Error;
|
||||
|
||||
public constructor(
|
||||
public name: string,
|
||||
public node: Syntax | null = null,
|
||||
public origin: InstanceDeclaration | ClassConstraint | null = null,
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
public get position(): TextPosition | undefined {
|
||||
return this.node?.getFirstToken().getStartPosition();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export class TypeclassNotImplementedDiagnostic extends DiagnosticBase {
|
||||
|
||||
public readonly kind = DiagnosticKind.TypeclassNotImplemented;
|
||||
|
||||
public level = Level.Error;
|
||||
|
||||
public constructor(
|
||||
public name: string,
|
||||
public type: Type,
|
||||
public node: Syntax | null = null,
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
public get position(): TextPosition | undefined {
|
||||
return this.node?.getFirstToken().getStartPosition();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export class BindingNotFoundDiagnostic extends DiagnosticBase {
|
||||
|
||||
public readonly kind = DiagnosticKind.BindingNotFound;
|
||||
|
||||
public level = Level.Error;
|
||||
|
||||
public constructor(
|
||||
public modulePath: string[],
|
||||
public name: string,
|
||||
public node: Syntax,
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
public get position(): TextPosition {
|
||||
return this.node.getFirstToken().getStartPosition();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export class TypeMismatchDiagnostic extends DiagnosticBase {
|
||||
|
||||
public readonly kind = DiagnosticKind.TypeMismatch;
|
||||
|
||||
public level = Level.Error;
|
||||
|
||||
public constructor(
|
||||
public left: Type,
|
||||
public right: Type,
|
||||
public trace: Syntax[],
|
||||
public fieldPath: (string | number)[],
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
public get position(): TextPosition | undefined {
|
||||
return this.trace[0]?.getFirstToken().getStartPosition();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export class FieldNotFoundDiagnostic extends DiagnosticBase {
|
||||
|
||||
public readonly kind = DiagnosticKind.FieldNotFound;
|
||||
|
||||
public level = Level.Error;
|
||||
|
||||
public constructor(
|
||||
public fieldName: string | number,
|
||||
public missing: Syntax | null,
|
||||
public present: Syntax | null,
|
||||
public cause: Syntax | null = null,
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
public get position(): TextPosition | undefined {
|
||||
return this.cause?.getFirstToken().getStartPosition();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export class KindMismatchDiagnostic extends DiagnosticBase {
|
||||
|
||||
public readonly kind = DiagnosticKind.KindMismatch;
|
||||
|
||||
public level = Level.Error;
|
||||
|
||||
public constructor(
|
||||
public left: Kind,
|
||||
public right: Kind,
|
||||
public origin: Syntax | null,
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
public get position(): TextPosition | undefined {
|
||||
return this.origin?.getFirstToken().getStartPosition();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export class ModuleNotFoundDiagnostic extends DiagnosticBase {
|
||||
|
||||
public readonly kind = DiagnosticKind.ModuleNotFound;
|
||||
|
||||
public level = Level.Error;
|
||||
|
||||
public constructor(
|
||||
public modulePath: string[],
|
||||
public node: Syntax,
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
public get position(): TextPosition | undefined {
|
||||
return this.node.getFirstToken().getStartPosition();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export type Diagnostic
|
||||
= UnexpectedCharDiagnostic
|
||||
| TypeclassNotFoundDiagnostic
|
||||
| TypeclassDeclaredTwiceDiagnostic
|
||||
| TypeclassNotImplementedDiagnostic
|
||||
| BindingNotFoundDiagnostic
|
||||
| TypeMismatchDiagnostic
|
||||
| UnexpectedTokenDiagnostic
|
||||
| FieldNotFoundDiagnostic
|
||||
| KindMismatchDiagnostic
|
||||
| ModuleNotFoundDiagnostic
|
||||
|
||||
export interface Diagnostics {
|
||||
readonly hasError: boolean;
|
||||
readonly hasFatal: boolean;
|
||||
add(diagnostic: Diagnostic): void;
|
||||
}
|
||||
|
||||
export class DiagnosticStore implements Diagnostics {
|
||||
|
||||
private storage: Diagnostic[] = [];
|
||||
|
||||
public hasError = false;
|
||||
public hasFatal = false;
|
||||
|
||||
public get size(): number {
|
||||
return this.storage.length;
|
||||
}
|
||||
|
||||
public add(diagnostic: Diagnostic): void {
|
||||
this.storage.push(diagnostic);
|
||||
if (diagnostic.level >= Level.Error) {
|
||||
this.hasError = true;
|
||||
}
|
||||
if (diagnostic.level >= Level.Fatal) {
|
||||
this.hasFatal = true;
|
||||
}
|
||||
}
|
||||
|
||||
public [Symbol.iterator](): IterableIterator<Diagnostic> {
|
||||
return this.storage[Symbol.iterator]();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export class ConsoleDiagnostics implements Diagnostics {
|
||||
|
||||
private writer = new IndentWriter(process.stderr);
|
||||
|
||||
public hasError = false;
|
||||
public hasFatal = false;
|
||||
|
||||
public add(diagnostic: Diagnostic): void {
|
||||
|
||||
if (diagnostic.level >= Level.Error) {
|
||||
this.hasError = true;
|
||||
}
|
||||
if (diagnostic.level >= Level.Fatal) {
|
||||
this.hasFatal = true;
|
||||
}
|
||||
|
||||
switch (diagnostic.level) {
|
||||
case Level.Fatal:
|
||||
this.writer.write(ANSI_FG_RED + ANSI_BOLD + 'fatal: ' + ANSI_RESET);
|
||||
break;
|
||||
case Level.Error:
|
||||
this.writer.write(ANSI_FG_RED + ANSI_BOLD + 'error: ' + ANSI_RESET);
|
||||
break;
|
||||
case Level.Warning:
|
||||
this.writer.write(ANSI_FG_RED + ANSI_BOLD + 'warning: ' + ANSI_RESET);
|
||||
break;
|
||||
case Level.Info:
|
||||
this.writer.write(ANSI_FG_YELLOW + ANSI_BOLD + 'info: ' + ANSI_RESET);
|
||||
break;
|
||||
case Level.Verbose:
|
||||
this.writer.write(ANSI_FG_CYAN + ANSI_BOLD + 'verbose: ' + ANSI_RESET);
|
||||
break;
|
||||
}
|
||||
|
||||
switch (diagnostic.kind) {
|
||||
|
||||
case DiagnosticKind.UnexpectedChar:
|
||||
const endPos = diagnostic.position.clone();
|
||||
endPos.advance(diagnostic.actual);
|
||||
this.writer.write(`unexpected character sequence '${diagnostic.actual}'.\n\n`);
|
||||
this.writer.write(printExcerpt(diagnostic.file, new TextRange(diagnostic.position, endPos)) + '\n');
|
||||
break;
|
||||
|
||||
case DiagnosticKind.UnexpectedToken:
|
||||
this.writer.write(`expected ${describeExpected(diagnostic.expected)} but got ${describeActual(diagnostic.actual)}\n\n`);
|
||||
this.writer.write(printExcerpt(diagnostic.file, diagnostic.actual.getRange()) + '\n');
|
||||
break;
|
||||
|
||||
case DiagnosticKind.TypeclassDecaredTwice:
|
||||
this.writer.write(`type class '${diagnostic.name.text}' was already declared somewhere else.\n\n`);
|
||||
this.writer.write(ANSI_FG_YELLOW + ANSI_BOLD + 'info: ' + ANSI_RESET);
|
||||
this.writer.write(`type class '${diagnostic.name.text}' is already declared here\n\n`);
|
||||
this.writer.write(printNode(diagnostic.origDecl) + '\n');
|
||||
break;
|
||||
|
||||
case DiagnosticKind.TypeclassNotFound:
|
||||
this.writer.write(`the type class ${ANSI_FG_MAGENTA + diagnostic.name + ANSI_RESET} was not found.\n\n`);
|
||||
if (diagnostic.node !== null) {
|
||||
this.writer.write(printNode(diagnostic.node) + '\n');
|
||||
}
|
||||
// if (diagnostic.origin !== null) {
|
||||
// this.writer.indent();
|
||||
// this.writer.write(ANSI_FG_YELLOW + ANSI_BOLD + 'info: ' + ANSI_RESET);
|
||||
// this.writer.write(`${ANSI_FG_MAGENTA + diagnostic.name + ANSI_RESET} is required by ${ANSI_FG_MAGENTA + diagnostic.origin.name.text + ANSI_RESET}\n\n`);
|
||||
// this.writer.write(printNode(diagnostic.origin.name) + '\n');
|
||||
// this.writer.dedent();
|
||||
// }
|
||||
break;
|
||||
|
||||
case DiagnosticKind.BindingNotFound:
|
||||
this.writer.write(`binding '${diagnostic.name}' was not found`);
|
||||
if (diagnostic.modulePath.length > 0) {
|
||||
this.writer.write(` in module ${ANSI_FG_BLUE + diagnostic.modulePath.join('.') + ANSI_RESET}`);
|
||||
}
|
||||
this.writer.write(`.\n\n`);
|
||||
this.writer.write(printNode(diagnostic.node) + '\n');
|
||||
break;
|
||||
|
||||
case DiagnosticKind.TypeMismatch:
|
||||
const leftNode = getFirstNodeInTypeChain(diagnostic.left);
|
||||
const rightNode = getFirstNodeInTypeChain(diagnostic.right);
|
||||
const node = diagnostic.trace[0];
|
||||
this.writer.write(`unification of ` + ANSI_FG_GREEN + describeType(diagnostic.left) + ANSI_RESET);
|
||||
this.writer.write(' and ' + ANSI_FG_GREEN + describeType(diagnostic.right) + ANSI_RESET + ' failed');
|
||||
if (diagnostic.fieldPath.length > 0) {
|
||||
this.writer.write(` in field '${diagnostic.fieldPath.join('.')}'`);
|
||||
}
|
||||
this.writer.write('.\n\n');
|
||||
this.writer.write(printNode(node) + '\n');
|
||||
for (let i = 1; i < diagnostic.trace.length; i++) {
|
||||
const node = diagnostic.trace[i];
|
||||
this.writer.write(' ... in an instantiation of the following expression\n\n');
|
||||
this.writer.write(printNode(node, { indentation: i === 0 ? ' ' : ' ' }) + '\n');
|
||||
}
|
||||
if (leftNode !== null) {
|
||||
this.writer.indent();
|
||||
this.writer.write(ANSI_FG_YELLOW + ANSI_BOLD + `info: ` + ANSI_RESET);
|
||||
this.writer.write(`type ` + ANSI_FG_GREEN + describeType(diagnostic.left) + ANSI_RESET + ` was inferred from this expression:\n\n`);
|
||||
this.writer.write(printNode(leftNode) + '\n');
|
||||
this.writer.dedent();
|
||||
}
|
||||
if (rightNode !== null) {
|
||||
this.writer.indent();
|
||||
this.writer.write(ANSI_FG_YELLOW + ANSI_BOLD + `info: ` + ANSI_RESET);
|
||||
this.writer.write(`type ` + ANSI_FG_GREEN + describeType(diagnostic.right) + ANSI_RESET + ` was inferred from this expression:\n\n`);
|
||||
this.writer.write(printNode(rightNode) + '\n');
|
||||
this.writer.dedent();
|
||||
}
|
||||
break;
|
||||
|
||||
case DiagnosticKind.KindMismatch:
|
||||
this.writer.write(`kind ${describeKind(diagnostic.left)} does not match with ${describeKind(diagnostic.right)}\n\n`);
|
||||
if (diagnostic.origin !== null) {
|
||||
this.writer.write(printNode(diagnostic.origin) + '\n');
|
||||
}
|
||||
break;
|
||||
|
||||
case DiagnosticKind.ModuleNotFound:
|
||||
this.writer.write(`a module named ${ANSI_FG_BLUE + diagnostic.modulePath.join('.') + ANSI_RESET} was not found.\n\n`);
|
||||
this.writer.write(printNode(diagnostic.node) + '\n');
|
||||
break;
|
||||
|
||||
case DiagnosticKind.FieldNotFound:
|
||||
this.writer.write(`field '${diagnostic.fieldName}' is required in one type but missing in another\n\n`);
|
||||
this.writer.indent();
|
||||
if (diagnostic.missing !== null) {
|
||||
this.writer.write(ANSI_FG_YELLOW + ANSI_BOLD + 'info: ' + ANSI_RESET);
|
||||
this.writer.write(`field '${diagnostic.fieldName}' is missing in this construct\n\n`);
|
||||
this.writer.write(printNode(diagnostic.missing) + '\n');
|
||||
}
|
||||
if (diagnostic.present !== null) {
|
||||
this.writer.write(ANSI_FG_YELLOW + ANSI_BOLD + 'info: ' + ANSI_RESET);
|
||||
this.writer.write(`field '${diagnostic.fieldName}' is required in this construct\n\n`);
|
||||
this.writer.write(printNode(diagnostic.present) + '\n');
|
||||
}
|
||||
if (diagnostic.cause !== null) {
|
||||
this.writer.write(ANSI_FG_YELLOW + ANSI_BOLD + 'info: ' + ANSI_RESET);
|
||||
this.writer.write(`because of a constraint on this node:\n\n`);
|
||||
this.writer.write(printNode(diagnostic.cause) + '\n');
|
||||
}
|
||||
this.writer.dedent();
|
||||
break;
|
||||
|
||||
default:
|
||||
assertNever(diagnostic);
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
const DESCRIPTIONS: Partial<Record<SyntaxKind, string>> = {
|
||||
[SyntaxKind.StringLiteral]: 'a string literal',
|
||||
[SyntaxKind.Identifier]: "an identifier",
|
||||
[SyntaxKind.RArrow]: "'->'",
|
||||
[SyntaxKind.RArrowAlt]: '"=>"',
|
||||
[SyntaxKind.VBar]: "'|'",
|
||||
[SyntaxKind.Comma]: "','",
|
||||
[SyntaxKind.Colon]: "':'",
|
||||
[SyntaxKind.Integer]: "an integer",
|
||||
[SyntaxKind.LParen]: "'('",
|
||||
[SyntaxKind.RParen]: "')'",
|
||||
[SyntaxKind.LBrace]: "'{'",
|
||||
[SyntaxKind.RBrace]: "'}'",
|
||||
[SyntaxKind.LBracket]: "'['",
|
||||
[SyntaxKind.RBracket]: "']'",
|
||||
[SyntaxKind.StructKeyword]: "'struct'",
|
||||
[SyntaxKind.EnumKeyword]: "'enum'",
|
||||
[SyntaxKind.MatchKeyword]: "'match'",
|
||||
[SyntaxKind.TypeKeyword]: "'type'",
|
||||
[SyntaxKind.IdentifierAlt]: 'an identifier starting with an uppercase letter',
|
||||
[SyntaxKind.TupleExpression]: 'a tuple expression such as (1, 2)',
|
||||
[SyntaxKind.ReferenceExpression]: 'a reference to some variable',
|
||||
[SyntaxKind.NestedExpression]: 'an expression nested with parentheses',
|
||||
[SyntaxKind.ConstantExpression]: 'a constant expression such as 1 or "foo"',
|
||||
[SyntaxKind.StructExpression]: 'a struct expression',
|
||||
[SyntaxKind.BlockStart]: 'the start of an indented block',
|
||||
[SyntaxKind.BlockEnd]: 'the end of an indented block',
|
||||
[SyntaxKind.LineFoldEnd]: 'the end of the current line-fold',
|
||||
[SyntaxKind.EndOfFile]: 'end-of-file',
|
||||
}
|
||||
|
||||
function describeSyntaxKind(kind: SyntaxKind): string {
|
||||
const desc = DESCRIPTIONS[kind];
|
||||
if (desc === undefined) {
|
||||
throw new Error(`Could not describe SyntaxKind '${kind}'`);
|
||||
}
|
||||
return desc
|
||||
}
|
||||
|
||||
function describeExpected(expected: SyntaxKind[]) {
|
||||
if (expected.length === 0) {
|
||||
return 'nothing';
|
||||
}
|
||||
let out = describeSyntaxKind(expected[0]);
|
||||
if (expected.length === 1) {
|
||||
return out;
|
||||
}
|
||||
for (let i = 1; i < expected.length-1; i++) {
|
||||
const kind = expected[i];
|
||||
out += ', ' + describeSyntaxKind(kind);
|
||||
}
|
||||
out += ' or ' + describeSyntaxKind(expected[expected.length-1])
|
||||
return out;
|
||||
}
|
||||
|
||||
function describeActual(token: Token): string {
|
||||
switch (token.kind) {
|
||||
case SyntaxKind.BlockStart:
|
||||
case SyntaxKind.BlockEnd:
|
||||
case SyntaxKind.LineFoldEnd:
|
||||
case SyntaxKind.EndOfFile:
|
||||
return describeSyntaxKind(token.kind);
|
||||
default:
|
||||
return `'${token.text}'`;
|
||||
}
|
||||
}
|
||||
|
||||
export function describeType(type: Type): string {
|
||||
switch (type.kind) {
|
||||
case TypeKind.Con:
|
||||
{
|
||||
return type.displayName;
|
||||
}
|
||||
case TypeKind.RegularVar:
|
||||
return 'a' + type.id;
|
||||
case TypeKind.RigidVar:
|
||||
return type.displayName;
|
||||
case TypeKind.Arrow:
|
||||
{
|
||||
return describeType(type.paramType) + ' -> ' + describeType(type.returnType);
|
||||
}
|
||||
case TypeKind.Field:
|
||||
{
|
||||
// let curr: Type = type;
|
||||
// while (curr.kind === TypeKind.Field) {
|
||||
// if (curr.name === labelTag) {
|
||||
// return describeType(curr.type);
|
||||
// }
|
||||
// curr = curr.restType;
|
||||
// }
|
||||
let out = '{ ' + type.name + ': ' + describeType(type.type);
|
||||
type = type.restType;
|
||||
while (type.kind === TypeKind.Field) {
|
||||
out += '; ' + type.name + ': ' + describeType(type.type);
|
||||
type = type.restType;
|
||||
}
|
||||
if (type.kind !== TypeKind.Nil) {
|
||||
out += '; ' + describeType(type);
|
||||
}
|
||||
return out + ' }'
|
||||
}
|
||||
case TypeKind.App:
|
||||
{
|
||||
return describeType(type.left) + ' ' + describeType(type.right);
|
||||
}
|
||||
case TypeKind.Nil:
|
||||
return '{}';
|
||||
case TypeKind.Absent:
|
||||
return 'Abs';
|
||||
case TypeKind.Present:
|
||||
return describeType(type.type);
|
||||
default:
|
||||
assertNever(type);
|
||||
}
|
||||
}
|
||||
|
||||
function describeKind(kind: Kind): string {
|
||||
switch (kind.type) {
|
||||
case KindType.Var:
|
||||
return `k${kind.id}`;
|
||||
case KindType.Arrow:
|
||||
return describeKind(kind.left) + ' -> ' + describeKind(kind.right);
|
||||
case KindType.Type:
|
||||
return '*';
|
||||
default:
|
||||
assertNever(kind);
|
||||
}
|
||||
}
|
||||
|
||||
function getFirstNodeInTypeChain(type: Type): Syntax | null {
|
||||
while (type !== type && (type.kind === TypeKind.RegularVar || type.node === null)) {
|
||||
type = type.next;
|
||||
}
|
||||
return type.node;
|
||||
}
|
||||
|
||||
interface PrintExcerptOptions {
|
||||
indentation?: string;
|
||||
extraLineCount?: number;
|
||||
}
|
||||
|
||||
interface PrintNodeOptions extends PrintExcerptOptions { }
|
||||
|
||||
function printNode(node: Syntax, options?: PrintNodeOptions): string {
|
||||
const file = node.getSourceFile().getFile();
|
||||
return printExcerpt(file, node.getRange(), options);
|
||||
}
|
||||
|
||||
function printExcerpt(file: TextFile, span: TextRange, { indentation = ' ', extraLineCount = 2 } = {}): string {
|
||||
|
||||
let out = '';
|
||||
|
||||
const content = file.text;
|
||||
const startLine = Math.max(0, span.start.line-1-extraLineCount)
|
||||
const lines = content.split('\n')
|
||||
const endLine = Math.min(lines.length, (span.end !== undefined ? span.end.line : startLine) + extraLineCount)
|
||||
const gutterWidth = Math.max(2, countDigits(endLine+1))
|
||||
|
||||
for (let i = startLine; i < endLine; i++) {
|
||||
|
||||
const line = lines[i];
|
||||
|
||||
let j = firstIndexOfNonEmpty(line);
|
||||
|
||||
out += indentation + ' ' + ANSI_FG_BLACK + ANSI_BG_WHITE + ' '.repeat(gutterWidth-countDigits(i+1))+(i+1).toString() + ANSI_RESET + ' ' + line + '\n'
|
||||
|
||||
const gutter = indentation + ' ' + ANSI_FG_BLACK + ANSI_BG_WHITE + ' '.repeat(gutterWidth) + ANSI_RESET + ' '
|
||||
|
||||
let mark: number;
|
||||
let skip: number;
|
||||
|
||||
if (i === span.start.line-1 && i === span.end.line-1) {
|
||||
skip = span.start.column-1;
|
||||
mark = span.end.column-span.start.column;
|
||||
} else if (i === span.start.line-1) {
|
||||
skip = span.start.column-1;
|
||||
mark = line.length-span.start.column+1;
|
||||
} else if (i === span.end.line-1) {
|
||||
skip = 0;
|
||||
mark = span.end.column-1;
|
||||
} else if (i > span.start.line-1 && i < span.end.line-1) {
|
||||
skip = 0;
|
||||
mark = line.length;
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (j <= skip) {
|
||||
j = 0;
|
||||
}
|
||||
|
||||
out += gutter + ' '.repeat(j+skip) + ANSI_FG_RED + '~'.repeat(mark-j) + ANSI_RESET + '\n'
|
||||
|
||||
}
|
||||
|
||||
return out;
|
||||
}
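// Roughly, the excerpt looks like this (ANSI colours omitted, content hypothetical):
//
//    12  let x = foo bar
//        ~~~~~~~
//
// i.e. each source line is prefixed with a line-number gutter, and the part covered by
// `span` is underlined with '~' on the following line.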
|
||||
|
||||
function firstIndexOfNonEmpty(str: string) {
|
||||
let j = 0;
|
||||
for (; j < str.length; j++) {
|
||||
const ch = str[j];
|
||||
if (ch !== ' ' && ch !== '\t') {
|
||||
break;
|
||||
}
|
||||
}
|
||||
return j
|
||||
}
|
||||
|
|
@@ -1,201 +0,0 @@
|
|||
import { Syntax, SyntaxKind } from "./cst";
|
||||
import { IndentWriter, assertNever } from "./util";
|
||||
|
||||
export class Emitter {
|
||||
|
||||
public constructor(
|
||||
public writer: IndentWriter,
|
||||
) {
|
||||
|
||||
}
|
||||
|
||||
public emit(node: Syntax): void {
|
||||
|
||||
switch (node.kind) {
|
||||
|
||||
case SyntaxKind.ModuleDeclaration:
|
||||
this.writer.write(`mod ${node.name.text}`);
|
||||
if (node.elements === null) {
|
||||
this.writer.write('\n');
|
||||
break;
|
||||
}
|
||||
this.writer.write('.\n');
|
||||
this.writer.indent();
|
||||
for (const element of node.elements) {
|
||||
this.emit(element);
|
||||
}
|
||||
this.writer.dedent();
|
||||
break;
|
||||
|
||||
case SyntaxKind.ReferenceExpression:
|
||||
for (const [name, _dot] of node.modulePath) {
|
||||
this.writer.write(name.text);
|
||||
this.writer.write('.');
|
||||
}
|
||||
this.writer.write(node.name.text);
|
||||
break;
|
||||
|
||||
case SyntaxKind.CallExpression:
|
||||
this.emit(node.func);
|
||||
for (const arg of node.args) {
|
||||
this.writer.write(' ');
|
||||
this.emit(arg);
|
||||
}
|
||||
break;
|
||||
|
||||
case SyntaxKind.ReferenceTypeExpression:
|
||||
for (const [name, _dot] of node.modulePath) {
|
||||
this.writer.write(name.text);
|
||||
this.writer.write('.');
|
||||
}
|
||||
this.writer.write(node.name.text);
|
||||
break;
|
||||
|
||||
case SyntaxKind.StructExpressionField:
|
||||
this.writer.write(node.name.text);
|
||||
this.writer.write(' = ');
|
||||
this.emit(node.expression);
|
||||
break;
|
||||
|
||||
case SyntaxKind.StructExpression:
|
||||
this.writer.write('{ ');
|
||||
for (const member of node.members) {
|
||||
this.emit(member);
|
||||
this.writer.write(', ');
|
||||
}
|
||||
this.writer.write(' }');
|
||||
break;
|
||||
|
||||
case SyntaxKind.ConstantExpression:
|
||||
this.writer.write(node.token.text);
|
||||
break;
|
||||
|
||||
case SyntaxKind.FunctionExpression:
|
||||
this.writer.write('\\');
|
||||
for (const param of node.params) {
|
||||
this.emit(param);
|
||||
this.writer.write(' ');
|
||||
}
|
||||
this.emit(node.body);
|
||||
break;
|
||||
|
||||
case SyntaxKind.ArrowTypeExpression:
|
||||
for (const typeExpr of node.paramTypeExprs) {
|
||||
this.emit(typeExpr);
|
||||
this.writer.write(' -> ');
|
||||
}
|
||||
this.emit(node.returnTypeExpr);
|
||||
break;
|
||||
|
||||
case SyntaxKind.VarTypeExpression:
|
||||
this.writer.write(node.name.text);
|
||||
break;
|
||||
|
||||
case SyntaxKind.PlainParam:
|
||||
this.emit(node.pattern);
|
||||
break;
|
||||
|
||||
case SyntaxKind.NamedPattern:
|
||||
this.writer.write(node.name.text);
|
||||
break;
|
||||
|
||||
case SyntaxKind.ExpressionStatement:
|
||||
this.emit(node.expression);
|
||||
this.writer.write('\n');
|
||||
break;
|
||||
|
||||
case SyntaxKind.SourceFile:
|
||||
for (const element of node.elements) {
|
||||
this.emit(element);
|
||||
}
|
||||
break;
|
||||
|
||||
case SyntaxKind.TypeAssert:
|
||||
this.writer.write(': ');
|
||||
this.emit(node.typeExpression);
|
||||
break;
|
||||
|
||||
case SyntaxKind.ExprBody:
|
||||
this.writer.write(node.equals.text);
|
||||
this.writer.write(' ');
|
||||
this.emit(node.expression);
|
||||
break
|
||||
|
||||
case SyntaxKind.BlockBody:
|
||||
this.writer.write('.\n');
|
||||
this.writer.indent();
|
||||
for (const element of node.elements) {
|
||||
this.emit(element);
|
||||
}
|
||||
this.writer.dedent();
|
||||
break;
|
||||
|
||||
case SyntaxKind.LetDeclaration:
|
||||
if (node.pubKeyword) {
|
||||
this.writer.write('pub ');
|
||||
}
|
||||
this.writer.write('let ');
|
||||
if (node.mutKeyword) {
|
||||
this.writer.write(' mut ');
|
||||
}
|
||||
this.emit(node.pattern);
|
||||
this.writer.write(' ');
|
||||
for (const param of node.params) {
|
||||
this.emit(param);
|
||||
this.writer.write(' ');
|
||||
}
|
||||
if (node.typeAssert) {
|
||||
this.emit(node.typeAssert);
|
||||
this.writer.write(' ');
|
||||
}
|
||||
if (node.body) {
|
||||
this.emit(node.body);
|
||||
}
|
||||
this.writer.write('\n\n');
|
||||
break;
|
||||
|
||||
case SyntaxKind.ClassConstraint:
|
||||
this.writer.write(node.name.text);
|
||||
for (const type of node.types) {
|
||||
this.writer.write(' ');
|
||||
this.emit(type);
|
||||
}
|
||||
break;
|
||||
|
||||
case SyntaxKind.ClassDeclaration:
|
||||
if (node.pubKeyword) {
|
||||
this.writer.write('pub ');
|
||||
}
|
||||
this.writer.write(`class `);
|
||||
if (node.constraintClause) {
|
||||
for (const constraint of node.constraintClause.constraints) {
|
||||
this.emit(constraint);
|
||||
this.writer.write(`, `);
|
||||
}
|
||||
this.writer.write(' => ');
|
||||
}
|
||||
this.emit(node.name);
|
||||
for (const type of node.types) {
|
||||
this.writer.write(' ');
|
||||
this.emit(type);
|
||||
}
|
||||
if (node.elements !== null) {
|
||||
this.writer.write('.\n');
|
||||
this.writer.indent();
|
||||
for (const element of node.elements) {
|
||||
this.emit(element);
|
||||
}
|
||||
this.writer.dedent();
|
||||
}
|
||||
break;
|
||||
|
||||
default:
|
||||
assertNever(node);
|
||||
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
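// Usage sketch (assuming IndentWriter wraps a writable stream, as it does in js.ts):
//
//   const emitter = new Emitter(new IndentWriter(process.stdout));
//   emitter.emit(sourceFile);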
|
||||
|
|
@@ -1,36 +0,0 @@
|
|||
import { SourceFile, TextFile } from "./cst";
|
||||
import { Diagnostics, UnexpectedCharDiagnostic, UnexpectedTokenDiagnostic } from "./diagnostics";
|
||||
import { ParseError, Parser } from "./parser";
|
||||
import { Punctuator, ScanError, Scanner } from "./scanner";
|
||||
|
||||
export function parseSourceFile(file: TextFile, diagnostics: Diagnostics): SourceFile | null {
|
||||
const scanner = new Scanner(file.text, diagnostics, file);
|
||||
const punctuated = new Punctuator(scanner);
|
||||
const parser = new Parser(file, punctuated);
|
||||
let sourceFile;
|
||||
try {
|
||||
sourceFile = parser.parseSourceFile();
|
||||
} catch (error) {
|
||||
if (error instanceof ParseError) {
|
||||
diagnostics.add(new UnexpectedTokenDiagnostic(error.file, error.actual, error.expected));
|
||||
return null;
|
||||
}
|
||||
if (error instanceof ScanError) {
|
||||
diagnostics.add(new UnexpectedCharDiagnostic(error.file, error.position, error.actual));
|
||||
return null;
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
sourceFile.setParents();
|
||||
return sourceFile;
|
||||
}
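// Usage sketch (hypothetical file name and source text):
//
//   const diagnostics = new ConsoleDiagnostics();
//   const file = new TextFile('example.bolt', 'let x = 1\n');
//   const sourceFile = parseSourceFile(file, diagnostics);
//   // `sourceFile` is null if a scan or parse error was reported to `diagnostics`.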
|
||||
|
||||
export * from "./util"
|
||||
export * from "./diagnostics"
|
||||
export * from "./scanner"
|
||||
export * from "./parser"
|
||||
export * from "./cst"
|
||||
export * from "./analysis"
|
||||
export * from "./checker"
|
||||
export * from "./program"
|
||||
|
|
@@ -1,196 +0,0 @@
|
|||
|
||||
import type stream from "stream"
|
||||
import { IndentWriter } from "./util";
|
||||
|
||||
export const enum JSNodeKind {
|
||||
|
||||
// Patterns
|
||||
BindPattern,
|
||||
ArrayPattern,
|
||||
ObjectPattern,
|
||||
|
||||
// Expressions
|
||||
ReferenceExpression,
|
||||
CallExpression,
|
||||
FunctionExpression,
|
||||
MemberExpression,
|
||||
IndexExpression,
|
||||
|
||||
// Statements
|
||||
ExpressionStatement,
|
||||
ReturnStatement,
|
||||
|
||||
// Declarations
|
||||
FunctionDeclaration,
|
||||
VariableDeclaration,
|
||||
|
||||
// Other nodes
|
||||
Program,
|
||||
}
|
||||
|
||||
abstract class JSNodeBase {
|
||||
|
||||
public abstract readonly kind: JSNodeKind;
|
||||
|
||||
public emit(out: stream.Writable): void {
|
||||
const emitter = new JSEmitter(out);
|
||||
emitter.emit(this as unknown as JSNode);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export class JSBindPattern extends JSNodeBase {
|
||||
|
||||
public readonly kind = JSNodeKind.BindPattern;
|
||||
|
||||
public constructor(
|
||||
public name: string,
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export type JSPattern
|
||||
= JSBindPattern
|
||||
|
||||
export class JSReferenceExpression extends JSNodeBase {
|
||||
|
||||
public kind = JSNodeKind.ReferenceExpression;
|
||||
|
||||
public constructor(
|
||||
public name: string,
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export class JSCallExpression extends JSNodeBase {
|
||||
|
||||
public readonly kind = JSNodeKind.CallExpression;
|
||||
|
||||
public constructor(
|
||||
public operator: JSExpression,
|
||||
public args: JSExpression[],
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export type JSExpression
|
||||
= JSReferenceExpression
|
||||
| JSCallExpression
|
||||
|
||||
export class JSExpressionStatement extends JSNodeBase {
|
||||
|
||||
public readonly kind = JSNodeKind.ExpressionStatement;
|
||||
|
||||
public constructor(
|
||||
public expression: JSExpression,
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export class JSReturnStatement extends JSNodeBase {
|
||||
|
||||
public readonly kind = JSNodeKind.ReturnStatement;
|
||||
|
||||
public constructor(
|
||||
public value: JSExpression | null,
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export type JSStatement
|
||||
= JSExpressionStatement
|
||||
| JSReturnStatement
|
||||
|
||||
export const enum JSDeclarationFlags {
|
||||
IsExported = 1,
|
||||
}
|
||||
|
||||
export type JSFunctionElement
|
||||
= JSDeclaration
|
||||
| JSStatement
|
||||
|
||||
export class JSFunctionDeclaration extends JSNodeBase {
|
||||
|
||||
public readonly kind = JSNodeKind.FunctionDeclaration;
|
||||
|
||||
public constructor(
|
||||
public flags: JSDeclarationFlags,
|
||||
public name: string,
|
||||
public params: JSPattern[],
|
||||
public body: JSFunctionElement[],
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export enum JSVarType {
|
||||
Var,
|
||||
Const,
|
||||
Let,
|
||||
}
|
||||
|
||||
export class JSVariableDeclaration extends JSNodeBase {
|
||||
|
||||
public readonly kind = JSNodeKind.VariableDeclaration;
|
||||
|
||||
public constructor(
|
||||
public flags: JSDeclarationFlags,
|
||||
public varType: JSVarType,
|
||||
public pattern: JSPattern,
|
||||
public value: JSExpression | null,
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export type JSDeclaration
|
||||
= JSFunctionDeclaration
|
||||
| JSVariableDeclaration
|
||||
|
||||
export type JSSourceElement
|
||||
= JSStatement
|
||||
| JSDeclaration
|
||||
|
||||
export class JSProgram extends JSNodeBase {
|
||||
|
||||
public readonly kind = JSNodeKind.Program;
|
||||
|
||||
public constructor(
|
||||
public elements: JSSourceElement[],
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export type JSNode
|
||||
= JSStatement
|
||||
| JSDeclaration
|
||||
| JSExpression
|
||||
| JSPattern
|
||||
| JSProgram
|
||||
|
||||
export class JSEmitter {
|
||||
|
||||
private writer: IndentWriter;
|
||||
|
||||
public constructor(out: stream.Writable) {
|
||||
this.writer = new IndentWriter(out);
|
||||
}
|
||||
|
||||
public emit(node: JSNode) {
|
||||
}
|
||||
|
||||
}
|
File diff suppressed because it is too large
|
@@ -1,90 +0,0 @@
|
|||
|
||||
import { CBuiltinType, CBuiltinTypeKind, CCallExpr, CConstExpr, CDecl, CDir, CExpr, CExprStmt, CFuncDecl, CIncDir, CNode, CProgram, CRefExpr, CStmt } from "../c";
|
||||
import { Expression, Syntax, SyntaxKind } from "../cst";
|
||||
import type { Pass } from "../program";
|
||||
import { assert } from "../util";
|
||||
|
||||
interface Context {
|
||||
body: CStmt[];
|
||||
}
|
||||
|
||||
class BoltToC implements Pass<Syntax, CNode> {
|
||||
|
||||
public apply(input: Syntax): CNode {
|
||||
|
||||
assert(input.kind === SyntaxKind.SourceFile);
|
||||
|
||||
const intType = new CBuiltinType(CBuiltinTypeKind.Int);
|
||||
|
||||
const decls: (CDecl | CDir)[] = [];
|
||||
|
||||
decls.push(new CIncDir("runtime.h"));
|
||||
|
||||
const mainBody: CStmt[] = [];
|
||||
|
||||
decls.push(
|
||||
new CFuncDecl(
|
||||
intType,
|
||||
'main',
|
||||
[],
|
||||
mainBody
|
||||
)
|
||||
);
|
||||
|
||||
visit(input, { body: mainBody });
|
||||
|
||||
return new CProgram(decls);
|
||||
|
||||
function visit(node: Syntax, context: Context): void {
|
||||
|
||||
switch (node.kind) {
|
||||
|
||||
case SyntaxKind.SourceFile:
|
||||
{
|
||||
for (const element of node.elements) {
|
||||
visit(element, context);
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case SyntaxKind.ExpressionStatement:
|
||||
{
|
||||
context.body.push(
|
||||
new CExprStmt(
|
||||
visitExpression(node.expression, context)
|
||||
)
|
||||
);
|
||||
break;
|
||||
}
|
||||
|
||||
case SyntaxKind.LetDeclaration:
|
||||
{
|
||||
// TODO
|
||||
break;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
function visitExpression(node: Expression, context: Context): CExpr {
|
||||
switch (node.kind) {
|
||||
case SyntaxKind.ReferenceExpression:
|
||||
assert(node.modulePath.length === 0);
|
||||
return new CRefExpr(node.name.text);
|
||||
case SyntaxKind.CallExpression:
|
||||
const operator = visitExpression(node.func, context);
|
||||
const args = node.args.map(arg => visitExpression(arg, context));
|
||||
return new CCallExpr(operator, args);
|
||||
case SyntaxKind.ConstantExpression:
|
||||
return new CConstExpr(node.token.getValue());
|
||||
default:
|
||||
throw new Error(`Unexpected ${node}`);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export default BoltToC;
|
|
@@ -1,14 +0,0 @@
|
|||
import { Syntax } from "../cst";
|
||||
import { JSNode, JSProgram } from "../js";
|
||||
import type { Pass } from "../program";
|
||||
|
||||
export class BoltToJS implements Pass<Syntax, JSNode> {
|
||||
|
||||
public apply(input: Syntax): JSNode {
|
||||
return new JSProgram([]);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export default BoltToJS;
|
||||
|
|
@@ -1,93 +0,0 @@
|
|||
import { TypeExpression } from "../cst";
|
||||
import {
|
||||
ExprBody,
|
||||
NamedPattern,
|
||||
LBrace,
|
||||
RBrace,
|
||||
LetKeyword,
|
||||
LetDeclaration,
|
||||
SourceFile,
|
||||
Syntax,
|
||||
SyntaxKind,
|
||||
Identifier,
|
||||
StructExpression,
|
||||
StructExpressionField,
|
||||
Equals,
|
||||
InstanceDeclaration,
|
||||
FunctionExpression,
|
||||
Backslash,
|
||||
canHaveInstanceDeclaration,
|
||||
visitEachChild
|
||||
} from "../cst";
|
||||
import { Pass } from "../program";
|
||||
import { assert } from "../util";
|
||||
|
||||
function encode(typeExpr: TypeExpression): string {
|
||||
switch (typeExpr.kind) {
|
||||
case SyntaxKind.ReferenceTypeExpression:
|
||||
let out = '';
|
||||
if (typeExpr.modulePath.length > 0) {
|
||||
out += '_xm';
|
||||
for (const [name, _dot] of typeExpr.modulePath) {
|
||||
out += name + '_';
|
||||
}
|
||||
}
|
||||
return out + typeExpr.name.text;
|
||||
default:
|
||||
throw new Error(`Could not encode type.`)
|
||||
}
|
||||
}
|
||||
|
||||
function lcfirst(text: string): string {
|
||||
return text[0].toLowerCase() + text.substring(1);
|
||||
}
|
||||
|
||||
export class TypeclassDictPassing implements Pass<SourceFile, SourceFile> {
|
||||
|
||||
private mangleInstance(node: InstanceDeclaration): string {
|
||||
return lcfirst(node.name.text) + '_' + node.types.map(encode).join('');
|
||||
}
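// For example (hypothetical declaration), an instance of `Eq` at type `Int` would be
// mangled to the value name `eq_Int`: the class name is lower-cased and each argument
// type is appended via encode().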
|
||||
|
||||
private visit(node: Syntax): Syntax {
|
||||
if (canHaveInstanceDeclaration(node)) {
|
||||
return visitEachChild(node, this.visit.bind(this));
|
||||
}
|
||||
if (node.kind === SyntaxKind.InstanceDeclaration) {
|
||||
const decl = new LetDeclaration(
|
||||
node.pubKeyword,
|
||||
new LetKeyword(),
|
||||
null,
|
||||
null,
|
||||
new NamedPattern(new Identifier(null, this.mangleInstance(node))),
|
||||
[],
|
||||
null, // TODO
|
||||
new ExprBody(
|
||||
new Equals(),
|
||||
new StructExpression(
|
||||
[],
|
||||
new LBrace(),
|
||||
node.elements.map(element => {
|
||||
assert(element.kind === SyntaxKind.LetDeclaration);
|
||||
assert(element.pattern.kind === SyntaxKind.NamedPattern);
|
||||
return new StructExpressionField(
|
||||
new Identifier(null, element.pattern.name.text),
|
||||
new Equals(),
|
||||
new FunctionExpression([], new Backslash(), element.params, element.body!)
|
||||
);
|
||||
}),
|
||||
new RBrace(),
|
||||
)
|
||||
)
|
||||
);
|
||||
return decl;
|
||||
}
|
||||
return node;
|
||||
}
|
||||
|
||||
public apply(input: SourceFile): SourceFile {
|
||||
return this.visit(input) as SourceFile;
|
||||
}
|
||||
|
||||
}
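// Usage sketch: the pass is intended to be driven through a PassManager, e.g.
//
//   const passes = new PassManager();
//   passes.add(TypeclassDictPassing);
//   const desugared = passes.apply(sourceFile);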
|
||||
|
||||
|
|
@@ -1,77 +0,0 @@
|
|||
import path from "path";
|
||||
import fs from "fs"
|
||||
import { parseSourceFile } from ".";
|
||||
import { SourceFile, TextFile } from "./cst";
|
||||
import { ConsoleDiagnostics, Diagnostics } from "./diagnostics";
|
||||
import { Checker } from "./checker";
|
||||
import { Analyser } from "./analysis";
|
||||
|
||||
export interface Pass<In, Out> {
|
||||
apply(input: In): Out;
|
||||
}
|
||||
|
||||
export interface Newable<T> {
|
||||
new (...args: any[]): T;
|
||||
}
|
||||
|
||||
type AnyPass = Pass<any, any>;
|
||||
|
||||
export enum TargetType {
|
||||
Bolt,
|
||||
C,
|
||||
JS,
|
||||
WebAssembly,
|
||||
LLVM,
|
||||
}
|
||||
|
||||
export class PassManager {
|
||||
|
||||
private registeredPasses: AnyPass[] = [];
|
||||
|
||||
public add(pass: Newable<AnyPass>) {
|
||||
this.registeredPasses.push(new pass());
|
||||
}
|
||||
|
||||
public apply(input: any): any {
|
||||
for (const pass of this.registeredPasses) {
|
||||
input = pass.apply(input);
|
||||
}
|
||||
return input;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export class Program {
|
||||
|
||||
private sourceFilesByPath = new Map<string, SourceFile>();
|
||||
|
||||
private analyser = new Analyser();
|
||||
|
||||
public constructor(
|
||||
public fileNames: string[],
|
||||
public diagnostics: Diagnostics = new ConsoleDiagnostics(),
|
||||
) {
|
||||
for (const fileName of fileNames) {
|
||||
const realPath = path.resolve(fileName);
|
||||
const text = fs.readFileSync(realPath, 'utf-8');
|
||||
const file = new TextFile(fileName, text);
|
||||
const sourceFile = parseSourceFile(file, diagnostics);
|
||||
if (sourceFile !== null) {
|
||||
this.sourceFilesByPath.set(realPath, sourceFile);
|
||||
this.analyser.addSourceFile(sourceFile);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public getSourceFiles(): Iterable<SourceFile> {
|
||||
return this.sourceFilesByPath.values();
|
||||
}
|
||||
|
||||
public check(): void {
|
||||
const checker = new Checker(this.analyser, this.diagnostics);
|
||||
for (const sourceFile of this.getSourceFiles()) {
|
||||
checker.check(sourceFile);
|
||||
}
|
||||
}
|
||||
|
||||
}
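// Usage sketch (hypothetical file name):
//
//   const program = new Program(['main.bolt']);
//   program.check(); // type-checks every source file that parsed successfully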
|
|
@@ -1,551 +0,0 @@
|
|||
|
||||
import { warn } from "console";
|
||||
import {
|
||||
SyntaxKind,
|
||||
Token,
|
||||
Identifier,
|
||||
StringLiteral,
|
||||
EndOfFile,
|
||||
BlockStart,
|
||||
BlockEnd,
|
||||
LineFoldEnd,
|
||||
PubKeyword,
|
||||
MutKeyword,
|
||||
LetKeyword,
|
||||
ImportKeyword,
|
||||
TypeKeyword,
|
||||
TextPosition,
|
||||
Colon,
|
||||
Comma,
|
||||
Equals,
|
||||
LParen,
|
||||
RParen,
|
||||
LBrace,
|
||||
LBracket,
|
||||
RBrace,
|
||||
RBracket,
|
||||
ReturnKeyword,
|
||||
CustomOperator,
|
||||
IdentifierAlt,
|
||||
Integer,
|
||||
TextFile,
|
||||
Dot,
|
||||
DotDot,
|
||||
Assignment,
|
||||
ElifKeyword,
|
||||
ElseKeyword,
|
||||
IfKeyword,
|
||||
StructKeyword,
|
||||
RArrow,
|
||||
EnumKeyword,
|
||||
MatchKeyword,
|
||||
RArrowAlt,
|
||||
VBar,
|
||||
ForeignKeyword,
|
||||
ModKeyword,
|
||||
ClassKeyword,
|
||||
InstanceKeyword,
|
||||
Backslash,
|
||||
ForallKeyword,
|
||||
At,
|
||||
} from "./cst"
|
||||
import { Diagnostics } from "./diagnostics"
|
||||
import { Stream, BufferedStream, assert } from "./util";
|
||||
|
||||
const EOF = '\uFFFF'
|
||||
|
||||
function isUpper(ch: string): boolean {
|
||||
return ch.toUpperCase() === ch;
|
||||
}
|
||||
|
||||
function isWhiteSpace(ch: string): boolean {
|
||||
return /[\r\n\t ]/.test(ch);
|
||||
}
|
||||
|
||||
function isIdentPart(ch: string): boolean {
|
||||
return /[a-zA-Z0-9_]/.test(ch);
|
||||
}
|
||||
|
||||
function isIdentStart(ch: string): boolean {
|
||||
return /[a-zA-Z_]/.test(ch)
|
||||
}
|
||||
|
||||
function isDecimalDigit(ch: string): boolean {
|
||||
return /[0-9]/.test(ch);
|
||||
}
|
||||
|
||||
function toDecimal(ch: string): number {
|
||||
const code = ch.charCodeAt(0);
|
||||
assert(code >= 48 && code <= 57);
|
||||
return code - 48;
|
||||
}
|
||||
|
||||
function isOperatorPart(ch: string): boolean {
|
||||
return /[+\-*\/%^&|$<>!?=]/.test(ch);
|
||||
}
|
||||
|
||||
export class ScanError extends Error {
|
||||
|
||||
public constructor(
|
||||
public file: TextFile,
|
||||
public position: TextPosition,
|
||||
public actual: string,
|
||||
) {
|
||||
super(`Uncaught scanner error`);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export class Scanner extends BufferedStream<Token> {
|
||||
|
||||
private textOffset = 0;
|
||||
|
||||
public constructor(
|
||||
public text: string,
|
||||
public diagnostics: Diagnostics,
|
||||
private file: TextFile,
|
||||
public currPos: TextPosition = new TextPosition(0, 1, 1),
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
private peekChar(offset = 1): string {
|
||||
const i = this.textOffset + offset - 1;
|
||||
return i < this.text.length ? this.text[i] : EOF;
|
||||
}
|
||||
|
||||
private getChar(): string {
|
||||
let ch;
|
||||
if (this.textOffset < this.text.length) {
|
||||
ch = this.text[this.textOffset++];
|
||||
this.currPos.offset++;
|
||||
} else {
|
||||
ch = EOF;
|
||||
}
|
||||
if (ch === '\n') {
|
||||
this.currPos.line++;
|
||||
this.currPos.column = 1;
|
||||
} else {
|
||||
this.currPos.column++;
|
||||
}
|
||||
return ch;
|
||||
}
|
||||
|
||||
private takeWhile(pred: (ch: string) => boolean): string {
|
||||
let out = ''
|
||||
for (;;) {
|
||||
const c0 = this.peekChar()
|
||||
if (!pred(c0)) {
|
||||
break;
|
||||
}
|
||||
this.getChar()
|
||||
out += c0;
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
private getCurrentPosition(): TextPosition {
|
||||
return this.currPos.clone();
|
||||
}
|
||||
|
||||
public read(): Token {
|
||||
|
||||
let c0: string;
|
||||
|
||||
// Skip whitespace and comments
|
||||
for (;;) {
|
||||
|
||||
for (;;) {
|
||||
c0 = this.peekChar();
|
||||
if (isWhiteSpace(c0)) {
|
||||
this.getChar();
|
||||
continue;
|
||||
}
|
||||
if (c0 === '#') {
|
||||
const line = this.currPos.line;
|
||||
this.getChar();
|
||||
for (;;) {
|
||||
const c1 = this.peekChar();
|
||||
if (!isWhiteSpace(c1) || c1 === '\n' || c1 === EOF) {
|
||||
break;
|
||||
}
|
||||
this.getChar();
|
||||
}
|
||||
let text = '';
|
||||
for (;;) {
|
||||
const c1 = this.getChar();
|
||||
if (c1 === '\n' || c1 === EOF) {
|
||||
break;
|
||||
}
|
||||
text += c1;
|
||||
}
|
||||
if (text[0] === '@') {
|
||||
const scanner = new Scanner(text, this.diagnostics, this.file, this.getCurrentPosition());
|
||||
this.file.comments.set(line, scanner.getAll());
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
// We failed to match a newline or line comment, so there's nothing to skip
|
||||
break;
|
||||
|
||||
}
|
||||
|
||||
const startPos = this.getCurrentPosition();
|
||||
this.getChar();
|
||||
|
||||
switch (c0) {
|
||||
|
||||
case '"':
|
||||
{
|
||||
let contents = '';
|
||||
let escaping = false;
|
||||
for (;;) {
|
||||
if (escaping) {
|
||||
const startPos = this.getCurrentPosition();
|
||||
const c1 = this.getChar();
|
||||
switch (c1) {
|
||||
case 'a': contents += '\x07'; break; // '\a' is not a valid JS escape; emit the BEL character explicitly
|
||||
case 'b': contents += '\b'; break;
|
||||
case 'f': contents += '\f'; break;
|
||||
case 'n': contents += '\n'; break;
|
||||
case 'r': contents += '\r'; break;
|
||||
case 't': contents += '\t'; break;
|
||||
case 'v': contents += '\v'; break;
|
||||
case '0': contents += '\0'; break;
|
||||
case '\'': contents += '\''; break;
|
||||
case '\"': contents += '\"'; break;
|
||||
default:
|
||||
throw new ScanError(this.file, startPos, c1);
|
||||
}
|
||||
escaping = false;
|
||||
} else {
|
||||
const c1 = this.getChar();
|
||||
if (c1 === '"') {
|
||||
break;
|
||||
} else {
|
||||
contents += c1;
|
||||
}
|
||||
}
|
||||
}
|
||||
return new StringLiteral(startPos, contents);
|
||||
}
|
||||
|
||||
case EOF:
|
||||
{
|
||||
return new EndOfFile(startPos);
|
||||
}
|
||||
|
||||
case '@': return new At(startPos);
|
||||
case '\\': return new Backslash(startPos);
|
||||
case '(': return new LParen(startPos);
|
||||
case ')': return new RParen(startPos);
|
||||
case '[': return new LBracket(startPos);
|
||||
case ']': return new RBracket(startPos);
|
||||
case '{': return new LBrace(startPos);
|
||||
case '}': return new RBrace(startPos);
|
||||
case ',': return new Comma(startPos);
|
||||
case ':':
|
||||
const text = this.takeWhile(isOperatorPart);
|
||||
if (text === '') {
|
||||
return new Colon(startPos);
|
||||
} else if (text === '=') {
|
||||
return new Assignment(startPos, ':');
|
||||
} else {
|
||||
throw new ScanError(this.file, startPos, ':' + text);
|
||||
}
|
||||
case '.': {
|
||||
const dots = c0 + this.takeWhile(ch => ch === '.');
|
||||
if (dots === '.') {
|
||||
return new Dot(startPos);
|
||||
} else if (dots === '..') {
|
||||
return new DotDot(startPos);
|
||||
} else {
|
||||
throw new ScanError(this.file, startPos, dots);
|
||||
}
|
||||
}
|
||||
|
||||
case '+':
|
||||
case '-':
|
||||
case '*':
|
||||
case '/':
|
||||
case '%':
|
||||
case '&':
|
||||
case '^':
|
||||
case '|':
|
||||
case '$':
|
||||
case '<':
|
||||
case '>':
|
||||
case '=':
|
||||
case '!':
|
||||
case '?':
|
||||
{
|
||||
const text = c0 + this.takeWhile(isOperatorPart);
|
||||
if (text === '->') {
|
||||
return new RArrow(startPos);
|
||||
} else if (text === '=>') {
|
||||
return new RArrowAlt(startPos);
|
||||
} else if (text === '|') {
|
||||
return new VBar(startPos);
|
||||
} else if (text === '=') {
|
||||
return new Equals(startPos);
|
||||
} else if (text.endsWith('=') && text[text.length-2] !== '=') {
|
||||
return new Assignment(startPos, text.substring(0, text.length-1));
|
||||
} else {
|
||||
return new CustomOperator(startPos, text);
|
||||
}
|
||||
}
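// Note on the branch above: an operator ending in a single '=' (e.g. '+=') is scanned
// as an Assignment token carrying the operator without the '=', while '==' keeps its
// trailing '=' and is returned as a CustomOperator.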
|
||||
|
||||
case '0':
|
||||
{
|
||||
const c1 = this.peekChar();
|
||||
switch (c1) {
|
||||
case 'x': // TODO
|
||||
case 'o': // TODO
|
||||
case 'b': // TODO
|
||||
}
|
||||
}
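// fall through: a '0' that is not followed by a radix prefix is scanned as an
// ordinary decimal literal by the digit cases below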
|
||||
case '1':
|
||||
case '2':
|
||||
case '3':
|
||||
case '4':
|
||||
case '5':
|
||||
case '6':
|
||||
case '7':
|
||||
case '8':
|
||||
case '9':
|
||||
{
|
||||
let value = BigInt(toDecimal(c0));
|
||||
for (;;) {
|
||||
const c1 = this.peekChar();
|
||||
if (!isDecimalDigit(c1)) {
|
||||
break;
|
||||
}
|
||||
this.getChar();
|
||||
value = value * BigInt(10) + BigInt(toDecimal(c1));
|
||||
}
|
||||
return new Integer(startPos, value, 10);
|
||||
}
|
||||
|
||||
case 'a':
|
||||
case 'b':
|
||||
case 'c':
|
||||
case 'd':
|
||||
case 'e':
|
||||
case 'f':
|
||||
case 'g':
|
||||
case 'h':
|
||||
case 'i':
|
||||
case 'j':
|
||||
case 'k':
|
||||
case 'l':
|
||||
case 'm':
|
||||
case 'n':
|
||||
case 'o':
|
||||
case 'p':
|
||||
case 'q':
|
||||
case 'r':
|
||||
case 's':
|
||||
case 't':
|
||||
case 'u':
|
||||
case 'v':
|
||||
case 'w':
|
||||
case 'x':
|
||||
case 'y':
|
||||
case 'z':
|
||||
case 'A':
|
||||
case 'B':
|
||||
case 'C':
|
||||
case 'D':
|
||||
case 'E':
|
||||
case 'F':
|
||||
case 'G':
|
||||
case 'H':
|
||||
case 'I':
|
||||
case 'J':
|
||||
case 'K':
|
||||
case 'L':
|
||||
case 'M':
|
||||
case 'N':
|
||||
case 'O':
|
||||
case 'P':
|
||||
case 'Q':
|
||||
case 'R':
|
||||
case 'S':
|
||||
case 'T':
|
||||
case 'U':
|
||||
case 'V':
|
||||
case 'W':
|
||||
case 'X':
|
||||
case 'Y':
|
||||
case 'Z':
|
||||
case '_':
|
||||
{
|
||||
const text = c0 + this.takeWhile(isIdentPart);
|
||||
switch (text) {
|
||||
case 'trait': return new ClassKeyword(startPos);
|
||||
case 'impl': return new InstanceKeyword(startPos);
|
||||
case 'import': return new ImportKeyword(startPos);
|
||||
case 'pub': return new PubKeyword(startPos);
|
||||
case 'mut': return new MutKeyword(startPos);
|
||||
case 'let': return new LetKeyword(startPos);
|
||||
case 'return': return new ReturnKeyword(startPos);
|
||||
case 'type': return new TypeKeyword(startPos);
|
||||
case 'if': return new IfKeyword(startPos);
|
||||
case 'else': return new ElseKeyword(startPos);
|
||||
case 'elif': return new ElifKeyword(startPos);
|
||||
case 'struct': return new StructKeyword(startPos);
|
||||
case 'enum': return new EnumKeyword(startPos);
|
||||
case 'match': return new MatchKeyword(startPos);
|
||||
case 'foreign': return new ForeignKeyword(startPos);
|
||||
case 'mod': return new ModKeyword(startPos);
|
||||
case 'forall': return new ForallKeyword(startPos);
|
||||
default:
|
||||
if (isUpper(text[0])) {
|
||||
return new IdentifierAlt(startPos, text);
|
||||
} else {
|
||||
return new Identifier(startPos, text);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
default:
|
||||
|
||||
// Nothing matched, so the current character is unrecognisable
|
||||
throw new ScanError(this.file, startPos, c0);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public getAll(): Token[] {
|
||||
const tokens = [];
|
||||
for (;;) {
|
||||
const t0 = this.get();
|
||||
if (t0.kind === SyntaxKind.EndOfFile) {
|
||||
break;
|
||||
}
|
||||
tokens.push(t0);
|
||||
}
|
||||
return tokens;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
const enum FrameType {
|
||||
Block,
|
||||
LineFold,
|
||||
Fallthrough,
|
||||
}
|
||||
|
||||
const INIT_POS = new TextPosition(0, 0, 0);
|
||||
|
||||
export class Punctuator extends BufferedStream<Token> {
|
||||
|
||||
private referencePositions: TextPosition[] = [ INIT_POS ];
|
||||
|
||||
private frameTypes: FrameType[] = [ FrameType.Block ];
|
||||
|
||||
public constructor(
|
||||
private tokens: Stream<Token>,
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
public read(): Token {
|
||||
|
||||
const t0 = this.tokens.peek(1);
|
||||
|
||||
switch (t0.kind) {
|
||||
case SyntaxKind.LBrace:
|
||||
this.frameTypes.push(FrameType.Fallthrough);
|
||||
break;
|
||||
case SyntaxKind.EndOfFile:
|
||||
{
|
||||
if (this.frameTypes.length === 1) {
|
||||
return t0;
|
||||
}
|
||||
const frameType = this.frameTypes.pop()!;
|
||||
switch (frameType) {
|
||||
case FrameType.LineFold:
|
||||
return new LineFoldEnd(t0.getStartPosition());
|
||||
case FrameType.Block:
|
||||
return new BlockEnd(t0.getStartPosition());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const refPos = this.referencePositions[this.referencePositions.length-1];
|
||||
const frameType = this.frameTypes[this.frameTypes.length-1];
|
||||
|
||||
switch (frameType) {
|
||||
|
||||
case FrameType.Fallthrough:
|
||||
{
|
||||
if (t0.kind === SyntaxKind.RBrace) {
|
||||
this.frameTypes.pop()!;
|
||||
}
|
||||
this.tokens.get();
|
||||
return t0;
|
||||
}
|
||||
|
||||
case FrameType.LineFold:
|
||||
{
|
||||
|
||||
// This important check verifies we're still inside the line-fold. If
|
||||
// we aren't, we need to clean up the stack a bit and eventually return
|
||||
// a token that indicates the line-fold ended.
|
||||
if (t0.getStartLine() > refPos.line
|
||||
&& t0.getStartColumn() <= refPos.column) {
|
||||
this.frameTypes.pop();
|
||||
this.referencePositions.pop();
|
||||
return new LineFoldEnd(t0.getStartPosition());
|
||||
}
|
||||
|
||||
const t1 = this.tokens.peek(2);
|
||||
if (t0.kind === SyntaxKind.Dot && t0.getEndLine() < t1.getStartLine()) {
|
||||
this.tokens.get();
|
||||
this.frameTypes.push(FrameType.Block);
|
||||
return new BlockStart(t0.getStartPosition());
|
||||
}
|
||||
|
||||
// If we got here, this is an ordinary token that is part of the
|
||||
// line-fold. Make sure to consume it and return it to the caller.
|
||||
this.tokens.get();
|
||||
return t0;
|
||||
}
|
||||
|
||||
case FrameType.Block:
|
||||
{
|
||||
|
||||
if (t0.getStartColumn() <= refPos.column) {
|
||||
|
||||
// We only get here if the current token is less indented than the
|
||||
// current reference token. Pop the block indicator and leave the
|
||||
// reference position be for the edge case where the parent line-fold
|
||||
// continues after the block.
|
||||
this.frameTypes.pop();
|
||||
return new BlockEnd(t0.getStartPosition());
|
||||
|
||||
}
|
||||
|
||||
this.frameTypes.push(FrameType.LineFold);
|
||||
this.referencePositions.push(t0.getStartPosition());
|
||||
|
||||
// In theory, we could explicitly issue a LineFoldStart and let all
|
||||
// tokens be passed through in the FrameType.LineFold case. It does add
|
||||
// more logic to the parser for no real benefit, which is why it was
|
||||
// omitted.
|
||||
this.tokens.get();
|
||||
return t0;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
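// In summary: Fallthrough frames (opened by '{') simply pass tokens through until the
// matching '}'; LineFold frames end when a token starts on a later line at or before
// the reference column; Block frames (opened by a trailing '.') end when a token is
// indented at or before the reference column. This is what turns indentation into the
// BlockStart/BlockEnd/LineFoldEnd tokens consumed by the parser.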
|
||||
|
||||
|
|
@@ -1,190 +0,0 @@
|
|||
import { warn } from "console";
|
||||
import { LetDeclaration, Pattern, SourceFile, Syntax, SyntaxKind } from "./cst";
|
||||
import { MultiMap, assertNever } from "./util";
|
||||
|
||||
export type NodeWithScope
|
||||
= SourceFile
|
||||
| LetDeclaration
|
||||
|
||||
export function isNodeWithScope(node: Syntax): node is NodeWithScope {
|
||||
return node.kind === SyntaxKind.SourceFile
|
||||
|| node.kind === SyntaxKind.LetDeclaration;
|
||||
}
|
||||
|
||||
export const enum Symkind {
|
||||
Var = 1,
|
||||
Type = 2,
|
||||
Module = 4,
|
||||
Typeclass = 8,
|
||||
Any = Var | Type | Module
|
||||
}
|
||||
|
||||
export class Scope {
|
||||
|
||||
private mapping = new MultiMap<string, [Symkind, Syntax]>();
|
||||
|
||||
public constructor(
|
||||
public node: NodeWithScope,
|
||||
) {
|
||||
this.scan(node);
|
||||
}
|
||||
|
||||
public get depth(): number {
|
||||
let out = 0;
|
||||
let curr = this.getParent();
|
||||
while (curr !== null) {
|
||||
out++;
|
||||
curr = curr.getParent();
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
private getParent(): Scope | null {
|
||||
let curr = this.node.parent;
|
||||
while (curr !== null) {
|
||||
if (isNodeWithScope(curr)) {
|
||||
return curr.getScope();
|
||||
}
|
||||
curr = curr.parent;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
private add(name: string, node: Syntax, kind: Symkind): void {
|
||||
this.mapping.add(name, [kind, node]);
|
||||
}
|
||||
|
||||
private scan(node: Syntax): void {
|
||||
switch (node.kind) {
|
||||
case SyntaxKind.ClassDeclaration:
|
||||
{
|
||||
this.add(node.name.text, node, Symkind.Typeclass);
|
||||
}
|
||||
case SyntaxKind.InstanceDeclaration:
|
||||
case SyntaxKind.SourceFile:
|
||||
{
|
||||
for (const element of node.elements) {
|
||||
this.scan(element);
|
||||
}
|
||||
break;
|
||||
}
|
||||
case SyntaxKind.ModuleDeclaration:
|
||||
{
|
||||
this.add(node.name.text, node, Symkind.Module);
|
||||
for (const element of node.elements) {
|
||||
this.scan(element);
|
||||
}
|
||||
break;
|
||||
}
|
||||
case SyntaxKind.ExpressionStatement:
|
||||
case SyntaxKind.ReturnStatement:
|
||||
case SyntaxKind.IfStatement:
|
||||
break;
|
||||
case SyntaxKind.TypeDeclaration:
|
||||
{
|
||||
this.add(node.name.text, node, Symkind.Type);
|
||||
break;
|
||||
}
|
||||
case SyntaxKind.EnumDeclaration:
|
||||
{
|
||||
this.add(node.name.text, node, Symkind.Type);
|
||||
if (node.members !== null) {
|
||||
for (const member of node.members) {
|
||||
this.add(member.name.text, member, Symkind.Var);
|
||||
}
|
||||
}
|
||||
break;
}
|
||||
case SyntaxKind.StructDeclaration:
|
||||
{
|
||||
this.add(node.name.text, node, Symkind.Type);
|
||||
// TODO remove this?
|
||||
// this.add(node.name.text, node, Symkind.Var);
|
||||
break;
|
||||
}
|
||||
case SyntaxKind.LetDeclaration:
|
||||
{
|
||||
for (const param of node.params) {
|
||||
switch (param.kind) {
|
||||
case SyntaxKind.PlainParam:
|
||||
this.scanPattern(param.pattern, param);
|
||||
break;
|
||||
case SyntaxKind.InstanceParam:
|
||||
this.add(node.name.text, param, Symkind.Var);
|
||||
break;
|
||||
default:
|
||||
assertNever(param);
|
||||
}
|
||||
}
|
||||
if (node === this.node) {
|
||||
if (node.body !== null && node.body.kind === SyntaxKind.BlockBody) {
|
||||
for (const element of node.body.elements) {
|
||||
this.scan(element);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
this.scanPattern(node.pattern, node);
|
||||
}
|
||||
break;
|
||||
}
|
||||
default:
|
||||
assertNever(node);
|
||||
}
|
||||
}
|
||||
|
||||
private scanPattern(node: Pattern, decl: Syntax): void {
|
||||
switch (node.kind) {
|
||||
case SyntaxKind.LiteralPattern:
|
||||
break;
|
||||
case SyntaxKind.NamedPattern:
|
||||
{
|
||||
this.add(node.name.text, decl, Symkind.Var);
|
||||
break;
|
||||
}
|
||||
case SyntaxKind.NestedPattern:
|
||||
this.scanPattern(node.pattern, decl);
|
||||
break;
|
||||
case SyntaxKind.NamedTuplePattern:
|
||||
{
|
||||
for (const element of node.elements) {
|
||||
this.scanPattern(element, decl);
|
||||
}
|
||||
break;
|
||||
}
|
||||
case SyntaxKind.StructPattern:
|
||||
{
|
||||
for (const member of node.members) {
|
||||
switch (member.kind) {
|
||||
case SyntaxKind.StructPatternField:
|
||||
{
|
||||
this.scanPattern(member.pattern, decl);
|
||||
break;
|
||||
}
|
||||
case SyntaxKind.PunnedStructPatternField:
|
||||
{
|
||||
this.add(member.name.text, decl, Symkind.Var);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
default:
|
||||
throw new Error(`Unexpected ${node}`);
|
||||
}
|
||||
}
|
||||
|
||||
public lookup(name: string, expectedKind: Symkind = Symkind.Any): Syntax | null {
|
||||
let curr: Scope | null = this;
|
||||
do {
|
||||
for (const [kind, decl] of curr.mapping.get(name)) {
|
||||
if (kind & expectedKind) {
|
||||
return decl;
|
||||
}
|
||||
}
|
||||
curr = curr.getParent();
|
||||
} while (curr !== null);
|
||||
return null;
|
||||
}
|
||||
|
||||
}
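// Usage sketch: `scope.lookup('foo', Symkind.Var)` walks outward through the enclosing
// scopes and returns the first declaration that was registered under a matching symbol
// kind, or null if no such declaration exists.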
|
||||
|
|
@@ -1,6 +0,0 @@
|
|||
|
||||
let foo n.
|
||||
# @expect_diagnostic "TypeMismatch"
|
||||
let f : String = 1
|
||||
return n
|
||||
|
|
@@ -1,15 +0,0 @@
|
|||
|
||||
enum Maybe a.
|
||||
Just a
|
||||
Nothing
|
||||
|
||||
enum App a b.
|
||||
MkApp (a b)
|
||||
|
||||
enum Foo.
|
||||
MkFoo (App Maybe Int)
|
||||
|
||||
let f : Foo = MkFoo (MkApp (Just 1))
|
||||
|
||||
# @expect_diagnostic "TypeMismatch"
|
||||
let g : Foo = MkFoo (MkApp (Just "foo"))
|
|
@@ -1,10 +0,0 @@
|
|||
|
||||
enum Maybe a.
|
||||
Just a
|
||||
Nothing
|
||||
|
||||
let right_1 : Maybe Int = Just 1
|
||||
let right_2 : Maybe String = Just "foo"
|
||||
# @expect_diagnostic "TypeMismatch"
|
||||
let wrong : Maybe Int = Just "foo"
|
||||
|
|
@@ -1,16 +0,0 @@
|
|||
|
||||
let is_even x.
|
||||
if x == 0.
|
||||
return True
|
||||
else.
|
||||
return is_odd (x-1)
|
||||
|
||||
let is_odd x.
|
||||
if x == 1.
|
||||
return False
|
||||
else.
|
||||
return is_even (x-1)
|
||||
|
||||
@:Bool is_even 1
|
||||
# @expect_diagnostic "TypeMismatch"
|
||||
is_even True
|
|
@@ -1,8 +0,0 @@
|
|||
|
||||
let fac n = fac_2 n
|
||||
|
||||
let fac_2 n = fac_3 n + fac n
|
||||
|
||||
let fac_3 n = fac_2 (n-1)
|
||||
|
||||
@:Int fac 1
|
|
@@ -1,6 +0,0 @@
|
|||
|
||||
let foo x.
|
||||
let bar y z = y + z - x
|
||||
return bar 1 2
|
||||
|
||||
@:Int foo 3
|
|
@@ -1,6 +0,0 @@
|
|||
|
||||
let id x = x
|
||||
|
||||
@:Int id 1
|
||||
@:String id "foo"
|
||||
@:Bool id True
|
|
@@ -1,5 +0,0 @@
|
|||
|
||||
let t1 = (1, True)
|
||||
|
||||
@:Int t1.0
|
||||
@:Bool t1.1
|
|
@@ -1,271 +0,0 @@
|
|||
|
||||
## Record types can be unified without causing an error
|
||||
|
||||
```
|
||||
struct Person.
|
||||
email: String
|
||||
age: Int
|
||||
|
||||
let bert
|
||||
= Person {
|
||||
email = "bar@boo.com",
|
||||
age = 32
|
||||
}
|
||||
let bob
|
||||
= Person {
|
||||
email = "boo",
|
||||
age = 43
|
||||
}
|
||||
|
||||
bert == bob
|
||||
```
|
||||
|
||||
## Return types are polymorphic
|
||||
|
||||
```
|
||||
let id x = x
|
||||
|
||||
id 1
|
||||
id "foo"
|
||||
id True
|
||||
```
|
||||
|
||||
## Nested definitions work
|
||||
|
||||
```
|
||||
let foo x.
|
||||
let bar y z = y + z - x
|
||||
bar
|
||||
|
||||
foo True
|
||||
```
|
||||
|
||||
## Everything that can be type-checked will be type-checked
|
||||
|
||||
```
|
||||
let foo n.
|
||||
let f : String = 1
|
||||
return n
|
||||
```
|
||||
|
||||
## Recursive definitions do not cause infinite loops in the type-checker
|
||||
|
||||
```
|
||||
let fac n = fac_2 n
|
||||
|
||||
let fac_2 n = fac_3 n + fac n
|
||||
|
||||
let fac_3 n = fac_2 (n-1)
|
||||
|
||||
not (fac 1)
|
||||
```
|
||||
|
||||
## Example with mutual recursion works
|
||||
|
||||
```
|
||||
let is_even x.
|
||||
if x == 0.
|
||||
return True
|
||||
else.
|
||||
return is_odd (x-1)
|
||||
|
||||
let is_odd x.
|
||||
if x == 1.
|
||||
return False
|
||||
else.
|
||||
return is_even (x-1)
|
||||
|
||||
not (is_even True)
|
||||
```
|
||||
|
||||
## Polymorphic records can be partially typed
|
||||
|
||||
```
|
||||
struct Timestamped a b.
|
||||
first: a
|
||||
second: b
|
||||
timestamp: Int
|
||||
|
||||
type Foo = Timestamped Int
|
||||
|
||||
type Bar = Foo Int
|
||||
|
||||
let t : Bar = Timestamped { first = "bar", second = 1, timestamp = 12345 }
|
||||
```
|
||||
|
||||
## Extensible records work
|
||||
|
||||
```
|
||||
struct Timestamped a.
|
||||
data: a
|
||||
timestamp: Int
|
||||
|
||||
let t = Timestamped { data = "foo", timestamp = 12345 }
|
||||
|
||||
t.data == 1
|
||||
t.data == "foo"
|
||||
|
||||
let u = Timestamped { data = True, timestamp = 12345 }
|
||||
|
||||
u.data == "foo"
|
||||
u.data == False
|
||||
```
|
||||
|
||||
## A recursive function is automatically instantiated
|
||||
|
||||
```
|
||||
let fac n.
|
||||
if n == 0.
|
||||
return 1
|
||||
else.
|
||||
return n * fac (n-"foo")
|
||||
```
|
||||
|
||||
## Enum-declarations are correctly typed
|
||||
|
||||
```
|
||||
enum Maybe a.
|
||||
Just a
|
||||
Nothing
|
||||
|
||||
let right_1 : Maybe Int = Just 1
|
||||
let right_2 : Maybe String = Just "foo"
|
||||
let wrong : Maybe Int = Just "foo"
|
||||
```
|
||||
|
||||
## Kind inference works
|
||||
|
||||
```
|
||||
enum Maybe a.
|
||||
Just a
|
||||
Nothing
|
||||
|
||||
let foo_1 : Maybe
|
||||
let foo_2 : Maybe Int
|
||||
let foo_3 : Maybe Int Int
|
||||
let foo_4 : Maybe Int Int Int
|
||||
```
|
||||
|
||||
## Can indirectly apply a polymorphic datatype to some type
|
||||
|
||||
```
|
||||
enum Maybe a.
|
||||
Just a
|
||||
Nothing
|
||||
|
||||
enum App a b.
|
||||
MkApp (a b)
|
||||
|
||||
enum Foo.
|
||||
MkFoo (App Maybe Int)
|
||||
|
||||
let f : Foo = MkFoo (MkApp (Just 1))
|
||||
```
|
||||
|
||||
## Record-declarations inside enum-declarations work
|
||||
|
||||
```
|
||||
enum Shape.
|
||||
Circle.
|
||||
radius: Int
|
||||
Rect.
|
||||
width: Int
|
||||
height: Int
|
||||
|
||||
let z = Circle { radius = 12 }
|
||||
let a = Rect { width = 12, height = 12 }
|
||||
|
||||
a == z
|
||||
```
|
||||
|
||||
## Tuple types are correctly inferred and unified
|
||||
|
||||
```
|
||||
let foo_1 : (Int, Int, Int) = (1, 2, 3)
|
||||
let foo_2 : (Int, Int, Int) = (1, 2, "foo")
|
||||
```
|
||||
|
||||
## Module references work
|
||||
|
||||
```
|
||||
mod CD.
|
||||
mod A.
|
||||
struct Foo
|
||||
mod B.
|
||||
let alpha: A.Foo
|
||||
```
|
||||
|
||||
## Rest-expressions on extensible records work
|
||||
|
||||
```
|
||||
let foo { x, y, .. } = x + y
|
||||
|
||||
foo { x = 1, y = 2 }
|
||||
foo { x = 1, y = 2, z = 3 }
|
||||
foo { x = 1, y = 2, z = 3, a = 4 }
|
||||
```
|
||||
|
||||
## A polymorphic function is properly generalized when assigned to a new variable
|
||||
|
||||
```
|
||||
let id x = x
|
||||
let id2 = id
|
||||
let id3 = id
|
||||
|
||||
id3 1
|
||||
id3 "bla"
|
||||
|
||||
id2 1
|
||||
id2 "bla"
|
||||
```
|
||||
|
||||
## Can omit a field from a record type
|
||||
|
||||
```
|
||||
let remove_x { x, ..rest } = rest
|
||||
let p1 = { x = 1, y = 2, z = 3 }
|
||||
(remove_x p1).x
|
||||
```
|
||||
|
||||
## Can project a field from some other fields
|
||||
|
||||
```
|
||||
struct Point.
|
||||
x: Int
|
||||
y: Int
|
||||
|
||||
let project { x, y, .. } = { x, y }
|
||||
|
||||
let p2 : Point = project { x = 1, y = 2 }
|
||||
let p3 : Point = project { x = 1, y = 2, z = 3 }
|
||||
let p3 : Point = project { x = 1, y = 2, z = 3, a = 4 }
|
||||
```
|
||||
|
||||
## Can assign records to a struct-type
|
||||
|
||||
```
|
||||
struct Vec2.
|
||||
x: Int
|
||||
y: Int
|
||||
|
||||
let p1 : Vec2 = { x = 1, y = 2 }
|
||||
```
|
||||
|
||||
```
|
||||
struct Vec3.
|
||||
x: Int
|
||||
y: Int
|
||||
z: Int
|
||||
|
||||
let p1 : Vec3 = { x = 1, y = 2, z = 3 }
|
||||
```
|
||||
|
||||
This one should fail:
|
||||
|
||||
```
|
||||
struct Vec2.
|
||||
x: Int
|
||||
y: Int
|
||||
|
||||
let p1 : Vec2 = { x = 1, y = 2, z = 3 }
|
||||
```
|
|
@@ -1,18 +0,0 @@
|
|||
|
||||
struct Person.
|
||||
email: String
|
||||
age: Int
|
||||
|
||||
let bert
|
||||
= Person {
|
||||
email = "bar@boo.com",
|
||||
age = 32
|
||||
}
|
||||
let bob
|
||||
= Person {
|
||||
email = "boo",
|
||||
age = 43
|
||||
}
|
||||
|
||||
# @expect_diagnostic TypeMismatch
|
||||
bert == bob
|
|
@@ -1,654 +0,0 @@
|
|||
import { InspectOptions } from "util";
|
||||
import { ClassDeclaration, EnumDeclaration, StructDeclaration, Syntax } from "./cst";
|
||||
import { InspectFn, assert, assertNever, toStringTag } from "./util";
|
||||
import { warn } from "console";
|
||||
|
||||
export enum TypeKind {
|
||||
Arrow,
|
||||
RegularVar,
|
||||
RigidVar,
|
||||
Con,
|
||||
App,
|
||||
Nominal,
|
||||
Field,
|
||||
Nil,
|
||||
Absent,
|
||||
Present,
|
||||
Tag,
|
||||
}
|
||||
|
||||
export abstract class TypeBase {
|
||||
|
||||
public abstract readonly kind: TypeKind;
|
||||
|
||||
public parent: Type = this as unknown as Type;
|
||||
|
||||
public next: Type = this as any;
|
||||
|
||||
public abstract node: Syntax | null;
|
||||
|
||||
public static join(a: Type, b: Type): void {
|
||||
const keep = a.next;
|
||||
a.next = b;
|
||||
b.next = keep;
|
||||
}
|
||||
|
||||
public abstract getTypeVars(): Iterable<TVar>;
|
||||
|
||||
public abstract shallowClone(): Type;
|
||||
|
||||
public abstract substitute(sub: TVSub): Type;
|
||||
|
||||
public find(): Type {
|
||||
let curr = this as unknown as Type;
|
||||
while (curr.parent !== curr) {
|
||||
curr.parent = curr.parent.parent;
|
||||
curr = curr.parent;
|
||||
}
|
||||
return curr;
|
||||
}
|
||||
|
||||
public set(newType: Type): void {
|
||||
this.find().parent = newType;
|
||||
}
|
||||
|
||||
public hasTypeVar(tv: TRegularVar): boolean {
|
||||
for (const other of this.getTypeVars()) {
|
||||
if (tv.id === other.id) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public abstract [toStringTag](depth: number, options: InspectOptions, inspect: InspectFn): string;
|
||||
|
||||
}
|
||||
|
||||
export function isType(value: any): value is Type {
|
||||
return value !== undefined
|
||||
&& value !== null
|
||||
&& value instanceof TypeBase;
|
||||
}
|
||||
|
||||
abstract class TVarBase extends TypeBase {
|
||||
|
||||
public context = new Set<ClassDeclaration>();
|
||||
|
||||
}
|
||||
|
||||
export function isTVar(type: Type): type is TVar {
|
||||
return type.kind === TypeKind.RegularVar
|
||||
|| type.kind === TypeKind.RigidVar;
|
||||
}
|
||||
|
||||
export class TRigidVar extends TVarBase {
|
||||
|
||||
public readonly kind = TypeKind.RigidVar;
|
||||
|
||||
public constructor(
|
||||
public id: number,
|
||||
public displayName: string,
|
||||
public node: Syntax | null = null
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
public *getTypeVars(): Iterable<TVar> {
|
||||
yield this;
|
||||
}
|
||||
|
||||
public shallowClone(): TRigidVar {
|
||||
return new TRigidVar(
|
||||
this.id,
|
||||
this.displayName,
|
||||
this.node
|
||||
);
|
||||
}
|
||||
|
||||
public substitute(sub: TVSub): Type {
|
||||
const other = sub.get(this);
|
||||
return other === undefined
|
||||
? this : other.substitute(sub);
|
||||
}
|
||||
|
||||
public [toStringTag]() {
|
||||
return this.displayName;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export class TRegularVar extends TVarBase {
|
||||
|
||||
public readonly kind = TypeKind.RegularVar;
|
||||
|
||||
public constructor(
|
||||
public id: number,
|
||||
public node: Syntax | null = null,
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
public *getTypeVars(): Iterable<TVar> {
|
||||
yield this;
|
||||
}
|
||||
|
||||
public shallowClone(): TRegularVar {
|
||||
return new TRegularVar(this.id, this.node);
|
||||
}
|
||||
|
||||
public substitute(sub: TVSub): Type {
|
||||
const other = sub.get(this);
|
||||
return other === undefined
|
||||
? this : other.substitute(sub);
|
||||
}
|
||||
|
||||
public [toStringTag]() {
|
||||
return 'a' + this.id;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export class TNil extends TypeBase {
|
||||
|
||||
public readonly kind = TypeKind.Nil;
|
||||
|
||||
public constructor(
|
||||
public node: Syntax | null = null
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
public substitute(_sub: TVSub): Type {
|
||||
return this;
|
||||
}
|
||||
|
||||
public shallowClone(): Type {
|
||||
return new TNil(this.node);
|
||||
}
|
||||
|
||||
public *getTypeVars(): Iterable<TVar> {
|
||||
|
||||
}
|
||||
|
||||
public [toStringTag]() {
|
||||
return '∂Abs';
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export class TAbsent extends TypeBase {
|
||||
|
||||
public readonly kind = TypeKind.Absent;
|
||||
|
||||
public constructor(
|
||||
public node: Syntax | null = null,
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
public substitute(_sub: TVSub): Type {
|
||||
return this;
|
||||
}
|
||||
|
||||
public shallowClone(): Type {
|
||||
return new TAbsent(this.node);
|
||||
}
|
||||
|
||||
public *getTypeVars(): Iterable<TVar> {
|
||||
|
||||
}
|
||||
|
||||
public [toStringTag]() {
|
||||
return 'Abs';
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export class TPresent extends TypeBase {
|
||||
|
||||
public readonly kind = TypeKind.Present;
|
||||
|
||||
public constructor(
|
||||
public type: Type,
|
||||
public node: Syntax | null = null,
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
public substitute(sub: TVSub): Type {
|
||||
return new TPresent(this.type.substitute(sub), this.node);
|
||||
}
|
||||
|
||||
public getTypeVars(): Iterable<TVar> {
|
||||
return this.type.getTypeVars();
|
||||
}
|
||||
|
||||
public shallowClone(): Type {
|
||||
return new TPresent(this.type, this.node);
|
||||
}
|
||||
|
||||
public [toStringTag](_depth: number, options: InspectOptions, inspect: InspectFn) {
|
||||
return 'Pre ' + inspect(this.type, options);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export class TArrow extends TypeBase {
|
||||
|
||||
public readonly kind = TypeKind.Arrow;
|
||||
|
||||
public constructor(
|
||||
public paramType: Type,
|
||||
public returnType: Type,
|
||||
public node: Syntax | null = null,
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
public static build(paramTypes: Type[], returnType: Type, node: Syntax | null = null): Type {
|
||||
let result = returnType;
|
||||
for (let i = paramTypes.length-1; i >= 0; i--) {
|
||||
result = new TArrow(paramTypes[i], result, node);
|
||||
}
|
||||
return result;
|
||||
}
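// e.g. TArrow.build([a, b], r) yields a -> (b -> r), i.e. the curried function type a -> b -> r.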
|
||||
|
||||
public *getTypeVars(): Iterable<TVar> {
|
||||
yield* this.paramType.getTypeVars();
|
||||
yield* this.returnType.getTypeVars();
|
||||
}
|
||||
|
||||
public shallowClone(): TArrow {
|
||||
return new TArrow(
|
||||
this.paramType,
|
||||
this.returnType,
|
||||
this.node,
|
||||
)
|
||||
}
|
||||
|
||||
public substitute(sub: TVSub): Type {
|
||||
let changed = false;
|
||||
const newParamType = this.paramType.substitute(sub);
|
||||
if (newParamType !== this.paramType) {
|
||||
changed = true;
|
||||
}
|
||||
const newReturnType = this.returnType.substitute(sub);
|
||||
if (newReturnType !== this.returnType) {
|
||||
changed = true;
|
||||
}
|
||||
return changed ? new TArrow(newParamType, newReturnType, this.node) : this;
|
||||
}
|
||||
|
||||
public [toStringTag](_depth: number, options: InspectOptions, inspect: InspectFn) {
|
||||
return inspect(this.paramType, options) + ' -> ' + inspect(this.returnType, options);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export class TCon extends TypeBase {
|
||||
|
||||
public readonly kind = TypeKind.Con;
|
||||
|
||||
public constructor(
|
||||
public id: number,
|
||||
public displayName: string,
|
||||
public node: Syntax | null = null,
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
public *getTypeVars(): Iterable<TVar> {
|
||||
|
||||
}
|
||||
|
||||
public shallowClone(): TCon {
|
||||
return new TCon(
|
||||
this.id,
|
||||
this.displayName,
|
||||
this.node,
|
||||
);
|
||||
}
|
||||
|
||||
public substitute(_sub: TVSub): Type {
|
||||
return this;
|
||||
}
|
||||
|
||||
public [toStringTag](_depth: number, _options: InspectOptions, _inspect: InspectFn) {
|
||||
return this.displayName;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export function buildTupleType(types: Type[]): Type {
|
||||
let out: Type = new TNil();
|
||||
types.forEach((type, i) => {
|
||||
out = new TField(i, new TPresent(type), out);
|
||||
});
|
||||
return out;
|
||||
}
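// Tuples are encoded as rows: buildTupleType([t0, t1]) yields
// TField(1, Pre t1, TField(0, Pre t0, TNil)), i.e. each element is keyed by its index
// and wrapped in TPresent, with the row terminated by TNil.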
|
||||
|
||||
export function buildTupleTypeWithLoc(elements: Array<[Syntax, Type]>, node: Syntax) {
|
||||
let out: Type = new TNil(node);
|
||||
elements.forEach(([el, type], i) => {
|
||||
out = new TField(i, new TPresent(type, el), out);
|
||||
});
|
||||
return out;
|
||||
}
|
||||
|
||||
export class TField extends TypeBase {
|
||||
|
||||
public readonly kind = TypeKind.Field;
|
||||
|
||||
public constructor(
|
||||
public name: string | number,
|
||||
public type: Type,
|
||||
public restType: Type,
|
||||
public node: Syntax | null = null,
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
public getTypeVars(): Iterable<TVar> {
|
||||
return this.type.getTypeVars();
|
||||
}
|
||||
|
||||
public shallowClone(): TField {
|
||||
return new TField(
|
||||
this.name,
|
||||
this.type,
|
||||
this.restType,
|
||||
this.node,
|
||||
);
|
||||
}
|
||||
|
||||
public static build(fields: Map<string, Type>, restType: Type): Type {
|
||||
let out = restType;
|
||||
for (const [name, type] of fields) {
|
||||
out = new TField(name, new TPresent(type, type.node), out, type.node);
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
public static sort(type: Type): Type {
|
||||
const fields = new Map<string | number, TField>();
|
||||
while (type.kind === TypeKind.Field) {
|
||||
fields.set(type.name, type);
|
||||
type = type.restType;
|
||||
}
|
||||
const keys = [...fields.keys()].sort().reverse();
|
||||
let out: Type = type;
|
||||
for (const key of keys) {
|
||||
const field = fields.get(key)!;
|
||||
out = new TField(key, field.type, out, field.node);
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
public substitute(sub: TVSub): Type {
|
||||
const newType = this.type.substitute(sub);
|
||||
const newRestType = this.restType.substitute(sub);
|
||||
return newType !== this.type || newRestType !== this.restType
|
||||
? new TField(this.name, newType, newRestType, this.node) : this;
|
||||
}
|
||||
|
||||
public [toStringTag](_depth: number, options: InspectOptions, inspect: InspectFn) {
|
||||
let out = '{ ' + this.name + ': ' + inspect(this.type, options);
|
||||
let type = this.restType;
|
||||
while (type.kind === TypeKind.Field) {
|
||||
out += '; ' + type.name + ': ' + inspect(type.type, options);
|
||||
type = type.restType;
|
||||
}
|
||||
if (type.kind !== TypeKind.Nil) {
|
||||
out += '; ' + inspect(type, options);
|
||||
}
|
||||
return out + ' }';
|
||||
}
|
||||
|
||||
}
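// Illustrative usage sketch, not part of the original file: TField.build turns
// a name-to-type map into a record row on top of a rest type, and TField.sort
// normalises field order so structurally equal rows compare equal.
// The TCon id and the field name below are made up for the example.
const exampleFields = new Map<string, Type>([
  [ 'flag', new TCon(1003, 'Bool') ],
]);
const exampleRecordRow = TField.sort(TField.build(exampleFields, new TNil()));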
|
||||
|
||||
export class TApp extends TypeBase {
|
||||
|
||||
public readonly kind = TypeKind.App;
|
||||
|
||||
public constructor(
|
||||
public left: Type,
|
||||
public right: Type,
|
||||
public node: Syntax | null = null
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
public static build(resultType: Type, types: Type[], node: Syntax | null = null): Type {
|
||||
for (let i = 0; i < types.length; i++) {
|
||||
resultType = new TApp(resultType, types[i], node);
|
||||
}
|
||||
return resultType;
|
||||
}
|
||||
|
||||
public *getTypeVars(): Iterable<TVar> {
|
||||
yield* this.left.getTypeVars();
|
||||
yield* this.right.getTypeVars();
|
||||
}
|
||||
|
||||
public shallowClone() {
|
||||
return new TApp(
|
||||
this.left,
|
||||
this.right,
|
||||
this.node
|
||||
);
|
||||
}
|
||||
|
||||
public substitute(sub: TVSub): Type {
|
||||
let changed = false;
|
||||
const newOperatorType = this.left.substitute(sub);
|
||||
if (newOperatorType !== this.left) {
|
||||
changed = true;
|
||||
}
|
||||
const newArgType = this.right.substitute(sub);
|
||||
if (newArgType !== this.right) {
|
||||
changed = true;
|
||||
}
|
||||
return changed ? new TApp(newOperatorType, newArgType, this.node) : this;
|
||||
}
|
||||
|
||||
public [toStringTag](_depth: number, options: InspectOptions, inspect: InspectFn) {
|
||||
return inspect(this.left, options) + ' ' + inspect(this.right, options);
|
||||
}
|
||||
|
||||
}
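// Illustrative usage sketch, not part of the original file: TApp.build applies
// a type constructor to its arguments one at a time, so List Int becomes a
// single TApp node. The TCon ids below are made up for the example.
const exampleListCon = new TCon(1004, 'List');
const exampleListOfInt = TApp.build(exampleListCon, [ new TCon(1005, 'Int') ]);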
|
||||
|
||||
export const labelTag = '____tag';
|
||||
|
||||
// export class TTag extends TypeBase {
|
||||
|
||||
// public readonly kind = TypeKind.Tag;
|
||||
|
||||
// public constructor(
|
||||
// public name: string,
|
||||
// public node: Syntax | null = null,
|
||||
// ) {
|
||||
// super();
|
||||
// }
|
||||
|
||||
// public shallowClone(): Type {
|
||||
// return new TTag(
|
||||
// this.name,
|
||||
// this.node,
|
||||
// );
|
||||
// }
|
||||
|
||||
// public *getTypeVars(): Iterable<TVar> {
|
||||
// // noop
|
||||
// }
|
||||
|
||||
// public substitute(sub: TVSub): Type {
|
||||
// return this;
|
||||
// }
|
||||
|
||||
// public [toStringTag]() {
|
||||
// return this.name;
|
||||
// }
|
||||
|
||||
// }
|
||||
|
||||
export type Type
|
||||
= TCon
|
||||
| TArrow
|
||||
| TRigidVar
|
||||
| TRegularVar
|
||||
| TApp
|
||||
| TField
|
||||
| TNil
|
||||
| TPresent
|
||||
| TAbsent
|
||||
// | TTag
|
||||
|
||||
export type TVar
|
||||
= TRegularVar
|
||||
| TRigidVar
|
||||
|
||||
|
||||
export function getSignature(type: Type): Type[] {
|
||||
const out = [];
|
||||
let stack = [ type ];
|
||||
for (;;) {
|
||||
const child = stack.pop()!;
|
||||
if (child.kind === TypeKind.App) {
|
||||
stack.push(child.left);
|
||||
stack.push(child.right);
|
||||
} else {
|
||||
out.push(child);
|
||||
}
|
||||
if (stack.length === 0) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
return out;
|
||||
}
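// Illustrative usage sketch, not part of the original file: getSignature
// flattens a chain of type applications into its constituent types. Because of
// the stack-based traversal above, arguments come out before the head
// constructor. The TCon ids below are made up for the example.
const exampleSignature = getSignature(
  TApp.build(new TCon(1006, 'Pair'), [ new TCon(1007, 'Int'), new TCon(1008, 'Bool') ])
);
// exampleSignature holds the Bool, Int and Pair constructors, in that order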
|
||||
|
||||
export function isSignature(type: Type): boolean {
|
||||
return type.kind === TypeKind.Con
|
||||
|| type.kind === TypeKind.App;
|
||||
}
|
||||
|
||||
export function assignableTo(left: Type, right: Type): boolean {
|
||||
if (left.kind === TypeKind.Con && right.kind === TypeKind.Con) {
|
||||
return left.id === right.id;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
export function typesEqual(a: Type, b: Type): boolean {
|
||||
if (a.kind !== b.kind) {
|
||||
return false;
|
||||
}
|
||||
switch (a.kind) {
|
||||
case TypeKind.Con:
|
||||
assert(b.kind === TypeKind.Con);
|
||||
return a.id === b.id;
|
||||
case TypeKind.RegularVar:
|
||||
assert(b.kind === TypeKind.RegularVar);
|
||||
return a.id === b.id;
|
||||
case TypeKind.RigidVar:
|
||||
assert(b.kind === TypeKind.RigidVar);
|
||||
return a.id === b.id;
|
||||
case TypeKind.Nil:
|
||||
case TypeKind.Absent:
|
||||
return true;
|
||||
case TypeKind.App:
|
||||
assert(b.kind === TypeKind.App);
|
||||
return typesEqual(a.left, b.left) && typesEqual(a.right, b.right);
|
||||
case TypeKind.Field:
|
||||
assert(b.kind === TypeKind.Field);
|
||||
return a.name === b.name && typesEqual(a.type, b.type) && typesEqual(a.restType, b.restType);
|
||||
case TypeKind.Arrow:
|
||||
assert(b.kind === TypeKind.Arrow);
|
||||
return typesEqual(a.paramType, b.paramType) && typesEqual(a.returnType, b.returnType);
|
||||
case TypeKind.Present:
|
||||
assert(b.kind === TypeKind.Present);
|
||||
return typesEqual(a.type, b.type);
|
||||
default:
|
||||
assertNever(a);
|
||||
}
|
||||
}
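// Illustrative usage sketch, not part of the original file: typesEqual is a
// structural comparison, so two freshly built applications over the same
// constructors compare equal. The TCon ids below are made up for the example.
const exampleMaybeCon = new TCon(1009, 'Maybe');
const exampleTypesEqual = typesEqual(
  new TApp(exampleMaybeCon, new TCon(1010, 'Int')),
  new TApp(exampleMaybeCon, new TCon(1010, 'Int')),
); // true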
|
||||
|
||||
export class TVSet {
|
||||
|
||||
private mapping = new Map<number, TVar>();
|
||||
|
||||
public constructor(iterable?: Iterable<TVar>) {
|
||||
if (iterable !== undefined) {
|
||||
for (const tv of iterable) {
|
||||
this.add(tv);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public add(tv: TVar): void {
|
||||
this.mapping.set(tv.id, tv);
|
||||
}
|
||||
|
||||
public has(tv: TVar): boolean {
|
||||
return this.mapping.has(tv.id);
|
||||
}
|
||||
|
||||
public intersectsType(type: Type): boolean {
|
||||
for (const tv of type.getTypeVars()) {
|
||||
if (this.has(tv)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public delete(tv: TVar): void {
|
||||
this.mapping.delete(tv.id);
|
||||
}
|
||||
|
||||
public get size(): number {
|
||||
return this.mapping.size;
|
||||
}
|
||||
|
||||
public [Symbol.iterator](): Iterator<TVar> {
|
||||
return this.mapping.values();
|
||||
}
|
||||
|
||||
public [toStringTag](_depth: number, options: InspectOptions, inspect: InspectFn) {
|
||||
let out = '{ ';
|
||||
let first = true;
|
||||
for (const tv of this) {
|
||||
if (first) first = false;
|
||||
else out += ', ';
|
||||
out += inspect(tv, options);
|
||||
}
|
||||
return out + ' }';
|
||||
}
|
||||
|
||||
}
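// Illustrative usage sketch, not part of the original file: TVSet stores type
// variables keyed by id, and intersectsType checks whether a type mentions any
// of them. `exampleFreeVar` stands in for a TVar produced by the checker; its
// construction is omitted here.
declare const exampleFreeVar: TVar;
const exampleFreeVars = new TVSet([ exampleFreeVar ]);
exampleFreeVars.intersectsType(new TCon(1011, 'Int')); // false: no type variables inside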
|
||||
|
||||
export class TVSub {
|
||||
|
||||
private mapping = new Map<number, Type>();
|
||||
|
||||
public set(tv: TVar, type: Type): void {
|
||||
this.mapping.set(tv.id, type);
|
||||
}
|
||||
|
||||
public get(tv: TVar): Type | undefined {
|
||||
return this.mapping.get(tv.id);
|
||||
}
|
||||
|
||||
public has(tv: TVar): boolean {
|
||||
return this.mapping.has(tv.id);
|
||||
}
|
||||
|
||||
public delete(tv: TVar): void {
|
||||
this.mapping.delete(tv.id);
|
||||
}
|
||||
|
||||
public values(): Iterable<Type> {
|
||||
return this.mapping.values();
|
||||
}
|
||||
|
||||
}
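// Illustrative usage sketch, not part of the original file: a TVSub maps type
// variables to types and is consumed by Type.substitute. `exampleSubstVar`
// stands in for a TVar obtained from unification; its construction is omitted.
declare const exampleSubstVar: TVar;
const exampleSub = new TVSub();
exampleSub.set(exampleSubstVar, new TCon(1012, 'Int'));
const exampleApplied = new TApp(new TCon(1013, 'List'), exampleSubstVar).substitute(exampleSub);
// exampleApplied is the application List Int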
|
|
@ -1,262 +0,0 @@
|
|||
|
||||
import path from "path"
|
||||
import stream from "stream"
|
||||
import { InspectOptions } from "util";
|
||||
|
||||
export const isDebug = process.env['NODE_ENV'] === 'development';
|
||||
|
||||
export const toStringTag = Symbol.for('nodejs.util.inspect.custom');
|
||||
|
||||
export type InspectFn = (value: any, options: InspectOptions) => string;
|
||||
|
||||
export function isIterable(value: any): value is Iterable<any> {
|
||||
if (value === undefined || value === null) {
|
||||
return false;
|
||||
}
|
||||
return typeof(value[Symbol.iterator]) === 'function';
|
||||
}
|
||||
|
||||
export function first<T>(iter: Iterator<T>): T | undefined {
|
||||
return iter.next().value;
|
||||
}
|
||||
|
||||
export function last<T>(iter: Iterator<T>): T | undefined {
|
||||
let prevValue;
|
||||
for (;;) {
|
||||
const { done, value } = iter.next();
|
||||
if (done) {
|
||||
return prevValue;
|
||||
}
|
||||
prevValue = value;
|
||||
}
|
||||
}
|
||||
|
||||
export function stripExtension(filepath: string): string {
|
||||
const basename = path.basename(filepath);
|
||||
const i = basename.lastIndexOf('.');
|
||||
if (i === -1) {
|
||||
return filepath;
|
||||
}
|
||||
return path.join(path.dirname(filepath), basename.substring(0, i));
|
||||
}
|
||||
|
||||
export class IndentWriter {
|
||||
|
||||
private atBlankLine = true;
|
||||
private indentLevel = 0;
|
||||
|
||||
public constructor(
|
||||
private output: stream.Writable,
|
||||
private indentation = ' ',
|
||||
) {
|
||||
|
||||
}
|
||||
|
||||
public write(text: string): void {
|
||||
for (const ch of text) {
|
||||
if (ch === '\n') {
|
||||
this.atBlankLine = true;
|
||||
} else if (!/[\t ]/.test(ch) && this.atBlankLine) {
|
||||
this.output.write(this.indentation.repeat(this.indentLevel));
|
||||
this.atBlankLine = false;
|
||||
}
|
||||
this.output.write(ch);
|
||||
}
|
||||
}
|
||||
|
||||
public indent(): void {
|
||||
this.indentLevel++;
|
||||
}
|
||||
|
||||
public dedent(): void {
|
||||
this.indentLevel--;
|
||||
}
|
||||
|
||||
}
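// Illustrative usage sketch, not part of the original file: IndentWriter
// prefixes every non-blank line with the current indentation level.
const exampleIndentWriter = new IndentWriter(process.stdout);
exampleIndentWriter.write('block:\n');
exampleIndentWriter.indent();
exampleIndentWriter.write('nested line\n');
exampleIndentWriter.dedent();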
|
||||
|
||||
const GITHUB_ISSUE_URL = 'https://github.com/boltlang/bolt/issues/';
|
||||
|
||||
export function assert(test: boolean): asserts test {
|
||||
if (!test) {
|
||||
throw new Error(`Assertion failed. See the stack trace for more information. You are invited to search this issue on GitHub or to create a new one at ${GITHUB_ISSUE_URL} .`);
|
||||
}
|
||||
}
|
||||
|
||||
export function implementationLimitation(test: boolean): asserts test {
|
||||
if (!test) {
|
||||
throw new Error(`We encountered a limitation to the implementation of this compiler. You are invited to search this issue on GitHub or to create a new one at ${GITHUB_ISSUE_URL} .`);
|
||||
}
|
||||
}
|
||||
|
||||
export function unreachable(): never {
|
||||
throw new Error(`Code that should never be executed was reached during operation.`);
|
||||
}
|
||||
|
||||
export function assertNever(value: never): never {
|
||||
console.error(value);
|
||||
throw new Error(`Assertion failed. See the stack trace for more information.`);
|
||||
}
|
||||
|
||||
export function countDigits(x: number, base: number = 10) {
|
||||
return x === 0 ? 1 : Math.ceil(Math.log(x+1) / Math.log(base));
|
||||
}
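// Illustrative check, not part of the original file: countDigits(0) yields 1
// and countDigits(1234) yields 4 in the default base 10.
const exampleDigitCount = countDigits(1234); // 4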
|
||||
|
||||
export function isEmpty<T>(iter: Iterable<T> | Iterator<T>): boolean {
|
||||
if ((iter as any)[Symbol.iterator] !== undefined) {
|
||||
iter = (iter as any)[Symbol.iterator]();
|
||||
}
|
||||
return !!(iter as Iterator<T>).next().done;
|
||||
}
|
||||
|
||||
export type JSONValue = null | boolean | number | string | JSONArray | JSONObject
|
||||
export type JSONArray = Array<JSONValue>;
|
||||
export type JSONObject = { [key: string]: JSONValue };
|
||||
|
||||
export class MultiDict<K, V> {
|
||||
|
||||
private mapping = new Map<K, V[]>();
|
||||
|
||||
public constructor(iterable?: Iterable<[K, V]>) {
|
||||
if (iterable) {
|
||||
for (const [key, value] of iterable) {
|
||||
this.add(key, value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public get(key: K): Iterable<V> {
|
||||
return this.mapping.get(key) ?? [];
|
||||
}
|
||||
|
||||
public add(key: K, value: V): void {
|
||||
const values = this.mapping.get(key);
|
||||
if (values) {
|
||||
values.push(value);
|
||||
} else {
|
||||
this.mapping.set(key, [ value ]);
|
||||
}
|
||||
}
|
||||
|
||||
public *[Symbol.iterator](): Iterator<[K, V]> {
|
||||
for (const [key, values] of this.mapping) {
|
||||
for (const value of values) {
|
||||
yield [key, value];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export interface Stream<T> {
|
||||
get(): T;
|
||||
peek(offset?: number): T;
|
||||
}
|
||||
|
||||
export abstract class BufferedStream<T> {
|
||||
|
||||
private buffer: Array<T> = [];
|
||||
|
||||
public abstract read(): T;
|
||||
|
||||
public get(): T {
|
||||
if (this.buffer.length > 0) {
|
||||
return this.buffer.shift()!;
|
||||
}
|
||||
return this.read();
|
||||
}
|
||||
|
||||
public peek(offset = 1): T {
|
||||
while (this.buffer.length < offset) {
|
||||
this.buffer.push(this.read());
|
||||
}
|
||||
return this.buffer[offset-1];
|
||||
}
|
||||
|
||||
}
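// Illustrative usage sketch, not part of the original file: a minimal
// BufferedStream over an array, showing that peek() buffers lookahead
// without consuming elements.
class ExampleArrayStream extends BufferedStream<number> {
  private offset = 0;
  public constructor(private elements: number[]) {
    super();
  }
  public read(): number {
    return this.elements[this.offset++];
  }
}
const exampleTokens = new ExampleArrayStream([ 1, 2, 3 ]);
exampleTokens.peek(2); // 2, still buffered
exampleTokens.get();   // 1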
|
||||
|
||||
export class MultiMap<K, V> {
|
||||
|
||||
private mapping = new Map<K, V[]>();
|
||||
|
||||
public get(key: K): V[] {
|
||||
return this.mapping.get(key) ?? [];
|
||||
}
|
||||
|
||||
public add(key: K, value: V): void {
|
||||
let elements = this.mapping.get(key);
|
||||
if (elements === undefined) {
|
||||
elements = [];
|
||||
this.mapping.set(key, elements);
|
||||
}
|
||||
elements.push(value);
|
||||
}
|
||||
|
||||
public has(key: K, value?: V): boolean {
|
||||
if (value === undefined) {
|
||||
return this.mapping.has(key);
|
||||
}
|
||||
const elements = this.mapping.get(key);
|
||||
if (elements === undefined) {
|
||||
return false;
|
||||
}
|
||||
return elements.indexOf(value) !== -1;
|
||||
}
|
||||
|
||||
public keys(): Iterable<K> {
|
||||
return this.mapping.keys();
|
||||
}
|
||||
|
||||
public *values(): Iterable<V> {
|
||||
for (const elements of this.mapping.values()) {
|
||||
yield* elements;
|
||||
}
|
||||
}
|
||||
|
||||
public *[Symbol.iterator](): Iterator<[K, V]> {
|
||||
for (const [key, elements] of this.mapping) {
|
||||
for (const value of elements) {
|
||||
yield [key, value];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public delete(key: K, value?: V): number {
|
||||
const elements = this.mapping.get(key);
|
||||
if (elements === undefined) {
|
||||
return 0;
|
||||
}
|
||||
if (value === undefined) {
|
||||
this.mapping.delete(key);
|
||||
return elements.length;
|
||||
}
|
||||
const i = elements.indexOf(value);
|
||||
if (i !== -1) {
|
||||
elements.splice(i, 1);
|
||||
if (elements.length === 0) {
|
||||
this.mapping.delete(key);
|
||||
}
|
||||
return 1;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
}
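// Illustrative usage sketch, not part of the original file: MultiMap keeps
// every value added under a key and supports deleting a single value.
const exampleEdges = new MultiMap<string, string>();
exampleEdges.add('a', 'b');
exampleEdges.add('a', 'c');
exampleEdges.delete('a', 'b'); // returns 1; 'a' still maps to [ 'c' ]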
|
||||
|
||||
export const nonenumerable: {
|
||||
(target: any, name: string): void;
|
||||
(target: any, name: string, desc: PropertyDescriptor): PropertyDescriptor;
|
||||
} = (target: any, name: string, desc?: any) => {
|
||||
if (desc) {
|
||||
desc.enumerable = false;
|
||||
return desc;
|
||||
}
|
||||
Object.defineProperty(target, name, {
|
||||
set(value) {
|
||||
Object.defineProperty(this, name, {
|
||||
value, writable: true, configurable: true,
|
||||
});
|
||||
},
|
||||
configurable: true,
|
||||
});
|
||||
};
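// Illustrative usage sketch, not part of the original file: nonenumerable
// keeps a field out of enumeration (Object.keys, JSON.stringify), which is
// useful for parent back-references. The class below is made up for the example.
class ExampleTreeNode {
  @nonenumerable
  public parent: ExampleTreeNode | null = null;
}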
|
||||
|
|
@ -1,20 +0,0 @@
|
|||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2016",
|
||||
"lib": [ "ES2016" ],
|
||||
"module": "commonjs",
|
||||
"moduleResolution": "node",
|
||||
"declaration": true,
|
||||
"declarationMap": true,
|
||||
"sourceMap": true,
|
||||
"outDir": "./lib",
|
||||
"removeComments": true,
|
||||
"importHelpers": true,
|
||||
"esModuleInterop": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"strict": true,
|
||||
"skipLibCheck": true,
|
||||
"experimentalDecorators": true
|
||||
},
|
||||
"include": ["src"]
|
||||
}
|
190
bootstrap/js/package-lock.json
generated
|
@ -1,190 +0,0 @@
|
|||
{
|
||||
"name": "bolt-workspace",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "bolt-workspace",
|
||||
"workspaces": [
|
||||
"compiler"
|
||||
],
|
||||
"dependencies": {
|
||||
"typescript": "^5.1.3"
|
||||
}
|
||||
},
|
||||
"babel-plugin": {
|
||||
"name": "babel-plugin-metadata",
|
||||
"version": "1.0.0",
|
||||
"extraneous": true,
|
||||
"license": "MIT",
|
||||
"devDependencies": {
|
||||
"@babel/types": "^7.21.4",
|
||||
"@types/babel__core": "^7.20.0"
|
||||
}
|
||||
},
|
||||
"babel-plugin-metadata": {
|
||||
"version": "1.0.0",
|
||||
"extraneous": true,
|
||||
"license": "MIT",
|
||||
"devDependencies": {
|
||||
"@babel/types": "^7.21.4",
|
||||
"@types/babel__core": "^7.20.0"
|
||||
}
|
||||
},
|
||||
"compiler": {
|
||||
"name": "@boltlang/bolt",
|
||||
"version": "0.0.1",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/which": "^3.0.0",
|
||||
"commander": "^11.0.0",
|
||||
"reflect-metadata": "^0.1.13",
|
||||
"source-map-support": "^0.5.21",
|
||||
"tslib": "^2.5.3",
|
||||
"which": "^3.0.1",
|
||||
"yagl": "^0.5.1"
|
||||
},
|
||||
"bin": {
|
||||
"bolt": "lib/bin/bolt.js"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^20.3.1"
|
||||
}
|
||||
},
|
||||
"compiler/node_modules/commander": {
|
||||
"version": "11.0.0",
|
||||
"resolved": "https://registry.npmjs.org/commander/-/commander-11.0.0.tgz",
|
||||
"integrity": "sha512-9HMlXtt/BNoYr8ooyjjNRdIilOTkVJXB+GhxMTtOKwk0R4j4lS4NpjuqmRxroBfnfTSHQIHQB7wryHhXarNjmQ==",
|
||||
"engines": {
|
||||
"node": ">=16"
|
||||
}
|
||||
},
|
||||
"harness": {
|
||||
"name": "@boltlang/harness",
|
||||
"version": "0.0.1",
|
||||
"extraneous": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@boltlang/bolt": "^0.0.1",
|
||||
"chalk": "^4.1.2",
|
||||
"commander": "^10.0.0",
|
||||
"commonmark": "^0.30.0",
|
||||
"fs-extra": "^11.1.1",
|
||||
"glob": "^10.0.0",
|
||||
"minimatch": "^9.0.0",
|
||||
"reflect-metadata": "^0.1.13",
|
||||
"typescript-rtti": "^0.8.3"
|
||||
},
|
||||
"bin": {
|
||||
"bolt-self-harness-commit": "lib/bin/bolt-self-harness-commit.js"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/commonmark": "^0.27.6",
|
||||
"@types/fs-extra": "^11.0.1",
|
||||
"@types/glob": "^8.1.0",
|
||||
"ava": "^5.2.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@boltlang/bolt": {
|
||||
"resolved": "compiler",
|
||||
"link": true
|
||||
},
|
||||
"node_modules/@types/node": {
|
||||
"version": "20.3.1",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.3.1.tgz",
|
||||
"integrity": "sha512-EhcH/wvidPy1WeML3TtYFGR83UzjxeWRen9V402T8aUGYsCHOmfoisV3ZSg03gAFIbLq8TnWOJ0f4cALtnSEUg==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@types/which": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@types/which/-/which-3.0.0.tgz",
|
||||
"integrity": "sha512-ASCxdbsrwNfSMXALlC3Decif9rwDMu+80KGp5zI2RLRotfMsTv7fHL8W8VDp24wymzDyIFudhUeSCugrgRFfHQ=="
|
||||
},
|
||||
"node_modules/buffer-from": {
|
||||
"version": "1.1.2",
|
||||
"resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz",
|
||||
"integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ=="
|
||||
},
|
||||
"node_modules/isexe": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
|
||||
"integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="
|
||||
},
|
||||
"node_modules/reflect-metadata": {
|
||||
"version": "0.1.13",
|
||||
"resolved": "https://registry.npmjs.org/reflect-metadata/-/reflect-metadata-0.1.13.tgz",
|
||||
"integrity": "sha512-Ts1Y/anZELhSsjMcU605fU9RE4Oi3p5ORujwbIKXfWa+0Zxs510Qrmrce5/Jowq3cHSZSJqBjypxmHarc+vEWg=="
|
||||
},
|
||||
"node_modules/source-map": {
|
||||
"version": "0.6.1",
|
||||
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
|
||||
"integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==",
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/source-map-support": {
|
||||
"version": "0.5.21",
|
||||
"resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz",
|
||||
"integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==",
|
||||
"dependencies": {
|
||||
"buffer-from": "^1.0.0",
|
||||
"source-map": "^0.6.0"
|
||||
}
|
||||
},
|
||||
"node_modules/tslib": {
|
||||
"version": "2.5.3",
|
||||
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.3.tgz",
|
||||
"integrity": "sha512-mSxlJJwl3BMEQCUNnxXBU9jP4JBktcEGhURcPR6VQVlnP0FdDEsIaz0C35dXNGLyRfrATNofF0F5p2KPxQgB+w=="
|
||||
},
|
||||
"node_modules/typescript": {
|
||||
"version": "5.1.3",
|
||||
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.1.3.tgz",
|
||||
"integrity": "sha512-XH627E9vkeqhlZFQuL+UsyAXEnibT0kWR2FWONlr4sTjvxyJYnyefgrkyECLzM5NenmKzRAy2rR/OlYLA1HkZw==",
|
||||
"bin": {
|
||||
"tsc": "bin/tsc",
|
||||
"tsserver": "bin/tsserver"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=14.17"
|
||||
}
|
||||
},
|
||||
"node_modules/which": {
|
||||
"version": "3.0.1",
|
||||
"resolved": "https://registry.npmjs.org/which/-/which-3.0.1.tgz",
|
||||
"integrity": "sha512-XA1b62dzQzLfaEOSQFTCOd5KFf/1VSzZo7/7TUjnya6u0vGGKzU96UQBZTAThCb2j4/xjBAyii1OhRLJEivHvg==",
|
||||
"dependencies": {
|
||||
"isexe": "^2.0.0"
|
||||
},
|
||||
"bin": {
|
||||
"node-which": "bin/which.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^14.17.0 || ^16.13.0 || >=18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/yagl": {
|
||||
"version": "0.5.1",
|
||||
"resolved": "https://registry.npmjs.org/yagl/-/yagl-0.5.1.tgz",
|
||||
"integrity": "sha512-DfJygWCefAq5eEOmwvVkiMFBUEQJs9aijGdhaYGSdj1TM2OqSbe/Vp37e/nMGXsgmWiryZapKMOtpYx3ECUrJQ=="
|
||||
},
|
||||
"testing": {
|
||||
"version": "0.0.1",
|
||||
"extraneous": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"chalk": "^5.2.0",
|
||||
"commander": "^10.0.0",
|
||||
"commonmark": "^0.30.0",
|
||||
"fs-extra": "^11.1.1",
|
||||
"glob": "^10.0.0",
|
||||
"minimatch": "^9.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/commonmark": "^0.27.6",
|
||||
"@types/fs-extra": "^11.0.1",
|
||||
"@types/glob": "^8.1.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,10 +0,0 @@
|
|||
{
|
||||
"name": "bolt-workspace",
|
||||
"private": true,
|
||||
"workspaces": [
|
||||
"compiler"
|
||||
],
|
||||
"dependencies": {
|
||||
"typescript": "^5.1.3"
|
||||
}
|
||||
}
|
205
cmake/Modules/FindLLVM.cmake
Normal file
|
@ -0,0 +1,205 @@
|
|||
# Code taken from LDC – the LLVM-based D Compiler
|
||||
# https://github.com/ldc-developers/ldc
|
||||
#
|
||||
# A given list of COMPONENTS is passed to llvm-config.
|
||||
#
|
||||
# The following variables are defined:
|
||||
# LLVM_FOUND - true if LLVM was found
|
||||
# LLVM_CXXFLAGS - C++ compiler flags for files that include LLVM headers.
|
||||
# LLVM_ENABLE_ASSERTIONS - Whether LLVM was built with enabled assertions (ON/OFF).
|
||||
# LLVM_INCLUDE_DIRS - Directory containing LLVM include files.
|
||||
# LLVM_IS_SHARED - Whether LLVM is going to be linked dynamically (ON) or statically (OFF).
|
||||
# LLVM_LDFLAGS - Linker flags to add when linking against LLVM
|
||||
# (includes -LLLVM_LIBRARY_DIRS).
|
||||
# LLVM_LIBRARIES - Full paths to the library files to link against.
|
||||
# LLVM_LIBRARY_DIRS - Directory containing LLVM libraries.
|
||||
# LLVM_NATIVE_ARCH - Backend corresponding to LLVM_HOST_TARGET, e.g.,
|
||||
# X86 for x86_64 and i686 hosts.
|
||||
# LLVM_ROOT_DIR - The root directory of the LLVM installation.
|
||||
# llvm-config is searched for in ${LLVM_ROOT_DIR}/bin.
|
||||
# LLVM_TARGETS_TO_BUILD - List of built LLVM targets.
|
||||
# LLVM_VERSION_MAJOR - Major version of LLVM.
|
||||
# LLVM_VERSION_MINOR - Minor version of LLVM.
|
||||
# LLVM_VERSION_STRING - Full LLVM version string (e.g. 6.0.0svn).
|
||||
# LLVM_VERSION_BASE_STRING - Base LLVM version string without git/svn suffix (e.g. 6.0.0).
|
||||
#
|
||||
# Note: The variable names were chosen in conformance with the official CMake
|
||||
# guidelines, see ${CMAKE_ROOT}/Modules/readme.txt.
|
||||
|
||||
# Try suffixed versions to pick up the newest LLVM install available on Debian
|
||||
# derivatives.
|
||||
# We also want a user-specified LLVM_ROOT_DIR to take precedence over the
|
||||
# system default locations such as /usr/local/bin. Executing find_program()
|
||||
# multiple times is the approach recommended in the docs.
|
||||
set(llvm_config_names llvm-config-17.0 llvm-config170 llvm-config-17
|
||||
llvm-config-16.0 llvm-config160 llvm-config-16
|
||||
llvm-config-15.0 llvm-config150 llvm-config-15
|
||||
llvm-config-14.0 llvm-config140 llvm-config-14
|
||||
llvm-config-13.0 llvm-config130 llvm-config-13
|
||||
llvm-config-12.0 llvm-config120 llvm-config-12
|
||||
llvm-config-11.0 llvm-config110 llvm-config-11
|
||||
llvm-config)
|
||||
find_program(LLVM_CONFIG
|
||||
NAMES ${llvm_config_names}
|
||||
PATHS ${LLVM_ROOT_DIR}/bin NO_DEFAULT_PATH
|
||||
DOC "Path to llvm-config tool.")
|
||||
find_program(LLVM_CONFIG NAMES ${llvm_config_names})
|
||||
if(APPLE)
|
||||
# extra fallbacks for MacPorts & Homebrew
|
||||
find_program(LLVM_CONFIG
|
||||
NAMES ${llvm_config_names}
|
||||
PATHS /opt/local/libexec/llvm-17/bin
|
||||
/opt/local/libexec/llvm-16/bin /opt/local/libexec/llvm-15/bin
|
||||
/opt/local/libexec/llvm-14/bin /opt/local/libexec/llvm-13/bin
|
||||
/opt/local/libexec/llvm-12/bin /opt/local/libexec/llvm-11/bin
|
||||
/opt/local/libexec/llvm/bin
|
||||
/usr/local/opt/llvm@17/bin
|
||||
/usr/local/opt/llvm@16/bin /usr/local/opt/llvm@15/bin
|
||||
/usr/local/opt/llvm@14/bin /usr/local/opt/llvm@13/bin
|
||||
/usr/local/opt/llvm@12/bin /usr/local/opt/llvm@11/bin
|
||||
/usr/local/opt/llvm/bin
|
||||
NO_DEFAULT_PATH)
|
||||
endif()
|
||||
|
||||
# Prints a warning/failure message depending on the required/quiet flags. Copied
|
||||
# from FindPackageHandleStandardArgs.cmake because it doesn't seem to be exposed.
|
||||
macro(_LLVM_FAIL _msg)
|
||||
if(LLVM_FIND_REQUIRED)
|
||||
message(FATAL_ERROR "${_msg}")
|
||||
else()
|
||||
if(NOT LLVM_FIND_QUIETLY)
|
||||
message(WARNING "${_msg}")
|
||||
endif()
|
||||
endif()
|
||||
endmacro()
|
||||
|
||||
|
||||
if(NOT LLVM_CONFIG)
|
||||
if(NOT LLVM_FIND_QUIETLY)
|
||||
_LLVM_FAIL("No LLVM installation (>= ${LLVM_FIND_VERSION}) found. Try manually setting the 'LLVM_ROOT_DIR' or 'LLVM_CONFIG' variables.")
|
||||
endif()
|
||||
else()
|
||||
macro(llvm_set var flag)
|
||||
if(LLVM_FIND_QUIETLY)
|
||||
set(_quiet_arg ERROR_QUIET)
|
||||
endif()
|
||||
set(result_code)
|
||||
execute_process(
|
||||
COMMAND ${LLVM_CONFIG} --${flag}
|
||||
RESULT_VARIABLE result_code
|
||||
OUTPUT_VARIABLE LLVM_${var}
|
||||
OUTPUT_STRIP_TRAILING_WHITESPACE
|
||||
${_quiet_arg}
|
||||
)
|
||||
if(result_code)
|
||||
_LLVM_FAIL("Failed to execute llvm-config ('${LLVM_CONFIG}', result code: '${result_code})'")
|
||||
else()
|
||||
if(${ARGV2})
|
||||
file(TO_CMAKE_PATH "${LLVM_${var}}" LLVM_${var})
|
||||
endif()
|
||||
endif()
|
||||
endmacro()
|
||||
macro(llvm_set_libs var flag components)
|
||||
if(LLVM_FIND_QUIETLY)
|
||||
set(_quiet_arg ERROR_QUIET)
|
||||
endif()
|
||||
set(result_code)
|
||||
execute_process(
|
||||
COMMAND ${LLVM_CONFIG} --${flag} ${components}
|
||||
RESULT_VARIABLE result_code
|
||||
OUTPUT_VARIABLE tmplibs
|
||||
OUTPUT_STRIP_TRAILING_WHITESPACE
|
||||
${_quiet_arg}
|
||||
)
|
||||
if(result_code)
|
||||
_LLVM_FAIL("Failed to execute llvm-config ('${LLVM_CONFIG}', result code: '${result_code})'")
|
||||
else()
|
||||
file(TO_CMAKE_PATH "${tmplibs}" tmplibs)
|
||||
string(REGEX MATCHALL "${pattern}[^ ]+" LLVM_${var} ${tmplibs})
|
||||
endif()
|
||||
endmacro()
|
||||
|
||||
llvm_set(VERSION_STRING version)
|
||||
llvm_set(CXXFLAGS cxxflags)
|
||||
llvm_set(INCLUDE_DIRS includedir true)
|
||||
llvm_set(ROOT_DIR prefix true)
|
||||
llvm_set(ENABLE_ASSERTIONS assertion-mode)
|
||||
|
||||
# The LLVM version string _may_ contain a git/svn suffix, so match only the x.y.z part
|
||||
string(REGEX MATCH "^[0-9]+[.][0-9]+[.][0-9]+" LLVM_VERSION_BASE_STRING "${LLVM_VERSION_STRING}")
|
||||
string(REGEX REPLACE "([0-9]+).*" "\\1" LLVM_VERSION_MAJOR "${LLVM_VERSION_STRING}" )
|
||||
string(REGEX REPLACE "[0-9]+\\.([0-9]+).*[A-Za-z]*" "\\1" LLVM_VERSION_MINOR "${LLVM_VERSION_STRING}" )
|
||||
|
||||
llvm_set(SHARED_MODE shared-mode)
|
||||
if(LLVM_SHARED_MODE STREQUAL "shared")
|
||||
set(LLVM_IS_SHARED ON)
|
||||
else()
|
||||
set(LLVM_IS_SHARED OFF)
|
||||
endif()
|
||||
|
||||
llvm_set(LDFLAGS ldflags)
|
||||
llvm_set(SYSTEM_LIBS system-libs)
|
||||
string(REPLACE "\n" " " LLVM_LDFLAGS "${LLVM_LDFLAGS} ${LLVM_SYSTEM_LIBS}")
|
||||
if(APPLE) # unclear why/how this happens
|
||||
string(REPLACE "-llibxml2.tbd" "-lxml2" LLVM_LDFLAGS ${LLVM_LDFLAGS})
|
||||
endif()
|
||||
|
||||
if(${LLVM_VERSION_MAJOR} LESS "15")
|
||||
# Versions below 15.0 do not support component windowsdriver
|
||||
list(REMOVE_ITEM LLVM_FIND_COMPONENTS "windowsdriver")
|
||||
endif()
|
||||
|
||||
llvm_set(LIBRARY_DIRS libdir true)
|
||||
llvm_set_libs(LIBRARIES libs "${LLVM_FIND_COMPONENTS}")
|
||||
# LLVM bug: llvm-config --libs tablegen returns -lLLVM-3.8.0
|
||||
# but the code for it is not in the shared library
|
||||
if("${LLVM_FIND_COMPONENTS}" MATCHES "tablegen")
|
||||
if (NOT "${LLVM_LIBRARIES}" MATCHES "LLVMTableGen")
|
||||
set(LLVM_LIBRARIES "${LLVM_LIBRARIES};-lLLVMTableGen")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
llvm_set(CMAKEDIR cmakedir)
|
||||
llvm_set(TARGETS_TO_BUILD targets-built)
|
||||
string(REGEX MATCHALL "${pattern}[^ ]+" LLVM_TARGETS_TO_BUILD ${LLVM_TARGETS_TO_BUILD})
|
||||
|
||||
# Parse LLVM_NATIVE_ARCH manually from LLVMConfig.cmake; including it leads to issues like
|
||||
# https://github.com/ldc-developers/ldc/issues/3079.
|
||||
file(STRINGS "${LLVM_CMAKEDIR}/LLVMConfig.cmake" LLVM_NATIVE_ARCH LIMIT_COUNT 1 REGEX "^set\\(LLVM_NATIVE_ARCH (.+)\\)$")
|
||||
string(REGEX MATCH "set\\(LLVM_NATIVE_ARCH (.+)\\)" LLVM_NATIVE_ARCH "${LLVM_NATIVE_ARCH}")
|
||||
set(LLVM_NATIVE_ARCH ${CMAKE_MATCH_1})
|
||||
message(STATUS "LLVM_NATIVE_ARCH: ${LLVM_NATIVE_ARCH}")
|
||||
|
||||
# On CMake builds of LLVM, the output of llvm-config --cxxflags does not
|
||||
# include -fno-rtti, leading to linker errors. Be sure to add it.
|
||||
if(NOT MSVC AND (CMAKE_COMPILER_IS_GNUCXX OR (${CMAKE_CXX_COMPILER_ID} STREQUAL "Clang")))
|
||||
if(NOT ${LLVM_CXXFLAGS} MATCHES "-fno-rtti")
|
||||
set(LLVM_CXXFLAGS "${LLVM_CXXFLAGS} -fno-rtti")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
# Remove some clang-specific flags for gcc.
|
||||
if(CMAKE_COMPILER_IS_GNUCXX)
|
||||
string(REPLACE "-Wcovered-switch-default " "" LLVM_CXXFLAGS ${LLVM_CXXFLAGS})
|
||||
string(REPLACE "-Wstring-conversion " "" LLVM_CXXFLAGS ${LLVM_CXXFLAGS})
|
||||
string(REPLACE "-fcolor-diagnostics " "" LLVM_CXXFLAGS ${LLVM_CXXFLAGS})
|
||||
# this requires more recent gcc versions (not supported by 4.9)
|
||||
string(REPLACE "-Werror=unguarded-availability-new " "" LLVM_CXXFLAGS ${LLVM_CXXFLAGS})
|
||||
endif()
|
||||
|
||||
# Remove gcc-specific flags for clang.
|
||||
if(${CMAKE_CXX_COMPILER_ID} MATCHES "Clang")
|
||||
string(REPLACE "-Wno-maybe-uninitialized " "" LLVM_CXXFLAGS ${LLVM_CXXFLAGS})
|
||||
endif()
|
||||
|
||||
if (${LLVM_VERSION_STRING} VERSION_LESS ${LLVM_FIND_VERSION})
|
||||
_LLVM_FAIL("Unsupported LLVM version ${LLVM_VERSION_STRING} found (${LLVM_CONFIG}). At least version ${LLVM_FIND_VERSION} is required. You can also set variables 'LLVM_ROOT_DIR' or 'LLVM_CONFIG' to use a different LLVM installation.")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
# Use the default CMake facilities for handling QUIET/REQUIRED.
|
||||
include(FindPackageHandleStandardArgs)
|
||||
|
||||
find_package_handle_standard_args(LLVM
|
||||
REQUIRED_VARS LLVM_ROOT_DIR
|
||||
VERSION_VAR LLVM_VERSION_STRING)
|
Some files were not shown because too many files have changed in this diff.